1// SPDX-License-Identifier: GPL-2.0-only
2/*
3 * Testsuite for BPF interpreter and BPF JIT compiler
4 *
5 * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
6 */
7
8#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
9
10#include <linux/init.h>
11#include <linux/module.h>
12#include <linux/filter.h>
13#include <linux/bpf.h>
14#include <linux/skbuff.h>
15#include <linux/netdevice.h>
16#include <linux/if_vlan.h>
17#include <linux/random.h>
18#include <linux/highmem.h>
19#include <linux/sched.h>
20
21/* General test specific settings */
22#define MAX_SUBTESTS 3
23#define MAX_TESTRUNS 1000
24#define MAX_DATA 128
25#define MAX_INSNS 512
26#define MAX_K 0xffffFFFF
27
28/* Few constants used to init test 'skb' */
29#define SKB_TYPE 3
30#define SKB_MARK 0x1234aaaa
31#define SKB_HASH 0x1234aaab
32#define SKB_QUEUE_MAP 123
33#define SKB_VLAN_TCI 0xffff
34#define SKB_VLAN_PRESENT 1
35#define SKB_DEV_IFINDEX 577
36#define SKB_DEV_TYPE 588
37
38/* Redefine REGs to make tests less verbose */
39#define R0 BPF_REG_0
40#define R1 BPF_REG_1
41#define R2 BPF_REG_2
42#define R3 BPF_REG_3
43#define R4 BPF_REG_4
44#define R5 BPF_REG_5
45#define R6 BPF_REG_6
46#define R7 BPF_REG_7
47#define R8 BPF_REG_8
48#define R9 BPF_REG_9
49#define R10 BPF_REG_10
50
51/* Flags that can be passed to test cases */
52#define FLAG_NO_DATA BIT(0)
53#define FLAG_EXPECTED_FAIL BIT(1)
54#define FLAG_SKB_FRAG BIT(2)
55#define FLAG_VERIFIER_ZEXT BIT(3)
56#define FLAG_LARGE_MEM BIT(4)
57
58enum {
59 CLASSIC = BIT(6), /* Old BPF instructions only. */
60 INTERNAL = BIT(7), /* Extended instruction set. */
61};
62
63#define TEST_TYPE_MASK (CLASSIC | INTERNAL)
64
65struct bpf_test {
66 const char *descr;
67 union {
68 struct sock_filter insns[MAX_INSNS];
69 struct bpf_insn insns_int[MAX_INSNS];
70 struct {
71 void *insns;
72 unsigned int len;
73 } ptr;
74 } u;
75 __u8 aux;
76 __u8 data[MAX_DATA];
77 struct {
78 int data_size;
79 __u32 result;
80 } test[MAX_SUBTESTS];
81 int (*fill_helper)(struct bpf_test *self);
82 int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
83 __u8 frag_data[MAX_DATA];
84 int stack_depth; /* for eBPF only, since tests don't call verifier */
85 int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
86};
87
88/* Large test cases need separate allocation and fill handler. */
89
90static int bpf_fill_maxinsns1(struct bpf_test *self)
91{
92 unsigned int len = BPF_MAXINSNS;
93 struct sock_filter *insn;
94 __u32 k = ~0;
95 int i;
96
97 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
98 if (!insn)
99 return -ENOMEM;
100
101 for (i = 0; i < len; i++, k--)
102 insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
103
104 self->u.ptr.insns = insn;
105 self->u.ptr.len = len;
106
107 return 0;
108}
109
110static int bpf_fill_maxinsns2(struct bpf_test *self)
111{
112 unsigned int len = BPF_MAXINSNS;
113 struct sock_filter *insn;
114 int i;
115
116 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
117 if (!insn)
118 return -ENOMEM;
119
120 for (i = 0; i < len; i++)
121 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
122
123 self->u.ptr.insns = insn;
124 self->u.ptr.len = len;
125
126 return 0;
127}
128
129static int bpf_fill_maxinsns3(struct bpf_test *self)
130{
131 unsigned int len = BPF_MAXINSNS;
132 struct sock_filter *insn;
133 struct rnd_state rnd;
134 int i;
135
136 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
137 if (!insn)
138 return -ENOMEM;
139
140 prandom_seed_state(state: &rnd, seed: 3141592653589793238ULL);
141
142 for (i = 0; i < len - 1; i++) {
143 __u32 k = prandom_u32_state(state: &rnd);
144
145 insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
146 }
147
148 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
149
150 self->u.ptr.insns = insn;
151 self->u.ptr.len = len;
152
153 return 0;
154}
155
156static int bpf_fill_maxinsns4(struct bpf_test *self)
157{
158 unsigned int len = BPF_MAXINSNS + 1;
159 struct sock_filter *insn;
160 int i;
161
162 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
163 if (!insn)
164 return -ENOMEM;
165
166 for (i = 0; i < len; i++)
167 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
168
169 self->u.ptr.insns = insn;
170 self->u.ptr.len = len;
171
172 return 0;
173}
174
175static int bpf_fill_maxinsns5(struct bpf_test *self)
176{
177 unsigned int len = BPF_MAXINSNS;
178 struct sock_filter *insn;
179 int i;
180
181 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
182 if (!insn)
183 return -ENOMEM;
184
185 insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
186
187 for (i = 1; i < len - 1; i++)
188 insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
189
190 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
191
192 self->u.ptr.insns = insn;
193 self->u.ptr.len = len;
194
195 return 0;
196}
197
198static int bpf_fill_maxinsns6(struct bpf_test *self)
199{
200 unsigned int len = BPF_MAXINSNS;
201 struct sock_filter *insn;
202 int i;
203
204 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
205 if (!insn)
206 return -ENOMEM;
207
208 for (i = 0; i < len - 1; i++)
209 insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
210 SKF_AD_VLAN_TAG_PRESENT);
211
212 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
213
214 self->u.ptr.insns = insn;
215 self->u.ptr.len = len;
216
217 return 0;
218}
219
220static int bpf_fill_maxinsns7(struct bpf_test *self)
221{
222 unsigned int len = BPF_MAXINSNS;
223 struct sock_filter *insn;
224 int i;
225
226 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
227 if (!insn)
228 return -ENOMEM;
229
230 for (i = 0; i < len - 4; i++)
231 insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
232 SKF_AD_CPU);
233
234 insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
235 insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
236 SKF_AD_CPU);
237 insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
238 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
239
240 self->u.ptr.insns = insn;
241 self->u.ptr.len = len;
242
243 return 0;
244}
245
246static int bpf_fill_maxinsns8(struct bpf_test *self)
247{
248 unsigned int len = BPF_MAXINSNS;
249 struct sock_filter *insn;
250 int i, jmp_off = len - 3;
251
252 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
253 if (!insn)
254 return -ENOMEM;
255
256 insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
257
258 for (i = 1; i < len - 1; i++)
259 insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
260
261 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
262
263 self->u.ptr.insns = insn;
264 self->u.ptr.len = len;
265
266 return 0;
267}
268
269static int bpf_fill_maxinsns9(struct bpf_test *self)
270{
271 unsigned int len = BPF_MAXINSNS;
272 struct bpf_insn *insn;
273 int i;
274
275 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
276 if (!insn)
277 return -ENOMEM;
278
279 insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
280 insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
281 insn[2] = BPF_EXIT_INSN();
282
283 for (i = 3; i < len - 2; i++)
284 insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
285
286 insn[len - 2] = BPF_EXIT_INSN();
287 insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
288
289 self->u.ptr.insns = insn;
290 self->u.ptr.len = len;
291
292 return 0;
293}
294
295static int bpf_fill_maxinsns10(struct bpf_test *self)
296{
297 unsigned int len = BPF_MAXINSNS, hlen = len - 2;
298 struct bpf_insn *insn;
299 int i;
300
301 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
302 if (!insn)
303 return -ENOMEM;
304
305 for (i = 0; i < hlen / 2; i++)
306 insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
307 for (i = hlen - 1; i > hlen / 2; i--)
308 insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
309
310 insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
311 insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
312 insn[hlen + 1] = BPF_EXIT_INSN();
313
314 self->u.ptr.insns = insn;
315 self->u.ptr.len = len;
316
317 return 0;
318}
319
320static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
321 unsigned int plen)
322{
323 struct sock_filter *insn;
324 unsigned int rlen;
325 int i, j;
326
327 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
328 if (!insn)
329 return -ENOMEM;
330
331 rlen = (len % plen) - 1;
332
333 for (i = 0; i + plen < len; i += plen)
334 for (j = 0; j < plen; j++)
335 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
336 plen - 1 - j, 0, 0);
337 for (j = 0; j < rlen; j++)
338 insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
339 0, 0);
340
341 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
342
343 self->u.ptr.insns = insn;
344 self->u.ptr.len = len;
345
346 return 0;
347}
348
349static int bpf_fill_maxinsns11(struct bpf_test *self)
350{
351 /* Hits 70 passes on x86_64 and triggers NOPs padding. */
352 return __bpf_fill_ja(self, BPF_MAXINSNS, plen: 68);
353}
354
355static int bpf_fill_maxinsns12(struct bpf_test *self)
356{
357 unsigned int len = BPF_MAXINSNS;
358 struct sock_filter *insn;
359 int i = 0;
360
361 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
362 if (!insn)
363 return -ENOMEM;
364
365 insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
366
367 for (i = 1; i < len - 1; i++)
368 insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
369
370 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
371
372 self->u.ptr.insns = insn;
373 self->u.ptr.len = len;
374
375 return 0;
376}
377
378static int bpf_fill_maxinsns13(struct bpf_test *self)
379{
380 unsigned int len = BPF_MAXINSNS;
381 struct sock_filter *insn;
382 int i = 0;
383
384 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
385 if (!insn)
386 return -ENOMEM;
387
388 for (i = 0; i < len - 3; i++)
389 insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
390
391 insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
392 insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
393 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
394
395 self->u.ptr.insns = insn;
396 self->u.ptr.len = len;
397
398 return 0;
399}
400
401static int bpf_fill_ja(struct bpf_test *self)
402{
403 /* Hits exactly 11 passes on x86_64 JIT. */
404 return __bpf_fill_ja(self, len: 12, plen: 9);
405}
406
407static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
408{
409 unsigned int len = BPF_MAXINSNS;
410 struct sock_filter *insn;
411 int i;
412
413 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
414 if (!insn)
415 return -ENOMEM;
416
417 for (i = 0; i < len - 1; i += 2) {
418 insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
419 insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
420 SKF_AD_OFF + SKF_AD_CPU);
421 }
422
423 insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
424
425 self->u.ptr.insns = insn;
426 self->u.ptr.len = len;
427
428 return 0;
429}
430
431static int __bpf_fill_stxdw(struct bpf_test *self, int size)
432{
433 unsigned int len = BPF_MAXINSNS;
434 struct bpf_insn *insn;
435 int i;
436
437 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
438 if (!insn)
439 return -ENOMEM;
440
441 insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
442 insn[1] = BPF_ST_MEM(size, R10, -40, 42);
443
444 for (i = 2; i < len - 2; i++)
445 insn[i] = BPF_STX_XADD(size, R10, R0, -40);
446
447 insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
448 insn[len - 1] = BPF_EXIT_INSN();
449
450 self->u.ptr.insns = insn;
451 self->u.ptr.len = len;
452 self->stack_depth = 40;
453
454 return 0;
455}
456
457static int bpf_fill_stxw(struct bpf_test *self)
458{
459 return __bpf_fill_stxdw(self, BPF_W);
460}
461
462static int bpf_fill_stxdw(struct bpf_test *self)
463{
464 return __bpf_fill_stxdw(self, BPF_DW);
465}
466
467static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
468{
469 struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
470
471 memcpy(insns, tmp, sizeof(tmp));
472 return 2;
473}
474
475/*
476 * Branch conversion tests. Complex operations can expand to a lot
477 * of instructions when JITed. This in turn may cause jump offsets
478 * to overflow the field size of the native instruction, triggering
479 * a branch conversion mechanism in some JITs.
480 */
481static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
482{
483 struct bpf_insn *insns;
484 int len = S16_MAX + 5;
485 int i;
486
487 insns = kmalloc_array(n: len, size: sizeof(*insns), GFP_KERNEL);
488 if (!insns)
489 return -ENOMEM;
490
491 i = __bpf_ld_imm64(insns, R1, imm64: 0x0123456789abcdefULL);
492 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
493 insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
494 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
495 insns[i++] = BPF_EXIT_INSN();
496
497 while (i < len - 1) {
498 static const int ops[] = {
499 BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
500 BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
501 };
502 int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
503
504 if (i & 1)
505 insns[i++] = BPF_ALU32_REG(op, R0, R1);
506 else
507 insns[i++] = BPF_ALU64_REG(op, R0, R1);
508 }
509
510 insns[i++] = BPF_EXIT_INSN();
511 self->u.ptr.insns = insns;
512 self->u.ptr.len = len;
513 BUG_ON(i != len);
514
515 return 0;
516}
517
518/* Branch taken by runtime decision */
519static int bpf_fill_max_jmp_taken(struct bpf_test *self)
520{
521 return __bpf_fill_max_jmp(self, BPF_JEQ, imm: 1);
522}
523
524/* Branch not taken by runtime decision */
525static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
526{
527 return __bpf_fill_max_jmp(self, BPF_JEQ, imm: 0);
528}
529
530/* Branch always taken, known at JIT time */
531static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
532{
533 return __bpf_fill_max_jmp(self, BPF_JGE, imm: 0);
534}
535
536/* Branch never taken, known at JIT time */
537static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
538{
539 return __bpf_fill_max_jmp(self, BPF_JLT, imm: 0);
540}
541
542/* ALU result computation used in tests */
543static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
544{
545 *res = 0;
546 switch (op) {
547 case BPF_MOV:
548 *res = v2;
549 break;
550 case BPF_AND:
551 *res = v1 & v2;
552 break;
553 case BPF_OR:
554 *res = v1 | v2;
555 break;
556 case BPF_XOR:
557 *res = v1 ^ v2;
558 break;
559 case BPF_LSH:
560 *res = v1 << v2;
561 break;
562 case BPF_RSH:
563 *res = v1 >> v2;
564 break;
565 case BPF_ARSH:
566 *res = v1 >> v2;
567 if (v2 > 0 && v1 > S64_MAX)
568 *res |= ~0ULL << (64 - v2);
569 break;
570 case BPF_ADD:
571 *res = v1 + v2;
572 break;
573 case BPF_SUB:
574 *res = v1 - v2;
575 break;
576 case BPF_MUL:
577 *res = v1 * v2;
578 break;
579 case BPF_DIV:
580 if (v2 == 0)
581 return false;
582 *res = div64_u64(dividend: v1, divisor: v2);
583 break;
584 case BPF_MOD:
585 if (v2 == 0)
586 return false;
587 div64_u64_rem(dividend: v1, divisor: v2, remainder: res);
588 break;
589 }
590 return true;
591}
592
593/* Test an ALU shift operation for all valid shift values */
594static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
595 u8 mode, bool alu32)
596{
597 static const s64 regs[] = {
598 0x0123456789abcdefLL, /* dword > 0, word < 0 */
599 0xfedcba9876543210LL, /* dword < 0, word > 0 */
600 0xfedcba0198765432LL, /* dword < 0, word < 0 */
601 0x0123458967abcdefLL, /* dword > 0, word > 0 */
602 };
603 int bits = alu32 ? 32 : 64;
604 int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
605 struct bpf_insn *insn;
606 int imm, k;
607 int i = 0;
608
609 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
610 if (!insn)
611 return -ENOMEM;
612
613 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
614
615 for (k = 0; k < ARRAY_SIZE(regs); k++) {
616 s64 reg = regs[k];
617
618 i += __bpf_ld_imm64(insns: &insn[i], R3, imm64: reg);
619
620 for (imm = 0; imm < bits; imm++) {
621 u64 val;
622
623 /* Perform operation */
624 insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
625 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
626 if (alu32) {
627 if (mode == BPF_K)
628 insn[i++] = BPF_ALU32_IMM(op, R1, imm);
629 else
630 insn[i++] = BPF_ALU32_REG(op, R1, R2);
631
632 if (op == BPF_ARSH)
633 reg = (s32)reg;
634 else
635 reg = (u32)reg;
636 __bpf_alu_result(res: &val, v1: reg, v2: imm, op);
637 val = (u32)val;
638 } else {
639 if (mode == BPF_K)
640 insn[i++] = BPF_ALU64_IMM(op, R1, imm);
641 else
642 insn[i++] = BPF_ALU64_REG(op, R1, R2);
643 __bpf_alu_result(res: &val, v1: reg, v2: imm, op);
644 }
645
646 /*
647 * When debugging a JIT that fails this test, one
648 * can write the immediate value to R0 here to find
649 * out which operand values that fail.
650 */
651
652 /* Load reference and check the result */
653 i += __bpf_ld_imm64(insns: &insn[i], R4, imm64: val);
654 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
655 insn[i++] = BPF_EXIT_INSN();
656 }
657 }
658
659 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
660 insn[i++] = BPF_EXIT_INSN();
661
662 self->u.ptr.insns = insn;
663 self->u.ptr.len = len;
664 BUG_ON(i != len);
665
666 return 0;
667}
668
669static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
670{
671 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, alu32: false);
672}
673
674static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
675{
676 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, alu32: false);
677}
678
679static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
680{
681 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, alu32: false);
682}
683
684static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
685{
686 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, alu32: false);
687}
688
689static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
690{
691 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, alu32: false);
692}
693
694static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
695{
696 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, alu32: false);
697}
698
699static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
700{
701 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, alu32: true);
702}
703
704static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
705{
706 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, alu32: true);
707}
708
709static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
710{
711 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, alu32: true);
712}
713
714static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
715{
716 return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, alu32: true);
717}
718
719static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
720{
721 return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, alu32: true);
722}
723
724static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
725{
726 return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, alu32: true);
727}
728
729/*
730 * Test an ALU register shift operation for all valid shift values
731 * for the case when the source and destination are the same.
732 */
733static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
734 bool alu32)
735{
736 int bits = alu32 ? 32 : 64;
737 int len = 3 + 6 * bits;
738 struct bpf_insn *insn;
739 int i = 0;
740 u64 val;
741
742 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
743 if (!insn)
744 return -ENOMEM;
745
746 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
747
748 for (val = 0; val < bits; val++) {
749 u64 res;
750
751 /* Perform operation */
752 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
753 if (alu32)
754 insn[i++] = BPF_ALU32_REG(op, R1, R1);
755 else
756 insn[i++] = BPF_ALU64_REG(op, R1, R1);
757
758 /* Compute the reference result */
759 __bpf_alu_result(res: &res, v1: val, v2: val, op);
760 if (alu32)
761 res = (u32)res;
762 i += __bpf_ld_imm64(insns: &insn[i], R2, imm64: res);
763
764 /* Check the actual result */
765 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
766 insn[i++] = BPF_EXIT_INSN();
767 }
768
769 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
770 insn[i++] = BPF_EXIT_INSN();
771
772 self->u.ptr.insns = insn;
773 self->u.ptr.len = len;
774 BUG_ON(i != len);
775
776 return 0;
777}
778
779static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
780{
781 return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, alu32: false);
782}
783
784static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
785{
786 return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, alu32: false);
787}
788
789static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
790{
791 return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, alu32: false);
792}
793
794static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
795{
796 return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, alu32: true);
797}
798
799static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
800{
801 return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, alu32: true);
802}
803
804static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
805{
806 return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, alu32: true);
807}
808
809/*
810 * Common operand pattern generator for exhaustive power-of-two magnitudes
811 * tests. The block size parameters can be adjusted to increase/reduce the
812 * number of combinatons tested and thereby execution speed and memory
813 * footprint.
814 */
815
816static inline s64 value(int msb, int delta, int sign)
817{
818 return sign * (1LL << msb) + delta;
819}
820
821static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
822 int dbits, int sbits, int block1, int block2,
823 int (*emit)(struct bpf_test*, void*,
824 struct bpf_insn*, s64, s64))
825{
826 static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
827 struct bpf_insn *insns;
828 int di, si, bt, db, sb;
829 int count, len, k;
830 int extra = 1 + 2;
831 int i = 0;
832
833 /* Total number of iterations for the two pattern */
834 count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
835 count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
836
837 /* Compute the maximum number of insns and allocate the buffer */
838 len = extra + count * (*emit)(self, arg, NULL, 0, 0);
839 insns = kmalloc_array(n: len, size: sizeof(*insns), GFP_KERNEL);
840 if (!insns)
841 return -ENOMEM;
842
843 /* Add head instruction(s) */
844 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
845
846 /*
847 * Pattern 1: all combinations of power-of-two magnitudes and sign,
848 * and with a block of contiguous values around each magnitude.
849 */
850 for (di = 0; di < dbits - 1; di++) /* Dst magnitudes */
851 for (si = 0; si < sbits - 1; si++) /* Src magnitudes */
852 for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
853 for (db = -(block1 / 2);
854 db < (block1 + 1) / 2; db++)
855 for (sb = -(block1 / 2);
856 sb < (block1 + 1) / 2; sb++) {
857 s64 dst, src;
858
859 dst = value(msb: di, delta: db, sign: sgn[k][0]);
860 src = value(msb: si, delta: sb, sign: sgn[k][1]);
861 i += (*emit)(self, arg,
862 &insns[i],
863 dst, src);
864 }
865 /*
866 * Pattern 2: all combinations for a larger block of values
867 * for each power-of-two magnitude and sign, where the magnitude is
868 * the same for both operands.
869 */
870 for (bt = 0; bt < max(dbits, sbits) - 1; bt++) /* Magnitude */
871 for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
872 for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
873 for (sb = -(block2 / 2);
874 sb < (block2 + 1) / 2; sb++) {
875 s64 dst, src;
876
877 dst = value(msb: bt % dbits, delta: db, sign: sgn[k][0]);
878 src = value(msb: bt % sbits, delta: sb, sign: sgn[k][1]);
879 i += (*emit)(self, arg, &insns[i],
880 dst, src);
881 }
882
883 /* Append tail instructions */
884 insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
885 insns[i++] = BPF_EXIT_INSN();
886 BUG_ON(i > len);
887
888 self->u.ptr.insns = insns;
889 self->u.ptr.len = i;
890
891 return 0;
892}
893
894/*
895 * Block size parameters used in pattern tests below. une as needed to
896 * increase/reduce the number combinations tested, see following examples.
897 * block values per operand MSB
898 * ----------------------------------------
899 * 0 none
900 * 1 (1 << MSB)
901 * 2 (1 << MSB) + [-1, 0]
902 * 3 (1 << MSB) + [-1, 0, 1]
903 */
904#define PATTERN_BLOCK1 1
905#define PATTERN_BLOCK2 5
906
907/* Number of test runs for a pattern test */
908#define NR_PATTERN_RUNS 1
909
910/*
911 * Exhaustive tests of ALU operations for all combinations of power-of-two
912 * magnitudes of the operands, both for positive and negative values. The
913 * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
914 * emit different code depending on the magnitude of the immediate value.
915 */
916static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
917 struct bpf_insn *insns, s64 dst, s64 imm)
918{
919 int op = *(int *)arg;
920 int i = 0;
921 u64 res;
922
923 if (!insns)
924 return 7;
925
926 if (__bpf_alu_result(res: &res, v1: dst, v2: (s32)imm, op)) {
927 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
928 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: res);
929 insns[i++] = BPF_ALU64_IMM(op, R1, imm);
930 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
931 insns[i++] = BPF_EXIT_INSN();
932 }
933
934 return i;
935}
936
937static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
938 struct bpf_insn *insns, s64 dst, s64 imm)
939{
940 int op = *(int *)arg;
941 int i = 0;
942 u64 res;
943
944 if (!insns)
945 return 7;
946
947 if (__bpf_alu_result(res: &res, v1: (u32)dst, v2: (u32)imm, op)) {
948 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
949 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: (u32)res);
950 insns[i++] = BPF_ALU32_IMM(op, R1, imm);
951 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
952 insns[i++] = BPF_EXIT_INSN();
953 }
954
955 return i;
956}
957
958static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
959 struct bpf_insn *insns, s64 dst, s64 src)
960{
961 int op = *(int *)arg;
962 int i = 0;
963 u64 res;
964
965 if (!insns)
966 return 9;
967
968 if (__bpf_alu_result(res: &res, v1: dst, v2: src, op)) {
969 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
970 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
971 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: res);
972 insns[i++] = BPF_ALU64_REG(op, R1, R2);
973 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
974 insns[i++] = BPF_EXIT_INSN();
975 }
976
977 return i;
978}
979
980static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
981 struct bpf_insn *insns, s64 dst, s64 src)
982{
983 int op = *(int *)arg;
984 int i = 0;
985 u64 res;
986
987 if (!insns)
988 return 9;
989
990 if (__bpf_alu_result(res: &res, v1: (u32)dst, v2: (u32)src, op)) {
991 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
992 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
993 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: (u32)res);
994 insns[i++] = BPF_ALU32_REG(op, R1, R2);
995 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
996 insns[i++] = BPF_EXIT_INSN();
997 }
998
999 return i;
1000}
1001
1002static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
1003{
1004 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 32,
1005 PATTERN_BLOCK1, PATTERN_BLOCK2,
1006 emit: &__bpf_emit_alu64_imm);
1007}
1008
1009static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
1010{
1011 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 32,
1012 PATTERN_BLOCK1, PATTERN_BLOCK2,
1013 emit: &__bpf_emit_alu32_imm);
1014}
1015
1016static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
1017{
1018 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 64,
1019 PATTERN_BLOCK1, PATTERN_BLOCK2,
1020 emit: &__bpf_emit_alu64_reg);
1021}
1022
1023static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
1024{
1025 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 64,
1026 PATTERN_BLOCK1, PATTERN_BLOCK2,
1027 emit: &__bpf_emit_alu32_reg);
1028}
1029
1030/* ALU64 immediate operations */
1031static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
1032{
1033 return __bpf_fill_alu64_imm(self, BPF_MOV);
1034}
1035
1036static int bpf_fill_alu64_and_imm(struct bpf_test *self)
1037{
1038 return __bpf_fill_alu64_imm(self, BPF_AND);
1039}
1040
1041static int bpf_fill_alu64_or_imm(struct bpf_test *self)
1042{
1043 return __bpf_fill_alu64_imm(self, BPF_OR);
1044}
1045
1046static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
1047{
1048 return __bpf_fill_alu64_imm(self, BPF_XOR);
1049}
1050
1051static int bpf_fill_alu64_add_imm(struct bpf_test *self)
1052{
1053 return __bpf_fill_alu64_imm(self, BPF_ADD);
1054}
1055
1056static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
1057{
1058 return __bpf_fill_alu64_imm(self, BPF_SUB);
1059}
1060
1061static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
1062{
1063 return __bpf_fill_alu64_imm(self, BPF_MUL);
1064}
1065
1066static int bpf_fill_alu64_div_imm(struct bpf_test *self)
1067{
1068 return __bpf_fill_alu64_imm(self, BPF_DIV);
1069}
1070
1071static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
1072{
1073 return __bpf_fill_alu64_imm(self, BPF_MOD);
1074}
1075
1076/* ALU32 immediate operations */
1077static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
1078{
1079 return __bpf_fill_alu32_imm(self, BPF_MOV);
1080}
1081
1082static int bpf_fill_alu32_and_imm(struct bpf_test *self)
1083{
1084 return __bpf_fill_alu32_imm(self, BPF_AND);
1085}
1086
1087static int bpf_fill_alu32_or_imm(struct bpf_test *self)
1088{
1089 return __bpf_fill_alu32_imm(self, BPF_OR);
1090}
1091
1092static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
1093{
1094 return __bpf_fill_alu32_imm(self, BPF_XOR);
1095}
1096
1097static int bpf_fill_alu32_add_imm(struct bpf_test *self)
1098{
1099 return __bpf_fill_alu32_imm(self, BPF_ADD);
1100}
1101
1102static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
1103{
1104 return __bpf_fill_alu32_imm(self, BPF_SUB);
1105}
1106
1107static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
1108{
1109 return __bpf_fill_alu32_imm(self, BPF_MUL);
1110}
1111
1112static int bpf_fill_alu32_div_imm(struct bpf_test *self)
1113{
1114 return __bpf_fill_alu32_imm(self, BPF_DIV);
1115}
1116
1117static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
1118{
1119 return __bpf_fill_alu32_imm(self, BPF_MOD);
1120}
1121
1122/* ALU64 register operations */
1123static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
1124{
1125 return __bpf_fill_alu64_reg(self, BPF_MOV);
1126}
1127
1128static int bpf_fill_alu64_and_reg(struct bpf_test *self)
1129{
1130 return __bpf_fill_alu64_reg(self, BPF_AND);
1131}
1132
1133static int bpf_fill_alu64_or_reg(struct bpf_test *self)
1134{
1135 return __bpf_fill_alu64_reg(self, BPF_OR);
1136}
1137
1138static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
1139{
1140 return __bpf_fill_alu64_reg(self, BPF_XOR);
1141}
1142
1143static int bpf_fill_alu64_add_reg(struct bpf_test *self)
1144{
1145 return __bpf_fill_alu64_reg(self, BPF_ADD);
1146}
1147
1148static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
1149{
1150 return __bpf_fill_alu64_reg(self, BPF_SUB);
1151}
1152
1153static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
1154{
1155 return __bpf_fill_alu64_reg(self, BPF_MUL);
1156}
1157
1158static int bpf_fill_alu64_div_reg(struct bpf_test *self)
1159{
1160 return __bpf_fill_alu64_reg(self, BPF_DIV);
1161}
1162
1163static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
1164{
1165 return __bpf_fill_alu64_reg(self, BPF_MOD);
1166}
1167
1168/* ALU32 register operations */
1169static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
1170{
1171 return __bpf_fill_alu32_reg(self, BPF_MOV);
1172}
1173
1174static int bpf_fill_alu32_and_reg(struct bpf_test *self)
1175{
1176 return __bpf_fill_alu32_reg(self, BPF_AND);
1177}
1178
1179static int bpf_fill_alu32_or_reg(struct bpf_test *self)
1180{
1181 return __bpf_fill_alu32_reg(self, BPF_OR);
1182}
1183
1184static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
1185{
1186 return __bpf_fill_alu32_reg(self, BPF_XOR);
1187}
1188
1189static int bpf_fill_alu32_add_reg(struct bpf_test *self)
1190{
1191 return __bpf_fill_alu32_reg(self, BPF_ADD);
1192}
1193
1194static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
1195{
1196 return __bpf_fill_alu32_reg(self, BPF_SUB);
1197}
1198
1199static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
1200{
1201 return __bpf_fill_alu32_reg(self, BPF_MUL);
1202}
1203
1204static int bpf_fill_alu32_div_reg(struct bpf_test *self)
1205{
1206 return __bpf_fill_alu32_reg(self, BPF_DIV);
1207}
1208
1209static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
1210{
1211 return __bpf_fill_alu32_reg(self, BPF_MOD);
1212}
1213
1214/*
1215 * Test JITs that implement complex ALU operations as function
1216 * calls, and must re-arrange operands for argument passing.
1217 */
1218static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
1219{
1220 int len = 2 + 10 * 10;
1221 struct bpf_insn *insns;
1222 u64 dst, res;
1223 int i = 0;
1224 u32 imm;
1225 int rd;
1226
1227 insns = kmalloc_array(n: len, size: sizeof(*insns), GFP_KERNEL);
1228 if (!insns)
1229 return -ENOMEM;
1230
1231 /* Operand and result values according to operation */
1232 if (alu32)
1233 dst = 0x76543210U;
1234 else
1235 dst = 0x7edcba9876543210ULL;
1236 imm = 0x01234567U;
1237
1238 if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1239 imm &= 31;
1240
1241 __bpf_alu_result(res: &res, v1: dst, v2: imm, op);
1242
1243 if (alu32)
1244 res = (u32)res;
1245
1246 /* Check all operand registers */
1247 for (rd = R0; rd <= R9; rd++) {
1248 i += __bpf_ld_imm64(insns: &insns[i], reg: rd, imm64: dst);
1249
1250 if (alu32)
1251 insns[i++] = BPF_ALU32_IMM(op, rd, imm);
1252 else
1253 insns[i++] = BPF_ALU64_IMM(op, rd, imm);
1254
1255 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
1256 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1257 insns[i++] = BPF_EXIT_INSN();
1258
1259 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1260 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
1261 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1262 insns[i++] = BPF_EXIT_INSN();
1263 }
1264
1265 insns[i++] = BPF_MOV64_IMM(R0, 1);
1266 insns[i++] = BPF_EXIT_INSN();
1267
1268 self->u.ptr.insns = insns;
1269 self->u.ptr.len = len;
1270 BUG_ON(i != len);
1271
1272 return 0;
1273}
1274
1275/* ALU64 K registers */
1276static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
1277{
1278 return __bpf_fill_alu_imm_regs(self, BPF_MOV, alu32: false);
1279}
1280
1281static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
1282{
1283 return __bpf_fill_alu_imm_regs(self, BPF_AND, alu32: false);
1284}
1285
1286static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
1287{
1288 return __bpf_fill_alu_imm_regs(self, BPF_OR, alu32: false);
1289}
1290
1291static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
1292{
1293 return __bpf_fill_alu_imm_regs(self, BPF_XOR, alu32: false);
1294}
1295
1296static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
1297{
1298 return __bpf_fill_alu_imm_regs(self, BPF_LSH, alu32: false);
1299}
1300
1301static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
1302{
1303 return __bpf_fill_alu_imm_regs(self, BPF_RSH, alu32: false);
1304}
1305
1306static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
1307{
1308 return __bpf_fill_alu_imm_regs(self, BPF_ARSH, alu32: false);
1309}
1310
1311static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
1312{
1313 return __bpf_fill_alu_imm_regs(self, BPF_ADD, alu32: false);
1314}
1315
1316static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
1317{
1318 return __bpf_fill_alu_imm_regs(self, BPF_SUB, alu32: false);
1319}
1320
1321static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
1322{
1323 return __bpf_fill_alu_imm_regs(self, BPF_MUL, alu32: false);
1324}
1325
1326static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
1327{
1328 return __bpf_fill_alu_imm_regs(self, BPF_DIV, alu32: false);
1329}
1330
1331static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
1332{
1333 return __bpf_fill_alu_imm_regs(self, BPF_MOD, alu32: false);
1334}
1335
1336/* ALU32 K registers */
1337static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
1338{
1339 return __bpf_fill_alu_imm_regs(self, BPF_MOV, alu32: true);
1340}
1341
1342static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
1343{
1344 return __bpf_fill_alu_imm_regs(self, BPF_AND, alu32: true);
1345}
1346
1347static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
1348{
1349 return __bpf_fill_alu_imm_regs(self, BPF_OR, alu32: true);
1350}
1351
1352static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
1353{
1354 return __bpf_fill_alu_imm_regs(self, BPF_XOR, alu32: true);
1355}
1356
1357static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
1358{
1359 return __bpf_fill_alu_imm_regs(self, BPF_LSH, alu32: true);
1360}
1361
1362static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
1363{
1364 return __bpf_fill_alu_imm_regs(self, BPF_RSH, alu32: true);
1365}
1366
1367static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
1368{
1369 return __bpf_fill_alu_imm_regs(self, BPF_ARSH, alu32: true);
1370}
1371
1372static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
1373{
1374 return __bpf_fill_alu_imm_regs(self, BPF_ADD, alu32: true);
1375}
1376
1377static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
1378{
1379 return __bpf_fill_alu_imm_regs(self, BPF_SUB, alu32: true);
1380}
1381
1382static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
1383{
1384 return __bpf_fill_alu_imm_regs(self, BPF_MUL, alu32: true);
1385}
1386
1387static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
1388{
1389 return __bpf_fill_alu_imm_regs(self, BPF_DIV, alu32: true);
1390}
1391
1392static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
1393{
1394 return __bpf_fill_alu_imm_regs(self, BPF_MOD, alu32: true);
1395}
1396
1397/*
1398 * Test JITs that implement complex ALU operations as function
1399 * calls, and must re-arrange operands for argument passing.
1400 */
1401static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
1402{
1403 int len = 2 + 10 * 10 * 12;
1404 u64 dst, src, res, same;
1405 struct bpf_insn *insns;
1406 int rd, rs;
1407 int i = 0;
1408
1409 insns = kmalloc_array(n: len, size: sizeof(*insns), GFP_KERNEL);
1410 if (!insns)
1411 return -ENOMEM;
1412
1413 /* Operand and result values according to operation */
1414 if (alu32) {
1415 dst = 0x76543210U;
1416 src = 0x01234567U;
1417 } else {
1418 dst = 0x7edcba9876543210ULL;
1419 src = 0x0123456789abcdefULL;
1420 }
1421
1422 if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1423 src &= 31;
1424
1425 __bpf_alu_result(res: &res, v1: dst, v2: src, op);
1426 __bpf_alu_result(res: &same, v1: src, v2: src, op);
1427
1428 if (alu32) {
1429 res = (u32)res;
1430 same = (u32)same;
1431 }
1432
1433 /* Check all combinations of operand registers */
1434 for (rd = R0; rd <= R9; rd++) {
1435 for (rs = R0; rs <= R9; rs++) {
1436 u64 val = rd == rs ? same : res;
1437
1438 i += __bpf_ld_imm64(insns: &insns[i], reg: rd, imm64: dst);
1439 i += __bpf_ld_imm64(insns: &insns[i], reg: rs, imm64: src);
1440
1441 if (alu32)
1442 insns[i++] = BPF_ALU32_REG(op, rd, rs);
1443 else
1444 insns[i++] = BPF_ALU64_REG(op, rd, rs);
1445
1446 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
1447 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1448 insns[i++] = BPF_EXIT_INSN();
1449
1450 insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1451 insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
1452 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1453 insns[i++] = BPF_EXIT_INSN();
1454 }
1455 }
1456
1457 insns[i++] = BPF_MOV64_IMM(R0, 1);
1458 insns[i++] = BPF_EXIT_INSN();
1459
1460 self->u.ptr.insns = insns;
1461 self->u.ptr.len = len;
1462 BUG_ON(i != len);
1463
1464 return 0;
1465}
1466
1467/* ALU64 X register combinations */
1468static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
1469{
1470 return __bpf_fill_alu_reg_pairs(self, BPF_MOV, alu32: false);
1471}
1472
1473static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
1474{
1475 return __bpf_fill_alu_reg_pairs(self, BPF_AND, alu32: false);
1476}
1477
1478static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
1479{
1480 return __bpf_fill_alu_reg_pairs(self, BPF_OR, alu32: false);
1481}
1482
1483static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
1484{
1485 return __bpf_fill_alu_reg_pairs(self, BPF_XOR, alu32: false);
1486}
1487
1488static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
1489{
1490 return __bpf_fill_alu_reg_pairs(self, BPF_LSH, alu32: false);
1491}
1492
1493static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
1494{
1495 return __bpf_fill_alu_reg_pairs(self, BPF_RSH, alu32: false);
1496}
1497
1498static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
1499{
1500 return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, alu32: false);
1501}
1502
1503static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
1504{
1505 return __bpf_fill_alu_reg_pairs(self, BPF_ADD, alu32: false);
1506}
1507
1508static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
1509{
1510 return __bpf_fill_alu_reg_pairs(self, BPF_SUB, alu32: false);
1511}
1512
1513static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
1514{
1515 return __bpf_fill_alu_reg_pairs(self, BPF_MUL, alu32: false);
1516}
1517
1518static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
1519{
1520 return __bpf_fill_alu_reg_pairs(self, BPF_DIV, alu32: false);
1521}
1522
1523static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
1524{
1525 return __bpf_fill_alu_reg_pairs(self, BPF_MOD, alu32: false);
1526}
1527
1528/* ALU32 X register combinations */
1529static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
1530{
1531 return __bpf_fill_alu_reg_pairs(self, BPF_MOV, alu32: true);
1532}
1533
1534static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
1535{
1536 return __bpf_fill_alu_reg_pairs(self, BPF_AND, alu32: true);
1537}
1538
1539static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
1540{
1541 return __bpf_fill_alu_reg_pairs(self, BPF_OR, alu32: true);
1542}
1543
1544static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
1545{
1546 return __bpf_fill_alu_reg_pairs(self, BPF_XOR, alu32: true);
1547}
1548
1549static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
1550{
1551 return __bpf_fill_alu_reg_pairs(self, BPF_LSH, alu32: true);
1552}
1553
1554static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
1555{
1556 return __bpf_fill_alu_reg_pairs(self, BPF_RSH, alu32: true);
1557}
1558
1559static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
1560{
1561 return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, alu32: true);
1562}
1563
1564static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
1565{
1566 return __bpf_fill_alu_reg_pairs(self, BPF_ADD, alu32: true);
1567}
1568
1569static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
1570{
1571 return __bpf_fill_alu_reg_pairs(self, BPF_SUB, alu32: true);
1572}
1573
1574static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
1575{
1576 return __bpf_fill_alu_reg_pairs(self, BPF_MUL, alu32: true);
1577}
1578
1579static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
1580{
1581 return __bpf_fill_alu_reg_pairs(self, BPF_DIV, alu32: true);
1582}
1583
1584static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
1585{
1586 return __bpf_fill_alu_reg_pairs(self, BPF_MOD, alu32: true);
1587}
1588
1589/*
1590 * Exhaustive tests of atomic operations for all power-of-two operand
1591 * magnitudes, both for positive and negative values.
1592 */
1593
1594static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
1595 struct bpf_insn *insns, s64 dst, s64 src)
1596{
1597 int op = *(int *)arg;
1598 u64 keep, fetch, res;
1599 int i = 0;
1600
1601 if (!insns)
1602 return 21;
1603
1604 switch (op) {
1605 case BPF_XCHG:
1606 res = src;
1607 break;
1608 default:
1609 __bpf_alu_result(res: &res, v1: dst, v2: src, BPF_OP(op));
1610 }
1611
1612 keep = 0x0123456789abcdefULL;
1613 if (op & BPF_FETCH)
1614 fetch = dst;
1615 else
1616 fetch = src;
1617
1618 i += __bpf_ld_imm64(insns: &insns[i], R0, imm64: keep);
1619 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
1620 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
1621 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: res);
1622 i += __bpf_ld_imm64(insns: &insns[i], R4, imm64: fetch);
1623 i += __bpf_ld_imm64(insns: &insns[i], R5, imm64: keep);
1624
1625 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1626 insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
1627 insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
1628
1629 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1630 insns[i++] = BPF_EXIT_INSN();
1631
1632 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1633 insns[i++] = BPF_EXIT_INSN();
1634
1635 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1636 insns[i++] = BPF_EXIT_INSN();
1637
1638 return i;
1639}
1640
1641static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
1642 struct bpf_insn *insns, s64 dst, s64 src)
1643{
1644 int op = *(int *)arg;
1645 u64 keep, fetch, res;
1646 int i = 0;
1647
1648 if (!insns)
1649 return 21;
1650
1651 switch (op) {
1652 case BPF_XCHG:
1653 res = src;
1654 break;
1655 default:
1656 __bpf_alu_result(res: &res, v1: (u32)dst, v2: (u32)src, BPF_OP(op));
1657 }
1658
1659 keep = 0x0123456789abcdefULL;
1660 if (op & BPF_FETCH)
1661 fetch = (u32)dst;
1662 else
1663 fetch = src;
1664
1665 i += __bpf_ld_imm64(insns: &insns[i], R0, imm64: keep);
1666 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: (u32)dst);
1667 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
1668 i += __bpf_ld_imm64(insns: &insns[i], R3, imm64: (u32)res);
1669 i += __bpf_ld_imm64(insns: &insns[i], R4, imm64: fetch);
1670 i += __bpf_ld_imm64(insns: &insns[i], R5, imm64: keep);
1671
1672 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1673 insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
1674 insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
1675
1676 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1677 insns[i++] = BPF_EXIT_INSN();
1678
1679 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1680 insns[i++] = BPF_EXIT_INSN();
1681
1682 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1683 insns[i++] = BPF_EXIT_INSN();
1684
1685 return i;
1686}
1687
1688static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
1689 struct bpf_insn *insns, s64 dst, s64 src)
1690{
1691 int i = 0;
1692
1693 if (!insns)
1694 return 23;
1695
1696 i += __bpf_ld_imm64(insns: &insns[i], R0, imm64: ~dst);
1697 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: dst);
1698 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
1699
1700 /* Result unsuccessful */
1701 insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1702 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1703 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1704
1705 insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
1706 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1707 insns[i++] = BPF_EXIT_INSN();
1708
1709 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1710 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1711 insns[i++] = BPF_EXIT_INSN();
1712
1713 /* Result successful */
1714 insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1715 insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1716
1717 insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
1718 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1719 insns[i++] = BPF_EXIT_INSN();
1720
1721 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1722 insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1723 insns[i++] = BPF_EXIT_INSN();
1724
1725 return i;
1726}
1727
1728static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
1729 struct bpf_insn *insns, s64 dst, s64 src)
1730{
1731 int i = 0;
1732
1733 if (!insns)
1734 return 27;
1735
1736 i += __bpf_ld_imm64(insns: &insns[i], R0, imm64: ~dst);
1737 i += __bpf_ld_imm64(insns: &insns[i], R1, imm64: (u32)dst);
1738 i += __bpf_ld_imm64(insns: &insns[i], R2, imm64: src);
1739
1740 /* Result unsuccessful */
1741 insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1742 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1743 insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
1744 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1745
1746 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
1747 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1748 insns[i++] = BPF_EXIT_INSN();
1749
1750 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1751 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1752 insns[i++] = BPF_EXIT_INSN();
1753
1754 /* Result successful */
1755 i += __bpf_ld_imm64(insns: &insns[i], R0, imm64: dst);
1756 insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1757 insns[i++] = BPF_ZEXT_REG(R0), /* Zext always inserted by verifier */
1758 insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1759
1760 insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
1761 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1762 insns[i++] = BPF_EXIT_INSN();
1763
1764 insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1765 insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1766 insns[i++] = BPF_EXIT_INSN();
1767
1768 return i;
1769}
1770
1771static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1772{
1773 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 64,
1774 block1: 0, PATTERN_BLOCK2,
1775 emit: &__bpf_emit_atomic64);
1776}
1777
1778static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1779{
1780 return __bpf_fill_pattern(self, arg: &op, dbits: 64, sbits: 64,
1781 block1: 0, PATTERN_BLOCK2,
1782 emit: &__bpf_emit_atomic32);
1783}
1784
1785/* 64-bit atomic operations */
1786static int bpf_fill_atomic64_add(struct bpf_test *self)
1787{
1788 return __bpf_fill_atomic64(self, BPF_ADD);
1789}
1790
1791static int bpf_fill_atomic64_and(struct bpf_test *self)
1792{
1793 return __bpf_fill_atomic64(self, BPF_AND);
1794}
1795
1796static int bpf_fill_atomic64_or(struct bpf_test *self)
1797{
1798 return __bpf_fill_atomic64(self, BPF_OR);
1799}
1800
1801static int bpf_fill_atomic64_xor(struct bpf_test *self)
1802{
1803 return __bpf_fill_atomic64(self, BPF_XOR);
1804}
1805
1806static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
1807{
1808 return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
1809}
1810
1811static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
1812{
1813 return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
1814}
1815
1816static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
1817{
1818 return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
1819}
1820
1821static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
1822{
1823 return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
1824}
1825
1826static int bpf_fill_atomic64_xchg(struct bpf_test *self)
1827{
1828 return __bpf_fill_atomic64(self, BPF_XCHG);
1829}
1830
1831static int bpf_fill_cmpxchg64(struct bpf_test *self)
1832{
1833 return __bpf_fill_pattern(self, NULL, dbits: 64, sbits: 64, block1: 0, PATTERN_BLOCK2,
1834 emit: &__bpf_emit_cmpxchg64);
1835}
1836
1837/* 32-bit atomic operations */
1838static int bpf_fill_atomic32_add(struct bpf_test *self)
1839{
1840 return __bpf_fill_atomic32(self, BPF_ADD);
1841}
1842
1843static int bpf_fill_atomic32_and(struct bpf_test *self)
1844{
1845 return __bpf_fill_atomic32(self, BPF_AND);
1846}
1847
1848static int bpf_fill_atomic32_or(struct bpf_test *self)
1849{
1850 return __bpf_fill_atomic32(self, BPF_OR);
1851}
1852
1853static int bpf_fill_atomic32_xor(struct bpf_test *self)
1854{
1855 return __bpf_fill_atomic32(self, BPF_XOR);
1856}
1857
1858static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
1859{
1860 return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
1861}
1862
1863static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
1864{
1865 return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
1866}
1867
1868static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
1869{
1870 return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
1871}
1872
1873static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
1874{
1875 return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
1876}
1877
1878static int bpf_fill_atomic32_xchg(struct bpf_test *self)
1879{
1880 return __bpf_fill_atomic32(self, BPF_XCHG);
1881}
1882
1883static int bpf_fill_cmpxchg32(struct bpf_test *self)
1884{
1885 return __bpf_fill_pattern(self, NULL, dbits: 64, sbits: 64, block1: 0, PATTERN_BLOCK2,
1886 emit: &__bpf_emit_cmpxchg32);
1887}
1888
1889/*
1890 * Test JITs that implement ATOMIC operations as function calls or
1891 * other primitives, and must re-arrange operands for argument passing.
1892 */
1893static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
1894{
1895 struct bpf_insn *insn;
1896 int len = 2 + 34 * 10 * 10;
1897 u64 mem, upd, res;
1898 int rd, rs, i = 0;
1899
1900 insn = kmalloc_array(n: len, size: sizeof(*insn), GFP_KERNEL);
1901 if (!insn)
1902 return -ENOMEM;
1903
1904 /* Operand and memory values */
1905 if (width == BPF_DW) {
1906 mem = 0x0123456789abcdefULL;
1907 upd = 0xfedcba9876543210ULL;
1908 } else { /* BPF_W */
1909 mem = 0x01234567U;
1910 upd = 0x76543210U;
1911 }
1912
1913 /* Memory updated according to operation */
1914 switch (op) {
1915 case BPF_XCHG:
1916 res = upd;
1917 break;
1918 case BPF_CMPXCHG:
1919 res = mem;
1920 break;
1921 default:
1922 __bpf_alu_result(res: &res, v1: mem, v2: upd, BPF_OP(op));
1923 }
1924
1925 /* Test all operand registers */
1926 for (rd = R0; rd <= R9; rd++) {
1927 for (rs = R0; rs <= R9; rs++) {
1928 u64 cmp, src;
1929
1930 /* Initialize value in memory */
1931 i += __bpf_ld_imm64(insns: &insn[i], R0, imm64: mem);
1932 insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
1933
1934 /* Initialize registers in order */
1935 i += __bpf_ld_imm64(insns: &insn[i], R0, imm64: ~mem);
1936 i += __bpf_ld_imm64(insns: &insn[i], reg: rs, imm64: upd);
1937 insn[i++] = BPF_MOV64_REG(rd, R10);
1938
1939 /* Perform atomic operation */
1940 insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
1941 if (op == BPF_CMPXCHG && width == BPF_W)
1942 insn[i++] = BPF_ZEXT_REG(R0);
1943
1944 /* Check R0 register value */
1945 if (op == BPF_CMPXCHG)
1946 cmp = mem; /* Expect value from memory */
1947 else if (R0 == rd || R0 == rs)
1948 cmp = 0; /* Aliased, checked below */
1949 else
1950 cmp = ~mem; /* Expect value to be preserved */
1951 if (cmp) {
1952 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1953 (u32)cmp, 2);
1954 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1955 insn[i++] = BPF_EXIT_INSN();
1956 insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
1957 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1958 cmp >> 32, 2);
1959 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1960 insn[i++] = BPF_EXIT_INSN();
1961 }
1962
1963 /* Check source register value */
1964 if (rs == R0 && op == BPF_CMPXCHG)
1965 src = 0; /* Aliased with R0, checked above */
1966 else if (rs == rd && (op == BPF_CMPXCHG ||
1967 !(op & BPF_FETCH)))
1968 src = 0; /* Aliased with rd, checked below */
1969 else if (op == BPF_CMPXCHG)
1970 src = upd; /* Expect value to be preserved */
1971 else if (op & BPF_FETCH)
1972 src = mem; /* Expect fetched value from mem */
1973 else /* no fetch */
1974 src = upd; /* Expect value to be preserved */
1975 if (src) {
1976 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1977 (u32)src, 2);
1978 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1979 insn[i++] = BPF_EXIT_INSN();
1980 insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
1981 insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1982 src >> 32, 2);
1983 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1984 insn[i++] = BPF_EXIT_INSN();
1985 }
1986
1987 /* Check destination register value */
1988 if (!(rd == R0 && op == BPF_CMPXCHG) &&
1989 !(rd == rs && (op & BPF_FETCH))) {
1990 insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
1991 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1992 insn[i++] = BPF_EXIT_INSN();
1993 }
1994
1995 /* Check value in memory */
1996 if (rs != rd) { /* No aliasing */
1997 i += __bpf_ld_imm64(insns: &insn[i], R1, imm64: res);
1998 } else if (op == BPF_XCHG) { /* Aliased, XCHG */
1999 insn[i++] = BPF_MOV64_REG(R1, R10);
2000 } else if (op == BPF_CMPXCHG) { /* Aliased, CMPXCHG */
2001 i += __bpf_ld_imm64(insns: &insn[i], R1, imm64: mem);
2002 } else { /* Aliased, ALU oper */
2003 i += __bpf_ld_imm64(insns: &insn[i], R1, imm64: mem);
2004 insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
2005 }
2006
2007 insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
2008 if (width == BPF_DW)
2009 insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
2010 else /* width == BPF_W */
2011 insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
2012 insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
2013 insn[i++] = BPF_EXIT_INSN();
2014 }
2015 }
2016
2017 insn[i++] = BPF_MOV64_IMM(R0, 1);
2018 insn[i++] = BPF_EXIT_INSN();
2019
2020 self->u.ptr.insns = insn;
2021 self->u.ptr.len = i;
2022 BUG_ON(i > len);
2023
2024 return 0;
2025}
2026
2027/* 64-bit atomic register tests */
2028static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
2029{
2030 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
2031}
2032
2033static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
2034{
2035 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
2036}
2037
2038static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
2039{
2040 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
2041}
2042
2043static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
2044{
2045 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
2046}
2047
2048static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
2049{
2050 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
2051}
2052
2053static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
2054{
2055 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
2056}
2057
2058static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
2059{
2060 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
2061}
2062
2063static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
2064{
2065 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
2066}
2067
2068static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
2069{
2070 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
2071}
2072
2073static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
2074{
2075 return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
2076}
2077
2078/* 32-bit atomic register tests */
2079static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
2080{
2081 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
2082}
2083
2084static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
2085{
2086 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
2087}
2088
2089static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
2090{
2091 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
2092}
2093
2094static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
2095{
2096 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
2097}
2098
2099static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
2100{
2101 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
2102}
2103
2104static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
2105{
2106 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
2107}
2108
2109static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
2110{
2111 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
2112}
2113
2114static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
2115{
2116 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
2117}
2118
2119static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
2120{
2121 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
2122}
2123
2124static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
2125{
2126 return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
2127}
2128
2129/*
2130 * Test the two-instruction 64-bit immediate load operation for all
2131 * power-of-two magnitudes of the immediate operand. For each MSB, a block
2132 * of immediate values centered around the power-of-two MSB are tested,
2133 * both for positive and negative values. The test is designed to verify
2134 * the operation for JITs that emit different code depending on the magnitude
2135 * of the immediate value. This is often the case if the native instruction
2136 * immediate field width is narrower than 32 bits.
2137 */
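/*
 * For example, with block = 64 the immediates tested around bit 32 are
 * +/-((1LL << 32) - 32) through +/-((1LL << 32) + 31). Each value is
 * loaded with BPF_LD_IMM64 and compared against a reference assembled
 * from its two 32-bit halves (MOV32, LSH 32, OR).
 */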
2138static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
2139{
2140 int block = 64; /* Increase for more tests per MSB position */
2141 int len = 3 + 8 * 63 * block * 2;
2142 struct bpf_insn *insn;
2143 int bit, adj, sign;
2144 int i = 0;
2145
2146 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2147 if (!insn)
2148 return -ENOMEM;
2149
2150 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2151
2152 for (bit = 0; bit <= 62; bit++) {
2153 for (adj = -block / 2; adj < block / 2; adj++) {
2154 for (sign = -1; sign <= 1; sign += 2) {
2155 s64 imm = sign * ((1LL << bit) + adj);
2156
2157 /* Perform operation */
2158 i += __bpf_ld_imm64(&insn[i], R1, imm);
2159
2160 /* Load reference */
2161 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2162 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
2163 (u32)(imm >> 32));
2164 insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2165 insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2166
2167 /* Check result */
2168 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2169 insn[i++] = BPF_EXIT_INSN();
2170 }
2171 }
2172 }
2173
2174 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2175 insn[i++] = BPF_EXIT_INSN();
2176
2177 self->u.ptr.insns = insn;
2178 self->u.ptr.len = len;
2179 BUG_ON(i != len);
2180
2181 return 0;
2182}
2183
2184/*
2185 * Test the two-instruction 64-bit immediate load operation for different
2186 * combinations of bytes. Each byte in the 64-bit word is constructed as
2187 * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2188 * All combinations of (base1, mask1) and (base2, mask2) bytes are tested.
2189 */
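/*
 * Each of the 256 patterns selects, per byte index, whether that byte is
 * built from (base1, mask1) or (base2, mask2). Every pattern expands to
 * 8 instructions: 2 for the BPF_LD_IMM64, 4 to assemble the reference
 * value, 1 compare and 1 exit, matching the 3 + 8 * BIT(8) allocation.
 */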
2190static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
2191 u8 base1, u8 mask1,
2192 u8 base2, u8 mask2)
2193{
2194 struct bpf_insn *insn;
2195 int len = 3 + 8 * BIT(8);
2196 int pattern, index;
2197 u32 rand = 1;
2198 int i = 0;
2199
2200 insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2201 if (!insn)
2202 return -ENOMEM;
2203
2204 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2205
2206 for (pattern = 0; pattern < BIT(8); pattern++) {
2207 u64 imm = 0;
2208
2209 for (index = 0; index < 8; index++) {
2210 int byte;
2211
2212 if (pattern & BIT(index))
2213 byte = (base1 & mask1) | (rand & ~mask1);
2214 else
2215 byte = (base2 & mask2) | (rand & ~mask2);
2216 imm = (imm << 8) | byte;
2217 }
2218
2219 /* Update our LCG */
2220 rand = rand * 1664525 + 1013904223;
2221
2222 /* Perform operation */
2223 i += __bpf_ld_imm64(&insn[i], R1, imm);
2224
2225 /* Load reference */
2226 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2227 insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
2228 insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2229 insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2230
2231 /* Check result */
2232 insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2233 insn[i++] = BPF_EXIT_INSN();
2234 }
2235
2236 insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2237 insn[i++] = BPF_EXIT_INSN();
2238
2239 self->u.ptr.insns = insn;
2240 self->u.ptr.len = len;
2241 BUG_ON(i != len);
2242
2243 return 0;
2244}
2245
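/*
 * The wrappers below exercise characteristic byte classes: the checker
 * pattern mixes 0x00 and 0xff bytes, pos_neg mixes positive and negative
 * bytes (as s8), pos_zero mixes positive and 0x00 bytes, and neg_zero
 * mixes negative and 0x00 bytes.
 */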
2246static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
2247{
2248 return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
2249}
2250
2251static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
2252{
2253 return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
2254}
2255
2256static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
2257{
2258 return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
2259}
2260
2261static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
2262{
2263 return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
2264}
2265
2266/*
2267 * Exhaustive tests of JMP operations for all combinations of power-of-two
2268 * magnitudes of the operands, both for positive and negative values. The
2269 * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2270 * emit different code depending on the magnitude of the immediate value.
2271 */
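/*
 * Each emitted check block loads the 64-bit dst operand, performs the
 * conditional jump against the immediate or a second loaded register,
 * and exits early whenever the actual branch outcome differs from the
 * expected outcome precomputed by __bpf_match_jmp_cond().
 */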
2272
2273static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2274{
2275 switch (op) {
2276 case BPF_JSET:
2277 return !!(v1 & v2);
2278 case BPF_JEQ:
2279 return v1 == v2;
2280 case BPF_JNE:
2281 return v1 != v2;
2282 case BPF_JGT:
2283 return (u64)v1 > (u64)v2;
2284 case BPF_JGE:
2285 return (u64)v1 >= (u64)v2;
2286 case BPF_JLT:
2287 return (u64)v1 < (u64)v2;
2288 case BPF_JLE:
2289 return (u64)v1 <= (u64)v2;
2290 case BPF_JSGT:
2291 return v1 > v2;
2292 case BPF_JSGE:
2293 return v1 >= v2;
2294 case BPF_JSLT:
2295 return v1 < v2;
2296 case BPF_JSLE:
2297 return v1 <= v2;
2298 }
2299 return false;
2300}
2301
2302static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
2303 struct bpf_insn *insns, s64 dst, s64 imm)
2304{
2305 int op = *(int *)arg;
2306
2307 if (insns) {
2308 bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
2309 int i = 0;
2310
2311 insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
2312
2313 i += __bpf_ld_imm64(&insns[i], R1, dst);
2314 insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
2315 if (!match)
2316 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2317 insns[i++] = BPF_EXIT_INSN();
2318
2319 return i;
2320 }
2321
2322 return 5 + 1;
2323}
2324
2325static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
2326 struct bpf_insn *insns, s64 dst, s64 imm)
2327{
2328 int op = *(int *)arg;
2329
2330 if (insns) {
2331 bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
2332 int i = 0;
2333
2334 i += __bpf_ld_imm64(&insns[i], R1, dst);
2335 insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
2336 if (!match)
2337 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2338 insns[i++] = BPF_EXIT_INSN();
2339
2340 return i;
2341 }
2342
2343 return 5;
2344}
2345
2346static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
2347 struct bpf_insn *insns, s64 dst, s64 src)
2348{
2349 int op = *(int *)arg;
2350
2351 if (insns) {
2352 bool match = __bpf_match_jmp_cond(dst, src, op);
2353 int i = 0;
2354
2355 i += __bpf_ld_imm64(&insns[i], R1, dst);
2356 i += __bpf_ld_imm64(&insns[i], R2, src);
2357 insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
2358 if (!match)
2359 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2360 insns[i++] = BPF_EXIT_INSN();
2361
2362 return i;
2363 }
2364
2365 return 7;
2366}
2367
2368static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
2369 struct bpf_insn *insns, s64 dst, s64 src)
2370{
2371 int op = *(int *)arg;
2372
2373 if (insns) {
2374 bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
2375 int i = 0;
2376
2377 i += __bpf_ld_imm64(&insns[i], R1, dst);
2378 i += __bpf_ld_imm64(&insns[i], R2, src);
2379 insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
2380 if (!match)
2381 insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2382 insns[i++] = BPF_EXIT_INSN();
2383
2384 return i;
2385 }
2386
2387 return 7;
2388}
2389
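/*
 * When called with insns == NULL, the emitters above return only their
 * worst-case instruction count (6 or 5 for the immediate forms, 7 for
 * the register forms), presumably so that the pattern generator can
 * size the instruction buffer before emitting.
 */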
2390static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
2391{
2392 return __bpf_fill_pattern(self, &op, 64, 32,
2393 PATTERN_BLOCK1, PATTERN_BLOCK2,
2394 &__bpf_emit_jmp_imm);
2395}
2396
2397static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
2398{
2399 return __bpf_fill_pattern(self, &op, 64, 32,
2400 PATTERN_BLOCK1, PATTERN_BLOCK2,
2401 &__bpf_emit_jmp32_imm);
2402}
2403
2404static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
2405{
2406 return __bpf_fill_pattern(self, &op, 64, 64,
2407 PATTERN_BLOCK1, PATTERN_BLOCK2,
2408 &__bpf_emit_jmp_reg);
2409}
2410
2411static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
2412{
2413 return __bpf_fill_pattern(self, &op, 64, 64,
2414 PATTERN_BLOCK1, PATTERN_BLOCK2,
2415 &__bpf_emit_jmp32_reg);
2416}
2417
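/*
 * The fill helpers above sweep the dst operand over 64-bit magnitudes and
 * the src operand over 32-bit (immediate forms) or 64-bit (register forms)
 * magnitudes, as selected by the width arguments passed to
 * __bpf_fill_pattern().
 */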
2418/* JMP immediate tests */
2419static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
2420{
2421 return __bpf_fill_jmp_imm(self, BPF_JSET);
2422}
2423
2424static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
2425{
2426 return __bpf_fill_jmp_imm(self, BPF_JEQ);
2427}
2428
2429static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
2430{
2431 return __bpf_fill_jmp_imm(self, BPF_JNE);
2432}
2433
2434static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
2435{
2436 return __bpf_fill_jmp_imm(self, BPF_JGT);
2437}
2438
2439static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
2440{
2441 return __bpf_fill_jmp_imm(self, BPF_JGE);
2442}
2443
2444static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
2445{
2446 return __bpf_fill_jmp_imm(self, BPF_JLT);
2447}
2448
2449static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
2450{
2451 return __bpf_fill_jmp_imm(self, BPF_JLE);
2452}
2453
2454static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
2455{
2456 return __bpf_fill_jmp_imm(self, BPF_JSGT);
2457}
2458
2459static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
2460{
2461 return __bpf_fill_jmp_imm(self, BPF_JSGE);
2462}
2463
2464static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
2465{
2466 return __bpf_fill_jmp_imm(self, BPF_JSLT);
2467}
2468
2469static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
2470{
2471 return __bpf_fill_jmp_imm(self, BPF_JSLE);
2472}
2473
2474/* JMP32 immediate tests */
2475static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
2476{
2477 return __bpf_fill_jmp32_imm(self, BPF_JSET);
2478}
2479
2480static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
2481{
2482 return __bpf_fill_jmp32_imm(self, BPF_JEQ);
2483}
2484
2485static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
2486{
2487 return __bpf_fill_jmp32_imm(self, BPF_JNE);
2488}
2489
2490static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
2491{
2492 return __bpf_fill_jmp32_imm(self, BPF_JGT);
2493}
2494
2495static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
2496{
2497 return __bpf_fill_jmp32_imm(self, BPF_JGE);
2498}
2499
2500static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
2501{
2502 return __bpf_fill_jmp32_imm(self, BPF_JLT);
2503}
2504
2505static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
2506{
2507 return __bpf_fill_jmp32_imm(self, BPF_JLE);
2508}
2509
2510static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
2511{
2512 return __bpf_fill_jmp32_imm(self, BPF_JSGT);
2513}
2514
2515static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
2516{
2517 return __bpf_fill_jmp32_imm(self, BPF_JSGE);
2518}
2519
2520static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
2521{
2522 return __bpf_fill_jmp32_imm(self, BPF_JSLT);
2523}
2524
2525static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
2526{
2527 return __bpf_fill_jmp32_imm(self, BPF_JSLE);
2528}
2529
2530/* JMP register tests */
2531static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
2532{
2533 return __bpf_fill_jmp_reg(self, BPF_JSET);
2534}
2535
2536static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
2537{
2538 return __bpf_fill_jmp_reg(self, BPF_JEQ);
2539}
2540
2541static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
2542{
2543 return __bpf_fill_jmp_reg(self, BPF_JNE);
2544}
2545
2546static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
2547{
2548 return __bpf_fill_jmp_reg(self, BPF_JGT);
2549}
2550
2551static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
2552{
2553 return __bpf_fill_jmp_reg(self, BPF_JGE);
2554}
2555
2556static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
2557{
2558 return __bpf_fill_jmp_reg(self, BPF_JLT);
2559}
2560
2561static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
2562{
2563 return __bpf_fill_jmp_reg(self, BPF_JLE);
2564}
2565
2566static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
2567{
2568 return __bpf_fill_jmp_reg(self, BPF_JSGT);
2569}
2570
2571static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
2572{
2573 return __bpf_fill_jmp_reg(self, BPF_JSGE);
2574}
2575
2576static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
2577{
2578 return __bpf_fill_jmp_reg(self, BPF_JSLT);
2579}
2580
2581static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
2582{
2583 return __bpf_fill_jmp_reg(self, BPF_JSLE);
2584}
2585
2586/* JMP32 register tests */
2587static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
2588{
2589 return __bpf_fill_jmp32_reg(self, BPF_JSET);
2590}
2591
2592static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
2593{
2594 return __bpf_fill_jmp32_reg(self, BPF_JEQ);
2595}
2596
2597static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
2598{
2599 return __bpf_fill_jmp32_reg(self, BPF_JNE);
2600}
2601
2602static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
2603{
2604 return __bpf_fill_jmp32_reg(self, BPF_JGT);
2605}
2606
2607static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
2608{
2609 return __bpf_fill_jmp32_reg(self, BPF_JGE);
2610}
2611
2612static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
2613{
2614 return __bpf_fill_jmp32_reg(self, BPF_JLT);
2615}
2616
2617static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
2618{
2619 return __bpf_fill_jmp32_reg(self, BPF_JLE);
2620}
2621
2622static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
2623{
2624 return __bpf_fill_jmp32_reg(self, BPF_JSGT);
2625}
2626
2627static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
2628{
2629 return __bpf_fill_jmp32_reg(self, BPF_JSGE);
2630}
2631
2632static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
2633{
2634 return __bpf_fill_jmp32_reg(self, BPF_JSLT);
2635}
2636
2637static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
2638{
2639 return __bpf_fill_jmp32_reg(self, BPF_JSLE);
2640}
2641
2642/*
2643 * Set up a sequence of staggered jumps, forwards and backwards with
2644 * increasing offset. This tests the conversion of relative jumps to
2645 * JITed native jumps. On some architectures, for example MIPS, a large
2646 * PC-relative jump offset may overflow the immediate field of the native
2647 * conditional branch instruction, triggering a conversion to use an
2648 * absolute jump instead. Since this changes the jump offsets, another
2649 * offset computation pass is necessary, and that may in turn trigger
2650 * another branch conversion. This jump sequence is particularly nasty
2651 * in that regard.
2652 *
2653 * The sequence generation is parameterized by size and jump type.
2654 * The size must be even, and the expected result is always size + 1.
2655 * Below is an example with size=8 and result=9.
2656 *
2657 * ________________________Start
2658 * R0 = 0
2659 * R1 = r1
2660 * R2 = r2
2661 * ,------- JMP +4 * 3______________Preamble: 4 insns
2662 * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2663 * | | R0 = 8 |
2664 * | | JMP +7 * 3 ------------------------.
2665 * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------. | |
2666 * | | | R0 = 6 | | |
2667 * | | | JMP +5 * 3 ------------------. | |
2668 * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------. | | | |
2669 * | | | | R0 = 4 | | | | |
2670 * | | | | JMP +3 * 3 ------------. | | | |
2671 * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--. | | | | | |
2672 * | | | | | R0 = 2 | | | | | | |
2673 * | | | | | JMP +1 * 3 ------. | | | | | |
2674 * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1 1 2 3 4 5 6 7 8 loc
2675 * | | | | | R0 = 1 -1 +2 -3 +4 -5 +6 -7 +8 off
2676 * | | | | | JMP -2 * 3 ---' | | | | | | |
2677 * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----' | | | | | |
2678 * | | | | | | R0 = 3 | | | | | |
2679 * | | | | | | JMP -4 * 3 ---------' | | | | |
2680 * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------' | | | |
2681 * | | | | | | | R0 = 5 | | | |
2682 * | | | | | | | JMP -6 * 3 ---------------' | | |
2683 * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------' | |
2684 * | | | | | | | | R0 = 7 | |
2685 * | | Error | | | JMP -8 * 3 ---------------------' |
2686 * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2687 * | | | | | | | | | R0 = 9__________________Sequence: 3 * size - 1 insns
2688 * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2689 *
2690 */
2691
2692/* The maximum size parameter */
2693#define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2694
2695/* We use a reduced number of iterations to get a reasonable execution time */
2696#define NR_STAGGERED_JMP_RUNS 10
2697
2698static int __bpf_fill_staggered_jumps(struct bpf_test *self,
2699 const struct bpf_insn *jmp,
2700 u64 r1, u64 r2)
2701{
2702 int size = self->test[0].result - 1;
2703 int len = 4 + 3 * (size + 1);
2704 struct bpf_insn *insns;
2705 int off, ind;
2706
2707 insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
2708 if (!insns)
2709 return -ENOMEM;
2710
2711 /* Preamble */
2712 insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2713 insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
2714 insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
2715 insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
2716
2717 /* Sequence */
2718 for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
2719 struct bpf_insn *ins = &insns[4 + 3 * ind];
2720 int loc;
2721
2722 if (off == 0)
2723 off--;
2724
2725 loc = abs(off);
2726 ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
2727 3 * (size - ind) + 1);
2728 ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
2729 ins[2] = *jmp;
2730 ins[2].off = 3 * (off - 1);
2731 }
2732
2733 /* Return */
2734 insns[len - 1] = BPF_EXIT_INSN();
2735
2736 self->u.ptr.insns = insns;
2737 self->u.ptr.len = len;
2738
2739 return 0;
2740}
2741
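/*
 * Each wrapper below plugs a specific jump instruction into the staggered
 * sequence, with r1 and r2 chosen so that the tested condition is always
 * true and every jump in the sequence is taken.
 */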
2742/* 64-bit unconditional jump */
2743static int bpf_fill_staggered_ja(struct bpf_test *self)
2744{
2745 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
2746
2747 return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
2748}
2749
2750/* 64-bit immediate jumps */
2751static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
2752{
2753 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
2754
2755 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2756}
2757
2758static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
2759{
2760 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
2761
2762 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2763}
2764
2765static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
2766{
2767 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
2768
2769 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2770}
2771
2772static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
2773{
2774 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
2775
2776 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2777}
2778
2779static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
2780{
2781 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
2782
2783 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2784}
2785
2786static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
2787{
2788 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
2789
2790 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2791}
2792
2793static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
2794{
2795 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
2796
2797 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2798}
2799
2800static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
2801{
2802 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
2803
2804 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2805}
2806
2807static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
2808{
2809 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
2810
2811 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2812}
2813
2814static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
2815{
2816 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
2817
2818 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2819}
2820
2821static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
2822{
2823 struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
2824
2825 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2826}
2827
2828/* 64-bit register jumps */
2829static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
2830{
2831 struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
2832
2833 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2834}
2835
2836static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
2837{
2838 struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
2839
2840 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2841}
2842
2843static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
2844{
2845 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
2846
2847 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2848}
2849
2850static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
2851{
2852 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
2853
2854 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2855}
2856
2857static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
2858{
2859 struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
2860
2861 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2862}
2863
2864static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
2865{
2866 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
2867
2868 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2869}
2870
2871static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
2872{
2873 struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
2874
2875 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2876}
2877
2878static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
2879{
2880 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
2881
2882 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2883}
2884
2885static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
2886{
2887 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
2888
2889 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2890}
2891
2892static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
2893{
2894 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
2895
2896 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2897}
2898
2899static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
2900{
2901 struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
2902
2903 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2904}
2905
2906/* 32-bit immediate jumps */
2907static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2908{
2909 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2910
2911 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2912}
2913
2914static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2915{
2916 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2917
2918 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2919}
2920
2921static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2922{
2923 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2924
2925 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2926}
2927
2928static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2929{
2930 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2931
2932 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2933}
2934
2935static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2936{
2937 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2938
2939 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2940}
2941
2942static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2943{
2944 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2945
2946 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2947}
2948
2949static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2950{
2951 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2952
2953 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2954}
2955
2956static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2957{
2958 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2959
2960 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2961}
2962
2963static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2964{
2965 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2966
2967 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2968}
2969
2970static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2971{
2972 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2973
2974 return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2975}
2976
2977static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2978{
2979 struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
2980
2981 return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2982}
2983
2984/* 32-bit register jumps */
2985static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
2986{
2987 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
2988
2989 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2990}
2991
2992static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
2993{
2994 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
2995
2996 return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2997}
2998
2999static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
3000{
3001 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
3002
3003 return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
3004}
3005
3006static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
3007{
3008 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
3009
3010 return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
3011}
3012
3013static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
3014{
3015 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
3016
3017 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3018}
3019
3020static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
3021{
3022 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
3023
3024 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
3025}
3026
3027static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
3028{
3029 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
3030
3031 return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3032}
3033
3034static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
3035{
3036 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
3037
3038 return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
3039}
3040
3041static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
3042{
3043 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
3044
3045 return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
3046}
3047
3048static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
3049{
3050 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
3051
3052 return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
3053}
3054
3055static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
3056{
3057 struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
3058
3059 return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
3060}
3061
3062
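/*
 * Each entry below pairs a test program with optional input data and a
 * set of { data length, expected return value } subtests.
 */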
3063static struct bpf_test tests[] = {
3064 {
3065 "TAX",
3066 .u.insns = {
3067 BPF_STMT(BPF_LD | BPF_IMM, 1),
3068 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3069 BPF_STMT(BPF_LD | BPF_IMM, 2),
3070 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3071 BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
3072 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3073 BPF_STMT(BPF_LD | BPF_LEN, 0),
3074 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3075 BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
3076 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
3077 BPF_STMT(BPF_RET | BPF_A, 0)
3078 },
3079 CLASSIC,
3080 { 10, 20, 30, 40, 50 },
3081 { { 2, 10 }, { 3, 20 }, { 4, 30 } },
3082 },
3083 {
3084 "TXA",
3085 .u.insns = {
3086 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3087 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3088 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3089 BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3090 },
3091 CLASSIC,
3092 { 10, 20, 30, 40, 50 },
3093 { { 1, 2 }, { 3, 6 }, { 4, 8 } },
3094 },
3095 {
3096 "ADD_SUB_MUL_K",
3097 .u.insns = {
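/* Expected value walk-through:
 * A = 1 + 2 = 3, X = 3, A = A - X = 0, A = 0 + 0xffffffff = 0xffffffff,
 * A = 0xffffffff * 3 = 0xfffffffd (mod 2^32)
 */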
3098 BPF_STMT(BPF_LD | BPF_IMM, 1),
3099 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3100 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3101 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3102 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3103 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3104 BPF_STMT(BPF_RET | BPF_A, 0)
3105 },
3106 CLASSIC | FLAG_NO_DATA,
3107 { },
3108 { { 0, 0xfffffffd } }
3109 },
3110 {
3111 "DIV_MOD_KX",
3112 .u.insns = {
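/* Expected value walk-through:
 * A = 8 / 2 = 4, X = 4, A = 0xffffffff / 4 = 0x3fffffff,
 * A = 0xffffffff / 0x70000000 = 2, X = 2, A = 0xffffffff % 2 = 1, X = 1,
 * A = 0xffffffff % 0x70000000 = 0x1fffffff, A = A + X = 0x20000000
 */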
3113 BPF_STMT(BPF_LD | BPF_IMM, 8),
3114 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3115 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3116 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3117 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3118 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3119 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3120 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3121 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3122 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3123 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3124 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3125 BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3126 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3127 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3128 BPF_STMT(BPF_RET | BPF_A, 0)
3129 },
3130 CLASSIC | FLAG_NO_DATA,
3131 { },
3132 { { 0, 0x20000000 } }
3133 },
3134 {
3135 "AND_OR_LSH_K",
3136 .u.insns = {
3137 BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3138 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3139 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3140 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3141 BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3142 BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3143 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3144 BPF_STMT(BPF_RET | BPF_A, 0)
3145 },
3146 CLASSIC | FLAG_NO_DATA,
3147 { },
3148 { { 0, 0x800000ff }, { 1, 0x800000ff } },
3149 },
3150 {
3151 "LD_IMM_0",
3152 .u.insns = {
3153 BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3154 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3155 BPF_STMT(BPF_RET | BPF_K, 0),
3156 BPF_STMT(BPF_RET | BPF_K, 1),
3157 },
3158 CLASSIC,
3159 { },
3160 { { 1, 1 } },
3161 },
3162 {
3163 "LD_IND",
3164 .u.insns = {
3165 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3166 BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3167 BPF_STMT(BPF_RET | BPF_K, 1)
3168 },
3169 CLASSIC,
3170 { },
3171 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3172 },
3173 {
3174 "LD_ABS",
3175 .u.insns = {
3176 BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3177 BPF_STMT(BPF_RET | BPF_K, 1)
3178 },
3179 CLASSIC,
3180 { },
3181 { { 1, 0 }, { 10, 0 }, { 60, 0 } },
3182 },
3183 {
3184 "LD_ABS_LL",
3185 .u.insns = {
3186 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3187 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3188 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3189 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3190 BPF_STMT(BPF_RET | BPF_A, 0)
3191 },
3192 CLASSIC,
3193 { 1, 2, 3 },
3194 { { 1, 0 }, { 2, 3 } },
3195 },
3196 {
3197 "LD_IND_LL",
3198 .u.insns = {
3199 BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3200 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3201 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3202 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3203 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3204 BPF_STMT(BPF_RET | BPF_A, 0)
3205 },
3206 CLASSIC,
3207 { 1, 2, 3, 0xff },
3208 { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3209 },
3210 {
3211 "LD_ABS_NET",
3212 .u.insns = {
3213 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3214 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3215 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3216 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3217 BPF_STMT(BPF_RET | BPF_A, 0)
3218 },
3219 CLASSIC,
3220 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3221 { { 15, 0 }, { 16, 3 } },
3222 },
3223 {
3224 "LD_IND_NET",
3225 .u.insns = {
3226 BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3227 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3228 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3229 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3230 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3231 BPF_STMT(BPF_RET | BPF_A, 0)
3232 },
3233 CLASSIC,
3234 { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3235 { { 14, 0 }, { 15, 1 }, { 17, 3 } },
3236 },
3237 {
3238 "LD_PKTTYPE",
3239 .u.insns = {
3240 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3241 SKF_AD_OFF + SKF_AD_PKTTYPE),
3242 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3243 BPF_STMT(BPF_RET | BPF_K, 1),
3244 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3245 SKF_AD_OFF + SKF_AD_PKTTYPE),
3246 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3247 BPF_STMT(BPF_RET | BPF_K, 1),
3248 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3249 SKF_AD_OFF + SKF_AD_PKTTYPE),
3250 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3251 BPF_STMT(BPF_RET | BPF_K, 1),
3252 BPF_STMT(BPF_RET | BPF_A, 0)
3253 },
3254 CLASSIC,
3255 { },
3256 { { 1, 3 }, { 10, 3 } },
3257 },
3258 {
3259 "LD_MARK",
3260 .u.insns = {
3261 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3262 SKF_AD_OFF + SKF_AD_MARK),
3263 BPF_STMT(BPF_RET | BPF_A, 0)
3264 },
3265 CLASSIC,
3266 { },
3267 { { 1, SKB_MARK}, { 10, SKB_MARK} },
3268 },
3269 {
3270 "LD_RXHASH",
3271 .u.insns = {
3272 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3273 SKF_AD_OFF + SKF_AD_RXHASH),
3274 BPF_STMT(BPF_RET | BPF_A, 0)
3275 },
3276 CLASSIC,
3277 { },
3278 { { 1, SKB_HASH}, { 10, SKB_HASH} },
3279 },
3280 {
3281 "LD_QUEUE",
3282 .u.insns = {
3283 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3284 SKF_AD_OFF + SKF_AD_QUEUE),
3285 BPF_STMT(BPF_RET | BPF_A, 0)
3286 },
3287 CLASSIC,
3288 { },
3289 { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3290 },
3291 {
3292 "LD_PROTOCOL",
3293 .u.insns = {
3294 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3295 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3296 BPF_STMT(BPF_RET | BPF_K, 0),
3297 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3298 SKF_AD_OFF + SKF_AD_PROTOCOL),
3299 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3300 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3301 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3302 BPF_STMT(BPF_RET | BPF_K, 0),
3303 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3304 BPF_STMT(BPF_RET | BPF_A, 0)
3305 },
3306 CLASSIC,
3307 { 10, 20, 30 },
3308 { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3309 },
3310 {
3311 "LD_VLAN_TAG",
3312 .u.insns = {
3313 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3314 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3315 BPF_STMT(BPF_RET | BPF_A, 0)
3316 },
3317 CLASSIC,
3318 { },
3319 {
3320 { 1, SKB_VLAN_TCI },
3321 { 10, SKB_VLAN_TCI }
3322 },
3323 },
3324 {
3325 "LD_VLAN_TAG_PRESENT",
3326 .u.insns = {
3327 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3328 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3329 BPF_STMT(BPF_RET | BPF_A, 0)
3330 },
3331 CLASSIC,
3332 { },
3333 {
3334 { 1, SKB_VLAN_PRESENT },
3335 { 10, SKB_VLAN_PRESENT }
3336 },
3337 },
3338 {
3339 "LD_IFINDEX",
3340 .u.insns = {
3341 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3342 SKF_AD_OFF + SKF_AD_IFINDEX),
3343 BPF_STMT(BPF_RET | BPF_A, 0)
3344 },
3345 CLASSIC,
3346 { },
3347 { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3348 },
3349 {
3350 "LD_HATYPE",
3351 .u.insns = {
3352 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3353 SKF_AD_OFF + SKF_AD_HATYPE),
3354 BPF_STMT(BPF_RET | BPF_A, 0)
3355 },
3356 CLASSIC,
3357 { },
3358 { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3359 },
3360 {
3361 "LD_CPU",
3362 .u.insns = {
3363 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3364 SKF_AD_OFF + SKF_AD_CPU),
3365 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3366 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3367 SKF_AD_OFF + SKF_AD_CPU),
3368 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3369 BPF_STMT(BPF_RET | BPF_A, 0)
3370 },
3371 CLASSIC,
3372 { },
3373 { { 1, 0 }, { 10, 0 } },
3374 },
3375 {
3376 "LD_NLATTR",
3377 .u.insns = {
3378 BPF_STMT(BPF_LDX | BPF_IMM, 2),
3379 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3380 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3381 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3382 SKF_AD_OFF + SKF_AD_NLATTR),
3383 BPF_STMT(BPF_RET | BPF_A, 0)
3384 },
3385 CLASSIC,
3386#ifdef __BIG_ENDIAN
3387 { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3388#else
3389 { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3390#endif
3391 { { 4, 0 }, { 20, 6 } },
3392 },
3393 {
3394 "LD_NLATTR_NEST",
3395 .u.insns = {
3396 BPF_STMT(BPF_LD | BPF_IMM, 2),
3397 BPF_STMT(BPF_LDX | BPF_IMM, 3),
3398 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3399 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3400 BPF_STMT(BPF_LD | BPF_IMM, 2),
3401 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3402 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3403 BPF_STMT(BPF_LD | BPF_IMM, 2),
3404 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3405 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3406 BPF_STMT(BPF_LD | BPF_IMM, 2),
3407 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3408 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3409 BPF_STMT(BPF_LD | BPF_IMM, 2),
3410 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3411 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3412 BPF_STMT(BPF_LD | BPF_IMM, 2),
3413 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3414 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3415 BPF_STMT(BPF_LD | BPF_IMM, 2),
3416 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3417 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3418 BPF_STMT(BPF_LD | BPF_IMM, 2),
3419 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3420 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3421 BPF_STMT(BPF_RET | BPF_A, 0)
3422 },
3423 CLASSIC,
3424#ifdef __BIG_ENDIAN
3425 { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3426#else
3427 { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3428#endif
3429 { { 4, 0 }, { 20, 10 } },
3430 },
3431 {
3432 "LD_PAYLOAD_OFF",
3433 .u.insns = {
3434 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3435 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3436 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3437 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3438 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3439 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3440 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3441 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3442 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3443 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3444 BPF_STMT(BPF_RET | BPF_A, 0)
3445 },
3446 CLASSIC,
3447 /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethertype IPv4 (0x0800),
3448 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3449 * id 9737, seq 1, length 64
3450 */
3451 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3452 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3453 0x08, 0x00,
3454 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3455 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3456 { { 30, 0 }, { 100, 42 } },
3457 },
3458 {
3459 "LD_ANC_XOR",
3460 .u.insns = {
3461 BPF_STMT(BPF_LD | BPF_IMM, 10),
3462 BPF_STMT(BPF_LDX | BPF_IMM, 300),
3463 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3464 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3465 BPF_STMT(BPF_RET | BPF_A, 0)
3466 },
3467 CLASSIC,
3468 { },
3469 { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3470 },
3471 {
3472 "SPILL_FILL",
3473 .u.insns = {
3474 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3475 BPF_STMT(BPF_LD | BPF_IMM, 2),
3476 BPF_STMT(BPF_ALU | BPF_RSH, 1),
3477 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3478 BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3479 BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3480 BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3481 BPF_STMT(BPF_STX, 15), /* M3 = len */
3482 BPF_STMT(BPF_LDX | BPF_MEM, 1),
3483 BPF_STMT(BPF_LD | BPF_MEM, 2),
3484 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3485 BPF_STMT(BPF_LDX | BPF_MEM, 15),
3486 BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3487 BPF_STMT(BPF_RET | BPF_A, 0)
3488 },
3489 CLASSIC,
3490 { },
3491 { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3492 },
3493 {
3494 "JEQ",
3495 .u.insns = {
3496 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3497 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3498 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3499 BPF_STMT(BPF_RET | BPF_K, 1),
3500 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3501 },
3502 CLASSIC,
3503 { 3, 3, 3, 3, 3 },
3504 { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3505 },
3506 {
3507 "JGT",
3508 .u.insns = {
3509 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3510 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3511 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3512 BPF_STMT(BPF_RET | BPF_K, 1),
3513 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3514 },
3515 CLASSIC,
3516 { 4, 4, 4, 3, 3 },
3517 { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3518 },
3519 {
3520 "JGE (jt 0), test 1",
3521 .u.insns = {
3522 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3523 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3524 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3525 BPF_STMT(BPF_RET | BPF_K, 1),
3526 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3527 },
3528 CLASSIC,
3529 { 4, 4, 4, 3, 3 },
3530 { { 2, 0 }, { 3, 1 }, { 4, 1 } },
3531 },
3532 {
3533 "JGE (jt 0), test 2",
3534 .u.insns = {
3535 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3536 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3537 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3538 BPF_STMT(BPF_RET | BPF_K, 1),
3539 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3540 },
3541 CLASSIC,
3542 { 4, 4, 5, 3, 3 },
3543 { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3544 },
3545 {
3546 "JGE",
3547 .u.insns = {
3548 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3549 BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3550 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3551 BPF_STMT(BPF_RET | BPF_K, 10),
3552 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3553 BPF_STMT(BPF_RET | BPF_K, 20),
3554 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3555 BPF_STMT(BPF_RET | BPF_K, 30),
3556 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3557 BPF_STMT(BPF_RET | BPF_K, 40),
3558 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3559 },
3560 CLASSIC,
3561 { 1, 2, 3, 4, 5 },
3562 { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3563 },
3564 {
3565 "JSET",
3566 .u.insns = {
3567 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3568 BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3569 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3570 BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3571 BPF_STMT(BPF_LDX | BPF_LEN, 0),
3572 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3573 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3574 BPF_STMT(BPF_MISC | BPF_TAX, 0),
3575 BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3576 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3577 BPF_STMT(BPF_RET | BPF_K, 10),
3578 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3579 BPF_STMT(BPF_RET | BPF_K, 20),
3580 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3581 BPF_STMT(BPF_RET | BPF_K, 30),
3582 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3583 BPF_STMT(BPF_RET | BPF_K, 30),
3584 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3585 BPF_STMT(BPF_RET | BPF_K, 30),
3586 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3587 BPF_STMT(BPF_RET | BPF_K, 30),
3588 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3589 BPF_STMT(BPF_RET | BPF_K, 30),
3590 BPF_STMT(BPF_RET | BPF_K, MAX_K)
3591 },
3592 CLASSIC,
3593 { 0, 0xAA, 0x55, 1 },
3594 { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3595 },
3596 {
3597 "tcpdump port 22",
3598 .u.insns = {
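/* Resembles the libpcap output for a 'port 22' filter: accepts TCP
 * (0x6), UDP (0x11) or SCTP (0x84) packets with source or destination
 * port 22 over IPv4 (non-fragmented) or IPv6, returning 0xffff on a
 * match and 0 otherwise.
 */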
3599 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3600 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3601 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3602 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3603 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3604 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3605 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3606 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3607 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3608 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3609 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3610 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3611 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3612 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3613 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3614 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3615 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3616 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3617 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3618 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3619 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3620 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3621 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3622 BPF_STMT(BPF_RET | BPF_K, 0),
3623 },
3624 CLASSIC,
3625 /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4 (0x0800)
3626 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3627 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3628 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3629 */
3630 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3631 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3632 0x08, 0x00,
3633 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3634 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3635 0x0a, 0x01, 0x01, 0x95, /* ip src */
3636 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3637 0xc2, 0x24,
3638 0x00, 0x16 /* dst port */ },
3639 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3640 },
3641 {
3642 "tcpdump complex",
3643 .u.insns = {
3644 /* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3645 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3646 * (len > 115 or len < 30000000000)' -d
3647 */
3648 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3649 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3650 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3651 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3652 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3653 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3654 BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3655 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3656 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3657 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3658 BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3659 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3660 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3661 BPF_STMT(BPF_ST, 1),
3662 BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3663 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3664 BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3665 BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3666 BPF_STMT(BPF_LD | BPF_MEM, 1),
3667 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3668 BPF_STMT(BPF_ST, 5),
3669 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3670 BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3671 BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3672 BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3673 BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3674 BPF_STMT(BPF_LD | BPF_MEM, 5),
3675 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3676 BPF_STMT(BPF_LD | BPF_LEN, 0),
3677 BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3678 BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3679 BPF_STMT(BPF_RET | BPF_K, 0xffff),
3680 BPF_STMT(BPF_RET | BPF_K, 0),
3681 },
3682 CLASSIC,
3683 { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3684 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3685 0x08, 0x00,
3686 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3687 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3688 0x0a, 0x01, 0x01, 0x95, /* ip src */
3689 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3690 0xc2, 0x24,
3691 0x00, 0x16 /* dst port */ },
3692 { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3693 },
3694 {
3695 "RET_A",
3696 .u.insns = {
3697 /* check that uninitialized X and A contain zeros */
3698 BPF_STMT(BPF_MISC | BPF_TXA, 0),
3699 BPF_STMT(BPF_RET | BPF_A, 0)
3700 },
3701 CLASSIC,
3702 { },
3703 { {1, 0}, {2, 0} },
3704 },
3705 {
3706 "INT: ADD trivial",
3707 .u.insns_int = {
3708 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3709 BPF_ALU64_IMM(BPF_ADD, R1, 2),
3710 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3711 BPF_ALU64_REG(BPF_SUB, R1, R2),
3712 BPF_ALU64_IMM(BPF_ADD, R1, -1),
3713 BPF_ALU64_IMM(BPF_MUL, R1, 3),
3714 BPF_ALU64_REG(BPF_MOV, R0, R1),
3715 BPF_EXIT_INSN(),
3716 },
3717 INTERNAL,
3718 { },
3719 { { 0, 0xfffffffd } }
3720 },
3721 {
3722 "INT: MUL_X",
3723 .u.insns_int = {
3724 BPF_ALU64_IMM(BPF_MOV, R0, -1),
3725 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3726 BPF_ALU64_IMM(BPF_MOV, R2, 3),
3727 BPF_ALU64_REG(BPF_MUL, R1, R2),
3728 BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3729 BPF_EXIT_INSN(),
3730 BPF_ALU64_IMM(BPF_MOV, R0, 1),
3731 BPF_EXIT_INSN(),
3732 },
3733 INTERNAL,
3734 { },
3735 { { 0, 1 } }
3736 },
3737 {
3738 "INT: MUL_X2",
3739 .u.insns_int = {
3740 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3741 BPF_ALU32_IMM(BPF_MOV, R1, -1),
3742 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3743 BPF_ALU64_REG(BPF_MUL, R1, R2),
3744 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3745 BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3746 BPF_EXIT_INSN(),
3747 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3748 BPF_EXIT_INSN(),
3749 },
3750 INTERNAL,
3751 { },
3752 { { 0, 1 } }
3753 },
3754 {
3755 "INT: MUL32_X",
3756 .u.insns_int = {
3757 BPF_ALU32_IMM(BPF_MOV, R0, -1),
3758 BPF_ALU64_IMM(BPF_MOV, R1, -1),
3759 BPF_ALU32_IMM(BPF_MOV, R2, 3),
3760 BPF_ALU32_REG(BPF_MUL, R1, R2),
3761 BPF_ALU64_IMM(BPF_RSH, R1, 8),
3762 BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3763 BPF_EXIT_INSN(),
3764 BPF_ALU32_IMM(BPF_MOV, R0, 1),
3765 BPF_EXIT_INSN(),
3766 },
3767 INTERNAL,
3768 { },
3769 { { 0, 1 } }
3770 },
3771 {
3772 /* Have to test all register combinations, since
3773 * JITing of different registers will produce
3774 * different asm code.
3775 */
3776 "INT: ADD 64-bit",
3777 .u.insns_int = {
3778 BPF_ALU64_IMM(BPF_MOV, R0, 0),
3779 BPF_ALU64_IMM(BPF_MOV, R1, 1),
3780 BPF_ALU64_IMM(BPF_MOV, R2, 2),
3781 BPF_ALU64_IMM(BPF_MOV, R3, 3),
3782 BPF_ALU64_IMM(BPF_MOV, R4, 4),
3783 BPF_ALU64_IMM(BPF_MOV, R5, 5),
3784 BPF_ALU64_IMM(BPF_MOV, R6, 6),
3785 BPF_ALU64_IMM(BPF_MOV, R7, 7),
3786 BPF_ALU64_IMM(BPF_MOV, R8, 8),
3787 BPF_ALU64_IMM(BPF_MOV, R9, 9),
3788 BPF_ALU64_IMM(BPF_ADD, R0, 20),
3789 BPF_ALU64_IMM(BPF_ADD, R1, 20),
3790 BPF_ALU64_IMM(BPF_ADD, R2, 20),
3791 BPF_ALU64_IMM(BPF_ADD, R3, 20),
3792 BPF_ALU64_IMM(BPF_ADD, R4, 20),
3793 BPF_ALU64_IMM(BPF_ADD, R5, 20),
3794 BPF_ALU64_IMM(BPF_ADD, R6, 20),
3795 BPF_ALU64_IMM(BPF_ADD, R7, 20),
3796 BPF_ALU64_IMM(BPF_ADD, R8, 20),
3797 BPF_ALU64_IMM(BPF_ADD, R9, 20),
3798 BPF_ALU64_IMM(BPF_SUB, R0, 10),
3799 BPF_ALU64_IMM(BPF_SUB, R1, 10),
3800 BPF_ALU64_IMM(BPF_SUB, R2, 10),
3801 BPF_ALU64_IMM(BPF_SUB, R3, 10),
3802 BPF_ALU64_IMM(BPF_SUB, R4, 10),
3803 BPF_ALU64_IMM(BPF_SUB, R5, 10),
3804 BPF_ALU64_IMM(BPF_SUB, R6, 10),
3805 BPF_ALU64_IMM(BPF_SUB, R7, 10),
3806 BPF_ALU64_IMM(BPF_SUB, R8, 10),
3807 BPF_ALU64_IMM(BPF_SUB, R9, 10),
3808 BPF_ALU64_REG(BPF_ADD, R0, R0),
3809 BPF_ALU64_REG(BPF_ADD, R0, R1),
3810 BPF_ALU64_REG(BPF_ADD, R0, R2),
3811 BPF_ALU64_REG(BPF_ADD, R0, R3),
3812 BPF_ALU64_REG(BPF_ADD, R0, R4),
3813 BPF_ALU64_REG(BPF_ADD, R0, R5),
3814 BPF_ALU64_REG(BPF_ADD, R0, R6),
3815 BPF_ALU64_REG(BPF_ADD, R0, R7),
3816 BPF_ALU64_REG(BPF_ADD, R0, R8),
3817 BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3818 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3819 BPF_EXIT_INSN(),
3820 BPF_ALU64_REG(BPF_ADD, R1, R0),
3821 BPF_ALU64_REG(BPF_ADD, R1, R1),
3822 BPF_ALU64_REG(BPF_ADD, R1, R2),
3823 BPF_ALU64_REG(BPF_ADD, R1, R3),
3824 BPF_ALU64_REG(BPF_ADD, R1, R4),
3825 BPF_ALU64_REG(BPF_ADD, R1, R5),
3826 BPF_ALU64_REG(BPF_ADD, R1, R6),
3827 BPF_ALU64_REG(BPF_ADD, R1, R7),
3828 BPF_ALU64_REG(BPF_ADD, R1, R8),
3829 BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3830 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3831 BPF_EXIT_INSN(),
3832 BPF_ALU64_REG(BPF_ADD, R2, R0),
3833 BPF_ALU64_REG(BPF_ADD, R2, R1),
3834 BPF_ALU64_REG(BPF_ADD, R2, R2),
3835 BPF_ALU64_REG(BPF_ADD, R2, R3),
3836 BPF_ALU64_REG(BPF_ADD, R2, R4),
3837 BPF_ALU64_REG(BPF_ADD, R2, R5),
3838 BPF_ALU64_REG(BPF_ADD, R2, R6),
3839 BPF_ALU64_REG(BPF_ADD, R2, R7),
3840 BPF_ALU64_REG(BPF_ADD, R2, R8),
3841 BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3842 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3843 BPF_EXIT_INSN(),
3844 BPF_ALU64_REG(BPF_ADD, R3, R0),
3845 BPF_ALU64_REG(BPF_ADD, R3, R1),
3846 BPF_ALU64_REG(BPF_ADD, R3, R2),
3847 BPF_ALU64_REG(BPF_ADD, R3, R3),
3848 BPF_ALU64_REG(BPF_ADD, R3, R4),
3849 BPF_ALU64_REG(BPF_ADD, R3, R5),
3850 BPF_ALU64_REG(BPF_ADD, R3, R6),
3851 BPF_ALU64_REG(BPF_ADD, R3, R7),
3852 BPF_ALU64_REG(BPF_ADD, R3, R8),
3853 BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3854 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3855 BPF_EXIT_INSN(),
3856 BPF_ALU64_REG(BPF_ADD, R4, R0),
3857 BPF_ALU64_REG(BPF_ADD, R4, R1),
3858 BPF_ALU64_REG(BPF_ADD, R4, R2),
3859 BPF_ALU64_REG(BPF_ADD, R4, R3),
3860 BPF_ALU64_REG(BPF_ADD, R4, R4),
3861 BPF_ALU64_REG(BPF_ADD, R4, R5),
3862 BPF_ALU64_REG(BPF_ADD, R4, R6),
3863 BPF_ALU64_REG(BPF_ADD, R4, R7),
3864 BPF_ALU64_REG(BPF_ADD, R4, R8),
3865 BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3866 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3867 BPF_EXIT_INSN(),
3868 BPF_ALU64_REG(BPF_ADD, R5, R0),
3869 BPF_ALU64_REG(BPF_ADD, R5, R1),
3870 BPF_ALU64_REG(BPF_ADD, R5, R2),
3871 BPF_ALU64_REG(BPF_ADD, R5, R3),
3872 BPF_ALU64_REG(BPF_ADD, R5, R4),
3873 BPF_ALU64_REG(BPF_ADD, R5, R5),
3874 BPF_ALU64_REG(BPF_ADD, R5, R6),
3875 BPF_ALU64_REG(BPF_ADD, R5, R7),
3876 BPF_ALU64_REG(BPF_ADD, R5, R8),
3877 BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3878 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3879 BPF_EXIT_INSN(),
3880 BPF_ALU64_REG(BPF_ADD, R6, R0),
3881 BPF_ALU64_REG(BPF_ADD, R6, R1),
3882 BPF_ALU64_REG(BPF_ADD, R6, R2),
3883 BPF_ALU64_REG(BPF_ADD, R6, R3),
3884 BPF_ALU64_REG(BPF_ADD, R6, R4),
3885 BPF_ALU64_REG(BPF_ADD, R6, R5),
3886 BPF_ALU64_REG(BPF_ADD, R6, R6),
3887 BPF_ALU64_REG(BPF_ADD, R6, R7),
3888 BPF_ALU64_REG(BPF_ADD, R6, R8),
3889 BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3890 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3891 BPF_EXIT_INSN(),
3892 BPF_ALU64_REG(BPF_ADD, R7, R0),
3893 BPF_ALU64_REG(BPF_ADD, R7, R1),
3894 BPF_ALU64_REG(BPF_ADD, R7, R2),
3895 BPF_ALU64_REG(BPF_ADD, R7, R3),
3896 BPF_ALU64_REG(BPF_ADD, R7, R4),
3897 BPF_ALU64_REG(BPF_ADD, R7, R5),
3898 BPF_ALU64_REG(BPF_ADD, R7, R6),
3899 BPF_ALU64_REG(BPF_ADD, R7, R7),
3900 BPF_ALU64_REG(BPF_ADD, R7, R8),
3901 BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3902 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3903 BPF_EXIT_INSN(),
3904 BPF_ALU64_REG(BPF_ADD, R8, R0),
3905 BPF_ALU64_REG(BPF_ADD, R8, R1),
3906 BPF_ALU64_REG(BPF_ADD, R8, R2),
3907 BPF_ALU64_REG(BPF_ADD, R8, R3),
3908 BPF_ALU64_REG(BPF_ADD, R8, R4),
3909 BPF_ALU64_REG(BPF_ADD, R8, R5),
3910 BPF_ALU64_REG(BPF_ADD, R8, R6),
3911 BPF_ALU64_REG(BPF_ADD, R8, R7),
3912 BPF_ALU64_REG(BPF_ADD, R8, R8),
3913 BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3914 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3915 BPF_EXIT_INSN(),
3916 BPF_ALU64_REG(BPF_ADD, R9, R0),
3917 BPF_ALU64_REG(BPF_ADD, R9, R1),
3918 BPF_ALU64_REG(BPF_ADD, R9, R2),
3919 BPF_ALU64_REG(BPF_ADD, R9, R3),
3920 BPF_ALU64_REG(BPF_ADD, R9, R4),
3921 BPF_ALU64_REG(BPF_ADD, R9, R5),
3922 BPF_ALU64_REG(BPF_ADD, R9, R6),
3923 BPF_ALU64_REG(BPF_ADD, R9, R7),
3924 BPF_ALU64_REG(BPF_ADD, R9, R8),
3925 BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3926 BPF_ALU64_REG(BPF_MOV, R0, R9),
3927 BPF_EXIT_INSN(),
3928 },
3929 INTERNAL,
3930 { },
3931 { { 0, 2957380 } }
3932 },
3933 {
3934 "INT: ADD 32-bit",
3935 .u.insns_int = {
3936 BPF_ALU32_IMM(BPF_MOV, R0, 20),
3937 BPF_ALU32_IMM(BPF_MOV, R1, 1),
3938 BPF_ALU32_IMM(BPF_MOV, R2, 2),
3939 BPF_ALU32_IMM(BPF_MOV, R3, 3),
3940 BPF_ALU32_IMM(BPF_MOV, R4, 4),
3941 BPF_ALU32_IMM(BPF_MOV, R5, 5),
3942 BPF_ALU32_IMM(BPF_MOV, R6, 6),
3943 BPF_ALU32_IMM(BPF_MOV, R7, 7),
3944 BPF_ALU32_IMM(BPF_MOV, R8, 8),
3945 BPF_ALU32_IMM(BPF_MOV, R9, 9),
3946 BPF_ALU64_IMM(BPF_ADD, R1, 10),
3947 BPF_ALU64_IMM(BPF_ADD, R2, 10),
3948 BPF_ALU64_IMM(BPF_ADD, R3, 10),
3949 BPF_ALU64_IMM(BPF_ADD, R4, 10),
3950 BPF_ALU64_IMM(BPF_ADD, R5, 10),
3951 BPF_ALU64_IMM(BPF_ADD, R6, 10),
3952 BPF_ALU64_IMM(BPF_ADD, R7, 10),
3953 BPF_ALU64_IMM(BPF_ADD, R8, 10),
3954 BPF_ALU64_IMM(BPF_ADD, R9, 10),
3955 BPF_ALU32_REG(BPF_ADD, R0, R1),
3956 BPF_ALU32_REG(BPF_ADD, R0, R2),
3957 BPF_ALU32_REG(BPF_ADD, R0, R3),
3958 BPF_ALU32_REG(BPF_ADD, R0, R4),
3959 BPF_ALU32_REG(BPF_ADD, R0, R5),
3960 BPF_ALU32_REG(BPF_ADD, R0, R6),
3961 BPF_ALU32_REG(BPF_ADD, R0, R7),
3962 BPF_ALU32_REG(BPF_ADD, R0, R8),
3963 BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3964 BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3965 BPF_EXIT_INSN(),
3966 BPF_ALU32_REG(BPF_ADD, R1, R0),
3967 BPF_ALU32_REG(BPF_ADD, R1, R1),
3968 BPF_ALU32_REG(BPF_ADD, R1, R2),
3969 BPF_ALU32_REG(BPF_ADD, R1, R3),
3970 BPF_ALU32_REG(BPF_ADD, R1, R4),
3971 BPF_ALU32_REG(BPF_ADD, R1, R5),
3972 BPF_ALU32_REG(BPF_ADD, R1, R6),
3973 BPF_ALU32_REG(BPF_ADD, R1, R7),
3974 BPF_ALU32_REG(BPF_ADD, R1, R8),
3975 BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3976 BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3977 BPF_EXIT_INSN(),
3978 BPF_ALU32_REG(BPF_ADD, R2, R0),
3979 BPF_ALU32_REG(BPF_ADD, R2, R1),
3980 BPF_ALU32_REG(BPF_ADD, R2, R2),
3981 BPF_ALU32_REG(BPF_ADD, R2, R3),
3982 BPF_ALU32_REG(BPF_ADD, R2, R4),
3983 BPF_ALU32_REG(BPF_ADD, R2, R5),
3984 BPF_ALU32_REG(BPF_ADD, R2, R6),
3985 BPF_ALU32_REG(BPF_ADD, R2, R7),
3986 BPF_ALU32_REG(BPF_ADD, R2, R8),
3987 BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3988 BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3989 BPF_EXIT_INSN(),
3990 BPF_ALU32_REG(BPF_ADD, R3, R0),
3991 BPF_ALU32_REG(BPF_ADD, R3, R1),
3992 BPF_ALU32_REG(BPF_ADD, R3, R2),
3993 BPF_ALU32_REG(BPF_ADD, R3, R3),
3994 BPF_ALU32_REG(BPF_ADD, R3, R4),
3995 BPF_ALU32_REG(BPF_ADD, R3, R5),
3996 BPF_ALU32_REG(BPF_ADD, R3, R6),
3997 BPF_ALU32_REG(BPF_ADD, R3, R7),
3998 BPF_ALU32_REG(BPF_ADD, R3, R8),
3999 BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
4000 BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
4001 BPF_EXIT_INSN(),
4002 BPF_ALU32_REG(BPF_ADD, R4, R0),
4003 BPF_ALU32_REG(BPF_ADD, R4, R1),
4004 BPF_ALU32_REG(BPF_ADD, R4, R2),
4005 BPF_ALU32_REG(BPF_ADD, R4, R3),
4006 BPF_ALU32_REG(BPF_ADD, R4, R4),
4007 BPF_ALU32_REG(BPF_ADD, R4, R5),
4008 BPF_ALU32_REG(BPF_ADD, R4, R6),
4009 BPF_ALU32_REG(BPF_ADD, R4, R7),
4010 BPF_ALU32_REG(BPF_ADD, R4, R8),
4011 BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
4012 BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
4013 BPF_EXIT_INSN(),
4014 BPF_ALU32_REG(BPF_ADD, R5, R0),
4015 BPF_ALU32_REG(BPF_ADD, R5, R1),
4016 BPF_ALU32_REG(BPF_ADD, R5, R2),
4017 BPF_ALU32_REG(BPF_ADD, R5, R3),
4018 BPF_ALU32_REG(BPF_ADD, R5, R4),
4019 BPF_ALU32_REG(BPF_ADD, R5, R5),
4020 BPF_ALU32_REG(BPF_ADD, R5, R6),
4021 BPF_ALU32_REG(BPF_ADD, R5, R7),
4022 BPF_ALU32_REG(BPF_ADD, R5, R8),
4023 BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
4024 BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
4025 BPF_EXIT_INSN(),
4026 BPF_ALU32_REG(BPF_ADD, R6, R0),
4027 BPF_ALU32_REG(BPF_ADD, R6, R1),
4028 BPF_ALU32_REG(BPF_ADD, R6, R2),
4029 BPF_ALU32_REG(BPF_ADD, R6, R3),
4030 BPF_ALU32_REG(BPF_ADD, R6, R4),
4031 BPF_ALU32_REG(BPF_ADD, R6, R5),
4032 BPF_ALU32_REG(BPF_ADD, R6, R6),
4033 BPF_ALU32_REG(BPF_ADD, R6, R7),
4034 BPF_ALU32_REG(BPF_ADD, R6, R8),
4035 BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
4036 BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
4037 BPF_EXIT_INSN(),
4038 BPF_ALU32_REG(BPF_ADD, R7, R0),
4039 BPF_ALU32_REG(BPF_ADD, R7, R1),
4040 BPF_ALU32_REG(BPF_ADD, R7, R2),
4041 BPF_ALU32_REG(BPF_ADD, R7, R3),
4042 BPF_ALU32_REG(BPF_ADD, R7, R4),
4043 BPF_ALU32_REG(BPF_ADD, R7, R5),
4044 BPF_ALU32_REG(BPF_ADD, R7, R6),
4045 BPF_ALU32_REG(BPF_ADD, R7, R7),
4046 BPF_ALU32_REG(BPF_ADD, R7, R8),
4047 BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
4048 BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
4049 BPF_EXIT_INSN(),
4050 BPF_ALU32_REG(BPF_ADD, R8, R0),
4051 BPF_ALU32_REG(BPF_ADD, R8, R1),
4052 BPF_ALU32_REG(BPF_ADD, R8, R2),
4053 BPF_ALU32_REG(BPF_ADD, R8, R3),
4054 BPF_ALU32_REG(BPF_ADD, R8, R4),
4055 BPF_ALU32_REG(BPF_ADD, R8, R5),
4056 BPF_ALU32_REG(BPF_ADD, R8, R6),
4057 BPF_ALU32_REG(BPF_ADD, R8, R7),
4058 BPF_ALU32_REG(BPF_ADD, R8, R8),
4059 BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
4060 BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
4061 BPF_EXIT_INSN(),
4062 BPF_ALU32_REG(BPF_ADD, R9, R0),
4063 BPF_ALU32_REG(BPF_ADD, R9, R1),
4064 BPF_ALU32_REG(BPF_ADD, R9, R2),
4065 BPF_ALU32_REG(BPF_ADD, R9, R3),
4066 BPF_ALU32_REG(BPF_ADD, R9, R4),
4067 BPF_ALU32_REG(BPF_ADD, R9, R5),
4068 BPF_ALU32_REG(BPF_ADD, R9, R6),
4069 BPF_ALU32_REG(BPF_ADD, R9, R7),
4070 BPF_ALU32_REG(BPF_ADD, R9, R8),
4071 BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
4072 BPF_ALU32_REG(BPF_MOV, R0, R9),
4073 BPF_EXIT_INSN(),
4074 },
4075 INTERNAL,
4076 { },
4077 { { 0, 2957380 } }
4078 },
4079 { /* Mainly checking JIT here. */
4080 "INT: SUB",
4081 .u.insns_int = {
4082 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4083 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4084 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4085 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4086 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4087 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4088 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4089 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4090 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4091 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4092 BPF_ALU64_REG(BPF_SUB, R0, R0),
4093 BPF_ALU64_REG(BPF_SUB, R0, R1),
4094 BPF_ALU64_REG(BPF_SUB, R0, R2),
4095 BPF_ALU64_REG(BPF_SUB, R0, R3),
4096 BPF_ALU64_REG(BPF_SUB, R0, R4),
4097 BPF_ALU64_REG(BPF_SUB, R0, R5),
4098 BPF_ALU64_REG(BPF_SUB, R0, R6),
4099 BPF_ALU64_REG(BPF_SUB, R0, R7),
4100 BPF_ALU64_REG(BPF_SUB, R0, R8),
4101 BPF_ALU64_REG(BPF_SUB, R0, R9),
4102 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4103 BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4104 BPF_EXIT_INSN(),
4105 BPF_ALU64_REG(BPF_SUB, R1, R0),
4106 BPF_ALU64_REG(BPF_SUB, R1, R2),
4107 BPF_ALU64_REG(BPF_SUB, R1, R3),
4108 BPF_ALU64_REG(BPF_SUB, R1, R4),
4109 BPF_ALU64_REG(BPF_SUB, R1, R5),
4110 BPF_ALU64_REG(BPF_SUB, R1, R6),
4111 BPF_ALU64_REG(BPF_SUB, R1, R7),
4112 BPF_ALU64_REG(BPF_SUB, R1, R8),
4113 BPF_ALU64_REG(BPF_SUB, R1, R9),
4114 BPF_ALU64_IMM(BPF_SUB, R1, 10),
4115 BPF_ALU64_REG(BPF_SUB, R2, R0),
4116 BPF_ALU64_REG(BPF_SUB, R2, R1),
4117 BPF_ALU64_REG(BPF_SUB, R2, R3),
4118 BPF_ALU64_REG(BPF_SUB, R2, R4),
4119 BPF_ALU64_REG(BPF_SUB, R2, R5),
4120 BPF_ALU64_REG(BPF_SUB, R2, R6),
4121 BPF_ALU64_REG(BPF_SUB, R2, R7),
4122 BPF_ALU64_REG(BPF_SUB, R2, R8),
4123 BPF_ALU64_REG(BPF_SUB, R2, R9),
4124 BPF_ALU64_IMM(BPF_SUB, R2, 10),
4125 BPF_ALU64_REG(BPF_SUB, R3, R0),
4126 BPF_ALU64_REG(BPF_SUB, R3, R1),
4127 BPF_ALU64_REG(BPF_SUB, R3, R2),
4128 BPF_ALU64_REG(BPF_SUB, R3, R4),
4129 BPF_ALU64_REG(BPF_SUB, R3, R5),
4130 BPF_ALU64_REG(BPF_SUB, R3, R6),
4131 BPF_ALU64_REG(BPF_SUB, R3, R7),
4132 BPF_ALU64_REG(BPF_SUB, R3, R8),
4133 BPF_ALU64_REG(BPF_SUB, R3, R9),
4134 BPF_ALU64_IMM(BPF_SUB, R3, 10),
4135 BPF_ALU64_REG(BPF_SUB, R4, R0),
4136 BPF_ALU64_REG(BPF_SUB, R4, R1),
4137 BPF_ALU64_REG(BPF_SUB, R4, R2),
4138 BPF_ALU64_REG(BPF_SUB, R4, R3),
4139 BPF_ALU64_REG(BPF_SUB, R4, R5),
4140 BPF_ALU64_REG(BPF_SUB, R4, R6),
4141 BPF_ALU64_REG(BPF_SUB, R4, R7),
4142 BPF_ALU64_REG(BPF_SUB, R4, R8),
4143 BPF_ALU64_REG(BPF_SUB, R4, R9),
4144 BPF_ALU64_IMM(BPF_SUB, R4, 10),
4145 BPF_ALU64_REG(BPF_SUB, R5, R0),
4146 BPF_ALU64_REG(BPF_SUB, R5, R1),
4147 BPF_ALU64_REG(BPF_SUB, R5, R2),
4148 BPF_ALU64_REG(BPF_SUB, R5, R3),
4149 BPF_ALU64_REG(BPF_SUB, R5, R4),
4150 BPF_ALU64_REG(BPF_SUB, R5, R6),
4151 BPF_ALU64_REG(BPF_SUB, R5, R7),
4152 BPF_ALU64_REG(BPF_SUB, R5, R8),
4153 BPF_ALU64_REG(BPF_SUB, R5, R9),
4154 BPF_ALU64_IMM(BPF_SUB, R5, 10),
4155 BPF_ALU64_REG(BPF_SUB, R6, R0),
4156 BPF_ALU64_REG(BPF_SUB, R6, R1),
4157 BPF_ALU64_REG(BPF_SUB, R6, R2),
4158 BPF_ALU64_REG(BPF_SUB, R6, R3),
4159 BPF_ALU64_REG(BPF_SUB, R6, R4),
4160 BPF_ALU64_REG(BPF_SUB, R6, R5),
4161 BPF_ALU64_REG(BPF_SUB, R6, R7),
4162 BPF_ALU64_REG(BPF_SUB, R6, R8),
4163 BPF_ALU64_REG(BPF_SUB, R6, R9),
4164 BPF_ALU64_IMM(BPF_SUB, R6, 10),
4165 BPF_ALU64_REG(BPF_SUB, R7, R0),
4166 BPF_ALU64_REG(BPF_SUB, R7, R1),
4167 BPF_ALU64_REG(BPF_SUB, R7, R2),
4168 BPF_ALU64_REG(BPF_SUB, R7, R3),
4169 BPF_ALU64_REG(BPF_SUB, R7, R4),
4170 BPF_ALU64_REG(BPF_SUB, R7, R5),
4171 BPF_ALU64_REG(BPF_SUB, R7, R6),
4172 BPF_ALU64_REG(BPF_SUB, R7, R8),
4173 BPF_ALU64_REG(BPF_SUB, R7, R9),
4174 BPF_ALU64_IMM(BPF_SUB, R7, 10),
4175 BPF_ALU64_REG(BPF_SUB, R8, R0),
4176 BPF_ALU64_REG(BPF_SUB, R8, R1),
4177 BPF_ALU64_REG(BPF_SUB, R8, R2),
4178 BPF_ALU64_REG(BPF_SUB, R8, R3),
4179 BPF_ALU64_REG(BPF_SUB, R8, R4),
4180 BPF_ALU64_REG(BPF_SUB, R8, R5),
4181 BPF_ALU64_REG(BPF_SUB, R8, R6),
4182 BPF_ALU64_REG(BPF_SUB, R8, R7),
4183 BPF_ALU64_REG(BPF_SUB, R8, R9),
4184 BPF_ALU64_IMM(BPF_SUB, R8, 10),
4185 BPF_ALU64_REG(BPF_SUB, R9, R0),
4186 BPF_ALU64_REG(BPF_SUB, R9, R1),
4187 BPF_ALU64_REG(BPF_SUB, R9, R2),
4188 BPF_ALU64_REG(BPF_SUB, R9, R3),
4189 BPF_ALU64_REG(BPF_SUB, R9, R4),
4190 BPF_ALU64_REG(BPF_SUB, R9, R5),
4191 BPF_ALU64_REG(BPF_SUB, R9, R6),
4192 BPF_ALU64_REG(BPF_SUB, R9, R7),
4193 BPF_ALU64_REG(BPF_SUB, R9, R8),
4194 BPF_ALU64_IMM(BPF_SUB, R9, 10),
4195 BPF_ALU64_IMM(BPF_SUB, R0, 10),
4196 BPF_ALU64_IMM(BPF_NEG, R0, 0),
4197 BPF_ALU64_REG(BPF_SUB, R0, R1),
4198 BPF_ALU64_REG(BPF_SUB, R0, R2),
4199 BPF_ALU64_REG(BPF_SUB, R0, R3),
4200 BPF_ALU64_REG(BPF_SUB, R0, R4),
4201 BPF_ALU64_REG(BPF_SUB, R0, R5),
4202 BPF_ALU64_REG(BPF_SUB, R0, R6),
4203 BPF_ALU64_REG(BPF_SUB, R0, R7),
4204 BPF_ALU64_REG(BPF_SUB, R0, R8),
4205 BPF_ALU64_REG(BPF_SUB, R0, R9),
4206 BPF_EXIT_INSN(),
4207 },
4208 INTERNAL,
4209 { },
4210 { { 0, 11 } }
4211 },
4212 { /* Mainly checking JIT here. */
4213 "INT: XOR",
4214 .u.insns_int = {
4215 BPF_ALU64_REG(BPF_SUB, R0, R0),
4216 BPF_ALU64_REG(BPF_XOR, R1, R1),
4217 BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4218 BPF_EXIT_INSN(),
4219 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4220 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4221 BPF_ALU64_REG(BPF_SUB, R1, R1),
4222 BPF_ALU64_REG(BPF_XOR, R2, R2),
4223 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4224 BPF_EXIT_INSN(),
4225 BPF_ALU64_REG(BPF_SUB, R2, R2),
4226 BPF_ALU64_REG(BPF_XOR, R3, R3),
4227 BPF_ALU64_IMM(BPF_MOV, R0, 10),
4228 BPF_ALU64_IMM(BPF_MOV, R1, -1),
4229 BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4230 BPF_EXIT_INSN(),
4231 BPF_ALU64_REG(BPF_SUB, R3, R3),
4232 BPF_ALU64_REG(BPF_XOR, R4, R4),
4233 BPF_ALU64_IMM(BPF_MOV, R2, 1),
4234 BPF_ALU64_IMM(BPF_MOV, R5, -1),
4235 BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4236 BPF_EXIT_INSN(),
4237 BPF_ALU64_REG(BPF_SUB, R4, R4),
4238 BPF_ALU64_REG(BPF_XOR, R5, R5),
4239 BPF_ALU64_IMM(BPF_MOV, R3, 1),
4240 BPF_ALU64_IMM(BPF_MOV, R7, -1),
4241 BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4242 BPF_EXIT_INSN(),
4243 BPF_ALU64_IMM(BPF_MOV, R5, 1),
4244 BPF_ALU64_REG(BPF_SUB, R5, R5),
4245 BPF_ALU64_REG(BPF_XOR, R6, R6),
4246 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4247 BPF_ALU64_IMM(BPF_MOV, R8, -1),
4248 BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4249 BPF_EXIT_INSN(),
4250 BPF_ALU64_REG(BPF_SUB, R6, R6),
4251 BPF_ALU64_REG(BPF_XOR, R7, R7),
4252 BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4253 BPF_EXIT_INSN(),
4254 BPF_ALU64_REG(BPF_SUB, R7, R7),
4255 BPF_ALU64_REG(BPF_XOR, R8, R8),
4256 BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4257 BPF_EXIT_INSN(),
4258 BPF_ALU64_REG(BPF_SUB, R8, R8),
4259 BPF_ALU64_REG(BPF_XOR, R9, R9),
4260 BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4261 BPF_EXIT_INSN(),
4262 BPF_ALU64_REG(BPF_SUB, R9, R9),
4263 BPF_ALU64_REG(BPF_XOR, R0, R0),
4264 BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4265 BPF_EXIT_INSN(),
4266 BPF_ALU64_REG(BPF_SUB, R1, R1),
4267 BPF_ALU64_REG(BPF_XOR, R0, R0),
4268 BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4269 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4270 BPF_EXIT_INSN(),
4271 BPF_ALU64_IMM(BPF_MOV, R0, 1),
4272 BPF_EXIT_INSN(),
4273 },
4274 INTERNAL,
4275 { },
4276 { { 0, 1 } }
4277 },
4278 { /* Mainly checking JIT here. */
4279 "INT: MUL",
4280 .u.insns_int = {
4281 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4282 BPF_ALU64_IMM(BPF_MOV, R1, 1),
4283 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4284 BPF_ALU64_IMM(BPF_MOV, R3, 3),
4285 BPF_ALU64_IMM(BPF_MOV, R4, 4),
4286 BPF_ALU64_IMM(BPF_MOV, R5, 5),
4287 BPF_ALU64_IMM(BPF_MOV, R6, 6),
4288 BPF_ALU64_IMM(BPF_MOV, R7, 7),
4289 BPF_ALU64_IMM(BPF_MOV, R8, 8),
4290 BPF_ALU64_IMM(BPF_MOV, R9, 9),
4291 BPF_ALU64_REG(BPF_MUL, R0, R0),
4292 BPF_ALU64_REG(BPF_MUL, R0, R1),
4293 BPF_ALU64_REG(BPF_MUL, R0, R2),
4294 BPF_ALU64_REG(BPF_MUL, R0, R3),
4295 BPF_ALU64_REG(BPF_MUL, R0, R4),
4296 BPF_ALU64_REG(BPF_MUL, R0, R5),
4297 BPF_ALU64_REG(BPF_MUL, R0, R6),
4298 BPF_ALU64_REG(BPF_MUL, R0, R7),
4299 BPF_ALU64_REG(BPF_MUL, R0, R8),
4300 BPF_ALU64_REG(BPF_MUL, R0, R9),
4301 BPF_ALU64_IMM(BPF_MUL, R0, 10),
4302 BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
4303 BPF_EXIT_INSN(),
4304 BPF_ALU64_REG(BPF_MUL, R1, R0),
4305 BPF_ALU64_REG(BPF_MUL, R1, R2),
4306 BPF_ALU64_REG(BPF_MUL, R1, R3),
4307 BPF_ALU64_REG(BPF_MUL, R1, R4),
4308 BPF_ALU64_REG(BPF_MUL, R1, R5),
4309 BPF_ALU64_REG(BPF_MUL, R1, R6),
4310 BPF_ALU64_REG(BPF_MUL, R1, R7),
4311 BPF_ALU64_REG(BPF_MUL, R1, R8),
4312 BPF_ALU64_REG(BPF_MUL, R1, R9),
4313 BPF_ALU64_IMM(BPF_MUL, R1, 10),
4314 BPF_ALU64_REG(BPF_MOV, R2, R1),
4315 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4316 BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4317 BPF_EXIT_INSN(),
4318 BPF_ALU64_IMM(BPF_LSH, R1, 32),
4319 BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4320 BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4321 BPF_EXIT_INSN(),
4322 BPF_ALU64_REG(BPF_MUL, R2, R0),
4323 BPF_ALU64_REG(BPF_MUL, R2, R1),
4324 BPF_ALU64_REG(BPF_MUL, R2, R3),
4325 BPF_ALU64_REG(BPF_MUL, R2, R4),
4326 BPF_ALU64_REG(BPF_MUL, R2, R5),
4327 BPF_ALU64_REG(BPF_MUL, R2, R6),
4328 BPF_ALU64_REG(BPF_MUL, R2, R7),
4329 BPF_ALU64_REG(BPF_MUL, R2, R8),
4330 BPF_ALU64_REG(BPF_MUL, R2, R9),
4331 BPF_ALU64_IMM(BPF_MUL, R2, 10),
4332 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4333 BPF_ALU64_REG(BPF_MOV, R0, R2),
4334 BPF_EXIT_INSN(),
4335 },
4336 INTERNAL,
4337 { },
4338 { { 0, 0x35d97ef2 } }
4339 },
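	/*
	 * The next three cases ("MOV REG64", "MOV REG32" and "LD IMM64")
	 * share one pattern: fill R0-R9 with an all-ones value, clear the
	 * registers again with a different move/load flavour, then sum
	 * them and add 0xfefe.  Only if every register was really zeroed
	 * does the program return exactly 0xfefe.
	 */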
4340 { /* Mainly checking JIT here. */
4341 "MOV REG64",
4342 .u.insns_int = {
4343 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4344 BPF_MOV64_REG(R1, R0),
4345 BPF_MOV64_REG(R2, R1),
4346 BPF_MOV64_REG(R3, R2),
4347 BPF_MOV64_REG(R4, R3),
4348 BPF_MOV64_REG(R5, R4),
4349 BPF_MOV64_REG(R6, R5),
4350 BPF_MOV64_REG(R7, R6),
4351 BPF_MOV64_REG(R8, R7),
4352 BPF_MOV64_REG(R9, R8),
4353 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4354 BPF_ALU64_IMM(BPF_MOV, R1, 0),
4355 BPF_ALU64_IMM(BPF_MOV, R2, 0),
4356 BPF_ALU64_IMM(BPF_MOV, R3, 0),
4357 BPF_ALU64_IMM(BPF_MOV, R4, 0),
4358 BPF_ALU64_IMM(BPF_MOV, R5, 0),
4359 BPF_ALU64_IMM(BPF_MOV, R6, 0),
4360 BPF_ALU64_IMM(BPF_MOV, R7, 0),
4361 BPF_ALU64_IMM(BPF_MOV, R8, 0),
4362 BPF_ALU64_IMM(BPF_MOV, R9, 0),
4363 BPF_ALU64_REG(BPF_ADD, R0, R0),
4364 BPF_ALU64_REG(BPF_ADD, R0, R1),
4365 BPF_ALU64_REG(BPF_ADD, R0, R2),
4366 BPF_ALU64_REG(BPF_ADD, R0, R3),
4367 BPF_ALU64_REG(BPF_ADD, R0, R4),
4368 BPF_ALU64_REG(BPF_ADD, R0, R5),
4369 BPF_ALU64_REG(BPF_ADD, R0, R6),
4370 BPF_ALU64_REG(BPF_ADD, R0, R7),
4371 BPF_ALU64_REG(BPF_ADD, R0, R8),
4372 BPF_ALU64_REG(BPF_ADD, R0, R9),
4373 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4374 BPF_EXIT_INSN(),
4375 },
4376 INTERNAL,
4377 { },
4378 { { 0, 0xfefe } }
4379 },
4380 { /* Mainly checking JIT here. */
4381 "MOV REG32",
4382 .u.insns_int = {
4383 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4384 BPF_MOV64_REG(R1, R0),
4385 BPF_MOV64_REG(R2, R1),
4386 BPF_MOV64_REG(R3, R2),
4387 BPF_MOV64_REG(R4, R3),
4388 BPF_MOV64_REG(R5, R4),
4389 BPF_MOV64_REG(R6, R5),
4390 BPF_MOV64_REG(R7, R6),
4391 BPF_MOV64_REG(R8, R7),
4392 BPF_MOV64_REG(R9, R8),
4393 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4394 BPF_ALU32_IMM(BPF_MOV, R1, 0),
4395 BPF_ALU32_IMM(BPF_MOV, R2, 0),
4396 BPF_ALU32_IMM(BPF_MOV, R3, 0),
4397 BPF_ALU32_IMM(BPF_MOV, R4, 0),
4398 BPF_ALU32_IMM(BPF_MOV, R5, 0),
4399 BPF_ALU32_IMM(BPF_MOV, R6, 0),
4400 BPF_ALU32_IMM(BPF_MOV, R7, 0),
4401 BPF_ALU32_IMM(BPF_MOV, R8, 0),
4402 BPF_ALU32_IMM(BPF_MOV, R9, 0),
4403 BPF_ALU64_REG(BPF_ADD, R0, R0),
4404 BPF_ALU64_REG(BPF_ADD, R0, R1),
4405 BPF_ALU64_REG(BPF_ADD, R0, R2),
4406 BPF_ALU64_REG(BPF_ADD, R0, R3),
4407 BPF_ALU64_REG(BPF_ADD, R0, R4),
4408 BPF_ALU64_REG(BPF_ADD, R0, R5),
4409 BPF_ALU64_REG(BPF_ADD, R0, R6),
4410 BPF_ALU64_REG(BPF_ADD, R0, R7),
4411 BPF_ALU64_REG(BPF_ADD, R0, R8),
4412 BPF_ALU64_REG(BPF_ADD, R0, R9),
4413 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4414 BPF_EXIT_INSN(),
4415 },
4416 INTERNAL,
4417 { },
4418 { { 0, 0xfefe } }
4419 },
4420 { /* Mainly checking JIT here. */
4421 "LD IMM64",
4422 .u.insns_int = {
4423 BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4424 BPF_MOV64_REG(R1, R0),
4425 BPF_MOV64_REG(R2, R1),
4426 BPF_MOV64_REG(R3, R2),
4427 BPF_MOV64_REG(R4, R3),
4428 BPF_MOV64_REG(R5, R4),
4429 BPF_MOV64_REG(R6, R5),
4430 BPF_MOV64_REG(R7, R6),
4431 BPF_MOV64_REG(R8, R7),
4432 BPF_MOV64_REG(R9, R8),
4433 BPF_LD_IMM64(R0, 0x0LL),
4434 BPF_LD_IMM64(R1, 0x0LL),
4435 BPF_LD_IMM64(R2, 0x0LL),
4436 BPF_LD_IMM64(R3, 0x0LL),
4437 BPF_LD_IMM64(R4, 0x0LL),
4438 BPF_LD_IMM64(R5, 0x0LL),
4439 BPF_LD_IMM64(R6, 0x0LL),
4440 BPF_LD_IMM64(R7, 0x0LL),
4441 BPF_LD_IMM64(R8, 0x0LL),
4442 BPF_LD_IMM64(R9, 0x0LL),
4443 BPF_ALU64_REG(BPF_ADD, R0, R0),
4444 BPF_ALU64_REG(BPF_ADD, R0, R1),
4445 BPF_ALU64_REG(BPF_ADD, R0, R2),
4446 BPF_ALU64_REG(BPF_ADD, R0, R3),
4447 BPF_ALU64_REG(BPF_ADD, R0, R4),
4448 BPF_ALU64_REG(BPF_ADD, R0, R5),
4449 BPF_ALU64_REG(BPF_ADD, R0, R6),
4450 BPF_ALU64_REG(BPF_ADD, R0, R7),
4451 BPF_ALU64_REG(BPF_ADD, R0, R8),
4452 BPF_ALU64_REG(BPF_ADD, R0, R9),
4453 BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4454 BPF_EXIT_INSN(),
4455 },
4456 INTERNAL,
4457 { },
4458 { { 0, 0xfefe } }
4459 },
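	/*
	 * "INT: ALU MIX" chains several ALU ops: R0 = (11 - 1) / (2 XOR 3)
	 * = 10, then 10 % 3 = 1 is checked, and on success the program
	 * returns -1.
	 */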
4460 {
4461 "INT: ALU MIX",
4462 .u.insns_int = {
4463 BPF_ALU64_IMM(BPF_MOV, R0, 11),
4464 BPF_ALU64_IMM(BPF_ADD, R0, -1),
4465 BPF_ALU64_IMM(BPF_MOV, R2, 2),
4466 BPF_ALU64_IMM(BPF_XOR, R2, 3),
4467 BPF_ALU64_REG(BPF_DIV, R0, R2),
4468 BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4469 BPF_EXIT_INSN(),
4470 BPF_ALU64_IMM(BPF_MOD, R0, 3),
4471 BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4472 BPF_EXIT_INSN(),
4473 BPF_ALU64_IMM(BPF_MOV, R0, -1),
4474 BPF_EXIT_INSN(),
4475 },
4476 INTERNAL,
4477 { },
4478 { { 0, -1 } }
4479 },
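	/*
	 * Shift-by-register coverage: 32-bit logical right shift, 64-bit
	 * left and arithmetic right shifts, and shifting a register by
	 * itself.  The AND with 63 clamps the variable shift amount to
	 * the valid 64-bit range before it is used.
	 */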
4480 {
4481 "INT: shifts by register",
4482 .u.insns_int = {
4483 BPF_MOV64_IMM(R0, -1234),
4484 BPF_MOV64_IMM(R1, 1),
4485 BPF_ALU32_REG(BPF_RSH, R0, R1),
4486 BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4487 BPF_EXIT_INSN(),
4488 BPF_MOV64_IMM(R2, 1),
4489 BPF_ALU64_REG(BPF_LSH, R0, R2),
4490 BPF_MOV32_IMM(R4, -1234),
4491 BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4492 BPF_EXIT_INSN(),
4493 BPF_ALU64_IMM(BPF_AND, R4, 63),
4494 BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <<= 46 */
4495 BPF_MOV64_IMM(R3, 47),
4496 BPF_ALU64_REG(BPF_ARSH, R0, R3),
4497 BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4498 BPF_EXIT_INSN(),
4499 BPF_MOV64_IMM(R2, 1),
4500 BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4501 BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4502 BPF_EXIT_INSN(),
4503 BPF_MOV64_IMM(R4, 4),
4504 BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4505 BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4506 BPF_EXIT_INSN(),
4507 BPF_MOV64_IMM(R4, 5),
4508 BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4509 BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4510 BPF_EXIT_INSN(),
4511 BPF_MOV64_IMM(R0, -1),
4512 BPF_EXIT_INSN(),
4513 },
4514 INTERNAL,
4515 { },
4516 { { 0, -1 } }
4517 },
4518#ifdef CONFIG_32BIT
4519 {
4520 "INT: 32-bit context pointer word order and zero-extension",
4521 .u.insns_int = {
4522 BPF_ALU32_IMM(BPF_MOV, R0, 0),
4523 BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
4524 BPF_ALU64_IMM(BPF_RSH, R1, 32),
4525 BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
4526 BPF_ALU32_IMM(BPF_MOV, R0, 1),
4527 BPF_EXIT_INSN(),
4528 },
4529 INTERNAL,
4530 { },
4531 { { 0, 1 } }
4532 },
4533#endif
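	/*
	 * The next four "check:" cases are negative tests: each classic
	 * BPF program is malformed in one specific way and the checker
	 * must reject it at load time with -EINVAL (FLAG_EXPECTED_FAIL).
	 */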
4534 {
4535 "check: missing ret",
4536 .u.insns = {
4537 BPF_STMT(BPF_LD | BPF_IMM, 1),
4538 },
4539 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4540 { },
4541 { },
4542 .fill_helper = NULL,
4543 .expected_errcode = -EINVAL,
4544 },
4545 {
4546 "check: div_k_0",
4547 .u.insns = {
4548 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4549 BPF_STMT(BPF_RET | BPF_K, 0)
4550 },
4551 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4552 { },
4553 { },
4554 .fill_helper = NULL,
4555 .expected_errcode = -EINVAL,
4556 },
4557 {
4558 "check: unknown insn",
4559 .u.insns = {
4560 /* seccomp insn, rejected in socket filter */
4561 BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4562 BPF_STMT(BPF_RET | BPF_K, 0)
4563 },
4564 CLASSIC | FLAG_EXPECTED_FAIL,
4565 { },
4566 { },
4567 .fill_helper = NULL,
4568 .expected_errcode = -EINVAL,
4569 },
4570 {
4571 "check: out of range spill/fill",
4572 .u.insns = {
4573 BPF_STMT(BPF_STX, 16),
4574 BPF_STMT(BPF_RET | BPF_K, 0)
4575 },
4576 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4577 { },
4578 { },
4579 .fill_helper = NULL,
4580 .expected_errcode = -EINVAL,
4581 },
4582 {
4583 "JUMPS + HOLES",
4584 .u.insns = {
4585 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4586 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4587 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4588 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4589 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4590 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4591 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4592 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4593 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4594 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4595 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4596 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4597 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4598 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4599 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4600 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4601 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4602 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4603 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4604 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4605 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4606 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4607 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4608 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4609 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4610 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4611 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4612 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4613 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4614 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4615 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4616 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4617 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4618 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4619 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4620 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4621 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4622 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4623 BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4624 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4625 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4626 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4627 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4628 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4629 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4630 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4631 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4632 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4633 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4634 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4635 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4636 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4637 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4638 BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4639 BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4640 BPF_STMT(BPF_RET | BPF_A, 0),
4641 BPF_STMT(BPF_RET | BPF_A, 0),
4642 },
4643 CLASSIC,
4644 { 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4645 0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4646 0x08, 0x00,
4647 0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4648 0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4649 0xc0, 0xa8, 0x33, 0x01,
4650 0xc0, 0xa8, 0x33, 0x02,
4651 0xbb, 0xb6,
4652 0xa9, 0xfa,
4653 0x00, 0x14, 0x00, 0x00,
4654 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4655 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4656 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4657 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4658 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4659 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4660 0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4661 0xcc, 0xcc, 0xcc, 0xcc },
4662 { { 88, 0x001b } }
4663 },
4664 {
4665 "check: RET X",
4666 .u.insns = {
4667 BPF_STMT(BPF_RET | BPF_X, 0),
4668 },
4669 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4670 { },
4671 { },
4672 .fill_helper = NULL,
4673 .expected_errcode = -EINVAL,
4674 },
4675 {
4676 "check: LDX + RET X",
4677 .u.insns = {
4678 BPF_STMT(BPF_LDX | BPF_IMM, 42),
4679 BPF_STMT(BPF_RET | BPF_X, 0),
4680 },
4681 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4682 { },
4683 { },
4684 .fill_helper = NULL,
4685 .expected_errcode = -EINVAL,
4686 },
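	/*
	 * The test below walks all 16 classic BPF scratch memory words
	 * (M[0]..M[15]): each round stores X, loads it back and bumps it
	 * by one via A, so starting from X = 100 the filter must return
	 * 100 + 16 = 116.
	 */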
4687 { /* Mainly checking JIT here. */
4688 "M[]: alt STX + LDX",
4689 .u.insns = {
4690 BPF_STMT(BPF_LDX | BPF_IMM, 100),
4691 BPF_STMT(BPF_STX, 0),
4692 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4693 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4694 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4695 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4696 BPF_STMT(BPF_STX, 1),
4697 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4698 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4699 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4700 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4701 BPF_STMT(BPF_STX, 2),
4702 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4703 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4704 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4705 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4706 BPF_STMT(BPF_STX, 3),
4707 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4708 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4709 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4710 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4711 BPF_STMT(BPF_STX, 4),
4712 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4713 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4714 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4715 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4716 BPF_STMT(BPF_STX, 5),
4717 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4718 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4719 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4720 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4721 BPF_STMT(BPF_STX, 6),
4722 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4723 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4724 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4725 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4726 BPF_STMT(BPF_STX, 7),
4727 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4728 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4729 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4730 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4731 BPF_STMT(BPF_STX, 8),
4732 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4733 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4734 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4735 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4736 BPF_STMT(BPF_STX, 9),
4737 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4738 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4739 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4740 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4741 BPF_STMT(BPF_STX, 10),
4742 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4743 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4744 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4745 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4746 BPF_STMT(BPF_STX, 11),
4747 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4748 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4749 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4750 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4751 BPF_STMT(BPF_STX, 12),
4752 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4753 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4754 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4755 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4756 BPF_STMT(BPF_STX, 13),
4757 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4758 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4759 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4760 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4761 BPF_STMT(BPF_STX, 14),
4762 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4763 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4764 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4765 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4766 BPF_STMT(BPF_STX, 15),
4767 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4768 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4769 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4770 BPF_STMT(BPF_MISC | BPF_TAX, 0),
4771 BPF_STMT(BPF_RET | BPF_A, 0),
4772 },
4773 CLASSIC | FLAG_NO_DATA,
4774 { },
4775 { { 0, 116 } },
4776 },
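	/*
	 * Same scratch memory, different pattern: fill all 16 M[] words
	 * with distinct constants first, then load each one back and
	 * accumulate the sum in A.  The expected 32-bit sum is 0x2a5a5e5.
	 */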
4777 { /* Mainly checking JIT here. */
4778 "M[]: full STX + full LDX",
4779 .u.insns = {
4780 BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4781 BPF_STMT(BPF_STX, 0),
4782 BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4783 BPF_STMT(BPF_STX, 1),
4784 BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4785 BPF_STMT(BPF_STX, 2),
4786 BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4787 BPF_STMT(BPF_STX, 3),
4788 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4789 BPF_STMT(BPF_STX, 4),
4790 BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4791 BPF_STMT(BPF_STX, 5),
4792 BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4793 BPF_STMT(BPF_STX, 6),
4794 BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4795 BPF_STMT(BPF_STX, 7),
4796 BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4797 BPF_STMT(BPF_STX, 8),
4798 BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4799 BPF_STMT(BPF_STX, 9),
4800 BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4801 BPF_STMT(BPF_STX, 10),
4802 BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4803 BPF_STMT(BPF_STX, 11),
4804 BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4805 BPF_STMT(BPF_STX, 12),
4806 BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4807 BPF_STMT(BPF_STX, 13),
4808 BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4809 BPF_STMT(BPF_STX, 14),
4810 BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4811 BPF_STMT(BPF_STX, 15),
4812 BPF_STMT(BPF_LDX | BPF_MEM, 0),
4813 BPF_STMT(BPF_MISC | BPF_TXA, 0),
4814 BPF_STMT(BPF_LDX | BPF_MEM, 1),
4815 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4816 BPF_STMT(BPF_LDX | BPF_MEM, 2),
4817 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4818 BPF_STMT(BPF_LDX | BPF_MEM, 3),
4819 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4820 BPF_STMT(BPF_LDX | BPF_MEM, 4),
4821 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4822 BPF_STMT(BPF_LDX | BPF_MEM, 5),
4823 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4824 BPF_STMT(BPF_LDX | BPF_MEM, 6),
4825 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4826 BPF_STMT(BPF_LDX | BPF_MEM, 7),
4827 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4828 BPF_STMT(BPF_LDX | BPF_MEM, 8),
4829 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4830 BPF_STMT(BPF_LDX | BPF_MEM, 9),
4831 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4832 BPF_STMT(BPF_LDX | BPF_MEM, 10),
4833 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4834 BPF_STMT(BPF_LDX | BPF_MEM, 11),
4835 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4836 BPF_STMT(BPF_LDX | BPF_MEM, 12),
4837 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4838 BPF_STMT(BPF_LDX | BPF_MEM, 13),
4839 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4840 BPF_STMT(BPF_LDX | BPF_MEM, 14),
4841 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4842 BPF_STMT(BPF_LDX | BPF_MEM, 15),
4843 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4844 BPF_STMT(BPF_RET | BPF_A, 0),
4845 },
4846 CLASSIC | FLAG_NO_DATA,
4847 { },
4848 { { 0, 0x2a5a5e5 } },
4849 },
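	/*
	 * Ancillary data boundary checks: a classic load at
	 * SKF_AD_OFF + SKF_AD_MAX falls outside the valid ancillary range
	 * and must be rejected at load time, whereas SKF_AD_OFF - 1 is
	 * accepted by the checker and only fails once the filter runs,
	 * returning 0.
	 */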
4850 {
4851 "check: SKF_AD_MAX",
4852 .u.insns = {
4853 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4854 SKF_AD_OFF + SKF_AD_MAX),
4855 BPF_STMT(BPF_RET | BPF_A, 0),
4856 },
4857 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4858 { },
4859 { },
4860 .fill_helper = NULL,
4861 .expected_errcode = -EINVAL,
4862 },
4863 { /* Passes the checker but fails at run time. */
4864 "LD [SKF_AD_OFF-1]",
4865 .u.insns = {
4866 BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4867 SKF_AD_OFF - 1),
4868 BPF_STMT(BPF_RET | BPF_K, 1),
4869 },
4870 CLASSIC,
4871 { },
4872 { { 1, 0 } },
4873 },
4874 {
4875 "load 64-bit immediate",
4876 .u.insns_int = {
4877 BPF_LD_IMM64(R1, 0x567800001234LL),
4878 BPF_MOV64_REG(R2, R1),
4879 BPF_MOV64_REG(R3, R2),
4880 BPF_ALU64_IMM(BPF_RSH, R2, 32),
4881 BPF_ALU64_IMM(BPF_LSH, R3, 32),
4882 BPF_ALU64_IMM(BPF_RSH, R3, 32),
4883 BPF_ALU64_IMM(BPF_MOV, R0, 0),
4884 BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4885 BPF_EXIT_INSN(),
4886 BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4887 BPF_EXIT_INSN(),
4888 BPF_LD_IMM64(R0, 0x1ffffffffLL),
4889 BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4890 BPF_EXIT_INSN(),
4891 },
4892 INTERNAL,
4893 { },
4894 { { 0, 1 } }
4895 },
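	/*
	 * The ALU/ALU64 immediate tests that follow rely on two eBPF
	 * conventions: 32-bit (BPF_ALU) operations zero the upper half of
	 * the destination register, while 64-bit (BPF_ALU64) operations
	 * sign-extend their 32-bit immediate.  Compare "small negative
	 * zero extension" (result 0) with "small negative sign extension"
	 * (result 0xffffffff).
	 */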
4896 /* BPF_ALU | BPF_MOV | BPF_X */
4897 {
4898 "ALU_MOV_X: dst = 2",
4899 .u.insns_int = {
4900 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4901 BPF_ALU32_REG(BPF_MOV, R0, R1),
4902 BPF_EXIT_INSN(),
4903 },
4904 INTERNAL,
4905 { },
4906 { { 0, 2 } },
4907 },
4908 {
4909 "ALU_MOV_X: dst = 4294967295",
4910 .u.insns_int = {
4911 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4912 BPF_ALU32_REG(BPF_MOV, R0, R1),
4913 BPF_EXIT_INSN(),
4914 },
4915 INTERNAL,
4916 { },
4917 { { 0, 4294967295U } },
4918 },
4919 {
4920 "ALU64_MOV_X: dst = 2",
4921 .u.insns_int = {
4922 BPF_ALU32_IMM(BPF_MOV, R1, 2),
4923 BPF_ALU64_REG(BPF_MOV, R0, R1),
4924 BPF_EXIT_INSN(),
4925 },
4926 INTERNAL,
4927 { },
4928 { { 0, 2 } },
4929 },
4930 {
4931 "ALU64_MOV_X: dst = 4294967295",
4932 .u.insns_int = {
4933 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4934 BPF_ALU64_REG(BPF_MOV, R0, R1),
4935 BPF_EXIT_INSN(),
4936 },
4937 INTERNAL,
4938 { },
4939 { { 0, 4294967295U } },
4940 },
4941 /* BPF_ALU | BPF_MOV | BPF_K */
4942 {
4943 "ALU_MOV_K: dst = 2",
4944 .u.insns_int = {
4945 BPF_ALU32_IMM(BPF_MOV, R0, 2),
4946 BPF_EXIT_INSN(),
4947 },
4948 INTERNAL,
4949 { },
4950 { { 0, 2 } },
4951 },
4952 {
4953 "ALU_MOV_K: dst = 4294967295",
4954 .u.insns_int = {
4955 BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4956 BPF_EXIT_INSN(),
4957 },
4958 INTERNAL,
4959 { },
4960 { { 0, 4294967295U } },
4961 },
4962 {
4963 "ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4964 .u.insns_int = {
4965 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4966 BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4967 BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4968 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4969 BPF_MOV32_IMM(R0, 2),
4970 BPF_EXIT_INSN(),
4971 BPF_MOV32_IMM(R0, 1),
4972 BPF_EXIT_INSN(),
4973 },
4974 INTERNAL,
4975 { },
4976 { { 0, 0x1 } },
4977 },
4978 {
4979 "ALU_MOV_K: small negative",
4980 .u.insns_int = {
4981 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4982 BPF_EXIT_INSN(),
4983 },
4984 INTERNAL,
4985 { },
4986 { { 0, -123 } }
4987 },
4988 {
4989 "ALU_MOV_K: small negative zero extension",
4990 .u.insns_int = {
4991 BPF_ALU32_IMM(BPF_MOV, R0, -123),
4992 BPF_ALU64_IMM(BPF_RSH, R0, 32),
4993 BPF_EXIT_INSN(),
4994 },
4995 INTERNAL,
4996 { },
4997 { { 0, 0 } }
4998 },
4999 {
5000 "ALU_MOV_K: large negative",
5001 .u.insns_int = {
5002 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5003 BPF_EXIT_INSN(),
5004 },
5005 INTERNAL,
5006 { },
5007 { { 0, -123456789 } }
5008 },
5009 {
5010 "ALU_MOV_K: large negative zero extension",
5011 .u.insns_int = {
5012 BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5013 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5014 BPF_EXIT_INSN(),
5015 },
5016 INTERNAL,
5017 { },
5018 { { 0, 0 } }
5019 },
5020 {
5021 "ALU64_MOV_K: dst = 2",
5022 .u.insns_int = {
5023 BPF_ALU64_IMM(BPF_MOV, R0, 2),
5024 BPF_EXIT_INSN(),
5025 },
5026 INTERNAL,
5027 { },
5028 { { 0, 2 } },
5029 },
5030 {
5031 "ALU64_MOV_K: dst = 2147483647",
5032 .u.insns_int = {
5033 BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
5034 BPF_EXIT_INSN(),
5035 },
5036 INTERNAL,
5037 { },
5038 { { 0, 2147483647 } },
5039 },
5040 {
5041 "ALU64_OR_K: dst = 0x0",
5042 .u.insns_int = {
5043 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5044 BPF_LD_IMM64(R3, 0x0),
5045 BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
5046 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5047 BPF_MOV32_IMM(R0, 2),
5048 BPF_EXIT_INSN(),
5049 BPF_MOV32_IMM(R0, 1),
5050 BPF_EXIT_INSN(),
5051 },
5052 INTERNAL,
5053 { },
5054 { { 0, 0x1 } },
5055 },
5056 {
5057 "ALU64_MOV_K: dst = -1",
5058 .u.insns_int = {
5059 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5060 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5061 BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
5062 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5063 BPF_MOV32_IMM(R0, 2),
5064 BPF_EXIT_INSN(),
5065 BPF_MOV32_IMM(R0, 1),
5066 BPF_EXIT_INSN(),
5067 },
5068 INTERNAL,
5069 { },
5070 { { 0, 0x1 } },
5071 },
5072 {
5073 "ALU64_MOV_K: small negative",
5074 .u.insns_int = {
5075 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5076 BPF_EXIT_INSN(),
5077 },
5078 INTERNAL,
5079 { },
5080 { { 0, -123 } }
5081 },
5082 {
5083 "ALU64_MOV_K: small negative sign extension",
5084 .u.insns_int = {
5085 BPF_ALU64_IMM(BPF_MOV, R0, -123),
5086 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5087 BPF_EXIT_INSN(),
5088 },
5089 INTERNAL,
5090 { },
5091 { { 0, 0xffffffff } }
5092 },
5093 {
5094 "ALU64_MOV_K: large negative",
5095 .u.insns_int = {
5096 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5097 BPF_EXIT_INSN(),
5098 },
5099 INTERNAL,
5100 { },
5101 { { 0, -123456789 } }
5102 },
5103 {
5104 "ALU64_MOV_K: large negative sign extension",
5105 .u.insns_int = {
5106 BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5107 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5108 BPF_EXIT_INSN(),
5109 },
5110 INTERNAL,
5111 { },
5112 { { 0, 0xffffffff } }
5113 },
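	/*
	 * BPF_MOVSX is a sign-extending move from an 8, 16 or 32-bit
	 * source.  Each case extracts the low bits of 0xdeadbeefdeadbeef
	 * and compares the sign-extended result against a precomputed
	 * constant, for both 32-bit and 64-bit destinations.
	 */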
5114 /* MOVSX32 */
5115 {
5116 "ALU_MOVSX | BPF_B",
5117 .u.insns_int = {
5118 BPF_LD_IMM64(R2, 0x00000000ffffffefLL),
5119 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5120 BPF_MOVSX32_REG(R1, R3, 8),
5121 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5122 BPF_MOV32_IMM(R0, 2),
5123 BPF_EXIT_INSN(),
5124 BPF_MOV32_IMM(R0, 1),
5125 BPF_EXIT_INSN(),
5126 },
5127 INTERNAL,
5128 { },
5129 { { 0, 0x1 } },
5130 },
5131 {
5132 "ALU_MOVSX | BPF_H",
5133 .u.insns_int = {
5134 BPF_LD_IMM64(R2, 0x00000000ffffbeefLL),
5135 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5136 BPF_MOVSX32_REG(R1, R3, 16),
5137 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5138 BPF_MOV32_IMM(R0, 2),
5139 BPF_EXIT_INSN(),
5140 BPF_MOV32_IMM(R0, 1),
5141 BPF_EXIT_INSN(),
5142 },
5143 INTERNAL,
5144 { },
5145 { { 0, 0x1 } },
5146 },
5147 {
5148 "ALU_MOVSX | BPF_W",
5149 .u.insns_int = {
5150 BPF_LD_IMM64(R2, 0x00000000deadbeefLL),
5151 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5152 BPF_MOVSX32_REG(R1, R3, 32),
5153 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5154 BPF_MOV32_IMM(R0, 2),
5155 BPF_EXIT_INSN(),
5156 BPF_MOV32_IMM(R0, 1),
5157 BPF_EXIT_INSN(),
5158 },
5159 INTERNAL,
5160 { },
5161 { { 0, 0x1 } },
5162 },
5163 /* MOVSX64 REG */
5164 {
5165 "ALU64_MOVSX | BPF_B",
5166 .u.insns_int = {
5167 BPF_LD_IMM64(R2, 0xffffffffffffffefLL),
5168 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5169 BPF_MOVSX64_REG(R1, R3, 8),
5170 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5171 BPF_MOV32_IMM(R0, 2),
5172 BPF_EXIT_INSN(),
5173 BPF_MOV32_IMM(R0, 1),
5174 BPF_EXIT_INSN(),
5175 },
5176 INTERNAL,
5177 { },
5178 { { 0, 0x1 } },
5179 },
5180 {
5181 "ALU64_MOVSX | BPF_H",
5182 .u.insns_int = {
5183 BPF_LD_IMM64(R2, 0xffffffffffffbeefLL),
5184 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5185 BPF_MOVSX64_REG(R1, R3, 16),
5186 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5187 BPF_MOV32_IMM(R0, 2),
5188 BPF_EXIT_INSN(),
5189 BPF_MOV32_IMM(R0, 1),
5190 BPF_EXIT_INSN(),
5191 },
5192 INTERNAL,
5193 { },
5194 { { 0, 0x1 } },
5195 },
5196 {
5197 "ALU64_MOVSX | BPF_W",
5198 .u.insns_int = {
5199 BPF_LD_IMM64(R2, 0xffffffffdeadbeefLL),
5200 BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5201 BPF_MOVSX64_REG(R1, R3, 32),
5202 BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5203 BPF_MOV32_IMM(R0, 2),
5204 BPF_EXIT_INSN(),
5205 BPF_MOV32_IMM(R0, 1),
5206 BPF_EXIT_INSN(),
5207 },
5208 INTERNAL,
5209 { },
5210 { { 0, 0x1 } },
5211 },
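	/*
	 * The ADD tests also cover wrap-around: a 32-bit add wraps at
	 * 2^32 (2 + 4294967294 = 0), while a 64-bit add carries into the
	 * upper half (2 + 4294967294 = 4294967296).
	 */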
5212 /* BPF_ALU | BPF_ADD | BPF_X */
5213 {
5214 "ALU_ADD_X: 1 + 2 = 3",
5215 .u.insns_int = {
5216 BPF_LD_IMM64(R0, 1),
5217 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5218 BPF_ALU32_REG(BPF_ADD, R0, R1),
5219 BPF_EXIT_INSN(),
5220 },
5221 INTERNAL,
5222 { },
5223 { { 0, 3 } },
5224 },
5225 {
5226 "ALU_ADD_X: 1 + 4294967294 = 4294967295",
5227 .u.insns_int = {
5228 BPF_LD_IMM64(R0, 1),
5229 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5230 BPF_ALU32_REG(BPF_ADD, R0, R1),
5231 BPF_EXIT_INSN(),
5232 },
5233 INTERNAL,
5234 { },
5235 { { 0, 4294967295U } },
5236 },
5237 {
5238 "ALU_ADD_X: 2 + 4294967294 = 0",
5239 .u.insns_int = {
5240 BPF_LD_IMM64(R0, 2),
5241 BPF_LD_IMM64(R1, 4294967294U),
5242 BPF_ALU32_REG(BPF_ADD, R0, R1),
5243 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5244 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5245 BPF_EXIT_INSN(),
5246 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5247 BPF_EXIT_INSN(),
5248 },
5249 INTERNAL,
5250 { },
5251 { { 0, 1 } },
5252 },
5253 {
5254 "ALU64_ADD_X: 1 + 2 = 3",
5255 .u.insns_int = {
5256 BPF_LD_IMM64(R0, 1),
5257 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5258 BPF_ALU64_REG(BPF_ADD, R0, R1),
5259 BPF_EXIT_INSN(),
5260 },
5261 INTERNAL,
5262 { },
5263 { { 0, 3 } },
5264 },
5265 {
5266 "ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5267 .u.insns_int = {
5268 BPF_LD_IMM64(R0, 1),
5269 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5270 BPF_ALU64_REG(BPF_ADD, R0, R1),
5271 BPF_EXIT_INSN(),
5272 },
5273 INTERNAL,
5274 { },
5275 { { 0, 4294967295U } },
5276 },
5277 {
5278 "ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5279 .u.insns_int = {
5280 BPF_LD_IMM64(R0, 2),
5281 BPF_LD_IMM64(R1, 4294967294U),
5282 BPF_LD_IMM64(R2, 4294967296ULL),
5283 BPF_ALU64_REG(BPF_ADD, R0, R1),
5284 BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5285 BPF_MOV32_IMM(R0, 0),
5286 BPF_EXIT_INSN(),
5287 BPF_MOV32_IMM(R0, 1),
5288 BPF_EXIT_INSN(),
5289 },
5290 INTERNAL,
5291 { },
5292 { { 0, 1 } },
5293 },
5294 /* BPF_ALU | BPF_ADD | BPF_K */
5295 {
5296 "ALU_ADD_K: 1 + 2 = 3",
5297 .u.insns_int = {
5298 BPF_LD_IMM64(R0, 1),
5299 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5300 BPF_EXIT_INSN(),
5301 },
5302 INTERNAL,
5303 { },
5304 { { 0, 3 } },
5305 },
5306 {
5307 "ALU_ADD_K: 3 + 0 = 3",
5308 .u.insns_int = {
5309 BPF_LD_IMM64(R0, 3),
5310 BPF_ALU32_IMM(BPF_ADD, R0, 0),
5311 BPF_EXIT_INSN(),
5312 },
5313 INTERNAL,
5314 { },
5315 { { 0, 3 } },
5316 },
5317 {
5318 "ALU_ADD_K: 1 + 4294967294 = 4294967295",
5319 .u.insns_int = {
5320 BPF_LD_IMM64(R0, 1),
5321 BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5322 BPF_EXIT_INSN(),
5323 },
5324 INTERNAL,
5325 { },
5326 { { 0, 4294967295U } },
5327 },
5328 {
5329 "ALU_ADD_K: 4294967294 + 2 = 0",
5330 .u.insns_int = {
5331 BPF_LD_IMM64(R0, 4294967294U),
5332 BPF_ALU32_IMM(BPF_ADD, R0, 2),
5333 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5334 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5335 BPF_EXIT_INSN(),
5336 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5337 BPF_EXIT_INSN(),
5338 },
5339 INTERNAL,
5340 { },
5341 { { 0, 1 } },
5342 },
5343 {
5344 "ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5345 .u.insns_int = {
5346 BPF_LD_IMM64(R2, 0x0),
5347 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5348 BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5349 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5350 BPF_MOV32_IMM(R0, 2),
5351 BPF_EXIT_INSN(),
5352 BPF_MOV32_IMM(R0, 1),
5353 BPF_EXIT_INSN(),
5354 },
5355 INTERNAL,
5356 { },
5357 { { 0, 0x1 } },
5358 },
5359 {
5360 "ALU_ADD_K: 0 + 0xffff = 0xffff",
5361 .u.insns_int = {
5362 BPF_LD_IMM64(R2, 0x0),
5363 BPF_LD_IMM64(R3, 0xffff),
5364 BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5365 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5366 BPF_MOV32_IMM(R0, 2),
5367 BPF_EXIT_INSN(),
5368 BPF_MOV32_IMM(R0, 1),
5369 BPF_EXIT_INSN(),
5370 },
5371 INTERNAL,
5372 { },
5373 { { 0, 0x1 } },
5374 },
5375 {
5376 "ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5377 .u.insns_int = {
5378 BPF_LD_IMM64(R2, 0x0),
5379 BPF_LD_IMM64(R3, 0x7fffffff),
5380 BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5381 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5382 BPF_MOV32_IMM(R0, 2),
5383 BPF_EXIT_INSN(),
5384 BPF_MOV32_IMM(R0, 1),
5385 BPF_EXIT_INSN(),
5386 },
5387 INTERNAL,
5388 { },
5389 { { 0, 0x1 } },
5390 },
5391 {
5392 "ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5393 .u.insns_int = {
5394 BPF_LD_IMM64(R2, 0x0),
5395 BPF_LD_IMM64(R3, 0x80000000),
5396 BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5397 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5398 BPF_MOV32_IMM(R0, 2),
5399 BPF_EXIT_INSN(),
5400 BPF_MOV32_IMM(R0, 1),
5401 BPF_EXIT_INSN(),
5402 },
5403 INTERNAL,
5404 { },
5405 { { 0, 0x1 } },
5406 },
5407 {
5408 "ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5409 .u.insns_int = {
5410 BPF_LD_IMM64(R2, 0x0),
5411 BPF_LD_IMM64(R3, 0x80008000),
5412 BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5413 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5414 BPF_MOV32_IMM(R0, 2),
5415 BPF_EXIT_INSN(),
5416 BPF_MOV32_IMM(R0, 1),
5417 BPF_EXIT_INSN(),
5418 },
5419 INTERNAL,
5420 { },
5421 { { 0, 0x1 } },
5422 },
5423 {
5424 "ALU64_ADD_K: 1 + 2 = 3",
5425 .u.insns_int = {
5426 BPF_LD_IMM64(R0, 1),
5427 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5428 BPF_EXIT_INSN(),
5429 },
5430 INTERNAL,
5431 { },
5432 { { 0, 3 } },
5433 },
5434 {
5435 "ALU64_ADD_K: 3 + 0 = 3",
5436 .u.insns_int = {
5437 BPF_LD_IMM64(R0, 3),
5438 BPF_ALU64_IMM(BPF_ADD, R0, 0),
5439 BPF_EXIT_INSN(),
5440 },
5441 INTERNAL,
5442 { },
5443 { { 0, 3 } },
5444 },
5445 {
5446 "ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5447 .u.insns_int = {
5448 BPF_LD_IMM64(R0, 1),
5449 BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5450 BPF_EXIT_INSN(),
5451 },
5452 INTERNAL,
5453 { },
5454 { { 0, 2147483647 } },
5455 },
5456 {
5457 "ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5458 .u.insns_int = {
5459 BPF_LD_IMM64(R0, 4294967294U),
5460 BPF_LD_IMM64(R1, 4294967296ULL),
5461 BPF_ALU64_IMM(BPF_ADD, R0, 2),
5462 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5463 BPF_ALU32_IMM(BPF_MOV, R0, 0),
5464 BPF_EXIT_INSN(),
5465 BPF_ALU32_IMM(BPF_MOV, R0, 1),
5466 BPF_EXIT_INSN(),
5467 },
5468 INTERNAL,
5469 { },
5470 { { 0, 1 } },
5471 },
5472 {
5473 "ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5474 .u.insns_int = {
5475 BPF_LD_IMM64(R0, 2147483646),
5476 BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5477 BPF_EXIT_INSN(),
5478 },
5479 INTERNAL,
5480 { },
5481 { { 0, -1 } },
5482 },
5483 {
5484 "ALU64_ADD_K: 1 + 0 = 1",
5485 .u.insns_int = {
5486 BPF_LD_IMM64(R2, 0x1),
5487 BPF_LD_IMM64(R3, 0x1),
5488 BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5489 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5490 BPF_MOV32_IMM(R0, 2),
5491 BPF_EXIT_INSN(),
5492 BPF_MOV32_IMM(R0, 1),
5493 BPF_EXIT_INSN(),
5494 },
5495 INTERNAL,
5496 { },
5497 { { 0, 0x1 } },
5498 },
5499 {
5500 "ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5501 .u.insns_int = {
5502 BPF_LD_IMM64(R2, 0x0),
5503 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5504 BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5505 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5506 BPF_MOV32_IMM(R0, 2),
5507 BPF_EXIT_INSN(),
5508 BPF_MOV32_IMM(R0, 1),
5509 BPF_EXIT_INSN(),
5510 },
5511 INTERNAL,
5512 { },
5513 { { 0, 0x1 } },
5514 },
5515 {
5516 "ALU64_ADD_K: 0 + 0xffff = 0xffff",
5517 .u.insns_int = {
5518 BPF_LD_IMM64(R2, 0x0),
5519 BPF_LD_IMM64(R3, 0xffff),
5520 BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5521 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5522 BPF_MOV32_IMM(R0, 2),
5523 BPF_EXIT_INSN(),
5524 BPF_MOV32_IMM(R0, 1),
5525 BPF_EXIT_INSN(),
5526 },
5527 INTERNAL,
5528 { },
5529 { { 0, 0x1 } },
5530 },
5531 {
5532 "ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5533 .u.insns_int = {
5534 BPF_LD_IMM64(R2, 0x0),
5535 BPF_LD_IMM64(R3, 0x7fffffff),
5536 BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5537 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5538 BPF_MOV32_IMM(R0, 2),
5539 BPF_EXIT_INSN(),
5540 BPF_MOV32_IMM(R0, 1),
5541 BPF_EXIT_INSN(),
5542 },
5543 INTERNAL,
5544 { },
5545 { { 0, 0x1 } },
5546 },
5547 {
5548 "ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5549 .u.insns_int = {
5550 BPF_LD_IMM64(R2, 0x0),
5551 BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5552 BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5553 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5554 BPF_MOV32_IMM(R0, 2),
5555 BPF_EXIT_INSN(),
5556 BPF_MOV32_IMM(R0, 1),
5557 BPF_EXIT_INSN(),
5558 },
5559 INTERNAL,
5560 { },
5561 { { 0, 0x1 } },
5562 },
5563 {
5564 "ALU_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5565 .u.insns_int = {
5566 BPF_LD_IMM64(R2, 0x0),
5567 BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5568 BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5569 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5570 BPF_MOV32_IMM(R0, 2),
5571 BPF_EXIT_INSN(),
5572 BPF_MOV32_IMM(R0, 1),
5573 BPF_EXIT_INSN(),
5574 },
5575 INTERNAL,
5576 { },
5577 { { 0, 0x1 } },
5578 },
5579 /* BPF_ALU | BPF_SUB | BPF_X */
5580 {
5581 "ALU_SUB_X: 3 - 1 = 2",
5582 .u.insns_int = {
5583 BPF_LD_IMM64(R0, 3),
5584 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5585 BPF_ALU32_REG(BPF_SUB, R0, R1),
5586 BPF_EXIT_INSN(),
5587 },
5588 INTERNAL,
5589 { },
5590 { { 0, 2 } },
5591 },
5592 {
5593 "ALU_SUB_X: 4294967295 - 4294967294 = 1",
5594 .u.insns_int = {
5595 BPF_LD_IMM64(R0, 4294967295U),
5596 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5597 BPF_ALU32_REG(BPF_SUB, R0, R1),
5598 BPF_EXIT_INSN(),
5599 },
5600 INTERNAL,
5601 { },
5602 { { 0, 1 } },
5603 },
5604 {
5605 "ALU64_SUB_X: 3 - 1 = 2",
5606 .u.insns_int = {
5607 BPF_LD_IMM64(R0, 3),
5608 BPF_ALU32_IMM(BPF_MOV, R1, 1),
5609 BPF_ALU64_REG(BPF_SUB, R0, R1),
5610 BPF_EXIT_INSN(),
5611 },
5612 INTERNAL,
5613 { },
5614 { { 0, 2 } },
5615 },
5616 {
5617 "ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5618 .u.insns_int = {
5619 BPF_LD_IMM64(R0, 4294967295U),
5620 BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5621 BPF_ALU64_REG(BPF_SUB, R0, R1),
5622 BPF_EXIT_INSN(),
5623 },
5624 INTERNAL,
5625 { },
5626 { { 0, 1 } },
5627 },
5628 /* BPF_ALU | BPF_SUB | BPF_K */
5629 {
5630 "ALU_SUB_K: 3 - 1 = 2",
5631 .u.insns_int = {
5632 BPF_LD_IMM64(R0, 3),
5633 BPF_ALU32_IMM(BPF_SUB, R0, 1),
5634 BPF_EXIT_INSN(),
5635 },
5636 INTERNAL,
5637 { },
5638 { { 0, 2 } },
5639 },
5640 {
5641 "ALU_SUB_K: 3 - 0 = 3",
5642 .u.insns_int = {
5643 BPF_LD_IMM64(R0, 3),
5644 BPF_ALU32_IMM(BPF_SUB, R0, 0),
5645 BPF_EXIT_INSN(),
5646 },
5647 INTERNAL,
5648 { },
5649 { { 0, 3 } },
5650 },
5651 {
5652 "ALU_SUB_K: 4294967295 - 4294967294 = 1",
5653 .u.insns_int = {
5654 BPF_LD_IMM64(R0, 4294967295U),
5655 BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5656 BPF_EXIT_INSN(),
5657 },
5658 INTERNAL,
5659 { },
5660 { { 0, 1 } },
5661 },
5662 {
5663 "ALU64_SUB_K: 3 - 1 = 2",
5664 .u.insns_int = {
5665 BPF_LD_IMM64(R0, 3),
5666 BPF_ALU64_IMM(BPF_SUB, R0, 1),
5667 BPF_EXIT_INSN(),
5668 },
5669 INTERNAL,
5670 { },
5671 { { 0, 2 } },
5672 },
5673 {
5674 "ALU64_SUB_K: 3 - 0 = 3",
5675 .u.insns_int = {
5676 BPF_LD_IMM64(R0, 3),
5677 BPF_ALU64_IMM(BPF_SUB, R0, 0),
5678 BPF_EXIT_INSN(),
5679 },
5680 INTERNAL,
5681 { },
5682 { { 0, 3 } },
5683 },
5684 {
5685 "ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5686 .u.insns_int = {
5687 BPF_LD_IMM64(R0, 4294967294U),
5688 BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5689 BPF_EXIT_INSN(),
5690 },
5691 INTERNAL,
5692 { },
5693 { { 0, -1 } },
5694 },
5695 {
5696 "ALU64_ADD_K: 2147483646 - 2147483647 = -1",
5697 .u.insns_int = {
5698 BPF_LD_IMM64(R0, 2147483646),
5699 BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5700 BPF_EXIT_INSN(),
5701 },
5702 INTERNAL,
5703 { },
5704 { { 0, -1 } },
5705 },
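	/*
	 * Besides small products, the MUL tests check both halves of wide
	 * multiplications: the "low word" cases return bits 31..0 of the
	 * product, the "high word" cases shift the product right by 32
	 * first.
	 */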
5706 /* BPF_ALU | BPF_MUL | BPF_X */
5707 {
5708 "ALU_MUL_X: 2 * 3 = 6",
5709 .u.insns_int = {
5710 BPF_LD_IMM64(R0, 2),
5711 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5712 BPF_ALU32_REG(BPF_MUL, R0, R1),
5713 BPF_EXIT_INSN(),
5714 },
5715 INTERNAL,
5716 { },
5717 { { 0, 6 } },
5718 },
5719 {
5720 "ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5721 .u.insns_int = {
5722 BPF_LD_IMM64(R0, 2),
5723 BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5724 BPF_ALU32_REG(BPF_MUL, R0, R1),
5725 BPF_EXIT_INSN(),
5726 },
5727 INTERNAL,
5728 { },
5729 { { 0, 0xFFFFFFF0 } },
5730 },
5731 {
5732 "ALU_MUL_X: -1 * -1 = 1",
5733 .u.insns_int = {
5734 BPF_LD_IMM64(R0, -1),
5735 BPF_ALU32_IMM(BPF_MOV, R1, -1),
5736 BPF_ALU32_REG(BPF_MUL, R0, R1),
5737 BPF_EXIT_INSN(),
5738 },
5739 INTERNAL,
5740 { },
5741 { { 0, 1 } },
5742 },
5743 {
5744 "ALU64_MUL_X: 2 * 3 = 6",
5745 .u.insns_int = {
5746 BPF_LD_IMM64(R0, 2),
5747 BPF_ALU32_IMM(BPF_MOV, R1, 3),
5748 BPF_ALU64_REG(BPF_MUL, R0, R1),
5749 BPF_EXIT_INSN(),
5750 },
5751 INTERNAL,
5752 { },
5753 { { 0, 6 } },
5754 },
5755 {
5756 "ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5757 .u.insns_int = {
5758 BPF_LD_IMM64(R0, 1),
5759 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5760 BPF_ALU64_REG(BPF_MUL, R0, R1),
5761 BPF_EXIT_INSN(),
5762 },
5763 INTERNAL,
5764 { },
5765 { { 0, 2147483647 } },
5766 },
5767 {
5768 "ALU64_MUL_X: 64x64 multiply, low word",
5769 .u.insns_int = {
5770 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5771 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5772 BPF_ALU64_REG(BPF_MUL, R0, R1),
5773 BPF_EXIT_INSN(),
5774 },
5775 INTERNAL,
5776 { },
5777 { { 0, 0xe5618cf0 } }
5778 },
5779 {
5780 "ALU64_MUL_X: 64x64 multiply, high word",
5781 .u.insns_int = {
5782 BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5783 BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5784 BPF_ALU64_REG(BPF_MUL, R0, R1),
5785 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5786 BPF_EXIT_INSN(),
5787 },
5788 INTERNAL,
5789 { },
5790 { { 0, 0x2236d88f } }
5791 },
5792 /* BPF_ALU | BPF_MUL | BPF_K */
5793 {
5794 "ALU_MUL_K: 2 * 3 = 6",
5795 .u.insns_int = {
5796 BPF_LD_IMM64(R0, 2),
5797 BPF_ALU32_IMM(BPF_MUL, R0, 3),
5798 BPF_EXIT_INSN(),
5799 },
5800 INTERNAL,
5801 { },
5802 { { 0, 6 } },
5803 },
5804 {
5805 "ALU_MUL_K: 3 * 1 = 3",
5806 .u.insns_int = {
5807 BPF_LD_IMM64(R0, 3),
5808 BPF_ALU32_IMM(BPF_MUL, R0, 1),
5809 BPF_EXIT_INSN(),
5810 },
5811 INTERNAL,
5812 { },
5813 { { 0, 3 } },
5814 },
5815 {
5816 "ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5817 .u.insns_int = {
5818 BPF_LD_IMM64(R0, 2),
5819 BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5820 BPF_EXIT_INSN(),
5821 },
5822 INTERNAL,
5823 { },
5824 { { 0, 0xFFFFFFF0 } },
5825 },
5826 {
5827 "ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5828 .u.insns_int = {
5829 BPF_LD_IMM64(R2, 0x1),
5830 BPF_LD_IMM64(R3, 0x00000000ffffffff),
5831 BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5832 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5833 BPF_MOV32_IMM(R0, 2),
5834 BPF_EXIT_INSN(),
5835 BPF_MOV32_IMM(R0, 1),
5836 BPF_EXIT_INSN(),
5837 },
5838 INTERNAL,
5839 { },
5840 { { 0, 0x1 } },
5841 },
5842 {
5843 "ALU64_MUL_K: 2 * 3 = 6",
5844 .u.insns_int = {
5845 BPF_LD_IMM64(R0, 2),
5846 BPF_ALU64_IMM(BPF_MUL, R0, 3),
5847 BPF_EXIT_INSN(),
5848 },
5849 INTERNAL,
5850 { },
5851 { { 0, 6 } },
5852 },
5853 {
5854 "ALU64_MUL_K: 3 * 1 = 3",
5855 .u.insns_int = {
5856 BPF_LD_IMM64(R0, 3),
5857 BPF_ALU64_IMM(BPF_MUL, R0, 1),
5858 BPF_EXIT_INSN(),
5859 },
5860 INTERNAL,
5861 { },
5862 { { 0, 3 } },
5863 },
5864 {
5865 "ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5866 .u.insns_int = {
5867 BPF_LD_IMM64(R0, 1),
5868 BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5869 BPF_EXIT_INSN(),
5870 },
5871 INTERNAL,
5872 { },
5873 { { 0, 2147483647 } },
5874 },
5875 {
5876 "ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5877 .u.insns_int = {
5878 BPF_LD_IMM64(R0, 1),
5879 BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5880 BPF_EXIT_INSN(),
5881 },
5882 INTERNAL,
5883 { },
5884 { { 0, -2147483647 } },
5885 },
5886 {
5887 "ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5888 .u.insns_int = {
5889 BPF_LD_IMM64(R2, 0x1),
5890 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5891 BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5892 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5893 BPF_MOV32_IMM(R0, 2),
5894 BPF_EXIT_INSN(),
5895 BPF_MOV32_IMM(R0, 1),
5896 BPF_EXIT_INSN(),
5897 },
5898 INTERNAL,
5899 { },
5900 { { 0, 0x1 } },
5901 },
5902 {
5903 "ALU64_MUL_K: 64x32 multiply, low word",
5904 .u.insns_int = {
5905 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5906 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5907 BPF_EXIT_INSN(),
5908 },
5909 INTERNAL,
5910 { },
5911 { { 0, 0xe242d208 } }
5912 },
5913 {
5914 "ALU64_MUL_K: 64x32 multiply, high word",
5915 .u.insns_int = {
5916 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5917 BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5918 BPF_ALU64_IMM(BPF_RSH, R0, 32),
5919 BPF_EXIT_INSN(),
5920 },
5921 INTERNAL,
5922 { },
5923 { { 0, 0xc28f5c28 } }
5924 },
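	/*
	 * BPF_DIV is unsigned division; the "(-1)" in the names below is
	 * the all-ones bit pattern, so dividing 0xffffffffffffffff by
	 * itself (or by the sign-extended 0xffffffff immediate) gives 1.
	 */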
5925 /* BPF_ALU | BPF_DIV | BPF_X */
5926 {
5927 "ALU_DIV_X: 6 / 2 = 3",
5928 .u.insns_int = {
5929 BPF_LD_IMM64(R0, 6),
5930 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5931 BPF_ALU32_REG(BPF_DIV, R0, R1),
5932 BPF_EXIT_INSN(),
5933 },
5934 INTERNAL,
5935 { },
5936 { { 0, 3 } },
5937 },
5938 {
5939 "ALU_DIV_X: 4294967295 / 4294967295 = 1",
5940 .u.insns_int = {
5941 BPF_LD_IMM64(R0, 4294967295U),
5942 BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5943 BPF_ALU32_REG(BPF_DIV, R0, R1),
5944 BPF_EXIT_INSN(),
5945 },
5946 INTERNAL,
5947 { },
5948 { { 0, 1 } },
5949 },
5950 {
5951 "ALU64_DIV_X: 6 / 2 = 3",
5952 .u.insns_int = {
5953 BPF_LD_IMM64(R0, 6),
5954 BPF_ALU32_IMM(BPF_MOV, R1, 2),
5955 BPF_ALU64_REG(BPF_DIV, R0, R1),
5956 BPF_EXIT_INSN(),
5957 },
5958 INTERNAL,
5959 { },
5960 { { 0, 3 } },
5961 },
5962 {
5963 "ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5964 .u.insns_int = {
5965 BPF_LD_IMM64(R0, 2147483647),
5966 BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5967 BPF_ALU64_REG(BPF_DIV, R0, R1),
5968 BPF_EXIT_INSN(),
5969 },
5970 INTERNAL,
5971 { },
5972 { { 0, 1 } },
5973 },
5974 {
5975 "ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5976 .u.insns_int = {
5977 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5978 BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5979 BPF_LD_IMM64(R3, 0x0000000000000001LL),
5980 BPF_ALU64_REG(BPF_DIV, R2, R4),
5981 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5982 BPF_MOV32_IMM(R0, 2),
5983 BPF_EXIT_INSN(),
5984 BPF_MOV32_IMM(R0, 1),
5985 BPF_EXIT_INSN(),
5986 },
5987 INTERNAL,
5988 { },
5989 { { 0, 0x1 } },
5990 },
5991 /* BPF_ALU | BPF_DIV | BPF_K */
5992 {
5993 "ALU_DIV_K: 6 / 2 = 3",
5994 .u.insns_int = {
5995 BPF_LD_IMM64(R0, 6),
5996 BPF_ALU32_IMM(BPF_DIV, R0, 2),
5997 BPF_EXIT_INSN(),
5998 },
5999 INTERNAL,
6000 { },
6001 { { 0, 3 } },
6002 },
6003 {
6004 "ALU_DIV_K: 3 / 1 = 3",
6005 .u.insns_int = {
6006 BPF_LD_IMM64(R0, 3),
6007 BPF_ALU32_IMM(BPF_DIV, R0, 1),
6008 BPF_EXIT_INSN(),
6009 },
6010 INTERNAL,
6011 { },
6012 { { 0, 3 } },
6013 },
6014 {
6015 "ALU_DIV_K: 4294967295 / 4294967295 = 1",
6016 .u.insns_int = {
6017 BPF_LD_IMM64(R0, 4294967295U),
6018 BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
6019 BPF_EXIT_INSN(),
6020 },
6021 INTERNAL,
6022 { },
6023 { { 0, 1 } },
6024 },
6025 {
6026 "ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
6027 .u.insns_int = {
6028 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6029 BPF_LD_IMM64(R3, 0x1UL),
6030 BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
6031 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6032 BPF_MOV32_IMM(R0, 2),
6033 BPF_EXIT_INSN(),
6034 BPF_MOV32_IMM(R0, 1),
6035 BPF_EXIT_INSN(),
6036 },
6037 INTERNAL,
6038 { },
6039 { { 0, 0x1 } },
6040 },
6041 {
6042 "ALU64_DIV_K: 6 / 2 = 3",
6043 .u.insns_int = {
6044 BPF_LD_IMM64(R0, 6),
6045 BPF_ALU64_IMM(BPF_DIV, R0, 2),
6046 BPF_EXIT_INSN(),
6047 },
6048 INTERNAL,
6049 { },
6050 { { 0, 3 } },
6051 },
6052 {
6053 "ALU64_DIV_K: 3 / 1 = 3",
6054 .u.insns_int = {
6055 BPF_LD_IMM64(R0, 3),
6056 BPF_ALU64_IMM(BPF_DIV, R0, 1),
6057 BPF_EXIT_INSN(),
6058 },
6059 INTERNAL,
6060 { },
6061 { { 0, 3 } },
6062 },
6063 {
6064 "ALU64_DIV_K: 2147483647 / 2147483647 = 1",
6065 .u.insns_int = {
6066 BPF_LD_IMM64(R0, 2147483647),
6067 BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
6068 BPF_EXIT_INSN(),
6069 },
6070 INTERNAL,
6071 { },
6072 { { 0, 1 } },
6073 },
6074 {
6075 "ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
6076 .u.insns_int = {
6077 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6078 BPF_LD_IMM64(R3, 0x0000000000000001LL),
6079 BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
6080 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6081 BPF_MOV32_IMM(R0, 2),
6082 BPF_EXIT_INSN(),
6083 BPF_MOV32_IMM(R0, 1),
6084 BPF_EXIT_INSN(),
6085 },
6086 INTERNAL,
6087 { },
6088 { { 0, 0x1 } },
6089 },
6090 /* BPF_ALU | BPF_MOD | BPF_X */
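	/*
	 * Unsigned modulo: the result is the remainder of the unsigned
	 * division, e.g. 4294967295 % 4294967293 = 2 below.
	 */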
6091 {
6092 "ALU_MOD_X: 3 % 2 = 1",
6093 .u.insns_int = {
6094 BPF_LD_IMM64(R0, 3),
6095 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6096 BPF_ALU32_REG(BPF_MOD, R0, R1),
6097 BPF_EXIT_INSN(),
6098 },
6099 INTERNAL,
6100 { },
6101 { { 0, 1 } },
6102 },
6103 {
6104 "ALU_MOD_X: 4294967295 % 4294967293 = 2",
6105 .u.insns_int = {
6106 BPF_LD_IMM64(R0, 4294967295U),
6107 BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
6108 BPF_ALU32_REG(BPF_MOD, R0, R1),
6109 BPF_EXIT_INSN(),
6110 },
6111 INTERNAL,
6112 { },
6113 { { 0, 2 } },
6114 },
6115 {
6116 "ALU64_MOD_X: 3 % 2 = 1",
6117 .u.insns_int = {
6118 BPF_LD_IMM64(R0, 3),
6119 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6120 BPF_ALU64_REG(BPF_MOD, R0, R1),
6121 BPF_EXIT_INSN(),
6122 },
6123 INTERNAL,
6124 { },
6125 { { 0, 1 } },
6126 },
6127 {
6128 "ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6129 .u.insns_int = {
6130 BPF_LD_IMM64(R0, 2147483647),
6131 BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
6132 BPF_ALU64_REG(BPF_MOD, R0, R1),
6133 BPF_EXIT_INSN(),
6134 },
6135 INTERNAL,
6136 { },
6137 { { 0, 2 } },
6138 },
6139 /* BPF_ALU | BPF_MOD | BPF_K */
6140 {
6141 "ALU_MOD_K: 3 % 2 = 1",
6142 .u.insns_int = {
6143 BPF_LD_IMM64(R0, 3),
6144 BPF_ALU32_IMM(BPF_MOD, R0, 2),
6145 BPF_EXIT_INSN(),
6146 },
6147 INTERNAL,
6148 { },
6149 { { 0, 1 } },
6150 },
6151 {
6152 "ALU_MOD_K: 3 % 1 = 0",
6153 .u.insns_int = {
6154 BPF_LD_IMM64(R0, 3),
6155 BPF_ALU32_IMM(BPF_MOD, R0, 1),
6156 BPF_EXIT_INSN(),
6157 },
6158 INTERNAL,
6159 { },
6160 { { 0, 0 } },
6161 },
6162 {
6163 "ALU_MOD_K: 4294967295 % 4294967293 = 2",
6164 .u.insns_int = {
6165 BPF_LD_IMM64(R0, 4294967295U),
6166 BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
6167 BPF_EXIT_INSN(),
6168 },
6169 INTERNAL,
6170 { },
6171 { { 0, 2 } },
6172 },
6173 {
6174 "ALU64_MOD_K: 3 % 2 = 1",
6175 .u.insns_int = {
6176 BPF_LD_IMM64(R0, 3),
6177 BPF_ALU64_IMM(BPF_MOD, R0, 2),
6178 BPF_EXIT_INSN(),
6179 },
6180 INTERNAL,
6181 { },
6182 { { 0, 1 } },
6183 },
6184 {
6185 "ALU64_MOD_K: 3 % 1 = 0",
6186 .u.insns_int = {
6187 BPF_LD_IMM64(R0, 3),
6188 BPF_ALU64_IMM(BPF_MOD, R0, 1),
6189 BPF_EXIT_INSN(),
6190 },
6191 INTERNAL,
6192 { },
6193 { { 0, 0 } },
6194 },
6195 {
6196 "ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6197 .u.insns_int = {
6198 BPF_LD_IMM64(R0, 2147483647),
6199 BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6200 BPF_EXIT_INSN(),
6201 },
6202 INTERNAL,
6203 { },
6204 { { 0, 2 } },
6205 },
6206 /* BPF_ALU | BPF_DIV | BPF_X off=1 (SDIV) */
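	/*
	 * An offset of 1 in the instruction selects the signed divide
	 * variant (SDIV/SDIV64), which truncates the quotient toward
	 * zero: -6 / 2 = -3. The off=0 encodings above stay unsigned.
	 */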
6207 {
6208 "ALU_SDIV_X: -6 / 2 = -3",
6209 .u.insns_int = {
6210 BPF_LD_IMM64(R0, -6),
6211 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6212 BPF_ALU32_REG_OFF(BPF_DIV, R0, R1, 1),
6213 BPF_EXIT_INSN(),
6214 },
6215 INTERNAL,
6216 { },
6217 { { 0, -3 } },
6218 },
6219 /* BPF_ALU | BPF_DIV | BPF_K off=1 (SDIV) */
6220 {
6221 "ALU_SDIV_K: -6 / 2 = -3",
6222 .u.insns_int = {
6223 BPF_LD_IMM64(R0, -6),
6224 BPF_ALU32_IMM_OFF(BPF_DIV, R0, 2, 1),
6225 BPF_EXIT_INSN(),
6226 },
6227 INTERNAL,
6228 { },
6229 { { 0, -3 } },
6230 },
6231 /* BPF_ALU64 | BPF_DIV | BPF_X off=1 (SDIV64) */
6232 {
6233 "ALU64_SDIV_X: -6 / 2 = -3",
6234 .u.insns_int = {
6235 BPF_LD_IMM64(R0, -6),
6236 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6237 BPF_ALU64_REG_OFF(BPF_DIV, R0, R1, 1),
6238 BPF_EXIT_INSN(),
6239 },
6240 INTERNAL,
6241 { },
6242 { { 0, -3 } },
6243 },
6244 /* BPF_ALU64 | BPF_DIV | BPF_K off=1 (SDIV64) */
6245 {
6246 "ALU64_SDIV_K: -6 / 2 = -3",
6247 .u.insns_int = {
6248 BPF_LD_IMM64(R0, -6),
6249 BPF_ALU64_IMM_OFF(BPF_DIV, R0, 2, 1),
6250 BPF_EXIT_INSN(),
6251 },
6252 INTERNAL,
6253 { },
6254 { { 0, -3 } },
6255 },
6256 /* BPF_ALU | BPF_MOD | BPF_X off=1 (SMOD) */
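	/*
	 * Signed modulo (off=1): with truncated division the remainder
	 * takes the sign of the dividend, so -7 % 2 = -1 rather than 1.
	 */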
6257 {
6258 "ALU_SMOD_X: -7 % 2 = -1",
6259 .u.insns_int = {
6260 BPF_LD_IMM64(R0, -7),
6261 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6262 BPF_ALU32_REG_OFF(BPF_MOD, R0, R1, 1),
6263 BPF_EXIT_INSN(),
6264 },
6265 INTERNAL,
6266 { },
6267 { { 0, -1 } },
6268 },
6269 /* BPF_ALU | BPF_MOD | BPF_K off=1 (SMOD) */
6270 {
6271 "ALU_SMOD_K: -7 % 2 = -1",
6272 .u.insns_int = {
6273 BPF_LD_IMM64(R0, -7),
6274 BPF_ALU32_IMM_OFF(BPF_MOD, R0, 2, 1),
6275 BPF_EXIT_INSN(),
6276 },
6277 INTERNAL,
6278 { },
6279 { { 0, -1 } },
6280 },
6281 /* BPF_ALU64 | BPF_MOD | BPF_X off=1 (SMOD64) */
6282 {
6283 "ALU64_SMOD_X: -7 % 2 = -1",
6284 .u.insns_int = {
6285 BPF_LD_IMM64(R0, -7),
6286 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6287 BPF_ALU64_REG_OFF(BPF_MOD, R0, R1, 1),
6288 BPF_EXIT_INSN(),
6289 },
6290 INTERNAL,
6291 { },
6292 { { 0, -1 } },
6293 },
6294 /* BPF_ALU64 | BPF_MOD | BPF_K off=1 (SMOD64) */
6295 {
6296 "ALU64_SMOD_X: -7 % 2 = -1",
6297 .u.insns_int = {
6298 BPF_LD_IMM64(R0, -7),
6299 BPF_ALU64_IMM_OFF(BPF_MOD, R0, 2, 1),
6300 BPF_EXIT_INSN(),
6301 },
6302 INTERNAL,
6303 { },
6304 { { 0, -1 } },
6305 },
6306 /* BPF_ALU | BPF_AND | BPF_X */
6307 {
6308 "ALU_AND_X: 3 & 2 = 2",
6309 .u.insns_int = {
6310 BPF_LD_IMM64(R0, 3),
6311 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6312 BPF_ALU32_REG(BPF_AND, R0, R1),
6313 BPF_EXIT_INSN(),
6314 },
6315 INTERNAL,
6316 { },
6317 { { 0, 2 } },
6318 },
6319 {
6320 "ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6321 .u.insns_int = {
6322 BPF_LD_IMM64(R0, 0xffffffff),
6323 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6324 BPF_ALU32_REG(BPF_AND, R0, R1),
6325 BPF_EXIT_INSN(),
6326 },
6327 INTERNAL,
6328 { },
6329 { { 0, 0xffffffff } },
6330 },
6331 {
6332 "ALU64_AND_X: 3 & 2 = 2",
6333 .u.insns_int = {
6334 BPF_LD_IMM64(R0, 3),
6335 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6336 BPF_ALU64_REG(BPF_AND, R0, R1),
6337 BPF_EXIT_INSN(),
6338 },
6339 INTERNAL,
6340 { },
6341 { { 0, 2 } },
6342 },
6343 {
6344 "ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6345 .u.insns_int = {
6346 BPF_LD_IMM64(R0, 0xffffffff),
6347 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6348 BPF_ALU64_REG(BPF_AND, R0, R1),
6349 BPF_EXIT_INSN(),
6350 },
6351 INTERNAL,
6352 { },
6353 { { 0, 0xffffffff } },
6354 },
6355 /* BPF_ALU | BPF_AND | BPF_K */
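	/*
	 * The "Zero extension" cases check that 32-bit immediate ops
	 * clear the upper half of the destination; the ALU64 "Sign
	 * extension" cases check that the 32-bit immediate is
	 * sign-extended to 64 bits before the operation is applied.
	 */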
6356 {
6357 "ALU_AND_K: 3 & 2 = 2",
6358 .u.insns_int = {
6359 BPF_LD_IMM64(R0, 3),
6360 BPF_ALU32_IMM(BPF_AND, R0, 2),
6361 BPF_EXIT_INSN(),
6362 },
6363 INTERNAL,
6364 { },
6365 { { 0, 2 } },
6366 },
6367 {
6368 "ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6369 .u.insns_int = {
6370 BPF_LD_IMM64(R0, 0xffffffff),
6371 BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6372 BPF_EXIT_INSN(),
6373 },
6374 INTERNAL,
6375 { },
6376 { { 0, 0xffffffff } },
6377 },
6378 {
6379 "ALU_AND_K: Small immediate",
6380 .u.insns_int = {
6381 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6382 BPF_ALU32_IMM(BPF_AND, R0, 15),
6383 BPF_EXIT_INSN(),
6384 },
6385 INTERNAL,
6386 { },
6387 { { 0, 4 } }
6388 },
6389 {
6390 "ALU_AND_K: Large immediate",
6391 .u.insns_int = {
6392 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6393 BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6394 BPF_EXIT_INSN(),
6395 },
6396 INTERNAL,
6397 { },
6398 { { 0, 0xa1b2c3d4 } }
6399 },
6400 {
6401 "ALU_AND_K: Zero extension",
6402 .u.insns_int = {
6403 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6404 BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6405 BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6406 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6407 BPF_MOV32_IMM(R0, 2),
6408 BPF_EXIT_INSN(),
6409 BPF_MOV32_IMM(R0, 1),
6410 BPF_EXIT_INSN(),
6411 },
6412 INTERNAL,
6413 { },
6414 { { 0, 1 } }
6415 },
6416 {
6417 "ALU64_AND_K: 3 & 2 = 2",
6418 .u.insns_int = {
6419 BPF_LD_IMM64(R0, 3),
6420 BPF_ALU64_IMM(BPF_AND, R0, 2),
6421 BPF_EXIT_INSN(),
6422 },
6423 INTERNAL,
6424 { },
6425 { { 0, 2 } },
6426 },
6427 {
6428 "ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6429 .u.insns_int = {
6430 BPF_LD_IMM64(R0, 0xffffffff),
6431 BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6432 BPF_EXIT_INSN(),
6433 },
6434 INTERNAL,
6435 { },
6436 { { 0, 0xffffffff } },
6437 },
6438 {
6439 "ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6440 .u.insns_int = {
6441 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6442 BPF_LD_IMM64(R3, 0x0000000000000000LL),
6443 BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6444 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6445 BPF_MOV32_IMM(R0, 2),
6446 BPF_EXIT_INSN(),
6447 BPF_MOV32_IMM(R0, 1),
6448 BPF_EXIT_INSN(),
6449 },
6450 INTERNAL,
6451 { },
6452 { { 0, 0x1 } },
6453 },
6454 {
6455 "ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6456 .u.insns_int = {
6457 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6458 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6459 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6460 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6461 BPF_MOV32_IMM(R0, 2),
6462 BPF_EXIT_INSN(),
6463 BPF_MOV32_IMM(R0, 1),
6464 BPF_EXIT_INSN(),
6465 },
6466 INTERNAL,
6467 { },
6468 { { 0, 0x1 } },
6469 },
6470 {
6471 "ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6472 .u.insns_int = {
6473 BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6474 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6475 BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6476 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6477 BPF_MOV32_IMM(R0, 2),
6478 BPF_EXIT_INSN(),
6479 BPF_MOV32_IMM(R0, 1),
6480 BPF_EXIT_INSN(),
6481 },
6482 INTERNAL,
6483 { },
6484 { { 0, 0x1 } },
6485 },
6486 {
6487 "ALU64_AND_K: Sign extension 1",
6488 .u.insns_int = {
6489 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6490 BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6491 BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6492 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6493 BPF_MOV32_IMM(R0, 2),
6494 BPF_EXIT_INSN(),
6495 BPF_MOV32_IMM(R0, 1),
6496 BPF_EXIT_INSN(),
6497 },
6498 INTERNAL,
6499 { },
6500 { { 0, 1 } }
6501 },
6502 {
6503 "ALU64_AND_K: Sign extension 2",
6504 .u.insns_int = {
6505 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6506 BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6507 BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6508 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6509 BPF_MOV32_IMM(R0, 2),
6510 BPF_EXIT_INSN(),
6511 BPF_MOV32_IMM(R0, 1),
6512 BPF_EXIT_INSN(),
6513 },
6514 INTERNAL,
6515 { },
6516 { { 0, 1 } }
6517 },
6518 /* BPF_ALU | BPF_OR | BPF_X */
6519 {
6520 "ALU_OR_X: 1 | 2 = 3",
6521 .u.insns_int = {
6522 BPF_LD_IMM64(R0, 1),
6523 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6524 BPF_ALU32_REG(BPF_OR, R0, R1),
6525 BPF_EXIT_INSN(),
6526 },
6527 INTERNAL,
6528 { },
6529 { { 0, 3 } },
6530 },
6531 {
6532 "ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6533 .u.insns_int = {
6534 BPF_LD_IMM64(R0, 0),
6535 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6536 BPF_ALU32_REG(BPF_OR, R0, R1),
6537 BPF_EXIT_INSN(),
6538 },
6539 INTERNAL,
6540 { },
6541 { { 0, 0xffffffff } },
6542 },
6543 {
6544 "ALU64_OR_X: 1 | 2 = 3",
6545 .u.insns_int = {
6546 BPF_LD_IMM64(R0, 1),
6547 BPF_ALU32_IMM(BPF_MOV, R1, 2),
6548 BPF_ALU64_REG(BPF_OR, R0, R1),
6549 BPF_EXIT_INSN(),
6550 },
6551 INTERNAL,
6552 { },
6553 { { 0, 3 } },
6554 },
6555 {
6556 "ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6557 .u.insns_int = {
6558 BPF_LD_IMM64(R0, 0),
6559 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6560 BPF_ALU64_REG(BPF_OR, R0, R1),
6561 BPF_EXIT_INSN(),
6562 },
6563 INTERNAL,
6564 { },
6565 { { 0, 0xffffffff } },
6566 },
6567 /* BPF_ALU | BPF_OR | BPF_K */
6568 {
6569 "ALU_OR_K: 1 | 2 = 3",
6570 .u.insns_int = {
6571 BPF_LD_IMM64(R0, 1),
6572 BPF_ALU32_IMM(BPF_OR, R0, 2),
6573 BPF_EXIT_INSN(),
6574 },
6575 INTERNAL,
6576 { },
6577 { { 0, 3 } },
6578 },
6579 {
6580 "ALU_OR_K: 0 & 0xffffffff = 0xffffffff",
6581 .u.insns_int = {
6582 BPF_LD_IMM64(R0, 0),
6583 BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6584 BPF_EXIT_INSN(),
6585 },
6586 INTERNAL,
6587 { },
6588 { { 0, 0xffffffff } },
6589 },
6590 {
6591 "ALU_OR_K: Small immediate",
6592 .u.insns_int = {
6593 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6594 BPF_ALU32_IMM(BPF_OR, R0, 1),
6595 BPF_EXIT_INSN(),
6596 },
6597 INTERNAL,
6598 { },
6599 { { 0, 0x01020305 } }
6600 },
6601 {
6602 "ALU_OR_K: Large immediate",
6603 .u.insns_int = {
6604 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6605 BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6606 BPF_EXIT_INSN(),
6607 },
6608 INTERNAL,
6609 { },
6610 { { 0, 0xa1b2c3d4 } }
6611 },
6612 {
6613 "ALU_OR_K: Zero extension",
6614 .u.insns_int = {
6615 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6616 BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6617 BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6618 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6619 BPF_MOV32_IMM(R0, 2),
6620 BPF_EXIT_INSN(),
6621 BPF_MOV32_IMM(R0, 1),
6622 BPF_EXIT_INSN(),
6623 },
6624 INTERNAL,
6625 { },
6626 { { 0, 1 } }
6627 },
6628 {
6629 "ALU64_OR_K: 1 | 2 = 3",
6630 .u.insns_int = {
6631 BPF_LD_IMM64(R0, 1),
6632 BPF_ALU64_IMM(BPF_OR, R0, 2),
6633 BPF_EXIT_INSN(),
6634 },
6635 INTERNAL,
6636 { },
6637 { { 0, 3 } },
6638 },
6639 {
6640 "ALU64_OR_K: 0 & 0xffffffff = 0xffffffff",
6641 .u.insns_int = {
6642 BPF_LD_IMM64(R0, 0),
6643 BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6644 BPF_EXIT_INSN(),
6645 },
6646 INTERNAL,
6647 { },
6648 { { 0, 0xffffffff } },
6649 },
6650 {
6651 "ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6652 .u.insns_int = {
6653 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6654 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6655 BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6656 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6657 BPF_MOV32_IMM(R0, 2),
6658 BPF_EXIT_INSN(),
6659 BPF_MOV32_IMM(R0, 1),
6660 BPF_EXIT_INSN(),
6661 },
6662 INTERNAL,
6663 { },
6664 { { 0, 0x1 } },
6665 },
6666 {
6667 "ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6668 .u.insns_int = {
6669 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6670 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6671 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6672 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6673 BPF_MOV32_IMM(R0, 2),
6674 BPF_EXIT_INSN(),
6675 BPF_MOV32_IMM(R0, 1),
6676 BPF_EXIT_INSN(),
6677 },
6678 INTERNAL,
6679 { },
6680 { { 0, 0x1 } },
6681 },
6682 {
6683 "ALU64_OR_K: 0x000000000000000 | -1 = 0xffffffffffffffff",
6684 .u.insns_int = {
6685 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6686 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6687 BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6688 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6689 BPF_MOV32_IMM(R0, 2),
6690 BPF_EXIT_INSN(),
6691 BPF_MOV32_IMM(R0, 1),
6692 BPF_EXIT_INSN(),
6693 },
6694 INTERNAL,
6695 { },
6696 { { 0, 0x1 } },
6697 },
6698 {
6699 "ALU64_OR_K: Sign extension 1",
6700 .u.insns_int = {
6701 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6702 BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6703 BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6704 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6705 BPF_MOV32_IMM(R0, 2),
6706 BPF_EXIT_INSN(),
6707 BPF_MOV32_IMM(R0, 1),
6708 BPF_EXIT_INSN(),
6709 },
6710 INTERNAL,
6711 { },
6712 { { 0, 1 } }
6713 },
6714 {
6715 "ALU64_OR_K: Sign extension 2",
6716 .u.insns_int = {
6717 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6718 BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6719 BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6720 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6721 BPF_MOV32_IMM(R0, 2),
6722 BPF_EXIT_INSN(),
6723 BPF_MOV32_IMM(R0, 1),
6724 BPF_EXIT_INSN(),
6725 },
6726 INTERNAL,
6727 { },
6728 { { 0, 1 } }
6729 },
6730 /* BPF_ALU | BPF_XOR | BPF_X */
6731 {
6732 "ALU_XOR_X: 5 ^ 6 = 3",
6733 .u.insns_int = {
6734 BPF_LD_IMM64(R0, 5),
6735 BPF_ALU32_IMM(BPF_MOV, R1, 6),
6736 BPF_ALU32_REG(BPF_XOR, R0, R1),
6737 BPF_EXIT_INSN(),
6738 },
6739 INTERNAL,
6740 { },
6741 { { 0, 3 } },
6742 },
6743 {
6744 "ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6745 .u.insns_int = {
6746 BPF_LD_IMM64(R0, 1),
6747 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6748 BPF_ALU32_REG(BPF_XOR, R0, R1),
6749 BPF_EXIT_INSN(),
6750 },
6751 INTERNAL,
6752 { },
6753 { { 0, 0xfffffffe } },
6754 },
6755 {
6756 "ALU64_XOR_X: 5 ^ 6 = 3",
6757 .u.insns_int = {
6758 BPF_LD_IMM64(R0, 5),
6759 BPF_ALU32_IMM(BPF_MOV, R1, 6),
6760 BPF_ALU64_REG(BPF_XOR, R0, R1),
6761 BPF_EXIT_INSN(),
6762 },
6763 INTERNAL,
6764 { },
6765 { { 0, 3 } },
6766 },
6767 {
6768 "ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6769 .u.insns_int = {
6770 BPF_LD_IMM64(R0, 1),
6771 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6772 BPF_ALU64_REG(BPF_XOR, R0, R1),
6773 BPF_EXIT_INSN(),
6774 },
6775 INTERNAL,
6776 { },
6777 { { 0, 0xfffffffe } },
6778 },
6779 /* BPF_ALU | BPF_XOR | BPF_K */
6780 {
6781 "ALU_XOR_K: 5 ^ 6 = 3",
6782 .u.insns_int = {
6783 BPF_LD_IMM64(R0, 5),
6784 BPF_ALU32_IMM(BPF_XOR, R0, 6),
6785 BPF_EXIT_INSN(),
6786 },
6787 INTERNAL,
6788 { },
6789 { { 0, 3 } },
6790 },
6791 {
6792 "ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6793 .u.insns_int = {
6794 BPF_LD_IMM64(R0, 1),
6795 BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6796 BPF_EXIT_INSN(),
6797 },
6798 INTERNAL,
6799 { },
6800 { { 0, 0xfffffffe } },
6801 },
6802 {
6803 "ALU_XOR_K: Small immediate",
6804 .u.insns_int = {
6805 BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6806 BPF_ALU32_IMM(BPF_XOR, R0, 15),
6807 BPF_EXIT_INSN(),
6808 },
6809 INTERNAL,
6810 { },
6811 { { 0, 0x0102030b } }
6812 },
6813 {
6814 "ALU_XOR_K: Large immediate",
6815 .u.insns_int = {
6816 BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6817 BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6818 BPF_EXIT_INSN(),
6819 },
6820 INTERNAL,
6821 { },
6822 { { 0, 0x5e4d3c2b } }
6823 },
6824 {
6825 "ALU_XOR_K: Zero extension",
6826 .u.insns_int = {
6827 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6828 BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6829 BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6830 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6831 BPF_MOV32_IMM(R0, 2),
6832 BPF_EXIT_INSN(),
6833 BPF_MOV32_IMM(R0, 1),
6834 BPF_EXIT_INSN(),
6835 },
6836 INTERNAL,
6837 { },
6838 { { 0, 1 } }
6839 },
6840 {
6841 "ALU64_XOR_K: 5 ^ 6 = 3",
6842 .u.insns_int = {
6843 BPF_LD_IMM64(R0, 5),
6844 BPF_ALU64_IMM(BPF_XOR, R0, 6),
6845 BPF_EXIT_INSN(),
6846 },
6847 INTERNAL,
6848 { },
6849 { { 0, 3 } },
6850 },
6851 {
6852 "ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6853 .u.insns_int = {
6854 BPF_LD_IMM64(R0, 1),
6855 BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6856 BPF_EXIT_INSN(),
6857 },
6858 INTERNAL,
6859 { },
6860 { { 0, 0xfffffffe } },
6861 },
6862 {
6863 "ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6864 .u.insns_int = {
6865 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6866 BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6867 BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6868 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6869 BPF_MOV32_IMM(R0, 2),
6870 BPF_EXIT_INSN(),
6871 BPF_MOV32_IMM(R0, 1),
6872 BPF_EXIT_INSN(),
6873 },
6874 INTERNAL,
6875 { },
6876 { { 0, 0x1 } },
6877 },
6878 {
6879 "ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6880 .u.insns_int = {
6881 BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6882 BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6883 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6884 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6885 BPF_MOV32_IMM(R0, 2),
6886 BPF_EXIT_INSN(),
6887 BPF_MOV32_IMM(R0, 1),
6888 BPF_EXIT_INSN(),
6889 },
6890 INTERNAL,
6891 { },
6892 { { 0, 0x1 } },
6893 },
6894 {
6895 "ALU64_XOR_K: 0x000000000000000 ^ -1 = 0xffffffffffffffff",
6896 .u.insns_int = {
6897 BPF_LD_IMM64(R2, 0x0000000000000000LL),
6898 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6899 BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6900 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6901 BPF_MOV32_IMM(R0, 2),
6902 BPF_EXIT_INSN(),
6903 BPF_MOV32_IMM(R0, 1),
6904 BPF_EXIT_INSN(),
6905 },
6906 INTERNAL,
6907 { },
6908 { { 0, 0x1 } },
6909 },
6910 {
6911 "ALU64_XOR_K: Sign extension 1",
6912 .u.insns_int = {
6913 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6914 BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6915 BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6916 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6917 BPF_MOV32_IMM(R0, 2),
6918 BPF_EXIT_INSN(),
6919 BPF_MOV32_IMM(R0, 1),
6920 BPF_EXIT_INSN(),
6921 },
6922 INTERNAL,
6923 { },
6924 { { 0, 1 } }
6925 },
6926 {
6927 "ALU64_XOR_K: Sign extension 2",
6928 .u.insns_int = {
6929 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6930 BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6931 BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6932 BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6933 BPF_MOV32_IMM(R0, 2),
6934 BPF_EXIT_INSN(),
6935 BPF_MOV32_IMM(R0, 1),
6936 BPF_EXIT_INSN(),
6937 },
6938 INTERNAL,
6939 { },
6940 { { 0, 1 } }
6941 },
6942 /* BPF_ALU | BPF_LSH | BPF_X */
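	/*
	 * The 64-bit shift tests come in "low word"/"high word" pairs:
	 * only the low 32 bits of R0 are compared against the expected
	 * value, so the high-word variant shifts the result right by 32
	 * first to get both halves checked.
	 */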
6943 {
6944 "ALU_LSH_X: 1 << 1 = 2",
6945 .u.insns_int = {
6946 BPF_LD_IMM64(R0, 1),
6947 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6948 BPF_ALU32_REG(BPF_LSH, R0, R1),
6949 BPF_EXIT_INSN(),
6950 },
6951 INTERNAL,
6952 { },
6953 { { 0, 2 } },
6954 },
6955 {
6956 "ALU_LSH_X: 1 << 31 = 0x80000000",
6957 .u.insns_int = {
6958 BPF_LD_IMM64(R0, 1),
6959 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6960 BPF_ALU32_REG(BPF_LSH, R0, R1),
6961 BPF_EXIT_INSN(),
6962 },
6963 INTERNAL,
6964 { },
6965 { { 0, 0x80000000 } },
6966 },
6967 {
6968 "ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6969 .u.insns_int = {
6970 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6971 BPF_ALU32_IMM(BPF_MOV, R1, 12),
6972 BPF_ALU32_REG(BPF_LSH, R0, R1),
6973 BPF_EXIT_INSN(),
6974 },
6975 INTERNAL,
6976 { },
6977 { { 0, 0x45678000 } }
6978 },
6979 {
6980 "ALU64_LSH_X: 1 << 1 = 2",
6981 .u.insns_int = {
6982 BPF_LD_IMM64(R0, 1),
6983 BPF_ALU32_IMM(BPF_MOV, R1, 1),
6984 BPF_ALU64_REG(BPF_LSH, R0, R1),
6985 BPF_EXIT_INSN(),
6986 },
6987 INTERNAL,
6988 { },
6989 { { 0, 2 } },
6990 },
6991 {
6992 "ALU64_LSH_X: 1 << 31 = 0x80000000",
6993 .u.insns_int = {
6994 BPF_LD_IMM64(R0, 1),
6995 BPF_ALU32_IMM(BPF_MOV, R1, 31),
6996 BPF_ALU64_REG(BPF_LSH, R0, R1),
6997 BPF_EXIT_INSN(),
6998 },
6999 INTERNAL,
7000 { },
7001 { { 0, 0x80000000 } },
7002 },
7003 {
7004 "ALU64_LSH_X: Shift < 32, low word",
7005 .u.insns_int = {
7006 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7007 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7008 BPF_ALU64_REG(BPF_LSH, R0, R1),
7009 BPF_EXIT_INSN(),
7010 },
7011 INTERNAL,
7012 { },
7013 { { 0, 0xbcdef000 } }
7014 },
7015 {
7016 "ALU64_LSH_X: Shift < 32, high word",
7017 .u.insns_int = {
7018 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7019 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7020 BPF_ALU64_REG(BPF_LSH, R0, R1),
7021 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7022 BPF_EXIT_INSN(),
7023 },
7024 INTERNAL,
7025 { },
7026 { { 0, 0x3456789a } }
7027 },
7028 {
7029 "ALU64_LSH_X: Shift > 32, low word",
7030 .u.insns_int = {
7031 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7032 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7033 BPF_ALU64_REG(BPF_LSH, R0, R1),
7034 BPF_EXIT_INSN(),
7035 },
7036 INTERNAL,
7037 { },
7038 { { 0, 0 } }
7039 },
7040 {
7041 "ALU64_LSH_X: Shift > 32, high word",
7042 .u.insns_int = {
7043 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7044 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7045 BPF_ALU64_REG(BPF_LSH, R0, R1),
7046 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7047 BPF_EXIT_INSN(),
7048 },
7049 INTERNAL,
7050 { },
7051 { { 0, 0x9abcdef0 } }
7052 },
7053 {
7054 "ALU64_LSH_X: Shift == 32, low word",
7055 .u.insns_int = {
7056 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7057 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7058 BPF_ALU64_REG(BPF_LSH, R0, R1),
7059 BPF_EXIT_INSN(),
7060 },
7061 INTERNAL,
7062 { },
7063 { { 0, 0 } }
7064 },
7065 {
7066 "ALU64_LSH_X: Shift == 32, high word",
7067 .u.insns_int = {
7068 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7069 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7070 BPF_ALU64_REG(BPF_LSH, R0, R1),
7071 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7072 BPF_EXIT_INSN(),
7073 },
7074 INTERNAL,
7075 { },
7076 { { 0, 0x89abcdef } }
7077 },
7078 {
7079 "ALU64_LSH_X: Zero shift, low word",
7080 .u.insns_int = {
7081 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7082 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7083 BPF_ALU64_REG(BPF_LSH, R0, R1),
7084 BPF_EXIT_INSN(),
7085 },
7086 INTERNAL,
7087 { },
7088 { { 0, 0x89abcdef } }
7089 },
7090 {
7091 "ALU64_LSH_X: Zero shift, high word",
7092 .u.insns_int = {
7093 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7094 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7095 BPF_ALU64_REG(BPF_LSH, R0, R1),
7096 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7097 BPF_EXIT_INSN(),
7098 },
7099 INTERNAL,
7100 { },
7101 { { 0, 0x01234567 } }
7102 },
7103 /* BPF_ALU | BPF_LSH | BPF_K */
7104 {
7105 "ALU_LSH_K: 1 << 1 = 2",
7106 .u.insns_int = {
7107 BPF_LD_IMM64(R0, 1),
7108 BPF_ALU32_IMM(BPF_LSH, R0, 1),
7109 BPF_EXIT_INSN(),
7110 },
7111 INTERNAL,
7112 { },
7113 { { 0, 2 } },
7114 },
7115 {
7116 "ALU_LSH_K: 1 << 31 = 0x80000000",
7117 .u.insns_int = {
7118 BPF_LD_IMM64(R0, 1),
7119 BPF_ALU32_IMM(BPF_LSH, R0, 31),
7120 BPF_EXIT_INSN(),
7121 },
7122 INTERNAL,
7123 { },
7124 { { 0, 0x80000000 } },
7125 },
7126 {
7127 "ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
7128 .u.insns_int = {
7129 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7130 BPF_ALU32_IMM(BPF_LSH, R0, 12),
7131 BPF_EXIT_INSN(),
7132 },
7133 INTERNAL,
7134 { },
7135 { { 0, 0x45678000 } }
7136 },
7137 {
7138 "ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
7139 .u.insns_int = {
7140 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7141 BPF_ALU32_IMM(BPF_LSH, R0, 0),
7142 BPF_EXIT_INSN(),
7143 },
7144 INTERNAL,
7145 { },
7146 { { 0, 0x12345678 } }
7147 },
7148 {
7149 "ALU64_LSH_K: 1 << 1 = 2",
7150 .u.insns_int = {
7151 BPF_LD_IMM64(R0, 1),
7152 BPF_ALU64_IMM(BPF_LSH, R0, 1),
7153 BPF_EXIT_INSN(),
7154 },
7155 INTERNAL,
7156 { },
7157 { { 0, 2 } },
7158 },
7159 {
7160 "ALU64_LSH_K: 1 << 31 = 0x80000000",
7161 .u.insns_int = {
7162 BPF_LD_IMM64(R0, 1),
7163 BPF_ALU64_IMM(BPF_LSH, R0, 31),
7164 BPF_EXIT_INSN(),
7165 },
7166 INTERNAL,
7167 { },
7168 { { 0, 0x80000000 } },
7169 },
7170 {
7171 "ALU64_LSH_K: Shift < 32, low word",
7172 .u.insns_int = {
7173 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7174 BPF_ALU64_IMM(BPF_LSH, R0, 12),
7175 BPF_EXIT_INSN(),
7176 },
7177 INTERNAL,
7178 { },
7179 { { 0, 0xbcdef000 } }
7180 },
7181 {
7182 "ALU64_LSH_K: Shift < 32, high word",
7183 .u.insns_int = {
7184 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7185 BPF_ALU64_IMM(BPF_LSH, R0, 12),
7186 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7187 BPF_EXIT_INSN(),
7188 },
7189 INTERNAL,
7190 { },
7191 { { 0, 0x3456789a } }
7192 },
7193 {
7194 "ALU64_LSH_K: Shift > 32, low word",
7195 .u.insns_int = {
7196 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7197 BPF_ALU64_IMM(BPF_LSH, R0, 36),
7198 BPF_EXIT_INSN(),
7199 },
7200 INTERNAL,
7201 { },
7202 { { 0, 0 } }
7203 },
7204 {
7205 "ALU64_LSH_K: Shift > 32, high word",
7206 .u.insns_int = {
7207 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7208 BPF_ALU64_IMM(BPF_LSH, R0, 36),
7209 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7210 BPF_EXIT_INSN(),
7211 },
7212 INTERNAL,
7213 { },
7214 { { 0, 0x9abcdef0 } }
7215 },
7216 {
7217 "ALU64_LSH_K: Shift == 32, low word",
7218 .u.insns_int = {
7219 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7220 BPF_ALU64_IMM(BPF_LSH, R0, 32),
7221 BPF_EXIT_INSN(),
7222 },
7223 INTERNAL,
7224 { },
7225 { { 0, 0 } }
7226 },
7227 {
7228 "ALU64_LSH_K: Shift == 32, high word",
7229 .u.insns_int = {
7230 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7231 BPF_ALU64_IMM(BPF_LSH, R0, 32),
7232 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7233 BPF_EXIT_INSN(),
7234 },
7235 INTERNAL,
7236 { },
7237 { { 0, 0x89abcdef } }
7238 },
7239 {
7240 "ALU64_LSH_K: Zero shift",
7241 .u.insns_int = {
7242 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7243 BPF_ALU64_IMM(BPF_LSH, R0, 0),
7244 BPF_EXIT_INSN(),
7245 },
7246 INTERNAL,
7247 { },
7248 { { 0, 0x89abcdef } }
7249 },
7250 /* BPF_ALU | BPF_RSH | BPF_X */
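	/* BPF_RSH is a logical right shift; vacated bits are zero-filled. */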
7251 {
7252 "ALU_RSH_X: 2 >> 1 = 1",
7253 .u.insns_int = {
7254 BPF_LD_IMM64(R0, 2),
7255 BPF_ALU32_IMM(BPF_MOV, R1, 1),
7256 BPF_ALU32_REG(BPF_RSH, R0, R1),
7257 BPF_EXIT_INSN(),
7258 },
7259 INTERNAL,
7260 { },
7261 { { 0, 1 } },
7262 },
7263 {
7264 "ALU_RSH_X: 0x80000000 >> 31 = 1",
7265 .u.insns_int = {
7266 BPF_LD_IMM64(R0, 0x80000000),
7267 BPF_ALU32_IMM(BPF_MOV, R1, 31),
7268 BPF_ALU32_REG(BPF_RSH, R0, R1),
7269 BPF_EXIT_INSN(),
7270 },
7271 INTERNAL,
7272 { },
7273 { { 0, 1 } },
7274 },
7275 {
7276 "ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7277 .u.insns_int = {
7278 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7279 BPF_ALU32_IMM(BPF_MOV, R1, 20),
7280 BPF_ALU32_REG(BPF_RSH, R0, R1),
7281 BPF_EXIT_INSN(),
7282 },
7283 INTERNAL,
7284 { },
7285 { { 0, 0x123 } }
7286 },
7287 {
7288 "ALU64_RSH_X: 2 >> 1 = 1",
7289 .u.insns_int = {
7290 BPF_LD_IMM64(R0, 2),
7291 BPF_ALU32_IMM(BPF_MOV, R1, 1),
7292 BPF_ALU64_REG(BPF_RSH, R0, R1),
7293 BPF_EXIT_INSN(),
7294 },
7295 INTERNAL,
7296 { },
7297 { { 0, 1 } },
7298 },
7299 {
7300 "ALU64_RSH_X: 0x80000000 >> 31 = 1",
7301 .u.insns_int = {
7302 BPF_LD_IMM64(R0, 0x80000000),
7303 BPF_ALU32_IMM(BPF_MOV, R1, 31),
7304 BPF_ALU64_REG(BPF_RSH, R0, R1),
7305 BPF_EXIT_INSN(),
7306 },
7307 INTERNAL,
7308 { },
7309 { { 0, 1 } },
7310 },
7311 {
7312 "ALU64_RSH_X: Shift < 32, low word",
7313 .u.insns_int = {
7314 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7315 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7316 BPF_ALU64_REG(BPF_RSH, R0, R1),
7317 BPF_EXIT_INSN(),
7318 },
7319 INTERNAL,
7320 { },
7321 { { 0, 0x56789abc } }
7322 },
7323 {
7324 "ALU64_RSH_X: Shift < 32, high word",
7325 .u.insns_int = {
7326 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7327 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7328 BPF_ALU64_REG(BPF_RSH, R0, R1),
7329 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7330 BPF_EXIT_INSN(),
7331 },
7332 INTERNAL,
7333 { },
7334 { { 0, 0x00081234 } }
7335 },
7336 {
7337 "ALU64_RSH_X: Shift > 32, low word",
7338 .u.insns_int = {
7339 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7340 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7341 BPF_ALU64_REG(BPF_RSH, R0, R1),
7342 BPF_EXIT_INSN(),
7343 },
7344 INTERNAL,
7345 { },
7346 { { 0, 0x08123456 } }
7347 },
7348 {
7349 "ALU64_RSH_X: Shift > 32, high word",
7350 .u.insns_int = {
7351 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7352 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7353 BPF_ALU64_REG(BPF_RSH, R0, R1),
7354 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7355 BPF_EXIT_INSN(),
7356 },
7357 INTERNAL,
7358 { },
7359 { { 0, 0 } }
7360 },
7361 {
7362 "ALU64_RSH_X: Shift == 32, low word",
7363 .u.insns_int = {
7364 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7365 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7366 BPF_ALU64_REG(BPF_RSH, R0, R1),
7367 BPF_EXIT_INSN(),
7368 },
7369 INTERNAL,
7370 { },
7371 { { 0, 0x81234567 } }
7372 },
7373 {
7374 "ALU64_RSH_X: Shift == 32, high word",
7375 .u.insns_int = {
7376 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7377 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7378 BPF_ALU64_REG(BPF_RSH, R0, R1),
7379 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7380 BPF_EXIT_INSN(),
7381 },
7382 INTERNAL,
7383 { },
7384 { { 0, 0 } }
7385 },
7386 {
7387 "ALU64_RSH_X: Zero shift, low word",
7388 .u.insns_int = {
7389 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7390 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7391 BPF_ALU64_REG(BPF_RSH, R0, R1),
7392 BPF_EXIT_INSN(),
7393 },
7394 INTERNAL,
7395 { },
7396 { { 0, 0x89abcdef } }
7397 },
7398 {
7399 "ALU64_RSH_X: Zero shift, high word",
7400 .u.insns_int = {
7401 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7402 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7403 BPF_ALU64_REG(BPF_RSH, R0, R1),
7404 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7405 BPF_EXIT_INSN(),
7406 },
7407 INTERNAL,
7408 { },
7409 { { 0, 0x81234567 } }
7410 },
7411 /* BPF_ALU | BPF_RSH | BPF_K */
7412 {
7413 "ALU_RSH_K: 2 >> 1 = 1",
7414 .u.insns_int = {
7415 BPF_LD_IMM64(R0, 2),
7416 BPF_ALU32_IMM(BPF_RSH, R0, 1),
7417 BPF_EXIT_INSN(),
7418 },
7419 INTERNAL,
7420 { },
7421 { { 0, 1 } },
7422 },
7423 {
7424 "ALU_RSH_K: 0x80000000 >> 31 = 1",
7425 .u.insns_int = {
7426 BPF_LD_IMM64(R0, 0x80000000),
7427 BPF_ALU32_IMM(BPF_RSH, R0, 31),
7428 BPF_EXIT_INSN(),
7429 },
7430 INTERNAL,
7431 { },
7432 { { 0, 1 } },
7433 },
7434 {
7435 "ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7436 .u.insns_int = {
7437 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7438 BPF_ALU32_IMM(BPF_RSH, R0, 20),
7439 BPF_EXIT_INSN(),
7440 },
7441 INTERNAL,
7442 { },
7443 { { 0, 0x123 } }
7444 },
7445 {
7446 "ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7447 .u.insns_int = {
7448 BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7449 BPF_ALU32_IMM(BPF_RSH, R0, 0),
7450 BPF_EXIT_INSN(),
7451 },
7452 INTERNAL,
7453 { },
7454 { { 0, 0x12345678 } }
7455 },
7456 {
7457 "ALU64_RSH_K: 2 >> 1 = 1",
7458 .u.insns_int = {
7459 BPF_LD_IMM64(R0, 2),
7460 BPF_ALU64_IMM(BPF_RSH, R0, 1),
7461 BPF_EXIT_INSN(),
7462 },
7463 INTERNAL,
7464 { },
7465 { { 0, 1 } },
7466 },
7467 {
7468 "ALU64_RSH_K: 0x80000000 >> 31 = 1",
7469 .u.insns_int = {
7470 BPF_LD_IMM64(R0, 0x80000000),
7471 BPF_ALU64_IMM(BPF_RSH, R0, 31),
7472 BPF_EXIT_INSN(),
7473 },
7474 INTERNAL,
7475 { },
7476 { { 0, 1 } },
7477 },
7478 {
7479 "ALU64_RSH_K: Shift < 32, low word",
7480 .u.insns_int = {
7481 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7482 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7483 BPF_EXIT_INSN(),
7484 },
7485 INTERNAL,
7486 { },
7487 { { 0, 0x56789abc } }
7488 },
7489 {
7490 "ALU64_RSH_K: Shift < 32, high word",
7491 .u.insns_int = {
7492 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7493 BPF_ALU64_IMM(BPF_RSH, R0, 12),
7494 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7495 BPF_EXIT_INSN(),
7496 },
7497 INTERNAL,
7498 { },
7499 { { 0, 0x00081234 } }
7500 },
7501 {
7502 "ALU64_RSH_K: Shift > 32, low word",
7503 .u.insns_int = {
7504 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7505 BPF_ALU64_IMM(BPF_RSH, R0, 36),
7506 BPF_EXIT_INSN(),
7507 },
7508 INTERNAL,
7509 { },
7510 { { 0, 0x08123456 } }
7511 },
7512 {
7513 "ALU64_RSH_K: Shift > 32, high word",
7514 .u.insns_int = {
7515 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7516 BPF_ALU64_IMM(BPF_RSH, R0, 36),
7517 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7518 BPF_EXIT_INSN(),
7519 },
7520 INTERNAL,
7521 { },
7522 { { 0, 0 } }
7523 },
7524 {
7525 "ALU64_RSH_K: Shift == 32, low word",
7526 .u.insns_int = {
7527 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7528 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7529 BPF_EXIT_INSN(),
7530 },
7531 INTERNAL,
7532 { },
7533 { { 0, 0x81234567 } }
7534 },
7535 {
7536 "ALU64_RSH_K: Shift == 32, high word",
7537 .u.insns_int = {
7538 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7539 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7540 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7541 BPF_EXIT_INSN(),
7542 },
7543 INTERNAL,
7544 { },
7545 { { 0, 0 } }
7546 },
7547 {
7548 "ALU64_RSH_K: Zero shift",
7549 .u.insns_int = {
7550 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7551 BPF_ALU64_IMM(BPF_RSH, R0, 0),
7552 BPF_EXIT_INSN(),
7553 },
7554 INTERNAL,
7555 { },
7556 { { 0, 0x89abcdef } }
7557 },
7558 /* BPF_ALU | BPF_ARSH | BPF_X */
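	/*
	 * BPF_ARSH is an arithmetic right shift that replicates the
	 * sign bit. The ALU32 variant shifts only the low 32 bits, so
	 * -1234 >> 7 = -10 is computed on a 32-bit value.
	 */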
7559 {
7560 "ALU32_ARSH_X: -1234 >> 7 = -10",
7561 .u.insns_int = {
7562 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7563 BPF_ALU32_IMM(BPF_MOV, R1, 7),
7564 BPF_ALU32_REG(BPF_ARSH, R0, R1),
7565 BPF_EXIT_INSN(),
7566 },
7567 INTERNAL,
7568 { },
7569 { { 0, -10 } }
7570 },
7571 {
7572 "ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7573 .u.insns_int = {
7574 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7575 BPF_ALU32_IMM(BPF_MOV, R1, 40),
7576 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7577 BPF_EXIT_INSN(),
7578 },
7579 INTERNAL,
7580 { },
7581 { { 0, 0xffff00ff } },
7582 },
7583 {
7584 "ALU64_ARSH_X: Shift < 32, low word",
7585 .u.insns_int = {
7586 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7587 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7588 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7589 BPF_EXIT_INSN(),
7590 },
7591 INTERNAL,
7592 { },
7593 { { 0, 0x56789abc } }
7594 },
7595 {
7596 "ALU64_ARSH_X: Shift < 32, high word",
7597 .u.insns_int = {
7598 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7599 BPF_ALU32_IMM(BPF_MOV, R1, 12),
7600 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7601 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7602 BPF_EXIT_INSN(),
7603 },
7604 INTERNAL,
7605 { },
7606 { { 0, 0xfff81234 } }
7607 },
7608 {
7609 "ALU64_ARSH_X: Shift > 32, low word",
7610 .u.insns_int = {
7611 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7612 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7613 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7614 BPF_EXIT_INSN(),
7615 },
7616 INTERNAL,
7617 { },
7618 { { 0, 0xf8123456 } }
7619 },
7620 {
7621 "ALU64_ARSH_X: Shift > 32, high word",
7622 .u.insns_int = {
7623 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7624 BPF_ALU32_IMM(BPF_MOV, R1, 36),
7625 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7626 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7627 BPF_EXIT_INSN(),
7628 },
7629 INTERNAL,
7630 { },
7631 { { 0, -1 } }
7632 },
7633 {
7634 "ALU64_ARSH_X: Shift == 32, low word",
7635 .u.insns_int = {
7636 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7637 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7638 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7639 BPF_EXIT_INSN(),
7640 },
7641 INTERNAL,
7642 { },
7643 { { 0, 0x81234567 } }
7644 },
7645 {
7646 "ALU64_ARSH_X: Shift == 32, high word",
7647 .u.insns_int = {
7648 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7649 BPF_ALU32_IMM(BPF_MOV, R1, 32),
7650 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7651 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7652 BPF_EXIT_INSN(),
7653 },
7654 INTERNAL,
7655 { },
7656 { { 0, -1 } }
7657 },
7658 {
7659 "ALU64_ARSH_X: Zero shift, low word",
7660 .u.insns_int = {
7661 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7662 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7663 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7664 BPF_EXIT_INSN(),
7665 },
7666 INTERNAL,
7667 { },
7668 { { 0, 0x89abcdef } }
7669 },
7670 {
7671 "ALU64_ARSH_X: Zero shift, high word",
7672 .u.insns_int = {
7673 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7674 BPF_ALU32_IMM(BPF_MOV, R1, 0),
7675 BPF_ALU64_REG(BPF_ARSH, R0, R1),
7676 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7677 BPF_EXIT_INSN(),
7678 },
7679 INTERNAL,
7680 { },
7681 { { 0, 0x81234567 } }
7682 },
7683 /* BPF_ALU | BPF_ARSH | BPF_K */
7684 {
7685 "ALU32_ARSH_K: -1234 >> 7 = -10",
7686 .u.insns_int = {
7687 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7688 BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7689 BPF_EXIT_INSN(),
7690 },
7691 INTERNAL,
7692 { },
7693 { { 0, -10 } }
7694 },
7695 {
7696 "ALU32_ARSH_K: -1234 >> 0 = -1234",
7697 .u.insns_int = {
7698 BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7699 BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7700 BPF_EXIT_INSN(),
7701 },
7702 INTERNAL,
7703 { },
7704 { { 0, -1234 } }
7705 },
7706 {
7707 "ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7708 .u.insns_int = {
7709 BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7710 BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7711 BPF_EXIT_INSN(),
7712 },
7713 INTERNAL,
7714 { },
7715 { { 0, 0xffff00ff } },
7716 },
7717 {
7718 "ALU64_ARSH_K: Shift < 32, low word",
7719 .u.insns_int = {
7720 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7721 BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7722 BPF_EXIT_INSN(),
7723 },
7724 INTERNAL,
7725 { },
7726 { { 0, 0x56789abc } }
7727 },
7728 {
7729 "ALU64_ARSH_K: Shift < 32, high word",
7730 .u.insns_int = {
7731 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7732 BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7733 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7734 BPF_EXIT_INSN(),
7735 },
7736 INTERNAL,
7737 { },
7738 { { 0, 0xfff81234 } }
7739 },
7740 {
7741 "ALU64_ARSH_K: Shift > 32, low word",
7742 .u.insns_int = {
7743 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7744 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7745 BPF_EXIT_INSN(),
7746 },
7747 INTERNAL,
7748 { },
7749 { { 0, 0xf8123456 } }
7750 },
7751 {
7752 "ALU64_ARSH_K: Shift > 32, high word",
7753 .u.insns_int = {
7754 BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7755 BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7756 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7757 BPF_EXIT_INSN(),
7758 },
7759 INTERNAL,
7760 { },
7761 { { 0, -1 } }
7762 },
7763 {
7764 "ALU64_ARSH_K: Shift == 32, low word",
7765 .u.insns_int = {
7766 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7767 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7768 BPF_EXIT_INSN(),
7769 },
7770 INTERNAL,
7771 { },
7772 { { 0, 0x81234567 } }
7773 },
7774 {
7775 "ALU64_ARSH_K: Shift == 32, high word",
7776 .u.insns_int = {
7777 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7778 BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7779 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7780 BPF_EXIT_INSN(),
7781 },
7782 INTERNAL,
7783 { },
7784 { { 0, -1 } }
7785 },
7786 {
7787 "ALU64_ARSH_K: Zero shift",
7788 .u.insns_int = {
7789 BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7790 BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7791 BPF_EXIT_INSN(),
7792 },
7793 INTERNAL,
7794 { },
7795 { { 0, 0x89abcdef } }
7796 },
7797 /* BPF_ALU | BPF_NEG */
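	/* BPF_NEG is two's complement negation of dst; the immediate is unused. */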
7798 {
7799 "ALU_NEG: -(3) = -3",
7800 .u.insns_int = {
7801 BPF_ALU32_IMM(BPF_MOV, R0, 3),
7802 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7803 BPF_EXIT_INSN(),
7804 },
7805 INTERNAL,
7806 { },
7807 { { 0, -3 } },
7808 },
7809 {
7810 "ALU_NEG: -(-3) = 3",
7811 .u.insns_int = {
7812 BPF_ALU32_IMM(BPF_MOV, R0, -3),
7813 BPF_ALU32_IMM(BPF_NEG, R0, 0),
7814 BPF_EXIT_INSN(),
7815 },
7816 INTERNAL,
7817 { },
7818 { { 0, 3 } },
7819 },
7820 {
7821 "ALU64_NEG: -(3) = -3",
7822 .u.insns_int = {
7823 BPF_LD_IMM64(R0, 3),
7824 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7825 BPF_EXIT_INSN(),
7826 },
7827 INTERNAL,
7828 { },
7829 { { 0, -3 } },
7830 },
7831 {
7832 "ALU64_NEG: -(-3) = 3",
7833 .u.insns_int = {
7834 BPF_LD_IMM64(R0, -3),
7835 BPF_ALU64_IMM(BPF_NEG, R0, 0),
7836 BPF_EXIT_INSN(),
7837 },
7838 INTERNAL,
7839 { },
7840 { { 0, 3 } },
7841 },
7842 /* BPF_ALU | BPF_END | BPF_FROM_BE */
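	/*
	 * BPF_END with BPF_FROM_BE/BPF_FROM_LE converts between host
	 * byte order and the named order, zero-extending results
	 * narrower than 64 bits. On a host of matching endianness the
	 * conversion is just a truncation, which is why the expected
	 * values are wrapped in cpu_to_be*()/cpu_to_le*() instead of
	 * being written as fixed constants.
	 */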
7843 {
7844 "ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7845 .u.insns_int = {
7846 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7847 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7848 BPF_EXIT_INSN(),
7849 },
7850 INTERNAL,
7851 { },
7852 { { 0, cpu_to_be16(0xcdef) } },
7853 },
7854 {
7855 "ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7856 .u.insns_int = {
7857 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7858 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7859 BPF_ALU64_REG(BPF_MOV, R1, R0),
7860 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7861 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7862 BPF_EXIT_INSN(),
7863 },
7864 INTERNAL,
7865 { },
7866 { { 0, cpu_to_be32(0x89abcdef) } },
7867 },
7868 {
7869 "ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7870 .u.insns_int = {
7871 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7872 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7873 BPF_EXIT_INSN(),
7874 },
7875 INTERNAL,
7876 { },
7877 { { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7878 },
7879 {
7880 "ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7881 .u.insns_int = {
7882 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7883 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7884 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7885 BPF_EXIT_INSN(),
7886 },
7887 INTERNAL,
7888 { },
7889 { { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7890 },
7891 /* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7892 {
7893 "ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7894 .u.insns_int = {
7895 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7896 BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7897 BPF_EXIT_INSN(),
7898 },
7899 INTERNAL,
7900 { },
7901 { { 0, cpu_to_be16(0x3210) } },
7902 },
7903 {
7904 "ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7905 .u.insns_int = {
7906 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7907 BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7908 BPF_ALU64_REG(BPF_MOV, R1, R0),
7909 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7910 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7911 BPF_EXIT_INSN(),
7912 },
7913 INTERNAL,
7914 { },
7915 { { 0, cpu_to_be32(0x76543210) } },
7916 },
7917 {
7918 "ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7919 .u.insns_int = {
7920 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7921 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7922 BPF_EXIT_INSN(),
7923 },
7924 INTERNAL,
7925 { },
7926 { { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7927 },
7928 {
7929 "ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7930 .u.insns_int = {
7931 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7932 BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7933 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7934 BPF_EXIT_INSN(),
7935 },
7936 INTERNAL,
7937 { },
7938 { { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7939 },
7940 /* BPF_ALU | BPF_END | BPF_FROM_LE */
7941 {
7942 "ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7943 .u.insns_int = {
7944 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7945 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7946 BPF_EXIT_INSN(),
7947 },
7948 INTERNAL,
7949 { },
7950 { { 0, cpu_to_le16(0xcdef) } },
7951 },
7952 {
7953 "ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7954 .u.insns_int = {
7955 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7956 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7957 BPF_ALU64_REG(BPF_MOV, R1, R0),
7958 BPF_ALU64_IMM(BPF_RSH, R1, 32),
7959 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7960 BPF_EXIT_INSN(),
7961 },
7962 INTERNAL,
7963 { },
7964 { { 0, cpu_to_le32(0x89abcdef) } },
7965 },
7966 {
7967 "ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7968 .u.insns_int = {
7969 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7970 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7971 BPF_EXIT_INSN(),
7972 },
7973 INTERNAL,
7974 { },
7975 { { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7976 },
7977 {
7978 "ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7979 .u.insns_int = {
7980 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7981 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7982 BPF_ALU64_IMM(BPF_RSH, R0, 32),
7983 BPF_EXIT_INSN(),
7984 },
7985 INTERNAL,
7986 { },
7987 { { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7988 },
7989 /* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7990 {
7991 "ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7992 .u.insns_int = {
7993 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7994 BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7995 BPF_EXIT_INSN(),
7996 },
7997 INTERNAL,
7998 { },
7999 { { 0, cpu_to_le16(0x3210) } },
8000 },
8001 {
8002 "ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
8003 .u.insns_int = {
8004 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8005 BPF_ENDIAN(BPF_FROM_LE, R0, 32),
8006 BPF_ALU64_REG(BPF_MOV, R1, R0),
8007 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8008 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8009 BPF_EXIT_INSN(),
8010 },
8011 INTERNAL,
8012 { },
8013 { { 0, cpu_to_le32(0x76543210) } },
8014 },
8015 {
8016 "ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
8017 .u.insns_int = {
8018 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8019 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8020 BPF_EXIT_INSN(),
8021 },
8022 INTERNAL,
8023 { },
8024 { { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
8025 },
8026 {
8027 "ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
8028 .u.insns_int = {
8029 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8030 BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8031 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8032 BPF_EXIT_INSN(),
8033 },
8034 INTERNAL,
8035 { },
8036 { { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
8037 },
8038 /* BSWAP */
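	/*
	 * BSWAP is an unconditional byte swap of the low 16, 32 or 64
	 * bits, independent of host endianness, so the expected values
	 * here are plain constants rather than cpu_to_*() conversions.
	 */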
8039 {
8040 "BSWAP 16: 0x0123456789abcdef -> 0xefcd",
8041 .u.insns_int = {
8042 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8043 BPF_BSWAP(R0, 16),
8044 BPF_EXIT_INSN(),
8045 },
8046 INTERNAL,
8047 { },
8048 { { 0, 0xefcd } },
8049 },
8050 {
8051 "BSWAP 32: 0x0123456789abcdef -> 0xefcdab89",
8052 .u.insns_int = {
8053 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8054 BPF_BSWAP(R0, 32),
8055 BPF_ALU64_REG(BPF_MOV, R1, R0),
8056 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8057 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8058 BPF_EXIT_INSN(),
8059 },
8060 INTERNAL,
8061 { },
8062 { { 0, 0xefcdab89 } },
8063 },
8064 {
8065 "BSWAP 64: 0x0123456789abcdef -> 0x67452301",
8066 .u.insns_int = {
8067 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8068 BPF_BSWAP(R0, 64),
8069 BPF_EXIT_INSN(),
8070 },
8071 INTERNAL,
8072 { },
8073 { { 0, 0x67452301 } },
8074 },
8075 {
8076 "BSWAP 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
8077 .u.insns_int = {
8078 BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8079 BPF_BSWAP(R0, 64),
8080 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8081 BPF_EXIT_INSN(),
8082 },
8083 INTERNAL,
8084 { },
8085 { { 0, 0xefcdab89 } },
8086 },
8087 /* BSWAP, reversed */
8088 {
8089 "BSWAP 16: 0xfedcba9876543210 -> 0x1032",
8090 .u.insns_int = {
8091 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8092 BPF_BSWAP(R0, 16),
8093 BPF_EXIT_INSN(),
8094 },
8095 INTERNAL,
8096 { },
8097 { { 0, 0x1032 } },
8098 },
8099 {
8100 "BSWAP 32: 0xfedcba9876543210 -> 0x10325476",
8101 .u.insns_int = {
8102 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8103 BPF_BSWAP(R0, 32),
8104 BPF_ALU64_REG(BPF_MOV, R1, R0),
8105 BPF_ALU64_IMM(BPF_RSH, R1, 32),
8106 BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8107 BPF_EXIT_INSN(),
8108 },
8109 INTERNAL,
8110 { },
8111 { { 0, 0x10325476 } },
8112 },
8113 {
8114 "BSWAP 64: 0xfedcba9876543210 -> 0x98badcfe",
8115 .u.insns_int = {
8116 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8117 BPF_BSWAP(R0, 64),
8118 BPF_EXIT_INSN(),
8119 },
8120 INTERNAL,
8121 { },
8122 { { 0, 0x98badcfe } },
8123 },
8124 {
8125 "BSWAP 64: 0xfedcba9876543210 >> 32 -> 0x10325476",
8126 .u.insns_int = {
8127 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8128 BPF_BSWAP(R0, 64),
8129 BPF_ALU64_IMM(BPF_RSH, R0, 32),
8130 BPF_EXIT_INSN(),
8131 },
8132 INTERNAL,
8133 { },
8134 { { 0, 0x10325476 } },
8135 },
8136 /* BPF_LDX_MEM B/H/W/DW */
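	/*
	 * BPF_LDX_MEM loads of size B/H/W zero-extend into the 64-bit
	 * destination. The #ifdef __BIG_ENDIAN offsets pick the least
	 * significant bytes of the doubleword stored on the stack, so
	 * the same value is loaded regardless of endianness. The tests
	 * marked FLAG_LARGE_MEM use R1 as the base register; judging by
	 * the data_size values in their test tuples, the runner is
	 * expected to hand the program a writable buffer of at least
	 * that size in R1.
	 */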
8137 {
8138 "BPF_LDX_MEM | BPF_B, base",
8139 .u.insns_int = {
8140 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8141 BPF_LD_IMM64(R2, 0x0000000000000008ULL),
8142 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8143#ifdef __BIG_ENDIAN
8144 BPF_LDX_MEM(BPF_B, R0, R10, -1),
8145#else
8146 BPF_LDX_MEM(BPF_B, R0, R10, -8),
8147#endif
8148 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8149 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8150 BPF_EXIT_INSN(),
8151 },
8152 INTERNAL,
8153 { },
8154 { { 0, 0 } },
8155 .stack_depth = 8,
8156 },
8157 {
8158 "BPF_LDX_MEM | BPF_B, MSB set",
8159 .u.insns_int = {
8160 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8161 BPF_LD_IMM64(R2, 0x0000000000000088ULL),
8162 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8163#ifdef __BIG_ENDIAN
8164 BPF_LDX_MEM(BPF_B, R0, R10, -1),
8165#else
8166 BPF_LDX_MEM(BPF_B, R0, R10, -8),
8167#endif
8168 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8169 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8170 BPF_EXIT_INSN(),
8171 },
8172 INTERNAL,
8173 { },
8174 { { 0, 0 } },
8175 .stack_depth = 8,
8176 },
8177 {
8178 "BPF_LDX_MEM | BPF_B, negative offset",
8179 .u.insns_int = {
8180 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8181 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8182 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8183 BPF_STX_MEM(BPF_B, R1, R2, -256),
8184 BPF_LDX_MEM(BPF_B, R0, R1, -256),
8185 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8186 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8187 BPF_EXIT_INSN(),
8188 },
8189 INTERNAL | FLAG_LARGE_MEM,
8190 { },
8191 { { 512, 0 } },
8192 .stack_depth = 0,
8193 },
8194 {
8195 "BPF_LDX_MEM | BPF_B, small positive offset",
8196 .u.insns_int = {
8197 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8198 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8199 BPF_STX_MEM(BPF_B, R1, R2, 256),
8200 BPF_LDX_MEM(BPF_B, R0, R1, 256),
8201 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8202 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8203 BPF_EXIT_INSN(),
8204 },
8205 INTERNAL | FLAG_LARGE_MEM,
8206 { },
8207 { { 512, 0 } },
8208 .stack_depth = 0,
8209 },
8210 {
8211 "BPF_LDX_MEM | BPF_B, large positive offset",
8212 .u.insns_int = {
8213 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8214 BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8215 BPF_STX_MEM(BPF_B, R1, R2, 4096),
8216 BPF_LDX_MEM(BPF_B, R0, R1, 4096),
8217 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8218 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8219 BPF_EXIT_INSN(),
8220 },
8221 INTERNAL | FLAG_LARGE_MEM,
8222 { },
8223 { { 4096 + 16, 0 } },
8224 .stack_depth = 0,
8225 },
8226 {
8227 "BPF_LDX_MEM | BPF_H, base",
8228 .u.insns_int = {
8229 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8230 BPF_LD_IMM64(R2, 0x0000000000000708ULL),
8231 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8232#ifdef __BIG_ENDIAN
8233 BPF_LDX_MEM(BPF_H, R0, R10, -2),
8234#else
8235 BPF_LDX_MEM(BPF_H, R0, R10, -8),
8236#endif
8237 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8238 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8239 BPF_EXIT_INSN(),
8240 },
8241 INTERNAL,
8242 { },
8243 { { 0, 0 } },
8244 .stack_depth = 8,
8245 },
8246 {
8247 "BPF_LDX_MEM | BPF_H, MSB set",
8248 .u.insns_int = {
8249 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8250 BPF_LD_IMM64(R2, 0x0000000000008788ULL),
8251 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8252#ifdef __BIG_ENDIAN
8253 BPF_LDX_MEM(BPF_H, R0, R10, -2),
8254#else
8255 BPF_LDX_MEM(BPF_H, R0, R10, -8),
8256#endif
8257 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8258 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8259 BPF_EXIT_INSN(),
8260 },
8261 INTERNAL,
8262 { },
8263 { { 0, 0 } },
8264 .stack_depth = 8,
8265 },
8266 {
8267 "BPF_LDX_MEM | BPF_H, negative offset",
8268 .u.insns_int = {
8269 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8270 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8271 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8272 BPF_STX_MEM(BPF_H, R1, R2, -256),
8273 BPF_LDX_MEM(BPF_H, R0, R1, -256),
8274 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8275 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8276 BPF_EXIT_INSN(),
8277 },
8278 INTERNAL | FLAG_LARGE_MEM,
8279 { },
8280 { { 512, 0 } },
8281 .stack_depth = 0,
8282 },
8283 {
8284 "BPF_LDX_MEM | BPF_H, small positive offset",
8285 .u.insns_int = {
8286 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8287 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8288 BPF_STX_MEM(BPF_H, R1, R2, 256),
8289 BPF_LDX_MEM(BPF_H, R0, R1, 256),
8290 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8291 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8292 BPF_EXIT_INSN(),
8293 },
8294 INTERNAL | FLAG_LARGE_MEM,
8295 { },
8296 { { 512, 0 } },
8297 .stack_depth = 0,
8298 },
8299 {
8300 "BPF_LDX_MEM | BPF_H, large positive offset",
8301 .u.insns_int = {
8302 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8303 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8304 BPF_STX_MEM(BPF_H, R1, R2, 8192),
8305 BPF_LDX_MEM(BPF_H, R0, R1, 8192),
8306 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8307 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8308 BPF_EXIT_INSN(),
8309 },
8310 INTERNAL | FLAG_LARGE_MEM,
8311 { },
8312 { { 8192 + 16, 0 } },
8313 .stack_depth = 0,
8314 },
8315 {
8316 "BPF_LDX_MEM | BPF_H, unaligned positive offset",
8317 .u.insns_int = {
8318 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8319 BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8320 BPF_STX_MEM(BPF_H, R1, R2, 13),
8321 BPF_LDX_MEM(BPF_H, R0, R1, 13),
8322 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8323 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8324 BPF_EXIT_INSN(),
8325 },
8326 INTERNAL | FLAG_LARGE_MEM,
8327 { },
8328 { { 32, 0 } },
8329 .stack_depth = 0,
8330 },
8331 {
8332 "BPF_LDX_MEM | BPF_W, base",
8333 .u.insns_int = {
8334 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8335 BPF_LD_IMM64(R2, 0x0000000005060708ULL),
8336 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8337#ifdef __BIG_ENDIAN
8338 BPF_LDX_MEM(BPF_W, R0, R10, -4),
8339#else
8340 BPF_LDX_MEM(BPF_W, R0, R10, -8),
8341#endif
8342 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8343 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8344 BPF_EXIT_INSN(),
8345 },
8346 INTERNAL,
8347 { },
8348 { { 0, 0 } },
8349 .stack_depth = 8,
8350 },
8351 {
8352 "BPF_LDX_MEM | BPF_W, MSB set",
8353 .u.insns_int = {
8354 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8355 BPF_LD_IMM64(R2, 0x0000000085868788ULL),
8356 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8357#ifdef __BIG_ENDIAN
8358 BPF_LDX_MEM(BPF_W, R0, R10, -4),
8359#else
8360 BPF_LDX_MEM(BPF_W, R0, R10, -8),
8361#endif
8362 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8363 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8364 BPF_EXIT_INSN(),
8365 },
8366 INTERNAL,
8367 { },
8368 { { 0, 0 } },
8369 .stack_depth = 8,
8370 },
8371 {
8372 "BPF_LDX_MEM | BPF_W, negative offset",
8373 .u.insns_int = {
8374 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8375 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8376 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8377 BPF_STX_MEM(BPF_W, R1, R2, -256),
8378 BPF_LDX_MEM(BPF_W, R0, R1, -256),
8379 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8380 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8381 BPF_EXIT_INSN(),
8382 },
8383 INTERNAL | FLAG_LARGE_MEM,
8384 { },
8385 { { 512, 0 } },
8386 .stack_depth = 0,
8387 },
8388 {
8389 "BPF_LDX_MEM | BPF_W, small positive offset",
8390 .u.insns_int = {
8391 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8392 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8393 BPF_STX_MEM(BPF_W, R1, R2, 256),
8394 BPF_LDX_MEM(BPF_W, R0, R1, 256),
8395 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8396 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8397 BPF_EXIT_INSN(),
8398 },
8399 INTERNAL | FLAG_LARGE_MEM,
8400 { },
8401 { { 512, 0 } },
8402 .stack_depth = 0,
8403 },
8404 {
8405 "BPF_LDX_MEM | BPF_W, large positive offset",
8406 .u.insns_int = {
8407 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8408 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8409 BPF_STX_MEM(BPF_W, R1, R2, 16384),
8410 BPF_LDX_MEM(BPF_W, R0, R1, 16384),
8411 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8412 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8413 BPF_EXIT_INSN(),
8414 },
8415 INTERNAL | FLAG_LARGE_MEM,
8416 { },
8417 { { 16384 + 16, 0 } },
8418 .stack_depth = 0,
8419 },
8420 {
8421 "BPF_LDX_MEM | BPF_W, unaligned positive offset",
8422 .u.insns_int = {
8423 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8424 BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8425 BPF_STX_MEM(BPF_W, R1, R2, 13),
8426 BPF_LDX_MEM(BPF_W, R0, R1, 13),
8427 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8428 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8429 BPF_EXIT_INSN(),
8430 },
8431 INTERNAL | FLAG_LARGE_MEM,
8432 { },
8433 { { 32, 0 } },
8434 .stack_depth = 0,
8435 },
8436 {
8437 "BPF_LDX_MEM | BPF_DW, base",
8438 .u.insns_int = {
8439 BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8440 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8441 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8442 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8443 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8444 BPF_EXIT_INSN(),
8445 },
8446 INTERNAL,
8447 { },
8448 { { 0, 0 } },
8449 .stack_depth = 8,
8450 },
8451 {
8452 "BPF_LDX_MEM | BPF_DW, MSB set",
8453 .u.insns_int = {
8454 BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8455 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8456 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8457 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8458 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8459 BPF_EXIT_INSN(),
8460 },
8461 INTERNAL,
8462 { },
8463 { { 0, 0 } },
8464 .stack_depth = 8,
8465 },
8466 {
8467 "BPF_LDX_MEM | BPF_DW, negative offset",
8468 .u.insns_int = {
8469 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8470 BPF_ALU64_IMM(BPF_ADD, R1, 512),
8471 BPF_STX_MEM(BPF_DW, R1, R2, -256),
8472 BPF_LDX_MEM(BPF_DW, R0, R1, -256),
8473 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8474 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8475 BPF_EXIT_INSN(),
8476 },
8477 INTERNAL | FLAG_LARGE_MEM,
8478 { },
8479 { { 512, 0 } },
8480 .stack_depth = 0,
8481 },
8482 {
8483 "BPF_LDX_MEM | BPF_DW, small positive offset",
8484 .u.insns_int = {
8485 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8486 BPF_STX_MEM(BPF_DW, R1, R2, 256),
8487 BPF_LDX_MEM(BPF_DW, R0, R1, 256),
8488 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8489 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8490 BPF_EXIT_INSN(),
8491 },
8492 INTERNAL | FLAG_LARGE_MEM,
8493 { },
8494 { { 512, 0 } },
8495		.stack_depth = 0,
8496 },
8497 {
8498 "BPF_LDX_MEM | BPF_DW, large positive offset",
8499 .u.insns_int = {
8500 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8501 BPF_STX_MEM(BPF_DW, R1, R2, 32760),
8502 BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
8503 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8504 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8505 BPF_EXIT_INSN(),
8506 },
8507 INTERNAL | FLAG_LARGE_MEM,
8508 { },
8509 { { 32768, 0 } },
8510 .stack_depth = 0,
8511 },
8512 {
8513 "BPF_LDX_MEM | BPF_DW, unaligned positive offset",
8514 .u.insns_int = {
8515 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8516 BPF_STX_MEM(BPF_DW, R1, R2, 13),
8517 BPF_LDX_MEM(BPF_DW, R0, R1, 13),
8518 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8519 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8520 BPF_EXIT_INSN(),
8521 },
8522 INTERNAL | FLAG_LARGE_MEM,
8523 { },
8524 { { 32, 0 } },
8525 .stack_depth = 0,
8526 },
8527 /* BPF_LDX_MEMSX B/H/W */
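	/*
	 * Each MEMSX test stores a doubleword on the stack and then does a
	 * sign-extending load of its least significant byte/half/word. The
	 * load offset differs between big- and little-endian hosts so that
	 * the same low-order bytes are read on both.
	 */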
8528 {
8529 "BPF_LDX_MEMSX | BPF_B",
8530 .u.insns_int = {
8531 BPF_LD_IMM64(R1, 0xdead0000000000f0ULL),
8532 BPF_LD_IMM64(R2, 0xfffffffffffffff0ULL),
8533 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8534#ifdef __BIG_ENDIAN
8535 BPF_LDX_MEMSX(BPF_B, R0, R10, -1),
8536#else
8537 BPF_LDX_MEMSX(BPF_B, R0, R10, -8),
8538#endif
8539 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8540 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8541 BPF_EXIT_INSN(),
8542 },
8543 INTERNAL,
8544 { },
8545 { { 0, 0 } },
8546 .stack_depth = 8,
8547 },
8548 {
8549 "BPF_LDX_MEMSX | BPF_H",
8550 .u.insns_int = {
8551 BPF_LD_IMM64(R1, 0xdead00000000f123ULL),
8552 BPF_LD_IMM64(R2, 0xfffffffffffff123ULL),
8553 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8554#ifdef __BIG_ENDIAN
8555 BPF_LDX_MEMSX(BPF_H, R0, R10, -2),
8556#else
8557 BPF_LDX_MEMSX(BPF_H, R0, R10, -8),
8558#endif
8559 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8560 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8561 BPF_EXIT_INSN(),
8562 },
8563 INTERNAL,
8564 { },
8565 { { 0, 0 } },
8566 .stack_depth = 8,
8567 },
8568 {
8569 "BPF_LDX_MEMSX | BPF_W",
8570 .u.insns_int = {
8571 BPF_LD_IMM64(R1, 0x00000000deadbeefULL),
8572 BPF_LD_IMM64(R2, 0xffffffffdeadbeefULL),
8573 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8574#ifdef __BIG_ENDIAN
8575 BPF_LDX_MEMSX(BPF_W, R0, R10, -4),
8576#else
8577 BPF_LDX_MEMSX(BPF_W, R0, R10, -8),
8578#endif
8579 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8580 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8581 BPF_EXIT_INSN(),
8582 },
8583 INTERNAL,
8584 { },
8585 { { 0, 0 } },
8586 .stack_depth = 8,
8587 },
8588 /* BPF_STX_MEM B/H/W/DW */
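	/*
	 * The narrow store tests first fill the stack slot with a known
	 * doubleword, overwrite only part of it with a B/H/W store (at an
	 * endian-dependent offset so the same bytes are hit on both ends),
	 * and then load the whole doubleword back to check that the
	 * neighbouring bytes were left untouched.
	 */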
8589 {
8590 "BPF_STX_MEM | BPF_B",
8591 .u.insns_int = {
8592 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8593 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8594 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
8595 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8596#ifdef __BIG_ENDIAN
8597 BPF_STX_MEM(BPF_B, R10, R2, -1),
8598#else
8599 BPF_STX_MEM(BPF_B, R10, R2, -8),
8600#endif
8601 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8602 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8603 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8604 BPF_EXIT_INSN(),
8605 },
8606 INTERNAL,
8607 { },
8608 { { 0, 0 } },
8609 .stack_depth = 8,
8610 },
8611 {
8612 "BPF_STX_MEM | BPF_B, MSB set",
8613 .u.insns_int = {
8614 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8615 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8616 BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
8617 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8618#ifdef __BIG_ENDIAN
8619 BPF_STX_MEM(BPF_B, R10, R2, -1),
8620#else
8621 BPF_STX_MEM(BPF_B, R10, R2, -8),
8622#endif
8623 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8624 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8625 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8626 BPF_EXIT_INSN(),
8627 },
8628 INTERNAL,
8629 { },
8630 { { 0, 0 } },
8631 .stack_depth = 8,
8632 },
8633 {
8634 "BPF_STX_MEM | BPF_H",
8635 .u.insns_int = {
8636 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8637 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8638 BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
8639 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8640#ifdef __BIG_ENDIAN
8641 BPF_STX_MEM(BPF_H, R10, R2, -2),
8642#else
8643 BPF_STX_MEM(BPF_H, R10, R2, -8),
8644#endif
8645 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8646 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8647 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8648 BPF_EXIT_INSN(),
8649 },
8650 INTERNAL,
8651 { },
8652 { { 0, 0 } },
8653 .stack_depth = 8,
8654 },
8655 {
8656 "BPF_STX_MEM | BPF_H, MSB set",
8657 .u.insns_int = {
8658 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8659 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8660 BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
8661 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8662#ifdef __BIG_ENDIAN
8663 BPF_STX_MEM(BPF_H, R10, R2, -2),
8664#else
8665 BPF_STX_MEM(BPF_H, R10, R2, -8),
8666#endif
8667 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8668 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8669 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8670 BPF_EXIT_INSN(),
8671 },
8672 INTERNAL,
8673 { },
8674 { { 0, 0 } },
8675 .stack_depth = 8,
8676 },
8677 {
8678 "BPF_STX_MEM | BPF_W",
8679 .u.insns_int = {
8680 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8681 BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8682 BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
8683 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8684#ifdef __BIG_ENDIAN
8685 BPF_STX_MEM(BPF_W, R10, R2, -4),
8686#else
8687 BPF_STX_MEM(BPF_W, R10, R2, -8),
8688#endif
8689 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8690 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8691 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8692 BPF_EXIT_INSN(),
8693 },
8694 INTERNAL,
8695 { },
8696 { { 0, 0 } },
8697 .stack_depth = 8,
8698 },
8699 {
8700 "BPF_STX_MEM | BPF_W, MSB set",
8701 .u.insns_int = {
8702 BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8703 BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8704 BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
8705 BPF_STX_MEM(BPF_DW, R10, R1, -8),
8706#ifdef __BIG_ENDIAN
8707 BPF_STX_MEM(BPF_W, R10, R2, -4),
8708#else
8709 BPF_STX_MEM(BPF_W, R10, R2, -8),
8710#endif
8711 BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8712 BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8713 BPF_ALU64_IMM(BPF_MOV, R0, 0),
8714 BPF_EXIT_INSN(),
8715 },
8716 INTERNAL,
8717 { },
8718 { { 0, 0 } },
8719 .stack_depth = 8,
8720 },
8721 /* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
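	/*
	 * Round-trip the extreme values of each access size: store the
	 * all-ones ("max negative") and largest positive patterns with
	 * ST_MEM (immediate) or STX_MEM (register) and load them back,
	 * relying on BPF_LDX_MEM being zero-extending for B/H/W.
	 */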
8722 {
8723 "ST_MEM_B: Store/Load byte: max negative",
8724 .u.insns_int = {
8725 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8726 BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8727 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8728 BPF_EXIT_INSN(),
8729 },
8730 INTERNAL,
8731 { },
8732 { { 0, 0xff } },
8733 .stack_depth = 40,
8734 },
8735 {
8736 "ST_MEM_B: Store/Load byte: max positive",
8737 .u.insns_int = {
8738 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8739			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
8740			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8741 BPF_EXIT_INSN(),
8742 },
8743 INTERNAL,
8744 { },
8745 { { 0, 0x7f } },
8746 .stack_depth = 40,
8747 },
8748 {
8749 "STX_MEM_B: Store/Load byte: max negative",
8750 .u.insns_int = {
8751 BPF_LD_IMM64(R0, 0),
8752 BPF_LD_IMM64(R1, 0xffLL),
8753 BPF_STX_MEM(BPF_B, R10, R1, -40),
8754 BPF_LDX_MEM(BPF_B, R0, R10, -40),
8755 BPF_EXIT_INSN(),
8756 },
8757 INTERNAL,
8758 { },
8759 { { 0, 0xff } },
8760 .stack_depth = 40,
8761 },
8762 {
8763 "ST_MEM_H: Store/Load half word: max negative",
8764 .u.insns_int = {
8765 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8766 BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8767 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8768 BPF_EXIT_INSN(),
8769 },
8770 INTERNAL,
8771 { },
8772 { { 0, 0xffff } },
8773 .stack_depth = 40,
8774 },
8775 {
8776 "ST_MEM_H: Store/Load half word: max positive",
8777 .u.insns_int = {
8778 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8779 BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8780 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8781 BPF_EXIT_INSN(),
8782 },
8783 INTERNAL,
8784 { },
8785 { { 0, 0x7fff } },
8786 .stack_depth = 40,
8787 },
8788 {
8789 "STX_MEM_H: Store/Load half word: max negative",
8790 .u.insns_int = {
8791 BPF_LD_IMM64(R0, 0),
8792 BPF_LD_IMM64(R1, 0xffffLL),
8793 BPF_STX_MEM(BPF_H, R10, R1, -40),
8794 BPF_LDX_MEM(BPF_H, R0, R10, -40),
8795 BPF_EXIT_INSN(),
8796 },
8797 INTERNAL,
8798 { },
8799 { { 0, 0xffff } },
8800 .stack_depth = 40,
8801 },
8802 {
8803 "ST_MEM_W: Store/Load word: max negative",
8804 .u.insns_int = {
8805 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8806 BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8807 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8808 BPF_EXIT_INSN(),
8809 },
8810 INTERNAL,
8811 { },
8812 { { 0, 0xffffffff } },
8813 .stack_depth = 40,
8814 },
8815 {
8816 "ST_MEM_W: Store/Load word: max positive",
8817 .u.insns_int = {
8818 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8819 BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8820 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8821 BPF_EXIT_INSN(),
8822 },
8823 INTERNAL,
8824 { },
8825 { { 0, 0x7fffffff } },
8826 .stack_depth = 40,
8827 },
8828 {
8829 "STX_MEM_W: Store/Load word: max negative",
8830 .u.insns_int = {
8831 BPF_LD_IMM64(R0, 0),
8832 BPF_LD_IMM64(R1, 0xffffffffLL),
8833 BPF_STX_MEM(BPF_W, R10, R1, -40),
8834 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8835 BPF_EXIT_INSN(),
8836 },
8837 INTERNAL,
8838 { },
8839 { { 0, 0xffffffff } },
8840 .stack_depth = 40,
8841 },
8842 {
8843 "ST_MEM_DW: Store/Load double word: max negative",
8844 .u.insns_int = {
8845 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8846 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8847 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8848 BPF_EXIT_INSN(),
8849 },
8850 INTERNAL,
8851 { },
8852 { { 0, 0xffffffff } },
8853 .stack_depth = 40,
8854 },
8855 {
8856 "ST_MEM_DW: Store/Load double word: max negative 2",
8857 .u.insns_int = {
8858 BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8859 BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8860 BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8861 BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8862 BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8863 BPF_MOV32_IMM(R0, 2),
8864 BPF_EXIT_INSN(),
8865 BPF_MOV32_IMM(R0, 1),
8866 BPF_EXIT_INSN(),
8867 },
8868 INTERNAL,
8869 { },
8870 { { 0, 0x1 } },
8871 .stack_depth = 40,
8872 },
8873 {
8874 "ST_MEM_DW: Store/Load double word: max positive",
8875 .u.insns_int = {
8876 BPF_ALU32_IMM(BPF_MOV, R0, 1),
8877 BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8878 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8879 BPF_EXIT_INSN(),
8880 },
8881 INTERNAL,
8882 { },
8883 { { 0, 0x7fffffff } },
8884 .stack_depth = 40,
8885 },
8886 {
8887 "STX_MEM_DW: Store/Load double word: max negative",
8888 .u.insns_int = {
8889 BPF_LD_IMM64(R0, 0),
8890 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8891 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8892 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8893 BPF_EXIT_INSN(),
8894 },
8895 INTERNAL,
8896 { },
8897 { { 0, 0xffffffff } },
8898 .stack_depth = 40,
8899 },
8900 {
8901 "STX_MEM_DW: Store double word: first word in memory",
8902 .u.insns_int = {
8903 BPF_LD_IMM64(R0, 0),
8904 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8905 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8906 BPF_LDX_MEM(BPF_W, R0, R10, -40),
8907 BPF_EXIT_INSN(),
8908 },
8909 INTERNAL,
8910 { },
8911#ifdef __BIG_ENDIAN
8912 { { 0, 0x01234567 } },
8913#else
8914 { { 0, 0x89abcdef } },
8915#endif
8916 .stack_depth = 40,
8917 },
8918 {
8919 "STX_MEM_DW: Store double word: second word in memory",
8920 .u.insns_int = {
8921 BPF_LD_IMM64(R0, 0),
8922 BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8923 BPF_STX_MEM(BPF_DW, R10, R1, -40),
8924 BPF_LDX_MEM(BPF_W, R0, R10, -36),
8925 BPF_EXIT_INSN(),
8926 },
8927 INTERNAL,
8928 { },
8929#ifdef __BIG_ENDIAN
8930 { { 0, 0x89abcdef } },
8931#else
8932 { { 0, 0x01234567 } },
8933#endif
8934 .stack_depth = 40,
8935 },
8936 /* BPF_STX | BPF_ATOMIC | BPF_W/DW */
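	/*
	 * The two XADD cases below are generated at run time by their
	 * .fill_helper callbacks instead of a static template; as the
	 * "X + 1 + 1 + ..." description suggests, they build up a long
	 * chain of atomic adds and check the accumulated result.
	 */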
8937 {
8938 "STX_XADD_W: X + 1 + 1 + 1 + ...",
8939 { },
8940 INTERNAL,
8941 { },
8942 { { 0, 4134 } },
8943 .fill_helper = bpf_fill_stxw,
8944 },
8945 {
8946 "STX_XADD_DW: X + 1 + 1 + 1 + ...",
8947 { },
8948 INTERNAL,
8949 { },
8950 { { 0, 4134 } },
8951 .fill_helper = bpf_fill_stxdw,
8952 },
8953 /*
8954 * Exhaustive tests of atomic operation variants.
8955 * Individual tests are expanded from template macros for all
8956 * combinations of ALU operation, word size and fetching.
8957 */
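/*
 * For 32-bit (BPF_W) operations, BPF_ATOMIC_POISON() tags the upper half
 * of the 64-bit update value with 0xbaadf00d. A conforming implementation
 * must operate on the low word only, so poison leaking into memory or into
 * a fetched value makes the checks in the templates below fail.
 */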
8958#define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8959
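/*
 * TEST1 checks the value left in memory: it is loaded back and its upper
 * half is OR-ed into the lower half so that a stray upper word shows up in
 * the 32-bit test result. TEST2 and TEST3 check for side effects on R10
 * and on the scratch register holding a copy of R10. TEST4 checks the
 * fetch semantics: with BPF_FETCH the source register receives the old
 * memory value, otherwise it keeps the update value.
 */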
8960#define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result) \
8961{ \
8962 "BPF_ATOMIC | " #width ", " #op ": Test: " \
8963 #old " " #logic " " #update " = " #result, \
8964 .u.insns_int = { \
8965 BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)), \
8966 BPF_ST_MEM(width, R10, -40, old), \
8967 BPF_ATOMIC_OP(width, op, R10, R5, -40), \
8968 BPF_LDX_MEM(width, R0, R10, -40), \
8969 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8970 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8971 BPF_ALU64_REG(BPF_OR, R0, R1), \
8972 BPF_EXIT_INSN(), \
8973 }, \
8974 INTERNAL, \
8975 { }, \
8976 { { 0, result } }, \
8977 .stack_depth = 40, \
8978}
8979#define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result) \
8980{ \
8981 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: " \
8982 #old " " #logic " " #update " = " #result, \
8983 .u.insns_int = { \
8984 BPF_ALU64_REG(BPF_MOV, R1, R10), \
8985 BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)), \
8986 BPF_ST_MEM(BPF_W, R10, -40, old), \
8987 BPF_ATOMIC_OP(width, op, R10, R0, -40), \
8988 BPF_ALU64_REG(BPF_MOV, R0, R10), \
8989 BPF_ALU64_REG(BPF_SUB, R0, R1), \
8990 BPF_ALU64_REG(BPF_MOV, R1, R0), \
8991 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
8992 BPF_ALU64_REG(BPF_OR, R0, R1), \
8993 BPF_EXIT_INSN(), \
8994 }, \
8995 INTERNAL, \
8996 { }, \
8997 { { 0, 0 } }, \
8998 .stack_depth = 40, \
8999}
9000#define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result) \
9001{ \
9002 "BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: " \
9003 #old " " #logic " " #update " = " #result, \
9004 .u.insns_int = { \
9005 BPF_ALU64_REG(BPF_MOV, R0, R10), \
9006 BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
9007 BPF_ST_MEM(width, R10, -40, old), \
9008 BPF_ATOMIC_OP(width, op, R10, R1, -40), \
9009 BPF_ALU64_REG(BPF_SUB, R0, R10), \
9010 BPF_ALU64_REG(BPF_MOV, R1, R0), \
9011 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
9012 BPF_ALU64_REG(BPF_OR, R0, R1), \
9013 BPF_EXIT_INSN(), \
9014 }, \
9015 INTERNAL, \
9016 { }, \
9017 { { 0, 0 } }, \
9018 .stack_depth = 40, \
9019}
9020#define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result) \
9021{ \
9022 "BPF_ATOMIC | " #width ", " #op ": Test fetch: " \
9023 #old " " #logic " " #update " = " #result, \
9024 .u.insns_int = { \
9025 BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)), \
9026 BPF_ST_MEM(width, R10, -40, old), \
9027 BPF_ATOMIC_OP(width, op, R10, R3, -40), \
9028 BPF_ALU32_REG(BPF_MOV, R0, R3), \
9029 BPF_EXIT_INSN(), \
9030 }, \
9031 INTERNAL, \
9032 { }, \
9033 { { 0, (op) & BPF_FETCH ? old : update } }, \
9034 .stack_depth = 40, \
9035}
9036 /* BPF_ATOMIC | BPF_W: BPF_ADD */
9037 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9038 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9039 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9040 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9041 /* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
9042 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9043 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9044 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9045 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9046 /* BPF_ATOMIC | BPF_DW: BPF_ADD */
9047 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9048 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9049 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9050 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9051 /* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
9052 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9053 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9054 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9055 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9056 /* BPF_ATOMIC | BPF_W: BPF_AND */
9057 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9058 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9059 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9060 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9061 /* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
9062 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9063 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9064 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9065 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9066 /* BPF_ATOMIC | BPF_DW: BPF_AND */
9067 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9068 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9069 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9070 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9071 /* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
9072 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9073 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9074 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9075 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9076 /* BPF_ATOMIC | BPF_W: BPF_OR */
9077 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9078 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9079 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9080 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9081 /* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
9082 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9083 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9084 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9085 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9086 /* BPF_ATOMIC | BPF_DW: BPF_OR */
9087 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9088 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9089 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9090 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9091 /* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
9092 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9093 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9094 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9095 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9096 /* BPF_ATOMIC | BPF_W: BPF_XOR */
9097 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9098 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9099 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9100 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9101 /* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
9102 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9103 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9104 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9105 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9106 /* BPF_ATOMIC | BPF_DW: BPF_XOR */
9107 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9108 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9109 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9110 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9111 /* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
9112 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9113 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9114 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9115 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9116 /* BPF_ATOMIC | BPF_W: BPF_XCHG */
9117 BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9118 BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9119 BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9120 BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9121 /* BPF_ATOMIC | BPF_DW: BPF_XCHG */
9122 BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9123 BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9124 BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9125 BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9126#undef BPF_ATOMIC_POISON
9127#undef BPF_ATOMIC_OP_TEST1
9128#undef BPF_ATOMIC_OP_TEST2
9129#undef BPF_ATOMIC_OP_TEST3
9130#undef BPF_ATOMIC_OP_TEST4
9131 /* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
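	/*
	 * BPF_CMPXCHG uses R0 as the implicit compare operand and always
	 * writes the old memory value back into R0; memory is replaced with
	 * the source register only when the old value matched R0. The tests
	 * below cover both the returned and the stored value for the
	 * matching and non-matching cases.
	 */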
9132 {
9133 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
9134 .u.insns_int = {
9135 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9136 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9137 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9138 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9139 BPF_EXIT_INSN(),
9140 },
9141 INTERNAL,
9142 { },
9143 { { 0, 0x01234567 } },
9144 .stack_depth = 40,
9145 },
9146 {
9147 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
9148 .u.insns_int = {
9149 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9150 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9151 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9152 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9153 BPF_LDX_MEM(BPF_W, R0, R10, -40),
9154 BPF_EXIT_INSN(),
9155 },
9156 INTERNAL,
9157 { },
9158 { { 0, 0x89abcdef } },
9159 .stack_depth = 40,
9160 },
9161 {
9162 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
9163 .u.insns_int = {
9164 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9165 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
9166 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9167 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9168 BPF_EXIT_INSN(),
9169 },
9170 INTERNAL,
9171 { },
9172 { { 0, 0x01234567 } },
9173 .stack_depth = 40,
9174 },
9175 {
9176 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
9177 .u.insns_int = {
9178 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9179 BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
9180 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9181 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9182 BPF_LDX_MEM(BPF_W, R0, R10, -40),
9183 BPF_EXIT_INSN(),
9184 },
9185 INTERNAL,
9186 { },
9187 { { 0, 0x01234567 } },
9188 .stack_depth = 40,
9189 },
9190 {
9191 "BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
9192 .u.insns_int = {
9193 BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9194 BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9195 BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9196 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9197 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9198 BPF_ALU32_REG(BPF_MOV, R0, R3),
9199 BPF_EXIT_INSN(),
9200 },
9201 INTERNAL,
9202 { },
9203 { { 0, 0x89abcdef } },
9204 .stack_depth = 40,
9205 },
9206 /* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
9207 {
9208 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
9209 .u.insns_int = {
9210 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9211 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9212 BPF_ALU64_REG(BPF_MOV, R0, R1),
9213 BPF_STX_MEM(BPF_DW, R10, R1, -40),
9214 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9215 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9216 BPF_ALU64_REG(BPF_SUB, R0, R1),
9217 BPF_EXIT_INSN(),
9218 },
9219 INTERNAL,
9220 { },
9221 { { 0, 0 } },
9222 .stack_depth = 40,
9223 },
9224 {
9225 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
9226 .u.insns_int = {
9227 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9228 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9229 BPF_ALU64_REG(BPF_MOV, R0, R1),
9230 BPF_STX_MEM(BPF_DW, R10, R0, -40),
9231 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9232 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
9233 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
9234 BPF_ALU64_REG(BPF_SUB, R0, R2),
9235 BPF_EXIT_INSN(),
9236 },
9237 INTERNAL,
9238 { },
9239 { { 0, 0 } },
9240 .stack_depth = 40,
9241 },
9242 {
9243 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
9244 .u.insns_int = {
9245 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9246 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9247 BPF_ALU64_REG(BPF_MOV, R0, R1),
9248 BPF_ALU64_IMM(BPF_ADD, R0, 1),
9249 BPF_STX_MEM(BPF_DW, R10, R1, -40),
9250 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9251 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9252 BPF_ALU64_REG(BPF_SUB, R0, R1),
9253 BPF_EXIT_INSN(),
9254 },
9255 INTERNAL,
9256 { },
9257 { { 0, 0 } },
9258 .stack_depth = 40,
9259 },
9260 {
9261 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
9262 .u.insns_int = {
9263 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9264 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9265 BPF_ALU64_REG(BPF_MOV, R0, R1),
9266 BPF_ALU64_IMM(BPF_ADD, R0, 1),
9267 BPF_STX_MEM(BPF_DW, R10, R1, -40),
9268 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9269 BPF_LDX_MEM(BPF_DW, R0, R10, -40),
9270 BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9271 BPF_ALU64_REG(BPF_SUB, R0, R1),
9272 BPF_EXIT_INSN(),
9273 },
9274 INTERNAL,
9275 { },
9276 { { 0, 0 } },
9277 .stack_depth = 40,
9278 },
9279 {
9280 "BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
9281 .u.insns_int = {
9282 BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9283 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9284 BPF_ALU64_REG(BPF_MOV, R0, R1),
9285 BPF_STX_MEM(BPF_DW, R10, R1, -40),
9286 BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9287 BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
9288 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
9289 BPF_ALU64_REG(BPF_SUB, R0, R2),
9290 BPF_EXIT_INSN(),
9291 },
9292 INTERNAL,
9293 { },
9294 { { 0, 0 } },
9295 .stack_depth = 40,
9296 },
9297 /* BPF_JMP32 | BPF_JEQ | BPF_K */
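	/*
	 * The JMP32 conditionals compare only the low 32 bits of their
	 * operands. The tests below typically pair a branch that must not
	 * be taken with one that must be taken; either mistake ends up at
	 * the "MOV R0, 0" tail and returns 0 instead of the expected
	 * register value.
	 */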
9298 {
9299 "JMP32_JEQ_K: Small immediate",
9300 .u.insns_int = {
9301 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9302 BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
9303 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
9304 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9305 BPF_EXIT_INSN(),
9306 },
9307 INTERNAL,
9308 { },
9309 { { 0, 123 } }
9310 },
9311 {
9312 "JMP32_JEQ_K: Large immediate",
9313 .u.insns_int = {
9314 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
9315 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
9316 BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
9317 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9318 BPF_EXIT_INSN(),
9319 },
9320 INTERNAL,
9321 { },
9322 { { 0, 12345678 } }
9323 },
9324 {
9325 "JMP32_JEQ_K: negative immediate",
9326 .u.insns_int = {
9327 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9328 BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
9329 BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
9330 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9331 BPF_EXIT_INSN(),
9332 },
9333 INTERNAL,
9334 { },
9335 { { 0, -123 } }
9336 },
9337 /* BPF_JMP32 | BPF_JEQ | BPF_X */
9338 {
9339 "JMP32_JEQ_X",
9340 .u.insns_int = {
9341 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
9342 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
9343 BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
9344 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
9345 BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
9346 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9347 BPF_EXIT_INSN(),
9348 },
9349 INTERNAL,
9350 { },
9351 { { 0, 1234 } }
9352 },
9353 /* BPF_JMP32 | BPF_JNE | BPF_K */
9354 {
9355 "JMP32_JNE_K: Small immediate",
9356 .u.insns_int = {
9357 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9358 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
9359 BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
9360 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9361 BPF_EXIT_INSN(),
9362 },
9363 INTERNAL,
9364 { },
9365 { { 0, 123 } }
9366 },
9367 {
9368 "JMP32_JNE_K: Large immediate",
9369 .u.insns_int = {
9370 BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
9371 BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
9372 BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
9373 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9374 BPF_EXIT_INSN(),
9375 },
9376 INTERNAL,
9377 { },
9378 { { 0, 12345678 } }
9379 },
9380 {
9381 "JMP32_JNE_K: negative immediate",
9382 .u.insns_int = {
9383 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9384 BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
9385 BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
9386 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9387 BPF_EXIT_INSN(),
9388 },
9389 INTERNAL,
9390 { },
9391 { { 0, -123 } }
9392 },
9393 /* BPF_JMP32 | BPF_JNE | BPF_X */
9394 {
9395 "JMP32_JNE_X",
9396 .u.insns_int = {
9397 BPF_ALU32_IMM(BPF_MOV, R0, 1234),
9398 BPF_ALU32_IMM(BPF_MOV, R1, 1234),
9399 BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
9400 BPF_ALU32_IMM(BPF_MOV, R1, 4321),
9401 BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
9402 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9403 BPF_EXIT_INSN(),
9404 },
9405 INTERNAL,
9406 { },
9407 { { 0, 1234 } }
9408 },
9409 /* BPF_JMP32 | BPF_JSET | BPF_K */
9410 {
9411 "JMP32_JSET_K: Small immediate",
9412 .u.insns_int = {
9413 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9414 BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
9415 BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
9416 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9417 BPF_EXIT_INSN(),
9418 },
9419 INTERNAL,
9420 { },
9421 { { 0, 1 } }
9422 },
9423 {
9424 "JMP32_JSET_K: Large immediate",
9425 .u.insns_int = {
9426 BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
9427 BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
9428 BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
9429 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9430 BPF_EXIT_INSN(),
9431 },
9432 INTERNAL,
9433 { },
9434 { { 0, 0x40000000 } }
9435 },
9436 {
9437 "JMP32_JSET_K: negative immediate",
9438 .u.insns_int = {
9439 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9440 BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
9441 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9442 BPF_EXIT_INSN(),
9443 },
9444 INTERNAL,
9445 { },
9446 { { 0, -123 } }
9447 },
9448 /* BPF_JMP32 | BPF_JSET | BPF_X */
9449 {
9450 "JMP32_JSET_X",
9451 .u.insns_int = {
9452 BPF_ALU32_IMM(BPF_MOV, R0, 8),
9453 BPF_ALU32_IMM(BPF_MOV, R1, 7),
9454 BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
9455 BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
9456 BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
9457 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9458 BPF_EXIT_INSN(),
9459 },
9460 INTERNAL,
9461 { },
9462 { { 0, 8 } }
9463 },
9464 /* BPF_JMP32 | BPF_JGT | BPF_K */
9465 {
9466 "JMP32_JGT_K: Small immediate",
9467 .u.insns_int = {
9468 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9469 BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
9470 BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
9471 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9472 BPF_EXIT_INSN(),
9473 },
9474 INTERNAL,
9475 { },
9476 { { 0, 123 } }
9477 },
9478 {
9479 "JMP32_JGT_K: Large immediate",
9480 .u.insns_int = {
9481 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9482 BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
9483 BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
9484 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9485 BPF_EXIT_INSN(),
9486 },
9487 INTERNAL,
9488 { },
9489 { { 0, 0xfffffffe } }
9490 },
9491 /* BPF_JMP32 | BPF_JGT | BPF_X */
9492 {
9493 "JMP32_JGT_X",
9494 .u.insns_int = {
9495 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9496 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9497 BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
9498 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9499 BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
9500 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9501 BPF_EXIT_INSN(),
9502 },
9503 INTERNAL,
9504 { },
9505 { { 0, 0xfffffffe } }
9506 },
9507 /* BPF_JMP32 | BPF_JGE | BPF_K */
9508 {
9509 "JMP32_JGE_K: Small immediate",
9510 .u.insns_int = {
9511 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9512 BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
9513 BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
9514 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9515 BPF_EXIT_INSN(),
9516 },
9517 INTERNAL,
9518 { },
9519 { { 0, 123 } }
9520 },
9521 {
9522 "JMP32_JGE_K: Large immediate",
9523 .u.insns_int = {
9524 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9525 BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
9526 BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
9527 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9528 BPF_EXIT_INSN(),
9529 },
9530 INTERNAL,
9531 { },
9532 { { 0, 0xfffffffe } }
9533 },
9534 /* BPF_JMP32 | BPF_JGE | BPF_X */
9535 {
9536 "JMP32_JGE_X",
9537 .u.insns_int = {
9538 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9539 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9540 BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
9541 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9542 BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
9543 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9544 BPF_EXIT_INSN(),
9545 },
9546 INTERNAL,
9547 { },
9548 { { 0, 0xfffffffe } }
9549 },
9550 /* BPF_JMP32 | BPF_JLT | BPF_K */
9551 {
9552 "JMP32_JLT_K: Small immediate",
9553 .u.insns_int = {
9554 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9555 BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
9556 BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
9557 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9558 BPF_EXIT_INSN(),
9559 },
9560 INTERNAL,
9561 { },
9562 { { 0, 123 } }
9563 },
9564 {
9565 "JMP32_JLT_K: Large immediate",
9566 .u.insns_int = {
9567 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9568 BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
9569 BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
9570 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9571 BPF_EXIT_INSN(),
9572 },
9573 INTERNAL,
9574 { },
9575 { { 0, 0xfffffffe } }
9576 },
9577 /* BPF_JMP32 | BPF_JLT | BPF_X */
9578 {
9579 "JMP32_JLT_X",
9580 .u.insns_int = {
9581 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9582 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9583 BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
9584 BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9585 BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
9586 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9587 BPF_EXIT_INSN(),
9588 },
9589 INTERNAL,
9590 { },
9591 { { 0, 0xfffffffe } }
9592 },
9593 /* BPF_JMP32 | BPF_JLE | BPF_K */
9594 {
9595 "JMP32_JLE_K: Small immediate",
9596 .u.insns_int = {
9597 BPF_ALU32_IMM(BPF_MOV, R0, 123),
9598 BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
9599 BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
9600 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9601 BPF_EXIT_INSN(),
9602 },
9603 INTERNAL,
9604 { },
9605 { { 0, 123 } }
9606 },
9607 {
9608 "JMP32_JLE_K: Large immediate",
9609 .u.insns_int = {
9610 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9611 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
9612 BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
9613 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9614 BPF_EXIT_INSN(),
9615 },
9616 INTERNAL,
9617 { },
9618 { { 0, 0xfffffffe } }
9619 },
9620 /* BPF_JMP32 | BPF_JLE | BPF_X */
9621 {
9622 "JMP32_JLE_X",
9623 .u.insns_int = {
9624 BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9625 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9626 BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
9627 BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9628 BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
9629 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9630 BPF_EXIT_INSN(),
9631 },
9632 INTERNAL,
9633 { },
9634 { { 0, 0xfffffffe } }
9635 },
9636 /* BPF_JMP32 | BPF_JSGT | BPF_K */
9637 {
9638 "JMP32_JSGT_K: Small immediate",
9639 .u.insns_int = {
9640 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9641 BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
9642 BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
9643 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9644 BPF_EXIT_INSN(),
9645 },
9646 INTERNAL,
9647 { },
9648 { { 0, -123 } }
9649 },
9650 {
9651 "JMP32_JSGT_K: Large immediate",
9652 .u.insns_int = {
9653 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9654 BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
9655 BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
9656 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9657 BPF_EXIT_INSN(),
9658 },
9659 INTERNAL,
9660 { },
9661 { { 0, -12345678 } }
9662 },
9663 /* BPF_JMP32 | BPF_JSGT | BPF_X */
9664 {
9665 "JMP32_JSGT_X",
9666 .u.insns_int = {
9667 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9668 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9669 BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
9670 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9671 BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
9672 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9673 BPF_EXIT_INSN(),
9674 },
9675 INTERNAL,
9676 { },
9677 { { 0, -12345678 } }
9678 },
9679 /* BPF_JMP32 | BPF_JSGE | BPF_K */
9680 {
9681 "JMP32_JSGE_K: Small immediate",
9682 .u.insns_int = {
9683 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9684 BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
9685 BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
9686 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9687 BPF_EXIT_INSN(),
9688 },
9689 INTERNAL,
9690 { },
9691 { { 0, -123 } }
9692 },
9693 {
9694 "JMP32_JSGE_K: Large immediate",
9695 .u.insns_int = {
9696 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9697 BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
9698 BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
9699 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9700 BPF_EXIT_INSN(),
9701 },
9702 INTERNAL,
9703 { },
9704 { { 0, -12345678 } }
9705 },
9706 /* BPF_JMP32 | BPF_JSGE | BPF_X */
9707 {
9708 "JMP32_JSGE_X",
9709 .u.insns_int = {
9710 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9711 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9712 BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
9713 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9714 BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
9715 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9716 BPF_EXIT_INSN(),
9717 },
9718 INTERNAL,
9719 { },
9720 { { 0, -12345678 } }
9721 },
9722 /* BPF_JMP32 | BPF_JSLT | BPF_K */
9723 {
9724 "JMP32_JSLT_K: Small immediate",
9725 .u.insns_int = {
9726 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9727 BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
9728 BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
9729 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9730 BPF_EXIT_INSN(),
9731 },
9732 INTERNAL,
9733 { },
9734 { { 0, -123 } }
9735 },
9736 {
9737 "JMP32_JSLT_K: Large immediate",
9738 .u.insns_int = {
9739 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9740 BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
9741 BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
9742 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9743 BPF_EXIT_INSN(),
9744 },
9745 INTERNAL,
9746 { },
9747 { { 0, -12345678 } }
9748 },
9749 /* BPF_JMP32 | BPF_JSLT | BPF_X */
9750 {
9751 "JMP32_JSLT_X",
9752 .u.insns_int = {
9753 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9754 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9755 BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
9756 BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9757 BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
9758 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9759 BPF_EXIT_INSN(),
9760 },
9761 INTERNAL,
9762 { },
9763 { { 0, -12345678 } }
9764 },
9765 /* BPF_JMP32 | BPF_JSLE | BPF_K */
9766 {
9767 "JMP32_JSLE_K: Small immediate",
9768 .u.insns_int = {
9769 BPF_ALU32_IMM(BPF_MOV, R0, -123),
9770 BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
9771 BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
9772 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9773 BPF_EXIT_INSN(),
9774 },
9775 INTERNAL,
9776 { },
9777 { { 0, -123 } }
9778 },
9779 {
9780 "JMP32_JSLE_K: Large immediate",
9781 .u.insns_int = {
9782 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9783 BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
9784 BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
9785 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9786 BPF_EXIT_INSN(),
9787 },
9788 INTERNAL,
9789 { },
9790 { { 0, -12345678 } }
9791 },
9792	/* BPF_JMP32 | BPF_JSLE | BPF_X */
9793 {
9794 "JMP32_JSLE_X",
9795 .u.insns_int = {
9796 BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9797 BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9798 BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9799 BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9800 BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9801 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9802 BPF_EXIT_INSN(),
9803 },
9804 INTERNAL,
9805 { },
9806 { { 0, -12345678 } }
9807 },
9808 /* BPF_JMP | BPF_EXIT */
9809 {
9810 "JMP_EXIT",
9811 .u.insns_int = {
9812 BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
9813 BPF_EXIT_INSN(),
9814 BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
9815 },
9816 INTERNAL,
9817 { },
9818 { { 0, 0x4711 } },
9819 },
9820 /* BPF_JMP | BPF_JA */
9821 {
9822 "JMP_JA: Unconditional jump: if (true) return 1",
9823 .u.insns_int = {
9824 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9825 BPF_JMP_IMM(BPF_JA, 0, 0, 1),
9826 BPF_EXIT_INSN(),
9827 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9828 BPF_EXIT_INSN(),
9829 },
9830 INTERNAL,
9831 { },
9832 { { 0, 1 } },
9833 },
9834 /* BPF_JMP32 | BPF_JA */
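	/*
	 * Unlike BPF_JMP | BPF_JA, the JMP32 variant takes its jump target
	 * from the 32-bit immediate rather than the 16-bit offset field,
	 * hence the swapped argument positions in BPF_JMP32_IMM() below.
	 */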
9835 {
9836 "JMP32_JA: Unconditional jump: if (true) return 1",
9837 .u.insns_int = {
9838 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9839 BPF_JMP32_IMM(BPF_JA, 0, 1, 0),
9840 BPF_EXIT_INSN(),
9841 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9842 BPF_EXIT_INSN(),
9843 },
9844 INTERNAL,
9845 { },
9846 { { 0, 1 } },
9847 },
9848 /* BPF_JMP | BPF_JSLT | BPF_K */
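	/*
	 * The 64-bit signed comparisons load their operands with
	 * BPF_LD_IMM64 so that e.g. 0xfffffffffffffffe is really the
	 * 64-bit value -2 rather than a zero-extended 32-bit constant.
	 */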
9849 {
9850 "JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9851 .u.insns_int = {
9852 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9853 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9854 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9855 BPF_EXIT_INSN(),
9856 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9857 BPF_EXIT_INSN(),
9858 },
9859 INTERNAL,
9860 { },
9861 { { 0, 1 } },
9862 },
9863 {
9864 "JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9865 .u.insns_int = {
9866 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9867 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9868 BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9869 BPF_EXIT_INSN(),
9870 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9871 BPF_EXIT_INSN(),
9872 },
9873 INTERNAL,
9874 { },
9875 { { 0, 1 } },
9876 },
9877 /* BPF_JMP | BPF_JSGT | BPF_K */
9878 {
9879 "JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9880 .u.insns_int = {
9881 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9882 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9883 BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
9884 BPF_EXIT_INSN(),
9885 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9886 BPF_EXIT_INSN(),
9887 },
9888 INTERNAL,
9889 { },
9890 { { 0, 1 } },
9891 },
9892 {
9893 "JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9894 .u.insns_int = {
9895 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9896 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9897 BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
9898 BPF_EXIT_INSN(),
9899 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9900 BPF_EXIT_INSN(),
9901 },
9902 INTERNAL,
9903 { },
9904 { { 0, 1 } },
9905 },
9906 /* BPF_JMP | BPF_JSLE | BPF_K */
9907 {
9908 "JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9909 .u.insns_int = {
9910 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9911 BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9912 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9913 BPF_EXIT_INSN(),
9914 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9915 BPF_EXIT_INSN(),
9916 },
9917 INTERNAL,
9918 { },
9919 { { 0, 1 } },
9920 },
9921 {
9922 "JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9923 .u.insns_int = {
9924 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9925 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9926 BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9927 BPF_EXIT_INSN(),
9928 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9929 BPF_EXIT_INSN(),
9930 },
9931 INTERNAL,
9932 { },
9933 { { 0, 1 } },
9934 },
9935 {
9936 "JMP_JSLE_K: Signed jump: value walk 1",
9937 .u.insns_int = {
9938 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9939 BPF_LD_IMM64(R1, 3),
9940 BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
9941 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9942 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9943 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9944 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9945 BPF_ALU64_IMM(BPF_SUB, R1, 1),
9946 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9947 BPF_EXIT_INSN(), /* bad exit */
9948 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9949 BPF_EXIT_INSN(),
9950 },
9951 INTERNAL,
9952 { },
9953 { { 0, 1 } },
9954 },
9955 {
9956 "JMP_JSLE_K: Signed jump: value walk 2",
9957 .u.insns_int = {
9958 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9959 BPF_LD_IMM64(R1, 3),
9960 BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9961 BPF_ALU64_IMM(BPF_SUB, R1, 2),
9962 BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9963 BPF_ALU64_IMM(BPF_SUB, R1, 2),
9964 BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9965 BPF_EXIT_INSN(), /* bad exit */
9966 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
9967 BPF_EXIT_INSN(),
9968 },
9969 INTERNAL,
9970 { },
9971 { { 0, 1 } },
9972 },
9973 /* BPF_JMP | BPF_JSGE | BPF_K */
9974 {
9975 "JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9976 .u.insns_int = {
9977 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9978 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9979 BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
9980 BPF_EXIT_INSN(),
9981 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9982 BPF_EXIT_INSN(),
9983 },
9984 INTERNAL,
9985 { },
9986 { { 0, 1 } },
9987 },
9988 {
9989 "JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9990 .u.insns_int = {
9991 BPF_ALU32_IMM(BPF_MOV, R0, 0),
9992 BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9993 BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
9994 BPF_EXIT_INSN(),
9995 BPF_ALU32_IMM(BPF_MOV, R0, 1),
9996 BPF_EXIT_INSN(),
9997 },
9998 INTERNAL,
9999 { },
10000 { { 0, 1 } },
10001 },
10002 {
10003 "JMP_JSGE_K: Signed jump: value walk 1",
10004 .u.insns_int = {
10005 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10006 BPF_LD_IMM64(R1, -3),
10007 BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
10008 BPF_ALU64_IMM(BPF_ADD, R1, 1),
10009 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
10010 BPF_ALU64_IMM(BPF_ADD, R1, 1),
10011 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
10012 BPF_ALU64_IMM(BPF_ADD, R1, 1),
10013 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
10014 BPF_EXIT_INSN(), /* bad exit */
10015 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
10016 BPF_EXIT_INSN(),
10017 },
10018 INTERNAL,
10019 { },
10020 { { 0, 1 } },
10021 },
10022 {
10023 "JMP_JSGE_K: Signed jump: value walk 2",
10024 .u.insns_int = {
10025 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10026 BPF_LD_IMM64(R1, -3),
10027 BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
10028 BPF_ALU64_IMM(BPF_ADD, R1, 2),
10029 BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
10030 BPF_ALU64_IMM(BPF_ADD, R1, 2),
10031 BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
10032 BPF_EXIT_INSN(), /* bad exit */
10033 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* good exit */
10034 BPF_EXIT_INSN(),
10035 },
10036 INTERNAL,
10037 { },
10038 { { 0, 1 } },
10039 },
10040 /* BPF_JMP | BPF_JGT | BPF_K */
10041 {
10042 "JMP_JGT_K: if (3 > 2) return 1",
10043 .u.insns_int = {
10044 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10045 BPF_LD_IMM64(R1, 3),
10046 BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
10047 BPF_EXIT_INSN(),
10048 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10049 BPF_EXIT_INSN(),
10050 },
10051 INTERNAL,
10052 { },
10053 { { 0, 1 } },
10054 },
10055 {
10056 "JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
10057 .u.insns_int = {
10058 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10059 BPF_LD_IMM64(R1, -1),
10060 BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
10061 BPF_EXIT_INSN(),
10062 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10063 BPF_EXIT_INSN(),
10064 },
10065 INTERNAL,
10066 { },
10067 { { 0, 1 } },
10068 },
10069 /* BPF_JMP | BPF_JLT | BPF_K */
10070 {
10071 "JMP_JLT_K: if (2 < 3) return 1",
10072 .u.insns_int = {
10073 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10074 BPF_LD_IMM64(R1, 2),
10075 BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
10076 BPF_EXIT_INSN(),
10077 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10078 BPF_EXIT_INSN(),
10079 },
10080 INTERNAL,
10081 { },
10082 { { 0, 1 } },
10083 },
10084 {
10085		"JMP_JLT_K: Unsigned jump: if (1 < -1) return 1",
10086 .u.insns_int = {
10087 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10088 BPF_LD_IMM64(R1, 1),
10089 BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
10090 BPF_EXIT_INSN(),
10091 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10092 BPF_EXIT_INSN(),
10093 },
10094 INTERNAL,
10095 { },
10096 { { 0, 1 } },
10097 },
10098 /* BPF_JMP | BPF_JGE | BPF_K */
10099 {
10100 "JMP_JGE_K: if (3 >= 2) return 1",
10101 .u.insns_int = {
10102 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10103 BPF_LD_IMM64(R1, 3),
10104 BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
10105 BPF_EXIT_INSN(),
10106 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10107 BPF_EXIT_INSN(),
10108 },
10109 INTERNAL,
10110 { },
10111 { { 0, 1 } },
10112 },
10113 /* BPF_JMP | BPF_JLE | BPF_K */
10114 {
10115 "JMP_JLE_K: if (2 <= 3) return 1",
10116 .u.insns_int = {
10117 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10118 BPF_LD_IMM64(R1, 2),
10119 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10120 BPF_EXIT_INSN(),
10121 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10122 BPF_EXIT_INSN(),
10123 },
10124 INTERNAL,
10125 { },
10126 { { 0, 1 } },
10127 },
10128 /* BPF_JMP | BPF_JGT | BPF_K jump backwards */
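	/*
	 * Backward-jump layout: the program first jumps forward over the
	 * "return 1" block and only then evaluates the condition, whose
	 * taken branch targets that block with a negative offset. The -6
	 * accounts for BPF_LD_IMM64 expanding to two instructions.
	 */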
10129 {
10130 "JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
10131 .u.insns_int = {
10132 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10133 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10134 BPF_EXIT_INSN(),
10135 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10136 BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
10137 BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
10138 BPF_EXIT_INSN(),
10139 },
10140 INTERNAL,
10141 { },
10142 { { 0, 1 } },
10143 },
10144 {
10145 "JMP_JGE_K: if (3 >= 3) return 1",
10146 .u.insns_int = {
10147 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10148 BPF_LD_IMM64(R1, 3),
10149 BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
10150 BPF_EXIT_INSN(),
10151 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10152 BPF_EXIT_INSN(),
10153 },
10154 INTERNAL,
10155 { },
10156 { { 0, 1 } },
10157 },
10158 /* BPF_JMP | BPF_JLT | BPF_K jump backwards */
10159 {
10160		"JMP_JLT_K: if (2 < 3) return 1 (jump backwards)",
10161 .u.insns_int = {
10162 BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10163 BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10164 BPF_EXIT_INSN(),
10165 BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10166 BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
10167 BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
10168 BPF_EXIT_INSN(),
10169 },
10170 INTERNAL,
10171 { },
10172 { { 0, 1 } },
10173 },
10174 {
10175 "JMP_JLE_K: if (3 <= 3) return 1",
10176 .u.insns_int = {
10177 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10178 BPF_LD_IMM64(R1, 3),
10179 BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10180 BPF_EXIT_INSN(),
10181 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10182 BPF_EXIT_INSN(),
10183 },
10184 INTERNAL,
10185 { },
10186 { { 0, 1 } },
10187 },
10188 /* BPF_JMP | BPF_JNE | BPF_K */
10189 {
10190 "JMP_JNE_K: if (3 != 2) return 1",
10191 .u.insns_int = {
10192 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10193 BPF_LD_IMM64(R1, 3),
10194 BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
10195 BPF_EXIT_INSN(),
10196 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10197 BPF_EXIT_INSN(),
10198 },
10199 INTERNAL,
10200 { },
10201 { { 0, 1 } },
10202 },
10203 /* BPF_JMP | BPF_JEQ | BPF_K */
10204 {
10205 "JMP_JEQ_K: if (3 == 3) return 1",
10206 .u.insns_int = {
10207 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10208 BPF_LD_IMM64(R1, 3),
10209 BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
10210 BPF_EXIT_INSN(),
10211 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10212 BPF_EXIT_INSN(),
10213 },
10214 INTERNAL,
10215 { },
10216 { { 0, 1 } },
10217 },
10218 /* BPF_JMP | BPF_JSET | BPF_K */
10219 {
10220 "JMP_JSET_K: if (0x3 & 0x2) return 1",
10221 .u.insns_int = {
10222 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10223 BPF_LD_IMM64(R1, 3),
10224 BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
10225 BPF_EXIT_INSN(),
10226 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10227 BPF_EXIT_INSN(),
10228 },
10229 INTERNAL,
10230 { },
10231 { { 0, 1 } },
10232 },
10233 {
10234 "JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
10235 .u.insns_int = {
10236 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10237 BPF_LD_IMM64(R1, 3),
10238 BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
10239 BPF_EXIT_INSN(),
10240 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10241 BPF_EXIT_INSN(),
10242 },
10243 INTERNAL,
10244 { },
10245 { { 0, 1 } },
10246 },
10247 /* BPF_JMP | BPF_JSGT | BPF_X */
10248 {
10249 "JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
10250 .u.insns_int = {
10251 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10252 BPF_LD_IMM64(R1, -1),
10253 BPF_LD_IMM64(R2, -2),
10254 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10255 BPF_EXIT_INSN(),
10256 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10257 BPF_EXIT_INSN(),
10258 },
10259 INTERNAL,
10260 { },
10261 { { 0, 1 } },
10262 },
10263 {
10264 "JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
10265 .u.insns_int = {
10266 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10267 BPF_LD_IMM64(R1, -1),
10268 BPF_LD_IMM64(R2, -1),
10269 BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10270 BPF_EXIT_INSN(),
10271 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10272 BPF_EXIT_INSN(),
10273 },
10274 INTERNAL,
10275 { },
10276 { { 0, 1 } },
10277 },
10278 /* BPF_JMP | BPF_JSLT | BPF_X */
10279 {
10280 "JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
10281 .u.insns_int = {
10282 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10283 BPF_LD_IMM64(R1, -1),
10284 BPF_LD_IMM64(R2, -2),
10285 BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
10286 BPF_EXIT_INSN(),
10287 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10288 BPF_EXIT_INSN(),
10289 },
10290 INTERNAL,
10291 { },
10292 { { 0, 1 } },
10293 },
10294 {
10295 "JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
10296 .u.insns_int = {
10297 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10298 BPF_LD_IMM64(R1, -1),
10299 BPF_LD_IMM64(R2, -1),
10300 BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
10301 BPF_EXIT_INSN(),
10302 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10303 BPF_EXIT_INSN(),
10304 },
10305 INTERNAL,
10306 { },
10307 { { 0, 1 } },
10308 },
10309 /* BPF_JMP | BPF_JSGE | BPF_X */
10310 {
10311 "JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
10312 .u.insns_int = {
10313 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10314 BPF_LD_IMM64(R1, -1),
10315 BPF_LD_IMM64(R2, -2),
10316 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10317 BPF_EXIT_INSN(),
10318 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10319 BPF_EXIT_INSN(),
10320 },
10321 INTERNAL,
10322 { },
10323 { { 0, 1 } },
10324 },
10325 {
10326 "JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
10327 .u.insns_int = {
10328 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10329 BPF_LD_IMM64(R1, -1),
10330 BPF_LD_IMM64(R2, -1),
10331 BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10332 BPF_EXIT_INSN(),
10333 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10334 BPF_EXIT_INSN(),
10335 },
10336 INTERNAL,
10337 { },
10338 { { 0, 1 } },
10339 },
10340 /* BPF_JMP | BPF_JSLE | BPF_X */
10341 {
10342 "JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
10343 .u.insns_int = {
10344 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10345 BPF_LD_IMM64(R1, -1),
10346 BPF_LD_IMM64(R2, -2),
10347 BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
10348 BPF_EXIT_INSN(),
10349 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10350 BPF_EXIT_INSN(),
10351 },
10352 INTERNAL,
10353 { },
10354 { { 0, 1 } },
10355 },
10356 {
10357 "JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
10358 .u.insns_int = {
10359 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10360 BPF_LD_IMM64(R1, -1),
10361 BPF_LD_IMM64(R2, -1),
10362 BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
10363 BPF_EXIT_INSN(),
10364 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10365 BPF_EXIT_INSN(),
10366 },
10367 INTERNAL,
10368 { },
10369 { { 0, 1 } },
10370 },
10371 /* BPF_JMP | BPF_JGT | BPF_X */
10372 {
10373 "JMP_JGT_X: if (3 > 2) return 1",
10374 .u.insns_int = {
10375 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10376 BPF_LD_IMM64(R1, 3),
10377 BPF_LD_IMM64(R2, 2),
10378 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10379 BPF_EXIT_INSN(),
10380 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10381 BPF_EXIT_INSN(),
10382 },
10383 INTERNAL,
10384 { },
10385 { { 0, 1 } },
10386 },
10387 {
10388 "JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
10389 .u.insns_int = {
10390 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10391 BPF_LD_IMM64(R1, -1),
10392 BPF_LD_IMM64(R2, 1),
10393 BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10394 BPF_EXIT_INSN(),
10395 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10396 BPF_EXIT_INSN(),
10397 },
10398 INTERNAL,
10399 { },
10400 { { 0, 1 } },
10401 },
10402 /* BPF_JMP | BPF_JLT | BPF_X */
10403 {
10404 "JMP_JLT_X: if (2 < 3) return 1",
10405 .u.insns_int = {
10406 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10407 BPF_LD_IMM64(R1, 3),
10408 BPF_LD_IMM64(R2, 2),
10409 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10410 BPF_EXIT_INSN(),
10411 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10412 BPF_EXIT_INSN(),
10413 },
10414 INTERNAL,
10415 { },
10416 { { 0, 1 } },
10417 },
10418 {
10419 "JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
10420 .u.insns_int = {
10421 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10422 BPF_LD_IMM64(R1, -1),
10423 BPF_LD_IMM64(R2, 1),
10424 BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10425 BPF_EXIT_INSN(),
10426 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10427 BPF_EXIT_INSN(),
10428 },
10429 INTERNAL,
10430 { },
10431 { { 0, 1 } },
10432 },
10433 /* BPF_JMP | BPF_JGE | BPF_X */
10434 {
10435 "JMP_JGE_X: if (3 >= 2) return 1",
10436 .u.insns_int = {
10437 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10438 BPF_LD_IMM64(R1, 3),
10439 BPF_LD_IMM64(R2, 2),
10440 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10441 BPF_EXIT_INSN(),
10442 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10443 BPF_EXIT_INSN(),
10444 },
10445 INTERNAL,
10446 { },
10447 { { 0, 1 } },
10448 },
10449 {
10450 "JMP_JGE_X: if (3 >= 3) return 1",
10451 .u.insns_int = {
10452 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10453 BPF_LD_IMM64(R1, 3),
10454 BPF_LD_IMM64(R2, 3),
10455 BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10456 BPF_EXIT_INSN(),
10457 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10458 BPF_EXIT_INSN(),
10459 },
10460 INTERNAL,
10461 { },
10462 { { 0, 1 } },
10463 },
10464 /* BPF_JMP | BPF_JLE | BPF_X */
10465 {
10466 "JMP_JLE_X: if (2 <= 3) return 1",
10467 .u.insns_int = {
10468 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10469 BPF_LD_IMM64(R1, 3),
10470 BPF_LD_IMM64(R2, 2),
10471 BPF_JMP_REG(BPF_JLE, R2, R1, 1),
10472 BPF_EXIT_INSN(),
10473 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10474 BPF_EXIT_INSN(),
10475 },
10476 INTERNAL,
10477 { },
10478 { { 0, 1 } },
10479 },
10480 {
10481 "JMP_JLE_X: if (3 <= 3) return 1",
10482 .u.insns_int = {
10483 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10484 BPF_LD_IMM64(R1, 3),
10485 BPF_LD_IMM64(R2, 3),
10486 BPF_JMP_REG(BPF_JLE, R1, R2, 1),
10487 BPF_EXIT_INSN(),
10488 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10489 BPF_EXIT_INSN(),
10490 },
10491 INTERNAL,
10492 { },
10493 { { 0, 1 } },
10494 },
10495 {
10496 /* Mainly testing JIT + imm64 here. */
10497 "JMP_JGE_X: ldimm64 test 1",
10498 .u.insns_int = {
10499 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10500 BPF_LD_IMM64(R1, 3),
10501 BPF_LD_IMM64(R2, 2),
10502 BPF_JMP_REG(BPF_JGE, R1, R2, 2),
10503 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10504 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10505 BPF_EXIT_INSN(),
10506 },
10507 INTERNAL,
10508 { },
10509 { { 0, 0xeeeeeeeeU } },
10510 },
10511 {
10512 "JMP_JGE_X: ldimm64 test 2",
10513 .u.insns_int = {
10514 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10515 BPF_LD_IMM64(R1, 3),
10516 BPF_LD_IMM64(R2, 2),
10517 BPF_JMP_REG(BPF_JGE, R1, R2, 0),
10518 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10519 BPF_EXIT_INSN(),
10520 },
10521 INTERNAL,
10522 { },
10523 { { 0, 0xffffffffU } },
10524 },
10525 {
10526 "JMP_JGE_X: ldimm64 test 3",
10527 .u.insns_int = {
10528 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10529 BPF_LD_IMM64(R1, 3),
10530 BPF_LD_IMM64(R2, 2),
10531 BPF_JMP_REG(BPF_JGE, R1, R2, 4),
10532 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10533 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10534 BPF_EXIT_INSN(),
10535 },
10536 INTERNAL,
10537 { },
10538 { { 0, 1 } },
10539 },
10540 {
10541 "JMP_JLE_X: ldimm64 test 1",
10542 .u.insns_int = {
10543 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10544 BPF_LD_IMM64(R1, 3),
10545 BPF_LD_IMM64(R2, 2),
10546 BPF_JMP_REG(BPF_JLE, R2, R1, 2),
10547 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10548 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10549 BPF_EXIT_INSN(),
10550 },
10551 INTERNAL,
10552 { },
10553 { { 0, 0xeeeeeeeeU } },
10554 },
10555 {
10556 "JMP_JLE_X: ldimm64 test 2",
10557 .u.insns_int = {
10558 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10559 BPF_LD_IMM64(R1, 3),
10560 BPF_LD_IMM64(R2, 2),
10561 BPF_JMP_REG(BPF_JLE, R2, R1, 0),
10562 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10563 BPF_EXIT_INSN(),
10564 },
10565 INTERNAL,
10566 { },
10567 { { 0, 0xffffffffU } },
10568 },
10569 {
10570 "JMP_JLE_X: ldimm64 test 3",
10571 .u.insns_int = {
10572 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10573 BPF_LD_IMM64(R1, 3),
10574 BPF_LD_IMM64(R2, 2),
10575 BPF_JMP_REG(BPF_JLE, R2, R1, 4),
10576 BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10577 BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10578 BPF_EXIT_INSN(),
10579 },
10580 INTERNAL,
10581 { },
10582 { { 0, 1 } },
10583 },
10584 /* BPF_JMP | BPF_JNE | BPF_X */
10585 {
10586 "JMP_JNE_X: if (3 != 2) return 1",
10587 .u.insns_int = {
10588 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10589 BPF_LD_IMM64(R1, 3),
10590 BPF_LD_IMM64(R2, 2),
10591 BPF_JMP_REG(BPF_JNE, R1, R2, 1),
10592 BPF_EXIT_INSN(),
10593 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10594 BPF_EXIT_INSN(),
10595 },
10596 INTERNAL,
10597 { },
10598 { { 0, 1 } },
10599 },
10600 /* BPF_JMP | BPF_JEQ | BPF_X */
10601 {
10602 "JMP_JEQ_X: if (3 == 3) return 1",
10603 .u.insns_int = {
10604 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10605 BPF_LD_IMM64(R1, 3),
10606 BPF_LD_IMM64(R2, 3),
10607 BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
10608 BPF_EXIT_INSN(),
10609 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10610 BPF_EXIT_INSN(),
10611 },
10612 INTERNAL,
10613 { },
10614 { { 0, 1 } },
10615 },
10616 /* BPF_JMP | BPF_JSET | BPF_X */
10617 {
10618 "JMP_JSET_X: if (0x3 & 0x2) return 1",
10619 .u.insns_int = {
10620 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10621 BPF_LD_IMM64(R1, 3),
10622 BPF_LD_IMM64(R2, 2),
10623 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10624 BPF_EXIT_INSN(),
10625 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10626 BPF_EXIT_INSN(),
10627 },
10628 INTERNAL,
10629 { },
10630 { { 0, 1 } },
10631 },
10632 {
10633 "JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
10634 .u.insns_int = {
10635 BPF_ALU32_IMM(BPF_MOV, R0, 0),
10636 BPF_LD_IMM64(R1, 3),
10637 BPF_LD_IMM64(R2, 0xffffffff),
10638 BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10639 BPF_EXIT_INSN(),
10640 BPF_ALU32_IMM(BPF_MOV, R0, 1),
10641 BPF_EXIT_INSN(),
10642 },
10643 INTERNAL,
10644 { },
10645 { { 0, 1 } },
10646 },
10647 {
10648 "JMP_JA: Jump, gap, jump, ...",
10649 { },
10650 CLASSIC | FLAG_NO_DATA,
10651 { },
10652 { { 0, 0xababcbac } },
10653 .fill_helper = bpf_fill_ja,
10654 },
10655 { /* Mainly checking JIT here. */
10656 "BPF_MAXINSNS: Maximum possible literals",
10657 { },
10658 CLASSIC | FLAG_NO_DATA,
10659 { },
10660 { { 0, 0xffffffff } },
10661 .fill_helper = bpf_fill_maxinsns1,
10662 },
10663 { /* Mainly checking JIT here. */
10664 "BPF_MAXINSNS: Single literal",
10665 { },
10666 CLASSIC | FLAG_NO_DATA,
10667 { },
10668 { { 0, 0xfefefefe } },
10669 .fill_helper = bpf_fill_maxinsns2,
10670 },
10671 { /* Mainly checking JIT here. */
10672 "BPF_MAXINSNS: Run/add until end",
10673 { },
10674 CLASSIC | FLAG_NO_DATA,
10675 { },
10676 { { 0, 0x947bf368 } },
10677 .fill_helper = bpf_fill_maxinsns3,
10678 },
10679 {
10680 "BPF_MAXINSNS: Too many instructions",
10681 { },
10682 CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
10683 { },
10684 { },
10685 .fill_helper = bpf_fill_maxinsns4,
10686 .expected_errcode = -EINVAL,
10687 },
10688 { /* Mainly checking JIT here. */
10689 "BPF_MAXINSNS: Very long jump",
10690 { },
10691 CLASSIC | FLAG_NO_DATA,
10692 { },
10693 { { 0, 0xabababab } },
10694 .fill_helper = bpf_fill_maxinsns5,
10695 },
10696 { /* Mainly checking JIT here. */
10697 "BPF_MAXINSNS: Ctx heavy transformations",
10698 { },
10699 CLASSIC,
10700 { },
10701 {
10702 { 1, SKB_VLAN_PRESENT },
10703 { 10, SKB_VLAN_PRESENT }
10704 },
10705 .fill_helper = bpf_fill_maxinsns6,
10706 },
10707 { /* Mainly checking JIT here. */
10708 "BPF_MAXINSNS: Call heavy transformations",
10709 { },
10710 CLASSIC | FLAG_NO_DATA,
10711 { },
10712 { { 1, 0 }, { 10, 0 } },
10713 .fill_helper = bpf_fill_maxinsns7,
10714 },
10715 { /* Mainly checking JIT here. */
10716 "BPF_MAXINSNS: Jump heavy test",
10717 { },
10718 CLASSIC | FLAG_NO_DATA,
10719 { },
10720 { { 0, 0xffffffff } },
10721 .fill_helper = bpf_fill_maxinsns8,
10722 },
10723 { /* Mainly checking JIT here. */
10724 "BPF_MAXINSNS: Very long jump backwards",
10725 { },
10726 INTERNAL | FLAG_NO_DATA,
10727 { },
10728 { { 0, 0xcbababab } },
10729 .fill_helper = bpf_fill_maxinsns9,
10730 },
10731 { /* Mainly checking JIT here. */
10732 "BPF_MAXINSNS: Edge hopping nuthouse",
10733 { },
10734 INTERNAL | FLAG_NO_DATA,
10735 { },
10736 { { 0, 0xabababac } },
10737 .fill_helper = bpf_fill_maxinsns10,
10738 },
10739 {
10740 "BPF_MAXINSNS: Jump, gap, jump, ...",
10741 { },
10742 CLASSIC | FLAG_NO_DATA,
10743 { },
10744 { { 0, 0xababcbac } },
10745 .fill_helper = bpf_fill_maxinsns11,
10746 },
10747 {
10748 "BPF_MAXINSNS: jump over MSH",
10749 { },
10750 CLASSIC | FLAG_EXPECTED_FAIL,
10751 { 0xfa, 0xfb, 0xfc, 0xfd, },
10752 { { 4, 0xabababab } },
10753 .fill_helper = bpf_fill_maxinsns12,
10754 .expected_errcode = -EINVAL,
10755 },
10756 {
10757 "BPF_MAXINSNS: exec all MSH",
10758 { },
10759 CLASSIC,
10760 { 0xfa, 0xfb, 0xfc, 0xfd, },
10761 { { 4, 0xababab83 } },
10762 .fill_helper = bpf_fill_maxinsns13,
10763 },
10764 {
10765 "BPF_MAXINSNS: ld_abs+get_processor_id",
10766 { },
10767 CLASSIC,
10768 { },
10769 { { 1, 0xbee } },
10770 .fill_helper = bpf_fill_ld_abs_get_processor_id,
10771 },
10772 /*
10773 * LD_IND / LD_ABS on fragmented SKBs
10774 */
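	/*
	 * The skb used here carries test[].data_size bytes of head data taken
	 * from .data, and FLAG_SKB_FRAG makes the harness attach .frag_data as
	 * a paged fragment. Loads at offsets beyond the head must therefore be
	 * satisfied from the fragment, or assembled from head plus fragment in
	 * the mixed head/frag cases.
	 */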
10775 {
10776 "LD_IND byte frag",
10777 .u.insns = {
10778 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10779 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10780 BPF_STMT(BPF_RET | BPF_A, 0x0),
10781 },
10782 CLASSIC | FLAG_SKB_FRAG,
10783 { },
10784 { {0x40, 0x42} },
10785 .frag_data = {
10786 0x42, 0x00, 0x00, 0x00,
10787 0x43, 0x44, 0x00, 0x00,
10788 0x21, 0x07, 0x19, 0x83,
10789 },
10790 },
10791 {
10792 "LD_IND halfword frag",
10793 .u.insns = {
10794 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10795 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10796 BPF_STMT(BPF_RET | BPF_A, 0x0),
10797 },
10798 CLASSIC | FLAG_SKB_FRAG,
10799 { },
10800 { {0x40, 0x4344} },
10801 .frag_data = {
10802 0x42, 0x00, 0x00, 0x00,
10803 0x43, 0x44, 0x00, 0x00,
10804 0x21, 0x07, 0x19, 0x83,
10805 },
10806 },
10807 {
10808 "LD_IND word frag",
10809 .u.insns = {
10810 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10811 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10812 BPF_STMT(BPF_RET | BPF_A, 0x0),
10813 },
10814 CLASSIC | FLAG_SKB_FRAG,
10815 { },
10816 { {0x40, 0x21071983} },
10817 .frag_data = {
10818 0x42, 0x00, 0x00, 0x00,
10819 0x43, 0x44, 0x00, 0x00,
10820 0x21, 0x07, 0x19, 0x83,
10821 },
10822 },
10823 {
10824 "LD_IND halfword mixed head/frag",
10825 .u.insns = {
10826 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10827 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10828 BPF_STMT(BPF_RET | BPF_A, 0x0),
10829 },
10830 CLASSIC | FLAG_SKB_FRAG,
10831 { [0x3e] = 0x25, [0x3f] = 0x05, },
10832 { {0x40, 0x0519} },
10833 .frag_data = { 0x19, 0x82 },
10834 },
10835 {
10836 "LD_IND word mixed head/frag",
10837 .u.insns = {
10838 BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10839 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10840 BPF_STMT(BPF_RET | BPF_A, 0x0),
10841 },
10842 CLASSIC | FLAG_SKB_FRAG,
10843 { [0x3e] = 0x25, [0x3f] = 0x05, },
10844 { {0x40, 0x25051982} },
10845 .frag_data = { 0x19, 0x82 },
10846 },
10847 {
10848 "LD_ABS byte frag",
10849 .u.insns = {
10850 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10851 BPF_STMT(BPF_RET | BPF_A, 0x0),
10852 },
10853 CLASSIC | FLAG_SKB_FRAG,
10854 { },
10855 { {0x40, 0x42} },
10856 .frag_data = {
10857 0x42, 0x00, 0x00, 0x00,
10858 0x43, 0x44, 0x00, 0x00,
10859 0x21, 0x07, 0x19, 0x83,
10860 },
10861 },
10862 {
10863 "LD_ABS halfword frag",
10864 .u.insns = {
10865 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10866 BPF_STMT(BPF_RET | BPF_A, 0x0),
10867 },
10868 CLASSIC | FLAG_SKB_FRAG,
10869 { },
10870 { {0x40, 0x4344} },
10871 .frag_data = {
10872 0x42, 0x00, 0x00, 0x00,
10873 0x43, 0x44, 0x00, 0x00,
10874 0x21, 0x07, 0x19, 0x83,
10875 },
10876 },
10877 {
10878 "LD_ABS word frag",
10879 .u.insns = {
10880 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10881 BPF_STMT(BPF_RET | BPF_A, 0x0),
10882 },
10883 CLASSIC | FLAG_SKB_FRAG,
10884 { },
10885 { {0x40, 0x21071983} },
10886 .frag_data = {
10887 0x42, 0x00, 0x00, 0x00,
10888 0x43, 0x44, 0x00, 0x00,
10889 0x21, 0x07, 0x19, 0x83,
10890 },
10891 },
10892 {
10893 "LD_ABS halfword mixed head/frag",
10894 .u.insns = {
10895 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10896 BPF_STMT(BPF_RET | BPF_A, 0x0),
10897 },
10898 CLASSIC | FLAG_SKB_FRAG,
10899 { [0x3e] = 0x25, [0x3f] = 0x05, },
10900 { {0x40, 0x0519} },
10901 .frag_data = { 0x19, 0x82 },
10902 },
10903 {
10904 "LD_ABS word mixed head/frag",
10905 .u.insns = {
10906 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10907 BPF_STMT(BPF_RET | BPF_A, 0x0),
10908 },
10909 CLASSIC | FLAG_SKB_FRAG,
10910 { [0x3e] = 0x25, [0x3f] = 0x05, },
10911 { {0x40, 0x25051982} },
10912 .frag_data = { 0x19, 0x82 },
10913 },
10914 /*
	 * LD_IND / LD_ABS on non-fragmented SKBs
10916 */
10917 {
10918 /*
10919 * this tests that the JIT/interpreter correctly resets X
10920 * before using it in an LD_IND instruction.
10921 */
10922 "LD_IND byte default X",
10923 .u.insns = {
10924 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10925 BPF_STMT(BPF_RET | BPF_A, 0x0),
10926 },
10927 CLASSIC,
10928 { [0x1] = 0x42 },
10929 { {0x40, 0x42 } },
10930 },
10931 {
10932 "LD_IND byte positive offset",
10933 .u.insns = {
10934 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10935 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10936 BPF_STMT(BPF_RET | BPF_A, 0x0),
10937 },
10938 CLASSIC,
10939 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10940 { {0x40, 0x82 } },
10941 },
10942 {
10943 "LD_IND byte negative offset",
10944 .u.insns = {
10945 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10946 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10947 BPF_STMT(BPF_RET | BPF_A, 0x0),
10948 },
10949 CLASSIC,
10950 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10951 { {0x40, 0x05 } },
10952 },
10953 {
10954 "LD_IND byte positive offset, all ff",
10955 .u.insns = {
10956 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10957 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10958 BPF_STMT(BPF_RET | BPF_A, 0x0),
10959 },
10960 CLASSIC,
10961 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
10962 { {0x40, 0xff } },
10963 },
10964 {
10965 "LD_IND byte positive offset, out of bounds",
10966 .u.insns = {
10967 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10968 BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10969 BPF_STMT(BPF_RET | BPF_A, 0x0),
10970 },
10971 CLASSIC,
10972 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10973 { {0x3f, 0 }, },
10974 },
10975 {
10976 "LD_IND byte negative offset, out of bounds",
10977 .u.insns = {
10978 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10979 BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10980 BPF_STMT(BPF_RET | BPF_A, 0x0),
10981 },
10982 CLASSIC,
10983 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10984 { {0x3f, 0 } },
10985 },
10986 {
10987 "LD_IND byte negative offset, multiple calls",
10988 .u.insns = {
10989 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10990 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10991 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10992 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10993 BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10994 BPF_STMT(BPF_RET | BPF_A, 0x0),
10995 },
10996 CLASSIC,
10997 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
10998 { {0x40, 0x82 }, },
10999 },
11000 {
11001 "LD_IND halfword positive offset",
11002 .u.insns = {
11003 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11004 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
11005 BPF_STMT(BPF_RET | BPF_A, 0x0),
11006 },
11007 CLASSIC,
11008 {
11009 [0x1c] = 0xaa, [0x1d] = 0x55,
11010 [0x1e] = 0xbb, [0x1f] = 0x66,
11011 [0x20] = 0xcc, [0x21] = 0x77,
11012 [0x22] = 0xdd, [0x23] = 0x88,
11013 },
11014 { {0x40, 0xdd88 } },
11015 },
11016 {
11017 "LD_IND halfword negative offset",
11018 .u.insns = {
11019 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11020 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
11021 BPF_STMT(BPF_RET | BPF_A, 0x0),
11022 },
11023 CLASSIC,
11024 {
11025 [0x1c] = 0xaa, [0x1d] = 0x55,
11026 [0x1e] = 0xbb, [0x1f] = 0x66,
11027 [0x20] = 0xcc, [0x21] = 0x77,
11028 [0x22] = 0xdd, [0x23] = 0x88,
11029 },
11030 { {0x40, 0xbb66 } },
11031 },
11032 {
11033 "LD_IND halfword unaligned",
11034 .u.insns = {
11035 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11036 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
11037 BPF_STMT(BPF_RET | BPF_A, 0x0),
11038 },
11039 CLASSIC,
11040 {
11041 [0x1c] = 0xaa, [0x1d] = 0x55,
11042 [0x1e] = 0xbb, [0x1f] = 0x66,
11043 [0x20] = 0xcc, [0x21] = 0x77,
11044 [0x22] = 0xdd, [0x23] = 0x88,
11045 },
11046 { {0x40, 0x66cc } },
11047 },
11048 {
11049 "LD_IND halfword positive offset, all ff",
11050 .u.insns = {
11051 BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
11052 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11053 BPF_STMT(BPF_RET | BPF_A, 0x0),
11054 },
11055 CLASSIC,
11056 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11057 { {0x40, 0xffff } },
11058 },
11059 {
11060 "LD_IND halfword positive offset, out of bounds",
11061 .u.insns = {
11062 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11063 BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11064 BPF_STMT(BPF_RET | BPF_A, 0x0),
11065 },
11066 CLASSIC,
11067 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11068 { {0x3f, 0 }, },
11069 },
11070 {
11071 "LD_IND halfword negative offset, out of bounds",
11072 .u.insns = {
11073 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11074 BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
11075 BPF_STMT(BPF_RET | BPF_A, 0x0),
11076 },
11077 CLASSIC,
11078 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11079 { {0x3f, 0 } },
11080 },
11081 {
11082 "LD_IND word positive offset",
11083 .u.insns = {
11084 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11085 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
11086 BPF_STMT(BPF_RET | BPF_A, 0x0),
11087 },
11088 CLASSIC,
11089 {
11090 [0x1c] = 0xaa, [0x1d] = 0x55,
11091 [0x1e] = 0xbb, [0x1f] = 0x66,
11092 [0x20] = 0xcc, [0x21] = 0x77,
11093 [0x22] = 0xdd, [0x23] = 0x88,
11094 [0x24] = 0xee, [0x25] = 0x99,
11095 [0x26] = 0xff, [0x27] = 0xaa,
11096 },
11097 { {0x40, 0xee99ffaa } },
11098 },
11099 {
11100 "LD_IND word negative offset",
11101 .u.insns = {
11102 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11103 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
11104 BPF_STMT(BPF_RET | BPF_A, 0x0),
11105 },
11106 CLASSIC,
11107 {
11108 [0x1c] = 0xaa, [0x1d] = 0x55,
11109 [0x1e] = 0xbb, [0x1f] = 0x66,
11110 [0x20] = 0xcc, [0x21] = 0x77,
11111 [0x22] = 0xdd, [0x23] = 0x88,
11112 [0x24] = 0xee, [0x25] = 0x99,
11113 [0x26] = 0xff, [0x27] = 0xaa,
11114 },
11115 { {0x40, 0xaa55bb66 } },
11116 },
11117 {
11118 "LD_IND word unaligned (addr & 3 == 2)",
11119 .u.insns = {
11120 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11121 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
11122 BPF_STMT(BPF_RET | BPF_A, 0x0),
11123 },
11124 CLASSIC,
11125 {
11126 [0x1c] = 0xaa, [0x1d] = 0x55,
11127 [0x1e] = 0xbb, [0x1f] = 0x66,
11128 [0x20] = 0xcc, [0x21] = 0x77,
11129 [0x22] = 0xdd, [0x23] = 0x88,
11130 [0x24] = 0xee, [0x25] = 0x99,
11131 [0x26] = 0xff, [0x27] = 0xaa,
11132 },
11133 { {0x40, 0xbb66cc77 } },
11134 },
11135 {
11136 "LD_IND word unaligned (addr & 3 == 1)",
11137 .u.insns = {
11138 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11139 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
11140 BPF_STMT(BPF_RET | BPF_A, 0x0),
11141 },
11142 CLASSIC,
11143 {
11144 [0x1c] = 0xaa, [0x1d] = 0x55,
11145 [0x1e] = 0xbb, [0x1f] = 0x66,
11146 [0x20] = 0xcc, [0x21] = 0x77,
11147 [0x22] = 0xdd, [0x23] = 0x88,
11148 [0x24] = 0xee, [0x25] = 0x99,
11149 [0x26] = 0xff, [0x27] = 0xaa,
11150 },
11151 { {0x40, 0x55bb66cc } },
11152 },
11153 {
11154 "LD_IND word unaligned (addr & 3 == 3)",
11155 .u.insns = {
11156 BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11157 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
11158 BPF_STMT(BPF_RET | BPF_A, 0x0),
11159 },
11160 CLASSIC,
11161 {
11162 [0x1c] = 0xaa, [0x1d] = 0x55,
11163 [0x1e] = 0xbb, [0x1f] = 0x66,
11164 [0x20] = 0xcc, [0x21] = 0x77,
11165 [0x22] = 0xdd, [0x23] = 0x88,
11166 [0x24] = 0xee, [0x25] = 0x99,
11167 [0x26] = 0xff, [0x27] = 0xaa,
11168 },
11169 { {0x40, 0x66cc77dd } },
11170 },
11171 {
11172 "LD_IND word positive offset, all ff",
11173 .u.insns = {
11174 BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
11175 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11176 BPF_STMT(BPF_RET | BPF_A, 0x0),
11177 },
11178 CLASSIC,
11179 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11180 { {0x40, 0xffffffff } },
11181 },
11182 {
11183 "LD_IND word positive offset, out of bounds",
11184 .u.insns = {
11185 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11186 BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11187 BPF_STMT(BPF_RET | BPF_A, 0x0),
11188 },
11189 CLASSIC,
11190 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11191 { {0x3f, 0 }, },
11192 },
11193 {
11194 "LD_IND word negative offset, out of bounds",
11195 .u.insns = {
11196 BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11197 BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
11198 BPF_STMT(BPF_RET | BPF_A, 0x0),
11199 },
11200 CLASSIC,
11201 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11202 { {0x3f, 0 } },
11203 },
11204 {
11205 "LD_ABS byte",
11206 .u.insns = {
11207 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
11208 BPF_STMT(BPF_RET | BPF_A, 0x0),
11209 },
11210 CLASSIC,
11211 {
11212 [0x1c] = 0xaa, [0x1d] = 0x55,
11213 [0x1e] = 0xbb, [0x1f] = 0x66,
11214 [0x20] = 0xcc, [0x21] = 0x77,
11215 [0x22] = 0xdd, [0x23] = 0x88,
11216 [0x24] = 0xee, [0x25] = 0x99,
11217 [0x26] = 0xff, [0x27] = 0xaa,
11218 },
11219 { {0x40, 0xcc } },
11220 },
11221 {
11222 "LD_ABS byte positive offset, all ff",
11223 .u.insns = {
11224 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11225 BPF_STMT(BPF_RET | BPF_A, 0x0),
11226 },
11227 CLASSIC,
11228 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11229 { {0x40, 0xff } },
11230 },
11231 {
11232 "LD_ABS byte positive offset, out of bounds",
11233 .u.insns = {
11234 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11235 BPF_STMT(BPF_RET | BPF_A, 0x0),
11236 },
11237 CLASSIC,
11238 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11239 { {0x3f, 0 }, },
11240 },
11241 {
11242 "LD_ABS byte negative offset, out of bounds load",
11243 .u.insns = {
11244 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
11245 BPF_STMT(BPF_RET | BPF_A, 0x0),
11246 },
11247 CLASSIC | FLAG_EXPECTED_FAIL,
11248 .expected_errcode = -EINVAL,
11249 },
11250 {
11251 "LD_ABS byte negative offset, in bounds",
11252 .u.insns = {
11253 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11254 BPF_STMT(BPF_RET | BPF_A, 0x0),
11255 },
11256 CLASSIC,
11257 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11258 { {0x40, 0x82 }, },
11259 },
11260 {
11261 "LD_ABS byte negative offset, out of bounds",
11262 .u.insns = {
11263 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11264 BPF_STMT(BPF_RET | BPF_A, 0x0),
11265 },
11266 CLASSIC,
11267 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11268 { {0x3f, 0 }, },
11269 },
11270 {
11271 "LD_ABS byte negative offset, multiple calls",
11272 .u.insns = {
11273 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
11274 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
11275 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
11276 BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11277 BPF_STMT(BPF_RET | BPF_A, 0x0),
11278 },
11279 CLASSIC,
11280 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11281 { {0x40, 0x82 }, },
11282 },
11283 {
11284 "LD_ABS halfword",
11285 .u.insns = {
11286 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
11287 BPF_STMT(BPF_RET | BPF_A, 0x0),
11288 },
11289 CLASSIC,
11290 {
11291 [0x1c] = 0xaa, [0x1d] = 0x55,
11292 [0x1e] = 0xbb, [0x1f] = 0x66,
11293 [0x20] = 0xcc, [0x21] = 0x77,
11294 [0x22] = 0xdd, [0x23] = 0x88,
11295 [0x24] = 0xee, [0x25] = 0x99,
11296 [0x26] = 0xff, [0x27] = 0xaa,
11297 },
11298 { {0x40, 0xdd88 } },
11299 },
11300 {
11301 "LD_ABS halfword unaligned",
11302 .u.insns = {
11303 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
11304 BPF_STMT(BPF_RET | BPF_A, 0x0),
11305 },
11306 CLASSIC,
11307 {
11308 [0x1c] = 0xaa, [0x1d] = 0x55,
11309 [0x1e] = 0xbb, [0x1f] = 0x66,
11310 [0x20] = 0xcc, [0x21] = 0x77,
11311 [0x22] = 0xdd, [0x23] = 0x88,
11312 [0x24] = 0xee, [0x25] = 0x99,
11313 [0x26] = 0xff, [0x27] = 0xaa,
11314 },
11315 { {0x40, 0x99ff } },
11316 },
11317 {
11318 "LD_ABS halfword positive offset, all ff",
11319 .u.insns = {
11320 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
11321 BPF_STMT(BPF_RET | BPF_A, 0x0),
11322 },
11323 CLASSIC,
11324 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11325 { {0x40, 0xffff } },
11326 },
11327 {
11328 "LD_ABS halfword positive offset, out of bounds",
11329 .u.insns = {
11330 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
11331 BPF_STMT(BPF_RET | BPF_A, 0x0),
11332 },
11333 CLASSIC,
11334 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11335 { {0x3f, 0 }, },
11336 },
11337 {
11338 "LD_ABS halfword negative offset, out of bounds load",
11339 .u.insns = {
11340 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
11341 BPF_STMT(BPF_RET | BPF_A, 0x0),
11342 },
11343 CLASSIC | FLAG_EXPECTED_FAIL,
11344 .expected_errcode = -EINVAL,
11345 },
11346 {
11347 "LD_ABS halfword negative offset, in bounds",
11348 .u.insns = {
11349 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11350 BPF_STMT(BPF_RET | BPF_A, 0x0),
11351 },
11352 CLASSIC,
11353 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11354 { {0x40, 0x1982 }, },
11355 },
11356 {
11357 "LD_ABS halfword negative offset, out of bounds",
11358 .u.insns = {
11359 BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11360 BPF_STMT(BPF_RET | BPF_A, 0x0),
11361 },
11362 CLASSIC,
11363 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11364 { {0x3f, 0 }, },
11365 },
11366 {
11367 "LD_ABS word",
11368 .u.insns = {
11369 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
11370 BPF_STMT(BPF_RET | BPF_A, 0x0),
11371 },
11372 CLASSIC,
11373 {
11374 [0x1c] = 0xaa, [0x1d] = 0x55,
11375 [0x1e] = 0xbb, [0x1f] = 0x66,
11376 [0x20] = 0xcc, [0x21] = 0x77,
11377 [0x22] = 0xdd, [0x23] = 0x88,
11378 [0x24] = 0xee, [0x25] = 0x99,
11379 [0x26] = 0xff, [0x27] = 0xaa,
11380 },
11381 { {0x40, 0xaa55bb66 } },
11382 },
11383 {
11384 "LD_ABS word unaligned (addr & 3 == 2)",
11385 .u.insns = {
11386 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
11387 BPF_STMT(BPF_RET | BPF_A, 0x0),
11388 },
11389 CLASSIC,
11390 {
11391 [0x1c] = 0xaa, [0x1d] = 0x55,
11392 [0x1e] = 0xbb, [0x1f] = 0x66,
11393 [0x20] = 0xcc, [0x21] = 0x77,
11394 [0x22] = 0xdd, [0x23] = 0x88,
11395 [0x24] = 0xee, [0x25] = 0x99,
11396 [0x26] = 0xff, [0x27] = 0xaa,
11397 },
11398 { {0x40, 0xdd88ee99 } },
11399 },
11400 {
11401 "LD_ABS word unaligned (addr & 3 == 1)",
11402 .u.insns = {
11403 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
11404 BPF_STMT(BPF_RET | BPF_A, 0x0),
11405 },
11406 CLASSIC,
11407 {
11408 [0x1c] = 0xaa, [0x1d] = 0x55,
11409 [0x1e] = 0xbb, [0x1f] = 0x66,
11410 [0x20] = 0xcc, [0x21] = 0x77,
11411 [0x22] = 0xdd, [0x23] = 0x88,
11412 [0x24] = 0xee, [0x25] = 0x99,
11413 [0x26] = 0xff, [0x27] = 0xaa,
11414 },
11415 { {0x40, 0x77dd88ee } },
11416 },
11417 {
11418 "LD_ABS word unaligned (addr & 3 == 3)",
11419 .u.insns = {
11420 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
11421 BPF_STMT(BPF_RET | BPF_A, 0x0),
11422 },
11423 CLASSIC,
11424 {
11425 [0x1c] = 0xaa, [0x1d] = 0x55,
11426 [0x1e] = 0xbb, [0x1f] = 0x66,
11427 [0x20] = 0xcc, [0x21] = 0x77,
11428 [0x22] = 0xdd, [0x23] = 0x88,
11429 [0x24] = 0xee, [0x25] = 0x99,
11430 [0x26] = 0xff, [0x27] = 0xaa,
11431 },
11432 { {0x40, 0x88ee99ff } },
11433 },
11434 {
11435 "LD_ABS word positive offset, all ff",
11436 .u.insns = {
11437 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
11438 BPF_STMT(BPF_RET | BPF_A, 0x0),
11439 },
11440 CLASSIC,
11441 { [0x3c] = 0xff, [0x3d] = 0xff, [0x3e] = 0xff, [0x3f] = 0xff },
11442 { {0x40, 0xffffffff } },
11443 },
11444 {
11445 "LD_ABS word positive offset, out of bounds",
11446 .u.insns = {
11447 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
11448 BPF_STMT(BPF_RET | BPF_A, 0x0),
11449 },
11450 CLASSIC,
11451 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11452 { {0x3f, 0 }, },
11453 },
11454 {
11455 "LD_ABS word negative offset, out of bounds load",
11456 .u.insns = {
11457 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
11458 BPF_STMT(BPF_RET | BPF_A, 0x0),
11459 },
11460 CLASSIC | FLAG_EXPECTED_FAIL,
11461 .expected_errcode = -EINVAL,
11462 },
11463 {
11464 "LD_ABS word negative offset, in bounds",
11465 .u.insns = {
11466 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11467 BPF_STMT(BPF_RET | BPF_A, 0x0),
11468 },
11469 CLASSIC,
11470 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11471 { {0x40, 0x25051982 }, },
11472 },
11473 {
11474 "LD_ABS word negative offset, out of bounds",
11475 .u.insns = {
11476 BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11477 BPF_STMT(BPF_RET | BPF_A, 0x0),
11478 },
11479 CLASSIC,
11480 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11481 { {0x3f, 0 }, },
11482 },
11483 {
11484 "LDX_MSH standalone, preserved A",
11485 .u.insns = {
11486 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11487 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11488 BPF_STMT(BPF_RET | BPF_A, 0x0),
11489 },
11490 CLASSIC,
11491 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11492 { {0x40, 0xffeebbaa }, },
11493 },
11494 {
11495 "LDX_MSH standalone, preserved A 2",
11496 .u.insns = {
11497 BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
11498 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11499 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
11500 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11501 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
11502 BPF_STMT(BPF_RET | BPF_A, 0x0),
11503 },
11504 CLASSIC,
11505 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11506 { {0x40, 0x175e9d63 }, },
11507 },
11508 {
11509 "LDX_MSH standalone, test result 1",
11510 .u.insns = {
11511 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11512 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11513 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11514 BPF_STMT(BPF_RET | BPF_A, 0x0),
11515 },
11516 CLASSIC,
11517 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11518 { {0x40, 0x14 }, },
11519 },
11520 {
11521 "LDX_MSH standalone, test result 2",
11522 .u.insns = {
11523 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11524 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11525 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11526 BPF_STMT(BPF_RET | BPF_A, 0x0),
11527 },
11528 CLASSIC,
11529 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11530 { {0x40, 0x24 }, },
11531 },
11532 {
11533 "LDX_MSH standalone, negative offset",
11534 .u.insns = {
11535 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11536 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
11537 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11538 BPF_STMT(BPF_RET | BPF_A, 0x0),
11539 },
11540 CLASSIC,
11541 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11542 { {0x40, 0 }, },
11543 },
11544 {
11545 "LDX_MSH standalone, negative offset 2",
11546 .u.insns = {
11547 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11548 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
11549 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11550 BPF_STMT(BPF_RET | BPF_A, 0x0),
11551 },
11552 CLASSIC,
11553 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11554 { {0x40, 0x24 }, },
11555 },
11556 {
11557 "LDX_MSH standalone, out of bounds",
11558 .u.insns = {
11559 BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11560 BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
11561 BPF_STMT(BPF_MISC | BPF_TXA, 0),
11562 BPF_STMT(BPF_RET | BPF_A, 0x0),
11563 },
11564 CLASSIC,
11565 { [0x3c] = 0x25, [0x3d] = 0x05, [0x3e] = 0x19, [0x3f] = 0x82 },
11566 { {0x40, 0 }, },
11567 },
11568 /*
11569 * verify that the interpreter or JIT correctly sets A and X
11570 * to 0.
11571 */
11572 {
11573 "ADD default X",
11574 .u.insns = {
11575 /*
11576 * A = 0x42
11577 * A = A + X
11578 * ret A
11579 */
11580 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11581 BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
11582 BPF_STMT(BPF_RET | BPF_A, 0x0),
11583 },
11584 CLASSIC | FLAG_NO_DATA,
11585 {},
11586 { {0x1, 0x42 } },
11587 },
11588 {
11589 "ADD default A",
11590 .u.insns = {
11591 /*
11592 * A = A + 0x42
11593 * ret A
11594 */
11595 BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
11596 BPF_STMT(BPF_RET | BPF_A, 0x0),
11597 },
11598 CLASSIC | FLAG_NO_DATA,
11599 {},
11600 { {0x1, 0x42 } },
11601 },
11602 {
11603 "SUB default X",
11604 .u.insns = {
11605 /*
11606 * A = 0x66
11607 * A = A - X
11608 * ret A
11609 */
11610 BPF_STMT(BPF_LD | BPF_IMM, 0x66),
11611 BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
11612 BPF_STMT(BPF_RET | BPF_A, 0x0),
11613 },
11614 CLASSIC | FLAG_NO_DATA,
11615 {},
11616 { {0x1, 0x66 } },
11617 },
11618 {
11619 "SUB default A",
11620 .u.insns = {
11621 /*
11622 * A = A - -0x66
11623 * ret A
11624 */
11625 BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
11626 BPF_STMT(BPF_RET | BPF_A, 0x0),
11627 },
11628 CLASSIC | FLAG_NO_DATA,
11629 {},
11630 { {0x1, 0x66 } },
11631 },
11632 {
11633 "MUL default X",
11634 .u.insns = {
11635 /*
11636 * A = 0x42
11637 * A = A * X
11638 * ret A
11639 */
11640 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11641 BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
11642 BPF_STMT(BPF_RET | BPF_A, 0x0),
11643 },
11644 CLASSIC | FLAG_NO_DATA,
11645 {},
11646 { {0x1, 0x0 } },
11647 },
11648 {
11649 "MUL default A",
11650 .u.insns = {
11651 /*
11652 * A = A * 0x66
11653 * ret A
11654 */
11655 BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
11656 BPF_STMT(BPF_RET | BPF_A, 0x0),
11657 },
11658 CLASSIC | FLAG_NO_DATA,
11659 {},
11660 { {0x1, 0x0 } },
11661 },
11662 {
11663 "DIV default X",
11664 .u.insns = {
11665 /*
11666 * A = 0x42
			 * A = A / X ; this halts the filter execution if X is 0
11668 * ret 0x42
11669 */
11670 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11671 BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
11672 BPF_STMT(BPF_RET | BPF_K, 0x42),
11673 },
11674 CLASSIC | FLAG_NO_DATA,
11675 {},
11676 { {0x1, 0x0 } },
11677 },
11678 {
11679 "DIV default A",
11680 .u.insns = {
11681 /*
11682 * A = A / 1
11683 * ret A
11684 */
11685 BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
11686 BPF_STMT(BPF_RET | BPF_A, 0x0),
11687 },
11688 CLASSIC | FLAG_NO_DATA,
11689 {},
11690 { {0x1, 0x0 } },
11691 },
11692 {
11693 "MOD default X",
11694 .u.insns = {
11695 /*
11696 * A = 0x42
			 * A = A mod X ; this halts the filter execution if X is 0
11698 * ret 0x42
11699 */
11700 BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11701 BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
11702 BPF_STMT(BPF_RET | BPF_K, 0x42),
11703 },
11704 CLASSIC | FLAG_NO_DATA,
11705 {},
11706 { {0x1, 0x0 } },
11707 },
11708 {
11709 "MOD default A",
11710 .u.insns = {
11711 /*
11712 * A = A mod 1
11713 * ret A
11714 */
11715 BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
11716 BPF_STMT(BPF_RET | BPF_A, 0x0),
11717 },
11718 CLASSIC | FLAG_NO_DATA,
11719 {},
11720 { {0x1, 0x0 } },
11721 },
11722 {
11723 "JMP EQ default A",
11724 .u.insns = {
11725 /*
11726 * cmp A, 0x0, 0, 1
11727 * ret 0x42
11728 * ret 0x66
11729 */
11730 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11731 BPF_STMT(BPF_RET | BPF_K, 0x42),
11732 BPF_STMT(BPF_RET | BPF_K, 0x66),
11733 },
11734 CLASSIC | FLAG_NO_DATA,
11735 {},
11736 { {0x1, 0x42 } },
11737 },
11738 {
11739 "JMP EQ default X",
11740 .u.insns = {
11741 /*
11742 * A = 0x0
11743 * cmp A, X, 0, 1
11744 * ret 0x42
11745 * ret 0x66
11746 */
11747 BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11748 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11749 BPF_STMT(BPF_RET | BPF_K, 0x42),
11750 BPF_STMT(BPF_RET | BPF_K, 0x66),
11751 },
11752 CLASSIC | FLAG_NO_DATA,
11753 {},
11754 { {0x1, 0x42 } },
11755 },
	/* Checking interpreter vs JIT wrt sign-extended immediates. */
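	/*
	 * A 64-bit BPF_JMP_IMM compares the full register value against the
	 * sign-extended 32-bit immediate. Hence a register holding the
	 * zero-extended value 0xfefb0000 must not equal the immediate
	 * 0xfefb0000 (test 5), while a register holding the negative 64-bit
	 * value -17104896 does equal that immediate (test 4).
	 */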
11757 {
11758 "JNE signed compare, test 1",
11759 .u.insns_int = {
11760 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11761 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11762 BPF_MOV64_REG(R2, R1),
11763 BPF_ALU64_REG(BPF_AND, R2, R3),
11764 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11765 BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11766 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11767 BPF_EXIT_INSN(),
11768 },
11769 INTERNAL,
11770 { },
11771 { { 0, 1 } },
11772 },
11773 {
11774 "JNE signed compare, test 2",
11775 .u.insns_int = {
11776 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11777 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11778 BPF_MOV64_REG(R2, R1),
11779 BPF_ALU64_REG(BPF_AND, R2, R3),
11780 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11781 BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11782 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11783 BPF_EXIT_INSN(),
11784 },
11785 INTERNAL,
11786 { },
11787 { { 0, 1 } },
11788 },
11789 {
11790 "JNE signed compare, test 3",
11791 .u.insns_int = {
11792 BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11793 BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11794 BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11795 BPF_MOV64_REG(R2, R1),
11796 BPF_ALU64_REG(BPF_AND, R2, R3),
11797 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11798 BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11799 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11800 BPF_EXIT_INSN(),
11801 },
11802 INTERNAL,
11803 { },
11804 { { 0, 2 } },
11805 },
11806 {
11807 "JNE signed compare, test 4",
11808 .u.insns_int = {
11809 BPF_LD_IMM64(R1, -17104896),
11810 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11811 BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11812 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11813 BPF_EXIT_INSN(),
11814 },
11815 INTERNAL,
11816 { },
11817 { { 0, 2 } },
11818 },
11819 {
11820 "JNE signed compare, test 5",
11821 .u.insns_int = {
11822 BPF_LD_IMM64(R1, 0xfefb0000),
11823 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11824 BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11825 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11826 BPF_EXIT_INSN(),
11827 },
11828 INTERNAL,
11829 { },
11830 { { 0, 1 } },
11831 },
11832 {
11833 "JNE signed compare, test 6",
11834 .u.insns_int = {
11835 BPF_LD_IMM64(R1, 0x7efb0000),
11836 BPF_ALU32_IMM(BPF_MOV, R0, 1),
11837 BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11838 BPF_ALU32_IMM(BPF_MOV, R0, 2),
11839 BPF_EXIT_INSN(),
11840 },
11841 INTERNAL,
11842 { },
11843 { { 0, 2 } },
11844 },
11845 {
11846 "JNE signed compare, test 7",
11847 .u.insns = {
11848 BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11849 BPF_STMT(BPF_MISC | BPF_TAX, 0),
11850 BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11851 BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11852 BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11853 BPF_STMT(BPF_RET | BPF_K, 1),
11854 BPF_STMT(BPF_RET | BPF_K, 2),
11855 },
11856 CLASSIC | FLAG_NO_DATA,
11857 {},
11858 { { 0, 2 } },
11859 },
11860 /* BPF_LDX_MEM with operand aliasing */
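	/*
	 * In the tests below the destination of the load is also the register
	 * holding the base address, so the JIT must consume the address before
	 * overwriting the register with the loaded value.
	 */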
11861 {
11862 "LDX_MEM_B: operand register aliasing",
11863 .u.insns_int = {
11864 BPF_ST_MEM(BPF_B, R10, -8, 123),
11865 BPF_MOV64_REG(R0, R10),
11866 BPF_LDX_MEM(BPF_B, R0, R0, -8),
11867 BPF_EXIT_INSN(),
11868 },
11869 INTERNAL,
11870 { },
11871 { { 0, 123 } },
11872 .stack_depth = 8,
11873 },
11874 {
11875 "LDX_MEM_H: operand register aliasing",
11876 .u.insns_int = {
11877 BPF_ST_MEM(BPF_H, R10, -8, 12345),
11878 BPF_MOV64_REG(R0, R10),
11879 BPF_LDX_MEM(BPF_H, R0, R0, -8),
11880 BPF_EXIT_INSN(),
11881 },
11882 INTERNAL,
11883 { },
11884 { { 0, 12345 } },
11885 .stack_depth = 8,
11886 },
11887 {
11888 "LDX_MEM_W: operand register aliasing",
11889 .u.insns_int = {
11890 BPF_ST_MEM(BPF_W, R10, -8, 123456789),
11891 BPF_MOV64_REG(R0, R10),
11892 BPF_LDX_MEM(BPF_W, R0, R0, -8),
11893 BPF_EXIT_INSN(),
11894 },
11895 INTERNAL,
11896 { },
11897 { { 0, 123456789 } },
11898 .stack_depth = 8,
11899 },
11900 {
11901 "LDX_MEM_DW: operand register aliasing",
11902 .u.insns_int = {
11903 BPF_LD_IMM64(R1, 0x123456789abcdefULL),
11904 BPF_STX_MEM(BPF_DW, R10, R1, -8),
11905 BPF_MOV64_REG(R0, R10),
11906 BPF_LDX_MEM(BPF_DW, R0, R0, -8),
11907 BPF_ALU64_REG(BPF_SUB, R0, R1),
11908 BPF_MOV64_REG(R1, R0),
11909 BPF_ALU64_IMM(BPF_RSH, R1, 32),
11910 BPF_ALU64_REG(BPF_OR, R0, R1),
11911 BPF_EXIT_INSN(),
11912 },
11913 INTERNAL,
11914 { },
11915 { { 0, 0 } },
11916 .stack_depth = 8,
11917 },
11918 /*
11919 * Register (non-)clobbering tests for the case where a JIT implements
11920 * complex ALU or ATOMIC operations via function calls. If so, the
11921 * function call must be transparent to the eBPF registers. The JIT
11922 * must therefore save and restore relevant registers across the call.
11923 * The following tests check that the eBPF registers retain their
	 * values after such an operation. Mainly intended for complex ALU
	 * and atomic operations, but we run it for all of them. You never know...
	 *
	 * Note that each operation should be tested twice with different
11928 * destinations, to check preservation for all registers.
11929 */
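	/*
	 * The register macros R0..R9 expand to the register numbers 0..9, so
	 * the BPF_ALU64_IMM(BPF_MOV, Rn, Rn) preamble below loads each register
	 * with its own number as an immediate, and the BPF_JMP_IMM checks then
	 * compare against those same numbers. The destination of the tested
	 * operation is rewritten with BPF_ALU32_IMM(BPF_MOV, dst, dst) before
	 * the checks so that its own check passes as well.
	 */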
11930#define BPF_TEST_CLOBBER_ALU(alu, op, dst, src) \
11931 { \
11932 #alu "_" #op " to " #dst ": no clobbering", \
11933 .u.insns_int = { \
11934 BPF_ALU64_IMM(BPF_MOV, R0, R0), \
11935 BPF_ALU64_IMM(BPF_MOV, R1, R1), \
11936 BPF_ALU64_IMM(BPF_MOV, R2, R2), \
11937 BPF_ALU64_IMM(BPF_MOV, R3, R3), \
11938 BPF_ALU64_IMM(BPF_MOV, R4, R4), \
11939 BPF_ALU64_IMM(BPF_MOV, R5, R5), \
11940 BPF_ALU64_IMM(BPF_MOV, R6, R6), \
11941 BPF_ALU64_IMM(BPF_MOV, R7, R7), \
11942 BPF_ALU64_IMM(BPF_MOV, R8, R8), \
11943 BPF_ALU64_IMM(BPF_MOV, R9, R9), \
11944 BPF_##alu(BPF_ ##op, dst, src), \
11945 BPF_ALU32_IMM(BPF_MOV, dst, dst), \
11946 BPF_JMP_IMM(BPF_JNE, R0, R0, 10), \
11947 BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
11948 BPF_JMP_IMM(BPF_JNE, R2, R2, 8), \
11949 BPF_JMP_IMM(BPF_JNE, R3, R3, 7), \
11950 BPF_JMP_IMM(BPF_JNE, R4, R4, 6), \
11951 BPF_JMP_IMM(BPF_JNE, R5, R5, 5), \
11952 BPF_JMP_IMM(BPF_JNE, R6, R6, 4), \
11953 BPF_JMP_IMM(BPF_JNE, R7, R7, 3), \
11954 BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
11955 BPF_JMP_IMM(BPF_JNE, R9, R9, 1), \
11956 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
11957 BPF_EXIT_INSN(), \
11958 }, \
11959 INTERNAL, \
11960 { }, \
11961 { { 0, 1 } } \
11962 }
11963 /* ALU64 operations, register clobbering */
11964 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11965 BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11966 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11967 BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11968 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11969 BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11970 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11971 BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11972 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11973 BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11974 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11975 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11976 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11977 BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11978 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11979 BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11980 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11981 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11982 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11983 BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11984 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11985 BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11986 /* ALU32 immediate operations, register clobbering */
11987 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11988 BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11989 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11990 BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11991 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11992 BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11993 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11994 BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11995 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
11996 BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
11997 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
11998 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
11999 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
12000 BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
12001 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
12002 BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
12003 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
12004 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
12005 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
12006 BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
12007 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
12008 BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
12009 /* ALU64 register operations, register clobbering */
12010 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
12011 BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
12012 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
12013 BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
12014 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
12015 BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
12016 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
12017 BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
12018 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
12019 BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
12020 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
12021 BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
12022 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
12023 BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
12024 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
12025 BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
12026 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
12027 BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
12028 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
12029 BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
12030 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
12031 BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
12032 /* ALU32 register operations, register clobbering */
12033 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
12034 BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
12035 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
12036 BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
12037 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
12038 BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
12039 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
12040 BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
12041 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
12042 BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
12043 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
12044 BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
12045 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
12046 BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
12047 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
12048 BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
12049 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
12050 BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
12051 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
12052 BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
12053 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
12054 BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
12055#undef BPF_TEST_CLOBBER_ALU
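	/*
	 * Same idea for atomic operations. The memory word at R10 - 8 is
	 * pre-initialized so that the value an operation reads back matches
	 * what the corresponding register is later checked against: 1 for the
	 * fetching variants, which load the old memory value into R1, and 0
	 * for BPF_CMPXCHG, which loads the old memory value into R0.
	 */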
12056#define BPF_TEST_CLOBBER_ATOMIC(width, op) \
12057 { \
12058 "Atomic_" #width " " #op ": no clobbering", \
12059 .u.insns_int = { \
12060 BPF_ALU64_IMM(BPF_MOV, R0, 0), \
12061 BPF_ALU64_IMM(BPF_MOV, R1, 1), \
12062 BPF_ALU64_IMM(BPF_MOV, R2, 2), \
12063 BPF_ALU64_IMM(BPF_MOV, R3, 3), \
12064 BPF_ALU64_IMM(BPF_MOV, R4, 4), \
12065 BPF_ALU64_IMM(BPF_MOV, R5, 5), \
12066 BPF_ALU64_IMM(BPF_MOV, R6, 6), \
12067 BPF_ALU64_IMM(BPF_MOV, R7, 7), \
12068 BPF_ALU64_IMM(BPF_MOV, R8, 8), \
12069 BPF_ALU64_IMM(BPF_MOV, R9, 9), \
12070 BPF_ST_MEM(width, R10, -8, \
12071 (op) == BPF_CMPXCHG ? 0 : \
12072 (op) & BPF_FETCH ? 1 : 0), \
12073 BPF_ATOMIC_OP(width, op, R10, R1, -8), \
12074 BPF_JMP_IMM(BPF_JNE, R0, 0, 10), \
12075 BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
12076 BPF_JMP_IMM(BPF_JNE, R2, 2, 8), \
12077 BPF_JMP_IMM(BPF_JNE, R3, 3, 7), \
12078 BPF_JMP_IMM(BPF_JNE, R4, 4, 6), \
12079 BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
12080 BPF_JMP_IMM(BPF_JNE, R6, 6, 4), \
12081 BPF_JMP_IMM(BPF_JNE, R7, 7, 3), \
12082 BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
12083 BPF_JMP_IMM(BPF_JNE, R9, 9, 1), \
12084 BPF_ALU64_IMM(BPF_MOV, R0, 1), \
12085 BPF_EXIT_INSN(), \
12086 }, \
12087 INTERNAL, \
12088 { }, \
12089 { { 0, 1 } }, \
12090 .stack_depth = 8, \
12091 }
12092 /* 64-bit atomic operations, register clobbering */
12093 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
12094 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
12095 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
12096 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
12097 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
12098 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
12099 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
12100 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
12101 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
12102 BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
12103 /* 32-bit atomic operations, register clobbering */
12104 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
12105 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
12106 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
12107 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
12108 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
12109 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
12110 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
12111 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
12112 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
12113 BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
12114#undef BPF_TEST_CLOBBER_ATOMIC
12115 /* Checking that ALU32 src is not zero extended in place */
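	/*
	 * Each test keeps a 64-bit copy of the source register, subtracts the
	 * source back after the 32-bit operation and folds the upper half of
	 * the difference into the lower half (RSH 32 + OR), so the program
	 * returns 0 only if all 64 bits of the source were left intact.
	 */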
12116#define BPF_ALU32_SRC_ZEXT(op) \
12117 { \
12118 "ALU32_" #op "_X: src preserved in zext", \
12119 .u.insns_int = { \
12120 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
12121 BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
12122 BPF_ALU64_REG(BPF_MOV, R0, R1), \
12123 BPF_ALU32_REG(BPF_##op, R2, R1), \
12124 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12125 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12126 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12127 BPF_ALU64_REG(BPF_OR, R0, R1), \
12128 BPF_EXIT_INSN(), \
12129 }, \
12130 INTERNAL, \
12131 { }, \
12132 { { 0, 0 } }, \
12133 }
12134 BPF_ALU32_SRC_ZEXT(MOV),
12135 BPF_ALU32_SRC_ZEXT(AND),
12136 BPF_ALU32_SRC_ZEXT(OR),
12137 BPF_ALU32_SRC_ZEXT(XOR),
12138 BPF_ALU32_SRC_ZEXT(ADD),
12139 BPF_ALU32_SRC_ZEXT(SUB),
12140 BPF_ALU32_SRC_ZEXT(MUL),
12141 BPF_ALU32_SRC_ZEXT(DIV),
12142 BPF_ALU32_SRC_ZEXT(MOD),
12143#undef BPF_ALU32_SRC_ZEXT
12144 /* Checking that ATOMIC32 src is not zero extended in place */
12145#define BPF_ATOMIC32_SRC_ZEXT(op) \
12146 { \
12147 "ATOMIC_W_" #op ": src preserved in zext", \
12148 .u.insns_int = { \
12149 BPF_LD_IMM64(R0, 0x0123456789acbdefULL), \
12150 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12151 BPF_ST_MEM(BPF_W, R10, -4, 0), \
12152 BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4), \
12153 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12154 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12155 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12156 BPF_ALU64_REG(BPF_OR, R0, R1), \
12157 BPF_EXIT_INSN(), \
12158 }, \
12159 INTERNAL, \
12160 { }, \
12161 { { 0, 0 } }, \
12162 .stack_depth = 8, \
12163 }
12164 BPF_ATOMIC32_SRC_ZEXT(ADD),
12165 BPF_ATOMIC32_SRC_ZEXT(AND),
12166 BPF_ATOMIC32_SRC_ZEXT(OR),
12167 BPF_ATOMIC32_SRC_ZEXT(XOR),
12168#undef BPF_ATOMIC32_SRC_ZEXT
12169 /* Checking that CMPXCHG32 src is not zero extended in place */
12170 {
12171 "ATOMIC_W_CMPXCHG: src preserved in zext",
12172 .u.insns_int = {
12173 BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
12174 BPF_ALU64_REG(BPF_MOV, R2, R1),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
12176 BPF_ST_MEM(BPF_W, R10, -4, 0),
12177 BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
12178 BPF_ALU64_REG(BPF_SUB, R1, R2),
12179 BPF_ALU64_REG(BPF_MOV, R2, R1),
12180 BPF_ALU64_IMM(BPF_RSH, R2, 32),
12181 BPF_ALU64_REG(BPF_OR, R1, R2),
12182 BPF_ALU64_REG(BPF_MOV, R0, R1),
12183 BPF_EXIT_INSN(),
12184 },
12185 INTERNAL,
12186 { },
12187 { { 0, 0 } },
12188 .stack_depth = 8,
12189 },
12190 /* Checking that JMP32 immediate src is not zero extended in place */
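	/*
	 * The conditional jump below either falls through to or skips a single
	 * no-op (BPF_JMP_A(0)), so both outcomes reach the same check sequence
	 * and only the side effect on the 64-bit operand register is observed.
	 */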
12191#define BPF_JMP32_IMM_ZEXT(op) \
12192 { \
12193 "JMP32_" #op "_K: operand preserved in zext", \
12194 .u.insns_int = { \
12195 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12196 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12197 BPF_JMP32_IMM(BPF_##op, R0, 1234, 1), \
12198 BPF_JMP_A(0), /* Nop */ \
12199 BPF_ALU64_REG(BPF_SUB, R0, R1), \
12200 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12201 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12202 BPF_ALU64_REG(BPF_OR, R0, R1), \
12203 BPF_EXIT_INSN(), \
12204 }, \
12205 INTERNAL, \
12206 { }, \
12207 { { 0, 0 } }, \
12208 }
12209 BPF_JMP32_IMM_ZEXT(JEQ),
12210 BPF_JMP32_IMM_ZEXT(JNE),
12211 BPF_JMP32_IMM_ZEXT(JSET),
12212 BPF_JMP32_IMM_ZEXT(JGT),
12213 BPF_JMP32_IMM_ZEXT(JGE),
12214 BPF_JMP32_IMM_ZEXT(JLT),
12215 BPF_JMP32_IMM_ZEXT(JLE),
12216 BPF_JMP32_IMM_ZEXT(JSGT),
12217 BPF_JMP32_IMM_ZEXT(JSGE),
12219 BPF_JMP32_IMM_ZEXT(JSLT),
12220 BPF_JMP32_IMM_ZEXT(JSLE),
#undef BPF_JMP32_IMM_ZEXT
12222 /* Checking that JMP32 dst & src are not zero extended in place */
12223#define BPF_JMP32_REG_ZEXT(op) \
12224 { \
12225 "JMP32_" #op "_X: operands preserved in zext", \
12226 .u.insns_int = { \
12227 BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12228 BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
12229 BPF_ALU64_REG(BPF_MOV, R2, R0), \
12230 BPF_ALU64_REG(BPF_MOV, R3, R1), \
			BPF_JMP32_REG(BPF_##op, R0, R1, 1), \
12232 BPF_JMP_A(0), /* Nop */ \
12233 BPF_ALU64_REG(BPF_SUB, R0, R2), \
12234 BPF_ALU64_REG(BPF_SUB, R1, R3), \
12235 BPF_ALU64_REG(BPF_OR, R0, R1), \
12236 BPF_ALU64_REG(BPF_MOV, R1, R0), \
12237 BPF_ALU64_IMM(BPF_RSH, R1, 32), \
12238 BPF_ALU64_REG(BPF_OR, R0, R1), \
12239 BPF_EXIT_INSN(), \
12240 }, \
12241 INTERNAL, \
12242 { }, \
12243 { { 0, 0 } }, \
12244 }
12245 BPF_JMP32_REG_ZEXT(JEQ),
12246 BPF_JMP32_REG_ZEXT(JNE),
12247 BPF_JMP32_REG_ZEXT(JSET),
12248 BPF_JMP32_REG_ZEXT(JGT),
12249 BPF_JMP32_REG_ZEXT(JGE),
12250 BPF_JMP32_REG_ZEXT(JLT),
12251 BPF_JMP32_REG_ZEXT(JLE),
12252 BPF_JMP32_REG_ZEXT(JSGT),
12253 BPF_JMP32_REG_ZEXT(JSGE),
12255 BPF_JMP32_REG_ZEXT(JSLT),
12256 BPF_JMP32_REG_ZEXT(JSLE),
#undef BPF_JMP32_REG_ZEXT
12258 /* ALU64 K register combinations */
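	/*
	 * The "registers" tests below are generated by the bpf_fill_* helpers
	 * referenced via .fill_helper; each generated program is expected to
	 * return 1.
	 */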
12259 {
12260 "ALU64_MOV_K: registers",
12261 { },
12262 INTERNAL,
12263 { },
12264 { { 0, 1 } },
12265 .fill_helper = bpf_fill_alu64_mov_imm_regs,
12266 },
12267 {
12268 "ALU64_AND_K: registers",
12269 { },
12270 INTERNAL,
12271 { },
12272 { { 0, 1 } },
12273 .fill_helper = bpf_fill_alu64_and_imm_regs,
12274 },
12275 {
12276 "ALU64_OR_K: registers",
12277 { },
12278 INTERNAL,
12279 { },
12280 { { 0, 1 } },
12281 .fill_helper = bpf_fill_alu64_or_imm_regs,
12282 },
12283 {
12284 "ALU64_XOR_K: registers",
12285 { },
12286 INTERNAL,
12287 { },
12288 { { 0, 1 } },
12289 .fill_helper = bpf_fill_alu64_xor_imm_regs,
12290 },
12291 {
12292 "ALU64_LSH_K: registers",
12293 { },
12294 INTERNAL,
12295 { },
12296 { { 0, 1 } },
12297 .fill_helper = bpf_fill_alu64_lsh_imm_regs,
12298 },
12299 {
12300 "ALU64_RSH_K: registers",
12301 { },
12302 INTERNAL,
12303 { },
12304 { { 0, 1 } },
12305 .fill_helper = bpf_fill_alu64_rsh_imm_regs,
12306 },
12307 {
12308 "ALU64_ARSH_K: registers",
12309 { },
12310 INTERNAL,
12311 { },
12312 { { 0, 1 } },
12313 .fill_helper = bpf_fill_alu64_arsh_imm_regs,
12314 },
12315 {
12316 "ALU64_ADD_K: registers",
12317 { },
12318 INTERNAL,
12319 { },
12320 { { 0, 1 } },
12321 .fill_helper = bpf_fill_alu64_add_imm_regs,
12322 },
12323 {
12324 "ALU64_SUB_K: registers",
12325 { },
12326 INTERNAL,
12327 { },
12328 { { 0, 1 } },
12329 .fill_helper = bpf_fill_alu64_sub_imm_regs,
12330 },
12331 {
12332 "ALU64_MUL_K: registers",
12333 { },
12334 INTERNAL,
12335 { },
12336 { { 0, 1 } },
12337 .fill_helper = bpf_fill_alu64_mul_imm_regs,
12338 },
12339 {
12340 "ALU64_DIV_K: registers",
12341 { },
12342 INTERNAL,
12343 { },
12344 { { 0, 1 } },
12345 .fill_helper = bpf_fill_alu64_div_imm_regs,
12346 },
12347 {
12348 "ALU64_MOD_K: registers",
12349 { },
12350 INTERNAL,
12351 { },
12352 { { 0, 1 } },
12353 .fill_helper = bpf_fill_alu64_mod_imm_regs,
12354 },
12355 /* ALU32 K registers */
12356 {
12357 "ALU32_MOV_K: registers",
12358 { },
12359 INTERNAL,
12360 { },
12361 { { 0, 1 } },
12362 .fill_helper = bpf_fill_alu32_mov_imm_regs,
12363 },
12364 {
12365 "ALU32_AND_K: registers",
12366 { },
12367 INTERNAL,
12368 { },
12369 { { 0, 1 } },
12370 .fill_helper = bpf_fill_alu32_and_imm_regs,
12371 },
12372 {
12373 "ALU32_OR_K: registers",
12374 { },
12375 INTERNAL,
12376 { },
12377 { { 0, 1 } },
12378 .fill_helper = bpf_fill_alu32_or_imm_regs,
12379 },
12380 {
12381 "ALU32_XOR_K: registers",
12382 { },
12383 INTERNAL,
12384 { },
12385 { { 0, 1 } },
12386 .fill_helper = bpf_fill_alu32_xor_imm_regs,
12387 },
12388 {
12389 "ALU32_LSH_K: registers",
12390 { },
12391 INTERNAL,
12392 { },
12393 { { 0, 1 } },
12394 .fill_helper = bpf_fill_alu32_lsh_imm_regs,
12395 },
12396 {
12397 "ALU32_RSH_K: registers",
12398 { },
12399 INTERNAL,
12400 { },
12401 { { 0, 1 } },
12402 .fill_helper = bpf_fill_alu32_rsh_imm_regs,
12403 },
12404 {
12405 "ALU32_ARSH_K: registers",
12406 { },
12407 INTERNAL,
12408 { },
12409 { { 0, 1 } },
12410 .fill_helper = bpf_fill_alu32_arsh_imm_regs,
12411 },
12412 {
12413 "ALU32_ADD_K: registers",
12414 { },
12415 INTERNAL,
12416 { },
12417 { { 0, 1 } },
12418 .fill_helper = bpf_fill_alu32_add_imm_regs,
12419 },
12420 {
12421 "ALU32_SUB_K: registers",
12422 { },
12423 INTERNAL,
12424 { },
12425 { { 0, 1 } },
12426 .fill_helper = bpf_fill_alu32_sub_imm_regs,
12427 },
12428 {
12429 "ALU32_MUL_K: registers",
12430 { },
12431 INTERNAL,
12432 { },
12433 { { 0, 1 } },
12434 .fill_helper = bpf_fill_alu32_mul_imm_regs,
12435 },
12436 {
12437 "ALU32_DIV_K: registers",
12438 { },
12439 INTERNAL,
12440 { },
12441 { { 0, 1 } },
12442 .fill_helper = bpf_fill_alu32_div_imm_regs,
12443 },
12444 {
12445 "ALU32_MOD_K: registers",
12446 { },
12447 INTERNAL,
12448 { },
12449 { { 0, 1 } },
12450 .fill_helper = bpf_fill_alu32_mod_imm_regs,
12451 },
12452 /* ALU64 X register combinations */
12453 {
12454 "ALU64_MOV_X: register combinations",
12455 { },
12456 INTERNAL,
12457 { },
12458 { { 0, 1 } },
12459 .fill_helper = bpf_fill_alu64_mov_reg_pairs,
12460 },
12461 {
12462 "ALU64_AND_X: register combinations",
12463 { },
12464 INTERNAL,
12465 { },
12466 { { 0, 1 } },
12467 .fill_helper = bpf_fill_alu64_and_reg_pairs,
12468 },
12469 {
12470 "ALU64_OR_X: register combinations",
12471 { },
12472 INTERNAL,
12473 { },
12474 { { 0, 1 } },
12475 .fill_helper = bpf_fill_alu64_or_reg_pairs,
12476 },
12477 {
12478 "ALU64_XOR_X: register combinations",
12479 { },
12480 INTERNAL,
12481 { },
12482 { { 0, 1 } },
12483 .fill_helper = bpf_fill_alu64_xor_reg_pairs,
12484 },
12485 {
12486 "ALU64_LSH_X: register combinations",
12487 { },
12488 INTERNAL,
12489 { },
12490 { { 0, 1 } },
12491 .fill_helper = bpf_fill_alu64_lsh_reg_pairs,
12492 },
12493 {
12494 "ALU64_RSH_X: register combinations",
12495 { },
12496 INTERNAL,
12497 { },
12498 { { 0, 1 } },
12499 .fill_helper = bpf_fill_alu64_rsh_reg_pairs,
12500 },
12501 {
12502 "ALU64_ARSH_X: register combinations",
12503 { },
12504 INTERNAL,
12505 { },
12506 { { 0, 1 } },
12507 .fill_helper = bpf_fill_alu64_arsh_reg_pairs,
12508 },
12509 {
12510 "ALU64_ADD_X: register combinations",
12511 { },
12512 INTERNAL,
12513 { },
12514 { { 0, 1 } },
12515 .fill_helper = bpf_fill_alu64_add_reg_pairs,
12516 },
12517 {
12518 "ALU64_SUB_X: register combinations",
12519 { },
12520 INTERNAL,
12521 { },
12522 { { 0, 1 } },
12523 .fill_helper = bpf_fill_alu64_sub_reg_pairs,
12524 },
12525 {
12526 "ALU64_MUL_X: register combinations",
12527 { },
12528 INTERNAL,
12529 { },
12530 { { 0, 1 } },
12531 .fill_helper = bpf_fill_alu64_mul_reg_pairs,
12532 },
12533 {
12534 "ALU64_DIV_X: register combinations",
12535 { },
12536 INTERNAL,
12537 { },
12538 { { 0, 1 } },
12539 .fill_helper = bpf_fill_alu64_div_reg_pairs,
12540 },
12541 {
12542 "ALU64_MOD_X: register combinations",
12543 { },
12544 INTERNAL,
12545 { },
12546 { { 0, 1 } },
12547 .fill_helper = bpf_fill_alu64_mod_reg_pairs,
12548 },
12549 /* ALU32 X register combinations */
12550 {
12551 "ALU32_MOV_X: register combinations",
12552 { },
12553 INTERNAL,
12554 { },
12555 { { 0, 1 } },
12556 .fill_helper = bpf_fill_alu32_mov_reg_pairs,
12557 },
12558 {
12559 "ALU32_AND_X: register combinations",
12560 { },
12561 INTERNAL,
12562 { },
12563 { { 0, 1 } },
12564 .fill_helper = bpf_fill_alu32_and_reg_pairs,
12565 },
12566 {
12567 "ALU32_OR_X: register combinations",
12568 { },
12569 INTERNAL,
12570 { },
12571 { { 0, 1 } },
12572 .fill_helper = bpf_fill_alu32_or_reg_pairs,
12573 },
12574 {
12575 "ALU32_XOR_X: register combinations",
12576 { },
12577 INTERNAL,
12578 { },
12579 { { 0, 1 } },
12580 .fill_helper = bpf_fill_alu32_xor_reg_pairs,
12581 },
12582 {
12583 "ALU32_LSH_X: register combinations",
12584 { },
12585 INTERNAL,
12586 { },
12587 { { 0, 1 } },
12588 .fill_helper = bpf_fill_alu32_lsh_reg_pairs,
12589 },
12590 {
12591 "ALU32_RSH_X: register combinations",
12592 { },
12593 INTERNAL,
12594 { },
12595 { { 0, 1 } },
12596 .fill_helper = bpf_fill_alu32_rsh_reg_pairs,
12597 },
12598 {
12599 "ALU32_ARSH_X: register combinations",
12600 { },
12601 INTERNAL,
12602 { },
12603 { { 0, 1 } },
12604 .fill_helper = bpf_fill_alu32_arsh_reg_pairs,
12605 },
12606 {
12607 "ALU32_ADD_X: register combinations",
12608 { },
12609 INTERNAL,
12610 { },
12611 { { 0, 1 } },
12612 .fill_helper = bpf_fill_alu32_add_reg_pairs,
12613 },
12614 {
12615 "ALU32_SUB_X: register combinations",
12616 { },
12617 INTERNAL,
12618 { },
12619 { { 0, 1 } },
12620 .fill_helper = bpf_fill_alu32_sub_reg_pairs,
12621 },
12622 {
12623 "ALU32_MUL_X: register combinations",
12624 { },
12625 INTERNAL,
12626 { },
12627 { { 0, 1 } },
12628 .fill_helper = bpf_fill_alu32_mul_reg_pairs,
12629 },
12630 {
12631 "ALU32_DIV_X: register combinations",
12632 { },
12633 INTERNAL,
12634 { },
12635 { { 0, 1 } },
12636 .fill_helper = bpf_fill_alu32_div_reg_pairs,
12637 },
12638 {
12639 "ALU32_MOD_X register combinations",
12640 { },
12641 INTERNAL,
12642 { },
12643 { { 0, 1 } },
12644 .fill_helper = bpf_fill_alu32_mod_reg_pairs,
12645 },
12646 /* Exhaustive test of ALU64 shift operations */
12647 {
12648 "ALU64_LSH_K: all shift values",
12649 { },
12650 INTERNAL | FLAG_NO_DATA,
12651 { },
12652 { { 0, 1 } },
12653 .fill_helper = bpf_fill_alu64_lsh_imm,
12654 },
12655 {
12656 "ALU64_RSH_K: all shift values",
12657 { },
12658 INTERNAL | FLAG_NO_DATA,
12659 { },
12660 { { 0, 1 } },
12661 .fill_helper = bpf_fill_alu64_rsh_imm,
12662 },
12663 {
12664 "ALU64_ARSH_K: all shift values",
12665 { },
12666 INTERNAL | FLAG_NO_DATA,
12667 { },
12668 { { 0, 1 } },
12669 .fill_helper = bpf_fill_alu64_arsh_imm,
12670 },
12671 {
12672 "ALU64_LSH_X: all shift values",
12673 { },
12674 INTERNAL | FLAG_NO_DATA,
12675 { },
12676 { { 0, 1 } },
12677 .fill_helper = bpf_fill_alu64_lsh_reg,
12678 },
12679 {
12680 "ALU64_RSH_X: all shift values",
12681 { },
12682 INTERNAL | FLAG_NO_DATA,
12683 { },
12684 { { 0, 1 } },
12685 .fill_helper = bpf_fill_alu64_rsh_reg,
12686 },
12687 {
12688 "ALU64_ARSH_X: all shift values",
12689 { },
12690 INTERNAL | FLAG_NO_DATA,
12691 { },
12692 { { 0, 1 } },
12693 .fill_helper = bpf_fill_alu64_arsh_reg,
12694 },
12695 /* Exhaustive test of ALU32 shift operations */
12696 {
12697 "ALU32_LSH_K: all shift values",
12698 { },
12699 INTERNAL | FLAG_NO_DATA,
12700 { },
12701 { { 0, 1 } },
12702 .fill_helper = bpf_fill_alu32_lsh_imm,
12703 },
12704 {
12705 "ALU32_RSH_K: all shift values",
12706 { },
12707 INTERNAL | FLAG_NO_DATA,
12708 { },
12709 { { 0, 1 } },
12710 .fill_helper = bpf_fill_alu32_rsh_imm,
12711 },
12712 {
12713 "ALU32_ARSH_K: all shift values",
12714 { },
12715 INTERNAL | FLAG_NO_DATA,
12716 { },
12717 { { 0, 1 } },
12718 .fill_helper = bpf_fill_alu32_arsh_imm,
12719 },
12720 {
12721 "ALU32_LSH_X: all shift values",
12722 { },
12723 INTERNAL | FLAG_NO_DATA,
12724 { },
12725 { { 0, 1 } },
12726 .fill_helper = bpf_fill_alu32_lsh_reg,
12727 },
12728 {
12729 "ALU32_RSH_X: all shift values",
12730 { },
12731 INTERNAL | FLAG_NO_DATA,
12732 { },
12733 { { 0, 1 } },
12734 .fill_helper = bpf_fill_alu32_rsh_reg,
12735 },
12736 {
12737 "ALU32_ARSH_X: all shift values",
12738 { },
12739 INTERNAL | FLAG_NO_DATA,
12740 { },
12741 { { 0, 1 } },
12742 .fill_helper = bpf_fill_alu32_arsh_reg,
12743 },
12744 /*
12745 * Exhaustive test of ALU64 shift operations when
12746 * source and destination register are the same.
12747 */
12748 {
12749 "ALU64_LSH_X: all shift values with the same register",
12750 { },
12751 INTERNAL | FLAG_NO_DATA,
12752 { },
12753 { { 0, 1 } },
12754 .fill_helper = bpf_fill_alu64_lsh_same_reg,
12755 },
12756 {
12757 "ALU64_RSH_X: all shift values with the same register",
12758 { },
12759 INTERNAL | FLAG_NO_DATA,
12760 { },
12761 { { 0, 1 } },
12762 .fill_helper = bpf_fill_alu64_rsh_same_reg,
12763 },
12764 {
12765 "ALU64_ARSH_X: all shift values with the same register",
12766 { },
12767 INTERNAL | FLAG_NO_DATA,
12768 { },
12769 { { 0, 1 } },
12770 .fill_helper = bpf_fill_alu64_arsh_same_reg,
12771 },
12772 /*
12773 * Exhaustive test of ALU32 shift operations when
12774 * source and destination register are the same.
12775 */
12776 {
12777 "ALU32_LSH_X: all shift values with the same register",
12778 { },
12779 INTERNAL | FLAG_NO_DATA,
12780 { },
12781 { { 0, 1 } },
12782 .fill_helper = bpf_fill_alu32_lsh_same_reg,
12783 },
12784 {
12785 "ALU32_RSH_X: all shift values with the same register",
12786 { },
12787 INTERNAL | FLAG_NO_DATA,
12788 { },
12789 { { 0, 1 } },
12790 .fill_helper = bpf_fill_alu32_rsh_same_reg,
12791 },
12792 {
12793 "ALU32_ARSH_X: all shift values with the same register",
12794 { },
12795 INTERNAL | FLAG_NO_DATA,
12796 { },
12797 { { 0, 1 } },
12798 .fill_helper = bpf_fill_alu32_arsh_same_reg,
12799 },
12800 /* ALU64 immediate magnitudes */
12801 {
12802 "ALU64_MOV_K: all immediate value magnitudes",
12803 { },
12804 INTERNAL | FLAG_NO_DATA,
12805 { },
12806 { { 0, 1 } },
12807 .fill_helper = bpf_fill_alu64_mov_imm,
12808 .nr_testruns = NR_PATTERN_RUNS,
12809 },
12810 {
12811 "ALU64_AND_K: all immediate value magnitudes",
12812 { },
12813 INTERNAL | FLAG_NO_DATA,
12814 { },
12815 { { 0, 1 } },
12816 .fill_helper = bpf_fill_alu64_and_imm,
12817 .nr_testruns = NR_PATTERN_RUNS,
12818 },
12819 {
12820 "ALU64_OR_K: all immediate value magnitudes",
12821 { },
12822 INTERNAL | FLAG_NO_DATA,
12823 { },
12824 { { 0, 1 } },
12825 .fill_helper = bpf_fill_alu64_or_imm,
12826 .nr_testruns = NR_PATTERN_RUNS,
12827 },
12828 {
12829 "ALU64_XOR_K: all immediate value magnitudes",
12830 { },
12831 INTERNAL | FLAG_NO_DATA,
12832 { },
12833 { { 0, 1 } },
12834 .fill_helper = bpf_fill_alu64_xor_imm,
12835 .nr_testruns = NR_PATTERN_RUNS,
12836 },
12837 {
12838 "ALU64_ADD_K: all immediate value magnitudes",
12839 { },
12840 INTERNAL | FLAG_NO_DATA,
12841 { },
12842 { { 0, 1 } },
12843 .fill_helper = bpf_fill_alu64_add_imm,
12844 .nr_testruns = NR_PATTERN_RUNS,
12845 },
12846 {
12847 "ALU64_SUB_K: all immediate value magnitudes",
12848 { },
12849 INTERNAL | FLAG_NO_DATA,
12850 { },
12851 { { 0, 1 } },
12852 .fill_helper = bpf_fill_alu64_sub_imm,
12853 .nr_testruns = NR_PATTERN_RUNS,
12854 },
12855 {
12856 "ALU64_MUL_K: all immediate value magnitudes",
12857 { },
12858 INTERNAL | FLAG_NO_DATA,
12859 { },
12860 { { 0, 1 } },
12861 .fill_helper = bpf_fill_alu64_mul_imm,
12862 .nr_testruns = NR_PATTERN_RUNS,
12863 },
12864 {
12865 "ALU64_DIV_K: all immediate value magnitudes",
12866 { },
12867 INTERNAL | FLAG_NO_DATA,
12868 { },
12869 { { 0, 1 } },
12870 .fill_helper = bpf_fill_alu64_div_imm,
12871 .nr_testruns = NR_PATTERN_RUNS,
12872 },
12873 {
12874 "ALU64_MOD_K: all immediate value magnitudes",
12875 { },
12876 INTERNAL | FLAG_NO_DATA,
12877 { },
12878 { { 0, 1 } },
12879 .fill_helper = bpf_fill_alu64_mod_imm,
12880 .nr_testruns = NR_PATTERN_RUNS,
12881 },
12882 /* ALU32 immediate magnitudes */
12883 {
12884 "ALU32_MOV_K: all immediate value magnitudes",
12885 { },
12886 INTERNAL | FLAG_NO_DATA,
12887 { },
12888 { { 0, 1 } },
12889 .fill_helper = bpf_fill_alu32_mov_imm,
12890 .nr_testruns = NR_PATTERN_RUNS,
12891 },
12892 {
12893 "ALU32_AND_K: all immediate value magnitudes",
12894 { },
12895 INTERNAL | FLAG_NO_DATA,
12896 { },
12897 { { 0, 1 } },
12898 .fill_helper = bpf_fill_alu32_and_imm,
12899 .nr_testruns = NR_PATTERN_RUNS,
12900 },
12901 {
12902 "ALU32_OR_K: all immediate value magnitudes",
12903 { },
12904 INTERNAL | FLAG_NO_DATA,
12905 { },
12906 { { 0, 1 } },
12907 .fill_helper = bpf_fill_alu32_or_imm,
12908 .nr_testruns = NR_PATTERN_RUNS,
12909 },
12910 {
12911 "ALU32_XOR_K: all immediate value magnitudes",
12912 { },
12913 INTERNAL | FLAG_NO_DATA,
12914 { },
12915 { { 0, 1 } },
12916 .fill_helper = bpf_fill_alu32_xor_imm,
12917 .nr_testruns = NR_PATTERN_RUNS,
12918 },
12919 {
12920 "ALU32_ADD_K: all immediate value magnitudes",
12921 { },
12922 INTERNAL | FLAG_NO_DATA,
12923 { },
12924 { { 0, 1 } },
12925 .fill_helper = bpf_fill_alu32_add_imm,
12926 .nr_testruns = NR_PATTERN_RUNS,
12927 },
12928 {
12929 "ALU32_SUB_K: all immediate value magnitudes",
12930 { },
12931 INTERNAL | FLAG_NO_DATA,
12932 { },
12933 { { 0, 1 } },
12934 .fill_helper = bpf_fill_alu32_sub_imm,
12935 .nr_testruns = NR_PATTERN_RUNS,
12936 },
12937 {
12938 "ALU32_MUL_K: all immediate value magnitudes",
12939 { },
12940 INTERNAL | FLAG_NO_DATA,
12941 { },
12942 { { 0, 1 } },
12943 .fill_helper = bpf_fill_alu32_mul_imm,
12944 .nr_testruns = NR_PATTERN_RUNS,
12945 },
12946 {
12947 "ALU32_DIV_K: all immediate value magnitudes",
12948 { },
12949 INTERNAL | FLAG_NO_DATA,
12950 { },
12951 { { 0, 1 } },
12952 .fill_helper = bpf_fill_alu32_div_imm,
12953 .nr_testruns = NR_PATTERN_RUNS,
12954 },
12955 {
12956 "ALU32_MOD_K: all immediate value magnitudes",
12957 { },
12958 INTERNAL | FLAG_NO_DATA,
12959 { },
12960 { { 0, 1 } },
12961 .fill_helper = bpf_fill_alu32_mod_imm,
12962 .nr_testruns = NR_PATTERN_RUNS,
12963 },
12964 /* ALU64 register magnitudes */
12965 {
12966 "ALU64_MOV_X: all register value magnitudes",
12967 { },
12968 INTERNAL | FLAG_NO_DATA,
12969 { },
12970 { { 0, 1 } },
12971 .fill_helper = bpf_fill_alu64_mov_reg,
12972 .nr_testruns = NR_PATTERN_RUNS,
12973 },
12974 {
12975 "ALU64_AND_X: all register value magnitudes",
12976 { },
12977 INTERNAL | FLAG_NO_DATA,
12978 { },
12979 { { 0, 1 } },
12980 .fill_helper = bpf_fill_alu64_and_reg,
12981 .nr_testruns = NR_PATTERN_RUNS,
12982 },
12983 {
12984 "ALU64_OR_X: all register value magnitudes",
12985 { },
12986 INTERNAL | FLAG_NO_DATA,
12987 { },
12988 { { 0, 1 } },
12989 .fill_helper = bpf_fill_alu64_or_reg,
12990 .nr_testruns = NR_PATTERN_RUNS,
12991 },
12992 {
12993 "ALU64_XOR_X: all register value magnitudes",
12994 { },
12995 INTERNAL | FLAG_NO_DATA,
12996 { },
12997 { { 0, 1 } },
12998 .fill_helper = bpf_fill_alu64_xor_reg,
12999 .nr_testruns = NR_PATTERN_RUNS,
13000 },
13001 {
13002 "ALU64_ADD_X: all register value magnitudes",
13003 { },
13004 INTERNAL | FLAG_NO_DATA,
13005 { },
13006 { { 0, 1 } },
13007 .fill_helper = bpf_fill_alu64_add_reg,
13008 .nr_testruns = NR_PATTERN_RUNS,
13009 },
13010 {
13011 "ALU64_SUB_X: all register value magnitudes",
13012 { },
13013 INTERNAL | FLAG_NO_DATA,
13014 { },
13015 { { 0, 1 } },
13016 .fill_helper = bpf_fill_alu64_sub_reg,
13017 .nr_testruns = NR_PATTERN_RUNS,
13018 },
13019 {
13020 "ALU64_MUL_X: all register value magnitudes",
13021 { },
13022 INTERNAL | FLAG_NO_DATA,
13023 { },
13024 { { 0, 1 } },
13025 .fill_helper = bpf_fill_alu64_mul_reg,
13026 .nr_testruns = NR_PATTERN_RUNS,
13027 },
13028 {
13029 "ALU64_DIV_X: all register value magnitudes",
13030 { },
13031 INTERNAL | FLAG_NO_DATA,
13032 { },
13033 { { 0, 1 } },
13034 .fill_helper = bpf_fill_alu64_div_reg,
13035 .nr_testruns = NR_PATTERN_RUNS,
13036 },
13037 {
13038 "ALU64_MOD_X: all register value magnitudes",
13039 { },
13040 INTERNAL | FLAG_NO_DATA,
13041 { },
13042 { { 0, 1 } },
13043 .fill_helper = bpf_fill_alu64_mod_reg,
13044 .nr_testruns = NR_PATTERN_RUNS,
13045 },
13046 /* ALU32 register magnitudes */
13047 {
13048 "ALU32_MOV_X: all register value magnitudes",
13049 { },
13050 INTERNAL | FLAG_NO_DATA,
13051 { },
13052 { { 0, 1 } },
13053 .fill_helper = bpf_fill_alu32_mov_reg,
13054 .nr_testruns = NR_PATTERN_RUNS,
13055 },
13056 {
13057 "ALU32_AND_X: all register value magnitudes",
13058 { },
13059 INTERNAL | FLAG_NO_DATA,
13060 { },
13061 { { 0, 1 } },
13062 .fill_helper = bpf_fill_alu32_and_reg,
13063 .nr_testruns = NR_PATTERN_RUNS,
13064 },
13065 {
13066 "ALU32_OR_X: all register value magnitudes",
13067 { },
13068 INTERNAL | FLAG_NO_DATA,
13069 { },
13070 { { 0, 1 } },
13071 .fill_helper = bpf_fill_alu32_or_reg,
13072 .nr_testruns = NR_PATTERN_RUNS,
13073 },
13074 {
13075 "ALU32_XOR_X: all register value magnitudes",
13076 { },
13077 INTERNAL | FLAG_NO_DATA,
13078 { },
13079 { { 0, 1 } },
13080 .fill_helper = bpf_fill_alu32_xor_reg,
13081 .nr_testruns = NR_PATTERN_RUNS,
13082 },
13083 {
13084 "ALU32_ADD_X: all register value magnitudes",
13085 { },
13086 INTERNAL | FLAG_NO_DATA,
13087 { },
13088 { { 0, 1 } },
13089 .fill_helper = bpf_fill_alu32_add_reg,
13090 .nr_testruns = NR_PATTERN_RUNS,
13091 },
13092 {
13093 "ALU32_SUB_X: all register value magnitudes",
13094 { },
13095 INTERNAL | FLAG_NO_DATA,
13096 { },
13097 { { 0, 1 } },
13098 .fill_helper = bpf_fill_alu32_sub_reg,
13099 .nr_testruns = NR_PATTERN_RUNS,
13100 },
13101 {
13102 "ALU32_MUL_X: all register value magnitudes",
13103 { },
13104 INTERNAL | FLAG_NO_DATA,
13105 { },
13106 { { 0, 1 } },
13107 .fill_helper = bpf_fill_alu32_mul_reg,
13108 .nr_testruns = NR_PATTERN_RUNS,
13109 },
13110 {
13111 "ALU32_DIV_X: all register value magnitudes",
13112 { },
13113 INTERNAL | FLAG_NO_DATA,
13114 { },
13115 { { 0, 1 } },
13116 .fill_helper = bpf_fill_alu32_div_reg,
13117 .nr_testruns = NR_PATTERN_RUNS,
13118 },
13119 {
13120 "ALU32_MOD_X: all register value magnitudes",
13121 { },
13122 INTERNAL | FLAG_NO_DATA,
13123 { },
13124 { { 0, 1 } },
13125 .fill_helper = bpf_fill_alu32_mod_reg,
13126 .nr_testruns = NR_PATTERN_RUNS,
13127 },
13128 /* LD_IMM64 immediate magnitudes and byte patterns */
13129 {
13130 "LD_IMM64: all immediate value magnitudes",
13131 { },
13132 INTERNAL | FLAG_NO_DATA,
13133 { },
13134 { { 0, 1 } },
13135 .fill_helper = bpf_fill_ld_imm64_magn,
13136 },
13137 {
13138 "LD_IMM64: checker byte patterns",
13139 { },
13140 INTERNAL | FLAG_NO_DATA,
13141 { },
13142 { { 0, 1 } },
13143 .fill_helper = bpf_fill_ld_imm64_checker,
13144 },
13145 {
13146 "LD_IMM64: random positive and zero byte patterns",
13147 { },
13148 INTERNAL | FLAG_NO_DATA,
13149 { },
13150 { { 0, 1 } },
13151 .fill_helper = bpf_fill_ld_imm64_pos_zero,
13152 },
13153 {
13154 "LD_IMM64: random negative and zero byte patterns",
13155 { },
13156 INTERNAL | FLAG_NO_DATA,
13157 { },
13158 { { 0, 1 } },
13159 .fill_helper = bpf_fill_ld_imm64_neg_zero,
13160 },
13161 {
13162 "LD_IMM64: random positive and negative byte patterns",
13163 { },
13164 INTERNAL | FLAG_NO_DATA,
13165 { },
13166 { { 0, 1 } },
13167 .fill_helper = bpf_fill_ld_imm64_pos_neg,
13168 },
13169 /* 64-bit ATOMIC register combinations */
13170 {
13171 "ATOMIC_DW_ADD: register combinations",
13172 { },
13173 INTERNAL,
13174 { },
13175 { { 0, 1 } },
13176 .fill_helper = bpf_fill_atomic64_add_reg_pairs,
13177 .stack_depth = 8,
13178 },
13179 {
13180 "ATOMIC_DW_AND: register combinations",
13181 { },
13182 INTERNAL,
13183 { },
13184 { { 0, 1 } },
13185 .fill_helper = bpf_fill_atomic64_and_reg_pairs,
13186 .stack_depth = 8,
13187 },
13188 {
13189 "ATOMIC_DW_OR: register combinations",
13190 { },
13191 INTERNAL,
13192 { },
13193 { { 0, 1 } },
13194 .fill_helper = bpf_fill_atomic64_or_reg_pairs,
13195 .stack_depth = 8,
13196 },
13197 {
13198 "ATOMIC_DW_XOR: register combinations",
13199 { },
13200 INTERNAL,
13201 { },
13202 { { 0, 1 } },
13203 .fill_helper = bpf_fill_atomic64_xor_reg_pairs,
13204 .stack_depth = 8,
13205 },
13206 {
13207 "ATOMIC_DW_ADD_FETCH: register combinations",
13208 { },
13209 INTERNAL,
13210 { },
13211 { { 0, 1 } },
13212 .fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
13213 .stack_depth = 8,
13214 },
13215 {
13216 "ATOMIC_DW_AND_FETCH: register combinations",
13217 { },
13218 INTERNAL,
13219 { },
13220 { { 0, 1 } },
13221 .fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
13222 .stack_depth = 8,
13223 },
13224 {
13225 "ATOMIC_DW_OR_FETCH: register combinations",
13226 { },
13227 INTERNAL,
13228 { },
13229 { { 0, 1 } },
13230 .fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
13231 .stack_depth = 8,
13232 },
13233 {
13234 "ATOMIC_DW_XOR_FETCH: register combinations",
13235 { },
13236 INTERNAL,
13237 { },
13238 { { 0, 1 } },
13239 .fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
13240 .stack_depth = 8,
13241 },
13242 {
13243 "ATOMIC_DW_XCHG: register combinations",
13244 { },
13245 INTERNAL,
13246 { },
13247 { { 0, 1 } },
13248 .fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
13249 .stack_depth = 8,
13250 },
13251 {
13252 "ATOMIC_DW_CMPXCHG: register combinations",
13253 { },
13254 INTERNAL,
13255 { },
13256 { { 0, 1 } },
13257 .fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
13258 .stack_depth = 8,
13259 },
13260 /* 32-bit ATOMIC register combinations */
13261 {
13262 "ATOMIC_W_ADD: register combinations",
13263 { },
13264 INTERNAL,
13265 { },
13266 { { 0, 1 } },
13267 .fill_helper = bpf_fill_atomic32_add_reg_pairs,
13268 .stack_depth = 8,
13269 },
13270 {
13271 "ATOMIC_W_AND: register combinations",
13272 { },
13273 INTERNAL,
13274 { },
13275 { { 0, 1 } },
13276 .fill_helper = bpf_fill_atomic32_and_reg_pairs,
13277 .stack_depth = 8,
13278 },
13279 {
13280 "ATOMIC_W_OR: register combinations",
13281 { },
13282 INTERNAL,
13283 { },
13284 { { 0, 1 } },
13285 .fill_helper = bpf_fill_atomic32_or_reg_pairs,
13286 .stack_depth = 8,
13287 },
13288 {
13289 "ATOMIC_W_XOR: register combinations",
13290 { },
13291 INTERNAL,
13292 { },
13293 { { 0, 1 } },
13294 .fill_helper = bpf_fill_atomic32_xor_reg_pairs,
13295 .stack_depth = 8,
13296 },
13297 {
13298 "ATOMIC_W_ADD_FETCH: register combinations",
13299 { },
13300 INTERNAL,
13301 { },
13302 { { 0, 1 } },
13303 .fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
13304 .stack_depth = 8,
13305 },
13306 {
13307 "ATOMIC_W_AND_FETCH: register combinations",
13308 { },
13309 INTERNAL,
13310 { },
13311 { { 0, 1 } },
13312 .fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
13313 .stack_depth = 8,
13314 },
13315 {
13316 "ATOMIC_W_OR_FETCH: register combinations",
13317 { },
13318 INTERNAL,
13319 { },
13320 { { 0, 1 } },
13321 .fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
13322 .stack_depth = 8,
13323 },
13324 {
13325 "ATOMIC_W_XOR_FETCH: register combinations",
13326 { },
13327 INTERNAL,
13328 { },
13329 { { 0, 1 } },
13330 .fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
13331 .stack_depth = 8,
13332 },
13333 {
13334 "ATOMIC_W_XCHG: register combinations",
13335 { },
13336 INTERNAL,
13337 { },
13338 { { 0, 1 } },
13339 .fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
13340 .stack_depth = 8,
13341 },
13342 {
13343 "ATOMIC_W_CMPXCHG: register combinations",
13344 { },
13345 INTERNAL,
13346 { },
13347 { { 0, 1 } },
13348 .fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
13349 .stack_depth = 8,
13350 },
13351 /* 64-bit ATOMIC magnitudes */
13352 {
13353 "ATOMIC_DW_ADD: all operand magnitudes",
13354 { },
13355 INTERNAL | FLAG_NO_DATA,
13356 { },
13357 { { 0, 1 } },
13358 .fill_helper = bpf_fill_atomic64_add,
13359 .stack_depth = 8,
13360 .nr_testruns = NR_PATTERN_RUNS,
13361 },
13362 {
13363 "ATOMIC_DW_AND: all operand magnitudes",
13364 { },
13365 INTERNAL | FLAG_NO_DATA,
13366 { },
13367 { { 0, 1 } },
13368 .fill_helper = bpf_fill_atomic64_and,
13369 .stack_depth = 8,
13370 .nr_testruns = NR_PATTERN_RUNS,
13371 },
13372 {
13373 "ATOMIC_DW_OR: all operand magnitudes",
13374 { },
13375 INTERNAL | FLAG_NO_DATA,
13376 { },
13377 { { 0, 1 } },
13378 .fill_helper = bpf_fill_atomic64_or,
13379 .stack_depth = 8,
13380 .nr_testruns = NR_PATTERN_RUNS,
13381 },
13382 {
13383 "ATOMIC_DW_XOR: all operand magnitudes",
13384 { },
13385 INTERNAL | FLAG_NO_DATA,
13386 { },
13387 { { 0, 1 } },
13388 .fill_helper = bpf_fill_atomic64_xor,
13389 .stack_depth = 8,
13390 .nr_testruns = NR_PATTERN_RUNS,
13391 },
13392 {
13393 "ATOMIC_DW_ADD_FETCH: all operand magnitudes",
13394 { },
13395 INTERNAL | FLAG_NO_DATA,
13396 { },
13397 { { 0, 1 } },
13398 .fill_helper = bpf_fill_atomic64_add_fetch,
13399 .stack_depth = 8,
13400 .nr_testruns = NR_PATTERN_RUNS,
13401 },
13402 {
13403 "ATOMIC_DW_AND_FETCH: all operand magnitudes",
13404 { },
13405 INTERNAL | FLAG_NO_DATA,
13406 { },
13407 { { 0, 1 } },
13408 .fill_helper = bpf_fill_atomic64_and_fetch,
13409 .stack_depth = 8,
13410 .nr_testruns = NR_PATTERN_RUNS,
13411 },
13412 {
13413 "ATOMIC_DW_OR_FETCH: all operand magnitudes",
13414 { },
13415 INTERNAL | FLAG_NO_DATA,
13416 { },
13417 { { 0, 1 } },
13418 .fill_helper = bpf_fill_atomic64_or_fetch,
13419 .stack_depth = 8,
13420 .nr_testruns = NR_PATTERN_RUNS,
13421 },
13422 {
13423 "ATOMIC_DW_XOR_FETCH: all operand magnitudes",
13424 { },
13425 INTERNAL | FLAG_NO_DATA,
13426 { },
13427 { { 0, 1 } },
13428 .fill_helper = bpf_fill_atomic64_xor_fetch,
13429 .stack_depth = 8,
13430 .nr_testruns = NR_PATTERN_RUNS,
13431 },
13432 {
13433 "ATOMIC_DW_XCHG: all operand magnitudes",
13434 { },
13435 INTERNAL | FLAG_NO_DATA,
13436 { },
13437 { { 0, 1 } },
13438 .fill_helper = bpf_fill_atomic64_xchg,
13439 .stack_depth = 8,
13440 .nr_testruns = NR_PATTERN_RUNS,
13441 },
13442 {
13443 "ATOMIC_DW_CMPXCHG: all operand magnitudes",
13444 { },
13445 INTERNAL | FLAG_NO_DATA,
13446 { },
13447 { { 0, 1 } },
13448 .fill_helper = bpf_fill_cmpxchg64,
13449 .stack_depth = 8,
13450 .nr_testruns = NR_PATTERN_RUNS,
13451 },
13452 /* 32-bit ATOMIC magnitudes */
13453 {
13454 "ATOMIC_W_ADD: all operand magnitudes",
13455 { },
13456 INTERNAL | FLAG_NO_DATA,
13457 { },
13458 { { 0, 1 } },
13459 .fill_helper = bpf_fill_atomic32_add,
13460 .stack_depth = 8,
13461 .nr_testruns = NR_PATTERN_RUNS,
13462 },
13463 {
13464 "ATOMIC_W_AND: all operand magnitudes",
13465 { },
13466 INTERNAL | FLAG_NO_DATA,
13467 { },
13468 { { 0, 1 } },
13469 .fill_helper = bpf_fill_atomic32_and,
13470 .stack_depth = 8,
13471 .nr_testruns = NR_PATTERN_RUNS,
13472 },
13473 {
13474 "ATOMIC_W_OR: all operand magnitudes",
13475 { },
13476 INTERNAL | FLAG_NO_DATA,
13477 { },
13478 { { 0, 1 } },
13479 .fill_helper = bpf_fill_atomic32_or,
13480 .stack_depth = 8,
13481 .nr_testruns = NR_PATTERN_RUNS,
13482 },
13483 {
13484 "ATOMIC_W_XOR: all operand magnitudes",
13485 { },
13486 INTERNAL | FLAG_NO_DATA,
13487 { },
13488 { { 0, 1 } },
13489 .fill_helper = bpf_fill_atomic32_xor,
13490 .stack_depth = 8,
13491 .nr_testruns = NR_PATTERN_RUNS,
13492 },
13493 {
13494 "ATOMIC_W_ADD_FETCH: all operand magnitudes",
13495 { },
13496 INTERNAL | FLAG_NO_DATA,
13497 { },
13498 { { 0, 1 } },
13499 .fill_helper = bpf_fill_atomic32_add_fetch,
13500 .stack_depth = 8,
13501 .nr_testruns = NR_PATTERN_RUNS,
13502 },
13503 {
13504 "ATOMIC_W_AND_FETCH: all operand magnitudes",
13505 { },
13506 INTERNAL | FLAG_NO_DATA,
13507 { },
13508 { { 0, 1 } },
13509 .fill_helper = bpf_fill_atomic32_and_fetch,
13510 .stack_depth = 8,
13511 .nr_testruns = NR_PATTERN_RUNS,
13512 },
13513 {
13514 "ATOMIC_W_OR_FETCH: all operand magnitudes",
13515 { },
13516 INTERNAL | FLAG_NO_DATA,
13517 { },
13518 { { 0, 1 } },
13519 .fill_helper = bpf_fill_atomic32_or_fetch,
13520 .stack_depth = 8,
13521 .nr_testruns = NR_PATTERN_RUNS,
13522 },
13523 {
13524 "ATOMIC_W_XOR_FETCH: all operand magnitudes",
13525 { },
13526 INTERNAL | FLAG_NO_DATA,
13527 { },
13528 { { 0, 1 } },
13529 .fill_helper = bpf_fill_atomic32_xor_fetch,
13530 .stack_depth = 8,
13531 .nr_testruns = NR_PATTERN_RUNS,
13532 },
13533 {
13534 "ATOMIC_W_XCHG: all operand magnitudes",
13535 { },
13536 INTERNAL | FLAG_NO_DATA,
13537 { },
13538 { { 0, 1 } },
13539 .fill_helper = bpf_fill_atomic32_xchg,
13540 .stack_depth = 8,
13541 .nr_testruns = NR_PATTERN_RUNS,
13542 },
13543 {
13544 "ATOMIC_W_CMPXCHG: all operand magnitudes",
13545 { },
13546 INTERNAL | FLAG_NO_DATA,
13547 { },
13548 { { 0, 1 } },
13549 .fill_helper = bpf_fill_cmpxchg32,
13550 .stack_depth = 8,
13551 .nr_testruns = NR_PATTERN_RUNS,
13552 },
13553 /* JMP immediate magnitudes */
13554 {
13555 "JMP_JSET_K: all immediate value magnitudes",
13556 { },
13557 INTERNAL | FLAG_NO_DATA,
13558 { },
13559 { { 0, 1 } },
13560 .fill_helper = bpf_fill_jmp_jset_imm,
13561 .nr_testruns = NR_PATTERN_RUNS,
13562 },
13563 {
13564 "JMP_JEQ_K: all immediate value magnitudes",
13565 { },
13566 INTERNAL | FLAG_NO_DATA,
13567 { },
13568 { { 0, 1 } },
13569 .fill_helper = bpf_fill_jmp_jeq_imm,
13570 .nr_testruns = NR_PATTERN_RUNS,
13571 },
13572 {
13573 "JMP_JNE_K: all immediate value magnitudes",
13574 { },
13575 INTERNAL | FLAG_NO_DATA,
13576 { },
13577 { { 0, 1 } },
13578 .fill_helper = bpf_fill_jmp_jne_imm,
13579 .nr_testruns = NR_PATTERN_RUNS,
13580 },
13581 {
13582 "JMP_JGT_K: all immediate value magnitudes",
13583 { },
13584 INTERNAL | FLAG_NO_DATA,
13585 { },
13586 { { 0, 1 } },
13587 .fill_helper = bpf_fill_jmp_jgt_imm,
13588 .nr_testruns = NR_PATTERN_RUNS,
13589 },
13590 {
13591 "JMP_JGE_K: all immediate value magnitudes",
13592 { },
13593 INTERNAL | FLAG_NO_DATA,
13594 { },
13595 { { 0, 1 } },
13596 .fill_helper = bpf_fill_jmp_jge_imm,
13597 .nr_testruns = NR_PATTERN_RUNS,
13598 },
13599 {
13600 "JMP_JLT_K: all immediate value magnitudes",
13601 { },
13602 INTERNAL | FLAG_NO_DATA,
13603 { },
13604 { { 0, 1 } },
13605 .fill_helper = bpf_fill_jmp_jlt_imm,
13606 .nr_testruns = NR_PATTERN_RUNS,
13607 },
13608 {
13609 "JMP_JLE_K: all immediate value magnitudes",
13610 { },
13611 INTERNAL | FLAG_NO_DATA,
13612 { },
13613 { { 0, 1 } },
13614 .fill_helper = bpf_fill_jmp_jle_imm,
13615 .nr_testruns = NR_PATTERN_RUNS,
13616 },
13617 {
13618 "JMP_JSGT_K: all immediate value magnitudes",
13619 { },
13620 INTERNAL | FLAG_NO_DATA,
13621 { },
13622 { { 0, 1 } },
13623 .fill_helper = bpf_fill_jmp_jsgt_imm,
13624 .nr_testruns = NR_PATTERN_RUNS,
13625 },
13626 {
13627 "JMP_JSGE_K: all immediate value magnitudes",
13628 { },
13629 INTERNAL | FLAG_NO_DATA,
13630 { },
13631 { { 0, 1 } },
13632 .fill_helper = bpf_fill_jmp_jsge_imm,
13633 .nr_testruns = NR_PATTERN_RUNS,
13634 },
13635 {
13636 "JMP_JSLT_K: all immediate value magnitudes",
13637 { },
13638 INTERNAL | FLAG_NO_DATA,
13639 { },
13640 { { 0, 1 } },
13641 .fill_helper = bpf_fill_jmp_jslt_imm,
13642 .nr_testruns = NR_PATTERN_RUNS,
13643 },
13644 {
13645 "JMP_JSLE_K: all immediate value magnitudes",
13646 { },
13647 INTERNAL | FLAG_NO_DATA,
13648 { },
13649 { { 0, 1 } },
13650 .fill_helper = bpf_fill_jmp_jsle_imm,
13651 .nr_testruns = NR_PATTERN_RUNS,
13652 },
13653 /* JMP register magnitudes */
13654 {
13655 "JMP_JSET_X: all register value magnitudes",
13656 { },
13657 INTERNAL | FLAG_NO_DATA,
13658 { },
13659 { { 0, 1 } },
13660 .fill_helper = bpf_fill_jmp_jset_reg,
13661 .nr_testruns = NR_PATTERN_RUNS,
13662 },
13663 {
13664 "JMP_JEQ_X: all register value magnitudes",
13665 { },
13666 INTERNAL | FLAG_NO_DATA,
13667 { },
13668 { { 0, 1 } },
13669 .fill_helper = bpf_fill_jmp_jeq_reg,
13670 .nr_testruns = NR_PATTERN_RUNS,
13671 },
13672 {
13673 "JMP_JNE_X: all register value magnitudes",
13674 { },
13675 INTERNAL | FLAG_NO_DATA,
13676 { },
13677 { { 0, 1 } },
13678 .fill_helper = bpf_fill_jmp_jne_reg,
13679 .nr_testruns = NR_PATTERN_RUNS,
13680 },
13681 {
13682 "JMP_JGT_X: all register value magnitudes",
13683 { },
13684 INTERNAL | FLAG_NO_DATA,
13685 { },
13686 { { 0, 1 } },
13687 .fill_helper = bpf_fill_jmp_jgt_reg,
13688 .nr_testruns = NR_PATTERN_RUNS,
13689 },
13690 {
13691 "JMP_JGE_X: all register value magnitudes",
13692 { },
13693 INTERNAL | FLAG_NO_DATA,
13694 { },
13695 { { 0, 1 } },
13696 .fill_helper = bpf_fill_jmp_jge_reg,
13697 .nr_testruns = NR_PATTERN_RUNS,
13698 },
13699 {
13700 "JMP_JLT_X: all register value magnitudes",
13701 { },
13702 INTERNAL | FLAG_NO_DATA,
13703 { },
13704 { { 0, 1 } },
13705 .fill_helper = bpf_fill_jmp_jlt_reg,
13706 .nr_testruns = NR_PATTERN_RUNS,
13707 },
13708 {
13709 "JMP_JLE_X: all register value magnitudes",
13710 { },
13711 INTERNAL | FLAG_NO_DATA,
13712 { },
13713 { { 0, 1 } },
13714 .fill_helper = bpf_fill_jmp_jle_reg,
13715 .nr_testruns = NR_PATTERN_RUNS,
13716 },
13717 {
13718 "JMP_JSGT_X: all register value magnitudes",
13719 { },
13720 INTERNAL | FLAG_NO_DATA,
13721 { },
13722 { { 0, 1 } },
13723 .fill_helper = bpf_fill_jmp_jsgt_reg,
13724 .nr_testruns = NR_PATTERN_RUNS,
13725 },
13726 {
13727 "JMP_JSGE_X: all register value magnitudes",
13728 { },
13729 INTERNAL | FLAG_NO_DATA,
13730 { },
13731 { { 0, 1 } },
13732 .fill_helper = bpf_fill_jmp_jsge_reg,
13733 .nr_testruns = NR_PATTERN_RUNS,
13734 },
13735 {
13736 "JMP_JSLT_X: all register value magnitudes",
13737 { },
13738 INTERNAL | FLAG_NO_DATA,
13739 { },
13740 { { 0, 1 } },
13741 .fill_helper = bpf_fill_jmp_jslt_reg,
13742 .nr_testruns = NR_PATTERN_RUNS,
13743 },
13744 {
13745 "JMP_JSLE_X: all register value magnitudes",
13746 { },
13747 INTERNAL | FLAG_NO_DATA,
13748 { },
13749 { { 0, 1 } },
13750 .fill_helper = bpf_fill_jmp_jsle_reg,
13751 .nr_testruns = NR_PATTERN_RUNS,
13752 },
13753 /* JMP32 immediate magnitudes */
13754 {
13755 "JMP32_JSET_K: all immediate value magnitudes",
13756 { },
13757 INTERNAL | FLAG_NO_DATA,
13758 { },
13759 { { 0, 1 } },
13760 .fill_helper = bpf_fill_jmp32_jset_imm,
13761 .nr_testruns = NR_PATTERN_RUNS,
13762 },
13763 {
13764 "JMP32_JEQ_K: all immediate value magnitudes",
13765 { },
13766 INTERNAL | FLAG_NO_DATA,
13767 { },
13768 { { 0, 1 } },
13769 .fill_helper = bpf_fill_jmp32_jeq_imm,
13770 .nr_testruns = NR_PATTERN_RUNS,
13771 },
13772 {
13773 "JMP32_JNE_K: all immediate value magnitudes",
13774 { },
13775 INTERNAL | FLAG_NO_DATA,
13776 { },
13777 { { 0, 1 } },
13778 .fill_helper = bpf_fill_jmp32_jne_imm,
13779 .nr_testruns = NR_PATTERN_RUNS,
13780 },
13781 {
13782 "JMP32_JGT_K: all immediate value magnitudes",
13783 { },
13784 INTERNAL | FLAG_NO_DATA,
13785 { },
13786 { { 0, 1 } },
13787 .fill_helper = bpf_fill_jmp32_jgt_imm,
13788 .nr_testruns = NR_PATTERN_RUNS,
13789 },
13790 {
13791 "JMP32_JGE_K: all immediate value magnitudes",
13792 { },
13793 INTERNAL | FLAG_NO_DATA,
13794 { },
13795 { { 0, 1 } },
13796 .fill_helper = bpf_fill_jmp32_jge_imm,
13797 .nr_testruns = NR_PATTERN_RUNS,
13798 },
13799 {
13800 "JMP32_JLT_K: all immediate value magnitudes",
13801 { },
13802 INTERNAL | FLAG_NO_DATA,
13803 { },
13804 { { 0, 1 } },
13805 .fill_helper = bpf_fill_jmp32_jlt_imm,
13806 .nr_testruns = NR_PATTERN_RUNS,
13807 },
13808 {
13809 "JMP32_JLE_K: all immediate value magnitudes",
13810 { },
13811 INTERNAL | FLAG_NO_DATA,
13812 { },
13813 { { 0, 1 } },
13814 .fill_helper = bpf_fill_jmp32_jle_imm,
13815 .nr_testruns = NR_PATTERN_RUNS,
13816 },
13817 {
13818 "JMP32_JSGT_K: all immediate value magnitudes",
13819 { },
13820 INTERNAL | FLAG_NO_DATA,
13821 { },
13822 { { 0, 1 } },
13823 .fill_helper = bpf_fill_jmp32_jsgt_imm,
13824 .nr_testruns = NR_PATTERN_RUNS,
13825 },
13826 {
13827 "JMP32_JSGE_K: all immediate value magnitudes",
13828 { },
13829 INTERNAL | FLAG_NO_DATA,
13830 { },
13831 { { 0, 1 } },
13832 .fill_helper = bpf_fill_jmp32_jsge_imm,
13833 .nr_testruns = NR_PATTERN_RUNS,
13834 },
13835 {
13836 "JMP32_JSLT_K: all immediate value magnitudes",
13837 { },
13838 INTERNAL | FLAG_NO_DATA,
13839 { },
13840 { { 0, 1 } },
13841 .fill_helper = bpf_fill_jmp32_jslt_imm,
13842 .nr_testruns = NR_PATTERN_RUNS,
13843 },
13844 {
13845 "JMP32_JSLE_K: all immediate value magnitudes",
13846 { },
13847 INTERNAL | FLAG_NO_DATA,
13848 { },
13849 { { 0, 1 } },
13850 .fill_helper = bpf_fill_jmp32_jsle_imm,
13851 .nr_testruns = NR_PATTERN_RUNS,
13852 },
13853 /* JMP32 register magnitudes */
13854 {
13855 "JMP32_JSET_X: all register value magnitudes",
13856 { },
13857 INTERNAL | FLAG_NO_DATA,
13858 { },
13859 { { 0, 1 } },
13860 .fill_helper = bpf_fill_jmp32_jset_reg,
13861 .nr_testruns = NR_PATTERN_RUNS,
13862 },
13863 {
13864 "JMP32_JEQ_X: all register value magnitudes",
13865 { },
13866 INTERNAL | FLAG_NO_DATA,
13867 { },
13868 { { 0, 1 } },
13869 .fill_helper = bpf_fill_jmp32_jeq_reg,
13870 .nr_testruns = NR_PATTERN_RUNS,
13871 },
13872 {
13873 "JMP32_JNE_X: all register value magnitudes",
13874 { },
13875 INTERNAL | FLAG_NO_DATA,
13876 { },
13877 { { 0, 1 } },
13878 .fill_helper = bpf_fill_jmp32_jne_reg,
13879 .nr_testruns = NR_PATTERN_RUNS,
13880 },
13881 {
13882 "JMP32_JGT_X: all register value magnitudes",
13883 { },
13884 INTERNAL | FLAG_NO_DATA,
13885 { },
13886 { { 0, 1 } },
13887 .fill_helper = bpf_fill_jmp32_jgt_reg,
13888 .nr_testruns = NR_PATTERN_RUNS,
13889 },
13890 {
13891 "JMP32_JGE_X: all register value magnitudes",
13892 { },
13893 INTERNAL | FLAG_NO_DATA,
13894 { },
13895 { { 0, 1 } },
13896 .fill_helper = bpf_fill_jmp32_jge_reg,
13897 .nr_testruns = NR_PATTERN_RUNS,
13898 },
13899 {
13900 "JMP32_JLT_X: all register value magnitudes",
13901 { },
13902 INTERNAL | FLAG_NO_DATA,
13903 { },
13904 { { 0, 1 } },
13905 .fill_helper = bpf_fill_jmp32_jlt_reg,
13906 .nr_testruns = NR_PATTERN_RUNS,
13907 },
13908 {
13909 "JMP32_JLE_X: all register value magnitudes",
13910 { },
13911 INTERNAL | FLAG_NO_DATA,
13912 { },
13913 { { 0, 1 } },
13914 .fill_helper = bpf_fill_jmp32_jle_reg,
13915 .nr_testruns = NR_PATTERN_RUNS,
13916 },
13917 {
13918 "JMP32_JSGT_X: all register value magnitudes",
13919 { },
13920 INTERNAL | FLAG_NO_DATA,
13921 { },
13922 { { 0, 1 } },
13923 .fill_helper = bpf_fill_jmp32_jsgt_reg,
13924 .nr_testruns = NR_PATTERN_RUNS,
13925 },
13926 {
13927 "JMP32_JSGE_X: all register value magnitudes",
13928 { },
13929 INTERNAL | FLAG_NO_DATA,
13930 { },
13931 { { 0, 1 } },
13932 .fill_helper = bpf_fill_jmp32_jsge_reg,
13933 .nr_testruns = NR_PATTERN_RUNS,
13934 },
13935 {
13936 "JMP32_JSLT_X: all register value magnitudes",
13937 { },
13938 INTERNAL | FLAG_NO_DATA,
13939 { },
13940 { { 0, 1 } },
13941 .fill_helper = bpf_fill_jmp32_jslt_reg,
13942 .nr_testruns = NR_PATTERN_RUNS,
13943 },
13944 {
13945 "JMP32_JSLE_X: all register value magnitudes",
13946 { },
13947 INTERNAL | FLAG_NO_DATA,
13948 { },
13949 { { 0, 1 } },
13950 .fill_helper = bpf_fill_jmp32_jsle_reg,
13951 .nr_testruns = NR_PATTERN_RUNS,
13952 },
13953 /* Conditional jumps with constant decision */
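/*
* The immediates and operands below are chosen so that the branch
* outcome is fixed regardless of the register contents, which lets
* a JIT resolve or drop the branch when the image is built.
*/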
13954 {
13955 "JMP_JSET_K: imm = 0 -> never taken",
13956 .u.insns_int = {
13957 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13958 BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13959 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13960 BPF_EXIT_INSN(),
13961 },
13962 INTERNAL | FLAG_NO_DATA,
13963 { },
13964 { { 0, 0 } },
13965 },
13966 {
13967 "JMP_JLT_K: imm = 0 -> never taken",
13968 .u.insns_int = {
13969 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13970 BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13971 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13972 BPF_EXIT_INSN(),
13973 },
13974 INTERNAL | FLAG_NO_DATA,
13975 { },
13976 { { 0, 0 } },
13977 },
13978 {
13979 "JMP_JGE_K: imm = 0 -> always taken",
13980 .u.insns_int = {
13981 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13982 BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13983 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13984 BPF_EXIT_INSN(),
13985 },
13986 INTERNAL | FLAG_NO_DATA,
13987 { },
13988 { { 0, 1 } },
13989 },
13990 {
13991 "JMP_JGT_K: imm = 0xffffffff -> never taken",
13992 .u.insns_int = {
13993 BPF_ALU64_IMM(BPF_MOV, R0, 1),
13994 BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13995 BPF_ALU64_IMM(BPF_MOV, R0, 0),
13996 BPF_EXIT_INSN(),
13997 },
13998 INTERNAL | FLAG_NO_DATA,
13999 { },
14000 { { 0, 0 } },
14001 },
14002 {
14003 "JMP_JLE_K: imm = 0xffffffff -> always taken",
14004 .u.insns_int = {
14005 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14006 BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
14007 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14008 BPF_EXIT_INSN(),
14009 },
14010 INTERNAL | FLAG_NO_DATA,
14011 { },
14012 { { 0, 1 } },
14013 },
14014 {
14015 "JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
14016 .u.insns_int = {
14017 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14018 BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
14019 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14020 BPF_EXIT_INSN(),
14021 },
14022 INTERNAL | FLAG_NO_DATA,
14023 { },
14024 { { 0, 0 } },
14025 },
14026 {
14027 "JMP32_JSGE_K: imm = -0x80000000 -> always taken",
14028 .u.insns_int = {
14029 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14030 BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
14031 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14032 BPF_EXIT_INSN(),
14033 },
14034 INTERNAL | FLAG_NO_DATA,
14035 { },
14036 { { 0, 1 } },
14037 },
14038 {
14039 "JMP32_JSLT_K: imm = -0x80000000 -> never taken",
14040 .u.insns_int = {
14041 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14042 BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
14043 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14044 BPF_EXIT_INSN(),
14045 },
14046 INTERNAL | FLAG_NO_DATA,
14047 { },
14048 { { 0, 0 } },
14049 },
14050 {
14051 "JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
14052 .u.insns_int = {
14053 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14054 BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
14055 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14056 BPF_EXIT_INSN(),
14057 },
14058 INTERNAL | FLAG_NO_DATA,
14059 { },
14060 { { 0, 1 } },
14061 },
14062 {
14063 "JMP_JEQ_X: dst = src -> always taken",
14064 .u.insns_int = {
14065 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14066 BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
14067 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14068 BPF_EXIT_INSN(),
14069 },
14070 INTERNAL | FLAG_NO_DATA,
14071 { },
14072 { { 0, 1 } },
14073 },
14074 {
14075 "JMP_JGE_X: dst = src -> always taken",
14076 .u.insns_int = {
14077 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14078 BPF_JMP_REG(BPF_JGE, R1, R1, 1),
14079 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14080 BPF_EXIT_INSN(),
14081 },
14082 INTERNAL | FLAG_NO_DATA,
14083 { },
14084 { { 0, 1 } },
14085 },
14086 {
14087 "JMP_JLE_X: dst = src -> always taken",
14088 .u.insns_int = {
14089 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14090 BPF_JMP_REG(BPF_JLE, R1, R1, 1),
14091 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14092 BPF_EXIT_INSN(),
14093 },
14094 INTERNAL | FLAG_NO_DATA,
14095 { },
14096 { { 0, 1 } },
14097 },
14098 {
14099 "JMP_JSGE_X: dst = src -> always taken",
14100 .u.insns_int = {
14101 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14102 BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
14103 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14104 BPF_EXIT_INSN(),
14105 },
14106 INTERNAL | FLAG_NO_DATA,
14107 { },
14108 { { 0, 1 } },
14109 },
14110 {
14111 "JMP_JSLE_X: dst = src -> always taken",
14112 .u.insns_int = {
14113 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14114 BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
14115 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14116 BPF_EXIT_INSN(),
14117 },
14118 INTERNAL | FLAG_NO_DATA,
14119 { },
14120 { { 0, 1 } },
14121 },
14122 {
14123 "JMP_JNE_X: dst = src -> never taken",
14124 .u.insns_int = {
14125 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14126 BPF_JMP_REG(BPF_JNE, R1, R1, 1),
14127 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14128 BPF_EXIT_INSN(),
14129 },
14130 INTERNAL | FLAG_NO_DATA,
14131 { },
14132 { { 0, 0 } },
14133 },
14134 {
14135 "JMP_JGT_X: dst = src -> never taken",
14136 .u.insns_int = {
14137 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14138 BPF_JMP_REG(BPF_JGT, R1, R1, 1),
14139 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14140 BPF_EXIT_INSN(),
14141 },
14142 INTERNAL | FLAG_NO_DATA,
14143 { },
14144 { { 0, 0 } },
14145 },
14146 {
14147 "JMP_JLT_X: dst = src -> never taken",
14148 .u.insns_int = {
14149 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14150 BPF_JMP_REG(BPF_JLT, R1, R1, 1),
14151 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14152 BPF_EXIT_INSN(),
14153 },
14154 INTERNAL | FLAG_NO_DATA,
14155 { },
14156 { { 0, 0 } },
14157 },
14158 {
14159 "JMP_JSGT_X: dst = src -> never taken",
14160 .u.insns_int = {
14161 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14162 BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
14163 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14164 BPF_EXIT_INSN(),
14165 },
14166 INTERNAL | FLAG_NO_DATA,
14167 { },
14168 { { 0, 0 } },
14169 },
14170 {
14171 "JMP_JSLT_X: dst = src -> never taken",
14172 .u.insns_int = {
14173 BPF_ALU64_IMM(BPF_MOV, R0, 1),
14174 BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
14175 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14176 BPF_EXIT_INSN(),
14177 },
14178 INTERNAL | FLAG_NO_DATA,
14179 { },
14180 { { 0, 0 } },
14181 },
14182 /* Short relative jumps */
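/*
* Branch offsets 0..4, where a taken branch with offset 0 lands on
* the very next instruction; every program below must end with R0 == 0.
*/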
14183 {
14184 "Short relative jump: offset=0",
14185 .u.insns_int = {
14186 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14187 BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
14188 BPF_EXIT_INSN(),
14189 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14190 },
14191 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14192 { },
14193 { { 0, 0 } },
14194 },
14195 {
14196 "Short relative jump: offset=1",
14197 .u.insns_int = {
14198 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14199 BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
14200 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14201 BPF_EXIT_INSN(),
14202 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14203 },
14204 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14205 { },
14206 { { 0, 0 } },
14207 },
14208 {
14209 "Short relative jump: offset=2",
14210 .u.insns_int = {
14211 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14212 BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
14213 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14214 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14215 BPF_EXIT_INSN(),
14216 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14217 },
14218 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14219 { },
14220 { { 0, 0 } },
14221 },
14222 {
14223 "Short relative jump: offset=3",
14224 .u.insns_int = {
14225 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14226 BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
14227 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14228 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14229 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14230 BPF_EXIT_INSN(),
14231 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14232 },
14233 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14234 { },
14235 { { 0, 0 } },
14236 },
14237 {
14238 "Short relative jump: offset=4",
14239 .u.insns_int = {
14240 BPF_ALU64_IMM(BPF_MOV, R0, 0),
14241 BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
14242 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14243 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14244 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14245 BPF_ALU32_IMM(BPF_ADD, R0, 1),
14246 BPF_EXIT_INSN(),
14247 BPF_ALU32_IMM(BPF_MOV, R0, -1),
14248 },
14249 INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14250 { },
14251 { { 0, 0 } },
14252 },
14253 /* Conditional branch conversions */
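/*
* Long conditional jumps built by the bpf_fill_max_jmp_*() helpers,
* exercising branch targets at the maximum distance, including
* branches whose outcome is already known when the image is built.
*/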
14254 {
14255 "Long conditional jump: taken at runtime",
14256 { },
14257 INTERNAL | FLAG_NO_DATA,
14258 { },
14259 { { 0, 1 } },
14260 .fill_helper = bpf_fill_max_jmp_taken,
14261 },
14262 {
14263 "Long conditional jump: not taken at runtime",
14264 { },
14265 INTERNAL | FLAG_NO_DATA,
14266 { },
14267 { { 0, 2 } },
14268 .fill_helper = bpf_fill_max_jmp_not_taken,
14269 },
14270 {
14271 "Long conditional jump: always taken, known at JIT time",
14272 { },
14273 INTERNAL | FLAG_NO_DATA,
14274 { },
14275 { { 0, 1 } },
14276 .fill_helper = bpf_fill_max_jmp_always_taken,
14277 },
14278 {
14279 "Long conditional jump: never taken, known at JIT time",
14280 { },
14281 INTERNAL | FLAG_NO_DATA,
14282 { },
14283 { { 0, 2 } },
14284 .fill_helper = bpf_fill_max_jmp_never_taken,
14285 },
14286 /* Staggered jump sequences, immediate */
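/*
* The bpf_fill_staggered_*() helpers chain long sequences of jumps
* covering a wide range of branch offsets; every flavour below
* (JMP/JMP32, immediate/register) expects MAX_STAGGERED_JMP_SIZE + 1.
*/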
14287 {
14288 "Staggered jumps: JMP_JA",
14289 { },
14290 INTERNAL | FLAG_NO_DATA,
14291 { },
14292 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14293 .fill_helper = bpf_fill_staggered_ja,
14294 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14295 },
14296 {
14297 "Staggered jumps: JMP_JEQ_K",
14298 { },
14299 INTERNAL | FLAG_NO_DATA,
14300 { },
14301 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14302 .fill_helper = bpf_fill_staggered_jeq_imm,
14303 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14304 },
14305 {
14306 "Staggered jumps: JMP_JNE_K",
14307 { },
14308 INTERNAL | FLAG_NO_DATA,
14309 { },
14310 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14311 .fill_helper = bpf_fill_staggered_jne_imm,
14312 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14313 },
14314 {
14315 "Staggered jumps: JMP_JSET_K",
14316 { },
14317 INTERNAL | FLAG_NO_DATA,
14318 { },
14319 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14320 .fill_helper = bpf_fill_staggered_jset_imm,
14321 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14322 },
14323 {
14324 "Staggered jumps: JMP_JGT_K",
14325 { },
14326 INTERNAL | FLAG_NO_DATA,
14327 { },
14328 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14329 .fill_helper = bpf_fill_staggered_jgt_imm,
14330 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14331 },
14332 {
14333 "Staggered jumps: JMP_JGE_K",
14334 { },
14335 INTERNAL | FLAG_NO_DATA,
14336 { },
14337 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14338 .fill_helper = bpf_fill_staggered_jge_imm,
14339 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14340 },
14341 {
14342 "Staggered jumps: JMP_JLT_K",
14343 { },
14344 INTERNAL | FLAG_NO_DATA,
14345 { },
14346 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14347 .fill_helper = bpf_fill_staggered_jlt_imm,
14348 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14349 },
14350 {
14351 "Staggered jumps: JMP_JLE_K",
14352 { },
14353 INTERNAL | FLAG_NO_DATA,
14354 { },
14355 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14356 .fill_helper = bpf_fill_staggered_jle_imm,
14357 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14358 },
14359 {
14360 "Staggered jumps: JMP_JSGT_K",
14361 { },
14362 INTERNAL | FLAG_NO_DATA,
14363 { },
14364 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14365 .fill_helper = bpf_fill_staggered_jsgt_imm,
14366 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14367 },
14368 {
14369 "Staggered jumps: JMP_JSGE_K",
14370 { },
14371 INTERNAL | FLAG_NO_DATA,
14372 { },
14373 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14374 .fill_helper = bpf_fill_staggered_jsge_imm,
14375 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14376 },
14377 {
14378 "Staggered jumps: JMP_JSLT_K",
14379 { },
14380 INTERNAL | FLAG_NO_DATA,
14381 { },
14382 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14383 .fill_helper = bpf_fill_staggered_jslt_imm,
14384 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14385 },
14386 {
14387 "Staggered jumps: JMP_JSLE_K",
14388 { },
14389 INTERNAL | FLAG_NO_DATA,
14390 { },
14391 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14392 .fill_helper = bpf_fill_staggered_jsle_imm,
14393 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14394 },
14395 /* Staggered jump sequences, register */
14396 {
14397 "Staggered jumps: JMP_JEQ_X",
14398 { },
14399 INTERNAL | FLAG_NO_DATA,
14400 { },
14401 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14402 .fill_helper = bpf_fill_staggered_jeq_reg,
14403 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14404 },
14405 {
14406 "Staggered jumps: JMP_JNE_X",
14407 { },
14408 INTERNAL | FLAG_NO_DATA,
14409 { },
14410 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14411 .fill_helper = bpf_fill_staggered_jne_reg,
14412 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14413 },
14414 {
14415 "Staggered jumps: JMP_JSET_X",
14416 { },
14417 INTERNAL | FLAG_NO_DATA,
14418 { },
14419 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14420 .fill_helper = bpf_fill_staggered_jset_reg,
14421 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14422 },
14423 {
14424 "Staggered jumps: JMP_JGT_X",
14425 { },
14426 INTERNAL | FLAG_NO_DATA,
14427 { },
14428 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14429 .fill_helper = bpf_fill_staggered_jgt_reg,
14430 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14431 },
14432 {
14433 "Staggered jumps: JMP_JGE_X",
14434 { },
14435 INTERNAL | FLAG_NO_DATA,
14436 { },
14437 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14438 .fill_helper = bpf_fill_staggered_jge_reg,
14439 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14440 },
14441 {
14442 "Staggered jumps: JMP_JLT_X",
14443 { },
14444 INTERNAL | FLAG_NO_DATA,
14445 { },
14446 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14447 .fill_helper = bpf_fill_staggered_jlt_reg,
14448 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14449 },
14450 {
14451 "Staggered jumps: JMP_JLE_X",
14452 { },
14453 INTERNAL | FLAG_NO_DATA,
14454 { },
14455 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14456 .fill_helper = bpf_fill_staggered_jle_reg,
14457 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14458 },
14459 {
14460 "Staggered jumps: JMP_JSGT_X",
14461 { },
14462 INTERNAL | FLAG_NO_DATA,
14463 { },
14464 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14465 .fill_helper = bpf_fill_staggered_jsgt_reg,
14466 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14467 },
14468 {
14469 "Staggered jumps: JMP_JSGE_X",
14470 { },
14471 INTERNAL | FLAG_NO_DATA,
14472 { },
14473 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14474 .fill_helper = bpf_fill_staggered_jsge_reg,
14475 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14476 },
14477 {
14478 "Staggered jumps: JMP_JSLT_X",
14479 { },
14480 INTERNAL | FLAG_NO_DATA,
14481 { },
14482 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14483 .fill_helper = bpf_fill_staggered_jslt_reg,
14484 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14485 },
14486 {
14487 "Staggered jumps: JMP_JSLE_X",
14488 { },
14489 INTERNAL | FLAG_NO_DATA,
14490 { },
14491 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14492 .fill_helper = bpf_fill_staggered_jsle_reg,
14493 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14494 },
14495 /* Staggered jump sequences, JMP32 immediate */
14496 {
14497 "Staggered jumps: JMP32_JEQ_K",
14498 { },
14499 INTERNAL | FLAG_NO_DATA,
14500 { },
14501 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14502 .fill_helper = bpf_fill_staggered_jeq32_imm,
14503 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14504 },
14505 {
14506 "Staggered jumps: JMP32_JNE_K",
14507 { },
14508 INTERNAL | FLAG_NO_DATA,
14509 { },
14510 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14511 .fill_helper = bpf_fill_staggered_jne32_imm,
14512 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14513 },
14514 {
14515 "Staggered jumps: JMP32_JSET_K",
14516 { },
14517 INTERNAL | FLAG_NO_DATA,
14518 { },
14519 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14520 .fill_helper = bpf_fill_staggered_jset32_imm,
14521 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14522 },
14523 {
14524 "Staggered jumps: JMP32_JGT_K",
14525 { },
14526 INTERNAL | FLAG_NO_DATA,
14527 { },
14528 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14529 .fill_helper = bpf_fill_staggered_jgt32_imm,
14530 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14531 },
14532 {
14533 "Staggered jumps: JMP32_JGE_K",
14534 { },
14535 INTERNAL | FLAG_NO_DATA,
14536 { },
14537 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14538 .fill_helper = bpf_fill_staggered_jge32_imm,
14539 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14540 },
14541 {
14542 "Staggered jumps: JMP32_JLT_K",
14543 { },
14544 INTERNAL | FLAG_NO_DATA,
14545 { },
14546 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14547 .fill_helper = bpf_fill_staggered_jlt32_imm,
14548 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14549 },
14550 {
14551 "Staggered jumps: JMP32_JLE_K",
14552 { },
14553 INTERNAL | FLAG_NO_DATA,
14554 { },
14555 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14556 .fill_helper = bpf_fill_staggered_jle32_imm,
14557 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14558 },
14559 {
14560 "Staggered jumps: JMP32_JSGT_K",
14561 { },
14562 INTERNAL | FLAG_NO_DATA,
14563 { },
14564 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14565 .fill_helper = bpf_fill_staggered_jsgt32_imm,
14566 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14567 },
14568 {
14569 "Staggered jumps: JMP32_JSGE_K",
14570 { },
14571 INTERNAL | FLAG_NO_DATA,
14572 { },
14573 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14574 .fill_helper = bpf_fill_staggered_jsge32_imm,
14575 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14576 },
14577 {
14578 "Staggered jumps: JMP32_JSLT_K",
14579 { },
14580 INTERNAL | FLAG_NO_DATA,
14581 { },
14582 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14583 .fill_helper = bpf_fill_staggered_jslt32_imm,
14584 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14585 },
14586 {
14587 "Staggered jumps: JMP32_JSLE_K",
14588 { },
14589 INTERNAL | FLAG_NO_DATA,
14590 { },
14591 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14592 .fill_helper = bpf_fill_staggered_jsle32_imm,
14593 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14594 },
14595 /* Staggered jump sequences, JMP32 register */
14596 {
14597 "Staggered jumps: JMP32_JEQ_X",
14598 { },
14599 INTERNAL | FLAG_NO_DATA,
14600 { },
14601 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14602 .fill_helper = bpf_fill_staggered_jeq32_reg,
14603 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14604 },
14605 {
14606 "Staggered jumps: JMP32_JNE_X",
14607 { },
14608 INTERNAL | FLAG_NO_DATA,
14609 { },
14610 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14611 .fill_helper = bpf_fill_staggered_jne32_reg,
14612 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14613 },
14614 {
14615 "Staggered jumps: JMP32_JSET_X",
14616 { },
14617 INTERNAL | FLAG_NO_DATA,
14618 { },
14619 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14620 .fill_helper = bpf_fill_staggered_jset32_reg,
14621 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14622 },
14623 {
14624 "Staggered jumps: JMP32_JGT_X",
14625 { },
14626 INTERNAL | FLAG_NO_DATA,
14627 { },
14628 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14629 .fill_helper = bpf_fill_staggered_jgt32_reg,
14630 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14631 },
14632 {
14633 "Staggered jumps: JMP32_JGE_X",
14634 { },
14635 INTERNAL | FLAG_NO_DATA,
14636 { },
14637 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14638 .fill_helper = bpf_fill_staggered_jge32_reg,
14639 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14640 },
14641 {
14642 "Staggered jumps: JMP32_JLT_X",
14643 { },
14644 INTERNAL | FLAG_NO_DATA,
14645 { },
14646 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14647 .fill_helper = bpf_fill_staggered_jlt32_reg,
14648 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14649 },
14650 {
14651 "Staggered jumps: JMP32_JLE_X",
14652 { },
14653 INTERNAL | FLAG_NO_DATA,
14654 { },
14655 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14656 .fill_helper = bpf_fill_staggered_jle32_reg,
14657 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14658 },
14659 {
14660 "Staggered jumps: JMP32_JSGT_X",
14661 { },
14662 INTERNAL | FLAG_NO_DATA,
14663 { },
14664 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14665 .fill_helper = bpf_fill_staggered_jsgt32_reg,
14666 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14667 },
14668 {
14669 "Staggered jumps: JMP32_JSGE_X",
14670 { },
14671 INTERNAL | FLAG_NO_DATA,
14672 { },
14673 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14674 .fill_helper = bpf_fill_staggered_jsge32_reg,
14675 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14676 },
14677 {
14678 "Staggered jumps: JMP32_JSLT_X",
14679 { },
14680 INTERNAL | FLAG_NO_DATA,
14681 { },
14682 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14683 .fill_helper = bpf_fill_staggered_jslt32_reg,
14684 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14685 },
14686 {
14687 "Staggered jumps: JMP32_JSLE_X",
14688 { },
14689 INTERNAL | FLAG_NO_DATA,
14690 { },
14691 { { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14692 .fill_helper = bpf_fill_staggered_jsle32_reg,
14693 .nr_testruns = NR_STAGGERED_JMP_RUNS,
14694 },
14695};
14696
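/* Dummy net_device backing the skbs built by populate_skb() below. */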
14697static struct net_device dev;
14698
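/*
* Build an skb around the supplied test data and fill its metadata
* (packet type, mark, hash, queue mapping, VLAN and device fields)
* with the fixed SKB_* constants so programs see known values.
*/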
14699static struct sk_buff *populate_skb(char *buf, int size)
14700{
14701 struct sk_buff *skb;
14702
14703 if (size >= MAX_DATA)
14704 return NULL;
14705
14706 skb = alloc_skb(MAX_DATA, GFP_KERNEL);
14707 if (!skb)
14708 return NULL;
14709
14710 __skb_put_data(skb, buf, size);
14711
14712 /* Initialize a fake skb with test pattern. */
14713 skb_reset_mac_header(skb);
14714 skb->protocol = htons(ETH_P_IP);
14715 skb->pkt_type = SKB_TYPE;
14716 skb->mark = SKB_MARK;
14717 skb->hash = SKB_HASH;
14718 skb->queue_mapping = SKB_QUEUE_MAP;
14719 skb->vlan_tci = SKB_VLAN_TCI;
14720 skb->vlan_proto = htons(ETH_P_IP);
14721 dev_net_set(&dev, &init_net);
14722 skb->dev = &dev;
14723 skb->dev->ifindex = SKB_DEV_IFINDEX;
14724 skb->dev->type = SKB_DEV_TYPE;
14725 skb_set_network_header(skb, min(size, ETH_HLEN));
14726
14727 return skb;
14728}
14729
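/*
* Allocate the run-time data for one subtest: nothing for FLAG_NO_DATA,
* a plain kmalloc() buffer for FLAG_LARGE_MEM, otherwise an skb built
* from test->data, optionally with a single page-sized fragment.
*/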
14730static void *generate_test_data(struct bpf_test *test, int sub)
14731{
14732 struct sk_buff *skb;
14733 struct page *page;
14734
14735 if (test->aux & FLAG_NO_DATA)
14736 return NULL;
14737
14738 if (test->aux & FLAG_LARGE_MEM)
14739 return kmalloc(test->test[sub].data_size, GFP_KERNEL);
14740
14741 /* Test case expects an skb, so populate one. Various
14742 * subtests generate skbs of different sizes based on
14743 * the same data.
14744 */
14745 skb = populate_skb(test->data, test->test[sub].data_size);
14746 if (!skb)
14747 return NULL;
14748
14749 if (test->aux & FLAG_SKB_FRAG) {
14750 /*
14751 * when the test requires a fragmented skb, add a
14752 * single fragment to the skb, filled with
14753 * test->frag_data.
14754 */
14755 page = alloc_page(GFP_KERNEL);
14756 if (!page)
14757 goto err_kfree_skb;
14758
14759 memcpy(page_address(page), test->frag_data, MAX_DATA);
14760 skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
14761 }
14762
14763 return skb;
14764err_kfree_skb:
14765 kfree_skb(skb);
14766 return NULL;
14767}
14768
14769static void release_test_data(const struct bpf_test *test, void *data)
14770{
14771 if (test->aux & FLAG_NO_DATA)
14772 return;
14773
14774 if (test->aux & FLAG_LARGE_MEM)
14775 kfree(data);
14776 else
14777 kfree_skb(data);
14778}
14779
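/*
 * Number of instructions in a test program. Tests with a fill handler
 * record their length explicitly; for the rest, trailing all-zero
 * instructions in the fixed-size insns array are trimmed off.
 */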
14780static int filter_length(int which)
14781{
14782 struct sock_filter *fp;
14783 int len;
14784
14785 if (tests[which].fill_helper)
14786 return tests[which].u.ptr.len;
14787
14788 fp = tests[which].u.insns;
14789 for (len = MAX_INSNS - 1; len > 0; --len)
14790 if (fp[len].code != 0 || fp[len].k != 0)
14791 break;
14792
14793 return len + 1;
14794}
14795
14796static void *filter_pointer(int which)
14797{
14798 if (tests[which].fill_helper)
14799 return tests[which].u.ptr.insns;
14800 else
14801 return tests[which].u.insns;
14802}
14803
14804static struct bpf_prog *generate_filter(int which, int *err)
14805{
14806 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14807 unsigned int flen = filter_length(which);
14808 void *fptr = filter_pointer(which);
14809 struct sock_fprog_kern fprog;
14810 struct bpf_prog *fp;
14811
14812 switch (test_type) {
14813 case CLASSIC:
14814 fprog.filter = fptr;
14815 fprog.len = flen;
14816
14817 *err = bpf_prog_create(&fp, &fprog);
14818 if (tests[which].aux & FLAG_EXPECTED_FAIL) {
14819 if (*err == tests[which].expected_errcode) {
14820 pr_cont("PASS\n");
14821 /* Verifier rejected filter as expected. */
14822 *err = 0;
14823 return NULL;
14824 } else {
14825 pr_cont("UNEXPECTED_PASS\n");
14826 /* Verifier didn't reject the program, which
14827 * is bad enough, so just return.
14828 */
14829 *err = -EINVAL;
14830 return NULL;
14831 }
14832 }
14833 if (*err) {
14834 pr_cont("FAIL to prog_create err=%d len=%d\n",
14835 *err, fprog.len);
14836 return NULL;
14837 }
14838 break;
14839
14840 case INTERNAL:
14841 fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
14842 if (fp == NULL) {
14843 pr_cont("UNEXPECTED_FAIL no memory left\n");
14844 *err = -ENOMEM;
14845 return NULL;
14846 }
14847
14848 fp->len = flen;
14849 /* Type doesn't really matter here as long as it's not unspec. */
14850 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14851 memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
14852 fp->aux->stack_depth = tests[which].stack_depth;
14853 fp->aux->verifier_zext = !!(tests[which].aux &
14854 FLAG_VERIFIER_ZEXT);
14855
14856 /* We cannot error here as we don't need type compatibility
14857 * checks.
14858 */
14859 fp = bpf_prog_select_runtime(fp, err);
14860 if (*err) {
14861 pr_cont("FAIL to select_runtime err=%d\n", *err);
14862 return NULL;
14863 }
14864 break;
14865 }
14866
14867 *err = 0;
14868 return fp;
14869}
14870
14871static void release_filter(struct bpf_prog *fp, int which)
14872{
14873 __u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14874
14875 switch (test_type) {
14876 case CLASSIC:
14877 bpf_prog_destroy(fp);
14878 break;
14879 case INTERNAL:
14880 bpf_prog_free(fp);
14881 break;
14882 }
14883}
14884
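/*
 * Run the program 'runs' times on the same context and report the mean
 * runtime per invocation in nanoseconds.
 */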
14885static int __run_one(const struct bpf_prog *fp, const void *data,
14886 int runs, u64 *duration)
14887{
14888 u64 start, finish;
14889 int ret = 0, i;
14890
14891 migrate_disable();
14892 start = ktime_get_ns();
14893
14894 for (i = 0; i < runs; i++)
14895 ret = bpf_prog_run(fp, data);
14896
14897 finish = ktime_get_ns();
14898 migrate_enable();
14899
14900 *duration = finish - start;
14901 do_div(*duration, runs);
14902
14903 return ret;
14904}
14905
14906static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
14907{
14908 int err_cnt = 0, i, runs = MAX_TESTRUNS;
14909
14910 if (test->nr_testruns)
14911 runs = min(test->nr_testruns, MAX_TESTRUNS);
14912
14913 for (i = 0; i < MAX_SUBTESTS; i++) {
14914 void *data;
14915 u64 duration;
14916 u32 ret;
14917
14918 /*
14919 * NOTE: Several sub-tests may be present, in which case
14920 * a zero {data_size, result} tuple indicates the end of
14921 * the sub-test array. The first test is always run,
14922 * even if both data_size and result happen to be zero.
14923 */
14924 if (i > 0 &&
14925 test->test[i].data_size == 0 &&
14926 test->test[i].result == 0)
14927 break;
14928
14929 data = generate_test_data(test, i);
14930 if (!data && !(test->aux & FLAG_NO_DATA)) {
14931 pr_cont("data generation failed ");
14932 err_cnt++;
14933 break;
14934 }
14935 ret = __run_one(fp, data, runs, &duration);
14936 release_test_data(test, data);
14937
14938 if (ret == test->test[i].result) {
14939 pr_cont("%lld ", duration);
14940 } else {
14941 s32 res = test->test[i].result;
14942
14943 pr_cont("ret %d != %d (%#x != %#x)",
14944 ret, res, ret, res);
14945 err_cnt++;
14946 }
14947 }
14948
14949 return err_cnt;
14950}
14951
14952static char test_name[64];
14953module_param_string(test_name, test_name, sizeof(test_name), 0);
14954
14955static int test_id = -1;
14956module_param(test_id, int, 0);
14957
14958static int test_range[2] = { 0, INT_MAX };
14959module_param_array(test_range, int, NULL, 0);
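/*
 * Illustrative usage of the parameters above, assuming the module is
 * built as test_bpf.ko (test names are taken from the tables in this file):
 *
 *   insmod test_bpf.ko test_id=42
 *   insmod test_bpf.ko test_name="Staggered jumps: JMP32_JNE_X"
 *   insmod test_bpf.ko test_range=10,20
 */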
14960
14961static bool exclude_test(int test_id)
14962{
14963 return test_id < test_range[0] || test_id > test_range[1];
14964}
14965
14966static __init struct sk_buff *build_test_skb(void)
14967{
14968 u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
14969 struct sk_buff *skb[2];
14970 struct page *page[2];
14971 int i, data_size = 8;
14972
14973 for (i = 0; i < 2; i++) {
14974 page[i] = alloc_page(GFP_KERNEL);
14975 if (!page[i]) {
14976 if (i == 0)
14977 goto err_page0;
14978 else
14979 goto err_page1;
14980 }
14981
14982 /* this will set skb[i]->head_frag */
14983 skb[i] = dev_alloc_skb(headroom + data_size);
14984 if (!skb[i]) {
14985 if (i == 0)
14986 goto err_skb0;
14987 else
14988 goto err_skb1;
14989 }
14990
14991 skb_reserve(skb[i], headroom);
14992 skb_put(skb[i], data_size);
14993 skb[i]->protocol = htons(ETH_P_IP);
14994 skb_reset_network_header(skb[i]);
14995 skb_set_mac_header(skb[i], -ETH_HLEN);
14996
14997 skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
14998 // skb_headlen(skb[i]): 8, skb[i]->head_frag = 1
14999 }
15000
15001 /* setup shinfo */
15002 skb_shinfo(skb[0])->gso_size = 1448;
15003 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
15004 skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
15005 skb_shinfo(skb[0])->gso_segs = 0;
15006 skb_shinfo(skb[0])->frag_list = skb[1];
15007 skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
15008
15009 /* adjust skb[0]'s len */
15010 skb[0]->len += skb[1]->len;
15011 skb[0]->data_len += skb[1]->data_len;
15012 skb[0]->truesize += skb[1]->truesize;
15013
15014 return skb[0];
15015
15016err_skb1:
15017 __free_page(page[1]);
15018err_page1:
15019 kfree_skb(skb[0]);
15020err_skb0:
15021 __free_page(page[0]);
15022err_page0:
15023 return NULL;
15024}
15025
15026static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
15027{
15028 unsigned int alloc_size = 2000;
15029 unsigned int headroom = 102, doffset = 72, data_size = 1308;
15030 struct sk_buff *skb[2];
15031 int i;
15032
15033 /* skbs linked in a frag_list, both with linear data, with head_frag=0
15034 * (data allocated by kmalloc), both have tcp data of 1308 bytes
15035 * (total payload is 2616 bytes).
15036 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
15037 */
15038 for (i = 0; i < 2; i++) {
15039 skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
15040 if (!skb[i]) {
15041 if (i == 0)
15042 goto err_skb0;
15043 else
15044 goto err_skb1;
15045 }
15046
15047 skb[i]->protocol = htons(ETH_P_IPV6);
15048 skb_reserve(skb[i], headroom);
15049 skb_put(skb[i], doffset + data_size);
15050 skb_reset_network_header(skb[i]);
15051 if (i == 0)
15052 skb_reset_mac_header(skb[i]);
15053 else
15054 skb_set_mac_header(skb[i], -ETH_HLEN);
15055 __skb_pull(skb[i], doffset);
15056 }
15057
15058 /* setup shinfo.
15059 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
15060 * reduced gso_size.
15061 */
15062 skb_shinfo(skb[0])->gso_size = 1288;
15063 skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
15064 skb_shinfo(skb[0])->gso_segs = 0;
15065 skb_shinfo(skb[0])->frag_list = skb[1];
15066
15067 /* adjust skb[0]'s len */
15068 skb[0]->len += skb[1]->len;
15069 skb[0]->data_len += skb[1]->len;
15070 skb[0]->truesize += skb[1]->truesize;
15071
15072 return skb[0];
15073
15074err_skb1:
15075 kfree_skb(skb[0]);
15076err_skb0:
15077 return NULL;
15078}
15079
15080struct skb_segment_test {
15081 const char *descr;
15082 struct sk_buff *(*build_skb)(void);
15083 netdev_features_t features;
15084};
15085
15086static struct skb_segment_test skb_segment_tests[] __initconst = {
15087 {
15088 .descr = "gso_with_rx_frags",
15089 .build_skb = build_test_skb,
15090 .features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
15091 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
15092 },
15093 {
15094 .descr = "gso_linear_no_head_frag",
15095 .build_skb = build_test_skb_linear_no_head_frag,
15096 .features = NETIF_F_SG | NETIF_F_FRAGLIST |
15097 NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
15098 NETIF_F_LLTX | NETIF_F_GRO |
15099 NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
15100 NETIF_F_HW_VLAN_STAG_TX
15101 }
15102};
15103
15104static __init int test_skb_segment_single(const struct skb_segment_test *test)
15105{
15106 struct sk_buff *skb, *segs;
15107 int ret = -1;
15108
15109 skb = test->build_skb();
15110 if (!skb) {
15111 pr_info("%s: failed to build_test_skb", __func__);
15112 goto done;
15113 }
15114
15115 segs = skb_segment(skb, test->features);
15116 if (!IS_ERR(segs)) {
15117 kfree_skb_list(segs);
15118 ret = 0;
15119 }
15120 kfree_skb(skb);
15121done:
15122 return ret;
15123}
15124
15125static __init int test_skb_segment(void)
15126{
15127 int i, err_cnt = 0, pass_cnt = 0;
15128
15129 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15130 const struct skb_segment_test *test = &skb_segment_tests[i];
15131
15132 cond_resched();
15133 if (exclude_test(i))
15134 continue;
15135
15136 pr_info("#%d %s ", i, test->descr);
15137
15138 if (test_skb_segment_single(test)) {
15139 pr_cont("FAIL\n");
15140 err_cnt++;
15141 } else {
15142 pr_cont("PASS\n");
15143 pass_cnt++;
15144 }
15145 }
15146
15147 pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
15148 pass_cnt, err_cnt);
15149 return err_cnt ? -EINVAL : 0;
15150}
15151
15152static __init int test_bpf(void)
15153{
15154 int i, err_cnt = 0, pass_cnt = 0;
15155 int jit_cnt = 0, run_cnt = 0;
15156
15157 for (i = 0; i < ARRAY_SIZE(tests); i++) {
15158 struct bpf_prog *fp;
15159 int err;
15160
15161 cond_resched();
15162 if (exclude_test(i))
15163 continue;
15164
15165 pr_info("#%d %s ", i, tests[i].descr);
15166
15167 if (tests[i].fill_helper &&
15168 tests[i].fill_helper(&tests[i]) < 0) {
15169 pr_cont("FAIL to prog_fill\n");
15170 continue;
15171 }
15172
15173 fp = generate_filter(i, &err);
15174
15175 if (tests[i].fill_helper) {
15176 kfree(tests[i].u.ptr.insns);
15177 tests[i].u.ptr.insns = NULL;
15178 }
15179
15180 if (fp == NULL) {
15181 if (err == 0) {
15182 pass_cnt++;
15183 continue;
15184 }
15185 err_cnt++;
15186 continue;
15187 }
15188
15189 pr_cont("jited:%u ", fp->jited);
15190
15191 run_cnt++;
15192 if (fp->jited)
15193 jit_cnt++;
15194
15195 err = run_one(fp, &tests[i]);
15196 release_filter(fp, i);
15197
15198 if (err) {
15199 pr_cont("FAIL (%d times)\n", err);
15200 err_cnt++;
15201 } else {
15202 pr_cont("PASS\n");
15203 pass_cnt++;
15204 }
15205 }
15206
15207 pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15208 pass_cnt, err_cnt, jit_cnt, run_cnt);
15209
15210 return err_cnt ? -EINVAL : 0;
15211}
15212
15213struct tail_call_test {
15214 const char *descr;
15215 struct bpf_insn insns[MAX_INSNS];
15216 int flags;
15217 int result;
15218 int stack_depth;
15219};
15220
15221/* Flags that can be passed to tail call test cases */
15222#define FLAG_NEED_STATE BIT(0)
15223#define FLAG_RESULT_IN_STATE BIT(1)
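/*
 * FLAG_NEED_STATE passes a pointer to a local state word as the program
 * context; FLAG_RESULT_IN_STATE makes the runner compare that state word,
 * rather than the program's return value, against the expected result
 * (see test_tail_calls()).
 */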
15224
15225/*
15226 * Magic marker used in test snippets for tail calls below.
15227 * BPF_LD to R2 and BPF_MOV to R3 with this immediate value are replaced
15228 * with the proper values by the test runner.
15229 */
15230#define TAIL_CALL_MARKER 0x7a11ca11
15231
15232/* Special offset to indicate a NULL call target */
15233#define TAIL_CALL_NULL 0x7fff
15234
15235/* Special offset to indicate an out-of-range index */
15236#define TAIL_CALL_INVALID 0x7ffe
15237
15238#define TAIL_CALL(offset) \
15239 BPF_LD_IMM64(R2, TAIL_CALL_MARKER), \
15240 BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
15241 offset, TAIL_CALL_MARKER), \
15242 BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
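/*
 * Illustrative expansion: prepare_tail_call_tests() below rewrites the R2
 * load with the address of the program array and the R3 immediate with
 * 'current test index + offset' (or with the NULL slot / an out-of-range
 * index for TAIL_CALL_NULL and TAIL_CALL_INVALID). So TAIL_CALL(-1)
 * tail-calls the previous test in the table and TAIL_CALL(0) calls itself.
 */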
15243
15244/*
15245 * A test function to be called from a BPF program, clobbering a lot of
15246 * CPU registers in the process. A JITed BPF program calling this function
15247 * must save and restore any caller-saved registers it uses for internal
15248 * state, for example the current tail call count.
15249 */
15250BPF_CALL_1(bpf_test_func, u64, arg)
15251{
15252 char buf[64];
15253 long a = 0;
15254 long b = 1;
15255 long c = 2;
15256 long d = 3;
15257 long e = 4;
15258 long f = 5;
15259 long g = 6;
15260 long h = 7;
15261
15262 return snprintf(buf, sizeof(buf),
15263 "%ld %lu %lx %ld %lu %lx %ld %lu %x",
15264 a, b, c, d, e, f, g, h, (int)arg);
15265}
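/*
 * Pseudo helper ID for bpf_test_func, one past the highest real helper ID;
 * prepare_tail_call_tests() patches calls with this ID to bpf_test_func.
 */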
15266#define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
15267
15268/*
15269 * Tail call tests. Each test case may call any other test in the table,
15270 * including itself, specified as a relative index offset from the calling
15271 * test. The index TAIL_CALL_NULL can be used to specify a NULL target
15272 * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
15273 * results in a target index that is out of range.
15274 */
15275static struct tail_call_test tail_call_tests[] = {
15276 {
15277 "Tail call leaf",
15278 .insns = {
15279 BPF_ALU64_REG(BPF_MOV, R0, R1),
15280 BPF_ALU64_IMM(BPF_ADD, R0, 1),
15281 BPF_EXIT_INSN(),
15282 },
15283 .result = 1,
15284 },
15285 {
15286 "Tail call 2",
15287 .insns = {
15288 BPF_ALU64_IMM(BPF_ADD, R1, 2),
15289 TAIL_CALL(-1),
15290 BPF_ALU64_IMM(BPF_MOV, R0, -1),
15291 BPF_EXIT_INSN(),
15292 },
15293 .result = 3,
15294 },
15295 {
15296 "Tail call 3",
15297 .insns = {
15298 BPF_ALU64_IMM(BPF_ADD, R1, 3),
15299 TAIL_CALL(-1),
15300 BPF_ALU64_IMM(BPF_MOV, R0, -1),
15301 BPF_EXIT_INSN(),
15302 },
15303 .result = 6,
15304 },
15305 {
15306 "Tail call 4",
15307 .insns = {
15308 BPF_ALU64_IMM(BPF_ADD, R1, 4),
15309 TAIL_CALL(-1),
15310 BPF_ALU64_IMM(BPF_MOV, R0, -1),
15311 BPF_EXIT_INSN(),
15312 },
15313 .result = 10,
15314 },
15315 {
15316 "Tail call load/store leaf",
15317 .insns = {
15318 BPF_ALU64_IMM(BPF_MOV, R1, 1),
15319 BPF_ALU64_IMM(BPF_MOV, R2, 2),
15320 BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
15321 BPF_STX_MEM(BPF_DW, R3, R1, -8),
15322 BPF_STX_MEM(BPF_DW, R3, R2, -16),
15323 BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
15324 BPF_JMP_REG(BPF_JNE, R0, R1, 3),
15325 BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
15326 BPF_JMP_REG(BPF_JNE, R0, R2, 1),
15327 BPF_ALU64_IMM(BPF_MOV, R0, 0),
15328 BPF_EXIT_INSN(),
15329 },
15330 .result = 0,
15331 .stack_depth = 32,
15332 },
15333 {
15334 "Tail call load/store",
15335 .insns = {
15336 BPF_ALU64_IMM(BPF_MOV, R0, 3),
15337 BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
15338 TAIL_CALL(-1),
15339 BPF_ALU64_IMM(BPF_MOV, R0, -1),
15340 BPF_EXIT_INSN(),
15341 },
15342 .result = 0,
15343 .stack_depth = 16,
15344 },
15345 {
15346 "Tail call error path, max count reached",
15347 .insns = {
15348 BPF_LDX_MEM(BPF_W, R2, R1, 0),
15349 BPF_ALU64_IMM(BPF_ADD, R2, 1),
15350 BPF_STX_MEM(BPF_W, R1, R2, 0),
15351 TAIL_CALL(0),
15352 BPF_EXIT_INSN(),
15353 },
15354 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15355 .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
15356 },
15357 {
15358 "Tail call count preserved across function calls",
15359 .insns = {
15360 BPF_LDX_MEM(BPF_W, R2, R1, 0),
15361 BPF_ALU64_IMM(BPF_ADD, R2, 1),
15362 BPF_STX_MEM(BPF_W, R1, R2, 0),
15363 BPF_STX_MEM(BPF_DW, R10, R1, -8),
15364 BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
15365 BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
15366 BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
15367 BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
15368 BPF_CALL_REL(BPF_FUNC_jiffies64),
15369 BPF_CALL_REL(BPF_FUNC_test_func),
15370 BPF_LDX_MEM(BPF_DW, R1, R10, -8),
15371 BPF_ALU32_REG(BPF_MOV, R0, R1),
15372 TAIL_CALL(0),
15373 BPF_EXIT_INSN(),
15374 },
15375 .stack_depth = 8,
15376 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15377 .result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
15378 },
15379 {
15380 "Tail call error path, NULL target",
15381 .insns = {
15382 BPF_LDX_MEM(BPF_W, R2, R1, 0),
15383 BPF_ALU64_IMM(BPF_ADD, R2, 1),
15384 BPF_STX_MEM(BPF_W, R1, R2, 0),
15385 TAIL_CALL(TAIL_CALL_NULL),
15386 BPF_EXIT_INSN(),
15387 },
15388 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15389 .result = MAX_TESTRUNS,
15390 },
15391 {
15392 "Tail call error path, index out of range",
15393 .insns = {
15394 BPF_LDX_MEM(BPF_W, R2, R1, 0),
15395 BPF_ALU64_IMM(BPF_ADD, R2, 1),
15396 BPF_STX_MEM(BPF_W, R1, R2, 0),
15397 TAIL_CALL(TAIL_CALL_INVALID),
15398 BPF_EXIT_INSN(),
15399 },
15400 .flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15401 .result = MAX_TESTRUNS,
15402 },
15403};
15404
15405static void __init destroy_tail_call_tests(struct bpf_array *progs)
15406{
15407 int i;
15408
15409 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
15410 if (progs->ptrs[i])
15411 bpf_prog_free(progs->ptrs[i]);
15412 kfree(progs);
15413}
15414
15415static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
15416{
15417 int ntests = ARRAY_SIZE(tail_call_tests);
15418 struct bpf_array *progs;
15419 int which, err;
15420
15421 /* Allocate the table of programs to be used for tail calls */
15422 progs = kzalloc(struct_size(progs, ptrs, ntests + 1), GFP_KERNEL);
15423 if (!progs)
15424 goto out_nomem;
15425
15426 /* Create all eBPF programs and populate the table */
15427 for (which = 0; which < ntests; which++) {
15428 struct tail_call_test *test = &tail_call_tests[which];
15429 struct bpf_prog *fp;
15430 int len, i;
15431
15432 /* Compute the number of program instructions */
15433 for (len = 0; len < MAX_INSNS; len++) {
15434 struct bpf_insn *insn = &test->insns[len];
15435
15436 if (len < MAX_INSNS - 1 &&
15437 insn->code == (BPF_LD | BPF_DW | BPF_IMM))
15438 len++;
15439 if (insn->code == 0)
15440 break;
15441 }
15442
15443 /* Allocate and initialize the program */
15444 fp = bpf_prog_alloc(bpf_prog_size(len), 0);
15445 if (!fp)
15446 goto out_nomem;
15447
15448 fp->len = len;
15449 fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
15450 fp->aux->stack_depth = test->stack_depth;
15451 memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
15452
15453 /* Relocate runtime tail call offsets and addresses */
15454 for (i = 0; i < len; i++) {
15455 struct bpf_insn *insn = &fp->insnsi[i];
15456 long addr = 0;
15457
15458 switch (insn->code) {
15459 case BPF_LD | BPF_DW | BPF_IMM:
15460 if (insn->imm != TAIL_CALL_MARKER)
15461 break;
15462 insn[0].imm = (u32)(long)progs;
15463 insn[1].imm = ((u64)(long)progs) >> 32;
15464 break;
15465
15466 case BPF_ALU | BPF_MOV | BPF_K:
15467 if (insn->imm != TAIL_CALL_MARKER)
15468 break;
15469 if (insn->off == TAIL_CALL_NULL)
15470 insn->imm = ntests;
15471 else if (insn->off == TAIL_CALL_INVALID)
15472 insn->imm = ntests + 1;
15473 else
15474 insn->imm = which + insn->off;
15475 insn->off = 0;
15476 break;
15477
15478 case BPF_JMP | BPF_CALL:
15479 if (insn->src_reg != BPF_PSEUDO_CALL)
15480 break;
15481 switch (insn->imm) {
15482 case BPF_FUNC_get_numa_node_id:
15483 addr = (long)&numa_node_id;
15484 break;
15485 case BPF_FUNC_ktime_get_ns:
15486 addr = (long)&ktime_get_ns;
15487 break;
15488 case BPF_FUNC_ktime_get_boot_ns:
15489 addr = (long)&ktime_get_boot_fast_ns;
15490 break;
15491 case BPF_FUNC_ktime_get_coarse_ns:
15492 addr = (long)&ktime_get_coarse_ns;
15493 break;
15494 case BPF_FUNC_jiffies64:
15495 addr = (long)&get_jiffies_64;
15496 break;
15497 case BPF_FUNC_test_func:
15498 addr = (long)&bpf_test_func;
15499 break;
15500 default:
15501 err = -EFAULT;
15502 goto out_err;
15503 }
15504 *insn = BPF_EMIT_CALL(addr);
15505 if ((long)__bpf_call_base + insn->imm != addr)
15506 *insn = BPF_JMP_A(0); /* Skip: NOP */
15507 break;
15508 }
15509 }
15510
15511 fp = bpf_prog_select_runtime(fp, &err);
15512 if (err)
15513 goto out_err;
15514
15515 progs->ptrs[which] = fp;
15516 }
15517
15518 /* The last entry contains a NULL program pointer */
15519 progs->map.max_entries = ntests + 1;
15520 *pprogs = progs;
15521 return 0;
15522
15523out_nomem:
15524 err = -ENOMEM;
15525
15526out_err:
15527 if (progs)
15528 destroy_tail_call_tests(progs);
15529 return err;
15530}
15531
15532static __init int test_tail_calls(struct bpf_array *progs)
15533{
15534 int i, err_cnt = 0, pass_cnt = 0;
15535 int jit_cnt = 0, run_cnt = 0;
15536
15537 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
15538 struct tail_call_test *test = &tail_call_tests[i];
15539 struct bpf_prog *fp = progs->ptrs[i];
15540 int *data = NULL;
15541 int state = 0;
15542 u64 duration;
15543 int ret;
15544
15545 cond_resched();
15546 if (exclude_test(i))
15547 continue;
15548
15549 pr_info("#%d %s ", i, test->descr);
15550 if (!fp) {
15551 err_cnt++;
15552 continue;
15553 }
15554 pr_cont("jited:%u ", fp->jited);
15555
15556 run_cnt++;
15557 if (fp->jited)
15558 jit_cnt++;
15559
15560 if (test->flags & FLAG_NEED_STATE)
15561 data = &state;
15562 ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
15563 if (test->flags & FLAG_RESULT_IN_STATE)
15564 ret = state;
15565 if (ret == test->result) {
15566 pr_cont("%lld PASS", duration);
15567 pass_cnt++;
15568 } else {
15569 pr_cont("ret %d != %d FAIL", ret, test->result);
15570 err_cnt++;
15571 }
15572 }
15573
15574 pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15575 __func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
15576
15577 return err_cnt ? -EINVAL : 0;
15578}
15579
15580static char test_suite[32];
15581module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
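/*
 * Illustrative usage, assuming the module is built as test_bpf.ko:
 *
 *   insmod test_bpf.ko test_suite=test_tail_calls
 *
 * When no suite is given, all three suites are run (see test_bpf_init()).
 */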
15582
15583static __init int find_test_index(const char *test_name)
15584{
15585 int i;
15586
15587 if (!strcmp(test_suite, "test_bpf")) {
15588 for (i = 0; i < ARRAY_SIZE(tests); i++) {
15589 if (!strcmp(tests[i].descr, test_name))
15590 return i;
15591 }
15592 }
15593
15594 if (!strcmp(test_suite, "test_tail_calls")) {
15595 for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
15596 if (!strcmp(tail_call_tests[i].descr, test_name))
15597 return i;
15598 }
15599 }
15600
15601 if (!strcmp(test_suite, "test_skb_segment")) {
15602 for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15603 if (!strcmp(skb_segment_tests[i].descr, test_name))
15604 return i;
15605 }
15606 }
15607
15608 return -1;
15609}
15610
15611static __init int prepare_test_range(void)
15612{
15613 int valid_range;
15614
15615 if (!strcmp(test_suite, "test_bpf"))
15616 valid_range = ARRAY_SIZE(tests);
15617 else if (!strcmp(test_suite, "test_tail_calls"))
15618 valid_range = ARRAY_SIZE(tail_call_tests);
15619 else if (!strcmp(test_suite, "test_skb_segment"))
15620 valid_range = ARRAY_SIZE(skb_segment_tests);
15621 else
15622 return 0;
15623
15624 if (test_id >= 0) {
15625 /*
15626 * if a test_id was specified, use test_range to
15627 * cover only that test.
15628 */
15629 if (test_id >= valid_range) {
15630 pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
15631 test_suite);
15632 return -EINVAL;
15633 }
15634
15635 test_range[0] = test_id;
15636 test_range[1] = test_id;
15637 } else if (*test_name) {
15638 /*
15639 * if a test_name was specified, find it and setup
15640 * test_range to cover only that test.
15641 */
15642 int idx = find_test_index(test_name);
15643
15644 if (idx < 0) {
15645 pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
15646 test_name, test_suite);
15647 return -EINVAL;
15648 }
15649 test_range[0] = idx;
15650 test_range[1] = idx;
15651 } else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
15652 /*
15653 * check that the supplied test_range is valid.
15654 */
15655 if (test_range[0] < 0 || test_range[1] >= valid_range) {
15656 pr_err("test_bpf: test_range is out of bound for '%s' suite.\n",
15657 test_suite);
15658 return -EINVAL;
15659 }
15660
15661 if (test_range[1] < test_range[0]) {
15662 pr_err("test_bpf: test_range is ending before it starts.\n");
15663 return -EINVAL;
15664 }
15665 }
15666
15667 return 0;
15668}
15669
15670static int __init test_bpf_init(void)
15671{
15672 struct bpf_array *progs = NULL;
15673 int ret;
15674
15675 if (strlen(test_suite) &&
15676 strcmp(test_suite, "test_bpf") &&
15677 strcmp(test_suite, "test_tail_calls") &&
15678 strcmp(test_suite, "test_skb_segment")) {
15679 pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
15680 return -EINVAL;
15681 }
15682
15683 /*
15684 * if test_suite is not specified, but test_id, test_name or test_range
15685 * is specified, set 'test_bpf' as the default test suite.
15686 */
15687 if (!strlen(test_suite) &&
15688 (test_id != -1 || strlen(test_name) ||
15689 (test_range[0] != 0 || test_range[1] != INT_MAX))) {
15690 pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
15691 strscpy(test_suite, "test_bpf", sizeof(test_suite));
15692 }
15693
15694 ret = prepare_test_range();
15695 if (ret < 0)
15696 return ret;
15697
15698 if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
15699 ret = test_bpf();
15700 if (ret)
15701 return ret;
15702 }
15703
15704 if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
15705 ret = prepare_tail_call_tests(&progs);
15706 if (ret)
15707 return ret;
15708 ret = test_tail_calls(progs);
15709 destroy_tail_call_tests(progs);
15710 if (ret)
15711 return ret;
15712 }
15713
15714 if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
15715 return test_skb_segment();
15716
15717 return 0;
15718}
15719
15720static void __exit test_bpf_exit(void)
15721{
15722}
15723
15724module_init(test_bpf_init);
15725module_exit(test_bpf_exit);
15726
15727MODULE_LICENSE("GPL");
15728
