PageRenderTime 47ms CodeModel.GetById 12ms RepoModel.GetById 0ms app.codeStats 1ms

/lib/test_bpf.c

http://github.com/mirrors/linux-2.6
C | 7056 lines | 6641 code | 213 blank | 202 comment | 118 complexity | 0fd381440d1ca2c149064fdc373bbc55 MD5 | raw file
Possible License(s): LGPL-2.0, AGPL-1.0, GPL-2.0

Large files are truncated, but you can click here to view the full file

  1. // SPDX-License-Identifier: GPL-2.0-only
  2. /*
  3. * Testsuite for BPF interpreter and BPF JIT compiler
  4. *
  5. * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
  6. */
  7. #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
  8. #include <linux/init.h>
  9. #include <linux/module.h>
  10. #include <linux/filter.h>
  11. #include <linux/bpf.h>
  12. #include <linux/skbuff.h>
  13. #include <linux/netdevice.h>
  14. #include <linux/if_vlan.h>
  15. #include <linux/random.h>
  16. #include <linux/highmem.h>
  17. #include <linux/sched.h>
  18. /* General test specific settings */
  19. #define MAX_SUBTESTS 3
  20. #define MAX_TESTRUNS 1000
  21. #define MAX_DATA 128
  22. #define MAX_INSNS 512
  23. #define MAX_K 0xffffFFFF
  24. /* Few constants used to init test 'skb' */
  25. #define SKB_TYPE 3
  26. #define SKB_MARK 0x1234aaaa
  27. #define SKB_HASH 0x1234aaab
  28. #define SKB_QUEUE_MAP 123
  29. #define SKB_VLAN_TCI 0xffff
  30. #define SKB_VLAN_PRESENT 1
  31. #define SKB_DEV_IFINDEX 577
  32. #define SKB_DEV_TYPE 588
  33. /* Redefine REGs to make tests less verbose */
  34. #define R0 BPF_REG_0
  35. #define R1 BPF_REG_1
  36. #define R2 BPF_REG_2
  37. #define R3 BPF_REG_3
  38. #define R4 BPF_REG_4
  39. #define R5 BPF_REG_5
  40. #define R6 BPF_REG_6
  41. #define R7 BPF_REG_7
  42. #define R8 BPF_REG_8
  43. #define R9 BPF_REG_9
  44. #define R10 BPF_REG_10
  45. /* Flags that can be passed to test cases */
  46. #define FLAG_NO_DATA BIT(0)
  47. #define FLAG_EXPECTED_FAIL BIT(1)
  48. #define FLAG_SKB_FRAG BIT(2)
  49. enum {
  50. CLASSIC = BIT(6), /* Old BPF instructions only. */
  51. INTERNAL = BIT(7), /* Extended instruction set. */
  52. };
  53. #define TEST_TYPE_MASK (CLASSIC | INTERNAL)
  54. struct bpf_test {
  55. const char *descr;
  56. union {
  57. struct sock_filter insns[MAX_INSNS];
  58. struct bpf_insn insns_int[MAX_INSNS];
  59. struct {
  60. void *insns;
  61. unsigned int len;
  62. } ptr;
  63. } u;
  64. __u8 aux;
  65. __u8 data[MAX_DATA];
  66. struct {
  67. int data_size;
  68. __u32 result;
  69. } test[MAX_SUBTESTS];
  70. int (*fill_helper)(struct bpf_test *self);
  71. int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
  72. __u8 frag_data[MAX_DATA];
  73. int stack_depth; /* for eBPF only, since tests don't call verifier */
  74. };
  75. /* Large test cases need separate allocation and fill handler. */
  76. static int bpf_fill_maxinsns1(struct bpf_test *self)
  77. {
  78. unsigned int len = BPF_MAXINSNS;
  79. struct sock_filter *insn;
  80. __u32 k = ~0;
  81. int i;
  82. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  83. if (!insn)
  84. return -ENOMEM;
  85. for (i = 0; i < len; i++, k--)
  86. insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
  87. self->u.ptr.insns = insn;
  88. self->u.ptr.len = len;
  89. return 0;
  90. }
  91. static int bpf_fill_maxinsns2(struct bpf_test *self)
  92. {
  93. unsigned int len = BPF_MAXINSNS;
  94. struct sock_filter *insn;
  95. int i;
  96. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  97. if (!insn)
  98. return -ENOMEM;
  99. for (i = 0; i < len; i++)
  100. insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
  101. self->u.ptr.insns = insn;
  102. self->u.ptr.len = len;
  103. return 0;
  104. }
  105. static int bpf_fill_maxinsns3(struct bpf_test *self)
  106. {
  107. unsigned int len = BPF_MAXINSNS;
  108. struct sock_filter *insn;
  109. struct rnd_state rnd;
  110. int i;
  111. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  112. if (!insn)
  113. return -ENOMEM;
  114. prandom_seed_state(&rnd, 3141592653589793238ULL);
  115. for (i = 0; i < len - 1; i++) {
  116. __u32 k = prandom_u32_state(&rnd);
  117. insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
  118. }
  119. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
  120. self->u.ptr.insns = insn;
  121. self->u.ptr.len = len;
  122. return 0;
  123. }
  124. static int bpf_fill_maxinsns4(struct bpf_test *self)
  125. {
  126. unsigned int len = BPF_MAXINSNS + 1;
  127. struct sock_filter *insn;
  128. int i;
  129. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  130. if (!insn)
  131. return -ENOMEM;
  132. for (i = 0; i < len; i++)
  133. insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
  134. self->u.ptr.insns = insn;
  135. self->u.ptr.len = len;
  136. return 0;
  137. }
  138. static int bpf_fill_maxinsns5(struct bpf_test *self)
  139. {
  140. unsigned int len = BPF_MAXINSNS;
  141. struct sock_filter *insn;
  142. int i;
  143. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  144. if (!insn)
  145. return -ENOMEM;
  146. insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
  147. for (i = 1; i < len - 1; i++)
  148. insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
  149. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
  150. self->u.ptr.insns = insn;
  151. self->u.ptr.len = len;
  152. return 0;
  153. }
  154. static int bpf_fill_maxinsns6(struct bpf_test *self)
  155. {
  156. unsigned int len = BPF_MAXINSNS;
  157. struct sock_filter *insn;
  158. int i;
  159. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  160. if (!insn)
  161. return -ENOMEM;
  162. for (i = 0; i < len - 1; i++)
  163. insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
  164. SKF_AD_VLAN_TAG_PRESENT);
  165. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
  166. self->u.ptr.insns = insn;
  167. self->u.ptr.len = len;
  168. return 0;
  169. }
  170. static int bpf_fill_maxinsns7(struct bpf_test *self)
  171. {
  172. unsigned int len = BPF_MAXINSNS;
  173. struct sock_filter *insn;
  174. int i;
  175. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  176. if (!insn)
  177. return -ENOMEM;
  178. for (i = 0; i < len - 4; i++)
  179. insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
  180. SKF_AD_CPU);
  181. insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
  182. insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
  183. SKF_AD_CPU);
  184. insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
  185. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
  186. self->u.ptr.insns = insn;
  187. self->u.ptr.len = len;
  188. return 0;
  189. }
  190. static int bpf_fill_maxinsns8(struct bpf_test *self)
  191. {
  192. unsigned int len = BPF_MAXINSNS;
  193. struct sock_filter *insn;
  194. int i, jmp_off = len - 3;
  195. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  196. if (!insn)
  197. return -ENOMEM;
  198. insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
  199. for (i = 1; i < len - 1; i++)
  200. insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
  201. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
  202. self->u.ptr.insns = insn;
  203. self->u.ptr.len = len;
  204. return 0;
  205. }
  206. static int bpf_fill_maxinsns9(struct bpf_test *self)
  207. {
  208. unsigned int len = BPF_MAXINSNS;
  209. struct bpf_insn *insn;
  210. int i;
  211. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  212. if (!insn)
  213. return -ENOMEM;
  214. insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
  215. insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
  216. insn[2] = BPF_EXIT_INSN();
  217. for (i = 3; i < len - 2; i++)
  218. insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
  219. insn[len - 2] = BPF_EXIT_INSN();
  220. insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
  221. self->u.ptr.insns = insn;
  222. self->u.ptr.len = len;
  223. return 0;
  224. }
  225. static int bpf_fill_maxinsns10(struct bpf_test *self)
  226. {
  227. unsigned int len = BPF_MAXINSNS, hlen = len - 2;
  228. struct bpf_insn *insn;
  229. int i;
  230. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  231. if (!insn)
  232. return -ENOMEM;
  233. for (i = 0; i < hlen / 2; i++)
  234. insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
  235. for (i = hlen - 1; i > hlen / 2; i--)
  236. insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
  237. insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
  238. insn[hlen] = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
  239. insn[hlen + 1] = BPF_EXIT_INSN();
  240. self->u.ptr.insns = insn;
  241. self->u.ptr.len = len;
  242. return 0;
  243. }
  244. static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
  245. unsigned int plen)
  246. {
  247. struct sock_filter *insn;
  248. unsigned int rlen;
  249. int i, j;
  250. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  251. if (!insn)
  252. return -ENOMEM;
  253. rlen = (len % plen) - 1;
  254. for (i = 0; i + plen < len; i += plen)
  255. for (j = 0; j < plen; j++)
  256. insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
  257. plen - 1 - j, 0, 0);
  258. for (j = 0; j < rlen; j++)
  259. insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
  260. 0, 0);
  261. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
  262. self->u.ptr.insns = insn;
  263. self->u.ptr.len = len;
  264. return 0;
  265. }
  266. static int bpf_fill_maxinsns11(struct bpf_test *self)
  267. {
  268. /* Hits 70 passes on x86_64, so cannot get JITed there. */
  269. return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
  270. }
  271. static int bpf_fill_maxinsns12(struct bpf_test *self)
  272. {
  273. unsigned int len = BPF_MAXINSNS;
  274. struct sock_filter *insn;
  275. int i = 0;
  276. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  277. if (!insn)
  278. return -ENOMEM;
  279. insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
  280. for (i = 1; i < len - 1; i++)
  281. insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
  282. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
  283. self->u.ptr.insns = insn;
  284. self->u.ptr.len = len;
  285. return 0;
  286. }
  287. static int bpf_fill_maxinsns13(struct bpf_test *self)
  288. {
  289. unsigned int len = BPF_MAXINSNS;
  290. struct sock_filter *insn;
  291. int i = 0;
  292. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  293. if (!insn)
  294. return -ENOMEM;
  295. for (i = 0; i < len - 3; i++)
  296. insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
  297. insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
  298. insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
  299. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
  300. self->u.ptr.insns = insn;
  301. self->u.ptr.len = len;
  302. return 0;
  303. }
  304. static int bpf_fill_ja(struct bpf_test *self)
  305. {
  306. /* Hits exactly 11 passes on x86_64 JIT. */
  307. return __bpf_fill_ja(self, 12, 9);
  308. }
  309. static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
  310. {
  311. unsigned int len = BPF_MAXINSNS;
  312. struct sock_filter *insn;
  313. int i;
  314. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  315. if (!insn)
  316. return -ENOMEM;
  317. for (i = 0; i < len - 1; i += 2) {
  318. insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
  319. insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  320. SKF_AD_OFF + SKF_AD_CPU);
  321. }
  322. insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
  323. self->u.ptr.insns = insn;
  324. self->u.ptr.len = len;
  325. return 0;
  326. }
  327. static int __bpf_fill_stxdw(struct bpf_test *self, int size)
  328. {
  329. unsigned int len = BPF_MAXINSNS;
  330. struct bpf_insn *insn;
  331. int i;
  332. insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
  333. if (!insn)
  334. return -ENOMEM;
  335. insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
  336. insn[1] = BPF_ST_MEM(size, R10, -40, 42);
  337. for (i = 2; i < len - 2; i++)
  338. insn[i] = BPF_STX_XADD(size, R10, R0, -40);
  339. insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
  340. insn[len - 1] = BPF_EXIT_INSN();
  341. self->u.ptr.insns = insn;
  342. self->u.ptr.len = len;
  343. self->stack_depth = 40;
  344. return 0;
  345. }
  346. static int bpf_fill_stxw(struct bpf_test *self)
  347. {
  348. return __bpf_fill_stxdw(self, BPF_W);
  349. }
  350. static int bpf_fill_stxdw(struct bpf_test *self)
  351. {
  352. return __bpf_fill_stxdw(self, BPF_DW);
  353. }
  354. static struct bpf_test tests[] = {
  355. {
  356. "TAX",
  357. .u.insns = {
  358. BPF_STMT(BPF_LD | BPF_IMM, 1),
  359. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  360. BPF_STMT(BPF_LD | BPF_IMM, 2),
  361. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  362. BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
  363. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  364. BPF_STMT(BPF_LD | BPF_LEN, 0),
  365. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  366. BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
  367. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
  368. BPF_STMT(BPF_RET | BPF_A, 0)
  369. },
  370. CLASSIC,
  371. { 10, 20, 30, 40, 50 },
  372. { { 2, 10 }, { 3, 20 }, { 4, 30 } },
  373. },
  374. {
  375. "TXA",
  376. .u.insns = {
  377. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  378. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  379. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  380. BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
  381. },
  382. CLASSIC,
  383. { 10, 20, 30, 40, 50 },
  384. { { 1, 2 }, { 3, 6 }, { 4, 8 } },
  385. },
  386. {
  387. "ADD_SUB_MUL_K",
  388. .u.insns = {
  389. BPF_STMT(BPF_LD | BPF_IMM, 1),
  390. BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
  391. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  392. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  393. BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
  394. BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
  395. BPF_STMT(BPF_RET | BPF_A, 0)
  396. },
  397. CLASSIC | FLAG_NO_DATA,
  398. { },
  399. { { 0, 0xfffffffd } }
  400. },
  401. {
  402. "DIV_MOD_KX",
  403. .u.insns = {
  404. BPF_STMT(BPF_LD | BPF_IMM, 8),
  405. BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
  406. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  407. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  408. BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
  409. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  410. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  411. BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
  412. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  413. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  414. BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
  415. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  416. BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
  417. BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
  418. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  419. BPF_STMT(BPF_RET | BPF_A, 0)
  420. },
  421. CLASSIC | FLAG_NO_DATA,
  422. { },
  423. { { 0, 0x20000000 } }
  424. },
  425. {
  426. "AND_OR_LSH_K",
  427. .u.insns = {
  428. BPF_STMT(BPF_LD | BPF_IMM, 0xff),
  429. BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
  430. BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
  431. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  432. BPF_STMT(BPF_LD | BPF_IMM, 0xf),
  433. BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
  434. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  435. BPF_STMT(BPF_RET | BPF_A, 0)
  436. },
  437. CLASSIC | FLAG_NO_DATA,
  438. { },
  439. { { 0, 0x800000ff }, { 1, 0x800000ff } },
  440. },
  441. {
  442. "LD_IMM_0",
  443. .u.insns = {
  444. BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
  445. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
  446. BPF_STMT(BPF_RET | BPF_K, 0),
  447. BPF_STMT(BPF_RET | BPF_K, 1),
  448. },
  449. CLASSIC,
  450. { },
  451. { { 1, 1 } },
  452. },
  453. {
  454. "LD_IND",
  455. .u.insns = {
  456. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  457. BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
  458. BPF_STMT(BPF_RET | BPF_K, 1)
  459. },
  460. CLASSIC,
  461. { },
  462. { { 1, 0 }, { 10, 0 }, { 60, 0 } },
  463. },
  464. {
  465. "LD_ABS",
  466. .u.insns = {
  467. BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
  468. BPF_STMT(BPF_RET | BPF_K, 1)
  469. },
  470. CLASSIC,
  471. { },
  472. { { 1, 0 }, { 10, 0 }, { 60, 0 } },
  473. },
  474. {
  475. "LD_ABS_LL",
  476. .u.insns = {
  477. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
  478. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  479. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
  480. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  481. BPF_STMT(BPF_RET | BPF_A, 0)
  482. },
  483. CLASSIC,
  484. { 1, 2, 3 },
  485. { { 1, 0 }, { 2, 3 } },
  486. },
  487. {
  488. "LD_IND_LL",
  489. .u.insns = {
  490. BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
  491. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  492. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  493. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  494. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
  495. BPF_STMT(BPF_RET | BPF_A, 0)
  496. },
  497. CLASSIC,
  498. { 1, 2, 3, 0xff },
  499. { { 1, 1 }, { 3, 3 }, { 4, 0xff } },
  500. },
  501. {
  502. "LD_ABS_NET",
  503. .u.insns = {
  504. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
  505. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  506. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
  507. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  508. BPF_STMT(BPF_RET | BPF_A, 0)
  509. },
  510. CLASSIC,
  511. { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
  512. { { 15, 0 }, { 16, 3 } },
  513. },
  514. {
  515. "LD_IND_NET",
  516. .u.insns = {
  517. BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
  518. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  519. BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
  520. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  521. BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
  522. BPF_STMT(BPF_RET | BPF_A, 0)
  523. },
  524. CLASSIC,
  525. { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
  526. { { 14, 0 }, { 15, 1 }, { 17, 3 } },
  527. },
  528. {
  529. "LD_PKTTYPE",
  530. .u.insns = {
  531. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  532. SKF_AD_OFF + SKF_AD_PKTTYPE),
  533. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  534. BPF_STMT(BPF_RET | BPF_K, 1),
  535. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  536. SKF_AD_OFF + SKF_AD_PKTTYPE),
  537. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  538. BPF_STMT(BPF_RET | BPF_K, 1),
  539. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  540. SKF_AD_OFF + SKF_AD_PKTTYPE),
  541. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
  542. BPF_STMT(BPF_RET | BPF_K, 1),
  543. BPF_STMT(BPF_RET | BPF_A, 0)
  544. },
  545. CLASSIC,
  546. { },
  547. { { 1, 3 }, { 10, 3 } },
  548. },
  549. {
  550. "LD_MARK",
  551. .u.insns = {
  552. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  553. SKF_AD_OFF + SKF_AD_MARK),
  554. BPF_STMT(BPF_RET | BPF_A, 0)
  555. },
  556. CLASSIC,
  557. { },
  558. { { 1, SKB_MARK}, { 10, SKB_MARK} },
  559. },
  560. {
  561. "LD_RXHASH",
  562. .u.insns = {
  563. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  564. SKF_AD_OFF + SKF_AD_RXHASH),
  565. BPF_STMT(BPF_RET | BPF_A, 0)
  566. },
  567. CLASSIC,
  568. { },
  569. { { 1, SKB_HASH}, { 10, SKB_HASH} },
  570. },
  571. {
  572. "LD_QUEUE",
  573. .u.insns = {
  574. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  575. SKF_AD_OFF + SKF_AD_QUEUE),
  576. BPF_STMT(BPF_RET | BPF_A, 0)
  577. },
  578. CLASSIC,
  579. { },
  580. { { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
  581. },
  582. {
  583. "LD_PROTOCOL",
  584. .u.insns = {
  585. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
  586. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
  587. BPF_STMT(BPF_RET | BPF_K, 0),
  588. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  589. SKF_AD_OFF + SKF_AD_PROTOCOL),
  590. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  591. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  592. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
  593. BPF_STMT(BPF_RET | BPF_K, 0),
  594. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  595. BPF_STMT(BPF_RET | BPF_A, 0)
  596. },
  597. CLASSIC,
  598. { 10, 20, 30 },
  599. { { 10, ETH_P_IP }, { 100, ETH_P_IP } },
  600. },
  601. {
  602. "LD_VLAN_TAG",
  603. .u.insns = {
  604. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  605. SKF_AD_OFF + SKF_AD_VLAN_TAG),
  606. BPF_STMT(BPF_RET | BPF_A, 0)
  607. },
  608. CLASSIC,
  609. { },
  610. {
  611. { 1, SKB_VLAN_TCI },
  612. { 10, SKB_VLAN_TCI }
  613. },
  614. },
  615. {
  616. "LD_VLAN_TAG_PRESENT",
  617. .u.insns = {
  618. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  619. SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
  620. BPF_STMT(BPF_RET | BPF_A, 0)
  621. },
  622. CLASSIC,
  623. { },
  624. {
  625. { 1, SKB_VLAN_PRESENT },
  626. { 10, SKB_VLAN_PRESENT }
  627. },
  628. },
  629. {
  630. "LD_IFINDEX",
  631. .u.insns = {
  632. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  633. SKF_AD_OFF + SKF_AD_IFINDEX),
  634. BPF_STMT(BPF_RET | BPF_A, 0)
  635. },
  636. CLASSIC,
  637. { },
  638. { { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
  639. },
  640. {
  641. "LD_HATYPE",
  642. .u.insns = {
  643. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  644. SKF_AD_OFF + SKF_AD_HATYPE),
  645. BPF_STMT(BPF_RET | BPF_A, 0)
  646. },
  647. CLASSIC,
  648. { },
  649. { { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
  650. },
  651. {
  652. "LD_CPU",
  653. .u.insns = {
  654. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  655. SKF_AD_OFF + SKF_AD_CPU),
  656. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  657. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  658. SKF_AD_OFF + SKF_AD_CPU),
  659. BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
  660. BPF_STMT(BPF_RET | BPF_A, 0)
  661. },
  662. CLASSIC,
  663. { },
  664. { { 1, 0 }, { 10, 0 } },
  665. },
  666. {
  667. "LD_NLATTR",
  668. .u.insns = {
  669. BPF_STMT(BPF_LDX | BPF_IMM, 2),
  670. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  671. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  672. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  673. SKF_AD_OFF + SKF_AD_NLATTR),
  674. BPF_STMT(BPF_RET | BPF_A, 0)
  675. },
  676. CLASSIC,
  677. #ifdef __BIG_ENDIAN
  678. { 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
  679. #else
  680. { 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
  681. #endif
  682. { { 4, 0 }, { 20, 6 } },
  683. },
  684. {
  685. "LD_NLATTR_NEST",
  686. .u.insns = {
  687. BPF_STMT(BPF_LD | BPF_IMM, 2),
  688. BPF_STMT(BPF_LDX | BPF_IMM, 3),
  689. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  690. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  691. BPF_STMT(BPF_LD | BPF_IMM, 2),
  692. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  693. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  694. BPF_STMT(BPF_LD | BPF_IMM, 2),
  695. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  696. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  697. BPF_STMT(BPF_LD | BPF_IMM, 2),
  698. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  699. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  700. BPF_STMT(BPF_LD | BPF_IMM, 2),
  701. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  702. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  703. BPF_STMT(BPF_LD | BPF_IMM, 2),
  704. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  705. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  706. BPF_STMT(BPF_LD | BPF_IMM, 2),
  707. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  708. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  709. BPF_STMT(BPF_LD | BPF_IMM, 2),
  710. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  711. SKF_AD_OFF + SKF_AD_NLATTR_NEST),
  712. BPF_STMT(BPF_RET | BPF_A, 0)
  713. },
  714. CLASSIC,
  715. #ifdef __BIG_ENDIAN
  716. { 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
  717. #else
  718. { 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
  719. #endif
  720. { { 4, 0 }, { 20, 10 } },
  721. },
  722. {
  723. "LD_PAYLOAD_OFF",
  724. .u.insns = {
  725. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  726. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  727. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  728. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  729. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  730. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  731. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  732. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  733. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  734. SKF_AD_OFF + SKF_AD_PAY_OFFSET),
  735. BPF_STMT(BPF_RET | BPF_A, 0)
  736. },
  737. CLASSIC,
  738. /* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethtype IPv4 (0x0800),
  739. * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
  740. * id 9737, seq 1, length 64
  741. */
  742. { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  743. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  744. 0x08, 0x00,
  745. 0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
  746. 0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
  747. { { 30, 0 }, { 100, 42 } },
  748. },
  749. {
  750. "LD_ANC_XOR",
  751. .u.insns = {
  752. BPF_STMT(BPF_LD | BPF_IMM, 10),
  753. BPF_STMT(BPF_LDX | BPF_IMM, 300),
  754. BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
  755. SKF_AD_OFF + SKF_AD_ALU_XOR_X),
  756. BPF_STMT(BPF_RET | BPF_A, 0)
  757. },
  758. CLASSIC,
  759. { },
  760. { { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
  761. },
  762. {
  763. "SPILL_FILL",
  764. .u.insns = {
  765. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  766. BPF_STMT(BPF_LD | BPF_IMM, 2),
  767. BPF_STMT(BPF_ALU | BPF_RSH, 1),
  768. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  769. BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
  770. BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
  771. BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
  772. BPF_STMT(BPF_STX, 15), /* M3 = len */
  773. BPF_STMT(BPF_LDX | BPF_MEM, 1),
  774. BPF_STMT(BPF_LD | BPF_MEM, 2),
  775. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  776. BPF_STMT(BPF_LDX | BPF_MEM, 15),
  777. BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
  778. BPF_STMT(BPF_RET | BPF_A, 0)
  779. },
  780. CLASSIC,
  781. { },
  782. { { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
  783. },
  784. {
  785. "JEQ",
  786. .u.insns = {
  787. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  788. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  789. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
  790. BPF_STMT(BPF_RET | BPF_K, 1),
  791. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  792. },
  793. CLASSIC,
  794. { 3, 3, 3, 3, 3 },
  795. { { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
  796. },
  797. {
  798. "JGT",
  799. .u.insns = {
  800. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  801. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  802. BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
  803. BPF_STMT(BPF_RET | BPF_K, 1),
  804. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  805. },
  806. CLASSIC,
  807. { 4, 4, 4, 3, 3 },
  808. { { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
  809. },
  810. {
  811. "JGE (jt 0), test 1",
  812. .u.insns = {
  813. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  814. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  815. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
  816. BPF_STMT(BPF_RET | BPF_K, 1),
  817. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  818. },
  819. CLASSIC,
  820. { 4, 4, 4, 3, 3 },
  821. { { 2, 0 }, { 3, 1 }, { 4, 1 } },
  822. },
  823. {
  824. "JGE (jt 0), test 2",
  825. .u.insns = {
  826. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  827. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
  828. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
  829. BPF_STMT(BPF_RET | BPF_K, 1),
  830. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  831. },
  832. CLASSIC,
  833. { 4, 4, 5, 3, 3 },
  834. { { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
  835. },
  836. {
  837. "JGE",
  838. .u.insns = {
  839. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  840. BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
  841. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
  842. BPF_STMT(BPF_RET | BPF_K, 10),
  843. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
  844. BPF_STMT(BPF_RET | BPF_K, 20),
  845. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
  846. BPF_STMT(BPF_RET | BPF_K, 30),
  847. BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
  848. BPF_STMT(BPF_RET | BPF_K, 40),
  849. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  850. },
  851. CLASSIC,
  852. { 1, 2, 3, 4, 5 },
  853. { { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
  854. },
  855. {
  856. "JSET",
  857. .u.insns = {
  858. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  859. BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
  860. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  861. BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
  862. BPF_STMT(BPF_LDX | BPF_LEN, 0),
  863. BPF_STMT(BPF_MISC | BPF_TXA, 0),
  864. BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
  865. BPF_STMT(BPF_MISC | BPF_TAX, 0),
  866. BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
  867. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
  868. BPF_STMT(BPF_RET | BPF_K, 10),
  869. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
  870. BPF_STMT(BPF_RET | BPF_K, 20),
  871. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  872. BPF_STMT(BPF_RET | BPF_K, 30),
  873. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  874. BPF_STMT(BPF_RET | BPF_K, 30),
  875. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  876. BPF_STMT(BPF_RET | BPF_K, 30),
  877. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  878. BPF_STMT(BPF_RET | BPF_K, 30),
  879. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
  880. BPF_STMT(BPF_RET | BPF_K, 30),
  881. BPF_STMT(BPF_RET | BPF_K, MAX_K)
  882. },
  883. CLASSIC,
  884. { 0, 0xAA, 0x55, 1 },
  885. { { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
  886. },
  887. {
  888. "tcpdump port 22",
  889. .u.insns = {
  890. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
  891. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
  892. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
  893. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
  894. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
  895. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
  896. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
  897. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
  898. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
  899. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
  900. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
  901. BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
  902. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
  903. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
  904. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
  905. BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
  906. BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
  907. BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
  908. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
  909. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
  910. BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
  911. BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
  912. BPF_STMT(BPF_RET | BPF_K, 0xffff),
  913. BPF_STMT(BPF_RET | BPF_K, 0),
  914. },
  915. CLASSIC,
  916. /* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
  917. * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
  918. * seq 1305692979:1305693027, ack 3650467037, win 65535,
  919. * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
  920. */
  921. { 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
  922. 0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
  923. 0x08, 0x00,
  924. 0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
  925. 0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
  926. 0x0a, 0x01, 0x01, 0x95, /* ip src */
  927. 0x0a, 0x01, 0x02, 0x0a, /* ip dst */
  928. 0xc2, 0x24,
  929. 0x00, 0x16 /* dst port */ },
  930. { { 10, 0 }, { 30, 0 }, { 100, 65535 } },
  931. },
	{
		/* Classic BPF program as emitted by libpcap for a compound
		 * tcpdump expression (ssh traffic with a non-empty TCP
		 * payload); exercises scratch memory (BPF_ST/BPF_MEM),
		 * MSH header-length loads and TAX with a stray K operand.
		 */
		"tcpdump complex",
		.u.insns = {
			/* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
			 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
			 * (len > 115 or len < 30000000000)' -d
			 */
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
			BPF_STMT(BPF_ST, 1),
			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
			BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
			BPF_STMT(BPF_LD | BPF_MEM, 1),
			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
			BPF_STMT(BPF_ST, 5),
			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
			BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
			BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
			BPF_STMT(BPF_LD | BPF_MEM, 5),
			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
			BPF_STMT(BPF_LD | BPF_LEN, 0),
			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
			BPF_STMT(BPF_RET | BPF_K, 0xffff),
			BPF_STMT(BPF_RET | BPF_K, 0),
		},
		CLASSIC,
		/* Same TCP/IP test frame as the preceding tcpdump case:
		 * dst port 22, 48 bytes of TCP payload.
		 */
		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
		  0x08, 0x00,
		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
		  0x0a, 0x01, 0x01, 0x95, /* ip src */
		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
		  0xc2, 0x24,
		  0x00, 0x16 /* dst port */ },
		/* Truncated runs (10 / 30 bytes) must reject; the full
		 * 100-byte frame must accept with 0xffff.
		 */
		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
	},
	{
		/* TXA then RET A: with no prior loads, both classic BPF
		 * registers must read back as zero, so the filter returns 0
		 * for any input length.
		 */
		"RET_A",
		.u.insns = {
			/* check that uninitialized X and A contain zeros */
			BPF_STMT(BPF_MISC | BPF_TXA, 0),
			BPF_STMT(BPF_RET | BPF_A, 0)
		},
		CLASSIC,
		{ },
		{ {1, 0}, {2, 0} },
	},
	{
		/* eBPF ALU64 smoke test:
		 * R1 = 1 + 2 = 3; R1 -= 3 -> 0; R1 += -1 -> -1; R1 *= 3 -> -3.
		 * Low 32 bits of -3 are 0xfffffffd, which is what the
		 * harness compares against.
		 */
		"INT: ADD trivial",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_ADD, R1, 2),
			BPF_ALU64_IMM(BPF_MOV, R2, 3),
			BPF_ALU64_REG(BPF_SUB, R1, R2),
			BPF_ALU64_IMM(BPF_ADD, R1, -1),
			BPF_ALU64_IMM(BPF_MUL, R1, 3),
			BPF_ALU64_REG(BPF_MOV, R0, R1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfffffffd } }
	},
	{
		/* 64-bit register multiply: R1 = -1 * 3 = -3; the JEQ
		 * compares the low 32 bits against 0xfffffffd and the
		 * program returns 1 on success, 0 (fallthrough exit with
		 * R0 = -1 truncated) on failure.
		 */
		"INT: MUL_X",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, -1),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_ALU64_IMM(BPF_MOV, R2, 3),
			BPF_ALU64_REG(BPF_MUL, R1, R2),
			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{
		/* 32-bit MOV zero-extends, so R1 = 0xffffffff; a 64-bit
		 * multiply by 3 yields 0x2fffffffd, and >> 8 gives
		 * 0x2ffffff — verifies MUL64 keeps the high bits that a
		 * 32-bit multiply would drop (see MUL32_X below).
		 */
		"INT: MUL_X2",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
			BPF_ALU32_IMM(BPF_MOV, R1, -1),
			BPF_ALU32_IMM(BPF_MOV, R2, 3),
			BPF_ALU64_REG(BPF_MUL, R1, R2),
			BPF_ALU64_IMM(BPF_RSH, R1, 8),
			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{
		/* 32-bit multiply counterpart of MUL_X2: ALU32 MUL truncates
		 * 0xffffffff * 3 to 0xfffffffd, so after >> 8 the expected
		 * value is 0xffffff (high 32 bits discarded).
		 */
		"INT: MUL32_X",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, -1),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_ALU32_IMM(BPF_MOV, R2, 3),
			BPF_ALU32_REG(BPF_MUL, R1, R2),
			BPF_ALU64_IMM(BPF_RSH, R1, 8),
			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{
		/* Have to test all register combinations, since
		 * JITing of different registers will produce
		 * different asm code.
		 */
		/* Pattern: seed R0..R9 with 0..9, adjust with +20/-10, then
		 * for each destination Rk accumulate ADD Rk, R0..R9 and
		 * check the running total (155, 456, ..., 2957380) after
		 * each register, so a miscompiled source/dest pairing is
		 * pinpointed by which JEQ fails.
		 */
		"INT: ADD 64-bit",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_IMM(BPF_ADD, R0, 20),
			BPF_ALU64_IMM(BPF_ADD, R1, 20),
			BPF_ALU64_IMM(BPF_ADD, R2, 20),
			BPF_ALU64_IMM(BPF_ADD, R3, 20),
			BPF_ALU64_IMM(BPF_ADD, R4, 20),
			BPF_ALU64_IMM(BPF_ADD, R5, 20),
			BPF_ALU64_IMM(BPF_ADD, R6, 20),
			BPF_ALU64_IMM(BPF_ADD, R7, 20),
			BPF_ALU64_IMM(BPF_ADD, R8, 20),
			BPF_ALU64_IMM(BPF_ADD, R9, 20),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			BPF_ALU64_IMM(BPF_SUB, R1, 10),
			BPF_ALU64_IMM(BPF_SUB, R2, 10),
			BPF_ALU64_IMM(BPF_SUB, R3, 10),
			BPF_ALU64_IMM(BPF_SUB, R4, 10),
			BPF_ALU64_IMM(BPF_SUB, R5, 10),
			BPF_ALU64_IMM(BPF_SUB, R6, 10),
			BPF_ALU64_IMM(BPF_SUB, R7, 10),
			BPF_ALU64_IMM(BPF_SUB, R8, 10),
			BPF_ALU64_IMM(BPF_SUB, R9, 10),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R1, R0),
			BPF_ALU64_REG(BPF_ADD, R1, R1),
			BPF_ALU64_REG(BPF_ADD, R1, R2),
			BPF_ALU64_REG(BPF_ADD, R1, R3),
			BPF_ALU64_REG(BPF_ADD, R1, R4),
			BPF_ALU64_REG(BPF_ADD, R1, R5),
			BPF_ALU64_REG(BPF_ADD, R1, R6),
			BPF_ALU64_REG(BPF_ADD, R1, R7),
			BPF_ALU64_REG(BPF_ADD, R1, R8),
			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R2, R0),
			BPF_ALU64_REG(BPF_ADD, R2, R1),
			BPF_ALU64_REG(BPF_ADD, R2, R2),
			BPF_ALU64_REG(BPF_ADD, R2, R3),
			BPF_ALU64_REG(BPF_ADD, R2, R4),
			BPF_ALU64_REG(BPF_ADD, R2, R5),
			BPF_ALU64_REG(BPF_ADD, R2, R6),
			BPF_ALU64_REG(BPF_ADD, R2, R7),
			BPF_ALU64_REG(BPF_ADD, R2, R8),
			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R3, R0),
			BPF_ALU64_REG(BPF_ADD, R3, R1),
			BPF_ALU64_REG(BPF_ADD, R3, R2),
			BPF_ALU64_REG(BPF_ADD, R3, R3),
			BPF_ALU64_REG(BPF_ADD, R3, R4),
			BPF_ALU64_REG(BPF_ADD, R3, R5),
			BPF_ALU64_REG(BPF_ADD, R3, R6),
			BPF_ALU64_REG(BPF_ADD, R3, R7),
			BPF_ALU64_REG(BPF_ADD, R3, R8),
			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R4, R0),
			BPF_ALU64_REG(BPF_ADD, R4, R1),
			BPF_ALU64_REG(BPF_ADD, R4, R2),
			BPF_ALU64_REG(BPF_ADD, R4, R3),
			BPF_ALU64_REG(BPF_ADD, R4, R4),
			BPF_ALU64_REG(BPF_ADD, R4, R5),
			BPF_ALU64_REG(BPF_ADD, R4, R6),
			BPF_ALU64_REG(BPF_ADD, R4, R7),
			BPF_ALU64_REG(BPF_ADD, R4, R8),
			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R5, R0),
			BPF_ALU64_REG(BPF_ADD, R5, R1),
			BPF_ALU64_REG(BPF_ADD, R5, R2),
			BPF_ALU64_REG(BPF_ADD, R5, R3),
			BPF_ALU64_REG(BPF_ADD, R5, R4),
			BPF_ALU64_REG(BPF_ADD, R5, R5),
			BPF_ALU64_REG(BPF_ADD, R5, R6),
			BPF_ALU64_REG(BPF_ADD, R5, R7),
			BPF_ALU64_REG(BPF_ADD, R5, R8),
			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R6, R0),
			BPF_ALU64_REG(BPF_ADD, R6, R1),
			BPF_ALU64_REG(BPF_ADD, R6, R2),
			BPF_ALU64_REG(BPF_ADD, R6, R3),
			BPF_ALU64_REG(BPF_ADD, R6, R4),
			BPF_ALU64_REG(BPF_ADD, R6, R5),
			BPF_ALU64_REG(BPF_ADD, R6, R6),
			BPF_ALU64_REG(BPF_ADD, R6, R7),
			BPF_ALU64_REG(BPF_ADD, R6, R8),
			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R7, R0),
			BPF_ALU64_REG(BPF_ADD, R7, R1),
			BPF_ALU64_REG(BPF_ADD, R7, R2),
			BPF_ALU64_REG(BPF_ADD, R7, R3),
			BPF_ALU64_REG(BPF_ADD, R7, R4),
			BPF_ALU64_REG(BPF_ADD, R7, R5),
			BPF_ALU64_REG(BPF_ADD, R7, R6),
			BPF_ALU64_REG(BPF_ADD, R7, R7),
			BPF_ALU64_REG(BPF_ADD, R7, R8),
			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R8, R0),
			BPF_ALU64_REG(BPF_ADD, R8, R1),
			BPF_ALU64_REG(BPF_ADD, R8, R2),
			BPF_ALU64_REG(BPF_ADD, R8, R3),
			BPF_ALU64_REG(BPF_ADD, R8, R4),
			BPF_ALU64_REG(BPF_ADD, R8, R5),
			BPF_ALU64_REG(BPF_ADD, R8, R6),
			BPF_ALU64_REG(BPF_ADD, R8, R7),
			BPF_ALU64_REG(BPF_ADD, R8, R8),
			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_ADD, R9, R0),
			BPF_ALU64_REG(BPF_ADD, R9, R1),
			BPF_ALU64_REG(BPF_ADD, R9, R2),
			BPF_ALU64_REG(BPF_ADD, R9, R3),
			BPF_ALU64_REG(BPF_ADD, R9, R4),
			BPF_ALU64_REG(BPF_ADD, R9, R5),
			BPF_ALU64_REG(BPF_ADD, R9, R6),
			BPF_ALU64_REG(BPF_ADD, R9, R7),
			BPF_ALU64_REG(BPF_ADD, R9, R8),
			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
			BPF_ALU64_REG(BPF_MOV, R0, R9),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2957380 } }
	},
	{
		/* 32-bit twin of "INT: ADD 64-bit": same all-register-pairs
		 * accumulation with ALU32 adds and the same checkpoint
		 * values (155 ... 2957380); intermediate sums stay below
		 * 2^32 so truncation never changes the result.
		 */
		"INT: ADD 32-bit",
		.u.insns_int = {
			BPF_ALU32_IMM(BPF_MOV, R0, 20),
			BPF_ALU32_IMM(BPF_MOV, R1, 1),
			BPF_ALU32_IMM(BPF_MOV, R2, 2),
			BPF_ALU32_IMM(BPF_MOV, R3, 3),
			BPF_ALU32_IMM(BPF_MOV, R4, 4),
			BPF_ALU32_IMM(BPF_MOV, R5, 5),
			BPF_ALU32_IMM(BPF_MOV, R6, 6),
			BPF_ALU32_IMM(BPF_MOV, R7, 7),
			BPF_ALU32_IMM(BPF_MOV, R8, 8),
			BPF_ALU32_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_IMM(BPF_ADD, R1, 10),
			BPF_ALU64_IMM(BPF_ADD, R2, 10),
			BPF_ALU64_IMM(BPF_ADD, R3, 10),
			BPF_ALU64_IMM(BPF_ADD, R4, 10),
			BPF_ALU64_IMM(BPF_ADD, R5, 10),
			BPF_ALU64_IMM(BPF_ADD, R6, 10),
			BPF_ALU64_IMM(BPF_ADD, R7, 10),
			BPF_ALU64_IMM(BPF_ADD, R8, 10),
			BPF_ALU64_IMM(BPF_ADD, R9, 10),
			BPF_ALU32_REG(BPF_ADD, R0, R1),
			BPF_ALU32_REG(BPF_ADD, R0, R2),
			BPF_ALU32_REG(BPF_ADD, R0, R3),
			BPF_ALU32_REG(BPF_ADD, R0, R4),
			BPF_ALU32_REG(BPF_ADD, R0, R5),
			BPF_ALU32_REG(BPF_ADD, R0, R6),
			BPF_ALU32_REG(BPF_ADD, R0, R7),
			BPF_ALU32_REG(BPF_ADD, R0, R8),
			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R1, R0),
			BPF_ALU32_REG(BPF_ADD, R1, R1),
			BPF_ALU32_REG(BPF_ADD, R1, R2),
			BPF_ALU32_REG(BPF_ADD, R1, R3),
			BPF_ALU32_REG(BPF_ADD, R1, R4),
			BPF_ALU32_REG(BPF_ADD, R1, R5),
			BPF_ALU32_REG(BPF_ADD, R1, R6),
			BPF_ALU32_REG(BPF_ADD, R1, R7),
			BPF_ALU32_REG(BPF_ADD, R1, R8),
			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R2, R0),
			BPF_ALU32_REG(BPF_ADD, R2, R1),
			BPF_ALU32_REG(BPF_ADD, R2, R2),
			BPF_ALU32_REG(BPF_ADD, R2, R3),
			BPF_ALU32_REG(BPF_ADD, R2, R4),
			BPF_ALU32_REG(BPF_ADD, R2, R5),
			BPF_ALU32_REG(BPF_ADD, R2, R6),
			BPF_ALU32_REG(BPF_ADD, R2, R7),
			BPF_ALU32_REG(BPF_ADD, R2, R8),
			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R3, R0),
			BPF_ALU32_REG(BPF_ADD, R3, R1),
			BPF_ALU32_REG(BPF_ADD, R3, R2),
			BPF_ALU32_REG(BPF_ADD, R3, R3),
			BPF_ALU32_REG(BPF_ADD, R3, R4),
			BPF_ALU32_REG(BPF_ADD, R3, R5),
			BPF_ALU32_REG(BPF_ADD, R3, R6),
			BPF_ALU32_REG(BPF_ADD, R3, R7),
			BPF_ALU32_REG(BPF_ADD, R3, R8),
			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R4, R0),
			BPF_ALU32_REG(BPF_ADD, R4, R1),
			BPF_ALU32_REG(BPF_ADD, R4, R2),
			BPF_ALU32_REG(BPF_ADD, R4, R3),
			BPF_ALU32_REG(BPF_ADD, R4, R4),
			BPF_ALU32_REG(BPF_ADD, R4, R5),
			BPF_ALU32_REG(BPF_ADD, R4, R6),
			BPF_ALU32_REG(BPF_ADD, R4, R7),
			BPF_ALU32_REG(BPF_ADD, R4, R8),
			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R5, R0),
			BPF_ALU32_REG(BPF_ADD, R5, R1),
			BPF_ALU32_REG(BPF_ADD, R5, R2),
			BPF_ALU32_REG(BPF_ADD, R5, R3),
			BPF_ALU32_REG(BPF_ADD, R5, R4),
			BPF_ALU32_REG(BPF_ADD, R5, R5),
			BPF_ALU32_REG(BPF_ADD, R5, R6),
			BPF_ALU32_REG(BPF_ADD, R5, R7),
			BPF_ALU32_REG(BPF_ADD, R5, R8),
			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R6, R0),
			BPF_ALU32_REG(BPF_ADD, R6, R1),
			BPF_ALU32_REG(BPF_ADD, R6, R2),
			BPF_ALU32_REG(BPF_ADD, R6, R3),
			BPF_ALU32_REG(BPF_ADD, R6, R4),
			BPF_ALU32_REG(BPF_ADD, R6, R5),
			BPF_ALU32_REG(BPF_ADD, R6, R6),
			BPF_ALU32_REG(BPF_ADD, R6, R7),
			BPF_ALU32_REG(BPF_ADD, R6, R8),
			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R7, R0),
			BPF_ALU32_REG(BPF_ADD, R7, R1),
			BPF_ALU32_REG(BPF_ADD, R7, R2),
			BPF_ALU32_REG(BPF_ADD, R7, R3),
			BPF_ALU32_REG(BPF_ADD, R7, R4),
			BPF_ALU32_REG(BPF_ADD, R7, R5),
			BPF_ALU32_REG(BPF_ADD, R7, R6),
			BPF_ALU32_REG(BPF_ADD, R7, R7),
			BPF_ALU32_REG(BPF_ADD, R7, R8),
			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R8, R0),
			BPF_ALU32_REG(BPF_ADD, R8, R1),
			BPF_ALU32_REG(BPF_ADD, R8, R2),
			BPF_ALU32_REG(BPF_ADD, R8, R3),
			BPF_ALU32_REG(BPF_ADD, R8, R4),
			BPF_ALU32_REG(BPF_ADD, R8, R5),
			BPF_ALU32_REG(BPF_ADD, R8, R6),
			BPF_ALU32_REG(BPF_ADD, R8, R7),
			BPF_ALU32_REG(BPF_ADD, R8, R8),
			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
			BPF_EXIT_INSN(),
			BPF_ALU32_REG(BPF_ADD, R9, R0),
			BPF_ALU32_REG(BPF_ADD, R9, R1),
			BPF_ALU32_REG(BPF_ADD, R9, R2),
			BPF_ALU32_REG(BPF_ADD, R9, R3),
			BPF_ALU32_REG(BPF_ADD, R9, R4),
			BPF_ALU32_REG(BPF_ADD, R9, R5),
			BPF_ALU32_REG(BPF_ADD, R9, R6),
			BPF_ALU32_REG(BPF_ADD, R9, R7),
			BPF_ALU32_REG(BPF_ADD, R9, R8),
			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
			BPF_ALU32_REG(BPF_MOV, R0, R9),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 2957380 } }
	},
	{ /* Mainly checking JIT here. */
		/* SUB over all register pairs: seed R0..R9 with 0..9, then
		 * for each destination Rk subtract every other register plus
		 * an immediate 10; finish by negating R0 and subtracting the
		 * rest, expecting 11 in R0.
		 */
		"INT: SUB",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R0),
			BPF_ALU64_REG(BPF_SUB, R1, R2),
			BPF_ALU64_REG(BPF_SUB, R1, R3),
			BPF_ALU64_REG(BPF_SUB, R1, R4),
			BPF_ALU64_REG(BPF_SUB, R1, R5),
			BPF_ALU64_REG(BPF_SUB, R1, R6),
			BPF_ALU64_REG(BPF_SUB, R1, R7),
			BPF_ALU64_REG(BPF_SUB, R1, R8),
			BPF_ALU64_REG(BPF_SUB, R1, R9),
			BPF_ALU64_IMM(BPF_SUB, R1, 10),
			BPF_ALU64_REG(BPF_SUB, R2, R0),
			BPF_ALU64_REG(BPF_SUB, R2, R1),
			BPF_ALU64_REG(BPF_SUB, R2, R3),
			BPF_ALU64_REG(BPF_SUB, R2, R4),
			BPF_ALU64_REG(BPF_SUB, R2, R5),
			BPF_ALU64_REG(BPF_SUB, R2, R6),
			BPF_ALU64_REG(BPF_SUB, R2, R7),
			BPF_ALU64_REG(BPF_SUB, R2, R8),
			BPF_ALU64_REG(BPF_SUB, R2, R9),
			BPF_ALU64_IMM(BPF_SUB, R2, 10),
			BPF_ALU64_REG(BPF_SUB, R3, R0),
			BPF_ALU64_REG(BPF_SUB, R3, R1),
			BPF_ALU64_REG(BPF_SUB, R3, R2),
			BPF_ALU64_REG(BPF_SUB, R3, R4),
			BPF_ALU64_REG(BPF_SUB, R3, R5),
			BPF_ALU64_REG(BPF_SUB, R3, R6),
			BPF_ALU64_REG(BPF_SUB, R3, R7),
			BPF_ALU64_REG(BPF_SUB, R3, R8),
			BPF_ALU64_REG(BPF_SUB, R3, R9),
			BPF_ALU64_IMM(BPF_SUB, R3, 10),
			BPF_ALU64_REG(BPF_SUB, R4, R0),
			BPF_ALU64_REG(BPF_SUB, R4, R1),
			BPF_ALU64_REG(BPF_SUB, R4, R2),
			BPF_ALU64_REG(BPF_SUB, R4, R3),
			BPF_ALU64_REG(BPF_SUB, R4, R5),
			BPF_ALU64_REG(BPF_SUB, R4, R6),
			BPF_ALU64_REG(BPF_SUB, R4, R7),
			BPF_ALU64_REG(BPF_SUB, R4, R8),
			BPF_ALU64_REG(BPF_SUB, R4, R9),
			BPF_ALU64_IMM(BPF_SUB, R4, 10),
			BPF_ALU64_REG(BPF_SUB, R5, R0),
			BPF_ALU64_REG(BPF_SUB, R5, R1),
			BPF_ALU64_REG(BPF_SUB, R5, R2),
			BPF_ALU64_REG(BPF_SUB, R5, R3),
			BPF_ALU64_REG(BPF_SUB, R5, R4),
			BPF_ALU64_REG(BPF_SUB, R5, R6),
			BPF_ALU64_REG(BPF_SUB, R5, R7),
			BPF_ALU64_REG(BPF_SUB, R5, R8),
			BPF_ALU64_REG(BPF_SUB, R5, R9),
			BPF_ALU64_IMM(BPF_SUB, R5, 10),
			BPF_ALU64_REG(BPF_SUB, R6, R0),
			BPF_ALU64_REG(BPF_SUB, R6, R1),
			BPF_ALU64_REG(BPF_SUB, R6, R2),
			BPF_ALU64_REG(BPF_SUB, R6, R3),
			BPF_ALU64_REG(BPF_SUB, R6, R4),
			BPF_ALU64_REG(BPF_SUB, R6, R5),
			BPF_ALU64_REG(BPF_SUB, R6, R7),
			BPF_ALU64_REG(BPF_SUB, R6, R8),
			BPF_ALU64_REG(BPF_SUB, R6, R9),
			BPF_ALU64_IMM(BPF_SUB, R6, 10),
			BPF_ALU64_REG(BPF_SUB, R7, R0),
			BPF_ALU64_REG(BPF_SUB, R7, R1),
			BPF_ALU64_REG(BPF_SUB, R7, R2),
			BPF_ALU64_REG(BPF_SUB, R7, R3),
			BPF_ALU64_REG(BPF_SUB, R7, R4),
			BPF_ALU64_REG(BPF_SUB, R7, R5),
			BPF_ALU64_REG(BPF_SUB, R7, R6),
			BPF_ALU64_REG(BPF_SUB, R7, R8),
			BPF_ALU64_REG(BPF_SUB, R7, R9),
			BPF_ALU64_IMM(BPF_SUB, R7, 10),
			BPF_ALU64_REG(BPF_SUB, R8, R0),
			BPF_ALU64_REG(BPF_SUB, R8, R1),
			BPF_ALU64_REG(BPF_SUB, R8, R2),
			BPF_ALU64_REG(BPF_SUB, R8, R3),
			BPF_ALU64_REG(BPF_SUB, R8, R4),
			BPF_ALU64_REG(BPF_SUB, R8, R5),
			BPF_ALU64_REG(BPF_SUB, R8, R6),
			BPF_ALU64_REG(BPF_SUB, R8, R7),
			BPF_ALU64_REG(BPF_SUB, R8, R9),
			BPF_ALU64_IMM(BPF_SUB, R8, 10),
			BPF_ALU64_REG(BPF_SUB, R9, R0),
			BPF_ALU64_REG(BPF_SUB, R9, R1),
			BPF_ALU64_REG(BPF_SUB, R9, R2),
			BPF_ALU64_REG(BPF_SUB, R9, R3),
			BPF_ALU64_REG(BPF_SUB, R9, R4),
			BPF_ALU64_REG(BPF_SUB, R9, R5),
			BPF_ALU64_REG(BPF_SUB, R9, R6),
			BPF_ALU64_REG(BPF_SUB, R9, R7),
			BPF_ALU64_REG(BPF_SUB, R9, R8),
			BPF_ALU64_IMM(BPF_SUB, R9, 10),
			BPF_ALU64_IMM(BPF_SUB, R0, 10),
			BPF_ALU64_IMM(BPF_NEG, R0, 0),
			BPF_ALU64_REG(BPF_SUB, R0, R1),
			BPF_ALU64_REG(BPF_SUB, R0, R2),
			BPF_ALU64_REG(BPF_SUB, R0, R3),
			BPF_ALU64_REG(BPF_SUB, R0, R4),
			BPF_ALU64_REG(BPF_SUB, R0, R5),
			BPF_ALU64_REG(BPF_SUB, R0, R6),
			BPF_ALU64_REG(BPF_SUB, R0, R7),
			BPF_ALU64_REG(BPF_SUB, R0, R8),
			BPF_ALU64_REG(BPF_SUB, R0, R9),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 11 } }
	},
	{ /* Mainly checking JIT here. */
		/* Two idiomatic register-zeroing forms — SUB Rn, Rn and
		 * XOR Rn, Rn — must agree for every register; the
		 * interleaved MOVs dirty unrelated registers so a JIT
		 * cannot pass by accident. Returns 1 on success.
		 */
		"INT: XOR",
		.u.insns_int = {
			BPF_ALU64_REG(BPF_SUB, R0, R0),
			BPF_ALU64_REG(BPF_XOR, R1, R1),
			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R2, R2),
			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R2, R2),
			BPF_ALU64_REG(BPF_XOR, R3, R3),
			BPF_ALU64_IMM(BPF_MOV, R0, 10),
			BPF_ALU64_IMM(BPF_MOV, R1, -1),
			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R3, R3),
			BPF_ALU64_REG(BPF_XOR, R4, R4),
			BPF_ALU64_IMM(BPF_MOV, R2, 1),
			BPF_ALU64_IMM(BPF_MOV, R5, -1),
			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R4, R4),
			BPF_ALU64_REG(BPF_XOR, R5, R5),
			BPF_ALU64_IMM(BPF_MOV, R3, 1),
			BPF_ALU64_IMM(BPF_MOV, R7, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R5, 1),
			BPF_ALU64_REG(BPF_SUB, R5, R5),
			BPF_ALU64_REG(BPF_XOR, R6, R6),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R8, -1),
			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R6, R6),
			BPF_ALU64_REG(BPF_XOR, R7, R7),
			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R7, R7),
			BPF_ALU64_REG(BPF_XOR, R8, R8),
			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R8, R8),
			BPF_ALU64_REG(BPF_XOR, R9, R9),
			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R9, R9),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_SUB, R1, R1),
			BPF_ALU64_REG(BPF_XOR, R0, R0),
			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_MOV, R0, 1),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 1 } }
	},
	{ /* Mainly checking JIT here. */
		/* Chained 64-bit multiplies across all registers; checks the
		 * full product (439084800), then the high word (0x5a924) and
		 * the sign-extended low word (0xebb90000) of a larger
		 * product, and finally returns the high 32 bits of a third
		 * accumulated product (0x35d97ef2).
		 */
		"INT: MUL",
		.u.insns_int = {
			BPF_ALU64_IMM(BPF_MOV, R0, 11),
			BPF_ALU64_IMM(BPF_MOV, R1, 1),
			BPF_ALU64_IMM(BPF_MOV, R2, 2),
			BPF_ALU64_IMM(BPF_MOV, R3, 3),
			BPF_ALU64_IMM(BPF_MOV, R4, 4),
			BPF_ALU64_IMM(BPF_MOV, R5, 5),
			BPF_ALU64_IMM(BPF_MOV, R6, 6),
			BPF_ALU64_IMM(BPF_MOV, R7, 7),
			BPF_ALU64_IMM(BPF_MOV, R8, 8),
			BPF_ALU64_IMM(BPF_MOV, R9, 9),
			BPF_ALU64_REG(BPF_MUL, R0, R0),
			BPF_ALU64_REG(BPF_MUL, R0, R1),
			BPF_ALU64_REG(BPF_MUL, R0, R2),
			BPF_ALU64_REG(BPF_MUL, R0, R3),
			BPF_ALU64_REG(BPF_MUL, R0, R4),
			BPF_ALU64_REG(BPF_MUL, R0, R5),
			BPF_ALU64_REG(BPF_MUL, R0, R6),
			BPF_ALU64_REG(BPF_MUL, R0, R7),
			BPF_ALU64_REG(BPF_MUL, R0, R8),
			BPF_ALU64_REG(BPF_MUL, R0, R9),
			BPF_ALU64_IMM(BPF_MUL, R0, 10),
			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R1, R0),
			BPF_ALU64_REG(BPF_MUL, R1, R2),
			BPF_ALU64_REG(BPF_MUL, R1, R3),
			BPF_ALU64_REG(BPF_MUL, R1, R4),
			BPF_ALU64_REG(BPF_MUL, R1, R5),
			BPF_ALU64_REG(BPF_MUL, R1, R6),
			BPF_ALU64_REG(BPF_MUL, R1, R7),
			BPF_ALU64_REG(BPF_MUL, R1, R8),
			BPF_ALU64_REG(BPF_MUL, R1, R9),
			BPF_ALU64_IMM(BPF_MUL, R1, 10),
			BPF_ALU64_REG(BPF_MOV, R2, R1),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_IMM(BPF_LSH, R1, 32),
			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
			BPF_EXIT_INSN(),
			BPF_ALU64_REG(BPF_MUL, R2, R0),
			BPF_ALU64_REG(BPF_MUL, R2, R1),
			BPF_ALU64_REG(BPF_MUL, R2, R3),
			BPF_ALU64_REG(BPF_MUL, R2, R4),
			BPF_ALU64_REG(BPF_MUL, R2, R5),
			BPF_ALU64_REG(BPF_MUL, R2, R6),
			BPF_ALU64_REG(BPF_MUL, R2, R7),
			BPF_ALU64_REG(BPF_MUL, R2, R8),
			BPF_ALU64_REG(BPF_MUL, R2, R9),
			BPF_ALU64_IMM(BPF_MUL, R2, 10),
			BPF_ALU64_IMM(BPF_RSH, R2, 32),
			BPF_ALU64_REG(BPF_MOV, R0, R2),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0x35d97ef2 } }
	},
	{ /* Mainly checking JIT here. */
		/* Propagate ~0 through R0..R9 with MOV64, overwrite every
		 * register with an ALU64 zero, then sum them all; any
		 * surviving non-zero bits would perturb the expected
		 * 0xfefe result.
		 */
		"MOV REG64",
		.u.insns_int = {
			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
			BPF_MOV64_REG(R1, R0),
			BPF_MOV64_REG(R2, R1),
			BPF_MOV64_REG(R3, R2),
			BPF_MOV64_REG(R4, R3),
			BPF_MOV64_REG(R5, R4),
			BPF_MOV64_REG(R6, R5),
			BPF_MOV64_REG(R7, R6),
			BPF_MOV64_REG(R8, R7),
			BPF_MOV64_REG(R9, R8),
			BPF_ALU64_IMM(BPF_MOV, R0, 0),
			BPF_ALU64_IMM(BPF_MOV, R1, 0),
			BPF_ALU64_IMM(BPF_MOV, R2, 0),
			BPF_ALU64_IMM(BPF_MOV, R3, 0),
			BPF_ALU64_IMM(BPF_MOV, R4, 0),
			BPF_ALU64_IMM(BPF_MOV, R5, 0),
			BPF_ALU64_IMM(BPF_MOV, R6, 0),
			BPF_ALU64_IMM(BPF_MOV, R7, 0),
			BPF_ALU64_IMM(BPF_MOV, R8, 0),
			BPF_ALU64_IMM(BPF_MOV, R9, 0),
			BPF_ALU64_REG(BPF_ADD, R0, R0),
			BPF_ALU64_REG(BPF_ADD, R0, R1),
			BPF_ALU64_REG(BPF_ADD, R0, R2),
			BPF_ALU64_REG(BPF_ADD, R0, R3),
			BPF_ALU64_REG(BPF_ADD, R0, R4),
			BPF_ALU64_REG(BPF_ADD, R0, R5),
			BPF_ALU64_REG(BPF_ADD, R0, R6),
			BPF_ALU64_REG(BPF_ADD, R0, R7),
			BPF_ALU64_REG(BPF_ADD, R0, R8),
			BPF_ALU64_REG(BPF_ADD, R0, R9),
			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
			BPF_EXIT_INSN(),
		},
		INTERNAL,
		{ },
		{ { 0, 0xfefe } }
	},
  1671. { /* Mainly checking JIT here. */
  1672. "MOV REG32",
  1673. .u.insns_int = {
  1674. BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
  1675. BPF_MOV64_REG(R1, R0),
  1676. BPF_MOV64_REG(R2, R1),
  1677. BPF_MOV64_REG(R3, R2),
  1678. BPF_MOV64_REG(R4, R3),
  1679. BPF_MOV64_REG(R5, R4),
  1680. BPF_MOV64_REG(R6, R5),
  1681. BPF_MOV64_REG(R7, R6),
  1682. BPF_MOV64_REG(R8, R7),
  1683. BPF_MOV64_REG(R9, R8),
  1684. BPF_ALU32_IMM(BPF_MOV, R0, 0),
  1685. BPF_ALU32_IMM(BPF_MOV, R1, 0),
  1686. BPF_ALU32_IMM(

Large files are truncated, but you can click here to view the full file