{
	"map element value store of cleared call register",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
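	/* R1 was clobbered by the helper call above (caller-saved
	 * R1-R5 become unreadable), so this store must be rejected.
	 */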
	BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_1, 0),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_48b = { 3 },
	.errstr_unpriv = "R1 !read_ok",
	.errstr = "R1 !read_ok",
	.result = REJECT,
	.result_unpriv = REJECT,
},
{
	"map element value with unaligned store",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 17),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 3),
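	/* R0 now points 3 bytes into the map value: the BPF_DW
	 * stores below are unaligned but stay within the 48-byte
	 * value.
	 */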
	BPF_ST_MEM(BPF_DW, BPF_REG_0, 0, 42),
	BPF_ST_MEM(BPF_DW, BPF_REG_0, 2, 43),
	BPF_ST_MEM(BPF_DW, BPF_REG_0, -2, 44),
	BPF_MOV64_REG(BPF_REG_8, BPF_REG_0),
	BPF_ST_MEM(BPF_DW, BPF_REG_8, 0, 32),
	BPF_ST_MEM(BPF_DW, BPF_REG_8, 2, 33),
	BPF_ST_MEM(BPF_DW, BPF_REG_8, -2, 34),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_8, 5),
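	/* R8 = value + 8 is 8-byte aligned again, but the +4 and -7
	 * offsets below are not.
	 */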
	BPF_ST_MEM(BPF_DW, BPF_REG_8, 0, 22),
	BPF_ST_MEM(BPF_DW, BPF_REG_8, 4, 23),
	BPF_ST_MEM(BPF_DW, BPF_REG_8, -7, 24),
	BPF_MOV64_REG(BPF_REG_7, BPF_REG_8),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_7, 3),
	BPF_ST_MEM(BPF_DW, BPF_REG_7, 0, 22),
	BPF_ST_MEM(BPF_DW, BPF_REG_7, 4, 23),
	BPF_ST_MEM(BPF_DW, BPF_REG_7, -4, 24),
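	/* at exit R0 still holds a map value pointer, which an
	 * unprivileged program may not return ("R0 leaks addr").
	 */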
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_48b = { 3 },
	.errstr_unpriv = "R0 leaks addr",
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
},
{
	"map element value with unaligned load",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 11),
	BPF_LDX_MEM(BPF_W, BPF_REG_1, BPF_REG_0, 0),
	BPF_JMP_IMM(BPF_JGE, BPF_REG_1, MAX_ENTRIES, 9),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 3),
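	/* R0 now points 3 bytes into the map value: the BPF_DW
	 * loads below are unaligned but in bounds.
	 */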
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_0, 0),
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_0, 2),
	BPF_MOV64_REG(BPF_REG_8, BPF_REG_0),
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_8, 0),
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_8, 2),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, 5),
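	/* R0 = value + 8 is aligned again; only the +4 load below
	 * is unaligned.
	 */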
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_0, 0),
	BPF_LDX_MEM(BPF_DW, BPF_REG_7, BPF_REG_0, 4),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_48b = { 3 },
	.errstr_unpriv = "R0 leaks addr",
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
},
{
	"map element value is preserved across register spilling",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_0, offsetof(struct test_val, foo)),
	BPF_ST_MEM(BPF_DW, BPF_REG_0, 0, 42),
	BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -184),
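	/* spill the map value pointer to fp-184, fill it back into
	 * R3, and store through it: the verifier must keep tracking
	 * the spilled register as a map value pointer.
	 */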
	BPF_STX_MEM(BPF_DW, BPF_REG_1, BPF_REG_0, 0),
	BPF_LDX_MEM(BPF_DW, BPF_REG_3, BPF_REG_1, 0),
	BPF_ST_MEM(BPF_DW, BPF_REG_3, 0, 42),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_48b = { 3 },
	.errstr_unpriv = "R0 leaks addr",
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
},