Source: Linux kernel mirror (for testing)
Repository: git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Tags: kernel, os, linux
1// SPDX-License-Identifier: GPL-2.0
2/* Converted from tools/testing/selftests/bpf/verifier/loops1.c */
3
4#include <linux/bpf.h>
5#include <bpf/bpf_helpers.h>
6#include "bpf_misc.h"
7
8SEC("xdp")
9__description("bounded loop, count to 4")
10__success __retval(4)
11__naked void bounded_loop_count_to_4(void)
12{
13 asm volatile (" \
14 r0 = 0; \
15l0_%=: r0 += 1; \
16 if r0 < 4 goto l0_%=; \
17 exit; \
18" ::: __clobber_all);
19}
20
21SEC("tracepoint")
22__description("bounded loop, count to 20")
23__success
24__naked void bounded_loop_count_to_20(void)
25{
26 asm volatile (" \
27 r0 = 0; \
28l0_%=: r0 += 3; \
29 if r0 < 20 goto l0_%=; \
30 exit; \
31" ::: __clobber_all);
32}
33
34SEC("tracepoint")
35__description("bounded loop, count from positive unknown to 4")
36__success
37__naked void from_positive_unknown_to_4(void)
38{
39 asm volatile (" \
40 call %[bpf_get_prandom_u32]; \
41 if r0 s< 0 goto l0_%=; \
42l1_%=: r0 += 1; \
43 if r0 < 4 goto l1_%=; \
44l0_%=: exit; \
45" :
46 : __imm(bpf_get_prandom_u32)
47 : __clobber_all);
48}
49
50SEC("tracepoint")
51__description("bounded loop, count from totally unknown to 4")
52__success
53__naked void from_totally_unknown_to_4(void)
54{
55 asm volatile (" \
56 call %[bpf_get_prandom_u32]; \
57l0_%=: r0 += 1; \
58 if r0 < 4 goto l0_%=; \
59 exit; \
60" :
61 : __imm(bpf_get_prandom_u32)
62 : __clobber_all);
63}
64
65SEC("tracepoint")
66__description("bounded loop, count to 4 with equality")
67__success
68__naked void count_to_4_with_equality(void)
69{
70 asm volatile (" \
71 r0 = 0; \
72l0_%=: r0 += 1; \
73 if r0 != 4 goto l0_%=; \
74 exit; \
75" ::: __clobber_all);
76}
77
78SEC("socket")
79__description("bounded loop, start in the middle")
80__success
81__failure_unpriv __msg_unpriv("back-edge")
82__naked void loop_start_in_the_middle(void)
83{
84 asm volatile (" \
85 r0 = 0; \
86 goto l0_%=; \
87l1_%=: r0 += 1; \
88l0_%=: if r0 < 4 goto l1_%=; \
89 exit; \
90" ::: __clobber_all);
91}
92
93SEC("xdp")
94__description("bounded loop containing a forward jump")
95__success __retval(4)
96__naked void loop_containing_a_forward_jump(void)
97{
98 asm volatile (" \
99 r0 = 0; \
100l1_%=: r0 += 1; \
101 if r0 == r0 goto l0_%=; \
102l0_%=: if r0 < 4 goto l1_%=; \
103 exit; \
104" ::: __clobber_all);
105}
106
107SEC("tracepoint")
108__description("bounded loop that jumps out rather than in")
109__success
110__naked void jumps_out_rather_than_in(void)
111{
112 asm volatile (" \
113 r6 = 0; \
114l1_%=: r6 += 1; \
115 if r6 > 10000 goto l0_%=; \
116 call %[bpf_get_prandom_u32]; \
117 goto l1_%=; \
118l0_%=: exit; \
119" :
120 : __imm(bpf_get_prandom_u32)
121 : __clobber_all);
122}
123
124SEC("tracepoint")
125__description("infinite loop after a conditional jump")
126__failure __msg("program is too large")
127__naked void loop_after_a_conditional_jump(void)
128{
129 asm volatile (" \
130 r0 = 5; \
131 if r0 < 4 goto l0_%=; \
132l1_%=: r0 += 1; \
133 goto l1_%=; \
134l0_%=: exit; \
135" ::: __clobber_all);
136}
137
138SEC("tracepoint")
139__description("bounded recursion")
140__failure
141__msg("recursive call from")
142__naked void bounded_recursion(void)
143{
144 asm volatile (" \
145 r1 = 0; \
146 call bounded_recursion__1; \
147 exit; \
148" ::: __clobber_all);
149}
150
/*
 * Self-recursive subprogram: increments r1, copies it to r0, and recurses
 * while r1 < 4 (so at most 4 nested calls from the r1 = 0 entry). Used by
 * bounded_recursion above to show the verifier rejects recursion even
 * when it is provably bounded.
 */
static __naked __noinline __attribute__((used))
void bounded_recursion__1(void)
{
	asm volatile ("			\
	r1 += 1;			\
	r0 = r1;			\
	if r1 < 4 goto l0_%=;		\
	exit;				\
l0_%=:	call bounded_recursion__1;	\
	exit;				\
"	::: __clobber_all);
}
163
164SEC("tracepoint")
165__description("infinite loop in two jumps")
166__failure __msg("loop detected")
167__naked void infinite_loop_in_two_jumps(void)
168{
169 asm volatile (" \
170 r0 = 0; \
171l1_%=: goto l0_%=; \
172l0_%=: if r0 < 4 goto l1_%=; \
173 exit; \
174" ::: __clobber_all);
175}
176
177SEC("tracepoint")
178__description("infinite loop: three-jump trick")
179__failure __msg("loop detected")
180__naked void infinite_loop_three_jump_trick(void)
181{
182 asm volatile (" \
183 r0 = 0; \
184l2_%=: r0 += 1; \
185 r0 &= 1; \
186 if r0 < 2 goto l0_%=; \
187 exit; \
188l0_%=: r0 += 1; \
189 r0 &= 1; \
190 if r0 < 2 goto l1_%=; \
191 exit; \
192l1_%=: r0 += 1; \
193 r0 &= 1; \
194 if r0 < 2 goto l2_%=; \
195 exit; \
196" ::: __clobber_all);
197}
198
199SEC("xdp")
200__description("not-taken loop with back jump to 1st insn")
201__success __retval(123)
202__naked void back_jump_to_1st_insn_1(void)
203{
204 asm volatile (" \
205l0_%=: r0 = 123; \
206 if r0 == 4 goto l0_%=; \
207 exit; \
208" ::: __clobber_all);
209}
210
211SEC("xdp")
212__description("taken loop with back jump to 1st insn")
213__success __retval(55)
214__naked void back_jump_to_1st_insn_2(void)
215{
216 asm volatile (" \
217 r1 = 10; \
218 r2 = 0; \
219 call back_jump_to_1st_insn_2__1; \
220 exit; \
221" ::: __clobber_all);
222}
223
/*
 * Subprogram whose loop's back-edge targets its first instruction:
 * accumulates r1 + (r1-1) + ... + 1 into r2 (64-bit "r1 != 0" exit test)
 * and returns the sum in r0.
 */
static __naked __noinline __attribute__((used))
void back_jump_to_1st_insn_2__1(void)
{
	asm volatile ("			\
l0_%=:	r2 += r1;			\
	r1 -= 1;			\
	if r1 != 0 goto l0_%=;		\
	r0 = r2;			\
	exit;				\
"	::: __clobber_all);
}
235
236SEC("xdp")
237__description("taken loop with back jump to 1st insn, 2")
238__success __retval(55)
239__naked void jump_to_1st_insn_2(void)
240{
241 asm volatile (" \
242 r1 = 10; \
243 r2 = 0; \
244 call jump_to_1st_insn_2__1; \
245 exit; \
246" ::: __clobber_all);
247}
248
/*
 * Subprogram summing r1 down to 1 into r2, like
 * back_jump_to_1st_insn_2__1, but with a 32-bit loop-exit test
 * ("w1 != 0") — exercises 32-bit bounds tracking across the back-edge.
 */
static __naked __noinline __attribute__((used))
void jump_to_1st_insn_2__1(void)
{
	asm volatile ("			\
l0_%=:	r2 += r1;			\
	r1 -= 1;			\
	if w1 != 0 goto l0_%=;		\
	r0 = r2;			\
	exit;				\
"	::: __clobber_all);
}
260
261SEC("xdp")
262__success
263__naked void not_an_inifinite_loop(void)
264{
265 asm volatile (" \
266 call %[bpf_get_prandom_u32]; \
267 r0 &= 0xff; \
268 *(u64 *)(r10 - 8) = r0; \
269 r0 = 0; \
270loop_%=: \
271 r0 = *(u64 *)(r10 - 8); \
272 if r0 > 10 goto exit_%=; \
273 r0 += 1; \
274 *(u64 *)(r10 - 8) = r0; \
275 r0 = 0; \
276 goto loop_%=; \
277exit_%=: \
278 r0 = 0; \
279 exit; \
280" :
281 : __imm(bpf_get_prandom_u32)
282 : __clobber_all);
283}
284
/*
 * This test case triggered a bug in verifier.c:maybe_exit_scc().
 * Speculative execution path reaches stack access instruction,
 * stops and triggers maybe_exit_scc() w/o accompanying maybe_enter_scc() call.
 *
 * Restricted to x86_64 (__arch_x86_64) and run unprivileged with only
 * CAP_BPF (__caps_unpriv), since Spectre-style speculative-path
 * verification only happens for unprivileged loads.
 */
SEC("socket")
__arch_x86_64
__caps_unpriv(CAP_BPF)
__naked void maybe_exit_scc_bug1(void)
{
	asm volatile (
	"r0 = 100;"
"1:"
	/* Speculative execution path reaches and stops here. */
	"*(u64 *)(r10 - 512) = r0;"
	/* Condition is always false, but verifier speculatively executes the true branch. */
	"if r0 <= 0x0 goto 1b;"
	"exit;"
	::: __clobber_all);
}
305
/* Program license declaration, matching the SPDX GPL-2.0 tag above. */
char _license[] SEC("license") = "GPL";