Skip to content

Commit a038eac

Browse files
eddyz87 authored and Alexei Starovoitov committed
selftests/bpf: validate __xlated same way as __jited
Both __xlated and __jited work with disassembly. It is logical to have both work in a similar manner. This commit updates __xlated macro handling in test_loader.c by making it expect matches on sequential lines, same way as __jited operates. For example: __xlated("1: *(u64 *)(r10 -16) = r1") ;; matched on line N __xlated("3: r0 = &(void __percpu *)(r0)") ;; matched on line N+1 Also: __xlated("1: *(u64 *)(r10 -16) = r1") ;; matched on line N __xlated("...") ;; not matched __xlated("3: r0 = &(void __percpu *)(r0)") ;; mantched on any ;; line >= N Signed-off-by: Eduard Zingerman <[email protected]> Link: https://lore.kernel.org/r/[email protected] Signed-off-by: Alexei Starovoitov <[email protected]>
1 parent e5bdd6a commit a038eac

File tree

2 files changed

+57
-4
lines changed

2 files changed

+57
-4
lines changed

tools/testing/selftests/bpf/progs/verifier_nocsr.c

Lines changed: 51 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@ __naked void canary_arm64_riscv64(void)
7878
SEC("raw_tp")
7979
__arch_x86_64
8080
__xlated("1: r0 = &(void __percpu *)(r0)")
81+
__xlated("...")
8182
__xlated("3: exit")
8283
__success
8384
__naked void canary_zero_spills(void)
@@ -94,7 +95,9 @@ SEC("raw_tp")
9495
__arch_x86_64
9596
__log_level(4) __msg("stack depth 16")
9697
__xlated("1: *(u64 *)(r10 -16) = r1")
98+
__xlated("...")
9799
__xlated("3: r0 = &(void __percpu *)(r0)")
100+
__xlated("...")
98101
__xlated("5: r2 = *(u64 *)(r10 -16)")
99102
__success
100103
__naked void wrong_reg_in_pattern1(void)
@@ -113,7 +116,9 @@ __naked void wrong_reg_in_pattern1(void)
113116
SEC("raw_tp")
114117
__arch_x86_64
115118
__xlated("1: *(u64 *)(r10 -16) = r6")
119+
__xlated("...")
116120
__xlated("3: r0 = &(void __percpu *)(r0)")
121+
__xlated("...")
117122
__xlated("5: r6 = *(u64 *)(r10 -16)")
118123
__success
119124
__naked void wrong_reg_in_pattern2(void)
@@ -132,7 +137,9 @@ __naked void wrong_reg_in_pattern2(void)
132137
SEC("raw_tp")
133138
__arch_x86_64
134139
__xlated("1: *(u64 *)(r10 -16) = r0")
140+
__xlated("...")
135141
__xlated("3: r0 = &(void __percpu *)(r0)")
142+
__xlated("...")
136143
__xlated("5: r0 = *(u64 *)(r10 -16)")
137144
__success
138145
__naked void wrong_reg_in_pattern3(void)
@@ -151,7 +158,9 @@ __naked void wrong_reg_in_pattern3(void)
151158
SEC("raw_tp")
152159
__arch_x86_64
153160
__xlated("2: *(u64 *)(r2 -16) = r1")
161+
__xlated("...")
154162
__xlated("4: r0 = &(void __percpu *)(r0)")
163+
__xlated("...")
155164
__xlated("6: r1 = *(u64 *)(r10 -16)")
156165
__success
157166
__naked void wrong_base_in_pattern(void)
@@ -171,7 +180,9 @@ __naked void wrong_base_in_pattern(void)
171180
SEC("raw_tp")
172181
__arch_x86_64
173182
__xlated("1: *(u64 *)(r10 -16) = r1")
183+
__xlated("...")
174184
__xlated("3: r0 = &(void __percpu *)(r0)")
185+
__xlated("...")
175186
__xlated("5: r2 = 1")
176187
__success
177188
__naked void wrong_insn_in_pattern(void)
@@ -191,7 +202,9 @@ __naked void wrong_insn_in_pattern(void)
191202
SEC("raw_tp")
192203
__arch_x86_64
193204
__xlated("2: *(u64 *)(r10 -16) = r1")
205+
__xlated("...")
194206
__xlated("4: r0 = &(void __percpu *)(r0)")
207+
__xlated("...")
195208
__xlated("6: r1 = *(u64 *)(r10 -8)")
196209
__success
197210
__naked void wrong_off_in_pattern1(void)
@@ -211,7 +224,9 @@ __naked void wrong_off_in_pattern1(void)
211224
SEC("raw_tp")
212225
__arch_x86_64
213226
__xlated("1: *(u32 *)(r10 -4) = r1")
227+
__xlated("...")
214228
__xlated("3: r0 = &(void __percpu *)(r0)")
229+
__xlated("...")
215230
__xlated("5: r1 = *(u32 *)(r10 -4)")
216231
__success
217232
__naked void wrong_off_in_pattern2(void)
@@ -230,7 +245,9 @@ __naked void wrong_off_in_pattern2(void)
230245
SEC("raw_tp")
231246
__arch_x86_64
232247
__xlated("1: *(u32 *)(r10 -16) = r1")
248+
__xlated("...")
233249
__xlated("3: r0 = &(void __percpu *)(r0)")
250+
__xlated("...")
234251
__xlated("5: r1 = *(u32 *)(r10 -16)")
235252
__success
236253
__naked void wrong_size_in_pattern(void)
@@ -249,7 +266,9 @@ __naked void wrong_size_in_pattern(void)
249266
SEC("raw_tp")
250267
__arch_x86_64
251268
__xlated("2: *(u32 *)(r10 -8) = r1")
269+
__xlated("...")
252270
__xlated("4: r0 = &(void __percpu *)(r0)")
271+
__xlated("...")
253272
__xlated("6: r1 = *(u32 *)(r10 -8)")
254273
__success
255274
__naked void partial_pattern(void)
@@ -275,11 +294,15 @@ __xlated("1: r2 = 2")
275294
/* not patched, spills for -8, -16 not removed */
276295
__xlated("2: *(u64 *)(r10 -8) = r1")
277296
__xlated("3: *(u64 *)(r10 -16) = r2")
297+
__xlated("...")
278298
__xlated("5: r0 = &(void __percpu *)(r0)")
299+
__xlated("...")
279300
__xlated("7: r2 = *(u64 *)(r10 -16)")
280301
__xlated("8: r1 = *(u64 *)(r10 -8)")
281302
/* patched, spills for -24, -32 removed */
303+
__xlated("...")
282304
__xlated("10: r0 = &(void __percpu *)(r0)")
305+
__xlated("...")
283306
__xlated("12: exit")
284307
__success
285308
__naked void min_stack_offset(void)
@@ -308,7 +331,9 @@ __naked void min_stack_offset(void)
308331
SEC("raw_tp")
309332
__arch_x86_64
310333
__xlated("1: *(u64 *)(r10 -8) = r1")
334+
__xlated("...")
311335
__xlated("3: r0 = &(void __percpu *)(r0)")
336+
__xlated("...")
312337
__xlated("5: r1 = *(u64 *)(r10 -8)")
313338
__success
314339
__naked void bad_fixed_read(void)
@@ -330,7 +355,9 @@ __naked void bad_fixed_read(void)
330355
SEC("raw_tp")
331356
__arch_x86_64
332357
__xlated("1: *(u64 *)(r10 -8) = r1")
358+
__xlated("...")
333359
__xlated("3: r0 = &(void __percpu *)(r0)")
360+
__xlated("...")
334361
__xlated("5: r1 = *(u64 *)(r10 -8)")
335362
__success
336363
__naked void bad_fixed_write(void)
@@ -352,7 +379,9 @@ __naked void bad_fixed_write(void)
352379
SEC("raw_tp")
353380
__arch_x86_64
354381
__xlated("6: *(u64 *)(r10 -16) = r1")
382+
__xlated("...")
355383
__xlated("8: r0 = &(void __percpu *)(r0)")
384+
__xlated("...")
356385
__xlated("10: r1 = *(u64 *)(r10 -16)")
357386
__success
358387
__naked void bad_varying_read(void)
@@ -379,7 +408,9 @@ __naked void bad_varying_read(void)
379408
SEC("raw_tp")
380409
__arch_x86_64
381410
__xlated("6: *(u64 *)(r10 -16) = r1")
411+
__xlated("...")
382412
__xlated("8: r0 = &(void __percpu *)(r0)")
413+
__xlated("...")
383414
__xlated("10: r1 = *(u64 *)(r10 -16)")
384415
__success
385416
__naked void bad_varying_write(void)
@@ -406,7 +437,9 @@ __naked void bad_varying_write(void)
406437
SEC("raw_tp")
407438
__arch_x86_64
408439
__xlated("1: *(u64 *)(r10 -8) = r1")
440+
__xlated("...")
409441
__xlated("3: r0 = &(void __percpu *)(r0)")
442+
__xlated("...")
410443
__xlated("5: r1 = *(u64 *)(r10 -8)")
411444
__success
412445
__naked void bad_write_in_subprog(void)
@@ -438,7 +471,9 @@ __naked static void bad_write_in_subprog_aux(void)
438471
SEC("raw_tp")
439472
__arch_x86_64
440473
__xlated("1: *(u64 *)(r10 -8) = r1")
474+
__xlated("...")
441475
__xlated("3: r0 = &(void __percpu *)(r0)")
476+
__xlated("...")
442477
__xlated("5: r1 = *(u64 *)(r10 -8)")
443478
__success
444479
__naked void bad_helper_write(void)
@@ -466,13 +501,19 @@ SEC("raw_tp")
466501
__arch_x86_64
467502
/* main, not patched */
468503
__xlated("1: *(u64 *)(r10 -8) = r1")
504+
__xlated("...")
469505
__xlated("3: r0 = &(void __percpu *)(r0)")
506+
__xlated("...")
470507
__xlated("5: r1 = *(u64 *)(r10 -8)")
508+
__xlated("...")
471509
__xlated("9: call pc+1")
510+
__xlated("...")
472511
__xlated("10: exit")
473512
/* subprogram, patched */
474513
__xlated("11: r1 = 1")
514+
__xlated("...")
475515
__xlated("13: r0 = &(void __percpu *)(r0)")
516+
__xlated("...")
476517
__xlated("15: exit")
477518
__success
478519
__naked void invalidate_one_subprog(void)
@@ -510,12 +551,16 @@ SEC("raw_tp")
510551
__arch_x86_64
511552
/* main */
512553
__xlated("0: r1 = 1")
554+
__xlated("...")
513555
__xlated("2: r0 = &(void __percpu *)(r0)")
556+
__xlated("...")
514557
__xlated("4: call pc+1")
515558
__xlated("5: exit")
516559
/* subprogram */
517560
__xlated("6: r1 = 1")
561+
__xlated("...")
518562
__xlated("8: r0 = &(void __percpu *)(r0)")
563+
__xlated("...")
519564
__xlated("10: *(u64 *)(r10 -16) = r1")
520565
__xlated("11: exit")
521566
__success
@@ -576,7 +621,9 @@ __log_level(4) __msg("stack depth 16")
576621
/* may_goto counter at -16 */
577622
__xlated("0: *(u64 *)(r10 -16) =")
578623
__xlated("1: r1 = 1")
624+
__xlated("...")
579625
__xlated("3: r0 = &(void __percpu *)(r0)")
626+
__xlated("...")
580627
/* may_goto expansion starts */
581628
__xlated("5: r11 = *(u64 *)(r10 -16)")
582629
__xlated("6: if r11 == 0x0 goto pc+3")
@@ -623,13 +670,15 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
623670
__xlated("6: r2 =")
624671
__xlated("7: r3 = 0")
625672
__xlated("8: r4 = 0")
673+
__xlated("...")
626674
/* ... part of the inlined bpf_loop */
627675
__xlated("12: *(u64 *)(r10 -32) = r6")
628676
__xlated("13: *(u64 *)(r10 -24) = r7")
629677
__xlated("14: *(u64 *)(r10 -16) = r8")
630-
/* ... */
678+
__xlated("...")
631679
__xlated("21: call pc+8") /* dummy_loop_callback */
632680
/* ... last insns of the bpf_loop_interaction1 */
681+
__xlated("...")
633682
__xlated("28: r0 = 0")
634683
__xlated("29: exit")
635684
/* dummy_loop_callback */
@@ -670,7 +719,7 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
670719
__xlated("6: *(u64 *)(r10 -16) = r1")
671720
__xlated("7: call")
672721
__xlated("8: r1 = *(u64 *)(r10 -16)")
673-
/* ... */
722+
__xlated("...")
674723
/* ... part of the inlined bpf_loop */
675724
__xlated("15: *(u64 *)(r10 -40) = r6")
676725
__xlated("16: *(u64 *)(r10 -32) = r7")

tools/testing/selftests/bpf/test_loader.c

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -365,6 +365,8 @@ static int parse_test_spec(struct test_loader *tester,
365365
const char *description = NULL;
366366
bool has_unpriv_result = false;
367367
bool has_unpriv_retval = false;
368+
bool unpriv_xlated_on_next_line = true;
369+
bool xlated_on_next_line = true;
368370
bool unpriv_jit_on_next_line;
369371
bool jit_on_next_line;
370372
bool collect_jit = false;
@@ -461,12 +463,14 @@ static int parse_test_spec(struct test_loader *tester,
461463
spec->mode_mask |= UNPRIV;
462464
}
463465
} else if ((msg = skip_dynamic_pfx(s, TEST_TAG_EXPECT_XLATED_PFX))) {
464-
err = push_msg(msg, &spec->priv.expect_xlated);
466+
err = push_disasm_msg(msg, &xlated_on_next_line,
467+
&spec->priv.expect_xlated);
465468
if (err)
466469
goto cleanup;
467470
spec->mode_mask |= PRIV;
468471
} else if ((msg = skip_dynamic_pfx(s, TEST_TAG_EXPECT_XLATED_PFX_UNPRIV))) {
469-
err = push_msg(msg, &spec->unpriv.expect_xlated);
472+
err = push_disasm_msg(msg, &unpriv_xlated_on_next_line,
473+
&spec->unpriv.expect_xlated);
470474
if (err)
471475
goto cleanup;
472476
spec->mode_mask |= UNPRIV;

0 commit comments

Comments
 (0)