@@ -78,6 +78,7 @@ __naked void canary_arm64_riscv64(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("3: exit")
 __success
 __naked void canary_zero_spills(void)
@@ -94,7 +95,9 @@ SEC("raw_tp")
 __arch_x86_64
 __log_level(4) __msg("stack depth 16")
 __xlated("1: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r2 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern1(void)
@@ -113,7 +116,9 @@ __naked void wrong_reg_in_pattern1(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r6")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r6 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern2(void)
@@ -132,7 +137,9 @@ __naked void wrong_reg_in_pattern2(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r0")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r0 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_reg_in_pattern3(void)
@@ -151,7 +158,9 @@ __naked void wrong_reg_in_pattern3(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u64 *)(r2 -16) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void wrong_base_in_pattern(void)
@@ -171,7 +180,9 @@ __naked void wrong_base_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r2 = 1")
 __success
 __naked void wrong_insn_in_pattern(void)
@@ -191,7 +202,9 @@ __naked void wrong_insn_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void wrong_off_in_pattern1(void)
@@ -211,7 +224,9 @@ __naked void wrong_off_in_pattern1(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u32 *)(r10 -4) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u32 *)(r10 -4)")
 __success
 __naked void wrong_off_in_pattern2(void)
@@ -230,7 +245,9 @@ __naked void wrong_off_in_pattern2(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u32 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u32 *)(r10 -16)")
 __success
 __naked void wrong_size_in_pattern(void)
@@ -249,7 +266,9 @@ __naked void wrong_size_in_pattern(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("2: *(u32 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("4: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("6: r1 = *(u32 *)(r10 -8)")
 __success
 __naked void partial_pattern(void)
@@ -275,11 +294,15 @@ __xlated("1: r2 = 2")
 /* not patched, spills for -8, -16 not removed */
 __xlated("2: *(u64 *)(r10 -8) = r1")
 __xlated("3: *(u64 *)(r10 -16) = r2")
+__xlated("...")
 __xlated("5: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("7: r2 = *(u64 *)(r10 -16)")
 __xlated("8: r1 = *(u64 *)(r10 -8)")
 /* patched, spills for -24, -32 removed */
+__xlated("...")
 __xlated("10: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("12: exit")
 __success
 __naked void min_stack_offset(void)
@@ -308,7 +331,9 @@ __naked void min_stack_offset(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_fixed_read(void)
@@ -330,7 +355,9 @@ __naked void bad_fixed_read(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_fixed_write(void)
@@ -352,7 +379,9 @@ __naked void bad_fixed_write(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("6: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void bad_varying_read(void)
@@ -379,7 +408,9 @@ __naked void bad_varying_read(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("6: *(u64 *)(r10 -16) = r1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: r1 = *(u64 *)(r10 -16)")
 __success
 __naked void bad_varying_write(void)
@@ -406,7 +437,9 @@ __naked void bad_varying_write(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_write_in_subprog(void)
@@ -438,7 +471,9 @@ __naked static void bad_write_in_subprog_aux(void)
 SEC("raw_tp")
 __arch_x86_64
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
 __success
 __naked void bad_helper_write(void)
@@ -466,13 +501,19 @@ SEC("raw_tp")
 __arch_x86_64
 /* main, not patched */
 __xlated("1: *(u64 *)(r10 -8) = r1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("5: r1 = *(u64 *)(r10 -8)")
+__xlated("...")
 __xlated("9: call pc+1")
+__xlated("...")
 __xlated("10: exit")
 /* subprogram, patched */
 __xlated("11: r1 = 1")
+__xlated("...")
 __xlated("13: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("15: exit")
 __success
 __naked void invalidate_one_subprog(void)
@@ -510,12 +551,16 @@ SEC("raw_tp")
 __arch_x86_64
 /* main */
 __xlated("0: r1 = 1")
+__xlated("...")
 __xlated("2: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("4: call pc+1")
 __xlated("5: exit")
 /* subprogram */
 __xlated("6: r1 = 1")
+__xlated("...")
 __xlated("8: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 __xlated("10: *(u64 *)(r10 -16) = r1")
 __xlated("11: exit")
 __success
@@ -576,7 +621,9 @@ __log_level(4) __msg("stack depth 16")
 /* may_goto counter at -16 */
 __xlated("0: *(u64 *)(r10 -16) =")
 __xlated("1: r1 = 1")
+__xlated("...")
 __xlated("3: r0 = &(void __percpu *)(r0)")
+__xlated("...")
 /* may_goto expansion starts */
 __xlated("5: r11 = *(u64 *)(r10 -16)")
 __xlated("6: if r11 == 0x0 goto pc+3")
@@ -623,13 +670,15 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
 __xlated("6: r2 =")
 __xlated("7: r3 = 0")
 __xlated("8: r4 = 0")
+__xlated("...")
 /* ... part of the inlined bpf_loop */
 __xlated("12: *(u64 *)(r10 -32) = r6")
 __xlated("13: *(u64 *)(r10 -24) = r7")
 __xlated("14: *(u64 *)(r10 -16) = r8")
-/* ... */
+__xlated("...")
 __xlated("21: call pc+8") /* dummy_loop_callback */
 /* ... last insns of the bpf_loop_interaction1 */
+__xlated("...")
 __xlated("28: r0 = 0")
 __xlated("29: exit")
 /* dummy_loop_callback */
@@ -670,7 +719,7 @@ __xlated("5: r0 = *(u32 *)(r0 +0)")
 __xlated("6: *(u64 *)(r10 -16) = r1")
 __xlated("7: call")
 __xlated("8: r1 = *(u64 *)(r10 -16)")
-/* ... */
+__xlated("...")
 /* ... part of the inlined bpf_loop */
 __xlated("15: *(u64 *)(r10 -40) = r6")
 __xlated("16: *(u64 *)(r10 -32) = r7")