author    | Alexei Starovoitov <ast@kernel.org> | 2023-12-26 11:11:45 -0800
committer | Andrii Nakryiko <andrii@kernel.org> | 2024-01-03 11:08:23 -0800
commit    | 624cd2a17672f4596fee97a5558bc990778bbcf9 (patch)
tree      | 8b24caf8e239bea1bbc4313d0f4a9aaed3c832e0 /tools/testing/selftests/bpf/progs/exceptions_assert.c
parent    | a8b242d77bd72556b7a9d8be779f7d27b95ba73c (diff)
selftests/bpf: Convert exceptions_assert.c to bpf_cmp
Convert exceptions_assert.c to the bpf_cmp_unlikely() macro.

Since

    bpf_assert(bpf_cmp_unlikely(var, ==, 100));
    other code;

generates the assembly:

    if r1 == 100 goto L2;
    r0 = 0
    call bpf_throw
L1:
    other code;
    ...
L2: goto L1;

LLVM currently emits a redundant basic block with an extra goto; LLVM will
be fixed eventually. Right now this is less efficient than the
__bpf_assert(var, ==, 100) macro, which produces:

    if r1 == 100 goto L1;
    r0 = 0
    call bpf_throw
L1:
    other code;

But the extra goto doesn't hurt the verification process.
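
For context, the single conditional jump shown above is what bpf_cmp_unlikely() is built to emit: the comparison operator is spliced verbatim into an asm goto template, so the compiler can neither reorder nor rewrite the comparison before the verifier sees it. Below is a simplified sketch of that idea, not the actual definition in tools/testing/selftests/bpf/bpf_experimental.h (the real macro additionally handles signedness and operand sizes):

    /* Sketch only, assuming bool is available (e.g. <stdbool.h>).
     * The comparison becomes one BPF conditional-jump instruction whose
     * operator comes straight from the stringized OP token.
     */
    #define cmp_unlikely_sketch(LHS, OP, RHS)				\
    	({								\
    		__label__ l_true;					\
    		bool ret = true;					\
    		asm volatile goto("if %[lhs] " #OP " %[rhs] goto %l[l_true]" \
    				  : : [lhs] "r"(LHS), [rhs] "ri"(RHS)	\
    				  : : l_true);				\
    		ret = false;	/* fallthrough: comparison was false */	\
    	l_true:								\
    		ret;							\
    	})

With bpf_assert() wrapping such a comparison, the false branch falls through to the bpf_throw() call, which is exactly the layout in the first assembly listing above.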
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
Signed-off-by: Andrii Nakryiko <andrii@kernel.org>
Acked-by: Jiri Olsa <jolsa@kernel.org>
Acked-by: Kumar Kartikeya Dwivedi <memxor@gmail.com>
Link: https://lore.kernel.org/bpf/20231226191148.48536-4-alexei.starovoitov@gmail.com
Diffstat (limited to 'tools/testing/selftests/bpf/progs/exceptions_assert.c')
-rw-r--r-- | tools/testing/selftests/bpf/progs/exceptions_assert.c | 80
1 file changed, 40 insertions(+), 40 deletions(-)
diff --git a/tools/testing/selftests/bpf/progs/exceptions_assert.c b/tools/testing/selftests/bpf/progs/exceptions_assert.c
index 0ef81040da59..5e0a1ca96d4e 100644
--- a/tools/testing/selftests/bpf/progs/exceptions_assert.c
+++ b/tools/testing/selftests/bpf/progs/exceptions_assert.c
@@ -11,51 +11,51 @@
 #define check_assert(type, op, name, value)				\
 SEC("?tc")								\
 __log_level(2) __failure						\
-int check_assert_##op##_##name(void *ctx)				\
+int check_assert_##name(void *ctx)					\
 {									\
 	type num = bpf_ktime_get_ns();					\
-	bpf_assert_##op(num, value);					\
+	bpf_assert(bpf_cmp_unlikely(num, op, value));			\
 	return *(u64 *)num;						\
 }
 
-__msg(": R0_w=0xffffffff80000000 R10=fp0")
-check_assert(s64, eq, int_min, INT_MIN);
-__msg(": R0_w=0x7fffffff R10=fp0")
-check_assert(s64, eq, int_max, INT_MAX);
-__msg(": R0_w=0 R10=fp0")
-check_assert(s64, eq, zero, 0);
-__msg(": R0_w=0x8000000000000000 R1_w=0x8000000000000000 R10=fp0")
-check_assert(s64, eq, llong_min, LLONG_MIN);
-__msg(": R0_w=0x7fffffffffffffff R1_w=0x7fffffffffffffff R10=fp0")
-check_assert(s64, eq, llong_max, LLONG_MAX);
-
-__msg(": R0_w=scalar(smax=0x7ffffffe) R10=fp0")
-check_assert(s64, lt, pos, INT_MAX);
-__msg(": R0_w=scalar(smax=-1,umin=0x8000000000000000,var_off=(0x8000000000000000; 0x7fffffffffffffff))")
-check_assert(s64, lt, zero, 0);
-__msg(": R0_w=scalar(smax=0xffffffff7fffffff,umin=0x8000000000000000,umax=0xffffffff7fffffff,var_off=(0x8000000000000000; 0x7fffffffffffffff))")
-check_assert(s64, lt, neg, INT_MIN);
-
-__msg(": R0_w=scalar(smax=0x7fffffff) R10=fp0")
-check_assert(s64, le, pos, INT_MAX);
-__msg(": R0_w=scalar(smax=0) R10=fp0")
-check_assert(s64, le, zero, 0);
-__msg(": R0_w=scalar(smax=0xffffffff80000000,umin=0x8000000000000000,umax=0xffffffff80000000,var_off=(0x8000000000000000; 0x7fffffffffffffff))")
-check_assert(s64, le, neg, INT_MIN);
-
-__msg(": R0_w=scalar(smin=umin=0x80000000,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
-check_assert(s64, gt, pos, INT_MAX);
-__msg(": R0_w=scalar(smin=umin=1,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
-check_assert(s64, gt, zero, 0);
-__msg(": R0_w=scalar(smin=0xffffffff80000001) R10=fp0")
-check_assert(s64, gt, neg, INT_MIN);
-
-__msg(": R0_w=scalar(smin=umin=0x7fffffff,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
-check_assert(s64, ge, pos, INT_MAX);
-__msg(": R0_w=scalar(smin=0,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff)) R10=fp0")
-check_assert(s64, ge, zero, 0);
-__msg(": R0_w=scalar(smin=0xffffffff80000000) R10=fp0")
-check_assert(s64, ge, neg, INT_MIN);
+__msg(": R0_w=0xffffffff80000000")
+check_assert(s64, ==, eq_int_min, INT_MIN);
+__msg(": R0_w=0x7fffffff")
+check_assert(s64, ==, eq_int_max, INT_MAX);
+__msg(": R0_w=0")
+check_assert(s64, ==, eq_zero, 0);
+__msg(": R0_w=0x8000000000000000 R1_w=0x8000000000000000")
+check_assert(s64, ==, eq_llong_min, LLONG_MIN);
+__msg(": R0_w=0x7fffffffffffffff R1_w=0x7fffffffffffffff")
+check_assert(s64, ==, eq_llong_max, LLONG_MAX);
+
+__msg(": R0_w=scalar(id=1,smax=0x7ffffffe)")
+check_assert(s64, <, lt_pos, INT_MAX);
+__msg(": R0_w=scalar(id=1,smax=-1,umin=0x8000000000000000,var_off=(0x8000000000000000; 0x7fffffffffffffff))")
+check_assert(s64, <, lt_zero, 0);
+__msg(": R0_w=scalar(id=1,smax=0xffffffff7fffffff")
+check_assert(s64, <, lt_neg, INT_MIN);
+
+__msg(": R0_w=scalar(id=1,smax=0x7fffffff)")
+check_assert(s64, <=, le_pos, INT_MAX);
+__msg(": R0_w=scalar(id=1,smax=0)")
+check_assert(s64, <=, le_zero, 0);
+__msg(": R0_w=scalar(id=1,smax=0xffffffff80000000")
+check_assert(s64, <=, le_neg, INT_MIN);
+
+__msg(": R0_w=scalar(id=1,smin=umin=0x80000000,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
+check_assert(s64, >, gt_pos, INT_MAX);
+__msg(": R0_w=scalar(id=1,smin=umin=1,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
+check_assert(s64, >, gt_zero, 0);
+__msg(": R0_w=scalar(id=1,smin=0xffffffff80000001")
+check_assert(s64, >, gt_neg, INT_MIN);
+
+__msg(": R0_w=scalar(id=1,smin=umin=0x7fffffff,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
+check_assert(s64, >=, ge_pos, INT_MAX);
+__msg(": R0_w=scalar(id=1,smin=0,umax=0x7fffffffffffffff,var_off=(0x0; 0x7fffffffffffffff))")
+check_assert(s64, >=, ge_zero, 0);
+__msg(": R0_w=scalar(id=1,smin=0xffffffff80000000")
+check_assert(s64, >=, ge_neg, INT_MIN);
 
 SEC("?tc")
 __log_level(2) __failure
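
For readability, here is a hand-expansion of one converted instance from the hunk above (a sketch of what the preprocessor produces; SEC(), __log_level(), __failure and the __msg() patterns are the selftest annotations from bpf_misc.h):

    SEC("?tc")
    __log_level(2) __failure
    int check_assert_eq_zero(void *ctx)
    {
    	s64 num = bpf_ktime_get_ns();
    
    	/* After the assert, the verifier knows num == 0. */
    	bpf_assert(bpf_cmp_unlikely(num, ==, 0));
    	/* Deliberately bogus dereference: the test is marked __failure,
    	 * and the __msg() pattern matches the register state the assert
    	 * established in the verifier log.
    	 */
    	return *(u64 *)num;
    }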