0001 {
/*
 * Unaligned word-sized atomic add on the stack: an 8-byte value is
 * stored at fp-8, then a BPF_W (4-byte) xadd targets fp-7, which is
 * not 4-byte aligned.  The verifier must reject this.
 */
0002 "xadd/w check unaligned stack",
0003 .insns = {
0004 BPF_MOV64_IMM(BPF_REG_0, 1),
/* init fp-8 with a known 64-bit value */
0005 BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
/* misaligned: word-sized atomic add at fp-7 */
0006 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_10, BPF_REG_0, -7),
0007 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8),
0008 BPF_EXIT_INSN(),
0009 },
0010 .result = REJECT,
0011 .errstr = "misaligned stack access off",
0012 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
0013 },
0014 {
0015 "xadd/w check unaligned map",
0016 .insns = {
0017 BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
0018 BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
0019 BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
0020 BPF_LD_MAP_FD(BPF_REG_1, 0),
0021 BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
0022 BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
0023 BPF_EXIT_INSN(),
0024 BPF_MOV64_IMM(BPF_REG_1, 1),
0025 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_0, BPF_REG_1, 3),
0026 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_0, 3),
0027 BPF_EXIT_INSN(),
0028 },
0029 .fixup_map_hash_8b = { 3 },
0030 .result = REJECT,
0031 .errstr = "misaligned value access off",
0032 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
0033 },
0034 {
0035 "xadd/w check unaligned pkt",
0036 .insns = {
0037 BPF_LDX_MEM(BPF_W, BPF_REG_2, BPF_REG_1, offsetof(struct xdp_md, data)),
0038 BPF_LDX_MEM(BPF_W, BPF_REG_3, BPF_REG_1,
0039 offsetof(struct xdp_md, data_end)),
0040 BPF_MOV64_REG(BPF_REG_1, BPF_REG_2),
0041 BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 8),
0042 BPF_JMP_REG(BPF_JLT, BPF_REG_1, BPF_REG_3, 2),
0043 BPF_MOV64_IMM(BPF_REG_0, 99),
0044 BPF_JMP_IMM(BPF_JA, 0, 0, 6),
0045 BPF_MOV64_IMM(BPF_REG_0, 1),
0046 BPF_ST_MEM(BPF_W, BPF_REG_2, 0, 0),
0047 BPF_ST_MEM(BPF_W, BPF_REG_2, 3, 0),
0048 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_2, BPF_REG_0, 1),
0049 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_2, BPF_REG_0, 2),
0050 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_2, 1),
0051 BPF_EXIT_INSN(),
0052 },
0053 .result = REJECT,
0054 .errstr = "BPF_ATOMIC stores into R2 pkt is not allowed",
0055 .prog_type = BPF_PROG_TYPE_XDP,
0056 .flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
0057 },
0058 {
0059 "xadd/w check whether src/dst got mangled, 1",
0060 .insns = {
0061 BPF_MOV64_IMM(BPF_REG_0, 1),
0062 BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
0063 BPF_MOV64_REG(BPF_REG_7, BPF_REG_10),
0064 BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_0, -8),
0065 BPF_ATOMIC_OP(BPF_DW, BPF_ADD, BPF_REG_10, BPF_REG_0, -8),
0066 BPF_ATOMIC_OP(BPF_DW, BPF_ADD, BPF_REG_10, BPF_REG_0, -8),
0067 BPF_JMP_REG(BPF_JNE, BPF_REG_6, BPF_REG_0, 3),
0068 BPF_JMP_REG(BPF_JNE, BPF_REG_7, BPF_REG_10, 2),
0069 BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_10, -8),
0070 BPF_EXIT_INSN(),
0071 BPF_MOV64_IMM(BPF_REG_0, 42),
0072 BPF_EXIT_INSN(),
0073 },
0074 .result = ACCEPT,
0075 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
0076 .retval = 3,
0077 },
0078 {
0079 "xadd/w check whether src/dst got mangled, 2",
0080 .insns = {
0081 BPF_MOV64_IMM(BPF_REG_0, 1),
0082 BPF_MOV64_REG(BPF_REG_6, BPF_REG_0),
0083 BPF_MOV64_REG(BPF_REG_7, BPF_REG_10),
0084 BPF_STX_MEM(BPF_W, BPF_REG_10, BPF_REG_0, -8),
0085 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_10, BPF_REG_0, -8),
0086 BPF_ATOMIC_OP(BPF_W, BPF_ADD, BPF_REG_10, BPF_REG_0, -8),
0087 BPF_JMP_REG(BPF_JNE, BPF_REG_6, BPF_REG_0, 3),
0088 BPF_JMP_REG(BPF_JNE, BPF_REG_7, BPF_REG_10, 2),
0089 BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_10, -8),
0090 BPF_EXIT_INSN(),
0091 BPF_MOV64_IMM(BPF_REG_0, 42),
0092 BPF_EXIT_INSN(),
0093 },
0094 .result = ACCEPT,
0095 .prog_type = BPF_PROG_TYPE_SCHED_CLS,
0096 .retval = 3,
0097 },