{
	"atomic dw/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		/* Spill the map pointer to fp-8 and AND it with -1 in place. */
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		/* Fill the (unchanged) pointer back from the stack slot. */
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		/* Try to leak the pointer into the map value. */
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
{
	"atomic dw/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_DW, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = ACCEPT,
	.result_unpriv = REJECT,
	.errstr_unpriv = "leaking pointer from stack off -8",
},
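/*
 * The 32-bit (BPF_W) variants below are rejected even for privileged
 * loads: the word-sized atomic touches only half of the 8-byte slot
 * holding the spilled pointer, which the verifier reports as
 * "invalid size of register fill".
 */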
{
	"atomic w/fetch and address leakage of (map ptr & -1) via stack slot",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_LDX_MEM(BPF_DW, BPF_REG_9, BPF_REG_2, 0),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
{
	"atomic w/fetch and address leakage of (map ptr & -1) via returned value",
	.insns = {
		BPF_LD_IMM64(BPF_REG_1, -1),
		BPF_LD_MAP_FD(BPF_REG_8, 0),
		BPF_LD_MAP_FD(BPF_REG_9, 0),
		BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
		BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
		BPF_STX_MEM(BPF_DW, BPF_REG_2, BPF_REG_9, 0),
		BPF_ATOMIC_OP(BPF_W, BPF_AND | BPF_FETCH, BPF_REG_2, BPF_REG_1, 0),
		BPF_MOV64_REG(BPF_REG_9, BPF_REG_1),
		BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
		BPF_MOV64_REG(BPF_REG_1, BPF_REG_8),
		BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
		BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 1),
		BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
		BPF_MOV64_IMM(BPF_REG_0, 0),
		BPF_EXIT_INSN(),
	},
	.fixup_map_array_48b = { 2, 4 },
	.result = REJECT,
	.errstr = "invalid size of register fill",
},
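/*
 * Generate a test that stores operand1 in a stack slot, applies the atomic
 * fetch "op" with operand2 through dst_reg/src_reg, and then checks both the
 * fetched old value (operand1) and the resulting memory value (expect).
 * The program exits with 1 or 2 on a mismatch, 0 on success.
 */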
#define __ATOMIC_FETCH_OP_TEST(src_reg, dst_reg, operand1, op, operand2, expect) \
	{ \
		"atomic fetch " #op ", src=" #src_reg " dst=" #dst_reg, \
		.insns = { \
			/* u64 val = operand1; */ \
			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, operand1), \
			/* u64 old = atomic_fetch_<op>(&val, operand2); */ \
			BPF_MOV64_REG(dst_reg, BPF_REG_10), \
			BPF_MOV64_IMM(src_reg, operand2), \
			BPF_ATOMIC_OP(BPF_DW, op, \
				      dst_reg, src_reg, -8), \
			/* if (old != operand1) exit(1); */ \
			BPF_JMP_IMM(BPF_JEQ, src_reg, operand1, 2), \
			BPF_MOV64_IMM(BPF_REG_0, 1), \
			BPF_EXIT_INSN(), \
			/* if (val != expect) exit(2); */ \
			BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_10, -8), \
			BPF_JMP_IMM(BPF_JEQ, BPF_REG_1, expect, 2), \
			BPF_MOV64_IMM(BPF_REG_0, 2), \
			BPF_EXIT_INSN(), \
			/* exit(0); */ \
			BPF_MOV64_IMM(BPF_REG_0, 0), \
			BPF_EXIT_INSN(), \
		}, \
		.result = ACCEPT, \
	}
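/* Exercise each fetch op across a spread of src/dst register pairs, including R0. */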
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_2, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_0, BPF_REG_1, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_1, BPF_REG_0, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_2, BPF_REG_3, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XCHG, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_9, BPF_REG_8, 0x010, BPF_XCHG, 0x011, 0x011),
#undef __ATOMIC_FETCH_OP_TEST