// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

enum insn_type {
    CALL = 0, /* site call */
    NOP = 1,  /* site cond-call */
    JMP = 2,  /* tramp / site tail-call */
    RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };

/*
 * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
 */
static const u8 xor5rax[] = { 0x2e, 0x2e, 0x2e, 0x31, 0xc0 };

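/*
 * A bare "ret", padded out to the 5 byte patch size with int3 bytes.
 */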
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };

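/*
 * Patch the instruction at @insn to the form selected by @type, targeting
 * @func. Early in boot, or when @modinit indicates the text is not yet live,
 * the bytes can be written directly with text_poke_early(); otherwise go
 * through text_poke_bp() so other CPUs never execute a half-patched
 * instruction.
 */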
static void __ref __static_call_transform(void *insn, enum insn_type type,
                                          void *func, bool modinit)
{
    const void *emulate = NULL;
    int size = CALL_INSN_SIZE;
    const void *code;

    switch (type) {
    case CALL:
        code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
        if (func == &__static_call_return0) {
            emulate = code;
            code = &xor5rax;
        }

        break;

    case NOP:
        code = x86_nops[5];
        break;

    case JMP:
        code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
        break;

    case RET:
        if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
            code = text_gen_insn(JMP32_INSN_OPCODE, insn, &__x86_return_thunk);
        else
            code = &retinsn;
        break;
    }

    if (memcmp(insn, code, size) == 0)
        return;

    if (system_state == SYSTEM_BOOTING || modinit)
        return text_poke_early(insn, code, size);

    text_poke_bp(insn, code, size, emulate);
}

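/*
 * Sanity-check the instruction we are about to rewrite: a trampoline must
 * carry the ud1 signature right after its first instruction, and the patch
 * site must currently hold one of the forms this code emits.
 */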
static void __static_call_validate(void *insn, bool tail, bool tramp)
{
    u8 opcode = *(u8 *)insn;

    if (tramp && memcmp(insn+5, tramp_ud, 3)) {
        pr_err("trampoline signature fail");
        BUG();
    }

    if (tail) {
        if (opcode == JMP32_INSN_OPCODE ||
            opcode == RET_INSN_OPCODE)
            return;
    } else {
        if (opcode == CALL_INSN_OPCODE ||
            !memcmp(insn, x86_nops[5], 5) ||
            !memcmp(insn, xor5rax, 5))
            return;
    }

    /*
     * If we ever trigger this, our text is corrupt; we'll probably not
     * live long.
     */
    pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
    BUG();
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
    /*
     * Encode the following table without branches:
     *
     *  tail    null    insn
     *  -----+-------+------
     *    0  |   0   |  CALL
     *    0  |   1   |  NOP
     *    1  |   0   |  JMP
     *    1  |   1   |  RET
     */
    return 2*tail + null;
}

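/*
 * Core entry point for retargeting a static call: patch the out-of-line
 * trampoline and, with CONFIG_HAVE_STATIC_CALL_INLINE, the inline call site
 * as well. A NULL @func selects NOP (call site) or RET (trampoline /
 * tail-call site) via __sc_insn().
 */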
void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
    mutex_lock(&text_mutex);

    if (tramp) {
        __static_call_validate(tramp, true, true);
        __static_call_transform(tramp, __sc_insn(!func, true), func, false);
    }

    if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
        __static_call_validate(site, tail, false);
        __static_call_transform(site, __sc_insn(!func, tail), func, false);
    }

    mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);

#ifdef CONFIG_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before determining
 * X86_FEATURE_RETHUNK and, by implication, running alternatives.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
    if (memcmp(tramp+5, tramp_ud, 3)) {
        /* Not a trampoline site, not our problem. */
        return false;
    }

    mutex_lock(&text_mutex);
    if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
        __static_call_transform(tramp, RET, NULL, true);
    mutex_unlock(&text_mutex);

    return true;
}
#endif
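
/*
 * Usage sketch (illustrative only; "my_hook", "default_impl" and "new_impl"
 * are hypothetical names, not part of this file). The generic static_call
 * API that ends up invoking arch_static_call_transform() looks like:
 *
 *   DEFINE_STATIC_CALL(my_hook, default_impl);
 *
 *   static_call(my_hook)(arg);              // patched CALL/JMP at the site
 *   static_call_update(my_hook, new_impl);  // re-patch site and trampoline
 */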