Diffstat (limited to 'arch/arm64/kernel/insn.c')
-rw-r--r--  arch/arm64/kernel/insn.c | 36 +++++++++++++++++++++++++-----------
 1 file changed, 25 insertions(+), 11 deletions(-)
diff --git a/arch/arm64/kernel/insn.c b/arch/arm64/kernel/insn.c
index d801a7094076..53bcf5386907 100644
--- a/arch/arm64/kernel/insn.c
+++ b/arch/arm64/kernel/insn.c
@@ -21,6 +21,7 @@
 #include <asm/fixmap.h>
 #include <asm/insn.h>
 #include <asm/kprobes.h>
+#include <asm/sections.h>
 
 #define AARCH64_INSN_SF_BIT	BIT(31)
 #define AARCH64_INSN_N_BIT	BIT(22)
@@ -78,16 +79,29 @@ bool aarch64_insn_is_branch_imm(u32 insn)
 
 static DEFINE_RAW_SPINLOCK(patch_lock);
 
+static bool is_exit_text(unsigned long addr)
+{
+	/* discarded with init text/data */
+	return system_state < SYSTEM_RUNNING &&
+		addr >= (unsigned long)__exittext_begin &&
+		addr < (unsigned long)__exittext_end;
+}
+
+static bool is_image_text(unsigned long addr)
+{
+	return core_kernel_text(addr) || is_exit_text(addr);
+}
+
 static void __kprobes *patch_map(void *addr, int fixmap)
 {
 	unsigned long uintaddr = (uintptr_t) addr;
-	bool module = !core_kernel_text(uintaddr);
+	bool image = is_image_text(uintaddr);
 	struct page *page;
 
-	if (module && IS_ENABLED(CONFIG_STRICT_MODULE_RWX))
-		page = vmalloc_to_page(addr);
-	else if (!module)
+	if (image)
 		page = phys_to_page(__pa_symbol(addr));
+	else if (IS_ENABLED(CONFIG_STRICT_MODULE_RWX))
+		page = vmalloc_to_page(addr);
 	else
 		return addr;
 
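For illustration, a minimal user-space sketch of the image-vs-module classification the hunk above introduces. Only the decision structure mirrors the patch; the address ranges, the before_system_running flag, and the core_kernel_text_stub() helper are hypothetical stand-ins, not kernel code.

/*
 * Sketch: classify a code address the way the patched patch_map() does.
 * All ranges and stand-in helpers below are hypothetical.
 */
#include <stdbool.h>
#include <stdio.h>

static unsigned long exittext_begin = 0xffff000010a00000UL;	/* hypothetical */
static unsigned long exittext_end   = 0xffff000010a10000UL;	/* hypothetical */
static bool before_system_running = true;	/* i.e. system_state < SYSTEM_RUNNING */

static bool core_kernel_text_stub(unsigned long addr)
{
	/* hypothetical core image text range */
	return addr >= 0xffff000010080000UL && addr < 0xffff000010a00000UL;
}

static bool is_exit_text(unsigned long addr)
{
	/* .exit.text is discarded along with init text/data */
	return before_system_running &&
	       addr >= exittext_begin && addr < exittext_end;
}

static bool is_image_text(unsigned long addr)
{
	return core_kernel_text_stub(addr) || is_exit_text(addr);
}

int main(void)
{
	/*
	 * Image text (core or exit) would take the __pa_symbol() path in
	 * patch_map(); anything else the vmalloc_to_page() path.
	 */
	printf("core:   %d\n", is_image_text(0xffff000010090000UL));	/* 1 */
	printf("exit:   %d\n", is_image_text(0xffff000010a00040UL));	/* 1 */
	printf("module: %d\n", is_image_text(0xffff800008001000UL));	/* 0 */
	return 0;
}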
@@ -1508,16 +1522,10 @@ static u32 aarch64_encode_immediate(u64 imm,
 				    u32 insn)
 {
 	unsigned int immr, imms, n, ones, ror, esz, tmp;
-	u64 mask = ~0UL;
-
-	/* Can't encode full zeroes or full ones */
-	if (!imm || !~imm)
-		return AARCH64_BREAK_FAULT;
+	u64 mask;
 
 	switch (variant) {
 	case AARCH64_INSN_VARIANT_32BIT:
-		if (upper_32_bits(imm))
-			return AARCH64_BREAK_FAULT;
 		esz = 32;
 		break;
 	case AARCH64_INSN_VARIANT_64BIT:
@@ -1529,6 +1537,12 @@ static u32 aarch64_encode_immediate(u64 imm,
 		return AARCH64_BREAK_FAULT;
 	}
 
+	mask = GENMASK(esz - 1, 0);
+
+	/* Can't encode full zeroes, full ones, or value wider than the mask */
+	if (!imm || imm == mask || imm & ~mask)
+		return AARCH64_BREAK_FAULT;
+
 	/*
 	 * Inverse of Replicate(). Try to spot a repeating pattern
 	 * with a pow2 stride.
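As a quick check of the new rejection logic, a standalone sketch of the mask-based test for the 32-bit variant. The SKETCH_GENMASK macro and the check_logical_imm() harness are local stand-ins (the kernel's GENMASK comes from its own headers), but the comparison mirrors the added check.

/*
 * Sketch of the new immediate validation; not the kernel implementation.
 */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_GENMASK(h, l) \
	(((~0ULL) >> (63 - (h))) & ((~0ULL) << (l)))

/* Return 0 if imm survives the mask-based check, -1 if it is rejected. */
static int check_logical_imm(uint64_t imm, unsigned int esz)
{
	uint64_t mask = SKETCH_GENMASK(esz - 1, 0);

	/* Can't encode full zeroes, full ones, or a value wider than the mask. */
	if (!imm || imm == mask || (imm & ~mask))
		return -1;
	return 0;
}

int main(void)
{
	/* 32-bit variant: mask = GENMASK(31, 0) = 0xffffffff */
	printf("%d\n", check_logical_imm(0x00000000ULL, 32));	/* -1: all zeroes      */
	printf("%d\n", check_logical_imm(0xffffffffULL, 32));	/* -1: all ones        */
	printf("%d\n", check_logical_imm(0x1ffffffffULL, 32));	/* -1: wider than mask */
	printf("%d\n", check_logical_imm(0x0000ff00ULL, 32));	/*  0: candidate       */
	return 0;
}

Against the 32-bit mask, the single comparison imm == mask rejects the all-ones pattern that the old !~imm test only caught for 64-bit values, and imm & ~mask takes over the role of the removed upper_32_bits() check.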