diff --git a/arch/arm64/include/asm/asm-extable.h b/arch/arm64/include/asm/asm-extable.h
index c39f2437e08e..980d1dd8e1a3 100644
--- a/arch/arm64/include/asm/asm-extable.h
+++ b/arch/arm64/include/asm/asm-extable.h
@@ -2,12 +2,27 @@
 #ifndef __ASM_ASM_EXTABLE_H
 #define __ASM_ASM_EXTABLE_H
 
+#include <linux/bits.h>
+#include <asm/gpr-num.h>
+
 #define EX_TYPE_NONE			0
-#define EX_TYPE_FIXUP			1
-#define EX_TYPE_BPF			2
-#define EX_TYPE_UACCESS_ERR_ZERO	3
+#define EX_TYPE_BPF			1
+#define EX_TYPE_UACCESS_ERR_ZERO	2
+#define EX_TYPE_KACCESS_ERR_ZERO	3
 #define EX_TYPE_LOAD_UNALIGNED_ZEROPAD	4
 
+/* Data fields for EX_TYPE_UACCESS_ERR_ZERO */
+#define EX_DATA_REG_ERR_SHIFT	0
+#define EX_DATA_REG_ERR		GENMASK(4, 0)
+#define EX_DATA_REG_ZERO_SHIFT	5
+#define EX_DATA_REG_ZERO	GENMASK(9, 5)
+
+/* Data fields for EX_TYPE_LOAD_UNALIGNED_ZEROPAD */
+#define EX_DATA_REG_DATA_SHIFT	0
+#define EX_DATA_REG_DATA	GENMASK(4, 0)
+#define EX_DATA_REG_ADDR_SHIFT	5
+#define EX_DATA_REG_ADDR	GENMASK(9, 5)
+
 #ifdef __ASSEMBLY__
 
 #define __ASM_EXTABLE_RAW(insn, fixup, type, data)	\
@@ -19,31 +34,45 @@
 	.short		(data);				\
 	.popsection;
 
+#define EX_DATA_REG(reg, gpr)				\
+	(.L__gpr_num_##gpr << EX_DATA_REG_##reg##_SHIFT)
+
+#define _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero)	\
+	__ASM_EXTABLE_RAW(insn, fixup,				\
+			  EX_TYPE_UACCESS_ERR_ZERO,		\
+			  (					\
+			    EX_DATA_REG(ERR, err) |		\
+			    EX_DATA_REG(ZERO, zero)		\
+			  ))
+
+#define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err)		\
+	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, wzr)
+
+#define _ASM_EXTABLE_UACCESS(insn, fixup)			\
+	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, wzr, wzr)
+
 /*
- * Create an exception table entry for `insn`, which will branch to `fixup`
+ * Create an exception table entry for uaccess `insn`, which will branch to `fixup`
  * when an unhandled fault is taken.
  */
-	.macro		_asm_extable, insn, fixup
-	__ASM_EXTABLE_RAW(\insn, \fixup, EX_TYPE_FIXUP, 0)
+	.macro		_asm_extable_uaccess, insn, fixup
+	_ASM_EXTABLE_UACCESS(\insn, \fixup)
 	.endm
 
 /*
  * Create an exception table entry for `insn` if `fixup` is provided. Otherwise
  * do nothing.
  */
-	.macro		_cond_extable, insn, fixup
-	.ifnc		\fixup,
-	_asm_extable	\insn, \fixup
+	.macro		_cond_uaccess_extable, insn, fixup
+	.ifnc			\fixup,
+	_asm_extable_uaccess	\insn, \fixup
 	.endif
 	.endm
 
 #else /* __ASSEMBLY__ */
 
-#include <linux/bits.h>
 #include <linux/stringify.h>
 
-#include <asm/gpr-num.h>
-
 #define __ASM_EXTABLE_RAW(insn, fixup, type, data)	\
 	".pushsection	__ex_table, \"a\"\n"		\
 	".align		2\n"				\
@@ -53,14 +82,6 @@
 	".short		(" data ")\n"			\
 	".popsection\n"
 
-#define _ASM_EXTABLE(insn, fixup) \
-	__ASM_EXTABLE_RAW(#insn, #fixup, __stringify(EX_TYPE_FIXUP), "0")
-
-#define EX_DATA_REG_ERR_SHIFT	0
-#define EX_DATA_REG_ERR		GENMASK(4, 0)
-#define EX_DATA_REG_ZERO_SHIFT	5
-#define EX_DATA_REG_ZERO	GENMASK(9, 5)
-
 #define EX_DATA_REG(reg, gpr)						\
 	"((.L__gpr_num_" #gpr ") << " __stringify(EX_DATA_REG_##reg##_SHIFT) ")"
 
@@ -73,13 +94,23 @@
 			    EX_DATA_REG(ZERO, zero)			\
 			  ")")
 
+#define _ASM_EXTABLE_KACCESS_ERR_ZERO(insn, fixup, err, zero)		\
+	__DEFINE_ASM_GPR_NUMS						\
+	__ASM_EXTABLE_RAW(#insn, #fixup,				\
+			  __stringify(EX_TYPE_KACCESS_ERR_ZERO),	\
+			  "("						\
+			    EX_DATA_REG(ERR, err) " | "			\
+			    EX_DATA_REG(ZERO, zero)			\
+			  ")")
+
 #define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err)			\
 	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, wzr)
 
-#define EX_DATA_REG_DATA_SHIFT	0
-#define EX_DATA_REG_DATA	GENMASK(4, 0)
-#define EX_DATA_REG_ADDR_SHIFT	5
-#define EX_DATA_REG_ADDR	GENMASK(9, 5)
+#define _ASM_EXTABLE_UACCESS(insn, fixup)				\
+	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, wzr, wzr)
+
+#define _ASM_EXTABLE_KACCESS_ERR(insn, fixup, err)			\
+	_ASM_EXTABLE_KACCESS_ERR_ZERO(insn, fixup, err, wzr)
 
 #define _ASM_EXTABLE_LOAD_UNALIGNED_ZEROPAD(insn, fixup, data, addr)	\
 	__DEFINE_ASM_GPR_NUMS						\
diff --git a/arch/arm64/include/asm/asm-uaccess.h b/arch/arm64/include/asm/asm-uaccess.h
index 0557af834e03..75b211c98dea 100644
--- a/arch/arm64/include/asm/asm-uaccess.h
+++ b/arch/arm64/include/asm/asm-uaccess.h
@@ -61,7 +61,7 @@ alternative_else_nop_endif
 
 #define USER(l, x...)				\
 9999:	x;					\
-	_asm_extable	9999b, l
+	_asm_extable_uaccess	9999b, l
 
 /*
  * Generate the assembly for LDTR/STTR with exception table entries.
@@ -73,8 +73,8 @@ alternative_else_nop_endif
 8889:		ldtr	\reg2, [\addr, #8];
 		add	\addr, \addr, \post_inc;
 
-		_asm_extable	8888b,\l;
-		_asm_extable	8889b,\l;
+		_asm_extable_uaccess	8888b, \l;
+		_asm_extable_uaccess	8889b, \l;
 	.endm
 
 	.macro user_stp l, reg1, reg2, addr, post_inc
@@ -82,14 +82,14 @@ alternative_else_nop_endif
 8889:		sttr	\reg2, [\addr, #8];
 		add	\addr, \addr, \post_inc;
 
-		_asm_extable	8888b,\l;
-		_asm_extable	8889b,\l;
+		_asm_extable_uaccess	8888b,\l;
+		_asm_extable_uaccess	8889b,\l;
 	.endm
 
 	.macro user_ldst l, inst, reg, addr, post_inc
 8888:		\inst	\reg, [\addr];
 		add	\addr, \addr, \post_inc;
 
-		_asm_extable	8888b,\l;
+		_asm_extable_uaccess	8888b, \l;
 	.endm
 
 #endif
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index 8c5a61aeaf8e..dc422fa437c2 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -423,7 +423,7 @@ alternative_endif
 	b.lo	.Ldcache_op\@
 	dsb	\domain
 
-	_cond_extable .Ldcache_op\@, \fixup
+	_cond_uaccess_extable .Ldcache_op\@, \fixup
 	.endm
 
 /*
@@ -462,7 +462,7 @@ alternative_endif
 	dsb	ish
 	isb
 
-	_cond_extable .Licache_op\@, \fixup
+	_cond_uaccess_extable .Licache_op\@, \fixup
 	.endm
 
 /*
diff --git a/arch/arm64/include/asm/uaccess.h b/arch/arm64/include/asm/uaccess.h
index 63f9c828f1a7..2fc9f0861769 100644
--- a/arch/arm64/include/asm/uaccess.h
+++ b/arch/arm64/include/asm/uaccess.h
@@ -232,34 +232,34 @@ static inline void __user *__uaccess_mask_ptr(const void __user *ptr)
  * The "__xxx_error" versions set the third argument to -EFAULT if an error
  * occurs, and leave it unchanged on success.
  */
-#define __get_mem_asm(load, reg, x, addr, err)				\
+#define __get_mem_asm(load, reg, x, addr, err, type)			\
 	asm volatile(							\
 	"1:	" load "	" reg "1, [%2]\n"			\
 	"2:\n"								\
-	_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %w0, %w1)			\
+	_ASM_EXTABLE_##type##ACCESS_ERR_ZERO(1b, 2b, %w0, %w1)		\
 	: "+r" (err), "=&r" (x)						\
 	: "r" (addr))
 
-#define __raw_get_mem(ldr, x, ptr, err)					\
-do {									\
-	unsigned long __gu_val;						\
-	switch (sizeof(*(ptr))) {					\
-	case 1:								\
-		__get_mem_asm(ldr "b", "%w", __gu_val, (ptr), (err));	\
-		break;							\
-	case 2:								\
-		__get_mem_asm(ldr "h", "%w", __gu_val, (ptr), (err));	\
-		break;							\
-	case 4:								\
-		__get_mem_asm(ldr, "%w", __gu_val, (ptr), (err));	\
-		break;							\
-	case 8:								\
-		__get_mem_asm(ldr, "%x", __gu_val, (ptr), (err));	\
-		break;							\
-	default:							\
-		BUILD_BUG();						\
-	}								\
-	(x) = (__force __typeof__(*(ptr)))__gu_val;			\
+#define __raw_get_mem(ldr, x, ptr, err, type)					\
+do {										\
+	unsigned long __gu_val;							\
+	switch (sizeof(*(ptr))) {						\
+	case 1:									\
+		__get_mem_asm(ldr "b", "%w", __gu_val, (ptr), (err), type);	\
+		break;								\
+	case 2:									\
+		__get_mem_asm(ldr "h", "%w", __gu_val, (ptr), (err), type);	\
+		break;								\
+	case 4:									\
+		__get_mem_asm(ldr, "%w", __gu_val, (ptr), (err), type);	\
+		break;								\
+	case 8:									\
+		__get_mem_asm(ldr, "%x", __gu_val, (ptr), (err), type);	\
+		break;								\
+	default:								\
+		BUILD_BUG();							\
+	}									\
+	(x) = (__force __typeof__(*(ptr)))__gu_val;				\
 } while (0)
 
 /*
@@ -274,7 +274,7 @@
 	__chk_user_ptr(ptr);						\
 									\
 	uaccess_ttbr0_enable();						\
-	__raw_get_mem("ldtr", __rgu_val, __rgu_ptr, err);		\
+	__raw_get_mem("ldtr", __rgu_val, __rgu_ptr, err, U);		\
 	uaccess_ttbr0_disable();					\
 									\
 	(x) = __rgu_val;						\
@@ -314,40 +314,40 @@
 									\
 	__uaccess_enable_tco_async();					\
 	__raw_get_mem("ldr", *((type *)(__gkn_dst)),			\
-		      (__force type *)(__gkn_src), __gkn_err);		\
+		      (__force type *)(__gkn_src), __gkn_err, K);	\
 	__uaccess_disable_tco_async();					\
 									\
 	if (unlikely(__gkn_err))					\
 		goto err_label;						\
 } while (0)
 
-#define __put_mem_asm(store, reg, x, addr, err)				\
+#define __put_mem_asm(store, reg, x, addr, err, type)			\
 	asm volatile(							\
 	"1:	" store "	" reg "1, [%2]\n"			\
 	"2:\n"								\
-	_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0)				\
+	_ASM_EXTABLE_##type##ACCESS_ERR(1b, 2b, %w0)			\
 	: "+r" (err)							\
 	: "r" (x), "r" (addr))
 
-#define __raw_put_mem(str, x, ptr, err)					\
-do {									\
-	__typeof__(*(ptr)) __pu_val = (x);				\
-	switch (sizeof(*(ptr))) {					\
-	case 1:								\
-		__put_mem_asm(str "b", "%w", __pu_val, (ptr), (err));	\
-		break;							\
-	case 2:								\
-		__put_mem_asm(str "h", "%w", __pu_val, (ptr), (err));	\
-		break;							\
-	case 4:								\
-		__put_mem_asm(str, "%w", __pu_val, (ptr), (err));	\
-		break;							\
-	case 8:								\
-		__put_mem_asm(str, "%x", __pu_val, (ptr), (err));	\
-		break;							\
-	default:							\
-		BUILD_BUG();						\
-	}								\
+#define __raw_put_mem(str, x, ptr, err, type)					\
+do {										\
+	__typeof__(*(ptr)) __pu_val = (x);					\
+	switch (sizeof(*(ptr))) {						\
+	case 1:									\
+		__put_mem_asm(str "b", "%w", __pu_val, (ptr), (err), type);	\
+		break;								\
+	case 2:									\
+		__put_mem_asm(str "h", "%w", __pu_val, (ptr), (err), type);	\
+		break;								\
+	case 4:									\
+		__put_mem_asm(str, "%w", __pu_val, (ptr), (err), type);	\
+		break;								\
+	case 8:									\
+		__put_mem_asm(str, "%x", __pu_val, (ptr), (err), type);	\
+		break;								\
+	default:								\
+		BUILD_BUG();							\
+	}									\
 } while (0)
 
 /*
@@ -362,7 +362,7 @@
 	__chk_user_ptr(__rpu_ptr);					\
 									\
 	uaccess_ttbr0_enable();						\
-	__raw_put_mem("sttr", __rpu_val, __rpu_ptr, err);		\
+	__raw_put_mem("sttr", __rpu_val, __rpu_ptr, err, U);		\
 	uaccess_ttbr0_disable();					\
 } while (0)
 
@@ -400,7 +400,7 @@ do {									\
 									\
 	__uaccess_enable_tco_async();					\
 	__raw_put_mem("str", *((type *)(__pkn_src)),			\
-		      (__force type *)(__pkn_dst), __pkn_err);		\
+		      (__force type *)(__pkn_dst), __pkn_err, K);	\
 	__uaccess_disable_tco_async();					\
 									\
 	if (unlikely(__pkn_err))					\
diff --git a/arch/arm64/mm/extable.c b/arch/arm64/mm/extable.c
index 489455309695..228d681a8715 100644
--- a/arch/arm64/mm/extable.c
+++ b/arch/arm64/mm/extable.c
@@ -16,13 +16,6 @@ get_ex_fixup(const struct exception_table_entry *ex)
 	return ((unsigned long)&ex->fixup + ex->fixup);
 }
 
-static bool ex_handler_fixup(const struct exception_table_entry *ex,
-			     struct pt_regs *regs)
-{
-	regs->pc = get_ex_fixup(ex);
-	return true;
-}
-
 static bool ex_handler_uaccess_err_zero(const struct exception_table_entry *ex,
 					struct pt_regs *regs)
 {
@@ -72,11 +65,10 @@ bool fixup_exception(struct pt_regs *regs)
 		return false;
 
 	switch (ex->type) {
-	case EX_TYPE_FIXUP:
-		return ex_handler_fixup(ex, regs);
 	case EX_TYPE_BPF:
 		return ex_handler_bpf(ex, regs);
 	case EX_TYPE_UACCESS_ERR_ZERO:
+	case EX_TYPE_KACCESS_ERR_ZERO:
 		return ex_handler_uaccess_err_zero(ex, regs);
 	case EX_TYPE_LOAD_UNALIGNED_ZEROPAD:
 		return ex_handler_load_unaligned_zeropad(ex, regs);