From 3289868758a0f06d2dec9afaccec6056e2d1936b Mon Sep 17 00:00:00 2001 From: Haryslee Date: Wed, 3 May 2023 11:59:56 +0800 Subject: [PATCH] feat: Support pointer authentication for switch context Signed-off-by: Haryslee --- pac/arm64/include/asm_pointer_auth_context.h | 191 ++++++++++++++ pac/arm64/include/pointer_auth_context.h | 173 +++++++++++++ pac/arm64/src/asm_pointer_auth_context.S | 258 +++++++++++++++++++ pac/arm64/src/pointer_auth_context.c | 99 +++++++ 4 files changed, 721 insertions(+) create mode 100644 pac/arm64/include/asm_pointer_auth_context.h create mode 100644 pac/arm64/include/pointer_auth_context.h create mode 100644 pac/arm64/src/asm_pointer_auth_context.S create mode 100644 pac/arm64/src/pointer_auth_context.c diff --git a/pac/arm64/include/asm_pointer_auth_context.h b/pac/arm64/include/asm_pointer_auth_context.h new file mode 100644 index 0000000..ce46391 --- /dev/null +++ b/pac/arm64/include/asm_pointer_auth_context.h @@ -0,0 +1,191 @@ +// SPDX-License-Identifier: GPL-2.0-or-later +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + */ + +#ifndef __ASM_POINTER_AUTH_CONTEXT_H +#define __ASM_POINTER_AUTH_CONTEXT_H + +#include +#include + +#ifdef CONFIG_ARM64_PTR_AUTH + + /* Compute pac hash. */ + .macro compute_pac_hash + pacga x1, x0, x1 + pacga x1, x2, x1 + pacga x1, x3, x1 + pacga x1, x4, x1 + pacga x1, x5, x1 + pacga x1, x6, x1 + .endm + + /* Compute and store hash value of cpu context. */ + .macro sign_thread_context_common + pacga x1, x0, x1 + pacga x1, x2, x1 + str x1, [x0, CPU_CONTEXT_PAC_HASH] + .endm + + /* Compute and auth hash value of cpu context. 
*/ + .macro auth_thread_context_common + pacga x1, x0, x1 + pacga x1, x2, x1 + ldr x2, [x0, CPU_CONTEXT_PAC_HASH] + cmp x1, x2 + b.ne .Lthread_context_pac_panic\@ + b .Lauth_thread_context_done\@ +.Lthread_context_pac_panic\@: + adrp x0, .Lthread_context_pac_str + add x0, x0, :lo12:.Lthread_context_pac_str + bl panic +.Lauth_thread_context_done\@: + .endm + + /* Compute and store hash value of the regs. */ + .macro sign_exception_context_common + compute_pac_hash + str x1, [x0, S_PAC_HASH] + .endm + + /* Compute and auth hash value of the regs. */ + .macro auth_exception_context_common + compute_pac_hash + ldr x2, [x0, S_PAC_HASH] + cmp x1, x2 + b.ne .Lpt_regs_pac_panic\@ + b .Lauth_exception_context_done\@ +.Lpt_regs_pac_panic\@: + adrp x0, .Lpt_regs_pac_panic_str + add x0, x0, :lo12:.Lpt_regs_pac_panic_str + bl panic +.Lauth_exception_context_done\@: + .endm + +.Lpt_regs_pac_panic_str: + .asciz "Failed to match pac hash of exception context!\n" + .align 2 + +.Lthread_context_pac_str: + .asciz "Failed to match pac hash of cpu context!\n" + .align 2 + + .macro pac_cpu_context sign_or_auth + stp x0, x1, [sp, #-16]! + stp x2, x3, [sp, #-16]! + .if \sign_or_auth == 1 + /* x1: base of next task */ + mov x0, x1 + .endif + add x0, x0, #THREAD_CPU_CONTEXT + /* sign sp, lr of cpu context. 
*/ + mov x1, lr + mov x2, x9 + .if \sign_or_auth == 0 + sign_thread_context_common + .else + auth_thread_context_common + .endif + ldp x2, x3, [sp], #16 + ldp x0, x1, [sp], #16 + .endm + + .macro sign_cpu_context sign=0 + pac_cpu_context \sign + .endm + + .macro auth_cpu_context auth=1 + pac_cpu_context \auth + .endm + + .macro prepare_compat_pt_regs + /* base of pt_regs */ + add x0, sp, #56 + mov x1, #0 + mov x2, #0 + /* sign lr, sp, pc, pstate of compat task */ + mov x3, x14 + mov x4, x13 + mrs x5, elr_el1 + mrs x6, spsr_el1 + .endm + + .macro prepare_pt_regs, el, sign_or_auth + /* base of pt_regs */ + add x0, sp, #56 + /* sign x16, x17, lr, sp, pc, pstate of task */ + mov x1, x16 + mov x2, x17 + .if \sign_or_auth == 0 + mov x3, lr + .else + ldr x3, [x0, #S_LR] + .endif + .if \el == 0 + mrs x4, sp_el0 + .else + add x4, x0, #S_FRAME_SIZE + .endif + mrs x5, elr_el1 + mrs x6, spsr_el1 + .endm + + .macro pac_pt_regs, el, sign_or_auth + stp x0, x1, [sp, #-16]! + stp x2, x3, [sp, #-16]! + stp x4, x5, [sp, #-16]! + str x6, [sp, #-8]! + .if \el == 0 + /* Test the task is in the mode of 32-bit or 64-bit */ + mrs x0, spsr_el1 + mov x1, #(PSR_MODE32_BIT | PSR_MODE_MASK) + mov x2, #(PSR_MODE32_BIT | PSR_MODE_EL0t) + and x0, x0, x1 + sub x0, x0, x2 + cbnz x0, .Lis_not_compat_task\@ + /* Task in 32-bit mode */ + prepare_compat_pt_regs + b .Lpac_handle\@ + .endif + /* Task in 64-bit mode */ +.Lis_not_compat_task\@: + prepare_pt_regs \el, \sign_or_auth + /* Call the sign or auth function. 
*/ +.Lpac_handle\@: + .if \sign_or_auth == 0 + sign_exception_context_common + .else + auth_exception_context_common + .endif + ldr x6, [sp], #8 + ldp x4, x5, [sp], #16 + ldp x2, x3, [sp], #16 + ldp x0, x1, [sp], #16 + .endm + + .macro sign_pt_regs, el, sign=0 + pac_pt_regs \el, \sign + .endm + + .macro auth_pt_regs, el, auth=1 + pac_pt_regs \el, \auth + .endm + +#else /* CONFIG_ARM64_PTR_AUTH */ + + .macro sign_cpu_context sign=0 + .endm + + .macro auth_cpu_context auth=1 + .endm + + .macro sign_pt_regs, el, sign=0 + .endm + + .macro auth_pt_regs, el, auth=1 + .endm + +#endif /* CONFIG_ARM64_PTR_AUTH */ + +#endif /* __ASM_POINTER_AUTH_CONTEXT_H */ diff --git a/pac/arm64/include/pointer_auth_context.h b/pac/arm64/include/pointer_auth_context.h new file mode 100644 index 0000000..ed8ff69 --- /dev/null +++ b/pac/arm64/include/pointer_auth_context.h @@ -0,0 +1,173 @@ +// SPDX-License-Identifier: GPL-2.0-or-later +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + */ + +#ifndef __POINTER_AUTH_CONTEXT_H +#define __POINTER_AUTH_CONTEXT_H + +#include +#include +#include + +enum pac_pt_regs { + REGS_X16 = 0, + REGS_X17, + REGS_LR, + REGS_SP, + REGS_PC, + REGS_PSTATE, +}; + +#ifdef CONFIG_ARM64_PTR_AUTH +void sign_thread_context(void *cpu_context); +void auth_thread_context(void *cpu_context); + +void sign_exception_context_asm(void *regs); +void auth_exception_context_asm(void *regs); + +int set_exception_context_register_asm(void *regs, int offset, u64 val); + +#ifdef CONFIG_COMPAT +void sign_compat_exception_context_asm(void *regs); +void auth_compat_exception_context_asm(void *regs); + +int set_compat_exception_context_register_asm(void *regs, int offset, u64 val); +#else +static inline void sign_compat_exception_context_asm(void *regs) +{ +} + +static inline void auth_compat_exception_context_asm(void *regs) +{ +} + +static inline int set_compat_exception_context_register_asm(void *regs, int offset, u64 val) +{ + return 0; +} +#endif + +static inline void 
sign_compat_exception_context(void *regs) +{ + return sign_compat_exception_context_asm(regs); +} + +static inline void auth_compat_exception_context(void *regs) +{ + return auth_compat_exception_context_asm(regs); +} + +static inline void sign_exception_context(void *regs) +{ + if (compat_user_mode((struct pt_regs *)regs)) { + sign_compat_exception_context_asm(regs); + } else { + sign_exception_context_asm(regs); + } +} + +static inline void auth_exception_context(void *regs) +{ + if (compat_user_mode((struct pt_regs *)regs)) { + auth_compat_exception_context_asm(regs); + } else { + auth_exception_context_asm(regs); + } +} + +#define resign_compat_exception_context_start(regs) \ +do { \ + unsigned long irq_flags; \ + local_irq_save(irq_flags); \ + auth_compat_exception_context_asm(regs); + +#define resign_compat_exception_context_end(regs) \ + sign_compat_exception_context_asm(regs); \ + local_irq_restore(irq_flags); \ +} while(0) + +#define resign_exception_context_start(regs) \ +do { \ + unsigned long irq_flags; \ + local_irq_save(irq_flags); \ + auth_exception_context(regs); + +#define resign_exception_context_end(regs) \ + sign_exception_context(regs); \ + local_irq_restore(irq_flags); \ +} while(0) + +#define sign_exception_context_start(regs) \ +do { \ + unsigned long irq_flags; \ + local_irq_save(irq_flags); + +#define sign_exception_context_end(regs) \ + sign_exception_context(regs); \ + local_irq_restore(irq_flags); \ +} while(0) + +int set_compat_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val); +int set_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val); + +void set_compat_exception_context_register_index(struct pt_regs *regs, int index, u64 val); +void set_exception_context_register_index(struct pt_regs *regs, int index, u64 val); + +#else /* CONFIG_ARM64_PTR_AUTH */ + +static inline void sign_thread_context(void *cpu_context) +{ +} + +static inline void auth_thread_context(void *cpu_context) +{ +} + 
+static inline void sign_compat_exception_context(void *regs) +{ +} + +static inline void auth_compat_exception_context(void *regs) +{ +} + +static inline void sign_exception_context(void *regs) +{ +} + +static inline void auth_exception_context(void *regs) +{ +} + +#define resign_compat_exception_context_start(regs) +#define resign_compat_exception_context_end(regs) + +#define resign_exception_context_start(regs) +#define resign_exception_context_end(regs) + +#define sign_exception_context_start(regs) +#define sign_exception_context_end(regs) + +static inline int set_compat_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val) +{ + return 0; +} + +static inline int set_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val) +{ + return 0; +} + +static inline void set_compat_exception_context_register_index(void *regs, int index, u64 val) +{ +} + +static inline void set_exception_context_register_index(void *regs, int index, u64 val) +{ +} + +#endif /* CONFIG_ARM64_PTR_AUTH */ + +#endif /* __POINTER_AUTH_CONTEXT_H */ + diff --git a/pac/arm64/src/asm_pointer_auth_context.S b/pac/arm64/src/asm_pointer_auth_context.S new file mode 100644 index 0000000..03b3a62 --- /dev/null +++ b/pac/arm64/src/asm_pointer_auth_context.S @@ -0,0 +1,258 @@ +// SPDX-License-Identifier: GPL-2.0-or-later +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. + */ + +#include "asm_pointer_auth_context.h" + +#include +#include +#include + +#ifdef CONFIG_ARM64_PTR_AUTH + +#ifdef CONFIG_COMPAT + /* Obtain the regs of compat task to sign or authenticate. */ + .macro ldr_compat_pt_regs + mov x1, #0 + mov x2, #0 + /* load lr, sp, pc, pstate of compat task */ + ldr x3, [x0, #S_COMPAT_LR] + ldr x4, [x0, #S_COMPAT_SP] + ldr x5, [x0, #S_PC] + ldr x6, [x0, #S_PSTATE] + .endm +#endif + + /* Obtain the regs of task to sign or authenticate. 
*/ + .macro ldr_pt_regs + /* load x16, x17, lr, sp, pc, pstate of task */ + ldp x1, x2, [x0, #S_X16] + ldr x3, [x0, #S_LR] + ldr x4, [x0, #S_SP] + ldr x5, [x0, #S_PC] + ldr x6, [x0, #S_PSTATE] + .endm + +/* + * Register sign_thread_context for AArch64. + * void sign_thread_context(struct cpu_context *cpu_context) + * On entry: + * x0: the pointer of cpu_context + */ +SYM_FUNC_START(sign_thread_context) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr x1, [x0, #CPU_CONTEXT_PC] + ldr x2, [x0, #CPU_CONTEXT_SP] + sign_thread_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(sign_thread_context) + +/* + * Register auth_thread_context for AArch64. + * void auth_thread_context(struct cpu_context *cpu_context) + * On entry: + * x0: the pointer of cpu_context + */ +SYM_FUNC_START(auth_thread_context) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr x1, [x0, #CPU_CONTEXT_PC] + ldr x2, [x0, #CPU_CONTEXT_SP] + auth_thread_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(auth_thread_context) + +/* + * Register set_exception_context_register_asm for AArch64. + * int set_exception_context_register_asm(struct pt_regs *regs, int offset, u64 val); + * On entry: + * x0: the regs of task + * x1: the offset of member in pt_regs struct + * x2: the value that needs to be updated + */ +SYM_FUNC_START(set_exception_context_register_asm) + stp x29, x30, [sp, #-80]! 
+ mov x29, sp + stp x8, x9, [sp, #16] + stp x1, x2, [sp, #32] + str x19, [sp, #48] + mrs x19, daif + msr daifset, #0x2 + ldr_pt_regs + stp x1, x2, [sp, #64] + auth_exception_context_common + ldp x1, x2, [sp, #64] + ldp x8, x9, [sp, #32] + cmp x8, #S_LR + b.eq .Lupdate_lr + b.ls .Lchoose_lower + cmp x8, #S_PC + b.eq .Lupdate_pc + b.cc .Lupdate_sp + cmp x8, #S_PSTATE + b.eq .Lupdate_pstate +.Lerror_return: + /* invalid value: return -EINVAL */ + mov x0, #-22 + b .Lreturn +.Lchoose_lower: + cmp x8, #S_X16 + b.eq .Lupdate_x16 + b.hi .Lupdate_x17 + b .Lerror_return +.Lupdate_pstate: + mov x6, x9 +.Lupdate_done: + str x9, [x0, x8] + sign_exception_context_common +.Lreturn: + msr daif, x19 + ldr x19, [sp, #48] + ldp x8, x9, [sp, #16] + ldp x29, x30, [sp], #80 + ret + +.Lupdate_x16: + mov x1, x9 + b .Lupdate_done +.Lupdate_x17: + mov x2, x9 + b .Lupdate_done +.Lupdate_lr: + mov x3, x9 + b .Lupdate_done +.Lupdate_sp: + mov x4, x9 + b .Lupdate_done +.Lupdate_pc: + mov x5, x9 + b .Lupdate_done +SYM_FUNC_END(set_exception_context_register_asm) + +#ifdef CONFIG_COMPAT +/* + * Register set_compat_exception_context_register_asm for AArch64. + * int set_compat_exception_context_register_asm(struct pt_regs *regs, int offset, u64 val); + * On entry: + * x0: the regs of compat task + * x1: the offset of member in pt_regs struct + * x2: the value that needs to be updated + */ +SYM_FUNC_START(set_compat_exception_context_register_asm) + stp x29, x30, [sp, #-80]! 
+ mov x29, sp + stp x8, x9, [sp, #16] + stp x1, x2, [sp, #32] + str x19, [sp, #48] + mrs x19, daif + msr daifset, #0x2 + ldr_compat_pt_regs + stp x1, x2, [sp, #64] + auth_exception_context_common + ldp x1, x2, [sp, #64] + ldp x8, x9, [sp, #32] + cmp x8, #S_COMPAT_LR + b.eq .Lupdate_compat_lr + b.ls .Lcompat_choose_lower + cmp x8, #S_PSTATE + b.eq .Lupdate_compat_pstate + b.cc .Lupdate_compat_pc +.Lcompat_error_return: + /* invalid value: return -EINVAL */ + mov x0, #-22 + b .Lcompat_return +.Lcompat_choose_lower: + cmp x8, #S_COMPAT_SP + b.eq .Lupdate_compat_sp + b .Lcompat_error_return +.Lupdate_compat_pstate: + mov x6, x9 +.Lcompat_update_done: + str x9, [x0, x8] + sign_exception_context_common +.Lcompat_return: + msr daif, x19 + ldr x19, [sp, #48] + ldp x8, x9, [sp, #16] + ldp x29, x30, [sp], #80 + ret + +.Lupdate_compat_lr: + mov x3, x9 + b .Lcompat_update_done +.Lupdate_compat_sp: + mov x4, x9 + b .Lcompat_update_done +.Lupdate_compat_pc: + mov x5, x9 + b .Lcompat_update_done +SYM_FUNC_END(set_compat_exception_context_register_asm) +#endif + +/* + * Register sign_exception_context_asm for AArch64. + * void sign_exception_context_asm(struct pt_regs *regs); + * On entry: + * x0: the regs of task + */ +SYM_FUNC_START(sign_exception_context_asm) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr_pt_regs + sign_exception_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(sign_exception_context_asm) + +/* + * Register auth_exception_context_asm for AArch64. + * void auth_exception_context_asm(struct pt_regs *regs); + * On entry: + * x0: the regs of task + */ +SYM_FUNC_START(auth_exception_context_asm) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr_pt_regs + auth_exception_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(auth_exception_context_asm) + +#ifdef CONFIG_COMPAT +/* + * Register sign_compat_exception_context_asm for AArch64. 
+ * void sign_compat_exception_context_asm(struct pt_regs *regs); + * On entry: + * x0: the regs of compat task + */ +SYM_FUNC_START(sign_compat_exception_context_asm) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr_compat_pt_regs + sign_exception_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(sign_compat_exception_context_asm) + +/* + * Register auth_compat_exception_context_asm for AArch64. + * void auth_compat_exception_context_asm(struct pt_regs *regs); + * On entry: + * x0: the regs of compat task + */ +SYM_FUNC_START(auth_compat_exception_context_asm) + stp x29, x30, [sp, #-16]! + mov x29, sp + ldr_compat_pt_regs + auth_exception_context_common + ldp x29, x30, [sp], #16 + ret +SYM_FUNC_END(auth_compat_exception_context_asm) +#endif + +#endif /* CONFIG_ARM64_PTR_AUTH */ diff --git a/pac/arm64/src/pointer_auth_context.c b/pac/arm64/src/pointer_auth_context.c new file mode 100644 index 0000000..2a0bd39 --- /dev/null +++ b/pac/arm64/src/pointer_auth_context.c @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: GPL-2.0-or-later +/* + * Copyright (c) 2023 Huawei Device Co., Ltd. 
+ */ + +#include +#include +#include +#include + +#ifdef CONFIG_ARM64_PTR_AUTH + +/* The members of arrays below are corresponding to the enum defined in pointer_auth_context.h: + * enum pac_pt_regs { + * REGS_X16 = 0, + * REGS_X17, + * REGS_LR, + * REGS_SP, + * REGS_PC, + * REGS_PSTATE, + * }; + * + * compat_regs_offset_array[]: + * S_X14: the offset of compat_lr + * S_X13: the offset of compat_sp + */ +static off_t compat_regs_offset_array[] = {0, 0, S_X14, S_X13, S_PC, S_PSTATE}; +static off_t regs_offset_array[] = {S_X16, S_X17, S_LR, S_SP, S_PC, S_PSTATE}; + +int set_compat_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val) +{ + switch (regs_enum) { + case REGS_LR: + case REGS_SP: + case REGS_PC: + case REGS_PSTATE: + return set_compat_exception_context_register_asm(regs, compat_regs_offset_array[regs_enum], val); + default: + return -EINVAL; + } +} + +int set_exception_context_register(void *regs, enum pac_pt_regs regs_enum, u64 val) +{ + if (compat_user_mode((struct pt_regs *)regs)) { + return set_compat_exception_context_register(regs, regs_enum, val); + } else { + switch (regs_enum) { + case REGS_X16: + case REGS_X17: + case REGS_LR: + case REGS_SP: + case REGS_PC: + case REGS_PSTATE: + return set_exception_context_register_asm(regs, regs_offset_array[regs_enum], val); + default: + return -EINVAL; + } + } +} + +void set_compat_exception_context_register_index(struct pt_regs *regs, int index, uint64_t val) +{ + /* 14 means the index of compat_lr */ + if (index == 14) { + set_compat_exception_context_register_asm(regs, S_X14, val); + /* 13 means the index of compat_sp */ + } else if (index == 13) { + set_compat_exception_context_register_asm(regs, S_X13, val); + } else { + regs->regs[index] = val; + } +} + +void set_exception_context_register_index(struct pt_regs *regs, int index, uint64_t val) +{ + off_t offset; + + if (compat_user_mode(regs)) { + set_compat_exception_context_register_index(regs, index, val); + } else { + switch 
(index) { + /* 16 means the index of regs[16] */ + case 16: + /* 17 means the index of regs[17] */ + case 17: + /* 30 means the index of regs[30] */ + case 30: + offset = offsetof(struct pt_regs, regs[index]); + set_exception_context_register_asm(regs, offset, val); + break; + default: + regs->regs[index] = val; + } + } +} + +#endif /* CONFIG_ARM64_PTR_AUTH */ + -- Gitee