2023-02-22 03:30:18 +00:00
|
|
|
/* SPDX-License-Identifier: GPL-2.0 */
|
|
|
|
|
|
|
|
#ifndef _ASM_RISCV_ENTRY_COMMON_H
|
|
|
|
#define _ASM_RISCV_ENTRY_COMMON_H
|
|
|
|
|
|
|
|
#include <asm/stacktrace.h>
|
2024-01-15 05:59:23 +00:00
|
|
|
#include <asm/thread_info.h>
|
|
|
|
#include <asm/vector.h>
|
|
|
|
|
|
|
|
static inline void arch_exit_to_user_mode_prepare(struct pt_regs *regs,
|
|
|
|
unsigned long ti_work)
|
|
|
|
{
|
|
|
|
if (ti_work & _TIF_RISCV_V_DEFER_RESTORE) {
|
|
|
|
clear_thread_flag(TIF_RISCV_V_DEFER_RESTORE);
|
|
|
|
/*
|
|
|
|
* We are already called with irq disabled, so go without
|
|
|
|
* keeping track of riscv_v_flags.
|
|
|
|
*/
|
2024-01-15 05:59:26 +00:00
|
|
|
riscv_v_vstate_restore(¤t->thread.vstate, regs);
|
2024-01-15 05:59:23 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#define arch_exit_to_user_mode_prepare arch_exit_to_user_mode_prepare
|
2023-02-22 03:30:18 +00:00
|
|
|
|
|
|
|
void handle_page_fault(struct pt_regs *regs);
|
|
|
|
void handle_break(struct pt_regs *regs);
|
|
|
|
|
2023-10-04 15:13:59 +00:00
|
|
|
#ifdef CONFIG_RISCV_MISALIGNED
|
|
|
|
int handle_misaligned_load(struct pt_regs *regs);
|
|
|
|
int handle_misaligned_store(struct pt_regs *regs);
|
|
|
|
#else
|
|
|
|
/*
 * Stub for !CONFIG_RISCV_MISALIGNED kernels: no software emulation of
 * misaligned loads is available, so report failure and let the caller
 * deliver the trap to the faulting task.
 */
static inline int handle_misaligned_load(struct pt_regs *regs)
{
	return -1;
}
|
|
|
|
/*
 * Stub for !CONFIG_RISCV_MISALIGNED kernels: no software emulation of
 * misaligned stores is available, so report failure and let the caller
 * deliver the trap to the faulting task.
 */
static inline int handle_misaligned_store(struct pt_regs *regs)
{
	return -1;
}
|
|
|
|
#endif
|
|
|
|
|
2023-02-22 03:30:18 +00:00
|
|
|
#endif /* _ASM_RISCV_ENTRY_COMMON_H */
|