/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_JUMP_LABEL_H
#define _ASM_X86_JUMP_LABEL_H

#define HAVE_JUMP_LABEL_BATCH

#define JUMP_LABEL_NOP_SIZE 5

#include <asm/asm.h>
#include <asm/nops.h>

#ifndef __ASSEMBLY__

#include <linux/stringify.h>
#include <linux/types.h>

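/*
 * A static-key branch site is a 5-byte instruction slot that gets live
 * patched between a NOP and a "jmp rel32".  Each site also emits an entry
 * into the __jump_table section consisting of three relative words: the
 * address of the patched instruction (1b - .), the address of the l_yes
 * target (%l[l_yes] - .), and the address of the struct static_key with
 * the 'branch' argument folded into its low bit (%c0 + %c1 - .).  The
 * generic code in kernel/jump_label.c consumes these entries when a key
 * is toggled.
 */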
static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
{
	asm_volatile_goto("1:"
		".byte " __stringify(BYTES_NOP5) "\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}

static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
{
	asm_volatile_goto("1:"
		".byte 0xe9\n\t .long %l[l_yes] - 2f\n\t"
		"2:\n\t"
		".pushsection __jump_table, \"aw\" \n\t"
		_ASM_ALIGN "\n\t"
		".long 1b - ., %l[l_yes] - . \n\t"
		_ASM_PTR "%c0 + %c1 - .\n\t"
		".popsection \n\t"
		: : "i" (key), "i" (branch) : : l_yes);

	return false;
l_yes:
	return true;
}
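
/*
 * Usage sketch (illustrative; the key and function names below are made
 * up).  Code never calls arch_static_branch*() directly; it goes through
 * the generic helpers from <linux/jump_label.h>:
 *
 *	DEFINE_STATIC_KEY_FALSE(my_feature_key);
 *
 *	void my_hot_path(void)
 *	{
 *		if (static_branch_unlikely(&my_feature_key))
 *			do_rare_thing();
 *	}
 *
 *	// Elsewhere: patches the 5-byte NOP emitted above into a jmp.
 *	static_branch_enable(&my_feature_key);
 */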

#else /* __ASSEMBLY__ */

.macro STATIC_JUMP_IF_TRUE target, key, def
.Lstatic_jump_\@:
	.if \def
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.else
	.byte		BYTES_NOP5
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key - .
	.popsection
.endm

.macro STATIC_JUMP_IF_FALSE target, key, def
.Lstatic_jump_\@:
	.if \def
	.byte		BYTES_NOP5
	.else
	/* Equivalent to "jmp.d32 \target" */
	.byte		0xe9
	.long		\target - .Lstatic_jump_after_\@
.Lstatic_jump_after_\@:
	.endif
	.pushsection __jump_table, "aw"
	_ASM_ALIGN
	.long		.Lstatic_jump_\@ - ., \target - .
	_ASM_PTR	\key + 1 - .
	.popsection
.endm
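
/*
 * Illustrative invocation from assembly (label and key names are made up):
 *
 *	STATIC_JUMP_IF_TRUE	.Ldo_feature, my_feature_key, def=0
 *	...
 * .Ldo_feature:
 *
 * \def selects what is emitted by default: 1 emits the jump, 0 emits the
 * 5-byte NOP.  The "+ 1" in STATIC_JUMP_IF_FALSE sets the low bit of the
 * recorded key pointer, which tells the generic jump-label code that this
 * site should jump while the key is disabled.
 */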

#endif /* __ASSEMBLY__ */

#endif