mirror of
https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git
synced 2024-11-01 00:48:50 +00:00
ca520cab25
Pull locking and atomic updates from Ingo Molnar: "Main changes in this cycle are: - Extend atomic primitives with coherent logic op primitives (atomic_{or,and,xor}()) and deprecate the old partial APIs (atomic_{set,clear}_mask()) The old ops were incoherent with incompatible signatures across architectures and with incomplete support. Now every architecture supports the primitives consistently (by Peter Zijlstra) - Generic support for 'relaxed atomics': - _acquire/release/relaxed() flavours of xchg(), cmpxchg() and {add,sub}_return() - atomic_read_acquire() - atomic_set_release() This came out of porting qrwlock code to arm64 (by Will Deacon) - Clean up the fragile static_key APIs that were causing repeat bugs, by introducing a new one: DEFINE_STATIC_KEY_TRUE(name); DEFINE_STATIC_KEY_FALSE(name); which define a key of different types with an initial true/false value. Then allow: static_branch_likely() static_branch_unlikely() to take a key of either type and emit the right instruction for the case. To be able to know the 'type' of the static key we encode it in the jump entry (by Peter Zijlstra) - Static key self-tests (by Jason Baron) - qrwlock optimizations (by Waiman Long) - small futex enhancements (by Davidlohr Bueso) - ... 
and misc other changes" * 'locking-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip: (63 commits) jump_label/x86: Work around asm build bug on older/backported GCCs locking, ARM, atomics: Define our SMP atomics in terms of _relaxed() operations locking, include/llist: Use linux/atomic.h instead of asm/cmpxchg.h locking/qrwlock: Make use of _{acquire|release|relaxed}() atomics locking/qrwlock: Implement queue_write_unlock() using smp_store_release() locking/lockref: Remove homebrew cmpxchg64_relaxed() macro definition locking, asm-generic: Add _{relaxed|acquire|release}() variants for 'atomic_long_t' locking, asm-generic: Rework atomic-long.h to avoid bulk code duplication locking/atomics: Add _{acquire|release|relaxed}() variants of some atomic operations locking, compiler.h: Cast away attributes in the WRITE_ONCE() magic locking/static_keys: Make verify_keys() static jump label, locking/static_keys: Update docs locking/static_keys: Provide a selftest jump_label: Provide a self-test s390/uaccess, locking/static_keys: employ static_branch_likely() x86, tsc, locking/static_keys: Employ static_branch_likely() locking/static_keys: Add selftest locking/static_keys: Add a new static_key interface locking/static_keys: Rework update logic locking/static_keys: Add static_key_{en,dis}able() helpers ...
165 lines
4.2 KiB
C
/*
 * Export of symbols defined in assembler
 */

/* Tell string.h we don't want memcpy etc. as cpp defines */
#define EXPORT_SYMTAB_STROPS

#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

#include <asm/checksum.h>
#include <asm/uaccess.h>
#include <asm/ftrace.h>
|
|
|
|
/* string functions */
|
|
EXPORT_SYMBOL(strlen);
|
|
EXPORT_SYMBOL(strncmp);
|
|
|
|
/* mem* functions */
|
|
extern void *__memscan_zero(void *, size_t);
|
|
extern void *__memscan_generic(void *, int, size_t);
|
|
extern void *__bzero(void *, size_t);
|
|
|
|
EXPORT_SYMBOL(memscan);
|
|
EXPORT_SYMBOL(__memscan_zero);
|
|
EXPORT_SYMBOL(__memscan_generic);
|
|
EXPORT_SYMBOL(memcmp);
|
|
EXPORT_SYMBOL(memcpy);
|
|
EXPORT_SYMBOL(memset);
|
|
EXPORT_SYMBOL(memmove);
|
|
EXPORT_SYMBOL(__bzero);
|
|
|
|
/* Networking helper routines. */
|
|
EXPORT_SYMBOL(csum_partial);
|
|
|
|
#ifdef CONFIG_MCOUNT
|
|
EXPORT_SYMBOL(_mcount);
|
|
#endif
|
|
|
|
/*
 * sparc
 */
#ifdef CONFIG_SPARC32
/*
 * 64-bit integer helpers referenced by compiler-generated code on
 * 32-bit sparc; presumably implemented in arch assembler/lib code —
 * NOTE(review): the int prototypes look narrower than the usual libgcc
 * long-long signatures, confirm against the assembler definitions.
 */
extern int __ashrdi3(int, int);
extern int __ashldi3(int, int);
extern int __lshrdi3(int, int);
extern int __muldi3(int, int);
extern int __divdi3(int, int);

/* Function pointers patched at boot to CPU-specific page primitives. */
extern void (*__copy_1page)(void *, const void *);
extern void (*bzero_1page)(void *);

/* Assembler rwlock slow paths (see asm/spinlock.h). */
extern void ___rw_read_enter(void);
extern void ___rw_read_try(void);
extern void ___rw_read_exit(void);
extern void ___rw_write_enter(void);

/* Networking helper routines. */
EXPORT_SYMBOL(__csum_partial_copy_sparc_generic);

/* Special internal versions of library functions. */
EXPORT_SYMBOL(__copy_1page);
EXPORT_SYMBOL(__memmove);
EXPORT_SYMBOL(bzero_1page);

/* Moving data to/from/in userspace. */
EXPORT_SYMBOL(__copy_user);

/* Used by asm/spinlock.h */
#ifdef CONFIG_SMP
EXPORT_SYMBOL(___rw_read_enter);
EXPORT_SYMBOL(___rw_read_try);
EXPORT_SYMBOL(___rw_read_exit);
EXPORT_SYMBOL(___rw_write_enter);
#endif

EXPORT_SYMBOL(__ashrdi3);
EXPORT_SYMBOL(__ashldi3);
EXPORT_SYMBOL(__lshrdi3);
EXPORT_SYMBOL(__muldi3);
EXPORT_SYMBOL(__divdi3);
#endif
|
|
|
|
/*
 * sparc64
 */
#ifdef CONFIG_SPARC64
/* Networking helper routines. */
EXPORT_SYMBOL(csum_partial_copy_nocheck);
EXPORT_SYMBOL(__csum_partial_copy_from_user);
EXPORT_SYMBOL(__csum_partial_copy_to_user);
EXPORT_SYMBOL(ip_fast_csum);

/* Moving data to/from/in userspace. */
EXPORT_SYMBOL(___copy_to_user);
EXPORT_SYMBOL(___copy_from_user);
EXPORT_SYMBOL(___copy_in_user);
EXPORT_SYMBOL(__clear_user);

/*
 * Atomic counter implementation.
 * The ATOMIC_* macros stamp out EXPORT_SYMBOL for both the 32-bit
 * (atomic_*) and 64-bit (atomic64_*) assembler implementations of
 * each op, mirroring the generator macros used to define them.
 */
#define ATOMIC_OP(op)							\
EXPORT_SYMBOL(atomic_##op);						\
EXPORT_SYMBOL(atomic64_##op);

#define ATOMIC_OP_RETURN(op)						\
EXPORT_SYMBOL(atomic_##op##_return);					\
EXPORT_SYMBOL(atomic64_##op##_return);

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

EXPORT_SYMBOL(atomic64_dec_if_positive);

/* Atomic bit operations. */
EXPORT_SYMBOL(test_and_set_bit);
EXPORT_SYMBOL(test_and_clear_bit);
EXPORT_SYMBOL(test_and_change_bit);
EXPORT_SYMBOL(set_bit);
EXPORT_SYMBOL(clear_bit);
EXPORT_SYMBOL(change_bit);

/* Special internal versions of library functions. */
EXPORT_SYMBOL(_clear_page);
EXPORT_SYMBOL(clear_user_page);
EXPORT_SYMBOL(copy_user_page);

/* RAID code needs this */
void VISenter(void);
EXPORT_SYMBOL(VISenter);

/* VIS-accelerated xor blocks for the RAID/xor library. */
extern void xor_vis_2(unsigned long, unsigned long *, unsigned long *);
extern void xor_vis_3(unsigned long, unsigned long *, unsigned long *,
		      unsigned long *);
extern void xor_vis_4(unsigned long, unsigned long *, unsigned long *,
		      unsigned long *, unsigned long *);
extern void xor_vis_5(unsigned long, unsigned long *, unsigned long *,
		      unsigned long *, unsigned long *, unsigned long *);
EXPORT_SYMBOL(xor_vis_2);
EXPORT_SYMBOL(xor_vis_3);
EXPORT_SYMBOL(xor_vis_4);
EXPORT_SYMBOL(xor_vis_5);

/* Niagara-tuned xor blocks (same interface as the VIS variants). */
extern void xor_niagara_2(unsigned long, unsigned long *, unsigned long *);
extern void xor_niagara_3(unsigned long, unsigned long *, unsigned long *,
			  unsigned long *);
extern void xor_niagara_4(unsigned long, unsigned long *, unsigned long *,
			  unsigned long *, unsigned long *);
extern void xor_niagara_5(unsigned long, unsigned long *, unsigned long *,
			  unsigned long *, unsigned long *, unsigned long *);

EXPORT_SYMBOL(xor_niagara_2);
EXPORT_SYMBOL(xor_niagara_3);
EXPORT_SYMBOL(xor_niagara_4);
EXPORT_SYMBOL(xor_niagara_5);
#endif
|