arm64: klib: Optimised atomic bitops

This patch implements the AArch64-specific atomic bitops functions using
exclusive memory accesses to avoid locking.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Author: Catalin Marinas <catalin.marinas@arm.com>
Date:   2013-03-21 16:28:47 +00:00
Commit: 6247958653 (parent 2b8cac814c)

4 changed files with 94 additions and 32 deletions
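For background, "exclusive memory accesses" here means the AArch64 load-exclusive/store-exclusive pair (LDXR/STXR): load the word, modify it, and retry the store until no other CPU has touched the word in between. A minimal user-space sketch of that retry loop, assuming an AArch64 toolchain (the function name and scaffolding are mine for illustration, not code from this patch):

#include <stdio.h>

/* Sketch of set_bit(nr, p) built on an LDXR/STXR retry loop. */
static void sketch_set_bit(int nr, volatile unsigned long *p)
{
	unsigned long mask = 1UL << (nr & 63);		/* bit within the word */
	unsigned long *word = (unsigned long *)p + (nr >> 6);
	unsigned long tmp;
	unsigned int fail;

	asm volatile(
	"1:	ldxr	%0, %2\n"	/* load-exclusive the word             */
	"	orr	%0, %0, %3\n"	/* set the requested bit               */
	"	stxr	%w1, %0, %2\n"	/* store-exclusive; nonzero on failure */
	"	cbnz	%w1, 1b\n"	/* lost exclusivity: retry             */
	: "=&r" (tmp), "=&r" (fail), "+Q" (*word)
	: "r" (mask)
	: "memory");
}

int main(void)
{
	unsigned long bitmap[2] = { 0, 0 };

	sketch_set_bit(5, bitmap);
	sketch_set_bit(70, bitmap);
	printf("%#lx %#lx\n", bitmap[0], bitmap[1]);	/* 0x20 0x40 */
	return 0;
}

No spinlock is taken anywhere; atomicity comes from the exclusive monitor, which is why the patch can also delete the __atomic_hash lock array below.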

arch/arm64/include/asm/bitops.h

@@ -32,6 +32,16 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+/*
+ * Little endian assembly atomic bitops.
+ */
+extern void set_bit(int nr, volatile unsigned long *p);
+extern void clear_bit(int nr, volatile unsigned long *p);
+extern void change_bit(int nr, volatile unsigned long *p);
+extern int test_and_set_bit(int nr, volatile unsigned long *p);
+extern int test_and_clear_bit(int nr, volatile unsigned long *p);
+extern int test_and_change_bit(int nr, volatile unsigned long *p);
+
 #include <asm-generic/bitops/builtin-__ffs.h>
 #include <asm-generic/bitops/builtin-ffs.h>
 #include <asm-generic/bitops/builtin-__fls.h>
@@ -45,9 +55,13 @@
 #include <asm-generic/bitops/hweight.h>
 #include <asm-generic/bitops/lock.h>
 
-#include <asm-generic/bitops/atomic.h>
 #include <asm-generic/bitops/non-atomic.h>
 #include <asm-generic/bitops/le.h>
-#include <asm-generic/bitops/ext2-atomic.h>
+
+/*
+ * Ext2 is defined to use little-endian byte ordering.
+ */
+#define ext2_set_bit_atomic(lock, nr, p)	test_and_set_bit_le(nr, p)
+#define ext2_clear_bit_atomic(lock, nr, p)	test_and_clear_bit_le(nr, p)
 
 #endif	/* __ASM_BITOPS_H */

arch/arm64/kernel/arm64ksyms.c

@@ -39,11 +39,6 @@ EXPORT_SYMBOL(__copy_from_user);
 EXPORT_SYMBOL(__copy_to_user);
 EXPORT_SYMBOL(__clear_user);
 
-	/* bitops */
-#ifdef CONFIG_SMP
-EXPORT_SYMBOL(__atomic_hash);
-#endif
-
 	/* physical memory */
 EXPORT_SYMBOL(memstart_addr);
 
@@ -54,3 +49,11 @@ EXPORT_SYMBOL(memset);
 EXPORT_SYMBOL(memcpy);
 EXPORT_SYMBOL(memmove);
 EXPORT_SYMBOL(memchr);
+
+	/* atomic bitops */
+EXPORT_SYMBOL(set_bit);
+EXPORT_SYMBOL(test_and_set_bit);
+EXPORT_SYMBOL(clear_bit);
+EXPORT_SYMBOL(test_and_clear_bit);
+EXPORT_SYMBOL(change_bit);
+EXPORT_SYMBOL(test_and_change_bit);
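The new EXPORT_SYMBOL entries are what allow loadable modules to link against the assembly routines. A hypothetical module (my example, not part of the commit) would reach them through the usual <linux/bitops.h> interface:

#include <linux/module.h>
#include <linux/bitops.h>

static unsigned long demo_flags;	/* one 64-bit word of flag bits */

static int __init demo_init(void)
{
	set_bit(3, &demo_flags);	/* resolved via EXPORT_SYMBOL(set_bit) */

	/* test_and_set_bit() returns the bit's previous value: 1 here. */
	if (test_and_set_bit(3, &demo_flags))
		pr_info("bit 3 was already set\n");
	return 0;
}

static void __exit demo_exit(void)
{
	clear_bit(3, &demo_flags);
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");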

arch/arm64/lib/bitops.S (new file, 70 lines)

@@ -0,0 +1,70 @@
/*
 * Based on arch/arm/lib/bitops.h
 *
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>

/*
 * x0: bits 5:0  bit offset
 *     bits 63:6 word offset
 * x1: address
 */
	.macro	bitop, name, instr
ENTRY(	\name	)
	and	x3, x0, #63		// Get bit offset
	eor	x0, x0, x3		// Clear low bits
	mov	x2, #1
	add	x1, x1, x0, lsr #3	// Get word offset
	lsl	x3, x2, x3		// Create mask
1:	ldxr	x2, [x1]
	\instr	x2, x2, x3
	stxr	w0, x2, [x1]
	cbnz	w0, 1b
	ret
ENDPROC(\name	)
	.endm

	.macro	testop, name, instr
ENTRY(	\name	)
	and	x3, x0, #63		// Get bit offset
	eor	x0, x0, x3		// Clear low bits
	mov	x2, #1
	add	x1, x1, x0, lsr #3	// Get word offset
	lsl	x4, x2, x3		// Create mask
	smp_dmb	ish
1:	ldxr	x2, [x1]
	lsr	x0, x2, x3		// Save old value of bit
	\instr	x2, x2, x4		// toggle bit
	stxr	w2, x2, [x1]
	cbnz	w2, 1b
	smp_dmb	ish
	and	x0, x0, #1
3:	ret
ENDPROC(\name	)
	.endm

/*
 * Atomic bit operations.
 */
	bitop	change_bit, eor
	bitop	clear_bit, bic
	bitop	set_bit, orr

	testop	test_and_change_bit, eor
	testop	test_and_clear_bit, bic
	testop	test_and_set_bit, orr
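The comment at the top of the file describes the calling convention: x0 carries the bit number, whose low six bits select a bit and whose remaining bits select a 64-bit word. The arithmetic the macros perform (and x3, x0, #63; eor x0, x0, x3; add x1, x1, x0, lsr #3) can be checked with a few lines of ordinary C (my sketch, not kernel code):

#include <assert.h>
#include <stdio.h>

int main(void)
{
	for (unsigned long nr = 0; nr < 4096; nr++) {
		unsigned long bit  = nr & 63;	/* and x3, x0, #63: bit offset    */
		unsigned long rest = nr ^ bit;	/* eor x0, x0, x3: clear low bits */
		unsigned long off  = rest >> 3;	/* add x1, x1, x0, lsr #3         */

		/* The byte offset always lands on a 64-bit word boundary. */
		assert(off == (nr / 64) * sizeof(unsigned long));
		assert(bit == nr % 64);
	}
	puts("offset arithmetic checks out");
	return 0;
}

Note also that testop brackets its loop with two smp_dmb ish barriers while bitop has none: test_and_*() must be fully ordered, whereas plain set_bit()/clear_bit()/change_bit() carry no ordering guarantee.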

arch/arm64/lib/bitops.c (deleted)

@@ -1,25 +0,0 @@
/*
 * Copyright (C) 2012 ARM Limited
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

#ifdef CONFIG_SMP
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
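For contrast, this deleted file provided the spinlock array used by the generic asm-generic/bitops/atomic.h implementation, which serializes every bitop through a lock chosen by hashing the target address. A simplified user-space pthread sketch of that retired scheme (the names and hash size are mine, arbitrary for illustration):

#include <pthread.h>

#define HASH_SIZE 4	/* arbitrary for this sketch */

static pthread_mutex_t hash_lock[HASH_SIZE] = {
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
};

/* Hash the word's address to pick one of the shared locks. */
static pthread_mutex_t *lock_for(volatile unsigned long *word)
{
	return &hash_lock[((unsigned long)word >> 3) % HASH_SIZE];
}

/* Lock-based set_bit: correct, but every caller contends on the locks. */
static void hashed_set_bit(int nr, volatile unsigned long *p)
{
	volatile unsigned long *word = p + (nr >> 6);
	pthread_mutex_t *lock = lock_for(word);

	pthread_mutex_lock(lock);
	*word |= 1UL << (nr & 63);
	pthread_mutex_unlock(lock);
}

The LDXR/STXR routines above need no such shared state, which is why both the lock array and its EXPORT_SYMBOL can go.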