mirror of https://git.kernel.org/pub/scm/linux/kernel/git/stable/linux.git
synced 2024-11-01 00:48:50 +00:00

commit cb8095bba6
In the "xchg" implementation, %ebx and %ecx don't need to be copied into %eax and %edx respectively (this is only necessary when desiring to only read the stored value). In the "add_unless" implementation, swapping the use of %ecx and %esi for passing arguments allows %esi to become an input only (i.e. permitting the register to be re-used to address the same object without reload). In "{add,sub}_return", doing the initial read64 through the passed in %ecx decreases a register dependency. In "inc_not_zero", a branch can be eliminated by or-ing together the two halves of the current (64-bit) value, and code size can be further reduced by adjusting the arithmetic slightly. v2: Undo the folding of "xchg" and "set". Signed-off-by: Jan Beulich <jbeulich@suse.com> Link: http://lkml.kernel.org/r/4F19A2BC020000780006E0DC@nat28.tlf.novell.com Cc: Luca Barbieri <luca@luca-barbieri.com> Cc: Eric Dumazet <eric.dumazet@gmail.com> Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
/*
 * atomic64_t for 386/486
 *
 * Copyright © 2010 Luca Barbieri
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/linkage.h>
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>

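/*
 * Context: this file exists because 386/486 CPUs lack the cmpxchg8b
 * instruction used by the regular 32-bit atomic64 implementation, so
 * these routines fall back to disabling interrupts around each
 * read-modify-write, which is only safe on UP (hence the note below
 * about real spinlocks for SMP).
 */
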
/* if you want SMP support, implement these with real spinlocks */
.macro LOCK reg
        pushfl_cfi              /* save EFLAGS (with CFI annotation)... */
        cli                     /* ...and disable interrupts: UP-only "locking" */
.endm

.macro UNLOCK reg
        popfl_cfi               /* restore EFLAGS, re-enabling interrupts if they were on */
.endm

/*
 * BEGIN(op) opens ENTRY(atomic64_<op>_386), starts the CFI region and
 * takes the interrupt-disable "lock"; it also defines a temporary
 * "endp" macro so ENDP/RET_ENDP can close the function again.
 */
#define BEGIN(op) \
.macro endp; \
        CFI_ENDPROC; \
ENDPROC(atomic64_##op##_386); \
.purgem endp; \
.endm; \
ENTRY(atomic64_##op##_386); \
        CFI_STARTPROC; \
        LOCK v;

#define ENDP endp

#define RET \
        UNLOCK v; \
        ret

#define RET_ENDP \
        RET; \
        ENDP

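/*
 * Register conventions implied by the code below (the C-level callers
 * pin these via inline-asm register constraints, see
 * <asm/atomic64_32.h>): the atomic64_t pointer arrives in the register
 * #defined as "v" for each routine, 64-bit values and results travel in
 * %eax (low half) and %edx (high half), "set"/"xchg" take the new value
 * in %ebx (low) / %ecx (high), and "add_unless" takes its comparison
 * value in %ecx (low) / %edi (high).
 */
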
#define v %ecx
BEGIN(read)
        movl (v), %eax
        movl 4(v), %edx
RET_ENDP
#undef v

#define v %esi
BEGIN(set)
        movl %ebx, (v)
        movl %ecx, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(xchg)
        movl (v), %eax          /* return the old value in %eax:%edx... */
        movl 4(v), %edx
        movl %ebx, (v)          /* ...and store the new value from %ebx:%ecx */
        movl %ecx, 4(v)
RET_ENDP
#undef v

#define v %ecx
BEGIN(add)
        addl %eax, (v)
        adcl %edx, 4(v)
RET_ENDP
#undef v

#define v %ecx
BEGIN(add_return)
        addl (v), %eax
        adcl 4(v), %edx
        movl %eax, (v)
        movl %edx, 4(v)
RET_ENDP
#undef v

#define v %ecx
BEGIN(sub)
        subl %eax, (v)
        sbbl %edx, 4(v)
RET_ENDP
#undef v

#define v %ecx
BEGIN(sub_return)
        negl %edx               /* negate the 64-bit subtrahend in %eax:%edx... */
        negl %eax
        sbbl $0, %edx
        addl (v), %eax          /* ...then add it to *v and store the result */
        adcl 4(v), %edx
        movl %eax, (v)
        movl %edx, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(inc)
        addl $1, (v)
        adcl $0, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(inc_return)
        movl (v), %eax
        movl 4(v), %edx
        addl $1, %eax
        adcl $0, %edx
        movl %eax, (v)
        movl %edx, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(dec)
        subl $1, (v)
        sbbl $0, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(dec_return)
        movl (v), %eax
        movl 4(v), %edx
        subl $1, %eax
        sbbl $0, %edx
        movl %eax, (v)
        movl %edx, 4(v)
RET_ENDP
#undef v

#define v %esi
BEGIN(add_unless)
        addl %eax, %ecx         /* %ecx:%edi = u + a, for the comparison below */
        adcl %edx, %edi
        addl (v), %eax          /* %eax:%edx = *v + a */
        adcl 4(v), %edx
        cmpl %eax, %ecx         /* *v + a == u + a  <=>  *v == u */
        je 3f
1:
        movl %eax, (v)          /* *v != u: store the sum, return 1 */
        movl %edx, 4(v)
        movl $1, %eax
2:
        RET
3:
        cmpl %edx, %edi         /* low halves matched; check the high halves */
        jne 1b
        xorl %eax, %eax         /* *v == u: leave *v alone, return 0 */
        jmp 2b
ENDP
#undef v

#define v %esi
BEGIN(inc_not_zero)
        movl (v), %eax
        movl 4(v), %edx
        testl %eax, %eax        /* low half zero? then the high half decides */
        je 3f
1:
        addl $1, %eax           /* *v != 0: increment, store, return 1 */
        adcl $0, %edx
        movl %eax, (v)
        movl %edx, 4(v)
        movl $1, %eax
2:
        RET
3:
        testl %edx, %edx        /* high half non-zero: *v != 0 after all */
        jne 1b
        jmp 2b                  /* *v == 0: return 0 (%eax is already zero) */
ENDP
#undef v

#define v %esi
BEGIN(dec_if_positive)
        movl (v), %eax
        movl 4(v), %edx
        subl $1, %eax           /* compute *v - 1 in %eax:%edx */
        sbbl $0, %edx
        js 1f                   /* result negative: don't write it back */
        movl %eax, (v)
        movl %edx, 4(v)
1:
RET_ENDP
#undef v