x86/lib/memset_64.S: Convert to ALTERNATIVE_2 macro

Make alternatives replace single JMPs instead of whole memset functions,
thus decreasing the number of instructions copied during patching at
boot.

While at it, make the REP_GOOD version the default, which means the
alternatives only NOP out the JMP to the other versions, as REP_GOOD is
set on the majority of relevant x86 processors.
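
To illustrate (a sketch, not the macro's literal expansion): after
apply_alternatives() has run, the patched slot at the top of __memset
ends up in one of three states, depending on the CPU feature flags:

        /* Neither REP_GOOD nor ERMS: the default instruction remains */
        jmp memset_orig                 /* original, unrolled variant */

        /* X86_FEATURE_REP_GOOD: the replacement is "", i.e. the JMP is
         * NOPped out and execution falls through into the REP STOSQ
         * body directly below the slot */

        /* X86_FEATURE_ERMS: the slot is patched to jump to the plain
         * REP STOSB variant instead; since ERMS is the second
         * replacement, it is applied last and wins on CPUs which have
         * both feature bits set */
        jmp memset_erms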

Signed-off-by: Borislav Petkov <bp@suse.de>
Borislav Petkov <bp@suse.de>, 2015-02-04 08:57:00 +01:00
parent a930dc4543
commit 84d95ad4cb
1 changed file with 24 additions and 37 deletions


@@ -5,19 +5,30 @@
 #include <asm/cpufeature.h>
 #include <asm/alternative-asm.h>
 
+.weak memset
+
 /*
  * ISO C memset - set a memory block to a byte value. This function uses fast
  * string to get better performance than the original function. The code is
  * simpler and shorter than the orignal function as well.
- *
+ *
  * rdi   destination
- * rsi   value (char)
- * rdx   count (bytes)
- *
+ * rsi   value (char)
+ * rdx   count (bytes)
+ *
  * rax   original destination
  */
-        .section .altinstr_replacement, "ax", @progbits
-.Lmemset_c:
+ENTRY(memset)
+ENTRY(__memset)
+        /*
+         * Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
+         * to use it when possible. If not available, use fast string instructions.
+         *
+         * Otherwise, use original memset function.
+         */
+        ALTERNATIVE_2 "jmp memset_orig", "", X86_FEATURE_REP_GOOD, \
+                      "jmp memset_erms", X86_FEATURE_ERMS
+
         movq %rdi,%r9
         movq %rdx,%rcx
         andl $7,%edx
@@ -31,8 +42,8 @@
         rep stosb
         movq %r9,%rax
         ret
-.Lmemset_e:
-        .previous
+ENDPROC(memset)
+ENDPROC(__memset)
 
 /*
  * ISO C memset - set a memory block to a byte value. This function uses
@@ -45,21 +56,16 @@
  *
  * rax   original destination
  */
-        .section .altinstr_replacement, "ax", @progbits
-.Lmemset_c_e:
+ENTRY(memset_erms)
         movq %rdi,%r9
         movb %sil,%al
         movq %rdx,%rcx
         rep stosb
         movq %r9,%rax
         ret
-.Lmemset_e_e:
-        .previous
+ENDPROC(memset_erms)
 
-.weak memset
-
-ENTRY(memset)
-ENTRY(__memset)
+ENTRY(memset_orig)
         CFI_STARTPROC
         movq %rdi,%r10
 
@@ -134,23 +140,4 @@ ENTRY(__memset)
         jmp .Lafter_bad_alignment
 .Lfinal:
         CFI_ENDPROC
-ENDPROC(memset)
-ENDPROC(__memset)
-
-        /* Some CPUs support enhanced REP MOVSB/STOSB feature.
-         * It is recommended to use this when possible.
-         *
-         * If enhanced REP MOVSB/STOSB feature is not available, use fast string
-         * instructions.
-         *
-         * Otherwise, use original memset function.
-         *
-         * In .altinstructions section, ERMS feature is placed after REG_GOOD
-         * feature to implement the right patch order.
-         */
-        .section .altinstructions,"a"
-        altinstruction_entry __memset,.Lmemset_c,X86_FEATURE_REP_GOOD,\
-                             .Lfinal-__memset,.Lmemset_e-.Lmemset_c,0
-        altinstruction_entry __memset,.Lmemset_c_e,X86_FEATURE_ERMS, \
-                             .Lfinal-__memset,.Lmemset_e_e-.Lmemset_c_e,0
-        .previous
+ENDPROC(memset_orig)
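
For comparison with the open-coded .altinstructions entries being
removed above, here is roughly what the single ALTERNATIVE_2 invocation
expands to (a simplified sketch of the alternative-asm.h macro of that
time; the NOP padding directives are omitted and the pad-len argument
is shown as 0):

        140:    jmp memset_orig         /* oldinstr, lives in .text */
        141:

                .pushsection .altinstructions, "a"
                /* orig, repl, feature, orig len, repl len, pad len */
                altinstruction_entry 140b, 143f, X86_FEATURE_REP_GOOD, \
                                     141b-140b, 144f-143f, 0
                altinstruction_entry 140b, 144f, X86_FEATURE_ERMS, \
                                     141b-140b, 145f-144f, 0
                .popsection

                .pushsection .altinstr_replacement, "ax"
        143:                            /* newinstr1: empty, NOP it out */
        144:    jmp memset_erms         /* newinstr2 */
        145:
                .popsection

As in the removed code, the REP_GOOD entry is emitted before the ERMS
one, preserving the patch order the old comment asked for.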