uml/x86: use x86 load_unaligned_zeropad()

This allows us, among other things, to drop the !DCACHE_WORD_ACCESS mess in
x86 csum-partial_64.c

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
This commit is contained in:
Al Viro 2022-01-30 21:25:53 -05:00
parent 0c9dceb9bb
commit 6692531df6
3 changed files with 1 addition and 27 deletions

View File

@ -23,7 +23,6 @@ generic-y += softirq_stack.h
generic-y += switch_to.h
generic-y += topology.h
generic-y += trace_clock.h
generic-y += word-at-a-time.h
generic-y += kprobes.h
generic-y += mm_hooks.h
generic-y += vga.h

View File

@ -93,7 +93,6 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
buff += 8;
}
if (len & 7) {
#ifdef CONFIG_DCACHE_WORD_ACCESS
unsigned int shift = (8 - (len & 7)) * 8;
unsigned long trail;
@ -103,31 +102,6 @@ __wsum csum_partial(const void *buff, int len, __wsum sum)
"adcq $0,%[res]"
: [res] "+r" (temp64)
: [trail] "r" (trail));
#else
if (len & 4) {
asm("addq %[val],%[res]\n\t"
"adcq $0,%[res]"
: [res] "+r" (temp64)
: [val] "r" ((u64)*(u32 *)buff)
: "memory");
buff += 4;
}
if (len & 2) {
asm("addq %[val],%[res]\n\t"
"adcq $0,%[res]"
: [res] "+r" (temp64)
: [val] "r" ((u64)*(u16 *)buff)
: "memory");
buff += 2;
}
if (len & 1) {
asm("addq %[val],%[res]\n\t"
"adcq $0,%[res]"
: [res] "+r" (temp64)
: [val] "r" ((u64)*(u8 *)buff)
: "memory");
}
#endif
}
result = add32_with_carry(temp64 >> 32, temp64 & 0xffffffff);
if (unlikely(odd)) {

View File

@ -8,6 +8,7 @@ endmenu
config UML_X86
def_bool y
select DCACHE_WORD_ACCESS
config 64BIT
bool "64-bit kernel" if "$(SUBARCH)" = "x86"