x86: Use return-thunk in asm code
Use the return thunk in asm code. If the thunk isn't needed, it will get
patched into a RET instruction during boot by apply_returns().

Since alternatives can't handle relocations outside of the first
instruction, putting a 'jmp __x86_return_thunk' in one is not valid,
therefore carve out the memmove ERMS path into a separate label and jump
to it.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Reviewed-by: Josh Poimboeuf <jpoimboe@kernel.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
parent 0ee9073000
commit aa3d480315

3 changed files with 15 additions and 1 deletion
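The relocation constraint described in the commit message can be sketched as follows; this is an illustration of the rule using the two ALTERNATIVE forms from the memmove hunk below, with comments that are mine, not from the patch:

	/*
	 * Invalid inside an alternative: RET expands to
	 * 'jmp __x86_return_thunk', a rel32 branch, but it is not the
	 * first instruction of the replacement, and alternatives can
	 * only fix up a relocation in the first instruction.
	 */
	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS

	/*
	 * Valid: the replacement is a single 'jmp' (the first and only
	 * instruction), and the thunked RET sits at an ordinary label
	 * outside the alternative.
	 */
	ALTERNATIVE "", "jmp .Lmemmove_erms", X86_FEATURE_ERMS

This is exactly the shape of the memmove_64.S change in the last two hunks.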
arch/x86/entry/vdso/Makefile
@@ -92,6 +92,7 @@ endif
 endif
 
 $(vobjs): KBUILD_CFLAGS := $(filter-out $(CC_FLAGS_LTO) $(RANDSTRUCT_CFLAGS) $(GCC_PLUGINS_CFLAGS) $(RETPOLINE_CFLAGS),$(KBUILD_CFLAGS)) $(CFL)
+$(vobjs): KBUILD_AFLAGS += -DBUILD_VDSO
 
 #
 # vDSO code runs in userspace and -pg doesn't help with profiling anyway.
arch/x86/include/asm/linkage.h
@@ -19,19 +19,27 @@
 #define __ALIGN_STR __stringify(__ALIGN)
 #endif
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define RET jmp __x86_return_thunk
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define RET ret; int3
 #else
 #define RET ret
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #else /* __ASSEMBLY__ */
 
+#if defined(CONFIG_RETPOLINE) && !defined(__DISABLE_EXPORTS) && !defined(BUILD_VDSO)
+#define ASM_RET "jmp __x86_return_thunk\n\t"
+#else /* CONFIG_RETPOLINE */
 #ifdef CONFIG_SLS
 #define ASM_RET "ret; int3\n\t"
 #else
 #define ASM_RET "ret\n\t"
 #endif
+#endif /* CONFIG_RETPOLINE */
 
 #endif /* __ASSEMBLY__ */
 
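For orientation, a minimal sketch of how .S code consumes the reworked RET macro; my_func is a hypothetical example function, not part of this patch:

	SYM_FUNC_START(my_func)		/* hypothetical example */
		movq	%rdi, %rax
		RET	/* with CONFIG_RETPOLINE: 'jmp __x86_return_thunk',
			   patched to a plain 'ret' by apply_returns() at
			   boot if the thunk isn't needed */
	SYM_FUNC_END(my_func)

The vDSO builds with -DBUILD_VDSO (see the Makefile hunk above) precisely so that its RET falls through to the plain 'ret' / 'ret; int3' variants: vDSO code runs in userspace and cannot jump to a kernel-resident thunk.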
arch/x86/lib/memmove_64.S
@@ -39,7 +39,7 @@ SYM_FUNC_START(__memmove)
 	/* FSRM implies ERMS => no length checks, do the copy directly */
 .Lmemmove_begin_forward:
 	ALTERNATIVE "cmp $0x20, %rdx; jb 1f", "", X86_FEATURE_FSRM
-	ALTERNATIVE "", __stringify(movq %rdx, %rcx; rep movsb; RET), X86_FEATURE_ERMS
+	ALTERNATIVE "", "jmp .Lmemmove_erms", X86_FEATURE_ERMS
 
 	/*
 	 * movsq instruction have many startup latency
@@ -205,6 +205,11 @@ SYM_FUNC_START(__memmove)
 	movb %r11b, (%rdi)
 13:
 	RET
+
+.Lmemmove_erms:
+	movq %rdx, %rcx
+	rep movsb
+	RET
 SYM_FUNC_END(__memmove)
 EXPORT_SYMBOL(__memmove)