arm64: Fix !CONFIG_SMP kernel build
Commit fb4a96029c (arm64: kernel: fix per-cpu offset restore on resume) uses per_cpu_offset() unconditionally during CPU wakeup; however, per_cpu_offset() is only defined for the SMP case.
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Reported-by: Dave P Martin <Dave.Martin@arm.com>
parent 84fe6826c2
commit b57fc9e806
1 changed file with 8 additions and 0 deletions
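For context, the wakeup path introduced by fb4a96029c restores the per-cpu offset before any other kernel code gets to run. The sketch below is illustrative rather than the verbatim kernel code (the function name is made up for the example); it shows why the UP build breaks: per_cpu_offset() appears unconditionally, yet it only exists when CONFIG_SMP=y.

#include <linux/percpu.h>
#include <linux/smp.h>

/* Illustrative sketch of the resume hook added by fb4a96029c (not verbatim). */
static void sketch_restore_percpu_on_resume(void)
{
	/*
	 * per_cpu_offset() is only defined for CONFIG_SMP=y, so this call
	 * fails to build on UP kernels unless set_my_cpu_offset() discards
	 * its argument at preprocessing time.
	 */
	set_my_cpu_offset(per_cpu_offset(smp_processor_id()));
}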
arch/arm64/include/asm/percpu.h

@@ -16,6 +16,8 @@
 #ifndef __ASM_PERCPU_H
 #define __ASM_PERCPU_H
 
+#ifdef CONFIG_SMP
+
 static inline void set_my_cpu_offset(unsigned long off)
 {
 	asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
@@ -36,6 +38,12 @@ static inline unsigned long __my_cpu_offset(void)
 }
 #define __my_cpu_offset __my_cpu_offset()
 
+#else /* !CONFIG_SMP */
+
+#define set_my_cpu_offset(x) do { } while (0)
+
+#endif /* CONFIG_SMP */
+
 #include <asm-generic/percpu.h>
 
 #endif /* __ASM_PERCPU_H */
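One way to read the !CONFIG_SMP stub above: because set_my_cpu_offset(x) is a function-like macro whose replacement never uses x, the preprocessor drops the argument entirely, so per_cpu_offset() never has to exist on UP kernels. A minimal standalone sketch of that behaviour (plain C, compilable outside the kernel; per_cpu_offset is deliberately left undefined):

/* !CONFIG_SMP stub from the patch: ignores its argument. */
#define set_my_cpu_offset(x) do { } while (0)

int main(void)
{
	/*
	 * per_cpu_offset() is never defined here, yet this compiles: the
	 * macro discards its argument during preprocessing, so the call
	 * below never references the missing symbol.
	 */
	set_my_cpu_offset(per_cpu_offset(0));
	return 0;
}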