author     Jiri Kosina <jkosina@suse.cz>    2018-02-19 22:13:08 +0100
committer  Jiri Kosina <jkosina@suse.cz>    2018-02-19 22:13:08 +0100
commit     251ec241d5e0a514a4b9bf68ccaf0a839d853998 (patch)
tree       aeec1a75f52d746d4456cc15a8f9fd75bac04ec1
parent     e883211836798d1fabd15433584e2a616e17d547 (diff)
parent     942b437c5ee760d6e0bb5dd2cbb7c153d3330be9 (diff)

Merge remote-tracking branch 'origin/users/bpetkov/SLE11-SP4/for-next' into SLE11-SP4  [rpm-3.0.101-108.35]
-rw-r--r--  patches.arch/13.0-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch (renamed from patches.arch/14.2-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch)     0
-rw-r--r--  patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch      32
-rw-r--r--  patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch                21
-rw-r--r--  patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch     8
-rw-r--r--  patches.arch/14.3-x86-retpoline-simplify-vmexit_fill_rsb.patch                       256
-rw-r--r--  patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch                           8
-rw-r--r--  series.conf                                                                            3
7 files changed, 28 insertions(+), 300 deletions(-)
diff --git a/patches.arch/14.2-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch b/patches.arch/13.0-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch
index fa136c48b8..fa136c48b8 100644
--- a/patches.arch/14.2-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch
+++ b/patches.arch/13.0-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch
diff --git a/patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch b/patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch
index 8cb401ffa7..caaecbd5b7 100644
--- a/patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch
+++ b/patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch
@@ -36,26 +36,18 @@ Link: http://lkml.kernel.org/r/20170929141537.29167-1-aryabinin@virtuozzo.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Acked-by: Borislav Petkov <bp@suse.de>
---
- arch/x86/include/asm/thread_info.h | 4 ++++
- 1 file changed, 4 insertions(+)
-
---- a/arch/x86/include/asm/thread_info.h
-+++ b/arch/x86/include/asm/thread_info.h
-@@ -224,6 +224,8 @@ static inline struct thread_info *curren
- return ti;
- }
-
-+register unsigned long current_stack_pointer asm("rsp") __used;
-+
- #else /* !__ASSEMBLY__ */
-
- /* how to get the thread information struct from ASM */
-@@ -235,6 +237,8 @@ static inline struct thread_info *curren
-
- #endif /* !X86_32 */
+ arch/x86/include/asm/asm.h | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+--- a/arch/x86/include/asm/asm.h
++++ b/arch/x86/include/asm/asm.h
+@@ -56,4 +56,9 @@
+ " .popsection\n"
+ #endif
++#ifndef __ASSEMBLY__
++register unsigned long current_stack_pointer asm(_ASM_SP);
+#define ASM_CALL_CONSTRAINT "+r" (current_stack_pointer)
++#endif
+
- /*
- * Thread-synchronous status.
- *
+ #endif /* _ASM_X86_ASM_H */
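
After this merge the 13.1 backport defines current_stack_pointer and ASM_CALL_CONSTRAINT in arch/x86/include/asm/asm.h, as mainline does, instead of only declaring the register variable in thread_info.h. A minimal user-space sketch of the same construct follows (x86-64, GCC/Clang; the main() wrapper and the printout are made up for illustration): the point is that inline asm containing a "call" lists the stack pointer as an in/out operand, so the compiler cannot schedule the asm before the containing function's stack frame is set up.

    /* sketch.c -- illustrative only, mirrors the asm.h hunk above.
     * current_stack_pointer is a global register variable pinned to the
     * stack pointer; ASM_CALL_CONSTRAINT lists it as an in/out operand
     * for any inline asm that performs a call. */
    #include <stdio.h>

    register unsigned long current_stack_pointer asm("rsp");   /* _ASM_SP on x86-64 */
    #define ASM_CALL_CONSTRAINT "+r" (current_stack_pointer)

    int main(void)
    {
            /* vmexit_fill_RSB() further down passes the same operand as "%1" */
            printf("stack pointer: %#lx\n", current_stack_pointer);
            return 0;
    }
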
diff --git a/patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch b/patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch
index 6a0bcf0759..9cb5af39d3 100644
--- a/patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch
+++ b/patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch
@@ -36,10 +36,10 @@ Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Signed-off-by: Razvan Ghitulete <rga@amazon.de>
Signed-off-by: Jiri Slaby <jslaby@suse.cz>
---
- arch/x86/include/asm/nospec-branch.h | 79 ++++++++++++++++++++++++++++++++++-
+ arch/x86/include/asm/nospec-branch.h | 76 ++++++++++++++++++++++++++++++++++-
arch/x86/kvm/svm.c | 4 +
arch/x86/kvm/vmx.c | 4 +
- 3 files changed, 86 insertions(+), 1 deletion(-)
+ 3 files changed, 83 insertions(+), 1 deletion(-)
--- a/arch/x86/include/asm/nospec-branch.h
+++ b/arch/x86/include/asm/nospec-branch.h
@@ -92,7 +92,7 @@ Signed-off-by: Jiri Slaby <jslaby@suse.cz>
#ifdef __ASSEMBLY__
/*
-@@ -67,8 +109,23 @@
+@@ -67,6 +109,18 @@
#endif
.endm
@@ -102,21 +102,16 @@ Signed-off-by: Jiri Slaby <jslaby@suse.cz>
+ */
+.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
+#ifdef CONFIG_RETPOLINE
-+ ALTERNATIVE "jmp .Lskip_rsb_\@", \
-+ __stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
-+ \ftr
++ ALTERNATIVE "jmp .Lskip_rsb_\@", ASM_NOP5, \ftr
++ __FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)
+.Lskip_rsb_\@:
+#endif
+.endm
+
#else /* __ASSEMBLY__ */
-+#define FILL_RETURN_BUFFER(reg, nr, ftr)
-+
#if defined(CONFIG_X86_64) && defined(RETPOLINE)
-
- /*
-@@ -103,11 +160,12 @@
+@@ -103,11 +157,12 @@
X86_FEATURE_RETPOLINE)
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
@@ -130,7 +125,7 @@ Signed-off-by: Jiri Slaby <jslaby@suse.cz>
/* The Spectre V2 mitigation variants */
enum spectre_v2_mitigation {
SPECTRE_V2_NONE,
-@@ -118,5 +176,24 @@ enum spectre_v2_mitigation {
+@@ -118,5 +173,24 @@ enum spectre_v2_mitigation {
SPECTRE_V2_IBRS,
};
@@ -145,7 +140,7 @@ Signed-off-by: Jiri Slaby <jslaby@suse.cz>
+#ifdef CONFIG_RETPOLINE
+ unsigned long loops = RSB_CLEAR_LOOPS / 2;
+
-+ asm volatile (ALTERNATIVE("jmp 910f",
++ asm volatile (ALTERNATIVE("jmp 910f; " __stringify(__FILL_RETURN_BUFFER(%0, RSB_CLEAR_LOOPS, %1)),
+ __stringify(__FILL_RETURN_BUFFER(%0, RSB_CLEAR_LOOPS, %1)),
+ X86_FEATURE_RETPOLINE)
+ "910:"
diff --git a/patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch b/patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch
index 6320133f32..52f8edde42 100644
--- a/patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch
+++ b/patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch
@@ -87,7 +87,7 @@ Acked-by: Borislav Petkov <bp@suse.de>
+ * speculative execution to prevent attack.
+ */
+#ifdef CONFIG_RETPOLINE
-+#define __switch_fill_rsb "FILL_RETURN_BUFFER %ebx, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW\n\t"
++#define __switch_fill_rsb __stringify(__FILL_RETURN_BUFFER(%%ebx, RSB_CLEAR_LOOPS, %%esp))
+#else
+#define __switch_fill_rsb
+#endif
@@ -100,7 +100,7 @@ Acked-by: Borislav Petkov <bp@suse.de>
"movl $1f,%[prev_ip]\n\t" /* save EIP */ \
"pushl %[next_ip]\n\t" /* restore EIP */ \
__switch_canary \
-+ __switch_fill_rsb \
++ __switch_fill_rsb "\n\t" \
"jmp __switch_to\n" /* regparm call */ \
"1:\t" \
"popl %%ebp\n\t" /* restore EBP */ \
@@ -116,7 +116,7 @@ Acked-by: Borislav Petkov <bp@suse.de>
+ * speculative execution to prevent attack.
+ */
+#ifdef CONFIG_RETPOLINE
-+#define __switch_fill_rsb "FILL_RETURN_BUFFER %r8, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW\n\t"
++#define __switch_fill_rsb __stringify(__FILL_RETURN_BUFFER(%%rbx, RSB_CLEAR_LOOPS, %%rsp))
+#else
+#define __switch_fill_rsb
+#endif
@@ -126,7 +126,7 @@ Acked-by: Borislav Petkov <bp@suse.de>
asm volatile(SAVE_CONTEXT \
"movq %%rsp,%P[threadrsp](%[prev])\n\t" /* save RSP */ \
"movq %P[threadrsp](%[next]),%%rsp\n\t" /* restore RSP */ \
-+ __switch_fill_rsb \
++ __switch_fill_rsb "\n\t" \
"call __switch_to\n\t" \
THREAD_RETURN_SYM \
"movq "__percpu_arg([current_task])",%%rsi\n\t" \
diff --git a/patches.arch/14.3-x86-retpoline-simplify-vmexit_fill_rsb.patch b/patches.arch/14.3-x86-retpoline-simplify-vmexit_fill_rsb.patch
deleted file mode 100644
index 9030077066..0000000000
--- a/patches.arch/14.3-x86-retpoline-simplify-vmexit_fill_rsb.patch
+++ /dev/null
@@ -1,256 +0,0 @@
-From: Borislav Petkov <bp@alien8.de>
-Date: Sat, 27 Jan 2018 16:24:33 +0000
-Subject: x86/retpoline: Simplify vmexit_fill_RSB()
-Git-commit: 1dde7415e99933bb7293d6b2843752cbdb43ec11
-Patch-mainline: v4.16-rc1
-References: bsc#1068032 CVE-2017-5754
-
-Simplify it to call an asm-function instead of pasting 41 insn bytes at
-every call site. Also, add alignment to the macro as suggested here:
-
- https://support.google.com/faqs/answer/7625886
-
-[dwmw2: Clean up comments, let it clobber %ebx and just tell the compiler]
-
-Signed-off-by: Borislav Petkov <bp@suse.de>
-Signed-off-by: David Woodhouse <dwmw@amazon.co.uk>
-Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
-Cc: ak@linux.intel.com
-Cc: dave.hansen@intel.com
-Cc: karahmed@amazon.de
-Cc: arjan@linux.intel.com
-Cc: torvalds@linux-foundation.org
-Cc: peterz@infradead.org
-Cc: bp@alien8.de
-Cc: pbonzini@redhat.com
-Cc: tim.c.chen@linux.intel.com
-Cc: gregkh@linux-foundation.org
-Link: https://lkml.kernel.org/r/1517070274-12128-3-git-send-email-dwmw@amazon.co.uk
----
- arch/x86/include/asm/alternative.h | 6 ++
- arch/x86/include/asm/asm-prototypes.h | 3 +
- arch/x86/include/asm/nospec-branch.h | 69 ++++------------------------------
- arch/x86/include/asm/system.h | 8 ++-
- arch/x86/lib/retpoline.S | 56 +++++++++++++++++++++++++++
- 5 files changed, 80 insertions(+), 62 deletions(-)
-
---- a/arch/x86/include/asm/alternative.h
-+++ b/arch/x86/include/asm/alternative.h
-@@ -151,6 +151,12 @@ static inline int alternatives_text_rese
- */
- #define ASM_OUTPUT2(a...) a
-
-+/*
-+ * use this macro if you need clobbers but no inputs in
-+ * alternative_{input,io,call}()
-+ */
-+#define ASM_NO_INPUT_CLOBBER(clbr...) "i" (0) : clbr
-+
- struct paravirt_patch_site;
- #ifdef CONFIG_PARAVIRT
- void apply_paravirt(struct paravirt_patch_site *start,
---- a/arch/x86/include/asm/asm-prototypes.h
-+++ b/arch/x86/include/asm/asm-prototypes.h
-@@ -36,4 +36,7 @@ INDIRECT_THUNK(si)
- INDIRECT_THUNK(di)
- INDIRECT_THUNK(bp)
- INDIRECT_THUNK(sp)
-+asmlinkage void __fill_rsb(void);
-+asmlinkage void __clear_rsb(void);
-+
- #endif /* CONFIG_RETPOLINE */
---- a/arch/x86/include/asm/nospec-branch.h
-+++ b/arch/x86/include/asm/nospec-branch.h
-@@ -8,48 +8,6 @@
- #include <asm/cpufeature.h>
- #include <asm/nops.h>
-
--/*
-- * Fill the CPU return stack buffer.
-- *
-- * Each entry in the RSB, if used for a speculative 'ret', contains an
-- * infinite 'pause; jmp' loop to capture speculative execution.
-- *
-- * This is required in various cases for retpoline and IBRS-based
-- * mitigations for the Spectre variant 2 vulnerability. Sometimes to
-- * eliminate potentially bogus entries from the RSB, and sometimes
-- * purely to ensure that it doesn't get empty, which on some CPUs would
-- * allow predictions from other (unwanted!) sources to be used.
-- *
-- * We define a CPP macro such that it can be used from both .S files and
-- * inline assembly. It's possible to do a .macro and then include that
-- * from C via asm(".include <asm/nospec-branch.h>") but let's not go there.
-- */
--
--#define RSB_CLEAR_LOOPS 32 /* To forcibly overwrite all entries */
--#define RSB_FILL_LOOPS 16 /* To avoid underflow */
--
--/*
-- * Google experimented with loop-unrolling and this turned out to be
-- * the optimal version — two calls, each with their own speculation
-- * trap should their return address end up getting used, in a loop.
-- */
--#define __FILL_RETURN_BUFFER(reg, nr, sp) \
-- mov $(nr/2), reg; \
--771: \
-- call 772f; \
--773: /* speculation trap */ \
-- pause; \
-- jmp 773b; \
--772: \
-- call 774f; \
--775: /* speculation trap */ \
-- pause; \
-- jmp 775b; \
--774: \
-- dec reg; \
-- jnz 771b; \
-- add $(BITS_PER_LONG/8) * nr, sp;
--
- #ifdef __ASSEMBLY__
-
- /*
-@@ -109,22 +67,17 @@
- #endif
- .endm
-
-- /*
-- * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
-- * monstrosity above, manually.
-- */
--.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req
-+/* This clobbers the BX register */
-+.macro FILL_RETURN_BUFFER ftr:req
- #ifdef CONFIG_RETPOLINE
-- ALTERNATIVE "jmp .Lskip_rsb_\@", \
-- __stringify(__FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP)) \
-- \ftr
--.Lskip_rsb_\@:
-+ ALTERNATIVE ASM_NOP5, "call __clear_rsb", \ftr
- #endif
- .endm
-
- #else /* __ASSEMBLY__ */
-
--#define FILL_RETURN_BUFFER(reg, nr, ftr)
-+#define FILL_RETURN_BUFFER(ftr) \
-+ ALTERNATIVE(ASM_NOP5, "call __clear_rsb", \ftr)
-
- #if defined(CONFIG_X86_64) && defined(RETPOLINE)
-
-@@ -185,14 +138,10 @@ enum spectre_v2_mitigation {
- static inline void vmexit_fill_RSB(void)
- {
- #ifdef CONFIG_RETPOLINE
-- unsigned long loops = RSB_CLEAR_LOOPS / 2;
--
-- asm volatile (ALTERNATIVE("jmp 910f",
-- __stringify(__FILL_RETURN_BUFFER(%0, RSB_CLEAR_LOOPS, %1)),
-- X86_FEATURE_RETPOLINE)
-- "910:"
-- : "=&r" (loops), "+r" (current_stack_pointer)
-- : "r" (loops) : "memory" );
-+ alternative_input(ASM_NOP5,
-+ "call __fill_rsb",
-+ X86_FEATURE_RETPOLINE,
-+ ASM_NO_INPUT_CLOBBER(_ASM_BX, "memory"));
- #endif
- }
- #endif /* __ASSEMBLY__ */
---- a/arch/x86/include/asm/system.h
-+++ b/arch/x86/include/asm/system.h
-@@ -48,9 +48,11 @@ extern void show_regs_common(void);
- * with userspace addresses. On CPUs where those concerns
- * exist, overwrite the RSB with entries which capture
- * speculative execution to prevent attack.
-+ *
-+ * Clobbers %ebx
- */
- #ifdef CONFIG_RETPOLINE
--#define __switch_fill_rsb "FILL_RETURN_BUFFER %ebx, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW\n\t"
-+#define __switch_fill_rsb FILL_RETURN_BUFFER(X86_FEATURE_RSB_CTXSW)
- #else
- #define __switch_fill_rsb
- #endif
-@@ -147,9 +149,11 @@ do { \
- * with userspace addresses. On CPUs where those concerns
- * exist, overwrite the RSB with entries which capture
- * speculative execution to prevent attack.
-+ *
-+ * Clobbers %rbx.
- */
- #ifdef CONFIG_RETPOLINE
--#define __switch_fill_rsb "FILL_RETURN_BUFFER %r8, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW\n\t"
-+#define __switch_fill_rsb FILL_RETURN_BUFFER(X86_FEATURE_RSB_CTXSW)
- #else
- #define __switch_fill_rsb
- #endif
---- a/arch/x86/lib/retpoline.S
-+++ b/arch/x86/lib/retpoline.S
-@@ -7,6 +7,7 @@
- #include <asm/alternative-asm.h>
- #include <asm-generic/export.h>
- #include <asm/nospec-branch.h>
-+#include <asm/bitsperlong.h>
-
- .macro THUNK reg
- .section .text.__x86.indirect_thunk.\reg
-@@ -46,3 +47,58 @@ GENERATE_THUNK(r13)
- GENERATE_THUNK(r14)
- GENERATE_THUNK(r15)
- #endif
-+
-+/*
-+ * Fill the CPU return stack buffer.
-+ *
-+ * Each entry in the RSB, if used for a speculative 'ret', contains an
-+ * infinite 'pause; lfence; jmp' loop to capture speculative execution.
-+ *
-+ * This is required in various cases for retpoline and IBRS-based
-+ * mitigations for the Spectre variant 2 vulnerability. Sometimes to
-+ * eliminate potentially bogus entries from the RSB, and sometimes
-+ * purely to ensure that it doesn't get empty, which on some CPUs would
-+ * allow predictions from other (unwanted!) sources to be used.
-+ *
-+ * Google experimented with loop-unrolling and this turned out to be
-+ * the optimal version - two calls, each with their own speculation
-+ * trap should their return address end up getting used, in a loop.
-+ */
-+.macro STUFF_RSB nr:req sp:req
-+ mov $(\nr / 2), %_ASM_BX
-+ .align 16
-+771:
-+ call 772f
-+773: /* speculation trap */
-+ pause
-+ lfence
-+ jmp 773b
-+ .align 16
-+772:
-+ call 774f
-+775: /* speculation trap */
-+ pause
-+ lfence
-+ jmp 775b
-+ .align 16
-+774:
-+ dec %_ASM_BX
-+ jnz 771b
-+ add $((BITS_PER_LONG/8) * \nr), \sp
-+.endm
-+
-+#define RSB_FILL_LOOPS 16 /* To avoid underflow */
-+
-+ENTRY(__fill_rsb)
-+ STUFF_RSB RSB_FILL_LOOPS, %_ASM_SP
-+ ret
-+END(__fill_rsb)
-+EXPORT_SYMBOL_GPL(__fill_rsb)
-+
-+#define RSB_CLEAR_LOOPS 32 /* To forcibly overwrite all entries */
-+
-+ENTRY(__clear_rsb)
-+ STUFF_RSB RSB_CLEAR_LOOPS, %_ASM_SP
-+ ret
-+END(__clear_rsb)
-+EXPORT_SYMBOL_GPL(__clear_rsb)
diff --git a/patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch b/patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch
index f35c74736e..99b5e33b5e 100644
--- a/patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch
+++ b/patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch
@@ -38,17 +38,15 @@ Acked-by: Borislav Petkov <bp@suse.de>
--- a/arch/x86/include/asm/asm-prototypes.h
+++ b/arch/x86/include/asm/asm-prototypes.h
-@@ -35,7 +35,6 @@ INDIRECT_THUNK(dx)
+@@ -35,5 +35,4 @@ INDIRECT_THUNK(dx)
INDIRECT_THUNK(si)
INDIRECT_THUNK(di)
INDIRECT_THUNK(bp)
-INDIRECT_THUNK(sp)
- asmlinkage void __fill_rsb(void);
- asmlinkage void __clear_rsb(void);
-
+ #endif /* CONFIG_RETPOLINE */
--- a/arch/x86/lib/retpoline.S
+++ b/arch/x86/lib/retpoline.S
-@@ -36,7 +36,6 @@ GENERATE_THUNK(_ASM_DX)
+@@ -35,7 +35,6 @@ GENERATE_THUNK(_ASM_DX)
GENERATE_THUNK(_ASM_SI)
GENERATE_THUNK(_ASM_DI)
GENERATE_THUNK(_ASM_BP)
diff --git a/series.conf b/series.conf
index d9a43e4e22..74ca75a8a4 100644
--- a/series.conf
+++ b/series.conf
@@ -24935,12 +24935,11 @@
patches.arch/11-x86-retpoline-xen-Convert-Xen-hypercall-indirect-jum.patch
patches.arch/12-x86-retpoline-checksum32-Convert-assembler-indirect-.patch
patches.arch/13-x86-retpoline-irq32-Convert-assembler-indirect-jumps.patch
+ patches.arch/13.0-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch
patches.arch/13.1-x86-asm-use-register-variable-to-get-stack-pointer-value.patch
patches.arch/14-x86-retpoline-Fill-return-stack-buffer-on-vmexit.patch
patches.arch/14.0-x86-cpu-intel-introduce-macros-for-intel-family-numbers.patch
patches.arch/14.1-x86-retpoline-fill-rsb-on-context-switch-for-affected-cpus.patch
- patches.arch/14.2-x86-asm-extend-definitions-of-asm_-with-a-raw-format.patch
- patches.arch/14.3-x86-retpoline-simplify-vmexit_fill_rsb.patch
patches.arch/16-asm-prototypes-clear-any-cpp-defines-before-declaring-the-functions.patch
patches.arch/17-x86-retpoline-remove-the-esp-rsp-thunk.patch
patches.suse/x86-speculation-Fix-typo-IBRS_ATT-which-should-be-IB.patch