// SPDX-License-Identifier: GPL-2.0
//
// Code shared between 32 and 64 bit

#include <asm/spec-ctrl.h>

void __switch_to_xtra(struct task_struct *prev_p, struct task_struct *next_p);

/*
 * This needs to be inline to optimize for the common case where no extra
 * work needs to be done.
 */
static inline void switch_to_extra(struct task_struct *prev,
				   struct task_struct *next)
{
	unsigned long next_tif = task_thread_info(next)->flags;
	unsigned long prev_tif = task_thread_info(prev)->flags;

	if (IS_ENABLED(CONFIG_SMP)) {
		/*
		 * Avoid __switch_to_xtra() invocation when conditional
		 * STIBP is disabled and the only different bit is
		 * TIF_SPEC_IB. For CONFIG_SMP=n TIF_SPEC_IB is not
		 * in the TIF_WORK_CTXSW masks.
		 */
		if (!static_branch_likely(&switch_to_cond_stibp)) {
			prev_tif &= ~_TIF_SPEC_IB;
			next_tif &= ~_TIF_SPEC_IB;
		}
	}

	/*
	 * __switch_to_xtra() handles debug registers, i/o bitmaps,
	 * speculation mitigations etc.
	 */
	if (unlikely(next_tif & _TIF_WORK_CTXSW_NEXT ||
		     prev_tif & _TIF_WORK_CTXSW_PREV))
		__switch_to_xtra(prev, next);
}
39}