1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
2 | /* |
3 | * Supervisor Mode Access Prevention support |
4 | * |
5 | * Copyright (C) 2012 Intel Corporation |
6 | * Author: H. Peter Anvin <hpa@linux.intel.com> |
7 | */ |
8 | |
9 | #ifndef _ASM_X86_SMAP_H |
10 | #define _ASM_X86_SMAP_H |
11 | |
12 | #include <asm/nops.h> |
13 | #include <asm/cpufeatures.h> |
14 | #include <asm/alternative.h> |
15 | |
/*
 * "Raw" instruction opcodes, spelled as byte sequences so this header
 * does not depend on assembler support for the CLAC/STAC mnemonics:
 *   0f 01 ca = clac (clear EFLAGS.AC -> SMAP enforcement active)
 *   0f 01 cb = stac (set EFLAGS.AC -> user accesses permitted)
 */
#define __ASM_CLAC ".byte 0x0f,0x01,0xca"
#define __ASM_STAC ".byte 0x0f,0x01,0xcb"
19 | |
20 | #ifdef __ASSEMBLY__ |
21 | |
/*
 * Assembly-language variants: the ALTERNATIVE directive leaves the site
 * empty ("") and patches in the CLAC/STAC opcode at boot only when the
 * CPU advertises X86_FEATURE_SMAP.
 */
#define ASM_CLAC \
	ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP

#define ASM_STAC \
	ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP
27 | |
28 | #else /* __ASSEMBLY__ */ |
29 | |
/*
 * clac() - clear EFLAGS.AC, closing the user-space access window
 * (i.e. re-enable SMAP enforcement).  Patched to nothing at boot on
 * CPUs without X86_FEATURE_SMAP.
 */
static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
}
35 | |
/*
 * stac() - set EFLAGS.AC, opening the user-space access window
 * (i.e. temporarily suspend SMAP enforcement for explicit user
 * accesses).  Patched to nothing at boot on CPUs without
 * X86_FEATURE_SMAP.
 */
static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_STAC, X86_FEATURE_SMAP);
}
41 | |
42 | static __always_inline unsigned long smap_save(void) |
43 | { |
44 | unsigned long flags; |
45 | |
46 | asm volatile ("# smap_save\n\t" |
47 | ALTERNATIVE("" , "pushf; pop %0; " __ASM_CLAC "\n\t" , |
48 | X86_FEATURE_SMAP) |
49 | : "=rm" (flags) : : "memory" , "cc" ); |
50 | |
51 | return flags; |
52 | } |
53 | |
54 | static __always_inline void smap_restore(unsigned long flags) |
55 | { |
56 | asm volatile ("# smap_restore\n\t" |
57 | ALTERNATIVE("" , "push %0; popf\n\t" , |
58 | X86_FEATURE_SMAP) |
59 | : : "g" (flags) : "memory" , "cc" ); |
60 | } |
61 | |
/*
 * These macros can be used in asm() statements.  Same patching scheme
 * as the C helpers above: empty by default, CLAC/STAC patched in at
 * boot when the CPU has X86_FEATURE_SMAP.
 */
#define ASM_CLAC \
	ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
#define ASM_STAC \
	ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)
67 | |
68 | #endif /* __ASSEMBLY__ */ |
69 | |
70 | #endif /* _ASM_X86_SMAP_H */ |
71 | |