x86, xsave: struct _fpstate extensions to include extended state information
include/asm-x86/xsave.h
#ifndef __ASM_X86_XSAVE_H
#define __ASM_X86_XSAVE_H

#include <asm/processor.h>
#include <asm/i387.h>

#define XSTATE_FP	0x1
#define XSTATE_SSE	0x2

#define XSTATE_FPSSE	(XSTATE_FP | XSTATE_SSE)

#define FXSAVE_SIZE	512

/*
 * These are the features that the OS can handle currently.
 */
#define XCNTXT_LMASK	(XSTATE_FP | XSTATE_SSE)
#define XCNTXT_HMASK	0x0

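/*
 * The XSAVE/XRSTOR instructions below are hand-assembled as ".byte"
 * sequences (0x0f 0xae /4 and /5, with the operand in %rdi/%edi).  On
 * 64-bit kernels they are prefixed with REX.W (0x48) to select the
 * 64-bit forms of the instructions.
 */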
#ifdef CONFIG_X86_64
#define REX_PREFIX	"0x48, "
#else
#define REX_PREFIX
#endif

extern unsigned int xstate_size, pcntxt_hmask, pcntxt_lmask;
extern struct xsave_struct *init_xstate_buf;

extern void xsave_cntxt_init(void);
extern void xsave_init(void);
extern int init_fpu(struct task_struct *child);

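/*
 * Restore extended state from the kernel xsave area @fx with XRSTOR
 * (".byte 0x0f,0xae,0x2f" == xrstor (%rdi)).  edx:eax = -1 requests all
 * state components; a fault is caught by the exception fixup and
 * reported by returning -1, otherwise 0 is returned.
 */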
static inline int xrstor_checking(struct xsave_struct *fx)
{
	int err;

	asm volatile("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n\t"
		     "2:\n"
		     ".section .fixup,\"ax\"\n"
		     "3:  movl $-1,%[err]\n"
		     "    jmp  2b\n"
		     ".previous\n"
		     _ASM_EXTABLE(1b, 3b)
		     : [err] "=r" (err)
		     : "D" (fx), "m" (*fx), "a" (-1), "d" (-1), "0" (0)
		     : "memory");

	return err;
}

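/*
 * Save the current extended state to the user buffer @buf with XSAVE
 * (".byte 0x0f,0xae,0x27" == xsave (%rdi)), requesting all components
 * via edx:eax = -1.  If the save faults, the user buffer is cleared and
 * an error is returned.
 */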
static inline int xsave_check(struct xsave_struct __user *buf)
{
	int err;
	__asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
			     "2:\n"
			     ".section .fixup,\"ax\"\n"
			     "3:  movl $-1,%[err]\n"
			     "    jmp  2b\n"
			     ".previous\n"
			     ".section __ex_table,\"a\"\n"
			     _ASM_ALIGN "\n"
			     _ASM_PTR "1b,3b\n"
			     ".previous"
			     : [err] "=r" (err)
			     : "D" (buf), "a" (-1), "d" (-1), "0" (0)
			     : "memory");
	if (unlikely(err) && __clear_user(buf, xstate_size))
		err = -EFAULT;
	/* No need to clear here because the caller clears USED_MATH */
	return err;
}

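/*
 * Restore extended state from the user xsave area @buf with XRSTOR,
 * restoring only the components selected by hmask:lmask (loaded into
 * edx:eax).  Returns -1 if the restore faults, 0 on success.
 */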
static inline int xrestore_user(struct xsave_struct __user *buf,
				unsigned int lmask,
				unsigned int hmask)
{
	int err;
	struct xsave_struct *xstate = ((__force struct xsave_struct *)buf);

	__asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
			     "2:\n"
			     ".section .fixup,\"ax\"\n"
			     "3:  movl $-1,%[err]\n"
			     "    jmp  2b\n"
			     ".previous\n"
			     ".section __ex_table,\"a\"\n"
			     _ASM_ALIGN "\n"
			     _ASM_PTR "1b,3b\n"
			     ".previous"
			     : [err] "=r" (err)
			     : "D" (xstate), "a" (lmask), "d" (hmask), "0" (0)
			     : "memory");	/* memory required? */
	return err;
}

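/*
 * Non-checking XRSTOR: restore the components selected by hmask:lmask
 * from @fx, with no exception fixup.
 */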
static inline void xrstor_state(struct xsave_struct *fx, int lmask, int hmask)
{
	asm volatile(".byte " REX_PREFIX "0x0f,0xae,0x2f\n\t"
		     : : "D" (fx), "m" (*fx), "a" (lmask), "d" (hmask)
		     : "memory");
}

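/*
 * Save the full extended state of @tsk into its thread.xstate->xsave
 * area (XSAVE with an all-ones component mask in edx:eax).
 */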
static inline void xsave(struct task_struct *tsk)
{
	/* This, however, we can work around by forcing the compiler to select
	   an addressing mode that doesn't require extended registers. */
	__asm__ __volatile__(".byte " REX_PREFIX "0x0f,0xae,0x27"
			     : : "D" (&(tsk->thread.xstate->xsave)),
			     "a" (-1), "d" (-1) : "memory");
}
#endif