Lines Matching full:fpu
6 * General FPU state handling cleanups
20 #include <asm/fpu/api.h>
21 #include <asm/fpu/xstate.h>
22 #include <asm/fpu/xcr.h>
24 #include <asm/trace/fpu.h>
27 * High level FPU state handling functions:
29 extern void fpu__prepare_read(struct fpu *fpu);
30 extern void fpu__prepare_write(struct fpu *fpu);
31 extern void fpu__save(struct fpu *fpu);
33 extern void fpu__drop(struct fpu *fpu);
35 extern void fpu__clear_user_states(struct fpu *fpu);
36 extern void fpu__clear_all(struct fpu *fpu);
37 extern int fpu__exception_code(struct fpu *fpu, int trap_nr);
40 * Boot time FPU initialization functions:
60 * FPU related CPU feature flag helper routines:
104 extern void fpstate_sanitize_xstate(struct fpu *fpu);
200 static inline void copy_fxregs_to_kernel(struct fpu *fpu) in copy_fxregs_to_kernel() argument
203 asm volatile( "fxsave %[fx]" : [fx] "=m" (fpu->state.fxsave)); in copy_fxregs_to_kernel()
205 asm volatile("fxsaveq %[fx]" : [fx] "=m" (fpu->state.fxsave)); in copy_fxregs_to_kernel()
303 * We should never fault when copying from a kernel buffer, and the FPU in copy_kernel_to_xregs_booting()
405 extern int copy_fpregs_to_fpstate(struct fpu *fpu);
440 * FPU context switch related helper methods:
443 DECLARE_PER_CPU(struct fpu *, fpu_fpregs_owner_ctx);
446 * The in-register FPU state for an FPU context on a CPU is assumed to be
447 * valid if the fpu->last_cpu matches the CPU, and the fpu_fpregs_owner_ctx
448 * matches the FPU.
450 * If the FPU register state is valid, the kernel can skip restoring the
451 * FPU state from memory.
453 * Any code that clobbers the FPU registers or updates the in-memory
454 * FPU state for a task MUST let the rest of the kernel know that the
455 * FPU registers are no longer valid for this task.
459 * (with preemption disabled), FPU for the current task, or a task that
467 static inline void __fpu_invalidate_fpregs_state(struct fpu *fpu) in __fpu_invalidate_fpregs_state() argument
469 fpu->last_cpu = -1; in __fpu_invalidate_fpregs_state()
472 static inline int fpregs_state_valid(struct fpu *fpu, unsigned int cpu) in fpregs_state_valid() argument
474 return fpu == this_cpu_read(fpu_fpregs_owner_ctx) && cpu == fpu->last_cpu; in fpregs_state_valid()
481 static inline void fpregs_deactivate(struct fpu *fpu) in fpregs_deactivate() argument
484 trace_x86_fpu_regs_deactivated(fpu); in fpregs_deactivate()
487 static inline void fpregs_activate(struct fpu *fpu) in fpregs_activate() argument
489 this_cpu_write(fpu_fpregs_owner_ctx, fpu); in fpregs_activate()
490 trace_x86_fpu_regs_activated(fpu); in fpregs_activate()
498 struct fpu *fpu = &current->thread.fpu; in __fpregs_load_activate() local
504 if (!fpregs_state_valid(fpu, cpu)) { in __fpregs_load_activate()
505 copy_kernel_to_fpregs(&fpu->state); in __fpregs_load_activate()
506 fpregs_activate(fpu); in __fpregs_load_activate()
507 fpu->last_cpu = cpu; in __fpregs_load_activate()
513 * FPU state switching for scheduling.
523 * If TIF_NEED_FPU_LOAD is cleared then the CPU's FPU registers
524 * are saved in the current thread's FPU register state.
526 * If TIF_NEED_FPU_LOAD is set then CPU's FPU registers may not
527 * hold current()'s FPU registers. It is required to load the
531 * The FPU context is only stored/restored for a user task and
536 struct fpu *old_fpu = &prev->thread.fpu; in switch_fpu_prepare()
554 * Load PKRU from the FPU context if available. Delay loading of the
555 * complete FPU state until the return to userland.
561 struct fpu *next_fpu = &next->thread.fpu; in switch_fpu_finish()