linux/arch/mips/include/asm/switch_to.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003, 06 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Kevin D. Kissell, kevink@mips.org and Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#ifndef _ASM_SWITCH_TO_H
#define _ASM_SWITCH_TO_H

#include <asm/cpu-features.h>
#include <asm/watch.h>
#include <asm/dsp.h>

struct task_struct;

/*
 * switch_to(prev, next, last) switches execution from task 'prev' to task
 * 'next', storing a pointer to the task that was actually running before
 * us in 'last'.  The register-level switch itself is performed by the
 * assembly routine resume().
 */
extern asmlinkage void *resume(void *last, void *next, void *next_ti, u32 __usedfpu);

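/*
 * Software link-bit state used when LL/SC has to be emulated for CPUs
 * that lack the ll/sc instructions: ll_bit is the emulated link bit and
 * ll_task the task that currently owns it.  The bit is invalidated on
 * every context switch by __clear_software_ll_bit() below.
 */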
extern unsigned int ll_bit;
extern struct task_struct *ll_task;

#ifdef CONFIG_MIPS_MT_FPAFF

/*
 * Handle the scheduler resume end of FPU affinity management.  We do this
 * inline to try to keep the overhead down. If we have been forced to run on
 * a "CPU" with an FPU because of a previous high level of FP computation,
 * but did not actually use the FPU during the most recent time-slice (CU1
 * isn't set), we undo the restriction on cpus_allowed.
 *
 * We're not calling set_cpus_allowed() here, because we have no need to
 * force prompt migration - we're already switching the current CPU to a
 * different thread.
 */

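/*
 * Note: the body below also references 'next', which is not a parameter of
 * this macro.  It resolves only because the macro is expanded from
 * switch_to(), at a call site where a local variable named 'next' (the
 * incoming task) is in scope.
 */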
#define __mips_mt_fpaff_switch_to(prev)                                 \
do {                                                                    \
        struct thread_info *__prev_ti = task_thread_info(prev);         \
                                                                        \
        if (cpu_has_fpu &&                                              \
            test_ti_thread_flag(__prev_ti, TIF_FPUBOUND) &&             \
            (!(KSTK_STATUS(prev) & ST0_CU1))) {                         \
                clear_ti_thread_flag(__prev_ti, TIF_FPUBOUND);          \
                prev->cpus_allowed = prev->thread.user_cpus_allowed;    \
        }                                                               \
        next->thread.emulated_fp = 0;                                   \
} while (0)

#else
#define __mips_mt_fpaff_switch_to(prev) do { (void) (prev); } while (0)
#endif

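/*
 * When LL/SC is emulated in software the link bit must be invalidated on
 * every context switch, so that a pending sc in the outgoing task cannot
 * spuriously succeed.  For kernels built only for CPUs with native ll/sc
 * (cpu_has_llsc constant and true) this compiles away to nothing.
 */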
#define __clear_software_ll_bit()                                       \
do {                                                                    \
        if (!__builtin_constant_p(cpu_has_llsc) || !cpu_has_llsc)       \
                ll_bit = 0;                                             \
} while (0)

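/*
 * Architecture part of the context switch: update the FPU affinity
 * bookkeeping, save the outgoing task's DSP registers if the CPU has a
 * DSP ASE, invalidate the software LL bit, latch and clear TIF_USEDFPU,
 * and hand over to resume() for the actual register-level switch.
 */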
#define switch_to(prev, next, last)                                     \
do {                                                                    \
        u32 __usedfpu;                                                  \
        __mips_mt_fpaff_switch_to(prev);                                \
        if (cpu_has_dsp)                                                \
                __save_dsp(prev);                                       \
        __clear_software_ll_bit();                                      \
        __usedfpu = test_and_clear_tsk_thread_flag(prev, TIF_USEDFPU);  \
        (last) = resume(prev, next, task_thread_info(next), __usedfpu); \
} while (0)

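/*
 * finish_arch_switch() runs on the incoming task after the low-level
 * switch: it restores the DSP state, reloads the UserLocal register
 * (the user-visible TLS pointer) where available, and restores the
 * watch registers.
 *
 * Rough sketch of how the core scheduler is expected to drive these
 * hooks (the real call sites live in kernel/sched, not here):
 *
 *        switch_to(prev, next, prev);
 *        ...
 *        finish_arch_switch(prev);
 */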
#define finish_arch_switch(prev)                                        \
do {                                                                    \
        if (cpu_has_dsp)                                                \
                __restore_dsp(current);                                 \
        if (cpu_has_userlocal)                                          \
                write_c0_userlocal(current_thread_info()->tp_value);    \
        __restore_watch();                                              \
} while (0)

#endif /* _ASM_SWITCH_TO_H */