[linux-2.6] include/asm-x86/mmu_context_32.h
#ifndef __I386_SCHED_H
#define __I386_SCHED_H

/*
 * Pulled in for the helpers used below (per_cpu()/cpu_tlbstate, load_cr3(),
 * the cpumask operations, load_LDT_nolock()); this is the include list the
 * mainline version of this file carries in this era.
 */
#include <asm/desc.h>
#include <asm/atomic.h>
#include <asm/pgalloc.h>
#include <asm/tlbflush.h>
#include <asm/paravirt.h>

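/*
 * enter_lazy_tlb() is called by the scheduler core when a kernel thread is
 * switched in and simply keeps running on the previous task's page tables
 * ("lazy TLB" mode).  On SMP we only downgrade this CPU's cpu_tlbstate from
 * TLBSTATE_OK to TLBSTATE_LAZY, so the TLB-flush IPI path knows it may
 * detach us from the mm (leave_mm()) instead of flushing us again and
 * again.  On UP there is nothing to do.
 */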
static inline void enter_lazy_tlb(struct mm_struct *mm, struct task_struct *tsk)
{
#ifdef CONFIG_SMP
        unsigned cpu = smp_processor_id();
        if (per_cpu(cpu_tlbstate, cpu).state == TLBSTATE_OK)
                per_cpu(cpu_tlbstate, cpu).state = TLBSTATE_LAZY;
#endif
}

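/*
 * switch_mm() does the real address-space switch.  It is called from the
 * scheduler's context_switch() and, via activate_mm(), on exec.
 *
 * prev != next: stop TLB-flush IPIs for the old mm, mark this CPU's
 * cpu_tlbstate as current for the new mm, reload the page tables through
 * %cr3 and, only when it actually differs, the LDT.
 *
 * prev == next (SMP only): this CPU may have been sitting in lazy TLB mode,
 * in which case leave_mm() already dropped it from cpu_vm_mask and switched
 * %cr3 away to the kernel page tables; the cpu_test_and_set() below detects
 * that and reloads %cr3 and the LDT so the mm is genuinely active again.
 */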
static inline void switch_mm(struct mm_struct *prev,
                             struct mm_struct *next,
                             struct task_struct *tsk)
{
        int cpu = smp_processor_id();

        if (likely(prev != next)) {
                /* stop flush ipis for the previous mm */
                cpu_clear(cpu, prev->cpu_vm_mask);
#ifdef CONFIG_SMP
                per_cpu(cpu_tlbstate, cpu).state = TLBSTATE_OK;
                per_cpu(cpu_tlbstate, cpu).active_mm = next;
#endif
                cpu_set(cpu, next->cpu_vm_mask);

                /* Re-load page tables */
                load_cr3(next->pgd);

                /*
                 * load the LDT, if the LDT is different:
                 */
                if (unlikely(prev->context.ldt != next->context.ldt))
                        load_LDT_nolock(&next->context);
        }
#ifdef CONFIG_SMP
        else {
                per_cpu(cpu_tlbstate, cpu).state = TLBSTATE_OK;
                BUG_ON(per_cpu(cpu_tlbstate, cpu).active_mm != next);

                if (!cpu_test_and_set(cpu, next->cpu_vm_mask)) {
                        /* We were in lazy tlb mode and leave_mm disabled
                         * tlb flush IPI delivery. We must reload %cr3.
                         */
                        load_cr3(next->pgd);
                        load_LDT_nolock(&next->context);
                }
        }
#endif
}

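/*
 * deactivate_mm() runs on the exec path, when the task's old address space
 * is being dropped.  On 32-bit x86 the only arch-specific work is to zero
 * %gs, the user TLS segment register, so the freshly exec'd image does not
 * start out with a stale selector inherited from the old one.
 */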
#define deactivate_mm(tsk, mm)                  \
        asm("movl %0,%%gs": :"r" (0));

#endif
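
/*
 * For reference, an abridged sketch (not part of this header) of how the
 * scheduler core of this era is expected to drive the two hooks above from
 * context_switch() in kernel/sched.c: kernel threads borrow the old mm and
 * go lazy, everything else gets a real switch_mm().
 *
 *      struct mm_struct *mm = next->mm;
 *      struct mm_struct *oldmm = prev->active_mm;
 *
 *      if (unlikely(!mm)) {
 *              next->active_mm = oldmm;
 *              atomic_inc(&oldmm->mm_count);
 *              enter_lazy_tlb(oldmm, next);
 *      } else
 *              switch_mm(oldmm, mm, next);
 */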