From c4fe760efde84e52168a81bf125f25ba2f118b51 Mon Sep 17 00:00:00 2001
From: Joe Perches
Date: Sun, 23 Mar 2008 01:02:43 -0700
Subject: [PATCH] include/asm-x86/mmu_context_64.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches
Signed-off-by: Ingo Molnar
---
 include/asm-x86/mmu_context_64.h | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/include/asm-x86/mmu_context_64.h b/include/asm-x86/mmu_context_64.h
index ad6dc821ef..ca44c71e7f 100644
--- a/include/asm-x86/mmu_context_64.h
+++ b/include/asm-x86/mmu_context_64.h
@@ -20,12 +20,12 @@ void destroy_context(struct mm_struct *mm);
 static inline void enter_lazy_tlb(struct mm_struct *mm, struct task_struct *tsk)
 {
 #ifdef CONFIG_SMP
-	if (read_pda(mmu_state) == TLBSTATE_OK) 
+	if (read_pda(mmu_state) == TLBSTATE_OK)
 		write_pda(mmu_state, TLBSTATE_LAZY);
 #endif
 }
 
-static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next, 
+static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next,
 			     struct task_struct *tsk)
 {
 	unsigned cpu = smp_processor_id();
@@ -39,7 +39,7 @@ static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next,
 		cpu_set(cpu, next->cpu_vm_mask);
 		load_cr3(next->pgd);
 
-		if (unlikely(next->context.ldt != prev->context.ldt)) 
+		if (unlikely(next->context.ldt != prev->context.ldt))
 			load_LDT_nolock(&next->context);
 	}
 #ifdef CONFIG_SMP
@@ -48,7 +48,7 @@ static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next,
 		if (read_pda(active_mm) != next)
 			BUG();
 		if (!cpu_test_and_set(cpu, next->cpu_vm_mask)) {
-			/* We were in lazy tlb mode and leave_mm disabled 
+			/* We were in lazy tlb mode and leave_mm disabled
 			 * tlb flush IPI delivery. We must reload CR3
 			 * to make sure to use no freed page tables.
 			 */
@@ -59,13 +59,14 @@ static inline void switch_mm(struct mm_struct *prev, struct mm_struct *next,
 #endif
 }
 
-#define deactivate_mm(tsk,mm)	do { \
-	load_gs_index(0); \
-	asm volatile("movl %0,%%fs"::"r"(0)); \
-} while(0)
+#define deactivate_mm(tsk, mm)			\
+do {						\
+	load_gs_index(0);			\
+	asm volatile("movl %0,%%fs"::"r"(0));	\
+} while (0)
 
-#define activate_mm(prev, next) \
-	switch_mm((prev),(next),NULL)
+#define activate_mm(prev, next)			\
+	switch_mm((prev), (next), NULL)
 
 
 #endif
-- 
2.39.5