#ifndef _SPARC64_TLBFLUSH_H
#define _SPARC64_TLBFLUSH_H

#include <asm/mmu_context.h>

/* TSB flush operations. */

#define TLB_BATCH_NR	192
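
/*
 * Pending user TLB flushes are batched: each cpu collects up to
 * TLB_BATCH_NR virtual addresses in a struct tlb_batch and flushes
 * them in one go via flush_tlb_pending().
 */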
struct tlb_batch {
	struct mm_struct *mm;
	unsigned long tlb_nr;
	unsigned long active;
	unsigned long vaddrs[TLB_BATCH_NR];
};
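
/*
 * The TSB (Translation Storage Buffer) is the in-memory translation
 * table consulted by the TLB miss handlers; its entries must be
 * flushed along with the TLB so stale translations are not reloaded.
 */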
extern void flush_tsb_kernel_range(unsigned long start, unsigned long end);
extern void flush_tsb_user(struct tlb_batch *tb);
extern void flush_tsb_user_page(struct mm_struct *mm, unsigned long vaddr);

/* TLB flush operations. */
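
/*
 * The generic mm/page/range hooks are no-ops here: user TLB entries
 * are flushed when the page tables are modified, via the deferred
 * tlb_batch machinery above.
 */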
static inline void flush_tlb_mm(struct mm_struct *mm)
{
}

static inline void flush_tlb_page(struct vm_area_struct *vma,
				  unsigned long vmaddr)
{
}

static inline void flush_tlb_range(struct vm_area_struct *vma,
				   unsigned long start, unsigned long end)
{
}
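
/*
 * Lazy MMU mode brackets a run of page table updates; flushes queued
 * while it is active are collected into the batch and drained by
 * flush_tlb_pending() when the batch fills or the mode is left.
 */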
#define __HAVE_ARCH_ENTER_LAZY_MMU_MODE

extern void flush_tlb_pending(void);
extern void arch_enter_lazy_mmu_mode(void);
extern void arch_leave_lazy_mmu_mode(void);
#define arch_flush_lazy_mmu_mode()	do {} while (0)
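
/* Local cpu only. */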
extern void __flush_tlb_all(void);
extern void __flush_tlb_page(unsigned long context, unsigned long vaddr);
extern void __flush_tlb_kernel_range(unsigned long start, unsigned long end);
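
/*
 * On uniprocessor kernels flushing the local TLB and TSB is enough;
 * SMP kernels must also flush the TLBs of the other cpus.
 */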
#ifndef CONFIG_SMP

#define flush_tlb_kernel_range(start,end) \
do {	flush_tsb_kernel_range(start,end); \
	__flush_tlb_kernel_range(start,end); \
} while (0)

static inline void global_flush_tlb_page(struct mm_struct *mm, unsigned long vaddr)
{
	__flush_tlb_page(CTX_HWBITS(mm->context), vaddr);
}

#else /* CONFIG_SMP */
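
/*
 * The smp_* variants cross-call the remote cpus to flush their TLBs;
 * the TSB flush is done once by the caller since the TSB lives in
 * memory shared by all cpus.
 */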
extern void smp_flush_tlb_kernel_range(unsigned long start, unsigned long end);
extern void smp_flush_tlb_page(struct mm_struct *mm, unsigned long vaddr);

#define flush_tlb_kernel_range(start, end) \
do {	flush_tsb_kernel_range(start,end); \
	smp_flush_tlb_kernel_range(start, end); \
} while (0)

#define global_flush_tlb_page(mm, vaddr) \
	smp_flush_tlb_page(mm, vaddr)

#endif /* ! CONFIG_SMP */

#endif /* _SPARC64_TLBFLUSH_H */