Skip to content

Commit d9fb08b

Browse files
committed
arm64: tlb: Pass the corresponding mm to __tlbi_sync_s1ish()
The mm structure will be used for workarounds that need limiting to specific tasks.

Acked-by: Mark Rutland <[email protected]>
Cc: Will Deacon <[email protected]>
Reviewed-by: Will Deacon <[email protected]>
Signed-off-by: Catalin Marinas <[email protected]>
1 parent 6bfbf57 commit d9fb08b

2 files changed

Lines changed: 5 additions & 5 deletions

File tree

arch/arm64/include/asm/tlbflush.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -185,7 +185,7 @@ do { \
185185
* Complete broadcast TLB maintenance issued by the host which invalidates
186186
* stage 1 information in the host's own translation regime.
187187
*/
188-
static inline void __tlbi_sync_s1ish(void)
188+
static inline void __tlbi_sync_s1ish(struct mm_struct *mm)
189189
{
190190
dsb(ish);
191191
__repeat_tlbi_sync(vale1is, 0);
@@ -323,7 +323,7 @@ static inline void flush_tlb_mm(struct mm_struct *mm)
323323
asid = __TLBI_VADDR(0, ASID(mm));
324324
__tlbi(aside1is, asid);
325325
__tlbi_user(aside1is, asid);
326-
__tlbi_sync_s1ish();
326+
__tlbi_sync_s1ish(mm);
327327
mmu_notifier_arch_invalidate_secondary_tlbs(mm, 0, -1UL);
328328
}
329329

@@ -377,7 +377,7 @@ static inline void flush_tlb_page(struct vm_area_struct *vma,
377377
unsigned long uaddr)
378378
{
379379
flush_tlb_page_nosync(vma, uaddr);
380-
__tlbi_sync_s1ish();
380+
__tlbi_sync_s1ish(vma->vm_mm);
381381
}
382382

383383
static inline bool arch_tlbbatch_should_defer(struct mm_struct *mm)
@@ -532,7 +532,7 @@ static inline void __flush_tlb_range(struct vm_area_struct *vma,
532532
{
533533
__flush_tlb_range_nosync(vma->vm_mm, start, end, stride,
534534
last_level, tlb_level);
535-
__tlbi_sync_s1ish();
535+
__tlbi_sync_s1ish(vma->vm_mm);
536536
}
537537

538538
static inline void local_flush_tlb_contpte(struct vm_area_struct *vma,

arch/arm64/kernel/sys_compat.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ __do_compat_cache_op(unsigned long start, unsigned long end)
3737
* We pick the reserved-ASID to minimise the impact.
3838
*/
3939
__tlbi(aside1is, __TLBI_VADDR(0, 0));
40-
__tlbi_sync_s1ish();
40+
__tlbi_sync_s1ish(current->mm);
4141
}
4242

4343
ret = caches_clean_inval_user_pou(start, start + chunk);

0 commit comments

Comments (0)