Lines Matching refs:gts
70 struct gru_thread_state *gts = NULL; in gru_find_lock_gts() local
75 gts = gru_find_thread_state(vma, TSID(vaddr, vma)); in gru_find_lock_gts()
76 if (gts) in gru_find_lock_gts()
77 mutex_lock(&gts->ts_ctxlock); in gru_find_lock_gts()
80 return gts; in gru_find_lock_gts()
87 struct gru_thread_state *gts = ERR_PTR(-EINVAL); in gru_alloc_locked_gts() local
94 gts = gru_alloc_thread_state(vma, TSID(vaddr, vma)); in gru_alloc_locked_gts()
95 if (IS_ERR(gts)) in gru_alloc_locked_gts()
97 mutex_lock(&gts->ts_ctxlock); in gru_alloc_locked_gts()
99 return gts; in gru_alloc_locked_gts()
103 return gts; in gru_alloc_locked_gts()
109 static void gru_unlock_gts(struct gru_thread_state *gts) in gru_unlock_gts() argument
111 mutex_unlock(&gts->ts_ctxlock); in gru_unlock_gts()
252 static int gru_vtop(struct gru_thread_state *gts, unsigned long vaddr, in gru_vtop() argument
255 struct mm_struct *mm = gts->ts_mm; in gru_vtop()
310 struct gru_thread_state *gts, int atomic, in gru_preload_tlb() argument
332 ret = gru_vtop(gts, vaddr, write, atomic, &gpa, &pageshift); in gru_preload_tlb()
338 atomic ? "atomic" : "non-atomic", gru->gs_gid, gts, tfh, in gru_preload_tlb()
356 struct gru_thread_state *gts, in gru_try_dropin() argument
361 unsigned char tlb_preload_count = gts->ts_tlb_preload_count; in gru_try_dropin()
410 if (atomic_read(&gts->ts_gms->ms_range_active)) in gru_try_dropin()
413 ret = gru_vtop(gts, vaddr, write, atomic, &gpa, &pageshift); in gru_try_dropin()
419 if (!(gts->ts_sizeavail & GRU_SIZEAVAIL(pageshift))) { in gru_try_dropin()
420 gts->ts_sizeavail |= GRU_SIZEAVAIL(pageshift); in gru_try_dropin()
421 if (atomic || !gru_update_cch(gts)) { in gru_try_dropin()
422 gts->ts_force_cch_reload = 1; in gru_try_dropin()
428 gru_preload_tlb(gru, gts, atomic, vaddr, asid, write, tlb_preload_count, tfh, cbe); in gru_try_dropin()
433 gts->ustats.tlbdropin++; in gru_try_dropin()
439 atomic ? "atomic" : "non-atomic", gru->gs_gid, gts, tfh, vaddr, asid, in gru_try_dropin()
523 struct gru_thread_state *gts; in gru_intr() local
564 gts = gru->gs_gts[ctxnum]; in gru_intr()
567 if (!gts) { in gru_intr()
576 gts->ustats.fmm_tlbmiss++; in gru_intr()
577 if (!gts->ts_force_cch_reload && in gru_intr()
578 mmap_read_trylock(gts->ts_mm)) { in gru_intr()
579 gru_try_dropin(gru, gts, tfh, NULL); in gru_intr()
580 mmap_read_unlock(gts->ts_mm); in gru_intr()
613 static int gru_user_dropin(struct gru_thread_state *gts, in gru_user_dropin() argument
617 struct gru_mm_struct *gms = gts->ts_gms; in gru_user_dropin()
620 gts->ustats.upm_tlbmiss++; in gru_user_dropin()
625 ret = gru_try_dropin(gts->ts_gru, gts, tfh, cb); in gru_user_dropin()
640 struct gru_thread_state *gts; in gru_handle_user_call_os() local
652 gts = gru_find_lock_gts(cb); in gru_handle_user_call_os()
653 if (!gts) in gru_handle_user_call_os()
655 gru_dbg(grudev, "address 0x%lx, gid %d, gts 0x%p\n", cb, gts->ts_gru ? gts->ts_gru->gs_gid : -1, gts); in gru_handle_user_call_os()
657 if (ucbnum >= gts->ts_cbr_au_count * GRU_CBR_AU_SIZE) in gru_handle_user_call_os()
660 if (gru_check_context_placement(gts)) { in gru_handle_user_call_os()
661 gru_unlock_gts(gts); in gru_handle_user_call_os()
662 gru_unload_context(gts, 1); in gru_handle_user_call_os()
669 if (gts->ts_gru && gts->ts_force_cch_reload) { in gru_handle_user_call_os()
670 gts->ts_force_cch_reload = 0; in gru_handle_user_call_os()
671 gru_update_cch(gts); in gru_handle_user_call_os()
675 cbrnum = thread_cbr_number(gts, ucbnum); in gru_handle_user_call_os()
676 if (gts->ts_gru) { in gru_handle_user_call_os()
677 tfh = get_tfh_by_index(gts->ts_gru, cbrnum); in gru_handle_user_call_os()
678 cbk = get_gseg_base_address_cb(gts->ts_gru->gs_gru_base_vaddr, in gru_handle_user_call_os()
679 gts->ts_ctxnum, ucbnum); in gru_handle_user_call_os()
680 ret = gru_user_dropin(gts, tfh, cbk); in gru_handle_user_call_os()
683 gru_unlock_gts(gts); in gru_handle_user_call_os()
695 struct gru_thread_state *gts; in gru_get_exception_detail() local
702 gts = gru_find_lock_gts(excdet.cb); in gru_get_exception_detail()
703 if (!gts) in gru_get_exception_detail()
706 gru_dbg(grudev, "address 0x%lx, gid %d, gts 0x%p\n", excdet.cb, gts->ts_gru ? gts->ts_gru->gs_gid : -1, gts); in gru_get_exception_detail()
708 if (ucbnum >= gts->ts_cbr_au_count * GRU_CBR_AU_SIZE) { in gru_get_exception_detail()
710 } else if (gts->ts_gru) { in gru_get_exception_detail()
711 cbrnum = thread_cbr_number(gts, ucbnum); in gru_get_exception_detail()
712 cbe = get_cbe_by_index(gts->ts_gru, cbrnum); in gru_get_exception_detail()
727 gru_unlock_gts(gts); in gru_get_exception_detail()
744 struct gru_thread_state *gts; in gru_unload_all_contexts() local
754 gts = gru->gs_gts[ctxnum]; in gru_unload_all_contexts()
755 if (gts && mutex_trylock(&gts->ts_ctxlock)) { in gru_unload_all_contexts()
757 gru_unload_context(gts, 1); in gru_unload_all_contexts()
758 mutex_unlock(&gts->ts_ctxlock); in gru_unload_all_contexts()
769 struct gru_thread_state *gts; in gru_user_unload_context() local
781 gts = gru_find_lock_gts(req.gseg); in gru_user_unload_context()
782 if (!gts) in gru_user_unload_context()
785 if (gts->ts_gru) in gru_user_unload_context()
786 gru_unload_context(gts, 1); in gru_user_unload_context()
787 gru_unlock_gts(gts); in gru_user_unload_context()
798 struct gru_thread_state *gts; in gru_user_flush_tlb() local
809 gts = gru_find_lock_gts(req.gseg); in gru_user_flush_tlb()
810 if (!gts) in gru_user_flush_tlb()
813 gms = gts->ts_gms; in gru_user_flush_tlb()
814 gru_unlock_gts(gts); in gru_user_flush_tlb()
825 struct gru_thread_state *gts; in gru_get_gseg_statistics() local
836 gts = gru_find_lock_gts(req.gseg); in gru_get_gseg_statistics()
837 if (gts) { in gru_get_gseg_statistics()
838 memcpy(&req.stats, &gts->ustats, sizeof(gts->ustats)); in gru_get_gseg_statistics()
839 gru_unlock_gts(gts); in gru_get_gseg_statistics()
841 memset(&req.stats, 0, sizeof(gts->ustats)); in gru_get_gseg_statistics()
856 struct gru_thread_state *gts; in gru_set_context_option() local
865 gts = gru_find_lock_gts(req.gseg); in gru_set_context_option()
866 if (!gts) { in gru_set_context_option()
867 gts = gru_alloc_locked_gts(req.gseg); in gru_set_context_option()
868 if (IS_ERR(gts)) in gru_set_context_option()
869 return PTR_ERR(gts); in gru_set_context_option()
880 gts->ts_user_blade_id = req.val1; in gru_set_context_option()
881 gts->ts_user_chiplet_id = req.val0; in gru_set_context_option()
882 if (gru_check_context_placement(gts)) { in gru_set_context_option()
883 gru_unlock_gts(gts); in gru_set_context_option()
884 gru_unload_context(gts, 1); in gru_set_context_option()
891 gts->ts_tgid_owner = current->tgid; in gru_set_context_option()
895 gts->ts_cch_req_slice = req.val1 & 3; in gru_set_context_option()
900 gru_unlock_gts(gts); in gru_set_context_option()
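
For orientation, most of the matches above follow one access pattern: look up the thread state and take its lock with gru_find_lock_gts(), operate on it, then release it with gru_unlock_gts(). The sketch below is illustrative only; example_gseg_op() is a made-up name, and the driver-internal declarations (struct gru_thread_state, gru_find_lock_gts(), gru_unlock_gts(), gru_unload_context(), from the sgi-gru driver's grutables.h) are assumed. It mirrors the call sequence shown for gru_user_unload_context() at listing lines 781-787.

/*
 * Illustrative sketch only -- example_gseg_op() is hypothetical.
 * It follows the find/lock -> operate -> unlock sequence visible in the
 * listing, assuming the driver-internal declarations from grutables.h.
 */
static int example_gseg_op(unsigned long gseg_vaddr)
{
	struct gru_thread_state *gts;

	/* Look up the thread state for this gseg; returns with ts_ctxlock held. */
	gts = gru_find_lock_gts(gseg_vaddr);
	if (!gts)
		return -EINVAL;

	/* Operate only if the context is currently resident on a GRU,
	 * same check and call as gru_user_unload_context() above. */
	if (gts->ts_gru)
		gru_unload_context(gts, 1);

	/* Drop ts_ctxlock, i.e. mutex_unlock(&gts->ts_ctxlock). */
	gru_unlock_gts(gts);
	return 0;
}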