Lines Matching refs:gts

200 struct gru_thread_state *gts) in reserve_gru_resources() argument
203 gts->ts_cbr_map = in reserve_gru_resources()
204 gru_reserve_cb_resources(gru, gts->ts_cbr_au_count, in reserve_gru_resources()
205 gts->ts_cbr_idx); in reserve_gru_resources()
206 gts->ts_dsr_map = in reserve_gru_resources()
207 gru_reserve_ds_resources(gru, gts->ts_dsr_au_count, NULL); in reserve_gru_resources()
211 struct gru_thread_state *gts) in free_gru_resources() argument
214 gru->gs_cbr_map |= gts->ts_cbr_map; in free_gru_resources()
215 gru->gs_dsr_map |= gts->ts_dsr_map; in free_gru_resources()
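
These matches appear to come from grumain.c in the Linux SGI-GRU driver; the sketches interleaved below are userspace models of the patterns the matched lines show, not driver code. The pair above is the resource hand-off: reserve_gru_resources() pulls CBR/DSR allocation bits out of the per-GRU free maps into gts->ts_cbr_map/ts_dsr_map, and free_gru_resources() simply ORs them back (lines 214-215). A minimal model of that bitmap hand-off, with hypothetical helpers reserve_bits()/free_bits() standing in for gru_reserve_cb_resources() and friends (the real allocators also handle allocation-unit granularity and preferred indices):

#include <stdio.h>

static unsigned long reserve_bits(unsigned long *free_map, int count)
{
	unsigned long owned = 0;

	for (int i = 0; i < 64 && count; i++) {
		unsigned long bit = 1UL << i;
		if (*free_map & bit) {
			*free_map &= ~bit;	/* take it out of the free pool */
			owned |= bit;
			count--;
		}
	}
	return owned;				/* the ts_*_map analogue */
}

static void free_bits(unsigned long *free_map, unsigned long owned)
{
	*free_map |= owned;			/* same as gs_*_map |= ts_*_map */
}

int main(void)
{
	unsigned long gs_cbr_map = ~0UL;	/* all CBRs free */
	unsigned long ts_cbr_map = reserve_bits(&gs_cbr_map, 4);

	printf("owned=%#lx free=%#lx\n", ts_cbr_map, gs_cbr_map);
	free_bits(&gs_cbr_map, ts_cbr_map);
	printf("after free: free=%#lx\n", gs_cbr_map);
	return 0;
}
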
238 struct gru_thread_state *gts) in gru_load_mm_tracker() argument
240 struct gru_mm_struct *gms = gts->ts_gms; in gru_load_mm_tracker()
242 unsigned short ctxbitmap = (1 << gts->ts_ctxnum); in gru_load_mm_tracker()
268 gru->gs_gid, gts, gms, gts->ts_ctxnum, asid, in gru_load_mm_tracker()
274 struct gru_thread_state *gts) in gru_unload_mm_tracker() argument
276 struct gru_mm_struct *gms = gts->ts_gms; in gru_unload_mm_tracker()
281 ctxbitmap = (1 << gts->ts_ctxnum); in gru_unload_mm_tracker()
287 gru->gs_gid, gts, gms, gts->ts_ctxnum, gms->ms_asidmap[0]); in gru_unload_mm_tracker()
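
gru_load_mm_tracker()/gru_unload_mm_tracker() maintain, per address space (the gru_mm_struct), a bitmap of which hardware context numbers currently use it, so TLB shootdown can target exactly those contexts. A sketch of that bookkeeping, assuming a single-word bitmap as at lines 242 and 281:

#include <stdio.h>

struct mm_tracker { unsigned short ctxbitmap; };	/* per-gms state */

static void load_tracker(struct mm_tracker *gms, int ctxnum)
{
	gms->ctxbitmap |= 1 << ctxnum;		/* line 242's (1 << ts_ctxnum) */
}

static void unload_tracker(struct mm_tracker *gms, int ctxnum)
{
	gms->ctxbitmap &= ~(1 << ctxnum);	/* mirror of line 281 */
}

int main(void)
{
	struct mm_tracker gms = { 0 };

	load_tracker(&gms, 3);
	printf("ctxbitmap=%#x\n", (unsigned)gms.ctxbitmap);	/* 0x8 */
	unload_tracker(&gms, 3);
	printf("ctxbitmap=%#x\n", (unsigned)gms.ctxbitmap);	/* 0x0 */
	return 0;
}
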
296 void gts_drop(struct gru_thread_state *gts) in gts_drop() argument
298 if (gts && atomic_dec_return(&gts->ts_refcnt) == 0) { in gts_drop()
299 if (gts->ts_gms) in gts_drop()
300 gru_drop_mmu_notifier(gts->ts_gms); in gts_drop()
301 kfree(gts); in gts_drop()
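
gts_drop() is the standard last-reference-frees pattern: the kernel's atomic_dec_return(&x) == 0 corresponds to C11's atomic_fetch_sub(&x, 1) == 1. A compilable userspace model (the mmu-notifier release at line 300 is reduced to a comment):

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct gts { atomic_int refcnt; };

static void gts_drop(struct gts *gts)
{
	if (gts && atomic_fetch_sub(&gts->refcnt, 1) == 1) {
		/* last reference: release side objects (the gms), then the gts */
		free(gts);
		puts("freed");
	}
}

int main(void)
{
	struct gts *g = malloc(sizeof(*g));

	atomic_init(&g->refcnt, 2);	/* two holders */
	gts_drop(g);			/* no-op: one reference left */
	gts_drop(g);			/* frees */
	return 0;
}
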
312 struct gru_thread_state *gts; in gru_find_current_gts_nolock() local
314 list_for_each_entry(gts, &vdata->vd_head, ts_next) in gru_find_current_gts_nolock()
315 if (gts->ts_tsid == tsid) in gru_find_current_gts_nolock()
316 return gts; in gru_find_current_gts_nolock()
327 struct gru_thread_state *gts; in gru_alloc_gts() local
333 gts = kmalloc(bytes, GFP_KERNEL); in gru_alloc_gts()
334 if (!gts) in gru_alloc_gts()
338 memset(gts, 0, sizeof(struct gru_thread_state)); /* zero out header */ in gru_alloc_gts()
339 atomic_set(&gts->ts_refcnt, 1); in gru_alloc_gts()
340 mutex_init(&gts->ts_ctxlock); in gru_alloc_gts()
341 gts->ts_cbr_au_count = cbr_au_count; in gru_alloc_gts()
342 gts->ts_dsr_au_count = dsr_au_count; in gru_alloc_gts()
343 gts->ts_tlb_preload_count = tlb_preload_count; in gru_alloc_gts()
344 gts->ts_user_options = options; in gru_alloc_gts()
345 gts->ts_user_blade_id = -1; in gru_alloc_gts()
346 gts->ts_user_chiplet_id = -1; in gru_alloc_gts()
347 gts->ts_tsid = tsid; in gru_alloc_gts()
348 gts->ts_ctxnum = NULLCTX; in gru_alloc_gts()
349 gts->ts_tlb_int_select = -1; in gru_alloc_gts()
350 gts->ts_cch_req_slice = -1; in gru_alloc_gts()
351 gts->ts_sizeavail = GRU_SIZEAVAIL(PAGE_SHIFT); in gru_alloc_gts()
353 gts->ts_mm = current->mm; in gru_alloc_gts()
354 gts->ts_vma = vma; in gru_alloc_gts()
358 gts->ts_gms = gms; in gru_alloc_gts()
361 gru_dbg(grudev, "alloc gts %p\n", gts); in gru_alloc_gts()
362 return gts; in gru_alloc_gts()
365 gts_drop(gts); in gru_alloc_gts()
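
gru_alloc_gts() sizes one kmalloc() to cover the fixed gru_thread_state header plus the variable CBR/DSR save area, but memsets only the header (line 338's "zero out header" comment): the save area is overwritten wholesale on the first context save, so zeroing it would be wasted work. A sketch of that allocation shape, with illustrative field names:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct gts_hdr {
	int refcnt;
	int tsid;
	/* ... remaining header fields ... */
};

static struct gts_hdr *alloc_gts(size_t save_bytes, int tsid)
{
	/* one allocation: header + variable context save area */
	struct gts_hdr *gts = malloc(sizeof(*gts) + save_bytes);

	if (!gts)
		return NULL;
	memset(gts, 0, sizeof(*gts));	/* zero out header only */
	gts->refcnt = 1;		/* caller holds the first reference */
	gts->tsid = tsid;
	return gts;
}

int main(void)
{
	struct gts_hdr *g = alloc_gts(4096, 0);

	printf("gts=%p tsid=%d\n", (void *)g, g->tsid);
	free(g);
	return 0;
}
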
394 struct gru_thread_state *gts; in gru_find_thread_state() local
397 gts = gru_find_current_gts_nolock(vdata, tsid); in gru_find_thread_state()
399 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts); in gru_find_thread_state()
400 return gts; in gru_find_thread_state()
411 struct gru_thread_state *gts, *ngts; in gru_alloc_thread_state() local
413 gts = gru_alloc_gts(vma, vdata->vd_cbr_au_count, in gru_alloc_thread_state()
417 if (IS_ERR(gts)) in gru_alloc_thread_state()
418 return gts; in gru_alloc_thread_state()
423 gts_drop(gts); in gru_alloc_thread_state()
424 gts = ngts; in gru_alloc_thread_state()
427 list_add(&gts->ts_next, &vdata->vd_head); in gru_alloc_thread_state()
430 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts); in gru_alloc_thread_state()
431 return gts; in gru_alloc_thread_state()
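
gru_alloc_thread_state() resolves an allocation race: the candidate gts is allocated outside the lock, and the list is searched again under the lock; if a concurrent thread already inserted one for this tsid (ngts), the fresh copy is dropped and the winner reused (lines 423-424), otherwise the fresh one is published with list_add() (line 427). A pthread model of the same pattern, with the list reduced to one slot:

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct node { int tsid; } *slot;		/* stands in for vd_head */

static struct node *find_or_insert(int tsid)
{
	struct node *n = malloc(sizeof(*n)), *ngts;

	if (!n)
		return NULL;
	n->tsid = tsid;

	pthread_mutex_lock(&lock);
	ngts = (slot && slot->tsid == tsid) ? slot : NULL;
	if (ngts) {
		free(n);	/* lost the race: drop ours, reuse the winner */
		n = ngts;
	} else {
		slot = n;	/* won the race: publish (the list_add) */
	}
	pthread_mutex_unlock(&lock);
	return n;
}

int main(void)
{
	struct node *a = find_or_insert(7);
	struct node *b = find_or_insert(7);

	printf("same object: %d\n", a == b);	/* prints 1 */
	return 0;
}
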
437 static void gru_free_gru_context(struct gru_thread_state *gts) in gru_free_gru_context() argument
441 gru = gts->ts_gru; in gru_free_gru_context()
442 gru_dbg(grudev, "gts %p, gid %d\n", gts, gru->gs_gid); in gru_free_gru_context()
445 gru->gs_gts[gts->ts_ctxnum] = NULL; in gru_free_gru_context()
446 free_gru_resources(gru, gts); in gru_free_gru_context()
447 BUG_ON(test_bit(gts->ts_ctxnum, &gru->gs_context_map) == 0); in gru_free_gru_context()
448 __clear_bit(gts->ts_ctxnum, &gru->gs_context_map); in gru_free_gru_context()
449 gts->ts_ctxnum = NULLCTX; in gru_free_gru_context()
450 gts->ts_gru = NULL; in gru_free_gru_context()
451 gts->ts_blade = -1; in gru_free_gru_context()
454 gts_drop(gts); in gru_free_gru_context()
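
gru_free_gru_context() tears down in a strict order: detach from the gs_gts[] slot, return the resource bits, then release the context number, asserting it was still marked in use (the BUG_ON at line 447) before clearing it and resetting to NULLCTX. A small model of that invariant check:

#include <assert.h>
#include <stdio.h>

#define NULLCTX (-1)		/* matches the reset at line 449 */

static void free_ctxnum(unsigned long *context_map, int *ctxnum)
{
	/* BUG_ON analogue: freeing a context not marked in use is a bug */
	assert(*context_map & (1UL << *ctxnum));
	*context_map &= ~(1UL << *ctxnum);
	*ctxnum = NULLCTX;
}

int main(void)
{
	unsigned long map = 1UL << 5;
	int ctxnum = 5;

	free_ctxnum(&map, &ctxnum);
	printf("map=%#lx ctxnum=%d\n", map, ctxnum);
	return 0;
}
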
555 void gru_unload_context(struct gru_thread_state *gts, int savestate) in gru_unload_context() argument
557 struct gru_state *gru = gts->ts_gru; in gru_unload_context()
559 int ctxnum = gts->ts_ctxnum; in gru_unload_context()
561 if (!is_kernel_context(gts)) in gru_unload_context()
562 zap_vma_ptes(gts->ts_vma, UGRUADDR(gts), GRU_GSEG_PAGESIZE); in gru_unload_context()
566 gts, gts->ts_cbr_map, gts->ts_dsr_map); in gru_unload_context()
571 if (!is_kernel_context(gts)) in gru_unload_context()
572 gru_unload_mm_tracker(gru, gts); in gru_unload_context()
574 gru_unload_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, in gru_unload_context()
575 ctxnum, gts->ts_cbr_map, in gru_unload_context()
576 gts->ts_dsr_map); in gru_unload_context()
577 gts->ts_data_valid = 1; in gru_unload_context()
584 gru_free_gru_context(gts); in gru_unload_context()
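
gru_unload_context() zaps the user mapping first (so no CPU touches the gseg mid-save), detaches the mm tracker, optionally saves hardware state into the gts, and marks it with ts_data_valid (line 577) so the next load knows a restore is pending. A sketch of that save/restore handshake, with hw_state standing in for the hardware context:

#include <stdio.h>
#include <string.h>

struct ctx_model {
	int data_valid;			/* ts_data_valid analogue */
	unsigned long saved[4];		/* stands in for ts_gdata */
};

static unsigned long hw_state[4];	/* pretend hardware registers */

static void unload(struct ctx_model *c, int savestate)
{
	if (savestate) {
		memcpy(c->saved, hw_state, sizeof(c->saved));
		c->data_valid = 1;	/* next load must restore */
	}
}

static void load(struct ctx_model *c)
{
	if (c->data_valid)
		memcpy(hw_state, c->saved, sizeof(hw_state));
	/* else: the hardware context starts fresh */
}

int main(void)
{
	struct ctx_model c = { 0 };

	hw_state[0] = 42;
	unload(&c, 1);
	hw_state[0] = 0;
	load(&c);
	printf("restored=%lu\n", hw_state[0]);	/* 42 */
	return 0;
}
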
591 void gru_load_context(struct gru_thread_state *gts) in gru_load_context() argument
593 struct gru_state *gru = gts->ts_gru; in gru_load_context()
595 int i, err, asid, ctxnum = gts->ts_ctxnum; in gru_load_context()
600 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL in gru_load_context()
601 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_load_context()
602 cch->tlb_int_enable = (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_load_context()
604 gts->ts_tlb_int_select = gru_cpu_fault_map_id(); in gru_load_context()
605 cch->tlb_int_select = gts->ts_tlb_int_select; in gru_load_context()
607 if (gts->ts_cch_req_slice >= 0) { in gru_load_context()
609 cch->req_slice = gts->ts_cch_req_slice; in gru_load_context()
614 cch->dsr_allocation_map = gts->ts_dsr_map; in gru_load_context()
615 cch->cbr_allocation_map = gts->ts_cbr_map; in gru_load_context()
617 if (is_kernel_context(gts)) { in gru_load_context()
626 asid = gru_load_mm_tracker(gru, gts); in gru_load_context()
629 cch->sizeavail[i] = gts->ts_sizeavail; in gru_load_context()
637 err, cch, gts, gts->ts_cbr_map, gts->ts_dsr_map); in gru_load_context()
641 gru_load_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, ctxnum, in gru_load_context()
642 gts->ts_cbr_map, gts->ts_dsr_map, gts->ts_data_valid); in gru_load_context()
649 gts->ts_gru->gs_gid, gts, gts->ts_cbr_map, gts->ts_dsr_map, in gru_load_context()
650 (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR), gts->ts_tlb_int_select); in gru_load_context()
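
The CCH programming around lines 600-605 enables fault reporting for both FMM modes but the actual TLB-miss interrupt only for GRU_OPT_MISS_FMM_INTR, steered to the current CPU's fault-map id (the left-hand side elided at line 600 is cch->tfm_fault_bit_enable in the mainline driver). A model of that option decoding, with illustrative types:

#include <stdio.h>

enum miss_mode { MISS_FMM_POLL, MISS_FMM_INTR, MISS_OTHER };

struct cch_model {
	int tfm_fault_bit_enable;
	int tlb_int_enable;
	int tlb_int_select;
};

static void program_cch(struct cch_model *cch, enum miss_mode opt, int cpu_map_id)
{
	/* report faults in either FMM mode (lines 600-601) */
	cch->tfm_fault_bit_enable =
		(opt == MISS_FMM_POLL || opt == MISS_FMM_INTR);
	/* interrupt only in the interrupting mode (line 602) */
	cch->tlb_int_enable = (opt == MISS_FMM_INTR);
	if (cch->tlb_int_enable)
		cch->tlb_int_select = cpu_map_id;	/* lines 604-605 */
}

int main(void)
{
	struct cch_model cch = { 0, 0, -1 };

	program_cch(&cch, MISS_FMM_INTR, 3);
	printf("fault=%d int=%d select=%d\n", cch.tfm_fault_bit_enable,
	       cch.tlb_int_enable, cch.tlb_int_select);
	return 0;
}
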
658 int gru_update_cch(struct gru_thread_state *gts) in gru_update_cch() argument
661 struct gru_state *gru = gts->ts_gru; in gru_update_cch()
662 int i, ctxnum = gts->ts_ctxnum, ret = 0; in gru_update_cch()
668 if (gru->gs_gts[gts->ts_ctxnum] != gts) in gru_update_cch()
673 cch->sizeavail[i] = gts->ts_sizeavail; in gru_update_cch()
674 gts->ts_tlb_int_select = gru_cpu_fault_map_id(); in gru_update_cch()
677 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL in gru_update_cch()
678 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_update_cch()
695 static int gru_retarget_intr(struct gru_thread_state *gts) in gru_retarget_intr() argument
697 if (gts->ts_tlb_int_select < 0 in gru_retarget_intr()
698 || gts->ts_tlb_int_select == gru_cpu_fault_map_id()) in gru_retarget_intr()
701 gru_dbg(grudev, "retarget from %d to %d\n", gts->ts_tlb_int_select, in gru_retarget_intr()
703 return gru_update_cch(gts); in gru_retarget_intr()
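
gru_retarget_intr() is a cheap no-op test in front of the expensive CCH rewrite: nothing to do if no interrupt target was ever selected (< 0) or if it already points at this CPU (lines 697-698). The same guard, modeled:

#include <stdio.h>

/* Returns nonzero when the CCH actually had to be rewritten. */
static int retarget_intr(int *tlb_int_select, int cpu_map_id)
{
	if (*tlb_int_select < 0 || *tlb_int_select == cpu_map_id)
		return 0;			/* unset, or already correct */
	*tlb_int_select = cpu_map_id;		/* then push into the CCH */
	return 1;
}

int main(void)
{
	int sel = 2;

	printf("moved=%d sel=%d\n", retarget_intr(&sel, 5), sel);	/* 1 5 */
	printf("moved=%d sel=%d\n", retarget_intr(&sel, 5), sel);	/* 0 5 */
	return 0;
}
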
713 struct gru_thread_state *gts) in gru_check_chiplet_assignment() argument
718 blade_id = gts->ts_user_blade_id; in gru_check_chiplet_assignment()
722 chiplet_id = gts->ts_user_chiplet_id; in gru_check_chiplet_assignment()
732 void gru_check_context_placement(struct gru_thread_state *gts) in gru_check_context_placement() argument
741 gru = gts->ts_gru; in gru_check_context_placement()
742 if (!gru || gts->ts_tgid_owner != current->tgid) in gru_check_context_placement()
745 if (!gru_check_chiplet_assignment(gru, gts)) { in gru_check_context_placement()
747 gru_unload_context(gts, 1); in gru_check_context_placement()
748 } else if (gru_retarget_intr(gts)) { in gru_check_context_placement()
762 static int is_gts_stealable(struct gru_thread_state *gts, in is_gts_stealable() argument
765 if (is_kernel_context(gts)) in is_gts_stealable()
768 return mutex_trylock(&gts->ts_ctxlock); in is_gts_stealable()
771 static void gts_stolen(struct gru_thread_state *gts, in gts_stolen() argument
774 if (is_kernel_context(gts)) { in gts_stolen()
778 mutex_unlock(&gts->ts_ctxlock); in gts_stolen()
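
is_gts_stealable()/gts_stolen() bracket a steal with a non-blocking lock: a user context may be stolen only if its ctxlock can be taken without waiting, and the lock is held for the duration of the steal (kernel contexts take a separate path at lines 765 and 774). The trylock bracket in pthread terms:

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t ctxlock = PTHREAD_MUTEX_INITIALIZER;

static int is_stealable(pthread_mutex_t *lock)
{
	/* mutex_trylock analogue: 1 = we now own the lock, steal away */
	return pthread_mutex_trylock(lock) == 0;
}

static void stolen(pthread_mutex_t *lock)
{
	pthread_mutex_unlock(lock);	/* steal finished */
}

int main(void)
{
	if (is_stealable(&ctxlock)) {
		puts("stealing context");
		stolen(&ctxlock);
	}
	return 0;
}
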
783 void gru_steal_context(struct gru_thread_state *gts) in gru_steal_context() argument
791 blade_id = gts->ts_user_blade_id; in gru_steal_context()
794 cbr = gts->ts_cbr_au_count; in gru_steal_context()
795 dsr = gts->ts_dsr_au_count; in gru_steal_context()
809 if (gru_check_chiplet_assignment(gru, gts)) { in gru_steal_context()
840 gts->ustats.context_stolen++; in gru_steal_context()
869 struct gru_state *gru_assign_gru_context(struct gru_thread_state *gts) in gru_assign_gru_context() argument
873 int blade_id = gts->ts_user_blade_id; in gru_assign_gru_context()
881 if (!gru_check_chiplet_assignment(grux, gts)) in gru_assign_gru_context()
883 if (check_gru_resources(grux, gts->ts_cbr_au_count, in gru_assign_gru_context()
884 gts->ts_dsr_au_count, in gru_assign_gru_context()
895 if (!check_gru_resources(gru, gts->ts_cbr_au_count, in gru_assign_gru_context()
896 gts->ts_dsr_au_count, GRU_NUM_CCH)) { in gru_assign_gru_context()
900 reserve_gru_resources(gru, gts); in gru_assign_gru_context()
901 gts->ts_gru = gru; in gru_assign_gru_context()
902 gts->ts_blade = gru->gs_blade_id; in gru_assign_gru_context()
903 gts->ts_ctxnum = gru_assign_context_number(gru); in gru_assign_gru_context()
904 atomic_inc(&gts->ts_refcnt); in gru_assign_gru_context()
905 gru->gs_gts[gts->ts_ctxnum] = gts; in gru_assign_gru_context()
911 gseg_virtual_address(gts->ts_gru, gts->ts_ctxnum), gts, in gru_assign_gru_context()
912 gts->ts_gru->gs_gid, gts->ts_ctxnum, in gru_assign_gru_context()
913 gts->ts_cbr_au_count, gts->ts_dsr_au_count); in gru_assign_gru_context()
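
gru_assign_gru_context() walks the blade's GRUs, skips any failing the chiplet check (line 881), and picks one with enough free CBR/DSR resources; on success it reserves the resources, takes a context number, and bumps the reference count (lines 900-905). A model of the selection loop, with check_gru_resources() reduced to popcounts over free bitmaps (__builtin_popcountl is a GCC/Clang builtin):

#include <stdio.h>

struct gru_model {
	unsigned long cbr_free, dsr_free;	/* free-resource bitmaps */
	int chiplet_ok;				/* chiplet check result */
};

static int pick_gru(struct gru_model *grus, int n, int cbr_needed, int dsr_needed)
{
	for (int i = 0; i < n; i++) {
		if (!grus[i].chiplet_ok)
			continue;		/* wrong chiplet for this gts */
		if (__builtin_popcountl(grus[i].cbr_free) >= cbr_needed &&
		    __builtin_popcountl(grus[i].dsr_free) >= dsr_needed)
			return i;		/* enough free resources here */
	}
	return -1;				/* caller may steal instead */
}

int main(void)
{
	struct gru_model grus[2] = {
		{ 0x3, 0x1, 1 },		/* too few free bits */
		{ 0xff, 0xf, 1 },
	};

	printf("picked gru %d\n", pick_gru(grus, 2, 4, 2));	/* 1 */
	return 0;
}
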
931 struct gru_thread_state *gts; in gru_fault() local
940 gts = gru_find_thread_state(vma, TSID(vaddr, vma)); in gru_fault()
941 if (!gts) in gru_fault()
945 mutex_lock(&gts->ts_ctxlock); in gru_fault()
948 gru_check_context_placement(gts); in gru_fault()
950 if (!gts->ts_gru) { in gru_fault()
952 if (!gru_assign_gru_context(gts)) { in gru_fault()
954 mutex_unlock(&gts->ts_ctxlock); in gru_fault()
957 if (gts->ts_steal_jiffies + GRU_STEAL_DELAY < jiffies) in gru_fault()
958 gru_steal_context(gts); in gru_fault()
961 gru_load_context(gts); in gru_fault()
962 paddr = gseg_physical_address(gts->ts_gru, gts->ts_ctxnum); in gru_fault()
969 mutex_unlock(&gts->ts_ctxlock); in gru_fault()
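
gru_fault() ties the section together: look up the gts for the faulting gseg, take its ctxlock, verify placement, and if no hardware context is attached try to assign one; on failure the fault returns for a retry, stealing a context only once GRU_STEAL_DELAY jiffies have passed (line 957) so bursts of faults do not thrash other contexts. A compilable model of that control flow, with all helpers as stand-ins (the real driver re-runs the fault rather than looping here, and stealing frees someone else's context instead of attaching one directly):

#include <stdbool.h>
#include <stdio.h>

enum fault_rc { FAULT_DONE, FAULT_RETRY };
#define STEAL_DELAY 50			/* GRU_STEAL_DELAY stand-in */

struct gts_model {
	bool hw_ctx;			/* gts->ts_gru != NULL analogue */
	unsigned long last_steal;	/* ts_steal_jiffies analogue */
};

static bool assign_context(struct gts_model *g) { (void)g; return false; }
static void steal_context(struct gts_model *g)  { g->hw_ctx = true; }
static void load_context(struct gts_model *g)   { (void)g; }

static enum fault_rc handle_fault(struct gts_model *gts, unsigned long now)
{
	if (!gts->hw_ctx) {
		if (!assign_context(gts)) {
			/* throttle stealing to once per STEAL_DELAY */
			if (gts->last_steal + STEAL_DELAY < now) {
				steal_context(gts);
				gts->last_steal = now;
			}
			return FAULT_RETRY;	/* fault will re-run */
		}
		load_context(gts);
	}
	return FAULT_DONE;			/* map the gseg and return */
}

int main(void)
{
	struct gts_model g = { false, 0 };

	printf("rc=%d\n", handle_fault(&g, 100));	/* steals, retries */
	printf("rc=%d\n", handle_fault(&g, 101));	/* now has a context */
	return 0;
}
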