Lines matching refs:gts (references to struct gru_thread_state *gts in the SGI GRU driver's context-management code)

187 struct gru_thread_state *gts) in reserve_gru_resources() argument
190 gts->ts_cbr_map = in reserve_gru_resources()
191 gru_reserve_cb_resources(gru, gts->ts_cbr_au_count, in reserve_gru_resources()
192 gts->ts_cbr_idx); in reserve_gru_resources()
193 gts->ts_dsr_map = in reserve_gru_resources()
194 gru_reserve_ds_resources(gru, gts->ts_dsr_au_count, NULL); in reserve_gru_resources()
198 struct gru_thread_state *gts) in free_gru_resources() argument
201 gru->gs_cbr_map |= gts->ts_cbr_map; in free_gru_resources()
202 gru->gs_dsr_map |= gts->ts_dsr_map; in free_gru_resources()
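
The reserve/free pair is the simplest view of GRU resource ownership: gru_reserve_cb_resources() and gru_reserve_ds_resources() carve allocation units out of the per-GRU pool into the context's ts_cbr_map/ts_dsr_map bitmaps, and freeing is just ORing those bits back. A sketch assembled from the listed lines (not verbatim driver code; lines the listing omits are left out):

static void reserve_gru_resources(struct gru_state *gru,
                                  struct gru_thread_state *gts)
{
        gts->ts_cbr_map =
                gru_reserve_cb_resources(gru, gts->ts_cbr_au_count,
                                         gts->ts_cbr_idx);
        gts->ts_dsr_map =
                gru_reserve_ds_resources(gru, gts->ts_dsr_au_count, NULL);
}

static void free_gru_resources(struct gru_state *gru,
                               struct gru_thread_state *gts)
{
        /* Returning resources is just ORing the owned bits back in. */
        gru->gs_cbr_map |= gts->ts_cbr_map;
        gru->gs_dsr_map |= gts->ts_dsr_map;
}
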
225 struct gru_thread_state *gts) in gru_load_mm_tracker() argument
227 struct gru_mm_struct *gms = gts->ts_gms; in gru_load_mm_tracker()
229 unsigned short ctxbitmap = (1 << gts->ts_ctxnum); in gru_load_mm_tracker()
255 gru->gs_gid, gts, gms, gts->ts_ctxnum, asid, in gru_load_mm_tracker()
261 struct gru_thread_state *gts) in gru_unload_mm_tracker() argument
263 struct gru_mm_struct *gms = gts->ts_gms; in gru_unload_mm_tracker()
268 ctxbitmap = (1 << gts->ts_ctxnum); in gru_unload_mm_tracker()
274 gru->gs_gid, gts, gms, gts->ts_ctxnum, gms->ms_asidmap[0]); in gru_unload_mm_tracker()
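
Lines 229 and 268 show the tracking scheme: each loaded context sets one bit, 1 << ts_ctxnum, in a bitmap attached to its gru_mm_struct, and clears it on unload, so the driver can tell when an address space no longer has any context resident on a given GRU (and its ASID can eventually be recycled). A minimal standalone model of that bitmap; the struct below is invented for illustration:

struct mm_tracker_model {
        unsigned short ctxbitmap;       /* bit N set => context N is loaded */
};

static void tracker_load(struct mm_tracker_model *t, int ctxnum)
{
        t->ctxbitmap |= 1 << ctxnum;    /* gru_load_mm_tracker() side */
}

static void tracker_unload(struct mm_tracker_model *t, int ctxnum)
{
        t->ctxbitmap &= ~(1 << ctxnum); /* gru_unload_mm_tracker() side */
}
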
283 void gts_drop(struct gru_thread_state *gts) in gts_drop() argument
285 if (gts && refcount_dec_and_test(&gts->ts_refcnt)) { in gts_drop()
286 if (gts->ts_gms) in gts_drop()
287 gru_drop_mmu_notifier(gts->ts_gms); in gts_drop()
288 kfree(gts); in gts_drop()
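
gts_drop() is visible almost in full at lines 283-288. The pattern worth noting: the MMU-notifier reference held through ts_gms is released only when the last ts_refcnt reference goes away, immediately before the kfree(). Assembled from the listed lines:

void gts_drop(struct gru_thread_state *gts)
{
        if (gts && refcount_dec_and_test(&gts->ts_refcnt)) {
                if (gts->ts_gms)
                        gru_drop_mmu_notifier(gts->ts_gms);
                kfree(gts);
        }
}
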
299 struct gru_thread_state *gts; in gru_find_current_gts_nolock() local
301 list_for_each_entry(gts, &vdata->vd_head, ts_next) in gru_find_current_gts_nolock()
302 if (gts->ts_tsid == tsid) in gru_find_current_gts_nolock()
303 return gts; in gru_find_current_gts_nolock()
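
The _nolock suffix signals that the caller must already hold the vma-data lock; the function itself is a plain list walk keyed on the thread-slot id. Reconstructed from lines 299-303 (the final return NULL is assumed):

static struct gru_thread_state *
gru_find_current_gts_nolock(struct gru_vma_data *vdata, int tsid)
{
        struct gru_thread_state *gts;

        list_for_each_entry(gts, &vdata->vd_head, ts_next)
                if (gts->ts_tsid == tsid)
                        return gts;
        return NULL;
}
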
314 struct gru_thread_state *gts; in gru_alloc_gts() local
320 gts = kmalloc(bytes, GFP_KERNEL); in gru_alloc_gts()
321 if (!gts) in gru_alloc_gts()
325 memset(gts, 0, sizeof(struct gru_thread_state)); /* zero out header */ in gru_alloc_gts()
326 refcount_set(&gts->ts_refcnt, 1); in gru_alloc_gts()
327 mutex_init(&gts->ts_ctxlock); in gru_alloc_gts()
328 gts->ts_cbr_au_count = cbr_au_count; in gru_alloc_gts()
329 gts->ts_dsr_au_count = dsr_au_count; in gru_alloc_gts()
330 gts->ts_tlb_preload_count = tlb_preload_count; in gru_alloc_gts()
331 gts->ts_user_options = options; in gru_alloc_gts()
332 gts->ts_user_blade_id = -1; in gru_alloc_gts()
333 gts->ts_user_chiplet_id = -1; in gru_alloc_gts()
334 gts->ts_tsid = tsid; in gru_alloc_gts()
335 gts->ts_ctxnum = NULLCTX; in gru_alloc_gts()
336 gts->ts_tlb_int_select = -1; in gru_alloc_gts()
337 gts->ts_cch_req_slice = -1; in gru_alloc_gts()
338 gts->ts_sizeavail = GRU_SIZEAVAIL(PAGE_SHIFT); in gru_alloc_gts()
340 gts->ts_mm = current->mm; in gru_alloc_gts()
341 gts->ts_vma = vma; in gru_alloc_gts()
345 gts->ts_gms = gms; in gru_alloc_gts()
348 gru_dbg(grudev, "alloc gts %p\n", gts); in gru_alloc_gts()
349 return gts; in gru_alloc_gts()
352 gts_drop(gts); in gru_alloc_gts()
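
Two details in gru_alloc_gts() stand out: the allocation is larger than the header (the bytes value presumably covers the context-save area too, which is why the memset at line 325 zeroes only sizeof(struct gru_thread_state)), and every not-yet-assigned field is parked at a sentinel. A condensed sketch of the init sequence; the error path and the bytes computation are assumptions:

        gts = kmalloc(bytes, GFP_KERNEL);       /* header + context-save area */
        if (!gts)
                return ERR_PTR(-ENOMEM);        /* assumed error path */

        memset(gts, 0, sizeof(struct gru_thread_state));  /* header only */
        refcount_set(&gts->ts_refcnt, 1);       /* caller starts with one ref */
        mutex_init(&gts->ts_ctxlock);
        gts->ts_ctxnum = NULLCTX;               /* no hardware context yet */
        gts->ts_user_blade_id = -1;             /* -1 == no user preference */
        gts->ts_user_chiplet_id = -1;
        gts->ts_tlb_int_select = -1;            /* interrupt target not chosen */
        gts->ts_cch_req_slice = -1;
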
381 struct gru_thread_state *gts; in gru_find_thread_state() local
384 gts = gru_find_current_gts_nolock(vdata, tsid); in gru_find_thread_state()
386 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts); in gru_find_thread_state()
387 return gts; in gru_find_thread_state()
398 struct gru_thread_state *gts, *ngts; in gru_alloc_thread_state() local
400 gts = gru_alloc_gts(vma, vdata->vd_cbr_au_count, in gru_alloc_thread_state()
404 if (IS_ERR(gts)) in gru_alloc_thread_state()
405 return gts; in gru_alloc_thread_state()
410 gts_drop(gts); in gru_alloc_thread_state()
411 gts = ngts; in gru_alloc_thread_state()
414 list_add(&gts->ts_next, &vdata->vd_head); in gru_alloc_thread_state()
417 gru_dbg(grudev, "vma %p, gts %p\n", vma, gts); in gru_alloc_thread_state()
418 return gts; in gru_alloc_thread_state()
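
gru_alloc_thread_state() is a classic allocate-outside-the-lock pattern: build the gts without holding the vma lock, then re-run the lookup under the lock and discard the fresh copy if another thread got there first. Sketch from lines 398-418; the vd_lock name and the gru_alloc_gts() arguments beyond vd_cbr_au_count are assumptions:

        gts = gru_alloc_gts(vma, vdata->vd_cbr_au_count,
                            vdata->vd_dsr_au_count,
                            vdata->vd_tlb_preload_count,
                            vdata->vd_user_options, tsid);
        if (IS_ERR(gts))
                return gts;

        spin_lock(&vdata->vd_lock);             /* assumed lock */
        ngts = gru_find_current_gts_nolock(vdata, tsid);
        if (ngts) {
                gts_drop(gts);                  /* lost the race; drop ours */
                gts = ngts;
        } else {
                list_add(&gts->ts_next, &vdata->vd_head);
        }
        spin_unlock(&vdata->vd_lock);
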
424 static void gru_free_gru_context(struct gru_thread_state *gts) in gru_free_gru_context() argument
428 gru = gts->ts_gru; in gru_free_gru_context()
429 gru_dbg(grudev, "gts %p, gid %d\n", gts, gru->gs_gid); in gru_free_gru_context()
432 gru->gs_gts[gts->ts_ctxnum] = NULL; in gru_free_gru_context()
433 free_gru_resources(gru, gts); in gru_free_gru_context()
434 BUG_ON(test_bit(gts->ts_ctxnum, &gru->gs_context_map) == 0); in gru_free_gru_context()
435 __clear_bit(gts->ts_ctxnum, &gru->gs_context_map); in gru_free_gru_context()
436 gts->ts_ctxnum = NULLCTX; in gru_free_gru_context()
437 gts->ts_gru = NULL; in gru_free_gru_context()
438 gts->ts_blade = -1; in gru_free_gru_context()
441 gts_drop(gts); in gru_free_gru_context()
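
gru_free_gru_context() is the inverse of assignment: detach the gts from its gs_gts[] slot, return the resource bitmaps, release the context number (the BUG_ON guards against clearing a bit that was never set), reset the fields to their sentinels, and drop the reference the GRU held. Assembled from lines 424-441; locking around the GRU state is not visible in the listing and is omitted here:

static void gru_free_gru_context(struct gru_thread_state *gts)
{
        struct gru_state *gru = gts->ts_gru;

        gru->gs_gts[gts->ts_ctxnum] = NULL;
        free_gru_resources(gru, gts);
        BUG_ON(test_bit(gts->ts_ctxnum, &gru->gs_context_map) == 0);
        __clear_bit(gts->ts_ctxnum, &gru->gs_context_map);
        gts->ts_ctxnum = NULLCTX;
        gts->ts_gru = NULL;
        gts->ts_blade = -1;

        gts_drop(gts);  /* pairs with refcount_inc() at assignment time */
}
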
542 void gru_unload_context(struct gru_thread_state *gts, int savestate) in gru_unload_context() argument
544 struct gru_state *gru = gts->ts_gru; in gru_unload_context()
546 int ctxnum = gts->ts_ctxnum; in gru_unload_context()
548 if (!is_kernel_context(gts)) in gru_unload_context()
549 zap_vma_ptes(gts->ts_vma, UGRUADDR(gts), GRU_GSEG_PAGESIZE); in gru_unload_context()
553 gts, gts->ts_cbr_map, gts->ts_dsr_map); in gru_unload_context()
558 if (!is_kernel_context(gts)) in gru_unload_context()
559 gru_unload_mm_tracker(gru, gts); in gru_unload_context()
561 gru_unload_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, in gru_unload_context()
562 ctxnum, gts->ts_cbr_map, in gru_unload_context()
563 gts->ts_dsr_map); in gru_unload_context()
564 gts->ts_data_valid = 1; in gru_unload_context()
571 gru_free_gru_context(gts); in gru_unload_context()
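
gru_unload_context() first zaps the user PTEs mapping the gseg so any later access refaults (kernel contexts have no user mapping and skip this), detaches the mm tracker, optionally saves hardware state into ts_gdata, and finally frees the hardware context. A flow skeleton from lines 542-571; the CCH interrupt/deallocate steps between the visible lines are left out, and the savestate guard is an assumption:

void gru_unload_context(struct gru_thread_state *gts, int savestate)
{
        struct gru_state *gru = gts->ts_gru;
        int ctxnum = gts->ts_ctxnum;

        if (!is_kernel_context(gts))
                zap_vma_ptes(gts->ts_vma, UGRUADDR(gts), GRU_GSEG_PAGESIZE);

        if (!is_kernel_context(gts))
                gru_unload_mm_tracker(gru, gts);
        if (savestate) {                        /* assumed condition */
                gru_unload_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr,
                                        ctxnum, gts->ts_cbr_map,
                                        gts->ts_dsr_map);
                gts->ts_data_valid = 1;         /* reload can restore state */
        }

        gru_free_gru_context(gts);
}
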
578 void gru_load_context(struct gru_thread_state *gts) in gru_load_context() argument
580 struct gru_state *gru = gts->ts_gru; in gru_load_context()
582 int i, err, asid, ctxnum = gts->ts_ctxnum; in gru_load_context()
587 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL in gru_load_context()
588 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_load_context()
589 cch->tlb_int_enable = (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_load_context()
591 gts->ts_tlb_int_select = gru_cpu_fault_map_id(); in gru_load_context()
592 cch->tlb_int_select = gts->ts_tlb_int_select; in gru_load_context()
594 if (gts->ts_cch_req_slice >= 0) { in gru_load_context()
596 cch->req_slice = gts->ts_cch_req_slice; in gru_load_context()
601 cch->dsr_allocation_map = gts->ts_dsr_map; in gru_load_context()
602 cch->cbr_allocation_map = gts->ts_cbr_map; in gru_load_context()
604 if (is_kernel_context(gts)) { in gru_load_context()
613 asid = gru_load_mm_tracker(gru, gts); in gru_load_context()
616 cch->sizeavail[i] = gts->ts_sizeavail; in gru_load_context()
624 err, cch, gts, gts->ts_cbr_map, gts->ts_dsr_map); in gru_load_context()
628 gru_load_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, ctxnum, in gru_load_context()
629 gts->ts_cbr_map, gts->ts_dsr_map, gts->ts_data_valid); in gru_load_context()
636 gts->ts_gru->gs_gid, gts, gts->ts_cbr_map, gts->ts_dsr_map, in gru_load_context()
637 (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR), gts->ts_tlb_int_select); in gru_load_context()
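
Loading mirrors unloading: the context configuration handle (CCH) for ctxnum is programmed with the interrupt mode derived from ts_user_options, the resource bitmaps reserved earlier, the ASID from the mm tracker (user contexts only), and the page-size availability mask, after which the saved register state is copied back in. A skeleton of the programming visible at lines 578-637; the cch variable, its exact type, and the loop bound are assumptions:

        struct gru_context_configuration_handle *cch;   /* assumed type */
        int i, err, asid, ctxnum = gts->ts_ctxnum;

        cch->tlb_int_enable = (gts->ts_user_options == GRU_OPT_MISS_FMM_INTR);
        if (cch->tlb_int_enable) {
                gts->ts_tlb_int_select = gru_cpu_fault_map_id();
                cch->tlb_int_select = gts->ts_tlb_int_select;
        }
        if (gts->ts_cch_req_slice >= 0)
                cch->req_slice = gts->ts_cch_req_slice; /* honor pinned slice */
        cch->dsr_allocation_map = gts->ts_dsr_map;
        cch->cbr_allocation_map = gts->ts_cbr_map;

        if (!is_kernel_context(gts))
                asid = gru_load_mm_tracker(gru, gts);   /* user ctx gets ASID */
        for (i = 0; i < 8; i++)                         /* assumed bound */
                cch->sizeavail[i] = gts->ts_sizeavail;

        gru_load_context_data(gts->ts_gdata, gru->gs_gru_base_vaddr, ctxnum,
                              gts->ts_cbr_map, gts->ts_dsr_map,
                              gts->ts_data_valid);
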
645 int gru_update_cch(struct gru_thread_state *gts) in gru_update_cch() argument
648 struct gru_state *gru = gts->ts_gru; in gru_update_cch()
649 int i, ctxnum = gts->ts_ctxnum, ret = 0; in gru_update_cch()
655 if (gru->gs_gts[gts->ts_ctxnum] != gts) in gru_update_cch()
660 cch->sizeavail[i] = gts->ts_sizeavail; in gru_update_cch()
661 gts->ts_tlb_int_select = gru_cpu_fault_map_id(); in gru_update_cch()
664 (gts->ts_user_options == GRU_OPT_MISS_FMM_POLL in gru_update_cch()
665 || gts->ts_user_options == GRU_OPT_MISS_FMM_INTR); in gru_update_cch()
682 static int gru_retarget_intr(struct gru_thread_state *gts) in gru_retarget_intr() argument
684 if (gts->ts_tlb_int_select < 0 in gru_retarget_intr()
685 || gts->ts_tlb_int_select == gru_cpu_fault_map_id()) in gru_retarget_intr()
688 gru_dbg(grudev, "retarget from %d to %d\n", gts->ts_tlb_int_select, in gru_retarget_intr()
690 return gru_update_cch(gts); in gru_retarget_intr()
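
gru_retarget_intr() is nearly complete in the listing: if the context takes TLB-miss interrupts and the programmed fault-map id no longer matches the current CPU's, the CCH must be rewritten via gru_update_cch() (which, per line 655, first verifies the context still owns its gs_gts[] slot). Reconstructed from lines 682-690; the early return value is assumed:

static int gru_retarget_intr(struct gru_thread_state *gts)
{
        if (gts->ts_tlb_int_select < 0
            || gts->ts_tlb_int_select == gru_cpu_fault_map_id())
                return 0;       /* no interrupt in use, or already correct */

        gru_dbg(grudev, "retarget from %d to %d\n", gts->ts_tlb_int_select,
                gru_cpu_fault_map_id());
        return gru_update_cch(gts);
}
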
700 struct gru_thread_state *gts) in gru_check_chiplet_assignment() argument
705 blade_id = gts->ts_user_blade_id; in gru_check_chiplet_assignment()
709 chiplet_id = gts->ts_user_chiplet_id; in gru_check_chiplet_assignment()
719 int gru_check_context_placement(struct gru_thread_state *gts) in gru_check_context_placement() argument
729 gru = gts->ts_gru; in gru_check_context_placement()
736 if (!gru || gts->ts_tgid_owner != current->tgid) in gru_check_context_placement()
739 if (!gru_check_chiplet_assignment(gru, gts)) { in gru_check_context_placement()
742 } else if (gru_retarget_intr(gts)) { in gru_check_context_placement()
758 static int is_gts_stealable(struct gru_thread_state *gts, in is_gts_stealable() argument
761 if (is_kernel_context(gts)) in is_gts_stealable()
764 return mutex_trylock(&gts->ts_ctxlock); in is_gts_stealable()
767 static void gts_stolen(struct gru_thread_state *gts, in gts_stolen() argument
770 if (is_kernel_context(gts)) { in gts_stolen()
774 mutex_unlock(&gts->ts_ctxlock); in gts_stolen()
779 void gru_steal_context(struct gru_thread_state *gts) in gru_steal_context() argument
787 blade_id = gts->ts_user_blade_id; in gru_steal_context()
790 cbr = gts->ts_cbr_au_count; in gru_steal_context()
791 dsr = gts->ts_dsr_au_count; in gru_steal_context()
805 if (gru_check_chiplet_assignment(gru, gts)) { in gru_steal_context()
836 gts->ustats.context_stolen++; in gru_steal_context()
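
Stealing reuses the same teardown path. A victim is only stealable if its owner is not holding ts_ctxlock, hence the mutex_trylock() in is_gts_stealable(); kernel contexts get special handling at both the check and the completion (gts_stolen()). Note that line 836 increments the counter on the requesting gts (the argument at line 779), so ustats.context_stolen counts how often this context had to evict another. A condensed sketch of the victim handling implied by lines 758-836; the victim/blade-state names (ngts, gbs) and the savestate argument are assumptions, and the scan that picks the victim is summarized:

        /* ngts is the victim picked from gru->gs_gts[]; gts is the requester. */
        if (ngts && is_gts_stealable(ngts, gbs)) {      /* trylock victim lock */
                gts->ustats.context_stolen++;           /* requester's counter */
                gru_unload_context(ngts, 1);            /* savestate assumed */
                gts_stolen(ngts, gbs);                  /* unlock / kernel path */
        }
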
865 struct gru_state *gru_assign_gru_context(struct gru_thread_state *gts) in gru_assign_gru_context() argument
869 int blade_id = gts->ts_user_blade_id; in gru_assign_gru_context()
877 if (!gru_check_chiplet_assignment(grux, gts)) in gru_assign_gru_context()
879 if (check_gru_resources(grux, gts->ts_cbr_au_count, in gru_assign_gru_context()
880 gts->ts_dsr_au_count, in gru_assign_gru_context()
891 if (!check_gru_resources(gru, gts->ts_cbr_au_count, in gru_assign_gru_context()
892 gts->ts_dsr_au_count, GRU_NUM_CCH)) { in gru_assign_gru_context()
896 reserve_gru_resources(gru, gts); in gru_assign_gru_context()
897 gts->ts_gru = gru; in gru_assign_gru_context()
898 gts->ts_blade = gru->gs_blade_id; in gru_assign_gru_context()
899 gts->ts_ctxnum = gru_assign_context_number(gru); in gru_assign_gru_context()
900 refcount_inc(&gts->ts_refcnt); in gru_assign_gru_context()
901 gru->gs_gts[gts->ts_ctxnum] = gts; in gru_assign_gru_context()
907 gseg_virtual_address(gts->ts_gru, gts->ts_ctxnum), gts, in gru_assign_gru_context()
908 gts->ts_gru->gs_gid, gts->ts_ctxnum, in gru_assign_gru_context()
909 gts->ts_cbr_au_count, gts->ts_dsr_au_count); in gru_assign_gru_context()
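
Assignment scans the target blade for a GRU that passes the chiplet filter and still has enough CB/DS allocation units plus a free context slot; check_gru_resources() appears twice (lines 879 and 891), which suggests an unlocked scan followed by a re-check once the chosen GRU is locked. A skeleton from lines 865-909; the iterator name and the locking are assumptions:

        struct gru_state *grux, *gru = NULL;
        int i;

        /* Unlocked scan over the blade's GRUs for a viable candidate. */
        for_each_gru_on_blade(grux, blade_id, i) {      /* assumed iterator */
                if (!gru_check_chiplet_assignment(grux, gts))
                        continue;
                if (check_gru_resources(grux, gts->ts_cbr_au_count,
                                        gts->ts_dsr_au_count, GRU_NUM_CCH))
                        gru = grux;
        }

        /* Re-check under the GRU lock (locking not visible in the listing). */
        if (gru && check_gru_resources(gru, gts->ts_cbr_au_count,
                                       gts->ts_dsr_au_count, GRU_NUM_CCH)) {
                reserve_gru_resources(gru, gts);
                gts->ts_gru = gru;
                gts->ts_blade = gru->gs_blade_id;
                gts->ts_ctxnum = gru_assign_context_number(gru);
                refcount_inc(&gts->ts_refcnt);  /* gs_gts[] slot keeps a ref */
                gru->gs_gts[gts->ts_ctxnum] = gts;
        }
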
928 struct gru_thread_state *gts; in gru_fault() local
938 gts = gru_find_thread_state(vma, TSID(vaddr, vma)); in gru_fault()
939 if (!gts) in gru_fault()
943 mutex_lock(&gts->ts_ctxlock); in gru_fault()
946 if (gru_check_context_placement(gts)) { in gru_fault()
948 mutex_unlock(&gts->ts_ctxlock); in gru_fault()
949 gru_unload_context(gts, 1); in gru_fault()
953 if (!gts->ts_gru) { in gru_fault()
955 if (!gru_assign_gru_context(gts)) { in gru_fault()
957 mutex_unlock(&gts->ts_ctxlock); in gru_fault()
960 expires = gts->ts_steal_jiffies + GRU_STEAL_DELAY; in gru_fault()
962 gru_steal_context(gts); in gru_fault()
965 gru_load_context(gts); in gru_fault()
966 paddr = gseg_physical_address(gts->ts_gru, gts->ts_ctxnum); in gru_fault()
973 mutex_unlock(&gts->ts_ctxlock); in gru_fault()
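
gru_fault() ties the section together: look up the gts for the faulting gseg, verify placement, assign a hardware context if none is attached (stealing one once GRU_STEAL_DELAY has elapsed since the last steal), load it, and map the gseg's physical address into the user VMA so the zap in gru_unload_context() forces a refault next time. A flow skeleton from lines 928-973; the control flow between the visible lines, the return values, and the retry label are assumptions:

again:                                          /* assumed retry loop */
        gts = gru_find_thread_state(vma, TSID(vaddr, vma));
        if (!gts)
                return VM_FAULT_SIGBUS;         /* assumed error path */

        mutex_lock(&gts->ts_ctxlock);
        if (gru_check_context_placement(gts)) {
                /* Wrong blade/chiplet: unload; the next fault reassigns. */
                mutex_unlock(&gts->ts_ctxlock);
                gru_unload_context(gts, 1);
                return VM_FAULT_NOPAGE;         /* assumed */
        }

        if (!gts->ts_gru) {
                if (!gru_assign_gru_context(gts)) {
                        mutex_unlock(&gts->ts_ctxlock);
                        /* No free context: back off, then consider stealing. */
                        expires = gts->ts_steal_jiffies + GRU_STEAL_DELAY;
                        if (time_before(expires, jiffies))      /* assumed */
                                gru_steal_context(gts);
                        goto again;
                }
                gru_load_context(gts);
                paddr = gseg_physical_address(gts->ts_gru, gts->ts_ctxnum);
                /* map paddr into the faulting VMA here (not visible) */
        }
        mutex_unlock(&gts->ts_ctxlock);
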