
Searched refs:fence (Results 1 – 25 of 343) sorted by relevance


/linux-6.1.9/drivers/dma-buf/
dma-fence.c
114 static const char *dma_fence_stub_get_name(struct dma_fence *fence) in dma_fence_stub_get_name() argument
158 struct dma_fence *fence; in dma_fence_allocate_private_stub() local
160 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in dma_fence_allocate_private_stub()
161 if (fence == NULL) in dma_fence_allocate_private_stub()
164 dma_fence_init(fence, in dma_fence_allocate_private_stub()
172 dma_fence_signal(fence); in dma_fence_allocate_private_stub()
174 return fence; in dma_fence_allocate_private_stub()
366 int dma_fence_signal_timestamp_locked(struct dma_fence *fence, in dma_fence_signal_timestamp_locked() argument
372 lockdep_assert_held(fence->lock); in dma_fence_signal_timestamp_locked()
375 &fence->flags))) in dma_fence_signal_timestamp_locked()
[all …]
sync_file.c
65 struct sync_file *sync_file_create(struct dma_fence *fence) in sync_file_create() argument
73 sync_file->fence = dma_fence_get(fence); in sync_file_create()
106 struct dma_fence *fence; in sync_file_get_fence() local
112 fence = dma_fence_get(sync_file->fence); in sync_file_get_fence()
115 return fence; in sync_file_get_fence()
137 struct dma_fence *fence = sync_file->fence; in sync_file_get_name() local
140 fence->ops->get_driver_name(fence), in sync_file_get_name()
141 fence->ops->get_timeline_name(fence), in sync_file_get_name()
142 fence->context, in sync_file_get_name()
143 fence->seqno); in sync_file_get_name()
[all …]
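
Editorial note: the excerpt above only shows fragments of sync_file.c. As a hedged sketch of how sync_file_create() is commonly used to export a dma_fence to userspace, under the assumption that the helper name export_fence_to_fd() is hypothetical while sync_file_create(), get_unused_fd_flags(), put_unused_fd() and fd_install() are the real kernel interfaces:

#include <linux/dma-fence.h>
#include <linux/fcntl.h>
#include <linux/file.h>
#include <linux/sync_file.h>

/* Hypothetical helper: wrap @fence in a sync_file and hand userspace an fd. */
static int export_fence_to_fd(struct dma_fence *fence)
{
	struct sync_file *sync_file;
	int fd;

	fd = get_unused_fd_flags(O_CLOEXEC);
	if (fd < 0)
		return fd;

	/* sync_file_create() takes its own reference on the fence (see line 73 above). */
	sync_file = sync_file_create(fence);
	if (!sync_file) {
		put_unused_fd(fd);
		return -ENOMEM;
	}

	fd_install(fd, sync_file->file);
	return fd;
}

Userspace can later poll the returned fd or pass it back in, where sync_file_get_fence() recovers the dma_fence, as in the excerpt above.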
dma-resv.c
71 struct dma_resv *resv, struct dma_fence **fence, in dma_resv_list_entry() argument
78 *fence = (struct dma_fence *)(tmp & ~DMA_RESV_LIST_MASK); in dma_resv_list_entry()
86 struct dma_fence *fence, in dma_resv_list_set() argument
89 long tmp = ((long)fence) | usage; in dma_resv_list_set()
121 struct dma_fence *fence; in dma_resv_list_free() local
123 dma_resv_list_entry(list, i, NULL, &fence, NULL); in dma_resv_list_free()
124 dma_fence_put(fence); in dma_resv_list_free()
205 struct dma_fence *fence; in dma_resv_reserve_fences() local
207 dma_resv_list_entry(old, i, obj, &fence, &usage); in dma_resv_reserve_fences()
208 if (dma_fence_is_signaled(fence)) in dma_resv_reserve_fences()
[all …]
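
Editorial note: dma-resv objects collect per-buffer fences like the ones listed above. A minimal sketch of draining them with the iterator API, assuming the helper name wait_resv_idle() is made up while dma_resv_iter_begin()/dma_resv_for_each_fence_unlocked()/dma_resv_iter_end() and DMA_RESV_USAGE_READ are the 6.1 interfaces:

#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/* Hypothetical helper: wait for the kernel/write/read fences attached to @resv. */
static int wait_resv_idle(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;
	long ret = 0;

	dma_resv_iter_begin(&cursor, resv, DMA_RESV_USAGE_READ);
	dma_resv_for_each_fence_unlocked(&cursor, fence) {
		/* Interruptible wait; returns 0 when signaled or -ERESTARTSYS. */
		ret = dma_fence_wait(fence, true);
		if (ret < 0)
			break;
	}
	dma_resv_iter_end(&cursor);

	return ret < 0 ? ret : 0;
}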
dma-fence-chain.c
12 static bool dma_fence_chain_enable_signaling(struct dma_fence *fence);
39 struct dma_fence *dma_fence_chain_walk(struct dma_fence *fence) in dma_fence_chain_walk() argument
44 chain = to_dma_fence_chain(fence); in dma_fence_chain_walk()
46 dma_fence_put(fence); in dma_fence_chain_walk()
54 if (!dma_fence_is_signaled(prev_chain->fence)) in dma_fence_chain_walk()
74 dma_fence_put(fence); in dma_fence_chain_walk()
112 static const char *dma_fence_chain_get_driver_name(struct dma_fence *fence) in dma_fence_chain_get_driver_name() argument
117 static const char *dma_fence_chain_get_timeline_name(struct dma_fence *fence) in dma_fence_chain_get_timeline_name() argument
145 static bool dma_fence_chain_enable_signaling(struct dma_fence *fence) in dma_fence_chain_enable_signaling() argument
147 struct dma_fence_chain *head = to_dma_fence_chain(fence); in dma_fence_chain_enable_signaling()
[all …]
st-dma-fence-chain.c
62 struct dma_fence *fence, in mock_chain() argument
71 dma_fence_chain_init(f, dma_fence_get(prev), dma_fence_get(fence), in mock_chain()
184 struct dma_fence *fence; in find_seqno() local
192 fence = dma_fence_get(fc.tail); in find_seqno()
193 err = dma_fence_chain_find_seqno(&fence, 0); in find_seqno()
194 dma_fence_put(fence); in find_seqno()
201 fence = dma_fence_get(fc.tail); in find_seqno()
202 err = dma_fence_chain_find_seqno(&fence, i + 1); in find_seqno()
203 dma_fence_put(fence); in find_seqno()
209 if (fence != fc.chains[i]) { in find_seqno()
[all …]
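
Editorial note: the chain-walk code and the find_seqno selftest above are easier to follow next to a small usage sketch. Assuming the helper names append_to_timeline() and lookup_point() are hypothetical, while dma_fence_chain_alloc(), dma_fence_chain_init() and dma_fence_chain_find_seqno() are the kernel APIs excerpted above:

#include <linux/dma-fence.h>
#include <linux/dma-fence-chain.h>

/* Hypothetical helper: link @fence into the chain headed by *@timeline as point @seqno. */
static int append_to_timeline(struct dma_fence **timeline,
			      struct dma_fence *fence, u64 seqno)
{
	struct dma_fence_chain *chain = dma_fence_chain_alloc();

	if (!chain)
		return -ENOMEM;

	/* dma_fence_chain_init() consumes references to both the previous head and the fence. */
	dma_fence_chain_init(chain, *timeline, dma_fence_get(fence), seqno);
	*timeline = &chain->base;
	return 0;
}

/* Hypothetical helper: find the fence gating point @seqno on @timeline.
 * Returns NULL if the point is not on the timeline yet or has already signaled. */
static struct dma_fence *lookup_point(struct dma_fence *timeline, u64 seqno)
{
	struct dma_fence *fence = dma_fence_get(timeline);

	if (dma_fence_chain_find_seqno(&fence, seqno) < 0) {
		dma_fence_put(fence);
		return NULL;
	}
	return fence;
}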
/linux-6.1.9/drivers/gpu/drm/i915/
i915_sw_fence.c
49 static inline void debug_fence_init(struct i915_sw_fence *fence) in debug_fence_init() argument
51 debug_object_init(fence, &i915_sw_fence_debug_descr); in debug_fence_init()
54 static inline void debug_fence_init_onstack(struct i915_sw_fence *fence) in debug_fence_init_onstack() argument
56 debug_object_init_on_stack(fence, &i915_sw_fence_debug_descr); in debug_fence_init_onstack()
59 static inline void debug_fence_activate(struct i915_sw_fence *fence) in debug_fence_activate() argument
61 debug_object_activate(fence, &i915_sw_fence_debug_descr); in debug_fence_activate()
64 static inline void debug_fence_set_state(struct i915_sw_fence *fence, in debug_fence_set_state() argument
67 debug_object_active_state(fence, &i915_sw_fence_debug_descr, old, new); in debug_fence_set_state()
70 static inline void debug_fence_deactivate(struct i915_sw_fence *fence) in debug_fence_deactivate() argument
72 debug_object_deactivate(fence, &i915_sw_fence_debug_descr); in debug_fence_deactivate()
[all …]
i915_sw_fence.h
42 void __i915_sw_fence_init(struct i915_sw_fence *fence,
47 #define i915_sw_fence_init(fence, fn) \ argument
52 __i915_sw_fence_init((fence), (fn), #fence, &__key); \
55 #define i915_sw_fence_init(fence, fn) \ argument
58 __i915_sw_fence_init((fence), (fn), NULL, NULL); \
62 void i915_sw_fence_reinit(struct i915_sw_fence *fence);
65 void i915_sw_fence_fini(struct i915_sw_fence *fence);
67 static inline void i915_sw_fence_fini(struct i915_sw_fence *fence) {} in i915_sw_fence_fini() argument
70 void i915_sw_fence_commit(struct i915_sw_fence *fence);
72 int i915_sw_fence_await_sw_fence(struct i915_sw_fence *fence,
[all …]
i915_request.c
54 struct i915_sw_fence *fence; member
61 static const char *i915_fence_get_driver_name(struct dma_fence *fence) in i915_fence_get_driver_name() argument
63 return dev_name(to_request(fence)->i915->drm.dev); in i915_fence_get_driver_name()
66 static const char *i915_fence_get_timeline_name(struct dma_fence *fence) in i915_fence_get_timeline_name() argument
79 if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) in i915_fence_get_timeline_name()
82 ctx = i915_request_gem_context(to_request(fence)); in i915_fence_get_timeline_name()
89 static bool i915_fence_signaled(struct dma_fence *fence) in i915_fence_signaled() argument
91 return i915_request_completed(to_request(fence)); in i915_fence_signaled()
94 static bool i915_fence_enable_signaling(struct dma_fence *fence) in i915_fence_enable_signaling() argument
96 return i915_request_enable_breadcrumb(to_request(fence)); in i915_fence_enable_signaling()
[all …]
/linux-6.1.9/include/linux/
dma-fence.h
105 typedef void (*dma_fence_func_t)(struct dma_fence *fence,
143 const char * (*get_driver_name)(struct dma_fence *fence);
155 const char * (*get_timeline_name)(struct dma_fence *fence);
192 bool (*enable_signaling)(struct dma_fence *fence);
209 bool (*signaled)(struct dma_fence *fence);
227 signed long (*wait)(struct dma_fence *fence,
238 void (*release)(struct dma_fence *fence);
248 void (*fence_value_str)(struct dma_fence *fence, char *str, int size);
258 void (*timeline_value_str)(struct dma_fence *fence,
262 void dma_fence_init(struct dma_fence *fence, const struct dma_fence_ops *ops,
[all …]
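
Editorial note: the callbacks excerpted above form the driver-facing dma_fence_ops contract; only get_driver_name and get_timeline_name are mandatory. A minimal, hedged sketch of a driver-private fence, where my_fence, my_fence_ops, "my-driver" and "my-timeline" are made-up names and dma_fence_init() is the declaration excerpted above:

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct my_fence {
	struct dma_fence base;
	spinlock_t lock;
};

static const char *my_fence_get_driver_name(struct dma_fence *fence)
{
	return "my-driver";
}

static const char *my_fence_get_timeline_name(struct dma_fence *fence)
{
	return "my-timeline";
}

static const struct dma_fence_ops my_fence_ops = {
	.get_driver_name = my_fence_get_driver_name,
	.get_timeline_name = my_fence_get_timeline_name,
	/* .enable_signaling, .signaled, .wait and .release are optional. */
};

/* Hypothetical constructor: allocate and initialise one fence on a timeline. */
static struct dma_fence *my_fence_create(u64 context, u64 seqno)
{
	struct my_fence *f = kzalloc(sizeof(*f), GFP_KERNEL);

	if (!f)
		return NULL;

	spin_lock_init(&f->lock);
	dma_fence_init(&f->base, &my_fence_ops, &f->lock, context, seqno);
	return &f->base;
}

A driver would typically allocate the context once with dma_fence_context_alloc(1) and increment seqno per submission, signalling each fence with dma_fence_signal() when the work completes, as the stub and driver excerpts in this listing do.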
/linux-6.1.9/drivers/gpu/drm/i915/gt/
intel_ggtt_fencing.c
48 static struct drm_i915_private *fence_to_i915(struct i915_fence_reg *fence) in fence_to_i915() argument
50 return fence->ggtt->vm.i915; in fence_to_i915()
53 static struct intel_uncore *fence_to_uncore(struct i915_fence_reg *fence) in fence_to_uncore() argument
55 return fence->ggtt->vm.gt->uncore; in fence_to_uncore()
58 static void i965_write_fence_reg(struct i915_fence_reg *fence) in i965_write_fence_reg() argument
64 if (GRAPHICS_VER(fence_to_i915(fence)) >= 6) { in i965_write_fence_reg()
65 fence_reg_lo = FENCE_REG_GEN6_LO(fence->id); in i965_write_fence_reg()
66 fence_reg_hi = FENCE_REG_GEN6_HI(fence->id); in i965_write_fence_reg()
70 fence_reg_lo = FENCE_REG_965_LO(fence->id); in i965_write_fence_reg()
71 fence_reg_hi = FENCE_REG_965_HI(fence->id); in i965_write_fence_reg()
[all …]
/linux-6.1.9/drivers/gpu/drm/nouveau/
nouveau_fence.c
44 from_fence(struct dma_fence *fence) in from_fence() argument
46 return container_of(fence, struct nouveau_fence, base); in from_fence()
50 nouveau_fctx(struct nouveau_fence *fence) in nouveau_fctx() argument
52 return container_of(fence->base.lock, struct nouveau_fence_chan, lock); in nouveau_fctx()
56 nouveau_fence_signal(struct nouveau_fence *fence) in nouveau_fence_signal() argument
60 dma_fence_signal_locked(&fence->base); in nouveau_fence_signal()
61 list_del(&fence->head); in nouveau_fence_signal()
62 rcu_assign_pointer(fence->channel, NULL); in nouveau_fence_signal()
64 if (test_bit(DMA_FENCE_FLAG_USER_BITS, &fence->base.flags)) { in nouveau_fence_signal()
65 struct nouveau_fence_chan *fctx = nouveau_fctx(fence); in nouveau_fence_signal()
[all …]
/linux-6.1.9/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_fence.c
67 struct amdgpu_amdkfd_fence *fence; in amdgpu_amdkfd_fence_create() local
69 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in amdgpu_amdkfd_fence_create()
70 if (fence == NULL) in amdgpu_amdkfd_fence_create()
75 fence->mm = mm; in amdgpu_amdkfd_fence_create()
76 get_task_comm(fence->timeline_name, current); in amdgpu_amdkfd_fence_create()
77 spin_lock_init(&fence->lock); in amdgpu_amdkfd_fence_create()
78 fence->svm_bo = svm_bo; in amdgpu_amdkfd_fence_create()
79 dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock, in amdgpu_amdkfd_fence_create()
82 return fence; in amdgpu_amdkfd_fence_create()
87 struct amdgpu_amdkfd_fence *fence; in to_amdgpu_amdkfd_fence() local
[all …]
/linux-6.1.9/drivers/gpu/drm/vgem/
vgem_fence.c
38 static const char *vgem_fence_get_driver_name(struct dma_fence *fence) in vgem_fence_get_driver_name() argument
43 static const char *vgem_fence_get_timeline_name(struct dma_fence *fence) in vgem_fence_get_timeline_name() argument
50 struct vgem_fence *fence = container_of(base, typeof(*fence), base); in vgem_fence_release() local
52 del_timer_sync(&fence->timer); in vgem_fence_release()
53 dma_fence_free(&fence->base); in vgem_fence_release()
56 static void vgem_fence_value_str(struct dma_fence *fence, char *str, int size) in vgem_fence_value_str() argument
58 snprintf(str, size, "%llu", fence->seqno); in vgem_fence_value_str()
61 static void vgem_fence_timeline_value_str(struct dma_fence *fence, char *str, in vgem_fence_timeline_value_str() argument
65 dma_fence_is_signaled(fence) ? fence->seqno : 0); in vgem_fence_timeline_value_str()
79 struct vgem_fence *fence = from_timer(fence, t, timer); in vgem_fence_timeout() local
[all …]
/linux-6.1.9/include/trace/events/
dma_fence.h
14 TP_PROTO(struct dma_fence *fence),
16 TP_ARGS(fence),
19 __string(driver, fence->ops->get_driver_name(fence))
20 __string(timeline, fence->ops->get_timeline_name(fence))
26 __assign_str(driver, fence->ops->get_driver_name(fence));
27 __assign_str(timeline, fence->ops->get_timeline_name(fence));
28 __entry->context = fence->context;
29 __entry->seqno = fence->seqno;
39 TP_PROTO(struct dma_fence *fence),
41 TP_ARGS(fence)
[all …]
/linux-6.1.9/drivers/gpu/drm/scheduler/
sched_fence.c
51 void drm_sched_fence_scheduled(struct drm_sched_fence *fence) in drm_sched_fence_scheduled() argument
53 dma_fence_signal(&fence->scheduled); in drm_sched_fence_scheduled()
56 void drm_sched_fence_finished(struct drm_sched_fence *fence) in drm_sched_fence_finished() argument
58 dma_fence_signal(&fence->finished); in drm_sched_fence_finished()
61 static const char *drm_sched_fence_get_driver_name(struct dma_fence *fence) in drm_sched_fence_get_driver_name() argument
68 struct drm_sched_fence *fence = to_drm_sched_fence(f); in drm_sched_fence_get_timeline_name() local
69 return (const char *)fence->sched->name; in drm_sched_fence_get_timeline_name()
75 struct drm_sched_fence *fence = to_drm_sched_fence(f); in drm_sched_fence_free_rcu() local
77 if (!WARN_ON_ONCE(!fence)) in drm_sched_fence_free_rcu()
78 kmem_cache_free(sched_fence_slab, fence); in drm_sched_fence_free_rcu()
[all …]
gpu_scheduler_trace.h
40 __field(struct dma_fence *, fence)
50 __entry->fence = &sched_job->s_fence->finished;
58 __entry->fence, __get_str(name),
73 TP_PROTO(struct drm_sched_fence *fence),
74 TP_ARGS(fence),
76 __field(struct dma_fence *, fence)
80 __entry->fence = &fence->finished;
82 TP_printk("fence=%p signaled", __entry->fence)
86 TP_PROTO(struct drm_sched_job *sched_job, struct dma_fence *fence),
87 TP_ARGS(sched_job, fence),
[all …]
/linux-6.1.9/drivers/gpu/drm/i915/selftests/
lib_sw_fence.c
30 nop_fence_notify(struct i915_sw_fence *fence, enum i915_sw_fence_notify state) in nop_fence_notify() argument
35 void __onstack_fence_init(struct i915_sw_fence *fence, in __onstack_fence_init() argument
39 debug_fence_init_onstack(fence); in __onstack_fence_init()
41 __init_waitqueue_head(&fence->wait, name, key); in __onstack_fence_init()
42 atomic_set(&fence->pending, 1); in __onstack_fence_init()
43 fence->error = 0; in __onstack_fence_init()
44 fence->fn = nop_fence_notify; in __onstack_fence_init()
47 void onstack_fence_fini(struct i915_sw_fence *fence) in onstack_fence_fini() argument
49 if (!fence->fn) in onstack_fence_fini()
52 i915_sw_fence_commit(fence); in onstack_fence_fini()
[all …]
/linux-6.1.9/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
52 struct vmw_fence_obj fence; member
72 struct vmw_fence_obj *fence; member
80 fman_from_fence(struct vmw_fence_obj *fence) in fman_from_fence() argument
82 return container_of(fence->base.lock, struct vmw_fence_manager, lock); in fman_from_fence()
125 struct vmw_fence_obj *fence = in vmw_fence_obj_destroy() local
128 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_obj_destroy()
131 list_del_init(&fence->head); in vmw_fence_obj_destroy()
134 fence->destroy(fence); in vmw_fence_obj_destroy()
149 struct vmw_fence_obj *fence = in vmw_fence_enable_signaling() local
152 struct vmw_fence_manager *fman = fman_from_fence(fence); in vmw_fence_enable_signaling()
[all …]
/linux-6.1.9/drivers/gpu/drm/i915/gem/
i915_gem_wait.c
18 i915_gem_object_wait_fence(struct dma_fence *fence, in i915_gem_object_wait_fence() argument
24 if (test_bit(DMA_FENCE_FLAG_SIGNALED_BIT, &fence->flags)) in i915_gem_object_wait_fence()
27 if (dma_fence_is_i915(fence)) in i915_gem_object_wait_fence()
28 return i915_request_wait_timeout(to_request(fence), flags, timeout); in i915_gem_object_wait_fence()
30 return dma_fence_wait_timeout(fence, in i915_gem_object_wait_fence()
39 struct dma_fence *fence; in i915_gem_object_boost() local
59 dma_resv_for_each_fence_unlocked(&cursor, fence) in i915_gem_object_boost()
60 if (dma_fence_is_i915(fence) && in i915_gem_object_boost()
61 !i915_request_started(to_request(fence))) in i915_gem_object_boost()
62 intel_rps_boost(to_request(fence)); in i915_gem_object_boost()
[all …]
/linux-6.1.9/drivers/gpu/drm/radeon/
radeon_fence.c
135 struct radeon_fence **fence, in radeon_fence_emit() argument
141 *fence = kmalloc(sizeof(struct radeon_fence), GFP_KERNEL); in radeon_fence_emit()
142 if ((*fence) == NULL) { in radeon_fence_emit()
145 (*fence)->rdev = rdev; in radeon_fence_emit()
146 (*fence)->seq = seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
147 (*fence)->ring = ring; in radeon_fence_emit()
148 (*fence)->is_vm_update = false; in radeon_fence_emit()
149 dma_fence_init(&(*fence)->base, &radeon_fence_ops, in radeon_fence_emit()
153 radeon_fence_ring_emit(rdev, ring, *fence); in radeon_fence_emit()
154 trace_radeon_fence_emit(rdev->ddev, ring, (*fence)->seq); in radeon_fence_emit()
[all …]
radeon_sync.c
63 struct radeon_fence *fence) in radeon_sync_fence() argument
67 if (!fence) in radeon_sync_fence()
70 other = sync->sync_to[fence->ring]; in radeon_sync_fence()
71 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence()
73 if (fence->is_vm_update) { in radeon_sync_fence()
75 sync->last_vm_update = radeon_fence_later(fence, other); in radeon_sync_fence()
95 struct radeon_fence *fence; in radeon_sync_resv() local
100 fence = to_radeon_fence(f); in radeon_sync_resv()
101 if (fence && fence->rdev == rdev) in radeon_sync_resv()
102 radeon_sync_fence(sync, fence); in radeon_sync_resv()
[all …]
/linux-6.1.9/drivers/gpu/drm/
drm_syncobj.c
207 struct dma_fence *fence; member
244 struct dma_fence *fence; in drm_syncobj_fence_add_wait() local
246 if (wait->fence) in drm_syncobj_fence_add_wait()
254 fence = dma_fence_get(rcu_dereference_protected(syncobj->fence, 1)); in drm_syncobj_fence_add_wait()
255 if (!fence || dma_fence_chain_find_seqno(&fence, wait->point)) { in drm_syncobj_fence_add_wait()
256 dma_fence_put(fence); in drm_syncobj_fence_add_wait()
258 } else if (!fence) { in drm_syncobj_fence_add_wait()
259 wait->fence = dma_fence_get_stub(); in drm_syncobj_fence_add_wait()
261 wait->fence = fence; in drm_syncobj_fence_add_wait()
288 struct dma_fence *fence, in drm_syncobj_add_point() argument
[all …]
/linux-6.1.9/drivers/gpu/drm/v3d/
v3d_fence.c
8 struct v3d_fence *fence; in v3d_fence_create() local
10 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in v3d_fence_create()
11 if (!fence) in v3d_fence_create()
14 fence->dev = &v3d->drm; in v3d_fence_create()
15 fence->queue = queue; in v3d_fence_create()
16 fence->seqno = ++v3d->queue[queue].emit_seqno; in v3d_fence_create()
17 dma_fence_init(&fence->base, &v3d_fence_ops, &v3d->job_lock, in v3d_fence_create()
18 v3d->queue[queue].fence_context, fence->seqno); in v3d_fence_create()
20 return &fence->base; in v3d_fence_create()
23 static const char *v3d_fence_get_driver_name(struct dma_fence *fence) in v3d_fence_get_driver_name() argument
[all …]
/linux-6.1.9/tools/testing/selftests/sync/
sync_stress_consumer.c
40 static int busy_wait_on_fence(int fence) in busy_wait_on_fence() argument
45 error = sync_fence_count_with_status(fence, FENCE_STATUS_ERROR); in busy_wait_on_fence()
47 active = sync_fence_count_with_status(fence, in busy_wait_on_fence()
66 int fence, valid, i; in mpsc_producer_thread() local
72 fence = sw_sync_fence_create(consumer_timeline, "fence", i); in mpsc_producer_thread()
73 valid = sw_sync_fence_is_valid(fence); in mpsc_producer_thread()
82 ASSERT(sync_wait(fence, -1) > 0, in mpsc_producer_thread()
85 ASSERT(busy_wait_on_fence(fence) == 0, in mpsc_producer_thread()
100 sw_sync_fence_destroy(fence); in mpsc_producer_thread()
108 int fence, merged, tmp, valid, it, i; in mpcs_consumer_thread() local
[all …]
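
Editorial note: this is the userspace side of the sync machinery, exercised through the selftest suite's own sw_sync/sync helpers (tools/testing/selftests/sync/sw_sync.h and sync.h). A hedged, stripped-down sketch of the same pattern the stress test uses, under the assumption that those helpers behave as in the excerpt above:

#include <assert.h>

#include "sw_sync.h"	/* helpers local to tools/testing/selftests/sync */
#include "sync.h"

int main(void)
{
	int timeline, fence;

	timeline = sw_sync_timeline_create();
	assert(timeline >= 0);

	/* Fence that signals once the timeline reaches value 1. */
	fence = sw_sync_fence_create(timeline, "fence", 1);
	assert(sw_sync_fence_is_valid(fence));

	sw_sync_timeline_inc(timeline, 1);	/* advance the timeline, signalling the fence */
	assert(sync_wait(fence, -1) > 0);	/* > 0 means the fence signaled */

	sw_sync_fence_destroy(fence);
	sw_sync_timeline_destroy(timeline);
	return 0;
}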
/linux-6.1.9/drivers/gpu/drm/virtio/
virtgpu_fence.c
60 struct virtio_gpu_fence *fence = to_virtio_gpu_fence(f); in virtio_gpu_timeline_value_str() local
63 (u64)atomic64_read(&fence->drv->last_fence_id)); in virtio_gpu_timeline_value_str()
80 struct virtio_gpu_fence *fence = kzalloc(sizeof(struct virtio_gpu_fence), in virtio_gpu_fence_alloc() local
83 if (!fence) in virtio_gpu_fence_alloc()
84 return fence; in virtio_gpu_fence_alloc()
86 fence->drv = drv; in virtio_gpu_fence_alloc()
87 fence->ring_idx = ring_idx; in virtio_gpu_fence_alloc()
88 fence->emit_fence_info = !(base_fence_ctx == drv->context); in virtio_gpu_fence_alloc()
95 dma_fence_init(&fence->f, &virtio_gpu_fence_ops, &drv->lock, in virtio_gpu_fence_alloc()
98 return fence; in virtio_gpu_fence_alloc()
[all …]
