Lines matching full:ref — identifier hits for struct i915_active *ref in drivers/gpu/drm/i915/i915_active.c

33 	struct i915_active *ref;  member
81 struct i915_active *ref = addr; in active_debug_hint() local
83 return (void *)ref->active ?: (void *)ref->retire ?: (void *)ref; in active_debug_hint()
91 static void debug_active_init(struct i915_active *ref) in debug_active_init() argument
93 debug_object_init(ref, &active_debug_desc); in debug_active_init()
96 static void debug_active_activate(struct i915_active *ref) in debug_active_activate() argument
98 lockdep_assert_held(&ref->tree_lock); in debug_active_activate()
99 if (!atomic_read(&ref->count)) /* before the first inc */ in debug_active_activate()
100 debug_object_activate(ref, &active_debug_desc); in debug_active_activate()
103 static void debug_active_deactivate(struct i915_active *ref) in debug_active_deactivate() argument
105 lockdep_assert_held(&ref->tree_lock); in debug_active_deactivate()
106 if (!atomic_read(&ref->count)) /* after the last dec */ in debug_active_deactivate()
107 debug_object_deactivate(ref, &active_debug_desc); in debug_active_deactivate()
110 static void debug_active_fini(struct i915_active *ref) in debug_active_fini() argument
112 debug_object_free(ref, &active_debug_desc); in debug_active_fini()
115 static void debug_active_assert(struct i915_active *ref) in debug_active_assert() argument
117 debug_object_assert_init(ref, &active_debug_desc); in debug_active_assert()
122 static inline void debug_active_init(struct i915_active *ref) { } in debug_active_init() argument
123 static inline void debug_active_activate(struct i915_active *ref) { } in debug_active_activate() argument
124 static inline void debug_active_deactivate(struct i915_active *ref) { } in debug_active_deactivate() argument
125 static inline void debug_active_fini(struct i915_active *ref) { } in debug_active_fini() argument
126 static inline void debug_active_assert(struct i915_active *ref) { } in debug_active_assert() argument
131 __active_retire(struct i915_active *ref) in __active_retire() argument
137 GEM_BUG_ON(i915_active_is_idle(ref)); in __active_retire()
140 if (!atomic_dec_and_lock_irqsave(&ref->count, &ref->tree_lock, flags)) in __active_retire()
143 GEM_BUG_ON(rcu_access_pointer(ref->excl.fence)); in __active_retire()
144 debug_active_deactivate(ref); in __active_retire()
147 if (!ref->cache) in __active_retire()
148 ref->cache = fetch_node(ref->tree.rb_node); in __active_retire()
151 if (ref->cache) { in __active_retire()
153 rb_erase(&ref->cache->node, &ref->tree); in __active_retire()
154 root = ref->tree; in __active_retire()
157 rb_link_node(&ref->cache->node, NULL, &ref->tree.rb_node); in __active_retire()
158 rb_insert_color(&ref->cache->node, &ref->tree); in __active_retire()
159 GEM_BUG_ON(ref->tree.rb_node != &ref->cache->node); in __active_retire()
163 ref->cache->timeline = 0; /* needs cmpxchg(u64) */ in __active_retire()
166 spin_unlock_irqrestore(&ref->tree_lock, flags); in __active_retire()
169 if (ref->retire) in __active_retire()
170 ref->retire(ref); in __active_retire()
173 wake_up_var(ref); in __active_retire()
185 struct i915_active *ref = container_of(wrk, typeof(*ref), work); in active_work() local
187 GEM_BUG_ON(!atomic_read(&ref->count)); in active_work()
188 if (atomic_add_unless(&ref->count, -1, 1)) in active_work()
191 __active_retire(ref); in active_work()
195 active_retire(struct i915_active *ref) in active_retire() argument
197 GEM_BUG_ON(!atomic_read(&ref->count)); in active_retire()
198 if (atomic_add_unless(&ref->count, -1, 1)) in active_retire()
201 if (ref->flags & I915_ACTIVE_RETIRE_SLEEPS) { in active_retire()
202 queue_work(system_unbound_wq, &ref->work); in active_retire()
206 __active_retire(ref); in active_retire()
228 active_retire(container_of(cb, struct active_node, base.cb)->ref); in node_retire()
238 static struct active_node *__active_lookup(struct i915_active *ref, u64 idx) in __active_lookup() argument
251 it = READ_ONCE(ref->cache); in __active_lookup()
278 GEM_BUG_ON(i915_active_is_idle(ref)); in __active_lookup()
280 it = fetch_node(ref->tree.rb_node); in __active_lookup()
287 WRITE_ONCE(ref->cache, it); in __active_lookup()
297 active_instance(struct i915_active *ref, u64 idx) in active_instance() argument
302 node = __active_lookup(ref, idx); in active_instance()
311 spin_lock_irq(&ref->tree_lock); in active_instance()
312 GEM_BUG_ON(i915_active_is_idle(ref)); in active_instance()
315 p = &ref->tree.rb_node; in active_instance()
333 node->ref = ref; in active_instance()
337 rb_insert_color(&node->node, &ref->tree); in active_instance()
340 WRITE_ONCE(ref->cache, node); in active_instance()
341 spin_unlock_irq(&ref->tree_lock); in active_instance()
346 void __i915_active_init(struct i915_active *ref, in __i915_active_init() argument
347 int (*active)(struct i915_active *ref), in __i915_active_init() argument
348 void (*retire)(struct i915_active *ref), in __i915_active_init() argument
354 debug_active_init(ref); in __i915_active_init()
356 ref->flags = 0; in __i915_active_init()
357 ref->active = active; in __i915_active_init()
358 ref->retire = ptr_unpack_bits(retire, &bits, 2); in __i915_active_init()
360 ref->flags |= I915_ACTIVE_RETIRE_SLEEPS; in __i915_active_init()
362 spin_lock_init(&ref->tree_lock); in __i915_active_init()
363 ref->tree = RB_ROOT; in __i915_active_init()
364 ref->cache = NULL; in __i915_active_init()
366 init_llist_head(&ref->preallocated_barriers); in __i915_active_init()
367 atomic_set(&ref->count, 0); in __i915_active_init()
368 __mutex_init(&ref->mutex, "i915_active", mkey); in __i915_active_init()
369 __i915_active_fence_init(&ref->excl, NULL, excl_retire); in __i915_active_init()
370 INIT_WORK(&ref->work, active_work); in __i915_active_init()
372 lockdep_init_map(&ref->work.lockdep_map, "i915_active.work", wkey, 0); in __i915_active_init()
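
The __i915_active_init() hits above cover the whole constructor. As a hedged illustration (not taken from the listing), a user would normally embed a struct i915_active and initialise it through the i915_active_init() wrapper macro from i915_active.h, which is assumed here to supply the two lock_class_keys that __i915_active_init() expects; the wrapper's exact arguments differ between kernel versions, so treat this as a sketch:

/* Sketch only: struct foo, foo_active() and foo_retire() are hypothetical. */
struct foo {
	struct i915_active active;
};

static int foo_active(struct i915_active *ref)
{
	/* first acquire: e.g. take a wakeref or pin backing storage */
	return 0;
}

static void foo_retire(struct i915_active *ref)
{
	/* last tracked fence has signalled: undo whatever foo_active() did */
}

static void foo_init(struct foo *foo)
{
	/* wrapper macro assumed; newer kernels also pass a flags argument */
	i915_active_init(&foo->active, foo_active, foo_retire);
}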
376 static bool ____active_del_barrier(struct i915_active *ref, in ____active_del_barrier() argument
419 __active_del_barrier(struct i915_active *ref, struct active_node *node) in __active_del_barrier() argument
421 return ____active_del_barrier(ref, node, barrier_to_engine(node)); in __active_del_barrier()
425 replace_barrier(struct i915_active *ref, struct i915_active_fence *active) in replace_barrier() argument
435 __active_del_barrier(ref, node_from_active(active)); in replace_barrier()
439 int i915_active_ref(struct i915_active *ref, u64 idx, struct dma_fence *fence) in i915_active_ref() argument
445 err = i915_active_acquire(ref); in i915_active_ref()
449 active = active_instance(ref, idx); in i915_active_ref()
455 if (replace_barrier(ref, active)) { in i915_active_ref()
457 atomic_dec(&ref->count); in i915_active_ref()
460 __i915_active_acquire(ref); in i915_active_ref()
463 i915_active_release(ref); in i915_active_ref()
468 __i915_active_set_fence(struct i915_active *ref, in __i915_active_set_fence() argument
474 if (replace_barrier(ref, active)) { in __i915_active_set_fence()
484 __i915_active_acquire(ref); in __i915_active_set_fence()
491 __active_fence(struct i915_active *ref, u64 idx) in __active_fence() argument
495 it = __active_lookup(ref, idx); in __active_fence()
497 spin_lock_irq(&ref->tree_lock); in __active_fence()
498 it = __active_lookup(ref, idx); in __active_fence()
499 spin_unlock_irq(&ref->tree_lock); in __active_fence()
507 __i915_active_ref(struct i915_active *ref, u64 idx, struct dma_fence *fence) in __i915_active_ref() argument
510 return __i915_active_set_fence(ref, __active_fence(ref, idx), fence); in __i915_active_ref()
514 i915_active_set_exclusive(struct i915_active *ref, struct dma_fence *f) in i915_active_set_exclusive() argument
517 return __i915_active_set_fence(ref, &ref->excl, f); in i915_active_set_exclusive()
520 bool i915_active_acquire_if_busy(struct i915_active *ref) in i915_active_acquire_if_busy() argument
522 debug_active_assert(ref); in i915_active_acquire_if_busy()
523 return atomic_add_unless(&ref->count, 1, 0); in i915_active_acquire_if_busy()
526 static void __i915_active_activate(struct i915_active *ref) in __i915_active_activate() argument
528 spin_lock_irq(&ref->tree_lock); /* __active_retire() */ in __i915_active_activate()
529 if (!atomic_fetch_inc(&ref->count)) in __i915_active_activate()
530 debug_active_activate(ref); in __i915_active_activate()
531 spin_unlock_irq(&ref->tree_lock); in __i915_active_activate()
534 int i915_active_acquire(struct i915_active *ref) in i915_active_acquire() argument
538 if (i915_active_acquire_if_busy(ref)) in i915_active_acquire()
541 if (!ref->active) { in i915_active_acquire()
542 __i915_active_activate(ref); in i915_active_acquire()
546 err = mutex_lock_interruptible(&ref->mutex); in i915_active_acquire()
550 if (likely(!i915_active_acquire_if_busy(ref))) { in i915_active_acquire()
551 err = ref->active(ref); in i915_active_acquire()
553 __i915_active_activate(ref); in i915_active_acquire()
556 mutex_unlock(&ref->mutex); in i915_active_acquire()
561 int i915_active_acquire_for_context(struct i915_active *ref, u64 idx) in i915_active_acquire_for_context() argument
566 err = i915_active_acquire(ref); in i915_active_acquire_for_context()
570 active = active_instance(ref, idx); in i915_active_acquire_for_context()
572 i915_active_release(ref); in i915_active_acquire_for_context()
576 return 0; /* return with active ref */ in i915_active_acquire_for_context()
579 void i915_active_release(struct i915_active *ref) in i915_active_release() argument
581 debug_active_assert(ref); in i915_active_release()
582 active_retire(ref); in i915_active_release()
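
i915_active_acquire()/i915_active_release() bracket every i915_active_ref() call, as the hits inside i915_active_ref() itself show above. A hedged sketch of the usual calling pattern, reusing the hypothetical struct foo from earlier:

/* Sketch only: track @fence under timeline index @tl until it signals. */
static int foo_track(struct foo *foo, u64 tl, struct dma_fence *fence)
{
	int err;

	err = i915_active_acquire(&foo->active);	/* may run foo_active() */
	if (err)
		return err;

	err = i915_active_ref(&foo->active, tl, fence);

	i915_active_release(&foo->active);		/* retires once idle */
	return err;
}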
615 static int flush_lazy_signals(struct i915_active *ref) in flush_lazy_signals() argument
620 enable_signaling(&ref->excl); in flush_lazy_signals()
621 rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) { in flush_lazy_signals()
632 int __i915_active_wait(struct i915_active *ref, int state) in __i915_active_wait() argument
638 if (!i915_active_acquire_if_busy(ref)) in __i915_active_wait()
642 err = flush_lazy_signals(ref); in __i915_active_wait()
643 i915_active_release(ref); in __i915_active_wait()
647 if (!i915_active_is_idle(ref) && in __i915_active_wait()
648 ___wait_var_event(ref, i915_active_is_idle(ref), in __i915_active_wait()
652 flush_work(&ref->work); in __i915_active_wait()
680 struct i915_active *ref; member
688 if (i915_active_is_idle(wb->ref)) { in barrier_wake()
697 static int __await_barrier(struct i915_active *ref, struct i915_sw_fence *fence) in __await_barrier() argument
705 GEM_BUG_ON(i915_active_is_idle(ref)); in __await_barrier()
714 wb->ref = ref; in __await_barrier()
716 add_wait_queue(__var_waitqueue(ref), &wb->base); in __await_barrier()
720 static int await_active(struct i915_active *ref, in await_active() argument
727 if (!i915_active_acquire_if_busy(ref)) in await_active()
731 rcu_access_pointer(ref->excl.fence)) { in await_active()
732 err = __await_active(&ref->excl, fn, arg); in await_active()
740 rbtree_postorder_for_each_entry_safe(it, n, &ref->tree, node) { in await_active()
748 err = flush_lazy_signals(ref); in await_active()
752 err = __await_barrier(ref, barrier); in await_active()
758 i915_active_release(ref); in await_active()
768 struct i915_active *ref, in i915_request_await_active() argument
771 return await_active(ref, flags, rq_await_fence, rq, &rq->submit); in i915_request_await_active()
781 struct i915_active *ref, in i915_sw_fence_await_active() argument
784 return await_active(ref, flags, sw_await_fence, fence, fence); in i915_sw_fence_await_active()
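
i915_request_await_active() and i915_sw_fence_await_active() are thin wrappers around await_active() with a request or sw_fence callback. A hedged example of ordering a new request behind a tracker; the I915_ACTIVE_AWAIT_* flag names are assumed from i915_active.h rather than shown in the listing:

/* Sketch only: make @rq wait on the exclusive fence and every timeline in @ref. */
static int foo_await(struct i915_request *rq, struct i915_active *ref)
{
	return i915_request_await_active(rq, ref,
					 I915_ACTIVE_AWAIT_EXCL |
					 I915_ACTIVE_AWAIT_ACTIVE);
}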
787 void i915_active_fini(struct i915_active *ref) in i915_active_fini() argument
789 debug_active_fini(ref); in i915_active_fini()
790 GEM_BUG_ON(atomic_read(&ref->count)); in i915_active_fini()
791 GEM_BUG_ON(work_pending(&ref->work)); in i915_active_fini()
792 mutex_destroy(&ref->mutex); in i915_active_fini()
794 if (ref->cache) in i915_active_fini()
795 kmem_cache_free(global.slab_cache, ref->cache); in i915_active_fini()
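
__i915_active_wait() plus i915_active_fini() form the teardown path: flush lazy signalling, wait for idle, then assert the tracker is unused. A hedged sketch, assuming the i915_active_wait() wrapper that passes TASK_INTERRUPTIBLE to __i915_active_wait():

/* Sketch only: drain and destroy the hypothetical foo tracker. */
static int foo_fini(struct foo *foo)
{
	int err;

	err = i915_active_wait(&foo->active);	/* wait for all tracked fences */
	if (err)
		return err;			/* interrupted */

	i915_active_fini(&foo->active);		/* requires count == 0, no pending work */
	return 0;
}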
803 static struct active_node *reuse_idle_barrier(struct i915_active *ref, u64 idx) in reuse_idle_barrier() argument
807 if (RB_EMPTY_ROOT(&ref->tree)) in reuse_idle_barrier()
810 GEM_BUG_ON(i915_active_is_idle(ref)); in reuse_idle_barrier()
819 if (ref->cache && is_idle_barrier(ref->cache, idx)) { in reuse_idle_barrier()
820 p = &ref->cache->node; in reuse_idle_barrier()
825 p = ref->tree.rb_node; in reuse_idle_barrier()
870 ____active_del_barrier(ref, node, engine)) in reuse_idle_barrier()
877 spin_lock_irq(&ref->tree_lock); in reuse_idle_barrier()
878 rb_erase(p, &ref->tree); /* Hide from waits and sibling allocations */ in reuse_idle_barrier()
879 if (p == &ref->cache->node) in reuse_idle_barrier()
880 WRITE_ONCE(ref->cache, NULL); in reuse_idle_barrier()
881 spin_unlock_irq(&ref->tree_lock); in reuse_idle_barrier()
886 int i915_active_acquire_preallocate_barrier(struct i915_active *ref, in i915_active_acquire_preallocate_barrier() argument
893 GEM_BUG_ON(i915_active_is_idle(ref)); in i915_active_acquire_preallocate_barrier()
896 while (!llist_empty(&ref->preallocated_barriers)) in i915_active_acquire_preallocate_barrier()
912 node = reuse_idle_barrier(ref, idx); in i915_active_acquire_preallocate_barrier()
922 node->ref = ref; in i915_active_acquire_preallocate_barrier()
937 __i915_active_acquire(ref); in i915_active_acquire_preallocate_barrier()
949 GEM_BUG_ON(!llist_empty(&ref->preallocated_barriers)); in i915_active_acquire_preallocate_barrier()
950 llist_add_batch(first, last, &ref->preallocated_barriers); in i915_active_acquire_preallocate_barrier()
960 atomic_dec(&ref->count); in i915_active_acquire_preallocate_barrier()
968 void i915_active_acquire_barrier(struct i915_active *ref) in i915_active_acquire_barrier() argument
973 GEM_BUG_ON(i915_active_is_idle(ref)); in i915_active_acquire_barrier()
981 llist_for_each_safe(pos, next, take_preallocated_barriers(ref)) { in i915_active_acquire_barrier()
986 spin_lock_irqsave_nested(&ref->tree_lock, flags, in i915_active_acquire_barrier()
989 p = &ref->tree.rb_node; in i915_active_acquire_barrier()
1002 rb_insert_color(&node->node, &ref->tree); in i915_active_acquire_barrier()
1003 spin_unlock_irqrestore(&ref->tree_lock, flags); in i915_active_acquire_barrier()
1128 struct kref ref; member
1131 struct i915_active *i915_active_get(struct i915_active *ref) in i915_active_get() argument
1133 struct auto_active *aa = container_of(ref, typeof(*aa), base); in i915_active_get()
1135 kref_get(&aa->ref); in i915_active_get()
1139 static void auto_release(struct kref *ref) in auto_release() argument
1141 struct auto_active *aa = container_of(ref, typeof(*aa), ref); in auto_release()
1147 void i915_active_put(struct i915_active *ref) in i915_active_put() argument
1149 struct auto_active *aa = container_of(ref, typeof(*aa), base); in i915_active_put()
1151 kref_put(&aa->ref, auto_release); in i915_active_put()
1154 static int auto_active(struct i915_active *ref) in auto_active() argument
1156 i915_active_get(ref); in auto_active()
1160 static void auto_retire(struct i915_active *ref) in auto_retire() argument
1162 i915_active_put(ref); in auto_retire()
1173 kref_init(&aa->ref); in i915_active_create()
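
The auto_active()/auto_retire() hits belong to the kref-managed variant: i915_active_create() allocates an i915_active whose callbacks take and drop a reference, so the object cannot be freed while it is still busy. A hedged usage sketch:

/* Sketch only: one-shot tracking with the heap-allocated, refcounted variant. */
static int foo_track_once(u64 idx, struct dma_fence *fence)
{
	struct i915_active *ref;
	int err;

	ref = i915_active_create();
	if (!ref)
		return -ENOMEM;

	err = i915_active_acquire(ref);
	if (!err) {
		err = i915_active_ref(ref, idx, fence);
		i915_active_release(ref);
	}

	i915_active_put(ref);	/* freed via auto_release() once idle and unreferenced */
	return err;
}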