Lines Matching defs:active_node

Each entry below pairs a source line number with the matching line of code; together they cover the definitions and uses of struct active_node in the i915 active-tracking code.

26 struct active_node {
33 #define fetch_node(x) rb_entry(READ_ONCE(x), typeof(struct active_node), node)
35 static inline struct active_node *
38 return container_of(active, struct active_node, base);
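
The fragments above already imply the node's layout: an rb_node used by the rb_entry() matches, an embedded i915_active_fence named base (the container_of() at line 38, and base.cb.node further down), a back pointer ref consumed by the retire callback at line 226, and a u64 key suggested by the idx parameters of __active_lookup() and is_idle_barrier(). A minimal sketch of that implied layout; the field order and the name "timeline" are assumptions, not verbatim source:

struct active_node {
	struct rb_node node;           /* linkage in the owner's rbtree, see the rb_entry() matches */
	struct i915_active_fence base; /* embedded fence + callback, see container_of(..., base) */
	struct i915_active *ref;       /* back pointer used by the retire callback at line 226 */
	u64 timeline;                  /* lookup key, inferred from the u64 idx parameters */
};
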
48 static inline struct llist_node *barrier_to_ll(struct active_node *node)
55 __barrier_to_engine(struct active_node *node)
61 barrier_to_engine(struct active_node *node)
67 static inline struct active_node *barrier_from_ll(struct llist_node *x)
70 struct active_node, base.cb.node);
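
Lines 48-70 are the conversions between a barrier node and its llist linkage: barrier_to_ll() hands out the address of base.cb.node as a struct llist_node, and barrier_from_ll() reverses that with container_of(). A hedged sketch of the round trip, assuming base.cb is a struct dma_fence_cb whose node member is a struct list_head (which is what makes the casts necessary); the GEM_BUG_ON checks and the engine pointer recovered by __barrier_to_engine() are left out:

static inline struct llist_node *sketch_barrier_to_ll(struct active_node *node)
{
	/* Reuse the callback's list_head storage as a single-linked llist node. */
	return (struct llist_node *)&node->base.cb.node;
}

static inline struct active_node *sketch_barrier_from_ll(struct llist_node *x)
{
	/* Undo the cast above and step back out to the containing node. */
	return container_of((struct list_head *)x,
			    struct active_node, base.cb.node);
}
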
129 struct active_node *it, *n;
226 active_retire(container_of(cb, struct active_node, base.cb)->ref);
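
Line 226 is the dma_fence callback path: the callback only receives its struct dma_fence_cb pointer, and container_of() steps back out through base.cb to the enclosing active_node so its i915_active can be retired. A minimal, self-contained sketch of that pattern using the standard dma_fence API; the struct and function names here are illustrative, not the file's own:

#include <linux/kernel.h>
#include <linux/dma-fence.h>

struct example_node {
	struct dma_fence_cb cb; /* embedded callback, like base.cb above */
	void *owner;            /* stand-in for the ref back pointer */
};

static void example_retire(struct dma_fence *fence, struct dma_fence_cb *cb)
{
	/* Recover the enclosing node from the embedded callback, as line 226 does. */
	struct example_node *node = container_of(cb, struct example_node, cb);

	/* ... retire node->owner here ... */
	(void)node;
}

static int example_track(struct dma_fence *fence, struct example_node *node)
{
	/* dma_fence_add_callback() runs example_retire() on signal, or returns
	 * -ENOENT if the fence has already signalled. */
	return dma_fence_add_callback(fence, &node->cb, example_retire);
}
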
236 static struct active_node *__active_lookup(struct i915_active *ref, u64 idx)
238 struct active_node *it;
295 struct active_node *node;
310 node = rb_entry(parent, struct active_node, node);
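
Lines 236-310 are the rbtree side: __active_lookup() walks the owner's tree for the node whose key matches idx, and the rb_entry() at line 310 is the same descent used during insertion. A generic sketch of that lookup shape with the stock <linux/rbtree.h> API, assuming the owner keeps its nodes in a struct rb_root keyed by the u64 timeline field sketched earlier; any caching the real lookup does on top of this is omitted:

#include <linux/rbtree.h>

static struct active_node *sketch_lookup(struct rb_root *tree, u64 idx)
{
	struct rb_node *p = tree->rb_node;

	while (p) {
		struct active_node *it = rb_entry(p, struct active_node, node);

		if (it->timeline == idx)
			return it;

		if (it->timeline < idx)
			p = p->rb_right;
		else
			p = p->rb_left;
	}

	return NULL;
}
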
378 struct active_node *node,
394 * the active_node and the caller will just shrug and know not to
420 __active_del_barrier(struct i915_active *ref, struct active_node *node)
581 static int flush_barrier(struct active_node *it)
598 struct active_node *it, *n;
721 struct active_node *it, *n;
785 static inline bool is_idle_barrier(struct active_node *node, u64 idx)
790 static struct active_node *reuse_idle_barrier(struct i915_active *ref, u64 idx)
814 struct active_node *node =
815 rb_entry(p, struct active_node, node);
834 struct active_node *node =
835 rb_entry(p, struct active_node, node);
870 return rb_entry(p, struct active_node, node);
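
Lines 785-870 cover barrier reuse: is_idle_barrier() decides whether a node parked on a timeline can be recycled, and reuse_idle_barrier() walks the tree (the rb_entry() pairs at 814/815 and 834/835) before handing the chosen node back at line 870. A sketch of what the predicate plausibly checks, assuming the fields sketched above and the i915_active_fence_isset() helper from the i915_active fence wrappers:

static inline bool sketch_is_idle_barrier(struct active_node *node, u64 idx)
{
	/* Idle for this timeline: keyed to idx and no fence currently installed. */
	return node->timeline == idx && !i915_active_fence_isset(&node->base);
}
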
896 struct active_node *node;
947 struct active_node *node = barrier_from_ll(first);
977 struct active_node *node = barrier_from_ll(pos);
986 struct active_node *it;
990 it = rb_entry(parent, struct active_node, node);
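
Lines 896-990 consume the preallocated barriers: each entry popped off the llist is converted back with barrier_from_ll() (lines 947 and 977) and then slotted into the rbtree by the descent at lines 986-990. A hedged sketch of that drain-and-insert loop using the stock llist and rbtree APIs; the list head, the key comparison, and the absence of locking are assumptions:

#include <linux/llist.h>
#include <linux/rbtree.h>

static void sketch_insert_barriers(struct llist_head *barriers,
				   struct rb_root *tree)
{
	struct llist_node *pos, *next;

	/* llist_del_all() atomically takes the whole list for private iteration. */
	llist_for_each_safe(pos, next, llist_del_all(barriers)) {
		struct active_node *node = sketch_barrier_from_ll(pos);
		struct rb_node **p = &tree->rb_node, *parent = NULL;

		while (*p) {
			struct active_node *it;

			parent = *p;
			it = rb_entry(parent, struct active_node, node);
			if (it->timeline < node->timeline)
				p = &parent->rb_right;
			else
				p = &parent->rb_left;
		}

		rb_link_node(&node->node, parent, p);
		rb_insert_color(&node->node, tree);
	}
}
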
1232 slab_cache = KMEM_CACHE(active_node, SLAB_HWCACHE_ALIGN);
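
Line 1232 creates the slab cache the nodes are allocated from. A sketch of the conventional KMEM_CACHE() init/exit pairing around it; the function names and error handling here are the usual pattern, not quoted from the file:

#include <linux/init.h>
#include <linux/errno.h>
#include <linux/slab.h>

static struct kmem_cache *slab_cache;

static int __init sketch_active_cache_init(void)
{
	/* KMEM_CACHE() sizes and names the cache after struct active_node. */
	slab_cache = KMEM_CACHE(active_node, SLAB_HWCACHE_ALIGN);
	if (!slab_cache)
		return -ENOMEM;

	return 0;
}

static void sketch_active_cache_exit(void)
{
	kmem_cache_destroy(slab_cache);
}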