
Searched refs: STAILQ_FIRST (Results 1 – 25 of 31) sorted by relevance


/spdk/include/spdk/
queue_extras.h
142 #define STAILQ_FIRST(head) ((head)->stqh_first) macro
145 for ((var) = ((var) ? (var) : STAILQ_FIRST((head))); \
150 for ((var) = STAILQ_FIRST((head)); \
155 for ((var) = ((var) ? (var) : STAILQ_FIRST((head))); \
172 struct type *swap_first = STAILQ_FIRST(head1); \
174 STAILQ_FIRST(head1) = STAILQ_FIRST(head2); \
176 STAILQ_FIRST(head2) = swap_first; \
179 (head1)->stqh_last = &STAILQ_FIRST(head1); \
181 (head2)->stqh_last = &STAILQ_FIRST(head2); \
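The matches above are SPDK's queue_extras.h definitions of STAILQ_FIRST together with FOREACH- and SWAP-style helpers layered on top of it. As a point of reference, here is a minimal sketch (not SPDK code) of how a singly-linked tail queue is typically built and inspected with these macros, assuming the standard <sys/queue.h> STAILQ semantics that queue_extras.h mirrors; the item/item_list names are hypothetical.

#include <stdio.h>
#include <stdlib.h>
#include <sys/queue.h>

struct item {
	int value;
	STAILQ_ENTRY(item) link;          /* embedded forward link */
};

STAILQ_HEAD(item_list, item);             /* declares struct item_list */

int main(void)
{
	struct item_list list = STAILQ_HEAD_INITIALIZER(list);
	struct item *it;

	for (int i = 0; i < 3; i++) {
		it = malloc(sizeof(*it));
		it->value = i;
		STAILQ_INSERT_TAIL(&list, it, link);
	}

	/* STAILQ_FIRST() peeks at the head element without removing it. */
	printf("head holds %d\n", STAILQ_FIRST(&list)->value);

	/* STAILQ_FOREACH() walks the queue starting from STAILQ_FIRST(). */
	STAILQ_FOREACH(it, &list, link) {
		printf("item %d\n", it->value);
	}

	/* Drain the queue head-first before exiting. */
	while (!STAILQ_EMPTY(&list)) {
		it = STAILQ_FIRST(&list);
		STAILQ_REMOVE_HEAD(&list, link);
		free(it);
	}
	return 0;
}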
/spdk/test/unit/lib/nvme/nvme_poll_group.c/
nvme_poll_group_ut.c
297 qpair = STAILQ_FIRST(&tgroup->connected_qpairs); in test_spdk_nvme_poll_group_add_remove()
315 qpair = STAILQ_FIRST(&tgroup->connected_qpairs); in test_spdk_nvme_poll_group_add_remove()
365 qpair = STAILQ_FIRST(&tgroup->connected_qpairs); in test_spdk_nvme_poll_group_add_remove()
397 qpair = STAILQ_FIRST(&tgroup_1->connected_qpairs); in test_spdk_nvme_poll_group_process_completions()
402 qpair = STAILQ_FIRST(&tgroup_2->connected_qpairs); in test_spdk_nvme_poll_group_process_completions()
427 qpair = STAILQ_FIRST(&tgroup_1->connected_qpairs); in test_spdk_nvme_poll_group_destroy()
432 qpair = STAILQ_FIRST(&tgroup_2->connected_qpairs); in test_spdk_nvme_poll_group_destroy()
439 qpair = STAILQ_FIRST(&tgroup_4->connected_qpairs); in test_spdk_nvme_poll_group_destroy()
546 tgroup_1 = STAILQ_FIRST(&group->tgroups);
/spdk/lib/thread/
iobuf.c
524 buf = STAILQ_FIRST(&cache->small.cache); in spdk_iobuf_get()
530 buf = STAILQ_FIRST(&cache->large.cache); in spdk_iobuf_get()
710 buf = (void *)STAILQ_FIRST(&pool->cache);
792 bufs[i] = STAILQ_FIRST(&pool->cache);
801 entry = STAILQ_FIRST(pool->queue);
/spdk/lib/nvme/
nvme_qpair.c
576 req = STAILQ_FIRST(&tmp); in nvme_qpair_abort_queued_reqs()
605 req = STAILQ_FIRST(&tmp); in _nvme_qpair_complete_abort_queued_reqs()
662 req = STAILQ_FIRST(&qpair->queued_req); in nvme_qpair_check_enabled()
709 if ((req = STAILQ_FIRST(&qpair->queued_req)) == NULL) { in nvme_qpair_resubmit_requests()
745 ctx = STAILQ_FIRST(&operations); in nvme_complete_register_operations()
924 req = STAILQ_FIRST(&qpair->err_req_head);
nvme_internal.h
1433 req = STAILQ_FIRST(&qpair->free_req); in _nvme_free_request()
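The nvme_qpair.c and nvme_internal.h hits above all follow the same peek-then-pop idiom: take STAILQ_FIRST() of a request queue and, while it is non-NULL, remove the head and act on it. A hedged, generic sketch of that idiom follows; the request type and the complete_fn callback are hypothetical stand-ins, not SPDK APIs.

#include <sys/queue.h>

struct request {
	STAILQ_ENTRY(request) stailq;         /* queue linkage */
};

STAILQ_HEAD(request_queue, request);

/* Drain every queued request, handing each one to a caller-supplied callback. */
void
drain_queued_requests(struct request_queue *queue,
		      void (*complete_fn)(struct request *req))
{
	struct request *req;

	/* STAILQ_FIRST() returns NULL once the queue is empty. */
	while ((req = STAILQ_FIRST(queue)) != NULL) {
		STAILQ_REMOVE_HEAD(queue, stailq);
		complete_fn(req);
	}
}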
/spdk/test/common/lib/
test_iobuf.c
132 entry = STAILQ_FIRST(&g_iobuf_entries); in spdk_iobuf_put()
/spdk/test/external_code/accel/
module.c
84 while ((accel_task = STAILQ_FIRST(&tasks_to_complete))) { in ex_accel_comp_poll()
/spdk/module/sock/uring/
uring.c
883 tr = STAILQ_FIRST(&sock->recv_stream); in uring_sock_recv_next()
953 tr = STAILQ_FIRST(&sock->recv_stream); in uring_sock_readv_no_pipe()
968 tr = STAILQ_FIRST(&sock->recv_stream); in uring_sock_readv_no_pipe()
1810 tracker = STAILQ_FIRST(&group->free_trackers); in uring_sock_group_populate_buf_ring()
1821 assert(STAILQ_FIRST(&group->free_trackers) != tracker); in uring_sock_group_impl_poll()
1825 tracker = STAILQ_FIRST(&group->free_trackers); in uring_sock_group_impl_poll()
/spdk/test/unit/lib/nvme/nvme_transport.c/
nvme_transport_ut.c
173 CU_ASSERT(STAILQ_FIRST(&tgroup.disconnected_qpairs) == &qpair); in test_nvme_transport_poll_group_add_remove()
/spdk/module/accel/iaa/
accel_iaa.c
228 task = STAILQ_FIRST(&chan->queued_tasks); in idxd_poll()
/spdk/lib/jsonrpc/
jsonrpc_server_tcp.c
73 request = STAILQ_FIRST(&conn->send_queue); in jsonrpc_server_dequeue_request()
/spdk/test/unit/lib/accel/accel.c/
accel_ut.c
209 expected_accel_task = STAILQ_FIRST(&g_accel_ch->task_pool); in test_get_task()
269 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_dualcast()
332 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_compare()
373 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_fill()
419 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_crc32c()
456 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_crc32cv()
496 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_copy_crc32c()
535 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_submit_xor()
573 expected_accel_task = STAILQ_FIRST(&g_sw_ch->tasks_to_complete); in test_spdk_accel_module_find_by_name()
2387 cache_entry = STAILQ_FIRST( in test_sequence_accel_buffers()
[all...]
/spdk/test/unit/lib/nvme/nvme_rdma.c/
nvme_rdma_ut.c
612 CU_ASSERT(STAILQ_FIRST(&group.pollers) == poller_1); in test_nvme_rdma_poller_create()
622 CU_ASSERT(STAILQ_FIRST(&group.pollers) == poller_2); in test_nvme_rdma_poller_create()
1317 poller = STAILQ_FIRST(&group->pollers); in test_nvme_rdma_qpair_set_poller()
1347 poller = STAILQ_FIRST(&group->pollers); in test_nvme_rdma_qpair_set_poller()
/spdk/module/accel/dsa/
accel_dsa.c
422 while ((task = STAILQ_FIRST(&chan->queued_tasks))) { in dsa_create_cb()
/spdk/lib/nvmf/
rdma.c
2070 r = STAILQ_FIRST(&rgroup->group.pending_buf_queue); in nvmf_rdma_request_process()
2203 if (&rdma_req->req != STAILQ_FIRST(&rgroup->group.pending_buf_queue)) { in nvmf_rdma_request_process()
2241 if (rdma_req != STAILQ_FIRST(&rqpair->pending_rdma_read_queue)) { in nvmf_rdma_request_process()
2403 if (rdma_req != STAILQ_FIRST(&rqpair->pending_rdma_write_queue)) { in nvmf_rdma_request_process()
2428 if (rdma_req != STAILQ_FIRST(&rqpair->pending_rdma_send_queue)) { in nvmf_rdma_request_process()
3424 rdma_req = STAILQ_FIRST(&resources->free_queue); in nvmf_rdma_destroy_drained_qpair()
3426 rdma_req->recv = STAILQ_FIRST(&resources->incoming_queue); in nvmf_rdma_destroy_drained_qpair()
4546 rqpair = STAILQ_FIRST(&rpoller->qpairs_pending_recv); in _qp_reset_failed_sends()
4627 rqpair = STAILQ_FIRST(&rpoller->qpairs_pending_send); in nvmf_rdma_poller_poll()
tcp.c
2655 msg = STAILQ_FIRST(&list->free_msgs); in nvmf_tcp_req_parse_sgl()
2675 tcp_req = STAILQ_FIRST(&list->waiting_for_msg_reqs); in nvmf_tcp_req_parse_sgl()
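In the rdma.c matches above, STAILQ_FIRST() is used the other way around: rather than popping the head, the code checks whether a given request is currently the head of its pending queue and defers it if not, which keeps requests progressing in FIFO order. A hedged sketch of that guard, with hypothetical names rather than the actual SPDK state machine:

#include <stdbool.h>
#include <sys/queue.h>

struct pending_req {
	STAILQ_ENTRY(pending_req) link;
};

STAILQ_HEAD(pending_queue, pending_req);

/* Let a request proceed only once it has reached the head of its pending queue. */
bool
try_start_pending_req(struct pending_queue *queue, struct pending_req *req)
{
	if (req != STAILQ_FIRST(queue)) {
		return false;              /* not our turn yet; stay queued */
	}
	STAILQ_REMOVE_HEAD(queue, link);
	/* ... the request may now move to its next processing state ... */
	return true;
}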
/spdk/test/unit/lib/sock/sock.c/
sock_ut.c
1024 CU_ASSERT(STAILQ_FIRST(&map.entries)->ref == 1); in ut_sock_map()
1027 CU_ASSERT(STAILQ_FIRST(&map.entries)->ref == 2); in ut_sock_map()
/spdk/module/accel/dpdk_compressdev/
accel_dpdk_compressdev.c
614 task_to_resubmit = STAILQ_FIRST(&chan->queued_tasks); in _process_single_task()
/spdk/module/vfu_device/
vfu_virtio.c
421 req = STAILQ_FIRST(&vq->free_reqs); in vfu_virtio_dev_get_req()
486 req = STAILQ_FIRST(&vq->free_reqs); in vfu_virtio_dev_free_reqs()
/spdk/lib/sock/
sock.c
727 provided = STAILQ_FIRST(&group->pool); in sock_group_impl_poll_count()
/spdk/test/unit/lib/nvmf/tcp.c/
tcp_ut.c
/spdk/test/unit/lib/nvmf/rdma.c/
rdma_ut.c
716 CU_ASSERT(rdma_req == STAILQ_FIRST(&rqpair.pending_rdma_send_queue)); in test_spdk_nvmf_rdma_request_process()
722 CU_ASSERT(rdma_req == STAILQ_FIRST(&rqpair.pending_rdma_send_queue)); in test_spdk_nvmf_rdma_request_process()
/spdk/test/unit/lib/nvme/nvme_pcie.c/
nvme_pcie_ut.c
303 entry = STAILQ_FIRST(&g_events);
/spdk/test/unit/lib/bdev/nvme/bdev_nvme.c/
bdev_nvme_ut.c
1971 io_path1 = STAILQ_FIRST(&nbdev_ch1->io_path_list); in test_pending_reset()
1982 io_path2 = STAILQ_FIRST(&nbdev_ch2->io_path_list); in test_pending_reset()
2791 io_path1 = STAILQ_FIRST(&nbdev_ch1->io_path_list); in test_abort()
3355 io_path1 = STAILQ_FIRST(&nbdev_ch1->io_path_list); in test_reconnect_qpair()
3366 io_path2 = STAILQ_FIRST(&nbdev_ch2->io_path_list); in test_reconnect_qpair()
7881 io_path1 = STAILQ_FIRST(&nbdev_ch1->io_path_list);
7891 io_path2 = STAILQ_FIRST(&nbdev_ch2->io_path_list);
/spdk/test/unit/lib/nvme/nvme.c/
nvme_ut.c
766 req = STAILQ_FIRST(&match_req.qpair->free_req); in test_nvme_allocate_request_user_copy()
