Lines Matching defs:q_depth
398 uint16_t q_depth,
410 eq_size = roundup_pow_of_two(GDMA_EQE_SIZE * q_depth);
414 cq_size = roundup_pow_of_two(GDMA_CQE_SIZE * q_depth);
437 comp_buf = mallocarray(q_depth, sizeof(struct gdma_comp),
442 hwc_cq->queue_depth = q_depth;
456 mana_hwc_alloc_dma_buf(struct hw_channel_context *hwc, uint16_t q_depth,
471 q_depth * sizeof(struct hwc_work_request),
474 dma_buf->num_reqs = q_depth;
476 buf_size = ALIGN(q_depth * max_msg_size, PAGE_SIZE);
489 for (i = 0; i < q_depth; i++) {
532 enum gdma_queue_type q_type, uint16_t q_depth,
547 queue_size = roundup_pow_of_two(GDMA_MAX_RQE_SIZE * q_depth);
549 queue_size = roundup_pow_of_two(GDMA_MAX_SQE_SIZE * q_depth);
562 hwc_wq->queue_depth = q_depth;
565 err = mana_hwc_alloc_dma_buf(hwc, q_depth, max_msg_size,
642 mana_hwc_test_channel(struct hw_channel_context *hwc, uint16_t q_depth,
653 for (i = 0; i < q_depth; i++) {
660 ctx = malloc(q_depth * sizeof(struct hwc_caller_ctx),
663 for (i = 0; i < q_depth; ++i)
672 mana_hwc_establish_channel(struct gdma_context *gc, uint16_t *q_depth,
697 *q_depth = hwc->hwc_init_q_depth_max;
716 mana_hwc_init_queues(struct hw_channel_context *hwc, uint16_t q_depth,
721 err = mana_hwc_init_inflight_msg(hwc, q_depth);
728 err = mana_hwc_create_cq(hwc, q_depth * 2,
737 err = mana_hwc_create_wq(hwc, GDMA_RQ, q_depth, max_req_msg_size,
744 err = mana_hwc_create_wq(hwc, GDMA_SQ, q_depth, max_resp_msg_size,
751 hwc->num_inflight_msg = q_depth;
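
Taken together, these matches trace q_depth from the value negotiated in mana_hwc_establish_channel() through the EQ/CQ/WQ sizing in mana_hwc_create_cq()/mana_hwc_create_wq() and the per-request DMA buffer allocation in mana_hwc_alloc_dma_buf(). The sketch below is a minimal userspace illustration of that sizing math only; the GDMA_* entry sizes, PAGE_SIZE, the roundup_pow_of_two() and ALIGN() stand-ins, and the sample q_depth/max_msg_size values are assumptions for illustration, not the driver's actual definitions.

/*
 * Minimal sketch of how q_depth drives the HWC queue sizing seen in the
 * matches above.  The GDMA_* entry sizes and helper macros are illustrative
 * stand-ins; the authoritative values live in the driver's gdma headers.
 */
#include <stdint.h>
#include <stdio.h>

#define GDMA_EQE_SIZE		16u	/* assumed EQ entry size */
#define GDMA_CQE_SIZE		64u	/* assumed CQ entry size */
#define GDMA_MAX_RQE_SIZE	256u	/* assumed max RQ entry size */
#define GDMA_MAX_SQE_SIZE	512u	/* assumed max SQ entry size */
#define PAGE_SIZE		4096u

/* Stand-in for the kernel's roundup_pow_of_two(). */
static uint32_t
roundup_pow_of_two(uint32_t v)
{
	uint32_t r = 1;

	while (r < v)
		r <<= 1;
	return (r);
}

/* Stand-in for the kernel's ALIGN() macro (power-of-two alignment). */
#define ALIGN(x, a)	(((x) + ((a) - 1)) & ~((a) - 1))

int
main(void)
{
	uint16_t q_depth = 16;		/* e.g. value returned via *q_depth */
	uint32_t max_msg_size = 2048;	/* e.g. max request/response size */

	/*
	 * EQ/CQ sizing as in mana_hwc_create_cq(); the CQ is created with
	 * q_depth * 2 (see mana_hwc_init_queues) so one completion queue
	 * can absorb completions from both the send and the receive WQ.
	 */
	uint32_t eq_size = roundup_pow_of_two(GDMA_EQE_SIZE * (q_depth * 2u));
	uint32_t cq_size = roundup_pow_of_two(GDMA_CQE_SIZE * (q_depth * 2u));

	/* WQ sizing as in mana_hwc_create_wq(). */
	uint32_t rq_size = roundup_pow_of_two(GDMA_MAX_RQE_SIZE * q_depth);
	uint32_t sq_size = roundup_pow_of_two(GDMA_MAX_SQE_SIZE * q_depth);

	/*
	 * Per-channel message buffer as in mana_hwc_alloc_dma_buf(): one
	 * max-sized message slot per outstanding request, padded to a page.
	 */
	uint32_t buf_size = ALIGN((uint32_t)q_depth * max_msg_size, PAGE_SIZE);

	printf("eq=%u cq=%u rq=%u sq=%u buf=%u\n",
	    eq_size, cq_size, rq_size, sq_size, buf_size);
	return (0);
}

The q_depth * 2 factor in the sketch mirrors the call at line 728, where mana_hwc_init_queues() creates the completion queue at twice the work-queue depth before creating the GDMA_RQ and GDMA_SQ work queues at q_depth each.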