Lines matching refs:engine — one hit per line: source line number, the matching code, and the enclosing function; a trailing "argument" or "local" marks hits where the reference is a function parameter or local declaration.
178 i915_ggtt_offset(req->engine->scratch) + 2 * CACHELINE_BYTES; in intel_emit_post_sync_nonzero_flush()
212 i915_ggtt_offset(req->engine->scratch) + 2 * CACHELINE_BYTES; in gen6_render_ring_flush()
282 i915_ggtt_offset(req->engine->scratch) + 2 * CACHELINE_BYTES; in gen7_render_ring_flush()
376 i915_ggtt_offset(req->engine->scratch) + in gen8_render_ring_flush()
384 static void ring_setup_phys_status_page(struct intel_engine_cs *engine) in ring_setup_phys_status_page() argument
386 struct drm_i915_private *dev_priv = engine->i915; in ring_setup_phys_status_page()
395 static void intel_ring_setup_status_page(struct intel_engine_cs *engine) in intel_ring_setup_status_page() argument
397 struct drm_i915_private *dev_priv = engine->i915; in intel_ring_setup_status_page()
404 switch (engine->id) { in intel_ring_setup_status_page()
410 GEM_BUG_ON(engine->id); in intel_ring_setup_status_page()
425 mmio = RING_HWS_PGA_GEN6(engine->mmio_base); in intel_ring_setup_status_page()
428 mmio = RING_HWS_PGA(engine->mmio_base); in intel_ring_setup_status_page()
432 I915_WRITE(RING_HWSTAM(engine->mmio_base), 0xffffffff); in intel_ring_setup_status_page()
434 I915_WRITE(mmio, engine->status_page.ggtt_offset); in intel_ring_setup_status_page()
445 i915_reg_t reg = RING_INSTPM(engine->mmio_base); in intel_ring_setup_status_page()
448 WARN_ON((I915_READ_MODE(engine) & MODE_IDLE) == 0); in intel_ring_setup_status_page()
457 engine->name); in intel_ring_setup_status_page()
461 static bool stop_ring(struct intel_engine_cs *engine) in stop_ring() argument
463 struct drm_i915_private *dev_priv = engine->i915; in stop_ring()
466 I915_WRITE_MODE(engine, _MASKED_BIT_ENABLE(STOP_RING)); in stop_ring()
468 RING_MI_MODE(engine->mmio_base), in stop_ring()
473 engine->name); in stop_ring()
478 if (I915_READ_HEAD(engine) != I915_READ_TAIL(engine)) in stop_ring()
483 I915_WRITE_CTL(engine, 0); in stop_ring()
484 I915_WRITE_HEAD(engine, 0); in stop_ring()
485 I915_WRITE_TAIL(engine, 0); in stop_ring()
487 return (I915_READ_HEAD(engine) & HEAD_ADDR) == 0; in stop_ring()
490 static int init_ring_common(struct intel_engine_cs *engine) in init_ring_common() argument
492 struct drm_i915_private *dev_priv = engine->i915; in init_ring_common()
493 struct intel_ring *ring = engine->buffer; in init_ring_common()
498 if (!stop_ring(engine)) { in init_ring_common()
502 engine->name, in init_ring_common()
503 I915_READ_CTL(engine), in init_ring_common()
504 I915_READ_HEAD(engine), in init_ring_common()
505 I915_READ_TAIL(engine), in init_ring_common()
506 I915_READ_START(engine)); in init_ring_common()
508 if (!stop_ring(engine)) { in init_ring_common()
511 engine->name, in init_ring_common()
512 I915_READ_CTL(engine), in init_ring_common()
513 I915_READ_HEAD(engine), in init_ring_common()
514 I915_READ_TAIL(engine), in init_ring_common()
515 I915_READ_START(engine)); in init_ring_common()
522 ring_setup_phys_status_page(engine); in init_ring_common()
524 intel_ring_setup_status_page(engine); in init_ring_common()
526 intel_engine_reset_breadcrumbs(engine); in init_ring_common()
529 I915_READ_HEAD(engine); in init_ring_common()
535 I915_WRITE_START(engine, i915_ggtt_offset(ring->vma)); in init_ring_common()
538 if (I915_READ_HEAD(engine)) in init_ring_common()
540 engine->name, I915_READ_HEAD(engine)); in init_ring_common()
543 I915_WRITE_HEAD(engine, ring->head); in init_ring_common()
544 I915_WRITE_TAIL(engine, ring->tail); in init_ring_common()
545 (void)I915_READ_TAIL(engine); in init_ring_common()
547 I915_WRITE_CTL(engine, RING_CTL_SIZE(ring->size) | RING_VALID); in init_ring_common()
550 if (intel_wait_for_register(dev_priv, RING_CTL(engine->mmio_base), in init_ring_common()
555 engine->name, in init_ring_common()
556 I915_READ_CTL(engine), in init_ring_common()
557 I915_READ_CTL(engine) & RING_VALID, in init_ring_common()
558 I915_READ_HEAD(engine), ring->head, in init_ring_common()
559 I915_READ_TAIL(engine), ring->tail, in init_ring_common()
560 I915_READ_START(engine), in init_ring_common()
566 intel_engine_init_hangcheck(engine); in init_ring_common()
569 I915_WRITE_MODE(engine, _MASKED_BIT_DISABLE(STOP_RING)); in init_ring_common()
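Taken together, the stop_ring() and init_ring_common() hits above trace the legacy ring bring-up order: stop the ring and clear CTL/HEAD/TAIL, program RING_START with the ring's GGTT offset, restore HEAD/TAIL, then set RING_VALID in RING_CTL and check that the hardware acknowledges it. A minimal standalone sketch of that ordering, with a hypothetical reg_read()/reg_write() pair and a fake register file standing in for the driver's I915_READ/I915_WRITE MMIO accessors; the register offsets and the size encoding are placeholders, not the real layout:

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	#define RING_HEAD	0x04
	#define RING_TAIL	0x08
	#define RING_START	0x0c
	#define RING_CTL	0x10
	#define RING_VALID	(1u << 0)

	static uint32_t regs[64];		/* fake MMIO register file */

	static uint32_t reg_read(uint32_t reg)
	{
		return regs[reg / 4];
	}

	static void reg_write(uint32_t reg, uint32_t val)
	{
		regs[reg / 4] = val;
	}

	static bool init_ring(uint32_t ggtt_offset, uint32_t size)
	{
		/* 1. Stop the ring: clear CTL, HEAD and TAIL (cf. stop_ring()). */
		reg_write(RING_CTL, 0);
		reg_write(RING_HEAD, 0);
		reg_write(RING_TAIL, 0);

		/* 2. Program the ring's address before enabling it. */
		reg_write(RING_START, ggtt_offset);
		reg_write(RING_HEAD, 0);
		reg_write(RING_TAIL, 0);

		/*
		 * 3. Enable the ring and check that it reports VALID.  The real
		 * driver polls the register with a timeout; against this fake
		 * register file the read simply returns what was written.
		 */
		reg_write(RING_CTL, (size - 4096) | RING_VALID);
		return (reg_read(RING_CTL) & RING_VALID) != 0;
	}

	int main(void)
	{
		printf("ring valid: %d\n", init_ring(0x10000, 32 * 4096));
		return 0;
	}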
577 static void reset_ring_common(struct intel_engine_cs *engine, in reset_ring_common() argument
586 assert_forcewakes_active(engine->i915, FORCEWAKE_ALL); in reset_ring_common()
604 struct intel_context *ce = &request->ctx->engine[engine->id]; in reset_ring_common()
618 ppgtt = request->ctx->ppgtt ?: engine->i915->mm.aliasing_ppgtt; in reset_ring_common()
622 I915_WRITE(RING_PP_DIR_DCLV(engine), PP_DIR_DCLV_2G); in reset_ring_common()
623 I915_WRITE(RING_PP_DIR_BASE(engine), pd_offset); in reset_ring_common()
627 RING_PP_DIR_BASE(engine), in reset_ring_common()
632 ppgtt->pd_dirty_rings &= ~intel_engine_flag(engine); in reset_ring_common()
639 engine->legacy_active_context = NULL; in reset_ring_common()
658 static int init_render_ring(struct intel_engine_cs *engine) in init_render_ring() argument
660 struct drm_i915_private *dev_priv = engine->i915; in init_render_ring()
661 int ret = init_ring_common(engine); in init_render_ring()
704 I915_WRITE_IMR(engine, ~engine->irq_keep_mask); in init_render_ring()
706 return init_workarounds_ring(engine); in init_render_ring()
709 static void render_ring_cleanup(struct intel_engine_cs *engine) in render_ring_cleanup() argument
711 struct drm_i915_private *dev_priv = engine->i915; in render_ring_cleanup()
723 u64 gtt_offset = req->engine->semaphore.signal_ggtt[id]; in gen8_rcs_signal()
749 u64 gtt_offset = req->engine->semaphore.signal_ggtt[id]; in gen8_xcs_signal()
768 struct intel_engine_cs *engine; in gen6_signal() local
772 for_each_engine(engine, dev_priv, id) { in gen6_signal()
775 if (!(BIT(engine->hw_id) & GEN6_SEMAPHORES_MASK)) in gen6_signal()
778 mbox_reg = req->engine->semaphore.mbox.signal[engine->hw_id]; in gen6_signal()
792 static void cancel_requests(struct intel_engine_cs *engine) in cancel_requests() argument
797 spin_lock_irqsave(&engine->timeline->lock, flags); in cancel_requests()
800 list_for_each_entry(request, &engine->timeline->requests, link) { in cancel_requests()
807 spin_unlock_irqrestore(&engine->timeline->lock, flags); in cancel_requests()
816 I915_WRITE_TAIL(request->engine, in i9xx_submit_request()
844 req->engine->semaphore.signal(req, cs)); in gen6_sema_emit_breadcrumb()
850 struct intel_engine_cs *engine = req->engine; in gen8_render_emit_breadcrumb() local
852 if (engine->semaphore.signal) in gen8_render_emit_breadcrumb()
853 cs = engine->semaphore.signal(req, cs); in gen8_render_emit_breadcrumb()
858 *cs++ = intel_hws_seqno_address(engine); in gen8_render_emit_breadcrumb()
885 u64 offset = GEN8_WAIT_OFFSET(req->engine, signal->engine->id); in gen8_ring_sync_to()
906 if (ppgtt && req->engine->id != RCS) in gen8_ring_sync_to()
907 ppgtt->pd_dirty_rings |= intel_engine_flag(req->engine); in gen8_ring_sync_to()
918 u32 wait_mbox = signal->engine->semaphore.mbox.wait[req->engine->hw_id]; in gen6_ring_sync_to()
941 gen5_seqno_barrier(struct intel_engine_cs *engine) in gen5_seqno_barrier() argument
959 gen6_seqno_barrier(struct intel_engine_cs *engine) in gen6_seqno_barrier() argument
961 struct drm_i915_private *dev_priv = engine->i915; in gen6_seqno_barrier()
979 POSTING_READ_FW(RING_ACTHD(engine->mmio_base)); in gen6_seqno_barrier()
984 gen5_irq_enable(struct intel_engine_cs *engine) in gen5_irq_enable() argument
986 gen5_enable_gt_irq(engine->i915, engine->irq_enable_mask); in gen5_irq_enable()
990 gen5_irq_disable(struct intel_engine_cs *engine) in gen5_irq_disable() argument
992 gen5_disable_gt_irq(engine->i915, engine->irq_enable_mask); in gen5_irq_disable()
996 i9xx_irq_enable(struct intel_engine_cs *engine) in i9xx_irq_enable() argument
998 struct drm_i915_private *dev_priv = engine->i915; in i9xx_irq_enable()
1000 dev_priv->irq_mask &= ~engine->irq_enable_mask; in i9xx_irq_enable()
1002 POSTING_READ_FW(RING_IMR(engine->mmio_base)); in i9xx_irq_enable()
1006 i9xx_irq_disable(struct intel_engine_cs *engine) in i9xx_irq_disable() argument
1008 struct drm_i915_private *dev_priv = engine->i915; in i9xx_irq_disable()
1010 dev_priv->irq_mask |= engine->irq_enable_mask; in i9xx_irq_disable()
1015 i8xx_irq_enable(struct intel_engine_cs *engine) in i8xx_irq_enable() argument
1017 struct drm_i915_private *dev_priv = engine->i915; in i8xx_irq_enable()
1019 dev_priv->irq_mask &= ~engine->irq_enable_mask; in i8xx_irq_enable()
1021 POSTING_READ16(RING_IMR(engine->mmio_base)); in i8xx_irq_enable()
1025 i8xx_irq_disable(struct intel_engine_cs *engine) in i8xx_irq_disable() argument
1027 struct drm_i915_private *dev_priv = engine->i915; in i8xx_irq_disable()
1029 dev_priv->irq_mask |= engine->irq_enable_mask; in i8xx_irq_disable()
1049 gen6_irq_enable(struct intel_engine_cs *engine) in gen6_irq_enable() argument
1051 struct drm_i915_private *dev_priv = engine->i915; in gen6_irq_enable()
1053 I915_WRITE_IMR(engine, in gen6_irq_enable()
1054 ~(engine->irq_enable_mask | in gen6_irq_enable()
1055 engine->irq_keep_mask)); in gen6_irq_enable()
1056 gen5_enable_gt_irq(dev_priv, engine->irq_enable_mask); in gen6_irq_enable()
1060 gen6_irq_disable(struct intel_engine_cs *engine) in gen6_irq_disable() argument
1062 struct drm_i915_private *dev_priv = engine->i915; in gen6_irq_disable()
1064 I915_WRITE_IMR(engine, ~engine->irq_keep_mask); in gen6_irq_disable()
1065 gen5_disable_gt_irq(dev_priv, engine->irq_enable_mask); in gen6_irq_disable()
1069 hsw_vebox_irq_enable(struct intel_engine_cs *engine) in hsw_vebox_irq_enable() argument
1071 struct drm_i915_private *dev_priv = engine->i915; in hsw_vebox_irq_enable()
1073 I915_WRITE_IMR(engine, ~engine->irq_enable_mask); in hsw_vebox_irq_enable()
1074 gen6_unmask_pm_irq(dev_priv, engine->irq_enable_mask); in hsw_vebox_irq_enable()
1078 hsw_vebox_irq_disable(struct intel_engine_cs *engine) in hsw_vebox_irq_disable() argument
1080 struct drm_i915_private *dev_priv = engine->i915; in hsw_vebox_irq_disable()
1082 I915_WRITE_IMR(engine, ~0); in hsw_vebox_irq_disable()
1083 gen6_mask_pm_irq(dev_priv, engine->irq_enable_mask); in hsw_vebox_irq_disable()
1087 gen8_irq_enable(struct intel_engine_cs *engine) in gen8_irq_enable() argument
1089 struct drm_i915_private *dev_priv = engine->i915; in gen8_irq_enable()
1091 I915_WRITE_IMR(engine, in gen8_irq_enable()
1092 ~(engine->irq_enable_mask | in gen8_irq_enable()
1093 engine->irq_keep_mask)); in gen8_irq_enable()
1094 POSTING_READ_FW(RING_IMR(engine->mmio_base)); in gen8_irq_enable()
1098 gen8_irq_disable(struct intel_engine_cs *engine) in gen8_irq_disable() argument
1100 struct drm_i915_private *dev_priv = engine->i915; in gen8_irq_disable()
1102 I915_WRITE_IMR(engine, ~engine->irq_keep_mask); in gen8_irq_disable()
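The gen6/gen8 irq_enable/irq_disable hits above all follow the same IMR convention: a bit set in the interrupt mask register masks that interrupt, so enabling writes the complement of irq_enable_mask | irq_keep_mask and disabling leaves only ~irq_keep_mask unmasked. A small self-contained model of that masking, using a plain struct in place of the driver's engine and register state:

	#include <stdint.h>
	#include <stdio.h>

	struct engine {
		uint32_t imr;			/* models RING_IMR */
		uint32_t irq_enable_mask;	/* user-interrupt bit for this engine */
		uint32_t irq_keep_mask;		/* bits that must always stay unmasked */
	};

	static void irq_enable(struct engine *e)
	{
		/* Unmask both the user interrupt and the keep-mask bits. */
		e->imr = ~(e->irq_enable_mask | e->irq_keep_mask);
	}

	static void irq_disable(struct engine *e)
	{
		/* Mask everything except the keep-mask bits. */
		e->imr = ~e->irq_keep_mask;
	}

	int main(void)
	{
		struct engine rcs = {
			.imr = ~0u,
			.irq_enable_mask = 1u << 0,
			.irq_keep_mask = 1u << 5,
		};

		irq_enable(&rcs);
		printf("IMR after enable:  0x%08x\n", (unsigned)rcs.imr);
		irq_disable(&rcs);
		printf("IMR after disable: 0x%08x\n", (unsigned)rcs.imr);
		return 0;
	}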
1133 u32 *cs, cs_offset = i915_ggtt_offset(req->engine->scratch); in i830_emit_bb_start()
1311 intel_engine_create_ring(struct intel_engine_cs *engine, int size) in intel_engine_create_ring() argument
1331 if (IS_I830(engine->i915) || IS_I845G(engine->i915)) in intel_engine_create_ring()
1336 vma = intel_ring_create_vma(engine->i915, size); in intel_engine_create_ring()
1359 struct i915_vma *vma = ctx->engine[RCS].state; in context_pin()
1377 alloc_context_vma(struct intel_engine_cs *engine) in alloc_context_vma() argument
1379 struct drm_i915_private *i915 = engine->i915; in alloc_context_vma()
1383 obj = i915_gem_object_create(i915, engine->context_size); in alloc_context_vma()
1415 intel_ring_context_pin(struct intel_engine_cs *engine, in intel_ring_context_pin() argument
1418 struct intel_context *ce = &ctx->engine[engine->id]; in intel_ring_context_pin()
1427 if (!ce->state && engine->context_size) { in intel_ring_context_pin()
1430 vma = alloc_context_vma(engine); in intel_ring_context_pin()
1462 return engine->buffer; in intel_ring_context_pin()
1469 static void intel_ring_context_unpin(struct intel_engine_cs *engine, in intel_ring_context_unpin() argument
1472 struct intel_context *ce = &ctx->engine[engine->id]; in intel_ring_context_unpin()
1488 static int intel_init_ring_buffer(struct intel_engine_cs *engine) in intel_init_ring_buffer() argument
1493 intel_engine_setup_common(engine); in intel_init_ring_buffer()
1495 err = intel_engine_init_common(engine); in intel_init_ring_buffer()
1499 ring = intel_engine_create_ring(engine, 32 * PAGE_SIZE); in intel_init_ring_buffer()
1506 err = intel_ring_pin(ring, engine->i915, I915_GTT_PAGE_SIZE); in intel_init_ring_buffer()
1510 GEM_BUG_ON(engine->buffer); in intel_init_ring_buffer()
1511 engine->buffer = ring; in intel_init_ring_buffer()
1518 intel_engine_cleanup_common(engine); in intel_init_ring_buffer()
1522 void intel_engine_cleanup(struct intel_engine_cs *engine) in intel_engine_cleanup() argument
1524 struct drm_i915_private *dev_priv = engine->i915; in intel_engine_cleanup()
1527 (I915_READ_MODE(engine) & MODE_IDLE) == 0); in intel_engine_cleanup()
1529 intel_ring_unpin(engine->buffer); in intel_engine_cleanup()
1530 intel_ring_free(engine->buffer); in intel_engine_cleanup()
1532 if (engine->cleanup) in intel_engine_cleanup()
1533 engine->cleanup(engine); in intel_engine_cleanup()
1535 intel_engine_cleanup_common(engine); in intel_engine_cleanup()
1537 dev_priv->engine[engine->id] = NULL; in intel_engine_cleanup()
1538 kfree(engine); in intel_engine_cleanup()
1543 struct intel_engine_cs *engine; in intel_legacy_submission_resume() local
1547 for_each_engine(engine, dev_priv, id) in intel_legacy_submission_resume()
1548 intel_ring_reset(engine->buffer, 0); in intel_legacy_submission_resume()
1555 GEM_BUG_ON(!request->ctx->engine[request->engine->id].pin_count); in ring_request_alloc()
1893 struct intel_engine_cs *engine) in intel_ring_init_semaphores() argument
1926 engine->semaphore.sync_to = gen8_ring_sync_to; in intel_ring_init_semaphores()
1927 engine->semaphore.signal = gen8_xcs_signal; in intel_ring_init_semaphores()
1932 if (i != engine->id) in intel_ring_init_semaphores()
1933 ring_offset = offset + GEN8_SEMAPHORE_OFFSET(engine->id, i); in intel_ring_init_semaphores()
1937 engine->semaphore.signal_ggtt[i] = ring_offset; in intel_ring_init_semaphores()
1940 engine->semaphore.sync_to = gen6_ring_sync_to; in intel_ring_init_semaphores()
1941 engine->semaphore.signal = gen6_signal; in intel_ring_init_semaphores()
1979 if (i == engine->hw_id) { in intel_ring_init_semaphores()
1983 wait_mbox = sem_data[engine->hw_id][i].wait_mbox; in intel_ring_init_semaphores()
1984 mbox_reg = sem_data[engine->hw_id][i].mbox_reg; in intel_ring_init_semaphores()
1987 engine->semaphore.mbox.wait[i] = wait_mbox; in intel_ring_init_semaphores()
1988 engine->semaphore.mbox.signal[i] = mbox_reg; in intel_ring_init_semaphores()
2002 struct intel_engine_cs *engine) in intel_ring_init_irq() argument
2004 engine->irq_enable_mask = GT_RENDER_USER_INTERRUPT << engine->irq_shift; in intel_ring_init_irq()
2007 engine->irq_enable = gen8_irq_enable; in intel_ring_init_irq()
2008 engine->irq_disable = gen8_irq_disable; in intel_ring_init_irq()
2009 engine->irq_seqno_barrier = gen6_seqno_barrier; in intel_ring_init_irq()
2011 engine->irq_enable = gen6_irq_enable; in intel_ring_init_irq()
2012 engine->irq_disable = gen6_irq_disable; in intel_ring_init_irq()
2013 engine->irq_seqno_barrier = gen6_seqno_barrier; in intel_ring_init_irq()
2015 engine->irq_enable = gen5_irq_enable; in intel_ring_init_irq()
2016 engine->irq_disable = gen5_irq_disable; in intel_ring_init_irq()
2017 engine->irq_seqno_barrier = gen5_seqno_barrier; in intel_ring_init_irq()
2019 engine->irq_enable = i9xx_irq_enable; in intel_ring_init_irq()
2020 engine->irq_disable = i9xx_irq_disable; in intel_ring_init_irq()
2022 engine->irq_enable = i8xx_irq_enable; in intel_ring_init_irq()
2023 engine->irq_disable = i8xx_irq_disable; in intel_ring_init_irq()
2027 static void i9xx_set_default_submission(struct intel_engine_cs *engine) in i9xx_set_default_submission() argument
2029 engine->submit_request = i9xx_submit_request; in i9xx_set_default_submission()
2030 engine->cancel_requests = cancel_requests; in i9xx_set_default_submission()
2033 static void gen6_bsd_set_default_submission(struct intel_engine_cs *engine) in gen6_bsd_set_default_submission() argument
2035 engine->submit_request = gen6_bsd_submit_request; in gen6_bsd_set_default_submission()
2036 engine->cancel_requests = cancel_requests; in gen6_bsd_set_default_submission()
2040 struct intel_engine_cs *engine) in intel_ring_default_vfuncs() argument
2042 intel_ring_init_irq(dev_priv, engine); in intel_ring_default_vfuncs()
2043 intel_ring_init_semaphores(dev_priv, engine); in intel_ring_default_vfuncs()
2045 engine->init_hw = init_ring_common; in intel_ring_default_vfuncs()
2046 engine->reset_hw = reset_ring_common; in intel_ring_default_vfuncs()
2048 engine->context_pin = intel_ring_context_pin; in intel_ring_default_vfuncs()
2049 engine->context_unpin = intel_ring_context_unpin; in intel_ring_default_vfuncs()
2051 engine->request_alloc = ring_request_alloc; in intel_ring_default_vfuncs()
2053 engine->emit_breadcrumb = i9xx_emit_breadcrumb; in intel_ring_default_vfuncs()
2054 engine->emit_breadcrumb_sz = i9xx_emit_breadcrumb_sz; in intel_ring_default_vfuncs()
2058 engine->emit_breadcrumb = gen6_sema_emit_breadcrumb; in intel_ring_default_vfuncs()
2062 engine->emit_breadcrumb_sz += num_rings * 6; in intel_ring_default_vfuncs()
2064 engine->emit_breadcrumb_sz += num_rings * 3; in intel_ring_default_vfuncs()
2066 engine->emit_breadcrumb_sz++; in intel_ring_default_vfuncs()
2070 engine->set_default_submission = i9xx_set_default_submission; in intel_ring_default_vfuncs()
2073 engine->emit_bb_start = gen8_emit_bb_start; in intel_ring_default_vfuncs()
2075 engine->emit_bb_start = gen6_emit_bb_start; in intel_ring_default_vfuncs()
2077 engine->emit_bb_start = i965_emit_bb_start; in intel_ring_default_vfuncs()
2079 engine->emit_bb_start = i830_emit_bb_start; in intel_ring_default_vfuncs()
2081 engine->emit_bb_start = i915_emit_bb_start; in intel_ring_default_vfuncs()
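The intel_ring_init_irq() and intel_ring_default_vfuncs() hits above show the driver filling an engine's function pointers according to hardware generation, with the per-engine init functions overriding individual entries afterwards. A stripped-down, compilable illustration of that selection pattern; the generation checks and handler bodies are placeholders, not the driver's actual tables:

	#include <stdio.h>

	struct engine {
		int gen;
		void (*irq_enable)(struct engine *e);
		void (*irq_disable)(struct engine *e);
	};

	static void gen8_enable(struct engine *e)  { printf("gen%d: gen8-style enable\n", e->gen); }
	static void gen8_disable(struct engine *e) { printf("gen%d: gen8-style disable\n", e->gen); }
	static void gen6_enable(struct engine *e)  { printf("gen%d: gen6-style enable\n", e->gen); }
	static void gen6_disable(struct engine *e) { printf("gen%d: gen6-style disable\n", e->gen); }

	/* Pick implementations by generation, like intel_ring_init_irq(). */
	static void init_irq_vfuncs(struct engine *e)
	{
		if (e->gen >= 8) {
			e->irq_enable = gen8_enable;
			e->irq_disable = gen8_disable;
		} else {
			e->irq_enable = gen6_enable;
			e->irq_disable = gen6_disable;
		}
	}

	int main(void)
	{
		struct engine e = { .gen = 8 };

		init_irq_vfuncs(&e);
		e.irq_enable(&e);
		e.irq_disable(&e);
		return 0;
	}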
2084 int intel_init_render_ring_buffer(struct intel_engine_cs *engine) in intel_init_render_ring_buffer() argument
2086 struct drm_i915_private *dev_priv = engine->i915; in intel_init_render_ring_buffer()
2089 intel_ring_default_vfuncs(dev_priv, engine); in intel_init_render_ring_buffer()
2092 engine->irq_keep_mask = GT_RENDER_L3_PARITY_ERROR_INTERRUPT; in intel_init_render_ring_buffer()
2095 engine->init_context = intel_rcs_ctx_init; in intel_init_render_ring_buffer()
2096 engine->emit_breadcrumb = gen8_render_emit_breadcrumb; in intel_init_render_ring_buffer()
2097 engine->emit_breadcrumb_sz = gen8_render_emit_breadcrumb_sz; in intel_init_render_ring_buffer()
2098 engine->emit_flush = gen8_render_ring_flush; in intel_init_render_ring_buffer()
2102 engine->semaphore.signal = gen8_rcs_signal; in intel_init_render_ring_buffer()
2105 engine->emit_breadcrumb_sz += num_rings * 8; in intel_init_render_ring_buffer()
2108 engine->init_context = intel_rcs_ctx_init; in intel_init_render_ring_buffer()
2109 engine->emit_flush = gen7_render_ring_flush; in intel_init_render_ring_buffer()
2111 engine->emit_flush = gen6_render_ring_flush; in intel_init_render_ring_buffer()
2113 engine->emit_flush = gen4_render_ring_flush; in intel_init_render_ring_buffer()
2116 engine->emit_flush = gen2_render_ring_flush; in intel_init_render_ring_buffer()
2118 engine->emit_flush = gen4_render_ring_flush; in intel_init_render_ring_buffer()
2119 engine->irq_enable_mask = I915_USER_INTERRUPT; in intel_init_render_ring_buffer()
2123 engine->emit_bb_start = hsw_emit_bb_start; in intel_init_render_ring_buffer()
2125 engine->init_hw = init_render_ring; in intel_init_render_ring_buffer()
2126 engine->cleanup = render_ring_cleanup; in intel_init_render_ring_buffer()
2128 ret = intel_init_ring_buffer(engine); in intel_init_render_ring_buffer()
2133 ret = intel_engine_create_scratch(engine, PAGE_SIZE); in intel_init_render_ring_buffer()
2137 ret = intel_engine_create_scratch(engine, I830_WA_SIZE); in intel_init_render_ring_buffer()
2145 int intel_init_bsd_ring_buffer(struct intel_engine_cs *engine) in intel_init_bsd_ring_buffer() argument
2147 struct drm_i915_private *dev_priv = engine->i915; in intel_init_bsd_ring_buffer()
2149 intel_ring_default_vfuncs(dev_priv, engine); in intel_init_bsd_ring_buffer()
2154 engine->set_default_submission = gen6_bsd_set_default_submission; in intel_init_bsd_ring_buffer()
2155 engine->emit_flush = gen6_bsd_ring_flush; in intel_init_bsd_ring_buffer()
2157 engine->irq_enable_mask = GT_BSD_USER_INTERRUPT; in intel_init_bsd_ring_buffer()
2159 engine->mmio_base = BSD_RING_BASE; in intel_init_bsd_ring_buffer()
2160 engine->emit_flush = bsd_ring_flush; in intel_init_bsd_ring_buffer()
2162 engine->irq_enable_mask = ILK_BSD_USER_INTERRUPT; in intel_init_bsd_ring_buffer()
2164 engine->irq_enable_mask = I915_BSD_USER_INTERRUPT; in intel_init_bsd_ring_buffer()
2167 return intel_init_ring_buffer(engine); in intel_init_bsd_ring_buffer()
2170 int intel_init_blt_ring_buffer(struct intel_engine_cs *engine) in intel_init_blt_ring_buffer() argument
2172 struct drm_i915_private *dev_priv = engine->i915; in intel_init_blt_ring_buffer()
2174 intel_ring_default_vfuncs(dev_priv, engine); in intel_init_blt_ring_buffer()
2176 engine->emit_flush = gen6_ring_flush; in intel_init_blt_ring_buffer()
2178 engine->irq_enable_mask = GT_BLT_USER_INTERRUPT; in intel_init_blt_ring_buffer()
2180 return intel_init_ring_buffer(engine); in intel_init_blt_ring_buffer()
2183 int intel_init_vebox_ring_buffer(struct intel_engine_cs *engine) in intel_init_vebox_ring_buffer() argument
2185 struct drm_i915_private *dev_priv = engine->i915; in intel_init_vebox_ring_buffer()
2187 intel_ring_default_vfuncs(dev_priv, engine); in intel_init_vebox_ring_buffer()
2189 engine->emit_flush = gen6_ring_flush; in intel_init_vebox_ring_buffer()
2192 engine->irq_enable_mask = PM_VEBOX_USER_INTERRUPT; in intel_init_vebox_ring_buffer()
2193 engine->irq_enable = hsw_vebox_irq_enable; in intel_init_vebox_ring_buffer()
2194 engine->irq_disable = hsw_vebox_irq_disable; in intel_init_vebox_ring_buffer()
2197 return intel_init_ring_buffer(engine); in intel_init_vebox_ring_buffer()