Home
Sort results by: last modified time | relevance | path

Searched refs:adev (Results 1 – 25 of 327) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 14

/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
H A Damdgpu_device.c138 struct amdgpu_device *adev = ddev->dev_private; in amdgpu_device_get_pcie_replay_count() local
139 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
149 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev);
161 struct amdgpu_device *adev = dev->dev_private; in amdgpu_device_supports_boco() local
163 if (adev->flags & AMD_IS_PX) in amdgpu_device_supports_boco()
178 struct amdgpu_device *adev = dev->dev_private; in amdgpu_device_supports_baco() local
180 return amdgpu_asic_supports_baco(adev); in amdgpu_device_supports_baco()
194 void amdgpu_device_vram_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_vram_access() argument
202 spin_lock_irqsave(&adev->mmio_idx_lock, flags); in amdgpu_device_vram_access()
209 spin_unlock_irqrestore(&adev->mmio_idx_lock, flags); in amdgpu_device_vram_access()
[all …]
H A Damdgpu_soc15.c102 static u32 soc15_pcie_rreg(struct amdgpu_device *adev, u32 reg) in soc15_pcie_rreg() argument
106 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_rreg()
107 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_rreg()
109 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in soc15_pcie_rreg()
113 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in soc15_pcie_rreg()
117 static void soc15_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_pcie_wreg() argument
121 address = adev->nbio.funcs->get_pcie_index_offset(adev); in soc15_pcie_wreg()
122 data = adev->nbio.funcs->get_pcie_data_offset(adev); in soc15_pcie_wreg()
124 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in soc15_pcie_wreg()
129 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in soc15_pcie_wreg()
[all …]
H A Damdgpu_gmc_v9_0.c210 static int gmc_v9_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_ecc_interrupt_state() argument
219 if (adev->asic_type >= CHIP_VEGA20) in gmc_v9_0_ecc_interrupt_state()
260 static int gmc_v9_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_vm_fault_interrupt_state() argument
278 for (j = 0; j < adev->num_vmhubs; j++) { in gmc_v9_0_vm_fault_interrupt_state()
279 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
289 for (j = 0; j < adev->num_vmhubs; j++) { in gmc_v9_0_vm_fault_interrupt_state()
290 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
305 static int gmc_v9_0_process_interrupt(struct amdgpu_device *adev, in gmc_v9_0_process_interrupt() argument
318 if (retry_fault && amdgpu_gmc_filter_faults(adev, addr, entry->pasid, in gmc_v9_0_process_interrupt()
324 hub = &adev->vmhub[AMDGPU_MMHUB_0]; in gmc_v9_0_process_interrupt()
[all …]
H A Damdgpu_nv.c73 static u32 nv_pcie_rreg(struct amdgpu_device *adev, u32 reg) in nv_pcie_rreg() argument
77 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_rreg()
78 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_rreg()
80 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in nv_pcie_rreg()
84 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in nv_pcie_rreg()
88 static void nv_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in nv_pcie_wreg() argument
92 address = adev->nbio.funcs->get_pcie_index_offset(adev); in nv_pcie_wreg()
93 data = adev->nbio.funcs->get_pcie_data_offset(adev); in nv_pcie_wreg()
95 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in nv_pcie_wreg()
100 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in nv_pcie_wreg()
[all …]
H A Damdgpu_vi.c92 static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg) in vi_pcie_rreg() argument
97 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
101 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
105 static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_pcie_wreg() argument
109 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
114 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
117 static u32 vi_smc_rreg(struct amdgpu_device *adev, u32 reg) in vi_smc_rreg() argument
122 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_smc_rreg()
125 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in vi_smc_rreg()
129 static void vi_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_smc_wreg() argument
[all …]
H A Damdgpu_gart.c77 static int amdgpu_gart_dummy_page_init(struct amdgpu_device *adev) in amdgpu_gart_dummy_page_init() argument
85 if (adev->dummy_page_map != NULL) in amdgpu_gart_dummy_page_init()
88 error = bus_dmamem_alloc(adev->ddev->dmat, PAGE_SIZE, PAGE_SIZE, 0, in amdgpu_gart_dummy_page_init()
89 &adev->dummy_page_seg, 1, &rsegs, BUS_DMA_WAITOK); in amdgpu_gart_dummy_page_init()
93 error = bus_dmamem_map(adev->ddev->dmat, &adev->dummy_page_seg, 1, in amdgpu_gart_dummy_page_init()
98 bus_dmamem_unmap(adev->ddev->dmat, p, PAGE_SIZE); in amdgpu_gart_dummy_page_init()
99 error = bus_dmamap_create(adev->ddev->dmat, PAGE_SIZE, 1, PAGE_SIZE, 0, in amdgpu_gart_dummy_page_init()
100 BUS_DMA_WAITOK, &adev->dummy_page_map); in amdgpu_gart_dummy_page_init()
103 error = bus_dmamap_load_raw(adev->ddev->dmat, adev->dummy_page_map, in amdgpu_gart_dummy_page_init()
104 &adev->dummy_page_seg, 1, PAGE_SIZE, BUS_DMA_WAITOK); in amdgpu_gart_dummy_page_init()
[all …]
H A Damdgpu_gmc_v10_0.c66 gmc_v10_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v10_0_vm_fault_interrupt_state() argument
92 hub = &adev->vmhub[AMDGPU_MMHUB_0]; in gmc_v10_0_vm_fault_interrupt_state()
101 hub = &adev->vmhub[AMDGPU_GFXHUB_0]; in gmc_v10_0_vm_fault_interrupt_state()
111 hub = &adev->vmhub[AMDGPU_MMHUB_0]; in gmc_v10_0_vm_fault_interrupt_state()
120 hub = &adev->vmhub[AMDGPU_GFXHUB_0]; in gmc_v10_0_vm_fault_interrupt_state()
135 static int gmc_v10_0_process_interrupt(struct amdgpu_device *adev, in gmc_v10_0_process_interrupt() argument
139 struct amdgpu_vmhub *hub = &adev->vmhub[entry->vmid_src]; in gmc_v10_0_process_interrupt()
146 if (!amdgpu_sriov_vf(adev)) { in gmc_v10_0_process_interrupt()
163 amdgpu_vm_get_task_info(adev, entry->pasid, &task_info); in gmc_v10_0_process_interrupt()
165 dev_err(adev->dev, in gmc_v10_0_process_interrupt()
[all …]
H A Damdgpu_gfx.c43 int amdgpu_gfx_mec_queue_to_bit(struct amdgpu_device *adev, int mec, in amdgpu_gfx_mec_queue_to_bit() argument
48 bit += mec * adev->gfx.mec.num_pipe_per_mec in amdgpu_gfx_mec_queue_to_bit()
49 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
50 bit += pipe * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
56 void amdgpu_gfx_bit_to_mec_queue(struct amdgpu_device *adev, int bit, in amdgpu_gfx_bit_to_mec_queue() argument
59 *queue = bit % adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_bit_to_mec_queue()
60 *pipe = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_gfx_bit_to_mec_queue()
61 % adev->gfx.mec.num_pipe_per_mec; in amdgpu_gfx_bit_to_mec_queue()
62 *mec = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_gfx_bit_to_mec_queue()
63 / adev->gfx.mec.num_pipe_per_mec; in amdgpu_gfx_bit_to_mec_queue()
[all …]
H A Damdgpu_pm.c50 static int amdgpu_debugfs_pm_init(struct amdgpu_device *adev);
93 void amdgpu_pm_acpi_event_handler(struct amdgpu_device *adev) in amdgpu_pm_acpi_event_handler() argument
95 if (adev->pm.dpm_enabled) { in amdgpu_pm_acpi_event_handler()
96 mutex_lock(&adev->pm.mutex); in amdgpu_pm_acpi_event_handler()
98 adev->pm.ac_power = true; in amdgpu_pm_acpi_event_handler()
100 adev->pm.ac_power = false; in amdgpu_pm_acpi_event_handler()
101 if (adev->powerplay.pp_funcs->enable_bapm) in amdgpu_pm_acpi_event_handler()
102 amdgpu_dpm_enable_bapm(adev, adev->pm.ac_power); in amdgpu_pm_acpi_event_handler()
103 mutex_unlock(&adev->pm.mutex); in amdgpu_pm_acpi_event_handler()
107 int amdgpu_dpm_read_sensor(struct amdgpu_device *adev, enum amd_pp_sensors sensor, in amdgpu_dpm_read_sensor() argument
[all …]
H A Damdgpu_dpm.h258 #define amdgpu_dpm_pre_set_power_state(adev) \ argument
259 ((adev)->powerplay.pp_funcs->pre_set_power_state((adev)->powerplay.pp_handle))
261 #define amdgpu_dpm_set_power_state(adev) \ argument
262 ((adev)->powerplay.pp_funcs->set_power_state((adev)->powerplay.pp_handle))
264 #define amdgpu_dpm_post_set_power_state(adev) \ argument
265 ((adev)->powerplay.pp_funcs->post_set_power_state((adev)->powerplay.pp_handle))
267 #define amdgpu_dpm_display_configuration_changed(adev) \ argument
268 ((adev)->powerplay.pp_funcs->display_configuration_changed((adev)->powerplay.pp_handle))
270 #define amdgpu_dpm_print_power_state(adev, ps) \ argument
271 ((adev)->powerplay.pp_funcs->print_power_state((adev)->powerplay.pp_handle, (ps)))
[all …]
H A Damdgpu_acp.c103 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_init() local
105 adev->acp.parent = adev->dev; in acp_sw_init()
107 adev->acp.cgs_device = in acp_sw_init()
108 amdgpu_cgs_create_device(adev); in acp_sw_init()
109 if (!adev->acp.cgs_device) in acp_sw_init()
117 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_fini() local
119 if (adev->acp.cgs_device) in acp_sw_fini()
120 amdgpu_cgs_destroy_device(adev->acp.cgs_device); in acp_sw_fini()
128 void *adev; member
135 struct amdgpu_device *adev; in acp_poweroff() local
[all …]
H A Damdgpu_irq.c91 struct amdgpu_device *adev = container_of(work, struct amdgpu_device, in amdgpu_hotplug_work_func() local
93 struct drm_device *dev = adev->ddev; in amdgpu_hotplug_work_func()
115 void amdgpu_irq_disable_all(struct amdgpu_device *adev) in amdgpu_irq_disable_all() argument
121 spin_lock_irqsave(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
123 if (!adev->irq.client[i].sources) in amdgpu_irq_disable_all()
127 struct amdgpu_irq_src *src = adev->irq.client[i].sources[j]; in amdgpu_irq_disable_all()
134 r = src->funcs->set(adev, src, k, in amdgpu_irq_disable_all()
142 spin_unlock_irqrestore(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
159 struct amdgpu_device *adev = dev->dev_private; in amdgpu_irq_handler() local
162 ret = amdgpu_ih_process(adev, &adev->irq.ih); in amdgpu_irq_handler()
[all …]
H A Damdgpu_kv_dpm.c51 static void kv_dpm_set_irq_funcs(struct amdgpu_device *adev);
52 static int kv_enable_nb_dpm(struct amdgpu_device *adev,
54 static void kv_init_graphics_levels(struct amdgpu_device *adev);
55 static int kv_calculate_ds_divider(struct amdgpu_device *adev);
56 static int kv_calculate_nbps_level_settings(struct amdgpu_device *adev);
57 static int kv_calculate_dpm_settings(struct amdgpu_device *adev);
58 static void kv_enable_new_levels(struct amdgpu_device *adev);
59 static void kv_program_nbps_index_settings(struct amdgpu_device *adev,
61 static int kv_set_enabled_level(struct amdgpu_device *adev, u32 level);
62 static int kv_set_enabled_levels(struct amdgpu_device *adev);
[all …]
H A Damdgpu_amdkfd.c69 void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev) in amdgpu_amdkfd_device_probe() argument
71 bool vf = amdgpu_sriov_vf(adev); in amdgpu_amdkfd_device_probe()
73 adev->kfd.dev = kgd2kfd_probe((struct kgd_dev *)adev, in amdgpu_amdkfd_device_probe()
74 adev->pdev, adev->asic_type, vf); in amdgpu_amdkfd_device_probe()
76 if (adev->kfd.dev) in amdgpu_amdkfd_device_probe()
77 amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size; in amdgpu_amdkfd_device_probe()
93 static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, in amdgpu_doorbell_get_kfd_info() argument
102 if (adev->doorbell.size > adev->doorbell.num_doorbells * sizeof(u32)) { in amdgpu_doorbell_get_kfd_info()
103 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
104 *aperture_size = adev->doorbell.size; in amdgpu_doorbell_get_kfd_info()
[all …]
H A Damdgpu_cgs.c43 struct amdgpu_device *adev; member
47 struct amdgpu_device *adev = \
48 ((struct amdgpu_cgs_device *)cgs_device)->adev
149 if (adev->asic_type >= CHIP_TOPAZ) in fw_type_convert()
174 fw_version = adev->sdma.instance[0].fw_version; in amdgpu_get_firmware_version()
177 fw_version = adev->sdma.instance[1].fw_version; in amdgpu_get_firmware_version()
180 fw_version = adev->gfx.ce_fw_version; in amdgpu_get_firmware_version()
183 fw_version = adev->gfx.pfp_fw_version; in amdgpu_get_firmware_version()
186 fw_version = adev->gfx.me_fw_version; in amdgpu_get_firmware_version()
189 fw_version = adev->gfx.mec_fw_version; in amdgpu_get_firmware_version()
[all …]
H A Damdgpu_gmc_v6_0.c49 static void gmc_v6_0_set_gmc_funcs(struct amdgpu_device *adev);
50 static void gmc_v6_0_set_irq_funcs(struct amdgpu_device *adev);
80 static void gmc_v6_0_mc_stop(struct amdgpu_device *adev) in gmc_v6_0_mc_stop() argument
84 gmc_v6_0_wait_for_idle((void *)adev); in gmc_v6_0_mc_stop()
100 static void gmc_v6_0_mc_resume(struct amdgpu_device *adev) in gmc_v6_0_mc_resume() argument
114 static int gmc_v6_0_init_microcode(struct amdgpu_device *adev) in gmc_v6_0_init_microcode() argument
123 switch (adev->asic_type) { in gmc_v6_0_init_microcode()
150 err = request_firmware(&adev->gmc.fw, fw_name, adev->dev); in gmc_v6_0_init_microcode()
154 err = amdgpu_ucode_validate(adev->gmc.fw); in gmc_v6_0_init_microcode()
158 dev_err(adev->dev, in gmc_v6_0_init_microcode()
[all …]
H A Damdgpu_xgmi.c86 static int amdgpu_xgmi_sysfs_create(struct amdgpu_device *adev, in amdgpu_xgmi_sysfs_create() argument
95 hive->kobj = kobject_create_and_add("xgmi_hive_info", &adev->dev->kobj); in amdgpu_xgmi_sysfs_create()
97 dev_err(adev->dev, "XGMI: Failed to allocate sysfs entry!\n"); in amdgpu_xgmi_sysfs_create()
112 dev_err(adev->dev, "XGMI: Failed to create device file xgmi_hive_id\n"); in amdgpu_xgmi_sysfs_create()
122 static void amdgpu_xgmi_sysfs_destroy(struct amdgpu_device *adev, in amdgpu_xgmi_sysfs_destroy() argument
138 struct amdgpu_device *adev = ddev->dev_private; in amdgpu_xgmi_show_device_id() local
140 return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.xgmi.node_id); in amdgpu_xgmi_show_device_id()
150 struct amdgpu_device *adev = ddev->dev_private; in amdgpu_xgmi_show_error() local
158 fica_out = adev->df.funcs->get_fica(adev, ficaa_pie_ctl_in); in amdgpu_xgmi_show_error()
162 fica_out = adev->df.funcs->get_fica(adev, ficaa_pie_status_in); in amdgpu_xgmi_show_error()
[all …]
H A Damdgpu_rlc.c42 void amdgpu_gfx_rlc_enter_safe_mode(struct amdgpu_device *adev) in amdgpu_gfx_rlc_enter_safe_mode() argument
44 if (adev->gfx.rlc.in_safe_mode) in amdgpu_gfx_rlc_enter_safe_mode()
48 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_enter_safe_mode()
51 if (adev->cg_flags & in amdgpu_gfx_rlc_enter_safe_mode()
54 adev->gfx.rlc.funcs->set_safe_mode(adev); in amdgpu_gfx_rlc_enter_safe_mode()
55 adev->gfx.rlc.in_safe_mode = true; in amdgpu_gfx_rlc_enter_safe_mode()
66 void amdgpu_gfx_rlc_exit_safe_mode(struct amdgpu_device *adev) in amdgpu_gfx_rlc_exit_safe_mode() argument
68 if (!(adev->gfx.rlc.in_safe_mode)) in amdgpu_gfx_rlc_exit_safe_mode()
72 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_exit_safe_mode()
75 if (adev->cg_flags & in amdgpu_gfx_rlc_exit_safe_mode()
[all …]
H A Damdgpu_cik.c83 static u32 cik_pcie_rreg(struct amdgpu_device *adev, u32 reg) in cik_pcie_rreg() argument
88 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in cik_pcie_rreg()
92 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in cik_pcie_rreg()
96 static void cik_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in cik_pcie_wreg() argument
100 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in cik_pcie_wreg()
105 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in cik_pcie_wreg()
108 static u32 cik_smc_rreg(struct amdgpu_device *adev, u32 reg) in cik_smc_rreg() argument
113 spin_lock_irqsave(&adev->smc_idx_lock, flags); in cik_smc_rreg()
116 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in cik_smc_rreg()
120 static void cik_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in cik_smc_wreg() argument
[all …]
H A Damdgpu_vega20_reg_init.c34 int vega20_reg_base_init(struct amdgpu_device *adev) in vega20_reg_base_init() argument
39 adev->reg_offset[GC_HWIP][i] = (uint32_t *)(&(GC_BASE.instance[i])); in vega20_reg_base_init()
40 adev->reg_offset[HDP_HWIP][i] = (uint32_t *)(&(HDP_BASE.instance[i])); in vega20_reg_base_init()
41 adev->reg_offset[MMHUB_HWIP][i] = (uint32_t *)(&(MMHUB_BASE.instance[i])); in vega20_reg_base_init()
42 adev->reg_offset[ATHUB_HWIP][i] = (uint32_t *)(&(ATHUB_BASE.instance[i])); in vega20_reg_base_init()
43 adev->reg_offset[NBIO_HWIP][i] = (uint32_t *)(&(NBIO_BASE.instance[i])); in vega20_reg_base_init()
44 adev->reg_offset[MP0_HWIP][i] = (uint32_t *)(&(MP0_BASE.instance[i])); in vega20_reg_base_init()
45 adev->reg_offset[MP1_HWIP][i] = (uint32_t *)(&(MP1_BASE.instance[i])); in vega20_reg_base_init()
46 adev->reg_offset[UVD_HWIP][i] = (uint32_t *)(&(UVD_BASE.instance[i])); in vega20_reg_base_init()
47 adev->reg_offset[VCE_HWIP][i] = (uint32_t *)(&(VCE_BASE.instance[i])); in vega20_reg_base_init()
[all …]
H A Damdgpu_gfx_v10_0.c249 static void gfx_v10_0_set_ring_funcs(struct amdgpu_device *adev);
250 static void gfx_v10_0_set_irq_funcs(struct amdgpu_device *adev);
251 static void gfx_v10_0_set_gds_init(struct amdgpu_device *adev);
252 static void gfx_v10_0_set_rlc_funcs(struct amdgpu_device *adev);
253 static int gfx_v10_0_get_cu_info(struct amdgpu_device *adev,
255 static uint64_t gfx_v10_0_get_gpu_clock_counter(struct amdgpu_device *adev);
256 static void gfx_v10_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
258 static u32 gfx_v10_0_get_wgp_active_bitmap_per_sh(struct amdgpu_device *adev);
260 static int gfx_v10_0_rlc_backdoor_autoload_buffer_init(struct amdgpu_device *adev);
261 static void gfx_v10_0_rlc_backdoor_autoload_buffer_fini(struct amdgpu_device *adev);
[all …]
H A Damdgpu_gmc_v8_0.c59 static void gmc_v8_0_set_gmc_funcs(struct amdgpu_device *adev);
60 static void gmc_v8_0_set_irq_funcs(struct amdgpu_device *adev);
134 static void gmc_v8_0_init_golden_registers(struct amdgpu_device *adev) in gmc_v8_0_init_golden_registers() argument
136 switch (adev->asic_type) { in gmc_v8_0_init_golden_registers()
138 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
141 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
146 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
149 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
156 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
161 amdgpu_device_program_register_sequence(adev, in gmc_v8_0_init_golden_registers()
[all …]
H A Damdgpu_gmc_v7_0.c58 static void gmc_v7_0_set_gmc_funcs(struct amdgpu_device *adev);
59 static void gmc_v7_0_set_irq_funcs(struct amdgpu_device *adev);
79 static void gmc_v7_0_init_golden_registers(struct amdgpu_device *adev) in gmc_v7_0_init_golden_registers() argument
81 switch (adev->asic_type) { in gmc_v7_0_init_golden_registers()
83 amdgpu_device_program_register_sequence(adev, in gmc_v7_0_init_golden_registers()
86 amdgpu_device_program_register_sequence(adev, in gmc_v7_0_init_golden_registers()
95 static void gmc_v7_0_mc_stop(struct amdgpu_device *adev) in gmc_v7_0_mc_stop() argument
99 gmc_v7_0_wait_for_idle((void *)adev); in gmc_v7_0_mc_stop()
114 static void gmc_v7_0_mc_resume(struct amdgpu_device *adev) in gmc_v7_0_mc_resume() argument
137 static int gmc_v7_0_init_microcode(struct amdgpu_device *adev) in gmc_v7_0_init_microcode() argument
[all …]
H A Damdgpu_kms.c53 void amdgpu_unregister_gpu_instance(struct amdgpu_device *adev) in amdgpu_unregister_gpu_instance() argument
62 if (gpu_instance->adev == adev) { in amdgpu_unregister_gpu_instance()
66 if (adev->flags & AMD_IS_APU) in amdgpu_unregister_gpu_instance()
87 struct amdgpu_device *adev = dev->dev_private; in amdgpu_driver_unload_kms() local
89 if (adev == NULL) in amdgpu_driver_unload_kms()
92 amdgpu_unregister_gpu_instance(adev); in amdgpu_driver_unload_kms()
94 if (adev->rmmio_size == 0) in amdgpu_driver_unload_kms()
97 if (amdgpu_sriov_vf(adev)) in amdgpu_driver_unload_kms()
98 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_driver_unload_kms()
100 if (adev->runpm) { in amdgpu_driver_unload_kms()
[all …]
H A Damdgpu_mxgpu_nv.c40 static void xgpu_nv_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_nv_mailbox_send_ack() argument
45 static void xgpu_nv_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_nv_mailbox_set_valid() argument
59 static enum idh_event xgpu_nv_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_nv_mailbox_peek_msg() argument
66 static int xgpu_nv_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_msg() argument
76 xgpu_nv_mailbox_send_ack(adev); in xgpu_nv_mailbox_rcv_msg()
81 static uint8_t xgpu_nv_peek_ack(struct amdgpu_device *adev) in xgpu_nv_peek_ack() argument
86 static int xgpu_nv_poll_ack(struct amdgpu_device *adev) in xgpu_nv_poll_ack() argument
105 static int xgpu_nv_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_nv_poll_msg() argument
110 r = xgpu_nv_mailbox_rcv_msg(adev, event); in xgpu_nv_poll_msg()
123 static void xgpu_nv_mailbox_trans_msg (struct amdgpu_device *adev, in xgpu_nv_mailbox_trans_msg() argument
[all …]

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 14