/netbsd-src/external/gpl3/gcc/dist/libsanitizer/tsan/
tsan_vector_clock.cpp
    31  m128* vclk = reinterpret_cast<m128*>(clk_);    in Reset()
    32  for (uptr i = 0; i < kVectorClockSize; i++) _mm_store_si128(&vclk[i], z);    in Reset()
    95  m128* __restrict vclk = reinterpret_cast<m128*>(clk_);    in ReleaseStoreAcquire() local
    98  m128 c = _mm_load_si128(&vclk[i]);    in ReleaseStoreAcquire()
   101  _mm_store_si128(&vclk[i], m);    in ReleaseStoreAcquire()
   115  m128* __restrict vclk = reinterpret_cast<m128*>(clk_);    in ReleaseAcquire() local
   117  m128 c = _mm_load_si128(&vclk[i]);    in ReleaseAcquire()
   121  _mm_store_si128(&vclk[i], m);    in ReleaseAcquire()
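The tsan_vector_clock.cpp hits walk the clock array in 128-bit chunks with SSE loads and stores; the merged value m that lines 101/121 store is computed between the load and the store, outside the matched lines. Below is a minimal sketch of that idea in C (the file itself is C++ with a TSan m128 typedef), assuming SSE4.1 for _mm_max_epu16 and 16-bit epochs; kClockSize, dst and src are illustrative names, not TSan's.

/* Sketch: element-wise max-merge of two vector clocks, 8 epochs per
 * 128-bit chunk, in the spirit of the vclk loads/stores matched above.
 * Assumes SSE4.1; names are illustrative, not TSan's. */
#include <smmintrin.h>          /* SSE4.1: _mm_max_epu16 */
#include <stdint.h>

enum { kClockSize = 512 };      /* number of 16-bit epochs, multiple of 8 */

static void clock_acquire_merge(uint16_t *dst, const uint16_t *src)
{
    __m128i *d = (__m128i *)dst;
    const __m128i *s = (const __m128i *)src;
    unsigned i;

    for (i = 0; i < kClockSize / 8; i++) {
        __m128i c = _mm_loadu_si128(&d[i]);   /* current clock chunk */
        __m128i o = _mm_loadu_si128(&s[i]);   /* incoming clock chunk */
        __m128i m = _mm_max_epu16(c, o);      /* per-epoch maximum */
        _mm_storeu_si128(&d[i], m);
    }
}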
/netbsd-src/sys/external/bsd/drm2/dist/drm/radeon/
radeon_rs780_dpm.c
   576  if ((new_ps->vclk == old_ps->vclk) &&    in rs780_set_uvd_clock_before_set_eng_clock()
   583  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rs780_set_uvd_clock_before_set_eng_clock()
   593  if ((new_ps->vclk == old_ps->vclk) &&    in rs780_set_uvd_clock_after_set_eng_clock()
   600  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rs780_set_uvd_clock_after_set_eng_clock()
   733  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in rs780_parse_pplib_non_clock_info()
   736  rps->vclk = 0;    in rs780_parse_pplib_non_clock_info()
   741  if ((rps->vclk == 0) || (rps->dclk == 0)) {    in rs780_parse_pplib_non_clock_info()
   742  rps->vclk = RS780_DEFAULT_VCLK_FREQ;    in rs780_parse_pplib_non_clock_info()
   951  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rs780_dpm_print_power_state()
  1001  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rs780_dpm_debugfs_print_current_performance_level()
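The rs780 hits, and the matching ones in the other radeon *_dpm.c files below, show a recurring pattern: UVD clocks are reprogrammed either before or after the engine clock switch, and the call is skipped when the new power state asks for the same vclk/dclk as the old one. The sketch below only illustrates that control flow; the struct and the sclk-based ordering test are placeholders, since the exact per-ASIC conditions are not part of the hit list.

#include <stdint.h>
#include <stdio.h>

struct ps {                    /* stand-in for struct radeon_ps */
    uint32_t vclk, dclk;       /* UVD clocks */
    uint32_t sclk;             /* engine clock */
};

static void set_uvd_clocks(uint32_t vclk, uint32_t dclk)
{
    printf("program UPLL: vclk=%u dclk=%u\n", vclk, dclk);
}

/* Runs before the engine clock is switched. */
static void set_uvd_clock_before_eng_clock(const struct ps *new_ps,
                                           const struct ps *old_ps)
{
    if (new_ps->vclk == old_ps->vclk && new_ps->dclk == old_ps->dclk)
        return;                      /* nothing changed, skip */
    if (new_ps->sclk >= old_ps->sclk)
        return;                      /* raising sclk: let the "after" hook do it */
    set_uvd_clocks(new_ps->vclk, new_ps->dclk);
}

/* Runs after the engine clock has been switched. */
static void set_uvd_clock_after_eng_clock(const struct ps *new_ps,
                                          const struct ps *old_ps)
{
    if (new_ps->vclk == old_ps->vclk && new_ps->dclk == old_ps->dclk)
        return;
    if (new_ps->sclk < old_ps->sclk)
        return;                      /* already handled before the switch */
    set_uvd_clocks(new_ps->vclk, new_ps->dclk);
}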
radeon_trinity_dpm.c
   903  if ((rps->vclk == 0) && (rps->dclk == 0))    in trinity_uvd_clocks_zero()
   915  if ((rps1->vclk == rps2->vclk) &&    in trinity_uvd_clocks_equal()
   948  radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk);    in trinity_setup_uvd_clocks()
   959  radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk);    in trinity_setup_uvd_clocks()
  1463  if ((rps->vclk == pi->sys_info.uvd_clock_table_entries[i].vclk) &&    in trinity_get_uvd_clock_index()
  1697  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in trinity_parse_pplib_non_clock_info()
  1700  rps->vclk = 0;    in trinity_parse_pplib_non_clock_info()
  1938  pi->sys_info.uvd_clock_table_entries[i].vclk =    in trinity_parse_sys_info_table()
  2024  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in trinity_dpm_print_power_state()
  2050  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in trinity_dpm_debugfs_print_current_performance_level()
radeon_sumo_dpm.c
   829  radeon_set_uvd_clocks(rdev, new_rps->vclk, new_rps->dclk);    in sumo_setup_uvd_clocks()
   845  if ((new_rps->vclk == old_rps->vclk) &&    in sumo_set_uvd_clock_before_set_eng_clock()
   863  if ((new_rps->vclk == old_rps->vclk) &&    in sumo_set_uvd_clock_after_set_eng_clock()
  1419  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in sumo_parse_pplib_non_clock_info()
  1422  rps->vclk = 0;    in sumo_parse_pplib_non_clock_info()
  1807  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in sumo_dpm_print_power_state()
  1831  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in sumo_dpm_debugfs_print_current_performance_level()
  1839  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in sumo_dpm_debugfs_print_current_performance_level()
radeon_rv770_dpm.c
  1443  if ((new_ps->vclk == old_ps->vclk) &&    in rv770_set_uvd_clock_before_set_eng_clock()
  1450  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rv770_set_uvd_clock_before_set_eng_clock()
  1460  if ((new_ps->vclk == old_ps->vclk) &&    in rv770_set_uvd_clock_after_set_eng_clock()
  1467  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rv770_set_uvd_clock_after_set_eng_clock()
  2158  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in rv7xx_parse_pplib_non_clock_info()
  2161  rps->vclk = 0;    in rv7xx_parse_pplib_non_clock_info()
  2166  if ((rps->vclk == 0) || (rps->dclk == 0)) {    in rv7xx_parse_pplib_non_clock_info()
  2167  rps->vclk = RV770_DEFAULT_VCLK_FREQ;    in rv7xx_parse_pplib_non_clock_info()
  2445  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rv770_dpm_print_power_state()
  2490  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rv770_dpm_debugfs_print_current_performance_level()
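The *_parse_pplib_non_clock_info() hits share one shape: read ulVCLK/ulDCLK from the little-endian ATOM table, zero them when the table carries no UVD data, and fall back to a default frequency when a state that needs UVD reports zero clocks (line 2166/2167 above). A hedged sketch of that fallback, with placeholder struct names and an illustrative default value:

#include <stdint.h>

#define DEFAULT_VCLK_FREQ 53300    /* 10 kHz units; illustrative, not the driver's constant */
#define DEFAULT_DCLK_FREQ 40000

struct non_clock_info { uint32_t ulVCLK, ulDCLK; };   /* little-endian table fields */
struct power_state    { uint32_t vclk, dclk; int needs_uvd; };

/* stand-in for the kernel's le32_to_cpu(); a no-op on little-endian hosts */
static uint32_t le32(uint32_t v) { return v; }

static void parse_non_clock_info(struct power_state *rps,
                                 const struct non_clock_info *nci,
                                 int table_has_uvd)
{
    if (table_has_uvd) {
        rps->vclk = le32(nci->ulVCLK);
        rps->dclk = le32(nci->ulDCLK);
    } else {
        rps->vclk = 0;
        rps->dclk = 0;
    }
    /* a UVD state must still get a usable UPLL rate even if the BIOS
     * table left the fields zeroed */
    if (rps->needs_uvd && (rps->vclk == 0 || rps->dclk == 0)) {
        rps->vclk = DEFAULT_VCLK_FREQ;
        rps->dclk = DEFAULT_DCLK_FREQ;
    }
}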
radeon_rv6xx_dpm.c
  1523  if ((new_ps->vclk == old_ps->vclk) &&    in rv6xx_set_uvd_clock_before_set_eng_clock()
  1530  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rv6xx_set_uvd_clock_before_set_eng_clock()
  1540  if ((new_ps->vclk == old_ps->vclk) &&    in rv6xx_set_uvd_clock_after_set_eng_clock()
  1547  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in rv6xx_set_uvd_clock_after_set_eng_clock()
  1808  rps->vclk = RV6XX_DEFAULT_VCLK_FREQ;    in rv6xx_parse_pplib_non_clock_info()
  1811  rps->vclk = 0;    in rv6xx_parse_pplib_non_clock_info()
  2020  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rv6xx_dpm_print_power_state()
  2053  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in rv6xx_dpm_debugfs_print_current_performance_level()
radeon_uvd.c
   967  unsigned vclk, unsigned dclk,    in radeon_uvd_calc_upll_dividers() argument
   982  vco_min = max(max(vco_min, vclk), dclk);    in radeon_uvd_calc_upll_dividers()
   997  vclk_div = radeon_uvd_calc_upll_post_div(vco_freq, vclk,    in radeon_uvd_calc_upll_dividers()
  1009  score = vclk - (vco_freq / vclk_div) + dclk - (vco_freq / dclk_div);    in radeon_uvd_calc_upll_dividers()
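The radeon_uvd.c hits outline the UPLL divider search: clamp the minimum VCO to the faster of the two requested clocks (line 982), derive integer post-dividers for vclk and dclk at each candidate VCO frequency (line 997), and keep the candidate whose achieved clocks fall short of the targets by the least (the score at line 1009). The sketch below reproduces that scoring idea with simplified parameters; the feedback-divider handling and exact ranges of the real function are omitted.

#include <stdint.h>

/* Smallest divider >= pd_min whose output does not exceed the target. */
static unsigned calc_post_div(unsigned vco_freq, unsigned target, unsigned pd_min)
{
    unsigned div = vco_freq / target;

    if (div == 0)
        div = 1;                      /* caller keeps vco >= target, but be safe */
    if (div < pd_min)
        div = pd_min;
    while (vco_freq / div > target)   /* never overshoot the requested clock */
        div++;
    return div;
}

static int calc_upll_dividers(unsigned vclk, unsigned dclk,
                              unsigned vco_min, unsigned vco_max,
                              unsigned pd_min, unsigned pd_max,
                              unsigned *vclk_div, unsigned *dclk_div)
{
    unsigned vco_freq, best_score = ~0u;

    /* the VCO must run at least as fast as the faster requested clock */
    if (vco_min < vclk) vco_min = vclk;
    if (vco_min < dclk) vco_min = dclk;

    for (vco_freq = vco_min; vco_freq <= vco_max; vco_freq += 100) {
        unsigned vd = calc_post_div(vco_freq, vclk, pd_min);
        unsigned dd = calc_post_div(vco_freq, dclk, pd_min);
        unsigned score;

        if (vd > pd_max || dd > pd_max)
            break;                    /* VCO too fast for legal post-dividers */

        /* combined shortfall of the derived clocks vs. their targets */
        score = (vclk - vco_freq / vd) + (dclk - vco_freq / dd);
        if (score < best_score) {
            best_score = score;
            *vclk_div = vd;
            *dclk_div = dd;
            if (score == 0)
                break;                /* exact match, stop searching */
        }
    }
    return best_score == ~0u ? -1 : 0;
}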
trinity_dpm.h
    71  u32 vclk;    member
radeon_asic.h
   413  int r600_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
   480  int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
   537  int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
   538  int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
   751  int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
   789  int cik_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
radeon_rv770.c
    54  int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
    56  int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)    in rv770_set_uvd_clocks() argument
    63  return evergreen_set_uvd_clocks(rdev, vclk, dclk);    in rv770_set_uvd_clocks()
    70  if (!vclk || !dclk) {    in rv770_set_uvd_clocks()
    76  r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000,    in rv770_set_uvd_clocks()
radeon_ni_dpm.c
  3519  if ((new_ps->vclk == old_ps->vclk) &&    in ni_set_uvd_clock_before_set_eng_clock()
  3527  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in ni_set_uvd_clock_before_set_eng_clock()
  3537  if ((new_ps->vclk == old_ps->vclk) &&    in ni_set_uvd_clock_after_set_eng_clock()
  3545  radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);    in ni_set_uvd_clock_after_set_eng_clock()
  3908  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in ni_parse_pplib_non_clock_info()
  3911  rps->vclk = RV770_DEFAULT_VCLK_FREQ;    in ni_parse_pplib_non_clock_info()
  3914  rps->vclk = 0;    in ni_parse_pplib_non_clock_info()
  4295  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in ni_dpm_print_power_state()
  4324  seq_printf(m, "uvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in ni_dpm_debugfs_print_current_performance_level()
radeon_kv_dpm.c
   841  pi->uvd_level[i].VclkFrequency = cpu_to_be32(table->entries[i].vclk);    in kv_populate_uvd_table()
   846  (u8)kv_get_clk_bypass(rdev, table->entries[i].vclk);    in kv_populate_uvd_table()
   851  table->entries[i].vclk, false, &dividers);    in kv_populate_uvd_table()
  2229  pi->video_start = new_rps->dclk || new_rps->vclk ||    in kv_apply_state_adjust_rules()
  2602  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in kv_parse_pplib_non_clock_info()
  2605  rps->vclk = 0;    in kv_parse_pplib_non_clock_info()
  2865  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in kv_dpm_print_power_state()
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/powerplay/hwmgr/
amdgpu_smu8_hwmgr.c
   147  if (clock <= ptable->entries[i].vclk)    in smu8_get_uvd_level()
   155  if (clock >= ptable->entries[i].vclk)    in smu8_get_uvd_level()
   518  (i < uvd_table->count) ? uvd_table->entries[i].vclk : 0;    in smu8_upload_pptable_to_smu()
   602  clock = table->entries[level].vclk;    in smu8_init_uvd_limit()
   604  clock = table->entries[table->count - 1].vclk;    in smu8_init_uvd_limit()
  1393  smu8_ps->uvd_clocks.vclk = ps->uvd_clocks.VCLK;    in smu8_dpm_get_pp_table_entry()
  1697  uint32_t sclk, vclk, dclk, ecclk, tmp, activity_percent;    in smu8_read_sensor() local
  1731  vclk = uvd_table->entries[uvd_index].vclk;    in smu8_read_sensor()
  1732  *((uint32_t *)value) = vclk;    in smu8_read_sensor()
  1858  ptable->entries[ptable->count - 1].vclk;    in smu8_dpm_update_uvd_dpm()
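The smu8 hits at lines 147/155 scan the UVD clock-voltage dependency table in opposite directions to map a requested clock onto a DPM level, and lines 602/604 clamp to the last entry when the request is out of range. A simplified sketch of those lookups; the table types and the up/down split are placeholders for the powerplay structures and flags, which are not shown in the hits:

#include <stdint.h>

struct uvd_entry { uint32_t vclk, dclk, voltage; };
struct uvd_table { uint32_t count; struct uvd_entry entries[8]; };

/* lowest level whose vclk satisfies (>=) the requested clock */
static uint32_t get_uvd_level_up(const struct uvd_table *t, uint32_t clock)
{
    uint32_t i;

    for (i = 0; i < t->count; i++)
        if (clock <= t->entries[i].vclk)
            return i;
    return t->count - 1;       /* request above the table: clamp to the top level */
}

/* highest level whose vclk does not exceed the requested clock */
static uint32_t get_uvd_level_down(const struct uvd_table *t, uint32_t clock)
{
    uint32_t i;

    for (i = t->count; i-- > 0; )
        if (clock >= t->entries[i].vclk)
            return i;
    return 0;                  /* request below the table: clamp to the bottom level */
}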
hwmgr_ppt.h
    62  uint32_t vclk; /* UVD V-clock */    member
smu10_hwmgr.h
    99  uint32_t vclk;    member
smu8_hwmgr.h
   116  uint32_t vclk;    member
smu7_hwmgr.h
    70  uint32_t vclk;    member
/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/dispnv04/
nouveau_dispnv04_arb.c
   256  nouveau_calc_arb(struct drm_device *dev, int vclk, int bpp, int *burst, int *lwm)    in nouveau_calc_arb() argument
   261  nv04_update_arb(dev, vclk, bpp, burst, lwm);    in nouveau_calc_arb()
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/powerplay/inc/
power_state.h
   185  unsigned long vclk;    member
amdgpu_smu.h
   133  uint32_t vclk;    member
   219  uint32_t vclk;    member
hwmgr.h
   111  uint32_t vclk;    member
   138  uint32_t vclk;    member
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/
amdgpu_dpm.h
    62  u32 vclk;    member
   164  u32 vclk;    member
amdgpu_kv_dpm.c
   924  pi->uvd_level[i].VclkFrequency = cpu_to_be32(table->entries[i].vclk);    in kv_populate_uvd_table()
   929  (u8)kv_get_clk_bypass(adev, table->entries[i].vclk);    in kv_populate_uvd_table()
   934  table->entries[i].vclk, false, &dividers);    in kv_populate_uvd_table()
  2294  pi->video_start = new_rps->dclk || new_rps->vclk ||    in kv_apply_state_adjust_rules()
  2670  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in kv_parse_pplib_non_clock_info()
  2673  rps->vclk = 0;    in kv_parse_pplib_non_clock_info()
  2909  printk("\tuvd vclk: %d dclk: %d\n", rps->vclk, rps->dclk);    in kv_dpm_print_power_state()
  3282  *equal = ((cps->vclk == rps->vclk) && (cps->dclk == rps->dclk));    in kv_check_state_equal()
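The kv_populate_uvd_table() hits (here and in radeon_kv_dpm.c above) convert each table entry's vclk with cpu_to_be32 before handing it to the SMC, i.e. the firmware expects big-endian frequencies; line 3282 additionally shows state equality being judged on vclk/dclk alone. The sketch below only mirrors the byte-order step; the SMC level layout and the divider/bypass lookups of the real function are reduced to placeholders.

#include <stdint.h>

/* stand-in for the kernel's cpu_to_be32(); this swap is for little-endian hosts */
static uint32_t cpu_to_be32_sketch(uint32_t v)
{
    return ((v & 0x000000ffu) << 24) | ((v & 0x0000ff00u) << 8) |
           ((v & 0x00ff0000u) >> 8)  | ((v & 0xff000000u) >> 24);
}

struct host_uvd_entry { uint32_t vclk, dclk; };                    /* CPU byte order */
struct smc_uvd_level  { uint32_t VclkFrequency, DclkFrequency;     /* big-endian */
                        uint8_t  Divider; };

static void populate_uvd_table(struct smc_uvd_level *smc,
                               const struct host_uvd_entry *host,
                               unsigned count)
{
    unsigned i;

    for (i = 0; i < count; i++) {
        smc[i].VclkFrequency = cpu_to_be32_sketch(host[i].vclk);
        smc[i].DclkFrequency = cpu_to_be32_sketch(host[i].dclk);
        /* the real code also queries ATOM for a PLL divider and a
         * bypass setting per entry; omitted here */
        smc[i].Divider = 0;
    }
}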
amdgpu_si_dpm.c
  2377  amdgpu_state->vclk && amdgpu_state->dclk)    in si_should_disable_uvd_powertune()
  3184  if ((new_ps->vclk == old_ps->vclk) &&    in ni_set_uvd_clock_before_set_eng_clock()
  3192  amdgpu_asic_set_uvd_clocks(adev, new_ps->vclk, new_ps->dclk);    in ni_set_uvd_clock_before_set_eng_clock()
  3202  if ((new_ps->vclk == old_ps->vclk) &&    in ni_set_uvd_clock_after_set_eng_clock()
  3210  amdgpu_asic_set_uvd_clocks(adev, new_ps->vclk, new_ps->dclk);    in ni_set_uvd_clock_after_set_eng_clock()
  3485  if (rps->vclk || rps->dclk) {    in si_apply_state_adjust_rules()
  5636  if ((amdgpu_state->vclk != 0) || (amdgpu_state->dclk != 0))    in si_is_state_ulv_compatible()
  5673  if (amdgpu_state->vclk && amdgpu_state->dclk) {    in si_convert_power_state_to_smc()
  7121  rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);    in si_parse_pplib_non_clock_info()
  7124  rps->vclk = RV770_DEFAULT_VCLK_FREQ;    in si_parse_pplib_non_clock_info()
  [all …]
/netbsd-src/sys/dev/pci/
pm2fb.c
  1564  uint32_t vclk, tmp;    in pm2fb_set_dac() local
  1604  vclk = bus_space_read_4(sc->sc_memt, sc->sc_regh, PM2_VCLKCTL);    in pm2fb_set_dac()
  1606  vclk & 0xfffffffc);    in pm2fb_set_dac()
  1642  uint32_t vclk;    in pm2vfb_set_dac() local
  1683  vclk = bus_space_read_4(sc->sc_memt, sc->sc_regh, PM2_VCLKCTL);    in pm2vfb_set_dac()
  1685  vclk & 0xfffffffc);    in pm2vfb_set_dac()
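The pm2fb.c hits show both DAC setup paths reading the video clock control register and writing it back with the low two bits cleared (vclk & 0xfffffffc); the write call itself is split across source lines, so only its last argument appears above. A hedged NetBSD-style sketch of that read-modify-write; the register offset below is a placeholder, and writing the masked value back to the same register is an assumption drawn from line 1604.

#include <sys/types.h>
#include <sys/bus.h>

#define PM2_VCLKCTL 0x0000      /* placeholder offset; the real value comes from the driver's register header */

static void
pm2_vclk_clear_low_bits(bus_space_tag_t memt, bus_space_handle_t regh)
{
    uint32_t vclk;

    /* read the current video clock control value ... */
    vclk = bus_space_read_4(memt, regh, PM2_VCLKCTL);
    /* ... and write it back with bits 1:0 cleared, as in the hits above */
    bus_space_write_4(memt, regh, PM2_VCLKCTL, vclk & 0xfffffffc);
}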