/netbsd-src/sys/external/bsd/drm2/dist/drm/ttm/

    ttm_bo.c
         92  uint32_t *mem_type)    in ttm_mem_type_from_place() [argument]
        100  *mem_type = pos - 1;    in ttm_mem_type_from_place()
        105  int mem_type)    in ttm_mem_type_debug() [argument]
        107  struct ttm_mem_type_manager *man = &bdev->man[mem_type];    in ttm_mem_type_debug()
        116  if (mem_type != TTM_PL_SYSTEM)    in ttm_mem_type_debug()
        124  int i, ret, mem_type;    in ttm_bo_mem_space_debug() [local]
        131  &mem_type);    in ttm_bo_mem_space_debug()
        135  i, placement->placement[i].flags, mem_type);    in ttm_bo_mem_space_debug()
        136  ttm_mem_type_debug(bo->bdev, &p, mem_type);    in ttm_bo_mem_space_debug()
        208  man = &bdev->man[mem->mem_type];    in ttm_bo_add_mem_to_lru()
        [all …]

    ttm_bo_util.c
         71  if (old_mem->mem_type != TTM_PL_SYSTEM) {    in ttm_bo_move_ttm()
         84  old_mem->mem_type = TTM_PL_SYSTEM;    in ttm_bo_move_ttm()
         91  if (new_mem->mem_type != TTM_PL_SYSTEM) {    in ttm_bo_move_ttm()
        144  struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in ttm_mem_io_reserve()
        168  struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in ttm_mem_io_free()
        187  &bo->bdev->man[mem->mem_type];    in ttm_mem_io_reserve_vm()
        214  struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in ttm_mem_reg_ioremap()
        266  man = &bdev->man[mem->mem_type];    in ttm_mem_reg_iounmap()
        418  struct ttm_mem_type_manager *man = &bdev->man[new_mem->mem_type];    in ttm_bo_move_memcpy()
        469  if ((old_mem->mem_type == new_mem->mem_type) &&    in ttm_bo_move_memcpy()
        [all …]
/netbsd-src/external/gpl3/gcc/dist/libsanitizer/sanitizer_common/

    sanitizer_posix.cpp
         44  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
         47  MAP_PRIVATE | MAP_ANON, mem_type);    in MmapOrDie()
         50  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno, raw_report);    in MmapOrDie()
         66  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
         69  MAP_PRIVATE | MAP_ANON, mem_type);    in MmapOrDieOnFatalError()
         74  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno);    in MmapOrDieOnFatalError()
         84  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
         88  uptr map_res = (uptr)MmapOrDieOnFatalError(map_size, mem_type);    in MmapAlignedOrDieOnFatalError()
        103  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        106  MAP_PRIVATE | MAP_ANON | MAP_NORESERVE, mem_type);    in MmapNoReserveOrDie()
        [all …]
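In the sanitizer hits above, mem_type is not a placement enum as in the TTM code: it is a human-readable label for what the mapping is for, and it only matters when the allocation fails and the label is forwarded to ReportMmapFailureAndDie(). A minimal userland sketch of that pattern, assuming plain POSIX mmap; mmap_or_die and the "shadow memory" label are illustrative names, not the sanitizer_common API:

    /* Illustrative sketch of the mem_type-as-diagnostic-label pattern,
     * not the sanitizer_common implementation. */
    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/mman.h>

    /* Allocate anonymous memory or abort, naming the consumer in the report. */
    static void *mmap_or_die(size_t size, const char *mem_type)
    {
        void *p = mmap(NULL, size, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANON, -1, 0);
        if (p == MAP_FAILED) {
            /* mem_type is only used here, in the failure diagnostic. */
            fprintf(stderr, "failed to mmap 0x%zx bytes for %s: %s\n",
                    size, mem_type, strerror(errno));
            abort();
        }
        return p;
    }

    int main(void)
    {
        char *shadow = mmap_or_die(1u << 20, "shadow memory");
        shadow[0] = 1;    /* touch the mapping */
        return 0;
    }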
    sanitizer_fuchsia.cpp
        130  static void *DoAnonymousMmapOrDie(uptr size, const char *mem_type,    in DoAnonymousMmapOrDie() [argument]
        138  ReportMmapFailureAndDie(size, mem_type, "zx_vmo_create", status,    in DoAnonymousMmapOrDie()
        142  _zx_object_set_property(vmo, ZX_PROP_NAME, mem_type,    in DoAnonymousMmapOrDie()
        143  internal_strlen(mem_type));    in DoAnonymousMmapOrDie()
        154  ReportMmapFailureAndDie(size, mem_type, "zx_vmar_map", status,    in DoAnonymousMmapOrDie()
        164  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        165  return DoAnonymousMmapOrDie(size, mem_type, raw_report, true);    in MmapOrDie()
        168  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        169  return MmapOrDie(size, mem_type);    in MmapNoReserveOrDie()
        172  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        [all …]

    sanitizer_win.cpp
        130  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        133  ReportMmapFailureAndDie(size, mem_type, "allocate",    in MmapOrDie()
        158  static void *ReturnNullptrOnOOMOrDie(uptr size, const char *mem_type,    in ReturnNullptrOnOOMOrDie() [argument]
        163  ReportMmapFailureAndDie(size, mem_type, mmap_type, last_error);    in ReturnNullptrOnOOMOrDie()
        166  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        169  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate");    in MmapOrDieOnFatalError()
        175  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
        185  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        192  ReportMmapFailureAndDie(size, mem_type, "deallocate", GetLastError());    in MmapAlignedOrDieOnFatalError()
        205  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        [all …]
/netbsd-src/external/gpl3/gcc.old/dist/libsanitizer/sanitizer_common/

    sanitizer_posix.cc
         42  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
         49  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno, raw_report);    in MmapOrDie()
         65  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
         74  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno);    in MmapOrDieOnFatalError()
         84  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
         88  uptr map_res = (uptr)MmapOrDieOnFatalError(map_size, mem_type);    in MmapAlignedOrDieOnFatalError()
        103  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        112  ReportMmapFailureAndDie(size, mem_type, "allocate noreserve", reserrno);    in MmapNoReserveOrDie()
        128  char mem_type[40];    in MmapFixedImpl() [local]
        129  internal_snprintf(mem_type, sizeof(mem_type), "memory at address 0x%zx",    in MmapFixedImpl()
        [all …]

    sanitizer_fuchsia.cc
        159  static void *DoAnonymousMmapOrDie(uptr size, const char *mem_type,    in DoAnonymousMmapOrDie() [argument]
        167  ReportMmapFailureAndDie(size, mem_type, "zx_vmo_create", status,    in DoAnonymousMmapOrDie()
        171  _zx_object_set_property(vmo, ZX_PROP_NAME, mem_type,    in DoAnonymousMmapOrDie()
        172  internal_strlen(mem_type));    in DoAnonymousMmapOrDie()
        183  ReportMmapFailureAndDie(size, mem_type, "zx_vmar_map", status,    in DoAnonymousMmapOrDie()
        193  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        194  return DoAnonymousMmapOrDie(size, mem_type, raw_report, true);    in MmapOrDie()
        197  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        198  return MmapOrDie(size, mem_type);    in MmapNoReserveOrDie()
        201  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        [all …]

    sanitizer_win.cc
        113  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        116  ReportMmapFailureAndDie(size, mem_type, "allocate",    in MmapOrDie()
        141  static void *ReturnNullptrOnOOMOrDie(uptr size, const char *mem_type,    in ReturnNullptrOnOOMOrDie() [argument]
        146  ReportMmapFailureAndDie(size, mem_type, mmap_type, last_error);    in ReturnNullptrOnOOMOrDie()
        149  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        152  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate");    in MmapOrDieOnFatalError()
        158  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
        168  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        175  ReportMmapFailureAndDie(size, mem_type, "deallocate", GetLastError());    in MmapAlignedOrDieOnFatalError()
        188  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        [all …]

    sanitizer_rtems.cc
        148  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        152  ReportMmapFailureAndDie(size, mem_type, "allocate", res, raw_report);    in MmapOrDie()
        158  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        164  ReportMmapFailureAndDie(size, mem_type, "allocate", false);    in MmapOrDieOnFatalError()
        172  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
        178  ReportMmapFailureAndDie(size, mem_type, "align allocate", res, false);    in MmapAlignedOrDieOnFatalError()
        184  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        185  return MmapOrDie(size, mem_type, false);    in MmapNoReserveOrDie()
/netbsd-src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/

    sanitizer_posix.cc
         45  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
         52  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno, raw_report);    in MmapOrDie()
         68  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
         77  ReportMmapFailureAndDie(size, mem_type, "allocate", reserrno);    in MmapOrDieOnFatalError()
         87  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
         91  uptr map_res = (uptr)MmapOrDieOnFatalError(map_size, mem_type);    in MmapAlignedOrDieOnFatalError()
        106  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        115  ReportMmapFailureAndDie(size, mem_type, "allocate noreserve", reserrno);    in MmapNoReserveOrDie()
        131  char mem_type[40];    in MmapFixedImpl() [local]
        132  internal_snprintf(mem_type, sizeof(mem_type), "memory at address 0x%zx",    in MmapFixedImpl()
        [all …]
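The MmapAlignedOrDieOnFatalError() hits in these files point at the usual way to get an alignment larger than the page size out of mmap: over-allocate, then keep only the aligned slice. A rough userland sketch of that idea follows; aligned_mmap_or_die is an illustrative name, and trimming the slack with munmap is an assumption about the general technique, not a transcription of the library code.

    /* Illustrative sketch: map size + alignment bytes, return the aligned
     * portion, and unmap the leading and trailing slack. Assumes POSIX mmap
     * and a power-of-two alignment that is at least the page size. */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <sys/mman.h>

    static void *aligned_mmap_or_die(size_t size, size_t alignment,
                                     const char *mem_type)
    {
        size_t map_size = size + alignment;
        uint8_t *base = mmap(NULL, map_size, PROT_READ | PROT_WRITE,
                             MAP_PRIVATE | MAP_ANON, -1, 0);
        if (base == MAP_FAILED) {
            fprintf(stderr, "failed to map %zu bytes for %s\n",
                    map_size, mem_type);
            abort();
        }
        uintptr_t addr = (uintptr_t)base;
        uintptr_t aligned = (addr + alignment - 1) & ~(uintptr_t)(alignment - 1);
        /* Return the unused head and tail of the mapping to the system. */
        if (aligned != addr)
            munmap(base, aligned - addr);
        size_t tail = (addr + map_size) - (aligned + size);
        if (tail != 0)
            munmap((void *)(aligned + size), tail);
        return (void *)aligned;
    }

    int main(void)
    {
        void *p = aligned_mmap_or_die(1u << 16, 1u << 16, "test region");
        printf("aligned mapping at %p\n", p);
        return 0;
    }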
    sanitizer_fuchsia.cc
        160  static void *DoAnonymousMmapOrDie(uptr size, const char *mem_type,    in DoAnonymousMmapOrDie() [argument]
        168  ReportMmapFailureAndDie(size, mem_type, "zx_vmo_create", status,    in DoAnonymousMmapOrDie()
        172  _zx_object_set_property(vmo, ZX_PROP_NAME, mem_type,    in DoAnonymousMmapOrDie()
        173  internal_strlen(mem_type));    in DoAnonymousMmapOrDie()
        184  ReportMmapFailureAndDie(size, mem_type, "zx_vmar_map", status,    in DoAnonymousMmapOrDie()
        194  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        195  return DoAnonymousMmapOrDie(size, mem_type, raw_report, true);    in MmapOrDie()
        198  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        199  return MmapOrDie(size, mem_type);    in MmapNoReserveOrDie()
        202  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        [all …]

    sanitizer_win.cc
        111  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        114  ReportMmapFailureAndDie(size, mem_type, "allocate",    in MmapOrDie()
        139  static void *ReturnNullptrOnOOMOrDie(uptr size, const char *mem_type,    in ReturnNullptrOnOOMOrDie() [argument]
        144  ReportMmapFailureAndDie(size, mem_type, mmap_type, last_error);    in ReturnNullptrOnOOMOrDie()
        147  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        150  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate");    in MmapOrDieOnFatalError()
        156  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
        166  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        173  ReportMmapFailureAndDie(size, mem_type, "deallocate", GetLastError());    in MmapAlignedOrDieOnFatalError()
        186  return ReturnNullptrOnOOMOrDie(size, mem_type, "allocate aligned");    in MmapAlignedOrDieOnFatalError()
        [all …]

    sanitizer_rtems.cc
        148  void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {    in MmapOrDie() [argument]
        152  ReportMmapFailureAndDie(size, mem_type, "allocate", res, raw_report);    in MmapOrDie()
        158  void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {    in MmapOrDieOnFatalError() [argument]
        164  ReportMmapFailureAndDie(size, mem_type, "allocate", false);    in MmapOrDieOnFatalError()
        172  const char *mem_type) {    in MmapAlignedOrDieOnFatalError() [argument]
        178  ReportMmapFailureAndDie(size, mem_type, "align allocate", res, false);    in MmapAlignedOrDieOnFatalError()
        184  void *MmapNoReserveOrDie(uptr size, const char *mem_type) {    in MmapNoReserveOrDie() [argument]
        185  return MmapOrDie(size, mem_type, false);    in MmapNoReserveOrDie()
/netbsd-src/sys/external/bsd/drm2/dist/drm/qxl/

    qxl_object.h
         68  static inline int qxl_bo_wait(struct qxl_bo *bo, u32 *mem_type,    in qxl_bo_wait() [argument]
         83  if (mem_type)    in qxl_bo_wait()
         84  *mem_type = bo->tbo.mem.mem_type;    in qxl_bo_wait()

    qxl_ttm.c
        121  struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in qxl_ttm_io_mem_reserve()
        131  switch (mem->mem_type) {    in qxl_ttm_io_mem_reserve()
        239  if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {    in qxl_bo_move()
        258  if (bo->mem.mem_type == TTM_PL_PRIV && qbo->surface_id)    in qxl_bo_move_notify()

    qxl_object.c
        179  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)    in qxl_bo_kmap_atomic_page()
        181  else if (bo->tbo.mem.mem_type == TTM_PL_PRIV)    in qxl_bo_kmap_atomic_page()
        217  if ((bo->tbo.mem.mem_type != TTM_PL_VRAM) &&    in qxl_bo_kunmap_atomic_page()
        218  (bo->tbo.mem.mem_type != TTM_PL_PRIV))    in qxl_bo_kunmap_atomic_page()
/netbsd-src/sys/dev/pci/

    if_ath_pci.c
        159  pcireg_t mem_type;    in ath_pci_attach() [local]
        176  mem_type = pci_mapreg_type(pc, pa->pa_tag, ATH_PCI_MMBA);    in ath_pci_attach()
        177  if (mem_type != PCI_MAPREG_TYPE_MEM &&    in ath_pci_attach()
        178  mem_type != PCI_MAPREG_MEM_TYPE_64BIT) {    in ath_pci_attach()
        180  (int)mem_type);    in ath_pci_attach()
        183  if (pci_mapreg_map(pa, ATH_PCI_MMBA, mem_type, 0, &psc->sc_iot,    in ath_pci_attach()
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/

    amdgpu_ttm.c
        179  switch (bo->mem.mem_type) {    in amdgpu_evict_flags()
        284  addr += bo->bdev->man[mem->mem_type].gpu_offset;    in amdgpu_mm_node_addr()
        464  if (old_mem->mem_type == TTM_PL_VRAM &&    in amdgpu_move_blit()
        607  if (mem->mem_type == TTM_PL_SYSTEM ||    in amdgpu_mem_visible()
        608  mem->mem_type == TTM_PL_TT)    in amdgpu_mem_visible()
        610  if (mem->mem_type != TTM_PL_VRAM)    in amdgpu_mem_visible()
        642  if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {    in amdgpu_bo_move()
        646  if ((old_mem->mem_type == TTM_PL_TT &&    in amdgpu_bo_move()
        647  new_mem->mem_type == TTM_PL_SYSTEM) ||    in amdgpu_bo_move()
        648  (old_mem->mem_type == TTM_PL_SYSTEM &&    in amdgpu_bo_move()
        [all …]

    amdgpu_object.c
         73  if (bo->tbo.mem.mem_type == TTM_PL_VRAM) {    in amdgpu_bo_subtract_pin_size()
         77  } else if (bo->tbo.mem.mem_type == TTM_PL_TT) {    in amdgpu_bo_subtract_pin_size()
        592  bo->tbo.mem.mem_type == TTM_PL_VRAM &&    in amdgpu_bo_do_create()
        919  uint32_t mem_type = bo->tbo.mem.mem_type;    in amdgpu_bo_pin_restricted() [local]
        921  if (!(domain & amdgpu_mem_type_to_domain(mem_type)))    in amdgpu_bo_pin_restricted()
        927  u64 domain_start = bo->tbo.bdev->man[mem_type].gpu_offset;    in amdgpu_bo_pin_restricted()
        962  domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);    in amdgpu_bo_pin_restricted()
       1303  trace_amdgpu_bo_move(abo, new_mem->mem_type, old_mem->mem_type);    in amdgpu_bo_move_notify()
       1327  if (bo->mem.mem_type != TTM_PL_VRAM || !bo->mem.mm_node ||    in amdgpu_bo_release_notify()
       1369  if (bo->mem.mem_type != TTM_PL_VRAM)    in amdgpu_bo_fault_reserve_notify()
        [all …]

    amdgpu_atomfirmware.c
        191  u8 mem_type;    in amdgpu_atomfirmware_get_vram_info() [local]
        216  mem_type = igp_info->v11.memorytype;    in amdgpu_atomfirmware_get_vram_info()
        218  *vram_type = convert_atom_mem_type_to_vram_type(adev, mem_type);    in amdgpu_atomfirmware_get_vram_info()
        237  mem_type = vram_module->v9.memory_type;    in amdgpu_atomfirmware_get_vram_info()
        239  *vram_type = convert_atom_mem_type_to_vram_type(adev, mem_type);    in amdgpu_atomfirmware_get_vram_info()
        257  mem_type = vram_module->v10.memory_type;    in amdgpu_atomfirmware_get_vram_info()
        259  *vram_type = convert_atom_mem_type_to_vram_type(adev, mem_type);    in amdgpu_atomfirmware_get_vram_info()
/netbsd-src/sys/external/bsd/drm2/dist/drm/radeon/

    radeon_object.c
         61  unsigned mem_type, int sign)    in radeon_update_memory_usage() [argument]
         66  switch (mem_type) {    in radeon_update_memory_usage()
         88  radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);    in radeon_ttm_bo_destroy()
        419  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)    in radeon_bo_unpin()
        571  radeon_mem_type_to_domain(bo->tbo.mem.mem_type);    in radeon_bo_list_validate()
        776  if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {    in radeon_bo_check_tiling()
        808  radeon_update_memory_usage(rbo, bo->mem.mem_type, -1);    in radeon_bo_move_notify()
        809  radeon_update_memory_usage(rbo, new_mem->mem_type, 1);    in radeon_bo_move_notify()
        825  if (bo->mem.mem_type != TTM_PL_VRAM)    in radeon_bo_fault_reserve_notify()
        862  int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait)    in radeon_bo_wait() [argument]
        [all …]

    radeon_ttm.c
        156  switch (bo->mem.mem_type) {    in radeon_evict_flags()
        234  switch (old_mem->mem_type) {    in radeon_move_blit()
        242  DRM_ERROR("Unknown placement %d\n", old_mem->mem_type);    in radeon_move_blit()
        245  switch (new_mem->mem_type) {    in radeon_move_blit()
        253  DRM_ERROR("Unknown placement %d\n", old_mem->mem_type);    in radeon_move_blit()
        375  if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {    in radeon_bo_move()
        379  if ((old_mem->mem_type == TTM_PL_TT &&    in radeon_bo_move()
        380  new_mem->mem_type == TTM_PL_SYSTEM) ||    in radeon_bo_move()
        381  (old_mem->mem_type == TTM_PL_SYSTEM &&    in radeon_bo_move()
        382  new_mem->mem_type == TTM_PL_TT)) {    in radeon_bo_move()
        [all …]

    radeon_object.h
         42  static inline unsigned radeon_mem_type_to_domain(u32 mem_type)    in radeon_mem_type_to_domain() [argument]
         44  switch (mem_type) {    in radeon_mem_type_to_domain()
        124  extern int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type,
/netbsd-src/sys/external/bsd/drm2/dist/drm/vmwgfx/

    vmwgfx_bo.c
        251  if (bo->mem.mem_type == TTM_PL_VRAM &&    in vmw_bo_pin_in_start_of_vram()
        323  if (bo->mem.mem_type == TTM_PL_VRAM) {    in vmw_bo_get_guest_ptr()
        346  uint32_t old_mem_type = bo->mem.mem_type;    in vmw_bo_pin_reserved()
        373  BUG_ON(ret != 0 || bo->mem.mem_type != old_mem_type);    in vmw_bo_pin_reserved()
       1176  if (mem->mem_type == TTM_PL_VRAM || bo->mem.mem_type == TTM_PL_VRAM)    in vmw_bo_move_notify()
       1184  if (mem->mem_type != VMW_PL_MOB && bo->mem.mem_type == VMW_PL_MOB)    in vmw_bo_move_notify()

    vmwgfx_ttm_buffer.c
        249  int mem_type;    [member]
        595  vmw_be->mem_type = bo_mem->mem_type;    in vmw_ttm_bind()
        597  switch (bo_mem->mem_type) {    in vmw_ttm_bind()
        623  switch (vmw_be->mem_type) {    in vmw_ttm_unbind()
        804  struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];    in vmw_ttm_io_mem_reserve()
        814  switch (mem->mem_type) {    in vmw_ttm_io_mem_reserve()