/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/fault/ |
nouveau_nvkm_subdev_fault_base.c
    35  struct nvkm_fault *fault = container_of(event, typeof(*fault), event);  in nvkm_fault_ntfy_fini() local
    36  fault->func->buffer.intr(fault->buffer[index], false);  in nvkm_fault_ntfy_fini()
    42  struct nvkm_fault *fault = container_of(event, typeof(*fault), event);  in nvkm_fault_ntfy_init() local
    43  fault->func->buffer.intr(fault->buffer[index], true);  in nvkm_fault_ntfy_init()
    70  struct nvkm_fault *fault = nvkm_fault(subdev);  in nvkm_fault_intr() local
    71  return fault->func->intr(fault);  in nvkm_fault_intr()
    77  struct nvkm_fault *fault = nvkm_fault(subdev);  in nvkm_fault_fini() local
    78  if (fault->func->fini)  in nvkm_fault_fini()
    79  fault->func->fini(fault);  in nvkm_fault_fini()
    86  struct nvkm_fault *fault = nvkm_fault(subdev);  in nvkm_fault_init() local
    [all …]
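The base.c hits above all rely on the container_of() idiom: struct nvkm_fault embeds an nvkm_event, the notify callbacks receive only a pointer to that embedded member, and the containing fault object is recovered by subtracting the member's offset. Below is a minimal, self-contained sketch of the idiom; the structure and field names are illustrative stand-ins, not the real nvkm definitions.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* container_of: recover the parent structure from a pointer to one of
     * its embedded members. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct event {                  /* stand-in for struct nvkm_event */
            int dummy;
    };

    struct fault_dev {              /* stand-in for struct nvkm_fault */
            struct event event;     /* embedded member */
            bool intr_enabled[2];
    };

    /* Notify fini callback: only the embedded event is passed in, so the
     * containing device is recovered with container_of(). */
    static void fault_ntfy_fini(struct event *ev, int index)
    {
            struct fault_dev *fault = container_of(ev, struct fault_dev, event);
            fault->intr_enabled[index] = false;
    }

    int main(void)
    {
            struct fault_dev dev = { .intr_enabled = { true, true } };
            fault_ntfy_fini(&dev.event, 0);
            printf("buffer 0 intr enabled: %d\n", dev.intr_enabled[0]);
            return 0;
    }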
|
nouveau_nvkm_subdev_fault_gv100.c
    38  struct nvkm_device *device = buffer->fault->subdev.device;  in gv100_fault_buffer_process()
    47  const u32 base = get * buffer->fault->func->buffer.entry_size;  in gv100_fault_buffer_process()
    81  struct nvkm_device *device = buffer->fault->subdev.device;  in gv100_fault_buffer_intr()
    92  struct nvkm_device *device = buffer->fault->subdev.device;  in gv100_fault_buffer_fini()
    100 struct nvkm_device *device = buffer->fault->subdev.device;  in gv100_fault_buffer_init()
    112 struct nvkm_device *device = buffer->fault->subdev.device;  in gv100_fault_buffer_info()
    125 struct nvkm_fault *fault = container_of(notify, typeof(*fault), nrpfb);  in gv100_fault_ntfy_nrpfb() local
    126 gv100_fault_buffer_process(fault->buffer[0]);  in gv100_fault_ntfy_nrpfb()
    131 gv100_fault_intr_fault(struct nvkm_fault *fault)  in gv100_fault_intr_fault() argument
    133 struct nvkm_subdev *subdev = &fault->subdev;  in gv100_fault_intr_fault()
    [all …]
|
nouveau_nvkm_subdev_fault_tu102.c
    47  struct nvkm_device *device = buffer->fault->subdev.device;  in tu102_fault_buffer_fini()
    55  struct nvkm_device *device = buffer->fault->subdev.device;  in tu102_fault_buffer_init()
    67  struct nvkm_device *device = buffer->fault->subdev.device;  in tu102_fault_buffer_info()
    78  tu102_fault_intr_fault(struct nvkm_fault *fault)  in tu102_fault_intr_fault() argument
    80  struct nvkm_subdev *subdev = &fault->subdev;  in tu102_fault_intr_fault()
    104 tu102_fault_intr(struct nvkm_fault *fault)  in tu102_fault_intr() argument
    106 struct nvkm_subdev *subdev = &fault->subdev;  in tu102_fault_intr()
    111 tu102_fault_intr_fault(fault);  in tu102_fault_intr()
    117 if (fault->buffer[0]) {  in tu102_fault_intr()
    118 nvkm_event_send(&fault->event, 1, 0, NULL, 0);  in tu102_fault_intr()
    [all …]
|
Kbuild
    2  nvkm-y += nvkm/subdev/fault/base.o
    3  nvkm-y += nvkm/subdev/fault/user.o
    4  nvkm-y += nvkm/subdev/fault/gp100.o
    5  nvkm-y += nvkm/subdev/fault/gp10b.o
    6  nvkm-y += nvkm/subdev/fault/gv100.o
    7  nvkm-y += nvkm/subdev/fault/tu102.o
|
nouveau_nvkm_subdev_fault_user.c
    46  struct nvkm_device *device = buffer->fault->subdev.device;  in nvkm_ufault_map()
    62  *pevent = &buffer->fault->event;  in nvkm_ufault_ntfy()
    72  buffer->fault->func->buffer.fini(buffer);  in nvkm_ufault_fini()
    80  buffer->fault->func->buffer.init(buffer);  in nvkm_ufault_init()
    106 struct nvkm_fault *fault = device->fault;  in nvkm_ufault_new() local
    107 struct nvkm_fault_buffer *buffer = fault->buffer[fault->func->user.rp];  in nvkm_ufault_new()
|
nouveau_nvkm_subdev_fault_gp100.c
    37  struct nvkm_device *device = buffer->fault->subdev.device;  in gp100_fault_buffer_intr()
    44  struct nvkm_device *device = buffer->fault->subdev.device;  in gp100_fault_buffer_fini()
    51  struct nvkm_device *device = buffer->fault->subdev.device;  in gp100_fault_buffer_init()
    65  buffer->entries = nvkm_rd32(buffer->fault->subdev.device, 0x002a78);  in gp100_fault_buffer_info()
    71  gp100_fault_intr(struct nvkm_fault *fault)  in gp100_fault_intr() argument
    73  nvkm_event_send(&fault->event, 1, 0, NULL, 0);  in gp100_fault_intr()
|
/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/ |
nouveau_svm.c
    68  u8 fault;  member
    70  } **fault;  member
    420 struct nouveau_svm_fault *fault)  in nouveau_svm_fault_cancel_fault() argument
    422 nouveau_svm_fault_cancel(svm, fault->inst,  in nouveau_svm_fault_cancel_fault()
    423 fault->hub,  in nouveau_svm_fault_cancel_fault()
    424 fault->gpc,  in nouveau_svm_fault_cancel_fault()
    425 fault->client);  in nouveau_svm_fault_cancel_fault()
    460 struct nouveau_svm_fault *fault;  in nouveau_svm_fault_cache() local
    468 if (!buffer->fault[buffer->fault_nr]) {  in nouveau_svm_fault_cache()
    469 fault = kmalloc(sizeof(*fault), GFP_KERNEL);  in nouveau_svm_fault_cache()
    [all …]
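The nouveau_svm.c hits show how replayable faults are cached: each buffer keeps an array of pointers to fault records, a slot is allocated only the first time it is used (lines 468-469 above), and the slots are then reused on later passes over the buffer. Below is a rough standalone sketch of that lazy-slot pattern; only the inst/hub/gpc/client fields come from the snippet, and the remaining names plus the userspace malloc() are assumptions.

    #include <stdlib.h>

    struct svm_fault {              /* stand-in for struct nouveau_svm_fault */
            unsigned long inst;
            unsigned char hub, gpc, client;
    };

    struct fault_buffer {
            struct svm_fault **fault;   /* lazily allocated record slots */
            int fault_nr;               /* records cached so far */
            int entries;                /* capacity of the slot array */
    };

    /* Cache one fault record; a slot is allocated on first use and kept
     * around for reuse afterwards. */
    static int fault_cache(struct fault_buffer *buffer, unsigned long inst,
                           unsigned char hub, unsigned char gpc,
                           unsigned char client)
    {
            struct svm_fault *fault;

            if (buffer->fault_nr >= buffer->entries)
                    return -1;
            if (!buffer->fault[buffer->fault_nr]) {
                    fault = malloc(sizeof(*fault));
                    if (!fault)
                            return -1;
                    buffer->fault[buffer->fault_nr] = fault;
            }
            fault = buffer->fault[buffer->fault_nr++];
            fault->inst = inst;
            fault->hub = hub;
            fault->gpc = gpc;
            fault->client = client;
            return 0;
    }

    int main(void)
    {
            struct svm_fault *slots[4] = { NULL };
            struct fault_buffer buffer = { .fault = slots, .entries = 4 };

            fault_cache(&buffer, 0x1000, 0, 0, 7);
            fault_cache(&buffer, 0x2000, 1, 2, 3);
            return buffer.fault_nr == 2 ? 0 : 1;
    }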
|
/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/fifo/ |
nouveau_nvkm_engine_fifo_gk20a.c
    34  .intr.fault = gf100_fifo_intr_fault,
    36  .fault.access = gk104_fifo_fault_access,
    37  .fault.engine = gk104_fifo_fault_engine,
    38  .fault.reason = gk104_fifo_fault_reason,
    39  .fault.hubclient = gk104_fifo_fault_hubclient,
    40  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gm20b.c
    34  .intr.fault = gm107_fifo_intr_fault,
    36  .fault.access = gk104_fifo_fault_access,
    37  .fault.engine = gm107_fifo_fault_engine,
    38  .fault.reason = gk104_fifo_fault_reason,
    39  .fault.hubclient = gk104_fifo_fault_hubclient,
    40  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gp10b.c
    34  .intr.fault = gp100_fifo_intr_fault,
    36  .fault.access = gk104_fifo_fault_access,
    37  .fault.engine = gp100_fifo_fault_engine,
    38  .fault.reason = gk104_fifo_fault_reason,
    39  .fault.hubclient = gk104_fifo_fault_hubclient,
    40  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gm200.c
    50  .intr.fault = gm107_fifo_intr_fault,
    52  .fault.access = gk104_fifo_fault_access,
    53  .fault.engine = gm107_fifo_fault_engine,
    54  .fault.reason = gk104_fifo_fault_reason,
    55  .fault.hubclient = gk104_fifo_fault_hubclient,
    56  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gk110.c
    56  .intr.fault = gf100_fifo_intr_fault,
    58  .fault.access = gk104_fifo_fault_access,
    59  .fault.engine = gk104_fifo_fault_engine,
    60  .fault.reason = gk104_fifo_fault_reason,
    61  .fault.hubclient = gk104_fifo_fault_hubclient,
    62  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gk208.c
    53  .intr.fault = gf100_fifo_intr_fault,
    55  .fault.access = gk104_fifo_fault_access,
    56  .fault.engine = gk104_fifo_fault_engine,
    57  .fault.reason = gk104_fifo_fault_reason,
    58  .fault.hubclient = gk104_fifo_fault_hubclient,
    59  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gp100.c
    86  .intr.fault = gp100_fifo_intr_fault,
    88  .fault.access = gk104_fifo_fault_access,
    89  .fault.engine = gp100_fifo_fault_engine,
    90  .fault.reason = gk104_fifo_fault_reason,
    91  .fault.hubclient = gk104_fifo_fault_hubclient,
    92  .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gm107.c
    102 .intr.fault = gm107_fifo_intr_fault,
    104 .fault.access = gk104_fifo_fault_access,
    105 .fault.engine = gm107_fifo_fault_engine,
    106 .fault.reason = gk104_fifo_fault_reason,
    107 .fault.hubclient = gk104_fifo_fault_hubclient,
    108 .fault.gpcclient = gk104_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_tu102.c
    106 .fault.access = gv100_fifo_fault_access,
    107 .fault.engine = tu102_fifo_fault_engine,
    108 .fault.reason = gv100_fifo_fault_reason,
    109 .fault.hubclient = gv100_fifo_fault_hubclient,
    110 .fault.gpcclient = gv100_fifo_fault_gpcclient,
|
nouveau_nvkm_engine_fifo_gv100.c
    297 .fault.access = gv100_fifo_fault_access,
    298 .fault.engine = gv100_fifo_fault_engine,
    299 .fault.reason = gv100_fifo_fault_reason,
    300 .fault.hubclient = gv100_fifo_fault_hubclient,
    301 .fault.gpcclient = gv100_fifo_fault_gpcclient,
|
/netbsd-src/sys/arch/m68k/m68k/ |
busaddrerr.s
    108 movl #T_MMUFLT,%sp@-  | show that we are an MMU fault
    115 movl %sp@(FR_HW+8+8),_C_LABEL(m68k_fault_addr)  | save fault addr
    131 movl %sp@(FR_HW+20),%d1  | get fault address
    139 movl %d1,%sp@-  | pass fault address.
    143 movl #T_MMUFLT,%sp@-  | show that we are an MMU fault
    150 movl %sp@(FR_HW+8+20),_C_LABEL(m68k_fault_addr)  | save fault addr
    169 movw %sp@(FR_HW+10),%d0  | grab SSW for fault processing
    180 btst #8,%d0  | data fault?
    182 movl %sp@(FR_HW+16),%d1  | fault address is as given in frame
    203 movl %d1,%sp@-  | push fault VA
    [all …]
|
trap_subr.s
    51  * Common fault handling code. Called by exception vector handlers.
    54  ASENTRY_NOPROFILE(fault)
    133 jra _ASM_LABEL(fault)
    139 jra _ASM_LABEL(fault)
    145 jra _ASM_LABEL(fault)
    151 jra _ASM_LABEL(fault)
    157 jra _ASM_LABEL(fault)
|
/netbsd-src/sys/external/bsd/drm2/dist/drm/amd/amdgpu/ |
amdgpu_gmc.c
    298 struct amdgpu_gmc_fault *fault;  in amdgpu_gmc_filter_faults() local
    309 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx];  in amdgpu_gmc_filter_faults()
    310 while (fault->timestamp >= stamp) {  in amdgpu_gmc_filter_faults()
    313 if (fault->key == key)  in amdgpu_gmc_filter_faults()
    316 tmp = fault->timestamp;  in amdgpu_gmc_filter_faults()
    317 fault = &gmc->fault_ring[fault->next];  in amdgpu_gmc_filter_faults()
    320 if (fault->timestamp >= tmp)  in amdgpu_gmc_filter_faults()
    325 fault = &gmc->fault_ring[gmc->last_fault];  in amdgpu_gmc_filter_faults()
    326 fault->key = key;  in amdgpu_gmc_filter_faults()
    327 fault->timestamp = timestamp;  in amdgpu_gmc_filter_faults()
    [all …]
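amdgpu_gmc_filter_faults() is a duplicate-fault filter: the fault is reduced to a key, the key is hashed into fault_hash, and the chain of recent entries is walked through fault_ring in decreasing timestamp order; a matching key that is still within the retention window means the new fault is dropped, otherwise it is recorded over the oldest ring slot. Below is a simplified standalone sketch of that scheme; the table sizes, key construction, and the retention-window parameter are assumptions, not the driver's actual values.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define FAULT_RING_SIZE 256          /* assumed; power of two */
    #define FAULT_HASH_SIZE 256          /* assumed; power of two */

    struct gmc_fault {
            uint64_t key;                /* faulting page + client id */
            uint64_t timestamp;          /* when the fault was recorded */
            uint16_t next;               /* next older entry in this bucket */
    };

    struct gmc_filter {
            struct gmc_fault ring[FAULT_RING_SIZE];
            uint16_t hash[FAULT_HASH_SIZE]; /* newest ring index per bucket */
            uint16_t last;                  /* next ring slot to recycle */
    };

    /* Return true if this fault is a recent duplicate and should be dropped,
     * false if it is new (in which case it is recorded). */
    static bool filter_fault(struct gmc_filter *f, uint64_t addr,
                             uint32_t client, uint64_t timestamp,
                             uint64_t window)
    {
            uint64_t key = addr ^ ((uint64_t)client << 48);
            uint64_t stamp = timestamp > window ? timestamp - window : 0;
            uint32_t hash = key % FAULT_HASH_SIZE;
            struct gmc_fault *fault = &f->ring[f->hash[hash]];

            /* Walk this bucket's chain while the entries are still recent. */
            while (fault->timestamp >= stamp) {
                    uint64_t tmp;

                    if (fault->key == key)
                            return true;    /* seen recently: drop it */

                    tmp = fault->timestamp;
                    fault = &f->ring[fault->next];
                    /* Timestamps only decrease along the chain; a jump back
                     * up means we wrapped around the ring. */
                    if (fault->timestamp >= tmp)
                            break;
            }

            /* Not a duplicate: overwrite the oldest slot and re-link it. */
            fault = &f->ring[f->last];
            fault->key = key;
            fault->timestamp = timestamp;
            fault->next = f->hash[hash];
            f->hash[hash] = f->last;
            f->last = (f->last + 1) % FAULT_RING_SIZE;
            return false;
    }

    int main(void)
    {
            static struct gmc_filter f;

            printf("%d\n", filter_fault(&f, 0x1000, 3, 100, 50)); /* 0: new */
            printf("%d\n", filter_fault(&f, 0x1000, 3, 120, 50)); /* 1: duplicate */
            printf("%d\n", filter_fault(&f, 0x1000, 3, 500, 50)); /* 0: window expired */
            return 0;
    }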
|
/netbsd-src/sys/external/bsd/drm2/dist/drm/i915/gt/ |
intel_gt.c
    205 u32 fault;  in gen6_check_faults() local
    208 fault = GEN6_RING_FAULT_REG_READ(engine);  in gen6_check_faults()
    209 if (fault & RING_FAULT_VALID) {  in gen6_check_faults()
    215 fault & PAGE_MASK,  in gen6_check_faults()
    216 fault & RING_FAULT_GTTSEL_MASK ?  in gen6_check_faults()
    218 RING_FAULT_SRCID(fault),  in gen6_check_faults()
    219 RING_FAULT_FAULT_TYPE(fault));  in gen6_check_faults()
    228 u32 fault;  in gen8_check_faults() local
    240 fault = intel_uncore_read(uncore, fault_reg);  in gen8_check_faults()
    241 if (fault & RING_FAULT_VALID) {  in gen8_check_faults()
    [all …]
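gen6_check_faults() and gen8_check_faults() read a per-engine fault register and, when the VALID bit is set, decode the faulting address, source ID, and fault type out of that one 32-bit value. The sketch below shows the same style of bit-field decode; the register layout used here is invented for illustration and is not the real i915 format.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical fault-status register layout (not the i915 one):
     *   bit  0      VALID
     *   bits 1..2   fault type
     *   bits 3..10  source id
     *   bits 12..31 faulting page address (page-aligned)
     */
    #define FAULT_VALID        (1u << 0)
    #define FAULT_TYPE(x)      (((x) >> 1) & 0x3)
    #define FAULT_SRCID(x)     (((x) >> 3) & 0xff)
    #define FAULT_PAGE_MASK    0xfffff000u

    static void check_fault(uint32_t fault)
    {
            if (!(fault & FAULT_VALID))
                    return;                     /* nothing pending */

            printf("Unexpected fault\n"
                   "\tAddr: 0x%08x\n"
                   "\tSource ID: %u\n"
                   "\tType: %u\n",
                   fault & FAULT_PAGE_MASK,
                   FAULT_SRCID(fault),
                   FAULT_TYPE(fault));
    }

    int main(void)
    {
            check_fault(0x0004a00du);           /* example raw register value */
            return 0;
    }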
|
/netbsd-src/sys/arch/m68k/060sp/ |
netbsd060sp.S
    89  # of a page fault during a _copyout.
    180 movl _C_LABEL(curpcb),%a1  | fault handler
    212 movl _C_LABEL(curpcb),%a1  | fault handler
    243 movl _C_LABEL(curpcb),%a1  | fault handler
    270 movl _C_LABEL(curpcb),%a1  | fault handler
    297 movl _C_LABEL(curpcb),%a1  | fault handler
    324 movl _C_LABEL(curpcb),%a1  | fault handler
    336 clrl %a1@(PCB_ONFAULT)  | clear fault handler
|
/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/top/ |
nouveau_nvkm_subdev_top_base.c
    38  info->fault = -1;  in nvkm_top_device_new()
    126 if (info->index == devidx && info->fault >= 0)  in nvkm_top_fault_id()
    127 return info->fault;  in nvkm_top_fault_id()
    134 nvkm_top_fault(struct nvkm_device *device, int fault)  in nvkm_top_fault() argument
    140 if (info->fault == fault)  in nvkm_top_fault()
|
/netbsd-src/external/apache2/llvm/dist/llvm/docs/ |
FaultMaps.rst
    17  made to fault reliably if the check would have failed, and recovering
    18  from such a fault by using a signal handler.
    32  special "fault map" section. On Darwin this section is named
    56  FaultKind describes the reason for the expected fault. Currently three kinds
    59  1. ``FaultMaps::FaultingLoad`` - fault due to a load from memory.
    60  2. ``FaultMaps::FaultingLoadStore`` - fault due to an instruction that both loads and stores.
    61  3. ``FaultMaps::FaultingStore`` - fault due to a store to memory.
    113 fault once the application has reached a steady state. A standard way
    124 fault.
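FaultMaps.rst describes the mechanism behind implicit null checks: the compiler emits a memory access that will fault when the pointer is null, records it in the fault map, and the runtime recovers from the resulting fault in a signal handler and branches to a slow path. The userspace sketch below illustrates only the recovery half, using sigsetjmp()/siglongjmp() instead of a fault-map lookup; it is an analogy for the idea, not how LLVM or a JIT runtime actually implements it.

    #include <setjmp.h>
    #include <signal.h>
    #include <stdio.h>

    static sigjmp_buf recover;

    /* SIGSEGV handler standing in for the runtime's fault-map lookup:
     * instead of consulting a fault map for the faulting PC, we simply
     * jump to a known recovery point. */
    static void on_fault(int sig)
    {
            (void)sig;
            siglongjmp(recover, 1);
    }

    /* "Implicitly checked" read: the load itself is the null check. */
    static int load_field(const int *p)
    {
            if (sigsetjmp(recover, 1) == 0)
                    return *p;          /* faults if p is NULL */
            return -1;                  /* slow path / exception path */
    }

    int main(void)
    {
            struct sigaction sa = { .sa_handler = on_fault };

            sigemptyset(&sa.sa_mask);
            sigaction(SIGSEGV, &sa, NULL);

            int value = 42;
            printf("non-null: %d\n", load_field(&value));
            printf("null:     %d\n", load_field(NULL));
            return 0;
    }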
|
/netbsd-src/regress/sys/uvm/pdsim/ |
pdsim.c
    52  int fault;  member
    180 fault(struct uvm_object *obj, int index)  in fault() function
    191 stats[index].fault++;  in fault()
    263 fault(&obj, i);  in test()
    276 if (stats[i].fault == 0) {  in dumpstats()
    280 stats[i].hit, stats[i].fault, irr[i]);  in dumpstats()
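pdsim's fault() simulates a page fault on one page of an object and bumps a per-page fault counter, and dumpstats() later prints the hit, fault, and inter-reference recurrence (irr) figures for each page. The fragment below sketches how such per-page counters might be kept; the resident-page tracking and the trace driver are assumptions added for the example, not pdsim's actual logic.

    #include <stdio.h>

    #define NPAGES 8

    struct pagestat {
            int hit;     /* page was already resident */
            int fault;   /* page had to be faulted in */
    };

    static struct pagestat stats[NPAGES];
    static int resident[NPAGES];

    /* Simulate an access to page `index`: count a hit if it is resident,
     * otherwise count a fault and make it resident. */
    static void fault(int index)
    {
            if (resident[index]) {
                    stats[index].hit++;
                    return;
            }
            stats[index].fault++;
            resident[index] = 1;
    }

    static void dumpstats(void)
    {
            for (int i = 0; i < NPAGES; i++) {
                    if (stats[i].fault == 0 && stats[i].hit == 0)
                            continue;
                    printf("page %d: hit %d fault %d\n",
                           i, stats[i].hit, stats[i].fault);
            }
    }

    int main(void)
    {
            int trace[] = { 0, 1, 0, 2, 1, 0 };

            for (unsigned i = 0; i < sizeof(trace) / sizeof(trace[0]); i++)
                    fault(trace[i]);
            dumpstats();
            return 0;
    }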
|