/netbsd-src/sys/arch/arm/at91/

at91pdcvar.h
     65  static __inline int AT91PDC_FIFO_EMPTY(at91pdc_fifo_t *fifo)
     67          return fifo->f_length == 0;
     70  static __inline int AT91PDC_FIFO_FULL(at91pdc_fifo_t *fifo)
     72          return fifo->f_length >= fifo->f_buf_size;
     75  static __inline int AT91PDC_FIFO_SPACE(at91pdc_fifo_t *fifo)
     77          return fifo->f_buf_size - fifo->f_length;
     85          at91pdc_fifo_t *fifo,                          (in AT91PDC_RESET_FIFO)
     88          fifo->f_ndx = fifo->f_length = 0;
     90          fifo->f_pdc_rd_ndx = fifo->f_pdc_wr_ndx = 0;
     91          fifo->f_pdc_space = fifo->f_buf_size;
     …
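
The helpers above are thin wrappers around two counters, f_length (bytes queued) and f_buf_size (capacity). A minimal sketch of how a caller might use the space check before copying transmit data in — the struct here is reduced to just the fields visible in the listing, and enqueue_bytes() is a hypothetical caller, not code from at91pdcvar.h:

```c
#include <stddef.h>
#include <string.h>

/* Reduced stand-in for at91pdc_fifo_t: only the fields used by the
 * empty/full/space helpers shown in the listing. */
typedef struct {
	void	*f_buf;		/* DMA-visible buffer */
	size_t	 f_buf_size;	/* total capacity in bytes */
	size_t	 f_ndx;		/* producer index */
	size_t	 f_length;	/* bytes currently queued */
} pdc_fifo_sketch_t;

static inline size_t fifo_space(const pdc_fifo_sketch_t *f)
{
	return f->f_buf_size - f->f_length;
}

/* Hypothetical caller: accept at most 'len' bytes into the FIFO and
 * return how many fit, mirroring how a driver would consult
 * AT91PDC_FIFO_SPACE() before touching the buffer. */
static size_t enqueue_bytes(pdc_fifo_sketch_t *f, const void *src, size_t len)
{
	size_t room = fifo_space(f);

	if (len > room)
		len = room;
	memcpy((char *)f->f_buf + f->f_ndx, src, len);	/* wrap-around ignored for brevity */
	f->f_ndx += len;
	f->f_length += len;
	return len;
}
```

Wrap-around handling and the PDC read/write indices are deliberately left out; the point is only that the helpers make the produce/consume bookkeeping explicit.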

at91pdc.c
      7  int at91pdc_alloc_fifo(bus_dma_tag_t dmat, at91pdc_fifo_t *fifo, int size,
     15          memset(fifo, 0, sizeof(*fifo));
     21                  &fifo->f_dmamap);
     34          err = bus_dmamem_map(dmat, &segs, 1, size, &fifo->f_buf,
     42          err = bus_dmamap_load(dmat, fifo->f_dmamap, fifo->f_buf, size, NULL,
     48          fifo->f_buf_size = size;
     49          fifo->f_ndx = fifo->f_length = 0;
     51          fifo->f_buf_addr = fifo->f_dmamap->dm_segs[0].ds_addr;
     52          fifo->f_pdc_rd_ndx = fifo->f_pdc_wr_ndx = 0;
     53          fifo->f_pdc_space = fifo->f_buf_size;
     …
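
at91pdc_alloc_fifo() follows the standard NetBSD bus_dma recipe: create a map, allocate DMA-safe memory, map it into kernel virtual space, then load the map so the PDC can be pointed at dm_segs[0].ds_addr. A condensed sketch of that sequence — error unwinding is omitted and the flag choices (BUS_DMA_WAITOK, BUS_DMA_COHERENT) are plausible defaults, not copied from at91pdc.c:

```c
#include <sys/param.h>
#include <sys/bus.h>

/* Sketch only: the destroy/free/unmap cleanup paths are left out. */
static int
alloc_dma_fifo(bus_dma_tag_t dmat, int size,
    bus_dmamap_t *mapp, void **kvap, bus_addr_t *busaddrp)
{
	bus_dma_segment_t seg;
	int rsegs, err;

	/* 1. A map describing one contiguous segment of 'size' bytes. */
	err = bus_dmamap_create(dmat, size, 1, size, 0, BUS_DMA_WAITOK, mapp);
	if (err)
		return err;

	/* 2. Physical memory the device can reach... */
	err = bus_dmamem_alloc(dmat, size, PAGE_SIZE, 0, &seg, 1, &rsegs,
	    BUS_DMA_WAITOK);
	if (err)
		return err;

	/* 3. ...mapped into kernel virtual address space... */
	err = bus_dmamem_map(dmat, &seg, 1, size, kvap,
	    BUS_DMA_WAITOK | BUS_DMA_COHERENT);
	if (err)
		return err;

	/* 4. ...and loaded into the map so the bus address is known. */
	err = bus_dmamap_load(dmat, *mapp, *kvap, size, NULL, BUS_DMA_WAITOK);
	if (err)
		return err;

	*busaddrp = (*mapp)->dm_segs[0].ds_addr;
	return 0;
}
```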

/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/fifo/

nouveau_nvkm_engine_fifo_base.c
     42  nvkm_fifo_recover_chan(struct nvkm_fifo *fifo, int chid)
     45          if (WARN_ON(!fifo->func->recover_chan))
     47          spin_lock_irqsave(&fifo->lock, flags);
     48          fifo->func->recover_chan(fifo, chid);
     49          spin_unlock_irqrestore(&fifo->lock, flags);
     53  nvkm_fifo_pause(struct nvkm_fifo *fifo, unsigned long *flags)
     55          return fifo->func->pause(fifo, flags);
     59  nvkm_fifo_start(struct nvkm_fifo *fifo, unsigned long *flags)
     61          return fifo->func->start(fifo, flags);
     65  nvkm_fifo_fault(struct nvkm_fifo *fifo, struct nvkm_fault_data *info)
     …
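
The base layer shown here never touches hardware itself: it checks that the per-chipset hook exists and calls through fifo->func, taking fifo->lock around the recovery path. A small userspace analogue of that guarded function-table dispatch (a pthread mutex stands in for the kernel spinlock; this is not nouveau code):

```c
#include <pthread.h>
#include <stdio.h>

struct fifo;				/* forward declaration */

/* Per-chipset operations, analogous to nvkm_fifo_func. */
struct fifo_ops {
	void (*recover_chan)(struct fifo *, int chid);
};

struct fifo {
	const struct fifo_ops *func;
	pthread_mutex_t lock;		/* stands in for fifo->lock */
};

/* Base-layer wrapper: validate that the hook exists, then call it with
 * the lock held, mirroring nvkm_fifo_recover_chan() above. */
static void fifo_recover_chan(struct fifo *fifo, int chid)
{
	if (fifo->func->recover_chan == NULL)	/* WARN_ON() in the original */
		return;
	pthread_mutex_lock(&fifo->lock);
	fifo->func->recover_chan(fifo, chid);
	pthread_mutex_unlock(&fifo->lock);
}

static void demo_recover(struct fifo *f, int chid)
{
	(void)f;
	printf("recovering channel %d\n", chid);
}

int main(void)
{
	static const struct fifo_ops demo_ops = { .recover_chan = demo_recover };
	struct fifo f = { .func = &demo_ops, .lock = PTHREAD_MUTEX_INITIALIZER };

	fifo_recover_chan(&f, 3);
	return 0;
}
```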

nouveau_nvkm_engine_fifo_gk104.c
     57  gk104_fifo_engine_status(struct gk104_fifo *fifo, int engn,
     60          struct nvkm_engine *engine = fifo->engine[engn].engine;
     61          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
    107          struct gk104_fifo *fifo = gk104_fifo(base);            (in gk104_fifo_class_new)
    108          if (oclass->engn == &fifo->func->chan) {
    110                  return user->ctor(fifo, oclass, argv, argc, pobject);
    112          if (oclass->engn == &fifo->func->user) {
    124          struct gk104_fifo *fifo = gk104_fifo(base);            (in gk104_fifo_class_get)
    127          if (fifo->func->user.ctor && c++ == index) {
    128                  oclass->base = fifo->func->user.user;
    …

nouveau_nvkm_engine_fifo_gf100.c
     42  gf100_fifo_uevent_init(struct nvkm_fifo *fifo)
     44          struct nvkm_device *device = fifo->engine.subdev.device;
     49  gf100_fifo_uevent_fini(struct nvkm_fifo *fifo)
     51          struct nvkm_device *device = fifo->engine.subdev.device;
     56  gf100_fifo_runlist_commit(struct gf100_fifo *fifo)
     59          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
     66          cur = fifo->runlist.mem[fifo->runlist.active];
     67          fifo->runlist.active = !fifo->runlist.active;
     70          list_for_each_entry(chan, &fifo->chan, head) {
    104          spin_lock(&fifo->runlist.lock);
    …
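
gf100_fifo_runlist_commit() keeps two copies of the runlist and alternates between them, so a new list can be built while the hardware may still be reading the previous one (lines 66-67 grab one copy and toggle runlist.active). A standalone sketch of that double-buffering idea in plain C, without nouveau's memory-object and submit machinery:

```c
#include <stdint.h>
#include <stdio.h>

#define RUNLIST_SLOTS 8

struct runlist_dbuf {
	uint32_t mem[2][RUNLIST_SLOTS];	/* two alternating runlist copies */
	int      active;		/* copy the next commit will build in */
};

/* Build the next runlist in one copy, toggle 'active' for the next
 * commit, and return the freshly built copy; the caller would then
 * point the consumer (the GPU in the real driver) at it. */
static const uint32_t *
runlist_commit(struct runlist_dbuf *rl, const uint32_t *chids, int nr)
{
	uint32_t *cur = rl->mem[rl->active];
	int i;

	rl->active = !rl->active;	/* mirrors lines 66-67 above */

	for (i = 0; i < nr && i < RUNLIST_SLOTS; i++)
		cur[i] = chids[i];
	for (; i < RUNLIST_SLOTS; i++)
		cur[i] = 0;		/* unused slots */

	return cur;
}

int main(void)
{
	struct runlist_dbuf rl = { .active = 0 };
	const uint32_t chans[] = { 1, 4, 7 };
	const uint32_t *list = runlist_commit(&rl, chans, 3);

	printf("first entry %u, next build uses copy %d\n", list[0], rl.active);
	return 0;
}
```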

Kbuild
      2  nvkm-y += nvkm/engine/fifo/base.o
      3  nvkm-y += nvkm/engine/fifo/nv04.o
      4  nvkm-y += nvkm/engine/fifo/nv10.o
      5  nvkm-y += nvkm/engine/fifo/nv17.o
      6  nvkm-y += nvkm/engine/fifo/nv40.o
      7  nvkm-y += nvkm/engine/fifo/nv50.o
      8  nvkm-y += nvkm/engine/fifo/g84.o
      9  nvkm-y += nvkm/engine/fifo/gf100.o
     10  nvkm-y += nvkm/engine/fifo/gk104.o
     11  nvkm-y += nvkm/engine/fifo/gk110.o
     …

nouveau_nvkm_engine_fifo_nv50.c
     35  nv50_fifo_runlist_update_locked(struct nv50_fifo *fifo)
     37          struct nvkm_device *device = fifo->base.engine.subdev.device;
     41          cur = fifo->runlist[fifo->cur_runlist];
     42          fifo->cur_runlist = !fifo->cur_runlist;
     45          for (i = 0, p = 0; i < fifo->base.nr; i++) {
     57  nv50_fifo_runlist_update(struct nv50_fifo *fifo)
     59          mutex_lock(&fifo->base.engine.subdev.mutex);
     60          nv50_fifo_runlist_update_locked(fifo);
     61          mutex_unlock(&fifo->base.engine.subdev.mutex);
     67          struct nv50_fifo *fifo = nv50_fifo(base);              (in nv50_fifo_oneinit)
     …

nouveau_nvkm_engine_fifo_dmanv04.c
     44          struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;   (in nv04_fifo_dma_object_dtor)
     46          mutex_lock(&chan->fifo->base.engine.subdev.mutex);
     48          mutex_unlock(&chan->fifo->base.engine.subdev.mutex);
     56          struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;   (in nv04_fifo_dma_object_ctor)
     71          mutex_lock(&chan->fifo->base.engine.subdev.mutex);
     74          mutex_unlock(&chan->fifo->base.engine.subdev.mutex);
     82          struct nv04_fifo *fifo = chan->fifo;                   (in nv04_fifo_dma_fini)
     83          struct nvkm_device *device = fifo->base.engine.subdev.device;
     87          u32 mask = fifo->base.nr - 1;
     92          spin_lock_irqsave(&fifo->base.lock, flags);
     …

nouveau_nvkm_engine_fifo_nv04.c
     54          __acquires(fifo->base.lock)                            (in nv04_fifo_pause)
     56          struct nv04_fifo *fifo = nv04_fifo(base);
     57          struct nvkm_device *device = fifo->base.engine.subdev.device;
     60          spin_lock_irqsave(&fifo->base.lock, flags);
     90          __releases(fifo->base.lock)                            (in nv04_fifo_start)
     92          struct nv04_fifo *fifo = nv04_fifo(base);
     93          struct nvkm_device *device = fifo->base.engine.subdev.device;
     99          spin_unlock_irqrestore(&fifo->base.lock, flags);
    142  nv04_fifo_cache_error(struct nv04_fifo *fifo, u32 chid, u32 get)
    144          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
    …

nouveau_nvkm_engine_fifo_gpfifogk104.c
     45          struct gk104_fifo *fifo = chan->fifo;                  (in gk104_fifo_gpfifo_kick_locked)
     46          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
     63          nvkm_fifo_recover_chan(&fifo->base, chan->base.chid);
     73          mutex_lock(&chan->base.fifo->engine.subdev.mutex);     (in gk104_fifo_gpfifo_kick)
     75          mutex_unlock(&chan->base.fifo->engine.subdev.mutex);
    192          struct gk104_fifo *fifo = chan->fifo;                  (in gk104_fifo_gpfifo_fini)
    193          struct nvkm_device *device = fifo->base.engine.subdev.device;
    197          gk104_fifo_runlist_remove(fifo, chan);
    200          gk104_fifo_runlist_update(fifo, chan->runl);
    210          struct gk104_fifo *fifo = chan->fifo;                  (in gk104_fifo_gpfifo_init)
    …

nouveau_nvkm_engine_fifo_gpfifogf100.c
     46          *pevent = &chan->fifo->uevent;                         (in gf100_fifo_chan_ntfy)
     49          *pevent = &chan->fifo->kevent;
     80          struct nvkm_subdev *subdev = &chan->fifo->base.engine.subdev;   (in gf100_fifo_gpfifo_engine_fini)
    167          struct gf100_fifo *fifo = chan->fifo;                  (in gf100_fifo_gpfifo_fini)
    168          struct nvkm_device *device = fifo->base.engine.subdev.device;
    172          gf100_fifo_runlist_remove(fifo, chan);
    174          gf100_fifo_runlist_commit(fifo);
    177          gf100_fifo_intr_engine(fifo);
    186          struct gf100_fifo *fifo = chan->fifo;                  (in gf100_fifo_gpfifo_init)
    187          struct nvkm_device *device = fifo->base.engine.subdev.device;
    …

nouveau_nvkm_engine_fifo_dmanv40.c
     68          struct nv04_fifo *fifo = chan->fifo;                   (in nv40_fifo_dma_engine_fini)
     69          struct nvkm_device *device = fifo->base.engine.subdev.device;
     78          spin_lock_irqsave(&fifo->base.lock, flags);
     81          chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1);
     89          spin_unlock_irqrestore(&fifo->base.lock, flags);
     98          struct nv04_fifo *fifo = chan->fifo;                   (in nv40_fifo_dma_engine_init)
     99          struct nvkm_device *device = fifo->base.engine.subdev.device;
    109          spin_lock_irqsave(&fifo->base.lock, flags);
    112          chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1);
    120          spin_unlock_irqrestore(&fifo->base.lock, flags);
    …

nouveau_nvkm_engine_fifo_gpfifogv100.c
     45          struct nvkm_subdev *subdev = &chan->base.fifo->engine.subdev;   (in gv100_fifo_gpfifo_engine_valid)
    129          struct gk104_fifo *fifo, u64 *runlists, u16 *chid,    (in gv100_fifo_gpfifo_new_)
    134          struct nvkm_device *device = fifo->base.engine.subdev.device;
    142          if (!vmm || runlist < 0 || runlist >= fifo->runlist_nr)
    146          engm = fifo->runlist[runlist].engm;
    147          for_each_set_bit(i, &engm, fifo->engine_nr) {
    148                  if (fifo->engine[i].engine)
    149                          subdevs |= BIT_ULL(fifo->engine[i].engine->subdev.index);
    156          chan->fifo = fifo;
    160          ret = nvkm_fifo_chan_ctor(func, &fifo->base, 0x1000, 0x1000, true, vmm,
    …

nouveau_nvkm_engine_fifo_channv50.c
     55          struct nv50_fifo *fifo = chan->fifo;                   (in nv50_fifo_chan_engine_fini)
     56          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
    187          struct nv50_fifo *fifo = chan->fifo;                   (in nv50_fifo_chan_fini)
    188          struct nvkm_device *device = fifo->base.engine.subdev.device;
    193          nv50_fifo_runlist_update(fifo);
    201          struct nv50_fifo *fifo = chan->fifo;                   (in nv50_fifo_chan_init)
    202          struct nvkm_device *device = fifo->base.engine.subdev.device;
    207          nv50_fifo_runlist_update(fifo);
    236  nv50_fifo_chan_ctor(struct nv50_fifo *fifo, u64 vmm, u64 push,
    240          struct nvkm_device *device = fifo->base.engine.subdev.device;
    …

nouveau_nvkm_engine_fifo_chan.c
    162          if (engine->func->fifo.cclass) {                       (in nvkm_fifo_chan_child_new)
    163                  ret = engine->func->fifo.cclass(chan, &cclass,
    210          struct nvkm_fifo *fifo = chan->fifo;                   (in nvkm_fifo_chan_child_get)
    211          struct nvkm_device *device = fifo->engine.subdev.device;
    222          if (engine->func->fifo.sclass) {
    223                  ret = engine->func->fifo.sclass(oclass, index);
    372          struct nvkm_fifo *fifo = chan->fifo;                   (in nvkm_fifo_chan_dtor)
    376          spin_lock_irqsave(&fifo->lock, flags);
    378          __clear_bit(chan->chid, fifo->mask);
    381          spin_unlock_irqrestore(&fifo->lock, flags);
    …
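
In nvkm_fifo_chan_dtor() the channel ID is handed back by clearing its bit in fifo->mask under fifo->lock; allocation in the channel constructor is the mirror image, taking the first clear bit. A small userspace sketch of that lock-protected ID bitmap (plain C loops instead of the kernel's find_first_zero_bit()/__set_bit() helpers):

```c
#include <limits.h>
#include <pthread.h>
#include <stdio.h>

#define NR_CHANNELS 128
#define WORD_BITS   (sizeof(unsigned long) * CHAR_BIT)

static unsigned long chan_mask[(NR_CHANNELS + WORD_BITS - 1) / WORD_BITS];
static pthread_mutex_t chan_lock = PTHREAD_MUTEX_INITIALIZER;

/* Allocate the lowest free channel ID, or -1 if all are in use. */
static int chan_alloc(void)
{
	int chid = -1, i;

	pthread_mutex_lock(&chan_lock);
	for (i = 0; i < NR_CHANNELS; i++) {
		if (!(chan_mask[i / WORD_BITS] & (1UL << (i % WORD_BITS)))) {
			chan_mask[i / WORD_BITS] |= 1UL << (i % WORD_BITS);
			chid = i;
			break;
		}
	}
	pthread_mutex_unlock(&chan_lock);
	return chid;
}

/* Return an ID to the pool -- the analogue of __clear_bit() above. */
static void chan_free(int chid)
{
	pthread_mutex_lock(&chan_lock);
	chan_mask[chid / WORD_BITS] &= ~(1UL << (chid % WORD_BITS));
	pthread_mutex_unlock(&chan_lock);
}

int main(void)
{
	int a = chan_alloc(), b = chan_alloc();

	printf("got %d and %d\n", a, b);
	chan_free(a);
	printf("reallocated %d\n", chan_alloc());	/* reuses the freed ID */
	return 0;
}
```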

nouveau_nvkm_engine_fifo_chang84.c
     44          *pevent = &chan->fifo->uevent;                         (in g84_fifo_chan_ntfy)
     99          struct nv50_fifo *fifo = chan->fifo;                   (in g84_fifo_chan_engine_fini)
    100          struct nvkm_subdev *subdev = &fifo->base.engine.subdev;
    213          struct nv50_fifo *fifo = chan->fifo;                   (in g84_fifo_chan_init)
    214          struct nvkm_device *device = fifo->base.engine.subdev.device;
    219          nv50_fifo_runlist_update(fifo);
    237  g84_fifo_chan_ctor(struct nv50_fifo *fifo, u64 vmm, u64 push,
    241          struct nvkm_device *device = fifo->base.engine.subdev.device;
    247          ret = nvkm_fifo_chan_ctor(&g84_fifo_chan_func, &fifo->base,
    264          chan->fifo = fifo;

nouveau_nvkm_engine_fifo_tu102.c
     37  tu102_fifo_runlist_commit(struct gk104_fifo *fifo, int runl,
     40          struct nvkm_device *device = fifo->base.engine.subdev.device;
     88  tu102_fifo_pbdma_init(struct gk104_fifo *fifo)
     90          struct nvkm_device *device = fifo->base.engine.subdev.device;
     91          const u32 mask = (1 << fifo->pbdma_nr) - 1;

nouveau_nvkm_engine_fifo_g84.c
     33  g84_fifo_uevent_fini(struct nvkm_fifo *fifo)
     35          struct nvkm_device *device = fifo->engine.subdev.device;
     40  g84_fifo_uevent_init(struct nvkm_fifo *fifo)
     42          struct nvkm_device *device = fifo->engine.subdev.device;

/netbsd-src/sys/external/bsd/drm2/dist/drm/vmwgfx/

vmwgfx_fifo.c
     50          const struct vmw_fifo_state *fifo = &dev_priv->fifo;   (in vmw_fifo_have_3d)
     77                  ((fifo->capabilities &
    110  int vmw_fifo_init(struct vmw_private *dev_priv, struct vmw_fifo_state *fifo)
    116          fifo->dx = false;
    117          fifo->static_buffer_size = VMWGFX_FIFO_STATIC_SIZE;
    118          fifo->static_buffer = vmalloc(fifo->static_buffer_size);
    119          if (unlikely(fifo->static_buffer == NULL))
    122          fifo->dynamic_buffer = NULL;
    123          fifo->reserved_size = 0;
    124          fifo->using_bounce_buffer = false;
    …
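
vmw_fifo_init() preallocates a fixed-size static_buffer once and keeps a dynamic_buffer pointer around for the rare reservation that does not fit, with using_bounce_buffer recording which one is in play. A standalone sketch of that static-buffer-with-fallback idea (plain malloc in place of vmalloc; the names and the 4 KiB size are illustrative, not vmwgfx's):

```c
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#define STATIC_FIFO_SIZE 4096		/* stand-in for VMWGFX_FIFO_STATIC_SIZE */

struct fifo_state {
	void	*static_buffer;		/* preallocated once at init */
	size_t	 static_buffer_size;
	void	*dynamic_buffer;	/* only for oversized reservations */
	size_t	 reserved_size;
	bool	 using_bounce_buffer;
};

static int fifo_state_init(struct fifo_state *f)
{
	memset(f, 0, sizeof(*f));
	f->static_buffer_size = STATIC_FIFO_SIZE;
	f->static_buffer = malloc(f->static_buffer_size);
	return f->static_buffer != NULL ? 0 : -1;
}

/* Return a bounce area of 'size' bytes: the static buffer when it is
 * big enough, otherwise a one-off dynamic allocation. */
static void *fifo_reserve_bounce(struct fifo_state *f, size_t size)
{
	f->reserved_size = size;
	f->using_bounce_buffer = true;
	if (size <= f->static_buffer_size)
		return f->static_buffer;

	free(f->dynamic_buffer);	/* drop any previous oversized buffer */
	f->dynamic_buffer = malloc(size);
	return f->dynamic_buffer;
}
```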

/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/dispnv04/

nouveau_dispnv04_arb.c
     59  nv04_calc_arb(struct nv_fifo_info *fifo, struct nv_sim_state *arb)
    107          fifo->lwm = clwm;
    108          fifo->burst = cbs;
    113  nv10_calc_arb(struct nv_fifo_info *fifo, struct nv_sim_state *arb)
    179          fifo->burst = min(max_burst_o, 1024);
    183          fifo->burst = min(max_burst_l, fifo->burst);
    185          fifo->burst = rounddown_pow_of_two(fifo->burst);
    190          max_lwm = fifo_len - fifo->burst
    192                  + fifo->burst * drain_rate / fill_rate;
    194          fifo->lwm = min_lwm + 10 * (max_lwm - min_lwm) / 100; /* Empirical. */
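
In the nv10 path the burst size is clamped against two independent limits, rounded down to a power of two, and the low-water mark is then placed 10% of the way from the computed minimum toward the maximum. A self-contained rerun of that arithmetic with made-up inputs (the real limits come from the memory-timing fields of nv_sim_state):

```c
#include <stdio.h>

/* Round down to the nearest power of two; the kernel helper
 * rounddown_pow_of_two() does this in the real driver. */
static unsigned int rounddown_pow2(unsigned int v)
{
	unsigned int r = 1;

	while (r * 2 <= v)
		r *= 2;
	return r;
}

static unsigned int min_u(unsigned int a, unsigned int b)
{
	return a < b ? a : b;
}

int main(void)
{
	/* Illustrative numbers only -- not derived from real hardware state. */
	unsigned int max_burst_o = 900, max_burst_l = 700;
	unsigned int min_lwm = 100, max_lwm = 400;
	unsigned int burst, lwm;

	burst = min_u(max_burst_o, 1024);	/* cap at 1024 bytes        */
	burst = min_u(max_burst_l, burst);	/* latency-derived cap      */
	burst = rounddown_pow2(burst);		/* -> 512 with these inputs */

	lwm = min_lwm + 10 * (max_lwm - min_lwm) / 100;	/* 10% up: 130 */

	printf("burst=%u lwm=%u\n", burst, lwm);
	return 0;
}
```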

/netbsd-src/external/bsd/libevent/dist/sample/

event-read-fifo.c
    107          const char *fifo = "event.fifo";                       (in main)
    110          if (lstat(fifo, &st) == 0) {
    118          unlink(fifo);
    119          if (mkfifo(fifo, 0600) == -1) {
    124          socket = open(fifo, O_RDONLY | O_NONBLOCK, 0);
    131          fprintf(stderr, "Write data to %s\n", fifo);
    158          unlink(fifo);
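
The sample's main() creates the named pipe, opens it O_RDONLY|O_NONBLOCK so the open does not wait for a writer, and hands the descriptor to libevent. A trimmed sketch of the same flow against the libevent 2 API (error handling reduced; the FIFO name and callback body are illustrative, not the sample's):

```c
#include <sys/stat.h>
#include <event2/event.h>
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>

static void fifo_read_cb(evutil_socket_t fd, short events, void *arg)
{
	char buf[256];
	ssize_t n = read(fd, buf, sizeof(buf) - 1);

	(void)events; (void)arg;
	if (n > 0) {
		buf[n] = '\0';
		printf("read: %s", buf);
	}
}

int main(void)
{
	const char *path = "demo.fifo";		/* illustrative name */
	struct event_base *base;
	struct event *ev;
	int fd;

	unlink(path);				/* start from a clean slate */
	if (mkfifo(path, 0600) == -1)
		return 1;

	/* O_NONBLOCK so open() does not block waiting for a writer. */
	fd = open(path, O_RDONLY | O_NONBLOCK);
	if (fd == -1)
		return 1;

	base = event_base_new();
	ev = event_new(base, fd, EV_READ | EV_PERSIST, fifo_read_cb, NULL);
	event_add(ev, NULL);			/* no timeout */
	event_base_dispatch(base);		/* loop until no events remain */

	event_free(ev);
	event_base_free(base);
	close(fd);
	unlink(path);
	return 0;
}
```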

/netbsd-src/external/ibm-public/postfix/dist/src/util/

stream_connect.c
     69          int fifo;                                              (in stream_connect)
     75          if ((fifo = open(path, O_WRONLY | O_NONBLOCK, 0)) < 0)
     82          non_blocking(fifo, BLOCKING);
     89          if (ioctl(fifo, I_SENDFD, pair[1]) < 0)
    102          close(fifo);
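
stream_connect() is Postfix's client side of FIFO-based connection hand-off on STREAMS platforms: open the server's FIFO without blocking, switch the descriptor back to blocking, then pass one end of a local pipe to the server with the I_SENDFD ioctl. A reduced sketch of that sequence, assuming a STREAMS-capable system (<stropts.h> and I_SENDFD are not available on NetBSD or current glibc); fcntl() stands in for Postfix's non_blocking() helper and the error paths are collapsed:

```c
#include <sys/ioctl.h>
#include <fcntl.h>
#include <stropts.h>		/* STREAMS-only: I_SENDFD */
#include <unistd.h>

/* Connect to a FIFO-listening server and hand it our end of a pipe,
 * mirroring the open / restore-blocking / I_SENDFD sequence above. */
static int fifo_stream_connect(const char *path, int *client_fd)
{
	int fifo, pair[2];

	/* O_NONBLOCK so open() cannot hang when no server is listening. */
	if ((fifo = open(path, O_WRONLY | O_NONBLOCK)) < 0)
		return -1;

	/* Restore blocking behaviour for the actual hand-off. */
	(void)fcntl(fifo, F_SETFL, fcntl(fifo, F_GETFL) & ~O_NONBLOCK);

	if (pipe(pair) < 0) {
		close(fifo);
		return -1;
	}

	/* Give the server one end of the pipe; keep the other locally. */
	if (ioctl(fifo, I_SENDFD, pair[1]) < 0) {
		close(pair[0]);
		close(pair[1]);
		close(fifo);
		return -1;
	}
	close(pair[1]);
	close(fifo);		/* the FIFO was only used for the hand-off */

	*client_fd = pair[0];
	return 0;
}
```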

/netbsd-src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/engine/device/

nouveau_nvkm_engine_device_base.c
    118          .fifo = nv04_fifo_new,
    139          .fifo = nv04_fifo_new,
    181          .fifo = nv10_fifo_new,
    203          .fifo = nv10_fifo_new,
    225          .fifo = nv17_fifo_new,
    247          .fifo = nv17_fifo_new,
    269          .fifo = nv10_fifo_new,
    291          .fifo = nv17_fifo_new,
    313          .fifo = nv17_fifo_new,
    335          .fifo = nv17_fifo_new,
    …

/netbsd-src/sys/external/gpl2/dts/dist/arch/arm/boot/dts/

bcm283x-rpi-usb-otg.dtsi
      4          g-rx-fifo-size = <256>;
      5          g-np-tx-fifo-size = <32>;
      8           * fifo sizes shouldn't exceed 3776 bytes.
     10          g-tx-fifo-size = <256 256 512 512 512 768 768>;

/netbsd-src/tests/kernel/kqueue/write/

t_fifo.c
     68  ATF_TC_WITH_CLEANUP(fifo);
     69  ATF_TC_HEAD(fifo, tc)
     73  ATF_TC_BODY(fifo, tc)
    140  ATF_TC_CLEANUP(fifo, tc)
    147  ATF_TP_ADD_TC(tp, fifo);                                       (in ATF_TP_ADD_TCS)
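
This test lives under tests/kernel/kqueue/write, so the fifo case exercises EVFILT_WRITE on a named pipe through the ATF macros listed. A minimal standalone version of that kind of check, outside the ATF harness (the FIFO path is illustrative):

```c
#include <sys/types.h>
#include <sys/event.h>
#include <sys/stat.h>
#include <err.h>
#include <fcntl.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	const char *path = "kq_demo.fifo";	/* illustrative name */
	struct kevent kev;
	int kq, rfd, wfd, n;

	(void)unlink(path);
	if (mkfifo(path, 0600) == -1)
		err(1, "mkfifo");

	/* Open the read side first so the write side can be opened
	 * without blocking; the write side is what we monitor. */
	rfd = open(path, O_RDONLY | O_NONBLOCK);
	wfd = open(path, O_WRONLY);
	if (rfd == -1 || wfd == -1)
		err(1, "open");

	if ((kq = kqueue()) == -1)
		err(1, "kqueue");

	/* Ask for notification when the FIFO is writable. */
	EV_SET(&kev, wfd, EVFILT_WRITE, EV_ADD | EV_ENABLE, 0, 0, NULL);
	if (kevent(kq, &kev, 1, NULL, 0, NULL) == -1)
		err(1, "kevent register");

	/* An empty FIFO should be writable immediately; kev.data reports
	 * roughly how much can be written without blocking. */
	n = kevent(kq, NULL, 0, &kev, 1, NULL);
	if (n == 1)
		printf("writable, space ~%jd bytes\n", (intmax_t)kev.data);

	close(rfd);
	close(wfd);
	close(kq);
	unlink(path);
	return 0;
}
```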