Lines Matching defs:ncr_sc
173 struct ncr5380_softc *ncr_sc = p;
174 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
178 if (*ncr_sc->sci_csr & SCI_CSR_INT) {
181 decode_5380_intr(ncr_sc);
184 claimed = ncr5380_intr(ncr_sc);
186 if (((*ncr_sc->sci_csr & ~SCI_CSR_PHASE_MATCH) == SCI_CSR_INT)
187 && ((*ncr_sc->sci_bus_csr & ~SCI_BUS_RST) == 0)) {
188 SCI_CLR_INTR(ncr_sc); /* RST interrupt */
190 (*sc->sc_clrintr)(ncr_sc);
195 device_xname(ncr_sc->sc_dev));
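
The fragments above come from the board-level interrupt handler. A minimal sketch of how they fit together, assuming the conventional sbc_intr() shape; the function name, the claimed/return convention, the NULL check on sc_clrintr, and the warning text are assumptions, while the register tests are taken verbatim from the listing:

    static int
    sbc_intr(void *p)
    {
        struct ncr5380_softc *ncr_sc = p;
        struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
        int claimed = 0;

        /* Leave the chip alone unless it is actually asserting an interrupt. */
        if (*ncr_sc->sci_csr & SCI_CSR_INT) {
            decode_5380_intr(ncr_sc);        /* debug decode */
            claimed = ncr5380_intr(ncr_sc);
            if (!claimed) {
                /* INT with no phase mismatch and a quiet bus: stale RST interrupt. */
                if (((*ncr_sc->sci_csr & ~SCI_CSR_PHASE_MATCH) == SCI_CSR_INT)
                    && ((*ncr_sc->sci_bus_csr & ~SCI_BUS_RST) == 0)) {
                    SCI_CLR_INTR(ncr_sc);    /* RST interrupt */
                    if (sc->sc_clrintr)
                        (*sc->sc_clrintr)(ncr_sc);
                } else
                    printf("%s: unexpected interrupt\n",    /* text assumed */
                        device_xname(ncr_sc->sc_dev));
            }
        }
        return claimed;
    }
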
205 decode_5380_intr(struct ncr5380_softc *ncr_sc)
207 u_int8_t csr = *ncr_sc->sci_csr;
208 u_int8_t bus_csr = *ncr_sc->sci_bus_csr;
213 printf("%s: reselect\n", device_xname(ncr_sc->sc_dev));
215 printf("%s: select\n", device_xname(ncr_sc->sc_dev));
218 printf("%s: DMA eop\n", device_xname(ncr_sc->sc_dev));
221 printf("%s: bus reset\n", device_xname(ncr_sc->sc_dev));
224 printf("%s: parity error\n", device_xname(ncr_sc->sc_dev));
227 printf("%s: phase mismatch\n", device_xname(ncr_sc->sc_dev));
230 printf("%s: disconnect\n", device_xname(ncr_sc->sc_dev));
233 device_xname(ncr_sc->sc_dev), csr, bus_csr);
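
decode_5380_intr() snapshots the two status registers and prints one line per recognizable condition. A sketch of the dispatch using conventional NCR 5380 bit tests; the listing only shows the messages, so every bit combination below is an assumption:

    static void
    decode_5380_intr(struct ncr5380_softc *ncr_sc)
    {
        u_int8_t csr = *ncr_sc->sci_csr;
        u_int8_t bus_csr = *ncr_sc->sci_bus_csr;

        if ((bus_csr & (SCI_BUS_SEL | SCI_BUS_IO)) == (SCI_BUS_SEL | SCI_BUS_IO))
            printf("%s: reselect\n", device_xname(ncr_sc->sc_dev));
        else if (bus_csr & SCI_BUS_SEL)
            printf("%s: select\n", device_xname(ncr_sc->sc_dev));
        else if (csr & SCI_CSR_DONE)
            printf("%s: DMA eop\n", device_xname(ncr_sc->sc_dev));
        else if (bus_csr & SCI_BUS_RST)
            printf("%s: bus reset\n", device_xname(ncr_sc->sc_dev));
        else if (csr & SCI_CSR_PERR)
            printf("%s: parity error\n", device_xname(ncr_sc->sc_dev));
        else if ((csr & SCI_CSR_PHASE_MATCH) == 0)
            printf("%s: phase mismatch\n", device_xname(ncr_sc->sc_dev));
        else if ((bus_csr & SCI_BUS_BSY) == 0)
            printf("%s: disconnect\n", device_xname(ncr_sc->sc_dev));
        else
            printf("%s: unknown intr: csr=0x%x, bus_csr=0x%x\n",
                device_xname(ncr_sc->sc_dev), csr, bus_csr);
    }
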
243 sbc_pdma_in(struct ncr5380_softc *ncr_sc, int phase, int datalen, u_char *data)
245 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
251 if (datalen < ncr_sc->sc_min_dma_len ||
253 (ncr_sc->sc_current != NULL &&
254 (ncr_sc->sc_current->sr_xs->xs_control & XS_CTL_POLL)))
255 return ncr5380_pio_in(ncr_sc, phase, datalen, data);
258 if (sbc_wait_busy(ncr_sc)) {
263 *ncr_sc->sci_mode |= SCI_MODE_DMA;
264 *ncr_sc->sci_irecv = 0;
280 if (sbc_ready(ncr_sc))
288 if (sbc_ready(ncr_sc))
297 SCI_CLR_INTR(ncr_sc);
298 *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
299 *ncr_sc->sci_icmd = 0;
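
sbc_pdma_in() falls back to programmed I/O for short or polled transfers and otherwise runs the receive as pseudo-DMA: wait for the target, switch the chip into DMA mode, start the receive, move the data, then undo the mode. A condensed sketch of that register sequence; the failure path, the resid bookkeeping, and the elided DRQ copy loop (which uses the board's DRQ mapping and sbc_ready()) are assumptions, and the guard reproduces only the clauses visible in the listing:

    static int
    sbc_pdma_in(struct ncr5380_softc *ncr_sc, int phase, int datalen, u_char *data)
    {
        int resid = datalen;

        /* Short or polled transfers are not worth the PDMA setup. */
        if (datalen < ncr_sc->sc_min_dma_len ||
            (ncr_sc->sc_current != NULL &&
             (ncr_sc->sc_current->sr_xs->xs_control & XS_CTL_POLL)))
            return ncr5380_pio_in(ncr_sc, phase, datalen, data);

        /* Wait for the target before touching the data path. */
        if (sbc_wait_busy(ncr_sc))
            return 0;                     /* assumed failure path */

        /* Arm receive-mode pseudo-DMA. */
        *ncr_sc->sci_mode |= SCI_MODE_DMA;
        *ncr_sc->sci_irecv = 0;

        /*
         * Copy loop elided: each chunk waits on sbc_ready() and reads
         * bytes through the DRQ-mapped data area, decrementing resid.
         */

        /* Tear down: ack any pending interrupt and drop out of DMA mode. */
        SCI_CLR_INTR(ncr_sc);
        *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
        *ncr_sc->sci_icmd = 0;
        return datalen - resid;
    }
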
305 sbc_pdma_out(struct ncr5380_softc *ncr_sc, int phase, int datalen, u_char *data)
307 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
319 if (datalen < ncr_sc->sc_min_dma_len ||
322 (ncr_sc->sc_current != NULL &&
323 (ncr_sc->sc_current->sr_xs->xs_control & XS_CTL_POLL)))
324 return ncr5380_pio_out(ncr_sc, phase, datalen, data);
327 if (sbc_wait_busy(ncr_sc)) {
332 icmd = *(ncr_sc->sci_icmd) & SCI_ICMD_RMASK;
333 *ncr_sc->sci_icmd = icmd | SCI_ICMD_DATA;
334 *ncr_sc->sci_mode |= SCI_MODE_DMA;
335 *ncr_sc->sci_dma_send = 0;
353 if (sbc_ready(ncr_sc))
356 if (sbc_ready(ncr_sc))
359 if (sbc_ready(ncr_sc))
362 if (sbc_ready(ncr_sc))
365 if (sbc_ready(ncr_sc))
373 if (sbc_ready(ncr_sc))
380 if (sbc_wait_dreq(ncr_sc))
382 device_xname(ncr_sc->sc_dev));
388 if ((*ncr_sc->sci_csr & SCI_CSR_PHASE_MATCH) == 0) {
389 *ncr_sc->sci_icmd = icmd & ~SCI_ICMD_DATA;
394 SCI_CLR_INTR(ncr_sc);
395 *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
396 *ncr_sc->sci_icmd = icmd;
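
The output side mirrors the receive path, with two extra steps visible in the fragments: the initiator drives the data bus (SCI_ICMD_DATA) before starting the send, and after the last chunk it waits for a final DRQ and checks for a phase mismatch before restoring the saved initiator command value. A condensed sketch; as above, the DRQ copy loop is elided and the failure path, warning text, and resid bookkeeping are assumptions:

    static int
    sbc_pdma_out(struct ncr5380_softc *ncr_sc, int phase, int datalen, u_char *data)
    {
        int resid = datalen;
        u_int8_t icmd;

        /* Short or polled transfers fall back to programmed I/O. */
        if (datalen < ncr_sc->sc_min_dma_len ||
            (ncr_sc->sc_current != NULL &&
             (ncr_sc->sc_current->sr_xs->xs_control & XS_CTL_POLL)))
            return ncr5380_pio_out(ncr_sc, phase, datalen, data);

        if (sbc_wait_busy(ncr_sc))
            return 0;                     /* assumed failure path */

        /* Save the initiator command bits, drive the data bus, arm send PDMA. */
        icmd = *(ncr_sc->sci_icmd) & SCI_ICMD_RMASK;
        *ncr_sc->sci_icmd = icmd | SCI_ICMD_DATA;
        *ncr_sc->sci_mode |= SCI_MODE_DMA;
        *ncr_sc->sci_dma_send = 0;

        /*
         * Copy loop elided: each chunk waits on sbc_ready() and writes
         * through the DRQ-mapped data area, decrementing resid.
         */

        /* Make sure the chip accepted the last byte. */
        if (sbc_wait_dreq(ncr_sc))
            printf("%s: timeout waiting for final DRQ\n",    /* text assumed */
                device_xname(ncr_sc->sc_dev));

        /* On a phase mismatch, stop driving the data bus immediately. */
        if ((*ncr_sc->sci_csr & SCI_CSR_PHASE_MATCH) == 0)
            *ncr_sc->sci_icmd = icmd & ~SCI_ICMD_DATA;

        /* Tear down: ack any pending interrupt, leave DMA mode, restore icmd. */
        SCI_CLR_INTR(ncr_sc);
        *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
        *ncr_sc->sci_icmd = icmd;
        return datalen - resid;
    }
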
425 struct ncr5380_softc *ncr_sc = (struct ncr5380_softc *)p;
426 struct sci_req *sr = ncr_sc->sc_current;
438 if (sbc_ready(ncr_sc) || dh->dh_len == 0)
444 device_xname(ncr_sc->sc_dev), dh->dh_len, dh->dh_flags);
465 device_xname(ncr_sc->sc_dev), count,
478 device_xname(ncr_sc->sc_dev), count, dh->dh_len);
489 #define CHECKMORE if ((*ncr_sc->sci_csr & SCI_CSR_DREQ) == 0) { \
553 if (*ncr_sc->sci_csr & SCI_CSR_ACK)
624 device_xname(ncr_sc->sc_dev), *ncr_sc->sci_csr,
625 *ncr_sc->sci_bus_csr);
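
The fragments between sbc_pdma_out() and sbc_dma_alloc() come from the DRQ interrupt path that actually moves the bytes: it picks up the current request's DMA handle, copies between dh->dh_addr and the DRQ-mapped register space while the chip keeps asserting DREQ, and re-checks DREQ (the CHECKMORE macro) before each burst so it can stop the moment the chip stops requesting. A toy illustration of that idea only; the function name is invented, the handle type name is assumed, and a byte-at-a-time loop stands in for the real unrolled, bus-error-protected long-word copy:

    /* Illustration only: the shape of the DRQ-driven copy, data-in direction. */
    static void
    drq_copy_in_sketch(struct ncr5380_softc *ncr_sc, struct sbc_pdma_handle *dh,
        volatile u_char *drq)
    {
        /* Re-check DREQ before every transfer; bail out the moment it drops. */
        while (dh->dh_len > 0) {
            if ((*ncr_sc->sci_csr & SCI_CSR_DREQ) == 0)
                break;
            *dh->dh_addr++ = *drq;
            dh->dh_len--;
        }
    }
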
630 sbc_dma_alloc(struct ncr5380_softc *ncr_sc)
632 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
633 struct sci_req *sr = ncr_sc->sc_current;
647 xlen = ncr_sc->sc_datalen;
666 dh->dh_addr = ncr_sc->sc_dataptr;
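
sbc_dma_alloc() shows that "allocating DMA" here is pure bookkeeping: the current data pointer and length are copied into a per-request handle so the DRQ path has something to work against. A sketch under those assumptions; the handle type, the pool helper, the direction test, and the flag name are all invented or assumed for illustration:

    static void
    sbc_dma_alloc(struct ncr5380_softc *ncr_sc)
    {
        struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
        struct sci_req *sr = ncr_sc->sc_current;
        struct sbc_pdma_handle *dh;                   /* handle type name assumed */
        int xlen;

        xlen = ncr_sc->sc_datalen;

        /* Grab a free handle; hypothetical helper, the real driver scans a fixed pool. */
        dh = sbc_pdma_handle_get(sc);

        /* Record where the data lives and how much of it there is. */
        dh->dh_flags = 0;
        dh->dh_addr = ncr_sc->sc_dataptr;
        dh->dh_len = xlen;
        if (sr->sr_xs->xs_control & XS_CTL_DATA_OUT)  /* direction test assumed */
            dh->dh_flags |= SBC_DH_OUT;               /* flag name assumed */

        sr->sr_dma_hand = dh;
    }
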
677 sbc_dma_free(struct ncr5380_softc *ncr_sc)
679 struct sci_req *sr = ncr_sc->sc_current;
687 if (ncr_sc->sc_state & NCR_DOINGDMA)
699 sbc_dma_poll(struct ncr5380_softc *ncr_sc)
701 struct sci_req *sr = ncr_sc->sc_current;
712 device_xname(ncr_sc->sc_dev));
718 sbc_dma_setup(struct ncr5380_softc *ncr_sc)
724 sbc_dma_start(struct ncr5380_softc *ncr_sc)
726 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
727 struct sci_req *sr = ncr_sc->sc_current;
735 *ncr_sc->sci_tcmd = PHASE_DATA_OUT;
736 SCI_CLR_INTR(ncr_sc);
738 (*sc->sc_clrintr)(ncr_sc);
739 *ncr_sc->sci_mode |= SCI_MODE_DMA;
740 *ncr_sc->sci_icmd = SCI_ICMD_DATA;
741 *ncr_sc->sci_dma_send = 0;
743 *ncr_sc->sci_tcmd = PHASE_DATA_IN;
744 SCI_CLR_INTR(ncr_sc);
746 (*sc->sc_clrintr)(ncr_sc);
747 *ncr_sc->sci_mode |= SCI_MODE_DMA;
748 *ncr_sc->sci_icmd = 0;
749 *ncr_sc->sci_irecv = 0;
751 ncr_sc->sc_state |= NCR_DOINGDMA;
756 device_xname(ncr_sc->sc_dev), dh->dh_addr, dh->dh_len);
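
sbc_dma_start() is where the chip is actually armed: load the target command register with the expected phase, clear any pending interrupt in both the chip and the board glue, enable DMA mode, and poke either the send or the receive start register depending on direction, then mark the core as mid-transfer. A sketch pulling those fragments together; the direction test on dh_flags, the handle type name, and the NULL check on sc_clrintr are assumptions:

    static void
    sbc_dma_start(struct ncr5380_softc *ncr_sc)
    {
        struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
        struct sci_req *sr = ncr_sc->sc_current;
        struct sbc_pdma_handle *dh = sr->sr_dma_hand;    /* type name assumed */

        if (dh->dh_flags & SBC_DH_OUT) {                 /* flag name assumed */
            *ncr_sc->sci_tcmd = PHASE_DATA_OUT;
            SCI_CLR_INTR(ncr_sc);
            if (sc->sc_clrintr)
                (*sc->sc_clrintr)(ncr_sc);
            *ncr_sc->sci_mode |= SCI_MODE_DMA;
            *ncr_sc->sci_icmd = SCI_ICMD_DATA;
            *ncr_sc->sci_dma_send = 0;
        } else {
            *ncr_sc->sci_tcmd = PHASE_DATA_IN;
            SCI_CLR_INTR(ncr_sc);
            if (sc->sc_clrintr)
                (*sc->sc_clrintr)(ncr_sc);
            *ncr_sc->sci_mode |= SCI_MODE_DMA;
            *ncr_sc->sci_icmd = 0;
            *ncr_sc->sci_irecv = 0;
        }
        ncr_sc->sc_state |= NCR_DOINGDMA;
    }
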
761 sbc_dma_eop(struct ncr5380_softc *ncr_sc)
767 sbc_dma_stop(struct ncr5380_softc *ncr_sc)
769 struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
770 struct sci_req *sr = ncr_sc->sc_current;
774 if ((ncr_sc->sc_state & NCR_DOINGDMA) == 0) {
778 device_xname(ncr_sc->sc_dev));
782 ncr_sc->sc_state &= ~NCR_DOINGDMA;
784 if ((ncr_sc->sc_state & NCR_ABORTING) == 0) {
785 ntrans = ncr_sc->sc_datalen - dh->dh_len;
790 device_xname(ncr_sc->sc_dev), ntrans);
793 if (ntrans > ncr_sc->sc_datalen)
797 ncr_sc->sc_dataptr += ntrans;
798 ncr_sc->sc_datalen -= ntrans;
801 SCI_CLR_INTR(ncr_sc);
803 (*sc->sc_clrintr)(ncr_sc);
807 *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
808 *ncr_sc->sci_icmd = 0;
813 device_xname(ncr_sc->sc_dev), *ncr_sc->sci_csr,
814 *ncr_sc->sci_bus_csr);
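
sbc_dma_stop() is the bookkeeping counterpart: unless the transfer is being aborted, the residual left in the DMA handle tells how much data actually moved, the core's data pointer and length are advanced by that amount, and the chip is taken out of DMA mode with interrupts cleared on both sides. A condensed sketch; the warning text, the overrun panic, and the handle type name are assumptions, and the trailing debug printf of csr/bus_csr is omitted:

    static void
    sbc_dma_stop(struct ncr5380_softc *ncr_sc)
    {
        struct sbc_softc *sc = (struct sbc_softc *)ncr_sc;
        struct sci_req *sr = ncr_sc->sc_current;
        struct sbc_pdma_handle *dh = sr->sr_dma_hand;    /* type name assumed */
        int ntrans;

        if ((ncr_sc->sc_state & NCR_DOINGDMA) == 0) {
            printf("%s: dma_stop: DMA not running\n",    /* text assumed */
                device_xname(ncr_sc->sc_dev));
            return;
        }
        ncr_sc->sc_state &= ~NCR_DOINGDMA;

        if ((ncr_sc->sc_state & NCR_ABORTING) == 0) {
            /* dh_len holds the residual; the difference is what moved. */
            ntrans = ncr_sc->sc_datalen - dh->dh_len;
            if (ntrans > ncr_sc->sc_datalen)
                panic("sbc_dma_stop: excess transfer");  /* assumed */
            ncr_sc->sc_dataptr += ntrans;
            ncr_sc->sc_datalen -= ntrans;
        }

        /* Clear pending interrupts in the chip and the board glue, leave DMA mode. */
        SCI_CLR_INTR(ncr_sc);
        if (sc->sc_clrintr)
            (*sc->sc_clrintr)(ncr_sc);
        *ncr_sc->sci_mode &= ~SCI_MODE_DMA;
        *ncr_sc->sci_icmd = 0;
    }
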