Lines Matching defs:sc
76 static void cuda_send_inbound(struct cuda_softc *sc);
77 static void cuda_send_outbound(struct cuda_softc *sc);
117 static uint8_t cuda_read_reg(struct cuda_softc *sc, u_int offset);
118 static void cuda_write_reg(struct cuda_softc *sc, u_int offset, uint8_t value);
143 struct cuda_softc *sc;
149 sc = device_get_softc(dev);
150 sc->sc_dev = dev;
152 sc->sc_memrid = 0;
153 sc->sc_memr = bus_alloc_resource_any(dev, SYS_RES_MEMORY,
154 &sc->sc_memrid, RF_ACTIVE);
156 if (sc->sc_memr == NULL) {
161 sc->sc_irqrid = 0;
162 sc->sc_irq = bus_alloc_resource_any(dev, SYS_RES_IRQ, &sc->sc_irqrid,
164 if (sc->sc_irq == NULL) {
166 bus_release_resource(dev, SYS_RES_MEMORY, sc->sc_memrid,
167 sc->sc_memr);
171 if (bus_setup_intr(dev, sc->sc_irq, INTR_TYPE_MISC | INTR_MPSAFE
172 | INTR_ENTROPY, NULL, cuda_intr, dev, &sc->sc_ih) != 0) {
174 bus_release_resource(dev, SYS_RES_MEMORY, sc->sc_memrid,
175 sc->sc_memr);
176 bus_release_resource(dev, SYS_RES_IRQ, sc->sc_irqrid,
177 sc->sc_irq);
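
The lines above follow the standard newbus attach sequence: allocate the memory window, allocate the IRQ, hook the interrupt handler, and unwind whatever was already allocated on each failure. A minimal sketch of that pattern is shown below; foo_softc, foo_attach() and foo_intr() are hypothetical names, and the flags are representative rather than copied from cuda.c (the real attach also passes INTR_ENTROPY).

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>
#include <sys/rman.h>
#include <machine/bus.h>
#include <machine/resource.h>

struct foo_softc {
        device_t         sc_dev;
        int              sc_memrid, sc_irqrid;
        struct resource *sc_memr, *sc_irq;
        void            *sc_ih;
};

static void
foo_intr(void *arg __unused)
{
        /* interrupt work would go here */
}

static int
foo_attach(device_t dev)
{
        struct foo_softc *sc = device_get_softc(dev);

        sc->sc_dev = dev;

        sc->sc_memrid = 0;
        sc->sc_memr = bus_alloc_resource_any(dev, SYS_RES_MEMORY,
            &sc->sc_memrid, RF_ACTIVE);
        if (sc->sc_memr == NULL)
                return (ENXIO);

        sc->sc_irqrid = 0;
        sc->sc_irq = bus_alloc_resource_any(dev, SYS_RES_IRQ,
            &sc->sc_irqrid, RF_ACTIVE);
        if (sc->sc_irq == NULL) {
                /* release what was already allocated, as lines 166-167 do */
                bus_release_resource(dev, SYS_RES_MEMORY, sc->sc_memrid,
                    sc->sc_memr);
                return (ENXIO);
        }

        if (bus_setup_intr(dev, sc->sc_irq, INTR_TYPE_MISC | INTR_MPSAFE,
            NULL, foo_intr, dev, &sc->sc_ih) != 0) {
                bus_release_resource(dev, SYS_RES_IRQ, sc->sc_irqrid,
                    sc->sc_irq);
                bus_release_resource(dev, SYS_RES_MEMORY, sc->sc_memrid,
                    sc->sc_memr);
                return (ENXIO);
        }
        return (0);
}
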
181 mtx_init(&sc->sc_mutex, "cuda", NULL, MTX_DEF | MTX_RECURSE);
183 sc->sc_sent = 0;
184 sc->sc_received = 0;
185 sc->sc_waiting = 0;
186 sc->sc_polling = 0;
187 sc->sc_state = CUDA_NOTREADY;
188 sc->sc_autopoll = 0;
189 sc->sc_rtc = -1;
191 STAILQ_INIT(&sc->sc_inq);
192 STAILQ_INIT(&sc->sc_outq);
193 STAILQ_INIT(&sc->sc_freeq);
196 STAILQ_INSERT_TAIL(&sc->sc_freeq, &sc->sc_pkts[i], pkt_q);
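
The three STAILQ_INIT() calls and the STAILQ_INSERT_TAIL() loop seed a fixed pool of packets onto a free list; packets then circulate between sc_freeq, sc_outq and sc_inq. A small, self-contained userland illustration of the same sys/queue.h idiom follows; the pkt structure, pool size and queue names are illustrative stand-ins, not the driver's real types.

#include <sys/queue.h>
#include <stdio.h>

struct pkt {
        int                     len;
        STAILQ_ENTRY(pkt)       pkt_q;
};

int
main(void)
{
        STAILQ_HEAD(, pkt) freeq, outq;
        struct pkt pool[4], *p;
        int i;

        STAILQ_INIT(&freeq);
        STAILQ_INIT(&outq);

        /* Seed the free list from a static pool, as cuda_attach() does. */
        for (i = 0; i < 4; i++)
                STAILQ_INSERT_TAIL(&freeq, &pool[i], pkt_q);

        /* Take a packet off the free list and queue it for output. */
        p = STAILQ_FIRST(&freeq);
        if (p != NULL) {
                STAILQ_REMOVE_HEAD(&freeq, pkt_q);
                p->len = 2;
                STAILQ_INSERT_TAIL(&outq, p, pkt_q);
        }

        STAILQ_FOREACH(p, &outq, pkt_q)
                printf("queued packet, len %d\n", p->len);
        return (0);
}
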
200 reg = cuda_read_reg(sc, vDirB);
202 cuda_write_reg(sc, vDirB, reg);
204 reg = cuda_read_reg(sc, vDirB);
206 cuda_write_reg(sc, vDirB, reg);
208 reg = cuda_read_reg(sc, vACR);
210 cuda_write_reg(sc, vACR, reg);
212 cuda_write_reg(sc, vACR, (cuda_read_reg(sc, vACR) | 0x0c) & ~0x10);
214 sc->sc_state = CUDA_IDLE; /* used by all types of hardware */
216 cuda_write_reg(sc, vIER, 0x84); /* make sure VIA interrupts are on */
218 cuda_idle(sc); /* reset ADB */
222 i = cuda_read_reg(sc, vSR); /* clear interrupt */
223 cuda_write_reg(sc, vIER, 0x04); /* no interrupts while clearing */
224 cuda_idle(sc); /* reset state to idle */
226 cuda_tip(sc); /* signal start of frame */
228 cuda_toggle_ack(sc);
230 cuda_clear_tip(sc);
232 cuda_idle(sc); /* back to idle state */
233 i = cuda_read_reg(sc, vSR); /* clear interrupt */
234 cuda_write_reg(sc, vIER, 0x84); /* ints ok now */
249 sc->adb_bus = device_add_child(dev, "adb", DEVICE_UNIT_ANY);
254 EVENTHANDLER_REGISTER(shutdown_final, cuda_shutdown, sc,
262 struct cuda_softc *sc;
269 sc = device_get_softc(dev);
271 bus_teardown_intr(dev, sc->sc_irq, sc->sc_ih);
272 bus_release_resource(dev, SYS_RES_IRQ, sc->sc_irqrid, sc->sc_irq);
273 bus_release_resource(dev, SYS_RES_MEMORY, sc->sc_memrid, sc->sc_memr);
274 mtx_destroy(&sc->sc_mutex);
280 cuda_read_reg(struct cuda_softc *sc, u_int offset) {
281 return (bus_read_1(sc->sc_memr, offset));
285 cuda_write_reg(struct cuda_softc *sc, u_int offset, uint8_t value) {
286 bus_write_1(sc->sc_memr, offset, value);
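
cuda_read_reg() and cuda_write_reg() are thin wrappers around bus_read_1()/bus_write_1() on the mapped VIA registers, and nearly every helper below uses them in a read-modify-write sequence. A hypothetical helper expressing that pattern directly (via_set_reg_bits() is not part of the driver) might look like:

static void
via_set_reg_bits(struct resource *memr, u_int offset, uint8_t set,
    uint8_t clear)
{
        uint8_t reg;

        /* Read the register, apply the bit changes, write it back. */
        reg = bus_read_1(memr, offset);
        reg |= set;
        reg &= ~clear;
        bus_write_1(memr, offset, reg);
}
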
290 cuda_idle(struct cuda_softc *sc)
294 reg = cuda_read_reg(sc, vBufB);
296 cuda_write_reg(sc, vBufB, reg);
300 cuda_tip(struct cuda_softc *sc)
304 reg = cuda_read_reg(sc, vBufB);
306 cuda_write_reg(sc, vBufB, reg);
310 cuda_clear_tip(struct cuda_softc *sc)
314 reg = cuda_read_reg(sc, vBufB);
316 cuda_write_reg(sc, vBufB, reg);
320 cuda_in(struct cuda_softc *sc)
324 reg = cuda_read_reg(sc, vACR);
326 cuda_write_reg(sc, vACR, reg);
330 cuda_out(struct cuda_softc *sc)
334 reg = cuda_read_reg(sc, vACR);
336 cuda_write_reg(sc, vACR, reg);
340 cuda_toggle_ack(struct cuda_softc *sc)
344 reg = cuda_read_reg(sc, vBufB);
346 cuda_write_reg(sc, vBufB, reg);
350 cuda_ack_off(struct cuda_softc *sc)
354 reg = cuda_read_reg(sc, vBufB);
356 cuda_write_reg(sc, vBufB, reg);
360 cuda_intr_state(struct cuda_softc *sc)
362 return ((cuda_read_reg(sc, vBufB) & vPB3) == 0);
368 struct cuda_softc *sc = cookie;
369 device_t dev = sc->sc_dev;
372 if (sc->sc_state == CUDA_NOTREADY)
375 mtx_lock(&sc->sc_mutex);
377 pkt = STAILQ_FIRST(&sc->sc_freeq);
379 mtx_unlock(&sc->sc_mutex);
387 STAILQ_REMOVE_HEAD(&sc->sc_freeq, pkt_q);
388 STAILQ_INSERT_TAIL(&sc->sc_outq, pkt, pkt_q);
395 if (sc->sc_waiting) {
396 mtx_unlock(&sc->sc_mutex);
400 cuda_send_outbound(sc);
401 mtx_unlock(&sc->sc_mutex);
403 if (sc->sc_polling || poll || cold)
410 cuda_send_outbound(struct cuda_softc *sc)
414 mtx_assert(&sc->sc_mutex, MA_OWNED);
416 pkt = STAILQ_FIRST(&sc->sc_outq);
420 sc->sc_out_length = pkt->len + 1;
421 memcpy(sc->sc_out, &pkt->type, pkt->len + 1);
422 sc->sc_sent = 0;
424 STAILQ_REMOVE_HEAD(&sc->sc_outq, pkt_q);
425 STAILQ_INSERT_TAIL(&sc->sc_freeq, pkt, pkt_q);
427 sc->sc_waiting = 1;
429 cuda_poll(sc->sc_dev);
433 if (sc->sc_state == CUDA_IDLE && !cuda_intr_state(sc)) {
434 sc->sc_state = CUDA_OUT;
435 cuda_out(sc);
436 cuda_write_reg(sc, vSR, sc->sc_out[0]);
437 cuda_ack_off(sc);
438 cuda_tip(sc);
443 cuda_send_inbound(struct cuda_softc *sc)
448 dev = sc->sc_dev;
450 mtx_lock(&sc->sc_mutex);
452 while ((pkt = STAILQ_FIRST(&sc->sc_inq)) != NULL) {
453 STAILQ_REMOVE_HEAD(&sc->sc_inq, pkt_q);
455 mtx_unlock(&sc->sc_mutex);
461 adb_receive_raw_packet(sc->adb_bus,
465 adb_receive_raw_packet(sc->adb_bus,
470 mtx_lock(&sc->sc_mutex);
473 sc->sc_autopoll = 1;
476 memcpy(&sc->sc_rtc, &pkt->data[2],
477 sizeof(sc->sc_rtc));
478 wakeup(&sc->sc_rtc);
483 mtx_unlock(&sc->sc_mutex);
500 mtx_lock(&sc->sc_mutex);
502 STAILQ_INSERT_TAIL(&sc->sc_freeq, pkt, pkt_q);
505 mtx_unlock(&sc->sc_mutex);
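
cuda_send_inbound() dequeues each received packet under the mutex, drops the lock around the upcall into the ADB layer, then retakes it to recycle the packet onto the free list. A hedged sketch of that lock-drop pattern is below; drain_queue() is a hypothetical name, and the cuda_packet type is assumed from the pkt/pkt_q usage in the listing.

static void
drain_queue(struct cuda_softc *sc)
{
        struct cuda_packet *pkt;

        mtx_lock(&sc->sc_mutex);
        while ((pkt = STAILQ_FIRST(&sc->sc_inq)) != NULL) {
                STAILQ_REMOVE_HEAD(&sc->sc_inq, pkt_q);

                /* Deliver to the consumer without the lock held. */
                mtx_unlock(&sc->sc_mutex);
                /* ... hand pkt to the upper layer here ... */
                mtx_lock(&sc->sc_mutex);

                /* Recycle the packet onto the free list. */
                STAILQ_INSERT_TAIL(&sc->sc_freeq, pkt, pkt_q);
        }
        mtx_unlock(&sc->sc_mutex);
}
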
511 struct cuda_softc *sc = device_get_softc(dev);
513 if (sc->sc_state == CUDA_IDLE && !cuda_intr_state(sc) &&
514 !sc->sc_waiting)
525 struct cuda_softc *sc;
530 sc = device_get_softc(dev);
532 mtx_lock(&sc->sc_mutex);
535 reg = cuda_read_reg(sc, vIFR);
537 mtx_unlock(&sc->sc_mutex);
541 cuda_write_reg(sc, vIFR, 0x7f); /* Clear interrupt */
544 switch (sc->sc_state) {
551 sc->sc_in[1] = cuda_read_reg(sc, vSR);
553 if (cuda_intr_state(sc) == 0) {
556 if (sc->sc_waiting) {
559 sc->sc_state = CUDA_OUT;
560 sc->sc_sent = 0;
561 cuda_out(sc);
562 cuda_write_reg(sc, vSR, sc->sc_out[1]);
563 cuda_ack_off(sc);
564 cuda_tip(sc);
569 cuda_in(sc);
570 cuda_tip(sc);
572 sc->sc_received = 1;
573 sc->sc_state = CUDA_IN;
577 sc->sc_in[sc->sc_received] = cuda_read_reg(sc, vSR);
580 if (sc->sc_received > 255) {
582 if (sc->sc_received == 256) {
587 sc->sc_received++;
590 if (cuda_intr_state(sc) == 0) {
593 cuda_toggle_ack(sc);
600 cuda_idle(sc);
603 pkt = STAILQ_FIRST(&sc->sc_freeq);
607 pkt->len = sc->sc_received - 2;
608 pkt->type = sc->sc_in[1];
609 memcpy(pkt->data, &sc->sc_in[2], pkt->len);
611 STAILQ_REMOVE_HEAD(&sc->sc_freeq, pkt_q);
612 STAILQ_INSERT_TAIL(&sc->sc_inq, pkt, pkt_q);
617 sc->sc_state = CUDA_IDLE;
618 sc->sc_received = 0;
624 if (sc->sc_waiting == 1) {
626 sc->sc_sent = 0;
627 sc->sc_state = CUDA_OUT;
635 if (cuda_intr_state(sc)) {
636 cuda_in(sc);
637 cuda_idle(sc);
638 sc->sc_sent = 0;
639 sc->sc_state = CUDA_IDLE;
640 sc->sc_received = 0;
650 cuda_out(sc);
651 cuda_write_reg(sc, vSR,
652 sc->sc_out[sc->sc_sent]);
653 cuda_ack_off(sc);
654 cuda_tip(sc);
660 cuda_read_reg(sc, vSR); /* reset SR-intr in IFR */
662 sc->sc_sent++;
663 if (cuda_intr_state(sc)) { /* ADB intr low during write */
664 cuda_in(sc); /* make sure SR is set to IN */
665 cuda_idle(sc);
666 sc->sc_sent = 0; /* must start all over */
667 sc->sc_state = CUDA_IDLE; /* new state */
668 sc->sc_received = 0;
669 sc->sc_waiting = 1; /* must retry when done with current conversation */
676 if (sc->sc_out_length == sc->sc_sent) { /* check for done */
677 sc->sc_waiting = 0; /* done writing */
678 sc->sc_state = CUDA_IDLE; /* signal bus is idle */
679 cuda_in(sc);
680 cuda_idle(sc);
683 cuda_write_reg(sc, vSR, sc->sc_out[sc->sc_sent]);
684 cuda_toggle_ack(sc); /* signal byte ready to shift */
696 mtx_unlock(&sc->sc_mutex);
699 cuda_send_inbound(sc);
701 mtx_lock(&sc->sc_mutex);
703 if (!sc->sc_waiting && sc->sc_state == CUDA_IDLE)
704 cuda_send_outbound(sc);
706 mtx_unlock(&sc->sc_mutex);
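
The handler above is a small state machine driven by shift-register interrupts: CUDA_IDLE accepts an unsolicited first byte and switches to CUDA_IN, CUDA_IN accumulates bytes into sc_in[], and CUDA_OUT counts acknowledged bytes until sc_out_length is reached. A condensed, hypothetical model of just the byte-counting side (no VIA register side effects, names are illustrative):

enum xfer_state { XFER_IDLE, XFER_OUT, XFER_IN };

struct xfer {
        enum xfer_state state;
        unsigned char   out[16], in[16];
        int             out_len, sent, received;
};

/* Feed one shift-register byte event into the machine. */
static void
xfer_byte(struct xfer *x, unsigned char sr)
{
        switch (x->state) {
        case XFER_IDLE:
                /* An unsolicited byte starts an inbound packet. */
                x->in[0] = sr;
                x->received = 1;
                x->state = XFER_IN;
                break;
        case XFER_IN:
                if (x->received < (int)sizeof(x->in))
                        x->in[x->received++] = sr;
                break;
        case XFER_OUT:
                /* One byte acknowledged; idle again once all are sent. */
                if (++x->sent == x->out_len)
                        x->state = XFER_IDLE;
                break;
        }
}
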
714 struct cuda_softc *sc = device_get_softc(dev);
724 cuda_send(sc, poll, len + 2, packet);
731 struct cuda_softc *sc = device_get_softc(dev);
735 mtx_lock(&sc->sc_mutex);
737 if (cmd[2] == sc->sc_autopoll) {
738 mtx_unlock(&sc->sc_mutex);
742 sc->sc_autopoll = -1;
743 cuda_send(sc, 1, 3, cmd);
745 mtx_unlock(&sc->sc_mutex);
753 struct cuda_softc *sc = xsc;
763 cuda_poll(sc->sc_dev);
764 cuda_send(sc, 1, 2, cmd);
767 cuda_poll(sc->sc_dev);
775 struct cuda_softc *sc = device_get_softc(dev);
778 mtx_lock(&sc->sc_mutex);
779 sc->sc_rtc = -1;
780 cuda_send(sc, 1, 2, cmd);
781 if (sc->sc_rtc == -1)
782 mtx_sleep(&sc->sc_rtc, &sc->sc_mutex, 0, "rtc", 100);
784 ts->tv_sec = sc->sc_rtc - DIFF19041970;
786 mtx_unlock(&sc->sc_mutex);
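
cuda_gettime() parks on &sc->sc_rtc with mtx_sleep() and the receive path (cuda_send_inbound) stores the answer and calls wakeup() on the same channel. A hedged sketch of that handshake follows; rtc_sc, rtc_wait() and rtc_complete() are hypothetical names, not the driver's.

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/lock.h>
#include <sys/mutex.h>

struct rtc_sc {
        struct mtx      mtx;
        int             rtc;            /* -1 means "no answer yet" */
};

/* Requester: issue the time request and wait up to 100 ticks. */
static int
rtc_wait(struct rtc_sc *sc)
{
        int error = 0;

        mtx_lock(&sc->mtx);
        sc->rtc = -1;
        /* ... queue the time request to the hardware here ... */
        if (sc->rtc == -1)
                error = mtx_sleep(&sc->rtc, &sc->mtx, 0, "rtc", 100);
        mtx_unlock(&sc->mtx);
        return (error);
}

/* Receive path: store the answer and wake the requester. */
static void
rtc_complete(struct rtc_sc *sc, int value)
{
        mtx_lock(&sc->mtx);
        sc->rtc = value;
        wakeup(&sc->rtc);
        mtx_unlock(&sc->mtx);
}
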
794 struct cuda_softc *sc = device_get_softc(dev);
801 mtx_lock(&sc->sc_mutex);
802 cuda_send(sc, 0, 6, cmd);
803 mtx_unlock(&sc->sc_mutex);