Lines Matching refs:pp
(Cross-reference hits for the per-unit state pointer pp, a struct ecppunit *, in the ecpp parallel-port driver source. Each entry shows the source line number, the matching line, and the enclosing function; "local" and "argument" tag the lines where pp is defined.)

271 static int	m1553_map_regs(struct ecppunit *pp);
272 static void m1553_unmap_regs(struct ecppunit *pp);
312 static uchar_t nibble_peripheral2host(struct ecppunit *pp, uint8_t *);
592 struct ecppunit *pp; in ecpp_attach() local
602 if (!(pp = ddi_get_soft_state(ecppsoft_statep, instance))) { in ecpp_attach()
606 mutex_enter(&pp->umutex); in ecpp_attach()
608 pp->suspended = FALSE; in ecpp_attach()
613 (void) ECPP_CONFIG_CHIP(pp); in ecpp_attach()
614 (void) ecpp_reset_port_regs(pp); in ecpp_attach()
616 if (pp->oflag == TRUE) { in ecpp_attach()
617 int current_mode = pp->current_mode; in ecpp_attach()
619 (void) ecpp_1284_termination(pp); in ecpp_attach()
620 (void) ecpp_mode_negotiation(pp, current_mode); in ecpp_attach()
623 mutex_exit(&pp->umutex); in ecpp_attach()
636 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_attach()
638 pp->dip = dip; in ecpp_attach()
639 pp->suspended = FALSE; in ecpp_attach()
644 hw_bind = ecpp_determine_sio_type(pp); in ecpp_attach()
650 pp->hw = hw_bind->hw; in ecpp_attach()
651 ecpp_error(pp->dip, "SuperIO type: %s\n", hw_bind->info); in ecpp_attach()
657 if (ECPP_MAP_REGS(pp) != SUCCESS) { in ecpp_attach()
661 if (ddi_dma_alloc_handle(dip, pp->hw->attr, DDI_DMA_DONTWAIT, in ecpp_attach()
662 NULL, &pp->dma_handle) != DDI_SUCCESS) { in ecpp_attach()
668 &pp->ecpp_trap_cookie) != DDI_SUCCESS) { in ecpp_attach()
673 mutex_init(&pp->umutex, NULL, MUTEX_DRIVER, in ecpp_attach()
674 (void *)pp->ecpp_trap_cookie); in ecpp_attach()
676 cv_init(&pp->pport_cv, NULL, CV_DRIVER, NULL); in ecpp_attach()
678 if (ddi_add_intr(dip, 0, &pp->ecpp_trap_cookie, NULL, ecpp_isr, in ecpp_attach()
679 (caddr_t)pp) != DDI_SUCCESS) { in ecpp_attach()
685 &pp->softintr_id, 0, 0, ecpp_softintr, in ecpp_attach()
686 (caddr_t)pp) != DDI_SUCCESS) { in ecpp_attach()
699 pp->ioblock = (caddr_t)kmem_alloc(IO_BLOCK_SZ, KM_SLEEP); in ecpp_attach()
700 if (pp->ioblock == NULL) { in ecpp_attach()
704 ecpp_error(pp->dip, "ecpp_attach: ioblock=0x%x\n", pp->ioblock); in ecpp_attach()
707 ecpp_get_props(pp); in ecpp_attach()
709 if (pp->hw == &x86 && pp->uh.x86.chn != 0xff) { in ecpp_attach()
710 if (ddi_dmae_alloc(dip, pp->uh.x86.chn, in ecpp_attach()
712 ecpp_error(pp->dip, "dmae_alloc success!\n"); in ecpp_attach()
715 if (ECPP_CONFIG_CHIP(pp) == FAILURE) { in ecpp_attach()
716 ecpp_error(pp->dip, "config_chip failed.\n"); in ecpp_attach()
720 ecpp_kstat_init(pp); in ecpp_attach()
728 kmem_free(pp->ioblock, IO_BLOCK_SZ); in ecpp_attach()
732 ddi_remove_softintr(pp->softintr_id); in ecpp_attach()
734 ddi_remove_intr(dip, (uint_t)0, pp->ecpp_trap_cookie); in ecpp_attach()
736 mutex_destroy(&pp->umutex); in ecpp_attach()
737 cv_destroy(&pp->pport_cv); in ecpp_attach()
739 ddi_dma_free_handle(&pp->dma_handle); in ecpp_attach()
741 ECPP_UNMAP_REGS(pp); in ecpp_attach()
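
The ecpp_attach() hits above follow the usual DDI attach shape: look up the soft state, map the registers, allocate a DMA handle, initialize the unit mutex and condvar from the interrupt block cookie, register the hard and soft interrupt handlers, allocate the I/O staging block, and on any failure unwind whatever was already set up in reverse order (the trailing cleanup hits). The fragment below is a minimal sketch of that setup/unwind pattern under hypothetical xx names; it covers only a subset of the resources and is not the driver's own code.

/*
 * Attach-resources/unwind sketch (hypothetical "xx" driver).
 */
#include <sys/types.h>
#include <sys/kmem.h>
#include <sys/ksynch.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

#define XX_BUFSZ        4096            /* assumed staging-buffer size */

struct xxunit {
        dev_info_t              *dip;
        kmutex_t                umutex;
        kcondvar_t              pport_cv;
        ddi_iblock_cookie_t     iblock;
        caddr_t                 ioblock;
};

static uint_t
xx_isr(caddr_t arg)
{
        (void) arg;                     /* real interrupt work omitted */
        return (DDI_INTR_UNCLAIMED);
}

static int
xx_attach_resources(dev_info_t *dip, struct xxunit *xp)
{
        xp->dip = dip;

        if (ddi_get_iblock_cookie(dip, 0, &xp->iblock) != DDI_SUCCESS)
                return (DDI_FAILURE);

        /* the interrupt cookie initializes the mutex the ISR will take */
        mutex_init(&xp->umutex, NULL, MUTEX_DRIVER, (void *)xp->iblock);
        cv_init(&xp->pport_cv, NULL, CV_DRIVER, NULL);

        if (ddi_add_intr(dip, 0, &xp->iblock, NULL,
            xx_isr, (caddr_t)xp) != DDI_SUCCESS) {
                /* unwind in reverse order, as the cleanup hits above do */
                cv_destroy(&xp->pport_cv);
                mutex_destroy(&xp->umutex);
                return (DDI_FAILURE);
        }

        xp->ioblock = kmem_alloc(XX_BUFSZ, KM_SLEEP);   /* KM_SLEEP cannot fail */
        return (DDI_SUCCESS);
}
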
755 struct ecppunit *pp; in ecpp_detach() local
764 if (!(pp = ddi_get_soft_state(ecppsoft_statep, instance))) { in ecpp_detach()
768 mutex_enter(&pp->umutex); in ecpp_detach()
769 ASSERT(pp->suspended == FALSE); in ecpp_detach()
771 pp->suspended = TRUE; /* prevent new transfers */ in ecpp_detach()
776 if ((pp->e_busy == ECPP_BUSY) || (pp->e_busy == ECPP_FLUSH)) { in ecpp_detach()
777 (void) cv_reltimedwait(&pp->pport_cv, &pp->umutex, in ecpp_detach()
780 if ((pp->e_busy == ECPP_BUSY) || in ecpp_detach()
781 (pp->e_busy == ECPP_FLUSH)) { in ecpp_detach()
782 pp->suspended = FALSE; in ecpp_detach()
783 mutex_exit(&pp->umutex); in ecpp_detach()
784 ecpp_error(pp->dip, in ecpp_detach()
790 mutex_exit(&pp->umutex); in ecpp_detach()
797 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_detach()
799 if (pp->hw == &x86 && pp->uh.x86.chn != 0xff) in ecpp_detach()
800 (void) ddi_dmae_release(pp->dip, pp->uh.x86.chn); in ecpp_detach()
802 if (pp->dma_handle != NULL) in ecpp_detach()
803 ddi_dma_free_handle(&pp->dma_handle); in ecpp_detach()
807 ddi_remove_softintr(pp->softintr_id); in ecpp_detach()
809 ddi_remove_intr(dip, (uint_t)0, pp->ecpp_trap_cookie); in ecpp_detach()
811 if (pp->ksp) { in ecpp_detach()
812 kstat_delete(pp->ksp); in ecpp_detach()
814 if (pp->intrstats) { in ecpp_detach()
815 kstat_delete(pp->intrstats); in ecpp_detach()
818 cv_destroy(&pp->pport_cv); in ecpp_detach()
820 mutex_destroy(&pp->umutex); in ecpp_detach()
822 ECPP_UNMAP_REGS(pp); in ecpp_detach()
824 kmem_free(pp->ioblock, IO_BLOCK_SZ); in ecpp_detach()
841 ecpp_get_props(struct ecppunit *pp) in ecpp_get_props() argument
857 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
859 pp->fast_centronics = in ecpp_get_props()
863 pp->fast_centronics = FALSE; in ecpp_get_props()
876 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
878 pp->fast_compat = (strcmp(prop, "true") == 0) ? TRUE : FALSE; in ecpp_get_props()
881 pp->fast_compat = TRUE; in ecpp_get_props()
891 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
893 pp->init_seq = (strcmp(prop, "true") == 0) ? TRUE : FALSE; in ecpp_get_props()
896 pp->init_seq = FALSE; in ecpp_get_props()
905 pp->wsrv_retry = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
914 pp->wait_for_busy = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
921 pp->data_setup_time = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
928 pp->strobe_pulse_width = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
936 pp->dip, 0, "ecpp-transfer-timeout", ecpp_def_timeout); in ecpp_get_props()
938 pp->xfer_parms = default_xfer_parms; in ecpp_get_props()
943 if (pp->hw == &m1553) { in ecpp_get_props()
944 pp->uh.m1553.chn = ddi_prop_get_int(DDI_DEV_T_ANY, in ecpp_get_props()
945 pp->dip, 0, "dma-channel", 0x1); in ecpp_get_props()
946 ecpp_error(pp->dip, "ecpp_get_prop:chn=%x\n", pp->uh.m1553.chn); in ecpp_get_props()
951 if (pp->hw == &x86) { in ecpp_get_props()
952 if (ddi_prop_op(DDI_DEV_T_ANY, pp->dip, PROP_LEN_AND_VAL_BUF, in ecpp_get_props()
955 ecpp_error(pp->dip, "No dma channel found\n"); in ecpp_get_props()
956 pp->uh.x86.chn = 0xff; in ecpp_get_props()
957 pp->fast_compat = FALSE; in ecpp_get_props()
958 pp->noecpregs = TRUE; in ecpp_get_props()
960 pp->uh.x86.chn = (uint8_t)value; in ecpp_get_props()
966 pp->ecp_rev_speed = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
969 pp->rev_watchdog = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
972 ecpp_error(pp->dip, in ecpp_get_props()
977 pp->fast_centronics, pp->fast_compat, in ecpp_get_props()
978 pp->wsrv_retry, pp->wait_for_busy, in ecpp_get_props()
979 pp->data_setup_time, pp->strobe_pulse_width, in ecpp_get_props()
980 pp->xfer_parms.write_timeout); in ecpp_get_props()
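
The ecpp_get_props() hits read the driver's tuning knobs from the device properties, using ddi_prop_lookup_string() for the "true"/"false" style flags and ddi_prop_get_int() for the numeric timings, each falling back to a compiled-in default when the property is absent. A minimal sketch of the two lookup idioms follows; the xx helpers and any property names passed to them are placeholders, not ecpp's.

#include <sys/types.h>
#include <sys/systm.h>          /* strcmp */
#include <sys/ddi.h>
#include <sys/sunddi.h>

/* Integer property with a built-in default. */
static int
xx_get_int_prop(dev_info_t *dip, char *name, int defval)
{
        return (ddi_prop_get_int(DDI_DEV_T_ANY, dip, 0, name, defval));
}

/* "true"/"false" string property mapped to a boolean, with a default. */
static boolean_t
xx_get_bool_prop(dev_info_t *dip, char *name, boolean_t defval)
{
        char            *prop;
        boolean_t       val = defval;

        if (ddi_prop_lookup_string(DDI_DEV_T_ANY, dip, 0,
            name, &prop) == DDI_PROP_SUCCESS) {
                val = (strcmp(prop, "true") == 0) ? B_TRUE : B_FALSE;
                ddi_prop_free(prop);
        }
        return (val);
}
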
988 struct ecppunit *pp; in ecpp_getinfo() local
995 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_getinfo()
996 if (pp != NULL) { in ecpp_getinfo()
997 *result = pp->dip; in ecpp_getinfo()
1021 struct ecppunit *pp; in ecpp_open() local
1032 pp = (struct ecppunit *)ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_open()
1034 if (pp == NULL) { in ecpp_open()
1038 mutex_enter(&pp->umutex); in ecpp_open()
1044 if (pp->oflag == TRUE) { in ecpp_open()
1045 ecpp_error(pp->dip, "ecpp open failed"); in ecpp_open()
1046 mutex_exit(&pp->umutex); in ecpp_open()
1050 pp->oflag = TRUE; in ecpp_open()
1053 pp->prn_timeouts = prn_timeouts_default; in ecpp_open()
1054 pp->xfer_parms = default_xfer_parms; in ecpp_open()
1055 pp->current_mode = ECPP_CENTRONICS; in ecpp_open()
1056 pp->backchannel = ECPP_CENTRONICS; in ecpp_open()
1057 pp->current_phase = ECPP_PHASE_PO; in ecpp_open()
1058 pp->port = ECPP_PORT_DMA; in ecpp_open()
1059 pp->instance = instance; in ecpp_open()
1060 pp->timeout_error = 0; in ecpp_open()
1061 pp->saved_dsr = DSR_READ(pp); in ecpp_open()
1062 pp->ecpp_drain_counter = 0; in ecpp_open()
1063 pp->dma_cancelled = FALSE; in ecpp_open()
1064 pp->io_mode = ECPP_DMA; in ecpp_open()
1065 pp->joblen = 0; in ecpp_open()
1066 pp->tfifo_intr = 0; in ecpp_open()
1067 pp->softintr_pending = 0; in ecpp_open()
1068 pp->nread = 0; in ecpp_open()
1071 pp->e_busy = ECPP_IDLE; in ecpp_open()
1073 pp->readq = RD(q); in ecpp_open()
1074 pp->writeq = WR(q); in ecpp_open()
1075 pp->msg = NULL; in ecpp_open()
1077 RD(q)->q_ptr = WR(q)->q_ptr = (caddr_t)pp; in ecpp_open()
1082 if (ecpp_reset_port_regs(pp) == FAILURE) { in ecpp_open()
1083 mutex_exit(&pp->umutex); in ecpp_open()
1087 mutex_exit(&pp->umutex); in ecpp_open()
1113 mutex_enter(&pp->umutex); in ecpp_open()
1115 ecpp_default_negotiation(pp); in ecpp_open()
1118 (void) ecpp_idle_phase(pp); in ecpp_open()
1120 ecpp_error(pp->dip, in ecpp_open()
1122 pp->current_mode, pp->current_phase, in ecpp_open()
1123 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_open()
1125 mutex_exit(&pp->umutex); in ecpp_open()
1134 struct ecppunit *pp; in ecpp_close() local
1137 pp = (struct ecppunit *)q->q_ptr; in ecpp_close()
1139 ecpp_error(pp->dip, "ecpp_close: entering ...\n"); in ecpp_close()
1141 mutex_enter(&pp->umutex); in ecpp_close()
1155 while (pp->e_busy != ECPP_IDLE || qsize(WR(q))) { in ecpp_close()
1156 if (!cv_wait_sig(&pp->pport_cv, &pp->umutex)) { in ecpp_close()
1157 ecpp_error(pp->dip, "ecpp_close:B: received SIG\n"); in ecpp_close()
1162 ecpp_flush(pp, FWRITE); in ecpp_close()
1165 ecpp_error(pp->dip, "ecpp_close:rcvd cv-sig\n"); in ecpp_close()
1169 ecpp_error(pp->dip, "ecpp_close: joblen=%d, ctx_cf=%d, " in ecpp_close()
1171 pp->joblen, pp->ctx_cf, qsize(pp->writeq), qsize(q)); in ecpp_close()
1181 timeout_id = pp->timeout_id; in ecpp_close()
1182 fifo_timer_id = pp->fifo_timer_id; in ecpp_close()
1183 wsrv_timer_id = pp->wsrv_timer_id; in ecpp_close()
1185 pp->timeout_id = pp->fifo_timer_id = pp->wsrv_timer_id = 0; in ecpp_close()
1187 pp->softintr_pending = 0; in ecpp_close()
1188 pp->dma_cancelled = TRUE; in ecpp_close()
1189 ECPP_MASK_INTR(pp); in ecpp_close()
1191 mutex_exit(&pp->umutex); in ecpp_close()
1205 mutex_enter(&pp->umutex); in ecpp_close()
1208 if ((pp->current_mode == ECPP_ECP_MODE) && in ecpp_close()
1209 (pp->current_phase != ECPP_PHASE_ECP_FWD_IDLE)) { in ecpp_close()
1210 (void) ecp_reverse2forward(pp); in ecpp_close()
1213 (void) ecpp_1284_termination(pp); in ecpp_close()
1215 pp->oflag = FALSE; in ecpp_close()
1217 pp->readq = pp->writeq = NULL; in ecpp_close()
1218 pp->msg = NULL; in ecpp_close()
1220 ecpp_error(pp->dip, "ecpp_close: ecr=%x, dsr=%x, dcr=%x\n", in ecpp_close()
1221 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_close()
1223 mutex_exit(&pp->umutex); in ecpp_close()
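
The ecpp_close() hits drain pending output before tearing the stream down: close sleeps on pport_cv with cv_wait_sig() until the unit is idle and the write queue is empty, and if a signal interrupts the wait it gives up and flushes instead. The sketch below shows only that drain loop; the driver's ecpp_flush(FWRITE) additionally cancels any in-flight DMA, which is omitted here, and the xx names are hypothetical.

#include <sys/types.h>
#include <sys/ksynch.h>
#include <sys/stream.h>
#include <sys/debug.h>

struct xxunit {
        kmutex_t        umutex;
        kcondvar_t      pport_cv;
        int             e_busy;         /* XX_IDLE when no transfer active */
};
#define XX_IDLE         0

/* Wait for queued output to drain; discard it if a signal arrives. */
static void
xx_drain_on_close(struct xxunit *xp, queue_t *wq)
{
        ASSERT(mutex_owned(&xp->umutex));

        while (xp->e_busy != XX_IDLE || qsize(wq) > 0) {
                if (cv_wait_sig(&xp->pport_cv, &xp->umutex) == 0) {
                        /* signal received: stop waiting, drop queued data */
                        flushq(wq, FLUSHDATA);
                        break;
                }
        }
}
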
1235 struct ecppunit *pp; in ecpp_wput() local
1237 pp = (struct ecppunit *)q->q_ptr; in ecpp_wput()
1244 ecpp_error(pp->dip, in ecpp_wput()
1264 ecpp_error(pp->dip, in ecpp_wput()
1285 ecpp_error(pp->dip, "ecpp_wput:M_IOCTL %x\n", iocbp->ioc_cmd); in ecpp_wput()
1287 mutex_enter(&pp->umutex); in ecpp_wput()
1290 if ((pp->e_busy == ECPP_BUSY) && in ecpp_wput()
1293 mutex_exit(&pp->umutex); in ecpp_wput()
1296 mutex_exit(&pp->umutex); in ecpp_wput()
1306 ecpp_error(pp->dip, "ecpp_wput:M_IOCDATA\n"); in ecpp_wput()
1370 ecpp_error(pp->dip, "ecpp_wput:M_FLUSH\n"); in ecpp_wput()
1373 mutex_enter(&pp->umutex); in ecpp_wput()
1374 ecpp_flush(pp, FWRITE); in ecpp_wput()
1375 mutex_exit(&pp->umutex); in ecpp_wput()
1379 mutex_enter(&pp->umutex); in ecpp_wput()
1380 ecpp_flush(pp, FREAD); in ecpp_wput()
1381 mutex_exit(&pp->umutex); in ecpp_wput()
1396 mutex_enter(&pp->umutex); in ecpp_wput()
1397 if (pp->e_busy == ECPP_IDLE) { in ecpp_wput()
1398 pp->nread += *(size_t *)mp->b_rptr; in ecpp_wput()
1399 ecpp_error(pp->dip, "ecpp_wput: M_READ %d", pp->nread); in ecpp_wput()
1402 ecpp_error(pp->dip, "ecpp_wput: M_READ queueing"); in ecpp_wput()
1405 mutex_exit(&pp->umutex); in ecpp_wput()
1409 ecpp_error(pp->dip, "ecpp_wput: bad messagetype 0x%x\n", in ecpp_wput()
1501 struct ecppunit *pp; in ecpp_putioc() local
1503 pp = (struct ecppunit *)q->q_ptr; in ecpp_putioc()
1523 mutex_enter(&pp->umutex); in ecpp_putioc()
1525 pp->xfer_parms.mode = pp->current_mode; in ecpp_putioc()
1526 xfer_parms = pp->xfer_parms; in ecpp_putioc()
1528 mutex_exit(&pp->umutex); in ecpp_putioc()
1535 mutex_enter(&pp->umutex); in ecpp_putioc()
1536 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1537 mutex_exit(&pp->umutex); in ecpp_putioc()
1541 mutex_exit(&pp->umutex); in ecpp_putioc()
1551 mutex_enter(&pp->umutex); in ecpp_putioc()
1553 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1554 mutex_exit(&pp->umutex); in ecpp_putioc()
1559 rg.dsr = DSR_READ(pp); in ecpp_putioc()
1560 rg.dcr = DCR_READ(pp); in ecpp_putioc()
1562 mutex_exit(&pp->umutex); in ecpp_putioc()
1564 ecpp_error(pp->dip, "ECPPIOC_GETREGS: dsr=%x,dcr=%x\n", in ecpp_putioc()
1577 mutex_enter(&pp->umutex); in ecpp_putioc()
1578 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1579 mutex_exit(&pp->umutex); in ecpp_putioc()
1583 mutex_exit(&pp->umutex); in ecpp_putioc()
1597 mutex_enter(&pp->umutex); in ecpp_putioc()
1600 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1601 mutex_exit(&pp->umutex); in ecpp_putioc()
1607 byte = pp->port; in ecpp_putioc()
1609 switch (pp->port) { in ecpp_putioc()
1611 byte = DATAR_READ(pp); in ecpp_putioc()
1614 byte = TFIFO_READ(pp); in ecpp_putioc()
1615 ecpp_error(pp->dip, "GETDATA=0x%x\n", byte); in ecpp_putioc()
1622 mutex_exit(&pp->umutex); in ecpp_putioc()
1623 ecpp_error(pp->dip, "weird command"); in ecpp_putioc()
1628 mutex_exit(&pp->umutex); in ecpp_putioc()
1638 mutex_enter(&pp->umutex); in ecpp_putioc()
1640 bpp_status.timeout_occurred = pp->timeout_error; in ecpp_putioc()
1642 bpp_status.pin_status = ecpp_get_error_status(pp->saved_dsr); in ecpp_putioc()
1644 mutex_exit(&pp->umutex); in ecpp_putioc()
1652 mutex_enter(&pp->umutex); in ecpp_putioc()
1654 if (!((pp->current_mode == ECPP_CENTRONICS) || in ecpp_putioc()
1655 (pp->current_mode == ECPP_COMPAT_MODE))) { in ecpp_putioc()
1658 pp->saved_dsr = DSR_READ(pp); in ecpp_putioc()
1660 if ((pp->saved_dsr & ECPP_PE) || in ecpp_putioc()
1661 !(pp->saved_dsr & ECPP_SLCT) || in ecpp_putioc()
1662 !(pp->saved_dsr & ECPP_nERR)) { in ecpp_putioc()
1669 mutex_exit(&pp->umutex); in ecpp_putioc()
1678 mutex_enter(&pp->umutex); in ecpp_putioc()
1679 if (pp->e_busy == ECPP_BUSY) { in ecpp_putioc()
1680 mutex_exit(&pp->umutex); in ecpp_putioc()
1683 (void) ecpp_mode_negotiation(pp, ECPP_CENTRONICS); in ecpp_putioc()
1685 DCR_WRITE(pp, ECPP_SLCTIN); in ecpp_putioc()
1687 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in ecpp_putioc()
1689 ecpp_default_negotiation(pp); in ecpp_putioc()
1691 mutex_exit(&pp->umutex); in ecpp_putioc()
1699 mutex_enter(&pp->umutex); in ecpp_putioc()
1701 ifcap = ecpp_get_prn_ifcap(pp); in ecpp_putioc()
1703 mutex_exit(&pp->umutex); in ecpp_putioc()
1718 mutex_enter(&pp->umutex); in ecpp_putioc()
1719 timeouts = pp->prn_timeouts; in ecpp_putioc()
1720 mutex_exit(&pp->umutex); in ecpp_putioc()
1737 mutex_enter(&pp->umutex); in ecpp_putioc()
1740 if (pp->current_mode == ECPP_CENTRONICS || in ecpp_putioc()
1741 pp->current_mode == ECPP_COMPAT_MODE) { in ecpp_putioc()
1742 dsr = DSR_READ(pp); in ecpp_putioc()
1753 mutex_exit(&pp->umutex); in ecpp_putioc()
1763 mutex_enter(&pp->umutex); in ecpp_putioc()
1766 if (pp->current_mode != ECPP_COMPAT_MODE && in ecpp_putioc()
1767 pp->current_mode != ECPP_CENTRONICS) { in ecpp_putioc()
1768 mutex_exit(&pp->umutex); in ecpp_putioc()
1773 dsr = DSR_READ(pp); /* read status */ in ecpp_putioc()
1775 mutex_exit(&pp->umutex); in ecpp_putioc()
1777 ecpp_error(pp->dip, "PRNIOC_GET_STATUS: %x\n", dsr); in ecpp_putioc()
1802 ecpp_error(pp->dip, "putioc: unknown IOCTL: %x\n", in ecpp_putioc()
1873 struct ecppunit *pp = (struct ecppunit *)q->q_ptr; in ecpp_rsrv() local
1875 mutex_enter(&pp->umutex); in ecpp_rsrv()
1877 if (pp->e_busy == ECPP_IDLE) { in ecpp_rsrv()
1878 (void) ecpp_idle_phase(pp); in ecpp_rsrv()
1879 cv_signal(&pp->pport_cv); /* signal ecpp_close() */ in ecpp_rsrv()
1882 mutex_exit(&pp->umutex); in ecpp_rsrv()
1891 struct ecppunit *pp = (struct ecppunit *)q->q_ptr; in ecpp_wsrv() local
1898 mutex_enter(&pp->umutex); in ecpp_wsrv()
1900 ecpp_error(pp->dip, "ecpp_wsrv: e_busy=%x\n", pp->e_busy); in ecpp_wsrv()
1903 if (pp->e_busy == ECPP_BUSY || pp->e_busy == ECPP_FLUSH) { in ecpp_wsrv()
1904 mutex_exit(&pp->umutex); in ecpp_wsrv()
1906 } else if (pp->suspended == TRUE) { in ecpp_wsrv()
1911 cv_signal(&pp->pport_cv); in ecpp_wsrv()
1912 mutex_exit(&pp->umutex); in ecpp_wsrv()
1917 if (pp->e_busy == ECPP_ERR) { in ecpp_wsrv()
1918 if (ecpp_check_status(pp) == FAILURE) { in ecpp_wsrv()
1919 if (pp->wsrv_timer_id == 0) { in ecpp_wsrv()
1920 ecpp_error(pp->dip, "wsrv: start wrsv_timer\n"); in ecpp_wsrv()
1921 pp->wsrv_timer_id = timeout(ecpp_wsrv_timer, in ecpp_wsrv()
1922 (caddr_t)pp, in ecpp_wsrv()
1923 drv_usectohz(pp->wsrv_retry * 1000)); in ecpp_wsrv()
1925 ecpp_error(pp->dip, in ecpp_wsrv()
1929 mutex_exit(&pp->umutex); in ecpp_wsrv()
1932 pp->e_busy = ECPP_IDLE; in ecpp_wsrv()
1936 my_ioblock = pp->ioblock; in ecpp_wsrv()
1943 pp->msg = NULL; in ecpp_wsrv()
1959 pp->e_busy = ECPP_BUSY; in ecpp_wsrv()
1969 pp->msg = mp; in ecpp_wsrv()
1985 start_addr = (caddr_t)pp->ioblock; in ecpp_wsrv()
2005 ecpp_error(pp->dip, "M_IOCTL.\n"); in ecpp_wsrv()
2007 mutex_exit(&pp->umutex); in ecpp_wsrv()
2011 mutex_enter(&pp->umutex); in ecpp_wsrv()
2018 ecpp_error(pp->dip, "M_IOCDATA\n"); in ecpp_wsrv()
2050 if (pp->e_busy != ECPP_IDLE) { in ecpp_wsrv()
2051 ecpp_error(pp->dip, "wsrv: M_CTL postponed\n"); in ecpp_wsrv()
2055 ecpp_error(pp->dip, "wsrv: M_CTL\n"); in ecpp_wsrv()
2061 ecpp_error(pp->dip, "wsrv: bogus M_CTL"); in ecpp_wsrv()
2069 (void) ecpp_peripheral2host(pp); in ecpp_wsrv()
2072 if (pp->e_busy == ECPP_BUSY) { in ecpp_wsrv()
2078 pp->nread += *(size_t *)mp->b_rptr; in ecpp_wsrv()
2080 ecpp_error(pp->dip, "wsrv: M_READ %d", pp->nread); in ecpp_wsrv()
2084 ecpp_error(pp->dip, "wsrv: should never get here\n"); in ecpp_wsrv()
2094 ecpp_error(pp->dip, "wsrv:starting: total_len=%d\n", total_len); in ecpp_wsrv()
2095 pp->e_busy = ECPP_BUSY; in ecpp_wsrv()
2096 ecpp_start(pp, start_addr, total_len); in ecpp_wsrv()
2098 ecpp_error(pp->dip, "wsrv:finishing: ebusy=%x\n", pp->e_busy); in ecpp_wsrv()
2101 if (pp->e_busy == ECPP_IDLE) { in ecpp_wsrv()
2102 (void) ecpp_idle_phase(pp); in ecpp_wsrv()
2103 cv_signal(&pp->pport_cv); /* signal ecpp_close() */ in ecpp_wsrv()
2107 mutex_exit(&pp->umutex); in ecpp_wsrv()
2118 struct ecppunit *pp; in ecpp_srvioc() local
2121 pp = (struct ecppunit *)q->q_ptr; in ecpp_srvioc()
2144 pp->xfer_parms = *xferp; in ecpp_srvioc()
2145 pp->prn_timeouts.tmo_forward = pp->xfer_parms.write_timeout; in ecpp_srvioc()
2147 ecpp_error(pp->dip, "srvioc: current_mode =%x new mode=%x\n", in ecpp_srvioc()
2148 pp->current_mode, pp->xfer_parms.mode); in ecpp_srvioc()
2150 if (ecpp_mode_negotiation(pp, pp->xfer_parms.mode) == FAILURE) { in ecpp_srvioc()
2157 if ((pp->current_mode == ECPP_NIBBLE_MODE) && in ecpp_srvioc()
2158 (read_nibble_backchan(pp) == FAILURE)) { in ecpp_srvioc()
2170 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2171 pp->port = ECPP_PORT_DMA; in ecpp_srvioc()
2173 pp->port = ECPP_PORT_PIO; in ecpp_srvioc()
2176 pp->xfer_parms.mode = pp->current_mode; in ecpp_srvioc()
2188 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2201 dcr = DCR_READ(pp) & ~ECPP_REV_DIR; in ecpp_srvioc()
2205 DCR_WRITE(pp, dcr); in ecpp_srvioc()
2206 ecpp_error(pp->dip, "ECPPIOC_SETREGS:dcr=%x\n", dcr); in ecpp_srvioc()
2217 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2225 ECR_WRITE(pp, in ecpp_srvioc()
2227 pp->port = *port; in ecpp_srvioc()
2232 ecpp_error(pp->dip, "SETPORT: to TDMA\n"); in ecpp_srvioc()
2233 pp->tfifo_intr = 1; in ecpp_srvioc()
2235 ECR_WRITE(pp, in ecpp_srvioc()
2237 pp->port = *port; in ecpp_srvioc()
2254 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2259 switch (pp->port) { in ecpp_srvioc()
2261 DATAR_WRITE(pp, *data); in ecpp_srvioc()
2266 TFIFO_WRITE(pp, *data); in ecpp_srvioc()
2342 ifcap = ecpp_get_prn_ifcap(pp); in ecpp_srvioc()
2357 ecpp_default_negotiation(pp); in ecpp_srvioc()
2359 (void) ecpp_mode_negotiation(pp, ECPP_CENTRONICS); in ecpp_srvioc()
2376 pp->prn_timeouts = *prn_timeouts; in ecpp_srvioc()
2377 pp->xfer_parms.write_timeout = (int)prn_timeouts->tmo_forward; in ecpp_srvioc()
2396 struct ecppunit *pp; in ecpp_srvioc_devid() local
2404 pp = (struct ecppunit *)q->q_ptr; in ecpp_srvioc_devid()
2411 ecpp_error(pp->dip, "ecpp_srvioc_devid: mode=%x, len=%x\n", in ecpp_srvioc_devid()
2429 if (error = ecpp_getdevid(pp, NULL, &len, mode)) { in ecpp_srvioc_devid()
2457 error = ecpp_getdevid(pp, datamp->b_rptr, &len, mode); in ecpp_srvioc_devid()
2473 (void) ecpp_1284_termination(pp); in ecpp_srvioc_devid()
2545 ecpp_flush(struct ecppunit *pp, int cmd) in ecpp_flush() argument
2551 ASSERT(mutex_owned(&pp->umutex)); in ecpp_flush()
2557 q = pp->writeq; in ecpp_flush()
2560 ecpp_error(pp->dip, "ecpp_flush e_busy=%x\n", pp->e_busy); in ecpp_flush()
2563 switch (pp->e_busy) { in ecpp_flush()
2569 pp->e_busy = ECPP_FLUSH; in ecpp_flush()
2583 pp->dma_cancelled = TRUE; in ecpp_flush()
2586 if (COMPAT_DMA(pp) || in ecpp_flush()
2587 (pp->current_mode == ECPP_ECP_MODE) || in ecpp_flush()
2588 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_flush()
2594 if (ECPP_DMA_STOP(pp, NULL) == FAILURE) { in ecpp_flush()
2595 ecpp_error(pp->dip, in ecpp_flush()
2606 pp->dma_handle) != DDI_SUCCESS) in ecpp_flush()
2607 ecpp_error(pp->dip, in ecpp_flush()
2610 if (pp->msg != NULL) { in ecpp_flush()
2611 freemsg(pp->msg); in ecpp_flush()
2612 pp->msg = NULL; in ecpp_flush()
2618 dcr = DCR_READ(pp); in ecpp_flush()
2620 DCR_WRITE(pp, dcr); in ecpp_flush()
2621 ECPP_MASK_INTR(pp); in ecpp_flush()
2632 timeout_id = pp->timeout_id; in ecpp_flush()
2633 fifo_timer_id = pp->fifo_timer_id; in ecpp_flush()
2634 pp->timeout_id = pp->fifo_timer_id = 0; in ecpp_flush()
2635 pp->softintr_pending = 0; in ecpp_flush()
2644 pp->e_busy = ECPP_FLUSH; in ecpp_flush()
2653 wsrv_timer_id = pp->wsrv_timer_id; in ecpp_flush()
2654 pp->wsrv_timer_id = 0; in ecpp_flush()
2663 ecpp_error(pp->dip, in ecpp_flush()
2664 "ecpp_flush: illegal state %x\n", pp->e_busy); in ecpp_flush()
2668 if (pp->current_mode == ECPP_DIAG_MODE) { in ecpp_flush()
2669 ecr = ECR_READ(pp); in ecpp_flush()
2671 ECR_WRITE(pp, in ecpp_flush()
2673 ECR_WRITE(pp, ecr); in ecpp_flush()
2681 pp->e_busy = ECPP_IDLE; in ecpp_flush()
2684 if (pp->current_mode == ECPP_ECP_MODE) { in ecpp_flush()
2685 if (pp->current_phase == ECPP_PHASE_ECP_REV_XFER) { in ecpp_flush()
2686 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecpp_flush()
2688 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecpp_flush()
2693 mutex_exit(&pp->umutex); in ecpp_flush()
2705 mutex_enter(&pp->umutex); in ecpp_flush()
2707 cv_signal(&pp->pport_cv); /* wake up ecpp_close() */ in ecpp_flush()
2711 ecpp_start(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_start() argument
2713 ASSERT(mutex_owned(&pp->umutex)); in ecpp_start()
2714 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_start()
2716 ecpp_error(pp->dip, in ecpp_start()
2718 pp->current_mode, pp->current_phase, ECR_READ(pp), len); in ecpp_start()
2720 pp->dma_dir = DDI_DMA_WRITE; /* this is a forward transfer */ in ecpp_start()
2722 switch (pp->current_mode) { in ecpp_start()
2724 (void) ecpp_1284_termination(pp); in ecpp_start()
2732 if (pp->io_mode == ECPP_DMA) { in ecpp_start()
2733 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2738 if (ecpp_prep_pio_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2741 (void) ecpp_pio_writeb(pp); in ecpp_start()
2749 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_110); in ecpp_start()
2755 if (!(ECR_READ(pp) & ECPP_FIFO_EMPTY)) { in ecpp_start()
2756 ecpp_error(pp->dip, in ecpp_start()
2758 ECR_WRITE(pp, in ecpp_start()
2760 ECR_WRITE(pp, in ecpp_start()
2770 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2776 ecpp_putback_untransfered(pp, addr + len, oldlen - len); in ecpp_start()
2783 ASSERT(pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE || in ecpp_start()
2784 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecpp_start()
2787 if (pp->current_phase == ECPP_PHASE_ECP_REV_IDLE) { in ecpp_start()
2788 if (ecp_reverse2forward(pp) == FAILURE) { in ecpp_start()
2789 if (pp->msg) { in ecpp_start()
2790 (void) putbq(pp->writeq, pp->msg); in ecpp_start()
2792 ecpp_putback_untransfered(pp, in ecpp_start()
2798 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2806 pp->timeout_id = timeout(ecpp_xfer_timeout, (caddr_t)pp, in ecpp_start()
2807 pp->xfer_parms.write_timeout * drv_usectohz(1000000)); in ecpp_start()
2815 ecpp_prep_pio_xfer(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_prep_pio_xfer() argument
2817 pp->next_byte = addr; in ecpp_prep_pio_xfer()
2818 pp->last_byte = (caddr_t)((ulong_t)addr + len); in ecpp_prep_pio_xfer()
2820 if (ecpp_check_status(pp) == FAILURE) { in ecpp_prep_pio_xfer()
2825 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2828 if (pp->msg != NULL) { in ecpp_prep_pio_xfer()
2834 ecpp_putback_untransfered(pp, in ecpp_prep_pio_xfer()
2835 (void *)pp->msg->b_rptr, len); in ecpp_prep_pio_xfer()
2836 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2839 freemsg(pp->msg); in ecpp_prep_pio_xfer()
2840 pp->msg = NULL; in ecpp_prep_pio_xfer()
2842 ecpp_putback_untransfered(pp, pp->ioblock, len); in ecpp_prep_pio_xfer()
2843 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2846 qenable(pp->writeq); in ecpp_prep_pio_xfer()
2851 pp->dma_cancelled = FALSE; in ecpp_prep_pio_xfer()
2854 if (ecr_write(pp, ECR_mode_001 | in ecpp_prep_pio_xfer()
2856 ecpp_error(pp->dip, "ecpp_prep_pio_xfer: failed w/ECR.\n"); in ecpp_prep_pio_xfer()
2859 ecpp_error(pp->dip, "ecpp_prep_pio_xfer: dcr=%x ecr=%x\n", in ecpp_prep_pio_xfer()
2860 DCR_READ(pp), ECR_READ(pp)); in ecpp_prep_pio_xfer()
2866 ecpp_init_dma_xfer(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_init_dma_xfer() argument
2880 ASSERT((pp->current_mode <= ECPP_DIAG_MODE) && in ecpp_init_dma_xfer()
2881 (ecr_mode[pp->current_mode] != 0)); in ecpp_init_dma_xfer()
2883 if (ecpp_setup_dma_resources(pp, addr, len) == FAILURE) { in ecpp_init_dma_xfer()
2884 qenable(pp->writeq); in ecpp_init_dma_xfer()
2888 if (ecpp_check_status(pp) == FAILURE) { in ecpp_init_dma_xfer()
2893 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2895 pp->dma_cookie.dmac_size); in ecpp_init_dma_xfer()
2897 if (pp->msg != NULL) { in ecpp_init_dma_xfer()
2903 ecpp_putback_untransfered(pp, in ecpp_init_dma_xfer()
2904 (void *)pp->msg->b_rptr, len); in ecpp_init_dma_xfer()
2905 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2908 freemsg(pp->msg); in ecpp_init_dma_xfer()
2909 pp->msg = NULL; in ecpp_init_dma_xfer()
2911 ecpp_putback_untransfered(pp, pp->ioblock, len); in ecpp_init_dma_xfer()
2912 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2916 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_init_dma_xfer()
2917 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2920 qenable(pp->writeq); in ecpp_init_dma_xfer()
2924 pp->xfercnt = pp->resid = len; in ecpp_init_dma_xfer()
2925 pp->dma_cancelled = FALSE; in ecpp_init_dma_xfer()
2926 pp->tfifo_intr = 0; in ecpp_init_dma_xfer()
2929 ecr = ecr_mode[pp->current_mode]; in ecpp_init_dma_xfer()
2930 (void) ecr_write(pp, ecr | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecpp_init_dma_xfer()
2933 if (ECPP_DMA_START(pp) == FAILURE) { in ecpp_init_dma_xfer()
2934 ecpp_error(pp->dip, "ecpp_init_dma_xfer: dma_start FAILED.\n"); in ecpp_init_dma_xfer()
2939 (void) ecr_write(pp, ecr | ECPP_DMA_ENABLE | ECPP_INTR_MASK); in ecpp_init_dma_xfer()
2945 ecpp_setup_dma_resources(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_setup_dma_resources() argument
2951 ASSERT(pp->dma_dir == DDI_DMA_READ || pp->dma_dir == DDI_DMA_WRITE); in ecpp_setup_dma_resources()
2953 err = ddi_dma_addr_bind_handle(pp->dma_handle, NULL, in ecpp_setup_dma_resources()
2954 addr, len, pp->dma_dir | DDI_DMA_PARTIAL, in ecpp_setup_dma_resources()
2956 &pp->dma_cookie, &pp->dma_cookie_count); in ecpp_setup_dma_resources()
2960 ecpp_error(pp->dip, "ecpp_setup_dma: DMA_MAPPED\n"); in ecpp_setup_dma_resources()
2962 pp->dma_nwin = 1; in ecpp_setup_dma_resources()
2963 pp->dma_curwin = 1; in ecpp_setup_dma_resources()
2967 ecpp_error(pp->dip, "ecpp_setup_dma: DMA_PARTIAL_MAP\n"); in ecpp_setup_dma_resources()
2969 if (ddi_dma_numwin(pp->dma_handle, in ecpp_setup_dma_resources()
2970 &pp->dma_nwin) != DDI_SUCCESS) { in ecpp_setup_dma_resources()
2971 (void) ddi_dma_unbind_handle(pp->dma_handle); in ecpp_setup_dma_resources()
2974 pp->dma_curwin = 1; in ecpp_setup_dma_resources()
2981 if (ddi_dma_getwin(pp->dma_handle, 0, &woff, &wlen, in ecpp_setup_dma_resources()
2982 &pp->dma_cookie, &pp->dma_cookie_count) != DDI_SUCCESS) { in ecpp_setup_dma_resources()
2983 ecpp_error(pp->dip, in ecpp_setup_dma_resources()
2985 (void) ddi_dma_unbind_handle(pp->dma_handle); in ecpp_setup_dma_resources()
2989 ecpp_error(pp->dip, in ecpp_setup_dma_resources()
2992 pp->dma_cookie_count, pp->dma_nwin, in ecpp_setup_dma_resources()
2993 pp->dma_cookie.dmac_address, pp->dma_cookie.dmac_size); in ecpp_setup_dma_resources()
2999 ecpp_error(pp->dip, "ecpp_setup_dma: err=%x\n", err); in ecpp_setup_dma_resources()
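
ecpp_setup_dma_resources() binds the transfer buffer with DDI_DMA_PARTIAL, so ddi_dma_addr_bind_handle() may answer DDI_DMA_PARTIAL_MAP; in that case the driver asks for the window count and selects window 0 before starting, and later walks the remaining cookies and windows from ecpp_fifo_timer(). Below is a minimal sketch of the bind-and-first-window step with the error handling trimmed; the xx function is illustrative, not the driver's.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

/*
 * Bind [addr, addr + len) for DMA and fetch the first cookie.  On a
 * partial bind, select window 0 explicitly.  *nwinp and *ccountp tell
 * the caller how much remains to be walked during the transfer.
 */
static int
xx_bind_dma(ddi_dma_handle_t h, caddr_t addr, size_t len, uint_t dir,
    ddi_dma_cookie_t *cookiep, uint_t *ccountp, uint_t *nwinp)
{
        off_t   woff;
        size_t  wlen;

        switch (ddi_dma_addr_bind_handle(h, NULL, addr, len,
            dir | DDI_DMA_PARTIAL, DDI_DMA_DONTWAIT, NULL,
            cookiep, ccountp)) {
        case DDI_DMA_MAPPED:
                *nwinp = 1;             /* whole buffer fits in one window */
                return (DDI_SUCCESS);

        case DDI_DMA_PARTIAL_MAP:
                if (ddi_dma_numwin(h, nwinp) != DDI_SUCCESS ||
                    ddi_dma_getwin(h, 0, &woff, &wlen,
                    cookiep, ccountp) != DDI_SUCCESS) {
                        (void) ddi_dma_unbind_handle(h);
                        return (DDI_FAILURE);
                }
                return (DDI_SUCCESS);

        default:                        /* no resources, too big, ... */
                return (DDI_FAILURE);
        }
}
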
3048 struct ecppunit *pp = (struct ecppunit *)(void *)arg; in ecpp_isr() local
3057 mutex_enter(&pp->umutex); in ecpp_isr()
3065 if (pp->dma_cancelled == TRUE) { in ecpp_isr()
3066 ecpp_error(pp->dip, "dma-cancel isr\n"); in ecpp_isr()
3068 pp->intr_hard++; in ecpp_isr()
3069 pp->dma_cancelled = FALSE; in ecpp_isr()
3071 mutex_exit(&pp->umutex); in ecpp_isr()
3077 if (pp->hw == &x86) in ecpp_isr()
3079 if (pp->hw == &m1553) in ecpp_isr()
3082 retval = ecpp_M1553_intr(pp); in ecpp_isr()
3086 mutex_exit(&pp->umutex); in ecpp_isr()
3095 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3099 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3109 if (!COMPAT_PIO(pp)) { in ecpp_isr()
3111 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3115 dsr = DSR_READ(pp); in ecpp_isr()
3123 if (pp->hw != &pc97317) { in ecpp_isr()
3140 ((COMPAT_PIO(pp)) && (pp->e_busy == ECPP_BUSY)) || in ecpp_isr()
3142 (pp->current_mode == ECPP_ECP_MODE))) { in ecpp_isr()
3149 pp->intr_hard++; in ecpp_isr()
3156 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_isr()
3161 if (ECPP_DMA_STOP(pp, &bcr) == FAILURE) { in ecpp_isr()
3162 ecpp_error(pp->dip, "ecpp_isr: dma_stop failed\n"); in ecpp_isr()
3165 ecpp_error(pp->dip, "ecpp_isr: DMAC ERROR bcr=%d\n", bcr); in ecpp_isr()
3167 ecpp_xfer_cleanup(pp); in ecpp_isr()
3169 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_isr()
3170 ecpp_error(pp->dip, "ecpp_isr(e): unbind failed\n"); in ecpp_isr()
3173 mutex_exit(&pp->umutex); in ecpp_isr()
3178 retval = ecpp_dma_ihdlr(pp); in ecpp_isr()
3179 mutex_exit(&pp->umutex); in ecpp_isr()
3183 if (COMPAT_PIO(pp)) { in ecpp_isr()
3184 retval = ecpp_pio_ihdlr(pp); in ecpp_isr()
3185 mutex_exit(&pp->umutex); in ecpp_isr()
3191 retval = ecpp_nErr_ihdlr(pp); in ecpp_isr()
3192 mutex_exit(&pp->umutex); in ecpp_isr()
3196 pp->intr_hard--; in ecpp_isr()
3200 pp->intr_spurious++; in ecpp_isr()
3209 if (pp->lastspur == 0 || now - pp->lastspur > SPUR_PERIOD) { in ecpp_isr()
3211 pp->lastspur = now; in ecpp_isr()
3212 pp->nspur = 1; in ecpp_isr()
3215 pp->nspur++; in ecpp_isr()
3218 if (pp->nspur >= SPUR_CRITICAL) { in ecpp_isr()
3219 ECPP_MASK_INTR(pp); in ecpp_isr()
3220 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_isr()
3221 pp->nspur = 0; in ecpp_isr()
3223 ddi_get_name(pp->dip), ddi_get_instance(pp->dip)); in ecpp_isr()
3225 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecpp_isr()
3228 ecpp_error(pp->dip, in ecpp_isr()
3230 dcsr, ECR_READ(pp), dsr, DCR_READ(pp), in ecpp_isr()
3231 pp->current_mode, pp->current_phase); in ecpp_isr()
3233 mutex_exit(&pp->umutex); in ecpp_isr()
3238 pp->intr_spurious++; in ecpp_isr()
3240 ecpp_error(pp->dip, in ecpp_isr()
3242 dcsr, ECR_READ(pp), DSR_READ(pp), DCR_READ(pp), in ecpp_isr()
3243 pp->current_mode, pp->current_phase); in ecpp_isr()
3245 mutex_exit(&pp->umutex); in ecpp_isr()
3253 ecpp_M1553_intr(struct ecppunit *pp) in ecpp_M1553_intr() argument
3257 pp->intr_hard++; in ecpp_M1553_intr()
3259 if (pp->e_busy == ECPP_BUSY) { in ecpp_M1553_intr()
3261 if (COMPAT_PIO(pp)) { in ecpp_M1553_intr()
3262 return (ecpp_pio_ihdlr(pp)); in ecpp_M1553_intr()
3266 if (COMPAT_DMA(pp) || in ecpp_M1553_intr()
3267 (pp->current_mode == ECPP_ECP_MODE) || in ecpp_M1553_intr()
3268 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_M1553_intr()
3269 return (ecpp_dma_ihdlr(pp)); in ecpp_M1553_intr()
3274 if ((DSR_READ(pp) & ECPP_nERR) == 0) { in ecpp_M1553_intr()
3275 return (ecpp_nErr_ihdlr(pp)); in ecpp_M1553_intr()
3285 ecpp_dma_ihdlr(struct ecppunit *pp) in ecpp_dma_ihdlr() argument
3289 ecpp_error(pp->dip, "ecpp_dma_ihdlr(%x): ecr=%x, dsr=%x, dcr=%x\n", in ecpp_dma_ihdlr()
3290 pp->current_mode, ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_dma_ihdlr()
3293 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_dma_ihdlr()
3296 if (pp->tfifo_intr == 1) { in ecpp_dma_ihdlr()
3297 pp->tfifo_intr = 0; in ecpp_dma_ihdlr()
3298 ecpp_error(pp->dip, "ecpp_dma_ihdlr: tfifo_intr is 1\n"); in ecpp_dma_ihdlr()
3302 if (ECPP_DMA_STOP(pp, NULL) == FAILURE) { in ecpp_dma_ihdlr()
3303 ecpp_error(pp->dip, "ecpp_dma_ihdlr: dma_stop failed\n"); in ecpp_dma_ihdlr()
3306 if (pp->current_mode == ECPP_ECP_MODE && in ecpp_dma_ihdlr()
3307 pp->current_phase == ECPP_PHASE_ECP_REV_XFER) { in ecpp_dma_ihdlr()
3308 ecpp_ecp_read_completion(pp); in ecpp_dma_ihdlr()
3313 if ((ECR_READ(pp) & ECPP_FIFO_EMPTY) || in ecpp_dma_ihdlr()
3314 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_dma_ihdlr()
3319 pp->fifo_timer_id = timeout(ecpp_fifo_timer, (caddr_t)pp, tm); in ecpp_dma_ihdlr()
3327 ecpp_untimeout_unblock(pp, &pp->timeout_id); in ecpp_dma_ihdlr()
3337 ecpp_pio_ihdlr(struct ecppunit *pp) in ecpp_pio_ihdlr() argument
3339 ASSERT(mutex_owned(&pp->umutex)); in ecpp_pio_ihdlr()
3340 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_pio_ihdlr()
3343 pp->joblen++; in ecpp_pio_ihdlr()
3344 pp->ctxpio_obytes++; in ecpp_pio_ihdlr()
3347 ECPP_MASK_INTR(pp); in ecpp_pio_ihdlr()
3348 DCR_WRITE(pp, DCR_READ(pp) & ~(ECPP_REV_DIR | ECPP_INTR_EN)); in ecpp_pio_ihdlr()
3354 if (pp->next_byte >= pp->last_byte) { in ecpp_pio_ihdlr()
3355 ecpp_xfer_cleanup(pp); in ecpp_pio_ihdlr()
3356 ecpp_error(pp->dip, in ecpp_pio_ihdlr()
3358 pp->joblen, pp->ctx_cf); in ecpp_pio_ihdlr()
3360 if (pp->softintr_pending) { in ecpp_pio_ihdlr()
3361 ecpp_error(pp->dip, in ecpp_pio_ihdlr()
3364 pp->softintr_flags = ECPP_SOFTINTR_PIONEXT; in ecpp_pio_ihdlr()
3365 pp->softintr_pending = 1; in ecpp_pio_ihdlr()
3366 ddi_trigger_softintr(pp->softintr_id); in ecpp_pio_ihdlr()
3377 ecpp_pio_writeb(struct ecppunit *pp) in ecpp_pio_writeb() argument
3381 dcr = DCR_READ(pp) & ~ECPP_REV_DIR; in ecpp_pio_writeb()
3385 DATAR_WRITE(pp, *(pp->next_byte++)); in ecpp_pio_writeb()
3387 drv_usecwait(pp->data_setup_time); in ecpp_pio_writeb()
3390 if (dcr_write(pp, dcr | ECPP_STB) == FAILURE) { in ecpp_pio_writeb()
3391 ecpp_error(pp->dip, "ecpp_pio_writeb:1: failed w/DCR\n"); in ecpp_pio_writeb()
3395 (void) DSR_READ(pp); /* ensure IRQ_ST is armed */ in ecpp_pio_writeb()
3396 ECPP_UNMASK_INTR(pp); in ecpp_pio_writeb()
3398 drv_usecwait(pp->strobe_pulse_width); in ecpp_pio_writeb()
3400 if (dcr_write(pp, dcr & ~ECPP_STB) == FAILURE) { in ecpp_pio_writeb()
3401 ecpp_error(pp->dip, "ecpp_pio_writeb:2: failed w/DCR\n"); in ecpp_pio_writeb()
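
ecpp_pio_writeb() clocks one compatibility-mode (Centronics) byte out by hand: put the byte on the data register, wait the configured data-setup time, assert strobe in the control register, hold it for the strobe pulse width, then deassert it; the nACK interrupt, unmasked between the two strobe edges above, drives the next byte. The sketch below shows just the timing, using ddi_put8()/ddi_get8() over an assumed register mapping and the conventional SPP strobe bit (0x01) instead of the driver's own DATAR_WRITE/DCR_WRITE macros.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

#define XX_CTL_STB      0x01    /* conventional SPP control-register strobe bit */

/*
 * Strobe one byte out in compatibility mode.  'data' and 'ctl' are the
 * mapped data and control registers (same access handle assumed);
 * setup_us/width_us play the role of ecpp's data_setup_time and
 * strobe_pulse_width properties.
 */
static void
xx_pio_putbyte(ddi_acc_handle_t h, uint8_t *data, uint8_t *ctl,
    uint8_t byte, clock_t setup_us, clock_t width_us)
{
        uint8_t dcr = ddi_get8(h, ctl);

        ddi_put8(h, data, byte);                /* byte onto the data lines */
        drv_usecwait(setup_us);                 /* data-setup time */

        ddi_put8(h, ctl, dcr | XX_CTL_STB);     /* assert nSTROBE */
        drv_usecwait(width_us);                 /* strobe pulse width */
        ddi_put8(h, ctl, dcr & ~XX_CTL_STB);    /* deassert nSTROBE */
}
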
3409 ecpp_nErr_ihdlr(struct ecppunit *pp) in ecpp_nErr_ihdlr() argument
3411 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: mode=%x, phase=%x\n", in ecpp_nErr_ihdlr()
3412 pp->current_mode, pp->current_phase); in ecpp_nErr_ihdlr()
3414 if (pp->oflag != TRUE) { in ecpp_nErr_ihdlr()
3415 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: not open!\n"); in ecpp_nErr_ihdlr()
3419 if (pp->e_busy == ECPP_BUSY) { in ecpp_nErr_ihdlr()
3420 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: busy\n"); in ecpp_nErr_ihdlr()
3421 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK); in ecpp_nErr_ihdlr()
3426 ECPP_MASK_INTR(pp); in ecpp_nErr_ihdlr()
3427 DCR_WRITE(pp, DCR_READ(pp) & ~(ECPP_INTR_EN | ECPP_REV_DIR)); in ecpp_nErr_ihdlr()
3428 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK); in ecpp_nErr_ihdlr()
3431 switch (pp->current_mode) { in ecpp_nErr_ihdlr()
3445 pp->current_phase = ECPP_PHASE_NIBT_REVINTR; in ecpp_nErr_ihdlr()
3449 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: wrong mode!\n"); in ecpp_nErr_ihdlr()
3453 (void) ecpp_backchan_req(pp); /* put backchannel request on the wq */ in ecpp_nErr_ihdlr()
3465 struct ecppunit *pp = (struct ecppunit *)arg; in ecpp_softintr() local
3468 mutex_enter(&pp->umutex); in ecpp_softintr()
3470 pp->intr_soft++; in ecpp_softintr()
3472 if (!pp->softintr_pending) { in ecpp_softintr()
3473 mutex_exit(&pp->umutex); in ecpp_softintr()
3476 pp->softintr_pending = 0; in ecpp_softintr()
3479 if (pp->softintr_flags & ECPP_SOFTINTR_PIONEXT) { in ecpp_softintr()
3480 pp->softintr_flags &= ~ECPP_SOFTINTR_PIONEXT; in ecpp_softintr()
3486 if (ecpp_check_status(pp) == SUCCESS) { in ecpp_softintr()
3487 pp->e_busy = ECPP_BUSY; in ecpp_softintr()
3491 if (pp->isr_reattempt_high < ecpp_reattempts) { in ecpp_softintr()
3492 pp->isr_reattempt_high = ecpp_reattempts; in ecpp_softintr()
3494 } while (++ecpp_reattempts < pp->wait_for_busy); in ecpp_softintr()
3497 if (pp->e_busy == ECPP_ERR) { in ecpp_softintr()
3498 ++pp->ctx_cf; /* check status fail */ in ecpp_softintr()
3499 ecpp_error(pp->dip, "ecpp_softintr:check_status:F: " in ecpp_softintr()
3501 DSR_READ(pp), pp->joblen, pp->ctx_cf); in ecpp_softintr()
3507 unx_len = pp->last_byte - pp->next_byte; in ecpp_softintr()
3508 if (pp->msg != NULL) { in ecpp_softintr()
3509 ecpp_putback_untransfered(pp, in ecpp_softintr()
3510 (void *)pp->msg->b_rptr, unx_len); in ecpp_softintr()
3511 ecpp_error(pp->dip, in ecpp_softintr()
3514 freemsg(pp->msg); in ecpp_softintr()
3515 pp->msg = NULL; in ecpp_softintr()
3517 ecpp_putback_untransfered(pp, in ecpp_softintr()
3518 pp->next_byte, unx_len); in ecpp_softintr()
3519 ecpp_error(pp->dip, in ecpp_softintr()
3523 ecpp_xfer_cleanup(pp); in ecpp_softintr()
3524 pp->e_busy = ECPP_ERR; in ecpp_softintr()
3525 qenable(pp->writeq); in ecpp_softintr()
3528 pp->e_busy = ECPP_BUSY; in ecpp_softintr()
3529 (void) ecpp_pio_writeb(pp); in ecpp_softintr()
3533 mutex_exit(&pp->umutex); in ecpp_softintr()
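
The ecpp_softintr() and ecpp_pio_ihdlr() hits show the hard-to-soft interrupt handoff used for per-byte PIO: the hard handler sets softintr_pending and calls ddi_trigger_softintr(), and the soft handler re-checks the flag under the unit mutex so a spurious soft interrupt is ignored. A minimal sketch of that handshake; xx names are hypothetical and the deferred work itself is elided.

#include <sys/types.h>
#include <sys/ksynch.h>
#include <sys/debug.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

struct xxunit {
        kmutex_t        umutex;
        ddi_softintr_t  softintr_id;
        int             softintr_pending;
        int             work;           /* stand-in for the deferred job */
};

/* Called from the hard ISR with umutex held: defer work to soft level. */
static void
xx_post_soft(struct xxunit *xp)
{
        ASSERT(mutex_owned(&xp->umutex));
        xp->softintr_pending = 1;
        ddi_trigger_softintr(xp->softintr_id);
}

static uint_t
xx_softintr(caddr_t arg)
{
        struct xxunit *xp = (struct xxunit *)arg;

        mutex_enter(&xp->umutex);
        if (!xp->softintr_pending) {            /* spurious trigger */
                mutex_exit(&xp->umutex);
                return (DDI_INTR_CLAIMED);
        }
        xp->softintr_pending = 0;
        xp->work++;                             /* do the deferred work here */
        mutex_exit(&xp->umutex);

        return (DDI_INTR_CLAIMED);
}
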
3545 ecpp_xfer_cleanup(struct ecppunit *pp) in ecpp_xfer_cleanup() argument
3547 ASSERT(mutex_owned(&pp->umutex)); in ecpp_xfer_cleanup()
3553 if (pp->msg != NULL) { in ecpp_xfer_cleanup()
3554 freemsg(pp->msg); in ecpp_xfer_cleanup()
3555 pp->msg = NULL; in ecpp_xfer_cleanup()
3559 pp->e_busy = ECPP_IDLE; in ecpp_xfer_cleanup()
3562 ecpp_untimeout_unblock(pp, &pp->timeout_id); in ecpp_xfer_cleanup()
3564 qenable(pp->writeq); in ecpp_xfer_cleanup()
3616 struct ecppunit *pp = arg; in ecpp_xfer_timeout() local
3622 mutex_enter(&pp->umutex); in ecpp_xfer_timeout()
3624 if (pp->timeout_id == 0) { in ecpp_xfer_timeout()
3625 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3628 pp->timeout_id = 0; in ecpp_xfer_timeout()
3631 pp->xfer_tout++; in ecpp_xfer_timeout()
3633 pp->dma_cancelled = TRUE; /* prevent race with isr() */ in ecpp_xfer_timeout()
3635 if (COMPAT_PIO(pp)) { in ecpp_xfer_timeout()
3641 dcr = DCR_READ(pp); in ecpp_xfer_timeout()
3642 (void) dcr_write(pp, dcr & ~(ECPP_REV_DIR | ECPP_INTR_EN)); in ecpp_xfer_timeout()
3643 ECPP_MASK_INTR(pp); in ecpp_xfer_timeout()
3645 pp->softintr_pending = 0; in ecpp_xfer_timeout()
3646 unx_len = pp->last_byte - pp->next_byte; in ecpp_xfer_timeout()
3647 ecpp_error(pp->dip, "xfer_timeout: unx_len=%d\n", unx_len); in ecpp_xfer_timeout()
3650 unx_addr = pp->next_byte; in ecpp_xfer_timeout()
3652 ecpp_xfer_cleanup(pp); in ecpp_xfer_timeout()
3653 qenable(pp->writeq); in ecpp_xfer_timeout()
3654 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3664 if (ECPP_DMA_STOP(pp, &unx_len) == FAILURE) { in ecpp_xfer_timeout()
3665 ecpp_error(pp->dip, in ecpp_xfer_timeout()
3669 ecpp_error(pp->dip, "xfer_timeout: unx_len=%d\n", unx_len); in ecpp_xfer_timeout()
3671 if (ddi_dma_unbind_handle(pp->dma_handle) == DDI_FAILURE) { in ecpp_xfer_timeout()
3672 ecpp_error(pp->dip, in ecpp_xfer_timeout()
3682 qenable(pp->writeq); in ecpp_xfer_timeout()
3683 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3686 xferd = pp->dma_cookie.dmac_size - unx_len; in ecpp_xfer_timeout()
3687 pp->resid -= xferd; in ecpp_xfer_timeout()
3688 unx_len = pp->resid; in ecpp_xfer_timeout()
3691 pp->obytes[pp->current_mode] += xferd; in ecpp_xfer_timeout()
3692 pp->joblen += xferd; in ecpp_xfer_timeout()
3694 if (pp->msg != NULL) { in ecpp_xfer_timeout()
3695 unx_addr = (caddr_t)pp->msg->b_wptr - unx_len; in ecpp_xfer_timeout()
3697 unx_addr = pp->ioblock + in ecpp_xfer_timeout()
3698 (pp->xfercnt - unx_len); in ecpp_xfer_timeout()
3705 ecpp_putback_untransfered(pp, (caddr_t)unx_addr, unx_len); in ecpp_xfer_timeout()
3707 if (pp->msg != NULL) { in ecpp_xfer_timeout()
3708 freemsg(pp->msg); in ecpp_xfer_timeout()
3709 pp->msg = NULL; in ecpp_xfer_timeout()
3713 pp->timeout_error = 1; in ecpp_xfer_timeout()
3714 pp->e_busy = ECPP_ERR; in ecpp_xfer_timeout()
3715 fifo_timer_id = pp->fifo_timer_id; in ecpp_xfer_timeout()
3716 pp->fifo_timer_id = 0; in ecpp_xfer_timeout()
3718 qenable(pp->writeq); in ecpp_xfer_timeout()
3720 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
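
ecpp_xfer_timeout(), like ecpp_fifo_timer() and ecpp_wsrv_timer() below, follows the standard timeout(9F) discipline: the handler takes the unit mutex and, if the stored id has already been zeroed, treats itself as cancelled and returns; otherwise it clears the id and does the work. A minimal sketch of arming and handling such a watchdog; xx names are hypothetical and the actual abort work is reduced to a counter.

#include <sys/types.h>
#include <sys/systm.h>          /* timeout() */
#include <sys/ksynch.h>
#include <sys/debug.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

struct xxunit {
        kmutex_t        umutex;
        timeout_id_t    timeout_id;
        int             xfer_tout;      /* timeout counter, for illustration */
};

static void
xx_xfer_timeout(void *arg)
{
        struct xxunit *xp = arg;

        mutex_enter(&xp->umutex);
        if (xp->timeout_id == 0) {
                /* untimeout() won the race: this callout was cancelled */
                mutex_exit(&xp->umutex);
                return;
        }
        xp->timeout_id = 0;             /* mark the callout as consumed */
        xp->xfer_tout++;                /* then abort/clean up the transfer */
        mutex_exit(&xp->umutex);
}

/* Arm the watchdog; 'seconds' plays the role of xfer_parms.write_timeout. */
static void
xx_arm_timeout(struct xxunit *xp, int seconds)
{
        ASSERT(mutex_owned(&xp->umutex));
        xp->timeout_id = timeout(xx_xfer_timeout, xp,
            seconds * drv_usectohz(1000000));
}
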
3728 ecpp_putback_untransfered(struct ecppunit *pp, void *startp, uint_t len) in ecpp_putback_untransfered() argument
3732 ecpp_error(pp->dip, "ecpp_putback_untrans=%d\n", len); in ecpp_putback_untransfered()
3740 ecpp_error(pp->dip, in ecpp_putback_untransfered()
3748 if (!putbq(pp->writeq, new_mp)) { in ecpp_putback_untransfered()
3754 ecr_write(struct ecppunit *pp, uint8_t ecr_byte) in ecr_write() argument
3759 ECR_WRITE(pp, ecr_byte); in ecr_write()
3761 current_ecr = ECR_READ(pp); in ecr_write()
3771 dcr_write(struct ecppunit *pp, uint8_t dcr_byte) in dcr_write() argument
3777 DCR_WRITE(pp, dcr_byte); in dcr_write()
3779 current_dcr = DCR_READ(pp); in dcr_write()
3785 ecpp_error(pp->dip, in dcr_write()
3793 ecpp_reset_port_regs(struct ecppunit *pp) in ecpp_reset_port_regs() argument
3795 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in ecpp_reset_port_regs()
3796 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_reset_port_regs()
3809 struct ecppunit *pp = arg; in ecpp_fifo_timer() local
3813 mutex_enter(&pp->umutex); in ecpp_fifo_timer()
3818 if (pp->fifo_timer_id == 0) { in ecpp_fifo_timer()
3819 ecpp_error(pp->dip, "ecpp_fifo_timer: untimedout\n"); in ecpp_fifo_timer()
3820 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3823 pp->fifo_timer_id = 0; in ecpp_fifo_timer()
3831 ecr = ECR_READ(pp); in ecpp_fifo_timer()
3833 if ((pp->current_mode != ECPP_DIAG_MODE) && in ecpp_fifo_timer()
3835 (pp->ecpp_drain_counter < 10))) { in ecpp_fifo_timer()
3837 ecpp_error(pp->dip, in ecpp_fifo_timer()
3839 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3841 pp->fifo_timer_id = timeout(ecpp_fifo_timer, in ecpp_fifo_timer()
3842 (caddr_t)pp, drv_usectohz(FIFO_DRAIN_PERIOD)); in ecpp_fifo_timer()
3843 ++pp->ecpp_drain_counter; in ecpp_fifo_timer()
3845 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3849 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_fifo_timer()
3854 if (pp->ecpp_drain_counter >= 10) { in ecpp_fifo_timer()
3855 ecpp_error(pp->dip, "ecpp_fifo_timer(%d):" in ecpp_fifo_timer()
3857 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3859 ecpp_error(pp->dip, in ecpp_fifo_timer()
3861 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3864 pp->ecpp_drain_counter = 0; in ecpp_fifo_timer()
3876 timeout_id = pp->timeout_id; in ecpp_fifo_timer()
3877 pp->timeout_id = 0; in ecpp_fifo_timer()
3880 if (pp->current_mode == ECPP_ECP_MODE || in ecpp_fifo_timer()
3881 pp->current_mode == ECPP_DIAG_MODE || in ecpp_fifo_timer()
3882 COMPAT_DMA(pp)) { in ecpp_fifo_timer()
3887 pp->resid -= pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3890 pp->joblen += pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3891 if (pp->dma_dir == DDI_DMA_WRITE) { in ecpp_fifo_timer()
3892 pp->obytes[pp->current_mode] += in ecpp_fifo_timer()
3893 pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3895 pp->ibytes[pp->current_mode] += in ecpp_fifo_timer()
3896 pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3902 if (--pp->dma_cookie_count > 0) { in ecpp_fifo_timer()
3904 ddi_dma_nextcookie(pp->dma_handle, in ecpp_fifo_timer()
3905 &pp->dma_cookie); in ecpp_fifo_timer()
3906 } else if (pp->dma_curwin < pp->dma_nwin) { in ecpp_fifo_timer()
3908 if (ddi_dma_getwin(pp->dma_handle, in ecpp_fifo_timer()
3909 pp->dma_curwin, &off, &len, in ecpp_fifo_timer()
3910 &pp->dma_cookie, in ecpp_fifo_timer()
3911 &pp->dma_cookie_count) != DDI_SUCCESS) { in ecpp_fifo_timer()
3912 ecpp_error(pp->dip, in ecpp_fifo_timer()
3917 pp->dma_curwin++; in ecpp_fifo_timer()
3922 ecpp_error(pp->dip, "ecpp_fifo_timer: next addr=%llx len=%d\n", in ecpp_fifo_timer()
3923 pp->dma_cookie.dmac_address, in ecpp_fifo_timer()
3924 pp->dma_cookie.dmac_size); in ecpp_fifo_timer()
3927 if (ECPP_DMA_START(pp) != SUCCESS) { in ecpp_fifo_timer()
3928 ecpp_error(pp->dip, in ecpp_fifo_timer()
3933 (void) ecr_write(pp, (ecr & 0xe0) | in ecpp_fifo_timer()
3936 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3944 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_fifo_timer()
3945 ecpp_error(pp->dip, "ecpp_fifo_timer: unbind failed\n"); in ecpp_fifo_timer()
3947 ecpp_error(pp->dip, "ecpp_fifo_timer: unbind ok\n"); in ecpp_fifo_timer()
3955 if (pp->msg != NULL) { in ecpp_fifo_timer()
3956 freemsg(pp->msg); in ecpp_fifo_timer()
3957 pp->msg = NULL; in ecpp_fifo_timer()
3961 pp->e_busy = ECPP_IDLE; in ecpp_fifo_timer()
3963 qenable(pp->writeq); in ecpp_fifo_timer()
3965 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
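
Besides waiting for the hardware FIFO to drain, ecpp_fifo_timer() is where a partially-bound DMA transfer is continued: when the current cookie is consumed it advances to the next cookie, or fetches the next DMA window, restarts the engine, and only unbinds the handle and re-enables the write queue once both are exhausted. The sketch below isolates just that cookie/window advance, mirroring the walk in the hits above; the xx function is illustrative.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

/*
 * Advance a DDI_DMA_PARTIAL binding: next cookie in the current window,
 * else the next window.  Returns DDI_SUCCESS with *cookiep refreshed,
 * or DDI_FAILURE when the whole binding has been described (the caller
 * then unbinds the handle).  *curwinp is the index of the next window
 * to fetch, starting at 1 once window 0 is active.
 */
static int
xx_dma_advance(ddi_dma_handle_t h, ddi_dma_cookie_t *cookiep,
    uint_t *ccountp, uint_t *curwinp, uint_t nwin)
{
        off_t   off;
        size_t  len;

        if (--(*ccountp) > 0) {                 /* more cookies in this window */
                ddi_dma_nextcookie(h, cookiep);
                return (DDI_SUCCESS);
        }

        if (*curwinp < nwin) {                  /* move to the next window */
                if (ddi_dma_getwin(h, *curwinp, &off, &len,
                    cookiep, ccountp) != DDI_SUCCESS)
                        return (DDI_FAILURE);
                (*curwinp)++;
                return (DDI_SUCCESS);
        }

        return (DDI_FAILURE);                   /* transfer fully described */
}
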
3976 ecpp_check_status(struct ecppunit *pp) in ecpp_check_status() argument
3981 if (pp->current_mode == ECPP_ECP_MODE || in ecpp_check_status()
3982 pp->current_mode == ECPP_DIAG_MODE) in ecpp_check_status()
3987 dsr = DSR_READ(pp); in ecpp_check_status()
3989 pp->e_busy = ECPP_ERR; in ecpp_check_status()
4004 struct ecppunit *pp = arg; in ecpp_wsrv_timer() local
4006 ecpp_error(pp->dip, "ecpp_wsrv_timer: starting\n"); in ecpp_wsrv_timer()
4008 mutex_enter(&pp->umutex); in ecpp_wsrv_timer()
4010 if (pp->wsrv_timer_id == 0) { in ecpp_wsrv_timer()
4011 mutex_exit(&pp->umutex); in ecpp_wsrv_timer()
4014 pp->wsrv_timer_id = 0; in ecpp_wsrv_timer()
4017 ecpp_error(pp->dip, "ecpp_wsrv_timer: qenabling...\n"); in ecpp_wsrv_timer()
4019 qenable(pp->writeq); in ecpp_wsrv_timer()
4021 mutex_exit(&pp->umutex); in ecpp_wsrv_timer()
4029 ecpp_backchan_req(struct ecppunit *pp) in ecpp_backchan_req() argument
4034 ecpp_error(pp->dip, "ecpp_backchan_req: allocb failed\n"); in ecpp_backchan_req()
4040 if (!putbq(pp->writeq, mp)) { in ecpp_backchan_req()
4041 ecpp_error(pp->dip, "ecpp_backchan_req:putbq failed\n"); in ecpp_backchan_req()
4054 ecpp_untimeout_unblock(struct ecppunit *pp, timeout_id_t *id) in ecpp_untimeout_unblock() argument
4058 ASSERT(mutex_owned(&pp->umutex)); in ecpp_untimeout_unblock()
4063 mutex_exit(&pp->umutex); in ecpp_untimeout_unblock()
4065 mutex_enter(&pp->umutex); in ecpp_untimeout_unblock()
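
ecpp_untimeout_unblock() exists because untimeout(9F) may block until a running callout returns, and the driver's callouts take the same per-unit mutex; so the id is snapshotted and zeroed while the mutex is held (letting a handler that is already running notice the zero and bail out, as in the watchdog sketch earlier), and the mutex is dropped only around the untimeout() call itself. A minimal sketch:

#include <sys/types.h>
#include <sys/systm.h>          /* untimeout() */
#include <sys/ksynch.h>
#include <sys/debug.h>

struct xxunit {
        kmutex_t        umutex;
};

static void
xx_untimeout_unblock(struct xxunit *xp, timeout_id_t *idp)
{
        timeout_id_t    id;

        ASSERT(mutex_owned(&xp->umutex));

        id = *idp;
        *idp = 0;                       /* a running handler sees 0 and returns */
        if (id != 0) {
                mutex_exit(&xp->umutex);
                (void) untimeout(id);   /* may wait for the handler to finish */
                mutex_enter(&xp->umutex);
        }
}
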
4073 ecpp_get_prn_ifcap(struct ecppunit *pp) in ecpp_get_prn_ifcap() argument
4080 if (pp->current_mode == ECPP_CENTRONICS || in ecpp_get_prn_ifcap()
4081 pp->current_mode == ECPP_COMPAT_MODE) { in ecpp_get_prn_ifcap()
4083 } else if (pp->current_mode == ECPP_NIBBLE_MODE || in ecpp_get_prn_ifcap()
4084 pp->current_mode == ECPP_ECP_MODE) { in ecpp_get_prn_ifcap()
4095 ecpp_determine_sio_type(struct ecppunit *pp) in ecpp_determine_sio_type() argument
4101 name = ddi_binding_name(pp->dip); in ecpp_determine_sio_type()
4129 ecpp_1284_init_interface(struct ecppunit *pp) in ecpp_1284_init_interface() argument
4131 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_init_interface()
4137 if (pp->init_seq == TRUE) { in ecpp_1284_init_interface()
4138 DCR_WRITE(pp, ECPP_SLCTIN); in ecpp_1284_init_interface()
4142 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_init_interface()
4144 pp->current_mode = pp->backchannel = ECPP_CENTRONICS; in ecpp_1284_init_interface()
4145 pp->current_phase = ECPP_PHASE_C_IDLE; in ecpp_1284_init_interface()
4146 ECPP_CONFIG_MODE(pp); in ecpp_1284_init_interface()
4147 pp->to_mode[pp->current_mode]++; in ecpp_1284_init_interface()
4149 ecpp_error(pp->dip, "ecpp_1284_init_interface: ok\n"); in ecpp_1284_init_interface()
4156 ecp_negotiation(struct ecppunit *pp) in ecp_negotiation() argument
4162 if (ecpp_1284_negotiation(pp, ECPP_XREQ_ECP, &dsr) == FAILURE) in ecp_negotiation()
4168 ecpp_error(pp->dip, in ecp_negotiation()
4169 "ecp_negotiation: failed event 5 %x\n", DSR_READ(pp)); in ecp_negotiation()
4170 (void) ecpp_1284_termination(pp); in ecp_negotiation()
4175 pp->current_phase = ECPP_PHASE_ECP_SETUP; in ecp_negotiation()
4178 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in ecp_negotiation()
4181 if (wait_dsr(pp, ECPP_PE, ECPP_PE, 35000) < 0) { in ecp_negotiation()
4182 ecpp_error(pp->dip, in ecp_negotiation()
4183 "ecp_negotiation: failed event 31 %x\n", DSR_READ(pp)); in ecp_negotiation()
4184 (void) ecpp_1284_termination(pp); in ecp_negotiation()
4189 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecp_negotiation()
4192 pp->current_mode = ECPP_ECP_MODE; in ecp_negotiation()
4193 pp->backchannel = ECPP_ECP_MODE; in ecp_negotiation()
4195 ecpp_error(pp->dip, "ecp_negotiation: ok\n"); in ecp_negotiation()
4204 nibble_negotiation(struct ecppunit *pp) in nibble_negotiation() argument
4208 if (ecpp_1284_negotiation(pp, ECPP_XREQ_NIBBLE, &dsr) == FAILURE) { in nibble_negotiation()
4217 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in nibble_negotiation()
4219 pp->current_phase = ECPP_PHASE_NIBT_NAVAIL; in nibble_negotiation()
4223 pp->current_mode = ECPP_NIBBLE_MODE; in nibble_negotiation()
4224 pp->backchannel = ECPP_NIBBLE_MODE; in nibble_negotiation()
4226 ecpp_error(pp->dip, "nibble_negotiation: ok (phase=%x)\n", in nibble_negotiation()
4227 pp->current_phase); in nibble_negotiation()
4239 wait_dsr(struct ecppunit *pp, uint8_t mask, uint8_t val, int ptimeout) in wait_dsr() argument
4241 while (((DSR_READ(pp) & mask) != val) && ptimeout--) { in wait_dsr()
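
wait_dsr() is the driver's generic status poll: it re-reads DSR until the masked value matches or the microsecond budget runs out, returning a negative count on timeout, which is why every caller tests the result with "< 0"; the 35000 passed on the IEEE 1284 paths corresponds to the specification's 35 ms event timeouts. The sketch below restates the helper against a caller-supplied status-read callback instead of DSR_READ(); the xx name and callback are placeholders.

#include <sys/types.h>
#include <sys/ddi.h>
#include <sys/sunddi.h>

/*
 * Poll a status register until (status & mask) == val or roughly 'usec'
 * microseconds elapse.  Returns >= 0 on success, negative on timeout.
 */
static int
xx_wait_status(uint8_t (*read_status)(void *), void *arg,
    uint8_t mask, uint8_t val, int usec)
{
        while (((read_status(arg) & mask) != val) && usec-- > 0)
                drv_usecwait(1);        /* 1 us granularity busy-wait */

        return (usec);
}
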
4262 ecpp_1284_negotiation(struct ecppunit *pp, uint8_t xreq, uint8_t *rdsr) in ecpp_1284_negotiation() argument
4266 ecpp_error(pp->dip, "nego(%x): entering...\n", xreq); in ecpp_1284_negotiation()
4269 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4272 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_negotiation()
4274 pp->current_phase = ECPP_PHASE_NEGO; in ecpp_1284_negotiation()
4277 DATAR_WRITE(pp, xreq); in ecpp_1284_negotiation()
4280 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in ecpp_1284_negotiation()
4288 if (wait_dsr(pp, ECPP_nERR | ECPP_SLCT | ECPP_PE | ECPP_nACK, in ecpp_1284_negotiation()
4291 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4292 "nego(%x): failed event 2 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4293 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4301 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX | ECPP_STB); in ecpp_1284_negotiation()
4309 DCR_WRITE(pp, ECPP_nINIT); in ecpp_1284_negotiation()
4321 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in ecpp_1284_negotiation()
4323 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4324 "nego(%x): failed event 6 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4325 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4329 if ((DSR_READ(pp) & ECPP_SLCT) != xflag) { in ecpp_1284_negotiation()
4331 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4332 "nego(%x): failed event 5 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4333 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4338 *rdsr = DSR_READ(pp); in ecpp_1284_negotiation()
4358 ecpp_1284_termination(struct ecppunit *pp) in ecpp_1284_termination() argument
4360 int previous_mode = pp->current_mode; in ecpp_1284_termination()
4362 if (((pp->current_mode == ECPP_COMPAT_MODE || in ecpp_1284_termination()
4363 pp->current_mode == ECPP_CENTRONICS) && in ecpp_1284_termination()
4364 pp->current_phase == ECPP_PHASE_C_IDLE) || in ecpp_1284_termination()
4365 pp->current_mode == ECPP_DIAG_MODE) { in ecpp_1284_termination()
4366 ecpp_error(pp->dip, "termination: not needed\n"); in ecpp_1284_termination()
4371 ECPP_MASK_INTR(pp); in ecpp_1284_termination()
4372 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_termination()
4374 pp->current_mode = ECPP_COMPAT_MODE; /* needed by next function */ in ecpp_1284_termination()
4376 ECPP_CONFIG_MODE(pp); in ecpp_1284_termination()
4383 DCR_WRITE(pp, 0); in ecpp_1284_termination()
4388 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4394 pp->current_phase = ECPP_PHASE_TERM; in ecpp_1284_termination()
4397 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4401 if (wait_dsr(pp, ECPP_nERR | ECPP_nBUSY | ECPP_nACK, in ecpp_1284_termination()
4403 ecpp_error(pp->dip, in ecpp_1284_termination()
4404 "termination: failed events 23,24 %x\n", DSR_READ(pp)); in ecpp_1284_termination()
4405 ecpp_1284_init_interface(pp); in ecpp_1284_termination()
4412 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN | ECPP_AFX); in ecpp_1284_termination()
4417 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in ecpp_1284_termination()
4418 ecpp_error(pp->dip, in ecpp_1284_termination()
4419 "termination: failed event 27 %x\n", DSR_READ(pp)); in ecpp_1284_termination()
4420 ecpp_1284_init_interface(pp); in ecpp_1284_termination()
4427 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4433 pp->current_phase = ECPP_PHASE_C_IDLE; in ecpp_1284_termination()
4435 ecpp_error(pp->dip, "termination: completed %x %x\n", in ecpp_1284_termination()
4436 DSR_READ(pp), DCR_READ(pp)); in ecpp_1284_termination()
4445 ecp_peripheral2host(struct ecppunit *pp) in ecp_peripheral2host() argument
4451 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_peripheral2host()
4452 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecp_peripheral2host()
4464 if (pp->nread > 0) { in ecp_peripheral2host()
4465 len = min(pp->nread, ECP_REV_BLKSZ_MAX); in ecp_peripheral2host()
4470 pp->nread = 0; /* clear after use */ in ecp_peripheral2host()
4477 ecpp_error(pp->dip, in ecp_peripheral2host()
4490 pp->msg = mp; in ecp_peripheral2host()
4491 pp->e_busy = ECPP_BUSY; in ecp_peripheral2host()
4492 pp->dma_dir = DDI_DMA_READ; in ecp_peripheral2host()
4493 pp->current_phase = ECPP_PHASE_ECP_REV_XFER; in ecp_peripheral2host()
4495 if (ecpp_init_dma_xfer(pp, (caddr_t)mp->b_rptr, len) == FAILURE) { in ecp_peripheral2host()
4533 xfer_time = max((1000 * len) / pp->ecp_rev_speed, ECP_REV_MINTOUT); in ecp_peripheral2host()
4535 pp->rev_timeout_cnt = (pp->hw == &x86) ? 1 : in ecp_peripheral2host()
4536 max(xfer_time / pp->rev_watchdog, 1); in ecp_peripheral2host()
4538 pp->rev_timeout_cnt = (pp->hw == &m1553) ? 1 : in ecp_peripheral2host()
4539 max(xfer_time / pp->rev_watchdog, 1); in ecp_peripheral2host()
4542 pp->last_dmacnt = len; /* nothing xferred yet */ in ecp_peripheral2host()
4544 pp->timeout_id = timeout(ecpp_ecp_read_timeout, (caddr_t)pp, in ecp_peripheral2host()
4545 drv_usectohz(pp->rev_watchdog * 1000)); in ecp_peripheral2host()
4547 ecpp_error(pp->dip, "ecp_periph2host: DMA started len=%d\n" in ecp_peripheral2host()
4549 len, xfer_time, pp->rev_watchdog, pp->rev_timeout_cnt); in ecp_peripheral2host()
4557 pp->e_busy = ECPP_IDLE; in ecp_peripheral2host()
4558 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecp_peripheral2host()
4572 struct ecppunit *pp = arg; in ecpp_ecp_read_timeout() local
4575 mutex_enter(&pp->umutex); in ecpp_ecp_read_timeout()
4577 if (pp->timeout_id == 0) { in ecpp_ecp_read_timeout()
4578 mutex_exit(&pp->umutex); in ecpp_ecp_read_timeout()
4581 pp->timeout_id = 0; in ecpp_ecp_read_timeout()
4584 if (--pp->rev_timeout_cnt == 0) { in ecpp_ecp_read_timeout()
4588 ecpp_error(pp->dip, "ecp_read_timeout: timeout\n"); in ecpp_ecp_read_timeout()
4589 pp->xfer_tout++; in ecpp_ecp_read_timeout()
4590 ecpp_ecp_read_completion(pp); in ecpp_ecp_read_timeout()
4596 dmacnt = ECPP_DMA_GETCNT(pp); in ecpp_ecp_read_timeout()
4597 if (dmacnt - pp->last_dmacnt == 0) { in ecpp_ecp_read_timeout()
4602 ecpp_error(pp->dip, "ecp_read_timeout: no progress\n"); in ecpp_ecp_read_timeout()
4603 pp->xfer_tout++; in ecpp_ecp_read_timeout()
4604 ecpp_ecp_read_completion(pp); in ecpp_ecp_read_timeout()
4609 ecpp_error(pp->dip, "ecp_read_timeout: restarting\n"); in ecpp_ecp_read_timeout()
4610 pp->last_dmacnt = dmacnt; in ecpp_ecp_read_timeout()
4611 pp->timeout_id = timeout(ecpp_ecp_read_timeout, in ecpp_ecp_read_timeout()
4612 (caddr_t)pp, in ecpp_ecp_read_timeout()
4613 drv_usectohz(pp->rev_watchdog * 1000)); in ecpp_ecp_read_timeout()
4617 mutex_exit(&pp->umutex); in ecpp_ecp_read_timeout()
4625 ecpp_ecp_read_completion(struct ecppunit *pp) in ecpp_ecp_read_completion() argument
4630 ASSERT(mutex_owned(&pp->umutex)); in ecpp_ecp_read_completion()
4631 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecpp_ecp_read_completion()
4632 pp->current_phase == ECPP_PHASE_ECP_REV_XFER); in ecpp_ecp_read_completion()
4633 ASSERT(pp->msg != NULL); in ecpp_ecp_read_completion()
4638 if (ECPP_DMA_STOP(pp, &unx_len) == FAILURE) { in ecpp_ecp_read_completion()
4639 unx_len = pp->resid; in ecpp_ecp_read_completion()
4640 ecpp_error(pp->dip, "ecp_read_completion: failed dma_stop\n"); in ecpp_ecp_read_completion()
4643 mp = pp->msg; in ecpp_ecp_read_completion()
4644 xfer_len = pp->resid - unx_len; /* how much data was transferred */ in ecpp_ecp_read_completion()
4646 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_ecp_read_completion()
4647 ecpp_error(pp->dip, "ecp_read_completion: unbind failed.\n"); in ecpp_ecp_read_completion()
4650 ecpp_error(pp->dip, "ecp_read_completion: xfered %d bytes of %d\n", in ecpp_ecp_read_completion()
4651 xfer_len, pp->resid); in ecpp_ecp_read_completion()
4654 pp->msg = NULL; in ecpp_ecp_read_completion()
4655 pp->resid -= xfer_len; in ecpp_ecp_read_completion()
4656 pp->ibytes[pp->current_mode] += xfer_len; in ecpp_ecp_read_completion()
4657 pp->e_busy = ECPP_IDLE; in ecpp_ecp_read_completion()
4658 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecpp_ecp_read_completion()
4664 if (canputnext(pp->readq)) { in ecpp_ecp_read_completion()
4665 mutex_exit(&pp->umutex); in ecpp_ecp_read_completion()
4666 putnext(pp->readq, mp); in ecpp_ecp_read_completion()
4667 mutex_enter(&pp->umutex); in ecpp_ecp_read_completion()
4669 ecpp_error(pp->dip, "ecp_read_completion: fail canputnext\n"); in ecpp_ecp_read_completion()
4670 if (!putq(pp->readq, mp)) { in ecpp_ecp_read_completion()
4676 if (!(ECR_READ(pp) & ECPP_FIFO_EMPTY)) { in ecpp_ecp_read_completion()
4677 (void) ecpp_backchan_req(pp); in ecpp_ecp_read_completion()
4680 qenable(pp->writeq); in ecpp_ecp_read_completion()
4687 nibble_peripheral2host(struct ecppunit *pp, uint8_t *byte) in nibble_peripheral2host() argument
4697 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in nibble_peripheral2host()
4702 if (wait_dsr(pp, ECPP_nACK, 0, 35000) < 0) { in nibble_peripheral2host()
4703 ecpp_error(pp->dip, in nibble_peripheral2host()
4705 i + 1, DSR_READ(pp)); in nibble_peripheral2host()
4706 (void) ecpp_1284_termination(pp); in nibble_peripheral2host()
4710 n[i] = DSR_READ(pp); /* get a nibble */ in nibble_peripheral2host()
4713 DCR_WRITE(pp, ECPP_nINIT); in nibble_peripheral2host()
4718 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in nibble_peripheral2host()
4719 ecpp_error(pp->dip, in nibble_peripheral2host()
4721 i + 1, DSR_READ(pp)); in nibble_peripheral2host()
4722 (void) ecpp_1284_termination(pp); in nibble_peripheral2host()
4731 pp->ibytes[ECPP_NIBBLE_MODE]++; in nibble_peripheral2host()
4739 ecpp_peripheral2host(struct ecppunit *pp) in ecpp_peripheral2host() argument
4741 if (!canputnext(pp->readq)) { in ecpp_peripheral2host()
4742 ecpp_error(pp->dip, "ecpp_peripheral2host: readq full\n"); in ecpp_peripheral2host()
4746 switch (pp->backchannel) { in ecpp_peripheral2host()
4752 ASSERT(pp->current_mode == ECPP_NIBBLE_MODE); in ecpp_peripheral2host()
4757 DCR_WRITE(pp, ECPP_nINIT); in ecpp_peripheral2host()
4760 if (wait_dsr(pp, ECPP_PE, 0, 35000) < 0) { in ecpp_peripheral2host()
4761 ecpp_error(pp->dip, in ecpp_peripheral2host()
4763 DSR_READ(pp)); in ecpp_peripheral2host()
4764 (void) ecpp_1284_termination(pp); in ecpp_peripheral2host()
4768 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in ecpp_peripheral2host()
4771 return (ecpp_idle_phase(pp)); in ecpp_peripheral2host()
4774 if ((pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE) && in ecpp_peripheral2host()
4775 (ecp_forward2reverse(pp) == FAILURE)) { in ecpp_peripheral2host()
4779 return (ecp_peripheral2host(pp)); /* start the transfer */ in ecpp_peripheral2host()
4785 if (ECR_READ(pp) & ECPP_FIFO_EMPTY) { in ecpp_peripheral2host()
4786 ecpp_error(pp->dip, "ecpp_periph2host: fifo empty\n"); in ecpp_peripheral2host()
4792 ecpp_error(pp->dip, in ecpp_peripheral2host()
4801 while (i-- && (!(ECR_READ(pp) & ECPP_FIFO_EMPTY))) { in ecpp_peripheral2host()
4802 *mp->b_wptr++ = TFIFO_READ(pp); in ecpp_peripheral2host()
4806 if (canputnext(pp->readq)) { in ecpp_peripheral2host()
4807 mutex_exit(&pp->umutex); in ecpp_peripheral2host()
4809 ecpp_error(pp->dip, in ecpp_peripheral2host()
4812 putnext(pp->readq, mp); in ecpp_peripheral2host()
4813 mutex_enter(&pp->umutex); in ecpp_peripheral2host()
4816 ecpp_error(pp->dip, in ecpp_peripheral2host()
4824 ecpp_error(pp->dip, "ecpp_peripheraltohost: illegal back"); in ecpp_peripheral2host()
4835 ecp_forward2reverse(struct ecppunit *pp) in ecp_forward2reverse() argument
4837 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_forward2reverse()
4838 pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE); in ecp_forward2reverse()
4841 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_forward2reverse()
4844 DCR_WRITE(pp, ECPP_REV_DIR | ECPP_nINIT); in ecp_forward2reverse()
4847 ECR_WRITE(pp, ECR_mode_011 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_forward2reverse()
4852 DCR_WRITE(pp, ECPP_REV_DIR); in ecp_forward2reverse()
4856 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecp_forward2reverse()
4858 ecpp_error(pp->dip, "ecp_forward2reverse ok\n"); in ecp_forward2reverse()
4869 ecp_reverse2forward(struct ecppunit *pp) in ecp_reverse2forward() argument
4871 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_reverse2forward()
4872 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecp_reverse2forward()
4875 DCR_WRITE(pp, ECPP_REV_DIR | ECPP_nINIT); in ecp_reverse2forward()
4881 if (wait_dsr(pp, ECPP_PE, ECPP_PE, 35000) < 0) { in ecp_reverse2forward()
4882 ecpp_error(pp->dip, in ecp_reverse2forward()
4883 "ecp_reverse2forward: failed event 49 %x\n", DSR_READ(pp)); in ecp_reverse2forward()
4884 (void) ecpp_1284_termination(pp); in ecp_reverse2forward()
4889 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_reverse2forward()
4892 DCR_WRITE(pp, ECPP_nINIT); in ecp_reverse2forward()
4895 ECR_WRITE(pp, ECR_mode_011 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_reverse2forward()
4897 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecp_reverse2forward()
4899 ecpp_error(pp->dip, "ecp_reverse2forward ok\n"); in ecp_reverse2forward()
4909 ecpp_default_negotiation(struct ecppunit *pp) in ecpp_default_negotiation() argument
4911 if (!noecp && (ecpp_mode_negotiation(pp, ECPP_ECP_MODE) == SUCCESS)) { in ecpp_default_negotiation()
4913 pp->io_mode = (pp->fast_compat == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4915 } else if (ecpp_mode_negotiation(pp, ECPP_NIBBLE_MODE) == SUCCESS) { in ecpp_default_negotiation()
4917 pp->io_mode = (pp->fast_compat == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4920 pp->io_mode = in ecpp_default_negotiation()
4921 (pp->fast_centronics == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4923 ECPP_CONFIG_MODE(pp); in ecpp_default_negotiation()
4930 ecpp_mode_negotiation(struct ecppunit *pp, uchar_t newmode) in ecpp_mode_negotiation() argument
4933 ASSERT(pp->current_mode == ECPP_CENTRONICS || in ecpp_mode_negotiation()
4934 pp->current_mode == ECPP_COMPAT_MODE || in ecpp_mode_negotiation()
4935 pp->current_mode == ECPP_NIBBLE_MODE || in ecpp_mode_negotiation()
4936 pp->current_mode == ECPP_ECP_MODE || in ecpp_mode_negotiation()
4937 pp->current_mode == ECPP_DIAG_MODE); in ecpp_mode_negotiation()
4939 if (pp->current_mode == newmode) { in ecpp_mode_negotiation()
4944 if ((pp->current_mode == ECPP_ECP_MODE) && in ecpp_mode_negotiation()
4945 (pp->current_phase != ECPP_PHASE_ECP_FWD_IDLE)) { in ecpp_mode_negotiation()
4947 (void) ecp_reverse2forward(pp); in ecpp_mode_negotiation()
4952 (void) ecpp_1284_termination(pp); in ecpp_mode_negotiation()
4955 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_mode_negotiation()
4957 pp->current_mode = ECPP_CENTRONICS; in ecpp_mode_negotiation()
4958 pp->backchannel = ECPP_CENTRONICS; in ecpp_mode_negotiation()
4959 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4961 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4966 if (pp->current_mode == ECPP_NIBBLE_MODE) { in ecpp_mode_negotiation()
4967 if (ecpp_1284_termination(pp) == SUCCESS) { in ecpp_mode_negotiation()
4968 pp->current_mode = ECPP_COMPAT_MODE; in ecpp_mode_negotiation()
4969 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4970 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4971 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4978 if ((nibble_negotiation(pp) == SUCCESS) && in ecpp_mode_negotiation()
4979 (ecpp_1284_termination(pp) == SUCCESS)) { in ecpp_mode_negotiation()
4980 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4981 pp->current_mode = ECPP_COMPAT_MODE; in ecpp_mode_negotiation()
4982 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4983 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4990 if (nibble_negotiation(pp) == FAILURE) { in ecpp_mode_negotiation()
4994 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4995 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4996 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
5001 if (pp->noecpregs) in ecpp_mode_negotiation()
5003 if (ecp_negotiation(pp) == FAILURE) { in ecpp_mode_negotiation()
5010 DCR_WRITE(pp, ECPP_nINIT); in ecpp_mode_negotiation()
5012 if (ecr_write(pp, ECR_mode_011 | in ecpp_mode_negotiation()
5014 ecpp_error(pp->dip, "mode_nego:ECP: failed w/ecr\n"); in ecpp_mode_negotiation()
5018 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
5019 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
5028 (void) ecpp_1284_termination(pp); in ecpp_mode_negotiation()
5031 if (ecr_write(pp, ECR_mode_001 | in ecpp_mode_negotiation()
5033 ecpp_error(pp->dip, "put to TFIFO: failed w/ecr\n"); in ecpp_mode_negotiation()
5037 pp->current_mode = ECPP_DIAG_MODE; in ecpp_mode_negotiation()
5038 pp->backchannel = ECPP_DIAG_MODE; in ecpp_mode_negotiation()
5039 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
5040 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
5045 ecpp_error(pp->dip, in ecpp_mode_negotiation()
5058 ecpp_idle_phase(struct ecppunit *pp) in ecpp_idle_phase() argument
5065 if (!canputnext(pp->readq)) { in ecpp_idle_phase()
5066 ecpp_error(pp->dip, "ecpp_idle_phase: readq full\n"); in ecpp_idle_phase()
5070 switch (pp->backchannel) { in ecpp_idle_phase()
5075 ecpp_error(pp->dip, "ecpp_idle_phase: compat idle\n"); in ecpp_idle_phase()
5083 ecpp_error(pp->dip, "ecpp_idle_phase: nibble backchannel\n"); in ecpp_idle_phase()
5084 if ((pp->current_mode != ECPP_NIBBLE_MODE) && in ecpp_idle_phase()
5085 (ecpp_mode_negotiation(pp, ECPP_NIBBLE_MODE) == FAILURE)) { in ecpp_idle_phase()
5089 rval = read_nibble_backchan(pp); in ecpp_idle_phase()
5092 if (pp->current_phase == ECPP_PHASE_NIBT_NAVAIL && in ecpp_idle_phase()
5093 canputnext(pp->readq)) { in ecpp_idle_phase()
5094 ecpp_error(pp->dip, "ecpp_idle_phase: going revidle\n"); in ecpp_idle_phase()
5100 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX | ECPP_INTR_EN); in ecpp_idle_phase()
5102 ECPP_UNMASK_INTR(pp); in ecpp_idle_phase()
5112 ecpp_error(pp->dip, "ecpp_idle_phase: ECP forward\n"); in ecpp_idle_phase()
5114 ASSERT(pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE || in ecpp_idle_phase()
5115 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecpp_idle_phase()
5118 if ((pp->current_phase == ECPP_PHASE_ECP_REV_IDLE) && in ecpp_idle_phase()
5119 (ecp_reverse2forward(pp) == FAILURE)) { in ecpp_idle_phase()
5127 if ((DSR_READ(pp) & ECPP_nERR) == 0) { in ecpp_idle_phase()
5128 (void) ecpp_backchan_req(pp); in ecpp_idle_phase()
5130 ECR_WRITE(pp, in ecpp_idle_phase()
5131 ECR_READ(pp) & ~ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_idle_phase()
5133 ECPP_UNMASK_INTR(pp); in ecpp_idle_phase()
5139 ecpp_error(pp->dip, "ecpp_idle_phase: illegal backchannel"); in ecpp_idle_phase()
5155 read_nibble_backchan(struct ecppunit *pp) in read_nibble_backchan() argument
5161 ASSERT(pp->current_mode == ECPP_NIBBLE_MODE); in read_nibble_backchan()
5163 pp->current_phase = (DSR_READ(pp) & (ECPP_nERR | ECPP_PE)) in read_nibble_backchan()
5166 ecpp_error(pp->dip, "read_nibble_backchan: %x\n", DSR_READ(pp)); in read_nibble_backchan()
5172 while (pp->current_phase == ECPP_PHASE_NIBT_AVAIL && rval == SUCCESS) { in read_nibble_backchan()
5174 if (!canputnext(pp->readq)) { in read_nibble_backchan()
5175 ecpp_error(pp->dip, in read_nibble_backchan()
5181 ecpp_error(pp->dip, in read_nibble_backchan()
5188 while (i-- && !(DSR_READ(pp) & ECPP_nERR)) { in read_nibble_backchan()
5189 if (nibble_peripheral2host(pp, mp->b_wptr) != SUCCESS) { in read_nibble_backchan()
5196 pp->current_phase = (DSR_READ(pp) & (ECPP_nERR | ECPP_PE)) in read_nibble_backchan()
5201 ecpp_error(pp->dip, in read_nibble_backchan()
5204 pp->nread = 0; in read_nibble_backchan()
5205 mutex_exit(&pp->umutex); in read_nibble_backchan()
5206 putnext(pp->readq, mp); in read_nibble_backchan()
5207 mutex_enter(&pp->umutex); in read_nibble_backchan()
5220 devidnib_negotiation(struct ecppunit *pp) in devidnib_negotiation() argument
5224 if (ecpp_1284_negotiation(pp, in devidnib_negotiation()
5234 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in devidnib_negotiation()
5236 pp->current_phase = ECPP_PHASE_NIBT_NAVAIL; in devidnib_negotiation()
5239 ecpp_error(pp->dip, "ecpp_devidnib_nego: current_phase=%x\n", in devidnib_negotiation()
5240 pp->current_phase); in devidnib_negotiation()
5243 pp->current_mode = ECPP_NIBBLE_MODE; in devidnib_negotiation()
5244 pp->backchannel = ECPP_NIBBLE_MODE; in devidnib_negotiation()
5246 ecpp_error(pp->dip, "ecpp_devidnib_nego: ok\n"); in devidnib_negotiation()
5263 ecpp_getdevid(struct ecppunit *pp, uint8_t *id, int *lenp, int mode) in ecpp_getdevid() argument
5272 if ((pp->current_mode != mode) || (id == NULL)) { in ecpp_getdevid()
5273 if (devidnib_negotiation(pp) == FAILURE) { in ecpp_getdevid()
5278 if (pp->current_phase != ECPP_PHASE_NIBT_AVAIL) { in ecpp_getdevid()
5288 dsr = DSR_READ(pp); in ecpp_getdevid()
5297 (nibble_peripheral2host(pp, &lenhi) == FAILURE) || in ecpp_getdevid()
5299 (nibble_peripheral2host(pp, &lenlo) == FAILURE)) { in ecpp_getdevid()
5300 ecpp_error(pp->dip, in ecpp_getdevid()
5307 ecpp_error(pp->dip, in ecpp_getdevid()
5319 if (nibble_peripheral2host(pp, id++) == FAILURE) in ecpp_getdevid()
5323 dsr = DSR_READ(pp); in ecpp_getdevid()
5325 ecpp_error(pp->dip, in ecpp_getdevid()
5332 (void) ecpp_1284_termination(pp); in ecpp_getdevid()
5353 empty_config_mode(struct ecppunit *pp) in empty_config_mode() argument
5359 empty_mask_intr(struct ecppunit *pp) in empty_mask_intr() argument
5365 x86_getcnt(struct ecppunit *pp) in x86_getcnt() argument
5369 (void) ddi_dmae_getcnt(pp->dip, pp->uh.x86.chn, &count); in x86_getcnt()
5383 pc87332_map_regs(struct ecppunit *pp) in pc87332_map_regs() argument
5385 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->uh.ebus.c_reg, 0, in pc87332_map_regs()
5387 &pp->uh.ebus.c_handle) != DDI_SUCCESS) { in pc87332_map_regs()
5388 ecpp_error(pp->dip, "pc87332_map_regs: failed c_reg\n"); in pc87332_map_regs()
5392 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in pc87332_map_regs()
5393 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in pc87332_map_regs()
5395 ecpp_error(pp->dip, "pc87332_map_regs: failed i_reg\n"); in pc87332_map_regs()
5399 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->f_reg, 0x400, in pc87332_map_regs()
5400 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in pc87332_map_regs()
5402 ecpp_error(pp->dip, "pc87332_map_regs: failed f_reg\n"); in pc87332_map_regs()
5406 if (ddi_regs_map_setup(pp->dip, 2, (caddr_t *)&pp->uh.ebus.dmac, 0, in pc87332_map_regs()
5408 &pp->uh.ebus.d_handle) != DDI_SUCCESS) { in pc87332_map_regs()
5409 ecpp_error(pp->dip, "pc87332_map_regs: failed dmac\n"); in pc87332_map_regs()
5416 pc87332_unmap_regs(pp); in pc87332_map_regs()
5421 pc87332_unmap_regs(struct ecppunit *pp) in pc87332_unmap_regs() argument
5423 if (pp->uh.ebus.c_handle) { in pc87332_unmap_regs()
5424 ddi_regs_map_free(&pp->uh.ebus.c_handle); in pc87332_unmap_regs()
5426 if (pp->uh.ebus.d_handle) { in pc87332_unmap_regs()
5427 ddi_regs_map_free(&pp->uh.ebus.d_handle); in pc87332_unmap_regs()
5429 if (pp->i_handle) { in pc87332_unmap_regs()
5430 ddi_regs_map_free(&pp->i_handle); in pc87332_unmap_regs()
5432 if (pp->f_handle) { in pc87332_unmap_regs()
5433 ddi_regs_map_free(&pp->f_handle); in pc87332_unmap_regs()
5438 pc87332_read_config_reg(struct ecppunit *pp, uint8_t reg_num) in pc87332_read_config_reg() argument
5442 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->index, reg_num); in pc87332_read_config_reg()
5443 retval = PP_GETB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data); in pc87332_read_config_reg()
5449 pc87332_write_config_reg(struct ecppunit *pp, uint8_t reg_num, uint8_t val) in pc87332_write_config_reg() argument
5451 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->index, reg_num); in pc87332_write_config_reg()
5452 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data, val); in pc87332_write_config_reg()
5460 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data, val); in pc87332_write_config_reg()
5464 pc87332_config_chip(struct ecppunit *pp) in pc87332_config_chip() argument
5468 pp->current_phase = ECPP_PHASE_INIT; in pc87332_config_chip()
5471 pmc = pc87332_read_config_reg(pp, PMC); in pc87332_config_chip()
5473 pc87332_write_config_reg(pp, PMC, in pc87332_config_chip()
5481 fcr = pc87332_read_config_reg(pp, FCR); in pc87332_config_chip()
5483 pc87332_write_config_reg(pp, FCR, in pc87332_config_chip()
5493 if (dcr_write(pp, ECPP_DCR_SET | ECPP_nINIT) == FAILURE) { in pc87332_config_chip()
5494 ecpp_error(pp->dip, "ecpp_config_87332: DCR config\n"); in pc87332_config_chip()
5498 pc87332_write_config_reg(pp, PCR, in pc87332_config_chip()
5502 if (ecr_write(pp, ECR_mode_001 | in pc87332_config_chip()
5504 ecpp_error(pp->dip, "ecpp_config_87332: ECR\n"); in pc87332_config_chip()
5507 if (dcr_write(pp, ECPP_DCR_SET | ECPP_SLCTIN | ECPP_nINIT) == FAILURE) { in pc87332_config_chip()
5508 ecpp_error(pp->dip, "ecpp_config_87332: w/DCR failed2.\n"); in pc87332_config_chip()
5513 pp->current_mode = ECPP_CENTRONICS; in pc87332_config_chip()
5516 pp->current_phase = ECPP_PHASE_C_IDLE; in pc87332_config_chip()
5526 pc87332_config_mode(struct ecppunit *pp) in pc87332_config_mode() argument
5528 if (COMPAT_PIO(pp)) { in pc87332_config_mode()
5529 pc87332_write_config_reg(pp, PCR, 0x04); in pc87332_config_mode()
5531 pc87332_write_config_reg(pp, PCR, 0x14); in pc87332_config_mode()
5536 pc97317_map_regs(struct ecppunit *pp) in pc97317_map_regs() argument
5538 if (pc87332_map_regs(pp) != SUCCESS) { in pc97317_map_regs()
5542 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->uh.ebus.c2_reg, in pc97317_map_regs()
5544 &pp->uh.ebus.c2_handle) != DDI_SUCCESS) { in pc97317_map_regs()
5545 ecpp_error(pp->dip, "pc97317_map_regs: failed c2_reg\n"); in pc97317_map_regs()
5546 pc87332_unmap_regs(pp); in pc97317_map_regs()
5554 pc97317_unmap_regs(struct ecppunit *pp) in pc97317_unmap_regs() argument
5556 if (pp->uh.ebus.c2_handle) { in pc97317_unmap_regs()
5557 ddi_regs_map_free(&pp->uh.ebus.c2_handle); in pc97317_unmap_regs()
5560 pc87332_unmap_regs(pp); in pc97317_unmap_regs()
5570 pc97317_config_chip(struct ecppunit *pp) in pc97317_config_chip() argument
5575 pc87332_write_config_reg(pp, PC97317_CONFIG_DEV_NO, 0x4); in pc97317_config_chip()
5578 PP_PUTB(pp->uh.ebus.c2_handle, in pc97317_config_chip()
5579 &pp->uh.ebus.c2_reg->eir, PC97317_CONFIG2_CONTROL2); in pc97317_config_chip()
5580 PP_PUTB(pp->uh.ebus.c2_handle, &pp->uh.ebus.c2_reg->edr, 0x80); in pc97317_config_chip()
5583 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x00); in pc97317_config_chip()
5586 pc87332_write_config_reg(pp, PC97317_CONFIG_PP_CONFIG, 0xf2); in pc97317_config_chip()
5588 if (dcr_write(pp, ECPP_SLCTIN | ECPP_nINIT) == FAILURE) { in pc97317_config_chip()
5589 ecpp_error(pp->dip, "pc97317_config_chip: failed w/DCR\n"); in pc97317_config_chip()
5592 if (ecr_write(pp, ECR_mode_001 | in pc97317_config_chip()
5594 ecpp_error(pp->dip, "pc97317_config_chip: failed w/ECR\n"); in pc97317_config_chip()
5598 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DEV_NO); in pc97317_config_chip()
5599 ecpp_error(pp->dip, "97317:conreg7(logical dev)=%x\n", conreg); in pc97317_config_chip()
5601 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_BASE_ADDR_MSB); in pc97317_config_chip()
5602 ecpp_error(pp->dip, "97317:conreg60(addrHi)=%x\n", conreg); in pc97317_config_chip()
5604 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_BASE_ADDR_LSB); in pc97317_config_chip()
5605 ecpp_error(pp->dip, "97317:conreg61(addrLo)=%x\n", conreg); in pc97317_config_chip()
5607 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_INTR_SEL); in pc97317_config_chip()
5608 ecpp_error(pp->dip, "97317:conreg70(IRQL)=%x\n", conreg); in pc97317_config_chip()
5610 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_INTR_TYPE); in pc97317_config_chip()
5611 ecpp_error(pp->dip, "97317:conreg71(intr type)=%x\n", conreg); in pc97317_config_chip()
5613 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_ACTIVATE); in pc97317_config_chip()
5614 ecpp_error(pp->dip, "97317:conreg30(Active)=%x\n", conreg); in pc97317_config_chip()
5616 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_IO_RANGE); in pc97317_config_chip()
5617 ecpp_error(pp->dip, "97317:conreg31(IO Range Check)=%x\n", conreg); in pc97317_config_chip()
5619 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DMA0_CHAN); in pc97317_config_chip()
5620 ecpp_error(pp->dip, "97317:conreg74(DMA0 Chan)=%x\n", conreg); in pc97317_config_chip()
5621 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DMA1_CHAN); in pc97317_config_chip()
5622 ecpp_error(pp->dip, "97317:conreg75(DMA1 Chan)=%x\n", conreg); in pc97317_config_chip()
5624 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_PP_CONFIG); in pc97317_config_chip()
5625 ecpp_error(pp->dip, "97317:conregF0(pport conf)=%x\n", conreg); in pc97317_config_chip()
5627 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_PP_CONFIG); in pc97317_config_chip()
5628 ecpp_error(pp->dip, "97317:conregF0(pport conf)=%x\n", conreg); in pc97317_config_chip()
5639 pc97317_config_mode(struct ecppunit *pp) in pc97317_config_mode() argument
5642 pc87332_write_config_reg(pp, PC97317_CONFIG_DEV_NO, 0x4); in pc97317_config_mode()
5644 if (COMPAT_PIO(pp) || pp->current_mode == ECPP_NIBBLE_MODE) { in pc97317_config_mode()
5645 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x02); in pc97317_config_mode()
5647 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x00); in pc97317_config_mode()
5652 cheerio_mask_intr(struct ecppunit *pp) in cheerio_mask_intr() argument
5655 AND_SET_LONG_R(pp->uh.ebus.d_handle, in cheerio_mask_intr()
5656 &pp->uh.ebus.dmac->csr, ~DCSR_INT_EN); in cheerio_mask_intr()
5660 cheerio_unmask_intr(struct ecppunit *pp) in cheerio_unmask_intr() argument
5663 OR_SET_LONG_R(pp->uh.ebus.d_handle, in cheerio_unmask_intr()
5664 &pp->uh.ebus.dmac->csr, DCSR_INT_EN | DCSR_TCI_DIS); in cheerio_unmask_intr()
5668 cheerio_dma_start(struct ecppunit *pp) in cheerio_dma_start() argument
5670 cheerio_reset_dcsr(pp); in cheerio_dma_start()
5671 SET_DMAC_BCR(pp, pp->dma_cookie.dmac_size); in cheerio_dma_start()
5672 SET_DMAC_ACR(pp, pp->dma_cookie.dmac_address); in cheerio_dma_start()
5674 if (pp->dma_dir == DDI_DMA_READ) { in cheerio_dma_start()
5675 SET_DMAC_CSR(pp, DCSR_INT_EN | DCSR_EN_CNT | DCSR_EN_DMA | in cheerio_dma_start()
5678 SET_DMAC_CSR(pp, DCSR_INT_EN | DCSR_EN_CNT | DCSR_EN_DMA | in cheerio_dma_start()
5689 cheerio_dma_stop(struct ecppunit *pp, size_t *countp) in cheerio_dma_stop() argument
5694 AND_SET_LONG_R(pp->uh.ebus.d_handle, &pp->uh.ebus.dmac->csr, in cheerio_dma_stop()
5698 OR_SET_LONG_R(pp->uh.ebus.d_handle, &pp->uh.ebus.dmac->csr, in cheerio_dma_stop()
5703 *countp = cheerio_getcnt(pp); in cheerio_dma_stop()
5706 cheerio_reset_dcsr(pp); in cheerio_dma_stop()
5707 SET_DMAC_BCR(pp, 0); in cheerio_dma_stop()
5710 ecr = ECR_READ(pp); in cheerio_dma_stop()
5711 if (ecr_write(pp, ecr & ~ECPP_DMA_ENABLE) == FAILURE) { in cheerio_dma_stop()
5716 ecr = ECR_READ(pp); in cheerio_dma_stop()
5718 return (ecr_write(pp, ecr | ECPP_INTR_SRV)); in cheerio_dma_stop()
5722 cheerio_getcnt(struct ecppunit *pp) in cheerio_getcnt() argument
5724 return (GET_DMAC_BCR(pp)); in cheerio_getcnt()
5733 cheerio_reset_dcsr(struct ecppunit *pp) in cheerio_reset_dcsr() argument
5737 SET_DMAC_CSR(pp, DCSR_RESET); in cheerio_reset_dcsr()
5739 while (GET_DMAC_CSR(pp) & DCSR_CYC_PEND) { in cheerio_reset_dcsr()
5741 ecpp_error(pp->dip, "cheerio_reset_dcsr: timeout\n"); in cheerio_reset_dcsr()
5749 SET_DMAC_CSR(pp, 0); in cheerio_reset_dcsr()
5762 m1553_map_regs(struct ecppunit *pp) in m1553_map_regs() argument
5764 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->uh.m1553.isa_space, in m1553_map_regs()
5766 &pp->uh.m1553.d_handle) != DDI_SUCCESS) { in m1553_map_regs()
5767 ecpp_error(pp->dip, "m1553_map_regs: failed isa space\n"); in m1553_map_regs()
5771 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in m1553_map_regs()
5772 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in m1553_map_regs()
5774 ecpp_error(pp->dip, "m1553_map_regs: failed i_reg\n"); in m1553_map_regs()
5778 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->f_reg, 0x400, in m1553_map_regs()
5779 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in m1553_map_regs()
5781 ecpp_error(pp->dip, "m1553_map_regs: failed f_reg\n"); in m1553_map_regs()
5788 m1553_unmap_regs(pp); in m1553_map_regs()
5793 m1553_unmap_regs(struct ecppunit *pp) in m1553_unmap_regs() argument
5795 if (pp->uh.m1553.d_handle) { in m1553_unmap_regs()
5796 ddi_regs_map_free(&pp->uh.m1553.d_handle); in m1553_unmap_regs()
5798 if (pp->i_handle) { in m1553_unmap_regs()
5799 ddi_regs_map_free(&pp->i_handle); in m1553_unmap_regs()
5801 if (pp->f_handle) { in m1553_unmap_regs()
5802 ddi_regs_map_free(&pp->f_handle); in m1553_unmap_regs()
5808 x86_map_regs(struct ecppunit *pp) in x86_map_regs() argument
5812 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in x86_map_regs()
5813 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in x86_map_regs()
5815 ecpp_error(pp->dip, "x86_map_regs: failed i_reg\n"); in x86_map_regs()
5818 if (ddi_dev_nregs(pp->dip, &nregs) == DDI_SUCCESS && nregs == 2) { in x86_map_regs()
5819 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->f_reg, 0, in x86_map_regs()
5820 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in x86_map_regs()
5822 ecpp_error(pp->dip, "x86_map_regs: failed f_reg\n"); in x86_map_regs()
5825 pp->noecpregs = FALSE; in x86_map_regs()
5827 pp->noecpregs = TRUE; in x86_map_regs()
5831 x86_unmap_regs(pp); in x86_map_regs()
5836 x86_unmap_regs(struct ecppunit *pp) in x86_unmap_regs() argument
5838 if (pp->i_handle) { in x86_unmap_regs()
5839 ddi_regs_map_free(&pp->i_handle); in x86_unmap_regs()
5841 if (pp->f_handle) { in x86_unmap_regs()
5842 ddi_regs_map_free(&pp->f_handle); in x86_unmap_regs()
5848 m1553_read_config_reg(struct ecppunit *pp, uint8_t reg_num) in m1553_read_config_reg() argument
5852 dma8237_write(pp, 0x3F0, reg_num); in m1553_read_config_reg()
5853 retval = dma8237_read(pp, 0x3F1); in m1553_read_config_reg()
5859 m1553_write_config_reg(struct ecppunit *pp, uint8_t reg_num, uint8_t val) in m1553_write_config_reg() argument
5861 dma8237_write(pp, 0x3F0, reg_num); in m1553_write_config_reg()
5862 dma8237_write(pp, 0x3F1, val); in m1553_write_config_reg()
5866 m1553_config_chip(struct ecppunit *pp) in m1553_config_chip() argument
5871 dma8237_write(pp, 0x3F0, 0x51); in m1553_config_chip()
5872 dma8237_write(pp, 0x3F0, 0x23); in m1553_config_chip()
5874 m1553_write_config_reg(pp, PnP_CONFIG_DEV_NO, 0x3); in m1553_config_chip()
5875 conreg = m1553_read_config_reg(pp, PnP_CONFIG_DEV_NO); in m1553_config_chip()
5876 ecpp_error(pp->dip, "M1553:conreg7(logical dev)=%x\n", conreg); in m1553_config_chip()
5878 conreg = m1553_read_config_reg(pp, PnP_CONFIG_ACTIVATE); in m1553_config_chip()
5879 ecpp_error(pp->dip, "M1553:conreg30(Active)=%x\n", conreg); in m1553_config_chip()
5881 conreg = m1553_read_config_reg(pp, PnP_CONFIG_BASE_ADDR_MSB); in m1553_config_chip()
5882 ecpp_error(pp->dip, "M1553:conreg60(addrHi)=%x\n", conreg); in m1553_config_chip()
5883 conreg = m1553_read_config_reg(pp, PnP_CONFIG_BASE_ADDR_LSB); in m1553_config_chip()
5884 ecpp_error(pp->dip, "M1553:conreg61(addrLo)=%x\n", conreg); in m1553_config_chip()
5886 conreg = m1553_read_config_reg(pp, PnP_CONFIG_INTR_SEL); in m1553_config_chip()
5887 ecpp_error(pp->dip, "M1553:conreg70(IRQL)=%x\n", conreg); in m1553_config_chip()
5889 conreg = m1553_read_config_reg(pp, PnP_CONFIG_DMA0_CHAN); in m1553_config_chip()
5890 ecpp_error(pp->dip, "M1553:conreg74(DMA0 Chan)=%x\n", conreg); in m1553_config_chip()
5893 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG0); in m1553_config_chip()
5895 m1553_write_config_reg(pp, PnP_CONFIG_PP_CONFIG0, conreg); in m1553_config_chip()
5896 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG0); in m1553_config_chip()
5897 ecpp_error(pp->dip, "M1553:conregF0(pport conf)=%x\n", conreg); in m1553_config_chip()
5899 m1553_write_config_reg(pp, PnP_CONFIG_PP_CONFIG1, 0x04); in m1553_config_chip()
5900 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG1); in m1553_config_chip()
5901 ecpp_error(pp->dip, "M1553:conregF1(outconf)=%x\n", conreg); in m1553_config_chip()
5904 dma8237_write(pp, 0x3F0, 0xBB); in m1553_config_chip()
5907 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in m1553_config_chip()
5908 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in m1553_config_chip()
5910 ecpp_error(pp->dip, "m1553_config_chip: ecr=%x, dsr=%x, dcr=%x\n", in m1553_config_chip()
5911 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in m1553_config_chip()
5918 x86_config_chip(struct ecppunit *pp) in x86_config_chip() argument
5920 if (ecr_write(pp, ECR_mode_001 | in x86_config_chip()
5922 ecpp_error(pp->dip, "config chip: failed w/ecr\n"); in x86_config_chip()
5923 pp->noecpregs = TRUE; in x86_config_chip()
5925 if (pp->noecpregs) in x86_config_chip()
5926 pp->fast_compat = FALSE; in x86_config_chip()
5927 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in x86_config_chip()
5928 ecpp_error(pp->dip, "x86_config_chip: ecr=%x, dsr=%x, dcr=%x\n", in x86_config_chip()
5929 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in x86_config_chip()
5940 dma8237_dma_start(struct ecppunit *pp) in dma8237_dma_start() argument
5944 chn = pp->uh.m1553.chn; in dma8237_dma_start()
5947 pp->dma_cookie.dmac_size != 0 && in dma8237_dma_start()
5948 pp->dma_cookie.dmac_address != 0); in dma8237_dma_start()
5953 dma8237_write(pp, DMAC2_MODE, DMAMODE_CASC); in dma8237_dma_start()
5954 if (pp->dma_dir == DDI_DMA_READ) { in dma8237_dma_start()
5955 dma8237_write(pp, DMAC1_MODE, DMAMODE_SINGLE | in dma8237_dma_start()
5958 dma8237_write(pp, DMAC1_MODE, DMAMODE_SINGLE | in dma8237_dma_start()
5962 dma8237_write_addr(pp, pp->dma_cookie.dmac_address); in dma8237_dma_start()
5963 dma8237_write_count(pp, pp->dma_cookie.dmac_size - 1); in dma8237_dma_start()
5980 dma8237_write(pp, DMAC1_ALLMASK, ~(1 << chn)); in dma8237_dma_start()
5981 pp->uh.m1553.isadma_entered = 1; in dma8237_dma_start()
5987 dma8237_dma_stop(struct ecppunit *pp, size_t *countp) in dma8237_dma_stop() argument
5992 ecr = (ECR_READ(pp) & 0xe0) | ECPP_INTR_MASK | ECPP_INTR_SRV; in dma8237_dma_stop()
5993 (void) ecr_write(pp, ecr); in dma8237_dma_stop()
5995 if (pp->uh.m1553.isadma_entered) { in dma8237_dma_stop()
5997 dma8237_write(pp, DMAC1_ALLMASK, 0); in dma8237_dma_stop()
5998 pp->uh.m1553.isadma_entered = 0; in dma8237_dma_stop()
6004 *countp = dma8237_getcnt(pp); in dma8237_dma_stop()
6005 if (pp->dma_dir == DDI_DMA_READ && *countp > 0) { in dma8237_dma_stop()
6013 x86_dma_start(struct ecppunit *pp) in x86_dma_start() argument
6018 chn = pp->uh.x86.chn; in x86_dma_start()
6020 pp->dma_cookie.dmac_size != 0 && in x86_dma_start()
6021 pp->dma_cookie.dmac_address != 0); in x86_dma_start()
6024 (pp->dma_dir & DDI_DMA_READ) ? DMAE_CMD_READ : DMAE_CMD_WRITE; in x86_dma_start()
6025 if (ddi_dmae_prog(pp->dip, &dmaereq, &pp->dma_cookie, chn) in x86_dma_start()
6027 ecpp_error(pp->dip, "prog failed !!!\n"); in x86_dma_start()
6028 ecpp_error(pp->dip, "dma_started..\n"); in x86_dma_start()
6033 x86_dma_stop(struct ecppunit *pp, size_t *countp) in x86_dma_stop() argument
6038 if (pp->uh.x86.chn == 0xff) in x86_dma_stop()
6040 ecr = (ECR_READ(pp) & 0xe0) | ECPP_INTR_MASK | ECPP_INTR_SRV; in x86_dma_stop()
6041 (void) ecr_write(pp, ecr); in x86_dma_stop()
6042 ecpp_error(pp->dip, "dma_stop\n"); in x86_dma_stop()
6046 *countp = x86_getcnt(pp); in x86_dma_stop()
6048 ecpp_error(pp->dip, "dma_stopped..\n"); in x86_dma_stop()
6055 dma8237_write_addr(struct ecppunit *pp, uint32_t addr) in dma8237_write_addr() argument
6060 switch (pp->uh.m1553.chn) { in dma8237_write_addr()
6089 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_addr]; in dma8237_write_addr()
6090 ddi_put16(pp->uh.m1553.d_handle, p, addr & 0xFFFF); in dma8237_write_addr()
6092 dma8237_write(pp, c_lpage, (addr & 0xFF0000) >> 16); in dma8237_write_addr()
6093 dma8237_write(pp, c_hpage, (addr & 0xFF000000) >> 24); in dma8237_write_addr()
6103 dma8237_read_addr(struct ecppunit *pp) in dma8237_read_addr() argument
6111 switch (pp->uh.m1553.chn) { in dma8237_read_addr()
6140 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_addr]; in dma8237_read_addr()
6141 rval16 = ddi_get16(pp->uh.m1553.d_handle, p); in dma8237_read_addr()
6143 rval3 = dma8237_read(pp, c_lpage); in dma8237_read_addr()
6144 rval4 = dma8237_read(pp, c_hpage); in dma8237_read_addr()
6153 dma8237_write_count(struct ecppunit *pp, uint32_t count) in dma8237_write_count() argument
6158 switch (pp->uh.m1553.chn) { in dma8237_write_count()
6179 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_wcnt]; in dma8237_write_count()
6180 ddi_put16(pp->uh.m1553.d_handle, p, count & 0xFFFF); in dma8237_write_count()
6185 dma8237_read_count(struct ecppunit *pp) in dma8237_read_count() argument
6190 switch (pp->uh.m1553.chn) { in dma8237_read_count()
6211 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_wcnt]; in dma8237_read_count()
6212 return (ddi_get16(pp->uh.m1553.d_handle, p)); in dma8237_read_count()
6217 dma8237_write(struct ecppunit *pp, int reg_num, uint8_t val) in dma8237_write() argument
6219 ddi_put8(pp->uh.m1553.d_handle, in dma8237_write()
6220 &pp->uh.m1553.isa_space->isa_reg[reg_num], val); in dma8237_write()
6224 dma8237_read(struct ecppunit *pp, int reg_num) in dma8237_read() argument
6226 return (ddi_get8(pp->uh.m1553.d_handle, in dma8237_read()
6227 &pp->uh.m1553.isa_space->isa_reg[reg_num])); in dma8237_read()
6231 dma8237_getcnt(struct ecppunit *pp) in dma8237_getcnt() argument
6235 if ((cnt = dma8237_read_count(pp)) == 0xffff) in dma8237_getcnt()
6249 ecpp_kstat_init(struct ecppunit *pp) in ecpp_kstat_init() argument
6257 (void) sprintf(buf, "ecppc%d", pp->instance); in ecpp_kstat_init()
6258 pp->intrstats = kstat_create("ecpp", pp->instance, buf, "controller", in ecpp_kstat_init()
6260 if (pp->intrstats == NULL) { in ecpp_kstat_init()
6261 ecpp_error(pp->dip, "ecpp_kstat_init:1: kstat_create failed"); in ecpp_kstat_init()
6263 pp->intrstats->ks_update = ecpp_kstatintr_update; in ecpp_kstat_init()
6264 pp->intrstats->ks_private = (void *) pp; in ecpp_kstat_init()
6265 kstat_install(pp->intrstats); in ecpp_kstat_init()
6271 pp->ksp = kstat_create("ecpp", pp->instance, NULL, "misc", in ecpp_kstat_init()
6275 if (pp->ksp == NULL) { in ecpp_kstat_init()
6276 ecpp_error(pp->dip, "ecpp_kstat_init:2: kstat_create failed"); in ecpp_kstat_init()
6280 ekp = (struct ecppkstat *)pp->ksp->ks_data; in ecpp_kstat_init()
6308 pp->ksp->ks_update = ecpp_kstat_update; in ecpp_kstat_init()
6309 pp->ksp->ks_private = (void *) pp; in ecpp_kstat_init()
6310 kstat_install(pp->ksp); in ecpp_kstat_init()
6316 struct ecppunit *pp; in ecpp_kstat_update() local
6327 pp = (struct ecppunit *)ksp->ks_private; in ecpp_kstat_update()
6330 mutex_enter(&pp->umutex); in ecpp_kstat_update()
6332 ekp->ek_ctx_obytes.value.ui32 = pp->obytes[ECPP_CENTRONICS] + in ecpp_kstat_update()
6333 pp->obytes[ECPP_COMPAT_MODE]; in ecpp_kstat_update()
6334 ekp->ek_ctxpio_obytes.value.ui32 = pp->ctxpio_obytes; in ecpp_kstat_update()
6335 ekp->ek_nib_ibytes.value.ui32 = pp->ibytes[ECPP_NIBBLE_MODE]; in ecpp_kstat_update()
6336 ekp->ek_ecp_obytes.value.ui32 = pp->obytes[ECPP_ECP_MODE]; in ecpp_kstat_update()
6337 ekp->ek_ecp_ibytes.value.ui32 = pp->ibytes[ECPP_ECP_MODE]; in ecpp_kstat_update()
6338 ekp->ek_epp_obytes.value.ui32 = pp->obytes[ECPP_EPP_MODE]; in ecpp_kstat_update()
6339 ekp->ek_epp_ibytes.value.ui32 = pp->ibytes[ECPP_EPP_MODE]; in ecpp_kstat_update()
6340 ekp->ek_diag_obytes.value.ui32 = pp->obytes[ECPP_DIAG_MODE]; in ecpp_kstat_update()
6341 ekp->ek_to_ctx.value.ui32 = pp->to_mode[ECPP_CENTRONICS] + in ecpp_kstat_update()
6342 pp->to_mode[ECPP_COMPAT_MODE]; in ecpp_kstat_update()
6343 ekp->ek_to_nib.value.ui32 = pp->to_mode[ECPP_NIBBLE_MODE]; in ecpp_kstat_update()
6344 ekp->ek_to_ecp.value.ui32 = pp->to_mode[ECPP_ECP_MODE]; in ecpp_kstat_update()
6345 ekp->ek_to_epp.value.ui32 = pp->to_mode[ECPP_EPP_MODE]; in ecpp_kstat_update()
6346 ekp->ek_to_diag.value.ui32 = pp->to_mode[ECPP_DIAG_MODE]; in ecpp_kstat_update()
6347 ekp->ek_xfer_tout.value.ui32 = pp->xfer_tout; in ecpp_kstat_update()
6348 ekp->ek_ctx_cf.value.ui32 = pp->ctx_cf; in ecpp_kstat_update()
6349 ekp->ek_joblen.value.ui32 = pp->joblen; in ecpp_kstat_update()
6350 ekp->ek_isr_reattempt_high.value.ui32 = pp->isr_reattempt_high; in ecpp_kstat_update()
6351 ekp->ek_mode.value.ui32 = pp->current_mode; in ecpp_kstat_update()
6352 ekp->ek_phase.value.ui32 = pp->current_phase; in ecpp_kstat_update()
6353 ekp->ek_backchan.value.ui32 = pp->backchannel; in ecpp_kstat_update()
6354 ekp->ek_iomode.value.ui32 = pp->io_mode; in ecpp_kstat_update()
6355 ekp->ek_state.value.ui32 = pp->e_busy; in ecpp_kstat_update()
6357 mutex_exit(&pp->umutex); in ecpp_kstat_update()
6365 struct ecppunit *pp; in ecpp_kstatintr_update() local
6375 pp = (struct ecppunit *)ksp->ks_private; in ecpp_kstatintr_update()
6377 mutex_enter(&pp->umutex); in ecpp_kstatintr_update()
6379 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_HARD] = pp->intr_hard; in ecpp_kstatintr_update()
6380 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_SPURIOUS] = pp->intr_spurious; in ecpp_kstatintr_update()
6381 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_SOFT] = pp->intr_soft; in ecpp_kstatintr_update()
6383 mutex_exit(&pp->umutex); in ecpp_kstatintr_update()