Lines matching refs:pp (references to struct ecppunit *pp)

271 static int	m1553_map_regs(struct ecppunit *pp);
272 static void m1553_unmap_regs(struct ecppunit *pp);
312 static uchar_t nibble_peripheral2host(struct ecppunit *pp, uint8_t *);
592 struct ecppunit *pp; in ecpp_attach() local
602 if (!(pp = ddi_get_soft_state(ecppsoft_statep, instance))) { in ecpp_attach()
606 mutex_enter(&pp->umutex); in ecpp_attach()
608 pp->suspended = FALSE; in ecpp_attach()
613 (void) ECPP_CONFIG_CHIP(pp); in ecpp_attach()
614 (void) ecpp_reset_port_regs(pp); in ecpp_attach()
616 if (pp->oflag == TRUE) { in ecpp_attach()
617 int current_mode = pp->current_mode; in ecpp_attach()
619 (void) ecpp_1284_termination(pp); in ecpp_attach()
620 (void) ecpp_mode_negotiation(pp, current_mode); in ecpp_attach()
623 mutex_exit(&pp->umutex); in ecpp_attach()
636 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_attach()
638 pp->dip = dip; in ecpp_attach()
639 pp->suspended = FALSE; in ecpp_attach()
644 hw_bind = ecpp_determine_sio_type(pp); in ecpp_attach()
650 pp->hw = hw_bind->hw; in ecpp_attach()
651 ecpp_error(pp->dip, "SuperIO type: %s\n", hw_bind->info); in ecpp_attach()
657 if (ECPP_MAP_REGS(pp) != SUCCESS) { in ecpp_attach()
661 if (ddi_dma_alloc_handle(dip, pp->hw->attr, DDI_DMA_DONTWAIT, in ecpp_attach()
662 NULL, &pp->dma_handle) != DDI_SUCCESS) { in ecpp_attach()
668 &pp->ecpp_trap_cookie) != DDI_SUCCESS) { in ecpp_attach()
673 mutex_init(&pp->umutex, NULL, MUTEX_DRIVER, in ecpp_attach()
674 (void *)pp->ecpp_trap_cookie); in ecpp_attach()
676 cv_init(&pp->pport_cv, NULL, CV_DRIVER, NULL); in ecpp_attach()
678 if (ddi_add_intr(dip, 0, &pp->ecpp_trap_cookie, NULL, ecpp_isr, in ecpp_attach()
679 (caddr_t)pp) != DDI_SUCCESS) { in ecpp_attach()
685 &pp->softintr_id, 0, 0, ecpp_softintr, in ecpp_attach()
686 (caddr_t)pp) != DDI_SUCCESS) { in ecpp_attach()
699 pp->ioblock = (caddr_t)kmem_alloc(IO_BLOCK_SZ, KM_SLEEP); in ecpp_attach()
700 if (pp->ioblock == NULL) { in ecpp_attach()
704 ecpp_error(pp->dip, "ecpp_attach: ioblock=0x%x\n", pp->ioblock); in ecpp_attach()
707 ecpp_get_props(pp); in ecpp_attach()
709 if (pp->hw == &x86 && pp->uh.x86.chn != 0xff) { in ecpp_attach()
710 if (ddi_dmae_alloc(dip, pp->uh.x86.chn, in ecpp_attach()
712 ecpp_error(pp->dip, "dmae_alloc success!\n"); in ecpp_attach()
715 if (ECPP_CONFIG_CHIP(pp) == FAILURE) { in ecpp_attach()
716 ecpp_error(pp->dip, "config_chip failed.\n"); in ecpp_attach()
720 ecpp_kstat_init(pp); in ecpp_attach()
728 kmem_free(pp->ioblock, IO_BLOCK_SZ); in ecpp_attach()
732 ddi_remove_softintr(pp->softintr_id); in ecpp_attach()
734 ddi_remove_intr(dip, (uint_t)0, pp->ecpp_trap_cookie); in ecpp_attach()
736 mutex_destroy(&pp->umutex); in ecpp_attach()
737 cv_destroy(&pp->pport_cv); in ecpp_attach()
739 ddi_dma_free_handle(&pp->dma_handle); in ecpp_attach()
741 ECPP_UNMAP_REGS(pp); in ecpp_attach()
755 struct ecppunit *pp; in ecpp_detach() local
764 if (!(pp = ddi_get_soft_state(ecppsoft_statep, instance))) { in ecpp_detach()
768 mutex_enter(&pp->umutex); in ecpp_detach()
769 ASSERT(pp->suspended == FALSE); in ecpp_detach()
771 pp->suspended = TRUE; /* prevent new transfers */ in ecpp_detach()
776 if ((pp->e_busy == ECPP_BUSY) || (pp->e_busy == ECPP_FLUSH)) { in ecpp_detach()
777 (void) cv_reltimedwait(&pp->pport_cv, &pp->umutex, in ecpp_detach()
780 if ((pp->e_busy == ECPP_BUSY) || in ecpp_detach()
781 (pp->e_busy == ECPP_FLUSH)) { in ecpp_detach()
782 pp->suspended = FALSE; in ecpp_detach()
783 mutex_exit(&pp->umutex); in ecpp_detach()
784 ecpp_error(pp->dip, in ecpp_detach()
790 mutex_exit(&pp->umutex); in ecpp_detach()
797 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_detach()
799 if (pp->hw == &x86 && pp->uh.x86.chn != 0xff) in ecpp_detach()
800 (void) ddi_dmae_release(pp->dip, pp->uh.x86.chn); in ecpp_detach()
802 if (pp->dma_handle != NULL) in ecpp_detach()
803 ddi_dma_free_handle(&pp->dma_handle); in ecpp_detach()
807 ddi_remove_softintr(pp->softintr_id); in ecpp_detach()
809 ddi_remove_intr(dip, (uint_t)0, pp->ecpp_trap_cookie); in ecpp_detach()
811 if (pp->ksp) { in ecpp_detach()
812 kstat_delete(pp->ksp); in ecpp_detach()
814 if (pp->intrstats) { in ecpp_detach()
815 kstat_delete(pp->intrstats); in ecpp_detach()
818 cv_destroy(&pp->pport_cv); in ecpp_detach()
820 mutex_destroy(&pp->umutex); in ecpp_detach()
822 ECPP_UNMAP_REGS(pp); in ecpp_detach()
824 kmem_free(pp->ioblock, IO_BLOCK_SZ); in ecpp_detach()
841 ecpp_get_props(struct ecppunit *pp) in ecpp_get_props() argument
857 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
859 pp->fast_centronics = in ecpp_get_props()
863 pp->fast_centronics = FALSE; in ecpp_get_props()
876 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
878 pp->fast_compat = (strcmp(prop, "true") == 0) ? TRUE : FALSE; in ecpp_get_props()
881 pp->fast_compat = TRUE; in ecpp_get_props()
891 if (ddi_prop_lookup_string(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
893 pp->init_seq = (strcmp(prop, "true") == 0) ? TRUE : FALSE; in ecpp_get_props()
896 pp->init_seq = FALSE; in ecpp_get_props()
905 pp->wsrv_retry = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
914 pp->wait_for_busy = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
921 pp->data_setup_time = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
928 pp->strobe_pulse_width = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
936 pp->dip, 0, "ecpp-transfer-timeout", ecpp_def_timeout); in ecpp_get_props()
938 pp->xfer_parms = default_xfer_parms; in ecpp_get_props()
943 if (pp->hw == &m1553) { in ecpp_get_props()
944 pp->uh.m1553.chn = ddi_prop_get_int(DDI_DEV_T_ANY, in ecpp_get_props()
945 pp->dip, 0, "dma-channel", 0x1); in ecpp_get_props()
946 ecpp_error(pp->dip, "ecpp_get_prop:chn=%x\n", pp->uh.m1553.chn); in ecpp_get_props()
951 if (pp->hw == &x86) { in ecpp_get_props()
952 if (ddi_prop_op(DDI_DEV_T_ANY, pp->dip, PROP_LEN_AND_VAL_BUF, in ecpp_get_props()
955 ecpp_error(pp->dip, "No dma channel found\n"); in ecpp_get_props()
956 pp->uh.x86.chn = 0xff; in ecpp_get_props()
957 pp->fast_compat = FALSE; in ecpp_get_props()
958 pp->noecpregs = TRUE; in ecpp_get_props()
960 pp->uh.x86.chn = (uint8_t)value; in ecpp_get_props()
966 pp->ecp_rev_speed = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
969 pp->rev_watchdog = ddi_prop_get_int(DDI_DEV_T_ANY, pp->dip, 0, in ecpp_get_props()
972 ecpp_error(pp->dip, in ecpp_get_props()
977 pp->fast_centronics, pp->fast_compat, in ecpp_get_props()
978 pp->wsrv_retry, pp->wait_for_busy, in ecpp_get_props()
979 pp->data_setup_time, pp->strobe_pulse_width, in ecpp_get_props()
980 pp->xfer_parms.write_timeout); in ecpp_get_props()
988 struct ecppunit *pp; in ecpp_getinfo() local
995 pp = ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_getinfo()
996 if (pp != NULL) { in ecpp_getinfo()
997 *result = pp->dip; in ecpp_getinfo()
1021 struct ecppunit *pp; in ecpp_open() local
1032 pp = (struct ecppunit *)ddi_get_soft_state(ecppsoft_statep, instance); in ecpp_open()
1034 if (pp == NULL) { in ecpp_open()
1038 mutex_enter(&pp->umutex); in ecpp_open()
1044 if (pp->oflag == TRUE) { in ecpp_open()
1045 ecpp_error(pp->dip, "ecpp open failed"); in ecpp_open()
1046 mutex_exit(&pp->umutex); in ecpp_open()
1050 pp->oflag = TRUE; in ecpp_open()
1053 pp->prn_timeouts = prn_timeouts_default; in ecpp_open()
1054 pp->xfer_parms = default_xfer_parms; in ecpp_open()
1055 pp->current_mode = ECPP_CENTRONICS; in ecpp_open()
1056 pp->backchannel = ECPP_CENTRONICS; in ecpp_open()
1057 pp->current_phase = ECPP_PHASE_PO; in ecpp_open()
1058 pp->port = ECPP_PORT_DMA; in ecpp_open()
1059 pp->instance = instance; in ecpp_open()
1060 pp->timeout_error = 0; in ecpp_open()
1061 pp->saved_dsr = DSR_READ(pp); in ecpp_open()
1062 pp->ecpp_drain_counter = 0; in ecpp_open()
1063 pp->dma_cancelled = FALSE; in ecpp_open()
1064 pp->io_mode = ECPP_DMA; in ecpp_open()
1065 pp->joblen = 0; in ecpp_open()
1066 pp->tfifo_intr = 0; in ecpp_open()
1067 pp->softintr_pending = 0; in ecpp_open()
1068 pp->nread = 0; in ecpp_open()
1071 pp->e_busy = ECPP_IDLE; in ecpp_open()
1073 pp->readq = RD(q); in ecpp_open()
1074 pp->writeq = WR(q); in ecpp_open()
1075 pp->msg = NULL; in ecpp_open()
1077 RD(q)->q_ptr = WR(q)->q_ptr = (caddr_t)pp; in ecpp_open()
1082 if (ecpp_reset_port_regs(pp) == FAILURE) { in ecpp_open()
1083 mutex_exit(&pp->umutex); in ecpp_open()
1087 mutex_exit(&pp->umutex); in ecpp_open()
1113 mutex_enter(&pp->umutex); in ecpp_open()
1115 ecpp_default_negotiation(pp); in ecpp_open()
1118 (void) ecpp_idle_phase(pp); in ecpp_open()
1120 ecpp_error(pp->dip, in ecpp_open()
1122 pp->current_mode, pp->current_phase, in ecpp_open()
1123 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_open()
1125 mutex_exit(&pp->umutex); in ecpp_open()
1134 struct ecppunit *pp; in ecpp_close() local
1137 pp = (struct ecppunit *)q->q_ptr; in ecpp_close()
1139 ecpp_error(pp->dip, "ecpp_close: entering ...\n"); in ecpp_close()
1141 mutex_enter(&pp->umutex); in ecpp_close()
1155 while (pp->e_busy != ECPP_IDLE || qsize(WR(q))) { in ecpp_close()
1156 if (!cv_wait_sig(&pp->pport_cv, &pp->umutex)) { in ecpp_close()
1157 ecpp_error(pp->dip, "ecpp_close:B: received SIG\n"); in ecpp_close()
1162 ecpp_flush(pp, FWRITE); in ecpp_close()
1165 ecpp_error(pp->dip, "ecpp_close:rcvd cv-sig\n"); in ecpp_close()
1169 ecpp_error(pp->dip, "ecpp_close: joblen=%d, ctx_cf=%d, " in ecpp_close()
1171 pp->joblen, pp->ctx_cf, qsize(pp->writeq), qsize(q)); in ecpp_close()
1181 timeout_id = pp->timeout_id; in ecpp_close()
1182 fifo_timer_id = pp->fifo_timer_id; in ecpp_close()
1183 wsrv_timer_id = pp->wsrv_timer_id; in ecpp_close()
1185 pp->timeout_id = pp->fifo_timer_id = pp->wsrv_timer_id = 0; in ecpp_close()
1187 pp->softintr_pending = 0; in ecpp_close()
1188 pp->dma_cancelled = TRUE; in ecpp_close()
1189 ECPP_MASK_INTR(pp); in ecpp_close()
1191 mutex_exit(&pp->umutex); in ecpp_close()
1205 mutex_enter(&pp->umutex); in ecpp_close()
1208 if ((pp->current_mode == ECPP_ECP_MODE) && in ecpp_close()
1209 (pp->current_phase != ECPP_PHASE_ECP_FWD_IDLE)) { in ecpp_close()
1210 (void) ecp_reverse2forward(pp); in ecpp_close()
1213 (void) ecpp_1284_termination(pp); in ecpp_close()
1215 pp->oflag = FALSE; in ecpp_close()
1217 pp->readq = pp->writeq = NULL; in ecpp_close()
1218 pp->msg = NULL; in ecpp_close()
1220 ecpp_error(pp->dip, "ecpp_close: ecr=%x, dsr=%x, dcr=%x\n", in ecpp_close()
1221 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_close()
1223 mutex_exit(&pp->umutex); in ecpp_close()
1235 struct ecppunit *pp; in ecpp_wput() local
1237 pp = (struct ecppunit *)q->q_ptr; in ecpp_wput()
1244 ecpp_error(pp->dip, in ecpp_wput()
1264 ecpp_error(pp->dip, in ecpp_wput()
1285 ecpp_error(pp->dip, "ecpp_wput:M_IOCTL %x\n", iocbp->ioc_cmd); in ecpp_wput()
1287 mutex_enter(&pp->umutex); in ecpp_wput()
1290 if ((pp->e_busy == ECPP_BUSY) && in ecpp_wput()
1293 mutex_exit(&pp->umutex); in ecpp_wput()
1296 mutex_exit(&pp->umutex); in ecpp_wput()
1306 ecpp_error(pp->dip, "ecpp_wput:M_IOCDATA\n"); in ecpp_wput()
1370 ecpp_error(pp->dip, "ecpp_wput:M_FLUSH\n"); in ecpp_wput()
1373 mutex_enter(&pp->umutex); in ecpp_wput()
1374 ecpp_flush(pp, FWRITE); in ecpp_wput()
1375 mutex_exit(&pp->umutex); in ecpp_wput()
1379 mutex_enter(&pp->umutex); in ecpp_wput()
1380 ecpp_flush(pp, FREAD); in ecpp_wput()
1381 mutex_exit(&pp->umutex); in ecpp_wput()
1396 mutex_enter(&pp->umutex); in ecpp_wput()
1397 if (pp->e_busy == ECPP_IDLE) { in ecpp_wput()
1398 pp->nread += *(size_t *)mp->b_rptr; in ecpp_wput()
1399 ecpp_error(pp->dip, "ecpp_wput: M_READ %d", pp->nread); in ecpp_wput()
1402 ecpp_error(pp->dip, "ecpp_wput: M_READ queueing"); in ecpp_wput()
1405 mutex_exit(&pp->umutex); in ecpp_wput()
1409 ecpp_error(pp->dip, "ecpp_wput: bad messagetype 0x%x\n", in ecpp_wput()
1501 struct ecppunit *pp; in ecpp_putioc() local
1503 pp = (struct ecppunit *)q->q_ptr; in ecpp_putioc()
1523 mutex_enter(&pp->umutex); in ecpp_putioc()
1525 pp->xfer_parms.mode = pp->current_mode; in ecpp_putioc()
1526 xfer_parms = pp->xfer_parms; in ecpp_putioc()
1528 mutex_exit(&pp->umutex); in ecpp_putioc()
1535 mutex_enter(&pp->umutex); in ecpp_putioc()
1536 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1537 mutex_exit(&pp->umutex); in ecpp_putioc()
1541 mutex_exit(&pp->umutex); in ecpp_putioc()
1551 mutex_enter(&pp->umutex); in ecpp_putioc()
1553 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1554 mutex_exit(&pp->umutex); in ecpp_putioc()
1559 rg.dsr = DSR_READ(pp); in ecpp_putioc()
1560 rg.dcr = DCR_READ(pp); in ecpp_putioc()
1562 mutex_exit(&pp->umutex); in ecpp_putioc()
1564 ecpp_error(pp->dip, "ECPPIOC_GETREGS: dsr=%x,dcr=%x\n", in ecpp_putioc()
1577 mutex_enter(&pp->umutex); in ecpp_putioc()
1578 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1579 mutex_exit(&pp->umutex); in ecpp_putioc()
1583 mutex_exit(&pp->umutex); in ecpp_putioc()
1597 mutex_enter(&pp->umutex); in ecpp_putioc()
1600 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_putioc()
1601 mutex_exit(&pp->umutex); in ecpp_putioc()
1607 byte = pp->port; in ecpp_putioc()
1609 switch (pp->port) { in ecpp_putioc()
1611 byte = DATAR_READ(pp); in ecpp_putioc()
1614 byte = TFIFO_READ(pp); in ecpp_putioc()
1615 ecpp_error(pp->dip, "GETDATA=0x%x\n", byte); in ecpp_putioc()
1622 mutex_exit(&pp->umutex); in ecpp_putioc()
1623 ecpp_error(pp->dip, "weird command"); in ecpp_putioc()
1628 mutex_exit(&pp->umutex); in ecpp_putioc()
1638 mutex_enter(&pp->umutex); in ecpp_putioc()
1640 bpp_status.timeout_occurred = pp->timeout_error; in ecpp_putioc()
1642 bpp_status.pin_status = ecpp_get_error_status(pp->saved_dsr); in ecpp_putioc()
1644 mutex_exit(&pp->umutex); in ecpp_putioc()
1652 mutex_enter(&pp->umutex); in ecpp_putioc()
1654 if (!((pp->current_mode == ECPP_CENTRONICS) || in ecpp_putioc()
1655 (pp->current_mode == ECPP_COMPAT_MODE))) { in ecpp_putioc()
1658 pp->saved_dsr = DSR_READ(pp); in ecpp_putioc()
1660 if ((pp->saved_dsr & ECPP_PE) || in ecpp_putioc()
1661 !(pp->saved_dsr & ECPP_SLCT) || in ecpp_putioc()
1662 !(pp->saved_dsr & ECPP_nERR)) { in ecpp_putioc()
1669 mutex_exit(&pp->umutex); in ecpp_putioc()
1678 mutex_enter(&pp->umutex); in ecpp_putioc()
1679 if (pp->e_busy == ECPP_BUSY) { in ecpp_putioc()
1680 mutex_exit(&pp->umutex); in ecpp_putioc()
1683 (void) ecpp_mode_negotiation(pp, ECPP_CENTRONICS); in ecpp_putioc()
1685 DCR_WRITE(pp, ECPP_SLCTIN); in ecpp_putioc()
1687 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in ecpp_putioc()
1689 ecpp_default_negotiation(pp); in ecpp_putioc()
1691 mutex_exit(&pp->umutex); in ecpp_putioc()
1699 mutex_enter(&pp->umutex); in ecpp_putioc()
1701 ifcap = ecpp_get_prn_ifcap(pp); in ecpp_putioc()
1703 mutex_exit(&pp->umutex); in ecpp_putioc()
1718 mutex_enter(&pp->umutex); in ecpp_putioc()
1719 timeouts = pp->prn_timeouts; in ecpp_putioc()
1720 mutex_exit(&pp->umutex); in ecpp_putioc()
1737 mutex_enter(&pp->umutex); in ecpp_putioc()
1740 if (pp->current_mode == ECPP_CENTRONICS || in ecpp_putioc()
1741 pp->current_mode == ECPP_COMPAT_MODE) { in ecpp_putioc()
1742 dsr = DSR_READ(pp); in ecpp_putioc()
1753 mutex_exit(&pp->umutex); in ecpp_putioc()
1763 mutex_enter(&pp->umutex); in ecpp_putioc()
1766 if (pp->current_mode != ECPP_COMPAT_MODE && in ecpp_putioc()
1767 pp->current_mode != ECPP_CENTRONICS) { in ecpp_putioc()
1768 mutex_exit(&pp->umutex); in ecpp_putioc()
1773 dsr = DSR_READ(pp); /* read status */ in ecpp_putioc()
1775 mutex_exit(&pp->umutex); in ecpp_putioc()
1777 ecpp_error(pp->dip, "PRNIOC_GET_STATUS: %x\n", dsr); in ecpp_putioc()
1802 ecpp_error(pp->dip, "putioc: unknown IOCTL: %x\n", in ecpp_putioc()
1873 struct ecppunit *pp = (struct ecppunit *)q->q_ptr; in ecpp_rsrv() local
1875 mutex_enter(&pp->umutex); in ecpp_rsrv()
1877 if (pp->e_busy == ECPP_IDLE) { in ecpp_rsrv()
1878 (void) ecpp_idle_phase(pp); in ecpp_rsrv()
1879 cv_signal(&pp->pport_cv); /* signal ecpp_close() */ in ecpp_rsrv()
1882 mutex_exit(&pp->umutex); in ecpp_rsrv()
1891 struct ecppunit *pp = (struct ecppunit *)q->q_ptr; in ecpp_wsrv() local
1898 mutex_enter(&pp->umutex); in ecpp_wsrv()
1900 ecpp_error(pp->dip, "ecpp_wsrv: e_busy=%x\n", pp->e_busy); in ecpp_wsrv()
1903 if (pp->e_busy == ECPP_BUSY || pp->e_busy == ECPP_FLUSH) { in ecpp_wsrv()
1904 mutex_exit(&pp->umutex); in ecpp_wsrv()
1906 } else if (pp->suspended == TRUE) { in ecpp_wsrv()
1911 cv_signal(&pp->pport_cv); in ecpp_wsrv()
1912 mutex_exit(&pp->umutex); in ecpp_wsrv()
1917 if (pp->e_busy == ECPP_ERR) { in ecpp_wsrv()
1918 if (ecpp_check_status(pp) == FAILURE) { in ecpp_wsrv()
1919 if (pp->wsrv_timer_id == 0) { in ecpp_wsrv()
1920 ecpp_error(pp->dip, "wsrv: start wsrv_timer\n"); in ecpp_wsrv()
1921 pp->wsrv_timer_id = timeout(ecpp_wsrv_timer, in ecpp_wsrv()
1922 (caddr_t)pp, in ecpp_wsrv()
1923 drv_usectohz(pp->wsrv_retry * 1000)); in ecpp_wsrv()
1925 ecpp_error(pp->dip, in ecpp_wsrv()
1929 mutex_exit(&pp->umutex); in ecpp_wsrv()
1932 pp->e_busy = ECPP_IDLE; in ecpp_wsrv()
1936 my_ioblock = pp->ioblock; in ecpp_wsrv()
1943 pp->msg = NULL; in ecpp_wsrv()
1959 pp->e_busy = ECPP_BUSY; in ecpp_wsrv()
1969 pp->msg = mp; in ecpp_wsrv()
1985 start_addr = (caddr_t)pp->ioblock; in ecpp_wsrv()
2005 ecpp_error(pp->dip, "M_IOCTL.\n"); in ecpp_wsrv()
2007 mutex_exit(&pp->umutex); in ecpp_wsrv()
2011 mutex_enter(&pp->umutex); in ecpp_wsrv()
2018 ecpp_error(pp->dip, "M_IOCDATA\n"); in ecpp_wsrv()
2050 if (pp->e_busy != ECPP_IDLE) { in ecpp_wsrv()
2051 ecpp_error(pp->dip, "wsrv: M_CTL postponed\n"); in ecpp_wsrv()
2055 ecpp_error(pp->dip, "wsrv: M_CTL\n"); in ecpp_wsrv()
2061 ecpp_error(pp->dip, "wsrv: bogus M_CTL"); in ecpp_wsrv()
2069 (void) ecpp_peripheral2host(pp); in ecpp_wsrv()
2072 if (pp->e_busy == ECPP_BUSY) { in ecpp_wsrv()
2078 pp->nread += *(size_t *)mp->b_rptr; in ecpp_wsrv()
2080 ecpp_error(pp->dip, "wsrv: M_READ %d", pp->nread); in ecpp_wsrv()
2084 ecpp_error(pp->dip, "wsrv: should never get here\n"); in ecpp_wsrv()
2094 ecpp_error(pp->dip, "wsrv:starting: total_len=%d\n", total_len); in ecpp_wsrv()
2095 pp->e_busy = ECPP_BUSY; in ecpp_wsrv()
2096 ecpp_start(pp, start_addr, total_len); in ecpp_wsrv()
2098 ecpp_error(pp->dip, "wsrv:finishing: ebusy=%x\n", pp->e_busy); in ecpp_wsrv()
2101 if (pp->e_busy == ECPP_IDLE) { in ecpp_wsrv()
2102 (void) ecpp_idle_phase(pp); in ecpp_wsrv()
2103 cv_signal(&pp->pport_cv); /* signal ecpp_close() */ in ecpp_wsrv()
2107 mutex_exit(&pp->umutex); in ecpp_wsrv()
2118 struct ecppunit *pp; in ecpp_srvioc() local
2121 pp = (struct ecppunit *)q->q_ptr; in ecpp_srvioc()
2144 pp->xfer_parms = *xferp; in ecpp_srvioc()
2145 pp->prn_timeouts.tmo_forward = pp->xfer_parms.write_timeout; in ecpp_srvioc()
2147 ecpp_error(pp->dip, "srvioc: current_mode =%x new mode=%x\n", in ecpp_srvioc()
2148 pp->current_mode, pp->xfer_parms.mode); in ecpp_srvioc()
2150 if (ecpp_mode_negotiation(pp, pp->xfer_parms.mode) == FAILURE) { in ecpp_srvioc()
2157 if ((pp->current_mode == ECPP_NIBBLE_MODE) && in ecpp_srvioc()
2158 (read_nibble_backchan(pp) == FAILURE)) { in ecpp_srvioc()
2170 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2171 pp->port = ECPP_PORT_DMA; in ecpp_srvioc()
2173 pp->port = ECPP_PORT_PIO; in ecpp_srvioc()
2176 pp->xfer_parms.mode = pp->current_mode; in ecpp_srvioc()
2188 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2201 dcr = DCR_READ(pp) & ~ECPP_REV_DIR; in ecpp_srvioc()
2205 DCR_WRITE(pp, dcr); in ecpp_srvioc()
2206 ecpp_error(pp->dip, "ECPPIOC_SETREGS:dcr=%x\n", dcr); in ecpp_srvioc()
2217 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2225 ECR_WRITE(pp, in ecpp_srvioc()
2227 pp->port = *port; in ecpp_srvioc()
2232 ecpp_error(pp->dip, "SETPORT: to TDMA\n"); in ecpp_srvioc()
2233 pp->tfifo_intr = 1; in ecpp_srvioc()
2235 ECR_WRITE(pp, in ecpp_srvioc()
2237 pp->port = *port; in ecpp_srvioc()
2254 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_srvioc()
2259 switch (pp->port) { in ecpp_srvioc()
2261 DATAR_WRITE(pp, *data); in ecpp_srvioc()
2266 TFIFO_WRITE(pp, *data); in ecpp_srvioc()
2342 ifcap = ecpp_get_prn_ifcap(pp); in ecpp_srvioc()
2357 ecpp_default_negotiation(pp); in ecpp_srvioc()
2359 (void) ecpp_mode_negotiation(pp, ECPP_CENTRONICS); in ecpp_srvioc()
2376 pp->prn_timeouts = *prn_timeouts; in ecpp_srvioc()
2377 pp->xfer_parms.write_timeout = (int)prn_timeouts->tmo_forward; in ecpp_srvioc()
2396 struct ecppunit *pp; in ecpp_srvioc_devid() local
2404 pp = (struct ecppunit *)q->q_ptr; in ecpp_srvioc_devid()
2411 ecpp_error(pp->dip, "ecpp_srvioc_devid: mode=%x, len=%x\n", in ecpp_srvioc_devid()
2429 if (error = ecpp_getdevid(pp, NULL, &len, mode)) { in ecpp_srvioc_devid()
2457 error = ecpp_getdevid(pp, datamp->b_rptr, &len, mode); in ecpp_srvioc_devid()
2473 (void) ecpp_1284_termination(pp); in ecpp_srvioc_devid()
2545 ecpp_flush(struct ecppunit *pp, int cmd) in ecpp_flush() argument
2551 ASSERT(mutex_owned(&pp->umutex)); in ecpp_flush()
2557 q = pp->writeq; in ecpp_flush()
2560 ecpp_error(pp->dip, "ecpp_flush e_busy=%x\n", pp->e_busy); in ecpp_flush()
2563 switch (pp->e_busy) { in ecpp_flush()
2569 pp->e_busy = ECPP_FLUSH; in ecpp_flush()
2583 pp->dma_cancelled = TRUE; in ecpp_flush()
2586 if (COMPAT_DMA(pp) || in ecpp_flush()
2587 (pp->current_mode == ECPP_ECP_MODE) || in ecpp_flush()
2588 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_flush()
2594 if (ECPP_DMA_STOP(pp, NULL) == FAILURE) { in ecpp_flush()
2595 ecpp_error(pp->dip, in ecpp_flush()
2606 pp->dma_handle) != DDI_SUCCESS) in ecpp_flush()
2607 ecpp_error(pp->dip, in ecpp_flush()
2610 if (pp->msg != NULL) { in ecpp_flush()
2611 freemsg(pp->msg); in ecpp_flush()
2612 pp->msg = NULL; in ecpp_flush()
2618 dcr = DCR_READ(pp); in ecpp_flush()
2620 DCR_WRITE(pp, dcr); in ecpp_flush()
2621 ECPP_MASK_INTR(pp); in ecpp_flush()
2632 timeout_id = pp->timeout_id; in ecpp_flush()
2633 fifo_timer_id = pp->fifo_timer_id; in ecpp_flush()
2634 pp->timeout_id = pp->fifo_timer_id = 0; in ecpp_flush()
2635 pp->softintr_pending = 0; in ecpp_flush()
2644 pp->e_busy = ECPP_FLUSH; in ecpp_flush()
2653 wsrv_timer_id = pp->wsrv_timer_id; in ecpp_flush()
2654 pp->wsrv_timer_id = 0; in ecpp_flush()
2663 ecpp_error(pp->dip, in ecpp_flush()
2664 "ecpp_flush: illegal state %x\n", pp->e_busy); in ecpp_flush()
2668 if (pp->current_mode == ECPP_DIAG_MODE) { in ecpp_flush()
2669 ecr = ECR_READ(pp); in ecpp_flush()
2671 ECR_WRITE(pp, in ecpp_flush()
2673 ECR_WRITE(pp, ecr); in ecpp_flush()
2681 pp->e_busy = ECPP_IDLE; in ecpp_flush()
2684 if (pp->current_mode == ECPP_ECP_MODE) { in ecpp_flush()
2685 if (pp->current_phase == ECPP_PHASE_ECP_REV_XFER) { in ecpp_flush()
2686 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecpp_flush()
2688 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecpp_flush()
2693 mutex_exit(&pp->umutex); in ecpp_flush()
2705 mutex_enter(&pp->umutex); in ecpp_flush()
2707 cv_signal(&pp->pport_cv); /* wake up ecpp_close() */ in ecpp_flush()
2711 ecpp_start(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_start() argument
2713 ASSERT(mutex_owned(&pp->umutex)); in ecpp_start()
2714 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_start()
2716 ecpp_error(pp->dip, in ecpp_start()
2718 pp->current_mode, pp->current_phase, ECR_READ(pp), len); in ecpp_start()
2720 pp->dma_dir = DDI_DMA_WRITE; /* this is a forward transfer */ in ecpp_start()
2722 switch (pp->current_mode) { in ecpp_start()
2724 (void) ecpp_1284_termination(pp); in ecpp_start()
2732 if (pp->io_mode == ECPP_DMA) { in ecpp_start()
2733 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2738 if (ecpp_prep_pio_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2741 (void) ecpp_pio_writeb(pp); in ecpp_start()
2749 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_110); in ecpp_start()
2755 if (!(ECR_READ(pp) & ECPP_FIFO_EMPTY)) { in ecpp_start()
2756 ecpp_error(pp->dip, in ecpp_start()
2758 ECR_WRITE(pp, in ecpp_start()
2760 ECR_WRITE(pp, in ecpp_start()
2770 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2776 ecpp_putback_untransfered(pp, addr + len, oldlen - len); in ecpp_start()
2783 ASSERT(pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE || in ecpp_start()
2784 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecpp_start()
2787 if (pp->current_phase == ECPP_PHASE_ECP_REV_IDLE) { in ecpp_start()
2788 if (ecp_reverse2forward(pp) == FAILURE) { in ecpp_start()
2789 if (pp->msg) { in ecpp_start()
2790 (void) putbq(pp->writeq, pp->msg); in ecpp_start()
2792 ecpp_putback_untransfered(pp, in ecpp_start()
2798 if (ecpp_init_dma_xfer(pp, addr, len) == FAILURE) { in ecpp_start()
2806 pp->timeout_id = timeout(ecpp_xfer_timeout, (caddr_t)pp, in ecpp_start()
2807 pp->xfer_parms.write_timeout * drv_usectohz(1000000)); in ecpp_start()
2815 ecpp_prep_pio_xfer(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_prep_pio_xfer() argument
2817 pp->next_byte = addr; in ecpp_prep_pio_xfer()
2818 pp->last_byte = (caddr_t)((ulong_t)addr + len); in ecpp_prep_pio_xfer()
2820 if (ecpp_check_status(pp) == FAILURE) { in ecpp_prep_pio_xfer()
2825 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2828 if (pp->msg != NULL) { in ecpp_prep_pio_xfer()
2834 ecpp_putback_untransfered(pp, in ecpp_prep_pio_xfer()
2835 (void *)pp->msg->b_rptr, len); in ecpp_prep_pio_xfer()
2836 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2839 freemsg(pp->msg); in ecpp_prep_pio_xfer()
2840 pp->msg = NULL; in ecpp_prep_pio_xfer()
2842 ecpp_putback_untransfered(pp, pp->ioblock, len); in ecpp_prep_pio_xfer()
2843 ecpp_error(pp->dip, in ecpp_prep_pio_xfer()
2846 qenable(pp->writeq); in ecpp_prep_pio_xfer()
2851 pp->dma_cancelled = FALSE; in ecpp_prep_pio_xfer()
2854 if (ecr_write(pp, ECR_mode_001 | in ecpp_prep_pio_xfer()
2856 ecpp_error(pp->dip, "ecpp_prep_pio_xfer: failed w/ECR.\n"); in ecpp_prep_pio_xfer()
2859 ecpp_error(pp->dip, "ecpp_prep_pio_xfer: dcr=%x ecr=%x\n", in ecpp_prep_pio_xfer()
2860 DCR_READ(pp), ECR_READ(pp)); in ecpp_prep_pio_xfer()
2866 ecpp_init_dma_xfer(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_init_dma_xfer() argument
2880 ASSERT((pp->current_mode <= ECPP_DIAG_MODE) && in ecpp_init_dma_xfer()
2881 (ecr_mode[pp->current_mode] != 0)); in ecpp_init_dma_xfer()
2883 if (ecpp_setup_dma_resources(pp, addr, len) == FAILURE) { in ecpp_init_dma_xfer()
2884 qenable(pp->writeq); in ecpp_init_dma_xfer()
2888 if (ecpp_check_status(pp) == FAILURE) { in ecpp_init_dma_xfer()
2893 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2895 pp->dma_cookie.dmac_size); in ecpp_init_dma_xfer()
2897 if (pp->msg != NULL) { in ecpp_init_dma_xfer()
2903 ecpp_putback_untransfered(pp, in ecpp_init_dma_xfer()
2904 (void *)pp->msg->b_rptr, len); in ecpp_init_dma_xfer()
2905 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2908 freemsg(pp->msg); in ecpp_init_dma_xfer()
2909 pp->msg = NULL; in ecpp_init_dma_xfer()
2911 ecpp_putback_untransfered(pp, pp->ioblock, len); in ecpp_init_dma_xfer()
2912 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2916 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_init_dma_xfer()
2917 ecpp_error(pp->dip, in ecpp_init_dma_xfer()
2920 qenable(pp->writeq); in ecpp_init_dma_xfer()
2924 pp->xfercnt = pp->resid = len; in ecpp_init_dma_xfer()
2925 pp->dma_cancelled = FALSE; in ecpp_init_dma_xfer()
2926 pp->tfifo_intr = 0; in ecpp_init_dma_xfer()
2929 ecr = ecr_mode[pp->current_mode]; in ecpp_init_dma_xfer()
2930 (void) ecr_write(pp, ecr | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecpp_init_dma_xfer()
2933 if (ECPP_DMA_START(pp) == FAILURE) { in ecpp_init_dma_xfer()
2934 ecpp_error(pp->dip, "ecpp_init_dma_xfer: dma_start FAILED.\n"); in ecpp_init_dma_xfer()
2939 (void) ecr_write(pp, ecr | ECPP_DMA_ENABLE | ECPP_INTR_MASK); in ecpp_init_dma_xfer()
2945 ecpp_setup_dma_resources(struct ecppunit *pp, caddr_t addr, size_t len) in ecpp_setup_dma_resources() argument
2951 ASSERT(pp->dma_dir == DDI_DMA_READ || pp->dma_dir == DDI_DMA_WRITE); in ecpp_setup_dma_resources()
2953 err = ddi_dma_addr_bind_handle(pp->dma_handle, NULL, in ecpp_setup_dma_resources()
2954 addr, len, pp->dma_dir | DDI_DMA_PARTIAL, in ecpp_setup_dma_resources()
2956 &pp->dma_cookie, &pp->dma_cookie_count); in ecpp_setup_dma_resources()
2960 ecpp_error(pp->dip, "ecpp_setup_dma: DMA_MAPPED\n"); in ecpp_setup_dma_resources()
2962 pp->dma_nwin = 1; in ecpp_setup_dma_resources()
2963 pp->dma_curwin = 1; in ecpp_setup_dma_resources()
2967 ecpp_error(pp->dip, "ecpp_setup_dma: DMA_PARTIAL_MAP\n"); in ecpp_setup_dma_resources()
2969 if (ddi_dma_numwin(pp->dma_handle, in ecpp_setup_dma_resources()
2970 &pp->dma_nwin) != DDI_SUCCESS) { in ecpp_setup_dma_resources()
2971 (void) ddi_dma_unbind_handle(pp->dma_handle); in ecpp_setup_dma_resources()
2974 pp->dma_curwin = 1; in ecpp_setup_dma_resources()
2981 if (ddi_dma_getwin(pp->dma_handle, 0, &woff, &wlen, in ecpp_setup_dma_resources()
2982 &pp->dma_cookie, &pp->dma_cookie_count) != DDI_SUCCESS) { in ecpp_setup_dma_resources()
2983 ecpp_error(pp->dip, in ecpp_setup_dma_resources()
2985 (void) ddi_dma_unbind_handle(pp->dma_handle); in ecpp_setup_dma_resources()
2989 ecpp_error(pp->dip, in ecpp_setup_dma_resources()
2992 pp->dma_cookie_count, pp->dma_nwin, in ecpp_setup_dma_resources()
2993 pp->dma_cookie.dmac_address, pp->dma_cookie.dmac_size); in ecpp_setup_dma_resources()
2999 ecpp_error(pp->dip, "ecpp_setup_dma: err=%x\n", err); in ecpp_setup_dma_resources()
3048 struct ecppunit *pp = (struct ecppunit *)(void *)arg; in ecpp_isr() local
3055 mutex_enter(&pp->umutex); in ecpp_isr()
3063 if (pp->dma_cancelled == TRUE) { in ecpp_isr()
3064 ecpp_error(pp->dip, "dma-cancel isr\n"); in ecpp_isr()
3066 pp->intr_hard++; in ecpp_isr()
3067 pp->dma_cancelled = FALSE; in ecpp_isr()
3069 mutex_exit(&pp->umutex); in ecpp_isr()
3075 if (pp->hw == &x86) in ecpp_isr()
3077 if (pp->hw == &m1553) in ecpp_isr()
3080 retval = ecpp_M1553_intr(pp); in ecpp_isr()
3084 mutex_exit(&pp->umutex); in ecpp_isr()
3093 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3097 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3107 if (!COMPAT_PIO(pp)) { in ecpp_isr()
3109 dcsr = GET_DMAC_CSR(pp); in ecpp_isr()
3113 dsr = DSR_READ(pp); in ecpp_isr()
3121 if (pp->hw != &pc97317) { in ecpp_isr()
3138 ((COMPAT_PIO(pp)) && (pp->e_busy == ECPP_BUSY)) || in ecpp_isr()
3140 (pp->current_mode == ECPP_ECP_MODE))) { in ecpp_isr()
3147 pp->intr_hard++; in ecpp_isr()
3154 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_isr()
3159 if (ECPP_DMA_STOP(pp, &bcr) == FAILURE) { in ecpp_isr()
3160 ecpp_error(pp->dip, "ecpp_isr: dma_stop failed\n"); in ecpp_isr()
3163 ecpp_error(pp->dip, "ecpp_isr: DMAC ERROR bcr=%d\n", bcr); in ecpp_isr()
3165 ecpp_xfer_cleanup(pp); in ecpp_isr()
3167 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_isr()
3168 ecpp_error(pp->dip, "ecpp_isr(e): unbind failed\n"); in ecpp_isr()
3171 mutex_exit(&pp->umutex); in ecpp_isr()
3176 retval = ecpp_dma_ihdlr(pp); in ecpp_isr()
3177 mutex_exit(&pp->umutex); in ecpp_isr()
3181 if (COMPAT_PIO(pp)) { in ecpp_isr()
3182 retval = ecpp_pio_ihdlr(pp); in ecpp_isr()
3183 mutex_exit(&pp->umutex); in ecpp_isr()
3189 retval = ecpp_nErr_ihdlr(pp); in ecpp_isr()
3190 mutex_exit(&pp->umutex); in ecpp_isr()
3194 pp->intr_hard--; in ecpp_isr()
3198 pp->intr_spurious++; in ecpp_isr()
3207 if (pp->lastspur == 0 || now - pp->lastspur > SPUR_PERIOD) { in ecpp_isr()
3209 pp->lastspur = now; in ecpp_isr()
3210 pp->nspur = 1; in ecpp_isr()
3213 pp->nspur++; in ecpp_isr()
3216 if (pp->nspur >= SPUR_CRITICAL) { in ecpp_isr()
3217 ECPP_MASK_INTR(pp); in ecpp_isr()
3218 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_isr()
3219 pp->nspur = 0; in ecpp_isr()
3221 ddi_get_name(pp->dip), ddi_get_instance(pp->dip)); in ecpp_isr()
3223 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecpp_isr()
3226 ecpp_error(pp->dip, in ecpp_isr()
3228 dcsr, ECR_READ(pp), dsr, DCR_READ(pp), in ecpp_isr()
3229 pp->current_mode, pp->current_phase); in ecpp_isr()
3231 mutex_exit(&pp->umutex); in ecpp_isr()
3236 pp->intr_spurious++; in ecpp_isr()
3238 ecpp_error(pp->dip, in ecpp_isr()
3240 dcsr, ECR_READ(pp), DSR_READ(pp), DCR_READ(pp), in ecpp_isr()
3241 pp->current_mode, pp->current_phase); in ecpp_isr()
3243 mutex_exit(&pp->umutex); in ecpp_isr()
3251 ecpp_M1553_intr(struct ecppunit *pp) in ecpp_M1553_intr() argument
3255 pp->intr_hard++; in ecpp_M1553_intr()
3257 if (pp->e_busy == ECPP_BUSY) { in ecpp_M1553_intr()
3259 if (COMPAT_PIO(pp)) { in ecpp_M1553_intr()
3260 return (ecpp_pio_ihdlr(pp)); in ecpp_M1553_intr()
3264 if (COMPAT_DMA(pp) || in ecpp_M1553_intr()
3265 (pp->current_mode == ECPP_ECP_MODE) || in ecpp_M1553_intr()
3266 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_M1553_intr()
3267 return (ecpp_dma_ihdlr(pp)); in ecpp_M1553_intr()
3272 if ((DSR_READ(pp) & ECPP_nERR) == 0) { in ecpp_M1553_intr()
3273 return (ecpp_nErr_ihdlr(pp)); in ecpp_M1553_intr()
3283 ecpp_dma_ihdlr(struct ecppunit *pp) in ecpp_dma_ihdlr() argument
3287 ecpp_error(pp->dip, "ecpp_dma_ihdlr(%x): ecr=%x, dsr=%x, dcr=%x\n", in ecpp_dma_ihdlr()
3288 pp->current_mode, ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in ecpp_dma_ihdlr()
3291 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_dma_ihdlr()
3294 if (pp->tfifo_intr == 1) { in ecpp_dma_ihdlr()
3295 pp->tfifo_intr = 0; in ecpp_dma_ihdlr()
3296 ecpp_error(pp->dip, "ecpp_dma_ihdlr: tfifo_intr is 1\n"); in ecpp_dma_ihdlr()
3300 if (ECPP_DMA_STOP(pp, NULL) == FAILURE) { in ecpp_dma_ihdlr()
3301 ecpp_error(pp->dip, "ecpp_dma_ihdlr: dma_stop failed\n"); in ecpp_dma_ihdlr()
3304 if (pp->current_mode == ECPP_ECP_MODE && in ecpp_dma_ihdlr()
3305 pp->current_phase == ECPP_PHASE_ECP_REV_XFER) { in ecpp_dma_ihdlr()
3306 ecpp_ecp_read_completion(pp); in ecpp_dma_ihdlr()
3311 if ((ECR_READ(pp) & ECPP_FIFO_EMPTY) || in ecpp_dma_ihdlr()
3312 (pp->current_mode == ECPP_DIAG_MODE)) { in ecpp_dma_ihdlr()
3317 pp->fifo_timer_id = timeout(ecpp_fifo_timer, (caddr_t)pp, tm); in ecpp_dma_ihdlr()
3325 ecpp_untimeout_unblock(pp, &pp->timeout_id); in ecpp_dma_ihdlr()
3335 ecpp_pio_ihdlr(struct ecppunit *pp) in ecpp_pio_ihdlr() argument
3337 ASSERT(mutex_owned(&pp->umutex)); in ecpp_pio_ihdlr()
3338 ASSERT(pp->e_busy == ECPP_BUSY); in ecpp_pio_ihdlr()
3341 pp->joblen++; in ecpp_pio_ihdlr()
3342 pp->ctxpio_obytes++; in ecpp_pio_ihdlr()
3345 ECPP_MASK_INTR(pp); in ecpp_pio_ihdlr()
3346 DCR_WRITE(pp, DCR_READ(pp) & ~(ECPP_REV_DIR | ECPP_INTR_EN)); in ecpp_pio_ihdlr()
3352 if (pp->next_byte >= pp->last_byte) { in ecpp_pio_ihdlr()
3353 ecpp_xfer_cleanup(pp); in ecpp_pio_ihdlr()
3354 ecpp_error(pp->dip, in ecpp_pio_ihdlr()
3356 pp->joblen, pp->ctx_cf); in ecpp_pio_ihdlr()
3358 if (pp->softintr_pending) { in ecpp_pio_ihdlr()
3359 ecpp_error(pp->dip, in ecpp_pio_ihdlr()
3362 pp->softintr_flags = ECPP_SOFTINTR_PIONEXT; in ecpp_pio_ihdlr()
3363 pp->softintr_pending = 1; in ecpp_pio_ihdlr()
3364 ddi_trigger_softintr(pp->softintr_id); in ecpp_pio_ihdlr()
3375 ecpp_pio_writeb(struct ecppunit *pp) in ecpp_pio_writeb() argument
3379 dcr = DCR_READ(pp) & ~ECPP_REV_DIR; in ecpp_pio_writeb()
3383 DATAR_WRITE(pp, *(pp->next_byte++)); in ecpp_pio_writeb()
3385 drv_usecwait(pp->data_setup_time); in ecpp_pio_writeb()
3388 if (dcr_write(pp, dcr | ECPP_STB) == FAILURE) { in ecpp_pio_writeb()
3389 ecpp_error(pp->dip, "ecpp_pio_writeb:1: failed w/DCR\n"); in ecpp_pio_writeb()
3393 (void) DSR_READ(pp); /* ensure IRQ_ST is armed */ in ecpp_pio_writeb()
3394 ECPP_UNMASK_INTR(pp); in ecpp_pio_writeb()
3396 drv_usecwait(pp->strobe_pulse_width); in ecpp_pio_writeb()
3398 if (dcr_write(pp, dcr & ~ECPP_STB) == FAILURE) { in ecpp_pio_writeb()
3399 ecpp_error(pp->dip, "ecpp_pio_writeb:2: failed w/DCR\n"); in ecpp_pio_writeb()
3407 ecpp_nErr_ihdlr(struct ecppunit *pp) in ecpp_nErr_ihdlr() argument
3409 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: mode=%x, phase=%x\n", in ecpp_nErr_ihdlr()
3410 pp->current_mode, pp->current_phase); in ecpp_nErr_ihdlr()
3412 if (pp->oflag != TRUE) { in ecpp_nErr_ihdlr()
3413 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: not open!\n"); in ecpp_nErr_ihdlr()
3417 if (pp->e_busy == ECPP_BUSY) { in ecpp_nErr_ihdlr()
3418 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: busy\n"); in ecpp_nErr_ihdlr()
3419 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK); in ecpp_nErr_ihdlr()
3424 ECPP_MASK_INTR(pp); in ecpp_nErr_ihdlr()
3425 DCR_WRITE(pp, DCR_READ(pp) & ~(ECPP_INTR_EN | ECPP_REV_DIR)); in ecpp_nErr_ihdlr()
3426 ECR_WRITE(pp, ECR_READ(pp) | ECPP_INTR_MASK); in ecpp_nErr_ihdlr()
3429 switch (pp->current_mode) { in ecpp_nErr_ihdlr()
3443 pp->current_phase = ECPP_PHASE_NIBT_REVINTR; in ecpp_nErr_ihdlr()
3447 ecpp_error(pp->dip, "ecpp_nErr_ihdlr: wrong mode!\n"); in ecpp_nErr_ihdlr()
3451 (void) ecpp_backchan_req(pp); /* put backchannel request on the wq */ in ecpp_nErr_ihdlr()
3463 struct ecppunit *pp = (struct ecppunit *)arg; in ecpp_softintr() local
3466 mutex_enter(&pp->umutex); in ecpp_softintr()
3468 pp->intr_soft++; in ecpp_softintr()
3470 if (!pp->softintr_pending) { in ecpp_softintr()
3471 mutex_exit(&pp->umutex); in ecpp_softintr()
3474 pp->softintr_pending = 0; in ecpp_softintr()
3477 if (pp->softintr_flags & ECPP_SOFTINTR_PIONEXT) { in ecpp_softintr()
3478 pp->softintr_flags &= ~ECPP_SOFTINTR_PIONEXT; in ecpp_softintr()
3484 if (ecpp_check_status(pp) == SUCCESS) { in ecpp_softintr()
3485 pp->e_busy = ECPP_BUSY; in ecpp_softintr()
3489 if (pp->isr_reattempt_high < ecpp_reattempts) { in ecpp_softintr()
3490 pp->isr_reattempt_high = ecpp_reattempts; in ecpp_softintr()
3492 } while (++ecpp_reattempts < pp->wait_for_busy); in ecpp_softintr()
3495 if (pp->e_busy == ECPP_ERR) { in ecpp_softintr()
3496 ++pp->ctx_cf; /* check status fail */ in ecpp_softintr()
3497 ecpp_error(pp->dip, "ecpp_softintr:check_status:F: " in ecpp_softintr()
3499 DSR_READ(pp), pp->joblen, pp->ctx_cf); in ecpp_softintr()
3505 unx_len = pp->last_byte - pp->next_byte; in ecpp_softintr()
3506 if (pp->msg != NULL) { in ecpp_softintr()
3507 ecpp_putback_untransfered(pp, in ecpp_softintr()
3508 (void *)pp->msg->b_rptr, unx_len); in ecpp_softintr()
3509 ecpp_error(pp->dip, in ecpp_softintr()
3512 freemsg(pp->msg); in ecpp_softintr()
3513 pp->msg = NULL; in ecpp_softintr()
3515 ecpp_putback_untransfered(pp, in ecpp_softintr()
3516 pp->next_byte, unx_len); in ecpp_softintr()
3517 ecpp_error(pp->dip, in ecpp_softintr()
3521 ecpp_xfer_cleanup(pp); in ecpp_softintr()
3522 pp->e_busy = ECPP_ERR; in ecpp_softintr()
3523 qenable(pp->writeq); in ecpp_softintr()
3526 pp->e_busy = ECPP_BUSY; in ecpp_softintr()
3527 (void) ecpp_pio_writeb(pp); in ecpp_softintr()
3531 mutex_exit(&pp->umutex); in ecpp_softintr()
3543 ecpp_xfer_cleanup(struct ecppunit *pp) in ecpp_xfer_cleanup() argument
3545 ASSERT(mutex_owned(&pp->umutex)); in ecpp_xfer_cleanup()
3551 if (pp->msg != NULL) { in ecpp_xfer_cleanup()
3552 freemsg(pp->msg); in ecpp_xfer_cleanup()
3553 pp->msg = NULL; in ecpp_xfer_cleanup()
3557 pp->e_busy = ECPP_IDLE; in ecpp_xfer_cleanup()
3560 ecpp_untimeout_unblock(pp, &pp->timeout_id); in ecpp_xfer_cleanup()
3562 qenable(pp->writeq); in ecpp_xfer_cleanup()
3614 struct ecppunit *pp = arg; in ecpp_xfer_timeout() local
3620 mutex_enter(&pp->umutex); in ecpp_xfer_timeout()
3622 if (pp->timeout_id == 0) { in ecpp_xfer_timeout()
3623 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3626 pp->timeout_id = 0; in ecpp_xfer_timeout()
3629 pp->xfer_tout++; in ecpp_xfer_timeout()
3631 pp->dma_cancelled = TRUE; /* prevent race with isr() */ in ecpp_xfer_timeout()
3633 if (COMPAT_PIO(pp)) { in ecpp_xfer_timeout()
3639 dcr = DCR_READ(pp); in ecpp_xfer_timeout()
3640 (void) dcr_write(pp, dcr & ~(ECPP_REV_DIR | ECPP_INTR_EN)); in ecpp_xfer_timeout()
3641 ECPP_MASK_INTR(pp); in ecpp_xfer_timeout()
3643 pp->softintr_pending = 0; in ecpp_xfer_timeout()
3644 unx_len = pp->last_byte - pp->next_byte; in ecpp_xfer_timeout()
3645 ecpp_error(pp->dip, "xfer_timeout: unx_len=%d\n", unx_len); in ecpp_xfer_timeout()
3648 unx_addr = pp->next_byte; in ecpp_xfer_timeout()
3650 ecpp_xfer_cleanup(pp); in ecpp_xfer_timeout()
3651 qenable(pp->writeq); in ecpp_xfer_timeout()
3652 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3662 if (ECPP_DMA_STOP(pp, &unx_len) == FAILURE) { in ecpp_xfer_timeout()
3663 ecpp_error(pp->dip, in ecpp_xfer_timeout()
3667 ecpp_error(pp->dip, "xfer_timeout: unx_len=%d\n", unx_len); in ecpp_xfer_timeout()
3669 if (ddi_dma_unbind_handle(pp->dma_handle) == DDI_FAILURE) { in ecpp_xfer_timeout()
3670 ecpp_error(pp->dip, in ecpp_xfer_timeout()
3680 qenable(pp->writeq); in ecpp_xfer_timeout()
3681 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3684 xferd = pp->dma_cookie.dmac_size - unx_len; in ecpp_xfer_timeout()
3685 pp->resid -= xferd; in ecpp_xfer_timeout()
3686 unx_len = pp->resid; in ecpp_xfer_timeout()
3689 pp->obytes[pp->current_mode] += xferd; in ecpp_xfer_timeout()
3690 pp->joblen += xferd; in ecpp_xfer_timeout()
3692 if (pp->msg != NULL) { in ecpp_xfer_timeout()
3693 unx_addr = (caddr_t)pp->msg->b_wptr - unx_len; in ecpp_xfer_timeout()
3695 unx_addr = pp->ioblock + in ecpp_xfer_timeout()
3696 (pp->xfercnt - unx_len); in ecpp_xfer_timeout()
3703 ecpp_putback_untransfered(pp, (caddr_t)unx_addr, unx_len); in ecpp_xfer_timeout()
3705 if (pp->msg != NULL) { in ecpp_xfer_timeout()
3706 freemsg(pp->msg); in ecpp_xfer_timeout()
3707 pp->msg = NULL; in ecpp_xfer_timeout()
3711 pp->timeout_error = 1; in ecpp_xfer_timeout()
3712 pp->e_busy = ECPP_ERR; in ecpp_xfer_timeout()
3713 fifo_timer_id = pp->fifo_timer_id; in ecpp_xfer_timeout()
3714 pp->fifo_timer_id = 0; in ecpp_xfer_timeout()
3716 qenable(pp->writeq); in ecpp_xfer_timeout()
3718 mutex_exit(&pp->umutex); in ecpp_xfer_timeout()
3726 ecpp_putback_untransfered(struct ecppunit *pp, void *startp, uint_t len) in ecpp_putback_untransfered() argument
3730 ecpp_error(pp->dip, "ecpp_putback_untrans=%d\n", len); in ecpp_putback_untransfered()
3738 ecpp_error(pp->dip, in ecpp_putback_untransfered()
3746 if (!putbq(pp->writeq, new_mp)) { in ecpp_putback_untransfered()
3752 ecr_write(struct ecppunit *pp, uint8_t ecr_byte) in ecr_write() argument
3757 ECR_WRITE(pp, ecr_byte); in ecr_write()
3759 current_ecr = ECR_READ(pp); in ecr_write()
3769 dcr_write(struct ecppunit *pp, uint8_t dcr_byte) in dcr_write() argument
3775 DCR_WRITE(pp, dcr_byte); in dcr_write()
3777 current_dcr = DCR_READ(pp); in dcr_write()
3783 ecpp_error(pp->dip, in dcr_write()
3791 ecpp_reset_port_regs(struct ecppunit *pp) in ecpp_reset_port_regs() argument
3793 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in ecpp_reset_port_regs()
3794 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_reset_port_regs()
3807 struct ecppunit *pp = arg; in ecpp_fifo_timer() local
3811 mutex_enter(&pp->umutex); in ecpp_fifo_timer()
3816 if (pp->fifo_timer_id == 0) { in ecpp_fifo_timer()
3817 ecpp_error(pp->dip, "ecpp_fifo_timer: untimedout\n"); in ecpp_fifo_timer()
3818 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3821 pp->fifo_timer_id = 0; in ecpp_fifo_timer()
3829 ecr = ECR_READ(pp); in ecpp_fifo_timer()
3831 if ((pp->current_mode != ECPP_DIAG_MODE) && in ecpp_fifo_timer()
3833 (pp->ecpp_drain_counter < 10))) { in ecpp_fifo_timer()
3835 ecpp_error(pp->dip, in ecpp_fifo_timer()
3837 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3839 pp->fifo_timer_id = timeout(ecpp_fifo_timer, in ecpp_fifo_timer()
3840 (caddr_t)pp, drv_usectohz(FIFO_DRAIN_PERIOD)); in ecpp_fifo_timer()
3841 ++pp->ecpp_drain_counter; in ecpp_fifo_timer()
3843 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3847 if (pp->current_mode != ECPP_DIAG_MODE) { in ecpp_fifo_timer()
3852 if (pp->ecpp_drain_counter >= 10) { in ecpp_fifo_timer()
3853 ecpp_error(pp->dip, "ecpp_fifo_timer(%d):" in ecpp_fifo_timer()
3855 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3857 ecpp_error(pp->dip, in ecpp_fifo_timer()
3859 pp->ecpp_drain_counter, ecr); in ecpp_fifo_timer()
3862 pp->ecpp_drain_counter = 0; in ecpp_fifo_timer()
3874 timeout_id = pp->timeout_id; in ecpp_fifo_timer()
3875 pp->timeout_id = 0; in ecpp_fifo_timer()
3878 if (pp->current_mode == ECPP_ECP_MODE || in ecpp_fifo_timer()
3879 pp->current_mode == ECPP_DIAG_MODE || in ecpp_fifo_timer()
3880 COMPAT_DMA(pp)) { in ecpp_fifo_timer()
3885 pp->resid -= pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3888 pp->joblen += pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3889 if (pp->dma_dir == DDI_DMA_WRITE) { in ecpp_fifo_timer()
3890 pp->obytes[pp->current_mode] += in ecpp_fifo_timer()
3891 pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3893 pp->ibytes[pp->current_mode] += in ecpp_fifo_timer()
3894 pp->dma_cookie.dmac_size; in ecpp_fifo_timer()
3900 if (--pp->dma_cookie_count > 0) { in ecpp_fifo_timer()
3902 ddi_dma_nextcookie(pp->dma_handle, in ecpp_fifo_timer()
3903 &pp->dma_cookie); in ecpp_fifo_timer()
3904 } else if (pp->dma_curwin < pp->dma_nwin) { in ecpp_fifo_timer()
3906 if (ddi_dma_getwin(pp->dma_handle, in ecpp_fifo_timer()
3907 pp->dma_curwin, &off, &len, in ecpp_fifo_timer()
3908 &pp->dma_cookie, in ecpp_fifo_timer()
3909 &pp->dma_cookie_count) != DDI_SUCCESS) { in ecpp_fifo_timer()
3910 ecpp_error(pp->dip, in ecpp_fifo_timer()
3915 pp->dma_curwin++; in ecpp_fifo_timer()
3920 ecpp_error(pp->dip, "ecpp_fifo_timer: next addr=%llx len=%d\n", in ecpp_fifo_timer()
3921 pp->dma_cookie.dmac_address, in ecpp_fifo_timer()
3922 pp->dma_cookie.dmac_size); in ecpp_fifo_timer()
3925 if (ECPP_DMA_START(pp) != SUCCESS) { in ecpp_fifo_timer()
3926 ecpp_error(pp->dip, in ecpp_fifo_timer()
3931 (void) ecr_write(pp, (ecr & 0xe0) | in ecpp_fifo_timer()
3934 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3942 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_fifo_timer()
3943 ecpp_error(pp->dip, "ecpp_fifo_timer: unbind failed\n"); in ecpp_fifo_timer()
3945 ecpp_error(pp->dip, "ecpp_fifo_timer: unbind ok\n"); in ecpp_fifo_timer()
3953 if (pp->msg != NULL) { in ecpp_fifo_timer()
3954 freemsg(pp->msg); in ecpp_fifo_timer()
3955 pp->msg = NULL; in ecpp_fifo_timer()
3959 pp->e_busy = ECPP_IDLE; in ecpp_fifo_timer()
3961 qenable(pp->writeq); in ecpp_fifo_timer()
3963 mutex_exit(&pp->umutex); in ecpp_fifo_timer()
3974 ecpp_check_status(struct ecppunit *pp) in ecpp_check_status() argument
3979 if (pp->current_mode == ECPP_ECP_MODE || in ecpp_check_status()
3980 pp->current_mode == ECPP_DIAG_MODE) in ecpp_check_status()
3985 dsr = DSR_READ(pp); in ecpp_check_status()
3987 pp->e_busy = ECPP_ERR; in ecpp_check_status()
4002 struct ecppunit *pp = arg; in ecpp_wsrv_timer() local
4004 ecpp_error(pp->dip, "ecpp_wsrv_timer: starting\n"); in ecpp_wsrv_timer()
4006 mutex_enter(&pp->umutex); in ecpp_wsrv_timer()
4008 if (pp->wsrv_timer_id == 0) { in ecpp_wsrv_timer()
4009 mutex_exit(&pp->umutex); in ecpp_wsrv_timer()
4012 pp->wsrv_timer_id = 0; in ecpp_wsrv_timer()
4015 ecpp_error(pp->dip, "ecpp_wsrv_timer: qenabling...\n"); in ecpp_wsrv_timer()
4017 qenable(pp->writeq); in ecpp_wsrv_timer()
4019 mutex_exit(&pp->umutex); in ecpp_wsrv_timer()
4027 ecpp_backchan_req(struct ecppunit *pp) in ecpp_backchan_req() argument
4032 ecpp_error(pp->dip, "ecpp_backchan_req: allocb failed\n"); in ecpp_backchan_req()
4038 if (!putbq(pp->writeq, mp)) { in ecpp_backchan_req()
4039 ecpp_error(pp->dip, "ecpp_backchan_req:putbq failed\n"); in ecpp_backchan_req()
4052 ecpp_untimeout_unblock(struct ecppunit *pp, timeout_id_t *id) in ecpp_untimeout_unblock() argument
4056 ASSERT(mutex_owned(&pp->umutex)); in ecpp_untimeout_unblock()
4061 mutex_exit(&pp->umutex); in ecpp_untimeout_unblock()
4063 mutex_enter(&pp->umutex); in ecpp_untimeout_unblock()
4071 ecpp_get_prn_ifcap(struct ecppunit *pp) in ecpp_get_prn_ifcap() argument
4078 if (pp->current_mode == ECPP_CENTRONICS || in ecpp_get_prn_ifcap()
4079 pp->current_mode == ECPP_COMPAT_MODE) { in ecpp_get_prn_ifcap()
4081 } else if (pp->current_mode == ECPP_NIBBLE_MODE || in ecpp_get_prn_ifcap()
4082 pp->current_mode == ECPP_ECP_MODE) { in ecpp_get_prn_ifcap()
4093 ecpp_determine_sio_type(struct ecppunit *pp) in ecpp_determine_sio_type() argument
4099 name = ddi_binding_name(pp->dip); in ecpp_determine_sio_type()
4127 ecpp_1284_init_interface(struct ecppunit *pp) in ecpp_1284_init_interface() argument
4129 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_init_interface()
4135 if (pp->init_seq == TRUE) { in ecpp_1284_init_interface()
4136 DCR_WRITE(pp, ECPP_SLCTIN); in ecpp_1284_init_interface()
4140 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_init_interface()
4142 pp->current_mode = pp->backchannel = ECPP_CENTRONICS; in ecpp_1284_init_interface()
4143 pp->current_phase = ECPP_PHASE_C_IDLE; in ecpp_1284_init_interface()
4144 ECPP_CONFIG_MODE(pp); in ecpp_1284_init_interface()
4145 pp->to_mode[pp->current_mode]++; in ecpp_1284_init_interface()
4147 ecpp_error(pp->dip, "ecpp_1284_init_interface: ok\n"); in ecpp_1284_init_interface()
4154 ecp_negotiation(struct ecppunit *pp) in ecp_negotiation() argument
4160 if (ecpp_1284_negotiation(pp, ECPP_XREQ_ECP, &dsr) == FAILURE) in ecp_negotiation()
4166 ecpp_error(pp->dip, in ecp_negotiation()
4167 "ecp_negotiation: failed event 5 %x\n", DSR_READ(pp)); in ecp_negotiation()
4168 (void) ecpp_1284_termination(pp); in ecp_negotiation()
4173 pp->current_phase = ECPP_PHASE_ECP_SETUP; in ecp_negotiation()
4176 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in ecp_negotiation()
4179 if (wait_dsr(pp, ECPP_PE, ECPP_PE, 35000) < 0) { in ecp_negotiation()
4180 ecpp_error(pp->dip, in ecp_negotiation()
4181 "ecp_negotiation: failed event 31 %x\n", DSR_READ(pp)); in ecp_negotiation()
4182 (void) ecpp_1284_termination(pp); in ecp_negotiation()
4187 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecp_negotiation()
4190 pp->current_mode = ECPP_ECP_MODE; in ecp_negotiation()
4191 pp->backchannel = ECPP_ECP_MODE; in ecp_negotiation()
4193 ecpp_error(pp->dip, "ecp_negotiation: ok\n"); in ecp_negotiation()
4202 nibble_negotiation(struct ecppunit *pp) in nibble_negotiation() argument
4206 if (ecpp_1284_negotiation(pp, ECPP_XREQ_NIBBLE, &dsr) == FAILURE) { in nibble_negotiation()
4215 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in nibble_negotiation()
4217 pp->current_phase = ECPP_PHASE_NIBT_NAVAIL; in nibble_negotiation()
4221 pp->current_mode = ECPP_NIBBLE_MODE; in nibble_negotiation()
4222 pp->backchannel = ECPP_NIBBLE_MODE; in nibble_negotiation()
4224 ecpp_error(pp->dip, "nibble_negotiation: ok (phase=%x)\n", in nibble_negotiation()
4225 pp->current_phase); in nibble_negotiation()
4237 wait_dsr(struct ecppunit *pp, uint8_t mask, uint8_t val, int ptimeout) in wait_dsr() argument
4239 while (((DSR_READ(pp) & mask) != val) && ptimeout--) { in wait_dsr()
4260 ecpp_1284_negotiation(struct ecppunit *pp, uint8_t xreq, uint8_t *rdsr) in ecpp_1284_negotiation() argument
4264 ecpp_error(pp->dip, "nego(%x): entering...\n", xreq); in ecpp_1284_negotiation()
4267 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4270 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_negotiation()
4272 pp->current_phase = ECPP_PHASE_NEGO; in ecpp_1284_negotiation()
4275 DATAR_WRITE(pp, xreq); in ecpp_1284_negotiation()
4278 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in ecpp_1284_negotiation()
4286 if (wait_dsr(pp, ECPP_nERR | ECPP_SLCT | ECPP_PE | ECPP_nACK, in ecpp_1284_negotiation()
4289 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4290 "nego(%x): failed event 2 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4291 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4299 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX | ECPP_STB); in ecpp_1284_negotiation()
4307 DCR_WRITE(pp, ECPP_nINIT); in ecpp_1284_negotiation()
4319 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in ecpp_1284_negotiation()
4321 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4322 "nego(%x): failed event 6 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4323 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4327 if ((DSR_READ(pp) & ECPP_SLCT) != xflag) { in ecpp_1284_negotiation()
4329 ecpp_error(pp->dip, in ecpp_1284_negotiation()
4330 "nego(%x): failed event 5 %x\n", xreq, DSR_READ(pp)); in ecpp_1284_negotiation()
4331 (void) ecpp_1284_termination(pp); in ecpp_1284_negotiation()
4336 *rdsr = DSR_READ(pp); in ecpp_1284_negotiation()
4356 ecpp_1284_termination(struct ecppunit *pp) in ecpp_1284_termination() argument
4358 int previous_mode = pp->current_mode; in ecpp_1284_termination()
4360 if (((pp->current_mode == ECPP_COMPAT_MODE || in ecpp_1284_termination()
4361 pp->current_mode == ECPP_CENTRONICS) && in ecpp_1284_termination()
4362 pp->current_phase == ECPP_PHASE_C_IDLE) || in ecpp_1284_termination()
4363 pp->current_mode == ECPP_DIAG_MODE) { in ecpp_1284_termination()
4364 ecpp_error(pp->dip, "termination: not needed\n"); in ecpp_1284_termination()
4369 ECPP_MASK_INTR(pp); in ecpp_1284_termination()
4370 ECR_WRITE(pp, ECPP_INTR_SRV | ECPP_INTR_MASK | ECR_mode_001); in ecpp_1284_termination()
4372 pp->current_mode = ECPP_COMPAT_MODE; /* needed by next function */ in ecpp_1284_termination()
4374 ECPP_CONFIG_MODE(pp); in ecpp_1284_termination()
4381 DCR_WRITE(pp, 0); in ecpp_1284_termination()
4386 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4392 pp->current_phase = ECPP_PHASE_TERM; in ecpp_1284_termination()
4395 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4399 if (wait_dsr(pp, ECPP_nERR | ECPP_nBUSY | ECPP_nACK, in ecpp_1284_termination()
4401 ecpp_error(pp->dip, in ecpp_1284_termination()
4402 "termination: failed events 23,24 %x\n", DSR_READ(pp)); in ecpp_1284_termination()
4403 ecpp_1284_init_interface(pp); in ecpp_1284_termination()
4410 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN | ECPP_AFX); in ecpp_1284_termination()
4415 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in ecpp_1284_termination()
4416 ecpp_error(pp->dip, in ecpp_1284_termination()
4417 "termination: failed event 27 %x\n", DSR_READ(pp)); in ecpp_1284_termination()
4418 ecpp_1284_init_interface(pp); in ecpp_1284_termination()
4425 DCR_WRITE(pp, ECPP_nINIT | ECPP_SLCTIN); in ecpp_1284_termination()
4431 pp->current_phase = ECPP_PHASE_C_IDLE; in ecpp_1284_termination()
4433 ecpp_error(pp->dip, "termination: completed %x %x\n", in ecpp_1284_termination()
4434 DSR_READ(pp), DCR_READ(pp)); in ecpp_1284_termination()
4443 ecp_peripheral2host(struct ecppunit *pp) in ecp_peripheral2host() argument
4449 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_peripheral2host()
4450 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecp_peripheral2host()
4462 if (pp->nread > 0) { in ecp_peripheral2host()
4463 len = min(pp->nread, ECP_REV_BLKSZ_MAX); in ecp_peripheral2host()
4468 pp->nread = 0; /* clear after use */ in ecp_peripheral2host()
4475 ecpp_error(pp->dip, in ecp_peripheral2host()
4488 pp->msg = mp; in ecp_peripheral2host()
4489 pp->e_busy = ECPP_BUSY; in ecp_peripheral2host()
4490 pp->dma_dir = DDI_DMA_READ; in ecp_peripheral2host()
4491 pp->current_phase = ECPP_PHASE_ECP_REV_XFER; in ecp_peripheral2host()
4493 if (ecpp_init_dma_xfer(pp, (caddr_t)mp->b_rptr, len) == FAILURE) { in ecp_peripheral2host()
4531 xfer_time = max((1000 * len) / pp->ecp_rev_speed, ECP_REV_MINTOUT); in ecp_peripheral2host()
4533 pp->rev_timeout_cnt = (pp->hw == &x86) ? 1 : in ecp_peripheral2host()
4534 max(xfer_time / pp->rev_watchdog, 1); in ecp_peripheral2host()
4536 pp->rev_timeout_cnt = (pp->hw == &m1553) ? 1 : in ecp_peripheral2host()
4537 max(xfer_time / pp->rev_watchdog, 1); in ecp_peripheral2host()
4540 pp->last_dmacnt = len; /* nothing xferred yet */ in ecp_peripheral2host()
4542 pp->timeout_id = timeout(ecpp_ecp_read_timeout, (caddr_t)pp, in ecp_peripheral2host()
4543 drv_usectohz(pp->rev_watchdog * 1000)); in ecp_peripheral2host()
4545 ecpp_error(pp->dip, "ecp_periph2host: DMA started len=%d\n" in ecp_peripheral2host()
4547 len, xfer_time, pp->rev_watchdog, pp->rev_timeout_cnt); in ecp_peripheral2host()
4555 pp->e_busy = ECPP_IDLE; in ecp_peripheral2host()
4556 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecp_peripheral2host()
4570 struct ecppunit *pp = arg; in ecpp_ecp_read_timeout() local
4573 mutex_enter(&pp->umutex); in ecpp_ecp_read_timeout()
4575 if (pp->timeout_id == 0) { in ecpp_ecp_read_timeout()
4576 mutex_exit(&pp->umutex); in ecpp_ecp_read_timeout()
4579 pp->timeout_id = 0; in ecpp_ecp_read_timeout()
4582 if (--pp->rev_timeout_cnt == 0) { in ecpp_ecp_read_timeout()
4586 ecpp_error(pp->dip, "ecp_read_timeout: timeout\n"); in ecpp_ecp_read_timeout()
4587 pp->xfer_tout++; in ecpp_ecp_read_timeout()
4588 ecpp_ecp_read_completion(pp); in ecpp_ecp_read_timeout()
4594 dmacnt = ECPP_DMA_GETCNT(pp); in ecpp_ecp_read_timeout()
4595 if (dmacnt - pp->last_dmacnt == 0) { in ecpp_ecp_read_timeout()
4600 ecpp_error(pp->dip, "ecp_read_timeout: no progress\n"); in ecpp_ecp_read_timeout()
4601 pp->xfer_tout++; in ecpp_ecp_read_timeout()
4602 ecpp_ecp_read_completion(pp); in ecpp_ecp_read_timeout()
4607 ecpp_error(pp->dip, "ecp_read_timeout: restarting\n"); in ecpp_ecp_read_timeout()
4608 pp->last_dmacnt = dmacnt; in ecpp_ecp_read_timeout()
4609 pp->timeout_id = timeout(ecpp_ecp_read_timeout, in ecpp_ecp_read_timeout()
4610 (caddr_t)pp, in ecpp_ecp_read_timeout()
4611 drv_usectohz(pp->rev_watchdog * 1000)); in ecpp_ecp_read_timeout()
4615 mutex_exit(&pp->umutex); in ecpp_ecp_read_timeout()
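
ecpp_ecp_read_timeout() above is a DMA watchdog: each tick it compares the current DMA count against the value saved on the previous tick, completes the read once rev_timeout_cnt ticks have elapsed or no progress was made, and otherwise re-arms itself. A self-contained sketch of that pattern under hypothetical names (get_dma_count, complete_transfer and rearm_timer are stand-ins, not driver functions):

    #include <stddef.h>

    /* hypothetical per-transfer state, loosely modeled on the fields used above */
    struct xfer_watchdog {
        size_t  last_count;   /* DMA count seen on the previous tick */
        int     ticks_left;   /* analogous to pp->rev_timeout_cnt */
    };

    extern size_t get_dma_count(void);     /* stand-in for ECPP_DMA_GETCNT() */
    extern void complete_transfer(void);   /* stand-in for ecpp_ecp_read_completion() */
    extern void rearm_timer(void);         /* stand-in for the timeout(9F) re-arm */

    /* One watchdog tick: finish on timeout or stall, otherwise re-arm. */
    void
    watchdog_tick(struct xfer_watchdog *w)
    {
        size_t count = get_dma_count();

        if (--w->ticks_left == 0 || count == w->last_count) {
            /* overall timeout expired, or no bytes moved since last tick */
            complete_transfer();
            return;
        }
        w->last_count = count;   /* progress was made; check again later */
        rearm_timer();
    }
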
4623 ecpp_ecp_read_completion(struct ecppunit *pp) in ecpp_ecp_read_completion() argument
4628 ASSERT(mutex_owned(&pp->umutex)); in ecpp_ecp_read_completion()
4629 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecpp_ecp_read_completion()
4630 pp->current_phase == ECPP_PHASE_ECP_REV_XFER); in ecpp_ecp_read_completion()
4631 ASSERT(pp->msg != NULL); in ecpp_ecp_read_completion()
4636 if (ECPP_DMA_STOP(pp, &unx_len) == FAILURE) { in ecpp_ecp_read_completion()
4637 unx_len = pp->resid; in ecpp_ecp_read_completion()
4638 ecpp_error(pp->dip, "ecp_read_completion: failed dma_stop\n"); in ecpp_ecp_read_completion()
4641 mp = pp->msg; in ecpp_ecp_read_completion()
4642 xfer_len = pp->resid - unx_len; /* how much data was transferred */ in ecpp_ecp_read_completion()
4644 if (ddi_dma_unbind_handle(pp->dma_handle) != DDI_SUCCESS) { in ecpp_ecp_read_completion()
4645 ecpp_error(pp->dip, "ecp_read_completion: unbind failed.\n"); in ecpp_ecp_read_completion()
4648 ecpp_error(pp->dip, "ecp_read_completion: xfered %d bytes of %d\n", in ecpp_ecp_read_completion()
4649 xfer_len, pp->resid); in ecpp_ecp_read_completion()
4652 pp->msg = NULL; in ecpp_ecp_read_completion()
4653 pp->resid -= xfer_len; in ecpp_ecp_read_completion()
4654 pp->ibytes[pp->current_mode] += xfer_len; in ecpp_ecp_read_completion()
4655 pp->e_busy = ECPP_IDLE; in ecpp_ecp_read_completion()
4656 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecpp_ecp_read_completion()
4662 if (canputnext(pp->readq)) { in ecpp_ecp_read_completion()
4663 mutex_exit(&pp->umutex); in ecpp_ecp_read_completion()
4664 putnext(pp->readq, mp); in ecpp_ecp_read_completion()
4665 mutex_enter(&pp->umutex); in ecpp_ecp_read_completion()
4667 ecpp_error(pp->dip, "ecp_read_completion: fail canputnext\n"); in ecpp_ecp_read_completion()
4668 if (!putq(pp->readq, mp)) { in ecpp_ecp_read_completion()
4674 if (!(ECR_READ(pp) & ECPP_FIFO_EMPTY)) { in ecpp_ecp_read_completion()
4675 (void) ecpp_backchan_req(pp); in ecpp_ecp_read_completion()
4678 qenable(pp->writeq); in ecpp_ecp_read_completion()
4685 nibble_peripheral2host(struct ecppunit *pp, uint8_t *byte) in nibble_peripheral2host() argument
4695 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX); in nibble_peripheral2host()
4700 if (wait_dsr(pp, ECPP_nACK, 0, 35000) < 0) { in nibble_peripheral2host()
4701 ecpp_error(pp->dip, in nibble_peripheral2host()
4703 i + 1, DSR_READ(pp)); in nibble_peripheral2host()
4704 (void) ecpp_1284_termination(pp); in nibble_peripheral2host()
4708 n[i] = DSR_READ(pp); /* get a nibble */ in nibble_peripheral2host()
4711 DCR_WRITE(pp, ECPP_nINIT); in nibble_peripheral2host()
4716 if (wait_dsr(pp, ECPP_nACK, ECPP_nACK, 35000) < 0) { in nibble_peripheral2host()
4717 ecpp_error(pp->dip, in nibble_peripheral2host()
4719 i + 1, DSR_READ(pp)); in nibble_peripheral2host()
4720 (void) ecpp_1284_termination(pp); in nibble_peripheral2host()
4729 pp->ibytes[ECPP_NIBBLE_MODE]++; in nibble_peripheral2host()
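
nibble_peripheral2host() above clocks two status-register reads per data byte, since IEEE 1284 nibble mode carries each byte as two 4-bit halves, least-significant nibble first. A small sketch of just the byte-assembly step, assuming hypothetical read_one_nibble() and status_to_nibble() helpers rather than the driver's handshake and DSR decoding:

    #include <stdint.h>

    /* hypothetical handshake: clock one nibble from the peripheral, return its status image */
    extern int read_one_nibble(uint8_t *dsr);
    /* hypothetical helper: extract the 4 data bits encoded in a status value */
    extern uint8_t status_to_nibble(uint8_t dsr);

    int
    nibble_read_byte(uint8_t *byte)
    {
        uint8_t dsr, lo, hi;

        if (read_one_nibble(&dsr) != 0)
            return (-1);
        lo = status_to_nibble(dsr);

        if (read_one_nibble(&dsr) != 0)
            return (-1);
        hi = status_to_nibble(dsr);

        *byte = (uint8_t)((hi << 4) | lo);   /* low nibble arrived first */
        return (0);
    }
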
4737 ecpp_peripheral2host(struct ecppunit *pp) in ecpp_peripheral2host() argument
4739 if (!canputnext(pp->readq)) { in ecpp_peripheral2host()
4740 ecpp_error(pp->dip, "ecpp_peripheral2host: readq full\n"); in ecpp_peripheral2host()
4744 switch (pp->backchannel) { in ecpp_peripheral2host()
4750 ASSERT(pp->current_mode == ECPP_NIBBLE_MODE); in ecpp_peripheral2host()
4755 DCR_WRITE(pp, ECPP_nINIT); in ecpp_peripheral2host()
4758 if (wait_dsr(pp, ECPP_PE, 0, 35000) < 0) { in ecpp_peripheral2host()
4759 ecpp_error(pp->dip, in ecpp_peripheral2host()
4761 DSR_READ(pp)); in ecpp_peripheral2host()
4762 (void) ecpp_1284_termination(pp); in ecpp_peripheral2host()
4766 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in ecpp_peripheral2host()
4769 return (ecpp_idle_phase(pp)); in ecpp_peripheral2host()
4772 if ((pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE) && in ecpp_peripheral2host()
4773 (ecp_forward2reverse(pp) == FAILURE)) { in ecpp_peripheral2host()
4777 return (ecp_peripheral2host(pp)); /* start the transfer */ in ecpp_peripheral2host()
4783 if (ECR_READ(pp) & ECPP_FIFO_EMPTY) { in ecpp_peripheral2host()
4784 ecpp_error(pp->dip, "ecpp_periph2host: fifo empty\n"); in ecpp_peripheral2host()
4790 ecpp_error(pp->dip, in ecpp_peripheral2host()
4799 while (i-- && (!(ECR_READ(pp) & ECPP_FIFO_EMPTY))) { in ecpp_peripheral2host()
4800 *mp->b_wptr++ = TFIFO_READ(pp); in ecpp_peripheral2host()
4804 if (canputnext(pp->readq)) { in ecpp_peripheral2host()
4805 mutex_exit(&pp->umutex); in ecpp_peripheral2host()
4807 ecpp_error(pp->dip, in ecpp_peripheral2host()
4810 putnext(pp->readq, mp); in ecpp_peripheral2host()
4811 mutex_enter(&pp->umutex); in ecpp_peripheral2host()
4814 ecpp_error(pp->dip, in ecpp_peripheral2host()
4822 ecpp_error(pp->dip, "ecpp_peripheraltohost: illegal back"); in ecpp_peripheral2host()
4833 ecp_forward2reverse(struct ecppunit *pp) in ecp_forward2reverse() argument
4835 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_forward2reverse()
4836 pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE); in ecp_forward2reverse()
4839 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_forward2reverse()
4842 DCR_WRITE(pp, ECPP_REV_DIR | ECPP_nINIT); in ecp_forward2reverse()
4845 ECR_WRITE(pp, ECR_mode_011 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_forward2reverse()
4850 DCR_WRITE(pp, ECPP_REV_DIR); in ecp_forward2reverse()
4854 pp->current_phase = ECPP_PHASE_ECP_REV_IDLE; in ecp_forward2reverse()
4856 ecpp_error(pp->dip, "ecp_forward2reverse ok\n"); in ecp_forward2reverse()
4867 ecp_reverse2forward(struct ecppunit *pp) in ecp_reverse2forward() argument
4869 ASSERT(pp->current_mode == ECPP_ECP_MODE && in ecp_reverse2forward()
4870 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecp_reverse2forward()
4873 DCR_WRITE(pp, ECPP_REV_DIR | ECPP_nINIT); in ecp_reverse2forward()
4879 if (wait_dsr(pp, ECPP_PE, ECPP_PE, 35000) < 0) { in ecp_reverse2forward()
4880 ecpp_error(pp->dip, in ecp_reverse2forward()
4881 "ecp_reverse2forward: failed event 49 %x\n", DSR_READ(pp)); in ecp_reverse2forward()
4882 (void) ecpp_1284_termination(pp); in ecp_reverse2forward()
4887 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_reverse2forward()
4890 DCR_WRITE(pp, ECPP_nINIT); in ecp_reverse2forward()
4893 ECR_WRITE(pp, ECR_mode_011 | ECPP_INTR_SRV | ECPP_INTR_MASK); in ecp_reverse2forward()
4895 pp->current_phase = ECPP_PHASE_ECP_FWD_IDLE; in ecp_reverse2forward()
4897 ecpp_error(pp->dip, "ecp_reverse2forward ok\n"); in ecp_reverse2forward()
4907 ecpp_default_negotiation(struct ecppunit *pp) in ecpp_default_negotiation() argument
4909 if (!noecp && (ecpp_mode_negotiation(pp, ECPP_ECP_MODE) == SUCCESS)) { in ecpp_default_negotiation()
4911 pp->io_mode = (pp->fast_compat == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4913 } else if (ecpp_mode_negotiation(pp, ECPP_NIBBLE_MODE) == SUCCESS) { in ecpp_default_negotiation()
4915 pp->io_mode = (pp->fast_compat == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4918 pp->io_mode = in ecpp_default_negotiation()
4919 (pp->fast_centronics == TRUE) ? ECPP_DMA : ECPP_PIO; in ecpp_default_negotiation()
4921 ECPP_CONFIG_MODE(pp); in ecpp_default_negotiation()
4928 ecpp_mode_negotiation(struct ecppunit *pp, uchar_t newmode) in ecpp_mode_negotiation() argument
4931 ASSERT(pp->current_mode == ECPP_CENTRONICS || in ecpp_mode_negotiation()
4932 pp->current_mode == ECPP_COMPAT_MODE || in ecpp_mode_negotiation()
4933 pp->current_mode == ECPP_NIBBLE_MODE || in ecpp_mode_negotiation()
4934 pp->current_mode == ECPP_ECP_MODE || in ecpp_mode_negotiation()
4935 pp->current_mode == ECPP_DIAG_MODE); in ecpp_mode_negotiation()
4937 if (pp->current_mode == newmode) { in ecpp_mode_negotiation()
4942 if ((pp->current_mode == ECPP_ECP_MODE) && in ecpp_mode_negotiation()
4943 (pp->current_phase != ECPP_PHASE_ECP_FWD_IDLE)) { in ecpp_mode_negotiation()
4945 (void) ecp_reverse2forward(pp); in ecpp_mode_negotiation()
4950 (void) ecpp_1284_termination(pp); in ecpp_mode_negotiation()
4953 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_mode_negotiation()
4955 pp->current_mode = ECPP_CENTRONICS; in ecpp_mode_negotiation()
4956 pp->backchannel = ECPP_CENTRONICS; in ecpp_mode_negotiation()
4957 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4959 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4964 if (pp->current_mode == ECPP_NIBBLE_MODE) { in ecpp_mode_negotiation()
4965 if (ecpp_1284_termination(pp) == SUCCESS) { in ecpp_mode_negotiation()
4966 pp->current_mode = ECPP_COMPAT_MODE; in ecpp_mode_negotiation()
4967 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4968 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4969 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4976 if ((nibble_negotiation(pp) == SUCCESS) && in ecpp_mode_negotiation()
4977 (ecpp_1284_termination(pp) == SUCCESS)) { in ecpp_mode_negotiation()
4978 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4979 pp->current_mode = ECPP_COMPAT_MODE; in ecpp_mode_negotiation()
4980 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4981 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4988 if (nibble_negotiation(pp) == FAILURE) { in ecpp_mode_negotiation()
4992 pp->backchannel = ECPP_NIBBLE_MODE; in ecpp_mode_negotiation()
4993 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
4994 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
4999 if (pp->noecpregs) in ecpp_mode_negotiation()
5001 if (ecp_negotiation(pp) == FAILURE) { in ecpp_mode_negotiation()
5008 DCR_WRITE(pp, ECPP_nINIT); in ecpp_mode_negotiation()
5010 if (ecr_write(pp, ECR_mode_011 | in ecpp_mode_negotiation()
5012 ecpp_error(pp->dip, "mode_nego:ECP: failed w/ecr\n"); in ecpp_mode_negotiation()
5016 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
5017 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
5026 (void) ecpp_1284_termination(pp); in ecpp_mode_negotiation()
5029 if (ecr_write(pp, ECR_mode_001 | in ecpp_mode_negotiation()
5031 ecpp_error(pp->dip, "put to TFIFO: failed w/ecr\n"); in ecpp_mode_negotiation()
5035 pp->current_mode = ECPP_DIAG_MODE; in ecpp_mode_negotiation()
5036 pp->backchannel = ECPP_DIAG_MODE; in ecpp_mode_negotiation()
5037 ECPP_CONFIG_MODE(pp); in ecpp_mode_negotiation()
5038 pp->to_mode[pp->current_mode]++; in ecpp_mode_negotiation()
5043 ecpp_error(pp->dip, in ecpp_mode_negotiation()
5056 ecpp_idle_phase(struct ecppunit *pp) in ecpp_idle_phase() argument
5063 if (!canputnext(pp->readq)) { in ecpp_idle_phase()
5064 ecpp_error(pp->dip, "ecpp_idle_phase: readq full\n"); in ecpp_idle_phase()
5068 switch (pp->backchannel) { in ecpp_idle_phase()
5073 ecpp_error(pp->dip, "ecpp_idle_phase: compat idle\n"); in ecpp_idle_phase()
5081 ecpp_error(pp->dip, "ecpp_idle_phase: nibble backchannel\n"); in ecpp_idle_phase()
5082 if ((pp->current_mode != ECPP_NIBBLE_MODE) && in ecpp_idle_phase()
5083 (ecpp_mode_negotiation(pp, ECPP_NIBBLE_MODE) == FAILURE)) { in ecpp_idle_phase()
5087 rval = read_nibble_backchan(pp); in ecpp_idle_phase()
5090 if (pp->current_phase == ECPP_PHASE_NIBT_NAVAIL && in ecpp_idle_phase()
5091 canputnext(pp->readq)) { in ecpp_idle_phase()
5092 ecpp_error(pp->dip, "ecpp_idle_phase: going revidle\n"); in ecpp_idle_phase()
5098 DCR_WRITE(pp, ECPP_nINIT | ECPP_AFX | ECPP_INTR_EN); in ecpp_idle_phase()
5100 ECPP_UNMASK_INTR(pp); in ecpp_idle_phase()
5110 ecpp_error(pp->dip, "ecpp_idle_phase: ECP forward\n"); in ecpp_idle_phase()
5112 ASSERT(pp->current_phase == ECPP_PHASE_ECP_FWD_IDLE || in ecpp_idle_phase()
5113 pp->current_phase == ECPP_PHASE_ECP_REV_IDLE); in ecpp_idle_phase()
5116 if ((pp->current_phase == ECPP_PHASE_ECP_REV_IDLE) && in ecpp_idle_phase()
5117 (ecp_reverse2forward(pp) == FAILURE)) { in ecpp_idle_phase()
5125 if ((DSR_READ(pp) & ECPP_nERR) == 0) { in ecpp_idle_phase()
5126 (void) ecpp_backchan_req(pp); in ecpp_idle_phase()
5128 ECR_WRITE(pp, in ecpp_idle_phase()
5129 ECR_READ(pp) & ~ECPP_INTR_MASK | ECPP_INTR_SRV); in ecpp_idle_phase()
5131 ECPP_UNMASK_INTR(pp); in ecpp_idle_phase()
5137 ecpp_error(pp->dip, "ecpp_idle_phase: illegal backchannel"); in ecpp_idle_phase()
5153 read_nibble_backchan(struct ecppunit *pp) in read_nibble_backchan() argument
5159 ASSERT(pp->current_mode == ECPP_NIBBLE_MODE); in read_nibble_backchan()
5161 pp->current_phase = (DSR_READ(pp) & (ECPP_nERR | ECPP_PE)) in read_nibble_backchan()
5164 ecpp_error(pp->dip, "read_nibble_backchan: %x\n", DSR_READ(pp)); in read_nibble_backchan()
5170 while (pp->current_phase == ECPP_PHASE_NIBT_AVAIL && rval == SUCCESS) { in read_nibble_backchan()
5172 if (!canputnext(pp->readq)) { in read_nibble_backchan()
5173 ecpp_error(pp->dip, in read_nibble_backchan()
5179 ecpp_error(pp->dip, in read_nibble_backchan()
5186 while (i-- && !(DSR_READ(pp) & ECPP_nERR)) { in read_nibble_backchan()
5187 if (nibble_peripheral2host(pp, mp->b_wptr) != SUCCESS) { in read_nibble_backchan()
5194 pp->current_phase = (DSR_READ(pp) & (ECPP_nERR | ECPP_PE)) in read_nibble_backchan()
5199 ecpp_error(pp->dip, in read_nibble_backchan()
5202 pp->nread = 0; in read_nibble_backchan()
5203 mutex_exit(&pp->umutex); in read_nibble_backchan()
5204 putnext(pp->readq, mp); in read_nibble_backchan()
5205 mutex_enter(&pp->umutex); in read_nibble_backchan()
5218 devidnib_negotiation(struct ecppunit *pp) in devidnib_negotiation() argument
5222 if (ecpp_1284_negotiation(pp, in devidnib_negotiation()
5232 pp->current_phase = ECPP_PHASE_NIBT_AVAIL; in devidnib_negotiation()
5234 pp->current_phase = ECPP_PHASE_NIBT_NAVAIL; in devidnib_negotiation()
5237 ecpp_error(pp->dip, "ecpp_devidnib_nego: current_phase=%x\n", in devidnib_negotiation()
5238 pp->current_phase); in devidnib_negotiation()
5241 pp->current_mode = ECPP_NIBBLE_MODE; in devidnib_negotiation()
5242 pp->backchannel = ECPP_NIBBLE_MODE; in devidnib_negotiation()
5244 ecpp_error(pp->dip, "ecpp_devidnib_nego: ok\n"); in devidnib_negotiation()
5261 ecpp_getdevid(struct ecppunit *pp, uint8_t *id, int *lenp, int mode) in ecpp_getdevid() argument
5270 if ((pp->current_mode != mode) || (id == NULL)) { in ecpp_getdevid()
5271 if (devidnib_negotiation(pp) == FAILURE) { in ecpp_getdevid()
5276 if (pp->current_phase != ECPP_PHASE_NIBT_AVAIL) { in ecpp_getdevid()
5286 dsr = DSR_READ(pp); in ecpp_getdevid()
5295 (nibble_peripheral2host(pp, &lenhi) == FAILURE) || in ecpp_getdevid()
5297 (nibble_peripheral2host(pp, &lenlo) == FAILURE)) { in ecpp_getdevid()
5298 ecpp_error(pp->dip, in ecpp_getdevid()
5305 ecpp_error(pp->dip, in ecpp_getdevid()
5317 if (nibble_peripheral2host(pp, id++) == FAILURE) in ecpp_getdevid()
5321 dsr = DSR_READ(pp); in ecpp_getdevid()
5323 ecpp_error(pp->dip, in ecpp_getdevid()
5330 (void) ecpp_1284_termination(pp); in ecpp_getdevid()
5351 empty_config_mode(struct ecppunit *pp) in empty_config_mode() argument
5357 empty_mask_intr(struct ecppunit *pp) in empty_mask_intr() argument
5363 x86_getcnt(struct ecppunit *pp) in x86_getcnt() argument
5367 (void) ddi_dmae_getcnt(pp->dip, pp->uh.x86.chn, &count); in x86_getcnt()
5381 pc87332_map_regs(struct ecppunit *pp) in pc87332_map_regs() argument
5383 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->uh.ebus.c_reg, 0, in pc87332_map_regs()
5385 &pp->uh.ebus.c_handle) != DDI_SUCCESS) { in pc87332_map_regs()
5386 ecpp_error(pp->dip, "pc87332_map_regs: failed c_reg\n"); in pc87332_map_regs()
5390 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in pc87332_map_regs()
5391 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in pc87332_map_regs()
5393 ecpp_error(pp->dip, "pc87332_map_regs: failed i_reg\n"); in pc87332_map_regs()
5397 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->f_reg, 0x400, in pc87332_map_regs()
5398 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in pc87332_map_regs()
5400 ecpp_error(pp->dip, "pc87332_map_regs: failed f_reg\n"); in pc87332_map_regs()
5404 if (ddi_regs_map_setup(pp->dip, 2, (caddr_t *)&pp->uh.ebus.dmac, 0, in pc87332_map_regs()
5406 &pp->uh.ebus.d_handle) != DDI_SUCCESS) { in pc87332_map_regs()
5407 ecpp_error(pp->dip, "pc87332_map_regs: failed dmac\n"); in pc87332_map_regs()
5414 pc87332_unmap_regs(pp); in pc87332_map_regs()
5419 pc87332_unmap_regs(struct ecppunit *pp) in pc87332_unmap_regs() argument
5421 if (pp->uh.ebus.c_handle) { in pc87332_unmap_regs()
5422 ddi_regs_map_free(&pp->uh.ebus.c_handle); in pc87332_unmap_regs()
5424 if (pp->uh.ebus.d_handle) { in pc87332_unmap_regs()
5425 ddi_regs_map_free(&pp->uh.ebus.d_handle); in pc87332_unmap_regs()
5427 if (pp->i_handle) { in pc87332_unmap_regs()
5428 ddi_regs_map_free(&pp->i_handle); in pc87332_unmap_regs()
5430 if (pp->f_handle) { in pc87332_unmap_regs()
5431 ddi_regs_map_free(&pp->f_handle); in pc87332_unmap_regs()
5436 pc87332_read_config_reg(struct ecppunit *pp, uint8_t reg_num) in pc87332_read_config_reg() argument
5440 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->index, reg_num); in pc87332_read_config_reg()
5441 retval = PP_GETB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data); in pc87332_read_config_reg()
5447 pc87332_write_config_reg(struct ecppunit *pp, uint8_t reg_num, uint8_t val) in pc87332_write_config_reg() argument
5449 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->index, reg_num); in pc87332_write_config_reg()
5450 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data, val); in pc87332_write_config_reg()
5458 PP_PUTB(pp->uh.ebus.c_handle, &pp->uh.ebus.c_reg->data, val); in pc87332_write_config_reg()
5462 pc87332_config_chip(struct ecppunit *pp) in pc87332_config_chip() argument
5466 pp->current_phase = ECPP_PHASE_INIT; in pc87332_config_chip()
5469 pmc = pc87332_read_config_reg(pp, PMC); in pc87332_config_chip()
5471 pc87332_write_config_reg(pp, PMC, in pc87332_config_chip()
5479 fcr = pc87332_read_config_reg(pp, FCR); in pc87332_config_chip()
5481 pc87332_write_config_reg(pp, FCR, in pc87332_config_chip()
5491 if (dcr_write(pp, ECPP_DCR_SET | ECPP_nINIT) == FAILURE) { in pc87332_config_chip()
5492 ecpp_error(pp->dip, "ecpp_config_87332: DCR config\n"); in pc87332_config_chip()
5496 pc87332_write_config_reg(pp, PCR, in pc87332_config_chip()
5500 if (ecr_write(pp, ECR_mode_001 | in pc87332_config_chip()
5502 ecpp_error(pp->dip, "ecpp_config_87332: ECR\n"); in pc87332_config_chip()
5505 if (dcr_write(pp, ECPP_DCR_SET | ECPP_SLCTIN | ECPP_nINIT) == FAILURE) { in pc87332_config_chip()
5506 ecpp_error(pp->dip, "ecpp_config_87332: w/DCR failed2.\n"); in pc87332_config_chip()
5511 pp->current_mode = ECPP_CENTRONICS; in pc87332_config_chip()
5514 pp->current_phase = ECPP_PHASE_C_IDLE; in pc87332_config_chip()
5524 pc87332_config_mode(struct ecppunit *pp) in pc87332_config_mode() argument
5526 if (COMPAT_PIO(pp)) { in pc87332_config_mode()
5527 pc87332_write_config_reg(pp, PCR, 0x04); in pc87332_config_mode()
5529 pc87332_write_config_reg(pp, PCR, 0x14); in pc87332_config_mode()
5534 pc97317_map_regs(struct ecppunit *pp) in pc97317_map_regs() argument
5536 if (pc87332_map_regs(pp) != SUCCESS) { in pc97317_map_regs()
5540 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->uh.ebus.c2_reg, in pc97317_map_regs()
5542 &pp->uh.ebus.c2_handle) != DDI_SUCCESS) { in pc97317_map_regs()
5543 ecpp_error(pp->dip, "pc97317_map_regs: failed c2_reg\n"); in pc97317_map_regs()
5544 pc87332_unmap_regs(pp); in pc97317_map_regs()
5552 pc97317_unmap_regs(struct ecppunit *pp) in pc97317_unmap_regs() argument
5554 if (pp->uh.ebus.c2_handle) { in pc97317_unmap_regs()
5555 ddi_regs_map_free(&pp->uh.ebus.c2_handle); in pc97317_unmap_regs()
5558 pc87332_unmap_regs(pp); in pc97317_unmap_regs()
5568 pc97317_config_chip(struct ecppunit *pp) in pc97317_config_chip() argument
5573 pc87332_write_config_reg(pp, PC97317_CONFIG_DEV_NO, 0x4); in pc97317_config_chip()
5576 PP_PUTB(pp->uh.ebus.c2_handle, in pc97317_config_chip()
5577 &pp->uh.ebus.c2_reg->eir, PC97317_CONFIG2_CONTROL2); in pc97317_config_chip()
5578 PP_PUTB(pp->uh.ebus.c2_handle, &pp->uh.ebus.c2_reg->edr, 0x80); in pc97317_config_chip()
5581 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x00); in pc97317_config_chip()
5584 pc87332_write_config_reg(pp, PC97317_CONFIG_PP_CONFIG, 0xf2); in pc97317_config_chip()
5586 if (dcr_write(pp, ECPP_SLCTIN | ECPP_nINIT) == FAILURE) { in pc97317_config_chip()
5587 ecpp_error(pp->dip, "pc97317_config_chip: failed w/DCR\n"); in pc97317_config_chip()
5590 if (ecr_write(pp, ECR_mode_001 | in pc97317_config_chip()
5592 ecpp_error(pp->dip, "pc97317_config_chip: failed w/ECR\n"); in pc97317_config_chip()
5596 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DEV_NO); in pc97317_config_chip()
5597 ecpp_error(pp->dip, "97317:conreg7(logical dev)=%x\n", conreg); in pc97317_config_chip()
5599 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_BASE_ADDR_MSB); in pc97317_config_chip()
5600 ecpp_error(pp->dip, "97317:conreg60(addrHi)=%x\n", conreg); in pc97317_config_chip()
5602 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_BASE_ADDR_LSB); in pc97317_config_chip()
5603 ecpp_error(pp->dip, "97317:conreg61(addrLo)=%x\n", conreg); in pc97317_config_chip()
5605 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_INTR_SEL); in pc97317_config_chip()
5606 ecpp_error(pp->dip, "97317:conreg70(IRQL)=%x\n", conreg); in pc97317_config_chip()
5608 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_INTR_TYPE); in pc97317_config_chip()
5609 ecpp_error(pp->dip, "97317:conreg71(intr type)=%x\n", conreg); in pc97317_config_chip()
5611 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_ACTIVATE); in pc97317_config_chip()
5612 ecpp_error(pp->dip, "97317:conreg30(Active)=%x\n", conreg); in pc97317_config_chip()
5614 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_IO_RANGE); in pc97317_config_chip()
5615 ecpp_error(pp->dip, "97317:conreg31(IO Range Check)=%x\n", conreg); in pc97317_config_chip()
5617 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DMA0_CHAN); in pc97317_config_chip()
5618 ecpp_error(pp->dip, "97317:conreg74(DMA0 Chan)=%x\n", conreg); in pc97317_config_chip()
5619 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_DMA1_CHAN); in pc97317_config_chip()
5620 ecpp_error(pp->dip, "97317:conreg75(DMA1 Chan)=%x\n", conreg); in pc97317_config_chip()
5622 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_PP_CONFIG); in pc97317_config_chip()
5623 ecpp_error(pp->dip, "97317:conregF0(pport conf)=%x\n", conreg); in pc97317_config_chip()
5625 conreg = pc87332_read_config_reg(pp, PC97317_CONFIG_PP_CONFIG); in pc97317_config_chip()
5626 ecpp_error(pp->dip, "97317:conregF0(pport conf)=%x\n", conreg); in pc97317_config_chip()
5637 pc97317_config_mode(struct ecppunit *pp) in pc97317_config_mode() argument
5640 pc87332_write_config_reg(pp, PC97317_CONFIG_DEV_NO, 0x4); in pc97317_config_mode()
5642 if (COMPAT_PIO(pp) || pp->current_mode == ECPP_NIBBLE_MODE) { in pc97317_config_mode()
5643 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x02); in pc97317_config_mode()
5645 pc87332_write_config_reg(pp, PC97317_CONFIG_INTR_TYPE, 0x00); in pc97317_config_mode()
5650 cheerio_mask_intr(struct ecppunit *pp) in cheerio_mask_intr() argument
5653 AND_SET_LONG_R(pp->uh.ebus.d_handle, in cheerio_mask_intr()
5654 &pp->uh.ebus.dmac->csr, ~DCSR_INT_EN); in cheerio_mask_intr()
5658 cheerio_unmask_intr(struct ecppunit *pp) in cheerio_unmask_intr() argument
5661 OR_SET_LONG_R(pp->uh.ebus.d_handle, in cheerio_unmask_intr()
5662 &pp->uh.ebus.dmac->csr, DCSR_INT_EN | DCSR_TCI_DIS); in cheerio_unmask_intr()
5666 cheerio_dma_start(struct ecppunit *pp) in cheerio_dma_start() argument
5668 cheerio_reset_dcsr(pp); in cheerio_dma_start()
5669 SET_DMAC_BCR(pp, pp->dma_cookie.dmac_size); in cheerio_dma_start()
5670 SET_DMAC_ACR(pp, pp->dma_cookie.dmac_address); in cheerio_dma_start()
5672 if (pp->dma_dir == DDI_DMA_READ) { in cheerio_dma_start()
5673 SET_DMAC_CSR(pp, DCSR_INT_EN | DCSR_EN_CNT | DCSR_EN_DMA | in cheerio_dma_start()
5676 SET_DMAC_CSR(pp, DCSR_INT_EN | DCSR_EN_CNT | DCSR_EN_DMA | in cheerio_dma_start()
5687 cheerio_dma_stop(struct ecppunit *pp, size_t *countp) in cheerio_dma_stop() argument
5692 AND_SET_LONG_R(pp->uh.ebus.d_handle, &pp->uh.ebus.dmac->csr, in cheerio_dma_stop()
5696 OR_SET_LONG_R(pp->uh.ebus.d_handle, &pp->uh.ebus.dmac->csr, in cheerio_dma_stop()
5701 *countp = cheerio_getcnt(pp); in cheerio_dma_stop()
5704 cheerio_reset_dcsr(pp); in cheerio_dma_stop()
5705 SET_DMAC_BCR(pp, 0); in cheerio_dma_stop()
5708 ecr = ECR_READ(pp); in cheerio_dma_stop()
5709 if (ecr_write(pp, ecr & ~ECPP_DMA_ENABLE) == FAILURE) { in cheerio_dma_stop()
5714 ecr = ECR_READ(pp); in cheerio_dma_stop()
5716 return (ecr_write(pp, ecr | ECPP_INTR_SRV)); in cheerio_dma_stop()
5720 cheerio_getcnt(struct ecppunit *pp) in cheerio_getcnt() argument
5722 return (GET_DMAC_BCR(pp)); in cheerio_getcnt()
5731 cheerio_reset_dcsr(struct ecppunit *pp) in cheerio_reset_dcsr() argument
5735 SET_DMAC_CSR(pp, DCSR_RESET); in cheerio_reset_dcsr()
5737 while (GET_DMAC_CSR(pp) & DCSR_CYC_PEND) { in cheerio_reset_dcsr()
5739 ecpp_error(pp->dip, "cheerio_reset_dcsr: timeout\n"); in cheerio_reset_dcsr()
5747 SET_DMAC_CSR(pp, 0); in cheerio_reset_dcsr()
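
cheerio_reset_dcsr() above shows the usual reset-and-drain pattern: assert the reset bit, poll (with a bound) until no DMA cycle is pending, then clear the CSR. A hedged sketch of that pattern with placeholder register accessors and bit values:

    #include <stdint.h>

    extern void write_csr(uint32_t val);    /* hypothetical DMAC CSR write */
    extern uint32_t read_csr(void);         /* hypothetical DMAC CSR read */
    extern void delay_us(unsigned int us);  /* hypothetical microsecond delay */

    #define CSR_RESET      0x00000080u      /* placeholder bit positions */
    #define CSR_CYC_PEND   0x00000040u

    /* Reset the DMA engine, waiting a bounded time for pending cycles to drain. */
    int
    reset_dma_csr(void)
    {
        int tries = 1000;   /* arbitrary bound; the driver uses its own limit */

        write_csr(CSR_RESET);
        while (read_csr() & CSR_CYC_PEND) {
            if (--tries == 0)
                return (-1);   /* log and give up, as the driver does */
            delay_us(10);
        }
        write_csr(0);   /* leave the engine in a known idle state */
        return (0);
    }
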
5760 m1553_map_regs(struct ecppunit *pp) in m1553_map_regs() argument
5762 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->uh.m1553.isa_space, in m1553_map_regs()
5764 &pp->uh.m1553.d_handle) != DDI_SUCCESS) { in m1553_map_regs()
5765 ecpp_error(pp->dip, "m1553_map_regs: failed isa space\n"); in m1553_map_regs()
5769 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in m1553_map_regs()
5770 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in m1553_map_regs()
5772 ecpp_error(pp->dip, "m1553_map_regs: failed i_reg\n"); in m1553_map_regs()
5776 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->f_reg, 0x400, in m1553_map_regs()
5777 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in m1553_map_regs()
5779 ecpp_error(pp->dip, "m1553_map_regs: failed f_reg\n"); in m1553_map_regs()
5786 m1553_unmap_regs(pp); in m1553_map_regs()
5791 m1553_unmap_regs(struct ecppunit *pp) in m1553_unmap_regs() argument
5793 if (pp->uh.m1553.d_handle) { in m1553_unmap_regs()
5794 ddi_regs_map_free(&pp->uh.m1553.d_handle); in m1553_unmap_regs()
5796 if (pp->i_handle) { in m1553_unmap_regs()
5797 ddi_regs_map_free(&pp->i_handle); in m1553_unmap_regs()
5799 if (pp->f_handle) { in m1553_unmap_regs()
5800 ddi_regs_map_free(&pp->f_handle); in m1553_unmap_regs()
5806 x86_map_regs(struct ecppunit *pp) in x86_map_regs() argument
5810 if (ddi_regs_map_setup(pp->dip, 0, (caddr_t *)&pp->i_reg, 0, in x86_map_regs()
5811 sizeof (struct info_reg), &acc_attr, &pp->i_handle) in x86_map_regs()
5813 ecpp_error(pp->dip, "x86_map_regs: failed i_reg\n"); in x86_map_regs()
5816 if (ddi_dev_nregs(pp->dip, &nregs) == DDI_SUCCESS && nregs == 2) { in x86_map_regs()
5817 if (ddi_regs_map_setup(pp->dip, 1, (caddr_t *)&pp->f_reg, 0, in x86_map_regs()
5818 sizeof (struct fifo_reg), &acc_attr, &pp->f_handle) in x86_map_regs()
5820 ecpp_error(pp->dip, "x86_map_regs: failed f_reg\n"); in x86_map_regs()
5823 pp->noecpregs = FALSE; in x86_map_regs()
5825 pp->noecpregs = TRUE; in x86_map_regs()
5829 x86_unmap_regs(pp); in x86_map_regs()
5834 x86_unmap_regs(struct ecppunit *pp) in x86_unmap_regs() argument
5836 if (pp->i_handle) { in x86_unmap_regs()
5837 ddi_regs_map_free(&pp->i_handle); in x86_unmap_regs()
5839 if (pp->f_handle) { in x86_unmap_regs()
5840 ddi_regs_map_free(&pp->f_handle); in x86_unmap_regs()
5846 m1553_read_config_reg(struct ecppunit *pp, uint8_t reg_num) in m1553_read_config_reg() argument
5850 dma8237_write(pp, 0x3F0, reg_num); in m1553_read_config_reg()
5851 retval = dma8237_read(pp, 0x3F1); in m1553_read_config_reg()
5857 m1553_write_config_reg(struct ecppunit *pp, uint8_t reg_num, uint8_t val) in m1553_write_config_reg() argument
5859 dma8237_write(pp, 0x3F0, reg_num); in m1553_write_config_reg()
5860 dma8237_write(pp, 0x3F1, val); in m1553_write_config_reg()
5864 m1553_config_chip(struct ecppunit *pp) in m1553_config_chip() argument
5869 dma8237_write(pp, 0x3F0, 0x51); in m1553_config_chip()
5870 dma8237_write(pp, 0x3F0, 0x23); in m1553_config_chip()
5872 m1553_write_config_reg(pp, PnP_CONFIG_DEV_NO, 0x3); in m1553_config_chip()
5873 conreg = m1553_read_config_reg(pp, PnP_CONFIG_DEV_NO); in m1553_config_chip()
5874 ecpp_error(pp->dip, "M1553:conreg7(logical dev)=%x\n", conreg); in m1553_config_chip()
5876 conreg = m1553_read_config_reg(pp, PnP_CONFIG_ACTIVATE); in m1553_config_chip()
5877 ecpp_error(pp->dip, "M1553:conreg30(Active)=%x\n", conreg); in m1553_config_chip()
5879 conreg = m1553_read_config_reg(pp, PnP_CONFIG_BASE_ADDR_MSB); in m1553_config_chip()
5880 ecpp_error(pp->dip, "M1553:conreg60(addrHi)=%x\n", conreg); in m1553_config_chip()
5881 conreg = m1553_read_config_reg(pp, PnP_CONFIG_BASE_ADDR_LSB); in m1553_config_chip()
5882 ecpp_error(pp->dip, "M1553:conreg61(addrLo)=%x\n", conreg); in m1553_config_chip()
5884 conreg = m1553_read_config_reg(pp, PnP_CONFIG_INTR_SEL); in m1553_config_chip()
5885 ecpp_error(pp->dip, "M1553:conreg70(IRQL)=%x\n", conreg); in m1553_config_chip()
5887 conreg = m1553_read_config_reg(pp, PnP_CONFIG_DMA0_CHAN); in m1553_config_chip()
5888 ecpp_error(pp->dip, "M1553:conreg74(DMA0 Chan)=%x\n", conreg); in m1553_config_chip()
5891 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG0); in m1553_config_chip()
5893 m1553_write_config_reg(pp, PnP_CONFIG_PP_CONFIG0, conreg); in m1553_config_chip()
5894 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG0); in m1553_config_chip()
5895 ecpp_error(pp->dip, "M1553:conregF0(pport conf)=%x\n", conreg); in m1553_config_chip()
5897 m1553_write_config_reg(pp, PnP_CONFIG_PP_CONFIG1, 0x04); in m1553_config_chip()
5898 conreg = m1553_read_config_reg(pp, PnP_CONFIG_PP_CONFIG1); in m1553_config_chip()
5899 ecpp_error(pp->dip, "M1553:conregF1(outconf)=%x\n", conreg); in m1553_config_chip()
5902 dma8237_write(pp, 0x3F0, 0xBB); in m1553_config_chip()
5905 ECR_WRITE(pp, ECR_mode_001 | ECPP_INTR_MASK | ECPP_INTR_SRV); in m1553_config_chip()
5906 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in m1553_config_chip()
5908 ecpp_error(pp->dip, "m1553_config_chip: ecr=%x, dsr=%x, dcr=%x\n", in m1553_config_chip()
5909 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in m1553_config_chip()
5916 x86_config_chip(struct ecppunit *pp) in x86_config_chip() argument
5918 if (ecr_write(pp, ECR_mode_001 | in x86_config_chip()
5920 ecpp_error(pp->dip, "config chip: failed w/ecr\n"); in x86_config_chip()
5921 pp->noecpregs = TRUE; in x86_config_chip()
5923 if (pp->noecpregs) in x86_config_chip()
5924 pp->fast_compat = FALSE; in x86_config_chip()
5925 DCR_WRITE(pp, ECPP_SLCTIN | ECPP_nINIT); in x86_config_chip()
5926 ecpp_error(pp->dip, "x86_config_chip: ecr=%x, dsr=%x, dcr=%x\n", in x86_config_chip()
5927 ECR_READ(pp), DSR_READ(pp), DCR_READ(pp)); in x86_config_chip()
5938 dma8237_dma_start(struct ecppunit *pp) in dma8237_dma_start() argument
5942 chn = pp->uh.m1553.chn; in dma8237_dma_start()
5945 pp->dma_cookie.dmac_size != 0 && in dma8237_dma_start()
5946 pp->dma_cookie.dmac_address != NULL); in dma8237_dma_start()
5951 dma8237_write(pp, DMAC2_MODE, DMAMODE_CASC); in dma8237_dma_start()
5952 if (pp->dma_dir == DDI_DMA_READ) { in dma8237_dma_start()
5953 dma8237_write(pp, DMAC1_MODE, DMAMODE_SINGLE | in dma8237_dma_start()
5956 dma8237_write(pp, DMAC1_MODE, DMAMODE_SINGLE | in dma8237_dma_start()
5960 dma8237_write_addr(pp, pp->dma_cookie.dmac_address); in dma8237_dma_start()
5961 dma8237_write_count(pp, pp->dma_cookie.dmac_size - 1); in dma8237_dma_start()
5978 dma8237_write(pp, DMAC1_ALLMASK, ~(1 << chn)); in dma8237_dma_start()
5979 pp->uh.m1553.isadma_entered = 1; in dma8237_dma_start()
5985 dma8237_dma_stop(struct ecppunit *pp, size_t *countp) in dma8237_dma_stop() argument
5990 ecr = (ECR_READ(pp) & 0xe0) | ECPP_INTR_MASK | ECPP_INTR_SRV; in dma8237_dma_stop()
5991 (void) ecr_write(pp, ecr); in dma8237_dma_stop()
5993 if (pp->uh.m1553.isadma_entered) { in dma8237_dma_stop()
5995 dma8237_write(pp, DMAC1_ALLMASK, 0); in dma8237_dma_stop()
5996 pp->uh.m1553.isadma_entered = 0; in dma8237_dma_stop()
6002 *countp = dma8237_getcnt(pp); in dma8237_dma_stop()
6003 if (pp->dma_dir == DDI_DMA_READ && *countp > 0) { in dma8237_dma_stop()
6011 x86_dma_start(struct ecppunit *pp) in x86_dma_start() argument
6016 chn = pp->uh.x86.chn; in x86_dma_start()
6018 pp->dma_cookie.dmac_size != 0 && in x86_dma_start()
6019 pp->dma_cookie.dmac_address != NULL); in x86_dma_start()
6022 (pp->dma_dir & DDI_DMA_READ) ? DMAE_CMD_READ : DMAE_CMD_WRITE; in x86_dma_start()
6023 if (ddi_dmae_prog(pp->dip, &dmaereq, &pp->dma_cookie, chn) in x86_dma_start()
6025 ecpp_error(pp->dip, "prog failed !!!\n"); in x86_dma_start()
6026 ecpp_error(pp->dip, "dma_started..\n"); in x86_dma_start()
6031 x86_dma_stop(struct ecppunit *pp, size_t *countp) in x86_dma_stop() argument
6036 if (pp->uh.x86.chn == 0xff) in x86_dma_stop()
6038 ecr = (ECR_READ(pp) & 0xe0) | ECPP_INTR_MASK | ECPP_INTR_SRV; in x86_dma_stop()
6039 (void) ecr_write(pp, ecr); in x86_dma_stop()
6040 ecpp_error(pp->dip, "dma_stop\n"); in x86_dma_stop()
6044 *countp = x86_getcnt(pp); in x86_dma_stop()
6046 ecpp_error(pp->dip, "dma_stopped..\n"); in x86_dma_stop()
6053 dma8237_write_addr(struct ecppunit *pp, uint32_t addr) in dma8237_write_addr() argument
6058 switch (pp->uh.m1553.chn) { in dma8237_write_addr()
6087 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_addr]; in dma8237_write_addr()
6088 ddi_put16(pp->uh.m1553.d_handle, p, addr & 0xFFFF); in dma8237_write_addr()
6090 dma8237_write(pp, c_lpage, (addr & 0xFF0000) >> 16); in dma8237_write_addr()
6091 dma8237_write(pp, c_hpage, (addr & 0xFF000000) >> 24); in dma8237_write_addr()
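
dma8237_write_addr() above programs an 8237-style ISA DMA controller, which exposes only a 16-bit base address register per channel plus separate 8-bit page registers, so a 32-bit buffer address must be split as in the last three lines. A sketch of that split using hypothetical register-write helpers and placeholder register numbers:

    #include <stdint.h>

    extern void write_reg16(int reg, uint16_t val);  /* hypothetical 16-bit register write */
    extern void write_reg8(int reg, uint8_t val);    /* hypothetical 8-bit register write */

    /*
     * Bits 0-15 go to the base address register, bits 16-23 to the low
     * page register, bits 24-31 to the high page register.  Register
     * numbers are placeholders, not the driver's channel-specific values.
     */
    void
    dma_program_address(uint32_t addr, int base_reg, int lpage_reg, int hpage_reg)
    {
        write_reg16(base_reg, (uint16_t)(addr & 0xFFFF));
        write_reg8(lpage_reg, (uint8_t)((addr >> 16) & 0xFF));
        write_reg8(hpage_reg, (uint8_t)((addr >> 24) & 0xFF));
    }
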
6101 dma8237_read_addr(struct ecppunit *pp) in dma8237_read_addr() argument
6109 switch (pp->uh.m1553.chn) { in dma8237_read_addr()
6138 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_addr]; in dma8237_read_addr()
6139 rval16 = ddi_get16(pp->uh.m1553.d_handle, p); in dma8237_read_addr()
6141 rval3 = dma8237_read(pp, c_lpage); in dma8237_read_addr()
6142 rval4 = dma8237_read(pp, c_hpage); in dma8237_read_addr()
6151 dma8237_write_count(struct ecppunit *pp, uint32_t count) in dma8237_write_count() argument
6156 switch (pp->uh.m1553.chn) { in dma8237_write_count()
6177 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_wcnt]; in dma8237_write_count()
6178 ddi_put16(pp->uh.m1553.d_handle, p, count & 0xFFFF); in dma8237_write_count()
6183 dma8237_read_count(struct ecppunit *pp) in dma8237_read_count() argument
6188 switch (pp->uh.m1553.chn) { in dma8237_read_count()
6209 p = (uint16_t *)&pp->uh.m1553.isa_space->isa_reg[c_wcnt]; in dma8237_read_count()
6210 return (ddi_get16(pp->uh.m1553.d_handle, p)); in dma8237_read_count()
6215 dma8237_write(struct ecppunit *pp, int reg_num, uint8_t val) in dma8237_write() argument
6217 ddi_put8(pp->uh.m1553.d_handle, in dma8237_write()
6218 &pp->uh.m1553.isa_space->isa_reg[reg_num], val); in dma8237_write()
6222 dma8237_read(struct ecppunit *pp, int reg_num) in dma8237_read() argument
6224 return (ddi_get8(pp->uh.m1553.d_handle, in dma8237_read()
6225 &pp->uh.m1553.isa_space->isa_reg[reg_num])); in dma8237_read()
6229 dma8237_getcnt(struct ecppunit *pp) in dma8237_getcnt() argument
6233 if ((cnt = dma8237_read_count(pp)) == 0xffff) in dma8237_getcnt()
6247 ecpp_kstat_init(struct ecppunit *pp) in ecpp_kstat_init() argument
6255 (void) sprintf(buf, "ecppc%d", pp->instance); in ecpp_kstat_init()
6256 pp->intrstats = kstat_create("ecpp", pp->instance, buf, "controller", in ecpp_kstat_init()
6258 if (pp->intrstats == NULL) { in ecpp_kstat_init()
6259 ecpp_error(pp->dip, "ecpp_kstat_init:1: kstat_create failed"); in ecpp_kstat_init()
6261 pp->intrstats->ks_update = ecpp_kstatintr_update; in ecpp_kstat_init()
6262 pp->intrstats->ks_private = (void *) pp; in ecpp_kstat_init()
6263 kstat_install(pp->intrstats); in ecpp_kstat_init()
6269 pp->ksp = kstat_create("ecpp", pp->instance, NULL, "misc", in ecpp_kstat_init()
6273 if (pp->ksp == NULL) { in ecpp_kstat_init()
6274 ecpp_error(pp->dip, "ecpp_kstat_init:2: kstat_create failed"); in ecpp_kstat_init()
6278 ekp = (struct ecppkstat *)pp->ksp->ks_data; in ecpp_kstat_init()
6306 pp->ksp->ks_update = ecpp_kstat_update; in ecpp_kstat_init()
6307 pp->ksp->ks_private = (void *) pp; in ecpp_kstat_init()
6308 kstat_install(pp->ksp); in ecpp_kstat_init()
6314 struct ecppunit *pp; in ecpp_kstat_update() local
6325 pp = (struct ecppunit *)ksp->ks_private; in ecpp_kstat_update()
6328 mutex_enter(&pp->umutex); in ecpp_kstat_update()
6330 ekp->ek_ctx_obytes.value.ui32 = pp->obytes[ECPP_CENTRONICS] + in ecpp_kstat_update()
6331 pp->obytes[ECPP_COMPAT_MODE]; in ecpp_kstat_update()
6332 ekp->ek_ctxpio_obytes.value.ui32 = pp->ctxpio_obytes; in ecpp_kstat_update()
6333 ekp->ek_nib_ibytes.value.ui32 = pp->ibytes[ECPP_NIBBLE_MODE]; in ecpp_kstat_update()
6334 ekp->ek_ecp_obytes.value.ui32 = pp->obytes[ECPP_ECP_MODE]; in ecpp_kstat_update()
6335 ekp->ek_ecp_ibytes.value.ui32 = pp->ibytes[ECPP_ECP_MODE]; in ecpp_kstat_update()
6336 ekp->ek_epp_obytes.value.ui32 = pp->obytes[ECPP_EPP_MODE]; in ecpp_kstat_update()
6337 ekp->ek_epp_ibytes.value.ui32 = pp->ibytes[ECPP_EPP_MODE]; in ecpp_kstat_update()
6338 ekp->ek_diag_obytes.value.ui32 = pp->obytes[ECPP_DIAG_MODE]; in ecpp_kstat_update()
6339 ekp->ek_to_ctx.value.ui32 = pp->to_mode[ECPP_CENTRONICS] + in ecpp_kstat_update()
6340 pp->to_mode[ECPP_COMPAT_MODE]; in ecpp_kstat_update()
6341 ekp->ek_to_nib.value.ui32 = pp->to_mode[ECPP_NIBBLE_MODE]; in ecpp_kstat_update()
6342 ekp->ek_to_ecp.value.ui32 = pp->to_mode[ECPP_ECP_MODE]; in ecpp_kstat_update()
6343 ekp->ek_to_epp.value.ui32 = pp->to_mode[ECPP_EPP_MODE]; in ecpp_kstat_update()
6344 ekp->ek_to_diag.value.ui32 = pp->to_mode[ECPP_DIAG_MODE]; in ecpp_kstat_update()
6345 ekp->ek_xfer_tout.value.ui32 = pp->xfer_tout; in ecpp_kstat_update()
6346 ekp->ek_ctx_cf.value.ui32 = pp->ctx_cf; in ecpp_kstat_update()
6347 ekp->ek_joblen.value.ui32 = pp->joblen; in ecpp_kstat_update()
6348 ekp->ek_isr_reattempt_high.value.ui32 = pp->isr_reattempt_high; in ecpp_kstat_update()
6349 ekp->ek_mode.value.ui32 = pp->current_mode; in ecpp_kstat_update()
6350 ekp->ek_phase.value.ui32 = pp->current_phase; in ecpp_kstat_update()
6351 ekp->ek_backchan.value.ui32 = pp->backchannel; in ecpp_kstat_update()
6352 ekp->ek_iomode.value.ui32 = pp->io_mode; in ecpp_kstat_update()
6353 ekp->ek_state.value.ui32 = pp->e_busy; in ecpp_kstat_update()
6355 mutex_exit(&pp->umutex); in ecpp_kstat_update()
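
ecpp_kstat_update() above is a standard ks_update callback: under the unit mutex it snapshots the driver's soft-state counters into the named-kstat data area and rejects writes. A minimal sketch of the same pattern with an illustrative two-counter state (the structure and counter names are not the driver's):

    #include <sys/types.h>
    #include <sys/ksynch.h>
    #include <sys/kstat.h>
    #include <sys/errno.h>

    /* hypothetical soft state with a couple of counters */
    struct unit_state {
        kmutex_t  lock;
        uint32_t  obytes;
        uint32_t  ibytes;
    };

    /* named-kstat layout matching the counters above (illustrative only) */
    struct unit_kstats {
        kstat_named_t  obytes;
        kstat_named_t  ibytes;
    };

    static int
    unit_kstat_update(kstat_t *ksp, int rw)
    {
        struct unit_state *up = ksp->ks_private;
        struct unit_kstats *ukp = ksp->ks_data;

        if (rw == KSTAT_WRITE)
            return (EACCES);   /* counters are read-only */

        mutex_enter(&up->lock);
        ukp->obytes.value.ui32 = up->obytes;
        ukp->ibytes.value.ui32 = up->ibytes;
        mutex_exit(&up->lock);

        return (0);
    }
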
6363 struct ecppunit *pp; in ecpp_kstatintr_update() local
6373 pp = (struct ecppunit *)ksp->ks_private; in ecpp_kstatintr_update()
6375 mutex_enter(&pp->umutex); in ecpp_kstatintr_update()
6377 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_HARD] = pp->intr_hard; in ecpp_kstatintr_update()
6378 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_SPURIOUS] = pp->intr_spurious; in ecpp_kstatintr_update()
6379 KSTAT_INTR_PTR(ksp)->intrs[KSTAT_INTR_SOFT] = pp->intr_soft; in ecpp_kstatintr_update()
6381 mutex_exit(&pp->umutex); in ecpp_kstatintr_update()