Lines matching full:ch — cross-reference search output over the channel code of the FreeBSD mvs(4) Marvell SATA controller driver. Each entry below shows the original source line number, the matching source line, and the enclosing function.

73 static int mvs_sata_connect(struct mvs_channel *ch);
116 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_attach() local
120 ch->dev = dev; in mvs_ch_attach()
121 ch->unit = (intptr_t)device_get_ivars(dev); in mvs_ch_attach()
122 ch->quirks = ctlr->quirks; in mvs_ch_attach()
123 mtx_init(&ch->mtx, "MVS channel lock", NULL, MTX_DEF); in mvs_ch_attach()
124 ch->pm_level = 0; in mvs_ch_attach()
126 device_get_unit(dev), "pm_level", &ch->pm_level); in mvs_ch_attach()
127 if (ch->pm_level > 3) in mvs_ch_attach()
128 callout_init_mtx(&ch->pm_timer, &ch->mtx, 0); in mvs_ch_attach()
129 callout_init_mtx(&ch->reset_timer, &ch->mtx, 0); in mvs_ch_attach()
133 ch->user[i].revision = sata_rev; in mvs_ch_attach()
134 ch->user[i].mode = 0; in mvs_ch_attach()
135 ch->user[i].bytecount = (ch->quirks & MVS_Q_GENIIE) ? 8192 : 2048; in mvs_ch_attach()
136 ch->user[i].tags = MVS_MAX_SLOTS; in mvs_ch_attach()
137 ch->curr[i] = ch->user[i]; in mvs_ch_attach()
138 if (ch->pm_level) { in mvs_ch_attach()
139 ch->user[i].caps = CTS_SATA_CAPS_H_PMREQ | in mvs_ch_attach()
143 ch->user[i].caps |= CTS_SATA_CAPS_H_AN; in mvs_ch_attach()
145 rid = ch->unit; in mvs_ch_attach()
146 if (!(ch->r_mem = bus_alloc_resource_any(dev, SYS_RES_MEMORY, in mvs_ch_attach()
152 mtx_lock(&ch->mtx); in mvs_ch_attach()
154 if (!(ch->r_irq = bus_alloc_resource_any(dev, SYS_RES_IRQ, in mvs_ch_attach()
160 if ((bus_setup_intr(dev, ch->r_irq, ATA_INTR_FLAGS, NULL, in mvs_ch_attach()
161 mvs_ch_intr_locked, dev, &ch->ih))) { in mvs_ch_attach()
174 ch->sim = cam_sim_alloc(mvsaction, mvspoll, "mvsch", ch, in mvs_ch_attach()
175 device_get_unit(dev), &ch->mtx, in mvs_ch_attach()
176 2, (ch->quirks & MVS_Q_GENI) ? 0 : MVS_MAX_SLOTS - 1, in mvs_ch_attach()
178 if (ch->sim == NULL) { in mvs_ch_attach()
184 if (xpt_bus_register(ch->sim, dev, 0) != CAM_SUCCESS) { in mvs_ch_attach()
189 if (xpt_create_path(&ch->path, /*periph*/NULL, cam_sim_path(ch->sim), in mvs_ch_attach()
195 if (ch->pm_level > 3) { in mvs_ch_attach()
196 callout_reset(&ch->pm_timer, in mvs_ch_attach()
197 (ch->pm_level == 4) ? hz / 1000 : hz / 8, in mvs_ch_attach()
200 mtx_unlock(&ch->mtx); in mvs_ch_attach()
204 xpt_bus_deregister(cam_sim_path(ch->sim)); in mvs_ch_attach()
206 cam_sim_free(ch->sim, /*free_devq*/TRUE); in mvs_ch_attach()
208 bus_release_resource(dev, SYS_RES_IRQ, ATA_IRQ_RID, ch->r_irq); in mvs_ch_attach()
210 bus_release_resource(dev, SYS_RES_MEMORY, ch->unit, ch->r_mem); in mvs_ch_attach()
211 mtx_unlock(&ch->mtx); in mvs_ch_attach()
212 mtx_destroy(&ch->mtx); in mvs_ch_attach()
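The tail of the mvs_ch_attach() fragments above (xpt_bus_deregister, cam_sim_free, bus_release_resource, mtx_unlock/mtx_destroy) is the attach error-unwind path: resources are released in the reverse of the order they were acquired, and each failure point enters the unwind at the matching depth. A minimal runnable sketch of that goto-unwind idiom, using hypothetical user-space resources rather than the driver's newbus/CAM objects:

    #include <stdio.h>
    #include <stdlib.h>

    /* Acquire in order, release in reverse; each failure jumps to the label
     * that frees only what was already acquired (cf. mvs_ch_attach()). */
    static int
    attach_like(void)
    {
        void *mem, *irq, *sim;

        if ((mem = malloc(16)) == NULL)     /* cf. bus_alloc_resource_any(MEM) */
            return (-1);
        if ((irq = malloc(16)) == NULL)     /* cf. bus_alloc_resource_any(IRQ) */
            goto err_mem;
        if ((sim = malloc(16)) == NULL)     /* cf. cam_sim_alloc() */
            goto err_irq;
        printf("attached\n");
        free(sim);                          /* normal teardown mirrors detach */
        free(irq);
        free(mem);
        return (0);
    err_irq:
        free(irq);
    err_mem:
        free(mem);
        return (-1);
    }

    int
    main(void)
    {
        return (attach_like() != 0);
    }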
219 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_detach() local
221 mtx_lock(&ch->mtx); in mvs_ch_detach()
222 xpt_async(AC_LOST_DEVICE, ch->path, NULL); in mvs_ch_detach()
224 if (ch->resetting) { in mvs_ch_detach()
225 ch->resetting = 0; in mvs_ch_detach()
226 xpt_release_simq(ch->sim, TRUE); in mvs_ch_detach()
228 xpt_free_path(ch->path); in mvs_ch_detach()
229 xpt_bus_deregister(cam_sim_path(ch->sim)); in mvs_ch_detach()
230 cam_sim_free(ch->sim, /*free_devq*/TRUE); in mvs_ch_detach()
231 mtx_unlock(&ch->mtx); in mvs_ch_detach()
233 if (ch->pm_level > 3) in mvs_ch_detach()
234 callout_drain(&ch->pm_timer); in mvs_ch_detach()
235 callout_drain(&ch->reset_timer); in mvs_ch_detach()
236 bus_teardown_intr(dev, ch->r_irq, ch->ih); in mvs_ch_detach()
237 bus_release_resource(dev, SYS_RES_IRQ, ATA_IRQ_RID, ch->r_irq); in mvs_ch_detach()
243 bus_release_resource(dev, SYS_RES_MEMORY, ch->unit, ch->r_mem); in mvs_ch_detach()
244 mtx_destroy(&ch->mtx); in mvs_ch_detach()
251 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_init() local
255 ATA_OUTL(ch->r_mem, EDMA_IEM, 0); in mvs_ch_init()
257 ch->curr_mode = MVS_EDMA_UNKNOWN; in mvs_ch_init()
260 ATA_OUTL(ch->r_mem, SATA_FISIC, 0); in mvs_ch_init()
261 reg = ATA_INL(ch->r_mem, SATA_FISC); in mvs_ch_init()
263 ATA_OUTL(ch->r_mem, SATA_FISC, reg); in mvs_ch_init()
264 reg = ATA_INL(ch->r_mem, SATA_FISIM); in mvs_ch_init()
266 ATA_OUTL(ch->r_mem, SATA_FISC, reg); in mvs_ch_init()
268 ATA_OUTL(ch->r_mem, SATA_SE, 0xffffffff); in mvs_ch_init()
270 ATA_OUTL(ch->r_mem, EDMA_IEC, 0); in mvs_ch_init()
272 ATA_OUTL(ch->r_mem, EDMA_IEM, ~EDMA_IE_TRANSIENT); in mvs_ch_init()
279 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_deinit() local
284 ATA_OUTL(ch->r_mem, EDMA_IEM, 0); in mvs_ch_deinit()
291 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_suspend() local
293 mtx_lock(&ch->mtx); in mvs_ch_suspend()
294 xpt_freeze_simq(ch->sim, 1); in mvs_ch_suspend()
295 while (ch->oslots) in mvs_ch_suspend()
296 msleep(ch, &ch->mtx, PRIBIO, "mvssusp", hz/100); in mvs_ch_suspend()
298 if (ch->resetting) { in mvs_ch_suspend()
299 ch->resetting = 0; in mvs_ch_suspend()
300 callout_stop(&ch->reset_timer); in mvs_ch_suspend()
301 xpt_release_simq(ch->sim, TRUE); in mvs_ch_suspend()
304 mtx_unlock(&ch->mtx); in mvs_ch_suspend()
311 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_resume() local
313 mtx_lock(&ch->mtx); in mvs_ch_resume()
316 xpt_release_simq(ch->sim, TRUE); in mvs_ch_resume()
317 mtx_unlock(&ch->mtx); in mvs_ch_resume()
329 struct mvs_channel *ch = device_get_softc(dev); in mvs_dmainit() local
336 0, NULL, NULL, &ch->dma.workrq_tag)) in mvs_dmainit()
338 if (bus_dmamem_alloc(ch->dma.workrq_tag, (void **)&ch->dma.workrq, 0, in mvs_dmainit()
339 &ch->dma.workrq_map)) in mvs_dmainit()
341 if (bus_dmamap_load(ch->dma.workrq_tag, ch->dma.workrq_map, in mvs_dmainit()
342 ch->dma.workrq, MVS_WORKRQ_SIZE, mvs_dmasetupc_cb, &dcba, 0) || in mvs_dmainit()
344 bus_dmamem_free(ch->dma.workrq_tag, in mvs_dmainit()
345 ch->dma.workrq, ch->dma.workrq_map); in mvs_dmainit()
348 ch->dma.workrq_bus = dcba.maddr; in mvs_dmainit()
353 0, NULL, NULL, &ch->dma.workrp_tag)) in mvs_dmainit()
355 if (bus_dmamem_alloc(ch->dma.workrp_tag, (void **)&ch->dma.workrp, 0, in mvs_dmainit()
356 &ch->dma.workrp_map)) in mvs_dmainit()
358 if (bus_dmamap_load(ch->dma.workrp_tag, ch->dma.workrp_map, in mvs_dmainit()
359 ch->dma.workrp, MVS_WORKRP_SIZE, mvs_dmasetupc_cb, &dcba, 0) || in mvs_dmainit()
361 bus_dmamem_free(ch->dma.workrp_tag, in mvs_dmainit()
362 ch->dma.workrp, ch->dma.workrp_map); in mvs_dmainit()
365 ch->dma.workrp_bus = dcba.maddr; in mvs_dmainit()
371 0, busdma_lock_mutex, &ch->mtx, &ch->dma.data_tag)) { in mvs_dmainit()
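The mvs_dmainit() fragments above follow the standard FreeBSD busdma sequence: create a tag, allocate DMA-safe memory against it, and load the map with a callback that records the resulting bus address (the dcba.maddr values copied into ch->dma.workrq_bus/workrp_bus). A hedged kernel-style sketch of that sequence; the names my_arg, my_cb, my_dma_init and WORK_SIZE are illustrative, not the driver's identifiers, and the size and alignment are placeholders:

    #include <sys/param.h>
    #include <sys/systm.h>
    #include <sys/errno.h>
    #include <sys/bus.h>
    #include <machine/bus.h>

    #define WORK_SIZE   4096                /* placeholder work-area size */

    struct my_arg {
        bus_addr_t  maddr;                  /* filled in by the load callback */
        int         error;
    };

    static void
    my_cb(void *xarg, bus_dma_segment_t *segs, int nsegs, int error)
    {
        struct my_arg *arg = xarg;

        arg->error = error;
        if (error == 0 && nsegs == 1)
            arg->maddr = segs[0].ds_addr;
    }

    static int
    my_dma_init(device_t dev, bus_dma_tag_t *tagp, bus_dmamap_t *mapp,
        void **vaddrp, bus_addr_t *busp)
    {
        struct my_arg arg = { 0, 0 };

        /* One physically contiguous segment, 1 KB aligned. */
        if (bus_dma_tag_create(bus_get_dma_tag(dev), 1024, 0,
            BUS_SPACE_MAXADDR, BUS_SPACE_MAXADDR, NULL, NULL,
            WORK_SIZE, 1, WORK_SIZE, 0, NULL, NULL, tagp))
            return (ENOMEM);
        if (bus_dmamem_alloc(*tagp, vaddrp, BUS_DMA_ZERO, mapp))
            return (ENOMEM);
        if (bus_dmamap_load(*tagp, *mapp, *vaddrp, WORK_SIZE,
            my_cb, &arg, 0) != 0 || arg.error != 0)
            return (ENOMEM);
        *busp = arg.maddr;                  /* bus address later programmed into the HW */
        return (0);
    }

The real driver runs this sequence twice (the request and response work areas) and additionally creates a separate data tag with busdma_lock_mutex as the lock function, as the line-371 fragment shows.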
393 struct mvs_channel *ch = device_get_softc(dev); in mvs_dmafini() local
395 if (ch->dma.data_tag) { in mvs_dmafini()
396 bus_dma_tag_destroy(ch->dma.data_tag); in mvs_dmafini()
397 ch->dma.data_tag = NULL; in mvs_dmafini()
399 if (ch->dma.workrp_bus) { in mvs_dmafini()
400 bus_dmamap_unload(ch->dma.workrp_tag, ch->dma.workrp_map); in mvs_dmafini()
401 bus_dmamem_free(ch->dma.workrp_tag, in mvs_dmafini()
402 ch->dma.workrp, ch->dma.workrp_map); in mvs_dmafini()
403 ch->dma.workrp_bus = 0; in mvs_dmafini()
404 ch->dma.workrp = NULL; in mvs_dmafini()
406 if (ch->dma.workrp_tag) { in mvs_dmafini()
407 bus_dma_tag_destroy(ch->dma.workrp_tag); in mvs_dmafini()
408 ch->dma.workrp_tag = NULL; in mvs_dmafini()
410 if (ch->dma.workrq_bus) { in mvs_dmafini()
411 bus_dmamap_unload(ch->dma.workrq_tag, ch->dma.workrq_map); in mvs_dmafini()
412 bus_dmamem_free(ch->dma.workrq_tag, in mvs_dmafini()
413 ch->dma.workrq, ch->dma.workrq_map); in mvs_dmafini()
414 ch->dma.workrq_bus = 0; in mvs_dmafini()
415 ch->dma.workrq = NULL; in mvs_dmafini()
417 if (ch->dma.workrq_tag) { in mvs_dmafini()
418 bus_dma_tag_destroy(ch->dma.workrq_tag); in mvs_dmafini()
419 ch->dma.workrq_tag = NULL; in mvs_dmafini()
426 struct mvs_channel *ch = device_get_softc(dev); in mvs_slotsalloc() local
430 bzero(ch->slot, sizeof(ch->slot)); in mvs_slotsalloc()
432 struct mvs_slot *slot = &ch->slot[i]; in mvs_slotsalloc()
439 callout_init_mtx(&slot->timeout, &ch->mtx, 0); in mvs_slotsalloc()
441 if (bus_dmamap_create(ch->dma.data_tag, 0, &slot->dma.data_map)) in mvs_slotsalloc()
442 device_printf(ch->dev, "FAILURE - create data_map\n"); in mvs_slotsalloc()
449 struct mvs_channel *ch = device_get_softc(dev); in mvs_slotsfree() local
454 struct mvs_slot *slot = &ch->slot[i]; in mvs_slotsfree()
458 bus_dmamap_destroy(ch->dma.data_tag, slot->dma.data_map); in mvs_slotsfree()
467 struct mvs_channel *ch = device_get_softc(dev); in mvs_setup_edma_queues() local
471 work = ch->dma.workrq_bus; in mvs_setup_edma_queues()
472 ATA_OUTL(ch->r_mem, EDMA_REQQBAH, work >> 32); in mvs_setup_edma_queues()
473 ATA_OUTL(ch->r_mem, EDMA_REQQIP, work & 0xffffffff); in mvs_setup_edma_queues()
474 ATA_OUTL(ch->r_mem, EDMA_REQQOP, work & 0xffffffff); in mvs_setup_edma_queues()
475 bus_dmamap_sync(ch->dma.workrq_tag, ch->dma.workrq_map, in mvs_setup_edma_queues()
478 memset(ch->dma.workrp, 0xff, MVS_WORKRP_SIZE); in mvs_setup_edma_queues()
479 work = ch->dma.workrp_bus; in mvs_setup_edma_queues()
480 ATA_OUTL(ch->r_mem, EDMA_RESQBAH, work >> 32); in mvs_setup_edma_queues()
481 ATA_OUTL(ch->r_mem, EDMA_RESQIP, work & 0xffffffff); in mvs_setup_edma_queues()
482 ATA_OUTL(ch->r_mem, EDMA_RESQOP, work & 0xffffffff); in mvs_setup_edma_queues()
483 bus_dmamap_sync(ch->dma.workrp_tag, ch->dma.workrp_map, in mvs_setup_edma_queues()
485 ch->out_idx = 0; in mvs_setup_edma_queues()
486 ch->in_idx = 0; in mvs_setup_edma_queues()
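The queue-base programming above splits a 64-bit bus address across two 32-bit registers: the high half goes into the EDMA_REQQBAH/EDMA_RESQBAH registers as work >> 32, and the low half into the in/out pointer registers as work & 0xffffffff; the legacy path later computes the same high half as (eprd >> 16) >> 16, a form that stays well defined even when the address type is only 32 bits wide. A small runnable check of the split and recombination (plain C, example address only):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    int
    main(void)
    {
        uint64_t work = 0x00000001deadbee0ULL;          /* example bus address */
        uint32_t hi  = (uint32_t)(work >> 32);          /* -> *QBAH-style register */
        uint32_t lo  = (uint32_t)(work & 0xffffffff);   /* -> queue pointer register */
        uint32_t hi2 = (uint32_t)((work >> 16) >> 16);  /* two 16-bit shifts, same value */

        assert(hi == hi2);
        assert((((uint64_t)hi << 32) | lo) == work);
        printf("hi=%08x lo=%08x\n", (unsigned)hi, (unsigned)lo);
        return (0);
    }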
492 struct mvs_channel *ch = device_get_softc(dev); in mvs_set_edma_mode() local
496 if (mode == ch->curr_mode) in mvs_set_edma_mode()
499 if (ch->curr_mode != MVS_EDMA_OFF) { in mvs_set_edma_mode()
500 ATA_OUTL(ch->r_mem, EDMA_CMD, EDMA_CMD_EDSEDMA); in mvs_set_edma_mode()
502 while (ATA_INL(ch->r_mem, EDMA_CMD) & EDMA_CMD_EENEDMA) { in mvs_set_edma_mode()
510 ch->curr_mode = mode; in mvs_set_edma_mode()
511 ch->fbs_enabled = 0; in mvs_set_edma_mode()
512 ch->fake_busy = 0; in mvs_set_edma_mode()
517 if (ch->pm_present) { in mvs_set_edma_mode()
519 if (ch->quirks & MVS_Q_GENIIE) { in mvs_set_edma_mode()
521 ch->fbs_enabled = 1; in mvs_set_edma_mode()
524 if (ch->quirks & MVS_Q_GENI) in mvs_set_edma_mode()
526 else if (ch->quirks & MVS_Q_GENII) in mvs_set_edma_mode()
528 if (ch->quirks & MVS_Q_CT) in mvs_set_edma_mode()
536 ATA_OUTL(ch->r_mem, EDMA_CFG, ecfg); in mvs_set_edma_mode()
538 if (ch->quirks & MVS_Q_GENIIE) { in mvs_set_edma_mode()
540 fcfg = ATA_INL(ch->r_mem, SATA_FISC); in mvs_set_edma_mode()
541 ltm = ATA_INL(ch->r_mem, SATA_LTM); in mvs_set_edma_mode()
542 hc = ATA_INL(ch->r_mem, EDMA_HC); in mvs_set_edma_mode()
543 if (ch->fbs_enabled) { in mvs_set_edma_mode()
559 ATA_OUTL(ch->r_mem, SATA_FISC, fcfg); in mvs_set_edma_mode()
560 ATA_OUTL(ch->r_mem, SATA_LTM, ltm); in mvs_set_edma_mode()
561 ATA_OUTL(ch->r_mem, EDMA_HC, hc); in mvs_set_edma_mode()
564 unkn = ATA_INL(ch->r_mem, EDMA_UNKN_RESD); in mvs_set_edma_mode()
569 ATA_OUTL(ch->r_mem, EDMA_UNKN_RESD, unkn); in mvs_set_edma_mode()
573 ATA_OUTL(ch->r_mem, EDMA_CMD, EDMA_CMD_EENEDMA); in mvs_set_edma_mode()
595 struct mvs_channel *ch = device_get_softc(dev); in mvs_phy_check_events() local
597 if (ch->pm_level == 0) { in mvs_phy_check_events()
598 u_int32_t status = ATA_INL(ch->r_mem, SATA_SS); in mvs_phy_check_events()
613 cam_sim_path(ch->sim), in mvs_phy_check_events()
625 struct mvs_channel *ch = device_get_softc(dev); in mvs_notify_events() local
631 fis = ATA_INL(ch->r_mem, SATA_FISDW0); in mvs_notify_events()
635 d = ch->pm_present ? 15 : 0; in mvs_notify_events()
639 xpt_path_path_id(ch->path), d, 0) == CAM_REQ_CMP) { in mvs_notify_events()
650 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_intr_locked() local
652 mtx_lock(&ch->mtx); in mvs_ch_intr_locked()
654 mtx_unlock(&ch->mtx); in mvs_ch_intr_locked()
661 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_pm() local
664 if (ch->numrslots != 0) in mvs_ch_pm()
667 work = ATA_INL(ch->r_mem, SATA_SC); in mvs_ch_pm()
669 if (ch->pm_level == 4) in mvs_ch_pm()
673 ATA_OUTL(ch->r_mem, SATA_SC, work); in mvs_ch_pm()
679 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_pm_wake() local
683 work = ATA_INL(ch->r_mem, SATA_SS); in mvs_ch_pm_wake()
687 work = ATA_INL(ch->r_mem, SATA_SC); in mvs_ch_pm_wake()
690 ATA_OUTL(ch->r_mem, SATA_SC, work); in mvs_ch_pm_wake()
692 while ((ATA_INL(ch->r_mem, SATA_SS) & SATA_SS_IPM_ACTIVE) == 0 && in mvs_ch_pm_wake()
703 struct mvs_channel *ch = device_get_softc(dev); in mvs_ch_intr() local
707 int edma = (ch->numtslots != 0 || ch->numdslots != 0); in mvs_ch_intr()
714 iec = ATA_INL(ch->r_mem, EDMA_IEC); in mvs_ch_intr()
716 serr = ATA_INL(ch->r_mem, SATA_SE); in mvs_ch_intr()
717 ATA_OUTL(ch->r_mem, SATA_SE, serr); in mvs_ch_intr()
725 if (ch->quirks & MVS_Q_GENI) in mvs_ch_intr()
728 else if (ch->quirks & MVS_Q_GENII) in mvs_ch_intr()
731 fisic = ATA_INL(ch->r_mem, SATA_FISIC); in mvs_ch_intr()
734 ch->curr_mode = MVS_EDMA_UNKNOWN; in mvs_ch_intr()
735 ATA_OUTL(ch->r_mem, EDMA_IEC, ~iec); in mvs_ch_intr()
739 if (ch->numpslots != 0) { in mvs_ch_intr()
742 if (ch->quirks & MVS_Q_GENIIE) in mvs_ch_intr()
743 ccs = EDMA_S_EIOID(ATA_INL(ch->r_mem, EDMA_S)); in mvs_ch_intr()
745 ccs = EDMA_S_EDEVQUETAG(ATA_INL(ch->r_mem, EDMA_S)); in mvs_ch_intr()
747 if (ch->fbs_enabled) { in mvs_ch_intr()
750 if (ch->numrslotspd[i] == 0) in mvs_ch_intr()
763 uint16_t p = ATA_INL(ch->r_mem, SATA_SATAITC) >> 16; in mvs_ch_intr()
773 if (((ch->rslots >> i) & 1) == 0) in mvs_ch_intr()
776 ch->slot[i].ccb->ccb_h.target_id != port) in mvs_ch_intr()
780 if (ch->numtslots == 0) { in mvs_ch_intr()
792 ch->fatalerr = 1; in mvs_ch_intr()
795 if (ch->numtslots == 0 && in mvs_ch_intr()
802 mvs_end_transaction(&ch->slot[i], et); in mvs_ch_intr()
809 ATA_OUTL(ch->r_mem, SATA_FISIC, ~fisic); in mvs_ch_intr()
823 struct mvs_channel *ch = device_get_softc(dev); in mvs_getstatus() local
824 uint8_t status = ATA_INB(ch->r_mem, clear ? ATA_STATUS : ATA_ALTSTAT); in mvs_getstatus()
826 if (ch->fake_busy) { in mvs_getstatus()
828 ch->fake_busy = 0; in mvs_getstatus()
838 struct mvs_channel *ch = device_get_softc(dev); in mvs_legacy_intr() local
839 struct mvs_slot *slot = &ch->slot[0]; /* PIO is always in slot 0. */ in mvs_legacy_intr()
875 xpt_freeze_simq(ch->sim, 1); in mvs_legacy_intr()
876 ch->toslots |= (1 << slot->slot); in mvs_legacy_intr()
879 ATA_INSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
880 (uint16_t *)(ccb->ataio.data_ptr + ch->donecount), in mvs_legacy_intr()
881 ch->transfersize / 2); in mvs_legacy_intr()
884 ch->donecount += ch->transfersize; in mvs_legacy_intr()
886 if (ccb->ataio.dxfer_len > ch->donecount) { in mvs_legacy_intr()
888 ch->transfersize = min(ccb->ataio.dxfer_len - ch->donecount, in mvs_legacy_intr()
889 ch->transfersize); in mvs_legacy_intr()
896 xpt_freeze_simq(ch->sim, 1); in mvs_legacy_intr()
897 ch->toslots |= (1 << slot->slot); in mvs_legacy_intr()
900 ATA_OUTSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
901 (uint16_t *)(ccb->ataio.data_ptr + ch->donecount), in mvs_legacy_intr()
902 ch->transfersize / 2); in mvs_legacy_intr()
910 } else if (ch->basic_dma) { /* ATAPI DMA */ in mvs_legacy_intr()
913 else if (ATA_INL(ch->r_mem, DMA_S) & DMA_S_ERR) in mvs_legacy_intr()
916 ATA_OUTL(ch->r_mem, DMA_C, 0); in mvs_legacy_intr()
919 length = ATA_INB(ch->r_mem,ATA_CYL_LSB) | in mvs_legacy_intr()
920 (ATA_INB(ch->r_mem,ATA_CYL_MSB) << 8); in mvs_legacy_intr()
921 size = min(ch->transfersize, length); in mvs_legacy_intr()
922 ireason = ATA_INB(ch->r_mem,ATA_IREASON); in mvs_legacy_intr()
937 ATA_OUTSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
938 (uint16_t *)(ccb->csio.data_ptr + ch->donecount), in mvs_legacy_intr()
940 for (resid = ch->transfersize + (size & 1); in mvs_legacy_intr()
942 ATA_OUTW(ch->r_mem, ATA_DATA, 0); in mvs_legacy_intr()
943 ch->donecount += length; in mvs_legacy_intr()
945 ch->transfersize = min(ccb->csio.dxfer_len - ch->donecount, in mvs_legacy_intr()
946 ch->curr[ccb->ccb_h.target_id].bytecount); in mvs_legacy_intr()
957 ATA_INSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
958 (uint16_t *)(ccb->csio.data_ptr + ch->donecount), in mvs_legacy_intr()
962 ATA_INSW_STRM(ch->r_mem, ATA_DATA, (void*)buf, 1); in mvs_legacy_intr()
963 ((uint8_t *)ccb->csio.data_ptr + ch->donecount + in mvs_legacy_intr()
966 for (resid = ch->transfersize + (size & 1); in mvs_legacy_intr()
968 ATA_INW(ch->r_mem, ATA_DATA); in mvs_legacy_intr()
969 ch->donecount += length; in mvs_legacy_intr()
971 ch->transfersize = min(ccb->csio.dxfer_len - ch->donecount, in mvs_legacy_intr()
972 ch->curr[ccb->ccb_h.target_id].bytecount); in mvs_legacy_intr()
980 ATA_INSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
981 (uint16_t *)(ccb->csio.data_ptr + ch->donecount), in mvs_legacy_intr()
983 ch->donecount += length; in mvs_legacy_intr()
986 ATA_OUTSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_intr()
987 (uint16_t *)(ccb->csio.data_ptr + ch->donecount), in mvs_legacy_intr()
989 ch->donecount += length; in mvs_legacy_intr()
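In the ATAPI PIO branches above, the device reports how many bytes it is prepared to move in the cylinder-low/high registers; the driver reassembles that count (ATA_CYL_LSB | ATA_CYL_MSB << 8), clamps it against the remaining transfersize, and then moves 16-bit words, padding or draining an odd trailing byte. A small runnable illustration of that byte-count handling (values are made up, not taken from the driver):

    #include <stdint.h>
    #include <stdio.h>

    int
    main(void)
    {
        uint8_t cyl_lsb = 0x01, cyl_msb = 0x02;     /* device offers 0x0201 bytes */
        unsigned int transfersize = 1024;           /* what the host still wants */
        unsigned int length, size, words;

        length = cyl_lsb | (cyl_msb << 8);          /* byte count from the task file */
        size = length < transfersize ? length : transfersize;
        words = (size + 1) / 2;                     /* PIO moves 16-bit words */
        printf("length=%u size=%u words=%u (odd byte padded)\n",
            length, size, words);
        return (0);
    }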
1016 struct mvs_channel *ch = device_get_softc(dev); in mvs_crbq_intr() local
1023 val = ATA_INL(ch->r_mem, EDMA_RESQIP); in mvs_crbq_intr()
1025 val = ATA_INL(ch->r_mem, EDMA_RESQIP); in mvs_crbq_intr()
1028 bus_dmamap_sync(ch->dma.workrp_tag, ch->dma.workrp_map, in mvs_crbq_intr()
1030 fin_idx = cin_idx = ch->in_idx; in mvs_crbq_intr()
1031 ch->in_idx = in_idx; in mvs_crbq_intr()
1034 (ch->dma.workrp + MVS_CRPB_OFFSET + in mvs_crbq_intr()
1046 cin_idx, fin_idx, in_idx, slot, flags, ch->rslots); in mvs_crbq_intr()
1049 if (ch->numtslots != 0 || in mvs_crbq_intr()
1055 if (ch->slot[slot].state >= MVS_SLOT_RUNNING) { in mvs_crbq_intr()
1056 ccb = ch->slot[slot].ccb; in mvs_crbq_intr()
1060 mvs_end_transaction(&ch->slot[slot], MVS_ERR_NONE); in mvs_crbq_intr()
1065 ch->rslots); in mvs_crbq_intr()
1074 bus_dmamap_sync(ch->dma.workrp_tag, ch->dma.workrp_map, in mvs_crbq_intr()
1076 if (cin_idx == ch->in_idx) { in mvs_crbq_intr()
1077 ATA_OUTL(ch->r_mem, EDMA_RESQOP, in mvs_crbq_intr()
1078 ch->dma.workrp_bus | (cin_idx << EDMA_RESQP_ERPQP_SHIFT)); in mvs_crbq_intr()
1086 struct mvs_channel *ch = device_get_softc(dev); in mvs_check_collision() local
1092 if (ch->numdslots != 0) in mvs_check_collision()
1095 if (ch->numpslots != 0) in mvs_check_collision()
1098 if (!ch->fbs_enabled) { in mvs_check_collision()
1100 if (ch->numtslots != 0 && in mvs_check_collision()
1101 ch->taggedtarget != ccb->ccb_h.target_id) in mvs_check_collision()
1107 if (ch->numtslots != 0) in mvs_check_collision()
1110 if (ch->numpslots != 0) in mvs_check_collision()
1115 if (ch->numrslots != 0) in mvs_check_collision()
1120 if (ch->numrslots != 0) in mvs_check_collision()
1125 if (ch->numrslots != 0) in mvs_check_collision()
1129 if (ch->aslots != 0) in mvs_check_collision()
1137 struct mvs_channel *ch = device_get_softc(dev); in mvs_tfd_read() local
1140 res->status = ATA_INB(ch->r_mem, ATA_ALTSTAT); in mvs_tfd_read()
1141 res->error = ATA_INB(ch->r_mem, ATA_ERROR); in mvs_tfd_read()
1142 res->device = ATA_INB(ch->r_mem, ATA_DRIVE); in mvs_tfd_read()
1143 ATA_OUTB(ch->r_mem, ATA_CONTROL, ATA_A_HOB); in mvs_tfd_read()
1144 res->sector_count_exp = ATA_INB(ch->r_mem, ATA_COUNT); in mvs_tfd_read()
1145 res->lba_low_exp = ATA_INB(ch->r_mem, ATA_SECTOR); in mvs_tfd_read()
1146 res->lba_mid_exp = ATA_INB(ch->r_mem, ATA_CYL_LSB); in mvs_tfd_read()
1147 res->lba_high_exp = ATA_INB(ch->r_mem, ATA_CYL_MSB); in mvs_tfd_read()
1148 ATA_OUTB(ch->r_mem, ATA_CONTROL, 0); in mvs_tfd_read()
1149 res->sector_count = ATA_INB(ch->r_mem, ATA_COUNT); in mvs_tfd_read()
1150 res->lba_low = ATA_INB(ch->r_mem, ATA_SECTOR); in mvs_tfd_read()
1151 res->lba_mid = ATA_INB(ch->r_mem, ATA_CYL_LSB); in mvs_tfd_read()
1152 res->lba_high = ATA_INB(ch->r_mem, ATA_CYL_MSB); in mvs_tfd_read()
1158 struct mvs_channel *ch = device_get_softc(dev); in mvs_tfd_write() local
1161 ATA_OUTB(ch->r_mem, ATA_DRIVE, cmd->device); in mvs_tfd_write()
1162 ATA_OUTB(ch->r_mem, ATA_CONTROL, cmd->control); in mvs_tfd_write()
1163 ATA_OUTB(ch->r_mem, ATA_FEATURE, cmd->features_exp); in mvs_tfd_write()
1164 ATA_OUTB(ch->r_mem, ATA_FEATURE, cmd->features); in mvs_tfd_write()
1165 ATA_OUTB(ch->r_mem, ATA_COUNT, cmd->sector_count_exp); in mvs_tfd_write()
1166 ATA_OUTB(ch->r_mem, ATA_COUNT, cmd->sector_count); in mvs_tfd_write()
1167 ATA_OUTB(ch->r_mem, ATA_SECTOR, cmd->lba_low_exp); in mvs_tfd_write()
1168 ATA_OUTB(ch->r_mem, ATA_SECTOR, cmd->lba_low); in mvs_tfd_write()
1169 ATA_OUTB(ch->r_mem, ATA_CYL_LSB, cmd->lba_mid_exp); in mvs_tfd_write()
1170 ATA_OUTB(ch->r_mem, ATA_CYL_LSB, cmd->lba_mid); in mvs_tfd_write()
1171 ATA_OUTB(ch->r_mem, ATA_CYL_MSB, cmd->lba_high_exp); in mvs_tfd_write()
1172 ATA_OUTB(ch->r_mem, ATA_CYL_MSB, cmd->lba_high); in mvs_tfd_write()
1173 ATA_OUTB(ch->r_mem, ATA_COMMAND, cmd->command); in mvs_tfd_write()
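mvs_tfd_write() above programs an LBA48 task file by writing each register's "previous" (extended) byte first and its "current" byte second (features_exp before features, lba_low_exp before lba_low, and so on); the shadow registers are two deep, so the second write becomes the current value and the first is kept as the extended value. A runnable sketch of slicing a 48-bit LBA and a 16-bit sector count into those byte pairs (plain C, illustrative values):

    #include <stdint.h>
    #include <stdio.h>

    int
    main(void)
    {
        uint64_t lba = 0x0000123456789aULL;     /* 48-bit LBA */
        uint16_t count = 0x1000;                /* sector count (0 would mean 65536) */

        uint8_t lba_low      = lba & 0xff;
        uint8_t lba_mid      = (lba >> 8) & 0xff;
        uint8_t lba_high     = (lba >> 16) & 0xff;
        uint8_t lba_low_exp  = (lba >> 24) & 0xff;
        uint8_t lba_mid_exp  = (lba >> 32) & 0xff;
        uint8_t lba_high_exp = (lba >> 40) & 0xff;

        /* Write order mirrors mvs_tfd_write(): *_exp byte first, then the low byte. */
        printf("COUNT: %02x then %02x\n", count >> 8, count & 0xff);
        printf("LBA:   exp %02x %02x %02x, then %02x %02x %02x\n",
            lba_low_exp, lba_mid_exp, lba_high_exp, lba_low, lba_mid, lba_high);
        return (0);
    }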
1180 struct mvs_channel *ch = device_get_softc(dev); in mvs_begin_transaction() local
1184 if (ch->pm_level > 0) in mvs_begin_transaction()
1193 slotn = ffs(~ch->oslots) - 1; in mvs_begin_transaction()
1196 if (ch->quirks & MVS_Q_GENIIE) in mvs_begin_transaction()
1197 tag = ffs(~ch->otagspd[ccb->ccb_h.target_id]) - 1; in mvs_begin_transaction()
1203 slot = &ch->slot[slotn]; in mvs_begin_transaction()
1207 if (ch->numrslots == 0 && ch->pm_level > 3) in mvs_begin_transaction()
1208 callout_stop(&ch->pm_timer); in mvs_begin_transaction()
1210 ch->oslots |= (1 << slot->slot); in mvs_begin_transaction()
1211 ch->numrslots++; in mvs_begin_transaction()
1212 ch->numrslotspd[ccb->ccb_h.target_id]++; in mvs_begin_transaction()
1215 ch->otagspd[ccb->ccb_h.target_id] |= (1 << slot->tag); in mvs_begin_transaction()
1216 ch->numtslots++; in mvs_begin_transaction()
1217 ch->numtslotspd[ccb->ccb_h.target_id]++; in mvs_begin_transaction()
1218 ch->taggedtarget = ccb->ccb_h.target_id; in mvs_begin_transaction()
1221 ch->numdslots++; in mvs_begin_transaction()
1224 ch->numpslots++; in mvs_begin_transaction()
1229 ch->aslots |= (1 << slot->slot); in mvs_begin_transaction()
1234 ch->numpslots++; in mvs_begin_transaction()
1237 ch->curr[ccb->ccb_h.target_id].mode >= ATA_DMA && in mvs_begin_transaction()
1238 (ch->quirks & MVS_Q_SOC) == 0 && in mvs_begin_transaction()
1248 ch->basic_dma = 1; in mvs_begin_transaction()
1252 if (ch->numpslots == 0 || ch->basic_dma) { in mvs_begin_transaction()
1254 bus_dmamap_load_ccb(ch->dma.data_tag, slot->dma.data_map, in mvs_begin_transaction()
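The slot bookkeeping in mvs_begin_transaction() above is plain bitmask work: the next free command slot is the first clear bit of oslots (ffs(~ch->oslots) - 1), the slot is then marked busy with oslots |= (1 << slot), and the same mask style tracks running (rslots), timed-out (toslots) and atomic (aslots) slots elsewhere in this listing. A runnable illustration of the allocation idiom (user-space C, made-up mask value):

    #include <stdint.h>
    #include <stdio.h>
    #include <strings.h>    /* ffs() */

    #define MAX_SLOTS   32

    int
    main(void)
    {
        uint32_t oslots = 0x0000002b;   /* slots 0, 1, 3 and 5 already occupied */
        int slot;

        slot = ffs(~oslots) - 1;        /* first clear bit = first free slot (2 here) */
        if (slot < 0 || slot >= MAX_SLOTS) {
            printf("no free slot\n");
            return (1);
        }
        oslots |= 1u << slot;           /* mark busy, as ch->oslots |= (1 << slot->slot) */
        printf("allocated slot %d, oslots=%08x\n", slot, (unsigned)oslots);

        oslots &= ~(1u << slot);        /* completion clears the bit again */
        printf("freed slot %d, oslots=%08x\n", slot, (unsigned)oslots);
        return (0);
    }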
1265 struct mvs_channel *ch = device_get_softc(slot->dev); in mvs_dmasetprd() local
1276 if (nsegs == 1 && ch->basic_dma == 0 && (ch->quirks & MVS_Q_GENIIE)) { in mvs_dmasetprd()
1282 eprd = (struct mvs_eprd *)(ch->dma.workrq + slot->eprd_offset); in mvs_dmasetprd()
1291 bus_dmamap_sync(ch->dma.data_tag, slot->dma.data_map, in mvs_dmasetprd()
1294 if (ch->basic_dma) in mvs_dmasetprd()
1304 struct mvs_channel *ch = device_get_softc(dev); in mvs_legacy_execute_transaction() local
1311 ch->rslots |= (1 << slot->slot); in mvs_legacy_execute_transaction()
1312 ATA_OUTB(ch->r_mem, SATA_SATAICTL, port << SATA_SATAICTL_PMPTX_SHIFT); in mvs_legacy_execute_transaction()
1320 ccb->ataio.res.status = ATA_INB(ch->r_mem, ATA_STATUS); in mvs_legacy_execute_transaction()
1325 ch->donecount = 0; in mvs_legacy_execute_transaction()
1330 ch->transfersize = min(ccb->ataio.dxfer_len, in mvs_legacy_execute_transaction()
1331 ch->curr[port].bytecount); in mvs_legacy_execute_transaction()
1333 ch->transfersize = min(ccb->ataio.dxfer_len, 512); in mvs_legacy_execute_transaction()
1335 ch->fake_busy = 1; in mvs_legacy_execute_transaction()
1341 xpt_freeze_simq(ch->sim, 1); in mvs_legacy_execute_transaction()
1342 ch->toslots |= (1 << slot->slot); in mvs_legacy_execute_transaction()
1346 ATA_OUTSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_execute_transaction()
1347 (uint16_t *)(ccb->ataio.data_ptr + ch->donecount), in mvs_legacy_execute_transaction()
1348 ch->transfersize / 2); in mvs_legacy_execute_transaction()
1351 ch->donecount = 0; in mvs_legacy_execute_transaction()
1352 ch->transfersize = min(ccb->csio.dxfer_len, in mvs_legacy_execute_transaction()
1353 ch->curr[port].bytecount); in mvs_legacy_execute_transaction()
1355 if (ch->basic_dma) { in mvs_legacy_execute_transaction()
1356 ATA_OUTB(ch->r_mem, ATA_FEATURE, ATA_F_DMA); in mvs_legacy_execute_transaction()
1357 ATA_OUTB(ch->r_mem, ATA_CYL_LSB, 0); in mvs_legacy_execute_transaction()
1358 ATA_OUTB(ch->r_mem, ATA_CYL_MSB, 0); in mvs_legacy_execute_transaction()
1360 ATA_OUTB(ch->r_mem, ATA_FEATURE, 0); in mvs_legacy_execute_transaction()
1361 ATA_OUTB(ch->r_mem, ATA_CYL_LSB, ch->transfersize); in mvs_legacy_execute_transaction()
1362 ATA_OUTB(ch->r_mem, ATA_CYL_MSB, ch->transfersize >> 8); in mvs_legacy_execute_transaction()
1364 ATA_OUTB(ch->r_mem, ATA_COMMAND, ATA_PACKET_CMD); in mvs_legacy_execute_transaction()
1365 ch->fake_busy = 1; in mvs_legacy_execute_transaction()
1369 xpt_freeze_simq(ch->sim, 1); in mvs_legacy_execute_transaction()
1370 ch->toslots |= (1 << slot->slot); in mvs_legacy_execute_transaction()
1376 int reason = ATA_INB(ch->r_mem, ATA_IREASON); in mvs_legacy_execute_transaction()
1377 int status = ATA_INB(ch->r_mem, ATA_STATUS); in mvs_legacy_execute_transaction()
1387 xpt_freeze_simq(ch->sim, 1); in mvs_legacy_execute_transaction()
1388 ch->toslots |= (1 << slot->slot); in mvs_legacy_execute_transaction()
1393 ATA_OUTSW_STRM(ch->r_mem, ATA_DATA, in mvs_legacy_execute_transaction()
1396 ch->curr[port].atapi / 2); in mvs_legacy_execute_transaction()
1398 if (ch->basic_dma) { in mvs_legacy_execute_transaction()
1400 eprd = ch->dma.workrq_bus + slot->eprd_offset; in mvs_legacy_execute_transaction()
1401 ATA_OUTL(ch->r_mem, DMA_DTLBA, eprd); in mvs_legacy_execute_transaction()
1402 ATA_OUTL(ch->r_mem, DMA_DTHBA, (eprd >> 16) >> 16); in mvs_legacy_execute_transaction()
1403 ATA_OUTL(ch->r_mem, DMA_C, DMA_C_START | in mvs_legacy_execute_transaction()
1418 struct mvs_channel *ch = device_get_softc(dev); in mvs_execute_transaction() local
1427 eprd = ch->dma.workrq_bus + slot->eprd_offset; in mvs_execute_transaction()
1429 if (ch->quirks & MVS_Q_GENIIE) { in mvs_execute_transaction()
1431 (ch->dma.workrq + MVS_CRQB_OFFSET + (MVS_CRQB_SIZE * ch->out_idx)); in mvs_execute_transaction()
1468 (ch->dma.workrq + MVS_CRQB_OFFSET + (MVS_CRQB_SIZE * ch->out_idx)); in mvs_execute_transaction()
1513 bus_dmamap_sync(ch->dma.workrq_tag, ch->dma.workrq_map, in mvs_execute_transaction()
1515 bus_dmamap_sync(ch->dma.workrp_tag, ch->dma.workrp_map, in mvs_execute_transaction()
1518 ch->rslots |= (1 << slot->slot); in mvs_execute_transaction()
1520 ch->out_idx = (ch->out_idx + 1) & (MVS_MAX_SLOTS - 1); in mvs_execute_transaction()
1521 ATA_OUTL(ch->r_mem, EDMA_REQQIP, in mvs_execute_transaction()
1522 ch->dma.workrq_bus + MVS_CRQB_OFFSET + (MVS_CRQB_SIZE * ch->out_idx)); in mvs_execute_transaction()
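mvs_execute_transaction() above queues a CRQB and advances the producer index with ch->out_idx = (ch->out_idx + 1) & (MVS_MAX_SLOTS - 1), the usual power-of-two ring-buffer wrap, then writes the new in-pointer back to EDMA_REQQIP. A tiny runnable demonstration of the wrap (plain C; the slot count is assumed to be 32 here and must be a power of two for the mask to work):

    #include <stdio.h>

    #define MAX_SLOTS   32  /* power of two, so "& (MAX_SLOTS - 1)" == "% MAX_SLOTS" */

    int
    main(void)
    {
        unsigned int out_idx = 30;
        int i;

        for (i = 0; i < 4; i++) {
            out_idx = (out_idx + 1) & (MAX_SLOTS - 1);
            printf("out_idx=%u\n", out_idx);    /* prints 31, 0, 1, 2 */
        }
        return (0);
    }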
1533 struct mvs_channel *ch = device_get_softc(dev); in mvs_process_timeout() local
1536 mtx_assert(&ch->mtx, MA_OWNED); in mvs_process_timeout()
1540 if (ch->slot[i].state < MVS_SLOT_RUNNING) in mvs_process_timeout()
1542 mvs_end_transaction(&ch->slot[i], MVS_ERR_TIMEOUT); in mvs_process_timeout()
1550 struct mvs_channel *ch = device_get_softc(dev); in mvs_rearm_timeout() local
1553 mtx_assert(&ch->mtx, MA_OWNED); in mvs_rearm_timeout()
1555 struct mvs_slot *slot = &ch->slot[i]; in mvs_rearm_timeout()
1560 if ((ch->toslots & (1 << i)) == 0) in mvs_rearm_timeout()
1574 struct mvs_channel *ch = device_get_softc(dev); in mvs_timeout() local
1582 ATA_INL(ch->r_mem, EDMA_IEC), in mvs_timeout()
1583 ATA_INL(ch->r_mem, SATA_SS), ATA_INL(ch->r_mem, SATA_SE), in mvs_timeout()
1584 ATA_INL(ch->r_mem, EDMA_S), ATA_INL(ch->r_mem, DMA_C), in mvs_timeout()
1585 ATA_INL(ch->r_mem, DMA_S), ch->rslots, in mvs_timeout()
1586 ATA_INB(ch->r_mem, ATA_ALTSTAT)); in mvs_timeout()
1590 if (ch->toslots == 0) in mvs_timeout()
1591 xpt_freeze_simq(ch->sim, 1); in mvs_timeout()
1592 ch->toslots |= (1 << slot->slot); in mvs_timeout()
1593 if ((ch->rslots & ~ch->toslots) == 0) in mvs_timeout()
1597 ch->rslots & ~ch->toslots); in mvs_timeout()
1605 struct mvs_channel *ch = device_get_softc(dev); in mvs_end_transaction() local
1609 bus_dmamap_sync(ch->dma.workrq_tag, ch->dma.workrq_map, in mvs_end_transaction()
1625 ch->basic_dma == 0) in mvs_end_transaction()
1626 ccb->csio.resid = ccb->csio.dxfer_len - ch->donecount; in mvs_end_transaction()
1628 if (ch->numpslots == 0 || ch->basic_dma) { in mvs_end_transaction()
1630 bus_dmamap_sync(ch->dma.data_tag, slot->dma.data_map, in mvs_end_transaction()
1633 bus_dmamap_unload(ch->dma.data_tag, slot->dma.data_map); in mvs_end_transaction()
1637 ch->eslots |= (1 << slot->slot); in mvs_end_transaction()
1639 if ((et != MVS_ERR_NONE) && (!ch->recoverycmd) && in mvs_end_transaction()
1653 ch->fatalerr = 1; in mvs_end_transaction()
1669 ch->fatalerr = 1; in mvs_end_transaction()
1670 if (!ch->recoverycmd) { in mvs_end_transaction()
1671 xpt_freeze_simq(ch->sim, 1); in mvs_end_transaction()
1678 if (!ch->recoverycmd) { in mvs_end_transaction()
1679 xpt_freeze_simq(ch->sim, 1); in mvs_end_transaction()
1686 ch->fatalerr = 1; in mvs_end_transaction()
1690 ch->oslots &= ~(1 << slot->slot); in mvs_end_transaction()
1691 ch->rslots &= ~(1 << slot->slot); in mvs_end_transaction()
1692 ch->aslots &= ~(1 << slot->slot); in mvs_end_transaction()
1696 ch->numrslots--; in mvs_end_transaction()
1697 ch->numrslotspd[ccb->ccb_h.target_id]--; in mvs_end_transaction()
1700 ch->otagspd[ccb->ccb_h.target_id] &= ~(1 << slot->tag); in mvs_end_transaction()
1701 ch->numtslots--; in mvs_end_transaction()
1702 ch->numtslotspd[ccb->ccb_h.target_id]--; in mvs_end_transaction()
1704 ch->numdslots--; in mvs_end_transaction()
1706 ch->numpslots--; in mvs_end_transaction()
1709 ch->numpslots--; in mvs_end_transaction()
1710 ch->basic_dma = 0; in mvs_end_transaction()
1714 lastto = (ch->toslots == (1 << slot->slot)); in mvs_end_transaction()
1715 ch->toslots &= ~(1 << slot->slot); in mvs_end_transaction()
1717 xpt_release_simq(ch->sim, TRUE); in mvs_end_transaction()
1729 ch->hold[slot->slot] = ccb; in mvs_end_transaction()
1730 ch->holdtag[slot->slot] = slot->tag; in mvs_end_transaction()
1731 ch->numhslots++; in mvs_end_transaction()
1735 if (ch->rslots == 0) { in mvs_end_transaction()
1737 if (ch->toslots != 0 || ch->fatalerr) { in mvs_end_transaction()
1741 if (ch->eslots != 0) { in mvs_end_transaction()
1743 ch->eslots = 0; in mvs_end_transaction()
1746 if (!ch->recoverycmd && ch->numhslots) in mvs_end_transaction()
1750 } else if ((ch->rslots & ~ch->toslots) == 0 && in mvs_end_transaction()
1754 if (ch->frozen && !mvs_check_collision(dev, ch->frozen)) { in mvs_end_transaction()
1755 union ccb *fccb = ch->frozen; in mvs_end_transaction()
1756 ch->frozen = NULL; in mvs_end_transaction()
1758 xpt_release_simq(ch->sim, TRUE); in mvs_end_transaction()
1761 if (ch->numrslots == 0 && ch->pm_level > 3 && in mvs_end_transaction()
1762 (ch->curr[ch->pm_present ? 15 : 0].caps & CTS_SATA_CAPS_D_PMREQ)) { in mvs_end_transaction()
1763 callout_schedule(&ch->pm_timer, in mvs_end_transaction()
1764 (ch->pm_level == 4) ? hz / 1000 : hz / 8); in mvs_end_transaction()
1771 struct mvs_channel *ch = device_get_softc(dev); in mvs_issue_recovery() local
1779 if (ch->hold[i]) in mvs_issue_recovery()
1788 if (ch->hold[i] == NULL) in mvs_issue_recovery()
1790 ch->hold[i]->ccb_h.status &= ~CAM_STATUS_MASK; in mvs_issue_recovery()
1791 ch->hold[i]->ccb_h.status |= CAM_RESRC_UNAVAIL; in mvs_issue_recovery()
1792 xpt_done(ch->hold[i]); in mvs_issue_recovery()
1793 ch->hold[i] = NULL; in mvs_issue_recovery()
1794 ch->numhslots--; in mvs_issue_recovery()
1799 xpt_setup_ccb(&ccb->ccb_h, ch->hold[i]->ccb_h.path, in mvs_issue_recovery()
1800 ch->hold[i]->ccb_h.pinfo.priority); in mvs_issue_recovery()
1801 if (ch->hold[i]->ccb_h.func_code == XPT_ATA_IO) { in mvs_issue_recovery()
1833 csio->data_ptr = (void *)&ch->hold[i]->csio.sense_data; in mvs_issue_recovery()
1834 csio->dxfer_len = ch->hold[i]->csio.sense_len; in mvs_issue_recovery()
1841 ch->recoverycmd = 1; in mvs_issue_recovery()
1842 xpt_freeze_simq(ch->sim, 1); in mvs_issue_recovery()
1849 struct mvs_channel *ch = device_get_softc(dev); in mvs_process_read_log() local
1854 ch->recoverycmd = 0; in mvs_process_read_log()
1860 if (!ch->hold[i]) in mvs_process_read_log()
1862 if (ch->hold[i]->ccb_h.target_id != ccb->ccb_h.target_id) in mvs_process_read_log()
1864 if ((data[0] & 0x1F) == ch->holdtag[i]) { in mvs_process_read_log()
1865 res = &ch->hold[i]->ataio.res; in mvs_process_read_log()
1878 ch->hold[i]->ccb_h.status &= ~CAM_STATUS_MASK; in mvs_process_read_log()
1879 ch->hold[i]->ccb_h.status |= CAM_REQUEUE_REQ; in mvs_process_read_log()
1881 xpt_done(ch->hold[i]); in mvs_process_read_log()
1882 ch->hold[i] = NULL; in mvs_process_read_log()
1883 ch->numhslots--; in mvs_process_read_log()
1893 if (!ch->hold[i]) in mvs_process_read_log()
1895 if (ch->hold[i]->ccb_h.target_id != ccb->ccb_h.target_id) in mvs_process_read_log()
1897 xpt_done(ch->hold[i]); in mvs_process_read_log()
1898 ch->hold[i] = NULL; in mvs_process_read_log()
1899 ch->numhslots--; in mvs_process_read_log()
1904 xpt_release_simq(ch->sim, TRUE); in mvs_process_read_log()
1910 struct mvs_channel *ch = device_get_softc(dev); in mvs_process_request_sense() local
1913 ch->recoverycmd = 0; in mvs_process_request_sense()
1917 ch->hold[i]->ccb_h.status |= CAM_AUTOSNS_VALID; in mvs_process_request_sense()
1919 ch->hold[i]->ccb_h.status &= ~CAM_STATUS_MASK; in mvs_process_request_sense()
1920 ch->hold[i]->ccb_h.status |= CAM_AUTOSENSE_FAIL; in mvs_process_request_sense()
1922 xpt_done(ch->hold[i]); in mvs_process_request_sense()
1923 ch->hold[i] = NULL; in mvs_process_request_sense()
1924 ch->numhslots--; in mvs_process_request_sense()
1926 xpt_release_simq(ch->sim, TRUE); in mvs_process_request_sense()
1950 struct mvs_channel *ch = device_get_softc(dev); in mvs_requeue_frozen() local
1951 union ccb *fccb = ch->frozen; in mvs_requeue_frozen()
1954 ch->frozen = NULL; in mvs_requeue_frozen()
1968 struct mvs_channel *ch = device_get_softc(dev); in mvs_reset_to() local
1971 if (ch->resetting == 0) in mvs_reset_to()
1973 ch->resetting--; in mvs_reset_to()
1978 (310 - ch->resetting) * 100); in mvs_reset_to()
1980 ch->resetting = 0; in mvs_reset_to()
1981 xpt_release_simq(ch->sim, TRUE); in mvs_reset_to()
1984 if (ch->resetting == 0) { in mvs_reset_to()
1987 xpt_release_simq(ch->sim, TRUE); in mvs_reset_to()
1990 callout_schedule(&ch->reset_timer, hz / 10); in mvs_reset_to()
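mvs_reset_to() above is a 100 ms polling countdown: mvs_reset() later in this listing arms ch->resetting = 310 and a callout every hz / 10, each tick decrements the counter, and the elapsed time reported on give-up is (310 - ch->resetting) * 100 ms, i.e. a budget of roughly 31 seconds. A small runnable check of that arithmetic (plain C):

    #include <stdio.h>

    int
    main(void)
    {
        int resetting = 310;    /* initial value, cf. ch->resetting = 310 in mvs_reset() */
        int ticks = 0;

        while (resetting > 0 && ticks < 42) {   /* pretend the link never comes back */
            resetting--;                        /* one hz/10 callout tick ~= 100 ms */
            ticks++;
        }
        printf("elapsed = %d ms of a %d ms budget\n",
            (310 - resetting) * 100, 310 * 100);
        return (0);
    }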
1996 struct mvs_channel *ch = device_get_softc(dev); in mvs_errata() local
1999 if (ch->quirks & MVS_Q_SOC65) { in mvs_errata()
2000 val = ATA_INL(ch->r_mem, SATA_PHYM3); in mvs_errata()
2005 ATA_OUTL(ch->r_mem, SATA_PHYM3, val); in mvs_errata()
2007 val = ATA_INL(ch->r_mem, SATA_PHYM4); in mvs_errata()
2010 ATA_OUTL(ch->r_mem, SATA_PHYM4, val); in mvs_errata()
2012 val = ATA_INL(ch->r_mem, SATA_PHYM9_GEN2); in mvs_errata()
2016 ATA_OUTL(ch->r_mem, SATA_PHYM9_GEN2, val); in mvs_errata()
2018 val = ATA_INL(ch->r_mem, SATA_PHYM9_GEN1); in mvs_errata()
2022 ATA_OUTL(ch->r_mem, SATA_PHYM9_GEN1, val); in mvs_errata()
2029 struct mvs_channel *ch = device_get_softc(dev); in mvs_reset() local
2032 xpt_freeze_simq(ch->sim, 1); in mvs_reset()
2036 if (ch->resetting) { in mvs_reset()
2037 ch->resetting = 0; in mvs_reset()
2038 callout_stop(&ch->reset_timer); in mvs_reset()
2039 xpt_release_simq(ch->sim, TRUE); in mvs_reset()
2045 ATA_OUTL(ch->r_mem, DMA_C, 0); in mvs_reset()
2048 if (ch->slot[i].state < MVS_SLOT_RUNNING) in mvs_reset()
2051 mvs_end_transaction(&ch->slot[i], MVS_ERR_INNOCENT); in mvs_reset()
2054 if (!ch->hold[i]) in mvs_reset()
2056 xpt_done(ch->hold[i]); in mvs_reset()
2057 ch->hold[i] = NULL; in mvs_reset()
2058 ch->numhslots--; in mvs_reset()
2060 if (ch->toslots != 0) in mvs_reset()
2061 xpt_release_simq(ch->sim, TRUE); in mvs_reset()
2062 ch->eslots = 0; in mvs_reset()
2063 ch->toslots = 0; in mvs_reset()
2064 ch->fatalerr = 0; in mvs_reset()
2065 ch->fake_busy = 0; in mvs_reset()
2067 xpt_async(AC_BUS_RESET, ch->path, NULL); in mvs_reset()
2068 ATA_OUTL(ch->r_mem, EDMA_IEM, 0); in mvs_reset()
2069 ATA_OUTL(ch->r_mem, EDMA_CMD, EDMA_CMD_EATARST); in mvs_reset()
2071 ATA_OUTL(ch->r_mem, EDMA_CMD, 0); in mvs_reset()
2077 ch->devices = 0; in mvs_reset()
2078 ATA_OUTL(ch->r_mem, SATA_SE, 0xffffffff); in mvs_reset()
2079 ATA_OUTL(ch->r_mem, EDMA_IEC, 0); in mvs_reset()
2080 ATA_OUTL(ch->r_mem, EDMA_IEM, ~EDMA_IE_TRANSIENT); in mvs_reset()
2081 xpt_release_simq(ch->sim, TRUE); in mvs_reset()
2093 ch->resetting = 310; in mvs_reset()
2096 ch->devices = 1; in mvs_reset()
2097 ATA_OUTL(ch->r_mem, SATA_SE, 0xffffffff); in mvs_reset()
2098 ATA_OUTL(ch->r_mem, EDMA_IEC, 0); in mvs_reset()
2099 ATA_OUTL(ch->r_mem, EDMA_IEM, ~EDMA_IE_TRANSIENT); in mvs_reset()
2100 if (ch->resetting) in mvs_reset()
2101 callout_reset(&ch->reset_timer, hz / 10, mvs_reset_to, dev); in mvs_reset()
2103 xpt_release_simq(ch->sim, TRUE); in mvs_reset()
2109 struct mvs_channel *ch = device_get_softc(dev); in mvs_softreset() local
2115 ATA_OUTB(ch->r_mem, SATA_SATAICTL, port << SATA_SATAICTL_PMPTX_SHIFT); in mvs_softreset()
2116 ATA_OUTB(ch->r_mem, ATA_CONTROL, ATA_A_RESET); in mvs_softreset()
2118 ATA_OUTB(ch->r_mem, ATA_CONTROL, 0); in mvs_softreset()
2141 if (stuck && ch->pm_present && port != 15) { in mvs_softreset()
2142 ATA_OUTB(ch->r_mem, SATA_SATAICTL, in mvs_softreset()
2144 ATA_OUTB(ch->r_mem, ATA_CONTROL, ATA_A_RESET); in mvs_softreset()
2146 ATA_OUTB(ch->r_mem, ATA_CONTROL, 0); in mvs_softreset()
2154 mvs_sata_connect(struct mvs_channel *ch) in mvs_sata_connect() argument
2161 status = ATA_INL(ch->r_mem, SATA_SS); in mvs_sata_connect()
2170 device_printf(ch->dev, "SATA offline status=%08x\n", in mvs_sata_connect()
2181 device_printf(ch->dev, in mvs_sata_connect()
2188 device_printf(ch->dev, "SATA connect time=%dus status=%08x\n", in mvs_sata_connect()
2192 ATA_OUTL(ch->r_mem, SATA_SE, 0xffffffff); in mvs_sata_connect()
2199 struct mvs_channel *ch = device_get_softc(dev); in mvs_sata_phy_reset() local
2203 sata_rev = ch->user[ch->pm_present ? 15 : 0].revision; in mvs_sata_phy_reset()
2212 ATA_OUTL(ch->r_mem, SATA_SC, in mvs_sata_phy_reset()
2216 ATA_OUTL(ch->r_mem, SATA_SC, in mvs_sata_phy_reset()
2217 SATA_SC_DET_IDLE | val | ((ch->pm_level > 0) ? 0 : in mvs_sata_phy_reset()
2219 if (!mvs_sata_connect(ch)) { in mvs_sata_phy_reset()
2220 if (ch->pm_level > 0) in mvs_sata_phy_reset()
2221 ATA_OUTL(ch->r_mem, SATA_SC, SATA_SC_DET_DISABLE); in mvs_sata_phy_reset()
2230 struct mvs_channel *ch = device_get_softc(dev); in mvs_check_ids() local
2232 if (ccb->ccb_h.target_id > ((ch->quirks & MVS_Q_GENI) ? 0 : 15)) { in mvs_check_ids()
2255 struct mvs_channel *ch; in mvsaction() local
2260 ch = (struct mvs_channel *)cam_sim_softc(sim); in mvsaction()
2261 dev = ch->dev; in mvsaction()
2268 if (ch->devices == 0 || in mvsaction()
2269 (ch->pm_present == 0 && in mvsaction()
2278 ch->frozen = ccb; in mvsaction()
2280 xpt_freeze_simq(ch->sim, 1); in mvsaction()
2297 d = &ch->curr[ccb->ccb_h.target_id]; in mvsaction()
2299 d = &ch->user[ccb->ccb_h.target_id]; in mvsaction()
2305 d->bytecount = min((ch->quirks & MVS_Q_GENIIE) ? 8192 : 2048, in mvsaction()
2311 ch->pm_present = cts->xport_specific.sata.pm_present; in mvsaction()
2329 d = &ch->curr[ccb->ccb_h.target_id]; in mvsaction()
2331 d = &ch->user[ccb->ccb_h.target_id]; in mvsaction()
2340 (ccb->ccb_h.target_id == 0 && !ch->pm_present))) { in mvsaction()
2341 status = ATA_INL(ch->r_mem, SATA_SS) & SATA_SS_SPD_MASK; in mvsaction()
2349 // if (ch->pm_level) in mvsaction()
2353 ch->user[ccb->ccb_h.target_id].caps; in mvsaction()
2360 (ch->quirks & MVS_Q_GENIIE) == 0*/) in mvsaction()
2368 cts->xport_specific.sata.pm_present = ch->pm_present; in mvsaction()
2393 if (!(ch->quirks & MVS_Q_GENI)) { in mvsaction()
2396 if ((ch->quirks & MVS_Q_GENIIE) || ch->pm_present == 0) in mvsaction()
2402 if (!(ch->quirks & MVS_Q_GENI)) in mvsaction()
2419 if ((ch->quirks & MVS_Q_SOC) == 0) { in mvsaction()
2438 struct mvs_channel *ch = (struct mvs_channel *)cam_sim_softc(sim); in mvspoll() local
2441 arg.arg = ch->dev; in mvspoll()
2444 if (ch->resetting != 0 && in mvspoll()
2445 (--ch->resetpolldiv <= 0 || !callout_pending(&ch->reset_timer))) { in mvspoll()
2446 ch->resetpolldiv = 1000; in mvspoll()
2447 mvs_reset_to(ch->dev); in mvspoll()