Lines Matching +full:fast +full:- +full:clk

189  * apix_lock is used for cpu selection and vector re-binding
214 /* number of CPUs in power-on transition state */
294 apixs[i]->x_cpuid = i; in apix_softinit()
295 LOCK_INIT_CLEAR(&apixs[i]->x_lock); in apix_softinit()
324 int cpuid = CPU->cpu_id; in apix_get_pending_spl()
326 return (bsrw_insn(apixs[cpuid]->x_intr_pending)); in apix_get_pending_spl()
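
The apix_get_pending_spl() hit above scans the per-CPU x_intr_pending word with bsrw_insn(), a bit-scan-reverse; the word appears to keep one bit per pending IPL, so the index of the highest set bit is the highest pending priority level. A minimal standalone illustration of that idea (not illumos source; the helper name is mine):

    /*
     * Standalone illustration of apix_get_pending_spl() above: with one bit
     * per IPL in the pending word, a bit-scan-reverse (bsrw_insn in the
     * kernel) returns the highest pending priority level.
     */
    #include <stdio.h>

    static int
    highest_pending_ipl(unsigned short pending)
    {
        int ipl;

        for (ipl = 15; ipl >= 0; ipl--) {
            if (pending & (1u << ipl))
                return (ipl);
        }
        return (-1);        /* nothing pending */
    }

    int
    main(void)
    {
        unsigned short pending = (1u << 4) | (1u << 9);

        printf("highest pending IPL: %d\n", highest_pending_ipl(pending));
        return (0);
    }
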
338 apix_vector = apixs[cpu]->x_vectbl[vec]; in apix_get_intr_handler()
340 return ((uintptr_t)(apix_vector->v_autovect)); in apix_get_intr_handler()
360 apic_pir_vect = apix_get_ipivect(XC_CPUPOKE_PIL, -1); in apix_init()
382 highest_irq - lowest_irq + 1; in apix_init()
384 * Number of available per-CPU vectors excluding in apix_init()
385 * reserved vectors for Dtrace, int80, system-call, in apix_init()
386 * fast-trap, etc. in apix_init()
388 apix_irminfo.apix_per_cpu_vectors = APIX_NAVINTR - in apix_init()
393 if (apic_hpet_vect != -1) in apix_init()
395 if (apic_sci_vect != -1) in apix_init()
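
The apix_init() hits above size the per-CPU vector budget: start from APIX_NAVINTR, subtract the vectors reserved for Dtrace, int80, system-call and fast-trap, and, as the two if-statements suggest, give up one more slot each for the HPET and ACPI SCI vectors when they are present. A standalone sketch of that arithmetic with made-up numbers; navail and nresv are stand-in names, only APIX_NAVINTR, apic_hpet_vect and apic_sci_vect come from the listing:

    /*
     * Standalone sketch (not illumos source) of the per-CPU vector budget
     * computed in apix_init().
     */
    #include <stdio.h>

    int
    main(void)
    {
        int navail = 200;           /* stand-in for APIX_NAVINTR */
        int nresv = 4;              /* Dtrace, int80, system-call, fast-trap */
        int apic_hpet_vect = 0xd1;  /* pretend the HPET got a vector */
        int apic_sci_vect = -1;     /* pretend the ACPI SCI is absent */
        int per_cpu = navail - nresv;

        if (apic_hpet_vect != -1)
            per_cpu--;              /* one slot pinned for the HPET */
        if (apic_sci_vect != -1)
            per_cpu--;              /* one slot pinned for the ACPI SCI */
        printf("per-CPU vectors: %d\n", per_cpu);
        return (0);
    }
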
408 apic_reg_ops->apic_write_task_reg(APIC_MASK_ALL); in apix_init_intr()
415 apic_reg_ops->apic_write(APIC_FORMAT_REG, in apix_init_intr()
418 apic_reg_ops->apic_write(APIC_FORMAT_REG, in apix_init_intr()
422 apic_reg_ops->apic_write(APIC_DEST_REG, in apix_init_intr()
431 * interrupts are level-triggered. in apix_init_intr()
437 apic_reg_ops->apic_write(APIC_SPUR_INT_REG, svr); in apix_init_intr()
446 apic_reg_ops->apic_write(APIC_LOCAL_TIMER, AV_MASK|APIC_RESV_IRQ); in apix_init_intr()
447 apic_reg_ops->apic_write(APIC_INT_VECT0, AV_MASK|APIC_RESV_IRQ); in apix_init_intr()
448 apic_reg_ops->apic_write(APIC_INT_VECT1, AV_NMI); /* enable NMI */ in apix_init_intr()
452 * 'Max LVT entry' + 1; on 82489DX's (non-integrated in apix_init_intr()
459 nlvt = ((apic_reg_ops->apic_read(APIC_VERS_REG) >> 16) & in apix_init_intr()
472 apic_cpcovf_vect = apix_get_ipivect(ipl, -1); in apix_init_intr()
483 apic_reg_ops->apic_write(APIC_PCINT_VECT, in apix_init_intr()
493 lvtval = apic_reg_ops->apic_read(APIC_THERM_VECT); in apix_init_intr()
496 apic_reg_ops->apic_write(APIC_THERM_VECT, in apix_init_intr()
506 apic_errvect = apix_get_ipivect(ipl, -1); in apix_init_intr()
516 apic_reg_ops->apic_write(APIC_ERR_VECT, apic_errvect); in apix_init_intr()
517 apic_reg_ops->apic_write(APIC_ERROR_STATUS, 0); in apix_init_intr()
518 apic_reg_ops->apic_write(APIC_ERROR_STATUS, 0); in apix_init_intr()
527 apic_cmci_vect = apix_get_ipivect(ipl, -1); in apix_init_intr()
535 apic_reg_ops->apic_write_task_reg(0); in apix_init_intr()
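
A few hits up, apix_init_intr() derives the number of local vector table entries from APIC_VERS_REG; per the truncated comment, it is the 'Max LVT entry' field plus one. The field sits in bits 16-23 of the version register, so the 0xff mask below is my completion of the truncated hit, shown as a standalone illustration:

    /*
     * Standalone illustration of the APIC version-register decode in
     * apix_init_intr(): bits 16-23 hold "Max LVT entry", and the LVT
     * entry count is that field plus one.
     */
    #include <stdio.h>

    int
    main(void)
    {
        unsigned int vers_reg = 0x00060015;     /* example: max LVT 6, version 0x15 */
        int nlvt = ((vers_reg >> 16) & 0xff) + 1;

        printf("local vector table entries: %d\n", nlvt);
        return (0);
    }
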
562 for (i = 7; i >= 1; i--) { in apix_picinit()
563 isr = apic_reg_ops->apic_read(APIC_ISR_REG + (i * 4)); in apix_picinit()
567 apic_reg_ops->apic_write( in apix_picinit()
583 /* add nmi handler - least priority nmi handler */ in apix_picinit()
622 * Return -1 for spurious interrupts
629 uint32_t cpuid = CPU->cpu_id; in apix_intr_enter()
633 int nipl = -1; in apix_intr_enter()
644 cpu_infop->aci_spur_cnt++; in apix_intr_enter()
655 nipl = vecp->v_pri; in apix_intr_enter()
673 ASSERT(vecp->v_state != APIX_STATE_OBSOLETED); in apix_intr_enter()
675 /* pre-EOI handling for level-triggered interrupts */ in apix_intr_enter()
677 (vecp->v_type & APIX_TYPE_FIXED) && apic_level_intr[vecp->v_inum]) in apix_intr_enter()
678 apix_level_intr_pre_eoi(vecp->v_inum); in apix_intr_enter()
683 cpu_infop->aci_current[nipl] = vector; in apix_intr_enter()
684 if ((nipl > ipl) && (nipl > cpu->cpu_base_spl)) { in apix_intr_enter()
685 cpu_infop->aci_curipl = (uchar_t)nipl; in apix_intr_enter()
686 cpu_infop->aci_ISR_in_progress |= 1 << nipl; in apix_intr_enter()
694 APIC_DEBUG_BUF_PUT(vecp->v_inum); in apix_intr_enter()
717 cpu_infop->aci_curipl = (uchar_t)prev_ipl; in apix_intr_exit()
719 cpu_infop->aci_ISR_in_progress &= (2 << prev_ipl) - 1; in apix_intr_exit()
721 if (apixp->x_obsoletes != NULL) { in apix_intr_exit()
726 (void) apix_obsolete_vector(apixp->x_obsoletes); in apix_intr_exit()
741 * --------+---------------------------
752 apic_cpus[psm_get_cpu_id()].aci_ISR_in_progress &= (2 << ipl) - 1; in apix_setspl()
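
The apix_intr_exit() and apix_setspl() hits above both apply the mask (2 << ipl) - 1 to aci_ISR_in_progress. That expression has bits 0..ipl set, so the AND clears the in-progress bits for every priority level above ipl. A standalone demonstration:

    /*
     * Standalone illustration of the (2 << ipl) - 1 mask used in
     * apix_intr_exit() and apix_setspl(): it keeps bits 0..ipl and clears
     * the "ISR in progress" bits for all higher levels.
     */
    #include <stdio.h>

    int
    main(void)
    {
        unsigned int in_progress = 0xffffu;     /* pretend all 16 levels are in service */
        int ipl = 4;

        printf("mask for ipl %d: 0x%x\n", ipl, (2u << ipl) - 1);    /* 0x1f */
        in_progress &= (2u << ipl) - 1;
        printf("levels still in progress: 0x%x\n", in_progress);    /* 0x1f */
        return (0);
    }
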
768 if (vecp->v_type == APIX_TYPE_FIXED) in apix_addspl()
769 apix_intx_set_shared(vecp->v_inum, 1); in apix_addspl()
772 if (vecp->v_share > 1) in apix_addspl()
776 if (vecp->v_type == APIX_TYPE_IPI) in apix_addspl()
801 if (vecp->v_type == APIX_TYPE_FIXED) in apix_delspl()
802 apix_intx_set_shared(vecp->v_inum, -1); in apix_delspl()
805 if (vecp->v_share > 1) in apix_delspl()
809 if (vecp->v_type == APIX_TYPE_IPI) in apix_delspl()
844 vecp = apixp->x_vectbl[i]; in apix_disable_intr()
855 vecp = apixp->x_vectbl[i]; in apix_disable_intr()
859 if (vecp->v_flags & APIX_VECT_USER_BOUND) { in apix_disable_intr()
863 type = vecp->v_type; in apix_disable_intr()
882 vecp->v_cpuid, vecp->v_vector); in apix_disable_intr()
932 vecp->v_bound_cpuid != cpun) in apix_enable_intr()
935 if (vecp->v_type != APIX_TYPE_MSI) in apix_enable_intr()
947 * type == -1 indicates it is an internal request. Do not change
956 if (type != -1) in apix_get_ipivect()
961 return (-1); /* shouldn't happen */ in apix_get_ipivect()
969 if ((vector = apix_get_ipivect(ipl, -1)) == -1) in apix_get_clkvect()
970 return (-1); in apix_get_clkvect()
972 apic_clkvect = vector - APIC_BASE_VECT; in apix_get_clkvect()
1025 apic_reg_ops->apic_write(APIC_DIVIDE_REG, apic_divide_reg_init); in apix_post_cpu_start()
1048 * apic_intr_enter during clk intr processing in apix_post_cyclic_setup()
1089 pops->psm_send_ipi = x2apic_send_ipi; in x2apic_update_psm()
1090 send_dirintf = pops->psm_send_ipi; in x2apic_update_psm()
1092 pops->psm_send_pir_ipi = x2apic_send_pir_ipi; in x2apic_update_psm()
1093 psm_send_pir_ipi = pops->psm_send_pir_ipi; in x2apic_update_psm()
1104 * dip - pointer to the dev_info structure of the requested device
1105 * hdlp - pointer to the internal interrupt handle structure for the
1107 * intr_op - opcode for this call
1108 * result - pointer to the integer that will hold the result to be
1124 ispec->intrspec_pri = hdlp->ih_pri; in apix_intr_ops()
1125 ispec->intrspec_vec = hdlp->ih_inum; in apix_intr_ops()
1126 ispec->intrspec_func = hdlp->ih_cb_func; in apix_intr_ops()
1130 switch (hdlp->ih_type) { in apix_intr_ops()
1133 *result = apix_alloc_msi(dip, hdlp->ih_inum, in apix_intr_ops()
1134 hdlp->ih_scratch1, in apix_intr_ops()
1135 (int)(uintptr_t)hdlp->ih_scratch2); in apix_intr_ops()
1138 /* allocate MSI-X vectors */ in apix_intr_ops()
1139 *result = apix_alloc_msix(dip, hdlp->ih_inum, in apix_intr_ops()
1140 hdlp->ih_scratch1, in apix_intr_ops()
1141 (int)(uintptr_t)hdlp->ih_scratch2); in apix_intr_ops()
1145 if ((ihdl_plat_t *)hdlp->ih_private == NULL) { in apix_intr_ops()
1148 ispec = ((ihdl_plat_t *)hdlp->ih_private)->ip_ispecp; in apix_intr_ops()
1149 *result = apix_intx_alloc_vector(dip, hdlp->ih_inum, in apix_intr_ops()
1157 apix_free_vectors(dip, hdlp->ih_inum, hdlp->ih_scratch1, in apix_intr_ops()
1158 hdlp->ih_type); in apix_intr_ops()
1166 vecp = apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type); in apix_intr_ops()
1168 *result = APIX_VIRTVECTOR(vecp->v_cpuid, in apix_intr_ops()
1169 vecp->v_vector); in apix_intr_ops()
1179 if (hdlp->ih_type == DDI_INTR_TYPE_FIXED) { in apix_intr_ops()
1180 vecp = apix_intx_xlate_vector(dip, hdlp->ih_inum, in apix_intr_ops()
1183 APIX_VIRTVECTOR(vecp->v_cpuid, vecp->v_vector); in apix_intr_ops()
1189 vecp = apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type); in apix_intr_ops()
1196 if (hdlp->ih_type != DDI_INTR_TYPE_FIXED) in apix_intr_ops()
1199 vecp = apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type); in apix_intr_ops()
1203 apix_intx_clear_mask(vecp->v_inum); in apix_intr_ops()
1206 if (hdlp->ih_type != DDI_INTR_TYPE_FIXED) in apix_intr_ops()
1209 vecp = apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type); in apix_intr_ops()
1213 apix_intx_set_mask(vecp->v_inum); in apix_intr_ops()
1216 if (hdlp->ih_type != DDI_INTR_TYPE_FIXED) in apix_intr_ops()
1219 vecp = apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type); in apix_intr_ops()
1223 *result = apix_intx_get_shared(vecp->v_inum); in apix_intr_ops()
1230 if (hdlp->ih_type == DDI_INTR_TYPE_FIXED) in apix_intr_ops()
1233 if (apix_get_dev_map(dip, hdlp->ih_inum, hdlp->ih_type) == NULL) in apix_intr_ops()
1245 target = (int)(intptr_t)hdlp->ih_private; in apix_intr_ops()
1254 vecp = apix_get_req_vector(hdlp, hdlp->ih_flags); in apix_intr_ops()
1258 hdlp->ih_vector)); in apix_intr_ops()
1276 newvecp->v_bound_cpuid = target; in apix_intr_ops()
1277 hdlp->ih_vector = APIX_VIRTVECTOR(newvecp->v_cpuid, in apix_intr_ops()
1278 newvecp->v_vector); in apix_intr_ops()
1287 if (apix_get_intr_info(hdlp, hdlp->ih_private) != PSM_SUCCESS) in apix_intr_ops()
1294 * masked off the MSI/X bits in hdlp->ih_type if not in apix_intr_ops()
1298 * hdlp->ih_type passed in from the nexus has all the in apix_intr_ops()
1310 apic_support_msi = -1; /* not-supported */ in apix_intr_ops()
1314 *result = hdlp->ih_type; in apix_intr_ops()
1316 *result = hdlp->ih_type & ~DDI_INTR_TYPE_MSIX; in apix_intr_ops()
1318 *result = hdlp->ih_type & ~(DDI_INTR_TYPE_MSI | in apix_intr_ops()
1323 if (hdlp->ih_type == DDI_INTR_TYPE_FIXED) in apix_intr_ops()
1328 ((apic_get_type_t *)(hdlp->ih_private))->avgi_type = in apix_intr_ops()
1330 ((apic_get_type_t *)(hdlp->ih_private))->avgi_num_intr = in apix_intr_ops()
1332 ((apic_get_type_t *)(hdlp->ih_private))->avgi_num_cpu = in apix_intr_ops()
1334 hdlp->ih_ver = apic_get_apic_version(); in apix_intr_ops()
1356 vecp->v_busy = 0; in apix_cleanup_busy()
1392 apixs[i]->x_vectbl[vect]->v_busy++; in apix_redistribute_compute()
1427 affinity_set(vecp->v_cpuid); in apix_get_pending()
1429 index = vecp->v_vector / 32; in apix_get_pending()
1430 bit = vecp->v_vector % 32; in apix_get_pending()
1431 irr = apic_reg_ops->apic_read(APIC_IRR_REG + index); in apix_get_pending()
1437 if (!pending && vecp->v_type == APIX_TYPE_FIXED) in apix_get_pending()
1438 pending = apix_intx_get_pending(vecp->v_inum); in apix_get_pending()
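
The apix_get_pending() hits above test the local APIC's Interrupt Request Register, which is 256 bits wide and read 32 bits at a time, so vector/32 selects the word and vector%32 the bit within it. A standalone illustration using a fake IRR array:

    /*
     * Standalone illustration of the IRR lookup in apix_get_pending():
     * word index is vector/32, bit index is vector%32.
     */
    #include <stdio.h>

    int
    main(void)
    {
        unsigned int irr[8] = { 0 };    /* stand-in for the 8 IRR words */
        unsigned char vector = 0xd3;
        int index, bit, pending;

        irr[vector / 32] |= 1u << (vector % 32);    /* pretend vector 0xd3 is pending */

        index = vector / 32;
        bit = vector % 32;
        pending = (irr[index] & (1u << bit)) != 0;

        printf("vector 0x%x: IRR word %d, bit %d, pending %d\n",
            vector, index, bit, pending);
        return (0);
    }
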
1452 return (apix_intx_get_vector(hdlp->ih_vector)); in apix_get_req_vector()
1454 virt_vec = (virt_vec == 0) ? hdlp->ih_vector : virt_vec; in apix_get_req_vector()
1463 vecp = apix_get_dev_map(hdlp->ih_dip, hdlp->ih_inum, in apix_get_req_vector()
1464 hdlp->ih_type); in apix_get_req_vector()
1481 vecp = apix_get_req_vector(hdlp, intr_params_p->avgi_req_flags); in apix_get_intr_info()
1483 intr_params_p->avgi_num_devs = 0; in apix_get_intr_info()
1484 intr_params_p->avgi_cpu_id = 0; in apix_get_intr_info()
1485 intr_params_p->avgi_req_flags = 0; in apix_get_intr_info()
1489 if (intr_params_p->avgi_req_flags & PSMGI_REQ_CPUID) { in apix_get_intr_info()
1490 intr_params_p->avgi_cpu_id = vecp->v_cpuid; in apix_get_intr_info()
1493 if (intr_params_p->avgi_cpu_id & IRQ_USER_BOUND) { in apix_get_intr_info()
1494 intr_params_p->avgi_cpu_id &= ~IRQ_USER_BOUND; in apix_get_intr_info()
1495 intr_params_p->avgi_cpu_id |= PSMGI_CPU_USER_BOUND; in apix_get_intr_info()
1499 if (intr_params_p->avgi_req_flags & PSMGI_REQ_VECTOR) in apix_get_intr_info()
1500 intr_params_p->avgi_vector = vecp->v_vector; in apix_get_intr_info()
1502 if (intr_params_p->avgi_req_flags & in apix_get_intr_info()
1505 intr_params_p->avgi_num_devs = vecp->v_share; in apix_get_intr_info()
1507 if (intr_params_p->avgi_req_flags & PSMGI_REQ_GET_DEVS) { in apix_get_intr_info()
1509 intr_params_p->avgi_req_flags |= PSMGI_REQ_NUM_DEVS; in apix_get_intr_info()
1512 if (intr_params_p->avgi_num_devs > 0) { in apix_get_intr_info()
1513 for (i = 0, av_dev = vecp->v_autovect; av_dev; in apix_get_intr_info()
1514 av_dev = av_dev->av_link) { in apix_get_intr_info()
1515 if (av_dev->av_vector && av_dev->av_dip) in apix_get_intr_info()
1518 intr_params_p->avgi_num_devs = in apix_get_intr_info()
1519 (uint8_t)MIN(intr_params_p->avgi_num_devs, i); in apix_get_intr_info()
1523 if (intr_params_p->avgi_num_devs == 0) { in apix_get_intr_info()
1524 intr_params_p->avgi_dip_list = NULL; in apix_get_intr_info()
1529 intr_params_p->avgi_dip_list = kmem_zalloc( in apix_get_intr_info()
1530 intr_params_p->avgi_num_devs * in apix_get_intr_info()
1533 if (intr_params_p->avgi_dip_list == NULL) { in apix_get_intr_info()
1547 for (i = 0, av_dev = vecp->v_autovect; av_dev; in apix_get_intr_info()
1548 av_dev = av_dev->av_link) { in apix_get_intr_info()
1549 if (av_dev->av_vector && av_dev->av_dip) in apix_get_intr_info()
1550 intr_params_p->avgi_dip_list[i++] = in apix_get_intr_info()
1551 av_dev->av_dip; in apix_get_intr_info()
1581 if (vecp->v_type == APIX_TYPE_MSI) { in apix_set_cpu()
1589 * Mask MSI-X. It's unmasked when MSI-X gets enabled. in apix_set_cpu()
1591 if (vecp->v_type == APIX_TYPE_MSIX && IS_VECT_ENABLED(vecp)) { in apix_set_cpu()
1594 inum = vecp->v_devp->dv_inum; in apix_set_cpu()
1602 * entry in the MSI-X table in apix_set_cpu()
1605 off = (uintptr_t)msix_p->msix_tbl_addr + (inum * in apix_set_cpu()
1607 mask = ddi_get32(msix_p->msix_tbl_hdl, (uint32_t *)off); in apix_set_cpu()
1608 ddi_put32(msix_p->msix_tbl_hdl, (uint32_t *)off, in apix_set_cpu()
1619 ddi_put32(msix_p->msix_tbl_hdl, (uint32_t *)off, mask); in apix_set_cpu()
1631 uint32_t orig_cpu = vecp->v_cpuid; in apix_grp_set_cpu()
1632 int orig_vect = vecp->v_vector; in apix_grp_set_cpu()
1640 " newcpu:%x\n", vecp->v_cpuid, vecp->v_vector, new_cpu)); in apix_grp_set_cpu()
1646 if (vecp->v_type != APIX_TYPE_MSI) { in apix_grp_set_cpu()
1655 if ((num_vectors < 1) || ((num_vectors - 1) & orig_vect)) { in apix_grp_set_cpu()
1662 if (vecp->v_inum != apix_get_min_dev_inum(dip, vecp->v_type)) in apix_grp_set_cpu()
1675 if ((vp->v_share != 0) && in apix_grp_set_cpu()
1677 (vp->v_cpuid != vecp->v_cpuid))) { in apix_grp_set_cpu()
1683 vp->v_cpuid, (void *)dip, in apix_grp_set_cpu()
1701 pci_config_put32(handle, msi_mask_off, (uint32_t)-1); in apix_grp_set_cpu()
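
The apix_grp_set_cpu() hit above that tests ((num_vectors - 1) & orig_vect) rejects the group when the expression is non-zero. My reading: multi-message MSI encodes the message number in the low bits of a single base vector, so the vector block must start on a multiple of the (power-of-two) message count, and the expression is non-zero exactly when the base vector is misaligned. A standalone sketch of that check:

    /*
     * Standalone sketch (not illumos source) of the MSI base-vector
     * alignment test seen in apix_grp_set_cpu(): (count - 1) & base is
     * zero only when base is aligned to the power-of-two count.
     */
    #include <stdio.h>

    static int
    msi_base_ok(int num_vectors, int base_vect)
    {
        if (num_vectors < 1)
            return (0);
        return (((num_vectors - 1) & base_vect) == 0);
    }

    int
    main(void)
    {
        printf("4 messages at 0x60: %s\n", msi_base_ok(4, 0x60) ? "ok" : "bad");
        printf("4 messages at 0x62: %s\n", msi_base_ok(4, 0x62) ? "ok" : "bad");
        return (0);
    }
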
1728 irqp->airq_cpu = cpuid; in apix_intx_set_vector()
1729 irqp->airq_vector = vector; in apix_intx_set_vector()
1743 if (IS_IRQ_FREE(irqp) || (irqp->airq_cpu == IRQ_UNINIT)) { in apix_intx_get_vector()
1747 cpuid = irqp->airq_cpu; in apix_intx_get_vector()
1748 vector = irqp->airq_vector; in apix_intx_get_vector()
1764 apix_vector_t *vecp = xv_vector(irqp->airq_cpu, irqp->airq_vector); in apix_intx_enable()
1768 ioapicindex = irqp->airq_ioapicindex; in apix_intx_enable()
1769 intin = irqp->airq_intin_no; in apix_intx_enable()
1770 cpu_infop = &apic_cpus[irqp->airq_cpu]; in apix_intx_enable()
1772 irdt.ir_lo = AV_PDEST | AV_FIXED | irqp->airq_rdt_entry; in apix_intx_enable()
1773 irdt.ir_hi = cpu_infop->aci_local_id; in apix_intx_enable()
1775 apic_vt_ops->apic_intrmap_alloc_entry(&vecp->v_intrmap_private, NULL, in apix_intx_enable()
1776 vecp->v_type, 1, ioapicindex); in apix_intx_enable()
1777 apic_vt_ops->apic_intrmap_map_entry(vecp->v_intrmap_private, in apix_intx_enable()
1778 (void *)&irdt, vecp->v_type, 1); in apix_intx_enable()
1779 apic_vt_ops->apic_intrmap_record_rdt(vecp->v_intrmap_private, &irdt); in apix_intx_enable()
1781 /* write RDT entry high dword - destination */ in apix_intx_enable()
1788 vecp->v_state = APIX_STATE_ENABLED; in apix_intx_enable()
1814 ioapicindex = irqp->airq_ioapicindex; in apix_intx_disable()
1815 intin = irqp->airq_intin_no; in apix_intx_disable()
1835 irqp->airq_mps_intr_index = FREE_INDEX; in apix_intx_free()
1836 irqp->airq_cpu = IRQ_UNINIT; in apix_intx_free()
1837 irqp->airq_vector = APIX_INVALID_VECT; in apix_intx_free()
1864 ioapic_ix = irqp->airq_ioapicindex; in apix_intx_rebind()
1865 intin_no = irqp->airq_intin_no; in apix_intx_rebind()
1885 * Mask the RDT entry for level-triggered interrupts. in apix_intx_rebind()
1887 irqp->airq_rdt_entry |= AV_MASK; in apix_intx_rebind()
1911 irqp->airq_rdt_entry &= ~AV_MASK; in apix_intx_rebind()
1925 * wait for remote IRR to be cleared for level-triggered in apix_intx_rebind()
1958 * times for this interrupt, try the last-ditch workaround: in apix_intx_rebind()
1963 * channels has failed. So as a last-ditch in apix_intx_rebind()
2000 /* reprogramme IO-APIC RDT entry */ in apix_intx_rebind()
2023 /* check IO-APIC delivery status */ in apix_intx_get_pending()
2024 intin = irqp->airq_intin_no; in apix_intx_get_pending()
2025 ioapicindex = irqp->airq_ioapicindex; in apix_intx_get_pending()
2053 ASSERT(irqp->airq_mps_intr_index != FREE_INDEX); in apix_intx_set_mask()
2055 intin = irqp->airq_intin_no; in apix_intx_set_mask()
2056 ioapixindex = irqp->airq_ioapicindex; in apix_intx_set_mask()
2085 ASSERT(irqp->airq_mps_intr_index != FREE_INDEX); in apix_intx_clear_mask()
2087 intin = irqp->airq_intin_no; in apix_intx_clear_mask()
2088 ioapixindex = irqp->airq_ioapicindex; in apix_intx_clear_mask()
2105 * For level-triggered interrupt, mask the IRQ line. Mask means
2122 intin_ix = irqp->airq_intin_no; in apix_level_intr_pre_eoi()
2123 apic_ix = irqp->airq_ioapicindex; in apix_level_intr_pre_eoi()
2125 if (irqp->airq_cpu != CPU->cpu_id) { in apix_level_intr_pre_eoi()
2127 ioapic_write_eoi(apic_ix, irqp->airq_vector); in apix_level_intr_pre_eoi()
2141 (irqp->airq_rdt_entry & (~0xff)) | APIX_RESV_VECTOR); in apix_level_intr_pre_eoi()
2144 AV_MASK | irqp->airq_rdt_entry); in apix_level_intr_pre_eoi()
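
Per the comment above, apix_level_intr_pre_eoi() masks a still-asserted level-triggered line before the EOI: one hit ORs AV_MASK into the RDT entry, the other rewrites the entry's vector field. Bits 0-7 of the RDT low dword hold the vector, so clearing the low byte and OR-ing in a reserved vector is the rewrite shown. A standalone illustration with made-up values; APIX_RESV_VECTOR's real value is not in the listing, so 0x20 below is a stand-in:

    /*
     * Standalone illustration of the RDT low-dword rewrite seen in
     * apix_level_intr_pre_eoi(): replace the vector field (bits 0-7)
     * with a reserved vector.
     */
    #include <stdio.h>

    int
    main(void)
    {
        unsigned int rdt_lo = 0x0000a0d3;   /* made-up level-triggered entry, vector 0xd3 */
        unsigned int resv_vector = 0x20;    /* stand-in for APIX_RESV_VECTOR */
        unsigned int rewritten = (rdt_lo & ~0xffu) | resv_vector;

        printf("rdt 0x%08x -> 0x%08x\n", rdt_lo, rewritten);
        return (0);
    }
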
2151 * For level-triggered interrupt, unmask the IRQ line
2165 intin_ix = irqp->airq_intin_no; in apix_level_intr_post_dispatch()
2166 apic_ix = irqp->airq_ioapicindex; in apix_level_intr_post_dispatch()
2171 * Send EOI to IO-APIC in apix_level_intr_post_dispatch()
2173 ioapic_write_eoi(apic_ix, irqp->airq_vector); in apix_level_intr_post_dispatch()
2177 irqp->airq_rdt_entry); in apix_level_intr_post_dispatch()
2181 ioapic_write_eoi(apic_ix, irqp->airq_vector); in apix_level_intr_post_dispatch()
2195 if (IS_IRQ_FREE(irqp) || (irqp->airq_cpu == IRQ_UNINIT)) { in apix_intx_get_shared()
2199 share = irqp->airq_share; in apix_intx_get_shared()
2216 irqp->airq_share += delta; in apix_intx_set_shared()
2221 * Setup IRQ table. Return IRQ no or -1 on failure
2227 int origirq = ispec->intrspec_vec; in apix_intx_setup()
2236 intr_index = (short)(intrp - apic_io_intrp); in apix_intx_setup()
2237 ioapic = intrp->intr_destid; in apix_intx_setup()
2238 ipin = intrp->intr_destintin; in apix_intx_setup()
2241 for (ioapicindex = apic_io_max - 1; ioapicindex; ioapicindex--) in apix_intx_setup()
2248 if ((newirq = apic_find_intin(ioapicindex, ipin)) != -1) in apix_intx_setup()
2256 ipin = irqno - apic_io_vectbase[ioapicindex]; in apix_intx_setup()
2259 apic_irq_table[irqno]->airq_mps_intr_index == ACPI_INDEX) { in apix_intx_setup()
2260 ASSERT(apic_irq_table[irqno]->airq_intin_no == ipin && in apix_intx_setup()
2261 apic_irq_table[irqno]->airq_ioapicindex == in apix_intx_setup()
2278 if (irqp->airq_mps_intr_index != FREE_INDEX) { in apix_intx_setup()
2280 if (newirq == -1) { in apix_intx_setup()
2281 return (-1); in apix_intx_setup()
2291 irqp->airq_mps_intr_index = intr_index; in apix_intx_setup()
2292 irqp->airq_ioapicindex = ioapicindex; in apix_intx_setup()
2293 irqp->airq_intin_no = ipin; in apix_intx_setup()
2294 irqp->airq_dip = dip; in apix_intx_setup()
2295 irqp->airq_origirq = (uchar_t)origirq; in apix_intx_setup()
2297 irqp->airq_iflag = *iflagp; in apix_intx_setup()
2298 irqp->airq_cpu = IRQ_UNINIT; in apix_intx_setup()
2299 irqp->airq_vector = 0; in apix_intx_setup()
2305 * Setup IRQ table for non-pci devices. Return IRQ no or -1 on error
2311 int irqno = ispec->intrspec_vec; in apix_intx_setup_nonpci()
2326 if (((busid = apic_find_bus_id(bustype)) != -1) && in apix_intx_setup_nonpci()
2336 return (-1); in apix_intx_setup_nonpci()
2344 if (hp->Type == ACPI_MADT_TYPE_INTERRUPT_OVERRIDE) { in apix_intx_setup_nonpci()
2346 if (isop->Bus == 0 && in apix_intx_setup_nonpci()
2347 isop->SourceIrq == irqno) { in apix_intx_setup_nonpci()
2348 newirq = isop->GlobalIrq; in apix_intx_setup_nonpci()
2349 intr_flag.intr_po = isop->IntiFlags & in apix_intx_setup_nonpci()
2351 intr_flag.intr_el = (isop->IntiFlags & in apix_intx_setup_nonpci()
2361 hp->Length); in apix_intx_setup_nonpci()
2372 * Setup IRQ table for pci devices. Return IRQ no or -1 on error
2385 return (-1); in apix_intx_setup_pci()
2391 return (-1); in apix_intx_setup_pci()
2392 ipin = pci_config_get8(cfg_handle, PCI_CONF_IPIN) - PCI_INTA; in apix_intx_setup_pci()
2398 return (-1); in apix_intx_setup_pci()
2409 if (pci_irq == -1) in apix_intx_setup_pci()
2410 return (-1); in apix_intx_setup_pci()
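
The apix_intx_setup_pci() hit above converts the PCI Interrupt Pin configuration register into a zero-based pin index by subtracting PCI_INTA. The register reports 1..4 for INTA#..INTD# (0 means no pin wired), so the subtraction yields 0..3. A standalone illustration:

    /*
     * Standalone illustration of the pin arithmetic in
     * apix_intx_setup_pci(): Interrupt Pin register value minus 1
     * (PCI_INTA) gives the 0-based pin index.
     */
    #include <stdio.h>

    int
    main(void)
    {
        const char *name[] = { "INTA#", "INTB#", "INTC#", "INTD#" };
        int pci_conf_ipin = 3;          /* example: device wired to INTC# */
        int ipin = pci_conf_ipin - 1;   /* 1 == PCI_INTA */

        printf("interrupt pin register %d -> ipin %d (%s)\n",
            pci_conf_ipin, ipin, name[ipin]);
        return (0);
    }
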
2422 int newirq, irqno = ispec->intrspec_vec; in apix_intx_xlate_irq()
2462 if (newirq != -1) in apix_intx_xlate_irq()
2472 if (newirq != -1) in apix_intx_xlate_irq()
2477 if (newirq == -1) { in apix_intx_xlate_irq()
2479 return (-1); in apix_intx_xlate_irq()
2493 if ((irqno = apix_intx_xlate_irq(dip, inum, ispec)) == -1) in apix_intx_alloc_vector()
2502 vecp->v_cpuid, vecp->v_vector)); in apix_intx_alloc_vector()
2519 if ((irqno = apix_intx_xlate_irq(dip, inum, ispec)) == -1) in apix_intx_xlate_vector()
2551 pops->psm_send_ipi = apic_common_send_ipi; in apic_switch_ipi_callback()
2552 send_dirintf = pops->psm_send_ipi; in apic_switch_ipi_callback()
2553 pops->psm_send_pir_ipi = apic_common_send_pir_ipi; in apic_switch_ipi_callback()
2554 psm_send_pir_ipi = pops->psm_send_pir_ipi; in apic_switch_ipi_callback()
2559 apic_poweron_cnt--; in apic_switch_ipi_callback()
2561 pops->psm_send_ipi = x2apic_send_ipi; in apic_switch_ipi_callback()
2562 send_dirintf = pops->psm_send_ipi; in apic_switch_ipi_callback()
2563 pops->psm_send_pir_ipi = x2apic_send_pir_ipi; in apic_switch_ipi_callback()
2564 psm_send_pir_ipi = pops->psm_send_pir_ipi; in apic_switch_ipi_callback()