Lines Matching refs:dev_priv

157 struct drm_i915_private *dev_priv =
158 container_of(work, typeof(*dev_priv), l3_parity.error_work);
159 struct intel_gt *gt = to_gt(dev_priv);
169 mutex_lock(&dev_priv->drm.struct_mutex);
172 if (drm_WARN_ON(&dev_priv->drm, !dev_priv->l3_parity.which_slice))
175 misccpctl = intel_uncore_rmw(&dev_priv->uncore, GEN7_MISCCPCTL,
177 intel_uncore_posting_read(&dev_priv->uncore, GEN7_MISCCPCTL);
179 while ((slice = ffs(dev_priv->l3_parity.which_slice)) != 0) {
183 if (drm_WARN_ON_ONCE(&dev_priv->drm,
184 slice >= NUM_L3_SLICES(dev_priv)))
187 dev_priv->l3_parity.which_slice &= ~(1<<slice);
191 error_status = intel_uncore_read(&dev_priv->uncore, reg);
196 intel_uncore_write(&dev_priv->uncore, reg, GEN7_PARITY_ERROR_VALID | GEN7_L3CDERRST1_ENABLE);
197 intel_uncore_posting_read(&dev_priv->uncore, reg);
206 kobject_uevent_env(&dev_priv->drm.primary->kdev->kobj,
209 drm_dbg(&dev_priv->drm,
219 intel_uncore_write(&dev_priv->uncore, GEN7_MISCCPCTL, misccpctl);
222 drm_WARN_ON(&dev_priv->drm, dev_priv->l3_parity.which_slice);
224 gen5_gt_enable_irq(gt, GT_PARITY_ERROR(dev_priv));
227 mutex_unlock(&dev_priv->drm.struct_mutex);
232 struct drm_i915_private *dev_priv = arg;
233 struct intel_display *display = &dev_priv->display;
236 if (!intel_irqs_enabled(dev_priv))
240 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
249 gt_iir = intel_uncore_read(&dev_priv->uncore, GTIIR);
250 pm_iir = intel_uncore_read(&dev_priv->uncore, GEN6_PMIIR);
251 iir = intel_uncore_read(&dev_priv->uncore, VLV_IIR);
271 intel_uncore_write(&dev_priv->uncore, VLV_MASTER_IER, 0);
272 ier = intel_uncore_rmw(&dev_priv->uncore, VLV_IER, ~0, 0);
275 intel_uncore_write(&dev_priv->uncore, GTIIR, gt_iir);
277 intel_uncore_write(&dev_priv->uncore, GEN6_PMIIR, pm_iir);
280 hotplug_status = i9xx_hpd_irq_ack(dev_priv);
287 i9xx_pipestat_irq_ack(dev_priv, iir, pipe_stats);
298 intel_uncore_write(&dev_priv->uncore, VLV_IIR, iir);
300 intel_uncore_write(&dev_priv->uncore, VLV_IER, ier);
301 intel_uncore_write(&dev_priv->uncore, VLV_MASTER_IER, MASTER_INTERRUPT_ENABLE);
304 gen6_gt_irq_handler(to_gt(dev_priv), gt_iir);
306 gen6_rps_irq_handler(&to_gt(dev_priv)->rps, pm_iir);
309 i9xx_hpd_irq_handler(dev_priv, hotplug_status);
314 valleyview_pipestat_irq_handler(dev_priv, pipe_stats);
317 pmu_irq_stats(dev_priv, ret);
319 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
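
Lines 232-319 are valleyview_irq_handler(). Each pass of its loop reads the pending GT/PM/display IIR bits, disables VLV_MASTER_IER and VLV_IER while acknowledging them, re-enables both, and only then runs the GT, hotplug and pipestat handlers on the snapshot. Below is a simplified, compilable model of that latch/ack/handle ordering; the plain variables stand in for the MMIO registers the driver reaches through intel_uncore_read()/intel_uncore_write().

    #include <stdint.h>
    #include <stdio.h>

    /* Plain variables standing in for VLV_MASTER_IER and VLV_IIR. */
    static uint32_t master_ier = 1;
    static uint32_t iir = 0x22;

    static void handle_events(uint32_t events)
    {
        printf("handling events 0x%08x\n", (unsigned)events);
    }

    static void irq_loop_iteration(void)
    {
        /* 1. Snapshot the pending bits; an empty snapshot ends the loop. */
        uint32_t latched = iir;

        if (!latched)
            return;

        /* 2. Disable the master interrupt while acknowledging, so the
         *    hardware cannot re-raise the line mid-ack. */
        master_ier = 0;

        /* 3. Acknowledge the snapshot (the real IIR is write-one-to-clear;
         *    clearing the bits here models that). */
        iir &= ~latched;

        /* 4. Re-enable the master interrupt so a new event arriving during
         *    processing raises a fresh IRQ. */
        master_ier = 1;

        /* 5. Do the real work on the snapshot, outside the ack window. */
        handle_events(latched);
    }

    int main(void)
    {
        irq_loop_iteration();
        return 0;
    }
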
326 struct drm_i915_private *dev_priv = arg;
327 struct intel_display *display = &dev_priv->display;
330 if (!intel_irqs_enabled(dev_priv))
334 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
343 master_ctl = intel_uncore_read(&dev_priv->uncore, GEN8_MASTER_IRQ) & ~GEN8_MASTER_IRQ_CONTROL;
344 iir = intel_uncore_read(&dev_priv->uncore, VLV_IIR);
364 intel_uncore_write(&dev_priv->uncore, GEN8_MASTER_IRQ, 0);
365 ier = intel_uncore_rmw(&dev_priv->uncore, VLV_IER, ~0, 0);
367 gen8_gt_irq_handler(to_gt(dev_priv), master_ctl);
370 hotplug_status = i9xx_hpd_irq_ack(dev_priv);
377 i9xx_pipestat_irq_ack(dev_priv, iir, pipe_stats);
389 intel_uncore_write(&dev_priv->uncore, VLV_IIR, iir);
391 intel_uncore_write(&dev_priv->uncore, VLV_IER, ier);
392 intel_uncore_write(&dev_priv->uncore, GEN8_MASTER_IRQ, GEN8_MASTER_IRQ_CONTROL);
395 i9xx_hpd_irq_handler(dev_priv, hotplug_status);
400 valleyview_pipestat_irq_handler(dev_priv, pipe_stats);
403 pmu_irq_stats(dev_priv, ret);
405 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
508 struct drm_i915_private *dev_priv = arg;
509 void __iomem * const regs = intel_uncore_regs(&dev_priv->uncore);
512 if (!intel_irqs_enabled(dev_priv))
522 gen8_gt_irq_handler(to_gt(dev_priv), master_ctl);
526 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
527 gen8_de_irq_handler(dev_priv, master_ctl);
528 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
533 pmu_irq_stats(dev_priv, IRQ_HANDLED);
657 static void ibx_irq_reset(struct drm_i915_private *dev_priv)
659 struct intel_uncore *uncore = &dev_priv->uncore;
661 if (HAS_PCH_NOP(dev_priv))
666 if (HAS_PCH_CPT(dev_priv) || HAS_PCH_LPT(dev_priv))
667 intel_uncore_write(&dev_priv->uncore, SERR_INT, 0xffffffff);
672 static void ilk_irq_reset(struct drm_i915_private *dev_priv)
674 struct intel_uncore *uncore = &dev_priv->uncore;
677 dev_priv->irq_mask = ~0u;
679 if (GRAPHICS_VER(dev_priv) == 7)
682 if (IS_HASWELL(dev_priv)) {
687 gen5_gt_irq_reset(to_gt(dev_priv));
689 ibx_irq_reset(dev_priv);
692 static void valleyview_irq_reset(struct drm_i915_private *dev_priv)
694 intel_uncore_write(&dev_priv->uncore, VLV_MASTER_IER, 0);
695 intel_uncore_posting_read(&dev_priv->uncore, VLV_MASTER_IER);
697 gen5_gt_irq_reset(to_gt(dev_priv));
699 spin_lock_irq(&dev_priv->irq_lock);
700 vlv_display_irq_reset(dev_priv);
701 spin_unlock_irq(&dev_priv->irq_lock);
704 static void gen8_irq_reset(struct drm_i915_private *dev_priv)
706 struct intel_uncore *uncore = &dev_priv->uncore;
710 gen8_gt_irq_reset(to_gt(dev_priv));
711 gen8_display_irq_reset(dev_priv);
714 if (HAS_PCH_SPLIT(dev_priv))
715 ibx_irq_reset(dev_priv);
719 static void gen11_irq_reset(struct drm_i915_private *dev_priv)
721 struct intel_gt *gt = to_gt(dev_priv);
724 gen11_master_intr_disable(intel_uncore_regs(&dev_priv->uncore));
727 gen11_display_irq_reset(dev_priv);
733 static void dg1_irq_reset(struct drm_i915_private *dev_priv)
735 struct intel_uncore *uncore = &dev_priv->uncore;
739 dg1_master_intr_disable(intel_uncore_regs(&dev_priv->uncore));
741 for_each_gt(gt, dev_priv, i)
744 gen11_display_irq_reset(dev_priv);
752 static void cherryview_irq_reset(struct drm_i915_private *dev_priv)
754 struct intel_uncore *uncore = &dev_priv->uncore;
757 intel_uncore_posting_read(&dev_priv->uncore, GEN8_MASTER_IRQ);
759 gen8_gt_irq_reset(to_gt(dev_priv));
763 spin_lock_irq(&dev_priv->irq_lock);
764 vlv_display_irq_reset(dev_priv);
765 spin_unlock_irq(&dev_priv->irq_lock);
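
Lines 657-765 are the per-platform *_irq_reset() helpers. A recurring detail is the posting read after zeroing the master enable register (for example lines 694-695 and 757): reading the register back forces the posted MMIO write to reach the hardware before the rest of the reset proceeds. A minimal sketch of that write-then-posting-read idiom; the volatile variable is a stand-in for a memory-mapped register.

    #include <stdint.h>

    /* Stand-in for a memory-mapped register; the driver uses
     * intel_uncore_write() followed by intel_uncore_posting_read(). */
    static volatile uint32_t fake_master_ier = 1;

    static void irq_reset_master(volatile uint32_t *master_ier)
    {
        /* Disable everything at the top level... */
        *master_ier = 0;

        /* ...and read the same register back. On real hardware this flushes
         * the posted MMIO write, so the device has definitely seen the 0
         * before the rest of the reset runs; the value itself is unused. */
        (void)*master_ier;
    }

    int main(void)
    {
        irq_reset_master(&fake_master_ier);
        return 0;
    }
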
768 static void ilk_irq_postinstall(struct drm_i915_private *dev_priv)
770 gen5_gt_irq_postinstall(to_gt(dev_priv));
772 ilk_de_irq_postinstall(dev_priv);
775 static void valleyview_irq_postinstall(struct drm_i915_private *dev_priv)
777 gen5_gt_irq_postinstall(to_gt(dev_priv));
779 spin_lock_irq(&dev_priv->irq_lock);
780 vlv_display_irq_postinstall(dev_priv);
781 spin_unlock_irq(&dev_priv->irq_lock);
783 intel_uncore_write(&dev_priv->uncore, VLV_MASTER_IER, MASTER_INTERRUPT_ENABLE);
784 intel_uncore_posting_read(&dev_priv->uncore, VLV_MASTER_IER);
787 static void gen8_irq_postinstall(struct drm_i915_private *dev_priv)
789 gen8_gt_irq_postinstall(to_gt(dev_priv));
790 gen8_de_irq_postinstall(dev_priv);
792 gen8_master_intr_enable(intel_uncore_regs(&dev_priv->uncore));
795 static void gen11_irq_postinstall(struct drm_i915_private *dev_priv)
797 struct intel_gt *gt = to_gt(dev_priv);
802 gen11_de_irq_postinstall(dev_priv);
807 intel_uncore_posting_read(&dev_priv->uncore, GEN11_GFX_MSTR_IRQ);
810 static void dg1_irq_postinstall(struct drm_i915_private *dev_priv)
812 struct intel_uncore *uncore = &dev_priv->uncore;
817 for_each_gt(gt, dev_priv, i)
822 dg1_de_irq_postinstall(dev_priv);
828 static void cherryview_irq_postinstall(struct drm_i915_private *dev_priv)
830 gen8_gt_irq_postinstall(to_gt(dev_priv));
832 spin_lock_irq(&dev_priv->irq_lock);
833 vlv_display_irq_postinstall(dev_priv);
834 spin_unlock_irq(&dev_priv->irq_lock);
836 intel_uncore_write(&dev_priv->uncore, GEN8_MASTER_IRQ, GEN8_MASTER_IRQ_CONTROL);
837 intel_uncore_posting_read(&dev_priv->uncore, GEN8_MASTER_IRQ);
861 static void i9xx_error_irq_ack(struct drm_i915_private *dev_priv,
866 *eir = intel_uncore_read(&dev_priv->uncore, EIR);
867 intel_uncore_write(&dev_priv->uncore, EIR, *eir);
869 *eir_stuck = intel_uncore_read(&dev_priv->uncore, EIR);
883 emr = intel_uncore_read(&dev_priv->uncore, EMR);
884 intel_uncore_write(&dev_priv->uncore, EMR, 0xffffffff);
885 intel_uncore_write(&dev_priv->uncore, EMR, emr | *eir_stuck);
888 static void i9xx_error_irq_handler(struct drm_i915_private *dev_priv,
891 drm_dbg(&dev_priv->drm, "Master Error, EIR 0x%08x\n", eir);
894 drm_dbg(&dev_priv->drm, "EIR stuck: 0x%08x, masked\n",
897 drm_dbg(&dev_priv->drm, "PGTBL_ER: 0x%08x\n",
898 intel_uncore_read(&dev_priv->uncore, PGTBL_ER));
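
Lines 861-898 are the i9xx error-interrupt path. i9xx_error_irq_ack() reads EIR, writes the value back to clear it, and reads it again: any bit still set is stuck, and those bits get ORed into EMR so they stop generating further interrupts. A compilable simulation of that stuck-bit detection; struct fake_regs and its accessors are invented for the example and only model the write-one-to-clear behaviour.

    #include <stdint.h>
    #include <stdio.h>

    /* Invented model of the EIR (error identity) and EMR (error mask)
     * registers. */
    struct fake_regs {
        uint32_t eir;        /* clears on write, except for stuck bits */
        uint32_t eir_stuck;  /* bits the hardware keeps asserting */
        uint32_t emr;
    };

    static uint32_t read_eir(const struct fake_regs *r)
    {
        return r->eir | r->eir_stuck;
    }

    static void write_eir(struct fake_regs *r, uint32_t val)
    {
        r->eir &= ~val;   /* W1C, but stuck bits survive */
    }

    static void error_irq_ack(struct fake_regs *r, uint32_t *eir,
                              uint32_t *eir_stuck)
    {
        uint32_t emr;

        *eir = read_eir(r);
        write_eir(r, *eir);          /* try to clear everything we saw */
        *eir_stuck = read_eir(r);    /* whatever survives is stuck */
        if (*eir_stuck == 0)
            return;

        /* Mask the stuck bits so they stop raising interrupts; the driver
         * briefly writes all-ones to EMR first so the edge-triggered master
         * error logic can re-arm (lines 883-885). */
        emr = r->emr;
        r->emr = 0xffffffff;
        r->emr = emr | *eir_stuck;
    }

    int main(void)
    {
        struct fake_regs r = { .eir = 0x3, .eir_stuck = 0x4, .emr = 0 };
        uint32_t eir, eir_stuck;

        error_irq_ack(&r, &eir, &eir_stuck);
        printf("eir=0x%x stuck=0x%x emr=0x%x\n",
               (unsigned)eir, (unsigned)eir_stuck, (unsigned)r.emr);
        return 0;
    }
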
901 static void i915_irq_reset(struct drm_i915_private *dev_priv)
903 struct intel_uncore *uncore = &dev_priv->uncore;
905 i9xx_display_irq_reset(dev_priv);
909 dev_priv->irq_mask = ~0u;
912 static void i915_irq_postinstall(struct drm_i915_private *dev_priv)
914 struct intel_uncore *uncore = &dev_priv->uncore;
917 gen2_error_init(uncore, GEN2_ERROR_REGS, ~i9xx_error_mask(dev_priv));
919 dev_priv->irq_mask =
930 if (DISPLAY_VER(dev_priv) >= 3) {
931 dev_priv->irq_mask &= ~I915_ASLE_INTERRUPT;
935 if (I915_HAS_HOTPLUG(dev_priv)) {
936 dev_priv->irq_mask &= ~I915_DISPLAY_PORT_INTERRUPT;
940 gen2_irq_init(uncore, GEN2_IRQ_REGS, dev_priv->irq_mask, enable_mask);
944 spin_lock_irq(&dev_priv->irq_lock);
945 i915_enable_pipestat(dev_priv, PIPE_A, PIPE_CRC_DONE_INTERRUPT_STATUS);
946 i915_enable_pipestat(dev_priv, PIPE_B, PIPE_CRC_DONE_INTERRUPT_STATUS);
947 spin_unlock_irq(&dev_priv->irq_lock);
949 i915_enable_asle_pipestat(dev_priv);
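
Lines 912-949 are i915_irq_postinstall(): dev_priv->irq_mask is kept as the complement of the enabled events, with per-feature bits cleared from the mask and ORed into the enable mask (ASLE for DISPLAY_VER >= 3, the display-port bit when the platform has hotplug) before both values go to gen2_irq_init(). A small sketch of that complement-style mask construction; the EVT_* bits and feature flags are made up for the example.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Made-up event bits; the driver uses I915_ASLE_INTERRUPT,
     * I915_DISPLAY_PORT_INTERRUPT and friends. */
    #define EVT_VBLANK   (1u << 0)
    #define EVT_ASLE     (1u << 1)
    #define EVT_HOTPLUG  (1u << 2)

    int main(void)
    {
        bool has_asle = true, has_hotplug = false;

        /* Start from "everything masked" / "nothing enabled"... */
        uint32_t irq_mask = ~0u;
        uint32_t enable_mask = 0;

        /* ...unmask and enable the baseline events... */
        irq_mask &= ~EVT_VBLANK;
        enable_mask |= EVT_VBLANK;

        /* ...and the optional ones only when the hardware has them. */
        if (has_asle) {
            irq_mask &= ~EVT_ASLE;
            enable_mask |= EVT_ASLE;
        }
        if (has_hotplug) {
            irq_mask &= ~EVT_HOTPLUG;
            enable_mask |= EVT_HOTPLUG;
        }

        /* The driver then programs IMR/IER with these two values via
         * gen2_irq_init(uncore, GEN2_IRQ_REGS, irq_mask, enable_mask). */
        printf("IMR=0x%08x IER=0x%08x\n",
               (unsigned)irq_mask, (unsigned)enable_mask);
        return 0;
    }
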
954 struct drm_i915_private *dev_priv = arg;
957 if (!intel_irqs_enabled(dev_priv))
961 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
969 iir = intel_uncore_read(&dev_priv->uncore, GEN2_IIR);
975 if (I915_HAS_HOTPLUG(dev_priv) &&
977 hotplug_status = i9xx_hpd_irq_ack(dev_priv);
981 i9xx_pipestat_irq_ack(dev_priv, iir, pipe_stats);
984 i9xx_error_irq_ack(dev_priv, &eir, &eir_stuck);
986 intel_uncore_write(&dev_priv->uncore, GEN2_IIR, iir);
989 intel_engine_cs_irq(to_gt(dev_priv)->engine[RCS0], iir);
992 i9xx_error_irq_handler(dev_priv, eir, eir_stuck);
995 i9xx_hpd_irq_handler(dev_priv, hotplug_status);
997 i915_pipestat_irq_handler(dev_priv, iir, pipe_stats);
1000 pmu_irq_stats(dev_priv, ret);
1002 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
1007 static void i965_irq_reset(struct drm_i915_private *dev_priv)
1009 struct intel_uncore *uncore = &dev_priv->uncore;
1011 i9xx_display_irq_reset(dev_priv);
1015 dev_priv->irq_mask = ~0u;
1037 static void i965_irq_postinstall(struct drm_i915_private *dev_priv)
1039 struct intel_uncore *uncore = &dev_priv->uncore;
1042 gen2_error_init(uncore, GEN2_ERROR_REGS, ~i965_error_mask(dev_priv));
1044 dev_priv->irq_mask =
1059 if (IS_G4X(dev_priv))
1062 gen2_irq_init(uncore, GEN2_IRQ_REGS, dev_priv->irq_mask, enable_mask);
1066 spin_lock_irq(&dev_priv->irq_lock);
1067 i915_enable_pipestat(dev_priv, PIPE_A, PIPE_GMBUS_INTERRUPT_STATUS);
1068 i915_enable_pipestat(dev_priv, PIPE_A, PIPE_CRC_DONE_INTERRUPT_STATUS);
1069 i915_enable_pipestat(dev_priv, PIPE_B, PIPE_CRC_DONE_INTERRUPT_STATUS);
1070 spin_unlock_irq(&dev_priv->irq_lock);
1072 i915_enable_asle_pipestat(dev_priv);
1077 struct drm_i915_private *dev_priv = arg;
1080 if (!intel_irqs_enabled(dev_priv))
1084 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
1092 iir = intel_uncore_read(&dev_priv->uncore, GEN2_IIR);
1099 hotplug_status = i9xx_hpd_irq_ack(dev_priv);
1103 i9xx_pipestat_irq_ack(dev_priv, iir, pipe_stats);
1106 i9xx_error_irq_ack(dev_priv, &eir, &eir_stuck);
1108 intel_uncore_write(&dev_priv->uncore, GEN2_IIR, iir);
1111 intel_engine_cs_irq(to_gt(dev_priv)->engine[RCS0],
1115 intel_engine_cs_irq(to_gt(dev_priv)->engine[VCS0],
1119 i9xx_error_irq_handler(dev_priv, eir, eir_stuck);
1122 i9xx_hpd_irq_handler(dev_priv, hotplug_status);
1124 i965_pipestat_irq_handler(dev_priv, iir, pipe_stats);
1127 pmu_irq_stats(dev_priv, IRQ_HANDLED);
1129 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
1136 * @dev_priv: i915 device instance
1141 void intel_irq_init(struct drm_i915_private *dev_priv)
1145 INIT_WORK(&dev_priv->l3_parity.error_work, ivb_parity_work);
1147 dev_priv->l3_parity.remap_info[i] = NULL;
1150 if (HAS_GT_UC(dev_priv) && GRAPHICS_VER(dev_priv) < 11)
1151 to_gt(dev_priv)->pm_guc_events = GUC_INTR_GUC2HOST << 16;
1168 static irq_handler_t intel_irq_handler(struct drm_i915_private *dev_priv)
1170 if (HAS_GMCH(dev_priv)) {
1171 if (IS_CHERRYVIEW(dev_priv))
1173 else if (IS_VALLEYVIEW(dev_priv))
1175 else if (GRAPHICS_VER(dev_priv) == 4)
1180 if (GRAPHICS_VER_FULL(dev_priv) >= IP_VER(12, 10))
1182 else if (GRAPHICS_VER(dev_priv) >= 11)
1184 else if (GRAPHICS_VER(dev_priv) >= 8)
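
Lines 1168-1184 are intel_irq_handler(), which selects the top-level ISR: GMCH platforms get the cherryview/valleyview/i965/i915 handlers, everything else is picked by graphics IP version (>= 12.10 -> dg1, >= 11 -> gen11, >= 8 -> gen8, otherwise ilk). The same cascade, reduced to a compilable sketch; the handler bodies are stubs and ver stands in for GRAPHICS_VER_FULL().

    #include <stdio.h>

    /* Same ver/release encoding the driver's IP_VER() macro uses. */
    #define IP_VER(ver, rel) ((ver) << 8 | (rel))

    typedef void (*handler_fn)(void);

    /* Stub bodies; the real functions live in i915_irq.c. */
    static void dg1_handler(void)   { puts("dg1_irq_handler");   }
    static void gen11_handler(void) { puts("gen11_irq_handler"); }
    static void gen8_handler(void)  { puts("gen8_irq_handler");  }
    static void ilk_handler(void)   { puts("ilk_irq_handler");   }

    /* ver stands in for GRAPHICS_VER_FULL(dev_priv); this is the non-GMCH
     * branch of intel_irq_handler() reduced to its cascade. */
    static handler_fn pick_irq_handler(int ver)
    {
        if (ver >= IP_VER(12, 10))
            return dg1_handler;
        else if (ver >= IP_VER(11, 0))
            return gen11_handler;
        else if (ver >= IP_VER(8, 0))
            return gen8_handler;
        else
            return ilk_handler;
    }

    int main(void)
    {
        pick_irq_handler(IP_VER(12, 55))();  /* -> dg1_irq_handler  */
        pick_irq_handler(IP_VER(9, 0))();    /* -> gen8_irq_handler */
        return 0;
    }
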
1191 static void intel_irq_reset(struct drm_i915_private *dev_priv)
1193 if (HAS_GMCH(dev_priv)) {
1194 if (IS_CHERRYVIEW(dev_priv))
1195 cherryview_irq_reset(dev_priv);
1196 else if (IS_VALLEYVIEW(dev_priv))
1197 valleyview_irq_reset(dev_priv);
1198 else if (GRAPHICS_VER(dev_priv) == 4)
1199 i965_irq_reset(dev_priv);
1201 i915_irq_reset(dev_priv);
1203 if (GRAPHICS_VER_FULL(dev_priv) >= IP_VER(12, 10))
1204 dg1_irq_reset(dev_priv);
1205 else if (GRAPHICS_VER(dev_priv) >= 11)
1206 gen11_irq_reset(dev_priv);
1207 else if (GRAPHICS_VER(dev_priv) >= 8)
1208 gen8_irq_reset(dev_priv);
1210 ilk_irq_reset(dev_priv);
1214 static void intel_irq_postinstall(struct drm_i915_private *dev_priv)
1216 if (HAS_GMCH(dev_priv)) {
1217 if (IS_CHERRYVIEW(dev_priv))
1218 cherryview_irq_postinstall(dev_priv);
1219 else if (IS_VALLEYVIEW(dev_priv))
1220 valleyview_irq_postinstall(dev_priv);
1221 else if (GRAPHICS_VER(dev_priv) == 4)
1222 i965_irq_postinstall(dev_priv);
1224 i915_irq_postinstall(dev_priv);
1226 if (GRAPHICS_VER_FULL(dev_priv) >= IP_VER(12, 10))
1227 dg1_irq_postinstall(dev_priv);
1228 else if (GRAPHICS_VER(dev_priv) >= 11)
1229 gen11_irq_postinstall(dev_priv);
1230 else if (GRAPHICS_VER(dev_priv) >= 8)
1231 gen8_irq_postinstall(dev_priv);
1233 ilk_irq_postinstall(dev_priv);
1239 * @dev_priv: i915 device instance
1248 int intel_irq_install(struct drm_i915_private *dev_priv)
1250 int irq = to_pci_dev(dev_priv->drm.dev)->irq;
1258 dev_priv->irqs_enabled = true;
1260 intel_irq_reset(dev_priv);
1262 ret = request_irq(irq, intel_irq_handler(dev_priv),
1263 IRQF_SHARED, DRIVER_NAME, dev_priv);
1265 dev_priv->irqs_enabled = false;
1269 intel_irq_postinstall(dev_priv);
1276 * @dev_priv: i915 device instance
1281 void intel_irq_uninstall(struct drm_i915_private *dev_priv)
1283 int irq = to_pci_dev(dev_priv->drm.dev)->irq;
1285 if (drm_WARN_ON(&dev_priv->drm, !dev_priv->irqs_enabled))
1288 intel_irq_reset(dev_priv);
1290 free_irq(irq, dev_priv);
1292 intel_hpd_cancel_work(dev_priv);
1293 dev_priv->irqs_enabled = false;
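
Lines 1248-1293 are intel_irq_install()/intel_irq_uninstall(): install sets irqs_enabled, quiesces the hardware with intel_irq_reset(), requests the shared IRQ line, and only then runs intel_irq_postinstall() to turn interrupt sources on; uninstall resets again before free_irq() and also cancels outstanding hotplug work. A condensed sketch of that ordering with hypothetical stubs in place of the real helpers (the hotplug-work cancel is omitted).

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for intel_irq_reset(), intel_irq_postinstall(),
     * request_irq() and free_irq(). */
    static void irq_reset(void)       { puts("reset: all sources off"); }
    static void irq_postinstall(void) { puts("postinstall: sources on"); }
    static int  request_line(void)    { puts("request_irq"); return 0; }
    static void free_line(void)       { puts("free_irq"); }

    static bool irqs_enabled;

    static int irq_install(void)
    {
        /* Flag first: the shared-line ISRs consult it on every entry. */
        irqs_enabled = true;

        /* Quiesce the hardware before the line goes live... */
        irq_reset();

        if (request_line() != 0) {
            irqs_enabled = false;
            return -1;
        }

        /* ...and only enable interrupt sources once the handler is wired. */
        irq_postinstall();
        return 0;
    }

    static void irq_uninstall(void)
    {
        if (!irqs_enabled)
            return;

        irq_reset();   /* stop the hardware generating interrupts */
        free_line();   /* then detach the handler from the line   */
        irqs_enabled = false;
    }

    int main(void)
    {
        if (irq_install() == 0)
            irq_uninstall();
        return 0;
    }
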
1322 bool intel_irqs_enabled(struct drm_i915_private *dev_priv)
1324 return dev_priv->irqs_enabled;
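
Lines 1322-1324 are intel_irqs_enabled(), the flag every handler above checks on entry (lines 236, 330, 512, 957, 1080): because the IRQ line is requested with IRQF_SHARED, a handler that is not ready must return IRQ_NONE so the kernel can offer the interrupt to the other handlers sharing the line. A small model of that contract with hypothetical handlers; the enum mirrors the kernel's IRQ_NONE/IRQ_HANDLED return values.

    #include <stdbool.h>
    #include <stdio.h>

    /* Mirrors the return contract for handlers on a shared line. */
    enum irqreturn { IRQ_NONE, IRQ_HANDLED };

    static bool i915_irqs_enabled;   /* stand-in for dev_priv->irqs_enabled */
    static bool other_device_fired;  /* some other device sharing the line  */

    static enum irqreturn fake_i915_isr(void)
    {
        if (!i915_irqs_enabled)
            return IRQ_NONE;   /* not ours (or not ready): decline */
        /* ...read IIR, ack, handle... */
        return IRQ_HANDLED;
    }

    static enum irqreturn fake_other_isr(void)
    {
        return other_device_fired ? IRQ_HANDLED : IRQ_NONE;
    }

    int main(void)
    {
        enum irqreturn (*chain[])(void) = { fake_i915_isr, fake_other_isr };
        unsigned int i;

        other_device_fired = true;

        /* The kernel calls every handler registered on a shared line; each
         * one either claims the interrupt or declines with IRQ_NONE. */
        for (i = 0; i < sizeof(chain) / sizeof(chain[0]); i++)
            printf("handler %u -> %s\n", i,
                   chain[i]() == IRQ_HANDLED ? "IRQ_HANDLED" : "IRQ_NONE");
        return 0;
    }
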