Lines matching "case", "-", "sensitive" in the Linux KVM VGIC-v2 code (vgic-v2.c)
// SPDX-License-Identifier: GPL-2.0-only

#include <linux/irqchip/arm-gic.h>

/* in vgic_v2_set_underflow() */
	struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;

	cpuif->vgic_hcr |= GICH_HCR_UIE;
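/*
 * Editor's note: GICH_HCR_UIE ("underflow interrupt enable") makes the
 * GIC raise a maintenance interrupt once no more than one List Register
 * entry is still valid, so KVM gets a chance to refill the LRs when
 * more vIRQs are queued than there are LRs to hold them.
 */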
/*
 * transfer the content of the LRs back into the corresponding ap_list:
 * - active bit is transferred as is
 * - pending bit is
 *   - transferred as is in case of edge sensitive IRQs
 *   - set to the line-level (resample time) for level sensitive IRQs
 */
/* in vgic_v2_fold_lr_state() */
	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
	int lr;

	cpuif->vgic_hcr &= ~GICH_HCR_UIE;

	for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) {
		u32 val = cpuif->vgic_lr[lr];
		u32 cpuid, intid = val & GICH_LR_VIRTUALID;
		struct vgic_irq *irq;
		bool deactivated;

		/* Extract the source vCPU for SGIs */
		cpuid = val & GICH_LR_PHYSID_CPUID;
		cpuid >>= GICH_LR_PHYSID_CPUID_SHIFT;
		cpuid &= 7;

		/* Notify fds when the guest EOI'ed a level-triggered SPI */
		if (lr_signals_eoi_mi(val) && vgic_valid_spi(vcpu->kvm, intid))
			kvm_notify_acked_irq(vcpu->kvm, 0,
					     intid - VGIC_NR_PRIVATE_IRQS);

		irq = vgic_get_irq(vcpu->kvm, vcpu, intid);

		raw_spin_lock(&irq->irq_lock);

		/* Always preserve the active bit, note deactivation */
		deactivated = irq->active && !(val & GICH_LR_ACTIVE_BIT);
		irq->active = !!(val & GICH_LR_ACTIVE_BIT);

		if (irq->active && vgic_irq_is_sgi(intid))
			irq->active_source = cpuid;

		/* Edge is the only case where we preserve the pending bit */
		if (irq->config == VGIC_CONFIG_EDGE &&
		    (val & GICH_LR_PENDING_BIT)) {
			irq->pending_latch = true;

			if (vgic_irq_is_sgi(intid))
				irq->source |= (1 << cpuid);
		}

		/* Clear soft pending state when level irqs have been acked */
		if (irq->config == VGIC_CONFIG_LEVEL && !(val & GICH_LR_STATE))
			irq->pending_latch = false;

		/* Handle resampling for mapped interrupts if required */
		vgic_irq_handle_resampling(irq, deactivated, val & GICH_LR_PENDING_BIT);

		raw_spin_unlock(&irq->irq_lock);
		vgic_put_irq(vcpu->kvm, irq);
	}

	cpuif->used_lrs = 0;
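/*
 * Illustrative sketch (editor's addition, not part of vgic-v2.c): the
 * GICv2 List Register layout that the fold loop above decodes, written
 * with the mask/shift macros from <linux/irqchip/arm-gic.h>. The helper
 * name is hypothetical.
 */
static void debug_print_lr(int lr, u32 val)
{
	pr_debug("LR%d: intid=%u src=%u %s%s%s\n", lr,
		 val & GICH_LR_VIRTUALID,		  /* vINTID, bits [9:0] */
		 (val >> GICH_LR_PHYSID_CPUID_SHIFT) & 7, /* SGI source vCPU */
		 (val & GICH_LR_PENDING_BIT) ? "P" : "",
		 (val & GICH_LR_ACTIVE_BIT) ? "A" : "",
		 (val & GICH_LR_EOI) ? " (EOI maintenance)" : "");
}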
/*
 * Populates the particular LR with the state of a given IRQ:
 * - for an edge sensitive IRQ the pending state is cleared in struct vgic_irq
 * - for a level sensitive IRQ the pending state value is unchanged;
 *   it is dictated directly by the input level
 *
 * If @irq describes an SGI with multiple sources, we choose the
 * lowest-numbered source VCPU and clear that bit in the source bitmap.
 *
 * The irq_lock must be held by the caller.
 */
/* in vgic_v2_populate_lr() */
	u32 val = irq->intid;
	bool allow_pending = true;

	if (irq->active) {
		val |= GICH_LR_ACTIVE_BIT;
		if (vgic_irq_is_sgi(irq->intid))
			val |= irq->active_source << GICH_LR_PHYSID_CPUID_SHIFT;
	}

	if (irq->group)
		val |= GICH_LR_GROUP1;

	if (irq->hw && !vgic_irq_needs_resampling(irq)) {
		val |= GICH_LR_HW;
		val |= irq->hwintid << GICH_LR_PHYSID_CPUID_SHIFT;
		/* Never set pending+active on a HW interrupt */
		if (irq->active)
			allow_pending = false;
	} else if (irq->config == VGIC_CONFIG_LEVEL) {
		val |= GICH_LR_EOI;
		/* Software resampling doesn't work well with pending+active */
		if (irq->active)
			allow_pending = false;
	}

	if (allow_pending && irq_is_pending(irq)) {
		val |= GICH_LR_PENDING_BIT;

		if (irq->config == VGIC_CONFIG_EDGE)
			irq->pending_latch = false;

		if (vgic_irq_is_sgi(irq->intid)) {
			u32 src = ffs(irq->source);

			if (WARN_RATELIMIT(!src, "No SGI source for INTID %d\n",
					   irq->intid))
				return;

			val |= (src - 1) << GICH_LR_PHYSID_CPUID_SHIFT;
			irq->source &= ~(1 << (src - 1));
			if (irq->source) {
				irq->pending_latch = true;
				val |= GICH_LR_EOI;
			}
		}
	}

	/*
	 * Level-triggered mapped IRQs are special because we only observe
	 * rising edges as input to the VGIC; lower the line level here so
	 * a new virtual IRQ can be taken.
	 */
	if (vgic_irq_is_mapped_level(irq) && (val & GICH_LR_PENDING_BIT))
		irq->line_level = false;

	/* A GICv2 LR holds only five bits of priority */
	val |= (irq->priority >> 3) << GICH_LR_PRIORITY_SHIFT;

	vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;
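/*
 * Editor's note on the multi-source SGI path above: if other source
 * bits remain after one source is consumed, pending_latch is set again
 * and GICH_LR_EOI is requested, so the EOI maintenance interrupt lets
 * KVM inject the same SGI for the next source. Example: source = 0b0110
 * injects for vCPU1 (ffs = 2), leaving source = 0b0100 latched for vCPU2.
 */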
/* in vgic_v2_clear_lr() */
	vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;
/* in vgic_v2_set_vmcr() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
	u32 vmcr;

	vmcr = (vmcrp->grpen0 << GICH_VMCR_ENABLE_GRP0_SHIFT) &
		GICH_VMCR_ENABLE_GRP0_MASK;
	vmcr |= (vmcrp->grpen1 << GICH_VMCR_ENABLE_GRP1_SHIFT) &
		GICH_VMCR_ENABLE_GRP1_MASK;
	vmcr |= (vmcrp->ackctl << GICH_VMCR_ACK_CTL_SHIFT) &
		GICH_VMCR_ACK_CTL_MASK;
	vmcr |= (vmcrp->fiqen << GICH_VMCR_FIQ_EN_SHIFT) &
		GICH_VMCR_FIQ_EN_MASK;
	vmcr |= (vmcrp->cbpr << GICH_VMCR_CBPR_SHIFT) &
		GICH_VMCR_CBPR_MASK;
	vmcr |= (vmcrp->eoim << GICH_VMCR_EOI_MODE_SHIFT) &
		GICH_VMCR_EOI_MODE_MASK;
	vmcr |= (vmcrp->abpr << GICH_VMCR_ALIAS_BINPOINT_SHIFT) &
		GICH_VMCR_ALIAS_BINPOINT_MASK;
	vmcr |= (vmcrp->bpr << GICH_VMCR_BINPOINT_SHIFT) &
		GICH_VMCR_BINPOINT_MASK;
	vmcr |= ((vmcrp->pmr >> GICV_PMR_PRIORITY_SHIFT) <<
		 GICH_VMCR_PRIMASK_SHIFT) & GICH_VMCR_PRIMASK_MASK;

	cpu_if->vgic_vmcr = vmcr;
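/*
 * Worked example (editor's addition): GICH_VMCR.PMR keeps only the top
 * five bits of the guest's 8-bit priority mask. With
 * GICV_PMR_PRIORITY_SHIFT == 3, a guest GICC_PMR of 0xf8 is stored as
 * 0x1f and restored as 0xf8; values that are not multiples of 8 lose
 * their low three bits.
 */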
/* in vgic_v2_get_vmcr() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
	u32 vmcr;

	vmcr = cpu_if->vgic_vmcr;

	vmcrp->grpen0 = (vmcr & GICH_VMCR_ENABLE_GRP0_MASK) >>
		GICH_VMCR_ENABLE_GRP0_SHIFT;
	vmcrp->grpen1 = (vmcr & GICH_VMCR_ENABLE_GRP1_MASK) >>
		GICH_VMCR_ENABLE_GRP1_SHIFT;
	vmcrp->ackctl = (vmcr & GICH_VMCR_ACK_CTL_MASK) >>
		GICH_VMCR_ACK_CTL_SHIFT;
	vmcrp->fiqen = (vmcr & GICH_VMCR_FIQ_EN_MASK) >>
		GICH_VMCR_FIQ_EN_SHIFT;
	vmcrp->cbpr = (vmcr & GICH_VMCR_CBPR_MASK) >>
		GICH_VMCR_CBPR_SHIFT;
	vmcrp->eoim = (vmcr & GICH_VMCR_EOI_MODE_MASK) >>
		GICH_VMCR_EOI_MODE_SHIFT;
	vmcrp->abpr = (vmcr & GICH_VMCR_ALIAS_BINPOINT_MASK) >>
		GICH_VMCR_ALIAS_BINPOINT_SHIFT;
	vmcrp->bpr = (vmcr & GICH_VMCR_BINPOINT_MASK) >>
		GICH_VMCR_BINPOINT_SHIFT;
	vmcrp->pmr = ((vmcr & GICH_VMCR_PRIMASK_MASK) >>
		      GICH_VMCR_PRIMASK_SHIFT) << GICV_PMR_PRIORITY_SHIFT;
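/*
 * Hypothetical usage sketch (editor's addition, not in this file): the
 * two helpers above round-trip the fields, which is what the
 * KVM_DEV_ARM_VGIC_GRP_CPU_REGS userspace accessors rely on.
 */
	struct vgic_vmcr vmcr = { .pmr = 0xf8, .bpr = 2, .grpen1 = 1 };

	vgic_v2_set_vmcr(vcpu, &vmcr);	/* pack into the GICH_VMCR layout */
	vgic_v2_get_vmcr(vcpu, &vmcr);	/* unpack: vmcr.pmr reads 0xf8 again */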
/* in vgic_v2_enable() */
	vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0;

	vcpu->arch.vgic_cpu.vgic_v2.vgic_hcr = GICH_HCR_EN;
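/*
 * Editor's note: forcing VMCR to zero lets the GIC restore the binary
 * points to their reset values, and GICH_HCR_EN switches the virtual
 * CPU interface on; without it the guest would see no GIC at all.
 */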
/* in vgic_v2_map_resources() */
	struct vgic_dist *dist = &kvm->arch.vgic;
	int ret = 0;

	if (IS_VGIC_ADDR_UNDEF(dist->vgic_dist_base) ||
	    IS_VGIC_ADDR_UNDEF(dist->vgic_cpu_base))
		return -ENXIO;

	if (!vgic_v2_check_base(dist->vgic_dist_base, dist->vgic_cpu_base))
		return -EINVAL;

	ret = kvm_phys_addr_ioremap(kvm, dist->vgic_cpu_base,
				    kvm_vgic_global_state.vcpu_base,
				    KVM_VGIC_V2_CPU_SIZE, true);
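/*
 * Editor's note: the ioremap call above installs a stage-2 device
 * mapping so that the guest-physical GICC frame (dist->vgic_cpu_base)
 * points straight at the host's GICV hardware frame
 * (kvm_vgic_global_state.vcpu_base); guest CPU-interface accesses then
 * hit real hardware without trapping.
 */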
/**
 * vgic_v2_probe - probe for a VGICv2 compatible interrupt controller
 * @info:	pointer to the GIC description
 */

/* in vgic_v2_probe() */
	if (is_protected_kvm_enabled())
		return -ENXIO;

	if (!info->vctrl.start) {
		kvm_err("GICH not present in the firmware table\n");
		return -ENXIO;
	}

	if (!PAGE_ALIGNED(info->vcpu.start) ||
	    !PAGE_ALIGNED(resource_size(&info->vcpu))) {
		ret = create_hyp_io_mappings(info->vcpu.start,
					     resource_size(&info->vcpu),
					     &kvm_vgic_global_state.vcpu_base_va,
					     &kvm_vgic_global_state.vcpu_hyp_va);
		static_branch_enable(&vgic_v2_cpuif_trap);
	}

	ret = create_hyp_io_mappings(info->vctrl.start,
				     resource_size(&info->vctrl),
				     &kvm_vgic_global_state.vctrl_base,
				     &kvm_vgic_global_state.vctrl_hyp);

	kvm_vgic_global_state.vcpu_base = info->vcpu.start;

	kvm_debug("vgic-v2@%llx\n", info->vctrl.start);
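/*
 * Editor's note: when the GICV frame cannot be handed to the guest
 * page-for-page (the PAGE_ALIGNED checks above fail), KVM flips the
 * vgic_v2_cpuif_trap static key and emulates guest CPU-interface
 * accesses by trapping them, which works but costs performance.
 */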
/* in save_lrs() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
	u64 used_lrs = cpu_if->used_lrs;
	u64 elrsr = readl_relaxed(base + GICH_ELRSR0);
	int i;

	for (i = 0; i < used_lrs; i++) {
		if (elrsr & (1UL << i))
			cpu_if->vgic_lr[i] &= ~GICH_LR_STATE;
		else
			cpu_if->vgic_lr[i] = readl_relaxed(base + GICH_LR0 + (i * 4));
	}
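/*
 * Editor's note: each set bit in GICH_ELRSR flags an empty List
 * Register, so the loop above skips the (slow) MMIO read for those and
 * merely clears the cached state bits; only LRs that still hold state
 * are read back from hardware.
 */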
/* in vgic_v2_save_state() */
	u64 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs;
/* in vgic_v2_restore_state() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
	void __iomem *base = kvm_vgic_global_state.vctrl_base;
	u64 used_lrs = cpu_if->used_lrs;
	int i;

	if (used_lrs) {
		writel_relaxed(cpu_if->vgic_hcr, base + GICH_HCR);
		for (i = 0; i < used_lrs; i++)
			writel_relaxed(cpu_if->vgic_lr[i],
				       base + GICH_LR0 + (i * 4));
	}
/* in vgic_v2_load() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;

	writel_relaxed(cpu_if->vgic_vmcr,
		       kvm_vgic_global_state.vctrl_base + GICH_VMCR);
	writel_relaxed(cpu_if->vgic_apr,
		       kvm_vgic_global_state.vctrl_base + GICH_APR);
/* in vgic_v2_put() */
	struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;

	cpu_if->vgic_vmcr = readl_relaxed(kvm_vgic_global_state.vctrl_base + GICH_VMCR);
	cpu_if->vgic_apr = readl_relaxed(kvm_vgic_global_state.vctrl_base + GICH_APR);
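/*
 * Editor's note: vgic_v2_load()/vgic_v2_put() bracket a vCPU's residency
 * on a physical CPU. VMCR and APR live in per-physical-CPU GICH
 * registers, so they must be read back into the vgic_v2_cpu_if when the
 * vCPU is scheduled out and written again when it next runs, possibly
 * on a different CPU.
 */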