Searched refs:csa (Results 1 – 25 of 30) sorted by relevance

/linux/arch/powerpc/platforms/cell/spufs/
backing_ops.c
40 ch0_cnt = ctx->csa.spu_chnlcnt_RW[0]; in gen_spu_event()
41 ch0_data = ctx->csa.spu_chnldata_RW[0]; in gen_spu_event()
42 ch1_data = ctx->csa.spu_chnldata_RW[1]; in gen_spu_event()
43 ctx->csa.spu_chnldata_RW[0] |= event; in gen_spu_event()
45 ctx->csa.spu_chnlcnt_RW[0] = 1; in gen_spu_event()
54 spin_lock(&ctx->csa.register_lock); in spu_backing_mbox_read()
55 mbox_stat = ctx->csa.prob.mb_stat_R; in spu_backing_mbox_read()
61 *data = ctx->csa.prob.pu_mb_R; in spu_backing_mbox_read()
62 ctx->csa.prob.mb_stat_R &= ~(0x0000ff); in spu_backing_mbox_read()
63 ctx->csa.spu_chnlcnt_RW[28] = 1; in spu_backing_mbox_read()
[all …]
switch.c
81 static inline int check_spu_isolate(struct spu_state *csa, struct spu *spu) in check_spu_isolate() argument
97 static inline void disable_interrupts(struct spu_state *csa, struct spu *spu) in disable_interrupts() argument
111 if (csa) { in disable_interrupts()
112 csa->priv1.int_mask_class0_RW = spu_int_mask_get(spu, 0); in disable_interrupts()
113 csa->priv1.int_mask_class1_RW = spu_int_mask_get(spu, 1); in disable_interrupts()
114 csa->priv1.int_mask_class2_RW = spu_int_mask_get(spu, 2); in disable_interrupts()
134 static inline void set_watchdog_timer(struct spu_state *csa, struct spu *spu) in set_watchdog_timer() argument
147 static inline void inhibit_user_access(struct spu_state *csa, struct spu *spu) in inhibit_user_access() argument
158 static inline void set_switch_pending(struct spu_state *csa, struct spu *spu) in set_switch_pending() argument
167 static inline void save_mfc_cntl(struct spu_state *csa, struct spu *spu) in save_mfc_cntl() argument
[all …]
lscsa_alloc.c
21 int spu_alloc_lscsa(struct spu_state *csa) in spu_alloc_lscsa() argument
29 csa->lscsa = lscsa; in spu_alloc_lscsa()
38 void spu_free_lscsa(struct spu_state *csa) in spu_free_lscsa() argument
43 if (csa->lscsa == NULL) in spu_free_lscsa()
46 for (p = csa->lscsa->ls; p < csa->lscsa->ls + LS_SIZE; p += PAGE_SIZE) in spu_free_lscsa()
49 vfree(csa->lscsa); in spu_free_lscsa()
fault.c
55 unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK; in spufs_handle_class0()
61 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
65 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
69 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
72 ctx->csa.class_0_pending = 0; in spufs_handle_class0()
102 ea = ctx->csa.class_1_dar; in spufs_handle_class1()
103 dsisr = ctx->csa.class_1_dsisr; in spufs_handle_class1()
141 ctx->csa.class_1_dar = ctx->csa.class_1_dsisr = 0; in spufs_handle_class1()
file.c
249 pfn = vmalloc_to_pfn(ctx->csa.lscsa->ls + offset); in spufs_mem_mmap_fault()
470 return spufs_dump_emit(cprm, ctx->csa.lscsa->gprs, in spufs_regs_dump()
471 sizeof(ctx->csa.lscsa->gprs)); in spufs_regs_dump()
483 if (*pos >= sizeof(ctx->csa.lscsa->gprs)) in spufs_regs_read()
489 ret = simple_read_from_buffer(buffer, size, pos, ctx->csa.lscsa->gprs, in spufs_regs_read()
490 sizeof(ctx->csa.lscsa->gprs)); in spufs_regs_read()
500 struct spu_lscsa *lscsa = ctx->csa.lscsa; in spufs_regs_write()
527 return spufs_dump_emit(cprm, &ctx->csa.lscsa->fpcr, in spufs_fpcr_dump()
528 sizeof(ctx->csa.lscsa->fpcr)); in spufs_fpcr_dump()
541 ret = simple_read_from_buffer(buffer, size, pos, &ctx->csa.lscsa->fpcr, in spufs_fpcr_read()
[all …]
run.c
30 ctx->csa.class_0_pending = spu->class_0_pending; in spufs_stop_callback()
31 ctx->csa.class_0_dar = spu->class_0_dar; in spufs_stop_callback()
34 ctx->csa.class_1_dsisr = spu->class_1_dsisr; in spufs_stop_callback()
35 ctx->csa.class_1_dar = spu->class_1_dar; in spufs_stop_callback()
72 dsisr = ctx->csa.class_1_dsisr; in spu_stopped()
76 if (ctx->csa.class_0_pending) in spu_stopped()
spufs.h
69 struct spu_state csa; /* SPU context save area. */ member
346 extern int spu_init_csa(struct spu_state *csa);
347 extern void spu_fini_csa(struct spu_state *csa);
352 extern int spu_alloc_lscsa(struct spu_state *csa);
353 extern void spu_free_lscsa(struct spu_state *csa);
context.c
35 if (spu_init_csa(&ctx->csa)) in alloc_spu_context()
77 spu_fini_csa(&ctx->csa); in destroy_spu_context()
sched.c
237 spu_restore(&ctx->csa, spu); in spu_bind_context()
439 spu_save(&ctx->csa, spu); in spu_unbind_context()
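
Taken together, the spufs hits above revolve around struct spu_state csa, the per-context SPU context save area: spu_init_csa() and spu_alloc_lscsa() set it up when a context is created, spu_save() fills it in spu_unbind_context() when the context loses a physical SPU, spu_restore() replays it in spu_bind_context(), and backing_ops.c, fault.c and file.c service mailbox, fault and register accesses from the saved image while the context is off the hardware. Below is a minimal standalone user-space model of that park/replay pattern, assuming nothing beyond what the hits show; every name in it is illustrative, not the kernel API.

/*
 * Standalone model of the spufs "context save area" pattern: when a
 * context is unbound from the hardware its state is parked in a CSA,
 * and while parked, reads are served from that saved image.
 * Illustrative only -- not the kernel code.
 */
#include <stdio.h>
#include <string.h>

struct hw  { unsigned mbox_stat; unsigned chnldata[2]; };  /* "physical SPU" */
struct csa { unsigned mbox_stat; unsigned chnldata[2]; };  /* saved image    */
struct ctx { struct csa csa; struct hw *spu; };            /* spu == NULL => parked */

static void ctx_save(struct ctx *c)                        /* plays the role of spu_save()    */
{
	c->csa.mbox_stat = c->spu->mbox_stat;
	memcpy(c->csa.chnldata, c->spu->chnldata, sizeof(c->csa.chnldata));
	c->spu = NULL;
}

static void ctx_restore(struct ctx *c, struct hw *spu)     /* plays the role of spu_restore() */
{
	spu->mbox_stat = c->csa.mbox_stat;
	memcpy(spu->chnldata, c->csa.chnldata, sizeof(c->csa.chnldata));
	c->spu = spu;
}

static unsigned ctx_mbox_stat(struct ctx *c)               /* plays the role of the backing ops */
{
	return c->spu ? c->spu->mbox_stat : c->csa.mbox_stat;
}

int main(void)
{
	struct hw spu = { .mbox_stat = 0xff, .chnldata = { 1, 2 } };
	struct ctx c = { .spu = &spu };

	ctx_save(&c);                       /* unbind: state now lives in the CSA */
	printf("parked mbox_stat: %#x\n", ctx_mbox_stat(&c));
	ctx_restore(&c, &spu);              /* rebind: state replayed to hardware */
	printf("running mbox_stat: %#x\n", ctx_mbox_stat(&c));
	return 0;
}
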
/linux/drivers/hwtracing/coresight/
coresight-etm4x-core.c
73 struct csdev_access *csa; member
160 struct csdev_access *csa) in etm_detect_os_lock() argument
162 u32 oslsr = etm4x_relaxed_read32(csa, TRCOSLSR); in etm_detect_os_lock()
168 struct csdev_access *csa, u32 val) in etm_write_os_lock() argument
174 etm4x_relaxed_write32(csa, val, TRCOSLAR); in etm_write_os_lock()
190 struct csdev_access *csa) in etm4_os_unlock_csa() argument
195 etm_write_os_lock(drvdata, csa, 0x0); in etm4_os_unlock_csa()
215 struct csdev_access *csa) in etm4_cs_lock() argument
218 if (csa->io_mem) in etm4_cs_lock()
219 CS_LOCK(csa->base); in etm4_cs_lock()
[all …]
coresight-tpiu.c
68 static void tpiu_enable_hw(struct csdev_access *csa) in tpiu_enable_hw() argument
70 CS_UNLOCK(csa->base); in tpiu_enable_hw()
74 CS_LOCK(csa->base); in tpiu_enable_hw()
89 static void tpiu_disable_hw(struct csdev_access *csa) in tpiu_disable_hw() argument
91 CS_UNLOCK(csa->base); in tpiu_disable_hw()
94 csdev_access_relaxed_write32(csa, FFCR_STOP_FI, TPIU_FFCR); in tpiu_disable_hw()
96 csdev_access_relaxed_write32(csa, FFCR_STOP_FI | FFCR_FON_MAN, TPIU_FFCR); in tpiu_disable_hw()
98 coresight_timeout(csa, TPIU_FFCR, FFCR_FON_MAN_BIT, 0); in tpiu_disable_hw()
100 coresight_timeout(csa, TPIU_FFSR, FFSR_FT_STOPPED_BIT, 1); in tpiu_disable_hw()
102 CS_LOCK(csa->base); in tpiu_disable_hw()
coresight-etm4x.h
514 #define etm4x_relaxed_read32(csa, offset) \ argument
515 ((u32)((csa)->io_mem ? \
516 readl_relaxed((csa)->base + (offset)) : \
519 #define etm4x_relaxed_read64(csa, offset) \ argument
520 ((u64)((csa)->io_mem ? \
521 readq_relaxed((csa)->base + (offset)) : \
524 #define etm4x_read32(csa, offset) \ argument
526 u32 __val = etm4x_relaxed_read32((csa), (offset)); \
531 #define etm4x_read64(csa, offset) \ argument
533 u64 __val = etm4x_relaxed_read64((csa), (offset)); \
[all …]
coresight-etm3x-core.c
89 etmpdcr = readl_relaxed(drvdata->csa.base + ETMPDCR); in etm_set_pwrup()
91 writel_relaxed(etmpdcr, drvdata->csa.base + ETMPDCR); in etm_set_pwrup()
104 etmpdcr = readl_relaxed(drvdata->csa.base + ETMPDCR); in etm_clr_pwrup()
106 writel_relaxed(etmpdcr, drvdata->csa.base + ETMPDCR); in etm_clr_pwrup()
368 CS_UNLOCK(drvdata->csa.base); in etm_enable_hw()
430 CS_LOCK(drvdata->csa.base); in etm_enable_hw()
564 CS_UNLOCK(drvdata->csa.base); in etm_disable_hw()
576 CS_LOCK(drvdata->csa.base); in etm_disable_hw()
598 CS_UNLOCK(drvdata->csa.base); in etm_disable_perf()
610 CS_LOCK(drvdata->csa.base); in etm_disable_perf()
[all …]
coresight-core.c
147 void coresight_clear_self_claim_tag(struct csdev_access *csa) in coresight_clear_self_claim_tag() argument
149 if (csa->io_mem) in coresight_clear_self_claim_tag()
150 CS_UNLOCK(csa->base); in coresight_clear_self_claim_tag()
151 coresight_clear_self_claim_tag_unlocked(csa); in coresight_clear_self_claim_tag()
152 if (csa->io_mem) in coresight_clear_self_claim_tag()
153 CS_LOCK(csa->base); in coresight_clear_self_claim_tag()
157 void coresight_clear_self_claim_tag_unlocked(struct csdev_access *csa) in coresight_clear_self_claim_tag_unlocked() argument
159 csdev_access_relaxed_write32(csa, CORESIGHT_CLAIM_SELF_HOSTED, in coresight_clear_self_claim_tag_unlocked()
178 struct csdev_access *csa; in coresight_claim_device_unlocked() local
183 csa = &csdev->access; in coresight_claim_device_unlocked()
[all …]
coresight-etm.h
232 struct csdev_access csa; member
263 writel_relaxed(val, drvdata->csa.base + off); in etm_writel()
277 val = readl_relaxed(drvdata->csa.base + off); in etm_readl()
coresight-etb10.c
254 struct csdev_access *csa = &drvdata->csdev->access; in __etb_disable_hw() local
266 if (coresight_timeout(csa, ETB_FFCR, ETB_FFCR_BIT, 0)) { in __etb_disable_hw()
274 if (coresight_timeout(csa, ETB_FFSR, ETB_FFSR_BIT, 1)) { in __etb_disable_hw()
coresight-catu.c
394 struct csdev_access *csa = &drvdata->csdev->access; in catu_wait_for_ready() local
396 return coresight_timeout(csa, CATU_STATUS, CATU_STATUS_READY, 1); in catu_wait_for_ready()
coresight-etm3x-sysfs.c
53 CS_UNLOCK(drvdata->csa.base); in etmsr_show()
57 CS_LOCK(drvdata->csa.base); in etmsr_show()
952 CS_UNLOCK(drvdata->csa.base); in seq_curr_state_show()
954 CS_LOCK(drvdata->csa.base); in seq_curr_state_show()
/linux/include/linux/
coresight.h
448 static inline u32 csdev_access_relaxed_read32(struct csdev_access *csa, in csdev_access_relaxed_read32() argument
451 if (likely(csa->io_mem)) in csdev_access_relaxed_read32()
452 return readl_relaxed(csa->base + offset); in csdev_access_relaxed_read32()
454 return csa->read(offset, true, false); in csdev_access_relaxed_read32()
478 static inline u32 coresight_get_pid(struct csdev_access *csa) in coresight_get_pid() argument
483 pid |= csdev_access_relaxed_read32(csa, CORESIGHT_PIDRn(i)) << (i * 8); in coresight_get_pid()
488 static inline u64 csdev_access_relaxed_read_pair(struct csdev_access *csa, in csdev_access_relaxed_read_pair() argument
491 if (likely(csa->io_mem)) { in csdev_access_relaxed_read_pair()
492 return readl_relaxed(csa->base + lo_offset) | in csdev_access_relaxed_read_pair()
493 ((u64)readl_relaxed(csa->base + hi_offset) << 32); in csdev_access_relaxed_read_pair()
[all …]
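
The CoreSight hits share struct csdev_access (csa), which describes how a component's registers are reached: when csa->io_mem is set the helpers go straight to MMIO through csa->base (readl_relaxed() and friends), otherwise they fall back to the csa->read() callback, as csdev_access_relaxed_read32() in coresight.h shows. The following is a minimal standalone sketch of that dual-path accessor; the structure layout, callback signature and register values are invented for illustration and are not the kernel API.

/*
 * Minimal model of the csdev_access dual-path register read: direct
 * "MMIO" when a mapping exists (io_mem), otherwise an accessor callback.
 */
#include <stdint.h>
#include <stdio.h>

struct csdev_access {
	int io_mem;                             /* 1: use base, 0: use read()   */
	volatile uint32_t *base;                /* stand-in for the MMIO window */
	uint32_t (*read)(uint32_t offset);      /* fallback accessor            */
};

static uint32_t access_read32(struct csdev_access *csa, uint32_t offset)
{
	if (csa->io_mem)
		return csa->base[offset / 4];   /* plays the role of readl_relaxed() */
	return csa->read(offset);
}

static uint32_t fake_sysreg_read(uint32_t offset)
{
	return 0xe7a00000u | offset;            /* pretend system-register access */
}

int main(void)
{
	uint32_t regs[8] = { 0, 0x1234 };
	struct csdev_access mmio = { .io_mem = 1, .base = regs };
	struct csdev_access sysr = { .io_mem = 0, .read = fake_sysreg_read };

	printf("mmio read:   %#x\n", (unsigned)access_read32(&mmio, 4));
	printf("sysreg read: %#x\n", (unsigned)access_read32(&sysr, 4));
	return 0;
}
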
/linux/kernel/cgroup/
cpuset.c
842 struct cpuset **csa; /* array of all cpuset ptrs */ in generate_sched_domains() local
856 csa = NULL; in generate_sched_domains()
877 csa = kmalloc_array(nr_cpusets(), sizeof(cp), GFP_KERNEL); in generate_sched_domains()
878 if (!csa) in generate_sched_domains()
884 csa[csn++] = &top_cpuset; in generate_sched_domains()
909 csa[csn++] = cp; in generate_sched_domains()
922 csa[csn++] = cp; in generate_sched_domains()
941 uf_node_init(&csa[i]->node); in generate_sched_domains()
946 if (cpusets_overlap(csa[i], csa[j])) { in generate_sched_domains()
952 uf_union(&csa[i]->node, &csa[j]->node); in generate_sched_domains()
[all …]
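
In cpuset.c the name means something different: csa is literally a "cpuset array". generate_sched_domains() gathers every relevant cpuset into it and then, via uf_node_init()/uf_union(), merges entries whose CPU masks overlap so that each union-find root ends up describing one scheduler domain. A standalone sketch of that overlap-merge step, with plain bitmasks standing in for cpumasks and all names illustrative:

/*
 * Model of the cpuset-array pass in generate_sched_domains(): collect
 * candidate sets into an array, union any two whose CPU masks overlap,
 * and treat each resulting union-find root as one scheduling domain.
 */
#include <stdio.h>

#define NSETS 4

static int parent[NSETS];

static int uf_find(int x)
{
	return parent[x] == x ? x : (parent[x] = uf_find(parent[x]));
}

static void uf_union(int a, int b)
{
	parent[uf_find(a)] = uf_find(b);
}

int main(void)
{
	/* CPU masks: {0,1}, {1,2}, {4,5}, {5} */
	unsigned long cpus[NSETS] = { 0x3, 0x6, 0x30, 0x20 };
	int i, j;

	for (i = 0; i < NSETS; i++)
		parent[i] = i;

	/* pairwise overlap check, like the csa[i]/csa[j] loop in the hits above */
	for (i = 0; i < NSETS; i++)
		for (j = i + 1; j < NSETS; j++)
			if (cpus[i] & cpus[j])
				uf_union(i, j);

	for (i = 0; i < NSETS; i++)
		printf("cpuset %d -> domain root %d\n", i, uf_find(i));
	return 0;
}
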
/linux/drivers/net/wireless/intel/iwlwifi/dvm/
rx.c
47 struct iwl_csa_notification *csa = (void *)pkt->data; in iwlagn_rx_csa() local
58 if (!le32_to_cpu(csa->status) && csa->channel == priv->switch_channel) { in iwlagn_rx_csa()
59 rxon->channel = csa->channel; in iwlagn_rx_csa()
60 ctx->staging.channel = csa->channel; in iwlagn_rx_csa()
62 le16_to_cpu(csa->channel)); in iwlagn_rx_csa()
66 le16_to_cpu(csa->channel)); in iwlagn_rx_csa()
/linux/net/mac80211/
link.c
113 wiphy_work_init(&link->csa.finalize_work, in ieee80211_link_init()
155 &link->csa.finalize_work); in ieee80211_link_stop()
471 &link->u.mgd.csa.switch_work, in _ieee80211_set_active_links()
472 link->u.mgd.csa.time - in _ieee80211_set_active_links()
mlme.c
2430 u.mgd.csa.switch_work.work); in ieee80211_csa_switch_work()
2456 link->conf->chanreq = link->csa.chanreq; in ieee80211_csa_switch_work()
2457 cfg80211_ch_switch_notify(sdata->dev, &link->csa.chanreq.oper, in ieee80211_csa_switch_work()
2472 &link->csa.chanreq.oper); in ieee80211_csa_switch_work()
2504 &link->csa.chanreq)) { in ieee80211_csa_switch_work()
2512 link->u.mgd.csa.waiting_bcn = true; in ieee80211_csa_switch_work()
2525 if (link->u.mgd.csa.ap_chandef.chan->band == NL80211_BAND_6GHZ && in ieee80211_csa_switch_work()
2527 ieee80211_rearrange_tpe(&link->u.mgd.csa.tpe, in ieee80211_csa_switch_work()
2528 &link->u.mgd.csa.ap_chandef, in ieee80211_csa_switch_work()
2530 if (memcmp(&link->conf->tpe, &link->u.mgd.csa.tpe, in ieee80211_csa_switch_work()
[all …]
cfg.c
1095 const struct ieee80211_csa_settings *csa, in ieee80211_set_probe_resp() argument
1113 if (csa) in ieee80211_set_probe_resp()
1114 memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_presp, in ieee80211_set_probe_resp()
1115 csa->n_counter_offsets_presp * in ieee80211_set_probe_resp()
1315 const struct ieee80211_csa_settings *csa, in ieee80211_assign_beacon() argument
1394 if (csa) { in ieee80211_assign_beacon()
1395 new->cntdwn_current_counter = csa->count; in ieee80211_assign_beacon()
1396 memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_beacon, in ieee80211_assign_beacon()
1397 csa->n_counter_offsets_beacon * in ieee80211_assign_beacon()
1418 params->probe_resp_len, csa, cca, link); in ieee80211_assign_beacon()
[all …]
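
In mac80211, csa is the Channel Switch Announcement state. The cfg.c hits show ieee80211_assign_beacon() and ieee80211_set_probe_resp() copying the countdown counter offsets from struct ieee80211_csa_settings and storing the starting count, so the countdown byte inside the beacon and probe-response templates can be kept current as the switch approaches. A standalone sketch of that record-the-offset-and-patch pattern follows; the structure and helper names are illustrative, not the mac80211 API.

/*
 * Model of the CSA countdown-offset idea: a frame template records where
 * its countdown byte sits, and each transmitted "beacon" patches that
 * byte with the current count before it goes out.
 */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct beacon_tmpl {
	uint8_t data[32];
	size_t  cntdwn_offset;      /* where the countdown byte lives in data[] */
	uint8_t cntdwn_counter;     /* beacons left until the channel switch    */
};

static void beacon_tx(struct beacon_tmpl *b)
{
	b->data[b->cntdwn_offset] = b->cntdwn_counter;
	printf("tx beacon, csa count = %d\n", b->data[b->cntdwn_offset]);
	if (b->cntdwn_counter)
		b->cntdwn_counter--;    /* reaching 0 means: switch now */
}

int main(void)
{
	struct beacon_tmpl b = { .cntdwn_offset = 10, .cntdwn_counter = 3 };

	while (b.cntdwn_counter)
		beacon_tx(&b);
	beacon_tx(&b);                  /* final beacon carrying count 0 */
	return 0;
}
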
/linux/drivers/net/wireless/intel/iwlegacy/
common.c
4082 struct il_csa_notification *csa = &(pkt->u.csa_notif); in il_hdl_csa() local
4088 if (!le32_to_cpu(csa->status) && csa->channel == il->switch_channel) { in il_hdl_csa()
4089 rxon->channel = csa->channel; in il_hdl_csa()
4090 il->staging.channel = csa->channel; in il_hdl_csa()
4091 D_11H("CSA notif: channel %d\n", le16_to_cpu(csa->channel)); in il_hdl_csa()
4095 le16_to_cpu(csa->channel)); in il_hdl_csa()
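
The iwlwifi and iwlegacy hits are the driver side of the same channel switch: the firmware delivers a CSA notification, and iwlagn_rx_csa()/il_hdl_csa() commit the new channel to the staging RXON only when the status is OK and the announced channel matches the switch the driver requested. A small standalone model of that validate-then-commit check, with hypothetical types and host byte order in place of the le16/le32 fields:

/*
 * Model of the iwlagn_rx_csa()/il_hdl_csa() check: accept a firmware
 * channel-switch notification only when its status is OK and the channel
 * matches the switch that was actually requested.
 */
#include <stdint.h>
#include <stdio.h>

struct csa_notif { uint32_t status; uint16_t channel; };   /* fw -> driver */

struct priv {
	uint16_t switch_channel;   /* channel we asked the firmware to move to */
	uint16_t active_channel;   /* channel currently programmed ("RXON")    */
};

static void handle_csa(struct priv *priv, const struct csa_notif *csa)
{
	if (csa->status == 0 && csa->channel == priv->switch_channel) {
		priv->active_channel = csa->channel;
		printf("CSA notif: switched to channel %d\n", csa->channel);
	} else {
		printf("CSA notif ignored (status %u, channel %d)\n",
		       (unsigned)csa->status, csa->channel);
	}
}

int main(void)
{
	struct priv priv = { .switch_channel = 11, .active_channel = 6 };
	struct csa_notif ok    = { .status = 0, .channel = 11 };
	struct csa_notif stale = { .status = 0, .channel = 3 };

	handle_csa(&priv, &ok);
	handle_csa(&priv, &stale);
	return 0;
}
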
