/linux/arch/powerpc/platforms/cell/spufs/ |
H A D | backing_ops.c |
  40  ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];          in gen_spu_event()
  41  ch0_data = ctx->csa.spu_chnldata_RW[0];        in gen_spu_event()
  42  ch1_data = ctx->csa.spu_chnldata_RW[1];        in gen_spu_event()
  43  ctx->csa.spu_chnldata_RW[0] |= event;          in gen_spu_event()
  45  ctx->csa.spu_chnlcnt_RW[0] = 1;                in gen_spu_event()
  54  spin_lock(&ctx->csa.register_lock);            in spu_backing_mbox_read()
  55  mbox_stat = ctx->csa.prob.mb_stat_R;           in spu_backing_mbox_read()
  61  *data = ctx->csa.prob.pu_mb_R;                 in spu_backing_mbox_read()
  62  ctx->csa.prob.mb_stat_R &= ~(0x0000ff);        in spu_backing_mbox_read()
  63  ctx->csa.spu_chnlcnt_RW[28] = 1;               in spu_backing_mbox_read()
  [all …]
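
The backing_ops matches show the spufs "backing" operations, which service mailbox and channel accesses from the saved context image (ctx->csa) while the context is not loaded on a physical SPU. Below is a minimal standalone sketch of that saved-state mailbox read; the struct layout, the field widths, and the pthread mutex (standing in for the kernel's register_lock spinlock) are assumptions, not the kernel's definitions.

#include <pthread.h>
#include <stdint.h>

/* Stand-in for the saved problem-state registers and channel counts
 * referenced in the snippet above; not the kernel's actual layout. */
struct saved_spu_ctx {
	pthread_mutex_t register_lock;   /* kernel code uses a spinlock */
	uint32_t mb_stat_R;              /* saved mailbox status register */
	uint32_t pu_mb_R;                /* saved PU-side mailbox data */
	uint32_t spu_chnlcnt_RW[64];     /* saved per-channel counts */
};

/*
 * Read one entry from the saved outbound mailbox: consume data only if
 * the status register reports something available, clear the count bits,
 * and re-arm the mailbox channel count (index 28, as in the snippet).
 */
int backing_mbox_read(struct saved_spu_ctx *csa, uint32_t *data)
{
	int ret = 0;

	pthread_mutex_lock(&csa->register_lock);
	if (csa->mb_stat_R & 0x0000ff) {
		*data = csa->pu_mb_R;
		csa->mb_stat_R &= ~0x0000ff;
		csa->spu_chnlcnt_RW[28] = 1;
		ret = 1;
	}
	pthread_mutex_unlock(&csa->register_lock);
	return ret;
}
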
|
H A D | switch.c |
  81   static inline int check_spu_isolate(struct spu_state *csa, struct spu *spu)      in check_spu_isolate() argument
  97   static inline void disable_interrupts(struct spu_state *csa, struct spu *spu)    in disable_interrupts() argument
  111  if (csa) {                                                                        in disable_interrupts()
  112  csa->priv1.int_mask_class0_RW = spu_int_mask_get(spu, 0);                        in disable_interrupts()
  113  csa->priv1.int_mask_class1_RW = spu_int_mask_get(spu, 1);                        in disable_interrupts()
  114  csa->priv1.int_mask_class2_RW = spu_int_mask_get(spu, 2);                        in disable_interrupts()
  134  static inline void set_watchdog_timer(struct spu_state *csa, struct spu *spu)    in set_watchdog_timer() argument
  147  static inline void inhibit_user_access(struct spu_state *csa, struct spu *spu)   in inhibit_user_access() argument
  158  static inline void set_switch_pending(struct spu_state *csa, struct spu *spu)    in set_switch_pending() argument
  167  static inline void save_mfc_cntl(struct spu_state *csa, struct spu *spu)         in save_mfc_cntl() argument
  [all …]
|
H A D | lscsa_alloc.c |
  21  int spu_alloc_lscsa(struct spu_state *csa)                                in spu_alloc_lscsa() argument
  29  csa->lscsa = lscsa;                                                       in spu_alloc_lscsa()
  38  void spu_free_lscsa(struct spu_state *csa)                                in spu_free_lscsa() argument
  43  if (csa->lscsa == NULL)                                                   in spu_free_lscsa()
  46  for (p = csa->lscsa->ls; p < csa->lscsa->ls + LS_SIZE; p += PAGE_SIZE)    in spu_free_lscsa()
  49  vfree(csa->lscsa);                                                        in spu_free_lscsa()
|
H A D | fault.c |
  55   unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK;    in spufs_handle_class0()
  61   spufs_handle_event(ctx, ctx->csa.class_0_dar,                        in spufs_handle_class0()
  65   spufs_handle_event(ctx, ctx->csa.class_0_dar,                        in spufs_handle_class0()
  69   spufs_handle_event(ctx, ctx->csa.class_0_dar,                        in spufs_handle_class0()
  72   ctx->csa.class_0_pending = 0;                                        in spufs_handle_class0()
  102  ea = ctx->csa.class_1_dar;                                           in spufs_handle_class1()
  103  dsisr = ctx->csa.class_1_dsisr;                                      in spufs_handle_class1()
  141  ctx->csa.class_1_dar = ctx->csa.class_1_dsisr = 0;                   in spufs_handle_class1()
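
fault.c works on fault state latched into the saved context: class-0 (error) conditions and class-1 (translation) faults are read out of ctx->csa, an event is raised for each pending condition, and the latch is cleared. A simplified sketch of the class-0 dispatch loop follows; the event constants and handle_event() helper are hypothetical stand-ins for the kernel's SPE event codes and spufs_handle_event().

#include <stdint.h>

/* Hypothetical stand-ins for the pending-fault bits / event codes. */
#define CLASS0_DMA_ALIGNMENT   0x1
#define CLASS0_INVALID_DMA     0x2
#define CLASS0_SPU_ERROR       0x4
#define CLASS0_INTR_MASK       0x7

struct fault_state {
	uint32_t class_0_pending;   /* latched fault bits */
	uint64_t class_0_dar;       /* faulting address, if any */
};

/* Records the last event raised; stands in for spufs_handle_event(). */
static uint32_t last_event;
static void handle_event(uint64_t dar, uint32_t event)
{
	(void)dar;
	last_event = event;
}

/* Walk the pending class-0 conditions, raise one event per set bit,
 * then clear the latch, mirroring the shape of spufs_handle_class0(). */
void handle_class0(struct fault_state *st)
{
	uint32_t stat = st->class_0_pending & CLASS0_INTR_MASK;

	if (stat & CLASS0_DMA_ALIGNMENT)
		handle_event(st->class_0_dar, CLASS0_DMA_ALIGNMENT);
	if (stat & CLASS0_INVALID_DMA)
		handle_event(st->class_0_dar, CLASS0_INVALID_DMA);
	if (stat & CLASS0_SPU_ERROR)
		handle_event(st->class_0_dar, CLASS0_SPU_ERROR);

	st->class_0_pending = 0;
}
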
|
H A D | file.c |
  249  pfn = vmalloc_to_pfn(ctx->csa.lscsa->ls + offset);                            in spufs_mem_mmap_fault()
  470  return spufs_dump_emit(cprm, ctx->csa.lscsa->gprs,                            in spufs_regs_dump()
  471  sizeof(ctx->csa.lscsa->gprs));                                                in spufs_regs_dump()
  483  if (*pos >= sizeof(ctx->csa.lscsa->gprs))                                     in spufs_regs_read()
  489  ret = simple_read_from_buffer(buffer, size, pos, ctx->csa.lscsa->gprs,        in spufs_regs_read()
  490  sizeof(ctx->csa.lscsa->gprs));                                                in spufs_regs_read()
  500  struct spu_lscsa *lscsa = ctx->csa.lscsa;                                     in spufs_regs_write()
  527  return spufs_dump_emit(cprm, &ctx->csa.lscsa->fpcr,                           in spufs_fpcr_dump()
  528  sizeof(ctx->csa.lscsa->fpcr));                                                in spufs_fpcr_dump()
  541  ret = simple_read_from_buffer(buffer, size, pos, &ctx->csa.lscsa->fpcr,       in spufs_fpcr_read()
  [all …]
|
H A D | run.c |
  30  ctx->csa.class_0_pending = spu->class_0_pending;    in spufs_stop_callback()
  31  ctx->csa.class_0_dar = spu->class_0_dar;             in spufs_stop_callback()
  34  ctx->csa.class_1_dsisr = spu->class_1_dsisr;          in spufs_stop_callback()
  35  ctx->csa.class_1_dar = spu->class_1_dar;              in spufs_stop_callback()
  72  dsisr = ctx->csa.class_1_dsisr;                        in spu_stopped()
  76  if (ctx->csa.class_0_pending)                          in spu_stopped()
|
H A D | spufs.h |
  69   struct spu_state csa; /* SPU context save area. */    member
  344  extern int spu_init_csa(struct spu_state *csa);
  345  extern void spu_fini_csa(struct spu_state *csa);
  350  extern int spu_alloc_lscsa(struct spu_state *csa);
  351  extern void spu_free_lscsa(struct spu_state *csa);
|
H A D | context.c |
  35  if (spu_init_csa(&ctx->csa))    in alloc_spu_context()
  77  spu_fini_csa(&ctx->csa);        in destroy_spu_context()
|
H A D | sched.c |
  237  spu_restore(&ctx->csa, spu);    in spu_bind_context()
  439  spu_save(&ctx->csa, spu);       in spu_unbind_context()
|
/linux/drivers/hwtracing/coresight/ |
H A D | coresight-etm4x-core.c |
  73   struct csdev_access *csa;                               member
  160  struct csdev_access *csa)                               in etm_detect_os_lock() argument
  162  u32 oslsr = etm4x_relaxed_read32(csa, TRCOSLSR);         in etm_detect_os_lock()
  168  struct csdev_access *csa, u32 val)                      in etm_write_os_lock() argument
  174  etm4x_relaxed_write32(csa, val, TRCOSLAR);               in etm_write_os_lock()
  190  struct csdev_access *csa)                               in etm4_os_unlock_csa() argument
  195  etm_write_os_lock(drvdata, csa, 0x0);                    in etm4_os_unlock_csa()
  215  struct csdev_access *csa)                               in etm4_cs_lock() argument
  218  if (csa->io_mem)                                         in etm4_cs_lock()
  219  CS_LOCK(csa->base);                                      in etm4_cs_lock()
  [all …]
|
H A D | coresight-tpiu.c |
  68   static void tpiu_enable_hw(struct csdev_access *csa)                         in tpiu_enable_hw() argument
  70   CS_UNLOCK(csa->base);                                                        in tpiu_enable_hw()
  74   CS_LOCK(csa->base);                                                          in tpiu_enable_hw()
  89   static void tpiu_disable_hw(struct csdev_access *csa)                        in tpiu_disable_hw() argument
  91   CS_UNLOCK(csa->base);                                                        in tpiu_disable_hw()
  94   csdev_access_relaxed_write32(csa, FFCR_STOP_FI, TPIU_FFCR);                  in tpiu_disable_hw()
  96   csdev_access_relaxed_write32(csa, FFCR_STOP_FI | FFCR_FON_MAN, TPIU_FFCR);   in tpiu_disable_hw()
  98   coresight_timeout(csa, TPIU_FFCR, FFCR_FON_MAN_BIT, 0);                      in tpiu_disable_hw()
  100  coresight_timeout(csa, TPIU_FFSR, FFSR_FT_STOPPED_BIT, 1);                   in tpiu_disable_hw()
  102  CS_LOCK(csa->base);                                                          in tpiu_disable_hw()
|
H A D | coresight-tmc-core.c |
  40  struct csdev_access *csa = &csdev->access;                          in tmc_wait_for_tmcready() local
  43  if (coresight_timeout(csa, TMC_STS, TMC_STS_TMCREADY_BIT, 1)) {     in tmc_wait_for_tmcready()
  54  struct csdev_access *csa = &csdev->access;                          in tmc_flush_and_stop() local
  63  if (coresight_timeout(csa, TMC_FFCR, TMC_FFCR_FLUSHMAN_BIT, 0)) {   in tmc_flush_and_stop()
|
H A D | coresight-etb10.c |
  254  struct csdev_access *csa = &drvdata->csdev->access;      in __etb_disable_hw() local
  266  if (coresight_timeout(csa, ETB_FFCR, ETB_FFCR_BIT, 0)) {  in __etb_disable_hw()
  274  if (coresight_timeout(csa, ETB_FFSR, ETB_FFSR_BIT, 1)) {  in __etb_disable_hw()
|
H A D | coresight-catu.c |
  395  struct csdev_access *csa = &drvdata->csdev->access;               in catu_wait_for_ready() local
  397  return coresight_timeout(csa, CATU_STATUS, CATU_STATUS_READY, 1);  in catu_wait_for_ready()
|
H A D | coresight-stm.c |
  262  struct csdev_access *csa = &csdev->access;               in stm_disable() local
  275  coresight_timeout(csa, STMTCSR, STMTCSR_BUSY_BIT, 0);    in stm_disable()
|
H A D | coresight-core.c |
  1097  * @csa: coresight device access for the device
  1105  int coresight_timeout(struct csdev_access *csa, u32 offset,
  1112  val = csdev_access_read32(csa, offset);                                             in coresight_release_platform_data()
  1030  coresight_timeout(struct csdev_access *csa, u32 offset, int position, int value)    coresight_timeout() argument
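
coresight_timeout() polls a device register until a given bit position holds the requested value, giving up after a bounded wait; the tpiu, tmc, etb10, catu, and stm matches above are all callers of it. A self-contained sketch of that pattern follows. The simulated register array, the 100-iteration retry budget, and the usleep() back-off are stand-ins for the kernel's register accessors and delay helpers.

#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

/* Simulated 32-bit device registers, indexed by byte offset / 4. */
static uint32_t fake_regs[64];

static uint32_t reg_read32(uint32_t offset)
{
	return fake_regs[offset / 4];
}

/*
 * Wait until bit 'position' of the register at 'offset' equals 'value'.
 * Returns 0 on success and -1 on timeout (the kernel helper returns a
 * negative errno instead).
 */
static int wait_for_bit(uint32_t offset, int position, int value)
{
	for (int i = 0; i < 100; i++) {
		uint32_t val = reg_read32(offset);

		if (((val >> position) & 1u) == (uint32_t)value)
			return 0;
		usleep(1000);   /* crude back-off; the kernel uses short delays */
	}
	return -1;
}

int main(void)
{
	fake_regs[0x0C / 4] = 1u << 2;  /* pretend the "ready" bit is already set */
	printf("wait_for_bit: %d\n", wait_for_bit(0x0C, 2, 1));
	return 0;
}
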
|
/linux/include/linux/ |
H A D | coresight.h |
  419  static inline u32 csdev_access_relaxed_read32(struct csdev_access *csa,           in csdev_access_relaxed_read32()
  422  if (likely(csa->io_mem))
  423  return readl_relaxed(csa->base + offset);
  425  return csa->read(offset, true, false);                                            in coresight_get_cid()
  475  static inline u32 coresight_get_pid(struct csdev_access *csa)                     in coresight_get_pid()
  480  pid |= csdev_access_relaxed_read32(csa, CORESIGHT_PIDRn(i)) << (i * 8);           in csdev_access_relaxed_read_pair() argument
  485  static inline u64 csdev_access_relaxed_read_pair(struct csdev_access *csa,        in csdev_access_relaxed_read_pair()
  488  if (likely(csa->io_mem)) {                                                        in csdev_access_relaxed_read_pair()
  489  return readl_relaxed(csa->base + lo_offset) |                                     in csdev_access_relaxed_read_pair()
  490  ((u64)readl_relaxed(csa                                                           in csdev_access_relaxed_read_pair()
  414  csdev_access_relaxed_read32(struct csdev_access *csa, u32 offset)                                    csdev_access_relaxed_read32() argument
  470  coresight_get_pid(struct csdev_access *csa)                                                          coresight_get_pid() argument
  491  csdev_access_relaxed_write_pair(struct csdev_access *csa, u64 val, u32 lo_offset, u32 hi_offset)     csdev_access_relaxed_write_pair() argument
  503  csdev_access_read32(struct csdev_access *csa, u32 offset)                                            csdev_access_read32() argument
  511  csdev_access_relaxed_write32(struct csdev_access *csa, u32 val, u32 offset)                          csdev_access_relaxed_write32() argument
  520  csdev_access_write32(struct csdev_access *csa, u32 val, u32 offset)                                  csdev_access_write32() argument
  530  csdev_access_relaxed_read64(struct csdev_access *csa, u32 offset)                                    csdev_access_relaxed_read64() argument
  539  csdev_access_read64(struct csdev_access *csa, u32 offset)                                            csdev_access_read64() argument
  547  csdev_access_relaxed_write64(struct csdev_access *csa, u64 val, u32 offset)                          csdev_access_relaxed_write64() argument
  556  csdev_access_write64(struct csdev_access *csa, u64 val, u32 offset)                                  csdev_access_write64() argument
  566  csdev_access_relaxed_read64(struct csdev_access *csa, u32 offset)                                    csdev_access_relaxed_read64() argument
  573  csdev_access_read64(struct csdev_access *csa, u32 offset)                                            csdev_access_read64() argument
  579  csdev_access_relaxed_write64(struct csdev_access *csa, u64 val, u32 offset)                          csdev_access_relaxed_write64() argument
  585  csdev_access_write64(struct csdev_access *csa, u64 val, u32 offset)                                  csdev_access_write64() argument
  [all …]
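
The coresight.h accessors above dispatch on csa->io_mem: a memory-mapped device is read directly with readl_relaxed(), otherwise an indirect read callback is invoked, and 64-bit values are assembled from two 32-bit reads. A userspace sketch of that dispatch follows; the struct name, the mmio_read32() helper, and the plain pointer dereference standing in for readl_relaxed() are assumptions made for the example.

#include <stdbool.h>
#include <stdint.h>

/* Cut-down stand-in for struct csdev_access: either a directly mapped
 * register window (io_mem + base) or an indirect read callback. */
struct csdev_access_sketch {
	bool io_mem;
	void *base;
	uint64_t (*read)(uint32_t offset, bool relaxed, bool _64bit);
};

static uint32_t mmio_read32(void *base, uint32_t offset)
{
	/* stand-in for readl_relaxed(base + offset) */
	return *(volatile uint32_t *)((char *)base + offset);
}

uint32_t access_relaxed_read32(struct csdev_access_sketch *csa, uint32_t offset)
{
	if (csa->io_mem)
		return mmio_read32(csa->base, offset);
	return (uint32_t)csa->read(offset, true, false);
}

/* Combine two 32-bit registers into one 64-bit value, low word first,
 * as csdev_access_relaxed_read_pair() does in the snippet above. */
uint64_t access_relaxed_read_pair(struct csdev_access_sketch *csa,
				  uint32_t lo_offset, uint32_t hi_offset)
{
	return (uint64_t)access_relaxed_read32(csa, lo_offset) |
	       ((uint64_t)access_relaxed_read32(csa, hi_offset) << 32);
}
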
/linux/kernel/cgroup/ |
H A D | cpuset.c |
  745  struct cpuset **csa; /* array of all cpuset ptrs */             in generate_sched_domains() local
  759  csa = NULL;                                                     in generate_sched_domains()
  780  csa = kmalloc_array(nr_cpusets(), sizeof(cp), GFP_KERNEL);      in generate_sched_domains()
  781  if (!csa)                                                       in generate_sched_domains()
  787  csa[csn++] = &top_cpuset;                                       in generate_sched_domains()
  812  csa[csn++] = cp;                                                in generate_sched_domains()
  825  csa[csn++] = cp;                                                in generate_sched_domains()
  844  uf_node_init(&csa[i]->node);                                    in generate_sched_domains()
  849  if (cpusets_overlap(csa[i], csa[j])) {                          in generate_sched_domains()
  855  uf_union(&csa[i]->node, &csa[j]->node);                         in generate_sched_domains()
  [all …]
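
Here csa is a flat array of cpuset pointers collected from the hierarchy: cpusets whose CPU masks overlap are merged with a union-find structure, and each resulting group of cpusets ends up in one scheduler domain. A toy standalone version of that overlap-merging step is sketched below, using small integer bitmasks instead of cpumasks and a simple path-compressing union-find instead of the kernel's uf_node helpers; the example masks are made up.

#include <stdio.h>

#define NSETS 4

/* Toy "cpusets": each entry is a bitmask of the CPUs the set may use. */
static unsigned long cpus_allowed[NSETS] = { 0x03, 0x06, 0x18, 0x20 };

static int parent[NSETS];

static int uf_find(int x)
{
	if (parent[x] != x)
		parent[x] = uf_find(parent[x]);   /* path compression */
	return parent[x];
}

static void uf_union(int a, int b)
{
	parent[uf_find(a)] = uf_find(b);
}

int main(void)
{
	for (int i = 0; i < NSETS; i++)
		parent[i] = i;

	/* Merge every pair of overlapping cpusets, as the nested loop over
	 * csa[i]/csa[j] does with cpusets_overlap() + uf_union(). */
	for (int i = 0; i < NSETS; i++)
		for (int j = i + 1; j < NSETS; j++)
			if (cpus_allowed[i] & cpus_allowed[j])
				uf_union(i, j);

	/* Each distinct root corresponds to one scheduler domain. */
	for (int i = 0; i < NSETS; i++)
		printf("cpuset %d -> partition root %d\n", i, uf_find(i));
	return 0;
}
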
|
/linux/net/mac80211/ |
H A D | mesh.c |
  953   struct mesh_csa_settings *csa;                            in ieee80211_mesh_build_beacon() local
  1028  csa = rcu_dereference(ifmsh->csa);                        in ieee80211_mesh_build_beacon()
  1029  if (csa) {                                                in ieee80211_mesh_build_beacon()
  1040  csa->settings.chandef.chan->center_freq);                 in ieee80211_mesh_build_beacon()
  1041  bcn->cntdwn_current_counter = csa->settings.count;        in ieee80211_mesh_build_beacon()
  1043  *pos++ = csa->settings.count;                             in ieee80211_mesh_build_beacon()
  1052  *pos++ |= csa->settings.block_tx ?                        in ieee80211_mesh_build_beacon()
  1059  switch (csa->settings.chandef.width) {                    in ieee80211_mesh_build_beacon()
  1066  ct = cfg80211_get_chandef_type(&csa->settings.chandef);   in ieee80211_mesh_build_beacon()
  1083  chandef = &csa->settings.chandef;                         in ieee80211_mesh_build_beacon()
  [all …]
|
H A D | link.c |
  43   wiphy_work_init(&link->csa.finalize_work,    in ieee80211_link_init()
  83   &link->csa.finalize_work);                   in ieee80211_link_stop()
  387  &link->u.mgd.csa.switch_work,                in _ieee80211_set_active_links()
  388  link->u.mgd.csa.time -                       in _ieee80211_set_active_links()
|
H A D | mlme.c |
  2207  u.mgd.csa.switch_work.work);                                     in ieee80211_csa_switch_work()
  2230  link->conf->chanreq = link->csa.chanreq;                         in ieee80211_csa_switch_work()
  2231  cfg80211_ch_switch_notify(sdata->dev, &link->csa.chanreq.oper,   in ieee80211_csa_switch_work()
  2264  &link->csa.chanreq)) {                                           in ieee80211_csa_switch_work()
  2272  link->u.mgd.csa.waiting_bcn = true;                              in ieee80211_csa_switch_work()
  2275  if (link->u.mgd.csa.ap_chandef.chan->band == NL80211_BAND_6GHZ &&  in ieee80211_csa_switch_work()
  2277  ieee80211_rearrange_tpe(&link->u.mgd.csa.tpe,                    in ieee80211_csa_switch_work()
  2278  &link->u.mgd.csa.ap_chandef,                                     in ieee80211_csa_switch_work()
  2280  if (memcmp(&link->conf->tpe, &link->u.mgd.csa.tpe,               in ieee80211_csa_switch_work()
  2281  sizeof(link->u.mgd.csa.tpe))) {                                  in ieee80211_csa_switch_work()
  [all …]
|
H A D | cfg.c |
  949   const struct ieee80211_csa_settings *csa,                         in ieee80211_set_probe_resp() argument
  967   if (csa)                                                          in ieee80211_set_probe_resp()
  968   memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_presp,   in ieee80211_set_probe_resp()
  969   csa->n_counter_offsets_presp *                                    in ieee80211_set_probe_resp()
  1128  const struct ieee80211_csa_settings *csa,                         in ieee80211_assign_beacon() argument
  1214  if (csa) {                                                        in ieee80211_assign_beacon()
  1215  new->cntdwn_current_counter = csa->count;                         in ieee80211_assign_beacon()
  1216  memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_beacon,  in ieee80211_assign_beacon()
  1217  csa->n_counter_offsets_beacon *                                   in ieee80211_assign_beacon()
  1238  params->probe_resp_len, csa, cca, link);                          in ieee80211_assign_beacon()
  [all …]
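
In the mac80211 files above, csa is a channel switch announcement: when a new beacon or probe-response template is installed, the byte offsets of the countdown counter inside the frame are recorded along with the current count, so later beacon updates can patch the counter in place at those positions. A simplified sketch of that patching step follows; the beacon_template type, field names, and buffer size are invented for the example.

#include <stdint.h>
#include <stddef.h>

#define MAX_CNTDWN_OFFSETS 2

/* Cut-down stand-in for a beacon template with CSA countdown bookkeeping. */
struct beacon_template {
	uint8_t data[256];                            /* raw beacon frame bytes  */
	uint16_t cntdwn_offsets[MAX_CNTDWN_OFFSETS];  /* where the counter lives */
	int n_cntdwn_offsets;
	uint8_t cntdwn_current_counter;
};

/* Decrement the CSA countdown and rewrite it at every recorded offset,
 * analogous to what the beacon-update paths in the snippets do. */
void csa_update_counter(struct beacon_template *bcn)
{
	if (bcn->cntdwn_current_counter)
		bcn->cntdwn_current_counter--;

	for (int i = 0; i < bcn->n_cntdwn_offsets; i++)
		bcn->data[bcn->cntdwn_offsets[i]] = bcn->cntdwn_current_counter;
}
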
|
H A D | ieee80211_i.h |
  797   struct mesh_csa_settings __rcu *csa;    member
  1002  } csa;                                  member
  1065  } csa;                                  member
|
/linux/drivers/net/wireless/intel/iwlwifi/dvm/ |
H A D | rx.c |
  47  struct iwl_csa_notification *csa = (void *)pkt->data;                       in iwlagn_rx_csa() local
  58  if (!le32_to_cpu(csa->status) && csa->channel == priv->switch_channel) {    in iwlagn_rx_csa()
  59  rxon->channel = csa->channel;                                               in iwlagn_rx_csa()
  60  ctx->staging.channel = csa->channel;                                        in iwlagn_rx_csa()
  62  le16_to_cpu(csa->channel));                                                 in iwlagn_rx_csa()
  66  le16_to_cpu(csa->channel));                                                 in iwlagn_rx_csa()
|
/linux/drivers/net/wireless/intel/ipw2x00/ |
H A D | libipw.h |
  640  struct libipw_csa csa;    member
|