Lines Matching full:spu

9  * Host-side part of SPU context switch sequence outlined in
33 #include <asm/spu.h>
64 static inline void acquire_spu_lock(struct spu *spu) in acquire_spu_lock() argument
68 * Acquire SPU-specific mutual exclusion lock. in acquire_spu_lock()
73 static inline void release_spu_lock(struct spu *spu) in release_spu_lock() argument
76 * Release SPU-specific mutual exclusion lock. in release_spu_lock()
81 static inline int check_spu_isolate(struct spu_state *csa, struct spu *spu) in check_spu_isolate() argument
83 struct spu_problem __iomem *prob = spu->problem; in check_spu_isolate()
89 * SPU is in isolate state and cannot be context in check_spu_isolate()
97 static inline void disable_interrupts(struct spu_state *csa, struct spu *spu) in disable_interrupts() argument
110 spin_lock_irq(&spu->register_lock); in disable_interrupts()
112 csa->priv1.int_mask_class0_RW = spu_int_mask_get(spu, 0); in disable_interrupts()
113 csa->priv1.int_mask_class1_RW = spu_int_mask_get(spu, 1); in disable_interrupts()
114 csa->priv1.int_mask_class2_RW = spu_int_mask_get(spu, 2); in disable_interrupts()
116 spu_int_mask_set(spu, 0, 0ul); in disable_interrupts()
117 spu_int_mask_set(spu, 1, 0ul); in disable_interrupts()
118 spu_int_mask_set(spu, 2, 0ul); in disable_interrupts()
120 spin_unlock_irq(&spu->register_lock); in disable_interrupts()
127 set_bit(SPU_CONTEXT_SWITCH_PENDING, &spu->flags); in disable_interrupts()
128 clear_bit(SPU_CONTEXT_FAULT_PENDING, &spu->flags); in disable_interrupts()
129 synchronize_irq(spu->irqs[0]); in disable_interrupts()
130 synchronize_irq(spu->irqs[1]); in disable_interrupts()
131 synchronize_irq(spu->irqs[2]); in disable_interrupts()
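The disable_interrupts() lines above show the usual quiescing pattern: save the class 0/1/2 interrupt masks and write zero masks under the register lock, then call synchronize_irq() on each of the SPU's interrupt lines so that any handler already in flight has finished before the switch proceeds (the saved masks are written back later by reenable_interrupts()). A minimal sketch of that pattern for a made-up three-line device follows; struct toy_dev, its mask fields, and toy_quiesce_irqs() are illustrative stand-ins, not spufs code.

    #include <linux/interrupt.h>    /* synchronize_irq() */
    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct toy_dev {                                /* hypothetical device */
            spinlock_t register_lock;
            unsigned int irqs[3];                   /* one IRQ line per class */
            u64 mask_reg[3];                        /* stands in for MMIO mask registers */
            u64 saved_mask[3];
    };

    static void toy_quiesce_irqs(struct toy_dev *dev)
    {
            int i;

            spin_lock_irq(&dev->register_lock);
            for (i = 0; i < 3; i++) {
                    dev->saved_mask[i] = dev->mask_reg[i];  /* remember old masks */
                    dev->mask_reg[i] = 0;                   /* mask every source */
            }
            spin_unlock_irq(&dev->register_lock);

            /* wait until any handler that was already running has returned */
            for (i = 0; i < 3; i++)
                    synchronize_irq(dev->irqs[i]);
    }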
134 static inline void set_watchdog_timer(struct spu_state *csa, struct spu *spu) in set_watchdog_timer() argument
147 static inline void inhibit_user_access(struct spu_state *csa, struct spu *spu) in inhibit_user_access() argument
152 * SPU by unmapping the virtual pages assigned to in inhibit_user_access()
153 * the SPU memory-mapped I/O (MMIO) for problem in inhibit_user_access()
158 static inline void set_switch_pending(struct spu_state *csa, struct spu *spu) in set_switch_pending() argument
167 static inline void save_mfc_cntl(struct spu_state *csa, struct spu *spu) in save_mfc_cntl() argument
169 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_cntl()
201 static inline void save_spu_runcntl(struct spu_state *csa, struct spu *spu) in save_spu_runcntl() argument
203 struct spu_problem __iomem *prob = spu->problem; in save_spu_runcntl()
212 static inline void save_mfc_sr1(struct spu_state *csa, struct spu *spu) in save_mfc_sr1() argument
217 csa->priv1.mfc_sr1_RW = spu_mfc_sr1_get(spu); in save_mfc_sr1()
220 static inline void save_spu_status(struct spu_state *csa, struct spu *spu) in save_spu_status() argument
222 struct spu_problem __iomem *prob = spu->problem; in save_spu_status()
247 struct spu *spu) in save_mfc_stopped_status() argument
249 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_stopped_status()
263 static inline void halt_mfc_decr(struct spu_state *csa, struct spu *spu) in halt_mfc_decr() argument
265 struct spu_priv2 __iomem *priv2 = spu->priv2; in halt_mfc_decr()
276 static inline void save_timebase(struct spu_state *csa, struct spu *spu) in save_timebase() argument
286 struct spu *spu) in remove_other_spu_access() argument
289 * Remove other SPU access to this SPU by unmapping in remove_other_spu_access()
290 * this SPU's pages from their address space. TBD. in remove_other_spu_access()
294 static inline void do_mfc_mssync(struct spu_state *csa, struct spu *spu) in do_mfc_mssync() argument
296 struct spu_problem __iomem *prob = spu->problem; in do_mfc_mssync()
307 static inline void issue_mfc_tlbie(struct spu_state *csa, struct spu *spu) in issue_mfc_tlbie() argument
315 spu_tlb_invalidate(spu); in issue_mfc_tlbie()
320 struct spu *spu) in handle_pending_interrupts() argument
323 * Handle any pending interrupts from this SPU in handle_pending_interrupts()
328 * flag, to ensure the SPU execution or MFC command in handle_pending_interrupts()
333 static inline void save_mfc_queues(struct spu_state *csa, struct spu *spu) in save_mfc_queues() argument
335 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_queues()
366 static inline void save_ppu_querymask(struct spu_state *csa, struct spu *spu) in save_ppu_querymask() argument
368 struct spu_problem __iomem *prob = spu->problem; in save_ppu_querymask()
377 static inline void save_ppu_querytype(struct spu_state *csa, struct spu *spu) in save_ppu_querytype() argument
379 struct spu_problem __iomem *prob = spu->problem; in save_ppu_querytype()
388 static inline void save_ppu_tagstatus(struct spu_state *csa, struct spu *spu) in save_ppu_tagstatus() argument
390 struct spu_problem __iomem *prob = spu->problem; in save_ppu_tagstatus()
401 static inline void save_mfc_csr_tsq(struct spu_state *csa, struct spu *spu) in save_mfc_csr_tsq() argument
403 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_csr_tsq()
413 static inline void save_mfc_csr_cmd(struct spu_state *csa, struct spu *spu) in save_mfc_csr_cmd() argument
415 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_csr_cmd()
425 static inline void save_mfc_csr_ato(struct spu_state *csa, struct spu *spu) in save_mfc_csr_ato() argument
427 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_csr_ato()
436 static inline void save_mfc_tclass_id(struct spu_state *csa, struct spu *spu) in save_mfc_tclass_id() argument
442 csa->priv1.mfc_tclass_id_RW = spu_mfc_tclass_id_get(spu); in save_mfc_tclass_id()
445 static inline void set_mfc_tclass_id(struct spu_state *csa, struct spu *spu) in set_mfc_tclass_id() argument
452 spu_mfc_tclass_id_set(spu, 0x10000000); in set_mfc_tclass_id()
456 static inline void purge_mfc_queue(struct spu_state *csa, struct spu *spu) in purge_mfc_queue() argument
458 struct spu_priv2 __iomem *priv2 = spu->priv2; in purge_mfc_queue()
470 static inline void wait_purge_complete(struct spu_state *csa, struct spu *spu) in wait_purge_complete() argument
472 struct spu_priv2 __iomem *priv2 = spu->priv2; in wait_purge_complete()
483 static inline void setup_mfc_sr1(struct spu_state *csa, struct spu *spu) in setup_mfc_sr1() argument
491 * Implementation note: The SPU-side code in setup_mfc_sr1()
496 spu_mfc_sr1_set(spu, (MFC_STATE1_MASTER_RUN_CONTROL_MASK | in setup_mfc_sr1()
501 static inline void save_spu_npc(struct spu_state *csa, struct spu *spu) in save_spu_npc() argument
503 struct spu_problem __iomem *prob = spu->problem; in save_spu_npc()
511 static inline void save_spu_privcntl(struct spu_state *csa, struct spu *spu) in save_spu_privcntl() argument
513 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_spu_privcntl()
521 static inline void reset_spu_privcntl(struct spu_state *csa, struct spu *spu) in reset_spu_privcntl() argument
523 struct spu_priv2 __iomem *priv2 = spu->priv2; in reset_spu_privcntl()
533 static inline void save_spu_lslr(struct spu_state *csa, struct spu *spu) in save_spu_lslr() argument
535 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_spu_lslr()
543 static inline void reset_spu_lslr(struct spu_state *csa, struct spu *spu) in reset_spu_lslr() argument
545 struct spu_priv2 __iomem *priv2 = spu->priv2; in reset_spu_lslr()
555 static inline void save_spu_cfg(struct spu_state *csa, struct spu *spu) in save_spu_cfg() argument
557 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_spu_cfg()
565 static inline void save_pm_trace(struct spu_state *csa, struct spu *spu) in save_pm_trace() argument
573 static inline void save_mfc_rag(struct spu_state *csa, struct spu *spu) in save_mfc_rag() argument
580 spu_resource_allocation_groupID_get(spu); in save_mfc_rag()
582 spu_resource_allocation_enable_get(spu); in save_mfc_rag()
585 static inline void save_ppu_mb_stat(struct spu_state *csa, struct spu *spu) in save_ppu_mb_stat() argument
587 struct spu_problem __iomem *prob = spu->problem; in save_ppu_mb_stat()
595 static inline void save_ppu_mb(struct spu_state *csa, struct spu *spu) in save_ppu_mb() argument
597 struct spu_problem __iomem *prob = spu->problem; in save_ppu_mb()
605 static inline void save_ppuint_mb(struct spu_state *csa, struct spu *spu) in save_ppuint_mb() argument
607 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_ppuint_mb()
615 static inline void save_ch_part1(struct spu_state *csa, struct spu *spu) in save_ch_part1() argument
617 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_ch_part1()
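save_ch_part1() above, together with save_spu_mb() and the restore_ch helpers further down, goes through the indirect channel-access mechanism: a channel index is written to one privileged register, and the selected channel's data and count are then read or written through a shared data register and count register. A sketch of one such access with stand-in register names (struct toy_chan_regs and toy_save_channel() are illustrative; the real layout is struct spu_priv2):

    #include <linux/types.h>
    #include <asm/io.h>                     /* in_be64() / out_be64() */

    struct toy_chan_regs {                  /* hypothetical register block */
            u64 chnl_index;                 /* selects which channel to access */
            u64 chnl_data;                  /* data of the selected channel */
            u64 chnl_count;                 /* count of the selected channel */
    };

    static void toy_save_channel(struct toy_chan_regs __iomem *regs,
                                 unsigned int ch, u64 *data, u64 *count)
    {
            out_be64(&regs->chnl_index, ch);        /* select the channel */
            eieio();                /* order the index write before the reads */
            *data = in_be64(&regs->chnl_data);
            *count = in_be64(&regs->chnl_count);
    }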
641 static inline void save_spu_mb(struct spu_state *csa, struct spu *spu) in save_spu_mb() argument
643 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_spu_mb()
647 * Save SPU Read Mailbox Channel. in save_spu_mb()
659 static inline void save_mfc_cmd(struct spu_state *csa, struct spu *spu) in save_mfc_cmd() argument
661 struct spu_priv2 __iomem *priv2 = spu->priv2; in save_mfc_cmd()
672 static inline void reset_ch(struct spu_state *csa, struct spu *spu) in reset_ch() argument
674 struct spu_priv2 __iomem *priv2 = spu->priv2; in reset_ch()
692 static inline void resume_mfc_queue(struct spu_state *csa, struct spu *spu) in resume_mfc_queue() argument
694 struct spu_priv2 __iomem *priv2 = spu->priv2; in resume_mfc_queue()
703 static inline void setup_mfc_slbs(struct spu_state *csa, struct spu *spu, in setup_mfc_slbs() argument
710 * to provide access to SPU context save code and in setup_mfc_slbs()
720 spu_invalidate_slbs(spu); in setup_mfc_slbs()
721 spu_setup_kernel_slbs(spu, csa->lscsa, code, code_size); in setup_mfc_slbs()
724 static inline void set_switch_active(struct spu_state *csa, struct spu *spu) in set_switch_active() argument
735 if (test_bit(SPU_CONTEXT_FAULT_PENDING, &spu->flags)) in set_switch_active()
737 clear_bit(SPU_CONTEXT_SWITCH_PENDING, &spu->flags); in set_switch_active()
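Two of the bits manipulated above form a small handshake between the switch code and the fault handler: disable_interrupts() sets SPU_CONTEXT_SWITCH_PENDING and clears SPU_CONTEXT_FAULT_PENDING, and set_switch_active() tests the fault bit before clearing the pending bit, so a fault that arrives mid-switch is recorded rather than lost. The sketch below shows that protocol with the same atomic bitops; the toy_ flag names, handler, and replay decision are illustrative assumptions, not the spufs implementation.

    #include <linux/bitops.h>       /* set_bit(), clear_bit(), test_bit() */
    #include <linux/types.h>

    #define TOY_SWITCH_PENDING      0       /* bit numbers chosen for illustration */
    #define TOY_FAULT_PENDING       1

    static unsigned long toy_flags;

    static void toy_fault_handler(void)
    {
            if (test_bit(TOY_SWITCH_PENDING, &toy_flags)) {
                    set_bit(TOY_FAULT_PENDING, &toy_flags); /* just remember it */
                    return;                                 /* defer real handling */
            }
            /* ... service the fault immediately ... */
    }

    static bool toy_finish_switch(void)
    {
            bool fault_was_recorded = test_bit(TOY_FAULT_PENDING, &toy_flags);

            clear_bit(TOY_SWITCH_PENDING, &toy_flags);
            return fault_was_recorded;      /* caller decides how to replay it */
    }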
741 static inline void enable_interrupts(struct spu_state *csa, struct spu *spu) in enable_interrupts() argument
754 spin_lock_irq(&spu->register_lock); in enable_interrupts()
755 spu_int_stat_clear(spu, 0, CLASS0_INTR_MASK); in enable_interrupts()
756 spu_int_stat_clear(spu, 1, CLASS1_INTR_MASK); in enable_interrupts()
757 spu_int_stat_clear(spu, 2, CLASS2_INTR_MASK); in enable_interrupts()
758 spu_int_mask_set(spu, 0, 0ul); in enable_interrupts()
759 spu_int_mask_set(spu, 1, class1_mask); in enable_interrupts()
760 spu_int_mask_set(spu, 2, 0ul); in enable_interrupts()
761 spin_unlock_irq(&spu->register_lock); in enable_interrupts()
764 static inline int send_mfc_dma(struct spu *spu, unsigned long ea, in send_mfc_dma() argument
769 struct spu_problem __iomem *prob = spu->problem; in send_mfc_dma()
798 static inline void save_ls_16kb(struct spu_state *csa, struct spu *spu) in save_ls_16kb() argument
811 send_mfc_dma(spu, addr, ls_offset, size, tag, rclass, cmd); in save_ls_16kb()
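save_ls_16kb() issues a host-initiated MFC DMA that copies the first 16 KB of local storage out to the save area before the SPU-resident save code is sent over, and send_mfc_dma() is the shared helper that queues such transfers. Because one MFC DMA command moves at most a bounded amount of data, a larger request has to be split into chunks while the effective address and local-store offset advance together. A sketch of that splitting follows; TOY_MAX_DMA_SIZE and toy_queue_dma() are assumptions standing in for the real per-command limit and the problem-state register writes.

    #define TOY_MAX_DMA_SIZE 0x4000u        /* assumed per-command limit (16 KB) */

    /* Hypothetical backend: enqueue exactly one DMA command. */
    void toy_queue_dma(unsigned long ea, unsigned int lsa, unsigned int size,
                       unsigned int tag, unsigned int cmd);

    static void toy_dma(unsigned long ea, unsigned int ls_offset,
                        unsigned int size, unsigned int tag, unsigned int cmd)
    {
            while (size > 0) {
                    unsigned int chunk = (size > TOY_MAX_DMA_SIZE) ?
                                         TOY_MAX_DMA_SIZE : size;

                    toy_queue_dma(ea, ls_offset, chunk, tag, cmd);
                    ea += chunk;
                    ls_offset += chunk;
                    size -= chunk;
            }
    }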
814 static inline void set_spu_npc(struct spu_state *csa, struct spu *spu) in set_spu_npc() argument
816 struct spu_problem __iomem *prob = spu->problem; in set_spu_npc()
824 * This implementation uses SPU-side save/restore in set_spu_npc()
831 static inline void set_signot1(struct spu_state *csa, struct spu *spu) in set_signot1() argument
833 struct spu_problem __iomem *prob = spu->problem; in set_signot1()
849 static inline void set_signot2(struct spu_state *csa, struct spu *spu) in set_signot2() argument
851 struct spu_problem __iomem *prob = spu->problem; in set_signot2()
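set_signot1() and set_signot2() pass the 64-bit effective address of the LSCSA to the SPU-resident save/restore program through the two 32-bit signal notification registers, one half of the address per register. A sketch of the split; toy_pass_lscsa_addr() and the high-then-low ordering are assumptions for illustration.

    #include <linux/types.h>
    #include <asm/io.h>             /* out_be32() */

    static void toy_pass_lscsa_addr(void *lscsa,
                                    u32 __iomem *sig_notify1,
                                    u32 __iomem *sig_notify2)
    {
            u64 ea = (u64)(unsigned long)lscsa;

            out_be32(sig_notify1, (u32)(ea >> 32));          /* upper 32 bits */
            out_be32(sig_notify2, (u32)(ea & 0xffffffffUL)); /* lower 32 bits */
    }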
867 static inline void send_save_code(struct spu_state *csa, struct spu *spu) in send_save_code() argument
878 * to local storage and start SPU. in send_save_code()
880 send_mfc_dma(spu, addr, ls_offset, size, tag, rclass, cmd); in send_save_code()
883 static inline void set_ppu_querymask(struct spu_state *csa, struct spu *spu) in set_ppu_querymask() argument
885 struct spu_problem __iomem *prob = spu->problem; in set_ppu_querymask()
896 static inline void wait_tag_complete(struct spu_state *csa, struct spu *spu) in wait_tag_complete() argument
898 struct spu_problem __iomem *prob = spu->problem; in wait_tag_complete()
914 spu_int_stat_clear(spu, 0, CLASS0_INTR_MASK); in wait_tag_complete()
915 spu_int_stat_clear(spu, 2, CLASS2_INTR_MASK); in wait_tag_complete()
919 static inline void wait_spu_stopped(struct spu_state *csa, struct spu *spu) in wait_spu_stopped() argument
921 struct spu_problem __iomem *prob = spu->problem; in wait_spu_stopped()
926 * Poll until SPU_Status[R]=0 or wait for SPU Class 0 in wait_spu_stopped()
927 * or SPU Class 2 interrupt. Write INT_Stat_class0 in wait_spu_stopped()
933 spu_int_stat_clear(spu, 0, CLASS0_INTR_MASK); in wait_spu_stopped()
934 spu_int_stat_clear(spu, 2, CLASS2_INTR_MASK); in wait_spu_stopped()
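wait_spu_stopped(), like the other wait_ helpers in the sequence, polls a problem-state status register until the run bit clears (or an interrupt signals the stop) and then clears the class 0 and class 2 interrupt status, as the lines above show. The core idiom is a spin on an MMIO-backed condition; the sketch below adds a retry budget and cpu_relax(), both of which are defensive assumptions rather than a claim about how the real helpers bound their wait.

    #include <linux/types.h>
    #include <asm/processor.h>      /* cpu_relax() */

    /* Returns true if the condition became true within the retry budget. */
    static bool toy_poll_until(bool (*done)(void *arg), void *arg,
                               unsigned long retries)
    {
            while (retries--) {
                    if (done(arg))
                            return true;
                    cpu_relax();    /* be polite to the other hardware thread */
            }
            return false;
    }

A caller would pass a predicate that reads SPU_Status and tests the running bit, and act on the timeout instead of spinning forever.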
938 static inline int check_save_status(struct spu_state *csa, struct spu *spu) in check_save_status() argument
940 struct spu_problem __iomem *prob = spu->problem; in check_save_status()
953 static inline void terminate_spu_app(struct spu_state *csa, struct spu *spu) in terminate_spu_app() argument
957 * the SPU task has been terminated. TBD. in terminate_spu_app()
962 struct spu *spu) in suspend_mfc_and_halt_decr() argument
964 struct spu_priv2 __iomem *priv2 = spu->priv2; in suspend_mfc_and_halt_decr()
976 struct spu *spu) in wait_suspend_mfc_complete() argument
978 struct spu_priv2 __iomem *priv2 = spu->priv2; in wait_suspend_mfc_complete()
989 static inline int suspend_spe(struct spu_state *csa, struct spu *spu) in suspend_spe() argument
991 struct spu_problem __iomem *prob = spu->problem; in suspend_spe()
994 * If SPU_Status[R]=1, stop SPU execution in suspend_spe()
1031 static inline void clear_spu_status(struct spu_state *csa, struct spu *spu) in clear_spu_status() argument
1033 struct spu_problem __iomem *prob = spu->problem; in clear_spu_status()
1037 * release SPU from isolate state. in clear_spu_status()
1042 spu_mfc_sr1_set(spu, in clear_spu_status()
1054 spu_mfc_sr1_set(spu, in clear_spu_status()
1065 static inline void reset_ch_part1(struct spu_state *csa, struct spu *spu) in reset_ch_part1() argument
1067 struct spu_priv2 __iomem *priv2 = spu->priv2; in reset_ch_part1()
1090 static inline void reset_ch_part2(struct spu_state *csa, struct spu *spu) in reset_ch_part2() argument
1092 struct spu_priv2 __iomem *priv2 = spu->priv2; in reset_ch_part2()
1111 struct spu *spu) in setup_spu_status_part1() argument
1125 * instruction sequence to the end of the SPU based restore in setup_spu_status_part1()
1127 * restore the correct SPU status. in setup_spu_status_part1()
1129 * NOTE: Rather than modifying the SPU executable, we in setup_spu_status_part1()
1131 * LSCSA. The SPU-side restore reads this field and in setup_spu_status_part1()
1203 struct spu *spu) in setup_spu_status_part2() argument
1210 * the SPU based restore code. in setup_spu_status_part2()
1212 * NOTE: Rather than modifying the SPU executable, we in setup_spu_status_part2()
1214 * LSCSA. The SPU-side restore reads this field and in setup_spu_status_part2()
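The NOTE repeated in setup_spu_status_part1() and setup_spu_status_part2() describes the design choice: rather than patching the SPU-side restore executable with an instruction sequence that reproduces the saved stop condition, the host writes the relevant information into a field of the LSCSA, and the SPU-resident restore program reads that field and re-creates the correct SPU status itself. A sketch of the host half; struct toy_lscsa and its stopped_status field are placeholders for the real LSCSA layout shared with the SPU-side code.

    #include <linux/types.h>

    struct toy_lscsa {                      /* placeholder for the real LSCSA */
            u32 stopped_status[4];          /* one 128-bit slot, host-written */
            /* ... register images, local store contents, ... */
    };

    static void toy_record_stop_reason(struct toy_lscsa *lscsa,
                                       u32 saved_spu_status)
    {
            /* The SPU-side restore reads this and restores the stop state. */
            lscsa->stopped_status[0] = saved_spu_status;
    }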
1226 static inline void restore_mfc_rag(struct spu_state *csa, struct spu *spu) in restore_mfc_rag() argument
1232 spu_resource_allocation_groupID_set(spu, in restore_mfc_rag()
1234 spu_resource_allocation_enable_set(spu, in restore_mfc_rag()
1238 static inline void send_restore_code(struct spu_state *csa, struct spu *spu) in send_restore_code() argument
1251 send_mfc_dma(spu, addr, ls_offset, size, tag, rclass, cmd); in send_restore_code()
1254 static inline void setup_decr(struct spu_state *csa, struct spu *spu) in setup_decr() argument
1279 static inline void setup_ppu_mb(struct spu_state *csa, struct spu *spu) in setup_ppu_mb() argument
1287 static inline void setup_ppuint_mb(struct spu_state *csa, struct spu *spu) in setup_ppuint_mb() argument
1295 static inline int check_restore_status(struct spu_state *csa, struct spu *spu) in check_restore_status() argument
1297 struct spu_problem __iomem *prob = spu->problem; in check_restore_status()
1310 static inline void restore_spu_privcntl(struct spu_state *csa, struct spu *spu) in restore_spu_privcntl() argument
1312 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_spu_privcntl()
1321 static inline void restore_status_part1(struct spu_state *csa, struct spu *spu) in restore_status_part1() argument
1323 struct spu_problem __iomem *prob = spu->problem; in restore_status_part1()
1341 static inline void restore_status_part2(struct spu_state *csa, struct spu *spu) in restore_status_part2() argument
1343 struct spu_problem __iomem *prob = spu->problem; in restore_status_part2()
1368 static inline void restore_ls_16kb(struct spu_state *csa, struct spu *spu) in restore_ls_16kb() argument
1381 send_mfc_dma(spu, addr, ls_offset, size, tag, rclass, cmd); in restore_ls_16kb()
1384 static inline void suspend_mfc(struct spu_state *csa, struct spu *spu) in suspend_mfc() argument
1386 struct spu_priv2 __iomem *priv2 = spu->priv2; in suspend_mfc()
1396 static inline void clear_interrupts(struct spu_state *csa, struct spu *spu) in clear_interrupts() argument
1406 spin_lock_irq(&spu->register_lock); in clear_interrupts()
1407 spu_int_mask_set(spu, 0, 0ul); in clear_interrupts()
1408 spu_int_mask_set(spu, 1, 0ul); in clear_interrupts()
1409 spu_int_mask_set(spu, 2, 0ul); in clear_interrupts()
1410 spu_int_stat_clear(spu, 0, CLASS0_INTR_MASK); in clear_interrupts()
1411 spu_int_stat_clear(spu, 1, CLASS1_INTR_MASK); in clear_interrupts()
1412 spu_int_stat_clear(spu, 2, CLASS2_INTR_MASK); in clear_interrupts()
1413 spin_unlock_irq(&spu->register_lock); in clear_interrupts()
1416 static inline void restore_mfc_queues(struct spu_state *csa, struct spu *spu) in restore_mfc_queues() argument
1418 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_mfc_queues()
1450 static inline void restore_ppu_querymask(struct spu_state *csa, struct spu *spu) in restore_ppu_querymask() argument
1452 struct spu_problem __iomem *prob = spu->problem; in restore_ppu_querymask()
1461 static inline void restore_ppu_querytype(struct spu_state *csa, struct spu *spu) in restore_ppu_querytype() argument
1463 struct spu_problem __iomem *prob = spu->problem; in restore_ppu_querytype()
1472 static inline void restore_mfc_csr_tsq(struct spu_state *csa, struct spu *spu) in restore_mfc_csr_tsq() argument
1474 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_mfc_csr_tsq()
1484 static inline void restore_mfc_csr_cmd(struct spu_state *csa, struct spu *spu) in restore_mfc_csr_cmd() argument
1486 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_mfc_csr_cmd()
1497 static inline void restore_mfc_csr_ato(struct spu_state *csa, struct spu *spu) in restore_mfc_csr_ato() argument
1499 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_mfc_csr_ato()
1507 static inline void restore_mfc_tclass_id(struct spu_state *csa, struct spu *spu) in restore_mfc_tclass_id() argument
1512 spu_mfc_tclass_id_set(spu, csa->priv1.mfc_tclass_id_RW); in restore_mfc_tclass_id()
1516 static inline void set_llr_event(struct spu_state *csa, struct spu *spu) in set_llr_event() argument
1539 static inline void restore_decr_wrapped(struct spu_state *csa, struct spu *spu) in restore_decr_wrapped() argument
1557 static inline void restore_ch_part1(struct spu_state *csa, struct spu *spu) in restore_ch_part1() argument
1559 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_ch_part1()
1576 static inline void restore_ch_part2(struct spu_state *csa, struct spu *spu) in restore_ch_part2() argument
1578 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_ch_part2()
1599 static inline void restore_spu_lslr(struct spu_state *csa, struct spu *spu) in restore_spu_lslr() argument
1601 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_spu_lslr()
1610 static inline void restore_spu_cfg(struct spu_state *csa, struct spu *spu) in restore_spu_cfg() argument
1612 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_spu_cfg()
1621 static inline void restore_pm_trace(struct spu_state *csa, struct spu *spu) in restore_pm_trace() argument
1629 static inline void restore_spu_npc(struct spu_state *csa, struct spu *spu) in restore_spu_npc() argument
1631 struct spu_problem __iomem *prob = spu->problem; in restore_spu_npc()
1640 static inline void restore_spu_mb(struct spu_state *csa, struct spu *spu) in restore_spu_mb() argument
1642 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_spu_mb()
1657 static inline void check_ppu_mb_stat(struct spu_state *csa, struct spu *spu) in check_ppu_mb_stat() argument
1659 struct spu_problem __iomem *prob = spu->problem; in check_ppu_mb_stat()
1671 static inline void check_ppuint_mb_stat(struct spu_state *csa, struct spu *spu) in check_ppuint_mb_stat() argument
1673 struct spu_priv2 __iomem *priv2 = spu->priv2; in check_ppuint_mb_stat()
1682 spu_int_stat_clear(spu, 2, CLASS2_ENABLE_MAILBOX_INTR); in check_ppuint_mb_stat()
1687 static inline void restore_mfc_sr1(struct spu_state *csa, struct spu *spu) in restore_mfc_sr1() argument
1692 spu_mfc_sr1_set(spu, csa->priv1.mfc_sr1_RW); in restore_mfc_sr1()
1696 static inline void set_int_route(struct spu_state *csa, struct spu *spu) in set_int_route() argument
1698 struct spu_context *ctx = spu->ctx; in set_int_route()
1700 spu_cpu_affinity_set(spu, ctx->last_ran); in set_int_route()
1704 struct spu *spu) in restore_other_spu_access() argument
1707 * Restore other SPU mappings to this SPU. TBD. in restore_other_spu_access()
1711 static inline void restore_spu_runcntl(struct spu_state *csa, struct spu *spu) in restore_spu_runcntl() argument
1713 struct spu_problem __iomem *prob = spu->problem; in restore_spu_runcntl()
1725 static inline void restore_mfc_cntl(struct spu_state *csa, struct spu *spu) in restore_mfc_cntl() argument
1727 struct spu_priv2 __iomem *priv2 = spu->priv2; in restore_mfc_cntl()
1741 * state of the spu. in restore_mfc_cntl()
1745 static inline void enable_user_access(struct spu_state *csa, struct spu *spu) in enable_user_access() argument
1749 * SPU by mapping the virtual pages assigned to in enable_user_access()
1750 * the SPU memory-mapped I/O (MMIO) for problem in enable_user_access()
1755 static inline void reset_switch_active(struct spu_state *csa, struct spu *spu) in reset_switch_active() argument
1763 static inline void reenable_interrupts(struct spu_state *csa, struct spu *spu) in reenable_interrupts() argument
1766 * Re-enable SPU interrupts. in reenable_interrupts()
1768 spin_lock_irq(&spu->register_lock); in reenable_interrupts()
1769 spu_int_mask_set(spu, 0, csa->priv1.int_mask_class0_RW); in reenable_interrupts()
1770 spu_int_mask_set(spu, 1, csa->priv1.int_mask_class1_RW); in reenable_interrupts()
1771 spu_int_mask_set(spu, 2, csa->priv1.int_mask_class2_RW); in reenable_interrupts()
1772 spin_unlock_irq(&spu->register_lock); in reenable_interrupts()
1775 static int quiece_spu(struct spu_state *prev, struct spu *spu) in quiece_spu() argument
1778 * Combined steps 2-18 of SPU context save sequence, which in quiece_spu()
1779 * quiesce the SPU state (disable SPU execution, MFC command in quiece_spu()
1780 * queues, decrementer, SPU interrupts, etc.). in quiece_spu()
1787 if (check_spu_isolate(prev, spu)) { /* Step 2. */ in quiece_spu()
1790 disable_interrupts(prev, spu); /* Step 3. */ in quiece_spu()
1791 set_watchdog_timer(prev, spu); /* Step 4. */ in quiece_spu()
1792 inhibit_user_access(prev, spu); /* Step 5. */ in quiece_spu()
1793 if (check_spu_isolate(prev, spu)) { /* Step 6. */ in quiece_spu()
1796 set_switch_pending(prev, spu); /* Step 7. */ in quiece_spu()
1797 save_mfc_cntl(prev, spu); /* Step 8. */ in quiece_spu()
1798 save_spu_runcntl(prev, spu); /* Step 9. */ in quiece_spu()
1799 save_mfc_sr1(prev, spu); /* Step 10. */ in quiece_spu()
1800 save_spu_status(prev, spu); /* Step 11. */ in quiece_spu()
1801 save_mfc_stopped_status(prev, spu); /* Step 12. */ in quiece_spu()
1802 halt_mfc_decr(prev, spu); /* Step 13. */ in quiece_spu()
1803 save_timebase(prev, spu); /* Step 14. */ in quiece_spu()
1804 remove_other_spu_access(prev, spu); /* Step 15. */ in quiece_spu()
1805 do_mfc_mssync(prev, spu); /* Step 16. */ in quiece_spu()
1806 issue_mfc_tlbie(prev, spu); /* Step 17. */ in quiece_spu()
1807 handle_pending_interrupts(prev, spu); /* Step 18. */ in quiece_spu()
1812 static void save_csa(struct spu_state *prev, struct spu *spu) in save_csa() argument
1815 * Combine steps 19-44 of SPU context save sequence, which in save_csa()
1819 save_mfc_queues(prev, spu); /* Step 19. */ in save_csa()
1820 save_ppu_querymask(prev, spu); /* Step 20. */ in save_csa()
1821 save_ppu_querytype(prev, spu); /* Step 21. */ in save_csa()
1822 save_ppu_tagstatus(prev, spu); /* NEW. */ in save_csa()
1823 save_mfc_csr_tsq(prev, spu); /* Step 22. */ in save_csa()
1824 save_mfc_csr_cmd(prev, spu); /* Step 23. */ in save_csa()
1825 save_mfc_csr_ato(prev, spu); /* Step 24. */ in save_csa()
1826 save_mfc_tclass_id(prev, spu); /* Step 25. */ in save_csa()
1827 set_mfc_tclass_id(prev, spu); /* Step 26. */ in save_csa()
1828 save_mfc_cmd(prev, spu); /* Step 26a - moved from 44. */ in save_csa()
1829 purge_mfc_queue(prev, spu); /* Step 27. */ in save_csa()
1830 wait_purge_complete(prev, spu); /* Step 28. */ in save_csa()
1831 setup_mfc_sr1(prev, spu); /* Step 30. */ in save_csa()
1832 save_spu_npc(prev, spu); /* Step 31. */ in save_csa()
1833 save_spu_privcntl(prev, spu); /* Step 32. */ in save_csa()
1834 reset_spu_privcntl(prev, spu); /* Step 33. */ in save_csa()
1835 save_spu_lslr(prev, spu); /* Step 34. */ in save_csa()
1836 reset_spu_lslr(prev, spu); /* Step 35. */ in save_csa()
1837 save_spu_cfg(prev, spu); /* Step 36. */ in save_csa()
1838 save_pm_trace(prev, spu); /* Step 37. */ in save_csa()
1839 save_mfc_rag(prev, spu); /* Step 38. */ in save_csa()
1840 save_ppu_mb_stat(prev, spu); /* Step 39. */ in save_csa()
1841 save_ppu_mb(prev, spu); /* Step 40. */ in save_csa()
1842 save_ppuint_mb(prev, spu); /* Step 41. */ in save_csa()
1843 save_ch_part1(prev, spu); /* Step 42. */ in save_csa()
1844 save_spu_mb(prev, spu); /* Step 43. */ in save_csa()
1845 reset_ch(prev, spu); /* Step 45. */ in save_csa()
1848 static void save_lscsa(struct spu_state *prev, struct spu *spu) in save_lscsa() argument
1851 * Perform steps 46-57 of SPU context save sequence, in save_lscsa()
1856 resume_mfc_queue(prev, spu); /* Step 46. */ in save_lscsa()
1858 setup_mfc_slbs(prev, spu, spu_save_code, sizeof(spu_save_code)); in save_lscsa()
1859 set_switch_active(prev, spu); /* Step 48. */ in save_lscsa()
1860 enable_interrupts(prev, spu); /* Step 49. */ in save_lscsa()
1861 save_ls_16kb(prev, spu); /* Step 50. */ in save_lscsa()
1862 set_spu_npc(prev, spu); /* Step 51. */ in save_lscsa()
1863 set_signot1(prev, spu); /* Step 52. */ in save_lscsa()
1864 set_signot2(prev, spu); /* Step 53. */ in save_lscsa()
1865 send_save_code(prev, spu); /* Step 54. */ in save_lscsa()
1866 set_ppu_querymask(prev, spu); /* Step 55. */ in save_lscsa()
1867 wait_tag_complete(prev, spu); /* Step 56. */ in save_lscsa()
1868 wait_spu_stopped(prev, spu); /* Step 57. */ in save_lscsa()
1871 static void force_spu_isolate_exit(struct spu *spu) in force_spu_isolate_exit() argument
1873 struct spu_problem __iomem *prob = spu->problem; in force_spu_isolate_exit()
1874 struct spu_priv2 __iomem *priv2 = spu->priv2; in force_spu_isolate_exit()
1882 spu_mfc_sr1_set(spu, MFC_STATE1_MASTER_RUN_CONTROL_MASK); in force_spu_isolate_exit()
1900 * Check SPU run-control state and force isolated
1903 static void stop_spu_isolate(struct spu *spu) in stop_spu_isolate() argument
1905 struct spu_problem __iomem *prob = spu->problem; in stop_spu_isolate()
1908 /* The SPU is in isolated state; the only way in stop_spu_isolate()
1912 force_spu_isolate_exit(spu); in stop_spu_isolate()
1916 static void harvest(struct spu_state *prev, struct spu *spu) in harvest() argument
1919 * Perform steps 2-25 of SPU context restore sequence, in harvest()
1920 * which resets an SPU either after a failed save, or in harvest()
1921 * when using SPU for first time. in harvest()
1924 disable_interrupts(prev, spu); /* Step 2. */ in harvest()
1925 inhibit_user_access(prev, spu); /* Step 3. */ in harvest()
1926 terminate_spu_app(prev, spu); /* Step 4. */ in harvest()
1927 set_switch_pending(prev, spu); /* Step 5. */ in harvest()
1928 stop_spu_isolate(spu); /* NEW. */ in harvest()
1929 remove_other_spu_access(prev, spu); /* Step 6. */ in harvest()
1930 suspend_mfc_and_halt_decr(prev, spu); /* Step 7. */ in harvest()
1931 wait_suspend_mfc_complete(prev, spu); /* Step 8. */ in harvest()
1932 if (!suspend_spe(prev, spu)) /* Step 9. */ in harvest()
1933 clear_spu_status(prev, spu); /* Step 10. */ in harvest()
1934 do_mfc_mssync(prev, spu); /* Step 11. */ in harvest()
1935 issue_mfc_tlbie(prev, spu); /* Step 12. */ in harvest()
1936 handle_pending_interrupts(prev, spu); /* Step 13. */ in harvest()
1937 purge_mfc_queue(prev, spu); /* Step 14. */ in harvest()
1938 wait_purge_complete(prev, spu); /* Step 15. */ in harvest()
1939 reset_spu_privcntl(prev, spu); /* Step 16. */ in harvest()
1940 reset_spu_lslr(prev, spu); /* Step 17. */ in harvest()
1941 setup_mfc_sr1(prev, spu); /* Step 18. */ in harvest()
1942 spu_invalidate_slbs(spu); /* Step 19. */ in harvest()
1943 reset_ch_part1(prev, spu); /* Step 20. */ in harvest()
1944 reset_ch_part2(prev, spu); /* Step 21. */ in harvest()
1945 enable_interrupts(prev, spu); /* Step 22. */ in harvest()
1946 set_switch_active(prev, spu); /* Step 23. */ in harvest()
1947 set_mfc_tclass_id(prev, spu); /* Step 24. */ in harvest()
1948 resume_mfc_queue(prev, spu); /* Step 25. */ in harvest()
1951 static void restore_lscsa(struct spu_state *next, struct spu *spu) in restore_lscsa() argument
1954 * Perform steps 26-40 of SPU context restore sequence, in restore_lscsa()
1959 set_watchdog_timer(next, spu); /* Step 26. */ in restore_lscsa()
1960 setup_spu_status_part1(next, spu); /* Step 27. */ in restore_lscsa()
1961 setup_spu_status_part2(next, spu); /* Step 28. */ in restore_lscsa()
1962 restore_mfc_rag(next, spu); /* Step 29. */ in restore_lscsa()
1964 setup_mfc_slbs(next, spu, spu_restore_code, sizeof(spu_restore_code)); in restore_lscsa()
1965 set_spu_npc(next, spu); /* Step 31. */ in restore_lscsa()
1966 set_signot1(next, spu); /* Step 32. */ in restore_lscsa()
1967 set_signot2(next, spu); /* Step 33. */ in restore_lscsa()
1968 setup_decr(next, spu); /* Step 34. */ in restore_lscsa()
1969 setup_ppu_mb(next, spu); /* Step 35. */ in restore_lscsa()
1970 setup_ppuint_mb(next, spu); /* Step 36. */ in restore_lscsa()
1971 send_restore_code(next, spu); /* Step 37. */ in restore_lscsa()
1972 set_ppu_querymask(next, spu); /* Step 38. */ in restore_lscsa()
1973 wait_tag_complete(next, spu); /* Step 39. */ in restore_lscsa()
1974 wait_spu_stopped(next, spu); /* Step 40. */ in restore_lscsa()
1977 static void restore_csa(struct spu_state *next, struct spu *spu) in restore_csa() argument
1980 * Combine steps 41-76 of SPU context restore sequence, which in restore_csa()
1984 restore_spu_privcntl(next, spu); /* Step 41. */ in restore_csa()
1985 restore_status_part1(next, spu); /* Step 42. */ in restore_csa()
1986 restore_status_part2(next, spu); /* Step 43. */ in restore_csa()
1987 restore_ls_16kb(next, spu); /* Step 44. */ in restore_csa()
1988 wait_tag_complete(next, spu); /* Step 45. */ in restore_csa()
1989 suspend_mfc(next, spu); /* Step 46. */ in restore_csa()
1990 wait_suspend_mfc_complete(next, spu); /* Step 47. */ in restore_csa()
1991 issue_mfc_tlbie(next, spu); /* Step 48. */ in restore_csa()
1992 clear_interrupts(next, spu); /* Step 49. */ in restore_csa()
1993 restore_mfc_queues(next, spu); /* Step 50. */ in restore_csa()
1994 restore_ppu_querymask(next, spu); /* Step 51. */ in restore_csa()
1995 restore_ppu_querytype(next, spu); /* Step 52. */ in restore_csa()
1996 restore_mfc_csr_tsq(next, spu); /* Step 53. */ in restore_csa()
1997 restore_mfc_csr_cmd(next, spu); /* Step 54. */ in restore_csa()
1998 restore_mfc_csr_ato(next, spu); /* Step 55. */ in restore_csa()
1999 restore_mfc_tclass_id(next, spu); /* Step 56. */ in restore_csa()
2000 set_llr_event(next, spu); /* Step 57. */ in restore_csa()
2001 restore_decr_wrapped(next, spu); /* Step 58. */ in restore_csa()
2002 restore_ch_part1(next, spu); /* Step 59. */ in restore_csa()
2003 restore_ch_part2(next, spu); /* Step 60. */ in restore_csa()
2004 restore_spu_lslr(next, spu); /* Step 61. */ in restore_csa()
2005 restore_spu_cfg(next, spu); /* Step 62. */ in restore_csa()
2006 restore_pm_trace(next, spu); /* Step 63. */ in restore_csa()
2007 restore_spu_npc(next, spu); /* Step 64. */ in restore_csa()
2008 restore_spu_mb(next, spu); /* Step 65. */ in restore_csa()
2009 check_ppu_mb_stat(next, spu); /* Step 66. */ in restore_csa()
2010 check_ppuint_mb_stat(next, spu); /* Step 67. */ in restore_csa()
2011 spu_invalidate_slbs(spu); /* Modified Step 68. */ in restore_csa()
2012 restore_mfc_sr1(next, spu); /* Step 69. */ in restore_csa()
2013 set_int_route(next, spu); /* NEW */ in restore_csa()
2014 restore_other_spu_access(next, spu); /* Step 70. */ in restore_csa()
2015 restore_spu_runcntl(next, spu); /* Step 71. */ in restore_csa()
2016 restore_mfc_cntl(next, spu); /* Step 72. */ in restore_csa()
2017 enable_user_access(next, spu); /* Step 73. */ in restore_csa()
2018 reset_switch_active(next, spu); /* Step 74. */ in restore_csa()
2019 reenable_interrupts(next, spu); /* Step 75. */ in restore_csa()
2022 static int __do_spu_save(struct spu_state *prev, struct spu *spu) in __do_spu_save() argument
2027 * SPU context save can be broken into three phases: in __do_spu_save()
2031 * (c) save of LSCSA, mostly performed by SPU [steps 43-52]. in __do_spu_save()
2034 * 2,6 if failed to quiece SPU in __do_spu_save()
2035 * 53 if SPU-side of save failed. in __do_spu_save()
2038 rc = quiece_spu(prev, spu); /* Steps 2-16. */ in __do_spu_save()
2043 harvest(prev, spu); in __do_spu_save()
2049 save_csa(prev, spu); /* Steps 17-43. */ in __do_spu_save()
2050 save_lscsa(prev, spu); /* Steps 44-53. */ in __do_spu_save()
2051 return check_save_status(prev, spu); /* Step 54. */ in __do_spu_save()
2054 static int __do_spu_restore(struct spu_state *next, struct spu *spu) in __do_spu_restore() argument
2059 * SPU context restore can be broken into three phases: in __do_spu_restore()
2061 * (a) harvest (or reset) SPU [steps 2-24]. in __do_spu_restore()
2062 * (b) restore LSCSA [steps 25-40], mostly performed by SPU. in __do_spu_restore()
2069 restore_lscsa(next, spu); /* Steps 24-39. */ in __do_spu_restore()
2070 rc = check_restore_status(next, spu); /* Step 40. */ in __do_spu_restore()
2080 restore_csa(next, spu); in __do_spu_restore()
2086 * spu_save - SPU context save, with locking.
2087 * @prev: pointer to SPU context save area, to be saved.
2088 * @spu: pointer to SPU iomem structure.
2092 int spu_save(struct spu_state *prev, struct spu *spu) in spu_save() argument
2096 acquire_spu_lock(spu); /* Step 1. */ in spu_save()
2097 rc = __do_spu_save(prev, spu); /* Steps 2-53. */ in spu_save()
2098 release_spu_lock(spu); in spu_save()
2100 panic("%s failed on SPU[%d], rc=%d.\n", in spu_save()
2101 __func__, spu->number, rc); in spu_save()
2108 * spu_restore - SPU context restore, with harvest and locking.
2109 * @new: pointer to SPU context save area, to be restored.
2110 * @spu: pointer to SPU iomem structure.
2116 int spu_restore(struct spu_state *new, struct spu *spu) in spu_restore() argument
2120 acquire_spu_lock(spu); in spu_restore()
2121 harvest(NULL, spu); in spu_restore()
2122 spu->slb_replace = 0; in spu_restore()
2123 rc = __do_spu_restore(new, spu); in spu_restore()
2124 release_spu_lock(spu); in spu_restore()
2126 panic("%s failed on SPU[%d] rc=%d.\n", in spu_restore()
2127 __func__, spu->number, rc); in spu_restore()
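spu_save() and spu_restore() are the two locked entry points a scheduler-level caller uses to move contexts on and off a physical SPU: save the outgoing context's save area, then restore the incoming one (spu_restore() harvests the SPU first, as shown above). A sketch of that pairing; struct toy_ctx is a hypothetical container, assumed to embed its save area as a struct spu_state named csa.

    #include <asm/spu.h>            /* struct spu */
    #include <asm/spu_csa.h>        /* struct spu_state, spu_save(), spu_restore() */

    struct toy_ctx {                /* hypothetical scheduler-side context */
            struct spu_state csa;
            /* ... run state, owner, priority, ... */
    };

    static void toy_switch_spu(struct spu *spu,
                               struct toy_ctx *out, struct toy_ctx *in)
    {
            spu_save(&out->csa, spu);       /* Steps 1-53: quiesce and save */
            spu_restore(&in->csa, spu);     /* harvest, then restore the new context */
    }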
2172 * spu_alloc_csa - allocate and initialize an SPU context save area.
2174 * Allocate and initialize the contents of an SPU context save area.