Lines matching refs:scr2 (cross-reference hits for the scr2 scratch-register argument used by the CPU-logout macros)
59 #define GET_CPU_PRIVATE_PTR(off_reg, scr1, scr2, label) \
60 CPU_ADDR(scr1, scr2); \
78 #define GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3) \
81 srlx afar, CH_DCTAG_PA_SHIFT, scr2; \
83 or scr2, CH_DCTAG_VALID_BIT, scr2; /* tag we want */ \
87 cmp scr1, scr2; \
99 clr scr2; \
102 ldxa [scr3 + scr2]ASI_DC_DATA, scr1; /* read data */ \
106 cmp scr2, CH_DC_DATA_REG_SIZE - 8; \
108 add scr2, 8, scr2; \
110 GET_CPU_IMPL(scr2); /* Parity bits are elsewhere for */ \
111 cmp scr2, PANTHER_IMPL; /* panther processors. */ \
114 clr scr2; \
121 ldxa [scr3 + scr2]ASI_DC_DATA, scr1; /* read parity bits */ \
125 cmp scr2, CH_DC_DATA_REG_SIZE - 8; \
127 add scr2, 8, scr2; \
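
GET_DCACHE_DTAG compares a tag derived from the AFAR against each D-cache way and, on a match, dumps the data and parity registers 8 bytes at a time, with the parity offset differing on Panther. A minimal C sketch of that shape, using hypothetical dc_tag_read()/dc_data_read()/cpu_impl() helpers in place of the ldxa ASI accesses and made-up values for the CH_* and PANTHER_IMPL constants:

    #include <stdint.h>
    #include <stdio.h>

    #define CH_DCTAG_PA_SHIFT    12    /* made-up; real CH_* values are in the header */
    #define CH_DCTAG_VALID_BIT   1     /* made-up */
    #define CH_DC_DATA_REG_SIZE  64    /* made-up */
    #define PANTHER_IMPL         0x19  /* made-up */

    /* Hypothetical stand-ins for the ldxa ASI_DC_TAG / ASI_DC_DATA loads. */
    static uint64_t dc_tag_read(uint64_t idx)                { return (idx); }
    static uint64_t dc_data_read(uint64_t idx, uint64_t off) { return (idx + off); }
    static uint64_t cpu_impl(void)                           { return (PANTHER_IMPL); }

    static int
    dcache_dtag_sketch(uint64_t afar, uint64_t idx, uint64_t *datap)
    {
        /* tag we want: the AFAR's PA bits with the valid bit set */
        uint64_t want = (afar >> CH_DCTAG_PA_SHIFT) | CH_DCTAG_VALID_BIT;

        if (dc_tag_read(idx) != want)
            return (0);                  /* this way does not match */

        /* read data 8 bytes at a time, up to CH_DC_DATA_REG_SIZE - 8 */
        for (uint64_t off = 0; off <= CH_DC_DATA_REG_SIZE - 8; off += 8)
            *datap++ = dc_data_read(idx, off);

        /*
         * Parity bits are read with the same stride but live at a
         * different offset on Panther, hence the GET_CPU_IMPL check.
         */
        uint64_t pbase = (cpu_impl() == PANTHER_IMPL) ? 0x400 : 0x200;
        for (uint64_t off = 0; off <= CH_DC_DATA_REG_SIZE - 8; off += 8)
            *datap++ = dc_data_read(pbase | idx, off);

        return (1);
    }

    int
    main(void)
    {
        uint64_t buf[16];
        (void) printf("matched: %d\n", dcache_dtag_sketch(0x12345000, 0, buf));
        return (0);
    }
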
158 #define GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3) \
163 GET_CPU_IMPL(scr2); /* Panther only uses VA[13:6] */ \
164 cmp scr2, PANTHER_IMPL; /* and we also want to mask */ \
175 srlx afar, CH_ICPATAG_SHIFT, scr2; /* pa tag we want */ \
176 andn scr2, CH_ICPATAG_LBITS, scr2; /* mask off lower */ \
183 cmp scr1, scr2; \
201 clr scr2; \
203 ldxa [scr3 + scr2]ASI_IC_DATA, scr1; /* read ins. data */ \
206 cmp scr2, PN_IC_DATA_REG_SIZE - 8; \
208 add scr2, 8, scr2; \
232 #define GET_ECACHE_DTAG(afar, datap, ec_way, scr1, scr2, scr3) \
236 set ((JP_ECACHE_MAX_SIZE / JP_ECACHE_NWAY) - 1), scr2; \
237 and afar, scr2, scr3; /* get set offset */ \
253 ldxa [scr1]ASI_EC_DATA, scr2; \
254 stxa scr2, [datap]%asi; \
263 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
264 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
265 GET_ECACHE_DTAG(afar, datap, 1, scr1, scr2, scr3); \
266 GET_ECACHE_DTAG(afar, datap, 2, scr1, scr2, scr3); \
267 GET_ECACHE_DTAG(afar, datap, 3, scr1, scr2, scr3); \
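
The Jalapeno/Serrano GET_ECACHE_DTAG masks the AFAR down to a set offset within one way, and GET_ECACHE_DTAGS expands it once per way. A minimal C rendering of that arithmetic, with made-up JP_* geometry and a hypothetical ec_data_read() standing in for the ASI_EC_DATA load:

    #include <stdint.h>
    #include <stdio.h>

    #define JP_ECACHE_MAX_SIZE  (4 * 1024 * 1024)  /* made-up geometry */
    #define JP_ECACHE_NWAY      4

    /* Hypothetical stand-in for the ldxa ASI_EC_DATA load. */
    static uint64_t ec_data_read(int way, uint64_t set_offset)
    {
        return (((uint64_t)way << 32) | set_offset);
    }

    int
    main(void)
    {
        uint64_t afar = 0x7f0012345678ULL;

        /* "get set offset": mask the AFAR down to an offset within one way */
        uint64_t set_offset = afar & ((JP_ECACHE_MAX_SIZE / JP_ECACHE_NWAY) - 1);

        /* GET_ECACHE_DTAGS simply expands GET_ECACHE_DTAG once per way, 0..3 */
        for (int way = 0; way < JP_ECACHE_NWAY; way++)
            (void) printf("way %d: %llx\n", way,
                (unsigned long long)ec_data_read(way, set_offset));
        return (0);
    }
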
274 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
275 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
286 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
289 ldxa [%g0]ASI_AFSR, scr2; \
290 stxa scr2, [datap + (CH_CLO_DATA + CH_CHD_AFSR)]%asi; \
293 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
294 stxa scr2, [%g0]ASI_AFSR; /* they're left as is in AFSR */ \
300 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3)
312 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
318 ldxa [%g0]ASI_AFSR, scr2; \
319 stxa scr2, [datap + (CH_CLO_DATA + CH_CHD_AFSR)]%asi; \
322 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
323 stxa scr2, [%g0]ASI_AFSR; /* they're left as is in AFSR */ \
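
The two GET_AFSR_AFAR variants above (lines 286-294 and 312-323) read the AFSR, record the raw value in the logout area, then write it back with the fatal-error bits cleared; since the AFSR is write-one-to-clear, those bits are left set in hardware for the trap handler. A small C model of that pattern, with a plain variable for the AFSR and made-up slot and mask values:

    #include <stdint.h>
    #include <stdio.h>

    #define CHD_AFSR_SLOT    0                       /* made-up logout layout  */
    #define FATAL_ERR_MASK   0x00000000f0000000ULL   /* made-up fatal-bit mask */

    static uint64_t afsr_hw = 0xf0000005ULL;         /* pretend AFSR contents  */

    /*
     * Hypothetical stand-ins for the ldxa/stxa ASI_AFSR accesses.  The AFSR
     * is write-one-to-clear, so removing the fatal bits from the value that
     * is written back leaves exactly those bits set in the hardware register.
     */
    static uint64_t afsr_read(void)        { return (afsr_hw); }
    static void     afsr_write(uint64_t v) { afsr_hw &= ~v; }

    int
    main(void)
    {
        uint64_t logout[4];

        uint64_t afsr = afsr_read();
        logout[CHD_AFSR_SLOT] = afsr;       /* record the raw AFSR value */

        afsr &= ~FATAL_ERR_MASK;            /* bclr: drop the fatal bits ...     */
        afsr_write(afsr);                   /* ... so only the others are cleared */

        (void) printf("recorded %llx, AFSR is now %llx\n",
            (unsigned long long)logout[CHD_AFSR_SLOT],
            (unsigned long long)afsr_hw);
        return (0);
    }
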
334 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3) \
337 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
338 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
339 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
355 #define GET_ECACHE_DTAG(afar, datap, pn_way, scr1, scr2, scr3) \
358 set (CH_ECACHE_8M_SIZE - 1), scr2; \
359 and scr3, scr2, scr3; /* VA<63:23>=0 */ \
378 ldxa [scr1]ASI_EC_DATA, scr2; \
379 stxa scr2, [datap]%asi; \
401 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2) \
407 ldxa [scr1]ASI_CMP_SHARED, scr2; /* is it running? */ \
408 cmp scr2, PN_BOTH_CORES_RUNNING; \
413 ldxa [scr1]ASI_CMP_PER_CORE, scr2; \
414 and scr2, COREID_MASK, scr2; \
416 sll scr1, scr2, scr2; /* ... we need to park... */ \
420 btst scr1, scr2; /* check our own parked status */ \
424 stxa scr2, [scr1]ASI_CMP_SHARED; /* ... and park it. */ \
428 cmp scr1, scr2; /* ...core is parked according to... */ \
446 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2) \
451 set PN_BOTH_CORES_RUNNING, scr2; /* we want both... */ \
452 stxa scr2, [scr1]ASI_CMP_SHARED; /* ...cores running. */ \
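
A much-simplified C sketch of the Panther park/unpark handshake, with plain variables for the ASI_CMP_SHARED core-running register and the per-core ID register and made-up encodings; the real macro also checks its own parked status and re-checks that the sibling really is parked, which is omitted here:

    #include <stdint.h>
    #include <stdio.h>

    #define PN_BOTH_CORES_RUNNING  0x3     /* made-up encoding */
    #define COREID_MASK            0x1     /* made-up */

    static uint64_t cmp_shared = PN_BOTH_CORES_RUNNING;  /* core-running register */
    static uint64_t cmp_per_core = 0x0;                   /* this core's ID        */

    static void
    park_sibling_sketch(void)
    {
        if (cmp_shared != PN_BOTH_CORES_RUNNING)
            return;                         /* sibling is not running anyway */

        uint64_t my_bit = 1ULL << (cmp_per_core & COREID_MASK);
        cmp_shared = my_bit;                /* leave only ourselves running */
    }

    static void
    unpark_sibling_sketch(void)
    {
        cmp_shared = PN_BOTH_CORES_RUNNING; /* we want both cores running */
    }

    int
    main(void)
    {
        park_sibling_sketch();
        (void) printf("after park:   %llx\n", (unsigned long long)cmp_shared);
        unpark_sibling_sketch();
        (void) printf("after unpark: %llx\n", (unsigned long long)cmp_shared);
        return (0);
    }
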
466 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
468 ldxa [scr1]ASI_AFAR, scr2; \
469 stxa scr2, [datap + (CH_CLO_SDW_DATA + CH_CHD_AFAR)]%asi; \
470 ldxa [scr1]ASI_AFSR, scr2; \
471 stxa scr2, [datap + (CH_CLO_SDW_DATA + CH_CHD_AFSR)]%asi; \
474 ldxa [%g0]ASI_AFSR, scr2; \
475 stxa scr2, [datap + (CH_CLO_DATA + CH_CHD_AFSR)]%asi; \
478 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
479 stxa scr2, [%g0]ASI_AFSR; /* they're left as is in AFSR */ \
486 ldxa [scr1]ASI_AFSR, scr2; \
487 stxa scr2, [datap + (CH_CLO_SDW_DATA + CH_CHD_AFSR_EXT)]%asi; \
489 ldxa [scr1]ASI_AFSR, scr2; \
490 stxa scr2, [datap + (CH_CLO_DATA + CH_CHD_AFSR_EXT)]%asi; \
492 bclr scr1, scr2; /* Clear fatal error bits here, */ \
494 stxa scr2, [scr1]ASI_AFSR; /* as is in AFSR_EXT */ \
507 #define GET_PN_L2_CACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
521 ldxa [scr3 + scr1]ASI_L2_DATA, scr2; /* loop through */ \
522 stxa scr2, [datap]%asi; /* <511:256> of L2 */ \
527 set PN_L2_DATA_ECC_SEL, scr2; /* ECC_sel bit. */ \
528 ldxa [scr3 + scr2]ASI_L2_DATA, scr2; /* Read and record */ \
529 stxa scr2, [datap]%asi; /* ecc of <511:256> */ \
532 ldxa [scr3 + scr1]ASI_L2_DATA, scr2; /* loop through */ \
533 stxa scr2, [datap]%asi; /* <255:0> of L2 */ \
538 set PN_L2_DATA_ECC_SEL, scr2; /* ECC_sel bit. */ \
539 add scr2, PN_L2_ECC_LO_REG, scr2; \
540 ldxa [scr3 + scr2]ASI_L2_DATA, scr2; /* Read and record */ \
541 stxa scr2, [datap]%asi; /* ecc of <255:0>. */ \
543 set PN_L2_SET_SIZE, scr2; \
547 add scr3, scr2, scr3
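
A compact C sketch of the Panther L2 walk, with made-up PN_L2_* values, simplified offsets, and a hypothetical l2_data_read() in place of the diagnostic ASI_L2_DATA loads; it mirrors only the data/ECC ordering and the PN_L2_SET_SIZE stride between ways shown above:

    #include <stdint.h>

    #define PN_L2_NWAYS         4          /* made-up geometry and selector bits; */
    #define PN_L2_SET_SIZE      0x40000    /* the real PN_L2_* values are defined */
    #define PN_L2_DATA_ECC_SEL  0x200000   /* alongside these macros.             */
    #define PN_L2_ECC_LO_REG    0x20

    /* Hypothetical stand-in for the diagnostic ASI_L2_DATA load. */
    static uint64_t l2_data_read(uint64_t addr) { return (addr); }

    static void
    pn_l2_dtags_sketch(uint64_t line, uint64_t *datap)
    {
        for (int way = 0; way < PN_L2_NWAYS; way++) {
            for (uint64_t off = 0; off < 32; off += 8)    /* data <511:256> */
                *datap++ = l2_data_read(line + off);
            *datap++ = l2_data_read(line + PN_L2_DATA_ECC_SEL);   /* its ECC */

            for (uint64_t off = 32; off < 64; off += 8)   /* data <255:0>   */
                *datap++ = l2_data_read(line + off);
            *datap++ = l2_data_read(line + PN_L2_DATA_ECC_SEL + PN_L2_ECC_LO_REG);

            line += PN_L2_SET_SIZE;        /* step to the same set in the next way */
        }
    }

    int
    main(void)
    {
        uint64_t buf[PN_L2_NWAYS * 10];
        pn_l2_dtags_sketch(0x1000, buf);
        return (0);
    }
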
557 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
562 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
563 GET_ECACHE_DTAG(afar, datap, 1, scr1, scr2, scr3); \
564 GET_ECACHE_DTAG(afar, datap, 2, scr1, scr2, scr3); \
565 GET_ECACHE_DTAG(afar, datap, 3, scr1, scr2, scr3); \
567 GET_PN_L2_CACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
571 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
572 GET_ECACHE_WAY_BIT(scr1, scr2); \
574 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
575 GET_ECACHE_WAY_BIT(scr1, scr2); /* restore AFAR */ \
590 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3) \
593 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
594 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
595 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
601 #define GET_ECACHE_WAY_BIT(scr1, scr2) \
602 CPU_INDEX(scr1, scr2); \
605 set cpunodes, scr2; \
606 ld [scr1 + scr2], scr1; \
619 #define GET_ECACHE_DTAG(afar, datap, scr1, scr2, scr3) \
622 set (CH_ECACHE_8M_SIZE - 1), scr2; \
623 and scr3, scr2, scr3; /* VA<63:23>=0 */ \
635 ldxa [scr1]ASI_EC_DATA, scr2; \
636 stxa scr2, [datap]%asi; \
648 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
649 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
659 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
662 ldxa [%g0]ASI_AFSR, scr2; \
663 stxa scr2, [datap + (CH_CLO_DATA + CH_CHD_AFSR)]%asi; \
666 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
667 stxa scr2, [%g0]ASI_AFSR; /* they're left as is in AFSR */ \
678 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
679 GET_ECACHE_DTAG(afar, datap, scr1, scr2, scr3); \
686 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3)
724 #define DO_TL1_CPU_LOGOUT(r_val, afar, t_flags, datap, scr1, scr2, scr3) \
725 setx LOGOUT_INVALID, scr2, scr1; \
726 ldxa [datap + (CH_CLO_DATA + CH_CHD_AFAR)]%asi, scr2; \
727 cmp scr2, scr1; \
731 GET_AFSR_AFAR(datap, afar, scr1, scr2); \
733 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
734 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
735 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
737 GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3); \
782 #define DO_CPU_LOGOUT(r_val, afar, r_or_s, t_flags, scr1, scr2, scr3, scr4) \
785 DO_TL1_CPU_LOGOUT(r_val, afar, t_flags, scr1, scr2, scr3, scr4) \
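
DO_TL1_CPU_LOGOUT first compares the saved AFAR slot against the LOGOUT_INVALID sentinel before collecting anything. A rough C sketch of that gate and the collection order, with a hypothetical structure and collector functions in place of the real logout buffer and GET_* macros; how the real macro reports a busy buffer (presumably through r_val) is reduced here to an early return:

    #include <stdint.h>
    #include <stdio.h>

    #define LOGOUT_INVALID  0xecc0deadbeef0badULL   /* made-up sentinel value */

    struct clo_sketch {                /* hypothetical stand-in for the logout area */
        uint64_t clo_afar;             /* the CH_CLO_DATA + CH_CHD_AFAR slot */
        /* ... AFSR, E$/D$/I$ tag data, shadow copies ... */
    };

    /* Hypothetical collectors standing in for the GET_* macros above. */
    static void get_afsr_afar(struct clo_sketch *p)   { p->clo_afar = 0x1234; }
    static void get_cache_dtags(struct clo_sketch *p) { (void)p; }

    static int
    do_cpu_logout_sketch(struct clo_sketch *datap)
    {
        /*
         * If the saved AFAR slot no longer holds the sentinel, the previous
         * logout has not been consumed yet; report that instead of collecting.
         */
        if (datap->clo_afar != LOGOUT_INVALID)
            return (-1);

        get_afsr_afar(datap);       /* AFSR/AFAR first */
        get_cache_dtags(datap);     /* then E$, D$ and I$ tags, then shadow data */
        return (0);
    }

    int
    main(void)
    {
        struct clo_sketch clo = { .clo_afar = LOGOUT_INVALID };
        (void) printf("first:  %d\n", do_cpu_logout_sketch(&clo));
        (void) printf("second: %d\n", do_cpu_logout_sketch(&clo));
        return (0);
    }
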
979 #define PN_L2_FLUSHALL(scr1, scr2, scr3) \
985 set PN_L2_LINESIZE, scr2; \
988 subcc scr1, scr2, scr1; \
993 #define PN_L2_FLUSHALL(scr1, scr2, scr3)
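
A minimal C sketch of the countdown that the subcc/PN_L2_LINESIZE pair in PN_L2_FLUSHALL implements, with made-up cache geometry and a hypothetical l2_flush_line() in place of whatever per-line flush the macro actually issues:

    #include <stdint.h>

    #define PN_L2_SIZE      (2 * 1024 * 1024)   /* made-up geometry */
    #define PN_L2_LINESIZE  64                  /* made-up line size */

    /* Hypothetical stand-in for the per-line flush operation. */
    static void l2_flush_line(int64_t off) { (void)off; }

    int
    main(void)
    {
        /*
         * Count down one line at a time, the way the subcc/branch pair
         * above walks scr1 from the cache size toward zero.
         */
        for (int64_t off = PN_L2_SIZE - PN_L2_LINESIZE; off >= 0;
            off -= PN_L2_LINESIZE)
            l2_flush_line(off);
        return (0);
    }
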