Lines Matching refs:scr1
59 #define GET_CPU_PRIVATE_PTR(off_reg, scr1, scr2, label) \
60 CPU_ADDR(scr1, scr2); \
61 ldn [scr1 + CPU_PRIVATE], scr1; \
62 cmp scr1, 0; \
65 add scr1, off_reg, scr1
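
Taken together, the matched lines above show GET_CPU_PRIVATE_PTR resolving the calling CPU's private-data pointer (CPU_ADDR, then the CPU_PRIVATE load), branching to `label` when that pointer is still NULL, and otherwise adding the caller-supplied offset. A minimal C sketch of that logic; curcpu_private() and get_cpu_private_ptr() are illustrative names, not interfaces from this header:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical stand-in for CPU_ADDR plus the CPU_PRIVATE load. */
static void *
curcpu_private(void)
{
	return (NULL);			/* illustration only */
}

/* Sketch of GET_CPU_PRIVATE_PTR(off_reg, scr1, scr2, label). */
static void *
get_cpu_private_ptr(size_t off)
{
	uint8_t *priv = curcpu_private();

	if (priv == NULL)		/* "cmp scr1, 0" -> take the label exit */
		return (NULL);
	return (priv + off);		/* "add scr1, off_reg, scr1" */
}
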
78 #define GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3) \
86 ldxa [scr3]ASI_DC_TAG, scr1; /* read tag */ \
87 cmp scr1, scr2; \
91 stxa scr1, [datap + CH_DC_TAG]%asi; /* store tag */ \
93 ldxa [scr3]ASI_DC_UTAG, scr1; /* read utag */ \
95 stxa scr1, [datap + CH_DC_UTAG]%asi; \
96 ldxa [scr3]ASI_DC_SNP_TAG, scr1; /* read snoop tag */ \
97 stxa scr1, [datap + CH_DC_SNTAG]%asi; \
102 ldxa [scr3 + scr2]ASI_DC_DATA, scr1; /* read data */ \
104 stxa scr1, [datap]%asi; \
116 mov 1, scr1; \
117 sll scr1, PN_DC_DATA_PARITY_BIT_SHIFT, scr1; \
118 or scr3, scr1, scr3; /* add DC_data_parity bit to index */ \
121 ldxa [scr3 + scr2]ASI_DC_DATA, scr1; /* read parity bits */ \
123 stba scr1, [datap]%asi; \
131 set CH_DCACHE_IDX_INCR, scr1; /* incr. idx (scr3) */ \
132 add scr3, scr1, scr3; \
133 set CH_DCACHE_IDX_LIMIT, scr1; /* done? */ \
134 cmp scr3, scr1; \
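
The GET_DCACHE_DTAG lines above walk the D-cache diagnostic space: each index's tag is read through ASI_DC_TAG and compared against the value derived from the AFAR; on a match the tag, utag, snoop tag, data words and (for Panther, selected via PN_DC_DATA_PARITY_BIT_SHIFT) the parity byte are stored into the logout area, and the index is advanced by CH_DCACHE_IDX_INCR until CH_DCACHE_IDX_LIMIT. The I-cache walk in GET_ICACHE_DTAG below has the same shape against ASI_IC_TAG/ASI_IC_DATA. A hedged C sketch of that loop; the accessor names and the size parameters are stand-ins, not real kernel interfaces:

#include <stdint.h>

/* Hypothetical stand-ins for the ldxa/stxa diagnostic accesses against
 * ASI_DC_TAG / ASI_DC_DATA and the %asi stores into the logout area. */
static uint64_t dc_diag_read(uint64_t va)             { (void)va; return (0); }
static void     logout_store(uint64_t *p, uint64_t v) { *p = v; }

/* Sketch of the GET_DCACHE_DTAG walk. afar_tag is the comparison value
 * derived from the AFAR; idx_incr and idx_limit play the role of
 * CH_DCACHE_IDX_INCR and CH_DCACHE_IDX_LIMIT. */
static void
dcache_capture(uint64_t afar_tag, uint64_t idx_incr, uint64_t idx_limit,
    uint64_t *logout)
{
	for (uint64_t idx = 0; idx < idx_limit; idx += idx_incr) {
		uint64_t tag = dc_diag_read(idx);	/* ASI_DC_TAG read */

		if (tag != afar_tag)			/* "cmp scr1, scr2" */
			continue;			/* line not involved */

		logout_store(&logout[0], tag);		/* CH_DC_TAG slot */
		/* the utag, snoop tag, data words and (on Panther) the
		 * DC_data_parity byte are saved the same way. */
	}
}
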
158 #define GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3) \
159 ldxa [%g0]ASI_DCU, scr1; \
160 btst DCU_IC, scr1; /* is Icache enabled? */ \
181 ldxa [scr3]ASI_IC_TAG, scr1; /* read pa tag */ \
182 andn scr1, CH_ICPATAG_LBITS, scr1; /* mask off lower */ \
183 cmp scr1, scr2; \
187 stxa scr1, [datap + CH_IC_PATAG]%asi; /* store pa tag */ \
189 ldxa [scr3]ASI_IC_TAG, scr1; \
191 stxa scr1, [datap + CH_IC_UTAG]%asi; \
192 ldxa [scr3]ASI_IC_TAG, scr1; /* read upper tag */ \
194 stxa scr1, [datap + CH_IC_UPPER]%asi; \
195 ldxa [scr3]ASI_IC_TAG, scr1; /* read lower tag */ \
197 stxa scr1, [datap + CH_IC_LOWER]%asi; \
198 ldxa [scr3]ASI_IC_SNP_TAG, scr1; /* read snoop tag */ \
199 stxa scr1, [datap + CH_IC_SNTAG]%asi; \
203 ldxa [scr3 + scr2]ASI_IC_DATA, scr1; /* read ins. data */ \
204 stxa scr1, [datap]%asi; \
212 set CH_ICACHE_IDX_INCR, scr1; /* incr. idx (scr3) */ \
213 add scr3, scr1, scr3; \
214 set PN_ICACHE_IDX_LIMIT, scr1; /* done? */ \
215 cmp scr3, scr1; \
232 #define GET_ECACHE_DTAG(afar, datap, ec_way, scr1, scr2, scr3) \
233 mov ec_way, scr1; \
234 and scr1, JP_ECACHE_NWAY - 1, scr1; /* mask E$ way bits */ \
235 sllx scr1, JP_EC_TAG_DATA_WAY_SHIFT, scr1; \
239 or scr3, scr1, scr3; /* or WAY bits */ \
245 ldxa [scr3]ASI_EC_DIAG, scr1; /* get E$ tag */ \
247 stxa scr1, [datap + CH_EC_TAG]%asi; \
251 clr scr1; \
253 ldxa [scr1]ASI_EC_DATA, scr2; \
256 cmp scr1, CH_ECACHE_STGREG_TOTALSIZE - 8; \
258 add scr1, 8, scr1; \
263 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
264 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
265 GET_ECACHE_DTAG(afar, datap, 1, scr1, scr2, scr3); \
266 GET_ECACHE_DTAG(afar, datap, 2, scr1, scr2, scr3); \
267 GET_ECACHE_DTAG(afar, datap, 3, scr1, scr2, scr3); \
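
In this variant, GET_ECACHE_DTAG masks the requested way into the index (JP_ECACHE_NWAY ways, positioned with JP_EC_TAG_DATA_WAY_SHIFT), reads the E$ tag through ASI_EC_DIAG into the CH_EC_TAG slot, and then copies the staging registers eight bytes at a time up to CH_ECACHE_STGREG_TOTALSIZE; GET_ECACHE_DTAGS simply invokes it for ways 0 through 3. A hedged C sketch of the per-way capture, with the constants passed in as parameters and the accessor names purely illustrative:

#include <stdint.h>

/* Hypothetical stand-ins for the ASI_EC_DIAG / ASI_EC_DATA loads. */
static uint64_t ec_tag_rd(uint64_t va)   { (void)va; return (0); }
static uint64_t ec_data_rd(uint64_t off) { (void)off; return (0); }

/* Sketch of GET_ECACHE_DTAG for a single way; nway, way_shift and
 * stgreg_total stand in for JP_ECACHE_NWAY, JP_EC_TAG_DATA_WAY_SHIFT
 * and CH_ECACHE_STGREG_TOTALSIZE. GET_ECACHE_DTAGS calls this for
 * ways 0 through 3. */
static unsigned
ecache_capture_way(uint64_t index, unsigned way, unsigned nway,
    unsigned way_shift, uint64_t stgreg_total, uint64_t *logout)
{
	uint64_t va = index | ((uint64_t)(way & (nway - 1)) << way_shift);
	unsigned slot = 0;

	logout[slot++] = ec_tag_rd(va);		/* E$ tag -> CH_EC_TAG */

	/* "clr scr1", then step through the staging registers 8 bytes
	 * at a time until the total staging-register size is reached. */
	for (uint64_t off = 0; off + 8 <= stgreg_total; off += 8)
		logout[slot++] = ec_data_rd(off);
	return (slot);
}
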
274 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
275 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
286 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
291 sethi %hh(C_AFSR_FATAL_ERRS), scr1; \
292 sllx scr1, 32, scr1; \
293 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
300 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3)
312 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
313 set ASI_MCU_AFAR2_VA, scr1; \
314 ldxa [scr1]ASI_MCU_CTRL, afar; \
320 sethi %hh(C_AFSR_FATAL_ERRS), scr1; \
321 sllx scr1, 32, scr1; \
322 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
334 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3) \
337 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
338 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
339 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
355 #define GET_ECACHE_DTAG(afar, datap, pn_way, scr1, scr2, scr3) \
360 mov pn_way, scr1; /* panther L3$ is 4-way so we ... */ \
361 sllx scr1, PN_L3_WAY_SHIFT, scr1; /* need to mask... */ \
362 or scr3, scr1, scr3; /* in the way bits <24:23>. */ \
367 ldxa [scr3]ASI_EC_DIAG, scr1; /* get E$ tag */ \
368 stxa scr1, [datap + CH_EC_TAG]%asi; \
369 set CHP_ECACHE_IDX_TAG_ECC, scr1; \
370 or scr3, scr1, scr1; \
371 ldxa [scr1]ASI_EC_DIAG, scr1; /* get E$ tag ECC */ \
372 stxa scr1, [datap + CH_EC_TAG_ECC]%asi; \
376 clr scr1; \
378 ldxa [scr1]ASI_EC_DATA, scr2; \
381 cmp scr1, CH_ECACHE_STGREG_TOTALSIZE - 8; \
383 add scr1, 8, scr1; \
401 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2) \
402 GET_CPU_IMPL(scr1); \
403 cmp scr1, PANTHER_IMPL; /* only park for panthers */ \
406 set ASI_CORE_RUNNING_STATUS, scr1; /* check other core */ \
407 ldxa [scr1]ASI_CMP_SHARED, scr2; /* is it running? */ \
412 set ASI_CORE_ID, scr1; \
413 ldxa [scr1]ASI_CMP_PER_CORE, scr2; \
415 or %g0, 1, scr1; /* find out which core... */ \
416 sll scr1, scr2, scr2; /* ... we need to park... */ \
418 set ASI_CORE_RUNNING_RW, scr1; \
419 ldxa [scr1]ASI_CMP_SHARED, scr1; /* ...but are we? */ \
420 btst scr1, scr2; /* check our own parked status */ \
423 set ASI_CORE_RUNNING_RW, scr1; /* else proceed... */ \
424 stxa scr2, [scr1]ASI_CMP_SHARED; /* ... and park it. */ \
426 set ASI_CORE_RUNNING_STATUS, scr1; /* spin until... */ \
427 ldxa [scr1]ASI_CMP_SHARED, scr1; /* ... the other... */ \
428 cmp scr1, scr2; /* ...core is parked according to... */ \
446 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2) \
450 set ASI_CORE_RUNNING_RW, scr1; \
452 stxa scr2, [scr1]ASI_CMP_SHARED; /* ...cores running. */ \
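
The Panther-only PARK_SIBLING_CORE lines above read the shared core-running status, build a mask holding only the calling core's bit (from ASI_CORE_ID), make sure the caller itself is not the core that is parked, write that mask to the core-running RW register so only the caller stays running, and then spin on the status register until the sibling is reported parked; UNPARK_SIBLING_CORE writes the both-cores-running value back. A hedged C sketch of that protocol; the accessors and the both_running parameter are stand-ins, and the PANTHER_IMPL check and dcucr_reg bookkeeping are omitted:

#include <stdint.h>

/* Hypothetical stand-ins for the ASI_CMP_SHARED / ASI_CMP_PER_CORE
 * accesses (ASI_CORE_RUNNING_STATUS, ASI_CORE_RUNNING_RW, ASI_CORE_ID). */
static uint64_t core_running_status(void)        { return (0); }
static uint64_t core_running_rw_read(void)       { return (0); }
static void     core_running_rw_write(uint64_t v) { (void)v; }
static unsigned core_id(void)                     { return (0); }

/* Sketch of PARK_SIBLING_CORE; returns 1 if the sibling was parked here
 * (so UNPARK_SIBLING_CORE must run later), 0 otherwise. */
static int
park_sibling_core(uint64_t both_running)
{
	uint64_t self = 1ull << core_id();	/* "or %g0, 1" + "sll" */

	if (core_running_status() != both_running)
		return (0);			/* sibling not running: done */
	while ((core_running_rw_read() & self) == 0)
		;				/* we are parked: wait until not */
	core_running_rw_write(self);		/* only we stay running: parks sibling */
	while (core_running_status() != self)
		;				/* spin until the status reg confirms */
	return (1);
}

/* Sketch of UNPARK_SIBLING_CORE: put both cores back in the running set. */
static void
unpark_sibling_core(uint64_t both_running)
{
	core_running_rw_write(both_running);	/* "...cores running." */
}
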
466 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
467 set ASI_SHADOW_REG_VA, scr1; \
468 ldxa [scr1]ASI_AFAR, scr2; \
470 ldxa [scr1]ASI_AFSR, scr2; \
476 sethi %hh(C_AFSR_FATAL_ERRS), scr1; \
477 sllx scr1, 32, scr1; \
478 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
481 GET_CPU_IMPL(scr1); \
482 cmp scr1, PANTHER_IMPL; \
485 set ASI_SHADOW_AFSR_EXT_VA, scr1; /* shadow AFSR_EXT */ \
486 ldxa [scr1]ASI_AFSR, scr2; \
488 set ASI_AFSR_EXT_VA, scr1; /* primary AFSR_EXT */ \
489 ldxa [scr1]ASI_AFSR, scr2; \
491 set C_AFSR_EXT_FATAL_ERRS, scr1; \
492 bclr scr1, scr2; /* Clear fatal error bits here, */ \
493 set ASI_AFSR_EXT_VA, scr1; /* so they're left */ \
494 stxa scr2, [scr1]ASI_AFSR; /* as is in AFSR_EXT */ \
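
GET_AFSR_AFAR in this section captures the shadow and primary AFAR/AFSR (plus, on Panther, the AFSR_EXT registers) and then writes the captured value back with the C_AFSR_FATAL_ERRS / C_AFSR_EXT_FATAL_ERRS bits removed: since the AFSR clears on a write of 1, that scrubs only the non-fatal bits and leaves the fatal ones latched for the trap handler, which is what the "Clear fatal error bits here, so ... left as is" comments describe. A hedged C sketch of that scrub, with illustrative accessor names and the fatal mask passed in:

#include <stdint.h>

/* Hypothetical stand-ins for the AFAR/AFSR ASI accesses; the AFSR is
 * write-one-to-clear, which is what the fatal-bit dance relies on. */
static uint64_t afar_read(void)              { return (0); }
static uint64_t afsr_read(void)              { return (0); }
static void     afsr_write1clear(uint64_t v) { (void)v; }

/* fatal_mask plays the role of C_AFSR_FATAL_ERRS (shifted up 32 bits
 * by the sethi %hh / sllx pair in the macro). */
static void
get_afsr_afar(uint64_t fatal_mask, uint64_t *logout_afsr, uint64_t *logout_afar)
{
	*logout_afar = afar_read();
	*logout_afsr = afsr_read();

	/* Clear only the non-fatal bits we just logged: dropping the fatal
	 * bits from the write-back value leaves them set in the AFSR so the
	 * trap handler can still see them. */
	afsr_write1clear(*logout_afsr & ~fatal_mask);
}
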
507 #define GET_PN_L2_CACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
509 set PN_L2_INDEX_MASK, scr1; \
510 and scr3, scr1, scr3; \
516 ldxa [scr3]ASI_L2_TAG, scr1; /* read the L2$ tag */ \
517 stxa scr1, [datap + CH_EC_TAG]%asi; \
519 clr scr1; \
521 ldxa [scr3 + scr1]ASI_L2_DATA, scr2; /* loop through */ \
524 cmp scr1, (PN_L2_LINESIZE / 2) - 8; /* it in the cpu */ \
526 add scr1, 8, scr1; \
532 ldxa [scr3 + scr1]ASI_L2_DATA, scr2; /* loop through */ \
535 cmp scr1, PN_L2_LINESIZE - 8; /* it in the cpu */ \
537 add scr1, 8, scr1; \
544 set PN_L2_MAX_SET, scr1; \
545 cmp scr1, scr3; /* more ways to try for this line? */ \
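
GET_PN_L2_CACHE_DTAGS masks the AFAR with PN_L2_INDEX_MASK to get the L2 index, then for each way that can hold that index reads the tag through ASI_L2_TAG into CH_EC_TAG and copies the PN_L2_LINESIZE bytes of line data through ASI_L2_DATA in 8-byte steps (the macro does this in two halves of PN_L2_LINESIZE / 2), advancing to the next way until PN_L2_MAX_SET. A hedged C sketch, collapsed into one loop, with the constants and the per-way step passed in as parameters:

#include <stdint.h>

/* Hypothetical stand-ins for the ASI_L2_TAG / ASI_L2_DATA loads. */
static uint64_t l2_tag_rd(uint64_t va)  { (void)va; return (0); }
static uint64_t l2_data_rd(uint64_t va) { (void)va; return (0); }

/* index_mask, linesize, way_step and max_set stand in for
 * PN_L2_INDEX_MASK, PN_L2_LINESIZE, the per-way index increment and
 * PN_L2_MAX_SET in the real macro. */
static unsigned
pn_l2_capture(uint64_t afar, uint64_t index_mask, uint64_t linesize,
    uint64_t way_step, uint64_t max_set, uint64_t *logout)
{
	unsigned slot = 0;

	for (uint64_t idx = afar & index_mask; idx <= max_set; idx += way_step) {
		logout[slot++] = l2_tag_rd(idx);	/* CH_EC_TAG slot */
		for (uint64_t off = 0; off + 8 <= linesize; off += 8)
			logout[slot++] = l2_data_rd(idx + off);
	}
	return (slot);
}
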
557 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
558 GET_CPU_IMPL(scr1); \
559 cmp scr1, PANTHER_IMPL; \
562 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
563 GET_ECACHE_DTAG(afar, datap, 1, scr1, scr2, scr3); \
564 GET_ECACHE_DTAG(afar, datap, 2, scr1, scr2, scr3); \
565 GET_ECACHE_DTAG(afar, datap, 3, scr1, scr2, scr3); \
567 GET_PN_L2_CACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
571 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
572 GET_ECACHE_WAY_BIT(scr1, scr2); \
573 xor afar, scr1, afar; \
574 GET_ECACHE_DTAG(afar, datap, 0, scr1, scr2, scr3); \
575 GET_ECACHE_WAY_BIT(scr1, scr2); /* restore AFAR */ \
576 xor afar, scr1, afar; \
590 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3) \
593 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
594 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
595 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
601 #define GET_ECACHE_WAY_BIT(scr1, scr2) \
602 CPU_INDEX(scr1, scr2); \
603 mulx scr1, CPU_NODE_SIZE, scr1; \
604 add scr1, ECACHE_SIZE, scr1; \
606 ld [scr1 + scr2], scr1; \
607 srlx scr1, 1, scr1
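
The CHEETAH_PLUS GET_ECACHE_DTAGS above dispatches on GET_CPU_IMPL: Panther captures L3 ways 0 through 3 plus the on-chip L2 (GET_PN_L2_CACHE_DTAGS); the two-way external E$ case captures the way addressed by the AFAR, xors the E$ way-select bit into the AFAR, captures the other way, and xors again to restore the AFAR. GET_ECACHE_WAY_BIT produces that bit as half the per-CPU ECACHE_SIZE taken from cpunodes ("srlx scr1, 1"). A small C sketch of the xor-flip; capture_way() is an illustrative stand-in for GET_ECACHE_DTAG:

#include <stdint.h>

static void capture_way(uint64_t afar) { (void)afar; }	/* GET_ECACHE_DTAG */

/* GET_ECACHE_WAY_BIT: half the E$ size selects the other way. */
static uint64_t
ecache_way_bit(uint64_t ecache_size)
{
	return (ecache_size >> 1);	/* "srlx scr1, 1, scr1" */
}

/* Two-way external E$: capture both ways that could hold the AFAR line
 * by toggling the way-select bit, then hand the AFAR back unchanged. */
static void
capture_both_ways(uint64_t afar, uint64_t ecache_size)
{
	uint64_t way_bit = ecache_way_bit(ecache_size);

	capture_way(afar);		/* way selected by the AFAR itself */
	afar ^= way_bit;		/* flip to the other way */
	capture_way(afar);
	afar ^= way_bit;		/* xor back; in the macro, afar is a
					 * live register that must be restored */
}
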
619 #define GET_ECACHE_DTAG(afar, datap, scr1, scr2, scr3) \
628 ldxa [scr3]ASI_EC_DIAG, scr1; /* get E$ tag */ \
629 stxa scr1, [datap + CH_EC_TAG]%asi; \
633 clr scr1; \
635 ldxa [scr1]ASI_EC_DATA, scr2; \
638 cmp scr1, CH_ECACHE_STGREG_TOTALSIZE - 8; \
640 add scr1, 8, scr1; \
648 #define PARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
649 #define UNPARK_SIBLING_CORE(dcucr_reg, scr1, scr2)
659 #define GET_AFSR_AFAR(datap, afar, scr1, scr2) \
664 sethi %hh(C_AFSR_FATAL_ERRS), scr1; \
665 sllx scr1, 32, scr1; \
666 bclr scr1, scr2; /* Clear fatal error bits here, so */ \
678 #define GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3) \
679 GET_ECACHE_DTAG(afar, datap, scr1, scr2, scr3); \
686 #define GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3)
724 #define DO_TL1_CPU_LOGOUT(r_val, afar, t_flags, datap, scr1, scr2, scr3) \
725 setx LOGOUT_INVALID, scr2, scr1; \
727 cmp scr2, scr1; \
731 GET_AFSR_AFAR(datap, afar, scr1, scr2); \
733 GET_ECACHE_DTAGS(afar, datap, scr1, scr2, scr3); \
734 GET_DCACHE_DTAG(afar, datap, scr1, scr2, scr3); \
735 GET_ICACHE_DTAG(afar, datap, scr1, scr2, scr3); \
737 GET_SHADOW_DATA(afar, datap, scr1, scr2, scr3); \
782 #define DO_CPU_LOGOUT(r_val, afar, r_or_s, t_flags, scr1, scr2, scr3, scr4) \
783 GET_CPU_PRIVATE_PTR(r_or_s, scr1, scr3, 7f); /* can't use scr2/4 */ \
785 DO_TL1_CPU_LOGOUT(r_val, afar, t_flags, scr1, scr2, scr3, scr4) \
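
DO_TL1_CPU_LOGOUT compares the logout area's saved AFAR against the LOGOUT_INVALID sentinel to decide whether the area is free, and if so captures AFSR/AFAR, the E$, D$ and I$ tags/data, and the shadow data into it; DO_CPU_LOGOUT first resolves the per-CPU logout pointer with GET_CPU_PRIVATE_PTR (taking the label exit when there is no private area) and then runs the TL1 variant. The exact r_val convention is not visible in the matched lines, so the sketch below just reports busy versus captured; the struct layout and sentinel value are illustrative only:

#include <stdint.h>

struct logout_area {
	uint64_t afar;		/* doubles as the in-use marker */
	uint64_t data[1];	/* tag/data capture space (size elided) */
};

/* Illustrative sentinel; plays the role of LOGOUT_INVALID. */
#define LOGOUT_INVALID_SENTINEL	(~0ull)

static void capture_afsr_afar(struct logout_area *p) { (void)p; }
static void capture_ecache(struct logout_area *p)    { (void)p; }
static void capture_dcache(struct logout_area *p)    { (void)p; }
static void capture_icache(struct logout_area *p)    { (void)p; }
static void capture_shadow(struct logout_area *p)    { (void)p; }

/* Sketch of DO_TL1_CPU_LOGOUT: returns 0 on capture, nonzero if the
 * area still holds an earlier, unprocessed logout. */
static int
do_tl1_cpu_logout(struct logout_area *p)
{
	if (p->afar != LOGOUT_INVALID_SENTINEL)	/* "cmp scr2, scr1" */
		return (-1);			/* area busy: skip capture */

	capture_afsr_afar(p);			/* GET_AFSR_AFAR    */
	capture_ecache(p);			/* GET_ECACHE_DTAGS */
	capture_dcache(p);			/* GET_DCACHE_DTAG  */
	capture_icache(p);			/* GET_ICACHE_DTAG  */
	capture_shadow(p);			/* GET_SHADOW_DATA  */
	return (0);
}
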
979 #define PN_L2_FLUSHALL(scr1, scr2, scr3) \
984 set PN_L2_SIZE, scr1; \
988 subcc scr1, scr2, scr1; \
990 ldxa [scr1 + scr3]ASI_L2_TAG, %g0; \
993 #define PN_L2_FLUSHALL(scr1, scr2, scr3)
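
PN_L2_FLUSHALL (the second, empty definition is the non-Panther stub) steps backwards through the whole PN_L2_SIZE L2 one line at a time and issues a diagnostic ASI_L2_TAG load at each index, offset by the flush base kept in scr3 (set on a line that does not match scr1), displacing every line. A hedged C sketch with the sizes and the flush base passed in as parameters:

#include <stdint.h>

/* Stand-in for the "ldxa [scr1 + scr3]ASI_L2_TAG, %g0" diagnostic load
 * whose side effect displaces the indexed L2 line. */
static void l2_displacement_load(uint64_t va) { (void)va; }

/* Sketch of PN_L2_FLUSHALL: l2_size and linesize play the role of
 * PN_L2_SIZE and PN_L2_LINESIZE; flush_base is the value held in scr3. */
static void
pn_l2_flushall(uint64_t l2_size, uint64_t linesize, uint64_t flush_base)
{
	for (uint64_t off = l2_size; off >= linesize; off -= linesize)
		l2_displacement_load(flush_base + (off - linesize));
}
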