Lines Matching +full:interleave +full:- +full:mode

32 #define	BITX(u, h, l)	(((u) >> (l)) & ((1LU << ((h) - (l) + 1LU)) - 1LU))
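The BITX() macro above is the only bit-field helper behind most of the decode math that follows. As a standalone illustration (not part of the driver), this is how it slices an address:

#include <stdint.h>
#include <stdio.h>

#define	BITX(u, h, l)	(((u) >> (l)) & ((1LU << ((h) - (l) + 1LU)) - 1LU))

int
main(void)
{
	uint64_t pa = 0x12345a80ULL;

	/* Bits 8:6, a typical 3-bit SAD interleave field: prints 0x2. */
	printf("0x%llx\n", (unsigned long long)BITX(pa, 8, 6));
	/* Bits 18:16 of the same address: prints 0x4. */
	printf("0x%llx\n", (unsigned long long)BITX(pa, 18, 16));
	return (0);
}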
44 #define IMC_DECODE_CONV_MAX 0x00009ffffULL /* 640 KiB - 1 */
85 if (dec->ids_pa >= imc_legacy_ranges[i].ilr_base && in imc_decode_addr_resvd()
86 dec->ids_pa < end) { in imc_decode_addr_resvd()
87 dec->ids_fail = IMC_DECODE_F_LEGACY_RANGE; in imc_decode_addr_resvd()
88 dec->ids_fail_data = i; in imc_decode_addr_resvd()
96 * While we technically have this information on a per-socket basis, we in imc_decode_addr_resvd()
101 sad = &imc->imc_sockets[0].isock_sad; in imc_decode_addr_resvd()
102 if (sad->isad_valid != IMC_SAD_V_VALID) { in imc_decode_addr_resvd()
103 dec->ids_fail = IMC_DECODE_F_BAD_SAD; in imc_decode_addr_resvd()
116 if (dec->ids_pa <= IMC_DECODE_CONV_MAX) { in imc_decode_addr_resvd()
120 if (dec->ids_pa >= IMC_DECODE_LOW_BASE && in imc_decode_addr_resvd()
121 dec->ids_pa < sad->isad_tolm) { in imc_decode_addr_resvd()
125 if (dec->ids_pa >= IMC_DECODE_HIGH_BASE && in imc_decode_addr_resvd()
126 dec->ids_pa < sad->isad_tohm) { in imc_decode_addr_resvd()
133 dec->ids_fail = IMC_DECODE_F_OUTSIDE_DRAM; in imc_decode_addr_resvd()
142 switch (rule->isr_imode) { in imc_decode_sad_interleave()
144 if (rule->isr_a7mode) { in imc_decode_sad_interleave()
152 if (rule->isr_a7mode) { in imc_decode_sad_interleave()
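The isr_imode / isr_a7mode switch above picks which address bits form the SAD interleave index. Below is a hedged sketch of the common Xeon scheme assumed here (base index from PA bits 8:6, shifted up to 9:7 when A7 mode is set, with an XOR flavor folding in bits 18:16); the driver's enum values and exact handling are not visible from these hits, and the struct below is a stand-in, not the driver's type:

#include <stdint.h>
#include <stdio.h>

#define	BITX(u, h, l)	(((u) >> (l)) & ((1LU << ((h) - (l) + 1LU)) - 1LU))

/* Hypothetical stand-ins for the isr_imode / isr_a7mode fields above. */
typedef struct {
	int	use_xor;	/* an 8t6XOR-style interleave mode */
	int	a7mode;		/* A7 ("address bit 7") mode enabled */
} sad_rule_sketch_t;

static unsigned int
sad_interleave_index(const sad_rule_sketch_t *rule, uint64_t pa)
{
	unsigned int idx;

	/* A7 mode moves the 3-bit interleave field up by one bit. */
	if (rule->a7mode)
		idx = (unsigned int)BITX(pa, 9, 7);
	else
		idx = (unsigned int)BITX(pa, 8, 6);

	/* The XOR mode mixes in higher address bits to spread hot spots. */
	if (rule->use_xor)
		idx ^= (unsigned int)BITX(pa, 18, 16);

	return (idx);
}

int
main(void)
{
	sad_rule_sketch_t r = { 1, 0 };

	printf("%u\n", sad_interleave_index(&r, 0x12345a80ULL));	/* 6 */
	return (0);
}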
186 const imc_socket_t *socket = &imc->imc_sockets[0]; in imc_decode_sad()
187 const imc_sad_t *sad = &socket->isock_sad; in imc_decode_sad()
196 for (rule = NULL, i = 0, base = 0; i < sad->isad_nrules; i++) { in imc_decode_sad()
197 rule = &sad->isad_rules[i]; in imc_decode_sad()
199 if (rule->isr_enable && dec->ids_pa >= base && in imc_decode_sad()
200 dec->ids_pa < rule->isr_limit) { in imc_decode_sad()
204 base = rule->isr_limit; in imc_decode_sad()
207 if (rule == NULL || i == sad->isad_nrules) { in imc_decode_sad()
208 dec->ids_fail = IMC_DECODE_F_NO_SAD_RULE; in imc_decode_sad()
215 dec->ids_sad = sad; in imc_decode_sad()
216 dec->ids_sad_rule = rule; in imc_decode_sad()
220 * corresponding target based on its mode, etc. The way we do this in imc_decode_sad()
224 * interleave list. in imc_decode_sad()
226 ileaveidx = imc_decode_sad_interleave(rule, dec->ids_pa); in imc_decode_sad()
227 if (ileaveidx >= rule->isr_ntargets) { in imc_decode_sad()
228 dec->ids_fail = IMC_DECODE_F_BAD_SAD_INTERLEAVE; in imc_decode_sad()
229 dec->ids_fail_data = ileaveidx; in imc_decode_sad()
232 ileavetgt = rule->isr_targets[ileaveidx]; in imc_decode_sad()
233 if (imc->imc_gen >= IMC_GEN_SKYLAKE && in imc_decode_sad()
236 * If we're in this case, the interleave rule said we had a in imc_decode_sad()
243 dec->ids_fail = IMC_DECODE_F_SAD_SEARCH_LOOP; in imc_decode_sad()
247 for (i = 0; i < imc->imc_nsockets; i++) { in imc_decode_sad()
248 if (imc->imc_sockets[i].isock_valid == in imc_decode_sad()
250 imc->imc_sockets[i].isock_nodeid == nodeid) { in imc_decode_sad()
251 socket = &imc->imc_sockets[i]; in imc_decode_sad()
252 sad = &imc->imc_sockets[i].isock_sad; in imc_decode_sad()
258 dec->ids_fail = IMC_DECODE_F_BAD_REMOTE_MC_ROUTE; in imc_decode_sad()
259 dec->ids_fail_data = nodeid; in imc_decode_sad()
269 if (rule->isr_need_mod3) { in imc_decode_sad()
273 switch (rule->isr_mod_mode) { in imc_decode_sad()
275 addr = dec->ids_pa >> 6; in imc_decode_sad()
278 addr = dec->ids_pa >> 8; in imc_decode_sad()
281 addr = dec->ids_pa >> 12; in imc_decode_sad()
284 dec->ids_fail = IMC_DECODE_F_SAD_BAD_MOD; in imc_decode_sad()
288 switch (rule->isr_mod_type) { in imc_decode_sad()
307 dec->ids_fail = IMC_DECODE_F_SAD_BAD_MOD; in imc_decode_sad()
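The isr_need_mod3 path above exists because a three-way interleave cannot be expressed as a simple power-of-two bit field; instead the address is shifted by the configured granularity (64 bytes, 256 bytes, or 4 KiB per the >> 6, >> 8, and >> 12 cases) and reduced modulo 3 (or modulo 2 for the mixed types). For example, with the 64-byte granularity, (pa >> 6) % 3 sends addresses 0x0, 0x40, and 0x80 to selectors 0, 1, and 2 respectively; how the driver folds that selector into the final interleave target is not visible from these hits.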
314 switch (imc->imc_gen) { in imc_decode_sad()
318 * interleave target is always the node id. in imc_decode_sad()
328 * On these generations, the interleave NodeID in the SAD in imc_decode_sad()
339 * On Skylake generation systems we take the interleave target in imc_decode_sad()
344 nodeid = socket->isock_nodeid; in imc_decode_sad()
346 dec->ids_fail = IMC_DECODE_F_BAD_SAD_INTERLEAVE; in imc_decode_sad()
347 dec->ids_fail_data = ileavetgt; in imc_decode_sad()
351 if (ileavetgt > sad->isad_mcroute.ismc_nroutes) { in imc_decode_sad()
352 dec->ids_fail = IMC_DECODE_F_BAD_SAD_INTERLEAVE; in imc_decode_sad()
353 dec->ids_fail_data = ileavetgt; in imc_decode_sad()
356 tadid = sad->isad_mcroute.ismc_mcroutes[ileavetgt].ismce_imc; in imc_decode_sad()
358 sad->isad_mcroute.ismc_mcroutes[ileavetgt].ismce_pchannel; in imc_decode_sad()
369 dec->ids_socket = NULL; in imc_decode_sad()
370 for (i = 0; i < imc->imc_nsockets; i++) { in imc_decode_sad()
371 if (imc->imc_sockets[i].isock_nodeid == nodeid) { in imc_decode_sad()
372 dec->ids_socket = &imc->imc_sockets[i]; in imc_decode_sad()
376 if (dec->ids_socket == NULL) { in imc_decode_sad()
377 dec->ids_fail = IMC_DECODE_F_SAD_BAD_SOCKET; in imc_decode_sad()
378 dec->ids_fail_data = nodeid; in imc_decode_sad()
382 if (tadid >= dec->ids_socket->isock_ntad) { in imc_decode_sad()
383 dec->ids_fail = IMC_DECODE_F_SAD_BAD_TAD; in imc_decode_sad()
384 dec->ids_fail_data = tadid; in imc_decode_sad()
388 dec->ids_nodeid = nodeid; in imc_decode_sad()
389 dec->ids_tadid = tadid; in imc_decode_sad()
390 dec->ids_channelid = channelid; in imc_decode_sad()
391 dec->ids_tad = &dec->ids_socket->isock_tad[tadid]; in imc_decode_sad()
392 dec->ids_mc = &dec->ids_socket->isock_imcs[tadid]; in imc_decode_sad()
404 * idx = [(dec->ids_pa >> 6) / socket-ways] % channel-ways
417 * that's programmed on a per-channel basis to offset the system address.
426 const imc_tad_rule_t *rule = dec->ids_tad_rule; in imc_decode_tad_channel()
428 index = dec->ids_pa >> 6; in imc_decode_tad_channel()
429 if ((dec->ids_tad->itad_flags & IMC_TAD_FLAG_CHANSHIFT) != 0) { in imc_decode_tad_channel()
437 index = index / rule->itr_sock_way; in imc_decode_tad_channel()
439 if ((dec->ids_tad->itad_flags & IMC_TAD_FLAG_CHANHASH) != 0) { in imc_decode_tad_channel()
442 uint64_t shift = (dec->ids_pa >> i) & 0x3; in imc_decode_tad_channel()
447 index %= rule->itr_chan_way; in imc_decode_tad_channel()
448 if (index >= rule->itr_ntargets) { in imc_decode_tad_channel()
449 dec->ids_fail = IMC_DECODE_F_TAD_BAD_TARGET_INDEX; in imc_decode_tad_channel()
450 dec->ids_fail_data = index; in imc_decode_tad_channel()
454 dec->ids_channelid = rule->itr_targets[index]; in imc_decode_tad_channel()
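Taken together, the imc_decode_tad_channel() hits implement the formula in the comment at line 404: start from the 64-byte-aligned system address, divide out the socket ways, optionally hash, and reduce modulo the channel ways to pick an entry in itr_targets[]. A standalone sketch under those assumptions follows; the CHANSHIFT variant is omitted, the struct is a stand-in, and the hash loop's exact bit range is not among the hits (a loop over i = 12, 14, ..., 26 XORing (pa >> i) & 0x3, matching the two-bit stride at line 442, is assumed):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the TAD rule fields in the hits above. */
typedef struct {
	unsigned int	sock_way;	/* itr_sock_way */
	unsigned int	chan_way;	/* itr_chan_way */
	unsigned int	ntargets;	/* itr_ntargets */
	const uint8_t	*targets;	/* itr_targets */
	int		chanhash;	/* channel hashing enabled */
} tad_rule_sketch_t;

static int
tad_channel_target(const tad_rule_sketch_t *rule, uint64_t pa, uint8_t *chanp)
{
	uint64_t index = pa >> 6;	/* 64-byte cache lines */

	index /= rule->sock_way;	/* strip the socket interleave */

	if (rule->chanhash) {
		for (unsigned int i = 12; i < 28; i += 2)
			index ^= (pa >> i) & 0x3;
	}

	index %= rule->chan_way;
	if (index >= rule->ntargets)
		return (-1);		/* bad target index */

	*chanp = rule->targets[index];
	return (0);
}

int
main(void)
{
	const uint8_t targets[] = { 0, 1, 2 };
	tad_rule_sketch_t r = { 1, 3, 3, targets, 0 };
	uint8_t chan;

	if (tad_channel_target(&r, 0x80ULL, &chan) == 0)
		printf("channel %u\n", chan);	/* 0x80 >> 6 = 2, 2 % 3 = 2 */
	return (0);
}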
466 if ((tad->itad_flags & IMC_TAD_FLAG_CHANSHIFT) != 0) { in imc_tad_gran_to_shift()
490 const imc_tad_t *tad = dec->ids_tad; in imc_decode_tad()
491 const imc_mc_t *mc = dec->ids_mc; in imc_decode_tad()
499 for (i = 0; i < tad->itad_nrules; i++) { in imc_decode_tad()
500 rule = &tad->itad_rules[i]; in imc_decode_tad()
502 if (dec->ids_pa >= rule->itr_base && in imc_decode_tad()
503 dec->ids_pa < rule->itr_limit) { in imc_decode_tad()
508 if (rule == NULL || i == tad->itad_nrules) { in imc_decode_tad()
509 dec->ids_fail = IMC_DECODE_F_NO_TAD_RULE; in imc_decode_tad()
513 dec->ids_tad_rule = rule; in imc_decode_tad()
516 * Check if our TAD rule requires 3-way interleaving on the channel. We in imc_decode_tad()
520 if (rule->itr_chan_way == 3) { in imc_decode_tad()
521 dec->ids_fail = IMC_DECODE_F_TAD_3_ILEAVE; in imc_decode_tad()
530 switch (imc->imc_gen) { in imc_decode_tad()
552 if (dec->ids_channelid >= mc->icn_nchannels) { in imc_decode_tad()
553 dec->ids_fail = IMC_DECODE_F_BAD_CHANNEL_ID; in imc_decode_tad()
554 dec->ids_fail_data = dec->ids_channelid; in imc_decode_tad()
557 chan = &mc->icn_channels[dec->ids_channelid]; in imc_decode_tad()
558 dec->ids_chan = chan; in imc_decode_tad()
560 if (tadruleno >= chan->ich_ntad_offsets) { in imc_decode_tad()
561 dec->ids_fail = IMC_DECODE_F_BAD_CHANNEL_TAD_OFFSET; in imc_decode_tad()
562 dec->ids_fail_data = tadruleno; in imc_decode_tad()
570 * chan_addr = (sys_addr - off) / (chan way * sock way). in imc_decode_tad()
584 off = chan->ich_tad_offsets[tadruleno]; in imc_decode_tad()
585 if (off > dec->ids_pa) { in imc_decode_tad()
586 dec->ids_fail = IMC_DECODE_F_CHANOFF_UNDERFLOW; in imc_decode_tad()
589 chanshift = imc_tad_gran_to_shift(tad, rule->itr_chan_gran); in imc_decode_tad()
590 sockshift = imc_tad_gran_to_shift(tad, rule->itr_sock_gran); in imc_decode_tad()
591 chanmask = (1 << chanshift) - 1; in imc_decode_tad()
592 sockmask = (1 << sockshift) - 1; in imc_decode_tad()
594 chanaddr = dec->ids_pa - off; in imc_decode_tad()
596 chanaddr /= rule->itr_sock_way; in imc_decode_tad()
598 chanaddr |= dec->ids_pa & sockmask; in imc_decode_tad()
600 chanaddr /= rule->itr_chan_way; in imc_decode_tad()
602 chanaddr |= dec->ids_pa & chanmask; in imc_decode_tad()
604 dec->ids_chanaddr = chanaddr; in imc_decode_tad()
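The tail of imc_decode_tad() is the arithmetic promised by the comment at line 570: chan_addr = (sys_addr - off) / (chan way * sock way), done in two stages with the low granularity bits of the original system address restored after each divide. The shift steps between the matched lines do not appear in these hits; the sketch below assumes the usual strip-divide-restore pattern implied by the chanmask/sockmask values built at lines 591-592, with stand-in parameter names:

#include <stdint.h>
#include <stdio.h>

/* Hedged sketch of the channel-address math; parameter names are stand-ins. */
static uint64_t
tad_channel_addr(uint64_t pa, uint64_t off, unsigned int sock_way,
    unsigned int sockshift, unsigned int chan_way, unsigned int chanshift)
{
	uint64_t sockmask = (1ULL << sockshift) - 1;
	uint64_t chanmask = (1ULL << chanshift) - 1;
	uint64_t chanaddr = pa - off;

	chanaddr >>= sockshift;
	chanaddr /= sock_way;		/* remove the socket interleave */
	chanaddr <<= sockshift;
	chanaddr |= pa & sockmask;

	chanaddr >>= chanshift;
	chanaddr /= chan_way;		/* remove the channel interleave */
	chanaddr <<= chanshift;
	chanaddr |= pa & chanmask;

	return (chanaddr);
}

int
main(void)
{
	/* 2-way socket x 2-way channel interleave at 64-byte granularity. */
	printf("0x%llx\n", (unsigned long long)
	    tad_channel_addr(0x10040ULL, 0, 2, 6, 2, 6));	/* 0x4000 */
	return (0);
}

The net effect is to compress the 4-way-interleaved system range down to the quarter that actually lives behind this channel, while leaving the cache-line offset bits untouched.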
612 const imc_mc_t *mc = dec->ids_mc; in imc_decode_rir()
613 const imc_channel_t *chan = dec->ids_chan; in imc_decode_rir()
621 if (mc->icn_closed) { in imc_decode_rir()
626 mask = (1UL << shift) - 1; in imc_decode_rir()
628 for (i = 0, base = 0; i < chan->ich_nrankileaves; i++) { in imc_decode_rir()
629 rir = &chan->ich_rankileaves[i]; in imc_decode_rir()
630 if (rir->irle_enabled && dec->ids_chanaddr >= base && in imc_decode_rir()
631 dec->ids_chanaddr < rir->irle_limit) { in imc_decode_rir()
635 base = rir->irle_limit; in imc_decode_rir()
638 if (rir == NULL || i == chan->ich_nrankileaves) { in imc_decode_rir()
639 dec->ids_fail = IMC_DECODE_F_NO_RIR_RULE; in imc_decode_rir()
642 dec->ids_rir = rir; in imc_decode_rir()
649 index = (dec->ids_chanaddr >> shift) % rir->irle_nways; in imc_decode_rir()
650 if (index >= rir->irle_nentries) { in imc_decode_rir()
651 dec->ids_fail = IMC_DECODE_F_BAD_RIR_ILEAVE_TARGET; in imc_decode_rir()
652 dec->ids_fail_data = index; in imc_decode_rir()
655 rirtarg = &rir->irle_entries[index]; in imc_decode_rir()
670 dec->ids_physrankid = rirtarg->irle_target; in imc_decode_rir()
671 dimmid = dec->ids_physrankid / 4; in imc_decode_rir()
672 rankid = dec->ids_physrankid % 4; in imc_decode_rir()
674 if (dimmid >= chan->ich_ndimms) { in imc_decode_rir()
675 dec->ids_fail = IMC_DECODE_F_BAD_DIMM_INDEX; in imc_decode_rir()
676 dec->ids_fail_data = dimmid; in imc_decode_rir()
680 dimm = &chan->ich_dimms[dimmid]; in imc_decode_rir()
681 if (!dimm->idimm_present) { in imc_decode_rir()
682 dec->ids_fail = IMC_DECODE_F_DIMM_NOT_PRESENT; in imc_decode_rir()
685 dec->ids_dimmid = dimmid; in imc_decode_rir()
686 dec->ids_dimm = dimm; in imc_decode_rir()
688 if (rankid >= dimm->idimm_nranks) { in imc_decode_rir()
689 dec->ids_fail = IMC_DECODE_F_BAD_DIMM_RANK; in imc_decode_rir()
690 dec->ids_fail_data = rankid; in imc_decode_rir()
693 dec->ids_rankid = rankid; in imc_decode_rir()
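Given the fixed four-ranks-per-DIMM split above (dimmid = physrankid / 4, rankid = physrankid % 4), a physical rank id of 6, for example, decodes to DIMM 1, rank 2.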
699 rankaddr = dec->ids_chanaddr; in imc_decode_rir()
701 rankaddr /= rir->irle_nways; in imc_decode_rir()
703 rankaddr |= dec->ids_chanaddr & mask; in imc_decode_rir()
705 if (rirtarg->irle_offset > rankaddr) { in imc_decode_rir()
706 dec->ids_fail = IMC_DECODE_F_RANKOFF_UNDERFLOW; in imc_decode_rir()
709 rankaddr -= rirtarg->irle_offset; in imc_decode_rir()
710 dec->ids_rankaddr = rankaddr; in imc_decode_rir()
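The final hits rebuild the rank address: divide the channel address by the rank-interleave ways while preserving the bits below the interleave granularity (the mask built at line 626), then subtract the per-rank offset, failing if that would underflow. A standalone sketch follows, again assuming the shift steps that fall between the matched lines; names only loosely mirror the driver's fields:

#include <stdint.h>
#include <stdio.h>

/* Hedged sketch of the rank-address math in imc_decode_rir(). */
static int
rir_rank_addr(uint64_t chanaddr, unsigned int shift, unsigned int ways,
    uint64_t offset, uint64_t *rankaddrp)
{
	uint64_t mask = (1ULL << shift) - 1;
	uint64_t rankaddr = chanaddr;

	rankaddr >>= shift;
	rankaddr /= ways;		/* remove the rank interleave */
	rankaddr <<= shift;
	rankaddr |= chanaddr & mask;	/* keep the uninterleaved low bits */

	if (offset > rankaddr)
		return (-1);		/* analogous to RANKOFF_UNDERFLOW */

	*rankaddrp = rankaddr - offset;
	return (0);
}

int
main(void)
{
	uint64_t ra;

	/* 2-way rank interleave at an 8 KiB granularity (shift of 13). */
	if (rir_rank_addr(0x6000ULL, 13, 2, 0, &ra) == 0)
		printf("0x%llx\n", (unsigned long long)ra);	/* 0x2000 */
	return (0);
}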
719 dec->ids_pa = pa; in imc_decode_pa()
720 dec->ids_nodeid = dec->ids_tadid = dec->ids_channelid = UINT32_MAX; in imc_decode_pa()
726 if (imc->imc_nsockets < 1 || in imc_decode_pa()
727 imc->imc_sockets[0].isock_valid != IMC_SOCKET_V_VALID) { in imc_decode_pa()
728 dec->ids_fail = IMC_DECODE_F_BAD_SOCKET; in imc_decode_pa()
729 dec->ids_fail_data = 0; in imc_decode_pa()