// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMD Address Translation Library
 *
 * dehash.c : Functions to account for hashing bits
 *
 * Copyright (c) 2023, Advanced Micro Devices, Inc.
 * All Rights Reserved.
 *
 * Author: Yazen Ghannam <Yazen.Ghannam@amd.com>
 */

#include "internal.h"

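/*
 * Each helper below reverses one hashing scheme: recompute the hash of the
 * current address, and if the result disagrees with the interleave bit
 * already present in ctx->ret_addr, flip that bit back. XOR hashing is its
 * own inverse, so nothing else needs to be tracked.
 */
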
static int df2_dehash_addr(struct addr_ctx *ctx)
{
	u8 hashed_bit, intlv_bit, intlv_bit_pos;

	intlv_bit_pos = ctx->map.intlv_bit_pos;
	intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);

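	/* DF2 hashes the interleave bit with address bits 12, 18, 21, and 30. */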
	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(12), ctx->ret_addr);
	hashed_bit ^= FIELD_GET(BIT_ULL(18), ctx->ret_addr);
	hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr);
	hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr);

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);

	return 0;
}

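/*
 * DF3 COD hashing; handles the DF3_COD4_2CHAN_HASH, DF3_COD2_4CHAN_HASH,
 * and DF3_COD1_8CHAN_HASH interleave modes (see dehash_address()).
 */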
static int df3_dehash_addr(struct addr_ctx *ctx)
{
	bool hash_ctl_64k, hash_ctl_2M, hash_ctl_1G;
	u8 hashed_bit, intlv_bit, intlv_bit_pos;

	hash_ctl_64k = FIELD_GET(DF3_HASH_CTL_64K, ctx->map.ctl);
	hash_ctl_2M = FIELD_GET(DF3_HASH_CTL_2M, ctx->map.ctl);
	hash_ctl_1G = FIELD_GET(DF3_HASH_CTL_1G, ctx->map.ctl);

	intlv_bit_pos = ctx->map.intlv_bit_pos;
	intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);

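	/*
	 * First channel-select bit: always hashed with address bit 14;
	 * bits 18, 23, and 32 are folded in only when the 64K, 2M, and 1G
	 * hash controls are enabled.
	 */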
	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(14), ctx->ret_addr);
	hashed_bit ^= FIELD_GET(BIT_ULL(18), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(23), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(32), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);

	/* Calculation complete for 2 channels. Continue for 4 and 8 channels. */
	if (ctx->map.intlv_mode == DF3_COD4_2CHAN_HASH)
		return 0;

	intlv_bit = FIELD_GET(BIT_ULL(12), ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(16), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(12);

	/* Calculation complete for 4 channels. Continue for 8 channels. */
	if (ctx->map.intlv_mode == DF3_COD2_4CHAN_HASH)
		return 0;

	intlv_bit = FIELD_GET(BIT_ULL(13), ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(17), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(22), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(31), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(13);

	return 0;
}

static int df3_6chan_dehash_addr(struct addr_ctx *ctx)
{
	u8 intlv_bit_pos = ctx->map.intlv_bit_pos;
	u8 hashed_bit, intlv_bit, num_intlv_bits;
	bool hash_ctl_2M, hash_ctl_1G;

	if (ctx->map.intlv_mode != DF3_6CHAN) {
		atl_debug_on_bad_intlv_mode(ctx);
		return -EINVAL;
	}

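	/* Six channels use a 3-bit channel select: ilog2(6) + 1 = 3. */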
	num_intlv_bits = ilog2(ctx->map.num_intlv_chan) + 1;

	hash_ctl_2M = FIELD_GET(DF3_HASH_CTL_2M, ctx->map.ctl);
	hash_ctl_1G = FIELD_GET(DF3_HASH_CTL_1G, ctx->map.ctl);

	intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= !!(BIT_ULL(intlv_bit_pos + num_intlv_bits) & ctx->ret_addr);
	hashed_bit ^= FIELD_GET(BIT_ULL(23), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(32), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);

	intlv_bit_pos++;
	intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);

	intlv_bit_pos++;
	intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(22), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(31), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);

	return 0;
}

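/*
 * DF4 NPS hashing; handles the DF4_NPS4_2CHAN_HASH, DF4_NPS2_4CHAN_HASH,
 * and DF4_NPS1_8CHAN_HASH interleave modes (see dehash_address()).
 */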
static int df4_dehash_addr(struct addr_ctx *ctx)
{
	bool hash_ctl_64k, hash_ctl_2M, hash_ctl_1G;
	u8 hashed_bit, intlv_bit;

	hash_ctl_64k = FIELD_GET(DF4_HASH_CTL_64K, ctx->map.ctl);
	hash_ctl_2M = FIELD_GET(DF4_HASH_CTL_2M, ctx->map.ctl);
	hash_ctl_1G = FIELD_GET(DF4_HASH_CTL_1G, ctx->map.ctl);

	intlv_bit = FIELD_GET(BIT_ULL(8), ctx->ret_addr);

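	/*
	 * First channel-select bit (address bit 8). Address bit 14 also
	 * feeds this hash when there is no socket interleaving
	 * (num_intlv_sockets == 1).
	 */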
	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(16), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr) & hash_ctl_1G;

	if (ctx->map.num_intlv_sockets == 1)
		hashed_bit ^= FIELD_GET(BIT_ULL(14), ctx->ret_addr);

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(8);

	/*
	 * Hashing is possible with socket interleaving, so check the total number
	 * of channels in the system rather than DRAM map interleaving mode.
	 *
	 * Calculation complete for 2 channels. Continue for 4, 8, and 16 channels.
	 */
	if (ctx->map.total_intlv_chan <= 2)
		return 0;

	intlv_bit = FIELD_GET(BIT_ULL(12), ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(17), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(22), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(31), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(12);

	/* Calculation complete for 4 channels. Continue for 8 and 16 channels. */
	if (ctx->map.total_intlv_chan <= 4)
		return 0;

	intlv_bit = FIELD_GET(BIT_ULL(13), ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(18), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(23), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(32), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(13);

	/* Calculation complete for 8 channels. Continue for 16 channels. */
	if (ctx->map.total_intlv_chan <= 8)
		return 0;

	intlv_bit = FIELD_GET(BIT_ULL(14), ctx->ret_addr);

	hashed_bit = intlv_bit;
	hashed_bit ^= FIELD_GET(BIT_ULL(19), ctx->ret_addr) & hash_ctl_64k;
	hashed_bit ^= FIELD_GET(BIT_ULL(24), ctx->ret_addr) & hash_ctl_2M;
	hashed_bit ^= FIELD_GET(BIT_ULL(33), ctx->ret_addr) & hash_ctl_1G;

	if (hashed_bit != intlv_bit)
		ctx->ret_addr ^= BIT_ULL(14);

	return 0;
}

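/*
 * DF4.5 NPS hashing; handles the 2, 4, 8, and 16 channel 1K/2K hash
 * interleave modes (see dehash_address()).
 */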
static int df4p5_dehash_addr(struct addr_ctx *ctx)
{
	bool hash_ctl_64k, hash_ctl_2M, hash_ctl_1G, hash_ctl_1T;
	u8 hashed_bit, intlv_bit;
	u64 rehash_vector;

	hash_ctl_64k = FIELD_GET(DF4_HASH_CTL_64K, ctx->map.ctl);
	hash_ctl_2M = FIELD_GET(DF4_HASH_CTL_2M, ctx->map.ctl);
	hash_ctl_1G = FIELD_GET(DF4_HASH_CTL_1G, ctx->map.ctl);
	hash_ctl_1T = FIELD_GET(DF4p5_HASH_CTL_1T, ctx->map.ctl);

	/*
	 * Generate a unique address to determine which bits
	 * need to be dehashed.
	 *
	 * Start with a contiguous bitmask for the total
	 * number of channels starting at bit 8.
	 *
	 * Then make a gap in the proper place based on
	 * interleave mode.
	 */
	rehash_vector = ctx->map.total_intlv_chan - 1;
	rehash_vector <<= 8;

	if (ctx->map.intlv_mode == DF4p5_NPS2_4CHAN_1K_HASH ||
	    ctx->map.intlv_mode == DF4p5_NPS1_8CHAN_1K_HASH ||
	    ctx->map.intlv_mode == DF4p5_NPS1_16CHAN_1K_HASH)
		rehash_vector = expand_bits(10, 2, rehash_vector);
	else
		rehash_vector = expand_bits(9, 3, rehash_vector);

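	/*
	 * Example: with eight total channels the initial mask covers bits
	 * {8, 9, 10}. The 2K hash modes open the gap above bit 8, leaving
	 * rehash_vector with bits {8, 12, 13} set; the 1K hash modes open
	 * it above bit 9, leaving bits {8, 9, 12}.
	 */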
	if (rehash_vector & BIT_ULL(8)) {
		intlv_bit = FIELD_GET(BIT_ULL(8), ctx->ret_addr);

		hashed_bit = intlv_bit;
		hashed_bit ^= FIELD_GET(BIT_ULL(16), ctx->ret_addr) & hash_ctl_64k;
		hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr) & hash_ctl_2M;
		hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr) & hash_ctl_1G;
		hashed_bit ^= FIELD_GET(BIT_ULL(40), ctx->ret_addr) & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(8);
	}

	if (rehash_vector & BIT_ULL(9)) {
		intlv_bit = FIELD_GET(BIT_ULL(9), ctx->ret_addr);

		hashed_bit = intlv_bit;
		hashed_bit ^= FIELD_GET(BIT_ULL(17), ctx->ret_addr) & hash_ctl_64k;
		hashed_bit ^= FIELD_GET(BIT_ULL(22), ctx->ret_addr) & hash_ctl_2M;
		hashed_bit ^= FIELD_GET(BIT_ULL(31), ctx->ret_addr) & hash_ctl_1G;
		hashed_bit ^= FIELD_GET(BIT_ULL(41), ctx->ret_addr) & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(9);
	}

	if (rehash_vector & BIT_ULL(12)) {
		intlv_bit = FIELD_GET(BIT_ULL(12), ctx->ret_addr);

		hashed_bit = intlv_bit;
		hashed_bit ^= FIELD_GET(BIT_ULL(18), ctx->ret_addr) & hash_ctl_64k;
		hashed_bit ^= FIELD_GET(BIT_ULL(23), ctx->ret_addr) & hash_ctl_2M;
		hashed_bit ^= FIELD_GET(BIT_ULL(32), ctx->ret_addr) & hash_ctl_1G;
		hashed_bit ^= FIELD_GET(BIT_ULL(42), ctx->ret_addr) & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(12);
	}

	if (rehash_vector & BIT_ULL(13)) {
		intlv_bit = FIELD_GET(BIT_ULL(13), ctx->ret_addr);

		hashed_bit = intlv_bit;
		hashed_bit ^= FIELD_GET(BIT_ULL(19), ctx->ret_addr) & hash_ctl_64k;
		hashed_bit ^= FIELD_GET(BIT_ULL(24), ctx->ret_addr) & hash_ctl_2M;
		hashed_bit ^= FIELD_GET(BIT_ULL(33), ctx->ret_addr) & hash_ctl_1G;
		hashed_bit ^= FIELD_GET(BIT_ULL(43), ctx->ret_addr) & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(13);
	}

	if (rehash_vector & BIT_ULL(14)) {
		intlv_bit = FIELD_GET(BIT_ULL(14), ctx->ret_addr);

		hashed_bit = intlv_bit;
		hashed_bit ^= FIELD_GET(BIT_ULL(20), ctx->ret_addr) & hash_ctl_64k;
		hashed_bit ^= FIELD_GET(BIT_ULL(25), ctx->ret_addr) & hash_ctl_2M;
		hashed_bit ^= FIELD_GET(BIT_ULL(34), ctx->ret_addr) & hash_ctl_1G;
		hashed_bit ^= FIELD_GET(BIT_ULL(44), ctx->ret_addr) & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(14);
	}

	return 0;
}

/*
 * MI300 hash bits
 *                            4K   64K   2M    1G    1T    1T
 * COH_ST_Select[0] = XOR of addr{8,  12,  15,  22,  29,  36,  43}
 * COH_ST_Select[1] = XOR of addr{9,  13,  16,  23,  30,  37,  44}
 * COH_ST_Select[2] = XOR of addr{10, 14,  17,  24,  31,  38,  45}
 * COH_ST_Select[3] = XOR of addr{11,      18,  25,  32,  39,  46}
 * COH_ST_Select[4] = XOR of addr{14,      19,  26,  33,  40,  47} aka Stack
 * DieID[0]         = XOR of addr{12,      20,  27,  34,  41     }
 * DieID[1]         = XOR of addr{13,      21,  28,  35,  42     }
 */
static int mi300_dehash_addr(struct addr_ctx *ctx)
{
	bool hash_ctl_4k, hash_ctl_64k, hash_ctl_2M, hash_ctl_1G, hash_ctl_1T;
	bool hashed_bit, intlv_bit, test_bit;
	u8 num_intlv_bits, base_bit, i;

	hash_ctl_4k = FIELD_GET(DF4p5_HASH_CTL_4K, ctx->map.ctl);
	hash_ctl_64k = FIELD_GET(DF4_HASH_CTL_64K, ctx->map.ctl);
	hash_ctl_2M = FIELD_GET(DF4_HASH_CTL_2M, ctx->map.ctl);
	hash_ctl_1G = FIELD_GET(DF4_HASH_CTL_1G, ctx->map.ctl);
	hash_ctl_1T = FIELD_GET(DF4p5_HASH_CTL_1T, ctx->map.ctl);

	/* Channel bits */
	num_intlv_bits = ilog2(ctx->map.num_intlv_chan);

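	/*
	 * Channel-select bits start at address bit 8; MI3_HASH_8CHAN,
	 * MI3_HASH_16CHAN, and MI3_HASH_32CHAN use 3, 4, and 5 of them.
	 */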
	for (i = 0; i < num_intlv_bits; i++) {
		base_bit = 8 + i;

		/* COH_ST_Select[4] jumps to a base bit of 14. */
		if (i == 4)
			base_bit = 14;

		intlv_bit = BIT_ULL(base_bit) & ctx->ret_addr;

		hashed_bit = intlv_bit;

		/* 4k hash bit only applies to the first 3 bits. */
		if (i <= 2) {
			test_bit = BIT_ULL(12 + i) & ctx->ret_addr;
			hashed_bit ^= test_bit & hash_ctl_4k;
		}

		/* Use temporary 'test_bit' value to avoid Sparse warnings. */
		test_bit = BIT_ULL(15 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_64k;
		test_bit = BIT_ULL(22 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_2M;
		test_bit = BIT_ULL(29 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_1G;
		test_bit = BIT_ULL(36 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_1T;
		test_bit = BIT_ULL(43 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(base_bit);
	}

	/* Die bits */
	num_intlv_bits = ilog2(ctx->map.num_intlv_dies);

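	/*
	 * Die-select bits start at address bit 12. The 4K hash control does
	 * not apply to the DieID bits (see the table above).
	 */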
	for (i = 0; i < num_intlv_bits; i++) {
		base_bit = 12 + i;

		intlv_bit = BIT_ULL(base_bit) & ctx->ret_addr;

		hashed_bit = intlv_bit;

		test_bit = BIT_ULL(20 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_64k;
		test_bit = BIT_ULL(27 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_2M;
		test_bit = BIT_ULL(34 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_1G;
		test_bit = BIT_ULL(41 + i) & ctx->ret_addr;
		hashed_bit ^= test_bit & hash_ctl_1T;

		if (hashed_bit != intlv_bit)
			ctx->ret_addr ^= BIT_ULL(base_bit);
	}

	return 0;
}

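/*
 * Remove the effects of channel/die hashing from ctx->ret_addr based on
 * the interleave mode of the current address map.
 */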
int dehash_address(struct addr_ctx *ctx)
{
	switch (ctx->map.intlv_mode) {
	/* No hashing cases. */
	case NONE:
	case NOHASH_2CHAN:
	case NOHASH_4CHAN:
	case NOHASH_8CHAN:
	case NOHASH_16CHAN:
	case NOHASH_32CHAN:
	/* Hashing bits handled earlier during CS ID calculation. */
	case DF4_NPS4_3CHAN_HASH:
	case DF4_NPS2_5CHAN_HASH:
	case DF4_NPS2_6CHAN_HASH:
	case DF4_NPS1_10CHAN_HASH:
	case DF4_NPS1_12CHAN_HASH:
	case DF4p5_NPS2_6CHAN_1K_HASH:
	case DF4p5_NPS2_6CHAN_2K_HASH:
	case DF4p5_NPS1_10CHAN_1K_HASH:
	case DF4p5_NPS1_10CHAN_2K_HASH:
	case DF4p5_NPS1_12CHAN_1K_HASH:
	case DF4p5_NPS1_12CHAN_2K_HASH:
	case DF4p5_NPS0_24CHAN_1K_HASH:
	case DF4p5_NPS0_24CHAN_2K_HASH:
	/* No hash physical address bits, so nothing to do. */
	case DF4p5_NPS4_3CHAN_1K_HASH:
	case DF4p5_NPS4_3CHAN_2K_HASH:
	case DF4p5_NPS2_5CHAN_1K_HASH:
	case DF4p5_NPS2_5CHAN_2K_HASH:
		return 0;

	case DF2_2CHAN_HASH:
		return df2_dehash_addr(ctx);

	case DF3_COD4_2CHAN_HASH:
	case DF3_COD2_4CHAN_HASH:
	case DF3_COD1_8CHAN_HASH:
		return df3_dehash_addr(ctx);

	case DF3_6CHAN:
		return df3_6chan_dehash_addr(ctx);

	case DF4_NPS4_2CHAN_HASH:
	case DF4_NPS2_4CHAN_HASH:
	case DF4_NPS1_8CHAN_HASH:
		return df4_dehash_addr(ctx);

	case DF4p5_NPS4_2CHAN_1K_HASH:
	case DF4p5_NPS4_2CHAN_2K_HASH:
	case DF4p5_NPS2_4CHAN_2K_HASH:
	case DF4p5_NPS2_4CHAN_1K_HASH:
	case DF4p5_NPS1_8CHAN_1K_HASH:
	case DF4p5_NPS1_8CHAN_2K_HASH:
	case DF4p5_NPS1_16CHAN_1K_HASH:
	case DF4p5_NPS1_16CHAN_2K_HASH:
		return df4p5_dehash_addr(ctx);

	case MI3_HASH_8CHAN:
	case MI3_HASH_16CHAN:
	case MI3_HASH_32CHAN:
		return mi300_dehash_addr(ctx);

	default:
		atl_debug_on_bad_intlv_mode(ctx);
		return -EINVAL;
	}
}