Lines matching +full:0 +full:xe, excerpted from the xe driver's PAT code (drivers/gpu/drm/xe/xe_pat.c); each entry below is prefixed with its line number in that file.

22 #define _PAT_ATS				0x47fc
23 #define _PAT_INDEX(index) _PICK_EVEN_2RANGES(index, 8, \
24 0x4800, 0x4804, \
25 0x4848, 0x484c)
26 #define _PAT_PTA 0x4820
33 #define XE2_COH_MODE REG_GENMASK(1, 0)
38 #define XELPG_PAT_0_WB REG_FIELD_PREP(XELPG_L4_POLICY_MASK, 0)
39 #define XELPG_INDEX_COH_MODE_MASK REG_GENMASK(1, 0)
42 #define XELPG_0_COH_NON REG_FIELD_PREP(XELPG_INDEX_COH_MODE_MASK, 0)
47 #define XELP_MEM_TYPE_MASK REG_GENMASK(1, 0)
51 #define XELP_PAT_UC REG_FIELD_PREP(XELP_MEM_TYPE_MASK, 0)
64 [0] = { XELP_PAT_WB, XE_COH_AT_LEAST_1WAY },
71 [0] = { XELP_PAT_UC, XE_COH_NONE },
82 [0] = { XELPG_PAT_0_WB, XE_COH_NONE },
93 * - no_promote: 0=promotable, 1=no promote
94 * - comp_en: 0=disable, 1=enable
95 * - l3clos: L3 class of service (0-3)
96 * - l3_policy: 0=WB, 1=XD ("WB - Transient Display"), 3=UC
97 * - l4_policy: 0=WB, 1=WT, 3=UC
98 * - coh_mode: 0=no snoop, 2=1-way coherent, 3=2-way coherent
100 * Reserved entries should be programmed with the maximum caching, minimum
101 * coherency (which matches an all-0's encoding), so we can just omit them
102 * in the table.
111 .value = (no_promote ? XE2_NO_PROMOTE : 0) | \
112 (comp_en ? XE2_COMP_EN : 0) | \
122 [ 0] = XE2_PAT( 0, 0, 0, 0, 3, 0 ),
123 [ 1] = XE2_PAT( 0, 0, 0, 0, 3, 2 ),
124 [ 2] = XE2_PAT( 0, 0, 0, 0, 3, 3 ),
125 [ 3] = XE2_PAT( 0, 0, 0, 3, 3, 0 ),
126 [ 4] = XE2_PAT( 0, 0, 0, 3, 0, 2 ),
127 [ 5] = XE2_PAT( 0, 0, 0, 3, 3, 2 ),
128 [ 6] = XE2_PAT( 1, 0, 0, 1, 3, 0 ),
129 [ 7] = XE2_PAT( 0, 0, 0, 3, 0, 3 ),
130 [ 8] = XE2_PAT( 0, 0, 0, 3, 0, 0 ),
131 [ 9] = XE2_PAT( 0, 1, 0, 0, 3, 0 ),
132 [10] = XE2_PAT( 0, 1, 0, 3, 0, 0 ),
133 [11] = XE2_PAT( 1, 1, 0, 1, 3, 0 ),
134 [12] = XE2_PAT( 0, 1, 0, 3, 3, 0 ),
135 [13] = XE2_PAT( 0, 0, 0, 0, 0, 0 ),
136 [14] = XE2_PAT( 0, 1, 0, 0, 0, 0 ),
137 [15] = XE2_PAT( 1, 1, 0, 1, 1, 0 ),
138 /* 16..19 are reserved; leave set to all 0's */
139 [20] = XE2_PAT( 0, 0, 1, 0, 3, 0 ),
140 [21] = XE2_PAT( 0, 1, 1, 0, 3, 0 ),
141 [22] = XE2_PAT( 0, 0, 1, 0, 3, 2 ),
142 [23] = XE2_PAT( 0, 0, 1, 0, 3, 3 ),
143 [24] = XE2_PAT( 0, 0, 2, 0, 3, 0 ),
144 [25] = XE2_PAT( 0, 1, 2, 0, 3, 0 ),
145 [26] = XE2_PAT( 0, 0, 2, 0, 3, 2 ),
146 [27] = XE2_PAT( 0, 0, 2, 0, 3, 3 ),
147 [28] = XE2_PAT( 0, 0, 3, 0, 3, 0 ),
148 [29] = XE2_PAT( 0, 1, 3, 0, 3, 0 ),
149 [30] = XE2_PAT( 0, 0, 3, 0, 3, 2 ),
150 [31] = XE2_PAT( 0, 0, 3, 0, 3, 3 ),
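To make the field encoding documented above concrete, here is a small self-contained sketch (not part of the driver) that recomputes the value of table entry [3], XE2_PAT(0, 0, 0, 3, 3, 0): L3 and L4 policies both UC, non-coherent. Only XE2_COH_MODE (bits 1:0) is visible in this excerpt, so the bit positions assumed for the other fields are illustrative; the macro lines omitted by the search presumably OR the remaining fields into .value the same way the no_promote and comp_en terms shown above are.

#include <stdint.h>
#include <stdio.h>

/* Assumed field positions, for demonstration only; the driver builds the
 * value from its own REG_GENMASK()/REG_FIELD_PREP() definitions. */
#define DEMO_COH_MODE(x)   ((uint32_t)(x) << 0)  /* bits 1:0, as XE2_COH_MODE above */
#define DEMO_L4_POLICY(x)  ((uint32_t)(x) << 2)  /* assumed position */
#define DEMO_L3_POLICY(x)  ((uint32_t)(x) << 4)  /* assumed position */

int main(void)
{
	/* Entry [3]: XE2_PAT(0, 0, 0, 3, 3, 0) -> l3_policy=UC, l4_policy=UC, no snoop */
	uint32_t value = DEMO_L3_POLICY(3) | DEMO_L4_POLICY(3) | DEMO_COH_MODE(0);

	printf("PAT entry 3 value = 0x%02x\n", value); /* prints 0x3c with the assumed layout */
	return 0;
}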
154 static const struct xe_pat_table_entry xe2_pat_ats = XE2_PAT( 0, 0, 0, 0, 3, 3 );
155 static const struct xe_pat_table_entry xe2_pat_pta = XE2_PAT( 0, 0, 0, 0, 3, 0 );
157 u16 xe_pat_index_get_coh_mode(struct xe_device *xe, u16 pat_index)
159 WARN_ON(pat_index >= xe->pat.n_entries);
160 return xe->pat.table[pat_index].coh_mode;
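A hypothetical caller sketch (not from this file; the function name is made up for illustration): reject an out-of-range pat_index, then use the helper above to check whether the entry is CPU-coherent.

/* Hypothetical helper, for illustration only (kernel context assumed). */
static bool demo_pat_index_is_coherent(struct xe_device *xe, u16 pat_index)
{
	if (pat_index >= xe->pat.n_entries)
		return false;

	/* coh_mode comes straight from the platform table chosen in
	 * xe_pat_init_early(); XE_COH_NONE means GPU accesses through this
	 * entry are not snooped by the CPU caches. */
	return xe_pat_index_get_coh_mode(xe, pat_index) != XE_COH_NONE;
}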
166 struct xe_device *xe = gt_to_xe(gt);
168 for (int i = 0; i < n_entries; i++) {
174 if (xe->pat.pat_ats)
175 xe_mmio_write32(&gt->mmio, XE_REG(_PAT_ATS), xe->pat.pat_ats->value);
176 if (xe->pat.pat_pta)
177 xe_mmio_write32(&gt->mmio, XE_REG(_PAT_PTA), xe->pat.pat_pta->value);
183 struct xe_device *xe = gt_to_xe(gt);
185 for (int i = 0; i < n_entries; i++) {
191 if (xe->pat.pat_ats)
192 xe_gt_mcr_multicast_write(gt, XE_REG_MCR(_PAT_ATS), xe->pat.pat_ats->value);
193 if (xe->pat.pat_pta)
194 xe_gt_mcr_multicast_write(gt, XE_REG_MCR(_PAT_PTA), xe->pat.pat_pta->value);
199 struct xe_device *xe = gt_to_xe(gt);
209 for (i = 0; i < xe->pat.n_entries; i++) {
227 struct xe_device *xe = gt_to_xe(gt);
237 for (i = 0; i < xe->pat.n_entries; i++) {
257 struct xe_device *xe = gt_to_xe(gt);
267 for (i = 0; i < xe->pat.n_entries; i++) {
285 struct xe_device *xe = gt_to_xe(gt);
295 for (i = 0; i < xe->pat.n_entries; i++) {
323 struct xe_device *xe = gt_to_xe(gt);
334 for (i = 0; i < xe->pat.n_entries; i++) {
378 void xe_pat_init_early(struct xe_device *xe)
380 if (GRAPHICS_VER(xe) == 30 || GRAPHICS_VER(xe) == 20) {
381 xe->pat.ops = &xe2_pat_ops;
382 xe->pat.table = xe2_pat_table;
383 xe->pat.pat_ats = &xe2_pat_ats;
384 if (IS_DGFX(xe))
385 xe->pat.pat_pta = &xe2_pat_pta;
388 if (GRAPHICS_VERx100(xe) == 2001)
389 xe->pat.n_entries = 28; /* Disable CLOS3 */
390 else
391 xe->pat.n_entries = ARRAY_SIZE(xe2_pat_table);
393 xe->pat.idx[XE_CACHE_NONE] = 3;
394 xe->pat.idx[XE_CACHE_WT] = 15;
395 xe->pat.idx[XE_CACHE_WB] = 2;
396 xe->pat.idx[XE_CACHE_NONE_COMPRESSION] = 12; /* Applicable on xe2 and beyond */
397 } else if (xe->info.platform == XE_METEORLAKE) {
398 xe->pat.ops = &xelpg_pat_ops;
399 xe->pat.table = xelpg_pat_table;
400 xe->pat.n_entries = ARRAY_SIZE(xelpg_pat_table);
401 xe->pat.idx[XE_CACHE_NONE] = 2;
402 xe->pat.idx[XE_CACHE_WT] = 1;
403 xe->pat.idx[XE_CACHE_WB] = 3;
404 } else if (xe->info.platform == XE_PVC) {
405 xe->pat.ops = &xehpc_pat_ops;
406 xe->pat.table = xehpc_pat_table;
407 xe->pat.n_entries = ARRAY_SIZE(xehpc_pat_table);
408 xe->pat.idx[XE_CACHE_NONE] = 0;
409 xe->pat.idx[XE_CACHE_WT] = 2;
410 xe->pat.idx[XE_CACHE_WB] = 3;
411 } else if (xe->info.platform == XE_DG2) {
416 xe->pat.ops = &xehp_pat_ops;
417 xe->pat.table = xelp_pat_table;
418 xe->pat.n_entries = ARRAY_SIZE(xelp_pat_table);
419 xe->pat.idx[XE_CACHE_NONE] = 3;
420 xe->pat.idx[XE_CACHE_WT] = 2;
421 xe->pat.idx[XE_CACHE_WB] = 0;
422 } else if (GRAPHICS_VERx100(xe) <= 1210) {
423 WARN_ON_ONCE(!IS_DGFX(xe) && !xe->info.has_llc);
424 xe->pat.ops = &xelp_pat_ops;
425 xe->pat.table = xelp_pat_table;
426 xe->pat.n_entries = ARRAY_SIZE(xelp_pat_table);
427 xe->pat.idx[XE_CACHE_NONE] = 3;
428 xe->pat.idx[XE_CACHE_WT] = 2;
429 xe->pat.idx[XE_CACHE_WB] = 0;
439 drm_err(&xe->drm, "Missing PAT table for platform with graphics version %d.%02d!\n",
440 GRAPHICS_VER(xe), GRAPHICS_VERx100(xe) % 100);
444 if (IS_SRIOV_VF(xe))
445 xe->pat.ops = NULL;
447 xe_assert(xe, !xe->pat.ops || xe->pat.ops->dump);
448 xe_assert(xe, !xe->pat.ops || xe->pat.ops->program_graphics);
449 xe_assert(xe, !xe->pat.ops || MEDIA_VER(xe) < 13 || xe->pat.ops->program_media);
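Once xe_pat_init_early() has filled in xe->pat.idx[] for the detected platform, the rest of the driver can look up a PAT index by cache level instead of hard-coding per-platform numbers. A hypothetical lookup sketch (the function name is illustrative):

/* Hypothetical usage, for illustration only. */
static u16 demo_default_wb_pat_index(struct xe_device *xe)
{
	/* Resolves to 2 on xe2/xe3, 3 on Meteor Lake and PVC, and 0 on DG2
	 * and earlier XeLP-era platforms, per the assignments above. */
	return xe->pat.idx[XE_CACHE_WB];
}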
454 struct xe_device *xe = gt_to_xe(gt);
456 if (!xe->pat.ops)
460 xe->pat.ops->program_media(gt, xe->pat.table, xe->pat.n_entries);
462 xe->pat.ops->program_graphics(gt, xe->pat.table, xe->pat.n_entries);
467 struct xe_device *xe = gt_to_xe(gt);
469 if (!xe->pat.ops)
472 xe->pat.ops->dump(gt, p);
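The dump op is driven through a drm_printer, so it can be hooked up to any printer-backed consumer. A hypothetical debugfs-style show callback (illustrative; not necessarily how the driver actually wires it up):

#include <linux/seq_file.h>
#include <drm/drm_print.h>

/* Hypothetical show callback, for illustration only. */
static int demo_pat_show(struct xe_gt *gt, struct seq_file *m)
{
	struct drm_printer p = drm_seq_file_printer(m);

	xe_pat_dump(gt, &p);
	return 0;
}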