/* SPDX-License-Identifier: GPL-2.0-only WITH Linux-syscall-note */
/*
 * Copyright (C) 2020-2023 Intel Corporation
 */

#ifndef __UAPI_IVPU_DRM_H__
#define __UAPI_IVPU_DRM_H__

#include "drm.h"

#if defined(__cplusplus)
extern "C" {
#endif

#define DRM_IVPU_DRIVER_MAJOR 1
#define DRM_IVPU_DRIVER_MINOR 0

#define DRM_IVPU_GET_PARAM		  0x00
#define DRM_IVPU_SET_PARAM		  0x01
#define DRM_IVPU_BO_CREATE		  0x02
#define DRM_IVPU_BO_INFO		  0x03
#define DRM_IVPU_SUBMIT			  0x05
#define DRM_IVPU_BO_WAIT		  0x06

#define DRM_IOCTL_IVPU_GET_PARAM                                               \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_IVPU_GET_PARAM, struct drm_ivpu_param)

#define DRM_IOCTL_IVPU_SET_PARAM                                               \
	DRM_IOW(DRM_COMMAND_BASE + DRM_IVPU_SET_PARAM, struct drm_ivpu_param)

#define DRM_IOCTL_IVPU_BO_CREATE                                               \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_IVPU_BO_CREATE, struct drm_ivpu_bo_create)

#define DRM_IOCTL_IVPU_BO_INFO                                                 \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_IVPU_BO_INFO, struct drm_ivpu_bo_info)

#define DRM_IOCTL_IVPU_SUBMIT                                                  \
	DRM_IOW(DRM_COMMAND_BASE + DRM_IVPU_SUBMIT, struct drm_ivpu_submit)

#define DRM_IOCTL_IVPU_BO_WAIT                                                 \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_IVPU_BO_WAIT, struct drm_ivpu_bo_wait)

/**
 * DOC: contexts
 *
 * VPU contexts have a private virtual address space, job queues and a
 * scheduling priority. Each context is identified by a unique ID. A context
 * is created on open().
 */
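
/*
 * Example (illustrative sketch, not part of the UAPI): a context comes into
 * existence when the device file is opened. The device node path below is an
 * assumption and may differ between systems.
 *
 *	#include <fcntl.h>
 *
 *	int vpu_open(void)
 *	{
 *		// Each open() creates a new VPU context with its own
 *		// virtual address space, job queues and priority.
 *		return open("/dev/accel/accel0", O_RDWR | O_CLOEXEC);
 *	}
 */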

#define DRM_IVPU_PARAM_DEVICE_ID	    0
#define DRM_IVPU_PARAM_DEVICE_REVISION	    1
#define DRM_IVPU_PARAM_PLATFORM_TYPE	    2
#define DRM_IVPU_PARAM_CORE_CLOCK_RATE	    3
#define DRM_IVPU_PARAM_NUM_CONTEXTS	    4
#define DRM_IVPU_PARAM_CONTEXT_BASE_ADDRESS 5
#define DRM_IVPU_PARAM_CONTEXT_PRIORITY	    6
#define DRM_IVPU_PARAM_CONTEXT_ID	    7
#define DRM_IVPU_PARAM_FW_API_VERSION	    8
#define DRM_IVPU_PARAM_ENGINE_HEARTBEAT	    9
#define DRM_IVPU_PARAM_UNIQUE_INFERENCE_ID  10
#define DRM_IVPU_PARAM_TILE_CONFIG	    11
#define DRM_IVPU_PARAM_SKU		    12
#define DRM_IVPU_PARAM_CAPABILITIES	    13

#define DRM_IVPU_PLATFORM_TYPE_SILICON	    0

#define DRM_IVPU_CONTEXT_PRIORITY_IDLE	    0
#define DRM_IVPU_CONTEXT_PRIORITY_NORMAL    1
#define DRM_IVPU_CONTEXT_PRIORITY_FOCUS	    2
#define DRM_IVPU_CONTEXT_PRIORITY_REALTIME  3

/**
 * DRM_IVPU_CAP_METRIC_STREAMER
 *
 * Metric streamer support. Provides sampling of various hardware performance
 * metrics like DMA bandwidth and cache misses/hits. Can be used for profiling.
 */
#define DRM_IVPU_CAP_METRIC_STREAMER	1
/**
 * DRM_IVPU_CAP_DMA_MEMORY_RANGE
 *
 * The driver is able to allocate a separate memory range that is accessible
 * by hardware DMA.
 */
#define DRM_IVPU_CAP_DMA_MEMORY_RANGE	2

/**
 * struct drm_ivpu_param - Get/Set VPU parameters
 */
struct drm_ivpu_param {
	/**
	 * @param:
	 *
	 * Supported params:
	 *
	 * %DRM_IVPU_PARAM_DEVICE_ID:
	 * PCI Device ID of the VPU device (read-only)
	 *
	 * %DRM_IVPU_PARAM_DEVICE_REVISION:
	 * VPU device revision (read-only)
	 *
	 * %DRM_IVPU_PARAM_PLATFORM_TYPE:
	 * Returns %DRM_IVPU_PLATFORM_TYPE_SILICON on real hardware or a device-specific
	 * platform type when executing on a simulator or emulator (read-only)
	 *
	 * %DRM_IVPU_PARAM_CORE_CLOCK_RATE:
	 * Current PLL frequency (read-only)
	 *
	 * %DRM_IVPU_PARAM_NUM_CONTEXTS:
	 * Maximum number of simultaneously existing contexts (read-only)
	 *
	 * %DRM_IVPU_PARAM_CONTEXT_BASE_ADDRESS:
	 * Lowest VPU virtual address available in the current context (read-only)
	 *
	 * %DRM_IVPU_PARAM_CONTEXT_PRIORITY:
	 * Value of current context scheduling priority (read-write).
	 * See DRM_IVPU_CONTEXT_PRIORITY_* for possible values.
	 *
	 * %DRM_IVPU_PARAM_CONTEXT_ID:
	 * Current context ID, always greater than 0 (read-only)
	 *
	 * %DRM_IVPU_PARAM_FW_API_VERSION:
	 * Firmware API version array (read-only)
	 *
	 * %DRM_IVPU_PARAM_ENGINE_HEARTBEAT:
	 * Heartbeat value from an engine (read-only).
	 * The engine ID (e.g. DRM_IVPU_ENGINE_COMPUTE) is given via @index.
	 *
	 * %DRM_IVPU_PARAM_UNIQUE_INFERENCE_ID:
	 * Device-unique inference ID (read-only)
	 *
	 * %DRM_IVPU_PARAM_TILE_CONFIG:
	 * VPU tile configuration (read-only)
	 *
	 * %DRM_IVPU_PARAM_SKU:
	 * VPU SKU ID (read-only)
	 *
	 * %DRM_IVPU_PARAM_CAPABILITIES:
	 * Supported capabilities (read-only)
	 */
	__u32 param;

	/** @index: Index for params that have multiple instances */
	__u32 index;

	/** @value: Param value */
	__u64 value;
};
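
/*
 * Example (illustrative sketch, not part of the UAPI): reading a read-only
 * parameter with DRM_IOCTL_IVPU_GET_PARAM and raising the context scheduling
 * priority with DRM_IOCTL_IVPU_SET_PARAM. Assumes fd is an already opened
 * device file descriptor; the helper names are made up for the example.
 *
 *	#include <string.h>
 *	#include <sys/ioctl.h>
 *
 *	int vpu_get_device_id(int fd, __u64 *device_id)
 *	{
 *		struct drm_ivpu_param args;
 *
 *		memset(&args, 0, sizeof(args));
 *		args.param = DRM_IVPU_PARAM_DEVICE_ID;
 *		if (ioctl(fd, DRM_IOCTL_IVPU_GET_PARAM, &args))
 *			return -1;
 *		*device_id = args.value;
 *		return 0;
 *	}
 *
 *	int vpu_set_priority(int fd)
 *	{
 *		struct drm_ivpu_param args;
 *
 *		memset(&args, 0, sizeof(args));
 *		args.param = DRM_IVPU_PARAM_CONTEXT_PRIORITY;
 *		args.value = DRM_IVPU_CONTEXT_PRIORITY_FOCUS;
 *		return ioctl(fd, DRM_IOCTL_IVPU_SET_PARAM, &args);
 *	}
 */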

#define DRM_IVPU_BO_SHAVE_MEM  0x00000001
#define DRM_IVPU_BO_HIGH_MEM   DRM_IVPU_BO_SHAVE_MEM
#define DRM_IVPU_BO_MAPPABLE   0x00000002
#define DRM_IVPU_BO_DMA_MEM    0x00000004

#define DRM_IVPU_BO_CACHED     0x00000000
#define DRM_IVPU_BO_UNCACHED   0x00010000
#define DRM_IVPU_BO_WC	       0x00020000
#define DRM_IVPU_BO_CACHE_MASK 0x00030000

#define DRM_IVPU_BO_FLAGS \
	(DRM_IVPU_BO_HIGH_MEM | \
	 DRM_IVPU_BO_MAPPABLE | \
	 DRM_IVPU_BO_DMA_MEM | \
	 DRM_IVPU_BO_CACHE_MASK)

/**
 * struct drm_ivpu_bo_create - Create BO backed by SHMEM
 *
 * Create GEM buffer object allocated in SHMEM memory.
 */
struct drm_ivpu_bo_create {
	/** @size: The size in bytes of the allocated memory */
	__u64 size;

	/**
	 * @flags:
	 *
	 * Supported flags:
	 *
	 * %DRM_IVPU_BO_HIGH_MEM:
	 *
	 * Allocate the VPU address from the >4 GB range.
	 * A buffer object with a VPU address >4 GB can always be accessed by the
	 * VPU DMA engine, but some HW generations may not be able to access
	 * this memory from the firmware running on the VPU management processor.
	 * Suitable for input, output and some scratch buffers.
	 *
	 * %DRM_IVPU_BO_MAPPABLE:
	 *
	 * Buffer object can be mapped using mmap().
	 *
	 * %DRM_IVPU_BO_CACHED:
	 *
	 * Allocated BO will be cached on host side (WB) and snooped on the VPU side.
	 * This is the default caching mode.
	 *
	 * %DRM_IVPU_BO_UNCACHED:
	 *
	 * Allocated BO will not be cached on host side nor snooped on the VPU side.
	 *
	 * %DRM_IVPU_BO_WC:
	 *
	 * Allocated BO will use write combining buffer for writes but reads will be
	 * uncached.
	 */
	__u32 flags;

	/** @handle: Returned GEM object handle */
	__u32 handle;

	/** @vpu_addr: Returned VPU virtual address */
	__u64 vpu_addr;
};
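
/*
 * Example (illustrative sketch, not part of the UAPI): allocating a
 * host-mappable, write-combined buffer object in the >4 GB VPU address range.
 * The helper name and error handling are made up for the example.
 *
 *	#include <string.h>
 *	#include <sys/ioctl.h>
 *
 *	int vpu_bo_alloc(int fd, __u64 size, __u32 *handle, __u64 *vpu_addr)
 *	{
 *		struct drm_ivpu_bo_create args;
 *
 *		memset(&args, 0, sizeof(args));
 *		args.size = size;
 *		args.flags = DRM_IVPU_BO_HIGH_MEM | DRM_IVPU_BO_MAPPABLE |
 *			     DRM_IVPU_BO_WC;
 *		if (ioctl(fd, DRM_IOCTL_IVPU_BO_CREATE, &args))
 *			return -1;
 *		*handle = args.handle;
 *		*vpu_addr = args.vpu_addr;
 *		return 0;
 *	}
 */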

/**
 * struct drm_ivpu_bo_info - Query buffer object info
 */
struct drm_ivpu_bo_info {
	/** @handle: Handle of the queried BO */
	__u32 handle;

	/** @flags: Returned flags used to create the BO */
	__u32 flags;

	/** @vpu_addr: Returned VPU virtual address */
	__u64 vpu_addr;

	/**
	 * @mmap_offset:
	 *
	 * Returned offset to be used in mmap(). 0 in case the BO is not mappable.
	 */
	__u64 mmap_offset;

	/** @size: Returned GEM object size, aligned to PAGE_SIZE */
	__u64 size;
};
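
/*
 * Example (illustrative sketch, not part of the UAPI): mapping a BO that was
 * created with %DRM_IVPU_BO_MAPPABLE, using the @mmap_offset and @size
 * returned by DRM_IOCTL_IVPU_BO_INFO.
 *
 *	#include <string.h>
 *	#include <sys/ioctl.h>
 *	#include <sys/mman.h>
 *
 *	void *vpu_bo_map(int fd, __u32 handle, __u64 *size)
 *	{
 *		struct drm_ivpu_bo_info info;
 *		void *ptr;
 *
 *		memset(&info, 0, sizeof(info));
 *		info.handle = handle;
 *		if (ioctl(fd, DRM_IOCTL_IVPU_BO_INFO, &info))
 *			return NULL;
 *		if (!info.mmap_offset)
 *			return NULL;	// BO was not created as mappable
 *		ptr = mmap(NULL, info.size, PROT_READ | PROT_WRITE, MAP_SHARED,
 *			   fd, info.mmap_offset);
 *		if (ptr == MAP_FAILED)
 *			return NULL;
 *		*size = info.size;
 *		return ptr;
 *	}
 */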

/* drm_ivpu_submit engines */
#define DRM_IVPU_ENGINE_COMPUTE 0
#define DRM_IVPU_ENGINE_COPY    1

/**
 * struct drm_ivpu_submit - Submit commands to the VPU
 *
 * Execute a single command buffer on a given VPU engine.
 * Handles to all referenced buffer objects have to be provided in @buffers_ptr.
 *
 * User space may wait on job completion using %DRM_IVPU_BO_WAIT ioctl.
 */
struct drm_ivpu_submit {
	/**
	 * @buffers_ptr:
	 *
	 * A pointer to a u32 array of GEM handles of the BOs required for this job.
	 * The number of elements in the array must be equal to the value given by @buffer_count.
	 *
	 * The first BO is the command buffer. The rest of the array has to contain all
	 * BOs referenced from the command buffer.
	 */
	__u64 buffers_ptr;

	/** @buffer_count: Number of elements in the @buffers_ptr array */
	__u32 buffer_count;

	/**
	 * @engine: Select the engine this job should be executed on
	 *
	 * %DRM_IVPU_ENGINE_COMPUTE:
	 *
	 * Performs Deep Learning Neural Compute Inference Operations
	 *
	 * %DRM_IVPU_ENGINE_COPY:
	 *
	 * Performs memory copy operations to/from system memory allocated for VPU
	 */
	__u32 engine;

	/** @flags: Reserved for future use - must be zero */
	__u32 flags;

	/**
	 * @commands_offset:
	 *
	 * Offset inside the first buffer in @buffers_ptr containing commands
	 * to be executed. The offset has to be 8-byte aligned.
	 */
	__u32 commands_offset;
};
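
/*
 * Example (illustrative sketch, not part of the UAPI): submitting one command
 * buffer to the compute engine. bo_handles[0] is the command buffer handle,
 * followed by the handles of every BO the command buffer references. How the
 * command buffer contents are encoded is firmware-specific and not covered by
 * this header.
 *
 *	#include <stdint.h>
 *	#include <string.h>
 *	#include <sys/ioctl.h>
 *
 *	int vpu_submit(int fd, __u32 *bo_handles, __u32 bo_count,
 *		       __u32 cmd_offset)
 *	{
 *		struct drm_ivpu_submit args;
 *
 *		memset(&args, 0, sizeof(args));
 *		args.buffers_ptr = (__u64)(uintptr_t)bo_handles;
 *		args.buffer_count = bo_count;
 *		args.engine = DRM_IVPU_ENGINE_COMPUTE;
 *		args.commands_offset = cmd_offset;	// must be 8-byte aligned
 *		return ioctl(fd, DRM_IOCTL_IVPU_SUBMIT, &args);
 *	}
 */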

/* drm_ivpu_bo_wait job status codes */
#define DRM_IVPU_JOB_STATUS_SUCCESS 0

/**
 * struct drm_ivpu_bo_wait - Wait for BO to become inactive
 *
 * Blocks until a given buffer object becomes inactive.
 * With @timeout_ns set to 0 the ioctl returns immediately.
 */
struct drm_ivpu_bo_wait {
	/** @handle: Handle to the buffer object to be waited on */
	__u32 handle;

	/** @flags: Reserved for future use - must be zero */
	__u32 flags;

	/** @timeout_ns: Absolute timeout in nanoseconds (may be zero) */
	__s64 timeout_ns;

	/**
	 * @job_status:
	 *
	 * Job status code which is updated after the job is completed.
	 * %DRM_IVPU_JOB_STATUS_SUCCESS on success or a device-specific error code otherwise.
	 * Valid only if @handle points to a command buffer.
	 */
	__u32 job_status;

	/** @pad: Padding - must be zero */
	__u32 pad;
};
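
/*
 * Example (illustrative sketch, not part of the UAPI): waiting for a
 * previously submitted command buffer to finish. The absolute timeout is
 * assumed to be measured against CLOCK_MONOTONIC, following common DRM
 * practice; a zero timeout makes the ioctl return immediately.
 *
 *	#include <string.h>
 *	#include <sys/ioctl.h>
 *	#include <time.h>
 *
 *	int vpu_wait_job(int fd, __u32 cmdbuf_handle, __s64 rel_timeout_ns)
 *	{
 *		struct drm_ivpu_bo_wait args;
 *		struct timespec now;
 *
 *		clock_gettime(CLOCK_MONOTONIC, &now);
 *		memset(&args, 0, sizeof(args));
 *		args.handle = cmdbuf_handle;
 *		args.timeout_ns = (__s64)now.tv_sec * 1000000000 +
 *				  now.tv_nsec + rel_timeout_ns;
 *		if (ioctl(fd, DRM_IOCTL_IVPU_BO_WAIT, &args))
 *			return -1;
 *		return args.job_status == DRM_IVPU_JOB_STATUS_SUCCESS ? 0 : -1;
 *	}
 */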

#if defined(__cplusplus)
}
#endif

#endif /* __UAPI_IVPU_DRM_H__ */