/*
 * Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <assert.h>
#include <errno.h>
#include <pthread.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_helpers.h"
#include "drv_priv.h"
#include "external/virgl_hw.h"
#include "external/virgl_protocol.h"
#include "external/virtgpu_drm.h"
#include "util.h"
#include "virtgpu.h"

#define PIPE_TEXTURE_2D 2

#define MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS 15
#define MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE (1 << (MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS - 1))
#define MESA_LLVMPIPE_TILE_ORDER 6
#define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER)

// This comes from a combination of SwiftShader's VkPhysicalDeviceLimits::maxFramebufferWidth and
// VkPhysicalDeviceLimits::maxImageDimension2D (see https://crrev.com/c/1917130).
#define ANGLE_ON_SWIFTSHADER_MAX_TEXTURE_2D_SIZE 8192

#ifndef MIN
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#endif
#define VIRGL_2D_MAX_TEXTURE_2D_SIZE \
	MIN(ANGLE_ON_SWIFTSHADER_MAX_TEXTURE_2D_SIZE, MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE)
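// (With the current limits this evaluates to MIN(8192, 16384) == 8192, i.e. the
// ANGLE-on-SwiftShader limit is the binding one.)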

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
						  DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
						  DRM_FORMAT_XRGB8888 };

static const uint32_t dumb_texture_source_formats[] = {
	DRM_FORMAT_R8,		DRM_FORMAT_R16,		DRM_FORMAT_YVU420,
	DRM_FORMAT_NV12,	DRM_FORMAT_NV21,	DRM_FORMAT_YVU420_ANDROID,
	DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

static const uint32_t texture_source_formats[] = {
	DRM_FORMAT_NV21,	   DRM_FORMAT_R8,	    DRM_FORMAT_R16,	DRM_FORMAT_RG88,
	DRM_FORMAT_YVU420_ANDROID, DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

extern struct virtgpu_param params[];

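/*
 * Cache entry for host-resolved blob layouts (see virgl_blob_get_host_format()).
 * Lookups match on width, height, use_flags and the translated virgl format, so
 * two DRM formats that map to the same virgl format share an entry.
 */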
struct virgl_blob_metadata_cache {
	struct lru_entry entry;
	struct bo_metadata meta;
};

#define lru_entry_to_metadata(entry) ((struct virgl_blob_metadata_cache *)(void*)(entry))

#define MAX_CACHED_FORMATS 128

struct virgl_priv {
	int caps_is_v2;
	union virgl_caps caps;
	int host_gbm_enabled;
	atomic_int next_blob_id;

	pthread_mutex_t host_blob_format_lock;
	struct lru virgl_blob_metadata_cache;
};

static uint32_t translate_format(uint32_t drm_fourcc)
{
	switch (drm_fourcc) {
	case DRM_FORMAT_BGR888:
	case DRM_FORMAT_RGB888:
		return VIRGL_FORMAT_R8G8B8_UNORM;
	case DRM_FORMAT_XRGB8888:
		return VIRGL_FORMAT_B8G8R8X8_UNORM;
	case DRM_FORMAT_ARGB8888:
		return VIRGL_FORMAT_B8G8R8A8_UNORM;
	case DRM_FORMAT_XBGR8888:
		return VIRGL_FORMAT_R8G8B8X8_UNORM;
	case DRM_FORMAT_ABGR8888:
		return VIRGL_FORMAT_R8G8B8A8_UNORM;
	case DRM_FORMAT_ABGR16161616F:
		return VIRGL_FORMAT_R16G16B16A16_FLOAT;
	case DRM_FORMAT_ABGR2101010:
		return VIRGL_FORMAT_R10G10B10A2_UNORM;
	case DRM_FORMAT_RGB565:
		return VIRGL_FORMAT_B5G6R5_UNORM;
	case DRM_FORMAT_R8:
		return VIRGL_FORMAT_R8_UNORM;
	case DRM_FORMAT_R16:
		return VIRGL_FORMAT_R16_UNORM;
	case DRM_FORMAT_RG88:
		return VIRGL_FORMAT_R8G8_UNORM;
	case DRM_FORMAT_NV12:
		return VIRGL_FORMAT_NV12;
	case DRM_FORMAT_NV21:
		return VIRGL_FORMAT_NV21;
	case DRM_FORMAT_P010:
		return VIRGL_FORMAT_P010;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		return VIRGL_FORMAT_YV12;
	default:
		drv_loge("Unhandled format:%d\n", drm_fourcc);
		return 0;
	}
}

static bool virgl_bitmask_supports_format(struct virgl_supported_format_mask *supported,
					  uint32_t drm_format)
{
	uint32_t virgl_format = translate_format(drm_format);
	if (!virgl_format)
		return false;

	uint32_t bitmask_index = virgl_format / 32;
	uint32_t bit_index = virgl_format % 32;
	return supported->bitmask[bitmask_index] & (1 << bit_index);
}

// The metadata generated here for emulated buffers is slightly different than the metadata
// generated by drv_bo_from_format. In order to simplify transfers in the flush and invalidate
// functions below, the emulated buffers are oversized. For example, ignoring stride alignment
// requirements to demonstrate, a 6x6 YUV420 image buffer might have the following layout from
// drv_bo_from_format:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U | U | U | U |
// | U | U | U | V | V | V |
// | V | V | V | V | V | V |
//
// where each plane immediately follows the previous plane in memory. This layout makes it
// difficult to compute the transfers needed for example when the middle 2x2 region of the
// image is locked and needs to be flushed/invalidated.
//
// Emulated multi-plane buffers instead have a layout of:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
//
// where each plane is placed as a sub-image (albeit with a very large stride) in order to
// simplify transfers into 3 sub-image transfers for the above example.
//
// Additional note: the V-plane is not placed to the right of the U-plane due to some
// observed failures in media framework code which assumes the V-plane is not
// "row-interlaced" with the U-plane.
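//
// Worked example (illustrative): for a 6x6 YVU420 buffer the code below yields
// width = ALIGN(6, 32) = 32, height = 6 + 2 * 3 = 12, all strides = 32,
// offsets = { 0, 192, 288 }, sizes = { 192, 96, 96 } and total_size = 32 * 12 = 384.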
static void virgl_get_emulated_metadata(const struct bo *bo, struct bo_metadata *metadata)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	uint32_t original_width = bo->meta.width;
	uint32_t original_height = bo->meta.height;

	metadata->format = DRM_FORMAT_R8;
	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		metadata->num_planes = 2;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = original_width;
		metadata->height = y_plane_height + c_plane_height;

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * y_plane_height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		metadata->num_planes = 3;

		y_plane_height = original_height;
		c_plane_height = DIV_ROUND_UP(original_height, 2);

		metadata->width = ALIGN(original_width, 32);
		metadata->height = y_plane_height + (2 * c_plane_height);

		// Y-plane (full resolution)
		metadata->strides[0] = metadata->width;
		metadata->offsets[0] = 0;
		metadata->sizes[0] = metadata->width * original_height;

		// Cb-plane (half resolution, placed below Y-plane)
		metadata->strides[1] = metadata->width;
		metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
		metadata->sizes[1] = metadata->width * c_plane_height;

		// Cr-plane (half resolution, placed below Cb-plane)
		metadata->strides[2] = metadata->width;
		metadata->offsets[2] = metadata->offsets[1] + metadata->sizes[1];
		metadata->sizes[2] = metadata->width * c_plane_height;

		metadata->total_size = metadata->width * metadata->height;
		break;
	default:
		break;
	}
}

struct virtio_transfers_params {
	size_t xfers_needed;
	struct rectangle xfer_boxes[DRV_MAX_PLANES];
};

static void virgl_get_emulated_transfers_params(const struct bo *bo,
						const struct rectangle *transfer_box,
						struct virtio_transfers_params *xfer_params)
{
	uint32_t y_plane_height;
	uint32_t c_plane_height;
	struct bo_metadata emulated_metadata;

	if (transfer_box->x == 0 && transfer_box->y == 0 && transfer_box->width == bo->meta.width &&
	    transfer_box->height == bo->meta.height) {
		virgl_get_emulated_metadata(bo, &emulated_metadata);

		xfer_params->xfers_needed = 1;
		xfer_params->xfer_boxes[0].x = 0;
		xfer_params->xfer_boxes[0].y = 0;
		xfer_params->xfer_boxes[0].width = emulated_metadata.width;
		xfer_params->xfer_boxes[0].height = emulated_metadata.height;

		return;
	}

	switch (bo->meta.format) {
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
		// Bi-planar
		xfer_params->xfers_needed = 2;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// CbCr-plane (half resolution, interleaved, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = transfer_box->width;
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YVU420_ANDROID:
		// Tri-planar
		xfer_params->xfers_needed = 3;

		y_plane_height = bo->meta.height;
		c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

		// Y-plane (full resolution)
		xfer_params->xfer_boxes[0].x = transfer_box->x;
		xfer_params->xfer_boxes[0].y = transfer_box->y;
		xfer_params->xfer_boxes[0].width = transfer_box->width;
		xfer_params->xfer_boxes[0].height = transfer_box->height;

		// Cb-plane (half resolution, placed below Y-plane)
		xfer_params->xfer_boxes[1].x = transfer_box->x;
		xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
		xfer_params->xfer_boxes[1].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

		// Cr-plane (half resolution, placed below Cb-plane)
		xfer_params->xfer_boxes[2].x = transfer_box->x;
		xfer_params->xfer_boxes[2].y = transfer_box->y + y_plane_height + c_plane_height;
		xfer_params->xfer_boxes[2].width = DIV_ROUND_UP(transfer_box->width, 2);
		xfer_params->xfer_boxes[2].height = DIV_ROUND_UP(transfer_box->height, 2);

		break;
	}
}

static bool virgl_supports_combination_natively(struct driver *drv, uint32_t drm_format,
						uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (priv->caps.max_version == 0)
		return true;

	if ((use_flags & BO_USE_RENDERING) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.render, drm_format))
		return false;

	if ((use_flags & BO_USE_TEXTURE) &&
	    !virgl_bitmask_supports_format(&priv->caps.v1.sampler, drm_format))
		return false;

	if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
	    !virgl_bitmask_supports_format(&priv->caps.v2.scanout, drm_format))
		return false;

	return true;
}

// For virtio backends that do not support formats natively (e.g. multi-planar formats are not
// supported in virglrenderer when gbm is unavailable on the host machine), this reports whether
// the format and usage combination can instead be handled as a blob (byte buffer).
static bool virgl_supports_combination_through_emulation(struct driver *drv, uint32_t drm_format,
							 uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// Only enable emulation on non-gbm virtio backends.
	if (priv->host_gbm_enabled)
		return false;

	if (use_flags & (BO_USE_RENDERING | BO_USE_SCANOUT))
		return false;

	if (!virgl_supports_combination_natively(drv, DRM_FORMAT_R8, use_flags))
		return false;

	return drm_format == DRM_FORMAT_NV12 || drm_format == DRM_FORMAT_NV21 ||
	       drm_format == DRM_FORMAT_YVU420 || drm_format == DRM_FORMAT_YVU420_ANDROID;
}

// Adds the given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combination(struct driver *drv, uint32_t drm_format,
				  struct format_metadata *metadata, uint64_t use_flags)
{
	if (params[param_3d].value) {
		if ((use_flags & BO_USE_SCANOUT) &&
		    !virgl_supports_combination_natively(drv, drm_format, BO_USE_SCANOUT)) {
			drv_logi("Strip scanout on format: %d\n", drm_format);
			use_flags &= ~BO_USE_SCANOUT;
		}

		if (!virgl_supports_combination_natively(drv, drm_format, use_flags) &&
		    !virgl_supports_combination_through_emulation(drv, drm_format, use_flags)) {
			drv_logi("Skipping unsupported combination format:%d\n", drm_format);
			return;
		}
	}

	drv_add_combination(drv, drm_format, metadata, use_flags);
}

// Adds each given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combinations(struct driver *drv, const uint32_t *drm_formats,
				   uint32_t num_formats, struct format_metadata *metadata,
				   uint64_t use_flags)
{
	uint32_t i;

	for (i = 0; i < num_formats; i++)
		virgl_add_combination(drv, drm_formats[i], metadata, use_flags);
}

static int virgl_2d_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
				   uint64_t use_flags)
{
	if (bo->meta.format != DRM_FORMAT_R8) {
		width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE);
		height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE);
	}

	return drv_dumb_bo_create_ex(bo, width, height, format, use_flags, BO_QUIRK_DUMB32BPP);
}

static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind,
			       uint32_t virgl_bind)
{
	if ((*flag) & check_flag) {
		(*flag) &= ~check_flag;
		(*bind) |= virgl_bind;
	}
}

static uint32_t compute_virgl_bind_flags(uint64_t use_flags)
{
	/* In crosvm, VIRGL_BIND_SHARED means minigbm will allocate, not virglrenderer. */
	uint32_t bind = VIRGL_BIND_SHARED;

	handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW);
	handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET);
	handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT);
	handle_flag(&use_flags, BO_USE_CURSOR, &bind, VIRGL_BIND_CURSOR);
	handle_flag(&use_flags, BO_USE_LINEAR, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_SENSOR_DIRECT_DATA, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_GPU_DATA_BUFFER, &bind, VIRGL_BIND_LINEAR);
	handle_flag(&use_flags, BO_USE_FRONT_RENDERING, &bind, VIRGL_BIND_LINEAR);

	if (use_flags & BO_USE_PROTECTED) {
		handle_flag(&use_flags, BO_USE_PROTECTED, &bind, VIRGL_BIND_MINIGBM_PROTECTED);
	} else {
		// Make sure we don't set both flags, since that could be mistaken for
		// protected. Give OFTEN priority over RARELY.
		if (use_flags & BO_USE_SW_READ_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_READ_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_READ_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_READ_RARELY);
		}
		if (use_flags & BO_USE_SW_WRITE_OFTEN) {
			handle_flag(&use_flags, BO_USE_SW_WRITE_OFTEN, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_OFTEN);
		} else {
			handle_flag(&use_flags, BO_USE_SW_WRITE_RARELY, &bind,
				    VIRGL_BIND_MINIGBM_SW_WRITE_RARELY);
		}
	}

	handle_flag(&use_flags, BO_USE_CAMERA_WRITE, &bind, VIRGL_BIND_MINIGBM_CAMERA_WRITE);
	handle_flag(&use_flags, BO_USE_CAMERA_READ, &bind, VIRGL_BIND_MINIGBM_CAMERA_READ);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_DECODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_DECODER);
	handle_flag(&use_flags, BO_USE_HW_VIDEO_ENCODER, &bind,
		    VIRGL_BIND_MINIGBM_HW_VIDEO_ENCODER);

	if (use_flags)
		drv_loge("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags);

	return bind;
}

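/* Example (illustrative): compute_virgl_bind_flags(BO_USE_TEXTURE | BO_USE_SW_READ_OFTEN)
 * returns VIRGL_BIND_SHARED | VIRGL_BIND_SAMPLER_VIEW | VIRGL_BIND_MINIGBM_SW_READ_OFTEN. */
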
static int virgl_3d_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			      uint64_t use_flags)
{
	int ret;
	size_t i;
	uint32_t stride;
	struct drm_virtgpu_resource_create res_create = { 0 };
	struct bo_metadata emulated_metadata;

	if (virgl_supports_combination_natively(bo->drv, format, use_flags)) {
		stride = drv_stride_from_format(format, width, 0);
		drv_bo_from_format(bo, stride, height, format);
	} else {
		assert(virgl_supports_combination_through_emulation(bo->drv, format, use_flags));

		virgl_get_emulated_metadata(bo, &emulated_metadata);

		format = emulated_metadata.format;
		width = emulated_metadata.width;
		height = emulated_metadata.height;
		for (i = 0; i < emulated_metadata.num_planes; i++) {
			bo->meta.strides[i] = emulated_metadata.strides[i];
			bo->meta.offsets[i] = emulated_metadata.offsets[i];
			bo->meta.sizes[i] = emulated_metadata.sizes[i];
		}
		bo->meta.total_size = emulated_metadata.total_size;
	}

	/*
	 * Setting the target is intended to ensure this resource gets bound as a 2D
	 * texture in the host renderer's GL state. All of these resource properties are
	 * sent unchanged by the kernel to the host, which in turn sends them unchanged to
	 * virglrenderer. When virglrenderer makes a resource, it will convert the target
	 * enum to the equivalent one in GL and then bind the resource to that target.
	 */

	res_create.target = PIPE_TEXTURE_2D;
	res_create.format = translate_format(format);
	res_create.bind = compute_virgl_bind_flags(use_flags);
	res_create.width = width;
	res_create.height = height;

	/* For virgl 3D */
	res_create.depth = 1;
	res_create.array_size = 1;
	res_create.last_level = 0;
	res_create.nr_samples = 0;

	res_create.size = ALIGN(bo->meta.total_size, PAGE_SIZE); // PAGE_SIZE = 0x1000
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
	if (ret) {
		drv_loge("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = res_create.bo_handle;

	return 0;
}

static void *virgl_3d_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
	int ret;
	struct drm_virtgpu_map gem_map = { 0 };

	gem_map.handle = bo->handles[0].u32;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map);
	if (ret) {
		drv_loge("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno));
		return MAP_FAILED;
	}

	vma->length = bo->meta.total_size;
	return mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
		    gem_map.offset);
}

static uint32_t virgl_3d_get_max_texture_2d_size(struct driver *drv)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	if (priv->caps.v2.max_texture_2d_size)
		return priv->caps.v2.max_texture_2d_size;

	return UINT32_MAX;
}

static int virgl_get_caps(struct driver *drv, union virgl_caps *caps, int *caps_is_v2)
{
	int ret;
	struct drm_virtgpu_get_caps cap_args = { 0 };

	*caps_is_v2 = 0;
	cap_args.addr = (unsigned long long)caps;
	if (params[param_capset_fix].value) {
		*caps_is_v2 = 1;
		cap_args.cap_set_id = 2;
		cap_args.size = sizeof(union virgl_caps);
	} else {
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);
	}

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
	if (ret) {
		drv_loge("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
		*caps_is_v2 = 0;

		// Fallback to v1
		cap_args.cap_set_id = 1;
		cap_args.size = sizeof(struct virgl_caps_v1);

		ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
		if (ret)
			drv_loge("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
	}

	return ret;
}

static void virgl_init_params_and_caps(struct driver *drv)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
	if (params[param_3d].value) {
		virgl_get_caps(drv, &priv->caps, &priv->caps_is_v2);

		// We use two criteria to determine whether host minigbm is used on the host for
		// swapchain allocations.
		//
		// (1) Host minigbm is only available via virglrenderer, and only virglrenderer
		// advertises capabilities.
		// (2) Only host minigbm doesn't emulate YUV formats. Checking this is a bit of a
		// proxy, but it works.
		priv->host_gbm_enabled =
		    priv->caps.max_version > 0 &&
		    virgl_supports_combination_natively(drv, DRM_FORMAT_NV12, BO_USE_TEXTURE);
	}
}

static int virgl_init(struct driver *drv)
{
	struct virgl_priv *priv;

	priv = calloc(1, sizeof(*priv));
	if (!priv)
		return -ENOMEM;

	int ret = pthread_mutex_init(&priv->host_blob_format_lock, NULL);
	if (ret) {
		free(priv);
		return ret;
	}

	drv->priv = priv;
	lru_init(&priv->virgl_blob_metadata_cache, MAX_CACHED_FORMATS);

	virgl_init_params_and_caps(drv);

	if (params[param_3d].value) {
		/* This doesn't mean host can scanout everything, it just means host
		 * hypervisor can show it. */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		virgl_add_combinations(drv, texture_source_formats,
				       ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		/* NV12 with scanout must flow through virgl_add_combination, so that the native
		 * support is checked and scanout use_flag can be conditionally stripped. */
		virgl_add_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				      BO_USE_TEXTURE_MASK | BO_USE_CAMERA_READ |
					  BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
					  BO_USE_HW_VIDEO_ENCODER | BO_USE_SCANOUT);
	} else {
		/* Virtio primary plane only allows this format. */
		virgl_add_combination(drv, DRM_FORMAT_XRGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_SCANOUT);
		/* Virtio cursor plane only allows this format and Chrome cannot live without
		 * ARGB888 renderable format. */
		virgl_add_combination(drv, DRM_FORMAT_ARGB8888, &LINEAR_METADATA,
				      BO_USE_RENDER_MASK | BO_USE_CURSOR);
		/* Android needs more, but they cannot be bound as scanouts anymore after
		 * "drm/virtio: fix DRM_FORMAT_* handling" */
		virgl_add_combinations(drv, render_target_formats,
				       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
				       BO_USE_RENDER_MASK);
		virgl_add_combinations(drv, dumb_texture_source_formats,
				       ARRAY_SIZE(dumb_texture_source_formats), &LINEAR_METADATA,
				       BO_USE_TEXTURE_MASK);
		drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	/* Android CTS tests require this. */
	virgl_add_combination(drv, DRM_FORMAT_RGB888, &LINEAR_METADATA, BO_USE_SW_MASK);
	virgl_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK);
	/* Android Camera CTS tests require this. Additionally, the scanout usage is needed for
	 * Camera preview and is expected to be conditionally stripped by virgl_add_combination
	 * when not natively supported and instead handled by HWComposer. */
	virgl_add_combination(drv, DRM_FORMAT_P010, &LINEAR_METADATA,
			      BO_USE_SCANOUT | BO_USE_TEXTURE | BO_USE_SW_MASK |
				  BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	/* Android VTS sensors hal tests require BO_USE_SENSOR_DIRECT_DATA. */
	drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
				   BO_USE_HW_VIDEO_ENCODER | BO_USE_SENSOR_DIRECT_DATA |
				   BO_USE_GPU_DATA_BUFFER);

	if (!priv->host_gbm_enabled) {
		drv_modify_combination(drv, DRM_FORMAT_ABGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_R16, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
		drv_modify_combination(drv, DRM_FORMAT_YVU420_ANDROID, &LINEAR_METADATA,
				       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
					   BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
	}

	return drv_modify_linear_combinations(drv);
}

static void virgl_close(struct driver *drv)
{
	free(drv->priv);
	drv->priv = NULL;
}

static uint32_t blob_flags_from_use_flags(uint32_t use_flags)
{
	uint32_t blob_flags = VIRTGPU_BLOB_FLAG_USE_SHAREABLE;
	if (use_flags & (BO_USE_SW_MASK | BO_USE_GPU_DATA_BUFFER))
		blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE;

	// For now, all blob use cases are cross device. When we add wider
	// support for blobs, we can revisit making this unconditional.
	blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE;

	return blob_flags;
}

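/* Example (illustrative): blob_flags_from_use_flags(BO_USE_SW_READ_OFTEN) returns
 * VIRTGPU_BLOB_FLAG_USE_SHAREABLE | VIRTGPU_BLOB_FLAG_USE_MAPPABLE |
 * VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE. */
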
static bool virgl_blob_metadata_eq(struct lru_entry *entry, void *data)
{
	struct virgl_blob_metadata_cache *e = lru_entry_to_metadata(entry);
	struct bo_metadata *meta = data;
	uint32_t virgl_format1 = translate_format(e->meta.format);
	uint32_t virgl_format2 = translate_format(meta->format);

	return e->meta.height == meta->height && e->meta.width == meta->width &&
	       e->meta.use_flags == meta->use_flags && virgl_format1 == virgl_format2;
}

static int virgl_blob_do_create(struct driver *drv, uint32_t width, uint32_t height,
				uint32_t use_flags, uint32_t virgl_format, uint32_t total_size,
				uint32_t *bo_handle)
{
	int ret;
	uint32_t cur_blob_id;
	uint32_t cmd[VIRGL_PIPE_RES_CREATE_SIZE + 1] = { 0 };
	struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
	uint32_t virgl_bind_flags = compute_virgl_bind_flags(use_flags);
	uint32_t blob_flags = blob_flags_from_use_flags(use_flags);

	cur_blob_id = atomic_fetch_add(&priv->next_blob_id, 1);

	cmd[0] = VIRGL_CMD0(VIRGL_CCMD_PIPE_RESOURCE_CREATE, 0, VIRGL_PIPE_RES_CREATE_SIZE);
	cmd[VIRGL_PIPE_RES_CREATE_TARGET] = PIPE_TEXTURE_2D;
	cmd[VIRGL_PIPE_RES_CREATE_WIDTH] = width;
	cmd[VIRGL_PIPE_RES_CREATE_HEIGHT] = height;
	cmd[VIRGL_PIPE_RES_CREATE_FORMAT] = virgl_format;
	cmd[VIRGL_PIPE_RES_CREATE_BIND] = virgl_bind_flags;
	cmd[VIRGL_PIPE_RES_CREATE_DEPTH] = 1;
	cmd[VIRGL_PIPE_RES_CREATE_BLOB_ID] = cur_blob_id;

	drm_rc_blob.cmd = (uint64_t)&cmd;
	drm_rc_blob.cmd_size = 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1);
	drm_rc_blob.size = total_size;
	drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST3D;
	drm_rc_blob.blob_flags = blob_flags;
	drm_rc_blob.blob_id = cur_blob_id;

	ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);
	if (ret < 0) {
		drv_loge("DRM_VIRTGPU_RESOURCE_CREATE_BLOB failed with %s\n", strerror(errno));
		return -errno;
	}

	*bo_handle = drm_rc_blob.bo_handle;
	return 0;
}

// Queries the host layout for the requested buffer metadata.
//
// Of particular interest is total_size. This value is passed to the kernel when creating
// a buffer via drm_virtgpu_resource_create_blob.size, to specify how much "vram" to
// allocate for use when exposing the host buffer to the guest. As such, we need to know
// this value before allocating a buffer to ensure that the full host buffer is actually
// visible to the guest.
//
// Note that we can't reuse these test buffers as actual allocations because our guess for
// total_size is insufficient if width!=stride or padding!=0.
static int virgl_blob_get_host_format(struct driver *drv, struct bo_metadata *meta)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
	int num_planes = drv_num_planes_from_format(meta->format);

	pthread_mutex_lock(&priv->host_blob_format_lock);
	if (meta->format == DRM_FORMAT_R8) {
		meta->offsets[0] = 0;
		meta->sizes[0] = meta->width;
		meta->strides[0] = meta->width;
		meta->total_size = meta->width;
	} else {
		uint32_t virgl_format = translate_format(meta->format);
		struct virgl_blob_metadata_cache *entry;

		entry = lru_entry_to_metadata(
		    lru_find(&priv->virgl_blob_metadata_cache, virgl_blob_metadata_eq, meta));

		if (!entry) {
			uint32_t total_size = 0;
			for (int i = 0; i < num_planes; i++) {
				uint32_t stride =
				    drv_stride_from_format(meta->format, meta->width, i);
				total_size +=
				    drv_size_from_format(meta->format, stride, meta->height, i);
			}

			uint32_t handle;
			int ret = virgl_blob_do_create(drv, meta->width, meta->height,
						       meta->use_flags, virgl_format, total_size,
						       &handle);
			if (ret) {
				pthread_mutex_unlock(&priv->host_blob_format_lock);
				return ret;
			}

			struct drm_virtgpu_resource_info_cros info = { 0 };
			info.bo_handle = handle;
			info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED;
			int info_ret =
			    drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &info);

			struct drm_gem_close gem_close = { 0 };
			gem_close.handle = handle;
			int close_ret = drmIoctl(drv->fd, DRM_IOCTL_GEM_CLOSE, &gem_close);
			if (close_ret)
				drv_loge("DRM_IOCTL_GEM_CLOSE failed (handle=%x) error %d\n",
					 handle, close_ret);

			if (info_ret) {
				pthread_mutex_unlock(&priv->host_blob_format_lock);
				drv_loge("Getting resource info failed with %s\n",
					 strerror(errno));
				return info_ret;
			}

			entry = calloc(1, sizeof(*entry));
			if (!entry) {
				pthread_mutex_unlock(&priv->host_blob_format_lock);
				return -ENOMEM;
			}
			entry->meta = *meta;

			for (int i = 0; i < num_planes; i++) {
				entry->meta.strides[i] = info.strides[i];
				entry->meta.sizes[i] =
				    info.strides[i] *
				    drv_height_from_format(meta->format, meta->height, i);
				entry->meta.offsets[i] = info.offsets[i];
			}
			entry->meta.total_size = entry->meta.offsets[num_planes - 1] +
						 entry->meta.sizes[num_planes - 1];

			lru_insert(&priv->virgl_blob_metadata_cache, &entry->entry);
		}

		memcpy(meta->offsets, entry->meta.offsets, sizeof(meta->offsets));
		memcpy(meta->sizes, entry->meta.sizes, sizeof(meta->sizes));
		memcpy(meta->strides, entry->meta.strides, sizeof(meta->strides));
		meta->total_size = entry->meta.total_size;
	}
	pthread_mutex_unlock(&priv->host_blob_format_lock);

	meta->total_size = ALIGN(meta->total_size, PAGE_SIZE);
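	/* The blob flags are stashed in the tiling field; virgl_bo_invalidate() and
	 * virgl_bo_flush() test VIRTGPU_BLOB_FLAG_USE_MAPPABLE there to skip explicit
	 * transfers for host-visible blob resources. */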
	meta->tiling = blob_flags_from_use_flags(meta->use_flags);

	return 0;
}

static int virgl_bo_create_blob(struct driver *drv, struct bo *bo)
{
	int ret;
	uint32_t virgl_format = translate_format(bo->meta.format);
	uint32_t bo_handle;

	ret = virgl_blob_get_host_format(drv, &bo->meta);
	if (ret)
		return ret;

	ret = virgl_blob_do_create(drv, bo->meta.width, bo->meta.height, bo->meta.use_flags,
				   virgl_format, bo->meta.total_size, &bo_handle);
	if (ret)
		return ret;

	for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
		bo->handles[plane].u32 = bo_handle;

	return 0;
}

static bool should_use_blob(struct driver *drv, uint32_t format, uint64_t use_flags)
{
	struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

	// TODO(gurchetansingh): remove once all minigbm users are blob-safe
#ifndef VIRTIO_GPU_NEXT
	return false;
#endif

	// Only use blob when host gbm is available
	if (!priv->host_gbm_enabled)
		return false;

	// Use regular resources if only the GPU needs efficient access. Blob resource is a better
	// fit for BO_USE_GPU_DATA_BUFFER which is mapped to VIRGL_BIND_LINEAR.
	if (!(use_flags & (BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN | BO_USE_LINEAR |
			   BO_USE_NON_GPU_HW | BO_USE_GPU_DATA_BUFFER)))
		return false;

	switch (format) {
	case DRM_FORMAT_R8:
		// Formats with strictly defined strides are supported
		return true;
	case DRM_FORMAT_YVU420_ANDROID:
	case DRM_FORMAT_NV12:
		// Zero copy buffers are exposed for guest software access via a persistent
		// mapping, with no flush/invalidate messages. However, the virtio-video
		// device relies on transfers to/from the host waiting on implicit fences in
		// the host kernel to synchronize with hardware output. As such, we can only
		// use zero copy if the guest doesn't need software access or if we're encoder
		// input.
		return (use_flags & BO_USE_SW_MASK) == 0 || (use_flags & BO_USE_HW_VIDEO_ENCODER);
David Stevensd3f07bd2020-09-25 18:52:26 +0900906 default:
907 return false;
908 }
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -0700909}
910
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800911static int virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
912 uint64_t use_flags)
Lepton Wu249e8632018-04-05 12:50:03 -0700913{
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800914 if (params[param_resource_blob].value && params[param_host_visible].value &&
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -0700915 should_use_blob(bo->drv, format, use_flags))
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800916 return virgl_bo_create_blob(bo->drv, bo);
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -0700917
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800918 if (params[param_3d].value)
919 return virgl_3d_bo_create(bo, width, height, format, use_flags);
Lepton Wu249e8632018-04-05 12:50:03 -0700920 else
Jason Macnakc06cc9c2021-10-06 10:16:19 -0700921 return virgl_2d_dumb_bo_create(bo, width, height, format, use_flags);
Lepton Wu249e8632018-04-05 12:50:03 -0700922}
923
Nicholas Bishopdbe7f032022-08-29 13:29:20 -0400924static int virgl_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
Fei Shaofc3146f2022-11-02 16:18:43 +0800925 uint32_t format, const uint64_t *modifiers,
926 uint32_t count)
Nicholas Bishopdbe7f032022-08-29 13:29:20 -0400927{
928 uint64_t use_flags = 0;
929
930 for (uint32_t i = 0; i < count; i++) {
931 if (modifiers[i] == DRM_FORMAT_MOD_LINEAR) {
932 return virgl_bo_create(bo, width, height, format, use_flags);
933 }
934 }
935
936 return -EINVAL;
937}
938
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800939static int virgl_bo_destroy(struct bo *bo)
Lepton Wu249e8632018-04-05 12:50:03 -0700940{
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800941 if (params[param_3d].value)
Lepton Wu249e8632018-04-05 12:50:03 -0700942 return drv_gem_bo_destroy(bo);
943 else
944 return drv_dumb_bo_destroy(bo);
945}
946
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800947static void *virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
Lepton Wu249e8632018-04-05 12:50:03 -0700948{
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800949 if (params[param_3d].value)
950 return virgl_3d_bo_map(bo, vma, plane, map_flags);
Lepton Wu249e8632018-04-05 12:50:03 -0700951 else
952 return drv_dumb_bo_map(bo, vma, plane, map_flags);
953}
954
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800955static int virgl_bo_invalidate(struct bo *bo, struct mapping *mapping)
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700956{
957 int ret;
Jason Macnak1de7f662020-01-24 15:05:57 -0800958 size_t i;
Gurchetan Singh99644382020-10-07 15:28:11 -0700959 struct drm_virtgpu_3d_transfer_from_host xfer = { 0 };
960 struct drm_virtgpu_3d_wait waitcmd = { 0 };
Jason Macnak1de7f662020-01-24 15:05:57 -0800961 struct virtio_transfers_params xfer_params;
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800962 struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;
David Stevens9fe8c202020-12-21 18:47:55 +0900963 uint64_t host_write_flags;
Lepton Wu249e8632018-04-05 12:50:03 -0700964
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800965 if (!params[param_3d].value)
Lepton Wu249e8632018-04-05 12:50:03 -0700966 return 0;
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700967
David Stevens9fe8c202020-12-21 18:47:55 +0900968 // Invalidate is only necessary if the host writes to the buffer. The encoder and
969 // decoder flags don't differentiate between input and output buffers, but we can
970 // use the format to determine whether this buffer could be encoder/decoder output.
Jason Macnakf08bb292022-06-02 12:42:02 -0700971 host_write_flags = BO_USE_RENDERING | BO_USE_CAMERA_WRITE | BO_USE_GPU_DATA_BUFFER;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800972 if (bo->meta.format == DRM_FORMAT_R8)
David Stevens9fe8c202020-12-21 18:47:55 +0900973 host_write_flags |= BO_USE_HW_VIDEO_ENCODER;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800974 else
David Stevens9fe8c202020-12-21 18:47:55 +0900975 host_write_flags |= BO_USE_HW_VIDEO_DECODER;
Gurchetan Singhcadc54f2021-02-01 12:03:11 -0800976
David Stevens9fe8c202020-12-21 18:47:55 +0900977 if ((bo->meta.use_flags & host_write_flags) == 0)
David Stevens4d5358d2019-10-24 14:59:31 +0900978 return 0;
979
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800980 if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -0700981 return 0;
982
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700983 xfer.bo_handle = mapping->vma->handle;
Zach Reizner85c4c5f2017-10-04 13:15:57 -0700984
Gurchetan Singh1b57fe22020-05-05 09:18:22 -0700985 if (mapping->rect.x || mapping->rect.y) {
Gurchetan Singh1b57fe22020-05-05 09:18:22 -0700986 /*
987 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
988 * images
989 */
990 if (bo->meta.num_planes == 1) {
991 xfer.offset =
992 (bo->meta.strides[0] * mapping->rect.y) +
993 drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
994 }
995 }
996
David Stevensbaab6c82020-02-26 17:14:43 +0900997 if ((bo->meta.use_flags & BO_USE_RENDERING) == 0) {
Jason Macnak1de7f662020-01-24 15:05:57 -0800998 // Unfortunately, the kernel doesn't actually pass the guest layer_stride
Gurchetan Singh73c141e2021-01-21 14:51:19 -0800999 // and guest stride to the host (compare virgl.h and virtgpu_drm.h).
Jason Macnak1de7f662020-01-24 15:05:57 -08001000 // For gbm based resources, we can work around this by using the level field
1001 // to pass the stride to virglrenderer's gbm transfer code. However, we need
1002 // to avoid doing this for resources which don't rely on that transfer code,
1003 // which is resources with the BO_USE_RENDERING flag set.
David Stevensbaab6c82020-02-26 17:14:43 +09001004 // TODO(b/145993887): Send also stride when the patches are landed
Gurchetan Singhcadc54f2021-02-01 12:03:11 -08001005 if (priv->host_gbm_enabled)
Jason Macnak1de7f662020-01-24 15:05:57 -08001006 xfer.level = bo->meta.strides[0];
David Stevensbaab6c82020-02-26 17:14:43 +09001007 }
Gurchetan Singh05e67cc2019-06-28 17:21:40 -07001008
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001009 if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
Jason Macnak1de7f662020-01-24 15:05:57 -08001010 xfer_params.xfers_needed = 1;
1011 xfer_params.xfer_boxes[0] = mapping->rect;
1012 } else {
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001013 assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
1014 bo->meta.use_flags));
Jason Macnak1de7f662020-01-24 15:05:57 -08001015
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001016 virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
Jason Macnak1de7f662020-01-24 15:05:57 -08001017 }
1018
1019 for (i = 0; i < xfer_params.xfers_needed; i++) {
1020 xfer.box.x = xfer_params.xfer_boxes[i].x;
1021 xfer.box.y = xfer_params.xfer_boxes[i].y;
1022 xfer.box.w = xfer_params.xfer_boxes[i].width;
1023 xfer.box.h = xfer_params.xfer_boxes[i].height;
1024 xfer.box.d = 1;
1025
1026 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer);
1027 if (ret) {
Yiwei Zhang04954732022-07-13 23:34:33 +00001028 drv_loge("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n",
1029 strerror(errno));
Jason Macnak1de7f662020-01-24 15:05:57 -08001030 return -errno;
1031 }
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001032 }
1033
David Stevens4d5358d2019-10-24 14:59:31 +09001034 // The transfer needs to complete before invalidate returns so that any host changes
1035 // are visible and to ensure the host doesn't overwrite subsequent guest changes.
1036 // TODO(b/136733358): Support returning fences from transfers
David Stevens4d5358d2019-10-24 14:59:31 +09001037 waitcmd.handle = mapping->vma->handle;
1038 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
1039 if (ret) {
Yiwei Zhang04954732022-07-13 23:34:33 +00001040 drv_loge("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
David Stevens4d5358d2019-10-24 14:59:31 +09001041 return -errno;
1042 }
1043
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001044 return 0;
1045}
1046
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001047static int virgl_bo_flush(struct bo *bo, struct mapping *mapping)
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001048{
1049 int ret;
Jason Macnak1de7f662020-01-24 15:05:57 -08001050 size_t i;
Gurchetan Singh99644382020-10-07 15:28:11 -07001051 struct drm_virtgpu_3d_transfer_to_host xfer = { 0 };
1052 struct drm_virtgpu_3d_wait waitcmd = { 0 };
Jason Macnak1de7f662020-01-24 15:05:57 -08001053 struct virtio_transfers_params xfer_params;
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001054 struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;
Lepton Wu249e8632018-04-05 12:50:03 -07001055
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001056 if (!params[param_3d].value)
Lepton Wu249e8632018-04-05 12:50:03 -07001057 return 0;
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001058
1059 if (!(mapping->vma->map_flags & BO_MAP_WRITE))
1060 return 0;
1061
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001062 if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
Gurchetan Singh0ee06fb2019-09-13 17:49:20 -07001063 return 0;
1064
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001065 xfer.bo_handle = mapping->vma->handle;
Zach Reizner85c4c5f2017-10-04 13:15:57 -07001066
Gurchetan Singh1b57fe22020-05-05 09:18:22 -07001067 if (mapping->rect.x || mapping->rect.y) {
Gurchetan Singh1b57fe22020-05-05 09:18:22 -07001068 /*
1069 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
1070 * images
1071 */
1072 if (bo->meta.num_planes == 1) {
1073 xfer.offset =
1074 (bo->meta.strides[0] * mapping->rect.y) +
1075 drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
1076 }
1077 }
1078
Gurchetan Singh05e67cc2019-06-28 17:21:40 -07001079 // Unfortunately, the kernel doesn't actually pass the guest layer_stride and
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001080 // guest stride to the host (compare virgl.h and virtgpu_drm.h). We can use
Gurchetan Singh05e67cc2019-06-28 17:21:40 -07001081 // the level to work around this.
Gurchetan Singhcadc54f2021-02-01 12:03:11 -08001082 if (priv->host_gbm_enabled)
Jason Macnak1de7f662020-01-24 15:05:57 -08001083 xfer.level = bo->meta.strides[0];
Gurchetan Singh05e67cc2019-06-28 17:21:40 -07001084
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001085 if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
Jason Macnak1de7f662020-01-24 15:05:57 -08001086 xfer_params.xfers_needed = 1;
1087 xfer_params.xfer_boxes[0] = mapping->rect;
1088 } else {
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001089 assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
1090 bo->meta.use_flags));
Jason Macnak1de7f662020-01-24 15:05:57 -08001091
Gurchetan Singh73c141e2021-01-21 14:51:19 -08001092 virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
Jason Macnak1de7f662020-01-24 15:05:57 -08001093 }
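
	/*
	 * Either way, xfer_params now describes one or more boxes; the loop
	 * below issues one TRANSFER_TO_HOST ioctl per box. Combinations the
	 * host cannot sample natively are split into the boxes computed by
	 * virgl_get_emulated_transfers_params().
	 */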

	for (i = 0; i < xfer_params.xfers_needed; i++) {
		xfer.box.x = xfer_params.xfer_boxes[i].x;
		xfer.box.y = xfer_params.xfer_boxes[i].y;
		xfer.box.w = xfer_params.xfer_boxes[i].width;
		xfer.box.h = xfer_params.xfer_boxes[i].height;
		xfer.box.d = 1;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
		if (ret) {
			drv_loge("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n",
				 strerror(errno));
			return -errno;
		}
	}

	// If the buffer is only accessed by the host GPU, then the flush is ordered
	// with subsequent commands. However, if other host hardware can access the
	// buffer, we need to wait for the transfer to complete for consistency.
	// TODO(b/136733358): Support returning fences from transfers
	if (bo->meta.use_flags & BO_USE_NON_GPU_HW) {
		waitcmd.handle = mapping->vma->handle;

		ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
		if (ret) {
			drv_loge("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
			return -errno;
		}
	}

	return 0;
}
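
/*
 * Usage sketch (illustrative; exact prototypes live in drv.h): callers reach
 * virgl_bo_invalidate()/virgl_bo_flush() through the generic drv entry points
 * when bracketing CPU access to a mapped buffer, roughly:
 *
 *   struct mapping *mapping = NULL;
 *   void *addr = drv_bo_map(bo, &rect, BO_MAP_READ_WRITE, &mapping, 0);
 *   drv_bo_invalidate(bo, mapping);  // pull host contents into the guest
 *   ...CPU reads and writes through addr...
 *   drv_bo_flush(bo, mapping);       // push guest writes back to the host
 *   drv_bo_unmap(bo, mapping);
 */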

static void virgl_3d_resolve_format_and_use_flags(struct driver *drv, uint32_t format,
						  uint64_t use_flags, uint32_t *out_format,
						  uint64_t *out_use_flags)
{
	*out_format = format;
	*out_use_flags = use_flags;
	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* Camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) {
			*out_format = DRM_FORMAT_NV12;
		} else {
			/* HACK: See b/28671744 */
			*out_format = DRM_FORMAT_XBGR8888;
			*out_use_flags &= ~BO_USE_HW_VIDEO_ENCODER;
		}
		break;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		/* All of our host drivers prefer NV12 as their flexible media format.
		 * If that changes, this will need to be modified. */
		*out_format = DRM_FORMAT_NV12;
		/* fallthrough */
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_ABGR8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_XRGB8888:
		/* These are the scanout capable formats to the guest. Strip scanout use_flag if the
		 * host does not natively support scanout on the requested format. */
		if ((use_flags & BO_USE_SCANOUT) &&
		    !virgl_supports_combination_natively(drv, format, BO_USE_SCANOUT))
			*out_use_flags &= ~BO_USE_SCANOUT;
		break;
	case DRM_FORMAT_YVU420_ANDROID:
		*out_use_flags &= ~BO_USE_SCANOUT;
		/* HACK: See b/172389166. Also see gbm_bo_create. */
		*out_use_flags |= BO_USE_LINEAR;
		break;
	default:
		break;
	}
}
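
/*
 * Example: DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED with BO_USE_CAMERA_WRITE
 * resolves to DRM_FORMAT_NV12 with use_flags untouched; without camera flags
 * it resolves to DRM_FORMAT_XBGR8888 and drops BO_USE_HW_VIDEO_ENCODER. The
 * scanout-capable formats keep BO_USE_SCANOUT only when the host reports
 * native scanout support for the combination.
 */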

static void virgl_2d_resolve_format_and_use_flags(uint32_t format, uint64_t use_flags,
						  uint32_t *out_format, uint64_t *out_use_flags)
{
	*out_format = format;
	*out_use_flags = use_flags;

	/* HACK: See crrev/c/1849773 */
	if (format != DRM_FORMAT_XRGB8888)
		*out_use_flags &= ~BO_USE_SCANOUT;

	switch (format) {
	case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
		/* Camera subsystem requires NV12. */
		if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) {
			*out_format = DRM_FORMAT_NV12;
		} else {
			/* HACK: See b/28671744 */
			*out_format = DRM_FORMAT_XBGR8888;
			*out_use_flags &= ~BO_USE_HW_VIDEO_ENCODER;
		}
		break;
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		*out_format = DRM_FORMAT_YVU420_ANDROID;
		/* fallthrough */
	case DRM_FORMAT_YVU420_ANDROID:
		*out_use_flags &= ~BO_USE_SCANOUT;
		/* HACK: See b/172389166. Also see gbm_bo_create. */
		*out_use_flags |= BO_USE_LINEAR;
		break;
	default:
		break;
	}
}
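
/*
 * Example: in the 2D (no 3D/virgl) case only DRM_FORMAT_XRGB8888 may keep
 * BO_USE_SCANOUT, so a DRM_FORMAT_ABGR8888 request with
 * BO_USE_SCANOUT | BO_USE_TEXTURE resolves to the same format with just
 * BO_USE_TEXTURE remaining.
 */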

static void virgl_resolve_format_and_use_flags(struct driver *drv, uint32_t format,
					       uint64_t use_flags, uint32_t *out_format,
					       uint64_t *out_use_flags)
{
	if (params[param_3d].value) {
		return virgl_3d_resolve_format_and_use_flags(drv, format, use_flags, out_format,
							     out_use_flags);
	} else {
		return virgl_2d_resolve_format_and_use_flags(format, use_flags, out_format,
							     out_use_flags);
	}
}

static int virgl_resource_info(struct bo *bo, uint32_t strides[DRV_MAX_PLANES],
			       uint32_t offsets[DRV_MAX_PLANES], uint64_t *format_modifier)
{
	int ret;
	struct drm_virtgpu_resource_info_cros res_info = { 0 };

	if (!params[param_3d].value)
		return 0;

	res_info.bo_handle = bo->handles[0].u32;
	res_info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED;
	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &res_info);
	if (ret) {
		drv_loge("DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS failed with %s\n", strerror(errno));
		return ret;
	}

	for (uint32_t plane = 0; plane < DRV_MAX_PLANES; plane++) {
		/*
		 * Currently, kernel v4.14 (Betty) doesn't have the extended resource info
		 * ioctl.
		 */
		if (!res_info.strides[plane])
			break;

		strides[plane] = res_info.strides[plane];
		offsets[plane] = res_info.offsets[plane];
	}
	*format_modifier = res_info.format_modifier;

	return 0;
}

static uint32_t virgl_get_max_texture_2d_size(struct driver *drv)
{
	if (params[param_3d].value)
		return virgl_3d_get_max_texture_2d_size(drv);
	else
		return VIRGL_2D_MAX_TEXTURE_2D_SIZE;
}
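
/*
 * Note: with the limits defined at the top of this file, the 2D fallback
 * evaluates to MIN(8192, 16384) == 8192.
 */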

const struct backend virtgpu_virgl = { .name = "virtgpu_virgl",
				       .init = virgl_init,
				       .close = virgl_close,
				       .bo_create = virgl_bo_create,
				       .bo_create_with_modifiers = virgl_bo_create_with_modifiers,
				       .bo_destroy = virgl_bo_destroy,
				       .bo_import = drv_prime_bo_import,
				       .bo_map = virgl_bo_map,
				       .bo_unmap = drv_bo_munmap,
				       .bo_invalidate = virgl_bo_invalidate,
				       .bo_flush = virgl_bo_flush,
				       .resolve_format_and_use_flags =
					   virgl_resolve_format_and_use_flags,
				       .resource_info = virgl_resource_info,
				       .get_max_texture_2d_size = virgl_get_max_texture_2d_size };