/*
 * Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include <assert.h>
#include <errno.h>
#include <stdatomic.h>
#include <stdint.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "drv_helpers.h"
#include "drv_priv.h"
#include "external/virgl_hw.h"
#include "external/virgl_protocol.h"
#include "external/virtgpu_drm.h"
#include "util.h"
#include "virtgpu.h"

#define PIPE_TEXTURE_2D 2

#define MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS 15
#define MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE (1 << (MESA_LLVMPIPE_MAX_TEXTURE_2D_LEVELS - 1))
#define MESA_LLVMPIPE_TILE_ORDER 6
#define MESA_LLVMPIPE_TILE_SIZE (1 << MESA_LLVMPIPE_TILE_ORDER)

// This comes from a combination of SwiftShader's VkPhysicalDeviceLimits::maxFramebufferWidth and
// VkPhysicalDeviceLimits::maxImageDimension2D (see https://crrev.com/c/1917130).
#define ANGLE_ON_SWIFTSHADER_MAX_TEXTURE_2D_SIZE 8192

#ifndef MIN
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#endif
#define VIRGL_2D_MAX_TEXTURE_2D_SIZE \
        MIN(ANGLE_ON_SWIFTSHADER_MAX_TEXTURE_2D_SIZE, MESA_LLVMPIPE_MAX_TEXTURE_2D_SIZE)

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
                                                  DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
                                                  DRM_FORMAT_XRGB8888 };

static const uint32_t dumb_texture_source_formats[] = {
        DRM_FORMAT_R8,          DRM_FORMAT_R16,         DRM_FORMAT_YVU420,
        DRM_FORMAT_NV12,        DRM_FORMAT_NV21,        DRM_FORMAT_YVU420_ANDROID,
        DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

static const uint32_t texture_source_formats[] = {
        DRM_FORMAT_NV21,           DRM_FORMAT_R8,          DRM_FORMAT_R16,  DRM_FORMAT_RG88,
        DRM_FORMAT_YVU420_ANDROID, DRM_FORMAT_ABGR2101010, DRM_FORMAT_ABGR16161616F
};

extern struct virtgpu_param params[];

struct virgl_priv {
        int caps_is_v2;
        union virgl_caps caps;
        int host_gbm_enabled;
        atomic_int next_blob_id;
};

static uint32_t translate_format(uint32_t drm_fourcc)
{
        switch (drm_fourcc) {
        case DRM_FORMAT_BGR888:
        case DRM_FORMAT_RGB888:
                return VIRGL_FORMAT_R8G8B8_UNORM;
        case DRM_FORMAT_XRGB8888:
                return VIRGL_FORMAT_B8G8R8X8_UNORM;
        case DRM_FORMAT_ARGB8888:
                return VIRGL_FORMAT_B8G8R8A8_UNORM;
        case DRM_FORMAT_XBGR8888:
                return VIRGL_FORMAT_R8G8B8X8_UNORM;
        case DRM_FORMAT_ABGR8888:
                return VIRGL_FORMAT_R8G8B8A8_UNORM;
        case DRM_FORMAT_ABGR16161616F:
                return VIRGL_FORMAT_R16G16B16A16_FLOAT;
        case DRM_FORMAT_ABGR2101010:
                return VIRGL_FORMAT_R10G10B10A2_UNORM;
        case DRM_FORMAT_RGB565:
                return VIRGL_FORMAT_B5G6R5_UNORM;
        case DRM_FORMAT_R8:
                return VIRGL_FORMAT_R8_UNORM;
        case DRM_FORMAT_R16:
                return VIRGL_FORMAT_R16_UNORM;
        case DRM_FORMAT_RG88:
                return VIRGL_FORMAT_R8G8_UNORM;
        case DRM_FORMAT_NV12:
                return VIRGL_FORMAT_NV12;
        case DRM_FORMAT_NV21:
                return VIRGL_FORMAT_NV21;
        case DRM_FORMAT_P010:
                return VIRGL_FORMAT_P010;
        case DRM_FORMAT_YVU420:
        case DRM_FORMAT_YVU420_ANDROID:
                return VIRGL_FORMAT_YV12;
        default:
                drv_loge("Unhandled format:%d\n", drm_fourcc);
                return 0;
        }
}

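// Returns whether the host advertises support for the given DRM format by
// testing the corresponding bit in a virgl capability bitmask. The mask is an
// array of 32-bit words indexed by virgl format value; for example, a virgl
// format of 67 maps to bit 3 of bitmask[2].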
static bool virgl_bitmask_supports_format(struct virgl_supported_format_mask *supported,
                                          uint32_t drm_format)
{
        uint32_t virgl_format = translate_format(drm_format);
        if (!virgl_format)
                return false;

        uint32_t bitmask_index = virgl_format / 32;
        uint32_t bit_index = virgl_format % 32;
        return supported->bitmask[bitmask_index] & (1 << bit_index);
}

// The metadata generated here for emulated buffers is slightly different than the metadata
// generated by drv_bo_from_format. In order to simplify transfers in the flush and invalidate
// functions below, the emulated buffers are oversized. For example, ignoring stride alignment
// requirements to demonstrate, a 6x6 YUV420 image buffer might have the following layout from
// drv_bo_from_format:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U | U | U | U |
// | U | U | U | V | V | V |
// | V | V | V | V | V | V |
//
// where each plane immediately follows the previous plane in memory. This layout makes it
// difficult to compute the transfers needed, for example, when the middle 2x2 region of the
// image is locked and needs to be flushed/invalidated.
//
// Emulated multi-plane buffers instead have a layout of:
//
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | Y | Y | Y | Y | Y | Y |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | U | U | U |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
// | V | V | V |   |   |   |
//
// where each plane is placed as a sub-image (albeit with a very large stride) in order to
// simplify transfers into 3 sub-image transfers for the above example.
//
// Additional note: the V-plane is not placed to the right of the U-plane due to some
// observed failures in media framework code which assumes the V-plane is not
// "row-interlaced" with the U-plane.
static void virgl_get_emulated_metadata(const struct bo *bo, struct bo_metadata *metadata)
{
        uint32_t y_plane_height;
        uint32_t c_plane_height;
        uint32_t original_width = bo->meta.width;
        uint32_t original_height = bo->meta.height;

        metadata->format = DRM_FORMAT_R8;
        switch (bo->meta.format) {
        case DRM_FORMAT_NV12:
        case DRM_FORMAT_NV21:
                // Bi-planar
                metadata->num_planes = 2;

                y_plane_height = original_height;
                c_plane_height = DIV_ROUND_UP(original_height, 2);

                metadata->width = original_width;
                metadata->height = y_plane_height + c_plane_height;

                // Y-plane (full resolution)
                metadata->strides[0] = metadata->width;
                metadata->offsets[0] = 0;
                metadata->sizes[0] = metadata->width * y_plane_height;

                // CbCr-plane (half resolution, interleaved, placed below Y-plane)
                metadata->strides[1] = metadata->width;
                metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
                metadata->sizes[1] = metadata->width * c_plane_height;

                metadata->total_size = metadata->width * metadata->height;
                break;
        case DRM_FORMAT_YVU420:
        case DRM_FORMAT_YVU420_ANDROID:
                // Tri-planar
                metadata->num_planes = 3;

                y_plane_height = original_height;
                c_plane_height = DIV_ROUND_UP(original_height, 2);

                metadata->width = ALIGN(original_width, 32);
                metadata->height = y_plane_height + (2 * c_plane_height);

                // Y-plane (full resolution)
                metadata->strides[0] = metadata->width;
                metadata->offsets[0] = 0;
                metadata->sizes[0] = metadata->width * original_height;

                // Cb-plane (half resolution, placed below Y-plane)
                metadata->strides[1] = metadata->width;
                metadata->offsets[1] = metadata->offsets[0] + metadata->sizes[0];
                metadata->sizes[1] = metadata->width * c_plane_height;

                // Cr-plane (half resolution, placed below Cb-plane)
                metadata->strides[2] = metadata->width;
                metadata->offsets[2] = metadata->offsets[1] + metadata->sizes[1];
                metadata->sizes[2] = metadata->width * c_plane_height;

                metadata->total_size = metadata->width * metadata->height;
                break;
        default:
                break;
        }
}

struct virtio_transfers_params {
        size_t xfers_needed;
        struct rectangle xfer_boxes[DRV_MAX_PLANES];
};

static void virgl_get_emulated_transfers_params(const struct bo *bo,
                                                const struct rectangle *transfer_box,
                                                struct virtio_transfers_params *xfer_params)
{
        uint32_t y_plane_height;
        uint32_t c_plane_height;
        struct bo_metadata emulated_metadata;

        if (transfer_box->x == 0 && transfer_box->y == 0 && transfer_box->width == bo->meta.width &&
            transfer_box->height == bo->meta.height) {
                virgl_get_emulated_metadata(bo, &emulated_metadata);

                xfer_params->xfers_needed = 1;
                xfer_params->xfer_boxes[0].x = 0;
                xfer_params->xfer_boxes[0].y = 0;
                xfer_params->xfer_boxes[0].width = emulated_metadata.width;
                xfer_params->xfer_boxes[0].height = emulated_metadata.height;

                return;
        }

        switch (bo->meta.format) {
        case DRM_FORMAT_NV12:
        case DRM_FORMAT_NV21:
                // Bi-planar
                xfer_params->xfers_needed = 2;

                y_plane_height = bo->meta.height;
                c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

                // Y-plane (full resolution)
                xfer_params->xfer_boxes[0].x = transfer_box->x;
                xfer_params->xfer_boxes[0].y = transfer_box->y;
                xfer_params->xfer_boxes[0].width = transfer_box->width;
                xfer_params->xfer_boxes[0].height = transfer_box->height;

                // CbCr-plane (half resolution, interleaved, placed below Y-plane)
                xfer_params->xfer_boxes[1].x = transfer_box->x;
                xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
                xfer_params->xfer_boxes[1].width = transfer_box->width;
                xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

                break;
        case DRM_FORMAT_YVU420:
        case DRM_FORMAT_YVU420_ANDROID:
                // Tri-planar
                xfer_params->xfers_needed = 3;

                y_plane_height = bo->meta.height;
                c_plane_height = DIV_ROUND_UP(bo->meta.height, 2);

                // Y-plane (full resolution)
                xfer_params->xfer_boxes[0].x = transfer_box->x;
                xfer_params->xfer_boxes[0].y = transfer_box->y;
                xfer_params->xfer_boxes[0].width = transfer_box->width;
                xfer_params->xfer_boxes[0].height = transfer_box->height;

                // Cb-plane (half resolution, placed below Y-plane)
                xfer_params->xfer_boxes[1].x = transfer_box->x;
                xfer_params->xfer_boxes[1].y = transfer_box->y + y_plane_height;
                xfer_params->xfer_boxes[1].width = DIV_ROUND_UP(transfer_box->width, 2);
                xfer_params->xfer_boxes[1].height = DIV_ROUND_UP(transfer_box->height, 2);

                // Cr-plane (half resolution, placed below Cb-plane)
                xfer_params->xfer_boxes[2].x = transfer_box->x;
                xfer_params->xfer_boxes[2].y = transfer_box->y + y_plane_height + c_plane_height;
                xfer_params->xfer_boxes[2].width = DIV_ROUND_UP(transfer_box->width, 2);
                xfer_params->xfer_boxes[2].height = DIV_ROUND_UP(transfer_box->height, 2);

                break;
        }
}

static bool virgl_supports_combination_natively(struct driver *drv, uint32_t drm_format,
                                                uint64_t use_flags)
{
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

        if (priv->caps.max_version == 0)
                return true;

        if ((use_flags & BO_USE_RENDERING) &&
            !virgl_bitmask_supports_format(&priv->caps.v1.render, drm_format))
                return false;

        if ((use_flags & BO_USE_TEXTURE) &&
            !virgl_bitmask_supports_format(&priv->caps.v1.sampler, drm_format))
                return false;

        if ((use_flags & BO_USE_SCANOUT) && priv->caps_is_v2 &&
            !virgl_bitmask_supports_format(&priv->caps.v2.scanout, drm_format))
                return false;

        return true;
}

// For virtio backends that do not support formats natively (e.g. multi-planar formats are not
// supported in virglrenderer when gbm is unavailable on the host machine), returns whether the
// format and usage combination can be handled as a blob (byte buffer).
static bool virgl_supports_combination_through_emulation(struct driver *drv, uint32_t drm_format,
                                                         uint64_t use_flags)
{
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

        // Only enable emulation on non-gbm virtio backends.
        if (priv->host_gbm_enabled)
                return false;

        if (use_flags & (BO_USE_RENDERING | BO_USE_SCANOUT))
                return false;

        if (!virgl_supports_combination_natively(drv, DRM_FORMAT_R8, use_flags))
                return false;

        return drm_format == DRM_FORMAT_NV12 || drm_format == DRM_FORMAT_NV21 ||
               drm_format == DRM_FORMAT_YVU420 || drm_format == DRM_FORMAT_YVU420_ANDROID;
}

// Adds the given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combination(struct driver *drv, uint32_t drm_format,
                                  struct format_metadata *metadata, uint64_t use_flags)
{
        if (params[param_3d].value) {
                if ((use_flags & BO_USE_SCANOUT) &&
                    !virgl_supports_combination_natively(drv, drm_format, BO_USE_SCANOUT)) {
                        drv_logi("Strip scanout on format: %d\n", drm_format);
                        use_flags &= ~BO_USE_SCANOUT;
                }

                if (!virgl_supports_combination_natively(drv, drm_format, use_flags) &&
                    !virgl_supports_combination_through_emulation(drv, drm_format, use_flags)) {
                        drv_logi("Skipping unsupported combination format:%d\n", drm_format);
                        return;
                }
        }

        drv_add_combination(drv, drm_format, metadata, use_flags);
}

// Adds each given buffer combination to the list of supported buffer combinations if the
// combination is supported by the virtio backend.
static void virgl_add_combinations(struct driver *drv, const uint32_t *drm_formats,
                                   uint32_t num_formats, struct format_metadata *metadata,
                                   uint64_t use_flags)
{
        uint32_t i;

        for (i = 0; i < num_formats; i++)
                virgl_add_combination(drv, drm_formats[i], metadata, use_flags);
}

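// 2D (no-3D) path: allocate through the dumb buffer interface. Buffers other
// than DRM_FORMAT_R8 are padded to MESA_LLVMPIPE_TILE_SIZE (64 pixels) in each
// dimension, matching the tile size used by Mesa's llvmpipe software renderer
// on the host.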
static int virgl_2d_dumb_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
                                   uint64_t use_flags)
{
        if (bo->meta.format != DRM_FORMAT_R8) {
                width = ALIGN(width, MESA_LLVMPIPE_TILE_SIZE);
                height = ALIGN(height, MESA_LLVMPIPE_TILE_SIZE);
        }

        return drv_dumb_bo_create_ex(bo, width, height, format, use_flags, BO_QUIRK_DUMB32BPP);
}

static inline void handle_flag(uint64_t *flag, uint64_t check_flag, uint32_t *bind,
                               uint32_t virgl_bind)
{
        if ((*flag) & check_flag) {
                (*flag) &= ~check_flag;
                (*bind) |= virgl_bind;
        }
}

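// Translates minigbm BO_USE_* flags into VIRGL_BIND_* flags, clearing each use
// flag as it is consumed so that any leftover bits can be reported as
// unhandled. For example, BO_USE_TEXTURE | BO_USE_SW_READ_OFTEN resolves to
// VIRGL_BIND_SHARED | VIRGL_BIND_SAMPLER_VIEW | VIRGL_BIND_MINIGBM_SW_READ_OFTEN.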
static uint32_t compute_virgl_bind_flags(uint64_t use_flags)
{
        /* In crosvm, VIRGL_BIND_SHARED means minigbm will allocate, not virglrenderer. */
        uint32_t bind = VIRGL_BIND_SHARED;

        handle_flag(&use_flags, BO_USE_TEXTURE, &bind, VIRGL_BIND_SAMPLER_VIEW);
        handle_flag(&use_flags, BO_USE_RENDERING, &bind, VIRGL_BIND_RENDER_TARGET);
        handle_flag(&use_flags, BO_USE_SCANOUT, &bind, VIRGL_BIND_SCANOUT);
        handle_flag(&use_flags, BO_USE_CURSOR, &bind, VIRGL_BIND_CURSOR);
        handle_flag(&use_flags, BO_USE_LINEAR, &bind, VIRGL_BIND_LINEAR);
        handle_flag(&use_flags, BO_USE_SENSOR_DIRECT_DATA, &bind, VIRGL_BIND_LINEAR);
        handle_flag(&use_flags, BO_USE_GPU_DATA_BUFFER, &bind, VIRGL_BIND_LINEAR);
        handle_flag(&use_flags, BO_USE_FRONT_RENDERING, &bind, VIRGL_BIND_LINEAR);

        if (use_flags & BO_USE_PROTECTED) {
                handle_flag(&use_flags, BO_USE_PROTECTED, &bind, VIRGL_BIND_MINIGBM_PROTECTED);
        } else {
                // Make sure we don't set both flags, since that could be mistaken for
                // protected. Give OFTEN priority over RARELY.
                if (use_flags & BO_USE_SW_READ_OFTEN) {
                        handle_flag(&use_flags, BO_USE_SW_READ_OFTEN, &bind,
                                    VIRGL_BIND_MINIGBM_SW_READ_OFTEN);
                } else {
                        handle_flag(&use_flags, BO_USE_SW_READ_RARELY, &bind,
                                    VIRGL_BIND_MINIGBM_SW_READ_RARELY);
                }
                if (use_flags & BO_USE_SW_WRITE_OFTEN) {
                        handle_flag(&use_flags, BO_USE_SW_WRITE_OFTEN, &bind,
                                    VIRGL_BIND_MINIGBM_SW_WRITE_OFTEN);
                } else {
                        handle_flag(&use_flags, BO_USE_SW_WRITE_RARELY, &bind,
                                    VIRGL_BIND_MINIGBM_SW_WRITE_RARELY);
                }
        }

        handle_flag(&use_flags, BO_USE_CAMERA_WRITE, &bind, VIRGL_BIND_MINIGBM_CAMERA_WRITE);
        handle_flag(&use_flags, BO_USE_CAMERA_READ, &bind, VIRGL_BIND_MINIGBM_CAMERA_READ);
        handle_flag(&use_flags, BO_USE_HW_VIDEO_DECODER, &bind,
                    VIRGL_BIND_MINIGBM_HW_VIDEO_DECODER);
        handle_flag(&use_flags, BO_USE_HW_VIDEO_ENCODER, &bind,
                    VIRGL_BIND_MINIGBM_HW_VIDEO_ENCODER);

        if (use_flags)
                drv_loge("Unhandled bo use flag: %llx\n", (unsigned long long)use_flags);

        return bind;
}

static int virgl_3d_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
                              uint64_t use_flags)
{
        int ret;
        size_t i;
        uint32_t stride;
        struct drm_virtgpu_resource_create res_create = { 0 };
        struct bo_metadata emulated_metadata;

        if (virgl_supports_combination_natively(bo->drv, format, use_flags)) {
                stride = drv_stride_from_format(format, width, 0);
                drv_bo_from_format(bo, stride, height, format);
        } else {
                assert(virgl_supports_combination_through_emulation(bo->drv, format, use_flags));

                virgl_get_emulated_metadata(bo, &emulated_metadata);

                format = emulated_metadata.format;
                width = emulated_metadata.width;
                height = emulated_metadata.height;
                for (i = 0; i < emulated_metadata.num_planes; i++) {
                        bo->meta.strides[i] = emulated_metadata.strides[i];
                        bo->meta.offsets[i] = emulated_metadata.offsets[i];
                        bo->meta.sizes[i] = emulated_metadata.sizes[i];
                }
                bo->meta.total_size = emulated_metadata.total_size;
        }

        /*
         * Setting the target is intended to ensure this resource gets bound as a 2D
         * texture in the host renderer's GL state. All of these resource properties are
         * sent unchanged by the kernel to the host, which in turn sends them unchanged to
         * virglrenderer. When virglrenderer makes a resource, it will convert the target
         * enum to the equivalent one in GL and then bind the resource to that target.
         */

        res_create.target = PIPE_TEXTURE_2D;
        res_create.format = translate_format(format);
        res_create.bind = compute_virgl_bind_flags(use_flags);
        res_create.width = width;
        res_create.height = height;

        /* For virgl 3D */
        res_create.depth = 1;
        res_create.array_size = 1;
        res_create.last_level = 0;
        res_create.nr_samples = 0;

        res_create.size = ALIGN(bo->meta.total_size, PAGE_SIZE); // PAGE_SIZE = 0x1000
        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
        if (ret) {
                drv_loge("DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s\n", strerror(errno));
                return ret;
        }

        for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
                bo->handles[plane].u32 = res_create.bo_handle;

        return 0;
}

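// DRM_IOCTL_VIRTGPU_MAP only returns a fake offset into the DRM fd; the actual
// guest mapping is created by the subsequent mmap() of that offset.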
static void *virgl_3d_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
        int ret;
        struct drm_virtgpu_map gem_map = { 0 };

        gem_map.handle = bo->handles[0].u32;
        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_MAP, &gem_map);
        if (ret) {
                drv_loge("DRM_IOCTL_VIRTGPU_MAP failed with %s\n", strerror(errno));
                return MAP_FAILED;
        }

        vma->length = bo->meta.total_size;
        return mmap(0, bo->meta.total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
                    gem_map.offset);
}

static uint32_t virgl_3d_get_max_texture_2d_size(struct driver *drv)
{
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

        if (priv->caps.v2.max_texture_2d_size)
                return priv->caps.v2.max_texture_2d_size;

        return UINT32_MAX;
}

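// Queries the host's capabilities. When the kernel advertises the capset-fix
// parameter, the larger v2 capset is requested; otherwise, or if the v2 query
// fails, the query falls back to the v1 capset.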
static int virgl_get_caps(struct driver *drv, union virgl_caps *caps, int *caps_is_v2)
{
        int ret;
        struct drm_virtgpu_get_caps cap_args = { 0 };

        *caps_is_v2 = 0;
        cap_args.addr = (unsigned long long)caps;
        if (params[param_capset_fix].value) {
                *caps_is_v2 = 1;
                cap_args.cap_set_id = 2;
                cap_args.size = sizeof(union virgl_caps);
        } else {
                cap_args.cap_set_id = 1;
                cap_args.size = sizeof(struct virgl_caps_v1);
        }

        ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
        if (ret) {
                drv_loge("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
                *caps_is_v2 = 0;

                // Fallback to v1
                cap_args.cap_set_id = 1;
                cap_args.size = sizeof(struct virgl_caps_v1);

                ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_GET_CAPS, &cap_args);
                if (ret)
                        drv_loge("DRM_IOCTL_VIRTGPU_GET_CAPS failed with %s\n", strerror(errno));
        }

        return ret;
}

static void virgl_init_params_and_caps(struct driver *drv)
{
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;
        if (params[param_3d].value) {
                virgl_get_caps(drv, &priv->caps, &priv->caps_is_v2);

                // We use two criteria to determine whether host minigbm is used on the host for
                // swapchain allocations.
                //
                // (1) Host minigbm is only available via virglrenderer, and only virglrenderer
                // advertises capabilities.
                // (2) Only host minigbm doesn't emulate YUV formats. Checking this is a bit of a
                // proxy, but it works.
                priv->host_gbm_enabled =
                    priv->caps.max_version > 0 &&
                    virgl_supports_combination_natively(drv, DRM_FORMAT_NV12, BO_USE_TEXTURE);
        }
}

static int virgl_init(struct driver *drv)
{
        struct virgl_priv *priv;

        priv = calloc(1, sizeof(*priv));
        if (!priv)
                return -ENOMEM;

        drv->priv = priv;

        virgl_init_params_and_caps(drv);

        if (params[param_3d].value) {
                /* This doesn't mean the host can scan out everything; it just means the host
                 * hypervisor can show it. */
                virgl_add_combinations(drv, render_target_formats,
                                       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
                                       BO_USE_RENDER_MASK | BO_USE_SCANOUT);
                virgl_add_combinations(drv, texture_source_formats,
                                       ARRAY_SIZE(texture_source_formats), &LINEAR_METADATA,
                                       BO_USE_TEXTURE_MASK);
                /* NV12 with scanout must flow through virgl_add_combination, so that the native
                 * support is checked and the scanout use_flag can be conditionally stripped. */
                virgl_add_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
                                      BO_USE_TEXTURE_MASK | BO_USE_CAMERA_READ |
                                          BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
                                          BO_USE_HW_VIDEO_ENCODER | BO_USE_SCANOUT);
        } else {
                /* The virtio primary plane only allows this format. */
                virgl_add_combination(drv, DRM_FORMAT_XRGB8888, &LINEAR_METADATA,
                                      BO_USE_RENDER_MASK | BO_USE_SCANOUT);
                /* The virtio cursor plane only allows this format, and Chrome cannot live without
                 * an ARGB8888 renderable format. */
                virgl_add_combination(drv, DRM_FORMAT_ARGB8888, &LINEAR_METADATA,
                                      BO_USE_RENDER_MASK | BO_USE_CURSOR);
                /* Android needs more, but they cannot be bound as scanouts anymore after
                 * "drm/virtio: fix DRM_FORMAT_* handling" */
                virgl_add_combinations(drv, render_target_formats,
                                       ARRAY_SIZE(render_target_formats), &LINEAR_METADATA,
                                       BO_USE_RENDER_MASK);
                virgl_add_combinations(drv, dumb_texture_source_formats,
                                       ARRAY_SIZE(dumb_texture_source_formats), &LINEAR_METADATA,
                                       BO_USE_TEXTURE_MASK);
                drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
        }

        /* Android CTS tests require this. */
        virgl_add_combination(drv, DRM_FORMAT_RGB888, &LINEAR_METADATA, BO_USE_SW_MASK);
        virgl_add_combination(drv, DRM_FORMAT_BGR888, &LINEAR_METADATA, BO_USE_SW_MASK);
        /* The Android Camera CTS tests require this. Additionally, the scanout usage is needed
         * for Camera preview and is expected to be conditionally stripped by
         * virgl_add_combination when not natively supported and instead handled by HWComposer. */
        virgl_add_combination(drv, DRM_FORMAT_P010, &LINEAR_METADATA,
                              BO_USE_SCANOUT | BO_USE_TEXTURE | BO_USE_SW_MASK |
                                  BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
        /* Android VTS sensors hal tests require BO_USE_SENSOR_DIRECT_DATA. */
        drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
                               BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE | BO_USE_HW_VIDEO_DECODER |
                                   BO_USE_HW_VIDEO_ENCODER | BO_USE_SENSOR_DIRECT_DATA |
                                   BO_USE_GPU_DATA_BUFFER);

        if (!priv->host_gbm_enabled) {
                drv_modify_combination(drv, DRM_FORMAT_ABGR8888, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
                drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
                drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
                drv_modify_combination(drv, DRM_FORMAT_R16, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER);
                drv_modify_combination(drv, DRM_FORMAT_YVU420, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
                drv_modify_combination(drv, DRM_FORMAT_YVU420_ANDROID, &LINEAR_METADATA,
                                       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE |
                                           BO_USE_HW_VIDEO_DECODER | BO_USE_HW_VIDEO_ENCODER);
        }

        return drv_modify_linear_combinations(drv);
}

static void virgl_close(struct driver *drv)
{
        free(drv->priv);
        drv->priv = NULL;
}

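// Creates a host-backed blob resource. The virgl command stream carries a
// VIRGL_CCMD_PIPE_RESOURCE_CREATE describing the 2D texture (target, size,
// format, bind flags) plus a driver-local blob id, and the accompanying
// DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB references the same blob id so the
// host can associate the two.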
static int virgl_bo_create_blob(struct driver *drv, struct bo *bo)
{
        int ret;
        uint32_t stride;
        uint32_t cur_blob_id;
        uint32_t cmd[VIRGL_PIPE_RES_CREATE_SIZE + 1] = { 0 };
        struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

        uint32_t blob_flags = VIRTGPU_BLOB_FLAG_USE_SHAREABLE;
        if (bo->meta.use_flags & (BO_USE_SW_MASK | BO_USE_GPU_DATA_BUFFER))
                blob_flags |= VIRTGPU_BLOB_FLAG_USE_MAPPABLE;

        // For now, all blob use cases are cross device. When we add wider
        // support for blobs, we can revisit making this unconditional.
        blob_flags |= VIRTGPU_BLOB_FLAG_USE_CROSS_DEVICE;

        cur_blob_id = atomic_fetch_add(&priv->next_blob_id, 1);
        stride = drv_stride_from_format(bo->meta.format, bo->meta.width, 0);
        drv_bo_from_format(bo, stride, bo->meta.height, bo->meta.format);
        bo->meta.total_size = ALIGN(bo->meta.total_size, PAGE_SIZE);
        bo->meta.tiling = blob_flags;

        cmd[0] = VIRGL_CMD0(VIRGL_CCMD_PIPE_RESOURCE_CREATE, 0, VIRGL_PIPE_RES_CREATE_SIZE);
        cmd[VIRGL_PIPE_RES_CREATE_TARGET] = PIPE_TEXTURE_2D;
        cmd[VIRGL_PIPE_RES_CREATE_WIDTH] = bo->meta.width;
        cmd[VIRGL_PIPE_RES_CREATE_HEIGHT] = bo->meta.height;
        cmd[VIRGL_PIPE_RES_CREATE_FORMAT] = translate_format(bo->meta.format);
        cmd[VIRGL_PIPE_RES_CREATE_BIND] = compute_virgl_bind_flags(bo->meta.use_flags);
        cmd[VIRGL_PIPE_RES_CREATE_DEPTH] = 1;
        cmd[VIRGL_PIPE_RES_CREATE_BLOB_ID] = cur_blob_id;

        drm_rc_blob.cmd = (uint64_t)&cmd;
        drm_rc_blob.cmd_size = 4 * (VIRGL_PIPE_RES_CREATE_SIZE + 1);
        drm_rc_blob.size = bo->meta.total_size;
        drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST3D;
        drm_rc_blob.blob_flags = blob_flags;
        drm_rc_blob.blob_id = cur_blob_id;

        ret = drmIoctl(drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);
        if (ret < 0) {
                drv_loge("DRM_VIRTGPU_RESOURCE_CREATE_BLOB failed with %s\n", strerror(errno));
                return -errno;
        }

        for (uint32_t plane = 0; plane < bo->meta.num_planes; plane++)
                bo->handles[plane].u32 = drm_rc_blob.bo_handle;

        return 0;
}

static bool should_use_blob(struct driver *drv, uint32_t format, uint64_t use_flags)
{
        struct virgl_priv *priv = (struct virgl_priv *)drv->priv;

        // TODO(gurchetansingh): remove once all minigbm users are blob-safe
#ifndef VIRTIO_GPU_NEXT
        return false;
#endif

        // Only use blob when host gbm is available
        if (!priv->host_gbm_enabled)
                return false;

        // Use regular resources if only the GPU needs efficient access. A blob resource is a
        // better fit for BO_USE_GPU_DATA_BUFFER, which is mapped to VIRGL_BIND_LINEAR.
        if (!(use_flags & (BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN | BO_USE_LINEAR |
                           BO_USE_NON_GPU_HW | BO_USE_GPU_DATA_BUFFER)))
                return false;

        switch (format) {
        case DRM_FORMAT_R8:
                // Formats with strictly defined strides are supported
                return true;
        case DRM_FORMAT_YVU420_ANDROID:
        case DRM_FORMAT_NV12:
                // Knowing buffer metadata at buffer creation isn't yet supported, so buffers
                // can't be properly mapped into the guest.
                return (use_flags & BO_USE_SW_MASK) == 0;
        default:
                return false;
        }
}

static int virgl_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
                           uint64_t use_flags)
{
        if (params[param_resource_blob].value && params[param_host_visible].value &&
            should_use_blob(bo->drv, format, use_flags))
                return virgl_bo_create_blob(bo->drv, bo);

        if (params[param_3d].value)
                return virgl_3d_bo_create(bo, width, height, format, use_flags);
        else
                return virgl_2d_dumb_bo_create(bo, width, height, format, use_flags);
}

static int virgl_bo_create_with_modifiers(struct bo *bo, uint32_t width, uint32_t height,
                                          uint32_t format, const uint64_t *modifiers,
                                          uint32_t count)
{
        uint64_t use_flags = 0;

        for (uint32_t i = 0; i < count; i++) {
                if (modifiers[i] == DRM_FORMAT_MOD_LINEAR) {
                        return virgl_bo_create(bo, width, height, format, use_flags);
                }
        }

        return -EINVAL;
}

static int virgl_bo_destroy(struct bo *bo)
{
        if (params[param_3d].value)
                return drv_gem_bo_destroy(bo);
        else
                return drv_dumb_bo_destroy(bo);
}

static void *virgl_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
{
        if (params[param_3d].value)
                return virgl_3d_bo_map(bo, vma, plane, map_flags);
        else
                return drv_dumb_bo_map(bo, vma, plane, map_flags);
}

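// Pulls host-side writes into the guest-visible backing store with
// DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST and then blocks on
// DRM_IOCTL_VIRTGPU_WAIT, so the transfer has completed before the caller
// reads the mapping. Buffers the host never writes are skipped entirely.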
static int virgl_bo_invalidate(struct bo *bo, struct mapping *mapping)
{
        int ret;
        size_t i;
        struct drm_virtgpu_3d_transfer_from_host xfer = { 0 };
        struct drm_virtgpu_3d_wait waitcmd = { 0 };
        struct virtio_transfers_params xfer_params;
        struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;
        uint64_t host_write_flags;

        if (!params[param_3d].value)
                return 0;

        // Invalidate is only necessary if the host writes to the buffer. The encoder and
        // decoder flags don't differentiate between input and output buffers, but we can
        // use the format to determine whether this buffer could be encoder/decoder output.
        host_write_flags = BO_USE_RENDERING | BO_USE_CAMERA_WRITE | BO_USE_GPU_DATA_BUFFER;
        if (bo->meta.format == DRM_FORMAT_R8)
                host_write_flags |= BO_USE_HW_VIDEO_ENCODER;
        else
                host_write_flags |= BO_USE_HW_VIDEO_DECODER;

        if ((bo->meta.use_flags & host_write_flags) == 0)
                return 0;

        if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
                return 0;

        xfer.bo_handle = mapping->vma->handle;

        if (mapping->rect.x || mapping->rect.y) {
                /*
                 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
                 * images
                 */
                if (bo->meta.num_planes == 1) {
                        xfer.offset =
                            (bo->meta.strides[0] * mapping->rect.y) +
                            drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
                }
        }

        if ((bo->meta.use_flags & BO_USE_RENDERING) == 0) {
                // Unfortunately, the kernel doesn't actually pass the guest layer_stride
                // and guest stride to the host (compare virgl.h and virtgpu_drm.h).
                // For gbm based resources, we can work around this by using the level field
                // to pass the stride to virglrenderer's gbm transfer code. However, we need
                // to avoid doing this for resources which don't rely on that transfer code,
                // which is resources with the BO_USE_RENDERING flag set.
                // TODO(b/145993887): Send also stride when the patches are landed
                if (priv->host_gbm_enabled)
                        xfer.level = bo->meta.strides[0];
        }

        if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
                xfer_params.xfers_needed = 1;
                xfer_params.xfer_boxes[0] = mapping->rect;
        } else {
                assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
                                                                    bo->meta.use_flags));

                virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
        }

        for (i = 0; i < xfer_params.xfers_needed; i++) {
                xfer.box.x = xfer_params.xfer_boxes[i].x;
                xfer.box.y = xfer_params.xfer_boxes[i].y;
                xfer.box.w = xfer_params.xfer_boxes[i].width;
                xfer.box.h = xfer_params.xfer_boxes[i].height;
                xfer.box.d = 1;

                ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST, &xfer);
                if (ret) {
                        drv_loge("DRM_IOCTL_VIRTGPU_TRANSFER_FROM_HOST failed with %s\n",
                                 strerror(errno));
                        return -errno;
                }
        }

        // The transfer needs to complete before invalidate returns so that any host changes
        // are visible and to ensure the host doesn't overwrite subsequent guest changes.
        // TODO(b/136733358): Support returning fences from transfers
        waitcmd.handle = mapping->vma->handle;
        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
        if (ret) {
                drv_loge("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
                return -errno;
        }

        return 0;
}

static int virgl_bo_flush(struct bo *bo, struct mapping *mapping)
{
        int ret;
        size_t i;
        struct drm_virtgpu_3d_transfer_to_host xfer = { 0 };
        struct drm_virtgpu_3d_wait waitcmd = { 0 };
        struct virtio_transfers_params xfer_params;
        struct virgl_priv *priv = (struct virgl_priv *)bo->drv->priv;

        if (!params[param_3d].value)
                return 0;

        if (!(mapping->vma->map_flags & BO_MAP_WRITE))
                return 0;

        if (params[param_resource_blob].value && (bo->meta.tiling & VIRTGPU_BLOB_FLAG_USE_MAPPABLE))
                return 0;

        xfer.bo_handle = mapping->vma->handle;

        if (mapping->rect.x || mapping->rect.y) {
                /*
                 * virglrenderer uses the box parameters and assumes that offset == 0 for planar
                 * images
                 */
                if (bo->meta.num_planes == 1) {
                        xfer.offset =
                            (bo->meta.strides[0] * mapping->rect.y) +
                            drv_bytes_per_pixel_from_format(bo->meta.format, 0) * mapping->rect.x;
                }
        }

        // Unfortunately, the kernel doesn't actually pass the guest layer_stride and
        // guest stride to the host (compare virgl.h and virtgpu_drm.h). We can use
        // the level to work around this.
        if (priv->host_gbm_enabled)
                xfer.level = bo->meta.strides[0];

        if (virgl_supports_combination_natively(bo->drv, bo->meta.format, bo->meta.use_flags)) {
                xfer_params.xfers_needed = 1;
                xfer_params.xfer_boxes[0] = mapping->rect;
        } else {
                assert(virgl_supports_combination_through_emulation(bo->drv, bo->meta.format,
                                                                    bo->meta.use_flags));

                virgl_get_emulated_transfers_params(bo, &mapping->rect, &xfer_params);
        }

        for (i = 0; i < xfer_params.xfers_needed; i++) {
                xfer.box.x = xfer_params.xfer_boxes[i].x;
                xfer.box.y = xfer_params.xfer_boxes[i].y;
                xfer.box.w = xfer_params.xfer_boxes[i].width;
                xfer.box.h = xfer_params.xfer_boxes[i].height;
                xfer.box.d = 1;

                ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST, &xfer);
                if (ret) {
                        drv_loge("DRM_IOCTL_VIRTGPU_TRANSFER_TO_HOST failed with %s\n",
                                 strerror(errno));
                        return -errno;
                }
        }

        // If the buffer is only accessed by the host GPU, then the flush is ordered
        // with subsequent commands. However, if other host hardware can access the
        // buffer, we need to wait for the transfer to complete for consistency.
        // TODO(b/136733358): Support returning fences from transfers
        if (bo->meta.use_flags & BO_USE_NON_GPU_HW) {
                waitcmd.handle = mapping->vma->handle;

                ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_WAIT, &waitcmd);
                if (ret) {
                        drv_loge("DRM_IOCTL_VIRTGPU_WAIT failed with %s\n", strerror(errno));
                        return -errno;
                }
        }

        return 0;
}

static void virgl_3d_resolve_format_and_use_flags(struct driver *drv, uint32_t format,
                                                  uint64_t use_flags, uint32_t *out_format,
                                                  uint64_t *out_use_flags)
{
        *out_format = format;
        *out_use_flags = use_flags;
        switch (format) {
        case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
                /* Camera subsystem requires NV12. */
                if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) {
                        *out_format = DRM_FORMAT_NV12;
                } else {
                        /* HACK: See b/28671744 */
                        *out_format = DRM_FORMAT_XBGR8888;
                        *out_use_flags &= ~BO_USE_HW_VIDEO_ENCODER;
                }
                break;
        case DRM_FORMAT_FLEX_YCbCr_420_888:
                /* All of our host drivers prefer NV12 as their flexible media format.
                 * If that changes, this will need to be modified. */
                *out_format = DRM_FORMAT_NV12;
                /* fallthrough */
        case DRM_FORMAT_NV12:
        case DRM_FORMAT_ABGR8888:
        case DRM_FORMAT_ARGB8888:
        case DRM_FORMAT_RGB565:
        case DRM_FORMAT_XBGR8888:
        case DRM_FORMAT_XRGB8888:
                /* These are the scanout capable formats to the guest. Strip scanout use_flag if
                 * the host does not natively support scanout on the requested format. */
                if ((use_flags & BO_USE_SCANOUT) &&
                    !virgl_supports_combination_natively(drv, format, BO_USE_SCANOUT))
                        *out_use_flags &= ~BO_USE_SCANOUT;
                break;
        case DRM_FORMAT_YVU420_ANDROID:
                *out_use_flags &= ~BO_USE_SCANOUT;
                /* HACK: See b/172389166. Also see gbm_bo_create. */
                *out_use_flags |= BO_USE_LINEAR;
                break;
        default:
                break;
        }
}

static void virgl_2d_resolve_format_and_use_flags(uint32_t format, uint64_t use_flags,
                                                  uint32_t *out_format, uint64_t *out_use_flags)
{
        *out_format = format;
        *out_use_flags = use_flags;

        /* HACK: See crrev/c/1849773 */
        if (format != DRM_FORMAT_XRGB8888)
                *out_use_flags &= ~BO_USE_SCANOUT;

        switch (format) {
        case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
                /* Camera subsystem requires NV12. */
                if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE)) {
                        *out_format = DRM_FORMAT_NV12;
                } else {
                        /* HACK: See b/28671744 */
                        *out_format = DRM_FORMAT_XBGR8888;
                        *out_use_flags &= ~BO_USE_HW_VIDEO_ENCODER;
                }
                break;
        case DRM_FORMAT_FLEX_YCbCr_420_888:
                *out_format = DRM_FORMAT_YVU420_ANDROID;
                /* fallthrough */
        case DRM_FORMAT_YVU420_ANDROID:
                *out_use_flags &= ~BO_USE_SCANOUT;
                /* HACK: See b/172389166. Also see gbm_bo_create. */
                *out_use_flags |= BO_USE_LINEAR;
                break;
        default:
                break;
        }
}

static void virgl_resolve_format_and_use_flags(struct driver *drv, uint32_t format,
                                               uint64_t use_flags, uint32_t *out_format,
                                               uint64_t *out_use_flags)
{
        if (params[param_3d].value) {
                return virgl_3d_resolve_format_and_use_flags(drv, format, use_flags, out_format,
                                                             out_use_flags);
        } else {
                return virgl_2d_resolve_format_and_use_flags(format, use_flags, out_format,
                                                             out_use_flags);
        }
}

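// Queries the host-assigned layout through the Chrome OS extended
// resource-info ioctl. On kernels without the extension (e.g. v4.14), the
// returned strides are zero and the guest-computed layout is kept.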
static int virgl_resource_info(struct bo *bo, uint32_t strides[DRV_MAX_PLANES],
                               uint32_t offsets[DRV_MAX_PLANES], uint64_t *format_modifier)
{
        int ret;
        struct drm_virtgpu_resource_info_cros res_info = { 0 };

        if (!params[param_3d].value)
                return 0;

        res_info.bo_handle = bo->handles[0].u32;
        res_info.type = VIRTGPU_RESOURCE_INFO_TYPE_EXTENDED;
        ret = drmIoctl(bo->drv->fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO_CROS, &res_info);
        if (ret) {
                drv_loge("DRM_IOCTL_VIRTGPU_RESOURCE_INFO failed with %s\n", strerror(errno));
                return ret;
        }

        for (uint32_t plane = 0; plane < DRV_MAX_PLANES; plane++) {
                /*
                 * Currently, kernel v4.14 (Betty) doesn't have the extended resource info
                 * ioctl.
                 */
                if (!res_info.strides[plane])
                        break;

                strides[plane] = res_info.strides[plane];
                offsets[plane] = res_info.offsets[plane];
        }
        *format_modifier = res_info.format_modifier;

        return 0;
}

static uint32_t virgl_get_max_texture_2d_size(struct driver *drv)
{
        if (params[param_3d].value)
                return virgl_3d_get_max_texture_2d_size(drv);
        else
                return VIRGL_2D_MAX_TEXTURE_2D_SIZE;
}

const struct backend virtgpu_virgl = { .name = "virtgpu_virgl",
                                       .init = virgl_init,
                                       .close = virgl_close,
                                       .bo_create = virgl_bo_create,
                                       .bo_create_with_modifiers = virgl_bo_create_with_modifiers,
                                       .bo_destroy = virgl_bo_destroy,
                                       .bo_import = drv_prime_bo_import,
                                       .bo_map = virgl_bo_map,
                                       .bo_unmap = drv_bo_munmap,
                                       .bo_invalidate = virgl_bo_invalidate,
                                       .bo_flush = virgl_bo_flush,
                                       .resolve_format_and_use_flags =
                                           virgl_resolve_format_and_use_flags,
                                       .resource_info = virgl_resource_info,
                                       .get_max_texture_2d_size = virgl_get_max_texture_2d_size };