// XGL tests
//
// Copyright (C) 2014 LunarG, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

#include <iostream>
#include <string.h> // memset(), memcmp()
#include "xgltestbinding.h"

namespace {

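// DERIVED_OBJECT_INIT calls an xglCreate*/xglOpen* entry point, checks the
// result with EXPECT, and hands the new handle to the wrapper's base class.
// EXPECT evaluates an expression and, on failure, reports it through
// expect_failure() below instead of aborting, so a single test run can
// collect multiple failed expectations.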
#define DERIVED_OBJECT_INIT(create_func, ...) \
    do { \
        obj_type obj; \
        if (EXPECT(create_func(__VA_ARGS__, &obj) == XGL_SUCCESS)) \
            base_type::init(obj); \
    } while (0)

#define STRINGIFY(x) #x
#define EXPECT(expr) ((expr) ? true : expect_failure(STRINGIFY(expr), __FILE__, __LINE__, __FUNCTION__))

xgl_testing::ErrorCallback error_callback;

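// Report a failed EXPECT either through the test-registered callback or, if
// none has been installed via set_error_callback(), to stderr. Always returns
// false so EXPECT(expr) yields the value of the expectation itself.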
bool expect_failure(const char *expr, const char *file, unsigned int line, const char *function)
{
    if (error_callback) {
        error_callback(expr, file, line, function);
    } else {
        std::cerr << file << ":" << line << ": " << function <<
            ": Expectation `" << expr << "' failed.\n";
    }

    return false;
}

template<class T, class S>
std::vector<T> make_objects(const std::vector<S> &v)
{
    std::vector<T> objs;
    objs.reserve(v.size());
    for (typename std::vector<S>::const_iterator it = v.begin(); it != v.end(); it++)
        objs.push_back((*it)->obj());
    return objs;
}

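// The two get_info() overloads wrap the XGL "query size, then query data"
// convention: the first call with a NULL buffer returns the required size,
// the second call fills the caller's storage. The result is returned as a
// vector of T and padded to min_elems so callers can index element 0 even
// when the query fails.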
template<typename T>
std::vector<T> get_info(XGL_PHYSICAL_GPU gpu, XGL_PHYSICAL_GPU_INFO_TYPE type, size_t min_elems)
{
    std::vector<T> info;
    size_t size;
    if (EXPECT(xglGetGpuInfo(gpu, type, &size, NULL) == XGL_SUCCESS && size % sizeof(T) == 0)) {
        info.resize(size / sizeof(T));
        if (!EXPECT(xglGetGpuInfo(gpu, type, &size, &info[0]) == XGL_SUCCESS && size == info.size() * sizeof(T)))
            info.clear();
    }

    if (info.size() < min_elems)
        info.resize(min_elems);

    return info;
}

template<typename T>
std::vector<T> get_info(XGL_BASE_OBJECT obj, XGL_OBJECT_INFO_TYPE type, size_t min_elems)
{
    std::vector<T> info;
    size_t size;
    if (EXPECT(xglGetObjectInfo(obj, type, &size, NULL) == XGL_SUCCESS && size % sizeof(T) == 0)) {
        info.resize(size / sizeof(T));
        if (!EXPECT(xglGetObjectInfo(obj, type, &size, &info[0]) == XGL_SUCCESS && size == info.size() * sizeof(T)))
            info.clear();
    }

    if (info.size() < min_elems)
        info.resize(min_elems);

    return info;
}

} // namespace

namespace xgl_testing {

void set_error_callback(ErrorCallback callback)
{
    error_callback = callback;
}

XGL_PHYSICAL_GPU_PROPERTIES PhysicalGpu::properties() const
{
    return get_info<XGL_PHYSICAL_GPU_PROPERTIES>(gpu_, XGL_INFO_TYPE_PHYSICAL_GPU_PROPERTIES, 1)[0];
}

XGL_PHYSICAL_GPU_PERFORMANCE PhysicalGpu::performance() const
{
    return get_info<XGL_PHYSICAL_GPU_PERFORMANCE>(gpu_, XGL_INFO_TYPE_PHYSICAL_GPU_PERFORMANCE, 1)[0];
}

std::vector<XGL_PHYSICAL_GPU_QUEUE_PROPERTIES> PhysicalGpu::queue_properties() const
{
    return get_info<XGL_PHYSICAL_GPU_QUEUE_PROPERTIES>(gpu_, XGL_INFO_TYPE_PHYSICAL_GPU_QUEUE_PROPERTIES, 0);
}

XGL_PHYSICAL_GPU_MEMORY_PROPERTIES PhysicalGpu::memory_properties() const
{
    return get_info<XGL_PHYSICAL_GPU_MEMORY_PROPERTIES>(gpu_, XGL_INFO_TYPE_PHYSICAL_GPU_MEMORY_PROPERTIES, 1)[0];
}

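// Enumerate the layers exposed by the GPU. xglEnumerateLayers writes layer
// names into caller-provided storage, so this carves the supplied buffer into
// max_layer_count fixed-size slots of max_string_size bytes each and returns
// pointers to the slots that were actually filled.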
std::vector<const char *> PhysicalGpu::layers(std::vector<char> &buf) const
{
    const size_t max_layer_count = 16;
    const size_t max_string_size = 256;

    buf.resize(max_layer_count * max_string_size);

    std::vector<const char *> layers;
    layers.reserve(max_layer_count);
    for (size_t i = 0; i < max_layer_count; i++)
        layers.push_back(&buf[0] + max_string_size * i);

    char * const *out = const_cast<char * const *>(&layers[0]);
    size_t count;
    if (!EXPECT(xglEnumerateLayers(gpu_, max_layer_count, max_string_size, &count, out, NULL) == XGL_SUCCESS))
        count = 0;
    layers.resize(count);

    return layers;
}

std::vector<const char *> PhysicalGpu::extensions() const
{
    static const char *known_exts[] = {
        "XGL_WSI_X11",
    };

    std::vector<const char *> exts;
    for (int i = 0; i < sizeof(known_exts) / sizeof(known_exts[0]); i++) {
        XGL_RESULT err = xglGetExtensionSupport(gpu_, known_exts[i]);
        if (err == XGL_SUCCESS)
            exts.push_back(known_exts[i]);
    }

    return exts;
}

XGL_GPU_COMPATIBILITY_INFO PhysicalGpu::compatibility(const PhysicalGpu &other) const
{
    XGL_GPU_COMPATIBILITY_INFO data;
    if (!EXPECT(xglGetMultiGpuCompatibility(gpu_, other.gpu_, &data) == XGL_SUCCESS))
        memset(&data, 0, sizeof(data));

    return data;
}

void BaseObject::init(XGL_BASE_OBJECT obj, bool own)
{
    EXPECT(!initialized());
    reinit(obj, own);
}

void BaseObject::reinit(XGL_BASE_OBJECT obj, bool own)
{
    obj_ = obj;
    own_obj_ = own;
}

uint32_t BaseObject::memory_allocation_count() const
{
    return get_info<uint32_t>(obj_, XGL_INFO_TYPE_MEMORY_ALLOCATION_COUNT, 1)[0];
}

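// Query XGL_MEMORY_REQUIREMENTS for every allocation the object needs and
// cross-check the vector length against XGL_INFO_TYPE_MEMORY_ALLOCATION_COUNT.
// An object that reports a single zero-sized requirement is treated as
// needing no backing memory at all.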
std::vector<XGL_MEMORY_REQUIREMENTS> BaseObject::memory_requirements() const
{
    XGL_RESULT err;
    uint32_t num_allocations = 0;
    size_t num_alloc_size = sizeof(num_allocations);
    err = xglGetObjectInfo(obj_, XGL_INFO_TYPE_MEMORY_ALLOCATION_COUNT,
                           &num_alloc_size, &num_allocations);
    EXPECT(err == XGL_SUCCESS && num_alloc_size == sizeof(num_allocations));
    std::vector<XGL_MEMORY_REQUIREMENTS> info =
        get_info<XGL_MEMORY_REQUIREMENTS>(obj_, XGL_INFO_TYPE_MEMORY_REQUIREMENTS, 0);
    EXPECT(info.size() == num_allocations);
    if (info.size() == 1 && !info[0].size)
        info.clear();

    return info;
}

void Object::init(XGL_OBJECT obj, bool own)
{
    BaseObject::init(obj, own);
    mem_alloc_count_ = memory_allocation_count();
}

void Object::reinit(XGL_OBJECT obj, bool own)
{
    cleanup();
    BaseObject::reinit(obj, own);
    mem_alloc_count_ = memory_allocation_count();
}

void Object::cleanup()
{
    if (!initialized())
        return;

    unbind_memory();

    if (internal_mems_) {
        delete[] internal_mems_;
        internal_mems_ = NULL;
        primary_mem_ = NULL;
    }

    mem_alloc_count_ = 0;

    if (own())
        EXPECT(xglDestroyObject(obj()) == XGL_SUCCESS);
}

void Object::bind_memory(uint32_t alloc_idx, const GpuMemory &mem, XGL_GPU_SIZE mem_offset)
{
    EXPECT(xglBindObjectMemory(obj(), alloc_idx, mem.obj(), mem_offset) == XGL_SUCCESS);
}

void Object::bind_memory(uint32_t alloc_idx, XGL_GPU_SIZE offset, XGL_GPU_SIZE size,
                         const GpuMemory &mem, XGL_GPU_SIZE mem_offset)
{
    EXPECT(!alloc_idx && xglBindObjectMemoryRange(obj(), 0, offset, size, mem.obj(), mem_offset) == XGL_SUCCESS);
}

void Object::unbind_memory(uint32_t alloc_idx)
{
    EXPECT(xglBindObjectMemory(obj(), alloc_idx, XGL_NULL_HANDLE, 0) == XGL_SUCCESS);
}

void Object::unbind_memory()
{
    for (uint32_t i = 0; i < mem_alloc_count_; i++)
        unbind_memory(i);
}

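// Allocate and bind one GpuMemory per reported memory requirement. When the
// object is a buffer and/or an image, the corresponding
// XGL_MEMORY_ALLOC_BUFFER_INFO / XGL_MEMORY_ALLOC_IMAGE_INFO structures are
// chained onto the base XGL_MEMORY_ALLOC_INFO through pNext so the driver can
// pick an appropriate memory type; the allocation whose memType matches the
// object becomes primary_mem_.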
void Object::alloc_memory(const Device &dev, bool for_buf, bool for_img)
{
    if (!EXPECT(!internal_mems_) || !mem_alloc_count_)
        return;

    internal_mems_ = new GpuMemory[mem_alloc_count_];

    const std::vector<XGL_MEMORY_REQUIREMENTS> mem_reqs = memory_requirements();
    std::vector<XGL_IMAGE_MEMORY_REQUIREMENTS> img_reqs;
    std::vector<XGL_BUFFER_MEMORY_REQUIREMENTS> buf_reqs;
    XGL_MEMORY_ALLOC_IMAGE_INFO img_info;
    XGL_MEMORY_ALLOC_BUFFER_INFO buf_info;
    XGL_MEMORY_ALLOC_INFO info, *next_info = NULL;

    if (for_img) {
        img_reqs = get_info<XGL_IMAGE_MEMORY_REQUIREMENTS>(obj(),
                       XGL_INFO_TYPE_IMAGE_MEMORY_REQUIREMENTS, 0);
        EXPECT(img_reqs.size() == 1);
        next_info = (XGL_MEMORY_ALLOC_INFO *) &img_info;
        img_info.pNext = NULL;
        img_info.sType = XGL_STRUCTURE_TYPE_MEMORY_ALLOC_IMAGE_INFO;
        img_info.usage = img_reqs[0].usage;
        img_info.formatClass = img_reqs[0].formatClass;
        img_info.samples = img_reqs[0].samples;
    }

    if (for_buf) {
        buf_reqs = get_info<XGL_BUFFER_MEMORY_REQUIREMENTS>(obj(),
                       XGL_INFO_TYPE_BUFFER_MEMORY_REQUIREMENTS, 0);
        if (for_img)
            img_info.pNext = &buf_info;
        else
            next_info = (XGL_MEMORY_ALLOC_INFO *) &buf_info;
        buf_info.pNext = NULL;
        buf_info.sType = XGL_STRUCTURE_TYPE_MEMORY_ALLOC_BUFFER_INFO;
        buf_info.usage = buf_reqs[0].usage;
    }

    for (int i = 0; i < mem_reqs.size(); i++) {
        info = GpuMemory::alloc_info(mem_reqs[i], next_info);

        switch (info.memType) {
        case XGL_MEMORY_TYPE_BUFFER:
            EXPECT(for_buf);
            info.memProps |= XGL_MEMORY_PROPERTY_CPU_VISIBLE_BIT;
            primary_mem_ = &internal_mems_[i];
            break;
        case XGL_MEMORY_TYPE_IMAGE:
            EXPECT(for_img);
            primary_mem_ = &internal_mems_[i];
            break;
        default:
            break;
        }

        internal_mems_[i].init(dev, info);
        bind_memory(i, internal_mems_[i], 0);
    }
}

void Object::alloc_memory(const std::vector<XGL_GPU_MEMORY> &mems)
{
    if (!EXPECT(!internal_mems_) || !mem_alloc_count_)
        return;

    internal_mems_ = new GpuMemory[mem_alloc_count_];

    const std::vector<XGL_MEMORY_REQUIREMENTS> mem_reqs = memory_requirements();
    if (!EXPECT(mem_reqs.size() == mems.size()))
        return;

    for (int i = 0; i < mem_reqs.size(); i++) {
        primary_mem_ = &internal_mems_[i];

        internal_mems_[i].init(mems[i]);
        bind_memory(i, internal_mems_[i], 0);
    }
}

std::vector<XGL_GPU_MEMORY> Object::memories() const
{
    std::vector<XGL_GPU_MEMORY> mems;
    if (internal_mems_) {
        mems.reserve(mem_alloc_count_);
        for (uint32_t i = 0; i < mem_alloc_count_; i++)
            mems.push_back(internal_mems_[i].obj());
    }

    return mems;
}

Device::~Device()
{
    if (!initialized())
        return;

    for (int i = 0; i < QUEUE_COUNT; i++) {
        for (std::vector<Queue *>::iterator it = queues_[i].begin(); it != queues_[i].end(); it++)
            delete *it;
        queues_[i].clear();
    }

    EXPECT(xglDestroyDevice(obj()) == XGL_SUCCESS);
}

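// Build an XGL_DEVICE_CREATE_INFO that requests every queue the GPU exposes,
// all enumerated layers (when enable_layers is set, chained in through an
// XGL_LAYER_CREATE_INFO on pNext), all known extensions, and full validation,
// then defer to init(const XGL_DEVICE_CREATE_INFO &).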
void Device::init(bool enable_layers)
{
    // request all queues
    const std::vector<XGL_PHYSICAL_GPU_QUEUE_PROPERTIES> queue_props = gpu_.queue_properties();
    std::vector<XGL_DEVICE_QUEUE_CREATE_INFO> queue_info;
    queue_info.reserve(queue_props.size());
    for (int i = 0; i < queue_props.size(); i++) {
        XGL_DEVICE_QUEUE_CREATE_INFO qi = {};
        qi.queueNodeIndex = i;
        qi.queueCount = queue_props[i].queueCount;
        if (queue_props[i].queueFlags & XGL_QUEUE_GRAPHICS_BIT) {
            graphics_queue_node_index_ = i;
        }
        queue_info.push_back(qi);
    }

    XGL_LAYER_CREATE_INFO layer_info = {};
    layer_info.sType = XGL_STRUCTURE_TYPE_LAYER_CREATE_INFO;

    std::vector<const char *> layers;
    std::vector<char> layer_buf;
    // request all layers
    if (enable_layers) {
        layers = gpu_.layers(layer_buf);
        layer_info.layerCount = layers.size();
        layer_info.ppActiveLayerNames = &layers[0];
    }

    const std::vector<const char *> exts = gpu_.extensions();

    XGL_DEVICE_CREATE_INFO dev_info = {};
    dev_info.sType = XGL_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    dev_info.pNext = (enable_layers) ? static_cast<void *>(&layer_info) : NULL;
    dev_info.queueRecordCount = queue_info.size();
    dev_info.pRequestedQueues = &queue_info[0];
    dev_info.extensionCount = exts.size();
    dev_info.ppEnabledExtensionNames = &exts[0];
    dev_info.maxValidationLevel = XGL_VALIDATION_LEVEL_END_RANGE;
    dev_info.flags = XGL_DEVICE_CREATE_VALIDATION_BIT;

    init(dev_info);
}

void Device::init(const XGL_DEVICE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDevice, gpu_.obj(), &info);

    init_queues();
    init_formats();
}

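// Query the per-node queue properties, fetch every XGL_QUEUE handle via
// xglGetDeviceQueue, and sort the handles into the GRAPHICS, COMPUTE, and DMA
// buckets according to each node's queueFlags. A queue node advertising more
// than one capability lands in more than one bucket.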
void Device::init_queues()
{
    XGL_RESULT err;
    size_t data_size;
    uint32_t queue_node_count;

    err = xglGetGpuInfo(gpu_.obj(), XGL_INFO_TYPE_PHYSICAL_GPU_QUEUE_PROPERTIES,
                        &data_size, NULL);
    EXPECT(err == XGL_SUCCESS);

    queue_node_count = data_size / sizeof(XGL_PHYSICAL_GPU_QUEUE_PROPERTIES);
    EXPECT(queue_node_count >= 1);

    XGL_PHYSICAL_GPU_QUEUE_PROPERTIES queue_props[queue_node_count];

    err = xglGetGpuInfo(gpu_.obj(), XGL_INFO_TYPE_PHYSICAL_GPU_QUEUE_PROPERTIES,
                        &data_size, queue_props);
    EXPECT(err == XGL_SUCCESS);

    for (int i = 0; i < queue_node_count; i++) {
        XGL_QUEUE queue;

        for (int j = 0; j < queue_props[i].queueCount; j++) {
            err = xglGetDeviceQueue(obj(), i, j, &queue);
            EXPECT(err == XGL_SUCCESS);

            if (queue_props[i].queueFlags & XGL_QUEUE_GRAPHICS_BIT) {
                queues_[GRAPHICS].push_back(new Queue(queue));
            }

            if (queue_props[i].queueFlags & XGL_QUEUE_COMPUTE_BIT) {
                queues_[COMPUTE].push_back(new Queue(queue));
            }

            if (queue_props[i].queueFlags & XGL_QUEUE_DMA_BIT) {
                queues_[DMA].push_back(new Queue(queue));
            }
        }
    }

    EXPECT(!queues_[GRAPHICS].empty() || !queues_[COMPUTE].empty());
}

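// Walk the whole XGL_FORMAT range and record every (format, tiling) pair the
// device supports, keeping the reported feature flags so tests can later look
// up what a given format/tiling combination allows.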
void Device::init_formats()
{
    for (int f = XGL_FMT_BEGIN_RANGE; f <= XGL_FMT_END_RANGE; f++) {
        const XGL_FORMAT fmt = static_cast<XGL_FORMAT>(f);
        const XGL_FORMAT_PROPERTIES props = format_properties(fmt);

        if (props.linearTilingFeatures) {
            const Format tmp = { fmt, XGL_LINEAR_TILING, props.linearTilingFeatures };
            formats_.push_back(tmp);
        }

        if (props.optimalTilingFeatures) {
            const Format tmp = { fmt, XGL_OPTIMAL_TILING, props.optimalTilingFeatures };
            formats_.push_back(tmp);
        }
    }

    EXPECT(!formats_.empty());
}

XGL_FORMAT_PROPERTIES Device::format_properties(XGL_FORMAT format)
{
    const XGL_FORMAT_INFO_TYPE type = XGL_INFO_TYPE_FORMAT_PROPERTIES;
    XGL_FORMAT_PROPERTIES data;
    size_t size = sizeof(data);
    if (!EXPECT(xglGetFormatInfo(obj(), format, type, &size, &data) == XGL_SUCCESS && size == sizeof(data)))
        memset(&data, 0, sizeof(data));

    return data;
}

void Device::wait()
{
    EXPECT(xglDeviceWaitIdle(obj()) == XGL_SUCCESS);
}

XGL_RESULT Device::wait(const std::vector<const Fence *> &fences, bool wait_all, uint64_t timeout)
{
    const std::vector<XGL_FENCE> fence_objs = make_objects<XGL_FENCE>(fences);
    XGL_RESULT err = xglWaitForFences(obj(), fence_objs.size(), &fence_objs[0], wait_all, timeout);
    EXPECT(err == XGL_SUCCESS || err == XGL_TIMEOUT);

    return err;
}

void Device::begin_descriptor_pool_update(XGL_DESCRIPTOR_UPDATE_MODE mode)
{
    EXPECT(xglBeginDescriptorPoolUpdate(obj(), mode) == XGL_SUCCESS);
}

void Device::end_descriptor_pool_update(CmdBuffer &cmd)
{
    EXPECT(xglEndDescriptorPoolUpdate(obj(), cmd.obj()) == XGL_SUCCESS);
}

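// Queue helpers: submit() hands one or more command buffers to xglQueueSubmit
// (with a caller-supplied fence, or a default-constructed one), while
// add_mem_references() / remove_mem_references() register and unregister each
// GPU memory object the submitted work touches, one xglQueue*MemReference
// call per handle.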
void Queue::submit(const std::vector<const CmdBuffer *> &cmds, Fence &fence)
{
    const std::vector<XGL_CMD_BUFFER> cmd_objs = make_objects<XGL_CMD_BUFFER>(cmds);
    EXPECT(xglQueueSubmit(obj(), cmd_objs.size(), &cmd_objs[0], fence.obj()) == XGL_SUCCESS);
}

void Queue::submit(const CmdBuffer &cmd, Fence &fence)
{
    submit(std::vector<const CmdBuffer *>(1, &cmd), fence);
}

void Queue::submit(const CmdBuffer &cmd)
{
    Fence fence;
    submit(cmd, fence);
}

void Queue::add_mem_references(const std::vector<XGL_GPU_MEMORY> &mem_refs)
{
    for (int i = 0; i < mem_refs.size(); i++) {
        EXPECT(xglQueueAddMemReference(obj(), mem_refs[i]) == XGL_SUCCESS);
    }
}

void Queue::remove_mem_references(const std::vector<XGL_GPU_MEMORY> &mem_refs)
{
    for (int i = 0; i < mem_refs.size(); i++) {
        EXPECT(xglQueueRemoveMemReference(obj(), mem_refs[i]) == XGL_SUCCESS);
    }
}

void Queue::wait()
{
    EXPECT(xglQueueWaitIdle(obj()) == XGL_SUCCESS);
}

void Queue::signal_semaphore(Semaphore &sem)
{
    EXPECT(xglQueueSignalSemaphore(obj(), sem.obj()) == XGL_SUCCESS);
}

void Queue::wait_semaphore(Semaphore &sem)
{
    EXPECT(xglQueueWaitSemaphore(obj(), sem.obj()) == XGL_SUCCESS);
}

GpuMemory::~GpuMemory()
{
    if (initialized() && own())
        EXPECT(xglFreeMemory(obj()) == XGL_SUCCESS);
}

void GpuMemory::init(const Device &dev, const XGL_MEMORY_ALLOC_INFO &info)
{
    DERIVED_OBJECT_INIT(xglAllocMemory, dev.obj(), &info);
}

void GpuMemory::init(const Device &dev, size_t size, const void *data)
{
    DERIVED_OBJECT_INIT(xglPinSystemMemory, dev.obj(), data, size);
}

void GpuMemory::init(const Device &dev, const XGL_MEMORY_OPEN_INFO &info)
{
    DERIVED_OBJECT_INIT(xglOpenSharedMemory, dev.obj(), &info);
}

void GpuMemory::init(const Device &dev, const XGL_PEER_MEMORY_OPEN_INFO &info)
{
    DERIVED_OBJECT_INIT(xglOpenPeerMemory, dev.obj(), &info);
}

void GpuMemory::set_priority(XGL_MEMORY_PRIORITY priority)
{
    EXPECT(xglSetMemoryPriority(obj(), priority) == XGL_SUCCESS);
}

const void *GpuMemory::map(XGL_FLAGS flags) const
{
    void *data;
    if (!EXPECT(xglMapMemory(obj(), flags, &data) == XGL_SUCCESS))
        data = NULL;

    return data;
}

void *GpuMemory::map(XGL_FLAGS flags)
{
    void *data;
    if (!EXPECT(xglMapMemory(obj(), flags, &data) == XGL_SUCCESS))
        data = NULL;

    return data;
}

void GpuMemory::unmap() const
{
    EXPECT(xglUnmapMemory(obj()) == XGL_SUCCESS);
}

void Fence::init(const Device &dev, const XGL_FENCE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateFence, dev.obj(), &info);
    alloc_memory(dev);
}

void Semaphore::init(const Device &dev, const XGL_SEMAPHORE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateSemaphore, dev.obj(), &info);
    alloc_memory(dev);
}

void Semaphore::init(const Device &dev, const XGL_SEMAPHORE_OPEN_INFO &info)
{
    DERIVED_OBJECT_INIT(xglOpenSharedSemaphore, dev.obj(), &info);
}

void Event::init(const Device &dev, const XGL_EVENT_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateEvent, dev.obj(), &info);
    alloc_memory(dev);
}

void Event::set()
{
    EXPECT(xglSetEvent(obj()) == XGL_SUCCESS);
}

void Event::reset()
{
    EXPECT(xglResetEvent(obj()) == XGL_SUCCESS);
}

void QueryPool::init(const Device &dev, const XGL_QUERY_POOL_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateQueryPool, dev.obj(), &info);
    alloc_memory(dev);
}

XGL_RESULT QueryPool::results(uint32_t start, uint32_t count, size_t size, void *data)
{
    size_t tmp = size;
    XGL_RESULT err = xglGetQueryPoolResults(obj(), start, count, &tmp, data);
    if (err == XGL_SUCCESS) {
        if (!EXPECT(tmp == size))
            memset(data, 0, size);
    } else {
        EXPECT(err == XGL_NOT_READY);
    }

    return err;
}

void Buffer::init(const Device &dev, const XGL_BUFFER_CREATE_INFO &info)
{
    init_no_mem(dev, info);
    alloc_memory(dev, true, false);
}

void Buffer::init_no_mem(const Device &dev, const XGL_BUFFER_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateBuffer, dev.obj(), &info);
    create_info_ = info;
}

void BufferView::init(const Device &dev, const XGL_BUFFER_VIEW_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateBufferView, dev.obj(), &info);
    alloc_memory(dev);
}

void Image::init(const Device &dev, const XGL_IMAGE_CREATE_INFO &info)
{
    init_no_mem(dev, info);
    alloc_memory(dev, info.tiling == XGL_LINEAR_TILING, true);
}

void Image::init_no_mem(const Device &dev, const XGL_IMAGE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateImage, dev.obj(), &info);
    init_info(dev, info);
}

void Image::init(const Device &dev, const XGL_PEER_IMAGE_OPEN_INFO &info, const XGL_IMAGE_CREATE_INFO &original_info)
{
    XGL_IMAGE img;
    XGL_GPU_MEMORY mem;
    EXPECT(xglOpenPeerImage(dev.obj(), &info, &img, &mem) == XGL_SUCCESS);
    Object::init(img);

    init_info(dev, original_info);
    alloc_memory(std::vector<XGL_GPU_MEMORY>(1, mem));
}

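// Cache the image create info and resolve the format feature flags by
// searching the device's format table for the matching (format, tiling) pair
// recorded in init_formats().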
void Image::init_info(const Device &dev, const XGL_IMAGE_CREATE_INFO &info)
{
    create_info_ = info;

    for (std::vector<Device::Format>::const_iterator it = dev.formats().begin(); it != dev.formats().end(); it++) {
        if (memcmp(&it->format, &create_info_.format, sizeof(it->format)) == 0 && it->tiling == create_info_.tiling) {
            format_features_ = it->features;
            break;
        }
    }
}

void Image::bind_memory(uint32_t alloc_idx, const XGL_IMAGE_MEMORY_BIND_INFO &info,
                        const GpuMemory &mem, XGL_GPU_SIZE mem_offset)
{
    EXPECT(!alloc_idx && xglBindImageMemoryRange(obj(), 0, &info, mem.obj(), mem_offset) == XGL_SUCCESS);
}

XGL_SUBRESOURCE_LAYOUT Image::subresource_layout(const XGL_IMAGE_SUBRESOURCE &subres) const
{
    const XGL_SUBRESOURCE_INFO_TYPE type = XGL_INFO_TYPE_SUBRESOURCE_LAYOUT;
    XGL_SUBRESOURCE_LAYOUT data;
    size_t size = sizeof(data);
    if (!EXPECT(xglGetImageSubresourceInfo(obj(), &subres, type, &size, &data) == XGL_SUCCESS && size == sizeof(data)))
        memset(&data, 0, sizeof(data));

    return data;
}

bool Image::transparent() const
{
    return (create_info_.tiling == XGL_LINEAR_TILING &&
            create_info_.samples == 1 &&
            !(create_info_.usage & (XGL_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                                    XGL_IMAGE_USAGE_DEPTH_STENCIL_BIT)));
}

void ImageView::init(const Device &dev, const XGL_IMAGE_VIEW_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateImageView, dev.obj(), &info);
    alloc_memory(dev);
}

void ColorAttachmentView::init(const Device &dev, const XGL_COLOR_ATTACHMENT_VIEW_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateColorAttachmentView, dev.obj(), &info);
    alloc_memory(dev);
}

void DepthStencilView::init(const Device &dev, const XGL_DEPTH_STENCIL_VIEW_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDepthStencilView, dev.obj(), &info);
    alloc_memory(dev);
}

void Shader::init(const Device &dev, const XGL_SHADER_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateShader, dev.obj(), &info);
}

XGL_RESULT Shader::init_try(const Device &dev, const XGL_SHADER_CREATE_INFO &info)
{
    XGL_SHADER sh;
    XGL_RESULT err = xglCreateShader(dev.obj(), &info, &sh);
    if (err == XGL_SUCCESS)
        Object::init(sh);

    return err;
}

void Pipeline::init(const Device &dev, const XGL_GRAPHICS_PIPELINE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateGraphicsPipeline, dev.obj(), &info);
    alloc_memory(dev);
}

void Pipeline::init(
    const Device &dev,
    const XGL_GRAPHICS_PIPELINE_CREATE_INFO &info,
    const XGL_PIPELINE basePipeline)
{
    DERIVED_OBJECT_INIT(xglCreateGraphicsPipelineDerivative, dev.obj(), &info, basePipeline);
    alloc_memory(dev);
}

void Pipeline::init(const Device &dev, const XGL_COMPUTE_PIPELINE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateComputePipeline, dev.obj(), &info);
    alloc_memory(dev);
}

void Pipeline::init(const Device &dev, size_t size, const void *data)
{
    DERIVED_OBJECT_INIT(xglLoadPipeline, dev.obj(), size, data);
    alloc_memory(dev);
}

void Pipeline::init(
    const Device &dev,
    size_t size,
    const void *data,
    const XGL_PIPELINE basePipeline)
{
    DERIVED_OBJECT_INIT(xglLoadPipelineDerivative, dev.obj(), size, data, basePipeline);
    alloc_memory(dev);
}

size_t Pipeline::store(size_t size, void *data)
{
    if (!EXPECT(xglStorePipeline(obj(), &size, data) == XGL_SUCCESS))
        size = 0;

    return size;
}

void Sampler::init(const Device &dev, const XGL_SAMPLER_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateSampler, dev.obj(), &info);
    alloc_memory(dev);
}

void DescriptorSetLayout::init(const Device &dev, const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDescriptorSetLayout, dev.obj(), &info);
    alloc_memory(dev);
}

void DescriptorSetLayoutChain::init(const Device &dev, const std::vector<const DescriptorSetLayout *> &layouts)
{
    const std::vector<XGL_DESCRIPTOR_SET_LAYOUT> layout_objs = make_objects<XGL_DESCRIPTOR_SET_LAYOUT>(layouts);

    DERIVED_OBJECT_INIT(xglCreateDescriptorSetLayoutChain, dev.obj(), layout_objs.size(), &layout_objs[0]);
    alloc_memory(dev);
}

void DescriptorPool::init(const Device &dev, XGL_DESCRIPTOR_POOL_USAGE usage,
                          uint32_t max_sets, const XGL_DESCRIPTOR_POOL_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDescriptorPool, dev.obj(), usage, max_sets, &info);
    alloc_memory(dev);
}

void DescriptorPool::reset()
{
    EXPECT(xglResetDescriptorPool(obj()) == XGL_SUCCESS);
}

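// Allocate one descriptor set per requested layout from this pool. On success
// xglAllocDescriptorSets reports how many sets were actually produced; the
// result vector is trimmed to that count and each handle is wrapped in a
// heap-allocated DescriptorSet that the caller owns.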
std::vector<DescriptorSet *> DescriptorPool::alloc_sets(XGL_DESCRIPTOR_SET_USAGE usage, const std::vector<const DescriptorSetLayout *> &layouts)
{
    const std::vector<XGL_DESCRIPTOR_SET_LAYOUT> layout_objs = make_objects<XGL_DESCRIPTOR_SET_LAYOUT>(layouts);

    std::vector<XGL_DESCRIPTOR_SET> set_objs;
    set_objs.resize(layout_objs.size());

    // initialized so a failed call cannot leave set_count indeterminate
    uint32_t set_count = 0;
    XGL_RESULT err = xglAllocDescriptorSets(obj(), usage, layout_objs.size(), &layout_objs[0], &set_objs[0], &set_count);
    if (err == XGL_SUCCESS)
        EXPECT(set_count == set_objs.size());
    set_objs.resize(set_count);

    std::vector<DescriptorSet *> sets;
    sets.reserve(set_count);
    for (std::vector<XGL_DESCRIPTOR_SET>::const_iterator it = set_objs.begin(); it != set_objs.end(); it++) {
        // do descriptor sets need memories bound?
        sets.push_back(new DescriptorSet(*it));
    }

    return sets;
}

std::vector<DescriptorSet *> DescriptorPool::alloc_sets(XGL_DESCRIPTOR_SET_USAGE usage, const DescriptorSetLayout &layout, uint32_t count)
{
    return alloc_sets(usage, std::vector<const DescriptorSetLayout *>(count, &layout));
}

DescriptorSet *DescriptorPool::alloc_sets(XGL_DESCRIPTOR_SET_USAGE usage, const DescriptorSetLayout &layout)
{
    std::vector<DescriptorSet *> set = alloc_sets(usage, layout, 1);
    return (set.empty()) ? NULL : set[0];
}

void DescriptorPool::clear_sets(const std::vector<DescriptorSet *> &sets)
{
    const std::vector<XGL_DESCRIPTOR_SET> set_objs = make_objects<XGL_DESCRIPTOR_SET>(sets);
    xglClearDescriptorSets(obj(), set_objs.size(), &set_objs[0]);
}

void DescriptorSet::update(const std::vector<const void *> &update_array)
{
    xglUpdateDescriptors(obj(), update_array.size(), const_cast<const void **>(&update_array[0]));
}

void DynamicVpStateObject::init(const Device &dev, const XGL_DYNAMIC_VP_STATE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDynamicViewportState, dev.obj(), &info);
    alloc_memory(dev);
}

void DynamicRsStateObject::init(const Device &dev, const XGL_DYNAMIC_RS_STATE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDynamicRasterState, dev.obj(), &info);
    alloc_memory(dev);
}

void DynamicCbStateObject::init(const Device &dev, const XGL_DYNAMIC_CB_STATE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDynamicColorBlendState, dev.obj(), &info);
    alloc_memory(dev);
}

void DynamicDsStateObject::init(const Device &dev, const XGL_DYNAMIC_DS_STATE_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateDynamicDepthStencilState, dev.obj(), &info);
    alloc_memory(dev);
}

void CmdBuffer::init(const Device &dev, const XGL_CMD_BUFFER_CREATE_INFO &info)
{
    DERIVED_OBJECT_INIT(xglCreateCommandBuffer, dev.obj(), &info);
}

void CmdBuffer::begin(const XGL_CMD_BUFFER_BEGIN_INFO *info)
{
    EXPECT(xglBeginCommandBuffer(obj(), info) == XGL_SUCCESS);
}

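// Begin recording for work inside a render pass: chain an
// XGL_CMD_BUFFER_GRAPHICS_BEGIN_INFO carrying the render pass and framebuffer
// handles onto the XGL_CMD_BUFFER_BEGIN_INFO pNext list, with the usual
// small-batch / one-time-submit optimization flags, then call begin(&info).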
void CmdBuffer::begin(XGL_RENDER_PASS renderpass_obj, XGL_FRAMEBUFFER framebuffer_obj)
{
    XGL_CMD_BUFFER_BEGIN_INFO info = {};
    XGL_CMD_BUFFER_GRAPHICS_BEGIN_INFO graphics_cmd_buf_info = {};
    graphics_cmd_buf_info.sType = XGL_STRUCTURE_TYPE_CMD_BUFFER_GRAPHICS_BEGIN_INFO;
    graphics_cmd_buf_info.pNext = NULL;
    graphics_cmd_buf_info.renderPassContinue.renderPass = renderpass_obj;
    graphics_cmd_buf_info.renderPassContinue.framebuffer = framebuffer_obj;

    info.flags = XGL_CMD_BUFFER_OPTIMIZE_GPU_SMALL_BATCH_BIT |
        XGL_CMD_BUFFER_OPTIMIZE_ONE_TIME_SUBMIT_BIT;
    info.sType = XGL_STRUCTURE_TYPE_CMD_BUFFER_BEGIN_INFO;
    info.pNext = &graphics_cmd_buf_info;

    begin(&info);
}

void CmdBuffer::begin()
{
    XGL_CMD_BUFFER_BEGIN_INFO info = {};
    info.flags = XGL_CMD_BUFFER_OPTIMIZE_GPU_SMALL_BATCH_BIT |
        XGL_CMD_BUFFER_OPTIMIZE_ONE_TIME_SUBMIT_BIT;
    info.sType = XGL_STRUCTURE_TYPE_CMD_BUFFER_BEGIN_INFO;

    begin(&info);
}

void CmdBuffer::end()
{
    EXPECT(xglEndCommandBuffer(obj()) == XGL_SUCCESS);
}

void CmdBuffer::reset()
{
    EXPECT(xglResetCommandBuffer(obj()) == XGL_SUCCESS);
}

} // namespace xgl_testing