/*
 * Copyright 2016 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
#ifdef DRV_AMDGPU
#include <amdgpu.h>
#include <amdgpu_drm.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/mman.h>
#include <xf86drm.h>

#include "addrinterface.h"
#include "drv_priv.h"
#include "helpers.h"
#include "util.h"

#ifndef CIASICIDGFXENGINE_SOUTHERNISLAND
#define CIASICIDGFXENGINE_SOUTHERNISLAND 0x0000000A
#endif

// clang-format off
#define mmCC_RB_BACKEND_DISABLE 0x263d
#define mmGB_TILE_MODE0 0x2644
#define mmGB_MACROTILE_MODE0 0x2664
#define mmGB_ADDR_CONFIG 0x263e
#define mmMC_ARB_RAMCFG 0x9d8

enum {
	FAMILY_UNKNOWN,
	FAMILY_SI,
	FAMILY_CI,
	FAMILY_KV,
	FAMILY_VI,
	FAMILY_CZ,
	FAMILY_PI,
	FAMILY_LAST,
};
// clang-format on

static const uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
						   DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
						   DRM_FORMAT_XRGB8888 };

static const uint32_t texture_source_formats[] = { DRM_FORMAT_GR88, DRM_FORMAT_R8, DRM_FORMAT_NV21,
						    DRM_FORMAT_NV12 };

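/*
 * Attach tiling metadata (and an optional UMD metadata blob) to a GEM handle
 * via the DRM_AMDGPU_GEM_METADATA ioctl.
 */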
static int amdgpu_set_metadata(int fd, uint32_t handle, struct amdgpu_bo_metadata *info)
{
	struct drm_amdgpu_gem_metadata args = { 0 };

	if (!info)
		return -EINVAL;

	args.handle = handle;
	args.op = AMDGPU_GEM_METADATA_OP_SET_METADATA;
	args.data.flags = info->flags;
	args.data.tiling_info = info->tiling_info;

	if (info->size_metadata > sizeof(args.data.data))
		return -EINVAL;

	if (info->size_metadata) {
		args.data.data_size_bytes = info->size_metadata;
		memcpy(args.data.data, info->umd_metadata, info->size_metadata);
	}

	return drmCommandWriteRead(fd, DRM_AMDGPU_GEM_METADATA, &args, sizeof(args));
}

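/*
 * Read |count| dwords of MMIO register space starting at |dword_offset|
 * through the AMDGPU_INFO_READ_MMR_REG query.
 */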
static int amdgpu_read_mm_regs(int fd, unsigned dword_offset, unsigned count, uint32_t instance,
			       uint32_t flags, uint32_t *values)
{
	struct drm_amdgpu_info request;

	memset(&request, 0, sizeof(request));
	request.return_pointer = (uintptr_t)values;
	request.return_size = count * sizeof(uint32_t);
	request.query = AMDGPU_INFO_READ_MMR_REG;
	request.read_mmr_reg.dword_offset = dword_offset;
	request.read_mmr_reg.count = count;
	request.read_mmr_reg.instance = instance;
	request.read_mmr_reg.flags = flags;

	return drmCommandWrite(fd, DRM_AMDGPU_INFO, &request, sizeof(struct drm_amdgpu_info));
}

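/*
 * Collect the tiling configuration registers (backend disables, tile modes,
 * GB_ADDR_CONFIG, MC_ARB_RAMCFG) that addrlib needs to model the GPU.
 */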
static int amdgpu_query_gpu(int fd, struct amdgpu_gpu_info *gpu_info)
{
	int ret;
	uint32_t instance;

	if (!gpu_info)
		return -EINVAL;

	instance = AMDGPU_INFO_MMR_SH_INDEX_MASK << AMDGPU_INFO_MMR_SH_INDEX_SHIFT;

	ret = amdgpu_read_mm_regs(fd, mmCC_RB_BACKEND_DISABLE, 1, instance, 0,
				  &gpu_info->backend_disable[0]);
	if (ret)
		return ret;
	/* extract bitfield CC_RB_BACKEND_DISABLE.BACKEND_DISABLE */
	gpu_info->backend_disable[0] = (gpu_info->backend_disable[0] >> 16) & 0xff;

	ret = amdgpu_read_mm_regs(fd, mmGB_TILE_MODE0, 32, 0xffffffff, 0, gpu_info->gb_tile_mode);
	if (ret)
		return ret;

	ret = amdgpu_read_mm_regs(fd, mmGB_MACROTILE_MODE0, 16, 0xffffffff, 0,
				  gpu_info->gb_macro_tile_mode);
	if (ret)
		return ret;

	ret = amdgpu_read_mm_regs(fd, mmGB_ADDR_CONFIG, 1, 0xffffffff, 0, &gpu_info->gb_addr_cfg);
	if (ret)
		return ret;

	ret = amdgpu_read_mm_regs(fd, mmMC_ARB_RAMCFG, 1, 0xffffffff, 0, &gpu_info->mc_arb_ramcfg);
	if (ret)
		return ret;

	return 0;
}

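/* System memory allocation callbacks installed into addrlib. */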
static void *ADDR_API alloc_sys_mem(const ADDR_ALLOCSYSMEM_INPUT *in)
{
	return malloc(in->sizeInBytes);
}

static ADDR_E_RETURNCODE ADDR_API free_sys_mem(const ADDR_FREESYSMEM_INPUT *in)
{
	free(in->pVirtAddr);
	return ADDR_OK;
}

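/*
 * Ask addrlib to compute the surface layout (pitch, size, alignment) for the
 * chosen tile mode and convert the resulting tile parameters into
 * AMDGPU_TILING_* flags.
 */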
static int amdgpu_addrlib_compute(void *addrlib, uint32_t width, uint32_t height, uint32_t format,
				  uint64_t use_flags, uint32_t *tiling_flags,
				  ADDR_COMPUTE_SURFACE_INFO_OUTPUT *addr_out)
{
	ADDR_COMPUTE_SURFACE_INFO_INPUT addr_surf_info_in = { 0 };
	ADDR_TILEINFO addr_tile_info = { 0 };
	ADDR_TILEINFO addr_tile_info_out = { 0 };
	uint32_t bits_per_pixel;

	addr_surf_info_in.size = sizeof(ADDR_COMPUTE_SURFACE_INFO_INPUT);

	/* Set the requested tiling mode. */
	addr_surf_info_in.tileMode = ADDR_TM_2D_TILED_THIN1;
	if (use_flags &
	    (BO_USE_CURSOR | BO_USE_LINEAR | BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN))
		addr_surf_info_in.tileMode = ADDR_TM_LINEAR_ALIGNED;
	else if (width <= 16 || height <= 16)
		addr_surf_info_in.tileMode = ADDR_TM_1D_TILED_THIN1;

	/* Bits per pixel are calculated from the format. */
	bits_per_pixel = drv_stride_from_format(format, 1, 0) * 8;
	addr_surf_info_in.bpp = bits_per_pixel;
	addr_surf_info_in.numSamples = 1;
	addr_surf_info_in.width = width;
	addr_surf_info_in.height = height;
	addr_surf_info_in.numSlices = 1;
	addr_surf_info_in.pTileInfo = &addr_tile_info;
	addr_surf_info_in.tileIndex = -1;

	/* This disables incorrect calculations (hacks) in addrlib. */
	addr_surf_info_in.flags.noStencil = 1;

	/* Set the micro tile type. */
	if (use_flags & BO_USE_SCANOUT)
		addr_surf_info_in.tileType = ADDR_DISPLAYABLE;
	else
		addr_surf_info_in.tileType = ADDR_NON_DISPLAYABLE;

	addr_out->size = sizeof(ADDR_COMPUTE_SURFACE_INFO_OUTPUT);
	addr_out->pTileInfo = &addr_tile_info_out;

	if (AddrComputeSurfaceInfo(addrlib, &addr_surf_info_in, addr_out) != ADDR_OK)
		return -EINVAL;

	ADDR_CONVERT_TILEINFOTOHW_INPUT s_in = { 0 };
	ADDR_CONVERT_TILEINFOTOHW_OUTPUT s_out = { 0 };
	ADDR_TILEINFO s_tile_hw_info_out = { 0 };

	s_in.size = sizeof(ADDR_CONVERT_TILEINFOTOHW_INPUT);
	/* Convert from real value to HW value */
	s_in.reverse = 0;
	s_in.pTileInfo = &addr_tile_info_out;
	s_in.tileIndex = -1;

	s_out.size = sizeof(ADDR_CONVERT_TILEINFOTOHW_OUTPUT);
	s_out.pTileInfo = &s_tile_hw_info_out;

	if (AddrConvertTileInfoToHW(addrlib, &s_in, &s_out) != ADDR_OK)
		return -EINVAL;

	if (addr_out->tileMode >= ADDR_TM_2D_TILED_THIN1)
		/* 2D_TILED_THIN1 */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 4);
	else if (addr_out->tileMode >= ADDR_TM_1D_TILED_THIN1)
		/* 1D_TILED_THIN1 */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 2);
	else
		/* LINEAR_ALIGNED */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 1);

	*tiling_flags |= AMDGPU_TILING_SET(BANK_WIDTH, drv_log_base2(addr_tile_info_out.bankWidth));
	*tiling_flags |=
	    AMDGPU_TILING_SET(BANK_HEIGHT, drv_log_base2(addr_tile_info_out.bankHeight));
	*tiling_flags |= AMDGPU_TILING_SET(TILE_SPLIT, s_tile_hw_info_out.tileSplitBytes);
	*tiling_flags |= AMDGPU_TILING_SET(MACRO_TILE_ASPECT,
					   drv_log_base2(addr_tile_info_out.macroAspectRatio));
	*tiling_flags |= AMDGPU_TILING_SET(PIPE_CONFIG, s_tile_hw_info_out.pipeConfig);
	*tiling_flags |= AMDGPU_TILING_SET(NUM_BANKS, s_tile_hw_info_out.banks);

	return 0;
}

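/*
 * Create an addrlib handle seeded with the tiling registers queried from the
 * kernel. Note the chip family is hard-coded to FAMILY_CZ. Returns NULL on
 * failure.
 */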
static void *amdgpu_addrlib_init(int fd)
{
	int ret;
	ADDR_CREATE_INPUT addr_create_input = { 0 };
	ADDR_CREATE_OUTPUT addr_create_output = { 0 };
	ADDR_REGISTER_VALUE reg_value = { 0 };
	ADDR_CREATE_FLAGS create_flags = { { 0 } };
	ADDR_E_RETURNCODE addr_ret;

	addr_create_input.size = sizeof(ADDR_CREATE_INPUT);
	addr_create_output.size = sizeof(ADDR_CREATE_OUTPUT);

	struct amdgpu_gpu_info gpu_info = { 0 };

	ret = amdgpu_query_gpu(fd, &gpu_info);

	if (ret) {
		fprintf(stderr, "[%s] failed with error %d\n", __func__, ret);
		return NULL;
	}

	reg_value.noOfBanks = gpu_info.mc_arb_ramcfg & 0x3;
	reg_value.gbAddrConfig = gpu_info.gb_addr_cfg;
	reg_value.noOfRanks = (gpu_info.mc_arb_ramcfg & 0x4) >> 2;

	reg_value.backendDisables = gpu_info.backend_disable[0];
	reg_value.pTileConfig = gpu_info.gb_tile_mode;
	reg_value.noOfEntries = sizeof(gpu_info.gb_tile_mode) / sizeof(gpu_info.gb_tile_mode[0]);
	reg_value.pMacroTileConfig = gpu_info.gb_macro_tile_mode;
	reg_value.noOfMacroEntries =
	    sizeof(gpu_info.gb_macro_tile_mode) / sizeof(gpu_info.gb_macro_tile_mode[0]);
	create_flags.value = 0;
	create_flags.useTileIndex = 1;

	addr_create_input.chipEngine = CIASICIDGFXENGINE_SOUTHERNISLAND;

	addr_create_input.chipFamily = FAMILY_CZ;
	addr_create_input.createFlags = create_flags;
	addr_create_input.callbacks.allocSysMem = alloc_sys_mem;
	addr_create_input.callbacks.freeSysMem = free_sys_mem;
	addr_create_input.callbacks.debugPrint = 0;
	addr_create_input.regValue = reg_value;

	addr_ret = AddrCreate(&addr_create_input, &addr_create_output);

	if (addr_ret != ADDR_OK) {
		fprintf(stderr, "[%s] AddrCreate failed with error %d\n", __func__, addr_ret);
		return NULL;
	}

	return addr_create_output.hLib;
}

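/*
 * Advertise the supported format/use-flag combinations: linear-only texture
 * formats, then linear and 2D-tiled render target layouts at increasing
 * priority values.
 */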
static int amdgpu_init(struct driver *drv)
{
	int ret;
	void *addrlib;
	struct format_metadata metadata;
	uint64_t use_flags = BO_USE_RENDER_MASK;

	addrlib = amdgpu_addrlib_init(drv_get_fd(drv));
	if (!addrlib)
		return -1;

	drv->priv = addrlib;

	ret = drv_add_combinations(drv, texture_source_formats, ARRAY_SIZE(texture_source_formats),
				   &LINEAR_METADATA, BO_USE_TEXTURE_MASK);
	if (ret)
		return ret;

	drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA, BO_USE_SCANOUT);

	metadata.tiling = ADDR_DISPLAYABLE << 16 | ADDR_TM_LINEAR_ALIGNED;
	metadata.priority = 2;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
				   &metadata, use_flags);
	if (ret)
		return ret;

	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &metadata, BO_USE_SCANOUT);

	metadata.tiling = ADDR_NON_DISPLAYABLE << 16 | ADDR_TM_LINEAR_ALIGNED;
	metadata.priority = 3;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
				   &metadata, use_flags);
	if (ret)
		return ret;

	use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	use_flags &= ~BO_USE_SW_READ_OFTEN;
	use_flags &= ~BO_USE_LINEAR;

	metadata.tiling = ADDR_DISPLAYABLE << 16 | ADDR_TM_2D_TILED_THIN1;
	metadata.priority = 4;

	ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
				   &metadata, use_flags);
	if (ret)
		return ret;

	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &metadata, BO_USE_SCANOUT);

	metadata.tiling = ADDR_NON_DISPLAYABLE << 16 | ADDR_TM_2D_TILED_THIN1;
	metadata.priority = 5;

	ret = drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
				   &metadata, use_flags);
	if (ret)
		return ret;

	return ret;
}

static void amdgpu_close(struct driver *drv)
{
	AddrDestroy(drv->priv);
	drv->priv = NULL;
}

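/*
 * Allocate a buffer in VRAM. NV12/NV21 layouts come from the generic
 * drv_bo_from_format() helper with a 64-pixel aligned width; all other
 * formats are laid out by addrlib, and the computed tiling flags are stored
 * as buffer metadata.
 */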
static int amdgpu_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			    uint64_t use_flags)
{
	void *addrlib = bo->drv->priv;
	union drm_amdgpu_gem_create gem_create;
	struct amdgpu_bo_metadata metadata = { 0 };
	ADDR_COMPUTE_SURFACE_INFO_OUTPUT addr_out = { 0 };
	uint32_t tiling_flags = 0;
	size_t plane;
	int ret;

	if (format == DRM_FORMAT_NV12 || format == DRM_FORMAT_NV21) {
		drv_bo_from_format(bo, ALIGN(width, 64), height, format);
	} else {
		if (amdgpu_addrlib_compute(addrlib, width, height, format, use_flags, &tiling_flags,
					   &addr_out) < 0)
			return -EINVAL;

		bo->tiling = tiling_flags;
		/* RGB has 1 plane only */
		bo->offsets[0] = 0;
		bo->total_size = bo->sizes[0] = addr_out.surfSize;
		bo->strides[0] = addr_out.pixelPitch * DIV_ROUND_UP(addr_out.pixelBits, 8);
	}

	memset(&gem_create, 0, sizeof(gem_create));

	gem_create.in.bo_size = bo->total_size;
	gem_create.in.alignment = addr_out.baseAlign;
	/* Set the placement. */
	gem_create.in.domains = AMDGPU_GEM_DOMAIN_VRAM;
	gem_create.in.domain_flags = AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;
	/* Allocate the buffer with the preferred heap. */
	ret = drmCommandWriteRead(drv_get_fd(bo->drv), DRM_AMDGPU_GEM_CREATE, &gem_create,
				  sizeof(gem_create));

	if (ret < 0)
		return ret;

	metadata.tiling_info = tiling_flags;

	for (plane = 0; plane < bo->num_planes; plane++)
		bo->handles[plane].u32 = gem_create.out.handle;

	ret = amdgpu_set_metadata(drv_get_fd(bo->drv), bo->handles[0].u32, &metadata);

	return ret;
}

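/*
 * Map the whole buffer by passing the offset returned by
 * DRM_IOCTL_AMDGPU_GEM_MMAP to mmap().
 */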
static void *amdgpu_bo_map(struct bo *bo, struct mapping *mapping, size_t plane, uint32_t map_flags)
{
	int ret;
	union drm_amdgpu_gem_mmap gem_map;

	memset(&gem_map, 0, sizeof(gem_map));
	gem_map.in.handle = bo->handles[plane].u32;

	ret = drmIoctl(bo->drv->fd, DRM_IOCTL_AMDGPU_GEM_MMAP, &gem_map);
	if (ret) {
		fprintf(stderr, "drv: DRM_IOCTL_AMDGPU_GEM_MMAP failed\n");
		return MAP_FAILED;
	}

	mapping->vma->length = bo->total_size;

	return mmap(0, bo->total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
		    gem_map.out.addr_ptr);
}

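/* Resolve flexible formats to the concrete DRM format used for allocation. */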
static uint32_t amdgpu_resolve_format(uint32_t format, uint64_t use_flags)
{
	switch (format) {
	case DRM_FORMAT_FLEX_YCbCr_420_888:
		return DRM_FORMAT_NV12;
	default:
		return format;
	}
}

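/* Backend entry points; generic drv_* helpers handle destroy, import and unmap. */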
const struct backend backend_amdgpu = {
	.name = "amdgpu",
	.init = amdgpu_init,
	.close = amdgpu_close,
	.bo_create = amdgpu_bo_create,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = drv_prime_bo_import,
	.bo_map = amdgpu_bo_map,
	.bo_unmap = drv_bo_munmap,
	.resolve_format = amdgpu_resolve_format,
};

#endif