blob: 1a68a5b78155677ef179cff9ab59b320322ef22b [file] [log] [blame]
Akshu Agrawal0337d9b2016-07-28 15:35:45 +05301/*
Daniele Castagna7a755de2016-12-16 17:32:30 -05002 * Copyright 2016 The Chromium OS Authors. All rights reserved.
Akshu Agrawal0337d9b2016-07-28 15:35:45 +05303 * Use of this source code is governed by a BSD-style license that can be
4 * found in the LICENSE file.
5 */
6#ifdef DRV_AMDGPU
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -08007#include <amdgpu.h>
8#include <amdgpu_drm.h>
Akshu Agrawal0337d9b2016-07-28 15:35:45 +05309#include <errno.h>
10#include <stdio.h>
11#include <stdlib.h>
12#include <string.h>
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +053013#include <sys/mman.h>
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053014#include <xf86drm.h>
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053015
16#include "addrinterface.h"
17#include "drv_priv.h"
18#include "helpers.h"
19#include "util.h"
20
21#ifndef CIASICIDGFXENGINE_SOUTHERNISLAND
22#define CIASICIDGFXENGINE_SOUTHERNISLAND 0x0000000A
23#endif
24
// clang-format off
/*
 * MMIO register dword offsets read back through AMDGPU_INFO_READ_MMR_REG
 * (see amdgpu_query_gpu below) to reconstruct the GPU tiling configuration
 * that addrlib needs.
 */
#define mmCC_RB_BACKEND_DISABLE 0x263d
#define mmGB_TILE_MODE0 0x2644
#define mmGB_MACROTILE_MODE0 0x2664
#define mmGB_ADDR_CONFIG 0x263e
#define mmMC_ARB_RAMCFG 0x9d8

/*
 * GPU family identifiers passed to addrlib via ADDR_CREATE_INPUT.chipFamily
 * (only FAMILY_CZ is used in this file; see amdgpu_addrlib_init).
 */
enum {
	FAMILY_UNKNOWN,
	FAMILY_SI,
	FAMILY_CI,
	FAMILY_KV,
	FAMILY_VI,
	FAMILY_CZ,
	FAMILY_PI,
	FAMILY_LAST,
};
// clang-format on
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053043
/* Per-driver private state, stored in driver->priv by amdgpu_init. */
struct amdgpu_priv {
	void *addrlib;	  /* opaque addrlib handle returned by AddrCreate() */
	int drm_version;  /* kernel DRM minor version (drmGetVersion) */
};
48
Alec Thileniusa29bf672017-10-31 14:39:16 -060049const static uint32_t render_target_formats[] = { DRM_FORMAT_ABGR8888, DRM_FORMAT_ARGB8888,
50 DRM_FORMAT_RGB565, DRM_FORMAT_XBGR8888,
51 DRM_FORMAT_XRGB8888 };
Gurchetan Singh179687e2016-10-28 10:07:35 -070052
Shirish S8317bc02017-10-13 09:54:03 +053053const static uint32_t texture_source_formats[] = { DRM_FORMAT_GR88, DRM_FORMAT_R8, DRM_FORMAT_NV21,
Deepak Sharmaadc70fa2018-02-20 14:58:26 -080054 DRM_FORMAT_NV12, DRM_FORMAT_YVU420_ANDROID };
Shirish Sdf423df2017-04-18 16:21:59 +053055
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080056static int amdgpu_set_metadata(int fd, uint32_t handle, struct amdgpu_bo_metadata *info)
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053057{
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080058 struct drm_amdgpu_gem_metadata args = { 0 };
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053059
60 if (!info)
61 return -EINVAL;
62
63 args.handle = handle;
64 args.op = AMDGPU_GEM_METADATA_OP_SET_METADATA;
65 args.data.flags = info->flags;
66 args.data.tiling_info = info->tiling_info;
67
68 if (info->size_metadata > sizeof(args.data.data))
69 return -EINVAL;
70
71 if (info->size_metadata) {
72 args.data.data_size_bytes = info->size_metadata;
73 memcpy(args.data.data, info->umd_metadata, info->size_metadata);
74 }
75
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080076 return drmCommandWriteRead(fd, DRM_AMDGPU_GEM_METADATA, &args, sizeof(args));
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053077}
78
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080079static int amdgpu_read_mm_regs(int fd, unsigned dword_offset, unsigned count, uint32_t instance,
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053080 uint32_t flags, uint32_t *values)
81{
82 struct drm_amdgpu_info request;
83
84 memset(&request, 0, sizeof(request));
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080085 request.return_pointer = (uintptr_t)values;
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053086 request.return_size = count * sizeof(uint32_t);
87 request.query = AMDGPU_INFO_READ_MMR_REG;
88 request.read_mmr_reg.dword_offset = dword_offset;
89 request.read_mmr_reg.count = count;
90 request.read_mmr_reg.instance = instance;
91 request.read_mmr_reg.flags = flags;
92
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -080093 return drmCommandWrite(fd, DRM_AMDGPU_INFO, &request, sizeof(struct drm_amdgpu_info));
Akshu Agrawal0337d9b2016-07-28 15:35:45 +053094}
95
96static int amdgpu_query_gpu(int fd, struct amdgpu_gpu_info *gpu_info)
97{
98 int ret;
99 uint32_t instance;
100
101 if (!gpu_info)
102 return -EINVAL;
103
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800104 instance = AMDGPU_INFO_MMR_SH_INDEX_MASK << AMDGPU_INFO_MMR_SH_INDEX_SHIFT;
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530105
106 ret = amdgpu_read_mm_regs(fd, mmCC_RB_BACKEND_DISABLE, 1, instance, 0,
107 &gpu_info->backend_disable[0]);
108 if (ret)
109 return ret;
110 /* extract bitfield CC_RB_BACKEND_DISABLE.BACKEND_DISABLE */
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800111 gpu_info->backend_disable[0] = (gpu_info->backend_disable[0] >> 16) & 0xff;
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530112
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800113 ret = amdgpu_read_mm_regs(fd, mmGB_TILE_MODE0, 32, 0xffffffff, 0, gpu_info->gb_tile_mode);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530114 if (ret)
115 return ret;
116
117 ret = amdgpu_read_mm_regs(fd, mmGB_MACROTILE_MODE0, 16, 0xffffffff, 0,
118 gpu_info->gb_macro_tile_mode);
119 if (ret)
120 return ret;
121
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800122 ret = amdgpu_read_mm_regs(fd, mmGB_ADDR_CONFIG, 1, 0xffffffff, 0, &gpu_info->gb_addr_cfg);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530123 if (ret)
124 return ret;
125
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800126 ret = amdgpu_read_mm_regs(fd, mmMC_ARB_RAMCFG, 1, 0xffffffff, 0, &gpu_info->mc_arb_ramcfg);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530127 if (ret)
128 return ret;
129
130 return 0;
131}
132
133static void *ADDR_API alloc_sys_mem(const ADDR_ALLOCSYSMEM_INPUT *in)
134{
135 return malloc(in->sizeInBytes);
136}
137
138static ADDR_E_RETURNCODE ADDR_API free_sys_mem(const ADDR_FREESYSMEM_INPUT *in)
139{
140 free(in->pVirtAddr);
141 return ADDR_OK;
142}
143
/*
 * Ask addrlib to compute the surface layout (size, pitch, alignment) for a
 * width x height buffer of `format`, choosing a tile mode from `use_flags`,
 * and translate the resulting tile parameters into AMDGPU_TILING_SET kernel
 * tiling flags OR-ed into `*tiling_flags`.
 *
 * Returns 0 on success, -EINVAL if either addrlib call fails.
 */
static int amdgpu_addrlib_compute(void *addrlib, uint32_t width, uint32_t height, uint32_t format,
				  uint64_t use_flags, uint32_t *tiling_flags,
				  ADDR_COMPUTE_SURFACE_INFO_OUTPUT *addr_out)
{
	ADDR_COMPUTE_SURFACE_INFO_INPUT addr_surf_info_in = { 0 };
	ADDR_TILEINFO addr_tile_info = { 0 };
	ADDR_TILEINFO addr_tile_info_out = { 0 };
	uint32_t bits_per_pixel;

	addr_surf_info_in.size = sizeof(ADDR_COMPUTE_SURFACE_INFO_INPUT);

	/* Set the requested tiling mode. */
	addr_surf_info_in.tileMode = ADDR_TM_2D_TILED_THIN1;
	/* CPU-visible or cursor buffers must be linear; tiny buffers get 1D tiling. */
	if (use_flags &
	    (BO_USE_CURSOR | BO_USE_LINEAR | BO_USE_SW_READ_OFTEN | BO_USE_SW_WRITE_OFTEN))
		addr_surf_info_in.tileMode = ADDR_TM_LINEAR_ALIGNED;
	else if (width <= 16 || height <= 16)
		addr_surf_info_in.tileMode = ADDR_TM_1D_TILED_THIN1;

	/* Derive bpp from the per-pixel stride of the format (single plane assumed). */
	bits_per_pixel = drv_stride_from_format(format, 1, 0) * 8;
	/* Bits per pixel should be calculated from format*/
	addr_surf_info_in.bpp = bits_per_pixel;
	addr_surf_info_in.numSamples = 1;
	addr_surf_info_in.width = width;
	addr_surf_info_in.height = height;
	addr_surf_info_in.numSlices = 1;
	addr_surf_info_in.pTileInfo = &addr_tile_info;
	addr_surf_info_in.tileIndex = -1;

	/* This disables incorrect calculations (hacks) in addrlib. */
	addr_surf_info_in.flags.noStencil = 1;

	/* Set the micro tile type. */
	if (use_flags & BO_USE_SCANOUT)
		addr_surf_info_in.tileType = ADDR_DISPLAYABLE;
	else
		addr_surf_info_in.tileType = ADDR_NON_DISPLAYABLE;

	addr_out->size = sizeof(ADDR_COMPUTE_SURFACE_INFO_OUTPUT);
	addr_out->pTileInfo = &addr_tile_info_out;

	if (AddrComputeSurfaceInfo(addrlib, &addr_surf_info_in, addr_out) != ADDR_OK)
		return -EINVAL;

	ADDR_CONVERT_TILEINFOTOHW_INPUT s_in = { 0 };
	ADDR_CONVERT_TILEINFOTOHW_OUTPUT s_out = { 0 };
	ADDR_TILEINFO s_tile_hw_info_out = { 0 };

	s_in.size = sizeof(ADDR_CONVERT_TILEINFOTOHW_INPUT);
	/* Convert from real value to HW value */
	s_in.reverse = 0;
	s_in.pTileInfo = &addr_tile_info_out;
	s_in.tileIndex = -1;

	s_out.size = sizeof(ADDR_CONVERT_TILEINFOTOHW_OUTPUT);
	s_out.pTileInfo = &s_tile_hw_info_out;

	if (AddrConvertTileInfoToHW(addrlib, &s_in, &s_out) != ADDR_OK)
		return -EINVAL;

	/* Encode the array mode addrlib actually picked (it may have demoted it). */
	if (addr_out->tileMode >= ADDR_TM_2D_TILED_THIN1)
		/* 2D_TILED_THIN1 */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 4);
	else if (addr_out->tileMode >= ADDR_TM_1D_TILED_THIN1)
		/* 1D_TILED_THIN1 */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 2);
	else
		/* LINEAR_ALIGNED */
		*tiling_flags |= AMDGPU_TILING_SET(ARRAY_MODE, 1);

	/* Bank width/height and macro aspect are encoded as log2 values. */
	*tiling_flags |= AMDGPU_TILING_SET(BANK_WIDTH, drv_log_base2(addr_tile_info_out.bankWidth));
	*tiling_flags |=
	    AMDGPU_TILING_SET(BANK_HEIGHT, drv_log_base2(addr_tile_info_out.bankHeight));
	*tiling_flags |= AMDGPU_TILING_SET(TILE_SPLIT, s_tile_hw_info_out.tileSplitBytes);
	*tiling_flags |= AMDGPU_TILING_SET(MACRO_TILE_ASPECT,
					   drv_log_base2(addr_tile_info_out.macroAspectRatio));
	*tiling_flags |= AMDGPU_TILING_SET(PIPE_CONFIG, s_tile_hw_info_out.pipeConfig);
	*tiling_flags |= AMDGPU_TILING_SET(NUM_BANKS, s_tile_hw_info_out.banks);

	return 0;
}
225
226static void *amdgpu_addrlib_init(int fd)
227{
228 int ret;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800229 ADDR_CREATE_INPUT addr_create_input = { 0 };
230 ADDR_CREATE_OUTPUT addr_create_output = { 0 };
231 ADDR_REGISTER_VALUE reg_value = { 0 };
232 ADDR_CREATE_FLAGS create_flags = { { 0 } };
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530233 ADDR_E_RETURNCODE addr_ret;
234
235 addr_create_input.size = sizeof(ADDR_CREATE_INPUT);
236 addr_create_output.size = sizeof(ADDR_CREATE_OUTPUT);
237
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800238 struct amdgpu_gpu_info gpu_info = { 0 };
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530239
240 ret = amdgpu_query_gpu(fd, &gpu_info);
241
242 if (ret) {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700243 drv_log("failed with error =%d\n", ret);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530244 return NULL;
245 }
246
247 reg_value.noOfBanks = gpu_info.mc_arb_ramcfg & 0x3;
248 reg_value.gbAddrConfig = gpu_info.gb_addr_cfg;
249 reg_value.noOfRanks = (gpu_info.mc_arb_ramcfg & 0x4) >> 2;
250
251 reg_value.backendDisables = gpu_info.backend_disable[0];
252 reg_value.pTileConfig = gpu_info.gb_tile_mode;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800253 reg_value.noOfEntries = sizeof(gpu_info.gb_tile_mode) / sizeof(gpu_info.gb_tile_mode[0]);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530254 reg_value.pMacroTileConfig = gpu_info.gb_macro_tile_mode;
Gurchetan Singh1b1d56a2017-03-10 16:25:23 -0800255 reg_value.noOfMacroEntries =
256 sizeof(gpu_info.gb_macro_tile_mode) / sizeof(gpu_info.gb_macro_tile_mode[0]);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530257 create_flags.value = 0;
258 create_flags.useTileIndex = 1;
259
260 addr_create_input.chipEngine = CIASICIDGFXENGINE_SOUTHERNISLAND;
261
262 addr_create_input.chipFamily = FAMILY_CZ;
263 addr_create_input.createFlags = create_flags;
264 addr_create_input.callbacks.allocSysMem = alloc_sys_mem;
265 addr_create_input.callbacks.freeSysMem = free_sys_mem;
266 addr_create_input.callbacks.debugPrint = 0;
267 addr_create_input.regValue = reg_value;
268
269 addr_ret = AddrCreate(&addr_create_input, &addr_create_output);
270
271 if (addr_ret != ADDR_OK) {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700272 drv_log("failed error =%d\n", addr_ret);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530273 return NULL;
274 }
275
276 return addr_create_output.hLib;
277}
278
/*
 * Driver init: allocate private state, record the DRM minor version,
 * bring up addrlib, and register the supported format/use-flag
 * combinations in ascending priority (linear displayable -> linear
 * non-displayable -> 2D-tiled displayable -> 2D-tiled non-displayable).
 *
 * Returns 0 on success, -1 on allocation/version/addrlib failure.
 */
static int amdgpu_init(struct driver *drv)
{
	struct amdgpu_priv *priv;
	drmVersionPtr drm_version;
	struct format_metadata metadata;
	uint64_t use_flags = BO_USE_RENDER_MASK;

	priv = calloc(1, sizeof(struct amdgpu_priv));
	if (!priv)
		return -1;

	/* The minor version gates explicit-sync support in amdgpu_bo_create. */
	drm_version = drmGetVersion(drv_get_fd(drv));
	if (!drm_version) {
		free(priv);
		return -1;
	}

	priv->drm_version = drm_version->version_minor;
	drmFreeVersion(drm_version);

	priv->addrlib = amdgpu_addrlib_init(drv_get_fd(drv));
	if (!priv->addrlib) {
		free(priv);
		return -1;
	}

	drv->priv = priv;

	drv_add_combinations(drv, texture_source_formats, ARRAY_SIZE(texture_source_formats),
			     &LINEAR_METADATA, BO_USE_TEXTURE_MASK);

	/* YUV format for camera */
	drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);
	/*
	 * R8 format is used for Android's HAL_PIXEL_FORMAT_BLOB and is used for JPEG snapshots
	 * from camera.
	 */
	drv_modify_combination(drv, DRM_FORMAT_R8, &LINEAR_METADATA,
			       BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE);

	drv_modify_combination(drv, DRM_FORMAT_NV21, &LINEAR_METADATA, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_NV12, &LINEAR_METADATA, BO_USE_SCANOUT);

	/* Linear, displayable render targets (lowest tiled priority). */
	metadata.tiling = ADDR_DISPLAYABLE << 16 | ADDR_TM_LINEAR_ALIGNED;
	metadata.priority = 2;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, use_flags);

	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_CURSOR | BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &metadata, BO_USE_SCANOUT);

	/* Linear, non-displayable render targets. */
	metadata.tiling = ADDR_NON_DISPLAYABLE << 16 | ADDR_TM_LINEAR_ALIGNED;
	metadata.priority = 3;
	metadata.modifier = DRM_FORMAT_MOD_LINEAR;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, use_flags);

	/* Tiled combinations must not be CPU-mappable or linear. */
	use_flags &= ~BO_USE_SW_WRITE_OFTEN;
	use_flags &= ~BO_USE_SW_READ_OFTEN;
	use_flags &= ~BO_USE_LINEAR;

	/* 2D-tiled, displayable render targets. */
	metadata.tiling = ADDR_DISPLAYABLE << 16 | ADDR_TM_2D_TILED_THIN1;
	metadata.priority = 4;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, use_flags);

	drv_modify_combination(drv, DRM_FORMAT_ARGB8888, &metadata, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XRGB8888, &metadata, BO_USE_SCANOUT);
	drv_modify_combination(drv, DRM_FORMAT_XBGR8888, &metadata, BO_USE_SCANOUT);

	/* 2D-tiled, non-displayable render targets (highest priority). */
	metadata.tiling = ADDR_NON_DISPLAYABLE << 16 | ADDR_TM_2D_TILED_THIN1;
	metadata.priority = 5;

	drv_add_combinations(drv, render_target_formats, ARRAY_SIZE(render_target_formats),
			     &metadata, use_flags);

	return 0;
}
363
364static void amdgpu_close(struct driver *drv)
365{
Bas Nieuwenhuizen3cf8c922018-03-23 17:21:37 +0100366 struct amdgpu_priv *priv = (struct amdgpu_priv *)drv->priv;
367 AddrDestroy(priv->addrlib);
368 free(priv);
Akshu Agrawal0337d9b2016-07-28 15:35:45 +0530369 drv->priv = NULL;
370}
371
/*
 * Allocate a buffer object: compute the layout (addrlib for RGB, fixed
 * alignment for YUV), pick a memory domain from the use flags, issue
 * DRM_AMDGPU_GEM_CREATE, then attach tiling metadata to the handle.
 *
 * Returns 0 on success or a negative errno from layout computation /
 * the create ioctl / metadata setup.
 */
static int amdgpu_bo_create(struct bo *bo, uint32_t width, uint32_t height, uint32_t format,
			    uint64_t use_flags)
{
	struct amdgpu_priv *priv = (struct amdgpu_priv *)bo->drv->priv;
	void *addrlib = priv->addrlib;
	union drm_amdgpu_gem_create gem_create;
	struct amdgpu_bo_metadata metadata = { 0 };
	ADDR_COMPUTE_SURFACE_INFO_OUTPUT addr_out = { 0 };
	uint32_t tiling_flags = 0;
	size_t plane;
	int ret;

	/* YUV formats are laid out linearly with fixed width alignment;
	 * addr_out stays zeroed, so alignment below is 0 (kernel default). */
	if (format == DRM_FORMAT_NV12 || format == DRM_FORMAT_NV21) {
		drv_bo_from_format(bo, ALIGN(width, 64), height, format);
	} else if (format == DRM_FORMAT_YVU420_ANDROID) {
		drv_bo_from_format(bo, ALIGN(width, 128), height, format);
	} else {
		if (amdgpu_addrlib_compute(addrlib, width, height, format, use_flags, &tiling_flags,
					   &addr_out) < 0)
			return -EINVAL;

		bo->tiling = tiling_flags;
		/* RGB has 1 plane only */
		bo->offsets[0] = 0;
		bo->total_size = bo->sizes[0] = addr_out.surfSize;
		bo->strides[0] = addr_out.pixelPitch * DIV_ROUND_UP(addr_out.pixelBits, 8);
	}

	memset(&gem_create, 0, sizeof(gem_create));

	gem_create.in.bo_size = bo->total_size;
	gem_create.in.alignment = addr_out.baseAlign;
	/* Set the placement. */

	gem_create.in.domain_flags = 0;
	/* CPU-touched buffers need CPU-accessible placement. */
	if (use_flags & (BO_USE_LINEAR | BO_USE_SW))
		gem_create.in.domain_flags |= AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED;

	if (use_flags & (BO_USE_SCANOUT | BO_USE_CURSOR)) {
		/* TODO(dbehr) do not use VRAM after we enable display VM */
		gem_create.in.domains = AMDGPU_GEM_DOMAIN_VRAM;
	} else {
		gem_create.in.domains = AMDGPU_GEM_DOMAIN_GTT;
		/* Write-combined GTT unless the CPU will read it often. */
		if (!(use_flags & BO_USE_SW_READ_OFTEN))
			gem_create.in.domain_flags |= AMDGPU_GEM_CREATE_CPU_GTT_USWC;
	}

	/* If drm_version >= 21 everything exposes explicit synchronization primitives
	   and chromeos/arc++ will use them. Disable implicit synchronization. */
	if (priv->drm_version >= 21) {
		gem_create.in.domain_flags |= AMDGPU_GEM_CREATE_EXPLICIT_SYNC;
	}

	/* Allocate the buffer with the preferred heap. */
	ret = drmCommandWriteRead(drv_get_fd(bo->drv), DRM_AMDGPU_GEM_CREATE, &gem_create,
				  sizeof(gem_create));

	if (ret < 0)
		return ret;

	/* Zero for the YUV paths; addrlib-derived flags for RGB. */
	metadata.tiling_info = tiling_flags;

	/* All planes share the single GEM handle. */
	for (plane = 0; plane < bo->num_planes; plane++)
		bo->handles[plane].u32 = gem_create.out.handle;

	ret = amdgpu_set_metadata(drv_get_fd(bo->drv), bo->handles[0].u32, &metadata);

	return ret;
}
441
Gurchetan Singhee43c302017-11-14 18:20:27 -0800442static void *amdgpu_bo_map(struct bo *bo, struct vma *vma, size_t plane, uint32_t map_flags)
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +0530443{
444 int ret;
445 union drm_amdgpu_gem_mmap gem_map;
446
447 memset(&gem_map, 0, sizeof(gem_map));
Shirish Sdf423df2017-04-18 16:21:59 +0530448 gem_map.in.handle = bo->handles[plane].u32;
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +0530449
450 ret = drmIoctl(bo->drv->fd, DRM_IOCTL_AMDGPU_GEM_MMAP, &gem_map);
451 if (ret) {
Alistair Strachan0cfaaa52018-03-19 14:03:23 -0700452 drv_log("DRM_IOCTL_AMDGPU_GEM_MMAP failed\n");
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +0530453 return MAP_FAILED;
454 }
Gurchetan Singhcfb88762017-09-28 17:14:50 -0700455
Gurchetan Singhee43c302017-11-14 18:20:27 -0800456 vma->length = bo->total_size;
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +0530457
Gurchetan Singhcfb88762017-09-28 17:14:50 -0700458 return mmap(0, bo->total_size, drv_get_prot(map_flags), MAP_SHARED, bo->drv->fd,
459 gem_map.out.addr_ptr);
Pratik Vishwakarmabc1b5352016-12-12 14:22:10 +0530460}
461
Gurchetan Singha1892b22017-09-28 16:40:52 -0700462static uint32_t amdgpu_resolve_format(uint32_t format, uint64_t use_flags)
Shirish Sdf423df2017-04-18 16:21:59 +0530463{
464 switch (format) {
Ricky Liang0b78e072017-11-10 09:17:17 +0800465 case DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED:
466 /* Camera subsystem requires NV12. */
467 if (use_flags & (BO_USE_CAMERA_READ | BO_USE_CAMERA_WRITE))
468 return DRM_FORMAT_NV12;
469 /*HACK: See b/28671744 */
470 return DRM_FORMAT_XBGR8888;
Shirish Sdf423df2017-04-18 16:21:59 +0530471 case DRM_FORMAT_FLEX_YCbCr_420_888:
472 return DRM_FORMAT_NV12;
473 default:
474 return format;
475 }
476}
477
/*
 * minigbm backend descriptor for the amdgpu kernel driver.
 * Generic drv_* helpers handle destroy/import/unmap; the rest is local.
 */
const struct backend backend_amdgpu = {
	.name = "amdgpu",
	.init = amdgpu_init,
	.close = amdgpu_close,
	.bo_create = amdgpu_bo_create,
	.bo_destroy = drv_gem_bo_destroy,
	.bo_import = drv_prime_bo_import,
	.bo_map = amdgpu_bo_map,
	.bo_unmap = drv_bo_munmap,
	.resolve_format = amdgpu_resolve_format,
};
489
490#endif