// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (c) 2010 Sascha Hauer <s.hauer@pengutronix.de>
 * Copyright (C) 2005-2009 Freescale Semiconductor, Inc.
 */
#include <linux/module.h>
#include <linux/export.h>
#include <linux/types.h>
#include <linux/reset.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/spinlock.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/clk.h>
#include <linux/list.h>
#include <linux/irq.h>
#include <linux/irqchip/chained_irq.h>
#include <linux/irqdomain.h>
#include <linux/of_device.h>
#include <linux/of_graph.h>

#include <drm/drm_fourcc.h>

#include <video/imx-ipu-v3.h>
#include "ipu-prv.h"

static inline u32 ipu_cm_read(struct ipu_soc *ipu, unsigned offset)
{
	return readl(ipu->cm_reg + offset);
}

static inline void ipu_cm_write(struct ipu_soc *ipu, u32 value, unsigned offset)
{
	writel(value, ipu->cm_reg + offset);
}

int ipu_get_num(struct ipu_soc *ipu)
{
	return ipu->id;
}
EXPORT_SYMBOL_GPL(ipu_get_num);

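/*
 * Set the DP SRM (shadow register update) mode: latch the shadow
 * registers at the next frame boundary when sync is true, or apply
 * them immediately otherwise.
 */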
void ipu_srm_dp_update(struct ipu_soc *ipu, bool sync)
{
	u32 val;

	val = ipu_cm_read(ipu, IPU_SRM_PRI2);
	val &= ~DP_S_SRM_MODE_MASK;
	val |= sync ? DP_S_SRM_MODE_NEXT_FRAME :
		      DP_S_SRM_MODE_NOW;
	ipu_cm_write(ipu, val, IPU_SRM_PRI2);
}
EXPORT_SYMBOL_GPL(ipu_srm_dp_update);

enum ipu_color_space ipu_drm_fourcc_to_colorspace(u32 drm_fourcc)
{
	switch (drm_fourcc) {
	case DRM_FORMAT_ARGB1555:
	case DRM_FORMAT_ABGR1555:
	case DRM_FORMAT_RGBA5551:
	case DRM_FORMAT_BGRA5551:
	case DRM_FORMAT_RGB565:
	case DRM_FORMAT_BGR565:
	case DRM_FORMAT_RGB888:
	case DRM_FORMAT_BGR888:
	case DRM_FORMAT_ARGB4444:
	case DRM_FORMAT_XRGB8888:
	case DRM_FORMAT_XBGR8888:
	case DRM_FORMAT_RGBX8888:
	case DRM_FORMAT_BGRX8888:
	case DRM_FORMAT_ARGB8888:
	case DRM_FORMAT_ABGR8888:
	case DRM_FORMAT_RGBA8888:
	case DRM_FORMAT_BGRA8888:
	case DRM_FORMAT_RGB565_A8:
	case DRM_FORMAT_BGR565_A8:
	case DRM_FORMAT_RGB888_A8:
	case DRM_FORMAT_BGR888_A8:
	case DRM_FORMAT_RGBX8888_A8:
	case DRM_FORMAT_BGRX8888_A8:
		return IPUV3_COLORSPACE_RGB;
	case DRM_FORMAT_YUYV:
	case DRM_FORMAT_UYVY:
	case DRM_FORMAT_YUV420:
	case DRM_FORMAT_YVU420:
	case DRM_FORMAT_YUV422:
	case DRM_FORMAT_YVU422:
	case DRM_FORMAT_YUV444:
	case DRM_FORMAT_YVU444:
	case DRM_FORMAT_NV12:
	case DRM_FORMAT_NV21:
	case DRM_FORMAT_NV16:
	case DRM_FORMAT_NV61:
		return IPUV3_COLORSPACE_YUV;
	default:
		return IPUV3_COLORSPACE_UNKNOWN;
	}
}
EXPORT_SYMBOL_GPL(ipu_drm_fourcc_to_colorspace);

enum ipu_color_space ipu_pixelformat_to_colorspace(u32 pixelformat)
{
	switch (pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
	case V4L2_PIX_FMT_YUV422P:
	case V4L2_PIX_FMT_UYVY:
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		return IPUV3_COLORSPACE_YUV;
	case V4L2_PIX_FMT_XRGB32:
	case V4L2_PIX_FMT_XBGR32:
	case V4L2_PIX_FMT_RGB32:
	case V4L2_PIX_FMT_BGR32:
	case V4L2_PIX_FMT_RGB24:
	case V4L2_PIX_FMT_BGR24:
	case V4L2_PIX_FMT_RGB565:
		return IPUV3_COLORSPACE_RGB;
	default:
		return IPUV3_COLORSPACE_UNKNOWN;
	}
}
EXPORT_SYMBOL_GPL(ipu_pixelformat_to_colorspace);

bool ipu_pixelformat_is_planar(u32 pixelformat)
{
	switch (pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
	case V4L2_PIX_FMT_YUV422P:
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(ipu_pixelformat_is_planar);

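/*
 * Media bus codes encode the colorspace in bits 15..12: codes in the
 * 0x1xxx range are RGB, codes in the 0x2xxx range are YUV.
 */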
enum ipu_color_space ipu_mbus_code_to_colorspace(u32 mbus_code)
{
	switch (mbus_code & 0xf000) {
	case 0x1000:
		return IPUV3_COLORSPACE_RGB;
	case 0x2000:
		return IPUV3_COLORSPACE_YUV;
	default:
		return IPUV3_COLORSPACE_UNKNOWN;
	}
}
EXPORT_SYMBOL_GPL(ipu_mbus_code_to_colorspace);

int ipu_stride_to_bytes(u32 pixel_stride, u32 pixelformat)
{
	switch (pixelformat) {
	case V4L2_PIX_FMT_YUV420:
	case V4L2_PIX_FMT_YVU420:
	case V4L2_PIX_FMT_YUV422P:
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		/*
		 * for the planar YUV formats, the stride passed to
		 * cpmem must be the stride in bytes of the Y plane.
		 * And all the planar YUV formats have an 8-bit
		 * Y component.
		 */
		return (8 * pixel_stride) >> 3;
	case V4L2_PIX_FMT_RGB565:
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_UYVY:
		return (16 * pixel_stride) >> 3;
	case V4L2_PIX_FMT_BGR24:
	case V4L2_PIX_FMT_RGB24:
		return (24 * pixel_stride) >> 3;
	case V4L2_PIX_FMT_BGR32:
	case V4L2_PIX_FMT_RGB32:
	case V4L2_PIX_FMT_XBGR32:
	case V4L2_PIX_FMT_XRGB32:
		return (32 * pixel_stride) >> 3;
	default:
		break;
	}

	return -EINVAL;
}
EXPORT_SYMBOL_GPL(ipu_stride_to_bytes);

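/*
 * The IPU rotate mode is a 3-bit value: bit 2 selects 90 degree rotation,
 * bit 1 horizontal flip and bit 0 vertical flip. The two helpers below
 * convert between this encoding and a rotation in degrees plus flips.
 */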
int ipu_degrees_to_rot_mode(enum ipu_rotate_mode *mode, int degrees,
			    bool hflip, bool vflip)
{
	u32 r90, vf, hf;

	switch (degrees) {
	case 0:
		vf = hf = r90 = 0;
		break;
	case 90:
		vf = hf = 0;
		r90 = 1;
		break;
	case 180:
		vf = hf = 1;
		r90 = 0;
		break;
	case 270:
		vf = hf = r90 = 1;
		break;
	default:
		return -EINVAL;
	}

	hf ^= (u32)hflip;
	vf ^= (u32)vflip;

	*mode = (enum ipu_rotate_mode)((r90 << 2) | (hf << 1) | vf);
	return 0;
}
EXPORT_SYMBOL_GPL(ipu_degrees_to_rot_mode);

int ipu_rot_mode_to_degrees(int *degrees, enum ipu_rotate_mode mode,
			    bool hflip, bool vflip)
{
	u32 r90, vf, hf;

	r90 = ((u32)mode >> 2) & 0x1;
	hf = ((u32)mode >> 1) & 0x1;
	vf = ((u32)mode >> 0) & 0x1;
	hf ^= (u32)hflip;
	vf ^= (u32)vflip;

	switch ((enum ipu_rotate_mode)((r90 << 2) | (hf << 1) | vf)) {
	case IPU_ROTATE_NONE:
		*degrees = 0;
		break;
	case IPU_ROTATE_90_RIGHT:
		*degrees = 90;
		break;
	case IPU_ROTATE_180:
		*degrees = 180;
		break;
	case IPU_ROTATE_90_LEFT:
		*degrees = 270;
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_rot_mode_to_degrees);

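/*
 * Request exclusive use of an IDMAC channel. Returns ERR_PTR(-EBUSY) if
 * the channel has already been claimed.
 */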
struct ipuv3_channel *ipu_idmac_get(struct ipu_soc *ipu, unsigned num)
{
	struct ipuv3_channel *channel;

	dev_dbg(ipu->dev, "%s %d\n", __func__, num);

	if (num > 63)
		return ERR_PTR(-ENODEV);

	mutex_lock(&ipu->channel_lock);

	list_for_each_entry(channel, &ipu->channels, list) {
		if (channel->num == num) {
			channel = ERR_PTR(-EBUSY);
			goto out;
		}
	}

	channel = kzalloc(sizeof(*channel), GFP_KERNEL);
	if (!channel) {
		channel = ERR_PTR(-ENOMEM);
		goto out;
	}

	channel->num = num;
	channel->ipu = ipu;
	list_add(&channel->list, &ipu->channels);

out:
	mutex_unlock(&ipu->channel_lock);

	return channel;
}
EXPORT_SYMBOL_GPL(ipu_idmac_get);

void ipu_idmac_put(struct ipuv3_channel *channel)
{
	struct ipu_soc *ipu = channel->ipu;

	dev_dbg(ipu->dev, "%s %d\n", __func__, channel->num);

	mutex_lock(&ipu->channel_lock);

	list_del(&channel->list);
	kfree(channel);

	mutex_unlock(&ipu->channel_lock);
}
EXPORT_SYMBOL_GPL(ipu_idmac_put);

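/* Bit for a channel within its 32-channel register bank */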
#define idma_mask(ch)		(1 << ((ch) & 0x1f))

/*
 * This is an undocumented feature, a write one to a channel bit in
 * IPU_CHA_CUR_BUF and IPU_CHA_TRIPLE_CUR_BUF will reset the channel's
 * internal current buffer pointer so that transfers start from buffer
 * 0 on the next channel enable (that's the theory anyway, the imx6 TRM
 * only says these are read-only registers). This operation is required
 * for channel linking to work correctly, for instance video capture
 * pipelines that carry out image rotations will fail after the first
 * streaming unless this function is called for each channel before
 * re-enabling the channels.
 */
static void __ipu_idmac_reset_current_buffer(struct ipuv3_channel *channel)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned int chno = channel->num;

	ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_CUR_BUF(chno));
}

void ipu_idmac_set_double_buffer(struct ipuv3_channel *channel,
		bool doublebuffer)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned long flags;
	u32 reg;

	spin_lock_irqsave(&ipu->lock, flags);

	reg = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(channel->num));
	if (doublebuffer)
		reg |= idma_mask(channel->num);
	else
		reg &= ~idma_mask(channel->num);
	ipu_cm_write(ipu, reg, IPU_CHA_DB_MODE_SEL(channel->num));

	__ipu_idmac_reset_current_buffer(channel);

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_idmac_set_double_buffer);

static const struct {
	int chnum;
	u32 reg;
	int shift;
} idmac_lock_en_info[] = {
	{ .chnum =  5, .reg = IDMAC_CH_LOCK_EN_1, .shift =  0, },
	{ .chnum = 11, .reg = IDMAC_CH_LOCK_EN_1, .shift =  2, },
	{ .chnum = 12, .reg = IDMAC_CH_LOCK_EN_1, .shift =  4, },
	{ .chnum = 14, .reg = IDMAC_CH_LOCK_EN_1, .shift =  6, },
	{ .chnum = 15, .reg = IDMAC_CH_LOCK_EN_1, .shift =  8, },
	{ .chnum = 20, .reg = IDMAC_CH_LOCK_EN_1, .shift = 10, },
	{ .chnum = 21, .reg = IDMAC_CH_LOCK_EN_1, .shift = 12, },
	{ .chnum = 22, .reg = IDMAC_CH_LOCK_EN_1, .shift = 14, },
	{ .chnum = 23, .reg = IDMAC_CH_LOCK_EN_1, .shift = 16, },
	{ .chnum = 27, .reg = IDMAC_CH_LOCK_EN_1, .shift = 18, },
	{ .chnum = 28, .reg = IDMAC_CH_LOCK_EN_1, .shift = 20, },
	{ .chnum = 45, .reg = IDMAC_CH_LOCK_EN_2, .shift =  0, },
	{ .chnum = 46, .reg = IDMAC_CH_LOCK_EN_2, .shift =  2, },
	{ .chnum = 47, .reg = IDMAC_CH_LOCK_EN_2, .shift =  4, },
	{ .chnum = 48, .reg = IDMAC_CH_LOCK_EN_2, .shift =  6, },
	{ .chnum = 49, .reg = IDMAC_CH_LOCK_EN_2, .shift =  8, },
	{ .chnum = 50, .reg = IDMAC_CH_LOCK_EN_2, .shift = 10, },
};

int ipu_idmac_lock_enable(struct ipuv3_channel *channel, int num_bursts)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned long flags;
	u32 bursts, regval;
	int i;

	switch (num_bursts) {
	case 0:
	case 1:
		bursts = 0x00; /* locking disabled */
		break;
	case 2:
		bursts = 0x01;
		break;
	case 4:
		bursts = 0x02;
		break;
	case 8:
		bursts = 0x03;
		break;
	default:
		return -EINVAL;
	}

	/*
	 * IPUv3EX / i.MX51 has a different register layout, and on IPUv3M /
	 * i.MX53 channel arbitration locking doesn't seem to work properly.
	 * Allow enabling the lock feature on IPUv3H / i.MX6 only.
	 */
	if (bursts && ipu->ipu_type != IPUV3H)
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(idmac_lock_en_info); i++) {
		if (channel->num == idmac_lock_en_info[i].chnum)
			break;
	}
	if (i >= ARRAY_SIZE(idmac_lock_en_info))
		return -EINVAL;

	spin_lock_irqsave(&ipu->lock, flags);

	regval = ipu_idmac_read(ipu, idmac_lock_en_info[i].reg);
	regval &= ~(0x03 << idmac_lock_en_info[i].shift);
	regval |= (bursts << idmac_lock_en_info[i].shift);
	ipu_idmac_write(ipu, regval, idmac_lock_en_info[i].reg);

	spin_unlock_irqrestore(&ipu->lock, flags);

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_idmac_lock_enable);

int ipu_module_enable(struct ipu_soc *ipu, u32 mask)
{
	unsigned long lock_flags;
	u32 val;

	spin_lock_irqsave(&ipu->lock, lock_flags);

	val = ipu_cm_read(ipu, IPU_DISP_GEN);

	if (mask & IPU_CONF_DI0_EN)
		val |= IPU_DI0_COUNTER_RELEASE;
	if (mask & IPU_CONF_DI1_EN)
		val |= IPU_DI1_COUNTER_RELEASE;

	ipu_cm_write(ipu, val, IPU_DISP_GEN);

	val = ipu_cm_read(ipu, IPU_CONF);
	val |= mask;
	ipu_cm_write(ipu, val, IPU_CONF);

	spin_unlock_irqrestore(&ipu->lock, lock_flags);

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_module_enable);

int ipu_module_disable(struct ipu_soc *ipu, u32 mask)
{
	unsigned long lock_flags;
	u32 val;

	spin_lock_irqsave(&ipu->lock, lock_flags);

	val = ipu_cm_read(ipu, IPU_CONF);
	val &= ~mask;
	ipu_cm_write(ipu, val, IPU_CONF);

	val = ipu_cm_read(ipu, IPU_DISP_GEN);

	if (mask & IPU_CONF_DI0_EN)
		val &= ~IPU_DI0_COUNTER_RELEASE;
	if (mask & IPU_CONF_DI1_EN)
		val &= ~IPU_DI1_COUNTER_RELEASE;

	ipu_cm_write(ipu, val, IPU_DISP_GEN);

	spin_unlock_irqrestore(&ipu->lock, lock_flags);

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_module_disable);

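/* Return the buffer (0 or 1) the channel is currently transferring from */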
int ipu_idmac_get_current_buffer(struct ipuv3_channel *channel)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned int chno = channel->num;

	return (ipu_cm_read(ipu, IPU_CHA_CUR_BUF(chno)) & idma_mask(chno)) ? 1 : 0;
}
EXPORT_SYMBOL_GPL(ipu_idmac_get_current_buffer);

bool ipu_idmac_buffer_is_ready(struct ipuv3_channel *channel, u32 buf_num)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned long flags;
	u32 reg = 0;

	spin_lock_irqsave(&ipu->lock, flags);
	switch (buf_num) {
	case 0:
		reg = ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(channel->num));
		break;
	case 1:
		reg = ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(channel->num));
		break;
	case 2:
		reg = ipu_cm_read(ipu, IPU_CHA_BUF2_RDY(channel->num));
		break;
	}
	spin_unlock_irqrestore(&ipu->lock, flags);

	return ((reg & idma_mask(channel->num)) != 0);
}
EXPORT_SYMBOL_GPL(ipu_idmac_buffer_is_ready);

void ipu_idmac_select_buffer(struct ipuv3_channel *channel, u32 buf_num)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned int chno = channel->num;
	unsigned long flags;

	spin_lock_irqsave(&ipu->lock, flags);

	/* Mark buffer as ready. */
	if (buf_num == 0)
		ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_BUF0_RDY(chno));
	else
		ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_BUF1_RDY(chno));

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_idmac_select_buffer);

void ipu_idmac_clear_buffer(struct ipuv3_channel *channel, u32 buf_num)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned int chno = channel->num;
	unsigned long flags;

	spin_lock_irqsave(&ipu->lock, flags);

	ipu_cm_write(ipu, 0xF0300000, IPU_GPR); /* write one to clear */
	switch (buf_num) {
	case 0:
		ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_BUF0_RDY(chno));
		break;
	case 1:
		ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_BUF1_RDY(chno));
		break;
	case 2:
		ipu_cm_write(ipu, idma_mask(chno), IPU_CHA_BUF2_RDY(chno));
		break;
	default:
		break;
	}
	ipu_cm_write(ipu, 0x0, IPU_GPR); /* write one to set */

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_idmac_clear_buffer);

int ipu_idmac_enable_channel(struct ipuv3_channel *channel)
{
	struct ipu_soc *ipu = channel->ipu;
	u32 val;
	unsigned long flags;

	spin_lock_irqsave(&ipu->lock, flags);

	val = ipu_idmac_read(ipu, IDMAC_CHA_EN(channel->num));
	val |= idma_mask(channel->num);
	ipu_idmac_write(ipu, val, IDMAC_CHA_EN(channel->num));

	spin_unlock_irqrestore(&ipu->lock, flags);

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_idmac_enable_channel);

bool ipu_idmac_channel_busy(struct ipu_soc *ipu, unsigned int chno)
{
	return (ipu_idmac_read(ipu, IDMAC_CHA_BUSY(chno)) & idma_mask(chno));
}
EXPORT_SYMBOL_GPL(ipu_idmac_channel_busy);

int ipu_idmac_wait_busy(struct ipuv3_channel *channel, int ms)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned long timeout;

	timeout = jiffies + msecs_to_jiffies(ms);
	while (ipu_idmac_read(ipu, IDMAC_CHA_BUSY(channel->num)) &
			idma_mask(channel->num)) {
		if (time_after(jiffies, timeout))
			return -ETIMEDOUT;
		cpu_relax();
	}

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_idmac_wait_busy);

int ipu_idmac_disable_channel(struct ipuv3_channel *channel)
{
	struct ipu_soc *ipu = channel->ipu;
	u32 val;
	unsigned long flags;

	spin_lock_irqsave(&ipu->lock, flags);

	/* Disable DMA channel(s) */
	val = ipu_idmac_read(ipu, IDMAC_CHA_EN(channel->num));
	val &= ~idma_mask(channel->num);
	ipu_idmac_write(ipu, val, IDMAC_CHA_EN(channel->num));

	__ipu_idmac_reset_current_buffer(channel);

	/* Set channel buffers NOT to be ready */
	ipu_cm_write(ipu, 0xf0000000, IPU_GPR); /* write one to clear */

	if (ipu_cm_read(ipu, IPU_CHA_BUF0_RDY(channel->num)) &
			idma_mask(channel->num)) {
		ipu_cm_write(ipu, idma_mask(channel->num),
			     IPU_CHA_BUF0_RDY(channel->num));
	}

	if (ipu_cm_read(ipu, IPU_CHA_BUF1_RDY(channel->num)) &
			idma_mask(channel->num)) {
		ipu_cm_write(ipu, idma_mask(channel->num),
			     IPU_CHA_BUF1_RDY(channel->num));
	}

	ipu_cm_write(ipu, 0x0, IPU_GPR); /* write one to set */

	/* Reset the double buffer */
	val = ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(channel->num));
	val &= ~idma_mask(channel->num);
	ipu_cm_write(ipu, val, IPU_CHA_DB_MODE_SEL(channel->num));

	spin_unlock_irqrestore(&ipu->lock, flags);

	return 0;
}
EXPORT_SYMBOL_GPL(ipu_idmac_disable_channel);

/*
 * The imx6 rev. D TRM says that enabling the WM feature will increase
 * a channel's priority. Refer to Table 36-8 Calculated priority value.
 * The sub-module that is the sink or source for the channel must enable
 * watermark signal for this to take effect (SMFC_WM for instance).
 */
void ipu_idmac_enable_watermark(struct ipuv3_channel *channel, bool enable)
{
	struct ipu_soc *ipu = channel->ipu;
	unsigned long flags;
	u32 val;

	spin_lock_irqsave(&ipu->lock, flags);

	val = ipu_idmac_read(ipu, IDMAC_WM_EN(channel->num));
	if (enable)
		val |= 1 << (channel->num % 32);
	else
		val &= ~(1 << (channel->num % 32));
	ipu_idmac_write(ipu, val, IDMAC_WM_EN(channel->num));

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_idmac_enable_watermark);

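/* Reset the IPU internal memories and wait up to one second for completion */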
static int ipu_memory_reset(struct ipu_soc *ipu)
{
	unsigned long timeout;

	ipu_cm_write(ipu, 0x807FFFFF, IPU_MEM_RST);

	timeout = jiffies + msecs_to_jiffies(1000);
	while (ipu_cm_read(ipu, IPU_MEM_RST) & 0x80000000) {
		if (time_after(jiffies, timeout))
			return -ETIME;
		cpu_relax();
	}

	return 0;
}

/*
 * Set the source mux for the given CSI. Selects either parallel or
 * MIPI CSI2 sources.
 */
void ipu_set_csi_src_mux(struct ipu_soc *ipu, int csi_id, bool mipi_csi2)
{
	unsigned long flags;
	u32 val, mask;

	mask = (csi_id == 1) ? IPU_CONF_CSI1_DATA_SOURCE :
			       IPU_CONF_CSI0_DATA_SOURCE;

	spin_lock_irqsave(&ipu->lock, flags);

	val = ipu_cm_read(ipu, IPU_CONF);
	if (mipi_csi2)
		val |= mask;
	else
		val &= ~mask;
	ipu_cm_write(ipu, val, IPU_CONF);

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_set_csi_src_mux);

/*
 * Set the source mux for the IC. Selects either CSI[01] or the VDI.
 */
void ipu_set_ic_src_mux(struct ipu_soc *ipu, int csi_id, bool vdi)
{
	unsigned long flags;
	u32 val;

	spin_lock_irqsave(&ipu->lock, flags);

	val = ipu_cm_read(ipu, IPU_CONF);
	if (vdi)
		val |= IPU_CONF_IC_INPUT;
	else
		val &= ~IPU_CONF_IC_INPUT;

	if (csi_id == 1)
		val |= IPU_CONF_CSI_SEL;
	else
		val &= ~IPU_CONF_CSI_SEL;

	ipu_cm_write(ipu, val, IPU_CONF);

	spin_unlock_irqrestore(&ipu->lock, flags);
}
EXPORT_SYMBOL_GPL(ipu_set_ic_src_mux);

/* Frame Synchronization Unit Channel Linking */

struct fsu_link_reg_info {
	int chno;
	u32 reg;
	u32 mask;
	u32 val;
};

struct fsu_link_info {
	struct fsu_link_reg_info src;
	struct fsu_link_reg_info sink;
};

static const struct fsu_link_info fsu_link_info[] = {
	{
		.src  = { IPUV3_CHANNEL_IC_PRP_ENC_MEM, IPU_FS_PROC_FLOW2,
			  FS_PRP_ENC_DEST_SEL_MASK, FS_PRP_ENC_DEST_SEL_IRT_ENC },
		.sink = { IPUV3_CHANNEL_MEM_ROT_ENC, IPU_FS_PROC_FLOW1,
			  FS_PRPENC_ROT_SRC_SEL_MASK, FS_PRPENC_ROT_SRC_SEL_ENC },
	}, {
		.src  = { IPUV3_CHANNEL_IC_PRP_VF_MEM, IPU_FS_PROC_FLOW2,
			  FS_PRPVF_DEST_SEL_MASK, FS_PRPVF_DEST_SEL_IRT_VF },
		.sink = { IPUV3_CHANNEL_MEM_ROT_VF, IPU_FS_PROC_FLOW1,
			  FS_PRPVF_ROT_SRC_SEL_MASK, FS_PRPVF_ROT_SRC_SEL_VF },
	}, {
		.src  = { IPUV3_CHANNEL_IC_PP_MEM, IPU_FS_PROC_FLOW2,
			  FS_PP_DEST_SEL_MASK, FS_PP_DEST_SEL_IRT_PP },
		.sink = { IPUV3_CHANNEL_MEM_ROT_PP, IPU_FS_PROC_FLOW1,
			  FS_PP_ROT_SRC_SEL_MASK, FS_PP_ROT_SRC_SEL_PP },
	}, {
		.src  = { IPUV3_CHANNEL_CSI_DIRECT, 0 },
		.sink = { IPUV3_CHANNEL_CSI_VDI_PREV, IPU_FS_PROC_FLOW1,
			  FS_VDI_SRC_SEL_MASK, FS_VDI_SRC_SEL_CSI_DIRECT },
	},
};

static const struct fsu_link_info *find_fsu_link_info(int src, int sink)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(fsu_link_info); i++) {
		if (src == fsu_link_info[i].src.chno &&
		    sink == fsu_link_info[i].sink.chno)
			return &fsu_link_info[i];
	}

	return NULL;
}

/*
 * Links a source channel to a sink channel in the FSU.
 */
int ipu_fsu_link(struct ipu_soc *ipu, int src_ch, int sink_ch)
{
	const struct fsu_link_info *link;
	u32 src_reg, sink_reg;
	unsigned long flags;

	link = find_fsu_link_info(src_ch, sink_ch);
	if (!link)
		return -EINVAL;

	spin_lock_irqsave(&ipu->lock, flags);

	if (link->src.mask) {
		src_reg = ipu_cm_read(ipu, link->src.reg);
		src_reg &= ~link->src.mask;
		src_reg |= link->src.val;
		ipu_cm_write(ipu, src_reg, link->src.reg);
	}

	if (link->sink.mask) {
		sink_reg = ipu_cm_read(ipu, link->sink.reg);
		sink_reg &= ~link->sink.mask;
		sink_reg |= link->sink.val;
		ipu_cm_write(ipu, sink_reg, link->sink.reg);
	}

	spin_unlock_irqrestore(&ipu->lock, flags);
	return 0;
}
EXPORT_SYMBOL_GPL(ipu_fsu_link);

/*
 * Unlinks source and sink channels in the FSU.
 */
int ipu_fsu_unlink(struct ipu_soc *ipu, int src_ch, int sink_ch)
{
	const struct fsu_link_info *link;
	u32 src_reg, sink_reg;
	unsigned long flags;

	link = find_fsu_link_info(src_ch, sink_ch);
	if (!link)
		return -EINVAL;

	spin_lock_irqsave(&ipu->lock, flags);

	if (link->src.mask) {
		src_reg = ipu_cm_read(ipu, link->src.reg);
		src_reg &= ~link->src.mask;
		ipu_cm_write(ipu, src_reg, link->src.reg);
	}

	if (link->sink.mask) {
		sink_reg = ipu_cm_read(ipu, link->sink.reg);
		sink_reg &= ~link->sink.mask;
		ipu_cm_write(ipu, sink_reg, link->sink.reg);
	}

	spin_unlock_irqrestore(&ipu->lock, flags);
	return 0;
}
EXPORT_SYMBOL_GPL(ipu_fsu_unlink);

/* Link IDMAC channels in the FSU */
int ipu_idmac_link(struct ipuv3_channel *src, struct ipuv3_channel *sink)
{
	return ipu_fsu_link(src->ipu, src->num, sink->num);
}
EXPORT_SYMBOL_GPL(ipu_idmac_link);

/* Unlink IDMAC channels in the FSU */
int ipu_idmac_unlink(struct ipuv3_channel *src, struct ipuv3_channel *sink)
{
	return ipu_fsu_unlink(src->ipu, src->num, sink->num);
}
EXPORT_SYMBOL_GPL(ipu_idmac_unlink);

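/*
 * Per-variant register map: the submodule offsets differ between the
 * IPUv3 generations found in i.MX51, i.MX53 and i.MX6.
 */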
struct ipu_devtype {
	const char *name;
	unsigned long cm_ofs;
	unsigned long cpmem_ofs;
	unsigned long srm_ofs;
	unsigned long tpm_ofs;
	unsigned long csi0_ofs;
	unsigned long csi1_ofs;
	unsigned long ic_ofs;
	unsigned long disp0_ofs;
	unsigned long disp1_ofs;
	unsigned long dc_tmpl_ofs;
	unsigned long vdi_ofs;
	enum ipuv3_type type;
};

static struct ipu_devtype ipu_type_imx51 = {
	.name = "IPUv3EX",
	.cm_ofs = 0x1e000000,
	.cpmem_ofs = 0x1f000000,
	.srm_ofs = 0x1f040000,
	.tpm_ofs = 0x1f060000,
	.csi0_ofs = 0x1e030000,
	.csi1_ofs = 0x1e038000,
	.ic_ofs = 0x1e020000,
	.disp0_ofs = 0x1e040000,
	.disp1_ofs = 0x1e048000,
	.dc_tmpl_ofs = 0x1f080000,
	.vdi_ofs = 0x1e068000,
	.type = IPUV3EX,
};

static struct ipu_devtype ipu_type_imx53 = {
	.name = "IPUv3M",
	.cm_ofs = 0x06000000,
	.cpmem_ofs = 0x07000000,
	.srm_ofs = 0x07040000,
	.tpm_ofs = 0x07060000,
	.csi0_ofs = 0x06030000,
	.csi1_ofs = 0x06038000,
	.ic_ofs = 0x06020000,
	.disp0_ofs = 0x06040000,
	.disp1_ofs = 0x06048000,
	.dc_tmpl_ofs = 0x07080000,
	.vdi_ofs = 0x06068000,
	.type = IPUV3M,
};

static struct ipu_devtype ipu_type_imx6q = {
	.name = "IPUv3H",
	.cm_ofs = 0x00200000,
	.cpmem_ofs = 0x00300000,
	.srm_ofs = 0x00340000,
	.tpm_ofs = 0x00360000,
	.csi0_ofs = 0x00230000,
	.csi1_ofs = 0x00238000,
	.ic_ofs = 0x00220000,
	.disp0_ofs = 0x00240000,
	.disp1_ofs = 0x00248000,
	.dc_tmpl_ofs = 0x00380000,
	.vdi_ofs = 0x00268000,
	.type = IPUV3H,
};

static const struct of_device_id imx_ipu_dt_ids[] = {
	{ .compatible = "fsl,imx51-ipu", .data = &ipu_type_imx51, },
	{ .compatible = "fsl,imx53-ipu", .data = &ipu_type_imx53, },
	{ .compatible = "fsl,imx6q-ipu", .data = &ipu_type_imx6q, },
	{ .compatible = "fsl,imx6qp-ipu", .data = &ipu_type_imx6q, },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, imx_ipu_dt_ids);

static int ipu_submodules_init(struct ipu_soc *ipu,
		struct platform_device *pdev, unsigned long ipu_base,
		struct clk *ipu_clk)
{
	char *unit;
	int ret;
	struct device *dev = &pdev->dev;
	const struct ipu_devtype *devtype = ipu->devtype;

	ret = ipu_cpmem_init(ipu, dev, ipu_base + devtype->cpmem_ofs);
	if (ret) {
		unit = "cpmem";
		goto err_cpmem;
	}

	ret = ipu_csi_init(ipu, dev, 0, ipu_base + devtype->csi0_ofs,
			   IPU_CONF_CSI0_EN, ipu_clk);
	if (ret) {
		unit = "csi0";
		goto err_csi_0;
	}

	ret = ipu_csi_init(ipu, dev, 1, ipu_base + devtype->csi1_ofs,
			   IPU_CONF_CSI1_EN, ipu_clk);
	if (ret) {
		unit = "csi1";
		goto err_csi_1;
	}

	ret = ipu_ic_init(ipu, dev,
			  ipu_base + devtype->ic_ofs,
			  ipu_base + devtype->tpm_ofs);
	if (ret) {
		unit = "ic";
		goto err_ic;
	}

	ret = ipu_vdi_init(ipu, dev, ipu_base + devtype->vdi_ofs,
			   IPU_CONF_VDI_EN | IPU_CONF_ISP_EN |
			   IPU_CONF_IC_INPUT);
	if (ret) {
		unit = "vdi";
		goto err_vdi;
	}

	ret = ipu_image_convert_init(ipu, dev);
	if (ret) {
		unit = "image_convert";
		goto err_image_convert;
	}

	ret = ipu_di_init(ipu, dev, 0, ipu_base + devtype->disp0_ofs,
			  IPU_CONF_DI0_EN, ipu_clk);
	if (ret) {
		unit = "di0";
		goto err_di_0;
	}

	ret = ipu_di_init(ipu, dev, 1, ipu_base + devtype->disp1_ofs,
			  IPU_CONF_DI1_EN, ipu_clk);
	if (ret) {
		unit = "di1";
		goto err_di_1;
	}

	ret = ipu_dc_init(ipu, dev, ipu_base + devtype->cm_ofs +
			  IPU_CM_DC_REG_OFS, ipu_base + devtype->dc_tmpl_ofs);
	if (ret) {
		unit = "dc_template";
		goto err_dc;
	}

	ret = ipu_dmfc_init(ipu, dev, ipu_base +
			    devtype->cm_ofs + IPU_CM_DMFC_REG_OFS, ipu_clk);
	if (ret) {
		unit = "dmfc";
		goto err_dmfc;
	}

	ret = ipu_dp_init(ipu, dev, ipu_base + devtype->srm_ofs);
	if (ret) {
		unit = "dp";
		goto err_dp;
	}

	ret = ipu_smfc_init(ipu, dev, ipu_base +
			    devtype->cm_ofs + IPU_CM_SMFC_REG_OFS);
	if (ret) {
		unit = "smfc";
		goto err_smfc;
	}

	return 0;

err_smfc:
	ipu_dp_exit(ipu);
err_dp:
	ipu_dmfc_exit(ipu);
err_dmfc:
	ipu_dc_exit(ipu);
err_dc:
	ipu_di_exit(ipu, 1);
err_di_1:
	ipu_di_exit(ipu, 0);
err_di_0:
	ipu_image_convert_exit(ipu);
err_image_convert:
	ipu_vdi_exit(ipu);
err_vdi:
	ipu_ic_exit(ipu);
err_ic:
	ipu_csi_exit(ipu, 1);
err_csi_1:
	ipu_csi_exit(ipu, 0);
err_csi_0:
	ipu_cpmem_exit(ipu);
err_cpmem:
	dev_err(&pdev->dev, "init %s failed with %d\n", unit, ret);
	return ret;
}

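/*
 * Demultiplex the shared IPU interrupt: scan the given status registers,
 * masked by their enable bits, and call the handler of every pending
 * interrupt that is mapped in the IRQ domain.
 */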
| 1063 | |
| 1064 | static void ipu_irq_handle(struct ipu_soc *ipu, const int *regs, int num_regs) |
| 1065 | { |
| 1066 | unsigned long status; |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1067 | int i, bit, irq; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1068 | |
| 1069 | for (i = 0; i < num_regs; i++) { |
| 1070 | |
| 1071 | status = ipu_cm_read(ipu, IPU_INT_STAT(regs[i])); |
| 1072 | status &= ipu_cm_read(ipu, IPU_INT_CTRL(regs[i])); |
| 1073 | |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1074 | for_each_set_bit(bit, &status, 32) { |
Antoine Schweitzer-Chaput | 838201a | 2014-04-18 23:20:06 +0200 | [diff] [blame] | 1075 | irq = irq_linear_revmap(ipu->domain, |
| 1076 | regs[i] * 32 + bit); |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1077 | if (irq) |
| 1078 | generic_handle_irq(irq); |
| 1079 | } |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1080 | } |
| 1081 | } |
| 1082 | |
Thomas Gleixner | bd0b9ac | 2015-09-14 10:42:37 +0200 | [diff] [blame] | 1083 | static void ipu_irq_handler(struct irq_desc *desc) |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1084 | { |
| 1085 | struct ipu_soc *ipu = irq_desc_get_handler_data(desc); |
Jiang Liu | 4d9efdfc | 2015-07-13 20:39:54 +0000 | [diff] [blame] | 1086 | struct irq_chip *chip = irq_desc_get_chip(desc); |
Colin Ian King | ac66b83 | 2018-02-14 18:45:59 +0000 | [diff] [blame] | 1087 | static const int int_reg[] = { 0, 1, 2, 3, 10, 11, 12, 13, 14}; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1088 | |
| 1089 | chained_irq_enter(chip, desc); |
| 1090 | |
| 1091 | ipu_irq_handle(ipu, int_reg, ARRAY_SIZE(int_reg)); |
| 1092 | |
| 1093 | chained_irq_exit(chip, desc); |
| 1094 | } |
| 1095 | |
Thomas Gleixner | bd0b9ac | 2015-09-14 10:42:37 +0200 | [diff] [blame] | 1096 | static void ipu_err_irq_handler(struct irq_desc *desc) |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1097 | { |
| 1098 | struct ipu_soc *ipu = irq_desc_get_handler_data(desc); |
Jiang Liu | 4d9efdfc | 2015-07-13 20:39:54 +0000 | [diff] [blame] | 1099 | struct irq_chip *chip = irq_desc_get_chip(desc); |
Colin Ian King | ac66b83 | 2018-02-14 18:45:59 +0000 | [diff] [blame] | 1100 | static const int int_reg[] = { 4, 5, 8, 9}; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1101 | |
| 1102 | chained_irq_enter(chip, desc); |
| 1103 | |
| 1104 | ipu_irq_handle(ipu, int_reg, ARRAY_SIZE(int_reg)); |
| 1105 | |
| 1106 | chained_irq_exit(chip, desc); |
| 1107 | } |
| 1108 | |
Philipp Zabel | 861a50c | 2014-04-14 23:53:16 +0200 | [diff] [blame] | 1109 | int ipu_map_irq(struct ipu_soc *ipu, int irq) |
| 1110 | { |
| 1111 | int virq; |
| 1112 | |
| 1113 | virq = irq_linear_revmap(ipu->domain, irq); |
| 1114 | if (!virq) |
| 1115 | virq = irq_create_mapping(ipu->domain, irq); |
| 1116 | |
| 1117 | return virq; |
| 1118 | } |
| 1119 | EXPORT_SYMBOL_GPL(ipu_map_irq); |
| 1120 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1121 | int ipu_idmac_channel_irq(struct ipu_soc *ipu, struct ipuv3_channel *channel, |
| 1122 | enum ipu_channel_irq irq_type) |
| 1123 | { |
Philipp Zabel | 861a50c | 2014-04-14 23:53:16 +0200 | [diff] [blame] | 1124 | return ipu_map_irq(ipu, irq_type + channel->num); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1125 | } |
| 1126 | EXPORT_SYMBOL_GPL(ipu_idmac_channel_irq); |
| 1127 | |
| 1128 | static void ipu_submodules_exit(struct ipu_soc *ipu) |
| 1129 | { |
Philipp Zabel | 35de925 | 2012-05-09 16:59:01 +0200 | [diff] [blame] | 1130 | ipu_smfc_exit(ipu); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1131 | ipu_dp_exit(ipu); |
| 1132 | ipu_dmfc_exit(ipu); |
| 1133 | ipu_dc_exit(ipu); |
| 1134 | ipu_di_exit(ipu, 1); |
| 1135 | ipu_di_exit(ipu, 0); |
Steve Longerbeam | cd98e85 | 2016-09-17 12:33:58 -0700 | [diff] [blame] | 1136 | ipu_image_convert_exit(ipu); |
Steve Longerbeam | 2d2ead4 | 2016-08-17 17:50:16 -0700 | [diff] [blame] | 1137 | ipu_vdi_exit(ipu); |
Steve Longerbeam | 1aa8ea0 | 2014-08-11 13:04:50 +0200 | [diff] [blame] | 1138 | ipu_ic_exit(ipu); |
Steve Longerbeam | 2ffd48f | 2014-08-19 10:52:40 -0700 | [diff] [blame] | 1139 | ipu_csi_exit(ipu, 1); |
| 1140 | ipu_csi_exit(ipu, 0); |
Steve Longerbeam | 7d2691d | 2014-06-25 18:05:47 -0700 | [diff] [blame] | 1141 | ipu_cpmem_exit(ipu); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1142 | } |
| 1143 | |
| 1144 | static int platform_remove_devices_fn(struct device *dev, void *unused) |
| 1145 | { |
| 1146 | struct platform_device *pdev = to_platform_device(dev); |
| 1147 | |
| 1148 | platform_device_unregister(pdev); |
| 1149 | |
| 1150 | return 0; |
| 1151 | } |
| 1152 | |
| 1153 | static void platform_device_unregister_children(struct platform_device *pdev) |
| 1154 | { |
| 1155 | device_for_each_child(&pdev->dev, NULL, platform_remove_devices_fn); |
| 1156 | } |
| 1157 | |
| 1158 | struct ipu_platform_reg { |
| 1159 | struct ipu_client_platformdata pdata; |
| 1160 | const char *name; |
| 1161 | }; |
| 1162 | |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1163 | /* These must be in the order of the corresponding device tree port nodes */ |
Philipp Zabel | 310944d | 2016-05-12 15:00:44 +0200 | [diff] [blame] | 1164 | static struct ipu_platform_reg client_reg[] = { |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1165 | { |
| 1166 | .pdata = { |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1167 | .csi = 0, |
| 1168 | .dma[0] = IPUV3_CHANNEL_CSI0, |
| 1169 | .dma[1] = -EINVAL, |
| 1170 | }, |
Steve Longerbeam | 88287ec | 2016-07-19 18:11:11 -0700 | [diff] [blame] | 1171 | .name = "imx-ipuv3-csi", |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1172 | }, { |
| 1173 | .pdata = { |
| 1174 | .csi = 1, |
| 1175 | .dma[0] = IPUV3_CHANNEL_CSI1, |
| 1176 | .dma[1] = -EINVAL, |
| 1177 | }, |
Steve Longerbeam | 88287ec | 2016-07-19 18:11:11 -0700 | [diff] [blame] | 1178 | .name = "imx-ipuv3-csi", |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1179 | }, { |
| 1180 | .pdata = { |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1181 | .di = 0, |
| 1182 | .dc = 5, |
| 1183 | .dp = IPU_DP_FLOW_SYNC_BG, |
| 1184 | .dma[0] = IPUV3_CHANNEL_MEM_BG_SYNC, |
Philipp Zabel | b8d181e | 2013-10-10 16:18:45 +0200 | [diff] [blame] | 1185 | .dma[1] = IPUV3_CHANNEL_MEM_FG_SYNC, |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1186 | }, |
| 1187 | .name = "imx-ipuv3-crtc", |
| 1188 | }, { |
| 1189 | .pdata = { |
| 1190 | .di = 1, |
| 1191 | .dc = 1, |
| 1192 | .dp = -EINVAL, |
| 1193 | .dma[0] = IPUV3_CHANNEL_MEM_DC_SYNC, |
| 1194 | .dma[1] = -EINVAL, |
| 1195 | }, |
| 1196 | .name = "imx-ipuv3-crtc", |
| 1197 | }, |
| 1198 | }; |
| 1199 | |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1200 | static DEFINE_MUTEX(ipu_client_id_mutex); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1201 | static int ipu_client_id; |
| 1202 | |
Philipp Zabel | d6ca8ca | 2012-05-23 17:08:19 +0200 | [diff] [blame] | 1203 | static int ipu_add_client_devices(struct ipu_soc *ipu, unsigned long ipu_base) |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1204 | { |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1205 | struct device *dev = ipu->dev; |
| 1206 | unsigned i; |
| 1207 | int id, ret; |
| 1208 | |
| 1209 | mutex_lock(&ipu_client_id_mutex); |
| 1210 | id = ipu_client_id; |
| 1211 | ipu_client_id += ARRAY_SIZE(client_reg); |
| 1212 | mutex_unlock(&ipu_client_id_mutex); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1213 | |
| 1214 | for (i = 0; i < ARRAY_SIZE(client_reg); i++) { |
Philipp Zabel | 310944d | 2016-05-12 15:00:44 +0200 | [diff] [blame] | 1215 | struct ipu_platform_reg *reg = &client_reg[i]; |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1216 | struct platform_device *pdev; |
Philipp Zabel | 17e0521 | 2016-01-04 17:32:26 +0100 | [diff] [blame] | 1217 | struct device_node *of_node; |
| 1218 | |
| 1219 | /* Associate subdevice with the corresponding port node */ |
| 1220 | of_node = of_graph_get_port_by_id(dev->of_node, i); |
| 1221 | if (!of_node) { |
| 1222 | dev_info(dev, |
Rob Herring | 4bf9914 | 2017-07-18 16:43:04 -0500 | [diff] [blame] | 1223 | "no port@%d node in %pOF, not using %s%d\n", |
| 1224 | i, dev->of_node, |
Philipp Zabel | 17e0521 | 2016-01-04 17:32:26 +0100 | [diff] [blame] | 1225 | (i / 2) ? "DI" : "CSI", i % 2); |
| 1226 | continue; |
| 1227 | } |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1228 | |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1229 | pdev = platform_device_alloc(reg->name, id++); |
| 1230 | if (!pdev) { |
| 1231 | ret = -ENOMEM; |
| 1232 | goto err_register; |
| 1233 | } |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1234 | |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1235 | pdev->dev.parent = dev; |
| 1236 | |
Philipp Zabel | 310944d | 2016-05-12 15:00:44 +0200 | [diff] [blame] | 1237 | reg->pdata.of_node = of_node; |
Philipp Zabel | 304e6be | 2015-11-09 16:35:12 +0100 | [diff] [blame] | 1238 | ret = platform_device_add_data(pdev, ®->pdata, |
| 1239 | sizeof(reg->pdata)); |
| 1240 | if (!ret) |
| 1241 | ret = platform_device_add(pdev); |
| 1242 | if (ret) { |
| 1243 | platform_device_put(pdev); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1244 | goto err_register; |
Axel Lin | e4946cd | 2014-08-03 10:38:18 +0800 | [diff] [blame] | 1245 | } |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1246 | } |
| 1247 | |
| 1248 | return 0; |
| 1249 | |
| 1250 | err_register: |
Russell King | 4ae078d | 2013-12-16 11:34:25 +0000 | [diff] [blame] | 1251 | platform_device_unregister_children(to_platform_device(dev)); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1252 | |
| 1253 | return ret; |
| 1254 | } |
| 1255 | |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1256 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1257 | static int ipu_irq_init(struct ipu_soc *ipu) |
| 1258 | { |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1259 | struct irq_chip_generic *gc; |
| 1260 | struct irq_chip_type *ct; |
Philipp Zabel | 37f85b26 | 2013-06-21 14:52:18 +0200 | [diff] [blame] | 1261 | unsigned long unused[IPU_NUM_IRQS / 32] = { |
| 1262 | 0x400100d0, 0xffe000fd, |
| 1263 | 0x400100d0, 0xffe000fd, |
| 1264 | 0x400100d0, 0xffe000fd, |
| 1265 | 0x4077ffff, 0xffe7e1fd, |
| 1266 | 0x23fffffe, 0x8880fff0, |
| 1267 | 0xf98fe7d0, 0xfff81fff, |
| 1268 | 0x400100d0, 0xffe000fd, |
| 1269 | 0x00000000, |
| 1270 | }; |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1271 | int ret, i; |
| 1272 | |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1273 | ipu->domain = irq_domain_add_linear(ipu->dev->of_node, IPU_NUM_IRQS, |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1274 | &irq_generic_chip_ops, ipu); |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1275 | if (!ipu->domain) { |
| 1276 | dev_err(ipu->dev, "failed to add irq domain\n"); |
| 1277 | return -ENODEV; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1278 | } |
| 1279 | |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1280 | ret = irq_alloc_domain_generic_chips(ipu->domain, 32, 1, "IPU", |
Rob Herring | ca0141d | 2015-08-29 18:01:21 -0500 | [diff] [blame] | 1281 | handle_level_irq, 0, 0, 0); |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1282 | if (ret < 0) { |
| 1283 | dev_err(ipu->dev, "failed to alloc generic irq chips\n"); |
| 1284 | irq_domain_remove(ipu->domain); |
| 1285 | return ret; |
| 1286 | } |
| 1287 | |
Philipp Zabel | a92d814 | 2016-08-29 08:32:03 +0200 | [diff] [blame] | 1288 | /* Mask and clear all interrupts */ |
| 1289 | for (i = 0; i < IPU_NUM_IRQS; i += 32) { |
Russell King | 510e642 | 2015-06-16 23:29:41 +0100 | [diff] [blame] | 1290 | ipu_cm_write(ipu, 0, IPU_INT_CTRL(i / 32)); |
Philipp Zabel | a92d814 | 2016-08-29 08:32:03 +0200 | [diff] [blame] | 1291 | ipu_cm_write(ipu, ~unused[i / 32], IPU_INT_STAT(i / 32)); |
| 1292 | } |
Russell King | 510e642 | 2015-06-16 23:29:41 +0100 | [diff] [blame] | 1293 | |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1294 | for (i = 0; i < IPU_NUM_IRQS; i += 32) { |
| 1295 | gc = irq_get_domain_generic_chip(ipu->domain, i); |
| 1296 | gc->reg_base = ipu->cm_reg; |
Philipp Zabel | 37f85b26 | 2013-06-21 14:52:18 +0200 | [diff] [blame] | 1297 | gc->unused = unused[i / 32]; |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1298 | ct = gc->chip_types; |
| 1299 | ct->chip.irq_ack = irq_gc_ack_set_bit; |
| 1300 | ct->chip.irq_mask = irq_gc_mask_clr_bit; |
| 1301 | ct->chip.irq_unmask = irq_gc_mask_set_bit; |
| 1302 | ct->regs.ack = IPU_INT_STAT(i / 32); |
| 1303 | ct->regs.mask = IPU_INT_CTRL(i / 32); |
| 1304 | } |
| 1305 | |
Russell King | 86f5e73 | 2015-06-16 23:06:30 +0100 | [diff] [blame] | 1306 | irq_set_chained_handler_and_data(ipu->irq_sync, ipu_irq_handler, ipu); |
| 1307 | irq_set_chained_handler_and_data(ipu->irq_err, ipu_err_irq_handler, |
| 1308 | ipu); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1309 | |
| 1310 | return 0; |
| 1311 | } |
| 1312 | |
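/*
 * Undo ipu_irq_init(): remove the chained handlers, dispose of all
 * mapped IRQs and remove the irq domain.
 */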
| 1313 | static void ipu_irq_exit(struct ipu_soc *ipu) |
| 1314 | { |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1315 | int i, irq; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1316 | |
Russell King | 86f5e73 | 2015-06-16 23:06:30 +0100 | [diff] [blame] | 1317 | irq_set_chained_handler_and_data(ipu->irq_err, NULL, NULL); |
| 1318 | irq_set_chained_handler_and_data(ipu->irq_sync, NULL, NULL); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1319 | |
Philipp Zabel | 379cdec | 2013-06-21 14:52:17 +0200 | [diff] [blame] | 1320 | /* TODO: remove irq_domain_generic_chips */ |
| 1321 | |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1322 | for (i = 0; i < IPU_NUM_IRQS; i++) { |
| 1323 | irq = irq_linear_revmap(ipu->domain, i); |
| 1324 | if (irq) |
| 1325 | irq_dispose_mapping(irq); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1326 | } |
| 1327 | |
Philipp Zabel | b728766 | 2013-06-21 10:27:39 +0200 | [diff] [blame] | 1328 | irq_domain_remove(ipu->domain); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1329 | } |
| 1330 | |
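/* Dump the main IPU common and IDMAC control registers at debug log level */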
Steve Longerbeam | 3feb049 | 2014-06-25 18:05:55 -0700 | [diff] [blame] | 1331 | void ipu_dump(struct ipu_soc *ipu) |
| 1332 | { |
| 1333 | int i; |
| 1334 | |
| 1335 | dev_dbg(ipu->dev, "IPU_CONF = \t0x%08X\n", |
| 1336 | ipu_cm_read(ipu, IPU_CONF)); |
| 1337 | dev_dbg(ipu->dev, "IDMAC_CONF = \t0x%08X\n", |
| 1338 | ipu_idmac_read(ipu, IDMAC_CONF)); |
| 1339 | dev_dbg(ipu->dev, "IDMAC_CHA_EN1 = \t0x%08X\n", |
| 1340 | ipu_idmac_read(ipu, IDMAC_CHA_EN(0))); |
| 1341 | dev_dbg(ipu->dev, "IDMAC_CHA_EN2 = \t0x%08X\n", |
| 1342 | ipu_idmac_read(ipu, IDMAC_CHA_EN(32))); |
| 1343 | dev_dbg(ipu->dev, "IDMAC_CHA_PRI1 = \t0x%08X\n", |
| 1344 | ipu_idmac_read(ipu, IDMAC_CHA_PRI(0))); |
| 1345 | dev_dbg(ipu->dev, "IDMAC_CHA_PRI2 = \t0x%08X\n", |
| 1346 | ipu_idmac_read(ipu, IDMAC_CHA_PRI(32))); |
| 1347 | dev_dbg(ipu->dev, "IDMAC_BAND_EN1 = \t0x%08X\n", |
| 1348 | ipu_idmac_read(ipu, IDMAC_BAND_EN(0))); |
| 1349 | dev_dbg(ipu->dev, "IDMAC_BAND_EN2 = \t0x%08X\n", |
| 1350 | ipu_idmac_read(ipu, IDMAC_BAND_EN(32))); |
| 1351 | dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL0 = \t0x%08X\n", |
| 1352 | ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(0))); |
| 1353 | dev_dbg(ipu->dev, "IPU_CHA_DB_MODE_SEL1 = \t0x%08X\n", |
| 1354 | ipu_cm_read(ipu, IPU_CHA_DB_MODE_SEL(32))); |
| 1355 | dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW1 = \t0x%08X\n", |
| 1356 | ipu_cm_read(ipu, IPU_FS_PROC_FLOW1)); |
| 1357 | dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW2 = \t0x%08X\n", |
| 1358 | ipu_cm_read(ipu, IPU_FS_PROC_FLOW2)); |
| 1359 | dev_dbg(ipu->dev, "IPU_FS_PROC_FLOW3 = \t0x%08X\n", |
| 1360 | ipu_cm_read(ipu, IPU_FS_PROC_FLOW3)); |
| 1361 | dev_dbg(ipu->dev, "IPU_FS_DISP_FLOW1 = \t0x%08X\n", |
| 1362 | ipu_cm_read(ipu, IPU_FS_DISP_FLOW1)); |
| 1363 | for (i = 0; i < 15; i++) |
| 1364 | dev_dbg(ipu->dev, "IPU_INT_CTRL(%d) = \t%08X\n", i, |
| 1365 | ipu_cm_read(ipu, IPU_INT_CTRL(i))); |
| 1366 | } |
| 1367 | EXPORT_SYMBOL_GPL(ipu_dump); |
| 1368 | |
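/*
 * Probe: look up the SoC-specific register layout, map the CM and IDMAC
 * register windows, enable the bus clock, reset the IPU, set up the irq
 * domain and submodules, and finally register the client platform devices.
 */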
Bill Pemberton | c4aabf8 | 2012-11-19 13:22:11 -0500 | [diff] [blame] | 1369 | static int ipu_probe(struct platform_device *pdev) |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1370 | { |
Steve Longerbeam | 572a761 | 2016-07-19 18:11:02 -0700 | [diff] [blame] | 1371 | struct device_node *np = pdev->dev.of_node; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1372 | struct ipu_soc *ipu; |
| 1373 | struct resource *res; |
| 1374 | unsigned long ipu_base; |
Philipp Zabel | 93adc8b | 2017-05-08 12:45:52 +0200 | [diff] [blame] | 1375 | int ret, irq_sync, irq_err; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1376 | const struct ipu_devtype *devtype; |
| 1377 | |
LABBE Corentin | e92e447 | 2016-08-24 10:17:17 +0200 | [diff] [blame] | 1378 | devtype = of_device_get_match_data(&pdev->dev); |
| 1379 | if (!devtype) |
| 1380 | return -EINVAL; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1381 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1382 | irq_sync = platform_get_irq(pdev, 0); |
| 1383 | irq_err = platform_get_irq(pdev, 1); |
| 1384 | res = platform_get_resource(pdev, IORESOURCE_MEM, 0); |
| 1385 | |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1386 | dev_dbg(&pdev->dev, "irq_sync: %d irq_err: %d\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1387 | irq_sync, irq_err); |
| 1388 | |
| 1389 | if (!res || irq_sync < 0 || irq_err < 0) |
| 1390 | return -ENODEV; |
| 1391 | |
| 1392 | ipu_base = res->start; |
| 1393 | |
| 1394 | ipu = devm_kzalloc(&pdev->dev, sizeof(*ipu), GFP_KERNEL); |
| 1395 | if (!ipu) |
 | 1396 | 		return -ENOMEM;
| 1397 | |
Lucas Stach | 92681fe | 2017-03-08 12:13:18 +0100 | [diff] [blame] | 1398 | ipu->id = of_alias_get_id(np, "ipu"); |
Philipp Zabel | 2d87e6c | 2018-06-21 21:13:38 +0200 | [diff] [blame] | 1399 | if (ipu->id < 0) |
| 1400 | ipu->id = 0; |
Lucas Stach | 92681fe | 2017-03-08 12:13:18 +0100 | [diff] [blame] | 1401 | |
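	/*
	 * On i.MX6QP the IPU works together with a PRG unit; defer probing
	 * until the PRG referenced by the "fsl,prg" phandle has registered.
	 */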
Lucas Stach | 30310c8 | 2017-03-23 16:52:02 +0100 | [diff] [blame] | 1402 | if (of_device_is_compatible(np, "fsl,imx6qp-ipu") && |
| 1403 | IS_ENABLED(CONFIG_DRM)) { |
Lucas Stach | 92681fe | 2017-03-08 12:13:18 +0100 | [diff] [blame] | 1404 | ipu->prg_priv = ipu_prg_lookup_by_phandle(&pdev->dev, |
| 1405 | "fsl,prg", ipu->id); |
| 1406 | if (!ipu->prg_priv) |
| 1407 | return -EPROBE_DEFER; |
| 1408 | } |
| 1409 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1410 | ipu->devtype = devtype; |
| 1411 | ipu->ipu_type = devtype->type; |
| 1412 | |
| 1413 | spin_lock_init(&ipu->lock); |
| 1414 | mutex_init(&ipu->channel_lock); |
Philipp Zabel | 93adc8b | 2017-05-08 12:45:52 +0200 | [diff] [blame] | 1415 | INIT_LIST_HEAD(&ipu->channels); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1416 | |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1417 | dev_dbg(&pdev->dev, "cm_reg: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1418 | ipu_base + devtype->cm_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1419 | dev_dbg(&pdev->dev, "idmac: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1420 | ipu_base + devtype->cm_ofs + IPU_CM_IDMAC_REG_OFS); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1421 | dev_dbg(&pdev->dev, "cpmem: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1422 | ipu_base + devtype->cpmem_ofs); |
Steve Longerbeam | 2ffd48f | 2014-08-19 10:52:40 -0700 | [diff] [blame] | 1423 | dev_dbg(&pdev->dev, "csi0: 0x%08lx\n", |
| 1424 | ipu_base + devtype->csi0_ofs); |
| 1425 | dev_dbg(&pdev->dev, "csi1: 0x%08lx\n", |
| 1426 | ipu_base + devtype->csi1_ofs); |
Steve Longerbeam | 1aa8ea0 | 2014-08-11 13:04:50 +0200 | [diff] [blame] | 1427 | dev_dbg(&pdev->dev, "ic: 0x%08lx\n", |
| 1428 | ipu_base + devtype->ic_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1429 | dev_dbg(&pdev->dev, "disp0: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1430 | ipu_base + devtype->disp0_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1431 | dev_dbg(&pdev->dev, "disp1: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1432 | ipu_base + devtype->disp1_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1433 | dev_dbg(&pdev->dev, "srm: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1434 | ipu_base + devtype->srm_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1435 | dev_dbg(&pdev->dev, "tpm: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1436 | ipu_base + devtype->tpm_ofs); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1437 | dev_dbg(&pdev->dev, "dc: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1438 | ipu_base + devtype->cm_ofs + IPU_CM_DC_REG_OFS); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1439 | 	dev_dbg(&pdev->dev, "ic (cm): 0x%08lx\n",
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1440 | ipu_base + devtype->cm_ofs + IPU_CM_IC_REG_OFS); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1441 | dev_dbg(&pdev->dev, "dmfc: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1442 | ipu_base + devtype->cm_ofs + IPU_CM_DMFC_REG_OFS); |
Fabio Estevam | fd563db | 2012-10-24 21:36:46 -0200 | [diff] [blame] | 1443 | dev_dbg(&pdev->dev, "vdi: 0x%08lx\n", |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1444 | ipu_base + devtype->vdi_ofs); |
| 1445 | |
| 1446 | ipu->cm_reg = devm_ioremap(&pdev->dev, |
| 1447 | ipu_base + devtype->cm_ofs, PAGE_SIZE); |
| 1448 | ipu->idmac_reg = devm_ioremap(&pdev->dev, |
| 1449 | ipu_base + devtype->cm_ofs + IPU_CM_IDMAC_REG_OFS, |
| 1450 | PAGE_SIZE); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1451 | |
Steve Longerbeam | 7d2691d | 2014-06-25 18:05:47 -0700 | [diff] [blame] | 1452 | if (!ipu->cm_reg || !ipu->idmac_reg) |
Fabio Estevam | be798b2 | 2013-07-20 18:22:09 -0300 | [diff] [blame] | 1453 | return -ENOMEM; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1454 | |
| 1455 | ipu->clk = devm_clk_get(&pdev->dev, "bus"); |
| 1456 | if (IS_ERR(ipu->clk)) { |
| 1457 | ret = PTR_ERR(ipu->clk); |
 | 1458 | 		dev_err(&pdev->dev, "clk_get failed with %d\n", ret);
Fabio Estevam | be798b2 | 2013-07-20 18:22:09 -0300 | [diff] [blame] | 1459 | return ret; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1460 | } |
| 1461 | |
| 1462 | platform_set_drvdata(pdev, ipu); |
| 1463 | |
Fabio Estevam | 62645a2 | 2013-07-20 18:22:10 -0300 | [diff] [blame] | 1464 | ret = clk_prepare_enable(ipu->clk); |
| 1465 | if (ret) { |
| 1466 | dev_err(&pdev->dev, "clk_prepare_enable failed: %d\n", ret); |
| 1467 | return ret; |
| 1468 | } |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1469 | |
| 1470 | ipu->dev = &pdev->dev; |
| 1471 | ipu->irq_sync = irq_sync; |
| 1472 | ipu->irq_err = irq_err; |
| 1473 | |
Philipp Zabel | 6c64155 | 2013-03-28 17:35:21 +0100 | [diff] [blame] | 1474 | ret = device_reset(&pdev->dev); |
| 1475 | if (ret) { |
| 1476 | dev_err(&pdev->dev, "failed to reset: %d\n", ret); |
| 1477 | goto out_failed_reset; |
| 1478 | } |
| 1479 | ret = ipu_memory_reset(ipu); |
Lothar Waßmann | 4d27b2c | 2012-12-25 15:58:37 +0100 | [diff] [blame] | 1480 | if (ret) |
| 1481 | goto out_failed_reset; |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1482 | |
David Jander | 596a65d | 2015-07-02 16:21:57 +0200 | [diff] [blame] | 1483 | ret = ipu_irq_init(ipu); |
| 1484 | if (ret) |
| 1485 | goto out_failed_irq; |
| 1486 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1487 | /* Set MCU_T to divide MCU access window into 2 */ |
| 1488 | ipu_cm_write(ipu, 0x00400000L | (IPU_MCU_T_DEFAULT << 18), |
| 1489 | IPU_DISP_GEN); |
| 1490 | |
| 1491 | ret = ipu_submodules_init(ipu, pdev, ipu_base, ipu->clk); |
| 1492 | if (ret) |
| 1493 | goto failed_submodules_init; |
| 1494 | |
Philipp Zabel | d6ca8ca | 2012-05-23 17:08:19 +0200 | [diff] [blame] | 1495 | ret = ipu_add_client_devices(ipu, ipu_base); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1496 | if (ret) { |
| 1497 | dev_err(&pdev->dev, "adding client devices failed with %d\n", |
| 1498 | ret); |
| 1499 | goto failed_add_clients; |
| 1500 | } |
| 1501 | |
Fabio Estevam | 9c2c438 | 2012-10-24 21:36:47 -0200 | [diff] [blame] | 1502 | dev_info(&pdev->dev, "%s probed\n", devtype->name); |
| 1503 | |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1504 | return 0; |
| 1505 | |
| 1506 | failed_add_clients: |
| 1507 | ipu_submodules_exit(ipu); |
| 1508 | failed_submodules_init: |
Philipp Zabel | 6c64155 | 2013-03-28 17:35:21 +0100 | [diff] [blame] | 1509 | ipu_irq_exit(ipu); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1510 | out_failed_irq: |
David Jander | 596a65d | 2015-07-02 16:21:57 +0200 | [diff] [blame] | 1511 | out_failed_reset: |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1512 | clk_disable_unprepare(ipu->clk); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1513 | return ret; |
| 1514 | } |
| 1515 | |
Bill Pemberton | 8aa1be4 | 2012-11-19 13:26:38 -0500 | [diff] [blame] | 1516 | static int ipu_remove(struct platform_device *pdev) |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1517 | { |
| 1518 | struct ipu_soc *ipu = platform_get_drvdata(pdev); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1519 | |
| 1520 | platform_device_unregister_children(pdev); |
| 1521 | ipu_submodules_exit(ipu); |
| 1522 | ipu_irq_exit(ipu); |
| 1523 | |
| 1524 | clk_disable_unprepare(ipu->clk); |
| 1525 | |
| 1526 | return 0; |
| 1527 | } |
| 1528 | |
| 1529 | static struct platform_driver imx_ipu_driver = { |
| 1530 | .driver = { |
| 1531 | .name = "imx-ipuv3", |
| 1532 | .of_match_table = imx_ipu_dt_ids, |
| 1533 | }, |
| 1534 | .probe = ipu_probe, |
Bill Pemberton | 99c28f1 | 2012-11-19 13:20:51 -0500 | [diff] [blame] | 1535 | .remove = ipu_remove, |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1536 | }; |
| 1537 | |
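/*
 * The IPU core driver is registered together with the PRE and PRG helper
 * drivers; the latter are only included when DRM support is enabled.
 */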
Lucas Stach | d2a3423 | 2017-03-08 12:13:14 +0100 | [diff] [blame] | 1538 | static struct platform_driver * const drivers[] = { |
Lucas Stach | 30310c8 | 2017-03-23 16:52:02 +0100 | [diff] [blame] | 1539 | #if IS_ENABLED(CONFIG_DRM) |
Lucas Stach | d2a3423 | 2017-03-08 12:13:14 +0100 | [diff] [blame] | 1540 | &ipu_pre_drv, |
Lucas Stach | ea9c260 | 2017-03-08 12:13:16 +0100 | [diff] [blame] | 1541 | &ipu_prg_drv, |
Lucas Stach | 30310c8 | 2017-03-23 16:52:02 +0100 | [diff] [blame] | 1542 | #endif |
Lucas Stach | d2a3423 | 2017-03-08 12:13:14 +0100 | [diff] [blame] | 1543 | &imx_ipu_driver, |
| 1544 | }; |
| 1545 | |
| 1546 | static int __init imx_ipu_init(void) |
| 1547 | { |
| 1548 | return platform_register_drivers(drivers, ARRAY_SIZE(drivers)); |
| 1549 | } |
| 1550 | module_init(imx_ipu_init); |
| 1551 | |
| 1552 | static void __exit imx_ipu_exit(void) |
| 1553 | { |
| 1554 | platform_unregister_drivers(drivers, ARRAY_SIZE(drivers)); |
| 1555 | } |
| 1556 | module_exit(imx_ipu_exit); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1557 | |
Fabio Estevam | 10f2268 | 2013-07-20 18:22:11 -0300 | [diff] [blame] | 1558 | MODULE_ALIAS("platform:imx-ipuv3"); |
Sascha Hauer | aecfbdb | 2012-09-21 10:07:49 +0200 | [diff] [blame] | 1559 | MODULE_DESCRIPTION("i.MX IPU v3 driver"); |
| 1560 | MODULE_AUTHOR("Sascha Hauer <s.hauer@pengutronix.de>"); |
| 1561 | MODULE_LICENSE("GPL"); |