1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * camss-vfe-4-1.c
4  *
5  * Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.1
6  *
7  * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
8  * Copyright (C) 2015-2018 Linaro Ltd.
9  */
10 
11 #include <linux/interrupt.h>
12 #include <linux/io.h>
13 #include <linux/iopoll.h>
14 
15 #include "camss.h"
16 #include "camss-vfe.h"
17 #include "camss-vfe-gen1.h"
18 
19 #define VFE_0_HW_VERSION		0x000
20 
21 #define VFE_0_GLOBAL_RESET_CMD		0x00c
22 #define VFE_0_GLOBAL_RESET_CMD_CORE	BIT(0)
23 #define VFE_0_GLOBAL_RESET_CMD_CAMIF	BIT(1)
24 #define VFE_0_GLOBAL_RESET_CMD_BUS	BIT(2)
25 #define VFE_0_GLOBAL_RESET_CMD_BUS_BDG	BIT(3)
26 #define VFE_0_GLOBAL_RESET_CMD_REGISTER	BIT(4)
27 #define VFE_0_GLOBAL_RESET_CMD_TIMER	BIT(5)
28 #define VFE_0_GLOBAL_RESET_CMD_PM	BIT(6)
29 #define VFE_0_GLOBAL_RESET_CMD_BUS_MISR	BIT(7)
30 #define VFE_0_GLOBAL_RESET_CMD_TESTGEN	BIT(8)
31 
32 #define VFE_0_MODULE_CFG		0x018
33 #define VFE_0_MODULE_CFG_DEMUX			BIT(2)
34 #define VFE_0_MODULE_CFG_CHROMA_UPSAMPLE	BIT(3)
35 #define VFE_0_MODULE_CFG_SCALE_ENC		BIT(23)
36 #define VFE_0_MODULE_CFG_CROP_ENC		BIT(27)
37 
38 #define VFE_0_CORE_CFG			0x01c
39 #define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR	0x4
40 #define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB	0x5
41 #define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY	0x6
42 #define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY	0x7
43 
44 #define VFE_0_IRQ_CMD			0x024
45 #define VFE_0_IRQ_CMD_GLOBAL_CLEAR	BIT(0)
46 
47 #define VFE_0_IRQ_MASK_0		0x028
48 #define VFE_0_IRQ_MASK_0_CAMIF_SOF			BIT(0)
49 #define VFE_0_IRQ_MASK_0_CAMIF_EOF			BIT(1)
50 #define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
51 #define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n)		\
52 	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
53 #define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
54 #define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
55 #define VFE_0_IRQ_MASK_0_RESET_ACK			BIT(31)
56 #define VFE_0_IRQ_MASK_1		0x02c
57 #define VFE_0_IRQ_MASK_1_CAMIF_ERROR			BIT(0)
58 #define VFE_0_IRQ_MASK_1_VIOLATION			BIT(7)
59 #define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK		BIT(8)
60 #define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n)	BIT((n) + 9)
61 #define VFE_0_IRQ_MASK_1_RDIn_SOF(n)			BIT((n) + 29)
62 
63 #define VFE_0_IRQ_CLEAR_0		0x030
64 #define VFE_0_IRQ_CLEAR_1		0x034
65 
66 #define VFE_0_IRQ_STATUS_0		0x038
67 #define VFE_0_IRQ_STATUS_0_CAMIF_SOF			BIT(0)
68 #define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
69 #define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n)		\
70 	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
71 #define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
72 #define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
73 #define VFE_0_IRQ_STATUS_0_RESET_ACK			BIT(31)
74 #define VFE_0_IRQ_STATUS_1		0x03c
75 #define VFE_0_IRQ_STATUS_1_VIOLATION			BIT(7)
76 #define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK		BIT(8)
77 #define VFE_0_IRQ_STATUS_1_RDIn_SOF(n)			BIT((n) + 29)
78 
79 #define VFE_0_IRQ_COMPOSITE_MASK_0	0x40
80 #define VFE_0_VIOLATION_STATUS		0x48
81 
82 #define VFE_0_BUS_CMD			0x4c
83 #define VFE_0_BUS_CMD_Mx_RLD_CMD(x)	BIT(x)
84 
85 #define VFE_0_BUS_CFG			0x050
86 
87 #define VFE_0_BUS_XBAR_CFG_x(x)		(0x58 + 0x4 * ((x) / 2))
88 #define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN			BIT(1)
89 #define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA	(0x3 << 4)
90 #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT		8
91 #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA		0
92 #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0	5
93 #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1	6
94 #define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2	7
95 
96 #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n)		(0x06c + 0x24 * (n))
97 #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT	0
98 #define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT	1
99 #define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n)	(0x070 + 0x24 * (n))
100 #define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n)	(0x074 + 0x24 * (n))
101 #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n)		(0x078 + 0x24 * (n))
102 #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT	2
103 #define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK	(0x1f << 2)
104 
105 #define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n)		(0x07c + 0x24 * (n))
106 #define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT	16
107 #define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n)	(0x080 + 0x24 * (n))
108 #define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n)	(0x084 + 0x24 * (n))
109 #define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n)	\
110 							(0x088 + 0x24 * (n))
111 #define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n)	\
112 							(0x08c + 0x24 * (n))
113 #define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF	0xffffffff
114 
115 #define VFE_0_BUS_PING_PONG_STATUS	0x268
116 
117 #define VFE_0_BUS_BDG_CMD		0x2c0
118 #define VFE_0_BUS_BDG_CMD_HALT_REQ	1
119 
120 #define VFE_0_BUS_BDG_QOS_CFG_0		0x2c4
121 #define VFE_0_BUS_BDG_QOS_CFG_0_CFG	0xaaa5aaa5
122 #define VFE_0_BUS_BDG_QOS_CFG_1		0x2c8
123 #define VFE_0_BUS_BDG_QOS_CFG_2		0x2cc
124 #define VFE_0_BUS_BDG_QOS_CFG_3		0x2d0
125 #define VFE_0_BUS_BDG_QOS_CFG_4		0x2d4
126 #define VFE_0_BUS_BDG_QOS_CFG_5		0x2d8
127 #define VFE_0_BUS_BDG_QOS_CFG_6		0x2dc
128 #define VFE_0_BUS_BDG_QOS_CFG_7		0x2e0
129 #define VFE_0_BUS_BDG_QOS_CFG_7_CFG	0x0001aaa5
130 
131 #define VFE_0_RDI_CFG_x(x)		(0x2e8 + (0x4 * (x)))
132 #define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT	28
133 #define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK	(0xf << 28)
134 #define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT	4
135 #define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK		(0xf << 4)
136 #define VFE_0_RDI_CFG_x_RDI_EN_BIT		BIT(2)
137 #define VFE_0_RDI_CFG_x_MIPI_EN_BITS		0x3
138 #define VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(r)	BIT(16 + (r))
139 
140 #define VFE_0_CAMIF_CMD				0x2f4
141 #define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY	0
142 #define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY	1
143 #define VFE_0_CAMIF_CMD_NO_CHANGE		3
144 #define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS	BIT(2)
145 #define VFE_0_CAMIF_CFG				0x2f8
146 #define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN		BIT(6)
147 #define VFE_0_CAMIF_FRAME_CFG			0x300
148 #define VFE_0_CAMIF_WINDOW_WIDTH_CFG		0x304
149 #define VFE_0_CAMIF_WINDOW_HEIGHT_CFG		0x308
150 #define VFE_0_CAMIF_SUBSAMPLE_CFG_0		0x30c
151 #define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN	0x314
152 #define VFE_0_CAMIF_STATUS			0x31c
153 #define VFE_0_CAMIF_STATUS_HALT			BIT(31)
154 
155 #define VFE_0_REG_UPDATE			0x378
156 #define VFE_0_REG_UPDATE_RDIn(n)		BIT(1 + (n))
157 #define VFE_0_REG_UPDATE_line_n(n)		\
158 			((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))
159 
160 #define VFE_0_DEMUX_CFG				0x424
161 #define VFE_0_DEMUX_CFG_PERIOD			0x3
162 #define VFE_0_DEMUX_GAIN_0			0x428
163 #define VFE_0_DEMUX_GAIN_0_CH0_EVEN		(0x80 << 0)
164 #define VFE_0_DEMUX_GAIN_0_CH0_ODD		(0x80 << 16)
165 #define VFE_0_DEMUX_GAIN_1			0x42c
166 #define VFE_0_DEMUX_GAIN_1_CH1			(0x80 << 0)
167 #define VFE_0_DEMUX_GAIN_1_CH2			(0x80 << 16)
168 #define VFE_0_DEMUX_EVEN_CFG			0x438
169 #define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV	0x9cac
170 #define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU	0xac9c
171 #define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY	0xc9ca
172 #define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY	0xcac9
173 #define VFE_0_DEMUX_ODD_CFG			0x43c
174 #define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV	0x9cac
175 #define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU	0xac9c
176 #define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY	0xc9ca
177 #define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY	0xcac9
178 
179 #define VFE_0_SCALE_ENC_Y_CFG			0x75c
180 #define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE		0x760
181 #define VFE_0_SCALE_ENC_Y_H_PHASE		0x764
182 #define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE		0x76c
183 #define VFE_0_SCALE_ENC_Y_V_PHASE		0x770
184 #define VFE_0_SCALE_ENC_CBCR_CFG		0x778
185 #define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE	0x77c
186 #define VFE_0_SCALE_ENC_CBCR_H_PHASE		0x780
187 #define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE	0x790
188 #define VFE_0_SCALE_ENC_CBCR_V_PHASE		0x794
189 
190 #define VFE_0_CROP_ENC_Y_WIDTH			0x854
191 #define VFE_0_CROP_ENC_Y_HEIGHT			0x858
192 #define VFE_0_CROP_ENC_CBCR_WIDTH		0x85c
193 #define VFE_0_CROP_ENC_CBCR_HEIGHT		0x860
194 
195 #define VFE_0_CLAMP_ENC_MAX_CFG			0x874
196 #define VFE_0_CLAMP_ENC_MAX_CFG_CH0		(0xff << 0)
197 #define VFE_0_CLAMP_ENC_MAX_CFG_CH1		(0xff << 8)
198 #define VFE_0_CLAMP_ENC_MAX_CFG_CH2		(0xff << 16)
199 #define VFE_0_CLAMP_ENC_MIN_CFG			0x878
200 #define VFE_0_CLAMP_ENC_MIN_CFG_CH0		(0x0 << 0)
201 #define VFE_0_CLAMP_ENC_MIN_CFG_CH1		(0x0 << 8)
202 #define VFE_0_CLAMP_ENC_MIN_CFG_CH2		(0x0 << 16)
203 
204 #define VFE_0_CGC_OVERRIDE_1			0x974
205 #define VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(x)	BIT(x)
206 
207 #define CAMIF_TIMEOUT_SLEEP_US 1000
208 #define CAMIF_TIMEOUT_ALL_US 1000000
209 
210 #define MSM_VFE_VFE0_UB_SIZE 1023
211 #define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)
212 
213 static u32 vfe_hw_version(struct vfe_device *vfe)
214 {
215 	u32 hw_version = readl_relaxed(vfe->base + VFE_0_HW_VERSION);
216 
217 	dev_dbg(vfe->camss->dev, "VFE HW Version = 0x%08x\n", hw_version);
218 
219 	return hw_version;
220 }
221 
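/*
 * Per-VFE UB (bus write-master buffer) budget: VFE0 has MSM_VFE_VFE0_UB_SIZE
 * words and the value returned here is that budget split three ways, i.e.
 * the slice available to each RDI write master; the gen1 core presumably
 * places each master's UB window using this size. Other VFE ids get 0.
 */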
222 static u16 vfe_get_ub_size(u8 vfe_id)
223 {
224 	if (vfe_id == 0)
225 		return MSM_VFE_VFE0_UB_SIZE_RDI;
226 
227 	return 0;
228 }
229 
230 static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
231 {
232 	u32 bits = readl_relaxed(vfe->base + reg);
233 
234 	writel_relaxed(bits & ~clr_bits, vfe->base + reg);
235 }
236 
237 static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
238 {
239 	u32 bits = readl_relaxed(vfe->base + reg);
240 
241 	writel_relaxed(bits | set_bits, vfe->base + reg);
242 }
243 
244 static void vfe_global_reset(struct vfe_device *vfe)
245 {
246 	u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_TESTGEN		|
247 			 VFE_0_GLOBAL_RESET_CMD_BUS_MISR	|
248 			 VFE_0_GLOBAL_RESET_CMD_PM		|
249 			 VFE_0_GLOBAL_RESET_CMD_TIMER		|
250 			 VFE_0_GLOBAL_RESET_CMD_REGISTER	|
251 			 VFE_0_GLOBAL_RESET_CMD_BUS_BDG		|
252 			 VFE_0_GLOBAL_RESET_CMD_BUS		|
253 			 VFE_0_GLOBAL_RESET_CMD_CAMIF		|
254 			 VFE_0_GLOBAL_RESET_CMD_CORE;
255 
256 	writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
257 }
258 
259 static void vfe_halt_request(struct vfe_device *vfe)
260 {
261 	writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
262 		       vfe->base + VFE_0_BUS_BDG_CMD);
263 }
264 
265 static void vfe_halt_clear(struct vfe_device *vfe)
266 {
267 	writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
268 }
269 
270 static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
271 {
272 	if (enable)
273 		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
274 			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
275 	else
276 		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
277 			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
278 }
279 
280 static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
281 {
282 	if (enable)
283 		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
284 			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
285 	else
286 		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
287 			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_FRM_BASED_SHIFT);
288 }
289 
290 static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
291 			     u16 *width, u16 *height, u16 *bytesperline)
292 {
293 	*width = pix->width;
294 	*height = pix->height;
295 	*bytesperline = pix->plane_fmt[0].bytesperline;
296 
297 	if (pix->pixelformat == V4L2_PIX_FMT_NV12 ||
298 	    pix->pixelformat == V4L2_PIX_FMT_NV21)
299 		if (plane == 1)
300 			*height /= 2;
301 }
302 
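/*
 * Line-based write-master setup. IMAGE_SIZE packs (height - 1) in its low
 * half and the active words-per-line, rounded up to a pair and made
 * zero-based, in bits 31:16. BUFFER_CFG keeps 0x3 in its low bits, repeats
 * (height - 1) at bit 4 and carries the words-per-line of the full
 * bytesperline stride at bit 16; the word unit is whatever
 * vfe_word_per_line() counts in for the given pixel format.
 */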
303 static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
304 			      struct v4l2_pix_format_mplane *pix,
305 			      u8 plane, u32 enable)
306 {
307 	u32 reg;
308 
309 	if (enable) {
310 		u16 width = 0, height = 0, bytesperline = 0, wpl;
311 
312 		vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);
313 
314 		wpl = vfe_word_per_line(pix->pixelformat, width);
315 
316 		reg = height - 1;
317 		reg |= ((wpl + 1) / 2 - 1) << 16;
318 
319 		writel_relaxed(reg, vfe->base +
320 			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
321 
322 		wpl = vfe_word_per_line(pix->pixelformat, bytesperline);
323 
324 		reg = 0x3;
325 		reg |= (height - 1) << 4;
326 		reg |= wpl << 16;
327 
328 		writel_relaxed(reg, vfe->base +
329 			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
330 	} else {
331 		writel_relaxed(0, vfe->base +
332 			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
333 		writel_relaxed(0, vfe->base +
334 			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
335 	}
336 }
337 
338 static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
339 {
340 	u32 reg;
341 
342 	reg = readl_relaxed(vfe->base +
343 			    VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
344 
345 	reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);
346 
347 	reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
348 		& VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;
349 
350 	writel_relaxed(reg,
351 		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
352 }
353 
354 static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
355 					 u32 pattern)
356 {
357 	writel_relaxed(pattern,
358 	       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
359 }
360 
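/*
 * Program a write master's UB window: the word offset goes into bits 31:16
 * (VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) and the window depth
 * into the low bits of the same register.
 */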
361 static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
362 			      u16 offset, u16 depth)
363 {
364 	u32 reg;
365 
366 	reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
367 		depth;
368 	writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
369 }
370 
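/*
 * Tell the bus to re-latch the ping/pong addresses of one write master. The
 * barrier before the MMIO write orders any previously posted address writes
 * ahead of the reload command, and the one after keeps the command ahead of
 * whatever relaxed writes follow.
 */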
371 static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
372 {
373 	wmb();
374 	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);
375 	wmb();
376 }
377 
378 static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
379 {
380 	writel_relaxed(addr,
381 		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
382 }
383 
384 static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
385 {
386 	writel_relaxed(addr,
387 		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
388 }
389 
390 static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
391 {
392 	u32 reg;
393 
394 	reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);
395 
396 	return (reg >> wm) & 0x1;
397 }
398 
399 static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
400 {
401 	if (enable)
402 		writel_relaxed(0x10000009, vfe->base + VFE_0_BUS_CFG);
403 	else
404 		writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
405 }
406 
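/*
 * Route an RDI line to a bus write master: RDI_CFG_x(0) gets the MIPI
 * enable and the per-RDI frame-based bit, RDI_CFG_x(id) is enabled with a
 * stream select of 3 * id (value taken as-is from the programming model),
 * and the XBAR entry for the write master selects the matching RDI source.
 * Each XBAR_CFG register describes two masters, so odd masters use the
 * upper 16 bits.
 */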
407 static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
408 				      enum vfe_line_id id)
409 {
410 	u32 reg;
411 
412 	reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
413 	reg |= VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
414 	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);
415 
416 	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
417 	reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
418 		VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
419 	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);
420 
421 	switch (id) {
422 	case VFE_LINE_RDI0:
423 	default:
424 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
425 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
426 		break;
427 	case VFE_LINE_RDI1:
428 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
429 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
430 		break;
431 	case VFE_LINE_RDI2:
432 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
433 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
434 		break;
435 	}
436 
437 	if (wm % 2 == 1)
438 		reg <<= 16;
439 
440 	vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
441 }
442 
443 static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
444 {
445 	writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
446 		       vfe->base +
447 		       VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
448 }
449 
450 static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
451 					   enum vfe_line_id id)
452 {
453 	u32 reg;
454 
455 	reg = VFE_0_RDI_CFG_x_RDI_Mr_FRAME_BASED_EN(id);
456 	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(0), reg);
457 
458 	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
459 	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);
460 
461 	switch (id) {
462 	case VFE_LINE_RDI0:
463 	default:
464 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
465 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
466 		break;
467 	case VFE_LINE_RDI1:
468 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
469 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
470 		break;
471 	case VFE_LINE_RDI2:
472 		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
473 		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
474 		break;
475 	}
476 
477 	if (wm % 2 == 1)
478 		reg <<= 16;
479 
480 	vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
481 }
482 
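/*
 * XBAR routing for the PIX/encoder output: the first write master carries
 * the luma single stream and the second the chroma pair stream, with the
 * inter/intra swap enabled for NV12/NV16, presumably to produce the CbCr
 * ordering those formats expect. Odd write masters again sit in the upper
 * 16 bits of their XBAR_CFG register.
 */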
483 static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
484 			     u8 enable)
485 {
486 	struct vfe_line *line = container_of(output, struct vfe_line, output);
487 	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
488 	u32 reg;
489 	unsigned int i;
490 
491 	for (i = 0; i < output->wm_num; i++) {
492 		if (i == 0) {
493 			reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
494 				VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
495 		} else if (i == 1) {
496 			reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
497 			if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
498 				reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;
499 		} else {
500 			/* On current devices output->wm_num is always <= 2 */
501 			break;
502 		}
503 
504 		if (output->wm_idx[i] % 2 == 1)
505 			reg <<= 16;
506 
507 		if (enable)
508 			vfe_reg_set(vfe,
509 				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
510 				    reg);
511 		else
512 			vfe_reg_clr(vfe,
513 				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[i]),
514 				    reg);
515 	}
516 }
517 
518 static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
519 				u8 enable)
520 {
521 	/* empty */
522 }
523 static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
524 {
525 	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
526 		    VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);
527 
528 	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
529 		    cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
530 }
531 
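/*
 * REG_UPDATE appears to latch double-buffered configuration at the next
 * frame boundary, so the driver accumulates the requested bits in
 * vfe->reg_update and rewrites the whole mask here; a line's bit is only
 * dropped again via vfe_reg_update_clear() once the corresponding update
 * interrupt has been handled.
 */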
532 static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
533 {
534 	vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);
535 	wmb();
536 	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);
537 	wmb();
538 }
539 
540 static inline void vfe_reg_update_clear(struct vfe_device *vfe,
541 					enum vfe_line_id line_id)
542 {
543 	vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
544 }
545 
546 static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
547 				   enum vfe_line_id line_id, u8 enable)
548 {
549 	u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
550 		      VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
551 	u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
552 		      VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);
553 
554 	if (enable) {
555 		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
556 		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
557 	} else {
558 		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
559 		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
560 	}
561 }
562 
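/*
 * IRQs for the PIX line: on top of CAMIF SOF/EOF and the line's REG_UPDATE,
 * completion is signalled through a composite IRQ, so every write master of
 * the output is added to composite group 'comp' in IRQ_COMPOSITE_MASK_0
 * (8 bits per group, hence the comp * 8 shift) and bus overflow is
 * unmasked for each of those masters in the second IRQ mask register.
 */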
563 static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
564 				    enum vfe_line_id line_id, u8 enable)
565 {
566 	struct vfe_output *output = &vfe->line[line_id].output;
567 	unsigned int i;
568 	u32 irq_en0;
569 	u32 irq_en1;
570 	u32 comp_mask = 0;
571 
572 	irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
573 	irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
574 	irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
575 	irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
576 	irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
577 	for (i = 0; i < output->wm_num; i++) {
578 		irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(
579 							output->wm_idx[i]);
580 		comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
581 	}
582 
583 	if (enable) {
584 		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
585 		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
586 		vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
587 	} else {
588 		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
589 		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
590 		vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
591 	}
592 }
593 
594 static void vfe_enable_irq_common(struct vfe_device *vfe)
595 {
596 	u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
597 	u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
598 		      VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;
599 
600 	vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
601 	vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
602 }
603 
604 static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
605 {
606 	u32 val, even_cfg, odd_cfg;
607 
608 	writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);
609 
610 	val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
611 	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);
612 
613 	val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
614 	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);
615 
616 	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
617 	case MEDIA_BUS_FMT_YUYV8_1X16:
618 		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
619 		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
620 		break;
621 	case MEDIA_BUS_FMT_YVYU8_1X16:
622 		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
623 		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
624 		break;
625 	case MEDIA_BUS_FMT_UYVY8_1X16:
626 	default:
627 		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
628 		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
629 		break;
630 	case MEDIA_BUS_FMT_VYUY8_1X16:
631 		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
632 		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
633 		break;
634 	}
635 
636 	writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
637 	writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
638 }
639 
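/*
 * Encoder scaler: for each direction the IMAGE_SIZE register holds the
 * output size in its upper half and the input size in its lower half, and
 * the PHASE register holds interp_reso at bit 20 together with
 * phase_mult = input * 2^(13 + interp_reso) / output, i.e. the scaling
 * ratio in fixed point with (13 + interp_reso) fractional bits. The chroma
 * plane is half the luma width, and also half the height for the 4:2:0
 * NV12/NV21 formats.
 */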
640 static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
641 {
642 	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
643 	u32 reg;
644 	u16 input, output;
645 	u8 interp_reso;
646 	u32 phase_mult;
647 
648 	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);
649 
650 	input = line->fmt[MSM_VFE_PAD_SINK].width;
651 	output = line->compose.width;
652 	reg = (output << 16) | input;
653 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);
654 
655 	interp_reso = vfe_calc_interp_reso(input, output);
656 	phase_mult = input * (1 << (13 + interp_reso)) / output;
657 	reg = (interp_reso << 20) | phase_mult;
658 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);
659 
660 	input = line->fmt[MSM_VFE_PAD_SINK].height;
661 	output = line->compose.height;
662 	reg = (output << 16) | input;
663 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);
664 
665 	interp_reso = vfe_calc_interp_reso(input, output);
666 	phase_mult = input * (1 << (13 + interp_reso)) / output;
667 	reg = (interp_reso << 20) | phase_mult;
668 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);
669 
670 	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);
671 
672 	input = line->fmt[MSM_VFE_PAD_SINK].width;
673 	output = line->compose.width / 2;
674 	reg = (output << 16) | input;
675 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);
676 
677 	interp_reso = vfe_calc_interp_reso(input, output);
678 	phase_mult = input * (1 << (13 + interp_reso)) / output;
679 	reg = (interp_reso << 20) | phase_mult;
680 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);
681 
682 	input = line->fmt[MSM_VFE_PAD_SINK].height;
683 	output = line->compose.height;
684 	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
685 		output = line->compose.height / 2;
686 	reg = (output << 16) | input;
687 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);
688 
689 	interp_reso = vfe_calc_interp_reso(input, output);
690 	phase_mult = input * (1 << (13 + interp_reso)) / output;
691 	reg = (interp_reso << 20) | phase_mult;
692 	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
693 }
694 
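/*
 * Encoder crop: each register takes the first pixel (or line) in its upper
 * half and the last one in its lower half. The chroma window is half the
 * luma width, and for NV12/NV21 half the luma height as well, matching the
 * 4:2:0 subsampling.
 */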
695 static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
696 {
697 	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
698 	u32 reg;
699 	u16 first, last;
700 
701 	first = line->crop.left;
702 	last = line->crop.left + line->crop.width - 1;
703 	reg = (first << 16) | last;
704 	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);
705 
706 	first = line->crop.top;
707 	last = line->crop.top + line->crop.height - 1;
708 	reg = (first << 16) | last;
709 	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);
710 
711 	first = line->crop.left / 2;
712 	last = line->crop.left / 2 + line->crop.width / 2 - 1;
713 	reg = (first << 16) | last;
714 	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);
715 
716 	first = line->crop.top;
717 	last = line->crop.top + line->crop.height - 1;
718 	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
719 		first = line->crop.top / 2;
720 		last = line->crop.top / 2 + line->crop.height / 2 - 1;
721 	}
722 	reg = (first << 16) | last;
723 	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
724 }
725 
726 static void vfe_set_clamp_cfg(struct vfe_device *vfe)
727 {
728 	u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
729 		VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
730 		VFE_0_CLAMP_ENC_MAX_CFG_CH2;
731 
732 	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);
733 
734 	val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
735 		VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
736 		VFE_0_CLAMP_ENC_MIN_CFG_CH2;
737 
738 	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
739 }
740 
741 static void vfe_set_qos(struct vfe_device *vfe)
742 {
743 	u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
744 	u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;
745 
746 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
747 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
748 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
749 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
750 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
751 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
752 	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
753 	writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
754 }
755 
756 static void vfe_set_ds(struct vfe_device *vfe)
757 {
758 	/* empty */
759 }
760 
761 static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
762 {
763 	u32 val = VFE_0_CGC_OVERRIDE_1_IMAGE_Mx_CGC_OVERRIDE(wm);
764 
765 	if (enable)
766 		vfe_reg_set(vfe, VFE_0_CGC_OVERRIDE_1, val);
767 	else
768 		vfe_reg_clr(vfe, VFE_0_CGC_OVERRIDE_1, val);
769 
770 	wmb();
771 }
772 
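/*
 * CAMIF input window. Width values are programmed as pixel width * 2,
 * presumably because the 16-bit YUV 4:2:2 bus format carries two samples
 * per pixel; the subsample and IRQ-subsample pattern registers are written
 * with all ones so no CAMIF-side skipping takes place.
 */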
773 static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
774 {
775 	u32 val;
776 
777 	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
778 	case MEDIA_BUS_FMT_YUYV8_1X16:
779 		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
780 		break;
781 	case MEDIA_BUS_FMT_YVYU8_1X16:
782 		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
783 		break;
784 	case MEDIA_BUS_FMT_UYVY8_1X16:
785 	default:
786 		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
787 		break;
788 	case MEDIA_BUS_FMT_VYUY8_1X16:
789 		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
790 		break;
791 	}
792 
793 	writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);
794 
795 	val = line->fmt[MSM_VFE_PAD_SINK].width * 2;
796 	val |= line->fmt[MSM_VFE_PAD_SINK].height << 16;
797 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);
798 
799 	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
800 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);
801 
802 	val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
803 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);
804 
805 	val = 0xffffffff;
806 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG_0);
807 
808 	val = 0xffffffff;
809 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);
810 
811 	val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
812 	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);
813 
814 	val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
815 	writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
816 }
817 
818 static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
819 {
820 	u32 cmd;
821 
822 	cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
823 	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
824 	wmb();
825 
826 	if (enable)
827 		cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
828 	else
829 		cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;
830 
831 	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
832 }
833 
834 static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
835 {
836 	u32 val = VFE_0_MODULE_CFG_DEMUX |
837 		  VFE_0_MODULE_CFG_CHROMA_UPSAMPLE |
838 		  VFE_0_MODULE_CFG_SCALE_ENC |
839 		  VFE_0_MODULE_CFG_CROP_ENC;
840 
841 	if (enable)
842 		writel_relaxed(val, vfe->base + VFE_0_MODULE_CFG);
843 	else
844 		writel_relaxed(0x0, vfe->base + VFE_0_MODULE_CFG);
845 }
846 
847 static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
848 {
849 	u32 val;
850 	int ret;
851 
852 	ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
853 				 val,
854 				 (val & VFE_0_CAMIF_STATUS_HALT),
855 				 CAMIF_TIMEOUT_SLEEP_US,
856 				 CAMIF_TIMEOUT_ALL_US);
857 	if (ret < 0)
858 		dev_err(dev, "%s: camif stop timeout\n", __func__);
859 
860 	return ret;
861 }
862 
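/*
 * Read and acknowledge both IRQ status registers: the raw values are handed
 * back to the caller, the same bits are written to the CLEAR registers, and
 * after a barrier the global clear command latches those clears in the
 * hardware.
 */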
863 static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
864 {
865 	*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
866 	*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);
867 
868 	writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
869 	writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);
870 
871 	wmb();
872 	writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
873 }
874 
875 static void vfe_violation_read(struct vfe_device *vfe)
876 {
877 	u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);
878 
879 	pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
880 }
881 
882 /*
883  * vfe_isr - VFE module interrupt handler
884  * @irq: Interrupt line
885  * @dev: VFE device
886  *
887  * Return IRQ_HANDLED on success
888  */
889 static irqreturn_t vfe_isr(int irq, void *dev)
890 {
891 	struct vfe_device *vfe = dev;
892 	u32 value0, value1;
893 	int i, j;
894 
895 	vfe->res->hw_ops->isr_read(vfe, &value0, &value1);
896 
897 	dev_dbg(vfe->camss->dev, "VFE: status0 = 0x%08x, status1 = 0x%08x\n",
898 		value0, value1);
899 
900 	if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
901 		vfe->isr_ops.reset_ack(vfe);
902 
903 	if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
904 		vfe->res->hw_ops->violation_read(vfe);
905 
906 	if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
907 		vfe->isr_ops.halt_ack(vfe);
908 
909 	for (i = VFE_LINE_RDI0; i <= VFE_LINE_PIX; i++)
910 		if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
911 			vfe->isr_ops.reg_update(vfe, i);
912 
913 	if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
914 		vfe->isr_ops.sof(vfe, VFE_LINE_PIX);
915 
916 	for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
917 		if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
918 			vfe->isr_ops.sof(vfe, i);
919 
920 	for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
921 		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
922 			vfe->isr_ops.comp_done(vfe, i);
923 			for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
924 				if (vfe->wm_output_map[j] == VFE_LINE_PIX)
925 					value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
926 		}
927 
928 	for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
929 		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
930 			vfe->isr_ops.wm_done(vfe, i);
931 
932 	return IRQ_HANDLED;
933 }
934 
935 /*
936  * vfe_4_1_pm_domain_off - Disable power domains specific to this VFE.
937  * @vfe: VFE Device
938  */
939 static void vfe_4_1_pm_domain_off(struct vfe_device *vfe)
940 {
941 	/* nop */
942 }
943 
944 /*
945  * vfe_4_1_pm_domain_on - Enable power domains specific to this VFE.
946  * @vfe: VFE Device
947  */
948 static int vfe_4_1_pm_domain_on(struct vfe_device *vfe)
949 {
950 	return 0;
951 }
952 
953 static const struct vfe_hw_ops_gen1 vfe_ops_gen1_4_1 = {
954 	.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
955 	.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
956 	.bus_enable_wr_if = vfe_bus_enable_wr_if,
957 	.bus_reload_wm = vfe_bus_reload_wm,
958 	.camif_wait_for_stop = vfe_camif_wait_for_stop,
959 	.enable_irq_common = vfe_enable_irq_common,
960 	.enable_irq_pix_line = vfe_enable_irq_pix_line,
961 	.enable_irq_wm_line = vfe_enable_irq_wm_line,
962 	.get_ub_size = vfe_get_ub_size,
963 	.halt_clear = vfe_halt_clear,
964 	.halt_request = vfe_halt_request,
965 	.set_camif_cfg = vfe_set_camif_cfg,
966 	.set_camif_cmd = vfe_set_camif_cmd,
967 	.set_cgc_override = vfe_set_cgc_override,
968 	.set_clamp_cfg = vfe_set_clamp_cfg,
969 	.set_crop_cfg = vfe_set_crop_cfg,
970 	.set_demux_cfg = vfe_set_demux_cfg,
971 	.set_ds = vfe_set_ds,
972 	.set_module_cfg = vfe_set_module_cfg,
973 	.set_qos = vfe_set_qos,
974 	.set_rdi_cid = vfe_set_rdi_cid,
975 	.set_realign_cfg = vfe_set_realign_cfg,
976 	.set_scale_cfg = vfe_set_scale_cfg,
977 	.set_xbar_cfg = vfe_set_xbar_cfg,
978 	.wm_enable = vfe_wm_enable,
979 	.wm_frame_based = vfe_wm_frame_based,
980 	.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
981 	.wm_line_based = vfe_wm_line_based,
982 	.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
983 	.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
984 	.wm_set_ping_addr = vfe_wm_set_ping_addr,
985 	.wm_set_pong_addr = vfe_wm_set_pong_addr,
986 	.wm_set_subsample = vfe_wm_set_subsample,
987 	.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
988 };
989 
990 static void vfe_subdev_init(struct device *dev, struct vfe_device *vfe)
991 {
992 	vfe->isr_ops = vfe_isr_ops_gen1;
993 	vfe->ops_gen1 = &vfe_ops_gen1_4_1;
994 	vfe->video_ops = vfe_video_ops_gen1;
995 }
996 
997 const struct vfe_hw_ops vfe_ops_4_1 = {
998 	.global_reset = vfe_global_reset,
999 	.hw_version = vfe_hw_version,
1000 	.isr_read = vfe_isr_read,
1001 	.isr = vfe_isr,
1002 	.pm_domain_off = vfe_4_1_pm_domain_off,
1003 	.pm_domain_on = vfe_4_1_pm_domain_on,
1004 	.reg_update_clear = vfe_reg_update_clear,
1005 	.reg_update = vfe_reg_update,
1006 	.subdev_init = vfe_subdev_init,
1007 	.vfe_disable = vfe_gen1_disable,
1008 	.vfe_enable = vfe_gen1_enable,
1009 	.vfe_halt = vfe_gen1_halt,
1010 	.violation_read = vfe_violation_read,
1011 };
1012