1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Driver for STM32 Digital Camera Memory Interface
4  *
5  * Copyright (C) STMicroelectronics SA 2017
6  * Authors: Yannick Fertre <yannick.fertre@st.com>
7  *          Hugues Fruchet <hugues.fruchet@st.com>
8  *          for STMicroelectronics.
9  *
10  * This driver is based on atmel_isi.c
11  *
12  */
13 
14 #include <linux/clk.h>
15 #include <linux/completion.h>
16 #include <linux/delay.h>
17 #include <linux/dmaengine.h>
18 #include <linux/init.h>
19 #include <linux/interrupt.h>
20 #include <linux/kernel.h>
21 #include <linux/module.h>
22 #include <linux/of.h>
23 #include <linux/of_device.h>
24 #include <linux/of_graph.h>
25 #include <linux/pinctrl/consumer.h>
26 #include <linux/platform_device.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/reset.h>
29 #include <linux/videodev2.h>
30 
31 #include <media/v4l2-ctrls.h>
32 #include <media/v4l2-dev.h>
33 #include <media/v4l2-device.h>
34 #include <media/v4l2-event.h>
35 #include <media/v4l2-fwnode.h>
36 #include <media/v4l2-image-sizes.h>
37 #include <media/v4l2-ioctl.h>
38 #include <media/v4l2-rect.h>
39 #include <media/videobuf2-dma-contig.h>
40 
41 #define DRV_NAME "stm32-dcmi"
42 
43 /* Registers offset for DCMI */
44 #define DCMI_CR		0x00 /* Control Register */
45 #define DCMI_SR		0x04 /* Status Register */
46 #define DCMI_RIS	0x08 /* Raw Interrupt Status register */
47 #define DCMI_IER	0x0C /* Interrupt Enable Register */
48 #define DCMI_MIS	0x10 /* Masked Interrupt Status register */
49 #define DCMI_ICR	0x14 /* Interrupt Clear Register */
50 #define DCMI_ESCR	0x18 /* Embedded Synchronization Code Register */
51 #define DCMI_ESUR	0x1C /* Embedded Synchronization Unmask Register */
52 #define DCMI_CWSTRT	0x20 /* Crop Window STaRT */
53 #define DCMI_CWSIZE	0x24 /* Crop Window SIZE */
54 #define DCMI_DR		0x28 /* Data Register */
55 #define DCMI_IDR	0x2C /* IDentifier Register */
56 
57 /* Bits definition for control register (DCMI_CR) */
58 #define CR_CAPTURE	BIT(0)
59 #define CR_CM		BIT(1)
60 #define CR_CROP		BIT(2)
61 #define CR_JPEG		BIT(3)
62 #define CR_ESS		BIT(4)
63 #define CR_PCKPOL	BIT(5)
64 #define CR_HSPOL	BIT(6)
65 #define CR_VSPOL	BIT(7)
66 #define CR_FCRC_0	BIT(8)
67 #define CR_FCRC_1	BIT(9)
68 #define CR_EDM_0	BIT(10)
69 #define CR_EDM_1	BIT(11)
70 #define CR_ENABLE	BIT(14)
71 
72 /* Bits definition for status register (DCMI_SR) */
73 #define SR_HSYNC	BIT(0)
74 #define SR_VSYNC	BIT(1)
75 #define SR_FNE		BIT(2)
76 
77 /*
78  * Bits definition for interrupt registers
79  * (DCMI_RIS, DCMI_IER, DCMI_MIS, DCMI_ICR)
80  */
81 #define IT_FRAME	BIT(0)
82 #define IT_OVR		BIT(1)
83 #define IT_ERR		BIT(2)
84 #define IT_VSYNC	BIT(3)
85 #define IT_LINE		BIT(4)
86 
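/*
 * Capture state machine: the queue starts in STOPPED;
 * start_streaming() moves it to RUNNING when a buffer is already
 * queued, or to WAIT_FOR_BUFFER otherwise. A capture restart with an
 * empty buffer list also falls back to WAIT_FOR_BUFFER, and
 * buf_queue() resumes capture from that state. stop_streaming()
 * returns the state to STOPPED.
 */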
87 enum state {
88 	STOPPED = 0,
89 	WAIT_FOR_BUFFER,
90 	RUNNING,
91 };
92 
93 #define MIN_WIDTH	16U
94 #define MAX_WIDTH	2592U
95 #define MIN_HEIGHT	16U
96 #define MAX_HEIGHT	2592U
97 
98 #define TIMEOUT_MS	1000
99 
100 struct dcmi_graph_entity {
101 	struct device_node *node;
102 
103 	struct v4l2_async_subdev asd;
104 	struct v4l2_subdev *subdev;
105 };
106 
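/*
 * struct dcmi_format - DCMI pixel format description
 * @fourcc:	V4L2 pixel format (V4L2_PIX_FMT_*)
 * @mbus_code:	media bus code produced by the sensor (MEDIA_BUS_FMT_*)
 * @bpp:	bytes (not bits) per pixel, used to compute bytesperline
 */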
107 struct dcmi_format {
108 	u32	fourcc;
109 	u32	mbus_code;
110 	u8	bpp;
111 };
112 
113 struct dcmi_framesize {
114 	u32	width;
115 	u32	height;
116 };
117 
118 struct dcmi_buf {
119 	struct vb2_v4l2_buffer	vb;
120 	bool			prepared;
121 	dma_addr_t		paddr;
122 	size_t			size;
123 	struct list_head	list;
124 };
125 
126 struct stm32_dcmi {
	/* Protects the variables shared with the interrupt handler */
128 	spinlock_t			irqlock;
129 	struct device			*dev;
130 	void __iomem			*regs;
131 	struct resource			*res;
132 	struct reset_control		*rstc;
133 	int				sequence;
134 	struct list_head		buffers;
135 	struct dcmi_buf			*active;
136 
137 	struct v4l2_device		v4l2_dev;
138 	struct video_device		*vdev;
139 	struct v4l2_async_notifier	notifier;
140 	struct dcmi_graph_entity	entity;
141 	struct v4l2_format		fmt;
142 	struct v4l2_rect		crop;
143 	bool				do_crop;
144 
145 	const struct dcmi_format	**sd_formats;
146 	unsigned int			num_of_sd_formats;
147 	const struct dcmi_format	*sd_format;
148 	struct dcmi_framesize		*sd_framesizes;
149 	unsigned int			num_of_sd_framesizes;
150 	struct dcmi_framesize		sd_framesize;
151 	struct v4l2_rect		sd_bounds;
152 
153 	/* Protect this data structure */
154 	struct mutex			lock;
155 	struct vb2_queue		queue;
156 
157 	struct v4l2_fwnode_bus_parallel	bus;
158 	struct completion		complete;
159 	struct clk			*mclk;
160 	enum state			state;
161 	struct dma_chan			*dma_chan;
162 	dma_cookie_t			dma_cookie;
163 	u32				misr;
164 	int				errors_count;
165 	int				overrun_count;
166 	int				buffers_count;
167 };
168 
static inline struct stm32_dcmi *notifier_to_dcmi(struct v4l2_async_notifier *n)
170 {
171 	return container_of(n, struct stm32_dcmi, notifier);
172 }
173 
static inline u32 reg_read(void __iomem *base, u32 reg)
175 {
176 	return readl_relaxed(base + reg);
177 }
178 
static inline void reg_write(void __iomem *base, u32 reg, u32 val)
180 {
181 	writel_relaxed(val, base + reg);
182 }
183 
static inline void reg_set(void __iomem *base, u32 reg, u32 mask)
185 {
186 	reg_write(base, reg, reg_read(base, reg) | mask);
187 }
188 
static inline void reg_clear(void __iomem *base, u32 reg, u32 mask)
190 {
191 	reg_write(base, reg, reg_read(base, reg) & ~mask);
192 }
193 
194 static int dcmi_start_capture(struct stm32_dcmi *dcmi, struct dcmi_buf *buf);
195 
static void dcmi_buffer_done(struct stm32_dcmi *dcmi,
			     struct dcmi_buf *buf,
			     size_t bytesused,
			     int err)
200 {
201 	struct vb2_v4l2_buffer *vbuf;
202 
203 	if (!buf)
204 		return;
205 
206 	list_del_init(&buf->list);
207 
208 	vbuf = &buf->vb;
209 
210 	vbuf->sequence = dcmi->sequence++;
211 	vbuf->field = V4L2_FIELD_NONE;
212 	vbuf->vb2_buf.timestamp = ktime_get_ns();
213 	vb2_set_plane_payload(&vbuf->vb2_buf, 0, bytesused);
214 	vb2_buffer_done(&vbuf->vb2_buf,
215 			err ? VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
216 	dev_dbg(dcmi->dev, "buffer[%d] done seq=%d, bytesused=%zu\n",
217 		vbuf->vb2_buf.index, vbuf->sequence, bytesused);
218 
219 	dcmi->buffers_count++;
220 	dcmi->active = NULL;
221 }
222 
static int dcmi_restart_capture(struct stm32_dcmi *dcmi)
224 {
225 	struct dcmi_buf *buf;
226 
227 	spin_lock_irq(&dcmi->irqlock);
228 
229 	if (dcmi->state != RUNNING) {
230 		spin_unlock_irq(&dcmi->irqlock);
231 		return -EINVAL;
232 	}
233 
234 	/* Restart a new DMA transfer with next buffer */
235 	if (list_empty(&dcmi->buffers)) {
236 		dev_dbg(dcmi->dev, "Capture restart is deferred to next buffer queueing\n");
237 		dcmi->state = WAIT_FOR_BUFFER;
238 		spin_unlock_irq(&dcmi->irqlock);
239 		return 0;
240 	}
241 	buf = list_entry(dcmi->buffers.next, struct dcmi_buf, list);
242 	dcmi->active = buf;
243 
244 	spin_unlock_irq(&dcmi->irqlock);
245 
246 	return dcmi_start_capture(dcmi, buf);
247 }
248 
static void dcmi_dma_callback(void *param)
250 {
251 	struct stm32_dcmi *dcmi = (struct stm32_dcmi *)param;
252 	struct dma_tx_state state;
253 	enum dma_status status;
254 	struct dcmi_buf *buf = dcmi->active;
255 
256 	spin_lock_irq(&dcmi->irqlock);
257 
258 	/* Check DMA status */
259 	status = dmaengine_tx_status(dcmi->dma_chan, dcmi->dma_cookie, &state);
260 
261 	switch (status) {
262 	case DMA_IN_PROGRESS:
263 		dev_dbg(dcmi->dev, "%s: Received DMA_IN_PROGRESS\n", __func__);
264 		break;
265 	case DMA_PAUSED:
266 		dev_err(dcmi->dev, "%s: Received DMA_PAUSED\n", __func__);
267 		break;
268 	case DMA_ERROR:
269 		dev_err(dcmi->dev, "%s: Received DMA_ERROR\n", __func__);
270 
271 		/* Return buffer to V4L2 in error state */
272 		dcmi_buffer_done(dcmi, buf, 0, -EIO);
273 		break;
274 	case DMA_COMPLETE:
275 		dev_dbg(dcmi->dev, "%s: Received DMA_COMPLETE\n", __func__);
276 
277 		/* Return buffer to V4L2 */
278 		dcmi_buffer_done(dcmi, buf, buf->size, 0);
279 
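		/*
		 * Release the lock before restarting:
		 * dcmi_restart_capture() takes irqlock itself.
		 */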
280 		spin_unlock_irq(&dcmi->irqlock);
281 
282 		/* Restart capture */
283 		if (dcmi_restart_capture(dcmi))
284 			dev_err(dcmi->dev, "%s: Cannot restart capture on DMA complete\n",
285 				__func__);
286 		return;
287 	default:
288 		dev_err(dcmi->dev, "%s: Received unknown status\n", __func__);
289 		break;
290 	}
291 
292 	spin_unlock_irq(&dcmi->irqlock);
293 }
294 
static int dcmi_start_dma(struct stm32_dcmi *dcmi,
			  struct dcmi_buf *buf)
297 {
298 	struct dma_async_tx_descriptor *desc = NULL;
299 	struct dma_slave_config config;
300 	int ret;
301 
302 	memset(&config, 0, sizeof(config));
303 
304 	config.src_addr = (dma_addr_t)dcmi->res->start + DCMI_DR;
305 	config.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
306 	config.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
307 	config.dst_maxburst = 4;
308 
309 	/* Configure DMA channel */
310 	ret = dmaengine_slave_config(dcmi->dma_chan, &config);
311 	if (ret < 0) {
312 		dev_err(dcmi->dev, "%s: DMA channel config failed (%d)\n",
313 			__func__, ret);
314 		return ret;
315 	}
316 
317 	/* Prepare a DMA transaction */
318 	desc = dmaengine_prep_slave_single(dcmi->dma_chan, buf->paddr,
319 					   buf->size,
320 					   DMA_DEV_TO_MEM,
321 					   DMA_PREP_INTERRUPT);
322 	if (!desc) {
323 		dev_err(dcmi->dev, "%s: DMA dmaengine_prep_slave_single failed for buffer phy=%pad size=%zu\n",
324 			__func__, &buf->paddr, buf->size);
325 		return -EINVAL;
326 	}
327 
328 	/* Set completion callback routine for notification */
329 	desc->callback = dcmi_dma_callback;
330 	desc->callback_param = dcmi;
331 
332 	/* Push current DMA transaction in the pending queue */
333 	dcmi->dma_cookie = dmaengine_submit(desc);
334 	if (dma_submit_error(dcmi->dma_cookie)) {
335 		dev_err(dcmi->dev, "%s: DMA submission failed\n", __func__);
336 		return -ENXIO;
337 	}
338 
339 	dma_async_issue_pending(dcmi->dma_chan);
340 
341 	return 0;
342 }
343 
static int dcmi_start_capture(struct stm32_dcmi *dcmi, struct dcmi_buf *buf)
345 {
346 	int ret;
347 
348 	if (!buf)
349 		return -EINVAL;
350 
351 	ret = dcmi_start_dma(dcmi, buf);
352 	if (ret) {
353 		dcmi->errors_count++;
354 		return ret;
355 	}
356 
357 	/* Enable capture */
358 	reg_set(dcmi->regs, DCMI_CR, CR_CAPTURE);
359 
360 	return 0;
361 }
362 
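/*
 * Crop window register encoding, illustrated for a 640x480 crop at
 * (left=2, top=2). The "<< 1" doubling assumes two pixel clocks per
 * pixel, which holds for the 2-bytes-per-pixel formats handled by this
 * driver (JPEG capture disables cropping):
 *   DCMI_CWSIZE = ((480 - 1) << 16) | ((640 << 1) - 1) = 0x01df04ff
 *   DCMI_CWSTRT = (2 << 16) | (2 << 1)                 = 0x00020004
 */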
static void dcmi_set_crop(struct stm32_dcmi *dcmi)
364 {
365 	u32 size, start;
366 
367 	/* Crop resolution */
368 	size = ((dcmi->crop.height - 1) << 16) |
369 		((dcmi->crop.width << 1) - 1);
370 	reg_write(dcmi->regs, DCMI_CWSIZE, size);
371 
372 	/* Crop start point */
373 	start = ((dcmi->crop.top) << 16) |
374 		 ((dcmi->crop.left << 1));
375 	reg_write(dcmi->regs, DCMI_CWSTRT, start);
376 
377 	dev_dbg(dcmi->dev, "Cropping to %ux%u@%u:%u\n",
378 		dcmi->crop.width, dcmi->crop.height,
379 		dcmi->crop.left, dcmi->crop.top);
380 
381 	/* Enable crop */
382 	reg_set(dcmi->regs, DCMI_CR, CR_CROP);
383 }
384 
static void dcmi_process_jpeg(struct stm32_dcmi *dcmi)
386 {
387 	struct dma_tx_state state;
388 	enum dma_status status;
389 	struct dcmi_buf *buf = dcmi->active;
390 
391 	if (!buf)
392 		return;
393 
	/*
	 * The sensor sends JPEG frames of variable size, so the DMA
	 * transfer never completes: the programmed transfer length is
	 * never reached. To ensure that all the JPEG data have landed
	 * in the active buffer, the DMA channel is drained. The DMA tx
	 * status then reports the residue, i.e. the amount of data not
	 * transferred; the received JPEG size is the buffer size minus
	 * that residue, and it is returned to V4L2 as the active buffer
	 * payload. For example, a 512 KiB buffer with a 300 KiB residue
	 * means that roughly 212 KiB of JPEG data were captured.
	 */
403 
404 	/* Drain DMA */
405 	dmaengine_synchronize(dcmi->dma_chan);
406 
407 	/* Get DMA residue to get JPEG size */
408 	status = dmaengine_tx_status(dcmi->dma_chan, dcmi->dma_cookie, &state);
409 	if (status != DMA_ERROR && state.residue < buf->size) {
410 		/* Return JPEG buffer to V4L2 with received JPEG buffer size */
411 		dcmi_buffer_done(dcmi, buf, buf->size - state.residue, 0);
412 	} else {
413 		dcmi->errors_count++;
414 		dev_err(dcmi->dev, "%s: Cannot get JPEG size from DMA\n",
415 			__func__);
416 		/* Return JPEG buffer to V4L2 in ERROR state */
417 		dcmi_buffer_done(dcmi, buf, 0, -EIO);
418 	}
419 
420 	/* Abort DMA operation */
421 	dmaengine_terminate_all(dcmi->dma_chan);
422 
423 	/* Restart capture */
424 	if (dcmi_restart_capture(dcmi))
425 		dev_err(dcmi->dev, "%s: Cannot restart capture on JPEG received\n",
426 			__func__);
427 }
428 
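/*
 * Interrupt handling is split in two stages: the hard handler
 * (dcmi_irq_callback() below) latches DCMI_MIS into dcmi->misr, clears
 * the interrupt sources and returns IRQ_WAKE_THREAD; this threaded
 * handler then accounts errors/overruns and completes JPEG frames, a
 * path that may sleep (dmaengine_synchronize()).
 */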
static irqreturn_t dcmi_irq_thread(int irq, void *arg)
430 {
431 	struct stm32_dcmi *dcmi = arg;
432 
433 	spin_lock_irq(&dcmi->irqlock);
434 
435 	if ((dcmi->misr & IT_OVR) || (dcmi->misr & IT_ERR)) {
436 		dcmi->errors_count++;
437 		if (dcmi->misr & IT_OVR)
438 			dcmi->overrun_count++;
439 	}
440 
441 	if (dcmi->sd_format->fourcc == V4L2_PIX_FMT_JPEG &&
442 	    dcmi->misr & IT_FRAME) {
443 		/* JPEG received */
444 		spin_unlock_irq(&dcmi->irqlock);
445 		dcmi_process_jpeg(dcmi);
446 		return IRQ_HANDLED;
447 	}
448 
449 	spin_unlock_irq(&dcmi->irqlock);
450 	return IRQ_HANDLED;
451 }
452 
static irqreturn_t dcmi_irq_callback(int irq, void *arg)
454 {
455 	struct stm32_dcmi *dcmi = arg;
456 	unsigned long flags;
457 
458 	spin_lock_irqsave(&dcmi->irqlock, flags);
459 
460 	dcmi->misr = reg_read(dcmi->regs, DCMI_MIS);
461 
462 	/* Clear interrupt */
463 	reg_set(dcmi->regs, DCMI_ICR, IT_FRAME | IT_OVR | IT_ERR);
464 
465 	spin_unlock_irqrestore(&dcmi->irqlock, flags);
466 
467 	return IRQ_WAKE_THREAD;
468 }
469 
static int dcmi_queue_setup(struct vb2_queue *vq,
			    unsigned int *nbuffers,
			    unsigned int *nplanes,
			    unsigned int sizes[],
			    struct device *alloc_devs[])
475 {
476 	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
477 	unsigned int size;
478 
479 	size = dcmi->fmt.fmt.pix.sizeimage;
480 
481 	/* Make sure the image size is large enough */
482 	if (*nplanes)
483 		return sizes[0] < size ? -EINVAL : 0;
484 
485 	*nplanes = 1;
486 	sizes[0] = size;
487 
488 	dev_dbg(dcmi->dev, "Setup queue, count=%d, size=%d\n",
489 		*nbuffers, size);
490 
491 	return 0;
492 }
493 
static int dcmi_buf_init(struct vb2_buffer *vb)
495 {
496 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
497 	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);
498 
499 	INIT_LIST_HEAD(&buf->list);
500 
501 	return 0;
502 }
503 
static int dcmi_buf_prepare(struct vb2_buffer *vb)
505 {
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vb->vb2_queue);
507 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
508 	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);
509 	unsigned long size;
510 
511 	size = dcmi->fmt.fmt.pix.sizeimage;
512 
513 	if (vb2_plane_size(vb, 0) < size) {
514 		dev_err(dcmi->dev, "%s data will not fit into plane (%lu < %lu)\n",
515 			__func__, vb2_plane_size(vb, 0), size);
516 		return -EINVAL;
517 	}
518 
519 	vb2_set_plane_payload(vb, 0, size);
520 
521 	if (!buf->prepared) {
522 		/* Get memory addresses */
523 		buf->paddr =
524 			vb2_dma_contig_plane_dma_addr(&buf->vb.vb2_buf, 0);
525 		buf->size = vb2_plane_size(&buf->vb.vb2_buf, 0);
526 		buf->prepared = true;
527 
528 		vb2_set_plane_payload(&buf->vb.vb2_buf, 0, buf->size);
529 
530 		dev_dbg(dcmi->dev, "buffer[%d] phy=%pad size=%zu\n",
531 			vb->index, &buf->paddr, buf->size);
532 	}
533 
534 	return 0;
535 }
536 
static void dcmi_buf_queue(struct vb2_buffer *vb)
538 {
	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vb->vb2_queue);
540 	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
541 	struct dcmi_buf *buf = container_of(vbuf, struct dcmi_buf, vb);
542 
543 	spin_lock_irq(&dcmi->irqlock);
544 
545 	/* Enqueue to video buffers list */
546 	list_add_tail(&buf->list, &dcmi->buffers);
547 
548 	if (dcmi->state == WAIT_FOR_BUFFER) {
549 		dcmi->state = RUNNING;
550 		dcmi->active = buf;
551 
552 		dev_dbg(dcmi->dev, "Starting capture on buffer[%d] queued\n",
553 			buf->vb.vb2_buf.index);
554 
555 		spin_unlock_irq(&dcmi->irqlock);
556 		if (dcmi_start_capture(dcmi, buf))
			dev_err(dcmi->dev, "%s: Cannot start capture on queued buffer\n",
558 				__func__);
559 		return;
560 	}
561 
562 	spin_unlock_irq(&dcmi->irqlock);
563 }
564 
static int dcmi_start_streaming(struct vb2_queue *vq, unsigned int count)
566 {
567 	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
568 	struct dcmi_buf *buf, *node;
569 	u32 val = 0;
570 	int ret;
571 
572 	ret = pm_runtime_get_sync(dcmi->dev);
	if (ret < 0) {
574 		dev_err(dcmi->dev, "%s: Failed to start streaming, cannot get sync\n",
575 			__func__);
576 		goto err_release_buffers;
577 	}
578 
579 	/* Enable stream on the sub device */
580 	ret = v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 1);
581 	if (ret && ret != -ENOIOCTLCMD) {
		dev_err(dcmi->dev, "%s: Failed to start streaming, subdev streamon error\n",
583 			__func__);
584 		goto err_pm_put;
585 	}
586 
587 	spin_lock_irq(&dcmi->irqlock);
588 
589 	/* Set bus width */
590 	switch (dcmi->bus.bus_width) {
591 	case 14:
592 		val |= CR_EDM_0 | CR_EDM_1;
593 		break;
594 	case 12:
595 		val |= CR_EDM_1;
596 		break;
597 	case 10:
598 		val |= CR_EDM_0;
599 		break;
600 	default:
601 		/* Set bus width to 8 bits by default */
602 		break;
603 	}
604 
605 	/* Set vertical synchronization polarity */
606 	if (dcmi->bus.flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH)
607 		val |= CR_VSPOL;
608 
609 	/* Set horizontal synchronization polarity */
610 	if (dcmi->bus.flags & V4L2_MBUS_HSYNC_ACTIVE_HIGH)
611 		val |= CR_HSPOL;
612 
613 	/* Set pixel clock polarity */
614 	if (dcmi->bus.flags & V4L2_MBUS_PCLK_SAMPLE_RISING)
615 		val |= CR_PCKPOL;
616 
617 	reg_write(dcmi->regs, DCMI_CR, val);
618 
619 	/* Set crop */
620 	if (dcmi->do_crop)
621 		dcmi_set_crop(dcmi);
622 
623 	/* Enable jpeg capture */
624 	if (dcmi->sd_format->fourcc == V4L2_PIX_FMT_JPEG)
		reg_set(dcmi->regs, DCMI_CR, CR_CM); /* Snapshot mode */
626 
627 	/* Enable dcmi */
628 	reg_set(dcmi->regs, DCMI_CR, CR_ENABLE);
629 
630 	dcmi->sequence = 0;
631 	dcmi->errors_count = 0;
632 	dcmi->overrun_count = 0;
633 	dcmi->buffers_count = 0;
634 
	/*
	 * Start the transfer if at least one buffer has been queued,
	 * otherwise the transfer is deferred until a buffer is queued
	 */
639 	if (list_empty(&dcmi->buffers)) {
640 		dev_dbg(dcmi->dev, "Start streaming is deferred to next buffer queueing\n");
641 		dcmi->state = WAIT_FOR_BUFFER;
642 		spin_unlock_irq(&dcmi->irqlock);
643 		return 0;
644 	}
645 
646 	buf = list_entry(dcmi->buffers.next, struct dcmi_buf, list);
647 	dcmi->active = buf;
648 
649 	dcmi->state = RUNNING;
650 
651 	dev_dbg(dcmi->dev, "Start streaming, starting capture\n");
652 
653 	spin_unlock_irq(&dcmi->irqlock);
654 	ret = dcmi_start_capture(dcmi, buf);
655 	if (ret) {
656 		dev_err(dcmi->dev, "%s: Start streaming failed, cannot start capture\n",
657 			__func__);
658 		goto err_subdev_streamoff;
659 	}
660 
	/* Enable interrupts */
662 	reg_set(dcmi->regs, DCMI_IER, IT_FRAME | IT_OVR | IT_ERR);
663 
664 	return 0;
665 
666 err_subdev_streamoff:
667 	v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 0);
668 
669 err_pm_put:
670 	pm_runtime_put(dcmi->dev);
671 
672 err_release_buffers:
673 	spin_lock_irq(&dcmi->irqlock);
674 	/*
675 	 * Return all buffers to vb2 in QUEUED state.
676 	 * This will give ownership back to userspace
677 	 */
678 	list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
679 		list_del_init(&buf->list);
680 		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_QUEUED);
681 	}
682 	dcmi->active = NULL;
683 	spin_unlock_irq(&dcmi->irqlock);
684 
685 	return ret;
686 }
687 
static void dcmi_stop_streaming(struct vb2_queue *vq)
689 {
690 	struct stm32_dcmi *dcmi = vb2_get_drv_priv(vq);
691 	struct dcmi_buf *buf, *node;
692 	int ret;
693 
694 	/* Disable stream on the sub device */
695 	ret = v4l2_subdev_call(dcmi->entity.subdev, video, s_stream, 0);
696 	if (ret && ret != -ENOIOCTLCMD)
697 		dev_err(dcmi->dev, "%s: Failed to stop streaming, subdev streamoff error (%d)\n",
698 			__func__, ret);
699 
700 	spin_lock_irq(&dcmi->irqlock);
701 
	/* Disable interrupts */
703 	reg_clear(dcmi->regs, DCMI_IER, IT_FRAME | IT_OVR | IT_ERR);
704 
705 	/* Disable DCMI */
706 	reg_clear(dcmi->regs, DCMI_CR, CR_ENABLE);
707 
708 	/* Return all queued buffers to vb2 in ERROR state */
709 	list_for_each_entry_safe(buf, node, &dcmi->buffers, list) {
710 		list_del_init(&buf->list);
711 		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
712 	}
713 
714 	dcmi->active = NULL;
715 	dcmi->state = STOPPED;
716 
717 	spin_unlock_irq(&dcmi->irqlock);
718 
719 	/* Stop all pending DMA operations */
720 	dmaengine_terminate_all(dcmi->dma_chan);
721 
722 	pm_runtime_put(dcmi->dev);
723 
724 	if (dcmi->errors_count)
725 		dev_warn(dcmi->dev, "Some errors found while streaming: errors=%d (overrun=%d), buffers=%d\n",
726 			 dcmi->errors_count, dcmi->overrun_count,
727 			 dcmi->buffers_count);
728 	dev_dbg(dcmi->dev, "Stop streaming, errors=%d (overrun=%d), buffers=%d\n",
729 		dcmi->errors_count, dcmi->overrun_count,
730 		dcmi->buffers_count);
731 }
732 
733 static const struct vb2_ops dcmi_video_qops = {
734 	.queue_setup		= dcmi_queue_setup,
735 	.buf_init		= dcmi_buf_init,
736 	.buf_prepare		= dcmi_buf_prepare,
737 	.buf_queue		= dcmi_buf_queue,
738 	.start_streaming	= dcmi_start_streaming,
739 	.stop_streaming		= dcmi_stop_streaming,
740 	.wait_prepare		= vb2_ops_wait_prepare,
741 	.wait_finish		= vb2_ops_wait_finish,
742 };
743 
static int dcmi_g_fmt_vid_cap(struct file *file, void *priv,
			      struct v4l2_format *fmt)
746 {
747 	struct stm32_dcmi *dcmi = video_drvdata(file);
748 
749 	*fmt = dcmi->fmt;
750 
751 	return 0;
752 }
753 
static const struct dcmi_format *find_format_by_fourcc(struct stm32_dcmi *dcmi,
						       unsigned int fourcc)
756 {
757 	unsigned int num_formats = dcmi->num_of_sd_formats;
758 	const struct dcmi_format *fmt;
759 	unsigned int i;
760 
761 	for (i = 0; i < num_formats; i++) {
762 		fmt = dcmi->sd_formats[i];
763 		if (fmt->fourcc == fourcc)
764 			return fmt;
765 	}
766 
767 	return NULL;
768 }
769 
static void __find_outer_frame_size(struct stm32_dcmi *dcmi,
				    struct v4l2_pix_format *pix,
				    struct dcmi_framesize *framesize)
773 {
774 	struct dcmi_framesize *match = NULL;
775 	unsigned int i;
776 	unsigned int min_err = UINT_MAX;
777 
778 	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
779 		struct dcmi_framesize *fsize = &dcmi->sd_framesizes[i];
780 		int w_err = (fsize->width - pix->width);
781 		int h_err = (fsize->height - pix->height);
782 		int err = w_err + h_err;
783 
784 		if (w_err >= 0 && h_err >= 0 && err < min_err) {
785 			min_err = err;
786 			match = fsize;
787 		}
788 	}
789 	if (!match)
790 		match = &dcmi->sd_framesizes[0];
791 
792 	*framesize = *match;
793 }
794 
static int dcmi_try_fmt(struct stm32_dcmi *dcmi, struct v4l2_format *f,
			const struct dcmi_format **sd_format,
			struct dcmi_framesize *sd_framesize)
798 {
799 	const struct dcmi_format *sd_fmt;
800 	struct dcmi_framesize sd_fsize;
801 	struct v4l2_pix_format *pix = &f->fmt.pix;
802 	struct v4l2_subdev_pad_config pad_cfg;
803 	struct v4l2_subdev_format format = {
804 		.which = V4L2_SUBDEV_FORMAT_TRY,
805 	};
806 	bool do_crop;
807 	int ret;
808 
809 	sd_fmt = find_format_by_fourcc(dcmi, pix->pixelformat);
810 	if (!sd_fmt) {
811 		sd_fmt = dcmi->sd_formats[dcmi->num_of_sd_formats - 1];
812 		pix->pixelformat = sd_fmt->fourcc;
813 	}
814 
815 	/* Limit to hardware capabilities */
816 	pix->width = clamp(pix->width, MIN_WIDTH, MAX_WIDTH);
817 	pix->height = clamp(pix->height, MIN_HEIGHT, MAX_HEIGHT);
818 
819 	/* No crop if JPEG is requested */
820 	do_crop = dcmi->do_crop && (pix->pixelformat != V4L2_PIX_FMT_JPEG);
821 
822 	if (do_crop && dcmi->num_of_sd_framesizes) {
823 		struct dcmi_framesize outer_sd_fsize;
		/*
		 * If crop is requested and the sensor has discrete frame
		 * sizes, select the frame size that is just larger than
		 * the requested resolution
		 */
828 		__find_outer_frame_size(dcmi, pix, &outer_sd_fsize);
829 		pix->width = outer_sd_fsize.width;
830 		pix->height = outer_sd_fsize.height;
831 	}
832 
833 	v4l2_fill_mbus_format(&format.format, pix, sd_fmt->mbus_code);
834 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, set_fmt,
835 			       &pad_cfg, &format);
836 	if (ret < 0)
837 		return ret;
838 
	/* Update pix according to what the sensor can do */
840 	v4l2_fill_pix_format(pix, &format.format);
841 
842 	/* Save resolution that sensor can actually do */
843 	sd_fsize.width = pix->width;
844 	sd_fsize.height = pix->height;
845 
846 	if (do_crop) {
847 		struct v4l2_rect c = dcmi->crop;
848 		struct v4l2_rect max_rect;
849 
850 		/*
851 		 * Adjust crop by making the intersection between
852 		 * format resolution request and crop request
853 		 */
854 		max_rect.top = 0;
855 		max_rect.left = 0;
856 		max_rect.width = pix->width;
857 		max_rect.height = pix->height;
858 		v4l2_rect_map_inside(&c, &max_rect);
859 		c.top  = clamp_t(s32, c.top, 0, pix->height - c.height);
860 		c.left = clamp_t(s32, c.left, 0, pix->width - c.width);
861 		dcmi->crop = c;
862 
863 		/* Adjust format resolution request to crop */
864 		pix->width = dcmi->crop.width;
865 		pix->height = dcmi->crop.height;
866 	}
867 
868 	pix->field = V4L2_FIELD_NONE;
869 	pix->bytesperline = pix->width * sd_fmt->bpp;
870 	pix->sizeimage = pix->bytesperline * pix->height;
871 
872 	if (sd_format)
873 		*sd_format = sd_fmt;
874 	if (sd_framesize)
875 		*sd_framesize = sd_fsize;
876 
877 	return 0;
878 }
879 
static int dcmi_set_fmt(struct stm32_dcmi *dcmi, struct v4l2_format *f)
881 {
882 	struct v4l2_subdev_format format = {
883 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
884 	};
885 	const struct dcmi_format *sd_format;
886 	struct dcmi_framesize sd_framesize;
887 	struct v4l2_mbus_framefmt *mf = &format.format;
888 	struct v4l2_pix_format *pix = &f->fmt.pix;
889 	int ret;
890 
891 	/*
892 	 * Try format, fmt.width/height could have been changed
893 	 * to match sensor capability or crop request
894 	 * sd_format & sd_framesize will contain what subdev
895 	 * can do for this request.
896 	 */
897 	ret = dcmi_try_fmt(dcmi, f, &sd_format, &sd_framesize);
898 	if (ret)
899 		return ret;
900 
901 	/* Disable crop if JPEG is requested */
902 	if (pix->pixelformat == V4L2_PIX_FMT_JPEG)
903 		dcmi->do_crop = false;
904 
905 	/* pix to mbus format */
906 	v4l2_fill_mbus_format(mf, pix,
907 			      sd_format->mbus_code);
908 	mf->width = sd_framesize.width;
909 	mf->height = sd_framesize.height;
910 
911 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad,
912 			       set_fmt, NULL, &format);
913 	if (ret < 0)
914 		return ret;
915 
916 	dev_dbg(dcmi->dev, "Sensor format set to 0x%x %ux%u\n",
917 		mf->code, mf->width, mf->height);
918 	dev_dbg(dcmi->dev, "Buffer format set to %4.4s %ux%u\n",
919 		(char *)&pix->pixelformat,
920 		pix->width, pix->height);
921 
922 	dcmi->fmt = *f;
923 	dcmi->sd_format = sd_format;
924 	dcmi->sd_framesize = sd_framesize;
925 
926 	return 0;
927 }
928 
static int dcmi_s_fmt_vid_cap(struct file *file, void *priv,
			      struct v4l2_format *f)
931 {
932 	struct stm32_dcmi *dcmi = video_drvdata(file);
933 
934 	if (vb2_is_streaming(&dcmi->queue))
935 		return -EBUSY;
936 
937 	return dcmi_set_fmt(dcmi, f);
938 }
939 
static int dcmi_try_fmt_vid_cap(struct file *file, void *priv,
				struct v4l2_format *f)
942 {
943 	struct stm32_dcmi *dcmi = video_drvdata(file);
944 
945 	return dcmi_try_fmt(dcmi, f, NULL, NULL);
946 }
947 
static int dcmi_enum_fmt_vid_cap(struct file *file, void *priv,
				 struct v4l2_fmtdesc *f)
950 {
951 	struct stm32_dcmi *dcmi = video_drvdata(file);
952 
953 	if (f->index >= dcmi->num_of_sd_formats)
954 		return -EINVAL;
955 
956 	f->pixelformat = dcmi->sd_formats[f->index]->fourcc;
957 	return 0;
958 }
959 
static int dcmi_get_sensor_format(struct stm32_dcmi *dcmi,
				  struct v4l2_pix_format *pix)
962 {
963 	struct v4l2_subdev_format fmt = {
964 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
965 	};
966 	int ret;
967 
968 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, get_fmt, NULL, &fmt);
969 	if (ret)
970 		return ret;
971 
972 	v4l2_fill_pix_format(pix, &fmt.format);
973 
974 	return 0;
975 }
976 
static int dcmi_set_sensor_format(struct stm32_dcmi *dcmi,
				  struct v4l2_pix_format *pix)
979 {
980 	const struct dcmi_format *sd_fmt;
981 	struct v4l2_subdev_format format = {
982 		.which = V4L2_SUBDEV_FORMAT_TRY,
983 	};
984 	struct v4l2_subdev_pad_config pad_cfg;
985 	int ret;
986 
987 	sd_fmt = find_format_by_fourcc(dcmi, pix->pixelformat);
988 	if (!sd_fmt) {
989 		sd_fmt = dcmi->sd_formats[dcmi->num_of_sd_formats - 1];
990 		pix->pixelformat = sd_fmt->fourcc;
991 	}
992 
993 	v4l2_fill_mbus_format(&format.format, pix, sd_fmt->mbus_code);
994 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, set_fmt,
995 			       &pad_cfg, &format);
996 	if (ret < 0)
997 		return ret;
998 
999 	return 0;
1000 }
1001 
static int dcmi_get_sensor_bounds(struct stm32_dcmi *dcmi,
				  struct v4l2_rect *r)
1004 {
1005 	struct v4l2_subdev_selection bounds = {
1006 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1007 		.target = V4L2_SEL_TGT_CROP_BOUNDS,
1008 	};
1009 	unsigned int max_width, max_height, max_pixsize;
1010 	struct v4l2_pix_format pix;
1011 	unsigned int i;
1012 	int ret;
1013 
1014 	/*
1015 	 * Get sensor bounds first
1016 	 */
1017 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, get_selection,
1018 			       NULL, &bounds);
1019 	if (!ret)
1020 		*r = bounds.r;
1021 	if (ret != -ENOIOCTLCMD)
1022 		return ret;
1023 
	/*
	 * If selection is not implemented,
	 * fall back to enumerating the sensor frame sizes
	 * and take the largest one
	 */
1029 	max_width = 0;
1030 	max_height = 0;
1031 	max_pixsize = 0;
1032 	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
1033 		struct dcmi_framesize *fsize = &dcmi->sd_framesizes[i];
1034 		unsigned int pixsize = fsize->width * fsize->height;
1035 
1036 		if (pixsize > max_pixsize) {
1037 			max_pixsize = pixsize;
1038 			max_width = fsize->width;
1039 			max_height = fsize->height;
1040 		}
1041 	}
1042 	if (max_pixsize > 0) {
1043 		r->top = 0;
1044 		r->left = 0;
1045 		r->width = max_width;
1046 		r->height = max_height;
1047 		return 0;
1048 	}
1049 
	/*
	 * If frame size enumeration is not implemented,
	 * fall back to the current sensor frame size
	 */
1054 	ret = dcmi_get_sensor_format(dcmi, &pix);
1055 	if (ret)
1056 		return ret;
1057 
1058 	r->top = 0;
1059 	r->left = 0;
1060 	r->width = pix.width;
1061 	r->height = pix.height;
1062 
1063 	return 0;
1064 }
1065 
static int dcmi_g_selection(struct file *file, void *fh,
			    struct v4l2_selection *s)
1068 {
1069 	struct stm32_dcmi *dcmi = video_drvdata(file);
1070 
1071 	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1072 		return -EINVAL;
1073 
1074 	switch (s->target) {
1075 	case V4L2_SEL_TGT_CROP_DEFAULT:
1076 	case V4L2_SEL_TGT_CROP_BOUNDS:
1077 		s->r = dcmi->sd_bounds;
1078 		return 0;
1079 	case V4L2_SEL_TGT_CROP:
1080 		if (dcmi->do_crop) {
1081 			s->r = dcmi->crop;
1082 		} else {
1083 			s->r.top = 0;
1084 			s->r.left = 0;
1085 			s->r.width = dcmi->fmt.fmt.pix.width;
1086 			s->r.height = dcmi->fmt.fmt.pix.height;
1087 		}
1088 		break;
1089 	default:
1090 		return -EINVAL;
1091 	}
1092 
1093 	return 0;
1094 }
1095 
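/*
 * Illustrative userspace usage (not part of the driver) of the
 * selection API implemented by dcmi_g_selection()/dcmi_s_selection(),
 * e.g. to request a 640x480 crop at (0,0) before streaming:
 *
 *	struct v4l2_selection sel = {
 *		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
 *		.target = V4L2_SEL_TGT_CROP,
 *		.r = { .left = 0, .top = 0, .width = 640, .height = 480 },
 *	};
 *	ioctl(fd, VIDIOC_S_SELECTION, &sel);
 *
 * The adjusted rectangle is returned in sel.r.
 */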
static int dcmi_s_selection(struct file *file, void *priv,
			    struct v4l2_selection *s)
1098 {
1099 	struct stm32_dcmi *dcmi = video_drvdata(file);
1100 	struct v4l2_rect r = s->r;
1101 	struct v4l2_rect max_rect;
1102 	struct v4l2_pix_format pix;
1103 
1104 	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
1105 	    s->target != V4L2_SEL_TGT_CROP)
1106 		return -EINVAL;
1107 
1108 	/* Reset sensor resolution to max resolution */
1109 	pix.pixelformat = dcmi->fmt.fmt.pix.pixelformat;
1110 	pix.width = dcmi->sd_bounds.width;
1111 	pix.height = dcmi->sd_bounds.height;
1112 	dcmi_set_sensor_format(dcmi, &pix);
1113 
	/*
	 * Make the intersection between the sensor resolution
	 * and the crop request
	 */
1119 	max_rect.top = 0;
1120 	max_rect.left = 0;
1121 	max_rect.width = pix.width;
1122 	max_rect.height = pix.height;
1123 	v4l2_rect_map_inside(&r, &max_rect);
1124 	r.top  = clamp_t(s32, r.top, 0, pix.height - r.height);
1125 	r.left = clamp_t(s32, r.left, 0, pix.width - r.width);
1126 
1127 	if (!(r.top == dcmi->sd_bounds.top &&
1128 	      r.left == dcmi->sd_bounds.left &&
1129 	      r.width == dcmi->sd_bounds.width &&
1130 	      r.height == dcmi->sd_bounds.height)) {
		/* Crop if the request differs from the sensor resolution */
1132 		dcmi->do_crop = true;
1133 		dcmi->crop = r;
1134 		dev_dbg(dcmi->dev, "s_selection: crop %ux%u@(%u,%u) from %ux%u\n",
1135 			r.width, r.height, r.left, r.top,
1136 			pix.width, pix.height);
1137 	} else {
1138 		/* Disable crop */
1139 		dcmi->do_crop = false;
1140 		dev_dbg(dcmi->dev, "s_selection: crop is disabled\n");
1141 	}
1142 
1143 	s->r = r;
1144 	return 0;
1145 }
1146 
static int dcmi_querycap(struct file *file, void *priv,
			 struct v4l2_capability *cap)
1149 {
1150 	strlcpy(cap->driver, DRV_NAME, sizeof(cap->driver));
1151 	strlcpy(cap->card, "STM32 Camera Memory Interface",
1152 		sizeof(cap->card));
1153 	strlcpy(cap->bus_info, "platform:dcmi", sizeof(cap->bus_info));
1154 	return 0;
1155 }
1156 
static int dcmi_enum_input(struct file *file, void *priv,
			   struct v4l2_input *i)
1159 {
1160 	if (i->index != 0)
1161 		return -EINVAL;
1162 
1163 	i->type = V4L2_INPUT_TYPE_CAMERA;
1164 	strlcpy(i->name, "Camera", sizeof(i->name));
1165 	return 0;
1166 }
1167 
static int dcmi_g_input(struct file *file, void *priv, unsigned int *i)
1169 {
1170 	*i = 0;
1171 	return 0;
1172 }
1173 
static int dcmi_s_input(struct file *file, void *priv, unsigned int i)
1175 {
1176 	if (i > 0)
1177 		return -EINVAL;
1178 	return 0;
1179 }
1180 
static int dcmi_enum_framesizes(struct file *file, void *fh,
				struct v4l2_frmsizeenum *fsize)
1183 {
1184 	struct stm32_dcmi *dcmi = video_drvdata(file);
1185 	const struct dcmi_format *sd_fmt;
1186 	struct v4l2_subdev_frame_size_enum fse = {
1187 		.index = fsize->index,
1188 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1189 	};
1190 	int ret;
1191 
1192 	sd_fmt = find_format_by_fourcc(dcmi, fsize->pixel_format);
1193 	if (!sd_fmt)
1194 		return -EINVAL;
1195 
1196 	fse.code = sd_fmt->mbus_code;
1197 
1198 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad, enum_frame_size,
1199 			       NULL, &fse);
1200 	if (ret)
1201 		return ret;
1202 
1203 	fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1204 	fsize->discrete.width = fse.max_width;
1205 	fsize->discrete.height = fse.max_height;
1206 
1207 	return 0;
1208 }
1209 
static int dcmi_g_parm(struct file *file, void *priv,
		       struct v4l2_streamparm *p)
1212 {
1213 	struct stm32_dcmi *dcmi = video_drvdata(file);
1214 
1215 	return v4l2_g_parm_cap(video_devdata(file), dcmi->entity.subdev, p);
1216 }
1217 
static int dcmi_s_parm(struct file *file, void *priv,
		       struct v4l2_streamparm *p)
1220 {
1221 	struct stm32_dcmi *dcmi = video_drvdata(file);
1222 
1223 	return v4l2_s_parm_cap(video_devdata(file), dcmi->entity.subdev, p);
1224 }
1225 
static int dcmi_enum_frameintervals(struct file *file, void *fh,
				    struct v4l2_frmivalenum *fival)
1228 {
1229 	struct stm32_dcmi *dcmi = video_drvdata(file);
1230 	const struct dcmi_format *sd_fmt;
1231 	struct v4l2_subdev_frame_interval_enum fie = {
1232 		.index = fival->index,
1233 		.width = fival->width,
1234 		.height = fival->height,
1235 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1236 	};
1237 	int ret;
1238 
1239 	sd_fmt = find_format_by_fourcc(dcmi, fival->pixel_format);
1240 	if (!sd_fmt)
1241 		return -EINVAL;
1242 
1243 	fie.code = sd_fmt->mbus_code;
1244 
1245 	ret = v4l2_subdev_call(dcmi->entity.subdev, pad,
1246 			       enum_frame_interval, NULL, &fie);
1247 	if (ret)
1248 		return ret;
1249 
1250 	fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1251 	fival->discrete = fie.interval;
1252 
1253 	return 0;
1254 }
1255 
1256 static const struct of_device_id stm32_dcmi_of_match[] = {
1257 	{ .compatible = "st,stm32-dcmi"},
1258 	{ /* end node */ },
1259 };
1260 MODULE_DEVICE_TABLE(of, stm32_dcmi_of_match);
1261 
static int dcmi_open(struct file *file)
1263 {
1264 	struct stm32_dcmi *dcmi = video_drvdata(file);
1265 	struct v4l2_subdev *sd = dcmi->entity.subdev;
1266 	int ret;
1267 
1268 	if (mutex_lock_interruptible(&dcmi->lock))
1269 		return -ERESTARTSYS;
1270 
1271 	ret = v4l2_fh_open(file);
1272 	if (ret < 0)
1273 		goto unlock;
1274 
1275 	if (!v4l2_fh_is_singular_file(file))
1276 		goto fh_rel;
1277 
1278 	ret = v4l2_subdev_call(sd, core, s_power, 1);
1279 	if (ret < 0 && ret != -ENOIOCTLCMD)
1280 		goto fh_rel;
1281 
1282 	ret = dcmi_set_fmt(dcmi, &dcmi->fmt);
1283 	if (ret)
1284 		v4l2_subdev_call(sd, core, s_power, 0);
1285 fh_rel:
1286 	if (ret)
1287 		v4l2_fh_release(file);
1288 unlock:
1289 	mutex_unlock(&dcmi->lock);
1290 	return ret;
1291 }
1292 
static int dcmi_release(struct file *file)
1294 {
1295 	struct stm32_dcmi *dcmi = video_drvdata(file);
1296 	struct v4l2_subdev *sd = dcmi->entity.subdev;
1297 	bool fh_singular;
1298 	int ret;
1299 
1300 	mutex_lock(&dcmi->lock);
1301 
1302 	fh_singular = v4l2_fh_is_singular_file(file);
1303 
1304 	ret = _vb2_fop_release(file, NULL);
1305 
1306 	if (fh_singular)
1307 		v4l2_subdev_call(sd, core, s_power, 0);
1308 
1309 	mutex_unlock(&dcmi->lock);
1310 
1311 	return ret;
1312 }
1313 
1314 static const struct v4l2_ioctl_ops dcmi_ioctl_ops = {
1315 	.vidioc_querycap		= dcmi_querycap,
1316 
1317 	.vidioc_try_fmt_vid_cap		= dcmi_try_fmt_vid_cap,
1318 	.vidioc_g_fmt_vid_cap		= dcmi_g_fmt_vid_cap,
1319 	.vidioc_s_fmt_vid_cap		= dcmi_s_fmt_vid_cap,
1320 	.vidioc_enum_fmt_vid_cap	= dcmi_enum_fmt_vid_cap,
1321 	.vidioc_g_selection		= dcmi_g_selection,
1322 	.vidioc_s_selection		= dcmi_s_selection,
1323 
1324 	.vidioc_enum_input		= dcmi_enum_input,
1325 	.vidioc_g_input			= dcmi_g_input,
1326 	.vidioc_s_input			= dcmi_s_input,
1327 
1328 	.vidioc_g_parm			= dcmi_g_parm,
1329 	.vidioc_s_parm			= dcmi_s_parm,
1330 
1331 	.vidioc_enum_framesizes		= dcmi_enum_framesizes,
1332 	.vidioc_enum_frameintervals	= dcmi_enum_frameintervals,
1333 
1334 	.vidioc_reqbufs			= vb2_ioctl_reqbufs,
1335 	.vidioc_create_bufs		= vb2_ioctl_create_bufs,
1336 	.vidioc_querybuf		= vb2_ioctl_querybuf,
1337 	.vidioc_qbuf			= vb2_ioctl_qbuf,
1338 	.vidioc_dqbuf			= vb2_ioctl_dqbuf,
1339 	.vidioc_expbuf			= vb2_ioctl_expbuf,
1340 	.vidioc_prepare_buf		= vb2_ioctl_prepare_buf,
1341 	.vidioc_streamon		= vb2_ioctl_streamon,
1342 	.vidioc_streamoff		= vb2_ioctl_streamoff,
1343 
1344 	.vidioc_log_status		= v4l2_ctrl_log_status,
1345 	.vidioc_subscribe_event		= v4l2_ctrl_subscribe_event,
1346 	.vidioc_unsubscribe_event	= v4l2_event_unsubscribe,
1347 };
1348 
1349 static const struct v4l2_file_operations dcmi_fops = {
1350 	.owner		= THIS_MODULE,
1351 	.unlocked_ioctl	= video_ioctl2,
1352 	.open		= dcmi_open,
1353 	.release	= dcmi_release,
1354 	.poll		= vb2_fop_poll,
1355 	.mmap		= vb2_fop_mmap,
1356 #ifndef CONFIG_MMU
1357 	.get_unmapped_area = vb2_fop_get_unmapped_area,
1358 #endif
1359 	.read		= vb2_fop_read,
1360 };
1361 
static int dcmi_set_default_fmt(struct stm32_dcmi *dcmi)
1363 {
1364 	struct v4l2_format f = {
1365 		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
1366 		.fmt.pix = {
1367 			.width		= CIF_WIDTH,
1368 			.height		= CIF_HEIGHT,
1369 			.field		= V4L2_FIELD_NONE,
1370 			.pixelformat	= dcmi->sd_formats[0]->fourcc,
1371 		},
1372 	};
1373 	int ret;
1374 
1375 	ret = dcmi_try_fmt(dcmi, &f, NULL, NULL);
1376 	if (ret)
1377 		return ret;
1378 	dcmi->sd_format = dcmi->sd_formats[0];
1379 	dcmi->fmt = f;
1380 	return 0;
1381 }
1382 
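/*
 * Pixel formats supported by the DCMI host side. Only the entries
 * whose media bus code is also reported by the sensor are retained in
 * sd_formats[] (see dcmi_formats_init()); sd_formats[0] then becomes
 * the default format, and the last retained entry is used as the
 * fallback when an unsupported fourcc is requested (see dcmi_try_fmt()).
 */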
1383 static const struct dcmi_format dcmi_formats[] = {
1384 	{
1385 		.fourcc = V4L2_PIX_FMT_RGB565,
1386 		.mbus_code = MEDIA_BUS_FMT_RGB565_2X8_LE,
1387 		.bpp = 2,
1388 	}, {
1389 		.fourcc = V4L2_PIX_FMT_YUYV,
1390 		.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8,
1391 		.bpp = 2,
1392 	}, {
1393 		.fourcc = V4L2_PIX_FMT_UYVY,
1394 		.mbus_code = MEDIA_BUS_FMT_UYVY8_2X8,
1395 		.bpp = 2,
1396 	}, {
1397 		.fourcc = V4L2_PIX_FMT_JPEG,
1398 		.mbus_code = MEDIA_BUS_FMT_JPEG_1X8,
1399 		.bpp = 1,
1400 	},
1401 };
1402 
static int dcmi_formats_init(struct stm32_dcmi *dcmi)
1404 {
1405 	const struct dcmi_format *sd_fmts[ARRAY_SIZE(dcmi_formats)];
1406 	unsigned int num_fmts = 0, i, j;
1407 	struct v4l2_subdev *subdev = dcmi->entity.subdev;
1408 	struct v4l2_subdev_mbus_code_enum mbus_code = {
1409 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1410 	};
1411 
1412 	while (!v4l2_subdev_call(subdev, pad, enum_mbus_code,
1413 				 NULL, &mbus_code)) {
1414 		for (i = 0; i < ARRAY_SIZE(dcmi_formats); i++) {
1415 			if (dcmi_formats[i].mbus_code != mbus_code.code)
1416 				continue;
1417 
1418 			/* Code supported, have we got this fourcc yet? */
1419 			for (j = 0; j < num_fmts; j++)
1420 				if (sd_fmts[j]->fourcc ==
1421 						dcmi_formats[i].fourcc)
1422 					/* Already available */
1423 					break;
1424 			if (j == num_fmts)
1425 				/* New */
1426 				sd_fmts[num_fmts++] = dcmi_formats + i;
1427 		}
1428 		mbus_code.index++;
1429 	}
1430 
1431 	if (!num_fmts)
1432 		return -ENXIO;
1433 
1434 	dcmi->num_of_sd_formats = num_fmts;
1435 	dcmi->sd_formats = devm_kcalloc(dcmi->dev,
1436 					num_fmts, sizeof(struct dcmi_format *),
1437 					GFP_KERNEL);
1438 	if (!dcmi->sd_formats) {
1439 		dev_err(dcmi->dev, "Could not allocate memory\n");
1440 		return -ENOMEM;
1441 	}
1442 
1443 	memcpy(dcmi->sd_formats, sd_fmts,
1444 	       num_fmts * sizeof(struct dcmi_format *));
1445 	dcmi->sd_format = dcmi->sd_formats[0];
1446 
1447 	return 0;
1448 }
1449 
static int dcmi_framesizes_init(struct stm32_dcmi *dcmi)
1451 {
1452 	unsigned int num_fsize = 0;
1453 	struct v4l2_subdev *subdev = dcmi->entity.subdev;
1454 	struct v4l2_subdev_frame_size_enum fse = {
1455 		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
1456 		.code = dcmi->sd_format->mbus_code,
1457 	};
	int ret;
1459 	unsigned int i;
1460 
	/* Count the discrete frame sizes supported by the sensor */
1462 	while (!v4l2_subdev_call(subdev, pad, enum_frame_size,
1463 				 NULL, &fse))
1464 		fse.index++;
1465 
1466 	num_fsize = fse.index;
1467 	if (!num_fsize)
1468 		return 0;
1469 
1470 	dcmi->num_of_sd_framesizes = num_fsize;
1471 	dcmi->sd_framesizes = devm_kcalloc(dcmi->dev, num_fsize,
1472 					   sizeof(struct dcmi_framesize),
1473 					   GFP_KERNEL);
1474 	if (!dcmi->sd_framesizes) {
1475 		dev_err(dcmi->dev, "Could not allocate memory\n");
1476 		return -ENOMEM;
1477 	}
1478 
1479 	/* Fill array with sensor supported framesizes */
1480 	dev_dbg(dcmi->dev, "Sensor supports %u frame sizes:\n", num_fsize);
1481 	for (i = 0; i < dcmi->num_of_sd_framesizes; i++) {
1482 		fse.index = i;
1483 		ret = v4l2_subdev_call(subdev, pad, enum_frame_size,
1484 				       NULL, &fse);
1485 		if (ret)
1486 			return ret;
1487 		dcmi->sd_framesizes[fse.index].width = fse.max_width;
1488 		dcmi->sd_framesizes[fse.index].height = fse.max_height;
1489 		dev_dbg(dcmi->dev, "%ux%u\n", fse.max_width, fse.max_height);
1490 	}
1491 
1492 	return 0;
1493 }
1494 
static int dcmi_graph_notify_complete(struct v4l2_async_notifier *notifier)
1496 {
1497 	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);
1498 	int ret;
1499 
1500 	dcmi->vdev->ctrl_handler = dcmi->entity.subdev->ctrl_handler;
1501 	ret = dcmi_formats_init(dcmi);
1502 	if (ret) {
1503 		dev_err(dcmi->dev, "No supported mediabus format found\n");
1504 		return ret;
1505 	}
1506 
1507 	ret = dcmi_framesizes_init(dcmi);
1508 	if (ret) {
1509 		dev_err(dcmi->dev, "Could not initialize framesizes\n");
1510 		return ret;
1511 	}
1512 
1513 	ret = dcmi_get_sensor_bounds(dcmi, &dcmi->sd_bounds);
1514 	if (ret) {
1515 		dev_err(dcmi->dev, "Could not get sensor bounds\n");
1516 		return ret;
1517 	}
1518 
1519 	ret = dcmi_set_default_fmt(dcmi);
1520 	if (ret) {
1521 		dev_err(dcmi->dev, "Could not set default format\n");
1522 		return ret;
1523 	}
1524 
1525 	ret = video_register_device(dcmi->vdev, VFL_TYPE_GRABBER, -1);
1526 	if (ret) {
1527 		dev_err(dcmi->dev, "Failed to register video device\n");
1528 		return ret;
1529 	}
1530 
1531 	dev_dbg(dcmi->dev, "Device registered as %s\n",
1532 		video_device_node_name(dcmi->vdev));
1533 	return 0;
1534 }
1535 
static void dcmi_graph_notify_unbind(struct v4l2_async_notifier *notifier,
				     struct v4l2_subdev *sd,
				     struct v4l2_async_subdev *asd)
1539 {
1540 	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);
1541 
1542 	dev_dbg(dcmi->dev, "Removing %s\n", video_device_node_name(dcmi->vdev));
1543 
	/* Checks internally whether vdev has been registered or not */
1545 	video_unregister_device(dcmi->vdev);
1546 }
1547 
static int dcmi_graph_notify_bound(struct v4l2_async_notifier *notifier,
				   struct v4l2_subdev *subdev,
				   struct v4l2_async_subdev *asd)
1551 {
1552 	struct stm32_dcmi *dcmi = notifier_to_dcmi(notifier);
1553 
1554 	dev_dbg(dcmi->dev, "Subdev %s bound\n", subdev->name);
1555 
1556 	dcmi->entity.subdev = subdev;
1557 
1558 	return 0;
1559 }
1560 
1561 static const struct v4l2_async_notifier_operations dcmi_graph_notify_ops = {
1562 	.bound = dcmi_graph_notify_bound,
1563 	.unbind = dcmi_graph_notify_unbind,
1564 	.complete = dcmi_graph_notify_complete,
1565 };
1566 
static int dcmi_graph_parse(struct stm32_dcmi *dcmi, struct device_node *node)
1568 {
1569 	struct device_node *ep = NULL;
1570 	struct device_node *remote;
1571 
1572 	ep = of_graph_get_next_endpoint(node, ep);
1573 	if (!ep)
1574 		return -EINVAL;
1575 
1576 	remote = of_graph_get_remote_port_parent(ep);
1577 	of_node_put(ep);
1578 	if (!remote)
1579 		return -EINVAL;
1580 
1581 	/* Remote node to connect */
1582 	dcmi->entity.node = remote;
1583 	dcmi->entity.asd.match_type = V4L2_ASYNC_MATCH_FWNODE;
1584 	dcmi->entity.asd.match.fwnode = of_fwnode_handle(remote);
1585 	return 0;
1586 }
1587 
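/*
 * Bind the sensor through the v4l2 async framework: dcmi_graph_parse()
 * extracts the remote endpoint from the DT graph, a single async
 * subdevice match is registered here, and the notifier callbacks above
 * (bound/complete) finish the setup and register the video device once
 * the sensor driver has probed.
 */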
static int dcmi_graph_init(struct stm32_dcmi *dcmi)
1589 {
1590 	struct v4l2_async_subdev **subdevs = NULL;
1591 	int ret;
1592 
1593 	/* Parse the graph to extract a list of subdevice DT nodes. */
1594 	ret = dcmi_graph_parse(dcmi, dcmi->dev->of_node);
1595 	if (ret < 0) {
1596 		dev_err(dcmi->dev, "Graph parsing failed\n");
1597 		return ret;
1598 	}
1599 
1600 	/* Register the subdevices notifier. */
1601 	subdevs = devm_kzalloc(dcmi->dev, sizeof(*subdevs), GFP_KERNEL);
1602 	if (!subdevs) {
1603 		of_node_put(dcmi->entity.node);
1604 		return -ENOMEM;
1605 	}
1606 
1607 	subdevs[0] = &dcmi->entity.asd;
1608 
1609 	dcmi->notifier.subdevs = subdevs;
1610 	dcmi->notifier.num_subdevs = 1;
1611 	dcmi->notifier.ops = &dcmi_graph_notify_ops;
1612 
1613 	ret = v4l2_async_notifier_register(&dcmi->v4l2_dev, &dcmi->notifier);
1614 	if (ret < 0) {
1615 		dev_err(dcmi->dev, "Notifier registration failed\n");
1616 		of_node_put(dcmi->entity.node);
1617 		return ret;
1618 	}
1619 
1620 	return 0;
1621 }
1622 
static int dcmi_probe(struct platform_device *pdev)
1624 {
1625 	struct device_node *np = pdev->dev.of_node;
1626 	const struct of_device_id *match = NULL;
1627 	struct v4l2_fwnode_endpoint ep;
1628 	struct stm32_dcmi *dcmi;
1629 	struct vb2_queue *q;
1630 	struct dma_chan *chan;
1631 	struct clk *mclk;
1632 	int irq;
1633 	int ret = 0;
1634 
1635 	match = of_match_device(of_match_ptr(stm32_dcmi_of_match), &pdev->dev);
1636 	if (!match) {
1637 		dev_err(&pdev->dev, "Could not find a match in devicetree\n");
1638 		return -ENODEV;
1639 	}
1640 
1641 	dcmi = devm_kzalloc(&pdev->dev, sizeof(struct stm32_dcmi), GFP_KERNEL);
1642 	if (!dcmi)
1643 		return -ENOMEM;
1644 
1645 	dcmi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
1646 	if (IS_ERR(dcmi->rstc)) {
1647 		dev_err(&pdev->dev, "Could not get reset control\n");
1648 		return -ENODEV;
1649 	}
1650 
1651 	/* Get bus characteristics from devicetree */
1652 	np = of_graph_get_next_endpoint(np, NULL);
1653 	if (!np) {
1654 		dev_err(&pdev->dev, "Could not find the endpoint\n");
1655 		of_node_put(np);
1656 		return -ENODEV;
1657 	}
1658 
1659 	ret = v4l2_fwnode_endpoint_parse(of_fwnode_handle(np), &ep);
1660 	of_node_put(np);
1661 	if (ret) {
1662 		dev_err(&pdev->dev, "Could not parse the endpoint\n");
1663 		return -ENODEV;
1664 	}
1665 
1666 	if (ep.bus_type == V4L2_MBUS_CSI2) {
1667 		dev_err(&pdev->dev, "CSI bus not supported\n");
1668 		return -ENODEV;
1669 	}
1670 	dcmi->bus.flags = ep.bus.parallel.flags;
1671 	dcmi->bus.bus_width = ep.bus.parallel.bus_width;
1672 	dcmi->bus.data_shift = ep.bus.parallel.data_shift;
1673 
1674 	irq = platform_get_irq(pdev, 0);
1675 	if (irq <= 0) {
1676 		dev_err(&pdev->dev, "Could not get irq\n");
1677 		return -ENODEV;
1678 	}
1679 
1680 	dcmi->res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1681 	if (!dcmi->res) {
1682 		dev_err(&pdev->dev, "Could not get resource\n");
1683 		return -ENODEV;
1684 	}
1685 
1686 	dcmi->regs = devm_ioremap_resource(&pdev->dev, dcmi->res);
1687 	if (IS_ERR(dcmi->regs)) {
1688 		dev_err(&pdev->dev, "Could not map registers\n");
1689 		return PTR_ERR(dcmi->regs);
1690 	}
1691 
1692 	ret = devm_request_threaded_irq(&pdev->dev, irq, dcmi_irq_callback,
1693 					dcmi_irq_thread, IRQF_ONESHOT,
1694 					dev_name(&pdev->dev), dcmi);
1695 	if (ret) {
1696 		dev_err(&pdev->dev, "Unable to request irq %d\n", irq);
1697 		return -ENODEV;
1698 	}
1699 
1700 	mclk = devm_clk_get(&pdev->dev, "mclk");
1701 	if (IS_ERR(mclk)) {
1702 		dev_err(&pdev->dev, "Unable to get mclk\n");
1703 		return PTR_ERR(mclk);
1704 	}
1705 
1706 	chan = dma_request_slave_channel(&pdev->dev, "tx");
1707 	if (!chan) {
1708 		dev_info(&pdev->dev, "Unable to request DMA channel, defer probing\n");
1709 		return -EPROBE_DEFER;
1710 	}
1711 
1712 	spin_lock_init(&dcmi->irqlock);
1713 	mutex_init(&dcmi->lock);
1714 	init_completion(&dcmi->complete);
1715 	INIT_LIST_HEAD(&dcmi->buffers);
1716 
1717 	dcmi->dev = &pdev->dev;
1718 	dcmi->mclk = mclk;
1719 	dcmi->state = STOPPED;
1720 	dcmi->dma_chan = chan;
1721 
1722 	q = &dcmi->queue;
1723 
1724 	/* Initialize the top-level structure */
1725 	ret = v4l2_device_register(&pdev->dev, &dcmi->v4l2_dev);
1726 	if (ret)
1727 		goto err_dma_release;
1728 
1729 	dcmi->vdev = video_device_alloc();
1730 	if (!dcmi->vdev) {
1731 		ret = -ENOMEM;
1732 		goto err_device_unregister;
1733 	}
1734 
1735 	/* Video node */
1736 	dcmi->vdev->fops = &dcmi_fops;
1737 	dcmi->vdev->v4l2_dev = &dcmi->v4l2_dev;
1738 	dcmi->vdev->queue = &dcmi->queue;
1739 	strlcpy(dcmi->vdev->name, KBUILD_MODNAME, sizeof(dcmi->vdev->name));
1740 	dcmi->vdev->release = video_device_release;
1741 	dcmi->vdev->ioctl_ops = &dcmi_ioctl_ops;
1742 	dcmi->vdev->lock = &dcmi->lock;
1743 	dcmi->vdev->device_caps = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING |
1744 				  V4L2_CAP_READWRITE;
1745 	video_set_drvdata(dcmi->vdev, dcmi);
1746 
1747 	/* Buffer queue */
1748 	q->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1749 	q->io_modes = VB2_MMAP | VB2_READ | VB2_DMABUF;
1750 	q->lock = &dcmi->lock;
1751 	q->drv_priv = dcmi;
1752 	q->buf_struct_size = sizeof(struct dcmi_buf);
1753 	q->ops = &dcmi_video_qops;
1754 	q->mem_ops = &vb2_dma_contig_memops;
1755 	q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1756 	q->min_buffers_needed = 2;
1757 	q->dev = &pdev->dev;
1758 
1759 	ret = vb2_queue_init(q);
1760 	if (ret < 0) {
1761 		dev_err(&pdev->dev, "Failed to initialize vb2 queue\n");
1762 		goto err_device_release;
1763 	}
1764 
1765 	ret = dcmi_graph_init(dcmi);
1766 	if (ret < 0)
1767 		goto err_device_release;
1768 
1769 	/* Reset device */
1770 	ret = reset_control_assert(dcmi->rstc);
1771 	if (ret) {
1772 		dev_err(&pdev->dev, "Failed to assert the reset line\n");
1773 		goto err_device_release;
1774 	}
1775 
1776 	usleep_range(3000, 5000);
1777 
1778 	ret = reset_control_deassert(dcmi->rstc);
1779 	if (ret) {
1780 		dev_err(&pdev->dev, "Failed to deassert the reset line\n");
1781 		goto err_device_release;
1782 	}
1783 
1784 	dev_info(&pdev->dev, "Probe done\n");
1785 
1786 	platform_set_drvdata(pdev, dcmi);
1787 
1788 	pm_runtime_enable(&pdev->dev);
1789 
1790 	return 0;
1791 
1792 err_device_release:
1793 	video_device_release(dcmi->vdev);
1794 err_device_unregister:
1795 	v4l2_device_unregister(&dcmi->v4l2_dev);
1796 err_dma_release:
1797 	dma_release_channel(dcmi->dma_chan);
1798 
1799 	return ret;
1800 }
1801 
static int dcmi_remove(struct platform_device *pdev)
1803 {
1804 	struct stm32_dcmi *dcmi = platform_get_drvdata(pdev);
1805 
1806 	pm_runtime_disable(&pdev->dev);
1807 
1808 	v4l2_async_notifier_unregister(&dcmi->notifier);
1809 	v4l2_device_unregister(&dcmi->v4l2_dev);
1810 
1811 	dma_release_channel(dcmi->dma_chan);
1812 
1813 	return 0;
1814 }
1815 
static __maybe_unused int dcmi_runtime_suspend(struct device *dev)
1817 {
1818 	struct stm32_dcmi *dcmi = dev_get_drvdata(dev);
1819 
1820 	clk_disable_unprepare(dcmi->mclk);
1821 
1822 	return 0;
1823 }
1824 
static __maybe_unused int dcmi_runtime_resume(struct device *dev)
1826 {
1827 	struct stm32_dcmi *dcmi = dev_get_drvdata(dev);
1828 	int ret;
1829 
1830 	ret = clk_prepare_enable(dcmi->mclk);
1831 	if (ret)
1832 		dev_err(dev, "%s: Failed to prepare_enable clock\n", __func__);
1833 
1834 	return ret;
1835 }
1836 
static __maybe_unused int dcmi_suspend(struct device *dev)
1838 {
1839 	/* disable clock */
1840 	pm_runtime_force_suspend(dev);
1841 
1842 	/* change pinctrl state */
1843 	pinctrl_pm_select_sleep_state(dev);
1844 
1845 	return 0;
1846 }
1847 
static __maybe_unused int dcmi_resume(struct device *dev)
1849 {
1850 	/* restore pinctl default state */
1851 	pinctrl_pm_select_default_state(dev);
1852 
1853 	/* clock enable */
1854 	pm_runtime_force_resume(dev);
1855 
1856 	return 0;
1857 }
1858 
1859 static const struct dev_pm_ops dcmi_pm_ops = {
1860 	SET_SYSTEM_SLEEP_PM_OPS(dcmi_suspend, dcmi_resume)
1861 	SET_RUNTIME_PM_OPS(dcmi_runtime_suspend,
1862 			   dcmi_runtime_resume, NULL)
1863 };
1864 
1865 static struct platform_driver stm32_dcmi_driver = {
1866 	.probe		= dcmi_probe,
1867 	.remove		= dcmi_remove,
1868 	.driver		= {
1869 		.name = DRV_NAME,
1870 		.of_match_table = of_match_ptr(stm32_dcmi_of_match),
1871 		.pm = &dcmi_pm_ops,
1872 	},
1873 };
1874 
1875 module_platform_driver(stm32_dcmi_driver);
1876 
1877 MODULE_AUTHOR("Yannick Fertre <yannick.fertre@st.com>");
1878 MODULE_AUTHOR("Hugues Fruchet <hugues.fruchet@st.com>");
1879 MODULE_DESCRIPTION("STMicroelectronics STM32 Digital Camera Memory Interface driver");
1880 MODULE_LICENSE("GPL");
1881 MODULE_SUPPORTED_DEVICE("video");
1882