// SPDX-License-Identifier: GPL-2.0+
/*
 * vsp1_histo.c -- R-Car VSP1 Histogram API
 *
 * Copyright (C) 2016 Renesas Electronics Corporation
 * Copyright (C) 2016 Laurent Pinchart
 *
 * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
 */

#include <linux/device.h>
#include <linux/gfp.h>

#include <media/v4l2-ioctl.h>
#include <media/v4l2-subdev.h>
#include <media/videobuf2-vmalloc.h>

#include "vsp1.h"
#include "vsp1_histo.h"
#include "vsp1_pipe.h"

#define HISTO_MIN_SIZE				4U
#define HISTO_MAX_SIZE				8192U

/* -----------------------------------------------------------------------------
 * Buffer Operations
 */

static inline struct vsp1_histogram_buffer *
to_vsp1_histogram_buffer(struct vb2_v4l2_buffer *vbuf)
{
	return container_of(vbuf, struct vsp1_histogram_buffer, buf);
}

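/*
 * Pop the next buffer from the IRQ queue, if any, and flag that a readout is
 * in progress so that histo_stop_streaming() can wait for it to complete.
 * Returns NULL when no buffer is available.
 */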
struct vsp1_histogram_buffer *
vsp1_histogram_buffer_get(struct vsp1_histogram *histo)
{
	struct vsp1_histogram_buffer *buf = NULL;
	unsigned long flags;

	spin_lock_irqsave(&histo->irqlock, flags);

	if (list_empty(&histo->irqqueue))
		goto done;

	buf = list_first_entry(&histo->irqqueue, struct vsp1_histogram_buffer,
			       queue);
	list_del(&buf->queue);
	histo->readout = true;

done:
	spin_unlock_irqrestore(&histo->irqlock, flags);
	return buf;
}

void vsp1_histogram_buffer_complete(struct vsp1_histogram *histo,
				    struct vsp1_histogram_buffer *buf,
				    size_t size)
{
	struct vsp1_pipeline *pipe = histo->entity.pipe;
	unsigned long flags;

	/*
	 * The pipeline pointer is guaranteed to be valid as this function is
	 * called from the frame completion interrupt handler, which can only
	 * occur when video streaming is active.
	 */
	buf->buf.sequence = pipe->sequence;
	buf->buf.vb2_buf.timestamp = ktime_get_ns();
	vb2_set_plane_payload(&buf->buf.vb2_buf, 0, size);
	vb2_buffer_done(&buf->buf.vb2_buf, VB2_BUF_STATE_DONE);

	spin_lock_irqsave(&histo->irqlock, flags);
	histo->readout = false;
	wake_up(&histo->wait_queue);
	spin_unlock_irqrestore(&histo->irqlock, flags);
}

/* -----------------------------------------------------------------------------
 * videobuf2 Queue Operations
 */

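/*
 * Validate a caller-provided plane count and size, or report the single plane
 * and the size needed to hold the histogram data when none is requested yet.
 */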
static int histo_queue_setup(struct vb2_queue *vq, unsigned int *nbuffers,
			     unsigned int *nplanes, unsigned int sizes[],
			     struct device *alloc_devs[])
{
	struct vsp1_histogram *histo = vb2_get_drv_priv(vq);

	if (*nplanes) {
		if (*nplanes != 1)
			return -EINVAL;

		if (sizes[0] < histo->data_size)
			return -EINVAL;

		return 0;
	}

	*nplanes = 1;
	sizes[0] = histo->data_size;

	return 0;
}

static int histo_buffer_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_histogram *histo = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_histogram_buffer *buf = to_vsp1_histogram_buffer(vbuf);

	if (vb->num_planes != 1)
		return -EINVAL;

	if (vb2_plane_size(vb, 0) < histo->data_size)
		return -EINVAL;

	buf->addr = vb2_plane_vaddr(vb, 0);

	return 0;
}

static void histo_buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct vsp1_histogram *histo = vb2_get_drv_priv(vb->vb2_queue);
	struct vsp1_histogram_buffer *buf = to_vsp1_histogram_buffer(vbuf);
	unsigned long flags;

	spin_lock_irqsave(&histo->irqlock, flags);
	list_add_tail(&buf->queue, &histo->irqqueue);
	spin_unlock_irqrestore(&histo->irqlock, flags);
}

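/*
 * Nothing needs to be done when streaming starts; queued buffers are consumed
 * by the frame completion handler while the pipeline runs, hence the empty
 * start_streaming handler below.
 */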
static int histo_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	return 0;
}

static void histo_stop_streaming(struct vb2_queue *vq)
{
	struct vsp1_histogram *histo = vb2_get_drv_priv(vq);
	struct vsp1_histogram_buffer *buffer;
	unsigned long flags;

	spin_lock_irqsave(&histo->irqlock, flags);

	/* Remove all buffers from the IRQ queue. */
	list_for_each_entry(buffer, &histo->irqqueue, queue)
		vb2_buffer_done(&buffer->buf.vb2_buf, VB2_BUF_STATE_ERROR);
	INIT_LIST_HEAD(&histo->irqqueue);

	/* Wait for the buffer being read out (if any) to complete. */
	wait_event_lock_irq(histo->wait_queue, !histo->readout, histo->irqlock);

	spin_unlock_irqrestore(&histo->irqlock, flags);
}

static const struct vb2_ops histo_video_queue_qops = {
	.queue_setup = histo_queue_setup,
	.buf_prepare = histo_buffer_prepare,
	.buf_queue = histo_buffer_queue,
	.wait_prepare = vb2_ops_wait_prepare,
	.wait_finish = vb2_ops_wait_finish,
	.start_streaming = histo_start_streaming,
	.stop_streaming = histo_stop_streaming,
};

/* -----------------------------------------------------------------------------
 * V4L2 Subdevice Operations
 */

static int histo_enum_mbus_code(struct v4l2_subdev *subdev,
				struct v4l2_subdev_state *sd_state,
				struct v4l2_subdev_mbus_code_enum *code)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);

	if (code->pad == HISTO_PAD_SOURCE) {
		code->code = MEDIA_BUS_FMT_FIXED;
		return 0;
	}

	return vsp1_subdev_enum_mbus_code(subdev, sd_state, code,
					  histo->formats,
					  histo->num_formats);
}

static int histo_enum_frame_size(struct v4l2_subdev *subdev,
				 struct v4l2_subdev_state *sd_state,
				 struct v4l2_subdev_frame_size_enum *fse)
{
	if (fse->pad != HISTO_PAD_SINK)
		return -EINVAL;

	return vsp1_subdev_enum_frame_size(subdev, sd_state, fse,
					   HISTO_MIN_SIZE,
					   HISTO_MIN_SIZE, HISTO_MAX_SIZE,
					   HISTO_MAX_SIZE);
}

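/*
 * The crop bounds and default selection cover the full sink frame, while the
 * compose bounds and default match the crop rectangle, as composing can only
 * downscale the cropped area.
 */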
static int histo_get_selection(struct v4l2_subdev *subdev,
			       struct v4l2_subdev_state *sd_state,
			       struct v4l2_subdev_selection *sel)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);
	struct v4l2_subdev_state *state;
	struct v4l2_mbus_framefmt *format;
	struct v4l2_rect *crop;
	int ret = 0;

	if (sel->pad != HISTO_PAD_SINK)
		return -EINVAL;

	mutex_lock(&histo->entity.lock);

	state = vsp1_entity_get_state(&histo->entity, sd_state, sel->which);
	if (!state) {
		ret = -EINVAL;
		goto done;
	}

	switch (sel->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
		crop = vsp1_entity_get_pad_selection(&histo->entity, state,
						     HISTO_PAD_SINK,
						     V4L2_SEL_TGT_CROP);
		sel->r.left = 0;
		sel->r.top = 0;
		sel->r.width = crop->width;
		sel->r.height = crop->height;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		format = vsp1_entity_get_pad_format(&histo->entity, state,
						    HISTO_PAD_SINK);
		sel->r.left = 0;
		sel->r.top = 0;
		sel->r.width = format->width;
		sel->r.height = format->height;
		break;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		sel->r = *vsp1_entity_get_pad_selection(&histo->entity, state,
							sel->pad, sel->target);
		break;

	default:
		ret = -EINVAL;
		break;
	}

done:
	mutex_unlock(&histo->entity.lock);
	return ret;
}

static int histo_set_crop(struct v4l2_subdev *subdev,
			  struct v4l2_subdev_state *sd_state,
			  struct v4l2_subdev_selection *sel)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);
	struct v4l2_mbus_framefmt *format;
	struct v4l2_rect *selection;

	/* The crop rectangle must be inside the input frame. */
	format = vsp1_entity_get_pad_format(&histo->entity, sd_state,
					    HISTO_PAD_SINK);
	sel->r.left = clamp_t(unsigned int, sel->r.left, 0, format->width - 1);
	sel->r.top = clamp_t(unsigned int, sel->r.top, 0, format->height - 1);
	sel->r.width = clamp_t(unsigned int, sel->r.width, HISTO_MIN_SIZE,
			       format->width - sel->r.left);
	sel->r.height = clamp_t(unsigned int, sel->r.height, HISTO_MIN_SIZE,
				format->height - sel->r.top);

	/* Set the crop rectangle and reset the compose rectangle. */
	selection = vsp1_entity_get_pad_selection(&histo->entity, sd_state,
						  sel->pad, V4L2_SEL_TGT_CROP);
	*selection = sel->r;

	selection = vsp1_entity_get_pad_selection(&histo->entity, sd_state,
						  sel->pad,
						  V4L2_SEL_TGT_COMPOSE);
	*selection = sel->r;

	return 0;
}

static int histo_set_compose(struct v4l2_subdev *subdev,
			     struct v4l2_subdev_state *sd_state,
			     struct v4l2_subdev_selection *sel)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);
	struct v4l2_rect *compose;
	struct v4l2_rect *crop;
	unsigned int ratio;

	/*
	 * The compose rectangle is used to configure downscaling, the top left
	 * corner is fixed to (0,0) and the size to 1/2 or 1/4 of the crop
	 * rectangle.
	 */
	sel->r.left = 0;
	sel->r.top = 0;

	crop = vsp1_entity_get_pad_selection(&histo->entity, sd_state,
					     sel->pad,
					     V4L2_SEL_TGT_CROP);

	/*
	 * Clamp the width and height to acceptable values first and then
	 * compute the closest rounded dividing ratio.
	 *
	 * Ratio	Rounded ratio
	 * --------------------------
	 * [1.0 1.5[	1
	 * [1.5 3.0[	2
	 * [3.0 4.0]	4
	 *
	 * The rounded ratio can be computed using
	 *
	 * 1 << (ceil(ratio * 2) / 3)
	 */
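	/*
	 * For example, with a hypothetical 1000-pixel wide crop and a
	 * requested compose width of 400 (ratio 2.5), the integer expression
	 * below yields 1000 * 2 / 400 / 3 = 1, so the rounded ratio is
	 * 1 << 1 = 2 and the compose width becomes 500.
	 */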
	sel->r.width = clamp(sel->r.width, crop->width / 4, crop->width);
	ratio = 1 << (crop->width * 2 / sel->r.width / 3);
	sel->r.width = crop->width / ratio;

	sel->r.height = clamp(sel->r.height, crop->height / 4, crop->height);
	ratio = 1 << (crop->height * 2 / sel->r.height / 3);
	sel->r.height = crop->height / ratio;

	compose = vsp1_entity_get_pad_selection(&histo->entity, sd_state,
						sel->pad,
						V4L2_SEL_TGT_COMPOSE);
	*compose = sel->r;

	return 0;
}

static int histo_set_selection(struct v4l2_subdev *subdev,
			       struct v4l2_subdev_state *sd_state,
			       struct v4l2_subdev_selection *sel)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);
	struct v4l2_subdev_state *state;
	int ret;

	if (sel->pad != HISTO_PAD_SINK)
		return -EINVAL;

	mutex_lock(&histo->entity.lock);

	state = vsp1_entity_get_state(&histo->entity, sd_state, sel->which);
	if (!state) {
		ret = -EINVAL;
		goto done;
	}

	if (sel->target == V4L2_SEL_TGT_CROP)
		ret = histo_set_crop(subdev, state, sel);
	else if (sel->target == V4L2_SEL_TGT_COMPOSE)
		ret = histo_set_compose(subdev, state, sel);
	else
		ret = -EINVAL;

done:
	mutex_unlock(&histo->entity.lock);
	return ret;
}

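/*
 * The source pad carries histogram statistics rather than an image, so its
 * format is reported as the fixed media bus code with a zero size.
 */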
static int histo_get_format(struct v4l2_subdev *subdev,
			    struct v4l2_subdev_state *sd_state,
			    struct v4l2_subdev_format *fmt)
{
	if (fmt->pad == HISTO_PAD_SOURCE) {
		fmt->format.code = MEDIA_BUS_FMT_FIXED;
		fmt->format.width = 0;
		fmt->format.height = 0;
		fmt->format.field = V4L2_FIELD_NONE;
		fmt->format.colorspace = V4L2_COLORSPACE_RAW;
		return 0;
	}

	return vsp1_subdev_get_pad_format(subdev, sd_state, fmt);
}

static int histo_set_format(struct v4l2_subdev *subdev,
			    struct v4l2_subdev_state *sd_state,
			    struct v4l2_subdev_format *fmt)
{
	struct vsp1_histogram *histo = subdev_to_histo(subdev);

	if (fmt->pad != HISTO_PAD_SINK)
		return histo_get_format(subdev, sd_state, fmt);

	return vsp1_subdev_set_pad_format(subdev, sd_state, fmt,
					  histo->formats, histo->num_formats,
					  HISTO_MIN_SIZE, HISTO_MIN_SIZE,
					  HISTO_MAX_SIZE, HISTO_MAX_SIZE);
}

static const struct v4l2_subdev_pad_ops histo_pad_ops = {
	.enum_mbus_code = histo_enum_mbus_code,
	.enum_frame_size = histo_enum_frame_size,
	.get_fmt = histo_get_format,
	.set_fmt = histo_set_format,
	.get_selection = histo_get_selection,
	.set_selection = histo_set_selection,
};

static const struct v4l2_subdev_ops histo_ops = {
	.pad = &histo_pad_ops,
};

/* -----------------------------------------------------------------------------
 * V4L2 ioctls
 */

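/*
 * cap->capabilities describes the vsp1 device as a whole; the capabilities of
 * this particular video node are reported through device_caps, set when the
 * video device is initialized.
 */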
static int histo_v4l2_querycap(struct file *file, void *fh,
			       struct v4l2_capability *cap)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_histogram *histo = vdev_to_histo(vfh->vdev);

	cap->capabilities = V4L2_CAP_DEVICE_CAPS | V4L2_CAP_STREAMING
			  | V4L2_CAP_VIDEO_CAPTURE_MPLANE
			  | V4L2_CAP_VIDEO_OUTPUT_MPLANE
			  | V4L2_CAP_META_CAPTURE;

	strscpy(cap->driver, "vsp1", sizeof(cap->driver));
	strscpy(cap->card, histo->video.name, sizeof(cap->card));

	return 0;
}

static int histo_v4l2_enum_format(struct file *file, void *fh,
				  struct v4l2_fmtdesc *f)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_histogram *histo = vdev_to_histo(vfh->vdev);

	if (f->index > 0 || f->type != histo->queue.type)
		return -EINVAL;

	f->pixelformat = histo->meta_format;

	return 0;
}

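/*
 * The metadata format and buffer size are fixed by the histogram unit, so the
 * same handler backs the get, set and try format ioctls.
 */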
static int histo_v4l2_get_format(struct file *file, void *fh,
				 struct v4l2_format *format)
{
	struct v4l2_fh *vfh = file->private_data;
	struct vsp1_histogram *histo = vdev_to_histo(vfh->vdev);
	struct v4l2_meta_format *meta = &format->fmt.meta;

	if (format->type != histo->queue.type)
		return -EINVAL;

	memset(meta, 0, sizeof(*meta));

	meta->dataformat = histo->meta_format;
	meta->buffersize = histo->data_size;

	return 0;
}

static const struct v4l2_ioctl_ops histo_v4l2_ioctl_ops = {
	.vidioc_querycap = histo_v4l2_querycap,
	.vidioc_enum_fmt_meta_cap = histo_v4l2_enum_format,
	.vidioc_g_fmt_meta_cap = histo_v4l2_get_format,
	.vidioc_s_fmt_meta_cap = histo_v4l2_get_format,
	.vidioc_try_fmt_meta_cap = histo_v4l2_get_format,
	.vidioc_reqbufs = vb2_ioctl_reqbufs,
	.vidioc_querybuf = vb2_ioctl_querybuf,
	.vidioc_qbuf = vb2_ioctl_qbuf,
	.vidioc_dqbuf = vb2_ioctl_dqbuf,
	.vidioc_create_bufs = vb2_ioctl_create_bufs,
	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
	.vidioc_streamon = vb2_ioctl_streamon,
	.vidioc_streamoff = vb2_ioctl_streamoff,
};

/* -----------------------------------------------------------------------------
 * V4L2 File Operations
 */

static const struct v4l2_file_operations histo_v4l2_fops = {
	.owner = THIS_MODULE,
	.unlocked_ioctl = video_ioctl2,
	.open = v4l2_fh_open,
	.release = vb2_fop_release,
	.poll = vb2_fop_poll,
	.mmap = vb2_fop_mmap,
};

static void vsp1_histogram_cleanup(struct vsp1_histogram *histo)
{
	if (video_is_registered(&histo->video))
		video_unregister_device(&histo->video);

	media_entity_cleanup(&histo->video.entity);
}

void vsp1_histogram_destroy(struct vsp1_entity *entity)
{
	struct vsp1_histogram *histo = subdev_to_histo(&entity->subdev);

	vsp1_histogram_cleanup(histo);
}

int vsp1_histogram_init(struct vsp1_device *vsp1, struct vsp1_histogram *histo,
			enum vsp1_entity_type type, const char *name,
			const struct vsp1_entity_operations *ops,
			const unsigned int *formats, unsigned int num_formats,
			size_t data_size, u32 meta_format)
{
	int ret;

	histo->formats = formats;
	histo->num_formats = num_formats;
	histo->data_size = data_size;
	histo->meta_format = meta_format;

	histo->pad.flags = MEDIA_PAD_FL_SINK;
	histo->video.vfl_dir = VFL_DIR_RX;

	mutex_init(&histo->lock);
	spin_lock_init(&histo->irqlock);
	INIT_LIST_HEAD(&histo->irqqueue);
	init_waitqueue_head(&histo->wait_queue);

	/* Initialize the VSP entity... */
	histo->entity.ops = ops;
	histo->entity.type = type;

	ret = vsp1_entity_init(vsp1, &histo->entity, name, 2, &histo_ops,
			       MEDIA_ENT_F_PROC_VIDEO_STATISTICS);
	if (ret < 0)
		return ret;

	/* ... and the media entity... */
	ret = media_entity_pads_init(&histo->video.entity, 1, &histo->pad);
	if (ret < 0)
		return ret;

	/* ... and the video node... */
	histo->video.v4l2_dev = &vsp1->v4l2_dev;
	histo->video.fops = &histo_v4l2_fops;
	snprintf(histo->video.name, sizeof(histo->video.name),
		 "%s histo", histo->entity.subdev.name);
	histo->video.vfl_type = VFL_TYPE_VIDEO;
	histo->video.release = video_device_release_empty;
	histo->video.ioctl_ops = &histo_v4l2_ioctl_ops;
	histo->video.device_caps = V4L2_CAP_META_CAPTURE | V4L2_CAP_STREAMING;

	video_set_drvdata(&histo->video, histo);

	/* ... and the buffers queue... */
	histo->queue.type = V4L2_BUF_TYPE_META_CAPTURE;
	histo->queue.io_modes = VB2_MMAP | VB2_USERPTR | VB2_DMABUF;
	histo->queue.lock = &histo->lock;
	histo->queue.drv_priv = histo;
	histo->queue.buf_struct_size = sizeof(struct vsp1_histogram_buffer);
	histo->queue.ops = &histo_video_queue_qops;
	histo->queue.mem_ops = &vb2_vmalloc_memops;
	histo->queue.timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	histo->queue.dev = vsp1->dev;
	ret = vb2_queue_init(&histo->queue);
	if (ret < 0) {
		dev_err(vsp1->dev, "failed to initialize vb2 queue\n");
		goto error;
	}

	/* ... and register the video device. */
	histo->video.queue = &histo->queue;
	ret = video_register_device(&histo->video, VFL_TYPE_VIDEO, -1);
	if (ret < 0) {
		dev_err(vsp1->dev, "failed to register video device\n");
		goto error;
	}

	return 0;

error:
	vsp1_histogram_cleanup(histo);
	return ret;
}