Lines Matching full:sd
50 static int subdev_fh_init(struct v4l2_subdev_fh *fh, struct v4l2_subdev *sd) in subdev_fh_init() argument
55 state = __v4l2_subdev_state_alloc(sd, "fh->state->lock", &key); in subdev_fh_init()
73 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_open() local
81 ret = subdev_fh_init(subdev_fh, sd); in subdev_open()
91 if (sd->v4l2_dev->mdev && sd->entity.graph_obj.mdev->dev) { in subdev_open()
94 owner = sd->entity.graph_obj.mdev->dev->driver->owner; in subdev_open()
102 if (sd->internal_ops && sd->internal_ops->open) { in subdev_open()
103 ret = sd->internal_ops->open(sd, subdev_fh); in subdev_open()
123 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_close() local
127 if (sd->internal_ops && sd->internal_ops->close) in subdev_close()
128 sd->internal_ops->close(sd, subdev_fh); in subdev_close()
159 static inline int check_pad(struct v4l2_subdev *sd, u32 pad) in check_pad() argument
162 if (sd->entity.num_pads) { in check_pad()
163 if (pad >= sd->entity.num_pads) in check_pad()
174 static int check_state(struct v4l2_subdev *sd, struct v4l2_subdev_state *state, in check_state() argument
177 if (sd->flags & V4L2_SUBDEV_FL_STREAMS) { in check_state()
196 static inline int check_format(struct v4l2_subdev *sd, in check_format() argument
203 return check_which(format->which) ? : check_pad(sd, format->pad) ? : in check_format()
204 check_state(sd, state, format->which, format->pad, format->stream); in check_format()
207 static int call_get_fmt(struct v4l2_subdev *sd, in call_get_fmt() argument
211 return check_format(sd, state, format) ? : in call_get_fmt()
212 sd->ops->pad->get_fmt(sd, state, format); in call_get_fmt()
215 static int call_set_fmt(struct v4l2_subdev *sd, in call_set_fmt() argument
219 return check_format(sd, state, format) ? : in call_set_fmt()
220 sd->ops->pad->set_fmt(sd, state, format); in call_set_fmt()
223 static int call_enum_mbus_code(struct v4l2_subdev *sd, in call_enum_mbus_code() argument
230 return check_which(code->which) ? : check_pad(sd, code->pad) ? : in call_enum_mbus_code()
231 check_state(sd, state, code->which, code->pad, code->stream) ? : in call_enum_mbus_code()
232 sd->ops->pad->enum_mbus_code(sd, state, code); in call_enum_mbus_code()
235 static int call_enum_frame_size(struct v4l2_subdev *sd, in call_enum_frame_size() argument
242 return check_which(fse->which) ? : check_pad(sd, fse->pad) ? : in call_enum_frame_size()
243 check_state(sd, state, fse->which, fse->pad, fse->stream) ? : in call_enum_frame_size()
244 sd->ops->pad->enum_frame_size(sd, state, fse); in call_enum_frame_size()
247 static inline int check_frame_interval(struct v4l2_subdev *sd, in check_frame_interval() argument
253 return check_pad(sd, fi->pad); in check_frame_interval()
256 static int call_g_frame_interval(struct v4l2_subdev *sd, in call_g_frame_interval() argument
259 return check_frame_interval(sd, fi) ? : in call_g_frame_interval()
260 sd->ops->video->g_frame_interval(sd, fi); in call_g_frame_interval()
263 static int call_s_frame_interval(struct v4l2_subdev *sd, in call_s_frame_interval() argument
266 return check_frame_interval(sd, fi) ? : in call_s_frame_interval()
267 sd->ops->video->s_frame_interval(sd, fi); in call_s_frame_interval()
270 static int call_enum_frame_interval(struct v4l2_subdev *sd, in call_enum_frame_interval() argument
277 return check_which(fie->which) ? : check_pad(sd, fie->pad) ? : in call_enum_frame_interval()
278 check_state(sd, state, fie->which, fie->pad, fie->stream) ? : in call_enum_frame_interval()
279 sd->ops->pad->enum_frame_interval(sd, state, fie); in call_enum_frame_interval()
282 static inline int check_selection(struct v4l2_subdev *sd, in check_selection() argument
289 return check_which(sel->which) ? : check_pad(sd, sel->pad) ? : in check_selection()
290 check_state(sd, state, sel->which, sel->pad, sel->stream); in check_selection()
293 static int call_get_selection(struct v4l2_subdev *sd, in call_get_selection() argument
297 return check_selection(sd, state, sel) ? : in call_get_selection()
298 sd->ops->pad->get_selection(sd, state, sel); in call_get_selection()
301 static int call_set_selection(struct v4l2_subdev *sd, in call_set_selection() argument
305 return check_selection(sd, state, sel) ? : in call_set_selection()
306 sd->ops->pad->set_selection(sd, state, sel); in call_set_selection()
309 static inline int check_edid(struct v4l2_subdev *sd, in check_edid() argument
318 return check_pad(sd, edid->pad); in check_edid()
321 static int call_get_edid(struct v4l2_subdev *sd, struct v4l2_subdev_edid *edid) in call_get_edid() argument
323 return check_edid(sd, edid) ? : sd->ops->pad->get_edid(sd, edid); in call_get_edid()
326 static int call_set_edid(struct v4l2_subdev *sd, struct v4l2_subdev_edid *edid) in call_set_edid() argument
328 return check_edid(sd, edid) ? : sd->ops->pad->set_edid(sd, edid); in call_set_edid()
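All of the call_*() wrappers above share one shape: run the relevant check_*() helpers and only forward to the driver op if every check returned 0. A minimal illustrative sketch of that pattern, assuming it sits next to the helpers in this file (call_example_get_fmt is hypothetical and not part of the source):

static int call_example_get_fmt(struct v4l2_subdev *sd,
				struct v4l2_subdev_state *state,
				struct v4l2_subdev_format *format)
{
	/*
	 * GNU "a ?: b" evaluates to a when a is non-zero (a negative errno
	 * from a check helper) and to b otherwise, so the first failing
	 * check short-circuits the chain before the op is ever invoked.
	 */
	return check_which(format->which) ?: check_pad(sd, format->pad) ?:
	       sd->ops->pad->get_fmt(sd, state, format);
}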
331 static int call_dv_timings_cap(struct v4l2_subdev *sd, in call_dv_timings_cap() argument
337 return check_pad(sd, cap->pad) ? : in call_dv_timings_cap()
338 sd->ops->pad->dv_timings_cap(sd, cap); in call_dv_timings_cap()
341 static int call_enum_dv_timings(struct v4l2_subdev *sd, in call_enum_dv_timings() argument
347 return check_pad(sd, dvt->pad) ? : in call_enum_dv_timings()
348 sd->ops->pad->enum_dv_timings(sd, dvt); in call_enum_dv_timings()
351 static int call_get_mbus_config(struct v4l2_subdev *sd, unsigned int pad, in call_get_mbus_config() argument
354 return check_pad(sd, pad) ? : in call_get_mbus_config()
355 sd->ops->pad->get_mbus_config(sd, pad, config); in call_get_mbus_config()
358 static int call_s_stream(struct v4l2_subdev *sd, int enable) in call_s_stream() argument
363 if (!IS_ERR_OR_NULL(sd->privacy_led)) { in call_s_stream()
365 led_set_brightness(sd->privacy_led, in call_s_stream()
366 sd->privacy_led->max_brightness); in call_s_stream()
368 led_set_brightness(sd->privacy_led, 0); in call_s_stream()
371 ret = sd->ops->video->s_stream(sd, enable); in call_s_stream()
374 dev_warn(sd->dev, "disabling streaming failed (%d)\n", ret); in call_s_stream()
388 static int call_##f##_state(struct v4l2_subdev *sd, \
395 state = v4l2_subdev_lock_and_get_active_state(sd); \
396 ret = call_##f(sd, state, arg); \
405 static int call_##f##_state(struct v4l2_subdev *sd, \
409 return call_##f(sd, state, arg); \
452 subdev_ioctl_get_state(struct v4l2_subdev *sd, struct v4l2_subdev_fh *subdev_fh, in subdev_ioctl_get_state() argument
489 v4l2_subdev_get_unlocked_active_state(sd); in subdev_ioctl_get_state()
496 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_do_ioctl() local
500 bool streams_subdev = sd->flags & V4L2_SUBDEV_FL_STREAMS; in subdev_do_ioctl()
562 vdev, sd->v4l2_dev->mdev, arg); in subdev_do_ioctl()
568 vdev, sd->v4l2_dev->mdev, arg); in subdev_do_ioctl()
574 vdev, sd->v4l2_dev->mdev, arg); in subdev_do_ioctl()
577 if (!(sd->flags & V4L2_SUBDEV_FL_HAS_EVENTS)) in subdev_do_ioctl()
583 return v4l2_subdev_call(sd, core, subscribe_event, vfh, arg); in subdev_do_ioctl()
586 return v4l2_subdev_call(sd, core, unsubscribe_event, vfh, arg); in subdev_do_ioctl()
595 return v4l2_subdev_call(sd, core, g_register, p); in subdev_do_ioctl()
603 return v4l2_subdev_call(sd, core, s_register, p); in subdev_do_ioctl()
611 if (sd->ops->core && sd->ops->core->s_register) in subdev_do_ioctl()
613 if (sd->ops->core && sd->ops->core->g_register) in subdev_do_ioctl()
615 strscpy(p->name, sd->name, sizeof(p->name)); in subdev_do_ioctl()
624 sd->name); in subdev_do_ioctl()
625 ret = v4l2_subdev_call(sd, core, log_status); in subdev_do_ioctl()
627 sd->name); in subdev_do_ioctl()
639 return v4l2_subdev_call(sd, pad, get_fmt, state, format); in subdev_do_ioctl()
653 return v4l2_subdev_call(sd, pad, set_fmt, state, format); in subdev_do_ioctl()
670 sd, pad, get_selection, state, &sel); in subdev_do_ioctl()
695 sd, pad, set_selection, state, &sel); in subdev_do_ioctl()
709 return v4l2_subdev_call(sd, pad, enum_mbus_code, state, in subdev_do_ioctl()
720 return v4l2_subdev_call(sd, pad, enum_frame_size, state, in subdev_do_ioctl()
731 return v4l2_subdev_call(sd, video, g_frame_interval, arg); in subdev_do_ioctl()
744 return v4l2_subdev_call(sd, video, s_frame_interval, arg); in subdev_do_ioctl()
754 return v4l2_subdev_call(sd, pad, enum_frame_interval, state, in subdev_do_ioctl()
766 sd, pad, get_selection, state, sel); in subdev_do_ioctl()
780 sd, pad, set_selection, state, sel); in subdev_do_ioctl()
786 return v4l2_subdev_call(sd, pad, get_edid, edid); in subdev_do_ioctl()
792 return v4l2_subdev_call(sd, pad, set_edid, edid); in subdev_do_ioctl()
798 return v4l2_subdev_call(sd, pad, dv_timings_cap, cap); in subdev_do_ioctl()
804 return v4l2_subdev_call(sd, pad, enum_dv_timings, dvt); in subdev_do_ioctl()
808 return v4l2_subdev_call(sd, video, query_dv_timings, arg); in subdev_do_ioctl()
811 return v4l2_subdev_call(sd, video, g_dv_timings, arg); in subdev_do_ioctl()
817 return v4l2_subdev_call(sd, video, s_dv_timings, arg); in subdev_do_ioctl()
820 return v4l2_subdev_call(sd, video, g_std, arg); in subdev_do_ioctl()
828 return v4l2_subdev_call(sd, video, s_std, *std); in subdev_do_ioctl()
835 if (v4l2_subdev_call(sd, video, g_tvnorms, &id)) in subdev_do_ioctl()
842 return v4l2_subdev_call(sd, video, querystd, arg); in subdev_do_ioctl()
851 if (!(sd->flags & V4L2_SUBDEV_FL_STREAMS)) in subdev_do_ioctl()
881 if (!(sd->flags & V4L2_SUBDEV_FL_STREAMS)) in subdev_do_ioctl()
891 const struct media_pad *pads = sd->entity.pads; in subdev_do_ioctl()
897 if (route->sink_pad >= sd->entity.num_pads) in subdev_do_ioctl()
904 if (route->source_pad >= sd->entity.num_pads) in subdev_do_ioctl()
915 return v4l2_subdev_call(sd, pad, set_routing, state, in subdev_do_ioctl()
947 return v4l2_subdev_call(sd, core, ioctl, cmd, arg); in subdev_do_ioctl()
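subdev_do_ioctl() dispatches every request through v4l2_subdev_call(), which returns -ENODEV when the sub-device pointer is NULL and -ENOIOCTLCMD when the requested op is not implemented. A hedged sketch of the same macro used from bridge code (my_start_streaming and sensor_sd are illustrative names, not from this file):

#include <media/v4l2-subdev.h>

/* Hypothetical bridge-side caller; sensor_sd is an already-bound sub-device. */
static int my_start_streaming(struct v4l2_subdev *sensor_sd)
{
	int ret;

	ret = v4l2_subdev_call(sensor_sd, video, s_stream, 1);
	/* -ENOIOCTLCMD only means the op is not implemented; treat it as OK. */
	if (ret && ret != -ENOIOCTLCMD)
		return ret;

	return 0;
}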
963 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_do_ioctl_lock() local
968 state = subdev_ioctl_get_state(sd, subdev_fh, cmd, arg); in subdev_do_ioctl_lock()
995 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_compat_ioctl32() local
997 return v4l2_subdev_call(sd, core, compat_ioctl32, cmd, arg); in subdev_compat_ioctl32()
1020 struct v4l2_subdev *sd = vdev_to_v4l2_subdev(vdev); in subdev_poll() local
1023 if (!(sd->flags & V4L2_SUBDEV_FL_HAS_EVENTS)) in subdev_poll()
1051 struct v4l2_subdev *sd; in v4l2_subdev_get_fwnode_pad_1_to_1() local
1056 sd = media_entity_to_v4l2_subdev(entity); in v4l2_subdev_get_fwnode_pad_1_to_1()
1061 if (device_match_fwnode(sd->dev, fwnode)) in v4l2_subdev_get_fwnode_pad_1_to_1()
1068 int v4l2_subdev_link_validate_default(struct v4l2_subdev *sd, in v4l2_subdev_link_validate_default() argument
1077 dev_dbg(sd->entity.graph_obj.mdev->dev, in v4l2_subdev_link_validate_default()
1085 dev_dbg(sd->entity.graph_obj.mdev->dev, in v4l2_subdev_link_validate_default()
1093 dev_dbg(sd->entity.graph_obj.mdev->dev, in v4l2_subdev_link_validate_default()
1106 dev_dbg(sd->entity.graph_obj.mdev->dev, in v4l2_subdev_link_validate_default()
1116 dev_dbg(sd->entity.graph_obj.mdev->dev, in v4l2_subdev_link_validate_default()
1131 struct v4l2_subdev *sd; in v4l2_subdev_link_validate_get_format() local
1142 sd = media_entity_to_v4l2_subdev(pad->entity); in v4l2_subdev_link_validate_get_format()
1149 state = v4l2_subdev_get_locked_active_state(sd); in v4l2_subdev_link_validate_get_format()
1151 state = v4l2_subdev_lock_and_get_active_state(sd); in v4l2_subdev_link_validate_get_format()
1153 ret = v4l2_subdev_call(sd, pad, get_fmt, state, fmt); in v4l2_subdev_link_validate_get_format()
1353 struct v4l2_subdev *sd = media_entity_to_v4l2_subdev(entity); in v4l2_subdev_has_pad_interdep() local
1358 state = v4l2_subdev_lock_and_get_active_state(sd); in v4l2_subdev_has_pad_interdep()
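Several helpers above fetch the active state with v4l2_subdev_lock_and_get_active_state(), which must always be balanced by v4l2_subdev_unlock_state(). A hedged sketch of that lock/use/unlock pattern from a driver's point of view (my_read_active_format is hypothetical):

#include <media/v4l2-subdev.h>

static int my_read_active_format(struct v4l2_subdev *sd, u32 pad,
				 struct v4l2_mbus_framefmt *out)
{
	struct v4l2_subdev_state *state;
	struct v4l2_mbus_framefmt *fmt;
	int ret = 0;

	state = v4l2_subdev_lock_and_get_active_state(sd);

	fmt = v4l2_subdev_get_pad_format(sd, state, pad);
	if (fmt)
		*out = *fmt;
	else
		ret = -EINVAL;

	v4l2_subdev_unlock_state(state);

	return ret;
}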
1382 __v4l2_subdev_state_alloc(struct v4l2_subdev *sd, const char *lock_name, in __v4l2_subdev_state_alloc() argument
1393 if (sd->state_lock) in __v4l2_subdev_state_alloc()
1394 state->lock = sd->state_lock; in __v4l2_subdev_state_alloc()
1399 if (!(sd->flags & V4L2_SUBDEV_FL_STREAMS) && sd->entity.num_pads) { in __v4l2_subdev_state_alloc()
1400 state->pads = kvcalloc(sd->entity.num_pads, in __v4l2_subdev_state_alloc()
1413 ret = v4l2_subdev_call(sd, pad, init_cfg, state); in __v4l2_subdev_state_alloc()
1445 int __v4l2_subdev_init_finalize(struct v4l2_subdev *sd, const char *name, in __v4l2_subdev_init_finalize() argument
1450 state = __v4l2_subdev_state_alloc(sd, name, key); in __v4l2_subdev_init_finalize()
1454 sd->active_state = state; in __v4l2_subdev_init_finalize()
1460 void v4l2_subdev_cleanup(struct v4l2_subdev *sd) in v4l2_subdev_cleanup() argument
1464 __v4l2_subdev_state_free(sd->active_state); in v4l2_subdev_cleanup()
1465 sd->active_state = NULL; in v4l2_subdev_cleanup()
1467 if (list_empty(&sd->async_subdev_endpoint_list)) in v4l2_subdev_cleanup()
1470 list_for_each_entry_safe(ase, ase_tmp, &sd->async_subdev_endpoint_list, in v4l2_subdev_cleanup()
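__v4l2_subdev_init_finalize() and v4l2_subdev_cleanup() above back the driver-facing v4l2_subdev_init_finalize() macro. A minimal assumed usage sketch, with the entity pads expected to be initialized before the state is allocated (the function names are hypothetical):

#include <media/v4l2-subdev.h>

/* Hypothetical probe tail: wraps __v4l2_subdev_init_finalize() and
 * allocates sd->active_state. */
static int my_subdev_init_state(struct v4l2_subdev *sd)
{
	return v4l2_subdev_init_finalize(sd);
}

/* Hypothetical remove path: balances the call above and frees the state. */
static void my_subdev_remove_state(struct v4l2_subdev *sd)
{
	v4l2_subdev_cleanup(sd);
}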
1530 int v4l2_subdev_get_fmt(struct v4l2_subdev *sd, struct v4l2_subdev_state *state, in v4l2_subdev_get_fmt() argument
1535 if (sd->flags & V4L2_SUBDEV_FL_STREAMS) in v4l2_subdev_get_fmt()
1538 else if (format->pad < sd->entity.num_pads && format->stream == 0) in v4l2_subdev_get_fmt()
1539 fmt = v4l2_subdev_get_pad_format(sd, state, format->pad); in v4l2_subdev_get_fmt()
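v4l2_subdev_get_fmt() is exported so that drivers using the centrally managed active state can plug it straight into their pad ops instead of writing their own .get_fmt handler. A hedged sketch (my_pad_ops and my_set_fmt are assumed names):

#include <media/v4l2-subdev.h>

static int my_set_fmt(struct v4l2_subdev *sd, struct v4l2_subdev_state *state,
		      struct v4l2_subdev_format *format);

static const struct v4l2_subdev_pad_ops my_pad_ops = {
	.get_fmt = v4l2_subdev_get_fmt,	/* generic: reads the subdev state */
	.set_fmt = my_set_fmt,		/* driver-specific validation */
};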
1552 int v4l2_subdev_set_routing(struct v4l2_subdev *sd, in v4l2_subdev_set_routing() argument
1610 int v4l2_subdev_set_routing_with_fmt(struct v4l2_subdev *sd, in v4l2_subdev_set_routing_with_fmt() argument
1619 ret = v4l2_subdev_set_routing(sd, state, routing); in v4l2_subdev_set_routing_with_fmt()
1770 int v4l2_subdev_routing_validate(struct v4l2_subdev *sd, in v4l2_subdev_routing_validate() argument
1780 remote_pads = kcalloc(sd->entity.num_pads, sizeof(*remote_pads), in v4l2_subdev_routing_validate()
1785 for (i = 0; i < sd->entity.num_pads; ++i) in v4l2_subdev_routing_validate()
1793 if (route->sink_pad >= sd->entity.num_pads || in v4l2_subdev_routing_validate()
1794 !(sd->entity.pads[route->sink_pad].flags & MEDIA_PAD_FL_SINK)) { in v4l2_subdev_routing_validate()
1795 dev_dbg(sd->dev, "route %u sink (%u) is not a sink pad\n", in v4l2_subdev_routing_validate()
1800 if (route->source_pad >= sd->entity.num_pads || in v4l2_subdev_routing_validate()
1801 !(sd->entity.pads[route->source_pad].flags & MEDIA_PAD_FL_SOURCE)) { in v4l2_subdev_routing_validate()
1802 dev_dbg(sd->dev, "route %u source (%u) is not a source pad\n", in v4l2_subdev_routing_validate()
1814 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
1828 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
1842 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
1856 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
1878 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
1892 dev_dbg(sd->dev, in v4l2_subdev_routing_validate()
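v4l2_subdev_routing_validate() is intended to be called from a driver's .set_routing handler before the table is committed. A hedged sketch of that flow, assuming a device restricted to 1-to-1 routes (my_set_routing and the default format are illustrative):

#include <linux/media-bus-format.h>
#include <media/v4l2-subdev.h>

static const struct v4l2_mbus_framefmt my_default_fmt = {
	.width = 1920,
	.height = 1080,
	.code = MEDIA_BUS_FMT_UYVY8_1X16,
	.field = V4L2_FIELD_NONE,
};

static int my_set_routing(struct v4l2_subdev *sd,
			  struct v4l2_subdev_state *state,
			  enum v4l2_subdev_format_whence which,
			  struct v4l2_subdev_krouting *routing)
{
	int ret;

	ret = v4l2_subdev_routing_validate(sd, routing,
					   V4L2_SUBDEV_ROUTING_ONLY_1_TO_1);
	if (ret)
		return ret;

	/* Store the routes and reset every stream format to the default. */
	return v4l2_subdev_set_routing_with_fmt(sd, state, routing,
						&my_default_fmt);
}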
1909 static int v4l2_subdev_enable_streams_fallback(struct v4l2_subdev *sd, u32 pad, in v4l2_subdev_enable_streams_fallback() argument
1912 struct device *dev = sd->entity.graph_obj.mdev->dev; in v4l2_subdev_enable_streams_fallback()
1919 * have a single source pad, as sd->enabled_streams is global to the in v4l2_subdev_enable_streams_fallback()
1922 if (!(sd->entity.pads[pad].flags & MEDIA_PAD_FL_SOURCE)) in v4l2_subdev_enable_streams_fallback()
1925 for (i = 0; i < sd->entity.num_pads; ++i) { in v4l2_subdev_enable_streams_fallback()
1926 if (i != pad && sd->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) in v4l2_subdev_enable_streams_fallback()
1930 if (sd->enabled_streams & streams_mask) { in v4l2_subdev_enable_streams_fallback()
1932 streams_mask, sd->entity.name, pad); in v4l2_subdev_enable_streams_fallback()
1937 if (!sd->enabled_streams) { in v4l2_subdev_enable_streams_fallback()
1938 ret = v4l2_subdev_call(sd, video, s_stream, 1); in v4l2_subdev_enable_streams_fallback()
1943 sd->enabled_streams |= streams_mask; in v4l2_subdev_enable_streams_fallback()
1948 int v4l2_subdev_enable_streams(struct v4l2_subdev *sd, u32 pad, in v4l2_subdev_enable_streams() argument
1951 struct device *dev = sd->entity.graph_obj.mdev->dev; in v4l2_subdev_enable_streams()
1958 if (pad >= sd->entity.num_pads) in v4l2_subdev_enable_streams()
1965 if (!sd->ops->pad || !sd->ops->pad->enable_streams) in v4l2_subdev_enable_streams()
1966 return v4l2_subdev_enable_streams_fallback(sd, pad, in v4l2_subdev_enable_streams()
1969 state = v4l2_subdev_lock_and_get_active_state(sd); in v4l2_subdev_enable_streams()
1986 cfg->stream, sd->entity.name, pad); in v4l2_subdev_enable_streams()
1994 streams_mask & ~found_streams, sd->entity.name, pad); in v4l2_subdev_enable_streams()
2002 ret = v4l2_subdev_call(sd, pad, enable_streams, state, pad, in v4l2_subdev_enable_streams()
2026 static int v4l2_subdev_disable_streams_fallback(struct v4l2_subdev *sd, u32 pad, in v4l2_subdev_disable_streams_fallback() argument
2029 struct device *dev = sd->entity.graph_obj.mdev->dev; in v4l2_subdev_disable_streams_fallback()
2036 * have a single source pad, as sd->enabled_streams is global to the in v4l2_subdev_disable_streams_fallback()
2039 if (!(sd->entity.pads[pad].flags & MEDIA_PAD_FL_SOURCE)) in v4l2_subdev_disable_streams_fallback()
2042 for (i = 0; i < sd->entity.num_pads; ++i) { in v4l2_subdev_disable_streams_fallback()
2043 if (i != pad && sd->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) in v4l2_subdev_disable_streams_fallback()
2047 if ((sd->enabled_streams & streams_mask) != streams_mask) { in v4l2_subdev_disable_streams_fallback()
2049 streams_mask, sd->entity.name, pad); in v4l2_subdev_disable_streams_fallback()
2054 if (!(sd->enabled_streams & ~streams_mask)) { in v4l2_subdev_disable_streams_fallback()
2055 ret = v4l2_subdev_call(sd, video, s_stream, 0); in v4l2_subdev_disable_streams_fallback()
2060 sd->enabled_streams &= ~streams_mask; in v4l2_subdev_disable_streams_fallback()
2065 int v4l2_subdev_disable_streams(struct v4l2_subdev *sd, u32 pad, in v4l2_subdev_disable_streams() argument
2068 struct device *dev = sd->entity.graph_obj.mdev->dev; in v4l2_subdev_disable_streams()
2075 if (pad >= sd->entity.num_pads) in v4l2_subdev_disable_streams()
2082 if (!sd->ops->pad || !sd->ops->pad->disable_streams) in v4l2_subdev_disable_streams()
2083 return v4l2_subdev_disable_streams_fallback(sd, pad, in v4l2_subdev_disable_streams()
2086 state = v4l2_subdev_lock_and_get_active_state(sd); in v4l2_subdev_disable_streams()
2103 cfg->stream, sd->entity.name, pad); in v4l2_subdev_disable_streams()
2111 streams_mask & ~found_streams, sd->entity.name, pad); in v4l2_subdev_disable_streams()
2119 ret = v4l2_subdev_call(sd, pad, disable_streams, state, pad, in v4l2_subdev_disable_streams()
2143 int v4l2_subdev_s_stream_helper(struct v4l2_subdev *sd, int enable) in v4l2_subdev_s_stream_helper() argument
2156 media_entity_for_each_pad(&sd->entity, pad) { in v4l2_subdev_s_stream_helper()
2169 state = v4l2_subdev_lock_and_get_active_state(sd); in v4l2_subdev_s_stream_helper()
2177 return v4l2_subdev_enable_streams(sd, pad_index, source_mask); in v4l2_subdev_s_stream_helper()
2179 return v4l2_subdev_disable_streams(sd, pad_index, source_mask); in v4l2_subdev_s_stream_helper()
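v4l2_subdev_s_stream_helper() lets a streams-aware sub-device that only implements .enable_streams/.disable_streams keep servicing the legacy .s_stream video op. A hedged sketch of how the ops tables would be wired up (the my_* names are assumptions):

#include <media/v4l2-subdev.h>

static int my_enable_streams(struct v4l2_subdev *sd,
			     struct v4l2_subdev_state *state,
			     u32 pad, u64 streams_mask);
static int my_disable_streams(struct v4l2_subdev *sd,
			      struct v4l2_subdev_state *state,
			      u32 pad, u64 streams_mask);

static const struct v4l2_subdev_pad_ops my_streams_pad_ops = {
	.enable_streams	 = my_enable_streams,
	.disable_streams = my_disable_streams,
};

static const struct v4l2_subdev_video_ops my_streams_video_ops = {
	/*
	 * The helper derives the source-pad streams from the active routing
	 * and forwards to v4l2_subdev_enable_streams() or
	 * v4l2_subdev_disable_streams().
	 */
	.s_stream = v4l2_subdev_s_stream_helper,
};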
2187 void v4l2_subdev_init(struct v4l2_subdev *sd, const struct v4l2_subdev_ops *ops) in v4l2_subdev_init() argument
2189 INIT_LIST_HEAD(&sd->list); in v4l2_subdev_init()
2191 sd->ops = ops; in v4l2_subdev_init()
2192 sd->v4l2_dev = NULL; in v4l2_subdev_init()
2193 sd->flags = 0; in v4l2_subdev_init()
2194 sd->name[0] = '\0'; in v4l2_subdev_init()
2195 sd->grp_id = 0; in v4l2_subdev_init()
2196 sd->dev_priv = NULL; in v4l2_subdev_init()
2197 sd->host_priv = NULL; in v4l2_subdev_init()
2198 sd->privacy_led = NULL; in v4l2_subdev_init()
2199 INIT_LIST_HEAD(&sd->async_subdev_endpoint_list); in v4l2_subdev_init()
2201 sd->entity.name = sd->name; in v4l2_subdev_init()
2202 sd->entity.obj_type = MEDIA_ENTITY_TYPE_V4L2_SUBDEV; in v4l2_subdev_init()
2203 sd->entity.function = MEDIA_ENT_F_V4L2_SUBDEV_UNKNOWN; in v4l2_subdev_init()
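v4l2_subdev_init() only fills in the embedded structure; registration with a v4l2_device and the media entity pad setup happen separately. A minimal assumed probe-time sketch (my_subdev_ops and my_subdev_setup are illustrative):

#include <linux/device.h>
#include <media/v4l2-subdev.h>

static const struct v4l2_subdev_ops my_subdev_ops = {
	/* .core/.video/.pad op tables would be filled in here. */
};

static void my_subdev_setup(struct v4l2_subdev *sd, struct device *dev)
{
	v4l2_subdev_init(sd, &my_subdev_ops);
	sd->dev = dev;
	sd->flags |= V4L2_SUBDEV_FL_HAS_DEVNODE;
	snprintf(sd->name, sizeof(sd->name), "my-subdev %s", dev_name(dev));
}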
2208 void v4l2_subdev_notify_event(struct v4l2_subdev *sd, in v4l2_subdev_notify_event() argument
2211 v4l2_event_queue(sd->devnode, ev); in v4l2_subdev_notify_event()
2212 v4l2_subdev_notify(sd, V4L2_DEVICE_NOTIFY_EVENT, (void *)ev); in v4l2_subdev_notify_event()
2216 int v4l2_subdev_get_privacy_led(struct v4l2_subdev *sd) in v4l2_subdev_get_privacy_led() argument
2219 sd->privacy_led = led_get(sd->dev, "privacy-led"); in v4l2_subdev_get_privacy_led()
2220 if (IS_ERR(sd->privacy_led) && PTR_ERR(sd->privacy_led) != -ENOENT) in v4l2_subdev_get_privacy_led()
2221 return dev_err_probe(sd->dev, PTR_ERR(sd->privacy_led), in v4l2_subdev_get_privacy_led()
2224 if (!IS_ERR_OR_NULL(sd->privacy_led)) { in v4l2_subdev_get_privacy_led()
2225 mutex_lock(&sd->privacy_led->led_access); in v4l2_subdev_get_privacy_led()
2226 led_sysfs_disable(sd->privacy_led); in v4l2_subdev_get_privacy_led()
2227 led_trigger_remove(sd->privacy_led); in v4l2_subdev_get_privacy_led()
2228 led_set_brightness(sd->privacy_led, 0); in v4l2_subdev_get_privacy_led()
2229 mutex_unlock(&sd->privacy_led->led_access); in v4l2_subdev_get_privacy_led()
2236 void v4l2_subdev_put_privacy_led(struct v4l2_subdev *sd) in v4l2_subdev_put_privacy_led() argument
2239 if (!IS_ERR_OR_NULL(sd->privacy_led)) { in v4l2_subdev_put_privacy_led()
2240 mutex_lock(&sd->privacy_led->led_access); in v4l2_subdev_put_privacy_led()
2241 led_sysfs_enable(sd->privacy_led); in v4l2_subdev_put_privacy_led()
2242 mutex_unlock(&sd->privacy_led->led_access); in v4l2_subdev_put_privacy_led()
2243 led_put(sd->privacy_led); in v4l2_subdev_put_privacy_led()
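v4l2_subdev_get_privacy_led() and v4l2_subdev_put_privacy_led() are typically paired across a sensor driver's probe and remove paths; call_s_stream() above then drives the LED automatically around streaming. A hedged sketch (the probe/remove split is an assumption):

#include <media/v4l2-subdev.h>

static int my_sensor_acquire_privacy_led(struct v4l2_subdev *sd)
{
	/* A missing "privacy-led" (-ENOENT) is tolerated, not an error. */
	return v4l2_subdev_get_privacy_led(sd);
}

static void my_sensor_release_privacy_led(struct v4l2_subdev *sd)
{
	v4l2_subdev_put_privacy_led(sd);
}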