path: root/drivers/media/test-drivers/vivid
author:    Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 18:49:45 +0000
committer: Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 18:49:45 +0000
commit:    2c3c1048746a4622d8c89a29670120dc8fab93c4 (patch)
tree:      848558de17fb3008cdf4d861b01ac7781903ce39 /drivers/media/test-drivers/vivid
parent:    Initial commit. (diff)
download:  linux-2c3c1048746a4622d8c89a29670120dc8fab93c4.tar.xz
           linux-2c3c1048746a4622d8c89a29670120dc8fab93c4.zip
Adding upstream version 6.1.76. (refs: upstream/6.1.76, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'drivers/media/test-drivers/vivid')
-rw-r--r--  drivers/media/test-drivers/vivid/Kconfig | 43
-rw-r--r--  drivers/media/test-drivers/vivid/Makefile | 12
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-cec.c | 325
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-cec.h | 13
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-core.c | 2183
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-core.h | 618
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-ctrls.c | 2002
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-ctrls.h | 22
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-cap.c | 1015
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-cap.h | 14
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-out.c | 357
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-out.h | 14
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-touch.c | 191
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-kthread-touch.h | 13
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-meta-cap.c | 201
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-meta-cap.h | 29
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-meta-out.c | 175
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-meta-out.h | 25
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-osd.c | 388
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-osd.h | 15
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-common.c | 177
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-common.h | 28
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-rx.c | 278
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-rx.h | 19
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-tx.c | 128
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-radio-tx.h | 17
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-rds-gen.c | 157
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-rds-gen.h | 42
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-sdr-cap.c | 574
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-sdr-cap.h | 24
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-touch-cap.c | 341
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-touch-cap.h | 39
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-cap.c | 361
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-cap.h | 28
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-gen.c | 311
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-gen.h | 21
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-out.c | 250
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vbi-out.h | 22
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-cap.c | 1968
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-cap.h | 59
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-common.c | 1075
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-common.h | 38
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-out.c | 1206
-rw-r--r--  drivers/media/test-drivers/vivid/vivid-vid-out.h | 44
44 files changed, 14862 insertions, 0 deletions
diff --git a/drivers/media/test-drivers/vivid/Kconfig b/drivers/media/test-drivers/vivid/Kconfig
new file mode 100644
index 000000000..318799d31
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/Kconfig
@@ -0,0 +1,43 @@
+# SPDX-License-Identifier: GPL-2.0-only
+config VIDEO_VIVID
+ tristate "Virtual Video Test Driver"
+ depends on VIDEO_DEV && !SPARC32 && !SPARC64 && FB
+ depends on HAS_DMA
+ select FONT_SUPPORT
+ select FONT_8x16
+ select FB_CFB_FILLRECT
+ select FB_CFB_COPYAREA
+ select FB_CFB_IMAGEBLIT
+ select VIDEOBUF2_VMALLOC
+ select VIDEOBUF2_DMA_CONTIG
+ select VIDEO_V4L2_TPG
+ select MEDIA_CONTROLLER
+ select MEDIA_CONTROLLER_REQUEST_API
+ help
+ Enables a virtual video driver. This driver emulates webcam, TV,
+ S-Video and HDMI capture hardware, including VBI support for the
+ SDTV inputs. Video output, VBI output, radio receivers, radio
+ transmitters and software defined radio capture are also emulated.
+
+ It is highly configurable and is ideal for testing applications.
+ Error injection is supported to test rare errors that are hard
+ to reproduce in real hardware.
+
+ Say Y here if you want to test video apps or debug V4L devices.
+ When in doubt, say N.
+
+config VIDEO_VIVID_CEC
+ bool "Enable CEC emulation support"
+ depends on VIDEO_VIVID
+ select CEC_CORE
+ help
+ When selected the vivid module will emulate the optional
+ HDMI CEC feature.
+
+config VIDEO_VIVID_MAX_DEVS
+ int "Maximum number of devices"
+ depends on VIDEO_VIVID
+ default "64"
+ help
+ This allows you to specify the maximum number of devices supported
+ by the vivid driver.
diff --git a/drivers/media/test-drivers/vivid/Makefile b/drivers/media/test-drivers/vivid/Makefile
new file mode 100644
index 000000000..b12ad0152
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/Makefile
@@ -0,0 +1,12 @@
+# SPDX-License-Identifier: GPL-2.0
+vivid-objs := vivid-core.o vivid-ctrls.o vivid-vid-common.o vivid-vbi-gen.o \
+ vivid-vid-cap.o vivid-vid-out.o vivid-kthread-cap.o vivid-kthread-out.o \
+ vivid-radio-rx.o vivid-radio-tx.o vivid-radio-common.o \
+ vivid-rds-gen.o vivid-sdr-cap.o vivid-vbi-cap.o vivid-vbi-out.o \
+ vivid-osd.o vivid-meta-cap.o vivid-meta-out.o \
+ vivid-kthread-touch.o vivid-touch-cap.o
+ifeq ($(CONFIG_VIDEO_VIVID_CEC),y)
+ vivid-objs += vivid-cec.o
+endif
+
+obj-$(CONFIG_VIDEO_VIVID) += vivid.o
diff --git a/drivers/media/test-drivers/vivid/vivid-cec.c b/drivers/media/test-drivers/vivid/vivid-cec.c
new file mode 100644
index 000000000..1f7469ff0
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-cec.c
@@ -0,0 +1,325 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-cec.c - A Virtual Video Test Driver, cec emulation
+ *
+ * Copyright 2016 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/delay.h>
+#include <media/cec.h>
+
+#include "vivid-core.h"
+#include "vivid-cec.h"
+
+#define CEC_START_BIT_US 4500
+#define CEC_DATA_BIT_US 2400
+#define CEC_MARGIN_US 350
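+
+/*
+ * For illustration, with the constants above a fully acknowledged 16-byte
+ * CEC message occupies the emulated bus for 4500 + 16 * 10 * 2400 =
+ * 388500 us, while a NACKed transfer stops after the start bit plus a
+ * single 10-bit header block: 4500 + 10 * 2400 = 28500 us.
+ */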
+
+struct xfer_on_bus {
+ struct cec_adapter *adap;
+ u8 status;
+};
+
+static bool find_dest_adap(struct vivid_dev *dev,
+ struct cec_adapter *adap, u8 dest)
+{
+ unsigned int i;
+
+ if (dest >= 0xf)
+ return false;
+
+ if (adap != dev->cec_rx_adap && dev->cec_rx_adap &&
+ dev->cec_rx_adap->is_configured &&
+ cec_has_log_addr(dev->cec_rx_adap, dest))
+ return true;
+
+ for (i = 0; i < MAX_OUTPUTS && dev->cec_tx_adap[i]; i++) {
+ if (adap == dev->cec_tx_adap[i])
+ continue;
+ if (!dev->cec_tx_adap[i]->is_configured)
+ continue;
+ if (cec_has_log_addr(dev->cec_tx_adap[i], dest))
+ return true;
+ }
+ return false;
+}
+
+static bool xfer_ready(struct vivid_dev *dev)
+{
+ unsigned int i;
+ bool ready = false;
+
+ spin_lock(&dev->cec_xfers_slock);
+ for (i = 0; i < ARRAY_SIZE(dev->xfers); i++) {
+ if (dev->xfers[i].sft &&
+ dev->xfers[i].sft <= dev->cec_sft) {
+ ready = true;
+ break;
+ }
+ }
+ spin_unlock(&dev->cec_xfers_slock);
+
+ return ready;
+}
+
+/*
+ * If an adapter tries to send successive messages, it must wait for the
+ * longest signal-free time between its transmissions. But, if another
+ * adapter sends a message in the interim, then the wait can be reduced
+ * because the messages are no longer successive. Make these adjustments
+ * if necessary. Should be called holding cec_xfers_slock.
+ */
+static void adjust_sfts(struct vivid_dev *dev)
+{
+ unsigned int i;
+ u8 initiator;
+
+ for (i = 0; i < ARRAY_SIZE(dev->xfers); i++) {
+ if (dev->xfers[i].sft <= CEC_SIGNAL_FREE_TIME_RETRY)
+ continue;
+ initiator = dev->xfers[i].msg[0] >> 4;
+ if (initiator == dev->last_initiator)
+ dev->xfers[i].sft = CEC_SIGNAL_FREE_TIME_NEXT_XFER;
+ else
+ dev->xfers[i].sft = CEC_SIGNAL_FREE_TIME_NEW_INITIATOR;
+ }
+}
+
+/*
+ * The main emulation of the bus on which CEC adapters attempt to send
+ * messages to each other. The bus keeps track of how long it has been
+ * signal-free and accepts a pending transmission only if the state of
+ * the bus matches the transmission's signal-free requirements. It calls
+ * cec_transmit_attempt_done() for all transmits that enter the bus and
+ * cec_received_msg() for successful transmits.
+ */
+int vivid_cec_bus_thread(void *_dev)
+{
+ u32 last_sft;
+ unsigned int i;
+ unsigned int dest;
+ ktime_t start, end;
+ s64 delta_us, retry_us;
+ struct vivid_dev *dev = _dev;
+
+ dev->cec_sft = CEC_SIGNAL_FREE_TIME_NEXT_XFER;
+ for (;;) {
+ bool first = true;
+ int wait_xfer_us = 0;
+ bool valid_dest = false;
+ int wait_arb_lost_us = 0;
+ unsigned int first_idx = 0;
+ unsigned int first_status = 0;
+ struct cec_msg first_msg = {};
+ struct xfer_on_bus xfers_on_bus[MAX_OUTPUTS] = {};
+
+ wait_event_interruptible(dev->kthread_waitq_cec, xfer_ready(dev) ||
+ kthread_should_stop());
+ if (kthread_should_stop())
+ break;
+ last_sft = dev->cec_sft;
+ dev->cec_sft = 0;
+ /*
+ * Move the messages that are ready onto the bus. The adapter with
+ * the most leading zeros will win control of the bus and any other
+ * adapters will lose arbitration.
+ */
+ spin_lock(&dev->cec_xfers_slock);
+ for (i = 0; i < ARRAY_SIZE(dev->xfers); i++) {
+ if (!dev->xfers[i].sft || dev->xfers[i].sft > last_sft)
+ continue;
+ if (first) {
+ first = false;
+ first_idx = i;
+ xfers_on_bus[first_idx].adap = dev->xfers[i].adap;
+ memcpy(first_msg.msg, dev->xfers[i].msg, dev->xfers[i].len);
+ first_msg.len = dev->xfers[i].len;
+ } else {
+ xfers_on_bus[i].adap = dev->xfers[i].adap;
+ xfers_on_bus[i].status = CEC_TX_STATUS_ARB_LOST;
+ /*
+ * For simplicity wait for all 4 bits of the initiator's
+ * address even though HDMI specification uses bit-level
+ * precision.
+ */
+ wait_arb_lost_us = 4 * CEC_DATA_BIT_US + CEC_START_BIT_US;
+ }
+ dev->xfers[i].sft = 0;
+ }
+ dev->last_initiator = cec_msg_initiator(&first_msg);
+ adjust_sfts(dev);
+ spin_unlock(&dev->cec_xfers_slock);
+
+ dest = cec_msg_destination(&first_msg);
+ valid_dest = cec_msg_is_broadcast(&first_msg);
+ if (!valid_dest)
+ valid_dest = find_dest_adap(dev, xfers_on_bus[first_idx].adap, dest);
+ if (valid_dest) {
+ first_status = CEC_TX_STATUS_OK;
+ /*
+ * Message length is in bytes, but each byte is transmitted in
+ * a block of 10 bits.
+ */
+ wait_xfer_us = first_msg.len * 10 * CEC_DATA_BIT_US;
+ } else {
+ first_status = CEC_TX_STATUS_NACK;
+ /*
+ * A message that is not acknowledged stops transmitting after
+ * the header block of 10 bits.
+ */
+ wait_xfer_us = 10 * CEC_DATA_BIT_US;
+ }
+ wait_xfer_us += CEC_START_BIT_US;
+ xfers_on_bus[first_idx].status = first_status;
+
+ /* Sleep as if sending messages on a real hardware bus. */
+ start = ktime_get();
+ if (wait_arb_lost_us) {
+ usleep_range(wait_arb_lost_us - CEC_MARGIN_US, wait_arb_lost_us);
+ for (i = 0; i < ARRAY_SIZE(xfers_on_bus); i++) {
+ if (xfers_on_bus[i].status != CEC_TX_STATUS_ARB_LOST)
+ continue;
+ cec_transmit_attempt_done(xfers_on_bus[i].adap,
+ CEC_TX_STATUS_ARB_LOST);
+ }
+ if (kthread_should_stop())
+ break;
+ }
+ wait_xfer_us -= wait_arb_lost_us;
+ usleep_range(wait_xfer_us - CEC_MARGIN_US, wait_xfer_us);
+ cec_transmit_attempt_done(xfers_on_bus[first_idx].adap, first_status);
+ if (kthread_should_stop())
+ break;
+ if (first_status == CEC_TX_STATUS_OK) {
+ if (xfers_on_bus[first_idx].adap != dev->cec_rx_adap)
+ cec_received_msg(dev->cec_rx_adap, &first_msg);
+ for (i = 0; i < MAX_OUTPUTS && dev->cec_tx_adap[i]; i++)
+ if (xfers_on_bus[first_idx].adap != dev->cec_tx_adap[i])
+ cec_received_msg(dev->cec_tx_adap[i], &first_msg);
+ }
+ end = ktime_get();
+ /*
+ * If the emulated transfer took more or less time than it should
+ * have, then compensate by adjusting the wait time needed for the
+ * bus to be signal-free for 3 bit periods (the retry time).
+ */
+ delta_us = div_s64(end - start, 1000);
+ delta_us -= wait_xfer_us + wait_arb_lost_us;
+ retry_us = CEC_SIGNAL_FREE_TIME_RETRY * CEC_DATA_BIT_US - delta_us;
+ if (retry_us > CEC_MARGIN_US)
+ usleep_range(retry_us - CEC_MARGIN_US, retry_us);
+ dev->cec_sft = CEC_SIGNAL_FREE_TIME_RETRY;
+ /*
+ * If there are no messages that need to be retried, check if any
+ * adapters that did not just transmit a message are ready to
+ * transmit. If none of these adapters are ready, then increase
+ * the signal-free time so that the bus is available to all
+ * adapters and go back to waiting for a transmission.
+ */
+ while (dev->cec_sft >= CEC_SIGNAL_FREE_TIME_RETRY &&
+ dev->cec_sft < CEC_SIGNAL_FREE_TIME_NEXT_XFER &&
+ !xfer_ready(dev) && !kthread_should_stop()) {
+ usleep_range(2 * CEC_DATA_BIT_US - CEC_MARGIN_US,
+ 2 * CEC_DATA_BIT_US);
+ dev->cec_sft += 2;
+ }
+ }
+ return 0;
+}
+
+static int vivid_cec_adap_enable(struct cec_adapter *adap, bool enable)
+{
+ adap->cec_pin_is_high = true;
+ return 0;
+}
+
+static int vivid_cec_adap_log_addr(struct cec_adapter *adap, u8 log_addr)
+{
+ return 0;
+}
+
+static int vivid_cec_adap_transmit(struct cec_adapter *adap, u8 attempts,
+ u32 signal_free_time, struct cec_msg *msg)
+{
+ struct vivid_dev *dev = cec_get_drvdata(adap);
+ u8 idx = cec_msg_initiator(msg);
+
+ spin_lock(&dev->cec_xfers_slock);
+ dev->xfers[idx].adap = adap;
+ memcpy(dev->xfers[idx].msg, msg->msg, CEC_MAX_MSG_SIZE);
+ dev->xfers[idx].len = msg->len;
+ dev->xfers[idx].sft = CEC_SIGNAL_FREE_TIME_RETRY;
+ if (signal_free_time > CEC_SIGNAL_FREE_TIME_RETRY) {
+ if (idx == dev->last_initiator)
+ dev->xfers[idx].sft = CEC_SIGNAL_FREE_TIME_NEXT_XFER;
+ else
+ dev->xfers[idx].sft = CEC_SIGNAL_FREE_TIME_NEW_INITIATOR;
+ }
+ spin_unlock(&dev->cec_xfers_slock);
+ wake_up_interruptible(&dev->kthread_waitq_cec);
+
+ return 0;
+}
+
+static int vivid_received(struct cec_adapter *adap, struct cec_msg *msg)
+{
+ struct vivid_dev *dev = cec_get_drvdata(adap);
+ struct cec_msg reply;
+ u8 dest = cec_msg_destination(msg);
+ u8 disp_ctl;
+ char osd[14];
+
+ if (cec_msg_is_broadcast(msg))
+ dest = adap->log_addrs.log_addr[0];
+ cec_msg_init(&reply, dest, cec_msg_initiator(msg));
+
+ switch (cec_msg_opcode(msg)) {
+ case CEC_MSG_SET_OSD_STRING:
+ if (!cec_is_sink(adap))
+ return -ENOMSG;
+ cec_ops_set_osd_string(msg, &disp_ctl, osd);
+ switch (disp_ctl) {
+ case CEC_OP_DISP_CTL_DEFAULT:
+ strscpy(dev->osd, osd, sizeof(dev->osd));
+ dev->osd_jiffies = jiffies;
+ break;
+ case CEC_OP_DISP_CTL_UNTIL_CLEARED:
+ strscpy(dev->osd, osd, sizeof(dev->osd));
+ dev->osd_jiffies = 0;
+ break;
+ case CEC_OP_DISP_CTL_CLEAR:
+ dev->osd[0] = 0;
+ dev->osd_jiffies = 0;
+ break;
+ default:
+ cec_msg_feature_abort(&reply, cec_msg_opcode(msg),
+ CEC_OP_ABORT_INVALID_OP);
+ cec_transmit_msg(adap, &reply, false);
+ break;
+ }
+ break;
+ default:
+ return -ENOMSG;
+ }
+ return 0;
+}
+
+static const struct cec_adap_ops vivid_cec_adap_ops = {
+ .adap_enable = vivid_cec_adap_enable,
+ .adap_log_addr = vivid_cec_adap_log_addr,
+ .adap_transmit = vivid_cec_adap_transmit,
+ .received = vivid_received,
+};
+
+struct cec_adapter *vivid_cec_alloc_adap(struct vivid_dev *dev,
+ unsigned int idx,
+ bool is_source)
+{
+ u32 caps = CEC_CAP_DEFAULTS | CEC_CAP_MONITOR_ALL | CEC_CAP_MONITOR_PIN;
+ char name[32];
+
+ snprintf(name, sizeof(name), "vivid-%03d-vid-%s%d",
+ dev->inst, is_source ? "out" : "cap", idx);
+ return cec_allocate_adapter(&vivid_cec_adap_ops, dev,
+ name, caps, CEC_MAX_LOG_ADDRS);
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-cec.h b/drivers/media/test-drivers/vivid/vivid-cec.h
new file mode 100644
index 000000000..b2bcddb50
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-cec.h
@@ -0,0 +1,13 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-cec.h - A Virtual Video Test Driver, cec emulation
+ *
+ * Copyright 2016 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifdef CONFIG_VIDEO_VIVID_CEC
+struct cec_adapter *vivid_cec_alloc_adap(struct vivid_dev *dev,
+ unsigned int idx,
+ bool is_source);
+int vivid_cec_bus_thread(void *_dev);
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-core.c b/drivers/media/test-drivers/vivid/vivid-core.c
new file mode 100644
index 000000000..f28440e6c
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-core.c
@@ -0,0 +1,2183 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-core.c - A Virtual Video Test Driver, core initialization
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/init.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+#include <linux/vmalloc.h>
+#include <linux/font.h>
+#include <linux/mutex.h>
+#include <linux/platform_device.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/videobuf2-vmalloc.h>
+#include <media/videobuf2-dma-contig.h>
+#include <media/v4l2-dv-timings.h>
+#include <media/v4l2-ioctl.h>
+#include <media/v4l2-fh.h>
+#include <media/v4l2-event.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+#include "vivid-vid-cap.h"
+#include "vivid-vid-out.h"
+#include "vivid-radio-common.h"
+#include "vivid-radio-rx.h"
+#include "vivid-radio-tx.h"
+#include "vivid-sdr-cap.h"
+#include "vivid-vbi-cap.h"
+#include "vivid-vbi-out.h"
+#include "vivid-osd.h"
+#include "vivid-cec.h"
+#include "vivid-ctrls.h"
+#include "vivid-meta-cap.h"
+#include "vivid-meta-out.h"
+#include "vivid-touch-cap.h"
+
+#define VIVID_MODULE_NAME "vivid"
+
+/* The maximum number of vivid devices */
+#define VIVID_MAX_DEVS CONFIG_VIDEO_VIVID_MAX_DEVS
+
+MODULE_DESCRIPTION("Virtual Video Test Driver");
+MODULE_AUTHOR("Hans Verkuil");
+MODULE_LICENSE("GPL");
+
+static unsigned n_devs = 1;
+module_param(n_devs, uint, 0444);
+MODULE_PARM_DESC(n_devs, " number of driver instances to create");
+
+static int vid_cap_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(vid_cap_nr, int, NULL, 0444);
+MODULE_PARM_DESC(vid_cap_nr, " videoX start number, -1 is autodetect");
+
+static int vid_out_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(vid_out_nr, int, NULL, 0444);
+MODULE_PARM_DESC(vid_out_nr, " videoX start number, -1 is autodetect");
+
+static int vbi_cap_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(vbi_cap_nr, int, NULL, 0444);
+MODULE_PARM_DESC(vbi_cap_nr, " vbiX start number, -1 is autodetect");
+
+static int vbi_out_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(vbi_out_nr, int, NULL, 0444);
+MODULE_PARM_DESC(vbi_out_nr, " vbiX start number, -1 is autodetect");
+
+static int sdr_cap_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(sdr_cap_nr, int, NULL, 0444);
+MODULE_PARM_DESC(sdr_cap_nr, " swradioX start number, -1 is autodetect");
+
+static int radio_rx_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(radio_rx_nr, int, NULL, 0444);
+MODULE_PARM_DESC(radio_rx_nr, " radioX start number, -1 is autodetect");
+
+static int radio_tx_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(radio_tx_nr, int, NULL, 0444);
+MODULE_PARM_DESC(radio_tx_nr, " radioX start number, -1 is autodetect");
+
+static int meta_cap_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(meta_cap_nr, int, NULL, 0444);
+MODULE_PARM_DESC(meta_cap_nr, " videoX start number, -1 is autodetect");
+
+static int meta_out_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(meta_out_nr, int, NULL, 0444);
+MODULE_PARM_DESC(meta_out_nr, " videoX start number, -1 is autodetect");
+
+static int touch_cap_nr[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(touch_cap_nr, int, NULL, 0444);
+MODULE_PARM_DESC(touch_cap_nr, " v4l-touchX start number, -1 is autodetect");
+
+static int ccs_cap_mode[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(ccs_cap_mode, int, NULL, 0444);
+MODULE_PARM_DESC(ccs_cap_mode, " capture crop/compose/scale mode:\n"
+ "\t\t bit 0=crop, 1=compose, 2=scale,\n"
+ "\t\t -1=user-controlled (default)");
+
+static int ccs_out_mode[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = -1 };
+module_param_array(ccs_out_mode, int, NULL, 0444);
+MODULE_PARM_DESC(ccs_out_mode, " output crop/compose/scale mode:\n"
+ "\t\t bit 0=crop, 1=compose, 2=scale,\n"
+ "\t\t -1=user-controlled (default)");
+
+static unsigned multiplanar[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 1 };
+module_param_array(multiplanar, uint, NULL, 0444);
+MODULE_PARM_DESC(multiplanar, " 1 (default) creates a single planar device, 2 creates a multiplanar device.");
+
+/*
+ * Default: video + vbi-cap (raw and sliced) + radio rx + radio tx + sdr +
+ * vbi-out + vid-out + meta-cap + meta-out + touch-cap
+ */
+static unsigned int node_types[VIVID_MAX_DEVS] = {
+ [0 ... (VIVID_MAX_DEVS - 1)] = 0xe1d3d
+};
+module_param_array(node_types, uint, NULL, 0444);
+MODULE_PARM_DESC(node_types, " node types, default is 0xe1d3d. Bitmask with the following meaning:\n"
+ "\t\t bit 0: Video Capture node\n"
+ "\t\t bit 2-3: VBI Capture node: 0 = none, 1 = raw vbi, 2 = sliced vbi, 3 = both\n"
+ "\t\t bit 4: Radio Receiver node\n"
+ "\t\t bit 5: Software Defined Radio Receiver node\n"
+ "\t\t bit 8: Video Output node\n"
+ "\t\t bit 10-11: VBI Output node: 0 = none, 1 = raw vbi, 2 = sliced vbi, 3 = both\n"
+ "\t\t bit 12: Radio Transmitter node\n"
+ "\t\t bit 16: Framebuffer for testing overlays\n"
+ "\t\t bit 17: Metadata Capture node\n"
+ "\t\t bit 18: Metadata Output node\n"
+ "\t\t bit 19: Touch Capture node\n");
+
+/* Default: 4 inputs */
+static unsigned num_inputs[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 4 };
+module_param_array(num_inputs, uint, NULL, 0444);
+MODULE_PARM_DESC(num_inputs, " number of inputs, default is 4");
+
+/* Default: input 0 = WEBCAM, 1 = TV, 2 = SVID, 3 = HDMI */
+static unsigned input_types[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 0xe4 };
+module_param_array(input_types, uint, NULL, 0444);
+MODULE_PARM_DESC(input_types, " input types, default is 0xe4. Two bits per input,\n"
+ "\t\t bits 0-1 == input 0, bits 31-30 == input 15.\n"
+ "\t\t Type 0 == webcam, 1 == TV, 2 == S-Video, 3 == HDMI");
+
+/* Default: 2 outputs */
+static unsigned num_outputs[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 2 };
+module_param_array(num_outputs, uint, NULL, 0444);
+MODULE_PARM_DESC(num_outputs, " number of outputs, default is 2");
+
+/* Default: output 0 = SVID, 1 = HDMI */
+static unsigned output_types[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 2 };
+module_param_array(output_types, uint, NULL, 0444);
+MODULE_PARM_DESC(output_types, " output types, default is 0x02. One bit per output,\n"
+ "\t\t bit 0 == output 0, bit 15 == output 15.\n"
+ "\t\t Type 0 == S-Video, 1 == HDMI");
+
+unsigned vivid_debug;
+module_param(vivid_debug, uint, 0644);
+MODULE_PARM_DESC(vivid_debug, " activates debug info");
+
+static bool no_error_inj;
+module_param(no_error_inj, bool, 0444);
+MODULE_PARM_DESC(no_error_inj, " if set disable the error injecting controls");
+
+static unsigned int allocators[VIVID_MAX_DEVS] = { [0 ... (VIVID_MAX_DEVS - 1)] = 0 };
+module_param_array(allocators, uint, NULL, 0444);
+MODULE_PARM_DESC(allocators, " memory allocator selection, default is 0.\n"
+ "\t\t 0 == vmalloc\n"
+ "\t\t 1 == dma-contig");
+
+static unsigned int cache_hints[VIVID_MAX_DEVS] = {
+ [0 ... (VIVID_MAX_DEVS - 1)] = 0
+};
+module_param_array(cache_hints, uint, NULL, 0444);
+MODULE_PARM_DESC(cache_hints, " user-space cache hints, default is 0.\n"
+ "\t\t 0 == forbid\n"
+ "\t\t 1 == allow");
+
+static unsigned int supports_requests[VIVID_MAX_DEVS] = {
+ [0 ... (VIVID_MAX_DEVS - 1)] = 1
+};
+module_param_array(supports_requests, uint, NULL, 0444);
+MODULE_PARM_DESC(supports_requests, " support for requests, default is 1.\n"
+ "\t\t 0 == no support\n"
+ "\t\t 1 == supports requests\n"
+ "\t\t 2 == requires requests");
+
+static struct vivid_dev *vivid_devs[VIVID_MAX_DEVS];
+
+const struct v4l2_rect vivid_min_rect = {
+ 0, 0, MIN_WIDTH, MIN_HEIGHT
+};
+
+const struct v4l2_rect vivid_max_rect = {
+ 0, 0, MAX_WIDTH * MAX_ZOOM, MAX_HEIGHT * MAX_ZOOM
+};
+
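+/*
+ * The EDID below consists of a 128-byte base block followed by a
+ * 128-byte CEA-861 extension block (tag 0x02, revision 0x03); it serves
+ * as the default EDID for the emulated HDMI connectors.
+ */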
+static const u8 vivid_hdmi_edid[256] = {
+ 0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00,
+ 0x31, 0xd8, 0x34, 0x12, 0x00, 0x00, 0x00, 0x00,
+ 0x22, 0x1a, 0x01, 0x03, 0x80, 0x60, 0x36, 0x78,
+ 0x0f, 0xee, 0x91, 0xa3, 0x54, 0x4c, 0x99, 0x26,
+ 0x0f, 0x50, 0x54, 0x2f, 0xcf, 0x00, 0x31, 0x59,
+ 0x45, 0x59, 0x81, 0x80, 0x81, 0x40, 0x90, 0x40,
+ 0x95, 0x00, 0xa9, 0x40, 0xb3, 0x00, 0x08, 0xe8,
+ 0x00, 0x30, 0xf2, 0x70, 0x5a, 0x80, 0xb0, 0x58,
+ 0x8a, 0x00, 0xc0, 0x1c, 0x32, 0x00, 0x00, 0x1e,
+ 0x00, 0x00, 0x00, 0xfd, 0x00, 0x18, 0x55, 0x18,
+ 0x87, 0x3c, 0x00, 0x0a, 0x20, 0x20, 0x20, 0x20,
+ 0x20, 0x20, 0x00, 0x00, 0x00, 0xfc, 0x00, 0x76,
+ 0x69, 0x76, 0x69, 0x64, 0x0a, 0x20, 0x20, 0x20,
+ 0x20, 0x20, 0x20, 0x20, 0x00, 0x00, 0x00, 0x10,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x7b,
+
+ 0x02, 0x03, 0x3f, 0xf1, 0x51, 0x61, 0x60, 0x5f,
+ 0x5e, 0x5d, 0x10, 0x1f, 0x04, 0x13, 0x22, 0x21,
+ 0x20, 0x05, 0x14, 0x02, 0x11, 0x01, 0x23, 0x09,
+ 0x07, 0x07, 0x83, 0x01, 0x00, 0x00, 0x6d, 0x03,
+ 0x0c, 0x00, 0x10, 0x00, 0x00, 0x3c, 0x21, 0x00,
+ 0x60, 0x01, 0x02, 0x03, 0x67, 0xd8, 0x5d, 0xc4,
+ 0x01, 0x78, 0x00, 0x00, 0xe2, 0x00, 0xca, 0xe3,
+ 0x05, 0x00, 0x00, 0xe3, 0x06, 0x01, 0x00, 0x4d,
+ 0xd0, 0x00, 0xa0, 0xf0, 0x70, 0x3e, 0x80, 0x30,
+ 0x20, 0x35, 0x00, 0xc0, 0x1c, 0x32, 0x00, 0x00,
+ 0x1e, 0x1a, 0x36, 0x80, 0xa0, 0x70, 0x38, 0x1f,
+ 0x40, 0x30, 0x20, 0x35, 0x00, 0xc0, 0x1c, 0x32,
+ 0x00, 0x00, 0x1a, 0x1a, 0x1d, 0x00, 0x80, 0x51,
+ 0xd0, 0x1c, 0x20, 0x40, 0x80, 0x35, 0x00, 0xc0,
+ 0x1c, 0x32, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x82,
+};
+
+static int vidioc_querycap(struct file *file, void *priv,
+ struct v4l2_capability *cap)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ strscpy(cap->driver, "vivid", sizeof(cap->driver));
+ strscpy(cap->card, "vivid", sizeof(cap->card));
+ snprintf(cap->bus_info, sizeof(cap->bus_info),
+ "platform:%s", dev->v4l2_dev.name);
+
+ cap->capabilities = dev->vid_cap_caps | dev->vid_out_caps |
+ dev->vbi_cap_caps | dev->vbi_out_caps |
+ dev->radio_rx_caps | dev->radio_tx_caps |
+ dev->sdr_cap_caps | dev->meta_cap_caps |
+ dev->meta_out_caps | dev->touch_cap_caps |
+ V4L2_CAP_DEVICE_CAPS;
+ return 0;
+}
+
+static int vidioc_s_hw_freq_seek(struct file *file, void *fh, const struct v4l2_hw_freq_seek *a)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_rx_s_hw_freq_seek(file, fh, a);
+ return -ENOTTY;
+}
+
+static int vidioc_enum_freq_bands(struct file *file, void *fh, struct v4l2_frequency_band *band)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_rx_enum_freq_bands(file, fh, band);
+ if (vdev->vfl_type == VFL_TYPE_SDR)
+ return vivid_sdr_enum_freq_bands(file, fh, band);
+ return -ENOTTY;
+}
+
+static int vidioc_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_rx_g_tuner(file, fh, vt);
+ if (vdev->vfl_type == VFL_TYPE_SDR)
+ return vivid_sdr_g_tuner(file, fh, vt);
+ return vivid_video_g_tuner(file, fh, vt);
+}
+
+static int vidioc_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_rx_s_tuner(file, fh, vt);
+ if (vdev->vfl_type == VFL_TYPE_SDR)
+ return vivid_sdr_s_tuner(file, fh, vt);
+ return vivid_video_s_tuner(file, fh, vt);
+}
+
+static int vidioc_g_frequency(struct file *file, void *fh, struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_g_frequency(file,
+ vdev->vfl_dir == VFL_DIR_RX ?
+ &dev->radio_rx_freq : &dev->radio_tx_freq, vf);
+ if (vdev->vfl_type == VFL_TYPE_SDR)
+ return vivid_sdr_g_frequency(file, fh, vf);
+ return vivid_video_g_frequency(file, fh, vf);
+}
+
+static int vidioc_s_frequency(struct file *file, void *fh, const struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_RADIO)
+ return vivid_radio_s_frequency(file,
+ vdev->vfl_dir == VFL_DIR_RX ?
+ &dev->radio_rx_freq : &dev->radio_tx_freq, vf);
+ if (vdev->vfl_type == VFL_TYPE_SDR)
+ return vivid_sdr_s_frequency(file, fh, vf);
+ return vivid_video_s_frequency(file, fh, vf);
+}
+
+static int vidioc_overlay(struct file *file, void *fh, unsigned i)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_overlay(file, fh, i);
+ return vivid_vid_out_overlay(file, fh, i);
+}
+
+static int vidioc_g_fbuf(struct file *file, void *fh, struct v4l2_framebuffer *a)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_g_fbuf(file, fh, a);
+ return vivid_vid_out_g_fbuf(file, fh, a);
+}
+
+/*
+ * Only support the framebuffer of one of the vivid instances.
+ * Anything else is rejected.
+ */
+bool vivid_validate_fb(const struct v4l2_framebuffer *a)
+{
+ struct vivid_dev *dev;
+ int i;
+
+ for (i = 0; i < n_devs; i++) {
+ dev = vivid_devs[i];
+ if (!dev || !dev->video_pbase)
+ continue;
+ if ((unsigned long)a->base == dev->video_pbase &&
+ a->fmt.width <= dev->display_width &&
+ a->fmt.height <= dev->display_height &&
+ a->fmt.bytesperline <= dev->display_byte_stride)
+ return true;
+ }
+ return false;
+}
+
+static int vidioc_s_fbuf(struct file *file, void *fh, const struct v4l2_framebuffer *a)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_s_fbuf(file, fh, a);
+ return vivid_vid_out_s_fbuf(file, fh, a);
+}
+
+static int vidioc_s_std(struct file *file, void *fh, v4l2_std_id id)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_s_std(file, fh, id);
+ return vivid_vid_out_s_std(file, fh, id);
+}
+
+static int vidioc_s_dv_timings(struct file *file, void *fh, struct v4l2_dv_timings *timings)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_s_dv_timings(file, fh, timings);
+ return vivid_vid_out_s_dv_timings(file, fh, timings);
+}
+
+static int vidioc_g_pixelaspect(struct file *file, void *fh,
+ int type, struct v4l2_fract *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_g_pixelaspect(file, fh, type, f);
+ return vivid_vid_out_g_pixelaspect(file, fh, type, f);
+}
+
+static int vidioc_g_selection(struct file *file, void *fh,
+ struct v4l2_selection *sel)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_g_selection(file, fh, sel);
+ return vivid_vid_out_g_selection(file, fh, sel);
+}
+
+static int vidioc_s_selection(struct file *file, void *fh,
+ struct v4l2_selection *sel)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_s_selection(file, fh, sel);
+ return vivid_vid_out_s_selection(file, fh, sel);
+}
+
+static int vidioc_g_parm(struct file *file, void *fh,
+ struct v4l2_streamparm *parm)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_parm_tch(file, fh, parm);
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_g_parm(file, fh, parm);
+ return vivid_vid_out_g_parm(file, fh, parm);
+}
+
+static int vidioc_s_parm(struct file *file, void *fh,
+ struct v4l2_streamparm *parm)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_vid_cap_s_parm(file, fh, parm);
+ return -ENOTTY;
+}
+
+static int vidioc_log_status(struct file *file, void *fh)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ v4l2_ctrl_log_status(file, fh);
+ if (vdev->vfl_dir == VFL_DIR_RX && vdev->vfl_type == VFL_TYPE_VIDEO)
+ tpg_log_status(&dev->tpg);
+ return 0;
+}
+
+static ssize_t vivid_radio_read(struct file *file, char __user *buf,
+ size_t size, loff_t *offset)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_TX)
+ return -EINVAL;
+ return vivid_radio_rx_read(file, buf, size, offset);
+}
+
+static ssize_t vivid_radio_write(struct file *file, const char __user *buf,
+ size_t size, loff_t *offset)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return -EINVAL;
+ return vivid_radio_tx_write(file, buf, size, offset);
+}
+
+static __poll_t vivid_radio_poll(struct file *file, struct poll_table_struct *wait)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX)
+ return vivid_radio_rx_poll(file, wait);
+ return vivid_radio_tx_poll(file, wait);
+}
+
+static int vivid_enum_input(struct file *file, void *priv,
+ struct v4l2_input *inp)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_enum_input_tch(file, priv, inp);
+ return vidioc_enum_input(file, priv, inp);
+}
+
+static int vivid_g_input(struct file *file, void *priv, unsigned int *i)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_input_tch(file, priv, i);
+ return vidioc_g_input(file, priv, i);
+}
+
+static int vivid_s_input(struct file *file, void *priv, unsigned int i)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_s_input_tch(file, priv, i);
+ return vidioc_s_input(file, priv, i);
+}
+
+static int vivid_enum_fmt_cap(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_enum_fmt_tch(file, priv, f);
+ return vivid_enum_fmt_vid(file, priv, f);
+}
+
+static int vivid_g_fmt_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch(file, priv, f);
+ return vidioc_g_fmt_vid_cap(file, priv, f);
+}
+
+static int vivid_try_fmt_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch(file, priv, f);
+ return vidioc_try_fmt_vid_cap(file, priv, f);
+}
+
+static int vivid_s_fmt_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch(file, priv, f);
+ return vidioc_s_fmt_vid_cap(file, priv, f);
+}
+
+static int vivid_g_fmt_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch_mplane(file, priv, f);
+ return vidioc_g_fmt_vid_cap_mplane(file, priv, f);
+}
+
+static int vivid_try_fmt_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch_mplane(file, priv, f);
+ return vidioc_try_fmt_vid_cap_mplane(file, priv, f);
+}
+
+static int vivid_s_fmt_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_type == VFL_TYPE_TOUCH)
+ return vivid_g_fmt_tch_mplane(file, priv, f);
+ return vidioc_s_fmt_vid_cap_mplane(file, priv, f);
+}
+
+static bool vivid_is_in_use(bool valid, struct video_device *vdev)
+{
+ unsigned long flags;
+ bool res;
+
+ if (!valid)
+ return false;
+ spin_lock_irqsave(&vdev->fh_lock, flags);
+ res = !list_empty(&vdev->fh_list);
+ spin_unlock_irqrestore(&vdev->fh_lock, flags);
+ return res;
+}
+
+static bool vivid_is_last_user(struct vivid_dev *dev)
+{
+ unsigned int uses =
+ vivid_is_in_use(dev->has_vid_cap, &dev->vid_cap_dev) +
+ vivid_is_in_use(dev->has_vid_out, &dev->vid_out_dev) +
+ vivid_is_in_use(dev->has_vbi_cap, &dev->vbi_cap_dev) +
+ vivid_is_in_use(dev->has_vbi_out, &dev->vbi_out_dev) +
+ vivid_is_in_use(dev->has_radio_rx, &dev->radio_rx_dev) +
+ vivid_is_in_use(dev->has_radio_tx, &dev->radio_tx_dev) +
+ vivid_is_in_use(dev->has_sdr_cap, &dev->sdr_cap_dev) +
+ vivid_is_in_use(dev->has_meta_cap, &dev->meta_cap_dev) +
+ vivid_is_in_use(dev->has_meta_out, &dev->meta_out_dev) +
+ vivid_is_in_use(dev->has_touch_cap, &dev->touch_cap_dev);
+
+ return uses == 1;
+}
+
+static void vivid_reconnect(struct vivid_dev *dev)
+{
+ if (dev->has_vid_cap)
+ set_bit(V4L2_FL_REGISTERED, &dev->vid_cap_dev.flags);
+ if (dev->has_vid_out)
+ set_bit(V4L2_FL_REGISTERED, &dev->vid_out_dev.flags);
+ if (dev->has_vbi_cap)
+ set_bit(V4L2_FL_REGISTERED, &dev->vbi_cap_dev.flags);
+ if (dev->has_vbi_out)
+ set_bit(V4L2_FL_REGISTERED, &dev->vbi_out_dev.flags);
+ if (dev->has_radio_rx)
+ set_bit(V4L2_FL_REGISTERED, &dev->radio_rx_dev.flags);
+ if (dev->has_radio_tx)
+ set_bit(V4L2_FL_REGISTERED, &dev->radio_tx_dev.flags);
+ if (dev->has_sdr_cap)
+ set_bit(V4L2_FL_REGISTERED, &dev->sdr_cap_dev.flags);
+ if (dev->has_meta_cap)
+ set_bit(V4L2_FL_REGISTERED, &dev->meta_cap_dev.flags);
+ if (dev->has_meta_out)
+ set_bit(V4L2_FL_REGISTERED, &dev->meta_out_dev.flags);
+ if (dev->has_touch_cap)
+ set_bit(V4L2_FL_REGISTERED, &dev->touch_cap_dev.flags);
+ dev->disconnect_error = false;
+}
+
+static int vivid_fop_release(struct file *file)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ mutex_lock(&dev->mutex);
+ if (!no_error_inj && v4l2_fh_is_singular_file(file) &&
+ dev->disconnect_error && !video_is_registered(vdev) &&
+ vivid_is_last_user(dev)) {
+ /*
+ * I am the last user of this driver, and a disconnect
+ * was forced (since this video_device is unregistered),
+ * so re-register all video_device nodes.
+ */
+ v4l2_info(&dev->v4l2_dev, "reconnect\n");
+ vivid_reconnect(dev);
+ }
+ mutex_unlock(&dev->mutex);
+ if (file->private_data == dev->overlay_cap_owner)
+ dev->overlay_cap_owner = NULL;
+ if (file->private_data == dev->radio_rx_rds_owner) {
+ dev->radio_rx_rds_last_block = 0;
+ dev->radio_rx_rds_owner = NULL;
+ }
+ if (file->private_data == dev->radio_tx_rds_owner) {
+ dev->radio_tx_rds_last_block = 0;
+ dev->radio_tx_rds_owner = NULL;
+ }
+ if (vdev->queue)
+ return vb2_fop_release(file);
+ return v4l2_fh_release(file);
+}
+
+static const struct v4l2_file_operations vivid_fops = {
+ .owner = THIS_MODULE,
+ .open = v4l2_fh_open,
+ .release = vivid_fop_release,
+ .read = vb2_fop_read,
+ .write = vb2_fop_write,
+ .poll = vb2_fop_poll,
+ .unlocked_ioctl = video_ioctl2,
+ .mmap = vb2_fop_mmap,
+};
+
+static const struct v4l2_file_operations vivid_radio_fops = {
+ .owner = THIS_MODULE,
+ .open = v4l2_fh_open,
+ .release = vivid_fop_release,
+ .read = vivid_radio_read,
+ .write = vivid_radio_write,
+ .poll = vivid_radio_poll,
+ .unlocked_ioctl = video_ioctl2,
+};
+
+static int vidioc_reqbufs(struct file *file, void *priv,
+ struct v4l2_requestbuffers *p)
+{
+ struct video_device *vdev = video_devdata(file);
+ int r;
+
+ /*
+ * Sliced and raw VBI capture share the same queue so we must
+ * change the type.
+ */
+ if (p->type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE ||
+ p->type == V4L2_BUF_TYPE_VBI_CAPTURE) {
+ r = vb2_queue_change_type(vdev->queue, p->type);
+ if (r)
+ return r;
+ }
+
+ return vb2_ioctl_reqbufs(file, priv, p);
+}
+
+static int vidioc_create_bufs(struct file *file, void *priv,
+ struct v4l2_create_buffers *p)
+{
+ struct video_device *vdev = video_devdata(file);
+ int r;
+
+ /*
+ * Sliced and raw VBI capture share the same queue so we must
+ * change the type.
+ */
+ if (p->format.type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE ||
+ p->format.type == V4L2_BUF_TYPE_VBI_CAPTURE) {
+ r = vb2_queue_change_type(vdev->queue, p->format.type);
+ if (r)
+ return r;
+ }
+
+ return vb2_ioctl_create_bufs(file, priv, p);
+}
+
+static const struct v4l2_ioctl_ops vivid_ioctl_ops = {
+ .vidioc_querycap = vidioc_querycap,
+
+ .vidioc_enum_fmt_vid_cap = vivid_enum_fmt_cap,
+ .vidioc_g_fmt_vid_cap = vivid_g_fmt_cap,
+ .vidioc_try_fmt_vid_cap = vivid_try_fmt_cap,
+ .vidioc_s_fmt_vid_cap = vivid_s_fmt_cap,
+ .vidioc_g_fmt_vid_cap_mplane = vivid_g_fmt_cap_mplane,
+ .vidioc_try_fmt_vid_cap_mplane = vivid_try_fmt_cap_mplane,
+ .vidioc_s_fmt_vid_cap_mplane = vivid_s_fmt_cap_mplane,
+
+ .vidioc_enum_fmt_vid_out = vivid_enum_fmt_vid,
+ .vidioc_g_fmt_vid_out = vidioc_g_fmt_vid_out,
+ .vidioc_try_fmt_vid_out = vidioc_try_fmt_vid_out,
+ .vidioc_s_fmt_vid_out = vidioc_s_fmt_vid_out,
+ .vidioc_g_fmt_vid_out_mplane = vidioc_g_fmt_vid_out_mplane,
+ .vidioc_try_fmt_vid_out_mplane = vidioc_try_fmt_vid_out_mplane,
+ .vidioc_s_fmt_vid_out_mplane = vidioc_s_fmt_vid_out_mplane,
+
+ .vidioc_g_selection = vidioc_g_selection,
+ .vidioc_s_selection = vidioc_s_selection,
+ .vidioc_g_pixelaspect = vidioc_g_pixelaspect,
+
+ .vidioc_g_fmt_vbi_cap = vidioc_g_fmt_vbi_cap,
+ .vidioc_try_fmt_vbi_cap = vidioc_g_fmt_vbi_cap,
+ .vidioc_s_fmt_vbi_cap = vidioc_s_fmt_vbi_cap,
+
+ .vidioc_g_fmt_sliced_vbi_cap = vidioc_g_fmt_sliced_vbi_cap,
+ .vidioc_try_fmt_sliced_vbi_cap = vidioc_try_fmt_sliced_vbi_cap,
+ .vidioc_s_fmt_sliced_vbi_cap = vidioc_s_fmt_sliced_vbi_cap,
+ .vidioc_g_sliced_vbi_cap = vidioc_g_sliced_vbi_cap,
+
+ .vidioc_g_fmt_vbi_out = vidioc_g_fmt_vbi_out,
+ .vidioc_try_fmt_vbi_out = vidioc_g_fmt_vbi_out,
+ .vidioc_s_fmt_vbi_out = vidioc_s_fmt_vbi_out,
+
+ .vidioc_g_fmt_sliced_vbi_out = vidioc_g_fmt_sliced_vbi_out,
+ .vidioc_try_fmt_sliced_vbi_out = vidioc_try_fmt_sliced_vbi_out,
+ .vidioc_s_fmt_sliced_vbi_out = vidioc_s_fmt_sliced_vbi_out,
+
+ .vidioc_enum_fmt_sdr_cap = vidioc_enum_fmt_sdr_cap,
+ .vidioc_g_fmt_sdr_cap = vidioc_g_fmt_sdr_cap,
+ .vidioc_try_fmt_sdr_cap = vidioc_try_fmt_sdr_cap,
+ .vidioc_s_fmt_sdr_cap = vidioc_s_fmt_sdr_cap,
+
+ .vidioc_overlay = vidioc_overlay,
+ .vidioc_enum_framesizes = vidioc_enum_framesizes,
+ .vidioc_enum_frameintervals = vidioc_enum_frameintervals,
+ .vidioc_g_parm = vidioc_g_parm,
+ .vidioc_s_parm = vidioc_s_parm,
+
+ .vidioc_enum_fmt_vid_overlay = vidioc_enum_fmt_vid_overlay,
+ .vidioc_g_fmt_vid_overlay = vidioc_g_fmt_vid_overlay,
+ .vidioc_try_fmt_vid_overlay = vidioc_try_fmt_vid_overlay,
+ .vidioc_s_fmt_vid_overlay = vidioc_s_fmt_vid_overlay,
+ .vidioc_g_fmt_vid_out_overlay = vidioc_g_fmt_vid_out_overlay,
+ .vidioc_try_fmt_vid_out_overlay = vidioc_try_fmt_vid_out_overlay,
+ .vidioc_s_fmt_vid_out_overlay = vidioc_s_fmt_vid_out_overlay,
+ .vidioc_g_fbuf = vidioc_g_fbuf,
+ .vidioc_s_fbuf = vidioc_s_fbuf,
+
+ .vidioc_reqbufs = vidioc_reqbufs,
+ .vidioc_create_bufs = vidioc_create_bufs,
+ .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
+ .vidioc_querybuf = vb2_ioctl_querybuf,
+ .vidioc_qbuf = vb2_ioctl_qbuf,
+ .vidioc_dqbuf = vb2_ioctl_dqbuf,
+ .vidioc_expbuf = vb2_ioctl_expbuf,
+ .vidioc_streamon = vb2_ioctl_streamon,
+ .vidioc_streamoff = vb2_ioctl_streamoff,
+
+ .vidioc_enum_input = vivid_enum_input,
+ .vidioc_g_input = vivid_g_input,
+ .vidioc_s_input = vivid_s_input,
+ .vidioc_s_audio = vidioc_s_audio,
+ .vidioc_g_audio = vidioc_g_audio,
+ .vidioc_enumaudio = vidioc_enumaudio,
+ .vidioc_s_frequency = vidioc_s_frequency,
+ .vidioc_g_frequency = vidioc_g_frequency,
+ .vidioc_s_tuner = vidioc_s_tuner,
+ .vidioc_g_tuner = vidioc_g_tuner,
+ .vidioc_s_modulator = vidioc_s_modulator,
+ .vidioc_g_modulator = vidioc_g_modulator,
+ .vidioc_s_hw_freq_seek = vidioc_s_hw_freq_seek,
+ .vidioc_enum_freq_bands = vidioc_enum_freq_bands,
+
+ .vidioc_enum_output = vidioc_enum_output,
+ .vidioc_g_output = vidioc_g_output,
+ .vidioc_s_output = vidioc_s_output,
+ .vidioc_s_audout = vidioc_s_audout,
+ .vidioc_g_audout = vidioc_g_audout,
+ .vidioc_enumaudout = vidioc_enumaudout,
+
+ .vidioc_querystd = vidioc_querystd,
+ .vidioc_g_std = vidioc_g_std,
+ .vidioc_s_std = vidioc_s_std,
+ .vidioc_s_dv_timings = vidioc_s_dv_timings,
+ .vidioc_g_dv_timings = vidioc_g_dv_timings,
+ .vidioc_query_dv_timings = vidioc_query_dv_timings,
+ .vidioc_enum_dv_timings = vidioc_enum_dv_timings,
+ .vidioc_dv_timings_cap = vidioc_dv_timings_cap,
+ .vidioc_g_edid = vidioc_g_edid,
+ .vidioc_s_edid = vidioc_s_edid,
+
+ .vidioc_log_status = vidioc_log_status,
+ .vidioc_subscribe_event = vidioc_subscribe_event,
+ .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
+
+ .vidioc_enum_fmt_meta_cap = vidioc_enum_fmt_meta_cap,
+ .vidioc_g_fmt_meta_cap = vidioc_g_fmt_meta_cap,
+ .vidioc_s_fmt_meta_cap = vidioc_g_fmt_meta_cap,
+ .vidioc_try_fmt_meta_cap = vidioc_g_fmt_meta_cap,
+
+ .vidioc_enum_fmt_meta_out = vidioc_enum_fmt_meta_out,
+ .vidioc_g_fmt_meta_out = vidioc_g_fmt_meta_out,
+ .vidioc_s_fmt_meta_out = vidioc_g_fmt_meta_out,
+ .vidioc_try_fmt_meta_out = vidioc_g_fmt_meta_out,
+};
+
+/* -----------------------------------------------------------------
+ Initialization and module stuff
+ ------------------------------------------------------------------*/
+
+static void vivid_dev_release(struct v4l2_device *v4l2_dev)
+{
+ struct vivid_dev *dev = container_of(v4l2_dev, struct vivid_dev, v4l2_dev);
+
+ vivid_free_controls(dev);
+ v4l2_device_unregister(&dev->v4l2_dev);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ media_device_cleanup(&dev->mdev);
+#endif
+ vfree(dev->scaled_line);
+ vfree(dev->blended_line);
+ vfree(dev->edid);
+ vfree(dev->bitmap_cap);
+ vfree(dev->bitmap_out);
+ tpg_free(&dev->tpg);
+ kfree(dev->query_dv_timings_qmenu);
+ kfree(dev->query_dv_timings_qmenu_strings);
+ kfree(dev);
+}
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+static int vivid_req_validate(struct media_request *req)
+{
+ struct vivid_dev *dev = container_of(req->mdev, struct vivid_dev, mdev);
+
+ if (dev->req_validate_error) {
+ dev->req_validate_error = false;
+ return -EINVAL;
+ }
+ return vb2_request_validate(req);
+}
+
+static const struct media_device_ops vivid_media_ops = {
+ .req_validate = vivid_req_validate,
+ .req_queue = vb2_request_queue,
+};
+#endif
+
+static int vivid_create_queue(struct vivid_dev *dev,
+ struct vb2_queue *q,
+ u32 buf_type,
+ unsigned int min_buffers_needed,
+ const struct vb2_ops *ops)
+{
+ if (buf_type == V4L2_BUF_TYPE_VIDEO_CAPTURE && dev->multiplanar)
+ buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ else if (buf_type == V4L2_BUF_TYPE_VIDEO_OUTPUT && dev->multiplanar)
+ buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ else if (buf_type == V4L2_BUF_TYPE_VBI_CAPTURE && !dev->has_raw_vbi_cap)
+ buf_type = V4L2_BUF_TYPE_SLICED_VBI_CAPTURE;
+ else if (buf_type == V4L2_BUF_TYPE_VBI_OUTPUT && !dev->has_raw_vbi_out)
+ buf_type = V4L2_BUF_TYPE_SLICED_VBI_OUTPUT;
+
+ q->type = buf_type;
+ q->io_modes = VB2_MMAP | VB2_DMABUF;
+ q->io_modes |= V4L2_TYPE_IS_OUTPUT(buf_type) ? VB2_WRITE : VB2_READ;
+ if (allocators[dev->inst] != 1)
+ q->io_modes |= VB2_USERPTR;
+ q->drv_priv = dev;
+ q->buf_struct_size = sizeof(struct vivid_buffer);
+ q->ops = ops;
+ q->mem_ops = allocators[dev->inst] == 1 ? &vb2_dma_contig_memops :
+ &vb2_vmalloc_memops;
+ q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
+ q->min_buffers_needed = supports_requests[dev->inst] ? 0 : min_buffers_needed;
+ q->lock = &dev->mutex;
+ q->dev = dev->v4l2_dev.dev;
+ q->supports_requests = supports_requests[dev->inst];
+ q->requires_requests = supports_requests[dev->inst] >= 2;
+ q->allow_cache_hints = (cache_hints[dev->inst] == 1);
+
+ return vb2_queue_init(q);
+}
+
+static int vivid_detect_feature_set(struct vivid_dev *dev, int inst,
+ unsigned node_type,
+ bool *has_tuner,
+ bool *has_modulator,
+ int *ccs_cap,
+ int *ccs_out,
+ unsigned in_type_counter[4],
+ unsigned out_type_counter[4])
+{
+ int i;
+
+ /* do we use single- or multi-planar? */
+ dev->multiplanar = multiplanar[inst] > 1;
+ v4l2_info(&dev->v4l2_dev, "using %splanar format API\n",
+ dev->multiplanar ? "multi" : "single ");
+
+ /* how many inputs do we have and of what type? */
+ dev->num_inputs = num_inputs[inst];
+ if (node_type & 0x20007) {
+ if (dev->num_inputs < 1)
+ dev->num_inputs = 1;
+ } else {
+ dev->num_inputs = 0;
+ }
+ if (dev->num_inputs >= MAX_INPUTS)
+ dev->num_inputs = MAX_INPUTS;
+ for (i = 0; i < dev->num_inputs; i++) {
+ dev->input_type[i] = (input_types[inst] >> (i * 2)) & 0x3;
+ dev->input_name_counter[i] = in_type_counter[dev->input_type[i]]++;
+ }
+ dev->has_audio_inputs = in_type_counter[TV] && in_type_counter[SVID];
+ if (in_type_counter[HDMI] == 16) {
+ /* The CEC physical address only allows for max 15 inputs */
+ in_type_counter[HDMI]--;
+ dev->num_inputs--;
+ }
+ dev->num_hdmi_inputs = in_type_counter[HDMI];
+
+ /* how many outputs do we have and of what type? */
+ dev->num_outputs = num_outputs[inst];
+ if (node_type & 0x40300) {
+ if (dev->num_outputs < 1)
+ dev->num_outputs = 1;
+ } else {
+ dev->num_outputs = 0;
+ }
+ if (dev->num_outputs >= MAX_OUTPUTS)
+ dev->num_outputs = MAX_OUTPUTS;
+ for (i = 0; i < dev->num_outputs; i++) {
+ dev->output_type[i] = ((output_types[inst] >> i) & 1) ? HDMI : SVID;
+ dev->output_name_counter[i] = out_type_counter[dev->output_type[i]]++;
+ dev->display_present[i] = true;
+ }
+ dev->has_audio_outputs = out_type_counter[SVID];
+ if (out_type_counter[HDMI] == 16) {
+ /*
+ * The CEC physical address only allows for max 15 inputs,
+ * so outputs are also limited to 15 to allow for easy
+ * CEC output to input mapping.
+ */
+ out_type_counter[HDMI]--;
+ dev->num_outputs--;
+ }
+ dev->num_hdmi_outputs = out_type_counter[HDMI];
+
+ /* do we create a video capture device? */
+ dev->has_vid_cap = node_type & 0x0001;
+
+ /* do we create a vbi capture device? */
+ if (in_type_counter[TV] || in_type_counter[SVID]) {
+ dev->has_raw_vbi_cap = node_type & 0x0004;
+ dev->has_sliced_vbi_cap = node_type & 0x0008;
+ dev->has_vbi_cap = dev->has_raw_vbi_cap | dev->has_sliced_vbi_cap;
+ }
+
+ /* do we create a meta capture device */
+ dev->has_meta_cap = node_type & 0x20000;
+
+ /* sanity checks */
+ if ((in_type_counter[WEBCAM] || in_type_counter[HDMI]) &&
+ !dev->has_vid_cap && !dev->has_meta_cap) {
+ v4l2_warn(&dev->v4l2_dev,
+ "Webcam or HDMI input without video or metadata nodes\n");
+ return -EINVAL;
+ }
+ if ((in_type_counter[TV] || in_type_counter[SVID]) &&
+ !dev->has_vid_cap && !dev->has_vbi_cap && !dev->has_meta_cap) {
+ v4l2_warn(&dev->v4l2_dev,
+ "TV or S-Video input without video, VBI or metadata nodes\n");
+ return -EINVAL;
+ }
+
+ /* do we create a video output device? */
+ dev->has_vid_out = node_type & 0x0100;
+
+ /* do we create a vbi output device? */
+ if (out_type_counter[SVID]) {
+ dev->has_raw_vbi_out = node_type & 0x0400;
+ dev->has_sliced_vbi_out = node_type & 0x0800;
+ dev->has_vbi_out = dev->has_raw_vbi_out | dev->has_sliced_vbi_out;
+ }
+
+ /* do we create a metadata output device */
+ dev->has_meta_out = node_type & 0x40000;
+
+ /* sanity checks */
+ if (out_type_counter[SVID] &&
+ !dev->has_vid_out && !dev->has_vbi_out && !dev->has_meta_out) {
+ v4l2_warn(&dev->v4l2_dev,
+ "S-Video output without video, VBI or metadata nodes\n");
+ return -EINVAL;
+ }
+ if (out_type_counter[HDMI] && !dev->has_vid_out && !dev->has_meta_out) {
+ v4l2_warn(&dev->v4l2_dev,
+ "HDMI output without video or metadata nodes\n");
+ return -EINVAL;
+ }
+
+ /* do we create a radio receiver device? */
+ dev->has_radio_rx = node_type & 0x0010;
+
+ /* do we create a radio transmitter device? */
+ dev->has_radio_tx = node_type & 0x1000;
+
+ /* do we create a software defined radio capture device? */
+ dev->has_sdr_cap = node_type & 0x0020;
+
+ /* do we have a TV tuner? */
+ dev->has_tv_tuner = in_type_counter[TV];
+
+ /* do we have a tuner? */
+ *has_tuner = ((dev->has_vid_cap || dev->has_vbi_cap) && in_type_counter[TV]) ||
+ dev->has_radio_rx || dev->has_sdr_cap;
+
+ /* do we have a modulator? */
+ *has_modulator = dev->has_radio_tx;
+
+ if (dev->has_vid_cap)
+ /* do we have a framebuffer for overlay testing? */
+ dev->has_fb = node_type & 0x10000;
+
+ /* can we do crop/compose/scaling while capturing? */
+ if (no_error_inj && *ccs_cap == -1)
+ *ccs_cap = 7;
+
+ /* if ccs_cap == -1, then the user can select it using controls */
+ if (*ccs_cap != -1) {
+ dev->has_crop_cap = *ccs_cap & 1;
+ dev->has_compose_cap = *ccs_cap & 2;
+ dev->has_scaler_cap = *ccs_cap & 4;
+ v4l2_info(&dev->v4l2_dev, "Capture Crop: %c Compose: %c Scaler: %c\n",
+ dev->has_crop_cap ? 'Y' : 'N',
+ dev->has_compose_cap ? 'Y' : 'N',
+ dev->has_scaler_cap ? 'Y' : 'N');
+ }
+
+ /* can we do crop/compose/scaling with video output? */
+ if (no_error_inj && *ccs_out == -1)
+ *ccs_out = 7;
+
+ /* if ccs_out == -1, then the user can select it using controls */
+ if (*ccs_out != -1) {
+ dev->has_crop_out = *ccs_out & 1;
+ dev->has_compose_out = *ccs_out & 2;
+ dev->has_scaler_out = *ccs_out & 4;
+ v4l2_info(&dev->v4l2_dev, "Output Crop: %c Compose: %c Scaler: %c\n",
+ dev->has_crop_out ? 'Y' : 'N',
+ dev->has_compose_out ? 'Y' : 'N',
+ dev->has_scaler_out ? 'Y' : 'N');
+ }
+
+ /* do we create a touch capture device */
+ dev->has_touch_cap = node_type & 0x80000;
+
+ return 0;
+}
+
+static void vivid_set_capabilities(struct vivid_dev *dev)
+{
+ if (dev->has_vid_cap) {
+ /* set up the capabilities of the video capture device */
+ dev->vid_cap_caps = dev->multiplanar ?
+ V4L2_CAP_VIDEO_CAPTURE_MPLANE :
+ V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OVERLAY;
+ dev->vid_cap_caps |= V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_inputs)
+ dev->vid_cap_caps |= V4L2_CAP_AUDIO;
+ if (dev->has_tv_tuner)
+ dev->vid_cap_caps |= V4L2_CAP_TUNER;
+ }
+ if (dev->has_vid_out) {
+ /* set up the capabilities of the video output device */
+ dev->vid_out_caps = dev->multiplanar ?
+ V4L2_CAP_VIDEO_OUTPUT_MPLANE :
+ V4L2_CAP_VIDEO_OUTPUT;
+ if (dev->has_fb)
+ dev->vid_out_caps |= V4L2_CAP_VIDEO_OUTPUT_OVERLAY;
+ dev->vid_out_caps |= V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_outputs)
+ dev->vid_out_caps |= V4L2_CAP_AUDIO;
+ }
+ if (dev->has_vbi_cap) {
+ /* set up the capabilities of the vbi capture device */
+ dev->vbi_cap_caps = (dev->has_raw_vbi_cap ? V4L2_CAP_VBI_CAPTURE : 0) |
+ (dev->has_sliced_vbi_cap ? V4L2_CAP_SLICED_VBI_CAPTURE : 0);
+ dev->vbi_cap_caps |= V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_inputs)
+ dev->vbi_cap_caps |= V4L2_CAP_AUDIO;
+ if (dev->has_tv_tuner)
+ dev->vbi_cap_caps |= V4L2_CAP_TUNER;
+ }
+ if (dev->has_vbi_out) {
+ /* set up the capabilities of the vbi output device */
+ dev->vbi_out_caps = (dev->has_raw_vbi_out ? V4L2_CAP_VBI_OUTPUT : 0) |
+ (dev->has_sliced_vbi_out ? V4L2_CAP_SLICED_VBI_OUTPUT : 0);
+ dev->vbi_out_caps |= V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_outputs)
+ dev->vbi_out_caps |= V4L2_CAP_AUDIO;
+ }
+ if (dev->has_sdr_cap) {
+ /* set up the capabilities of the sdr capture device */
+ dev->sdr_cap_caps = V4L2_CAP_SDR_CAPTURE | V4L2_CAP_TUNER;
+ dev->sdr_cap_caps |= V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ }
+ /* set up the capabilities of the radio receiver device */
+ if (dev->has_radio_rx)
+ dev->radio_rx_caps = V4L2_CAP_RADIO | V4L2_CAP_RDS_CAPTURE |
+ V4L2_CAP_HW_FREQ_SEEK | V4L2_CAP_TUNER |
+ V4L2_CAP_READWRITE;
+ /* set up the capabilities of the radio transmitter device */
+ if (dev->has_radio_tx)
+ dev->radio_tx_caps = V4L2_CAP_RDS_OUTPUT | V4L2_CAP_MODULATOR |
+ V4L2_CAP_READWRITE;
+
+ /* set up the capabilities of meta capture device */
+ if (dev->has_meta_cap) {
+ dev->meta_cap_caps = V4L2_CAP_META_CAPTURE |
+ V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_inputs)
+ dev->meta_cap_caps |= V4L2_CAP_AUDIO;
+ if (dev->has_tv_tuner)
+ dev->meta_cap_caps |= V4L2_CAP_TUNER;
+ }
+ /* set up the capabilities of meta output device */
+ if (dev->has_meta_out) {
+ dev->meta_out_caps = V4L2_CAP_META_OUTPUT |
+ V4L2_CAP_STREAMING | V4L2_CAP_READWRITE;
+ if (dev->has_audio_outputs)
+ dev->meta_out_caps |= V4L2_CAP_AUDIO;
+ }
+ /* set up the capabilities of the touch capture device */
+ if (dev->has_touch_cap) {
+ dev->touch_cap_caps = V4L2_CAP_TOUCH | V4L2_CAP_STREAMING |
+ V4L2_CAP_READWRITE;
+ dev->touch_cap_caps |= dev->multiplanar ?
+ V4L2_CAP_VIDEO_CAPTURE_MPLANE : V4L2_CAP_VIDEO_CAPTURE;
+ }
+}
+
+static void vivid_disable_unused_ioctls(struct vivid_dev *dev,
+ bool has_tuner,
+ bool has_modulator,
+ unsigned in_type_counter[4],
+ unsigned out_type_counter[4])
+{
+ /* disable invalid ioctls based on the feature set */
+ if (!dev->has_audio_inputs) {
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_AUDIO);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_AUDIO);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_ENUMAUDIO);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_S_AUDIO);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_G_AUDIO);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_ENUMAUDIO);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_S_AUDIO);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_G_AUDIO);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_ENUMAUDIO);
+ }
+ if (!dev->has_audio_outputs) {
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_S_AUDOUT);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_AUDOUT);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_ENUMAUDOUT);
+ v4l2_disable_ioctl(&dev->vbi_out_dev, VIDIOC_S_AUDOUT);
+ v4l2_disable_ioctl(&dev->vbi_out_dev, VIDIOC_G_AUDOUT);
+ v4l2_disable_ioctl(&dev->vbi_out_dev, VIDIOC_ENUMAUDOUT);
+ v4l2_disable_ioctl(&dev->meta_out_dev, VIDIOC_S_AUDOUT);
+ v4l2_disable_ioctl(&dev->meta_out_dev, VIDIOC_G_AUDOUT);
+ v4l2_disable_ioctl(&dev->meta_out_dev, VIDIOC_ENUMAUDOUT);
+ }
+ if (!in_type_counter[TV] && !in_type_counter[SVID]) {
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_STD);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_STD);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_ENUMSTD);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_QUERYSTD);
+ }
+ if (!out_type_counter[SVID]) {
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_S_STD);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_STD);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_ENUMSTD);
+ }
+ if (!has_tuner && !has_modulator) {
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_G_FREQUENCY);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_G_FREQUENCY);
+ }
+ if (!has_tuner) {
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_TUNER);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_TUNER);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_S_TUNER);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_G_TUNER);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_S_TUNER);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_G_TUNER);
+ }
+ if (in_type_counter[HDMI] == 0) {
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_EDID);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_EDID);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_DV_TIMINGS_CAP);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_G_DV_TIMINGS);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_DV_TIMINGS);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_ENUM_DV_TIMINGS);
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_QUERY_DV_TIMINGS);
+ }
+ if (out_type_counter[HDMI] == 0) {
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_EDID);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_DV_TIMINGS_CAP);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_DV_TIMINGS);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_S_DV_TIMINGS);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_ENUM_DV_TIMINGS);
+ }
+ if (!dev->has_fb) {
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_FBUF);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_S_FBUF);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_OVERLAY);
+ }
+ v4l2_disable_ioctl(&dev->vid_cap_dev, VIDIOC_S_HW_FREQ_SEEK);
+ v4l2_disable_ioctl(&dev->vbi_cap_dev, VIDIOC_S_HW_FREQ_SEEK);
+ v4l2_disable_ioctl(&dev->sdr_cap_dev, VIDIOC_S_HW_FREQ_SEEK);
+ v4l2_disable_ioctl(&dev->meta_cap_dev, VIDIOC_S_HW_FREQ_SEEK);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_G_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_ENUM_FRAMESIZES);
+ v4l2_disable_ioctl(&dev->vid_out_dev, VIDIOC_ENUM_FRAMEINTERVALS);
+ v4l2_disable_ioctl(&dev->vbi_out_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->vbi_out_dev, VIDIOC_G_FREQUENCY);
+ v4l2_disable_ioctl(&dev->meta_out_dev, VIDIOC_S_FREQUENCY);
+ v4l2_disable_ioctl(&dev->meta_out_dev, VIDIOC_G_FREQUENCY);
+ v4l2_disable_ioctl(&dev->touch_cap_dev, VIDIOC_S_PARM);
+ v4l2_disable_ioctl(&dev->touch_cap_dev, VIDIOC_ENUM_FRAMESIZES);
+ v4l2_disable_ioctl(&dev->touch_cap_dev, VIDIOC_ENUM_FRAMEINTERVALS);
+}
+
+static int vivid_init_dv_timings(struct vivid_dev *dev)
+{
+ int i;
+
+ while (v4l2_dv_timings_presets[dev->query_dv_timings_size].bt.width)
+ dev->query_dv_timings_size++;
+
+ /*
+ * Create a char pointer array that points to the names of all the
+ * preset timings
+ */
+ dev->query_dv_timings_qmenu = kmalloc_array(dev->query_dv_timings_size,
+ sizeof(char *), GFP_KERNEL);
+ /*
+ * Create a string array containing the names of all the preset
+ * timings. Each name is max 31 chars long (+ terminating 0).
+ */
+ dev->query_dv_timings_qmenu_strings =
+ kmalloc_array(dev->query_dv_timings_size, 32, GFP_KERNEL);
+
+ if (!dev->query_dv_timings_qmenu ||
+ !dev->query_dv_timings_qmenu_strings)
+ return -ENOMEM;
+
+ for (i = 0; i < dev->query_dv_timings_size; i++) {
+ const struct v4l2_bt_timings *bt = &v4l2_dv_timings_presets[i].bt;
+ char *p = dev->query_dv_timings_qmenu_strings + i * 32;
+ u32 htot, vtot;
+
+ dev->query_dv_timings_qmenu[i] = p;
+
+ htot = V4L2_DV_BT_FRAME_WIDTH(bt);
+ vtot = V4L2_DV_BT_FRAME_HEIGHT(bt);
+ snprintf(p, 32, "%ux%u%s%u",
+ bt->width, bt->height, bt->interlaced ? "i" : "p",
+ (u32)bt->pixelclock / (htot * vtot));
+ }
+
+ return 0;
+}
+
+static int vivid_create_queues(struct vivid_dev *dev)
+{
+ int ret;
+
+ /* start creating the vb2 queues */
+ if (dev->has_vid_cap) {
+ /* initialize vid_cap queue */
+ ret = vivid_create_queue(dev, &dev->vb_vid_cap_q,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE, 2,
+ &vivid_vid_cap_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_vid_out) {
+ /* initialize vid_out queue */
+ ret = vivid_create_queue(dev, &dev->vb_vid_out_q,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT, 2,
+ &vivid_vid_out_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_vbi_cap) {
+ /* initialize vbi_cap queue */
+ ret = vivid_create_queue(dev, &dev->vb_vbi_cap_q,
+ V4L2_BUF_TYPE_VBI_CAPTURE, 2,
+ &vivid_vbi_cap_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_vbi_out) {
+ /* initialize vbi_out queue */
+ ret = vivid_create_queue(dev, &dev->vb_vbi_out_q,
+ V4L2_BUF_TYPE_VBI_OUTPUT, 2,
+ &vivid_vbi_out_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_sdr_cap) {
+ /* initialize sdr_cap queue */
+ ret = vivid_create_queue(dev, &dev->vb_sdr_cap_q,
+ V4L2_BUF_TYPE_SDR_CAPTURE, 8,
+ &vivid_sdr_cap_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_meta_cap) {
+ /* initialize meta_cap queue */
+ ret = vivid_create_queue(dev, &dev->vb_meta_cap_q,
+ V4L2_BUF_TYPE_META_CAPTURE, 2,
+ &vivid_meta_cap_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_meta_out) {
+ /* initialize meta_out queue */
+ ret = vivid_create_queue(dev, &dev->vb_meta_out_q,
+ V4L2_BUF_TYPE_META_OUTPUT, 1,
+ &vivid_meta_out_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_touch_cap) {
+ /* initialize touch_cap queue */
+ ret = vivid_create_queue(dev, &dev->vb_touch_cap_q,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE, 1,
+ &vivid_touch_cap_qops);
+ if (ret)
+ return ret;
+ }
+
+ if (dev->has_fb) {
+ /* Create framebuffer for testing capture/output overlay */
+ ret = vivid_fb_init(dev);
+ if (ret)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "Framebuffer device registered as fb%d\n",
+ dev->fb_info.node);
+ }
+ return 0;
+}
+
+static int vivid_create_devnodes(struct platform_device *pdev,
+ struct vivid_dev *dev, int inst,
+ unsigned int cec_tx_bus_cnt,
+ v4l2_std_id tvnorms_cap,
+ v4l2_std_id tvnorms_out,
+ unsigned in_type_counter[4],
+ unsigned out_type_counter[4])
+{
+ struct video_device *vfd;
+ int ret;
+
+ if (dev->has_vid_cap) {
+ vfd = &dev->vid_cap_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-vid-cap", inst);
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->vid_cap_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_vid_cap_q;
+ vfd->tvnorms = tvnorms_cap;
+
+ /*
+ * Provide a mutex to v4l2 core. It will be used to protect
+ * all fops and v4l2 ioctls.
+ */
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->vid_cap_pad.flags = MEDIA_PAD_FL_SINK;
+ ret = media_entity_pads_init(&vfd->entity, 1, &dev->vid_cap_pad);
+ if (ret)
+ return ret;
+#endif
+
+#ifdef CONFIG_VIDEO_VIVID_CEC
+ if (in_type_counter[HDMI]) {
+ ret = cec_register_adapter(dev->cec_rx_adap, &pdev->dev);
+ if (ret < 0) {
+ cec_delete_adapter(dev->cec_rx_adap);
+ dev->cec_rx_adap = NULL;
+ return ret;
+ }
+ cec_s_phys_addr(dev->cec_rx_adap, 0, false);
+ v4l2_info(&dev->v4l2_dev, "CEC adapter %s registered for HDMI input 0\n",
+ dev_name(&dev->cec_rx_adap->devnode.dev));
+ }
+#endif
+
+ ret = video_register_device(vfd, VFL_TYPE_VIDEO, vid_cap_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 capture device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_vid_out) {
+#ifdef CONFIG_VIDEO_VIVID_CEC
+ int i;
+#endif
+ vfd = &dev->vid_out_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-vid-out", inst);
+ vfd->vfl_dir = VFL_DIR_TX;
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->vid_out_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_vid_out_q;
+ vfd->tvnorms = tvnorms_out;
+
+ /*
+ * Provide a mutex to v4l2 core. It will be used to protect
+ * all fops and v4l2 ioctls.
+ */
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->vid_out_pad.flags = MEDIA_PAD_FL_SOURCE;
+ ret = media_entity_pads_init(&vfd->entity, 1, &dev->vid_out_pad);
+ if (ret)
+ return ret;
+#endif
+
+#ifdef CONFIG_VIDEO_VIVID_CEC
+ for (i = 0; i < cec_tx_bus_cnt; i++) {
+ ret = cec_register_adapter(dev->cec_tx_adap[i], &pdev->dev);
+ if (ret < 0) {
+ for (; i < cec_tx_bus_cnt; i++) {
+ cec_delete_adapter(dev->cec_tx_adap[i]);
+ dev->cec_tx_adap[i] = NULL;
+ }
+ return ret;
+ }
+ v4l2_info(&dev->v4l2_dev, "CEC adapter %s registered for HDMI output %d\n",
+ dev_name(&dev->cec_tx_adap[i]->devnode.dev), i);
+ if (i < out_type_counter[HDMI])
+ cec_s_phys_addr(dev->cec_tx_adap[i], (i + 1) << 12, false);
+ else
+ cec_s_phys_addr(dev->cec_tx_adap[i], 0x1000, false);
+ }
+#endif
+
+ ret = video_register_device(vfd, VFL_TYPE_VIDEO, vid_out_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 output device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_vbi_cap) {
+ vfd = &dev->vbi_cap_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-vbi-cap", inst);
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->vbi_cap_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_vbi_cap_q;
+ vfd->lock = &dev->mutex;
+ vfd->tvnorms = tvnorms_cap;
+ video_set_drvdata(vfd, dev);
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->vbi_cap_pad.flags = MEDIA_PAD_FL_SINK;
+ ret = media_entity_pads_init(&vfd->entity, 1, &dev->vbi_cap_pad);
+ if (ret)
+ return ret;
+#endif
+
+ ret = video_register_device(vfd, VFL_TYPE_VBI, vbi_cap_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 capture device registered as %s, supports %s VBI\n",
+ video_device_node_name(vfd),
+ (dev->has_raw_vbi_cap && dev->has_sliced_vbi_cap) ?
+ "raw and sliced" :
+ (dev->has_raw_vbi_cap ? "raw" : "sliced"));
+ }
+
+ if (dev->has_vbi_out) {
+ vfd = &dev->vbi_out_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-vbi-out", inst);
+ vfd->vfl_dir = VFL_DIR_TX;
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->vbi_out_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_vbi_out_q;
+ vfd->lock = &dev->mutex;
+ vfd->tvnorms = tvnorms_out;
+ video_set_drvdata(vfd, dev);
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->vbi_out_pad.flags = MEDIA_PAD_FL_SOURCE;
+ ret = media_entity_pads_init(&vfd->entity, 1, &dev->vbi_out_pad);
+ if (ret)
+ return ret;
+#endif
+
+ ret = video_register_device(vfd, VFL_TYPE_VBI, vbi_out_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 output device registered as %s, supports %s VBI\n",
+ video_device_node_name(vfd),
+ (dev->has_raw_vbi_out && dev->has_sliced_vbi_out) ?
+ "raw and sliced" :
+ (dev->has_raw_vbi_out ? "raw" : "sliced"));
+ }
+
+ if (dev->has_sdr_cap) {
+ vfd = &dev->sdr_cap_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-sdr-cap", inst);
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->sdr_cap_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_sdr_cap_q;
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->sdr_cap_pad.flags = MEDIA_PAD_FL_SINK;
+ ret = media_entity_pads_init(&vfd->entity, 1, &dev->sdr_cap_pad);
+ if (ret)
+ return ret;
+#endif
+
+ ret = video_register_device(vfd, VFL_TYPE_SDR, sdr_cap_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 capture device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_radio_rx) {
+ vfd = &dev->radio_rx_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-rad-rx", inst);
+ vfd->fops = &vivid_radio_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->radio_rx_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+
+ ret = video_register_device(vfd, VFL_TYPE_RADIO, radio_rx_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 receiver device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_radio_tx) {
+ vfd = &dev->radio_tx_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-rad-tx", inst);
+ vfd->vfl_dir = VFL_DIR_TX;
+ vfd->fops = &vivid_radio_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->radio_tx_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+
+ ret = video_register_device(vfd, VFL_TYPE_RADIO, radio_tx_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev, "V4L2 transmitter device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_meta_cap) {
+ vfd = &dev->meta_cap_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-meta-cap", inst);
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->meta_cap_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_meta_cap_q;
+ vfd->lock = &dev->mutex;
+ vfd->tvnorms = tvnorms_cap;
+ video_set_drvdata(vfd, dev);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->meta_cap_pad.flags = MEDIA_PAD_FL_SINK;
+ ret = media_entity_pads_init(&vfd->entity, 1,
+ &dev->meta_cap_pad);
+ if (ret)
+ return ret;
+#endif
+ ret = video_register_device(vfd, VFL_TYPE_VIDEO,
+ meta_cap_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev,
+ "V4L2 metadata capture device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_meta_out) {
+ vfd = &dev->meta_out_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-meta-out", inst);
+ vfd->vfl_dir = VFL_DIR_TX;
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->meta_out_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_meta_out_q;
+ vfd->lock = &dev->mutex;
+ vfd->tvnorms = tvnorms_out;
+ video_set_drvdata(vfd, dev);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->meta_out_pad.flags = MEDIA_PAD_FL_SOURCE;
+ ret = media_entity_pads_init(&vfd->entity, 1,
+ &dev->meta_out_pad);
+ if (ret)
+ return ret;
+#endif
+ ret = video_register_device(vfd, VFL_TYPE_VIDEO,
+ meta_out_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev,
+ "V4L2 metadata output device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+ if (dev->has_touch_cap) {
+ vfd = &dev->touch_cap_dev;
+ snprintf(vfd->name, sizeof(vfd->name),
+ "vivid-%03d-touch-cap", inst);
+ vfd->fops = &vivid_fops;
+ vfd->ioctl_ops = &vivid_ioctl_ops;
+ vfd->device_caps = dev->touch_cap_caps;
+ vfd->release = video_device_release_empty;
+ vfd->v4l2_dev = &dev->v4l2_dev;
+ vfd->queue = &dev->vb_touch_cap_q;
+ vfd->tvnorms = tvnorms_cap;
+ vfd->lock = &dev->mutex;
+ video_set_drvdata(vfd, dev);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->touch_cap_pad.flags = MEDIA_PAD_FL_SINK;
+ ret = media_entity_pads_init(&vfd->entity, 1,
+ &dev->touch_cap_pad);
+ if (ret)
+ return ret;
+#endif
+ ret = video_register_device(vfd, VFL_TYPE_TOUCH,
+ touch_cap_nr[inst]);
+ if (ret < 0)
+ return ret;
+ v4l2_info(&dev->v4l2_dev,
+ "V4L2 touch capture device registered as %s\n",
+ video_device_node_name(vfd));
+ }
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ /* Register the media device */
+ ret = media_device_register(&dev->mdev);
+ if (ret) {
+ dev_err(dev->mdev.dev,
+ "media device register failed (err=%d)\n", ret);
+ return ret;
+ }
+#endif
+ return 0;
+}
+
+static int vivid_create_instance(struct platform_device *pdev, int inst)
+{
+ static const struct v4l2_dv_timings def_dv_timings =
+ V4L2_DV_BT_CEA_1280X720P60;
+ unsigned in_type_counter[4] = { 0, 0, 0, 0 };
+ unsigned out_type_counter[4] = { 0, 0, 0, 0 };
+ int ccs_cap = ccs_cap_mode[inst];
+ int ccs_out = ccs_out_mode[inst];
+ bool has_tuner;
+ bool has_modulator;
+ struct vivid_dev *dev;
+ unsigned node_type = node_types[inst];
+ v4l2_std_id tvnorms_cap = 0, tvnorms_out = 0;
+ unsigned int cec_tx_bus_cnt = 0;
+ int ret;
+ int i;
+
+ /* allocate main vivid state structure */
+ dev = kzalloc(sizeof(*dev), GFP_KERNEL);
+ if (!dev)
+ return -ENOMEM;
+
+ dev->inst = inst;
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+ dev->v4l2_dev.mdev = &dev->mdev;
+
+ /* Initialize media device */
+ strscpy(dev->mdev.model, VIVID_MODULE_NAME, sizeof(dev->mdev.model));
+ snprintf(dev->mdev.bus_info, sizeof(dev->mdev.bus_info),
+ "platform:%s-%03d", VIVID_MODULE_NAME, inst);
+ dev->mdev.dev = &pdev->dev;
+ media_device_init(&dev->mdev);
+ dev->mdev.ops = &vivid_media_ops;
+#endif
+
+ /* register v4l2_device */
+ snprintf(dev->v4l2_dev.name, sizeof(dev->v4l2_dev.name),
+ "%s-%03d", VIVID_MODULE_NAME, inst);
+ ret = v4l2_device_register(&pdev->dev, &dev->v4l2_dev);
+ if (ret) {
+ kfree(dev);
+ return ret;
+ }
+ dev->v4l2_dev.release = vivid_dev_release;
+
+ ret = vivid_detect_feature_set(dev, inst, node_type,
+ &has_tuner, &has_modulator,
+ &ccs_cap, &ccs_out,
+ in_type_counter, out_type_counter);
+ if (ret)
+ goto free_dev;
+
+ vivid_set_capabilities(dev);
+
+ ret = -ENOMEM;
+ /* initialize the test pattern generator */
+ tpg_init(&dev->tpg, 640, 360);
+ if (tpg_alloc(&dev->tpg, array_size(MAX_WIDTH, MAX_ZOOM)))
+ goto free_dev;
+ dev->scaled_line = vzalloc(array_size(MAX_WIDTH, MAX_ZOOM));
+ if (!dev->scaled_line)
+ goto free_dev;
+ dev->blended_line = vzalloc(array_size(MAX_WIDTH, MAX_ZOOM));
+ if (!dev->blended_line)
+ goto free_dev;
+
+ /* load the edid */
+ dev->edid = vmalloc(array_size(256, 128));
+ if (!dev->edid)
+ goto free_dev;
+
+ ret = vivid_init_dv_timings(dev);
+ if (ret < 0)
+ goto free_dev;
+
+ vivid_disable_unused_ioctls(dev, has_tuner, has_modulator,
+ in_type_counter, out_type_counter);
+
+ /* configure internal data */
+ dev->fmt_cap = &vivid_formats[0];
+ dev->fmt_out = &vivid_formats[0];
+ if (!dev->multiplanar)
+ vivid_formats[0].data_offset[0] = 0;
+ dev->webcam_size_idx = 1;
+ dev->webcam_ival_idx = 3;
+ tpg_s_fourcc(&dev->tpg, dev->fmt_cap->fourcc);
+ dev->std_out = V4L2_STD_PAL;
+ if (dev->input_type[0] == TV || dev->input_type[0] == SVID)
+ tvnorms_cap = V4L2_STD_ALL;
+ if (dev->output_type[0] == SVID)
+ tvnorms_out = V4L2_STD_ALL;
+ for (i = 0; i < MAX_INPUTS; i++) {
+ dev->dv_timings_cap[i] = def_dv_timings;
+ dev->std_cap[i] = V4L2_STD_PAL;
+ }
+ dev->dv_timings_out = def_dv_timings;
+ dev->tv_freq = 2804 /* 175.25 * 16 */;
+ dev->tv_audmode = V4L2_TUNER_MODE_STEREO;
+ dev->tv_field_cap = V4L2_FIELD_INTERLACED;
+ dev->tv_field_out = V4L2_FIELD_INTERLACED;
+ dev->radio_rx_freq = 95000 * 16;
+ dev->radio_rx_audmode = V4L2_TUNER_MODE_STEREO;
+ if (dev->has_radio_tx) {
+ dev->radio_tx_freq = 95500 * 16;
+ dev->radio_rds_loop = false;
+ }
+ dev->radio_tx_subchans = V4L2_TUNER_SUB_STEREO | V4L2_TUNER_SUB_RDS;
+ dev->sdr_adc_freq = 300000;
+ dev->sdr_fm_freq = 50000000;
+ dev->sdr_pixelformat = V4L2_SDR_FMT_CU8;
+ dev->sdr_buffersize = SDR_CAP_SAMPLES_PER_BUF * 2;
+
+ dev->edid_max_blocks = dev->edid_blocks = 2;
+ memcpy(dev->edid, vivid_hdmi_edid, sizeof(vivid_hdmi_edid));
+ dev->radio_rds_init_time = ktime_get();
+
+ /* create all controls */
+ ret = vivid_create_controls(dev, ccs_cap == -1, ccs_out == -1, no_error_inj,
+ in_type_counter[TV] || in_type_counter[SVID] ||
+ out_type_counter[SVID],
+ in_type_counter[HDMI] || out_type_counter[HDMI]);
+ if (ret)
+ goto unreg_dev;
+
+ /* enable/disable interface specific controls */
+ if (dev->num_outputs && dev->output_type[0] != HDMI)
+ v4l2_ctrl_activate(dev->ctrl_display_present, false);
+ if (dev->num_inputs && dev->input_type[0] != HDMI) {
+ v4l2_ctrl_activate(dev->ctrl_dv_timings_signal_mode, false);
+ v4l2_ctrl_activate(dev->ctrl_dv_timings, false);
+ } else if (dev->num_inputs && dev->input_type[0] == HDMI) {
+ v4l2_ctrl_activate(dev->ctrl_std_signal_mode, false);
+ v4l2_ctrl_activate(dev->ctrl_standard, false);
+ }
+
+ /*
+ * update the capture and output formats to do a proper initial
+ * configuration.
+ */
+ vivid_update_format_cap(dev, false);
+ vivid_update_format_out(dev);
+
+ /* initialize overlay */
+ dev->fb_cap.fmt.width = dev->src_rect.width;
+ dev->fb_cap.fmt.height = dev->src_rect.height;
+ dev->fb_cap.fmt.pixelformat = dev->fmt_cap->fourcc;
+ dev->fb_cap.fmt.bytesperline = dev->src_rect.width * tpg_g_twopixelsize(&dev->tpg, 0) / 2;
+ dev->fb_cap.fmt.sizeimage = dev->src_rect.height * dev->fb_cap.fmt.bytesperline;
+
+ /* update touch configuration */
+ dev->timeperframe_tch_cap.numerator = 1;
+ dev->timeperframe_tch_cap.denominator = 10;
+ vivid_set_touch(dev, 0);
+
+ /* initialize locks */
+ spin_lock_init(&dev->slock);
+ mutex_init(&dev->mutex);
+
+ /* init dma queues */
+ INIT_LIST_HEAD(&dev->vid_cap_active);
+ INIT_LIST_HEAD(&dev->vid_out_active);
+ INIT_LIST_HEAD(&dev->vbi_cap_active);
+ INIT_LIST_HEAD(&dev->vbi_out_active);
+ INIT_LIST_HEAD(&dev->sdr_cap_active);
+ INIT_LIST_HEAD(&dev->meta_cap_active);
+ INIT_LIST_HEAD(&dev->meta_out_active);
+ INIT_LIST_HEAD(&dev->touch_cap_active);
+
+ spin_lock_init(&dev->cec_xfers_slock);
+
+ if (allocators[inst] == 1)
+ dma_coerce_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32));
+
+ ret = vivid_create_queues(dev);
+ if (ret)
+ goto unreg_dev;
+
+#ifdef CONFIG_VIDEO_VIVID_CEC
+ if (dev->has_vid_cap && in_type_counter[HDMI]) {
+ struct cec_adapter *adap;
+
+ adap = vivid_cec_alloc_adap(dev, 0, false);
+ ret = PTR_ERR_OR_ZERO(adap);
+ if (ret < 0)
+ goto unreg_dev;
+ dev->cec_rx_adap = adap;
+ }
+
+ if (dev->has_vid_out) {
+ for (i = 0; i < dev->num_outputs; i++) {
+ struct cec_adapter *adap;
+
+ if (dev->output_type[i] != HDMI)
+ continue;
+
+ dev->cec_output2bus_map[i] = cec_tx_bus_cnt;
+ adap = vivid_cec_alloc_adap(dev, cec_tx_bus_cnt, true);
+ ret = PTR_ERR_OR_ZERO(adap);
+ if (ret < 0) {
+ for (i = 0; i < dev->num_outputs; i++)
+ cec_delete_adapter(dev->cec_tx_adap[i]);
+ goto unreg_dev;
+ }
+
+ dev->cec_tx_adap[cec_tx_bus_cnt] = adap;
+ cec_tx_bus_cnt++;
+ }
+ }
+
+ if (dev->cec_rx_adap || cec_tx_bus_cnt) {
+ init_waitqueue_head(&dev->kthread_waitq_cec);
+ dev->kthread_cec = kthread_run(vivid_cec_bus_thread, dev,
+ "vivid_cec-%s", dev->v4l2_dev.name);
+ if (IS_ERR(dev->kthread_cec)) {
+ ret = PTR_ERR(dev->kthread_cec);
+ dev->kthread_cec = NULL;
+ v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
+ goto unreg_dev;
+ }
+ }
+
+#endif
+
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_vid_cap);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_vid_out);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_vbi_cap);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_vbi_out);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_radio_rx);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_radio_tx);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_sdr_cap);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_meta_cap);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_meta_out);
+ v4l2_ctrl_handler_setup(&dev->ctrl_hdl_touch_cap);
+
+ /* finally start creating the device nodes */
+ ret = vivid_create_devnodes(pdev, dev, inst, cec_tx_bus_cnt,
+ tvnorms_cap, tvnorms_out,
+ in_type_counter, out_type_counter);
+ if (ret)
+ goto unreg_dev;
+
+ /* Now that everything is fine, let's add it to device list */
+ vivid_devs[inst] = dev;
+
+ return 0;
+
+unreg_dev:
+ vb2_video_unregister_device(&dev->touch_cap_dev);
+ vb2_video_unregister_device(&dev->meta_out_dev);
+ vb2_video_unregister_device(&dev->meta_cap_dev);
+ video_unregister_device(&dev->radio_tx_dev);
+ video_unregister_device(&dev->radio_rx_dev);
+ vb2_video_unregister_device(&dev->sdr_cap_dev);
+ vb2_video_unregister_device(&dev->vbi_out_dev);
+ vb2_video_unregister_device(&dev->vbi_cap_dev);
+ vb2_video_unregister_device(&dev->vid_out_dev);
+ vb2_video_unregister_device(&dev->vid_cap_dev);
+ cec_unregister_adapter(dev->cec_rx_adap);
+ for (i = 0; i < MAX_OUTPUTS; i++)
+ cec_unregister_adapter(dev->cec_tx_adap[i]);
+ if (dev->kthread_cec)
+ kthread_stop(dev->kthread_cec);
+free_dev:
+ v4l2_device_put(&dev->v4l2_dev);
+ return ret;
+}
+
+/* This routine allocates from 1 to n_devs virtual drivers.
+
+ The real maximum number of virtual drivers will depend on how many drivers
+ will succeed. This is limited to the maximum number of devices that
+ videodev supports, which is equal to VIDEO_NUM_DEVICES.
+ */
+static int vivid_probe(struct platform_device *pdev)
+{
+ const struct font_desc *font = find_font("VGA8x16");
+ int ret = 0, i;
+
+ if (font == NULL) {
+ pr_err("vivid: could not find font\n");
+ return -ENODEV;
+ }
+
+ tpg_set_font(font->data);
+
+ n_devs = clamp_t(unsigned, n_devs, 1, VIVID_MAX_DEVS);
+
+ for (i = 0; i < n_devs; i++) {
+ ret = vivid_create_instance(pdev, i);
+ if (ret) {
+ /* If some instantiations succeeded, keep driver */
+ if (i)
+ ret = 0;
+ break;
+ }
+ }
+
+ if (ret < 0) {
+ pr_err("vivid: error %d while loading driver\n", ret);
+ return ret;
+ }
+
+ /* n_devs will reflect the actual number of allocated devices */
+ n_devs = i;
+
+ return ret;
+}
+
+static int vivid_remove(struct platform_device *pdev)
+{
+ struct vivid_dev *dev;
+ unsigned int i, j;
+
+ for (i = 0; i < n_devs; i++) {
+ dev = vivid_devs[i];
+ if (!dev)
+ continue;
+
+ if (dev->disconnect_error)
+ vivid_reconnect(dev);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ media_device_unregister(&dev->mdev);
+#endif
+
+ if (dev->has_vid_cap) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->vid_cap_dev));
+ vb2_video_unregister_device(&dev->vid_cap_dev);
+ }
+ if (dev->has_vid_out) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->vid_out_dev));
+ vb2_video_unregister_device(&dev->vid_out_dev);
+ }
+ if (dev->has_vbi_cap) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->vbi_cap_dev));
+ vb2_video_unregister_device(&dev->vbi_cap_dev);
+ }
+ if (dev->has_vbi_out) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->vbi_out_dev));
+ vb2_video_unregister_device(&dev->vbi_out_dev);
+ }
+ if (dev->has_sdr_cap) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->sdr_cap_dev));
+ vb2_video_unregister_device(&dev->sdr_cap_dev);
+ }
+ if (dev->has_radio_rx) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->radio_rx_dev));
+ video_unregister_device(&dev->radio_rx_dev);
+ }
+ if (dev->has_radio_tx) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->radio_tx_dev));
+ video_unregister_device(&dev->radio_tx_dev);
+ }
+ if (dev->has_fb) {
+ v4l2_info(&dev->v4l2_dev, "unregistering fb%d\n",
+ dev->fb_info.node);
+ unregister_framebuffer(&dev->fb_info);
+ vivid_fb_release_buffers(dev);
+ }
+ if (dev->has_meta_cap) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->meta_cap_dev));
+ vb2_video_unregister_device(&dev->meta_cap_dev);
+ }
+ if (dev->has_meta_out) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->meta_out_dev));
+ vb2_video_unregister_device(&dev->meta_out_dev);
+ }
+ if (dev->has_touch_cap) {
+ v4l2_info(&dev->v4l2_dev, "unregistering %s\n",
+ video_device_node_name(&dev->touch_cap_dev));
+ vb2_video_unregister_device(&dev->touch_cap_dev);
+ }
+ cec_unregister_adapter(dev->cec_rx_adap);
+ for (j = 0; j < MAX_OUTPUTS; j++)
+ cec_unregister_adapter(dev->cec_tx_adap[j]);
+ if (dev->kthread_cec)
+ kthread_stop(dev->kthread_cec);
+ v4l2_device_put(&dev->v4l2_dev);
+ vivid_devs[i] = NULL;
+ }
+ return 0;
+}
+
+static void vivid_pdev_release(struct device *dev)
+{
+}
+
+static struct platform_device vivid_pdev = {
+ .name = "vivid",
+ .dev.release = vivid_pdev_release,
+};
+
+static struct platform_driver vivid_pdrv = {
+ .probe = vivid_probe,
+ .remove = vivid_remove,
+ .driver = {
+ .name = "vivid",
+ },
+};
+
+static int __init vivid_init(void)
+{
+ int ret;
+
+ ret = platform_device_register(&vivid_pdev);
+ if (ret)
+ return ret;
+
+ ret = platform_driver_register(&vivid_pdrv);
+ if (ret)
+ platform_device_unregister(&vivid_pdev);
+
+ return ret;
+}
+
+static void __exit vivid_exit(void)
+{
+ platform_driver_unregister(&vivid_pdrv);
+ platform_device_unregister(&vivid_pdev);
+}
+
+module_init(vivid_init);
+module_exit(vivid_exit);
diff --git a/drivers/media/test-drivers/vivid/vivid-core.h b/drivers/media/test-drivers/vivid/vivid-core.h
new file mode 100644
index 000000000..473f3598d
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-core.h
@@ -0,0 +1,618 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-core.h - core datastructures
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_CORE_H_
+#define _VIVID_CORE_H_
+
+#include <linux/fb.h>
+#include <linux/workqueue.h>
+#include <media/cec.h>
+#include <media/videobuf2-v4l2.h>
+#include <media/v4l2-device.h>
+#include <media/v4l2-dev.h>
+#include <media/v4l2-ctrls.h>
+#include <media/tpg/v4l2-tpg.h>
+#include "vivid-rds-gen.h"
+#include "vivid-vbi-gen.h"
+
+#define dprintk(dev, level, fmt, arg...) \
+ v4l2_dbg(level, vivid_debug, &dev->v4l2_dev, fmt, ## arg)
+
+/* The maximum number of clip rectangles */
+#define MAX_CLIPS 16
+/* The maximum number of inputs */
+#define MAX_INPUTS 16
+/* The maximum number of outputs */
+#define MAX_OUTPUTS 16
+/* The maximum up or down scaling factor is 4 */
+#define MAX_ZOOM 4
+/* The maximum image width/height are set to 4K DMT */
+#define MAX_WIDTH 4096
+#define MAX_HEIGHT 2160
+/* The minimum image width/height */
+#define MIN_WIDTH 16
+#define MIN_HEIGHT MIN_WIDTH
+/* Pixel Array control divider */
+#define PIXEL_ARRAY_DIV MIN_WIDTH
+/* The data_offset of plane 0 for the multiplanar formats */
+#define PLANE0_DATA_OFFSET 128
+
+/* The supported TV frequency range in MHz */
+#define MIN_TV_FREQ (44U * 16U)
+#define MAX_TV_FREQ (958U * 16U)
+
+/* The number of samples returned in every SDR buffer */
+#define SDR_CAP_SAMPLES_PER_BUF 0x4000
+
+/* used by the threads to know when to resync internal counters */
+#define JIFFIES_PER_DAY (3600U * 24U * HZ)
+#define JIFFIES_RESYNC (JIFFIES_PER_DAY * (0xf0000000U / JIFFIES_PER_DAY))
+
+extern const struct v4l2_rect vivid_min_rect;
+extern const struct v4l2_rect vivid_max_rect;
+extern unsigned vivid_debug;
+
+struct vivid_fmt {
+ u32 fourcc; /* v4l2 format id */
+ enum tgp_color_enc color_enc;
+ bool can_do_overlay;
+ u8 vdownsampling[TPG_MAX_PLANES];
+ u32 alpha_mask;
+ u8 planes;
+ u8 buffers;
+ u32 data_offset[TPG_MAX_PLANES];
+ u32 bit_depth[TPG_MAX_PLANES];
+};
+
+extern struct vivid_fmt vivid_formats[];
+
+/* buffer for one video frame */
+struct vivid_buffer {
+ /* common v4l buffer stuff -- must be first */
+ struct vb2_v4l2_buffer vb;
+ struct list_head list;
+};
+
+enum vivid_input {
+ WEBCAM,
+ TV,
+ SVID,
+ HDMI,
+};
+
+enum vivid_signal_mode {
+ CURRENT_DV_TIMINGS,
+ CURRENT_STD = CURRENT_DV_TIMINGS,
+ NO_SIGNAL,
+ NO_LOCK,
+ OUT_OF_RANGE,
+ SELECTED_DV_TIMINGS,
+ SELECTED_STD = SELECTED_DV_TIMINGS,
+ CYCLE_DV_TIMINGS,
+ CYCLE_STD = CYCLE_DV_TIMINGS,
+ CUSTOM_DV_TIMINGS,
+};
+
+enum vivid_colorspace {
+ VIVID_CS_170M,
+ VIVID_CS_709,
+ VIVID_CS_SRGB,
+ VIVID_CS_OPRGB,
+ VIVID_CS_2020,
+ VIVID_CS_DCI_P3,
+ VIVID_CS_240M,
+ VIVID_CS_SYS_M,
+ VIVID_CS_SYS_BG,
+};
+
+#define VIVID_INVALID_SIGNAL(mode) \
+ ((mode) == NO_SIGNAL || (mode) == NO_LOCK || (mode) == OUT_OF_RANGE)
+
+struct vivid_cec_xfer {
+ struct cec_adapter *adap;
+ u8 msg[CEC_MAX_MSG_SIZE];
+ u32 len;
+ u32 sft;
+};
+
+struct vivid_dev {
+ unsigned inst;
+ struct v4l2_device v4l2_dev;
+#ifdef CONFIG_MEDIA_CONTROLLER
+ struct media_device mdev;
+ struct media_pad vid_cap_pad;
+ struct media_pad vid_out_pad;
+ struct media_pad vbi_cap_pad;
+ struct media_pad vbi_out_pad;
+ struct media_pad sdr_cap_pad;
+ struct media_pad meta_cap_pad;
+ struct media_pad meta_out_pad;
+ struct media_pad touch_cap_pad;
+#endif
+ struct v4l2_ctrl_handler ctrl_hdl_user_gen;
+ struct v4l2_ctrl_handler ctrl_hdl_user_vid;
+ struct v4l2_ctrl_handler ctrl_hdl_user_aud;
+ struct v4l2_ctrl_handler ctrl_hdl_streaming;
+ struct v4l2_ctrl_handler ctrl_hdl_sdtv_cap;
+ struct v4l2_ctrl_handler ctrl_hdl_loop_cap;
+ struct v4l2_ctrl_handler ctrl_hdl_fb;
+ struct video_device vid_cap_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_vid_cap;
+ struct video_device vid_out_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_vid_out;
+ struct video_device vbi_cap_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_vbi_cap;
+ struct video_device vbi_out_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_vbi_out;
+ struct video_device radio_rx_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_radio_rx;
+ struct video_device radio_tx_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_radio_tx;
+ struct video_device sdr_cap_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_sdr_cap;
+ struct video_device meta_cap_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_meta_cap;
+ struct video_device meta_out_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_meta_out;
+ struct video_device touch_cap_dev;
+ struct v4l2_ctrl_handler ctrl_hdl_touch_cap;
+
+ spinlock_t slock;
+ struct mutex mutex;
+
+ /* capabilities */
+ u32 vid_cap_caps;
+ u32 vid_out_caps;
+ u32 vbi_cap_caps;
+ u32 vbi_out_caps;
+ u32 sdr_cap_caps;
+ u32 radio_rx_caps;
+ u32 radio_tx_caps;
+ u32 meta_cap_caps;
+ u32 meta_out_caps;
+ u32 touch_cap_caps;
+
+ /* supported features */
+ bool multiplanar;
+ unsigned num_inputs;
+ unsigned int num_hdmi_inputs;
+ u8 input_type[MAX_INPUTS];
+ u8 input_name_counter[MAX_INPUTS];
+ unsigned num_outputs;
+ unsigned int num_hdmi_outputs;
+ u8 output_type[MAX_OUTPUTS];
+ u8 output_name_counter[MAX_OUTPUTS];
+ bool has_audio_inputs;
+ bool has_audio_outputs;
+ bool has_vid_cap;
+ bool has_vid_out;
+ bool has_vbi_cap;
+ bool has_raw_vbi_cap;
+ bool has_sliced_vbi_cap;
+ bool has_vbi_out;
+ bool has_raw_vbi_out;
+ bool has_sliced_vbi_out;
+ bool has_radio_rx;
+ bool has_radio_tx;
+ bool has_sdr_cap;
+ bool has_fb;
+ bool has_meta_cap;
+ bool has_meta_out;
+ bool has_tv_tuner;
+ bool has_touch_cap;
+
+ bool can_loop_video;
+
+ /* controls */
+ struct v4l2_ctrl *brightness;
+ struct v4l2_ctrl *contrast;
+ struct v4l2_ctrl *saturation;
+ struct v4l2_ctrl *hue;
+ struct {
+ /* autogain/gain cluster */
+ struct v4l2_ctrl *autogain;
+ struct v4l2_ctrl *gain;
+ };
+ struct v4l2_ctrl *volume;
+ struct v4l2_ctrl *mute;
+ struct v4l2_ctrl *alpha;
+ struct v4l2_ctrl *button;
+ struct v4l2_ctrl *boolean;
+ struct v4l2_ctrl *int32;
+ struct v4l2_ctrl *int64;
+ struct v4l2_ctrl *menu;
+ struct v4l2_ctrl *string;
+ struct v4l2_ctrl *bitmask;
+ struct v4l2_ctrl *int_menu;
+ struct v4l2_ctrl *ro_int32;
+ struct v4l2_ctrl *pixel_array;
+ struct v4l2_ctrl *test_pattern;
+ struct v4l2_ctrl *colorspace;
+ struct v4l2_ctrl *rgb_range_cap;
+ struct v4l2_ctrl *real_rgb_range_cap;
+ struct {
+ /* std_signal_mode/standard cluster */
+ struct v4l2_ctrl *ctrl_std_signal_mode;
+ struct v4l2_ctrl *ctrl_standard;
+ };
+ struct {
+ /* dv_timings_signal_mode/timings cluster */
+ struct v4l2_ctrl *ctrl_dv_timings_signal_mode;
+ struct v4l2_ctrl *ctrl_dv_timings;
+ };
+ struct v4l2_ctrl *ctrl_display_present;
+ struct v4l2_ctrl *ctrl_has_crop_cap;
+ struct v4l2_ctrl *ctrl_has_compose_cap;
+ struct v4l2_ctrl *ctrl_has_scaler_cap;
+ struct v4l2_ctrl *ctrl_has_crop_out;
+ struct v4l2_ctrl *ctrl_has_compose_out;
+ struct v4l2_ctrl *ctrl_has_scaler_out;
+ struct v4l2_ctrl *ctrl_tx_mode;
+ struct v4l2_ctrl *ctrl_tx_rgb_range;
+ struct v4l2_ctrl *ctrl_tx_edid_present;
+ struct v4l2_ctrl *ctrl_tx_hotplug;
+ struct v4l2_ctrl *ctrl_tx_rxsense;
+
+ struct v4l2_ctrl *ctrl_rx_power_present;
+
+ struct v4l2_ctrl *radio_tx_rds_pi;
+ struct v4l2_ctrl *radio_tx_rds_pty;
+ struct v4l2_ctrl *radio_tx_rds_mono_stereo;
+ struct v4l2_ctrl *radio_tx_rds_art_head;
+ struct v4l2_ctrl *radio_tx_rds_compressed;
+ struct v4l2_ctrl *radio_tx_rds_dyn_pty;
+ struct v4l2_ctrl *radio_tx_rds_ta;
+ struct v4l2_ctrl *radio_tx_rds_tp;
+ struct v4l2_ctrl *radio_tx_rds_ms;
+ struct v4l2_ctrl *radio_tx_rds_psname;
+ struct v4l2_ctrl *radio_tx_rds_radiotext;
+
+ struct v4l2_ctrl *radio_rx_rds_pty;
+ struct v4l2_ctrl *radio_rx_rds_ta;
+ struct v4l2_ctrl *radio_rx_rds_tp;
+ struct v4l2_ctrl *radio_rx_rds_ms;
+ struct v4l2_ctrl *radio_rx_rds_psname;
+ struct v4l2_ctrl *radio_rx_rds_radiotext;
+
+ unsigned input_brightness[MAX_INPUTS];
+ unsigned osd_mode;
+ unsigned button_pressed;
+ bool sensor_hflip;
+ bool sensor_vflip;
+ bool hflip;
+ bool vflip;
+ bool vbi_cap_interlaced;
+ bool loop_video;
+ bool reduced_fps;
+
+ /* Framebuffer */
+ unsigned long video_pbase;
+ void *video_vbase;
+ u32 video_buffer_size;
+ int display_width;
+ int display_height;
+ int display_byte_stride;
+ int bits_per_pixel;
+ int bytes_per_pixel;
+ struct fb_info fb_info;
+ struct fb_var_screeninfo fb_defined;
+ struct fb_fix_screeninfo fb_fix;
+
+ /* Error injection */
+ bool disconnect_error;
+ bool queue_setup_error;
+ bool buf_prepare_error;
+ bool start_streaming_error;
+ bool dqbuf_error;
+ bool req_validate_error;
+ bool seq_wrap;
+ u64 time_wrap;
+ u64 time_wrap_offset;
+ unsigned perc_dropped_buffers;
+ enum vivid_signal_mode std_signal_mode[MAX_INPUTS];
+ unsigned int query_std_last[MAX_INPUTS];
+ v4l2_std_id query_std[MAX_INPUTS];
+ enum tpg_video_aspect std_aspect_ratio[MAX_INPUTS];
+
+ enum vivid_signal_mode dv_timings_signal_mode[MAX_INPUTS];
+ char **query_dv_timings_qmenu;
+ char *query_dv_timings_qmenu_strings;
+ unsigned query_dv_timings_size;
+ unsigned int query_dv_timings_last[MAX_INPUTS];
+ unsigned int query_dv_timings[MAX_INPUTS];
+ enum tpg_video_aspect dv_timings_aspect_ratio[MAX_INPUTS];
+
+ /* Input */
+ unsigned input;
+ v4l2_std_id std_cap[MAX_INPUTS];
+ struct v4l2_dv_timings dv_timings_cap[MAX_INPUTS];
+ int dv_timings_cap_sel[MAX_INPUTS];
+ u32 service_set_cap;
+ struct vivid_vbi_gen_data vbi_gen;
+ u8 *edid;
+ unsigned edid_blocks;
+ unsigned edid_max_blocks;
+ unsigned webcam_size_idx;
+ unsigned webcam_ival_idx;
+ unsigned tv_freq;
+ unsigned tv_audmode;
+ unsigned tv_field_cap;
+ unsigned tv_audio_input;
+
+ u32 power_present;
+
+ /* Capture Overlay */
+ struct v4l2_framebuffer fb_cap;
+ struct v4l2_fh *overlay_cap_owner;
+ void *fb_vbase_cap;
+ int overlay_cap_top, overlay_cap_left;
+ enum v4l2_field overlay_cap_field;
+ void *bitmap_cap;
+ struct v4l2_clip clips_cap[MAX_CLIPS];
+ struct v4l2_clip try_clips_cap[MAX_CLIPS];
+ unsigned clipcount_cap;
+
+ /* Output */
+ unsigned output;
+ v4l2_std_id std_out;
+ struct v4l2_dv_timings dv_timings_out;
+ u32 colorspace_out;
+ u32 ycbcr_enc_out;
+ u32 hsv_enc_out;
+ u32 quantization_out;
+ u32 xfer_func_out;
+ u32 service_set_out;
+ unsigned bytesperline_out[TPG_MAX_PLANES];
+ unsigned tv_field_out;
+ unsigned tv_audio_output;
+ bool vbi_out_have_wss;
+ u8 vbi_out_wss[2];
+ bool vbi_out_have_cc[2];
+ u8 vbi_out_cc[2][2];
+ bool dvi_d_out;
+ u8 *scaled_line;
+ u8 *blended_line;
+ unsigned cur_scaled_line;
+ bool display_present[MAX_OUTPUTS];
+
+ /* Output Overlay */
+ void *fb_vbase_out;
+ bool overlay_out_enabled;
+ int overlay_out_top, overlay_out_left;
+ void *bitmap_out;
+ struct v4l2_clip clips_out[MAX_CLIPS];
+ struct v4l2_clip try_clips_out[MAX_CLIPS];
+ unsigned clipcount_out;
+ unsigned fbuf_out_flags;
+ u32 chromakey_out;
+ u8 global_alpha_out;
+
+ /* video capture */
+ struct tpg_data tpg;
+ unsigned ms_vid_cap;
+ bool must_blank[VIDEO_MAX_FRAME];
+
+ const struct vivid_fmt *fmt_cap;
+ struct v4l2_fract timeperframe_vid_cap;
+ enum v4l2_field field_cap;
+ struct v4l2_rect src_rect;
+ struct v4l2_rect fmt_cap_rect;
+ struct v4l2_rect crop_cap;
+ struct v4l2_rect compose_cap;
+ struct v4l2_rect crop_bounds_cap;
+ struct vb2_queue vb_vid_cap_q;
+ struct list_head vid_cap_active;
+ struct vb2_queue vb_vbi_cap_q;
+ struct list_head vbi_cap_active;
+ struct vb2_queue vb_meta_cap_q;
+ struct list_head meta_cap_active;
+ struct vb2_queue vb_touch_cap_q;
+ struct list_head touch_cap_active;
+
+ /* thread for generating video capture stream */
+ struct task_struct *kthread_vid_cap;
+ unsigned long jiffies_vid_cap;
+ u64 cap_stream_start;
+ u64 cap_frame_period;
+ u64 cap_frame_eof_offset;
+ u32 cap_seq_offset;
+ u32 cap_seq_count;
+ bool cap_seq_resync;
+ u32 vid_cap_seq_start;
+ u32 vid_cap_seq_count;
+ bool vid_cap_streaming;
+ u32 vbi_cap_seq_start;
+ u32 vbi_cap_seq_count;
+ bool vbi_cap_streaming;
+ u32 meta_cap_seq_start;
+ u32 meta_cap_seq_count;
+ bool meta_cap_streaming;
+
+ /* Touch capture */
+ struct task_struct *kthread_touch_cap;
+ unsigned long jiffies_touch_cap;
+ u64 touch_cap_stream_start;
+ u32 touch_cap_seq_offset;
+ bool touch_cap_seq_resync;
+ u32 touch_cap_seq_start;
+ u32 touch_cap_seq_count;
+ u32 touch_cap_with_seq_wrap_count;
+ bool touch_cap_streaming;
+ struct v4l2_fract timeperframe_tch_cap;
+ struct v4l2_pix_format tch_format;
+ int tch_pat_random;
+
+ /* video output */
+ const struct vivid_fmt *fmt_out;
+ struct v4l2_fract timeperframe_vid_out;
+ enum v4l2_field field_out;
+ struct v4l2_rect sink_rect;
+ struct v4l2_rect fmt_out_rect;
+ struct v4l2_rect crop_out;
+ struct v4l2_rect compose_out;
+ struct v4l2_rect compose_bounds_out;
+ struct vb2_queue vb_vid_out_q;
+ struct list_head vid_out_active;
+ struct vb2_queue vb_vbi_out_q;
+ struct list_head vbi_out_active;
+ struct vb2_queue vb_meta_out_q;
+ struct list_head meta_out_active;
+
+ /* video loop precalculated rectangles */
+
+ /*
+ * Intersection between what the output side composes and the capture side
+ * crops. I.e., what actually needs to be copied from the output buffer to
+ * the capture buffer.
+ */
+ struct v4l2_rect loop_vid_copy;
+ /* The part of the output buffer that (after scaling) corresponds to loop_vid_copy. */
+ struct v4l2_rect loop_vid_out;
+ /* The part of the capture buffer that (after scaling) corresponds to loop_vid_copy. */
+ struct v4l2_rect loop_vid_cap;
+ /*
+ * The intersection of the framebuffer, the overlay output window and
+ * loop_vid_copy. I.e., the part of the framebuffer that actually should be
+ * blended with the compose_out rectangle. This uses the framebuffer origin.
+ */
+ struct v4l2_rect loop_fb_copy;
+ /* The same as loop_fb_copy but with compose_out origin. */
+ struct v4l2_rect loop_vid_overlay;
+ /*
+ * The part of the capture buffer that (after scaling) corresponds
+ * to loop_vid_overlay.
+ */
+ struct v4l2_rect loop_vid_overlay_cap;
+
+ /* thread for generating video output stream */
+ struct task_struct *kthread_vid_out;
+ unsigned long jiffies_vid_out;
+ u32 out_seq_offset;
+ u32 out_seq_count;
+ bool out_seq_resync;
+ u32 vid_out_seq_start;
+ u32 vid_out_seq_count;
+ bool vid_out_streaming;
+ u32 vbi_out_seq_start;
+ u32 vbi_out_seq_count;
+ bool vbi_out_streaming;
+ bool stream_sliced_vbi_out;
+ u32 meta_out_seq_start;
+ u32 meta_out_seq_count;
+ bool meta_out_streaming;
+
+ /* SDR capture */
+ struct vb2_queue vb_sdr_cap_q;
+ struct list_head sdr_cap_active;
+ u32 sdr_pixelformat; /* v4l2 format id */
+ unsigned sdr_buffersize;
+ unsigned sdr_adc_freq;
+ unsigned sdr_fm_freq;
+ unsigned sdr_fm_deviation;
+ int sdr_fixp_src_phase;
+ int sdr_fixp_mod_phase;
+
+ bool tstamp_src_is_soe;
+ bool has_crop_cap;
+ bool has_compose_cap;
+ bool has_scaler_cap;
+ bool has_crop_out;
+ bool has_compose_out;
+ bool has_scaler_out;
+
+ /* thread for generating SDR stream */
+ struct task_struct *kthread_sdr_cap;
+ unsigned long jiffies_sdr_cap;
+ u32 sdr_cap_seq_offset;
+ u32 sdr_cap_seq_start;
+ u32 sdr_cap_seq_count;
+ u32 sdr_cap_with_seq_wrap_count;
+ bool sdr_cap_seq_resync;
+
+ /* RDS generator */
+ struct vivid_rds_gen rds_gen;
+
+ /* Radio receiver */
+ unsigned radio_rx_freq;
+ unsigned radio_rx_audmode;
+ int radio_rx_sig_qual;
+ unsigned radio_rx_hw_seek_mode;
+ bool radio_rx_hw_seek_prog_lim;
+ bool radio_rx_rds_controls;
+ bool radio_rx_rds_enabled;
+ unsigned radio_rx_rds_use_alternates;
+ unsigned radio_rx_rds_last_block;
+ struct v4l2_fh *radio_rx_rds_owner;
+
+ /* Radio transmitter */
+ unsigned radio_tx_freq;
+ unsigned radio_tx_subchans;
+ bool radio_tx_rds_controls;
+ unsigned radio_tx_rds_last_block;
+ struct v4l2_fh *radio_tx_rds_owner;
+
+ /* Shared between radio receiver and transmitter */
+ bool radio_rds_loop;
+ ktime_t radio_rds_init_time;
+
+ /* CEC */
+ struct cec_adapter *cec_rx_adap;
+ struct cec_adapter *cec_tx_adap[MAX_OUTPUTS];
+ u8 cec_output2bus_map[MAX_OUTPUTS];
+ struct task_struct *kthread_cec;
+ wait_queue_head_t kthread_waitq_cec;
+ struct vivid_cec_xfer xfers[MAX_OUTPUTS];
+ spinlock_t cec_xfers_slock; /* read and write cec messages */
+ u32 cec_sft; /* bus signal free time, in bit periods */
+ u8 last_initiator;
+
+ /* CEC OSD String */
+ char osd[14];
+ unsigned long osd_jiffies;
+
+ bool meta_pts;
+ bool meta_scr;
+};
+
+static inline bool vivid_is_webcam(const struct vivid_dev *dev)
+{
+ return dev->input_type[dev->input] == WEBCAM;
+}
+
+static inline bool vivid_is_tv_cap(const struct vivid_dev *dev)
+{
+ return dev->input_type[dev->input] == TV;
+}
+
+static inline bool vivid_is_svid_cap(const struct vivid_dev *dev)
+{
+ return dev->input_type[dev->input] == SVID;
+}
+
+static inline bool vivid_is_hdmi_cap(const struct vivid_dev *dev)
+{
+ return dev->input_type[dev->input] == HDMI;
+}
+
+static inline bool vivid_is_sdtv_cap(const struct vivid_dev *dev)
+{
+ return vivid_is_tv_cap(dev) || vivid_is_svid_cap(dev);
+}
+
+static inline bool vivid_is_svid_out(const struct vivid_dev *dev)
+{
+ return dev->output_type[dev->output] == SVID;
+}
+
+static inline bool vivid_is_hdmi_out(const struct vivid_dev *dev)
+{
+ return dev->output_type[dev->output] == HDMI;
+}
+
+bool vivid_validate_fb(const struct v4l2_framebuffer *a);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-ctrls.c b/drivers/media/test-drivers/vivid/vivid-ctrls.c
new file mode 100644
index 000000000..92b1a7598
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-ctrls.c
@@ -0,0 +1,2002 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-ctrls.c - control support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/videodev2.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-common.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-cap.h"
+#include "vivid-vid-out.h"
+#include "vivid-vid-common.h"
+#include "vivid-radio-common.h"
+#include "vivid-osd.h"
+#include "vivid-ctrls.h"
+#include "vivid-cec.h"
+
+#define VIVID_CID_CUSTOM_BASE (V4L2_CID_USER_BASE | 0xf000)
+#define VIVID_CID_BUTTON (VIVID_CID_CUSTOM_BASE + 0)
+#define VIVID_CID_BOOLEAN (VIVID_CID_CUSTOM_BASE + 1)
+#define VIVID_CID_INTEGER (VIVID_CID_CUSTOM_BASE + 2)
+#define VIVID_CID_INTEGER64 (VIVID_CID_CUSTOM_BASE + 3)
+#define VIVID_CID_MENU (VIVID_CID_CUSTOM_BASE + 4)
+#define VIVID_CID_STRING (VIVID_CID_CUSTOM_BASE + 5)
+#define VIVID_CID_BITMASK (VIVID_CID_CUSTOM_BASE + 6)
+#define VIVID_CID_INTMENU (VIVID_CID_CUSTOM_BASE + 7)
+#define VIVID_CID_U32_ARRAY (VIVID_CID_CUSTOM_BASE + 8)
+#define VIVID_CID_U16_MATRIX (VIVID_CID_CUSTOM_BASE + 9)
+#define VIVID_CID_U8_4D_ARRAY (VIVID_CID_CUSTOM_BASE + 10)
+#define VIVID_CID_AREA (VIVID_CID_CUSTOM_BASE + 11)
+#define VIVID_CID_RO_INTEGER (VIVID_CID_CUSTOM_BASE + 12)
+#define VIVID_CID_U32_DYN_ARRAY (VIVID_CID_CUSTOM_BASE + 13)
+#define VIVID_CID_U8_PIXEL_ARRAY (VIVID_CID_CUSTOM_BASE + 14)
+
+#define VIVID_CID_VIVID_BASE (0x00f00000 | 0xf000)
+#define VIVID_CID_VIVID_CLASS (0x00f00000 | 1)
+#define VIVID_CID_TEST_PATTERN (VIVID_CID_VIVID_BASE + 0)
+#define VIVID_CID_OSD_TEXT_MODE (VIVID_CID_VIVID_BASE + 1)
+#define VIVID_CID_HOR_MOVEMENT (VIVID_CID_VIVID_BASE + 2)
+#define VIVID_CID_VERT_MOVEMENT (VIVID_CID_VIVID_BASE + 3)
+#define VIVID_CID_SHOW_BORDER (VIVID_CID_VIVID_BASE + 4)
+#define VIVID_CID_SHOW_SQUARE (VIVID_CID_VIVID_BASE + 5)
+#define VIVID_CID_INSERT_SAV (VIVID_CID_VIVID_BASE + 6)
+#define VIVID_CID_INSERT_EAV (VIVID_CID_VIVID_BASE + 7)
+#define VIVID_CID_VBI_CAP_INTERLACED (VIVID_CID_VIVID_BASE + 8)
+#define VIVID_CID_INSERT_HDMI_VIDEO_GUARD_BAND (VIVID_CID_VIVID_BASE + 9)
+
+#define VIVID_CID_HFLIP (VIVID_CID_VIVID_BASE + 20)
+#define VIVID_CID_VFLIP (VIVID_CID_VIVID_BASE + 21)
+#define VIVID_CID_STD_ASPECT_RATIO (VIVID_CID_VIVID_BASE + 22)
+#define VIVID_CID_DV_TIMINGS_ASPECT_RATIO (VIVID_CID_VIVID_BASE + 23)
+#define VIVID_CID_TSTAMP_SRC (VIVID_CID_VIVID_BASE + 24)
+#define VIVID_CID_COLORSPACE (VIVID_CID_VIVID_BASE + 25)
+#define VIVID_CID_XFER_FUNC (VIVID_CID_VIVID_BASE + 26)
+#define VIVID_CID_YCBCR_ENC (VIVID_CID_VIVID_BASE + 27)
+#define VIVID_CID_QUANTIZATION (VIVID_CID_VIVID_BASE + 28)
+#define VIVID_CID_LIMITED_RGB_RANGE (VIVID_CID_VIVID_BASE + 29)
+#define VIVID_CID_ALPHA_MODE (VIVID_CID_VIVID_BASE + 30)
+#define VIVID_CID_HAS_CROP_CAP (VIVID_CID_VIVID_BASE + 31)
+#define VIVID_CID_HAS_COMPOSE_CAP (VIVID_CID_VIVID_BASE + 32)
+#define VIVID_CID_HAS_SCALER_CAP (VIVID_CID_VIVID_BASE + 33)
+#define VIVID_CID_HAS_CROP_OUT (VIVID_CID_VIVID_BASE + 34)
+#define VIVID_CID_HAS_COMPOSE_OUT (VIVID_CID_VIVID_BASE + 35)
+#define VIVID_CID_HAS_SCALER_OUT (VIVID_CID_VIVID_BASE + 36)
+#define VIVID_CID_LOOP_VIDEO (VIVID_CID_VIVID_BASE + 37)
+#define VIVID_CID_SEQ_WRAP (VIVID_CID_VIVID_BASE + 38)
+#define VIVID_CID_TIME_WRAP (VIVID_CID_VIVID_BASE + 39)
+#define VIVID_CID_MAX_EDID_BLOCKS (VIVID_CID_VIVID_BASE + 40)
+#define VIVID_CID_PERCENTAGE_FILL (VIVID_CID_VIVID_BASE + 41)
+#define VIVID_CID_REDUCED_FPS (VIVID_CID_VIVID_BASE + 42)
+#define VIVID_CID_HSV_ENC (VIVID_CID_VIVID_BASE + 43)
+#define VIVID_CID_DISPLAY_PRESENT (VIVID_CID_VIVID_BASE + 44)
+
+#define VIVID_CID_STD_SIGNAL_MODE (VIVID_CID_VIVID_BASE + 60)
+#define VIVID_CID_STANDARD (VIVID_CID_VIVID_BASE + 61)
+#define VIVID_CID_DV_TIMINGS_SIGNAL_MODE (VIVID_CID_VIVID_BASE + 62)
+#define VIVID_CID_DV_TIMINGS (VIVID_CID_VIVID_BASE + 63)
+#define VIVID_CID_PERC_DROPPED (VIVID_CID_VIVID_BASE + 64)
+#define VIVID_CID_DISCONNECT (VIVID_CID_VIVID_BASE + 65)
+#define VIVID_CID_DQBUF_ERROR (VIVID_CID_VIVID_BASE + 66)
+#define VIVID_CID_QUEUE_SETUP_ERROR (VIVID_CID_VIVID_BASE + 67)
+#define VIVID_CID_BUF_PREPARE_ERROR (VIVID_CID_VIVID_BASE + 68)
+#define VIVID_CID_START_STR_ERROR (VIVID_CID_VIVID_BASE + 69)
+#define VIVID_CID_QUEUE_ERROR (VIVID_CID_VIVID_BASE + 70)
+#define VIVID_CID_CLEAR_FB (VIVID_CID_VIVID_BASE + 71)
+#define VIVID_CID_REQ_VALIDATE_ERROR (VIVID_CID_VIVID_BASE + 72)
+
+#define VIVID_CID_RADIO_SEEK_MODE (VIVID_CID_VIVID_BASE + 90)
+#define VIVID_CID_RADIO_SEEK_PROG_LIM (VIVID_CID_VIVID_BASE + 91)
+#define VIVID_CID_RADIO_RX_RDS_RBDS (VIVID_CID_VIVID_BASE + 92)
+#define VIVID_CID_RADIO_RX_RDS_BLOCKIO (VIVID_CID_VIVID_BASE + 93)
+
+#define VIVID_CID_RADIO_TX_RDS_BLOCKIO (VIVID_CID_VIVID_BASE + 94)
+
+#define VIVID_CID_SDR_CAP_FM_DEVIATION (VIVID_CID_VIVID_BASE + 110)
+
+#define VIVID_CID_META_CAP_GENERATE_PTS (VIVID_CID_VIVID_BASE + 111)
+#define VIVID_CID_META_CAP_GENERATE_SCR (VIVID_CID_VIVID_BASE + 112)
+
+/* General User Controls */
+
+static void vivid_unregister_dev(bool valid, struct video_device *vdev)
+{
+ if (!valid)
+ return;
+ clear_bit(V4L2_FL_REGISTERED, &vdev->flags);
+ v4l2_event_wake_all(vdev);
+}
+
+static int vivid_user_gen_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_user_gen);
+
+ switch (ctrl->id) {
+ case VIVID_CID_DISCONNECT:
+ v4l2_info(&dev->v4l2_dev, "disconnect\n");
+ dev->disconnect_error = true;
+ vivid_unregister_dev(dev->has_vid_cap, &dev->vid_cap_dev);
+ vivid_unregister_dev(dev->has_vid_out, &dev->vid_out_dev);
+ vivid_unregister_dev(dev->has_vbi_cap, &dev->vbi_cap_dev);
+ vivid_unregister_dev(dev->has_vbi_out, &dev->vbi_out_dev);
+ vivid_unregister_dev(dev->has_radio_rx, &dev->radio_rx_dev);
+ vivid_unregister_dev(dev->has_radio_tx, &dev->radio_tx_dev);
+ vivid_unregister_dev(dev->has_sdr_cap, &dev->sdr_cap_dev);
+ vivid_unregister_dev(dev->has_meta_cap, &dev->meta_cap_dev);
+ vivid_unregister_dev(dev->has_meta_out, &dev->meta_out_dev);
+ vivid_unregister_dev(dev->has_touch_cap, &dev->touch_cap_dev);
+ break;
+ case VIVID_CID_BUTTON:
+ dev->button_pressed = 30;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_user_gen_ctrl_ops = {
+ .s_ctrl = vivid_user_gen_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_button = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_BUTTON,
+ .name = "Button",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_boolean = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_BOOLEAN,
+ .name = "Boolean",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .min = 0,
+ .max = 1,
+ .step = 1,
+ .def = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_int32 = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_INTEGER,
+ .name = "Integer 32 Bits",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .min = 0xffffffff80000000ULL,
+ .max = 0x7fffffff,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_int64 = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_INTEGER64,
+ .name = "Integer 64 Bits",
+ .type = V4L2_CTRL_TYPE_INTEGER64,
+ .min = 0x8000000000000000ULL,
+ .max = 0x7fffffffffffffffLL,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_u32_array = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_U32_ARRAY,
+ .name = "U32 1 Element Array",
+ .type = V4L2_CTRL_TYPE_U32,
+ .def = 0x18,
+ .min = 0x10,
+ .max = 0x20000,
+ .step = 1,
+ .dims = { 1 },
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_u32_dyn_array = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_U32_DYN_ARRAY,
+ .name = "U32 Dynamic Array",
+ .type = V4L2_CTRL_TYPE_U32,
+ .flags = V4L2_CTRL_FLAG_DYNAMIC_ARRAY,
+ .def = 50,
+ .min = 10,
+ .max = 90,
+ .step = 1,
+ .dims = { 100 },
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_u16_matrix = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_U16_MATRIX,
+ .name = "U16 8x16 Matrix",
+ .type = V4L2_CTRL_TYPE_U16,
+ .def = 0x18,
+ .min = 0x10,
+ .max = 0x2000,
+ .step = 1,
+ .dims = { 8, 16 },
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_u8_4d_array = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_U8_4D_ARRAY,
+ .name = "U8 2x3x4x5 Array",
+ .type = V4L2_CTRL_TYPE_U8,
+ .def = 0x18,
+ .min = 0x10,
+ .max = 0x20,
+ .step = 1,
+ .dims = { 2, 3, 4, 5 },
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_u8_pixel_array = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_U8_PIXEL_ARRAY,
+ .name = "U8 Pixel Array",
+ .type = V4L2_CTRL_TYPE_U8,
+ .def = 0x80,
+ .min = 0x00,
+ .max = 0xff,
+ .step = 1,
+ .dims = { 640 / PIXEL_ARRAY_DIV, 360 / PIXEL_ARRAY_DIV },
+};
+
+static const char * const vivid_ctrl_menu_strings[] = {
+ "Menu Item 0 (Skipped)",
+ "Menu Item 1",
+ "Menu Item 2 (Skipped)",
+ "Menu Item 3",
+ "Menu Item 4",
+ "Menu Item 5 (Skipped)",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_menu = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_MENU,
+ .name = "Menu",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .min = 1,
+ .max = 4,
+ .def = 3,
+ .menu_skip_mask = 0x04,
+ .qmenu = vivid_ctrl_menu_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_string = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_STRING,
+ .name = "String",
+ .type = V4L2_CTRL_TYPE_STRING,
+ .min = 2,
+ .max = 4,
+ .step = 1,
+};
+
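+/* For a bitmask control, .max defines which bits userspace is allowed to set */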
+static const struct v4l2_ctrl_config vivid_ctrl_bitmask = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_BITMASK,
+ .name = "Bitmask",
+ .type = V4L2_CTRL_TYPE_BITMASK,
+ .def = 0x80002000,
+ .min = 0,
+ .max = 0x80402010,
+ .step = 0,
+};
+
+static const s64 vivid_ctrl_int_menu_values[] = {
+ 1, 1, 2, 3, 5, 8, 13, 21, 42,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_int_menu = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_INTMENU,
+ .name = "Integer Menu",
+ .type = V4L2_CTRL_TYPE_INTEGER_MENU,
+ .min = 1,
+ .max = 8,
+ .def = 4,
+ .menu_skip_mask = 0x02,
+ .qmenu_int = vivid_ctrl_int_menu_values,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_disconnect = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_DISCONNECT,
+ .name = "Disconnect",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_area area = {
+ .width = 1000,
+ .height = 2000,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_area = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_AREA,
+ .name = "Area",
+ .type = V4L2_CTRL_TYPE_AREA,
+ .p_def.p_const = &area,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_ro_int32 = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .id = VIVID_CID_RO_INTEGER,
+ .name = "Read-Only Integer 32 Bits",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .flags = V4L2_CTRL_FLAG_READ_ONLY,
+ .min = 0,
+ .max = 255,
+ .step = 1,
+};
+
+/* Framebuffer Controls */
+
+static int vivid_fb_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler,
+ struct vivid_dev, ctrl_hdl_fb);
+
+ switch (ctrl->id) {
+ case VIVID_CID_CLEAR_FB:
+ vivid_clear_fb(dev);
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_fb_ctrl_ops = {
+ .s_ctrl = vivid_fb_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_clear_fb = {
+ .ops = &vivid_fb_ctrl_ops,
+ .id = VIVID_CID_CLEAR_FB,
+ .name = "Clear Framebuffer",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+
+/* Video User Controls */
+
+static int vivid_user_vid_g_volatile_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_user_vid);
+
+ switch (ctrl->id) {
+ case V4L2_CID_AUTOGAIN:
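+		/* Simulate a volatile gain that changes once per second, cycling through 0-255 */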
+ dev->gain->val = (jiffies_to_msecs(jiffies) / 1000) & 0xff;
+ break;
+ }
+ return 0;
+}
+
+static int vivid_user_vid_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_user_vid);
+
+ switch (ctrl->id) {
+ case V4L2_CID_BRIGHTNESS:
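+		/* The brightness range is shifted by input * 128, so normalize it for the TPG */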
+ dev->input_brightness[dev->input] = ctrl->val - dev->input * 128;
+ tpg_s_brightness(&dev->tpg, dev->input_brightness[dev->input]);
+ break;
+ case V4L2_CID_CONTRAST:
+ tpg_s_contrast(&dev->tpg, ctrl->val);
+ break;
+ case V4L2_CID_SATURATION:
+ tpg_s_saturation(&dev->tpg, ctrl->val);
+ break;
+ case V4L2_CID_HUE:
+ tpg_s_hue(&dev->tpg, ctrl->val);
+ break;
+ case V4L2_CID_HFLIP:
+ dev->hflip = ctrl->val;
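+		/* The effective flip is the user flip XORed with the simulated sensor flip */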
+ tpg_s_hflip(&dev->tpg, dev->sensor_hflip ^ dev->hflip);
+ break;
+ case V4L2_CID_VFLIP:
+ dev->vflip = ctrl->val;
+ tpg_s_vflip(&dev->tpg, dev->sensor_vflip ^ dev->vflip);
+ break;
+ case V4L2_CID_ALPHA_COMPONENT:
+ tpg_s_alpha_component(&dev->tpg, ctrl->val);
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_user_vid_ctrl_ops = {
+ .g_volatile_ctrl = vivid_user_vid_g_volatile_ctrl,
+ .s_ctrl = vivid_user_vid_s_ctrl,
+};
+
+
+/* Video Capture Controls */
+
+static int vivid_vid_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
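+	/* Indexed by VIVID_CID_COLORSPACE; keep in sync with vivid_ctrl_colorspace_strings */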
+ static const u32 colorspaces[] = {
+ V4L2_COLORSPACE_SMPTE170M,
+ V4L2_COLORSPACE_REC709,
+ V4L2_COLORSPACE_SRGB,
+ V4L2_COLORSPACE_OPRGB,
+ V4L2_COLORSPACE_BT2020,
+ V4L2_COLORSPACE_DCI_P3,
+ V4L2_COLORSPACE_SMPTE240M,
+ V4L2_COLORSPACE_470_SYSTEM_M,
+ V4L2_COLORSPACE_470_SYSTEM_BG,
+ };
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_vid_cap);
+ unsigned int i, j;
+
+ switch (ctrl->id) {
+ case VIVID_CID_TEST_PATTERN:
+ vivid_update_quality(dev);
+ tpg_s_pattern(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_COLORSPACE:
+ tpg_s_colorspace(&dev->tpg, colorspaces[ctrl->val]);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ vivid_send_source_change(dev, WEBCAM);
+ break;
+ case VIVID_CID_XFER_FUNC:
+ tpg_s_xfer_func(&dev->tpg, ctrl->val);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ vivid_send_source_change(dev, WEBCAM);
+ break;
+ case VIVID_CID_YCBCR_ENC:
+ tpg_s_ycbcr_enc(&dev->tpg, ctrl->val);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ vivid_send_source_change(dev, WEBCAM);
+ break;
+ case VIVID_CID_HSV_ENC:
+ tpg_s_hsv_enc(&dev->tpg, ctrl->val ? V4L2_HSV_ENC_256 :
+ V4L2_HSV_ENC_180);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ vivid_send_source_change(dev, WEBCAM);
+ break;
+ case VIVID_CID_QUANTIZATION:
+ tpg_s_quantization(&dev->tpg, ctrl->val);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ vivid_send_source_change(dev, WEBCAM);
+ break;
+ case V4L2_CID_DV_RX_RGB_RANGE:
+ if (!vivid_is_hdmi_cap(dev))
+ break;
+ tpg_s_rgb_range(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_LIMITED_RGB_RANGE:
+ tpg_s_real_rgb_range(&dev->tpg, ctrl->val ?
+ V4L2_DV_RGB_RANGE_LIMITED : V4L2_DV_RGB_RANGE_FULL);
+ break;
+ case VIVID_CID_ALPHA_MODE:
+ tpg_s_alpha_mode(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_HOR_MOVEMENT:
+ tpg_s_mv_hor_mode(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_VERT_MOVEMENT:
+ tpg_s_mv_vert_mode(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_OSD_TEXT_MODE:
+ dev->osd_mode = ctrl->val;
+ break;
+ case VIVID_CID_PERCENTAGE_FILL:
+ tpg_s_perc_fill(&dev->tpg, ctrl->val);
+ for (i = 0; i < VIDEO_MAX_FRAME; i++)
+ dev->must_blank[i] = ctrl->val < 100;
+ break;
+ case VIVID_CID_INSERT_SAV:
+ tpg_s_insert_sav(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_INSERT_EAV:
+ tpg_s_insert_eav(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_INSERT_HDMI_VIDEO_GUARD_BAND:
+ tpg_s_insert_hdmi_video_guard_band(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_HFLIP:
+ dev->sensor_hflip = ctrl->val;
+ tpg_s_hflip(&dev->tpg, dev->sensor_hflip ^ dev->hflip);
+ break;
+ case VIVID_CID_VFLIP:
+ dev->sensor_vflip = ctrl->val;
+ tpg_s_vflip(&dev->tpg, dev->sensor_vflip ^ dev->vflip);
+ break;
+ case VIVID_CID_REDUCED_FPS:
+ dev->reduced_fps = ctrl->val;
+ vivid_update_format_cap(dev, true);
+ break;
+ case VIVID_CID_HAS_CROP_CAP:
+ dev->has_crop_cap = ctrl->val;
+ vivid_update_format_cap(dev, true);
+ break;
+ case VIVID_CID_HAS_COMPOSE_CAP:
+ dev->has_compose_cap = ctrl->val;
+ vivid_update_format_cap(dev, true);
+ break;
+ case VIVID_CID_HAS_SCALER_CAP:
+ dev->has_scaler_cap = ctrl->val;
+ vivid_update_format_cap(dev, true);
+ break;
+ case VIVID_CID_SHOW_BORDER:
+ tpg_s_show_border(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_SHOW_SQUARE:
+ tpg_s_show_square(&dev->tpg, ctrl->val);
+ break;
+ case VIVID_CID_STD_ASPECT_RATIO:
+ dev->std_aspect_ratio[dev->input] = ctrl->val;
+ tpg_s_video_aspect(&dev->tpg, vivid_get_video_aspect(dev));
+ break;
+ case VIVID_CID_DV_TIMINGS_SIGNAL_MODE:
+ dev->dv_timings_signal_mode[dev->input] =
+ dev->ctrl_dv_timings_signal_mode->val;
+ dev->query_dv_timings[dev->input] = dev->ctrl_dv_timings->val;
+
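+		/*
+		 * Rebuild the DV_RX_POWER_PRESENT bitmask: bit j is set for the
+		 * j-th HDMI input if that input currently carries a signal.
+		 */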
+ dev->power_present = 0;
+ for (i = 0, j = 0;
+ i < ARRAY_SIZE(dev->dv_timings_signal_mode);
+ i++)
+ if (dev->input_type[i] == HDMI) {
+ if (dev->dv_timings_signal_mode[i] != NO_SIGNAL)
+ dev->power_present |= (1 << j);
+ j++;
+ }
+ __v4l2_ctrl_s_ctrl(dev->ctrl_rx_power_present,
+ dev->power_present);
+
+ v4l2_ctrl_activate(dev->ctrl_dv_timings,
+ dev->dv_timings_signal_mode[dev->input] ==
+ SELECTED_DV_TIMINGS);
+
+ vivid_update_quality(dev);
+ vivid_send_source_change(dev, HDMI);
+ break;
+ case VIVID_CID_DV_TIMINGS_ASPECT_RATIO:
+ dev->dv_timings_aspect_ratio[dev->input] = ctrl->val;
+ tpg_s_video_aspect(&dev->tpg, vivid_get_video_aspect(dev));
+ break;
+ case VIVID_CID_TSTAMP_SRC:
+ dev->tstamp_src_is_soe = ctrl->val;
+ dev->vb_vid_cap_q.timestamp_flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
+ if (dev->tstamp_src_is_soe)
+ dev->vb_vid_cap_q.timestamp_flags |= V4L2_BUF_FLAG_TSTAMP_SRC_SOE;
+ break;
+ case VIVID_CID_MAX_EDID_BLOCKS:
+ dev->edid_max_blocks = ctrl->val;
+ if (dev->edid_blocks > dev->edid_max_blocks)
+ dev->edid_blocks = dev->edid_max_blocks;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_vid_cap_ctrl_ops = {
+ .s_ctrl = vivid_vid_cap_s_ctrl,
+};
+
+static const char * const vivid_ctrl_hor_movement_strings[] = {
+ "Move Left Fast",
+ "Move Left",
+ "Move Left Slow",
+ "No Movement",
+ "Move Right Slow",
+ "Move Right",
+ "Move Right Fast",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_hor_movement = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HOR_MOVEMENT,
+ .name = "Horizontal Movement",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = TPG_MOVE_POS_FAST,
+ .def = TPG_MOVE_NONE,
+ .qmenu = vivid_ctrl_hor_movement_strings,
+};
+
+static const char * const vivid_ctrl_vert_movement_strings[] = {
+ "Move Up Fast",
+ "Move Up",
+ "Move Up Slow",
+ "No Movement",
+ "Move Down Slow",
+ "Move Down",
+ "Move Down Fast",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_vert_movement = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_VERT_MOVEMENT,
+ .name = "Vertical Movement",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = TPG_MOVE_POS_FAST,
+ .def = TPG_MOVE_NONE,
+ .qmenu = vivid_ctrl_vert_movement_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_show_border = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_SHOW_BORDER,
+ .name = "Show Border",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_show_square = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_SHOW_SQUARE,
+ .name = "Show Square",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const char * const vivid_ctrl_osd_mode_strings[] = {
+ "All",
+ "Counters Only",
+ "None",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_osd_mode = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_OSD_TEXT_MODE,
+ .name = "OSD Text Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
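+	/* Last valid menu index: ARRAY_SIZE includes the NULL terminator, hence -2 */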
+ .max = ARRAY_SIZE(vivid_ctrl_osd_mode_strings) - 2,
+ .qmenu = vivid_ctrl_osd_mode_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_perc_fill = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_PERCENTAGE_FILL,
+ .name = "Fill Percentage of Frame",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .min = 0,
+ .max = 100,
+ .def = 100,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_insert_sav = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_INSERT_SAV,
+ .name = "Insert SAV Code in Image",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_insert_eav = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_INSERT_EAV,
+ .name = "Insert EAV Code in Image",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_insert_hdmi_video_guard_band = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_INSERT_HDMI_VIDEO_GUARD_BAND,
+ .name = "Insert Video Guard Band",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_hflip = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HFLIP,
+ .name = "Sensor Flipped Horizontally",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_vflip = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_VFLIP,
+ .name = "Sensor Flipped Vertically",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_reduced_fps = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_REDUCED_FPS,
+ .name = "Reduced Framerate",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_crop_cap = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HAS_CROP_CAP,
+ .name = "Enable Capture Cropping",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_compose_cap = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HAS_COMPOSE_CAP,
+ .name = "Enable Capture Composing",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_scaler_cap = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HAS_SCALER_CAP,
+ .name = "Enable Capture Scaler",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const char * const vivid_ctrl_tstamp_src_strings[] = {
+ "End of Frame",
+ "Start of Exposure",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_tstamp_src = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_TSTAMP_SRC,
+ .name = "Timestamp Source",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_tstamp_src_strings) - 2,
+ .qmenu = vivid_ctrl_tstamp_src_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_std_aspect_ratio = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_STD_ASPECT_RATIO,
+ .name = "Standard Aspect Ratio",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .min = 1,
+ .max = 4,
+ .def = 1,
+ .qmenu = tpg_aspect_strings,
+};
+
+static const char * const vivid_ctrl_dv_timings_signal_mode_strings[] = {
+ "Current DV Timings",
+ "No Signal",
+ "No Lock",
+ "Out of Range",
+ "Selected DV Timings",
+ "Cycle Through All DV Timings",
+ "Custom DV Timings",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_dv_timings_signal_mode = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_DV_TIMINGS_SIGNAL_MODE,
+ .name = "DV Timings Signal Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = 5,
+ .qmenu = vivid_ctrl_dv_timings_signal_mode_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_dv_timings_aspect_ratio = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_DV_TIMINGS_ASPECT_RATIO,
+ .name = "DV Timings Aspect Ratio",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = 3,
+ .qmenu = tpg_aspect_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_max_edid_blocks = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_MAX_EDID_BLOCKS,
+ .name = "Maximum EDID Blocks",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .min = 1,
+ .max = 256,
+ .def = 2,
+ .step = 1,
+};
+
+static const char * const vivid_ctrl_colorspace_strings[] = {
+ "SMPTE 170M",
+ "Rec. 709",
+ "sRGB",
+ "opRGB",
+ "BT.2020",
+ "DCI-P3",
+ "SMPTE 240M",
+ "470 System M",
+ "470 System BG",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_colorspace = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_COLORSPACE,
+ .name = "Colorspace",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_colorspace_strings) - 2,
+ .def = 2,
+ .qmenu = vivid_ctrl_colorspace_strings,
+};
+
+static const char * const vivid_ctrl_xfer_func_strings[] = {
+ "Default",
+ "Rec. 709",
+ "sRGB",
+ "opRGB",
+ "SMPTE 240M",
+ "None",
+ "DCI-P3",
+ "SMPTE 2084",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_xfer_func = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_XFER_FUNC,
+ .name = "Transfer Function",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_xfer_func_strings) - 2,
+ .qmenu = vivid_ctrl_xfer_func_strings,
+};
+
+static const char * const vivid_ctrl_ycbcr_enc_strings[] = {
+ "Default",
+ "ITU-R 601",
+ "Rec. 709",
+ "xvYCC 601",
+ "xvYCC 709",
+ "",
+ "BT.2020",
+ "BT.2020 Constant Luminance",
+ "SMPTE 240M",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_ycbcr_enc = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_YCBCR_ENC,
+ .name = "Y'CbCr Encoding",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .menu_skip_mask = 1 << 5,
+ .max = ARRAY_SIZE(vivid_ctrl_ycbcr_enc_strings) - 2,
+ .qmenu = vivid_ctrl_ycbcr_enc_strings,
+};
+
+static const char * const vivid_ctrl_hsv_enc_strings[] = {
+ "Hue 0-179",
+ "Hue 0-256",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_hsv_enc = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_HSV_ENC,
+ .name = "HSV Encoding",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_hsv_enc_strings) - 2,
+ .qmenu = vivid_ctrl_hsv_enc_strings,
+};
+
+static const char * const vivid_ctrl_quantization_strings[] = {
+ "Default",
+ "Full Range",
+ "Limited Range",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_quantization = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_QUANTIZATION,
+ .name = "Quantization",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_quantization_strings) - 2,
+ .qmenu = vivid_ctrl_quantization_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_alpha_mode = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_ALPHA_MODE,
+ .name = "Apply Alpha To Red Only",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_limited_rgb_range = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_LIMITED_RGB_RANGE,
+ .name = "Limited RGB Range (16-235)",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+
+/* Video Loop Control */
+
+static int vivid_loop_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_loop_cap);
+
+ switch (ctrl->id) {
+ case VIVID_CID_LOOP_VIDEO:
+ dev->loop_video = ctrl->val;
+ vivid_update_quality(dev);
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_loop_cap_ctrl_ops = {
+ .s_ctrl = vivid_loop_cap_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_loop_video = {
+ .ops = &vivid_loop_cap_ctrl_ops,
+ .id = VIVID_CID_LOOP_VIDEO,
+ .name = "Loop Video",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+
+/* VBI Capture Control */
+
+static int vivid_vbi_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_vbi_cap);
+
+ switch (ctrl->id) {
+ case VIVID_CID_VBI_CAP_INTERLACED:
+ dev->vbi_cap_interlaced = ctrl->val;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_vbi_cap_ctrl_ops = {
+ .s_ctrl = vivid_vbi_cap_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_vbi_cap_interlaced = {
+ .ops = &vivid_vbi_cap_ctrl_ops,
+ .id = VIVID_CID_VBI_CAP_INTERLACED,
+ .name = "Interlaced VBI Format",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+
+/* Video Output Controls */
+
+static int vivid_vid_out_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_vid_out);
+ struct v4l2_bt_timings *bt = &dev->dv_timings_out.bt;
+ u32 display_present = 0;
+ unsigned int i, j, bus_idx;
+
+ switch (ctrl->id) {
+ case VIVID_CID_HAS_CROP_OUT:
+ dev->has_crop_out = ctrl->val;
+ vivid_update_format_out(dev);
+ break;
+ case VIVID_CID_HAS_COMPOSE_OUT:
+ dev->has_compose_out = ctrl->val;
+ vivid_update_format_out(dev);
+ break;
+ case VIVID_CID_HAS_SCALER_OUT:
+ dev->has_scaler_out = ctrl->val;
+ vivid_update_format_out(dev);
+ break;
+ case V4L2_CID_DV_TX_MODE:
+ dev->dvi_d_out = ctrl->val == V4L2_DV_TX_MODE_DVI_D;
+ if (!vivid_is_hdmi_out(dev))
+ break;
+ if (!dev->dvi_d_out && (bt->flags & V4L2_DV_FL_IS_CE_VIDEO)) {
+ if (bt->width == 720 && bt->height <= 576)
+ dev->colorspace_out = V4L2_COLORSPACE_SMPTE170M;
+ else
+ dev->colorspace_out = V4L2_COLORSPACE_REC709;
+ dev->quantization_out = V4L2_QUANTIZATION_DEFAULT;
+ } else {
+ dev->colorspace_out = V4L2_COLORSPACE_SRGB;
+ dev->quantization_out = dev->dvi_d_out ?
+ V4L2_QUANTIZATION_LIM_RANGE :
+ V4L2_QUANTIZATION_DEFAULT;
+ }
+ if (dev->loop_video)
+ vivid_send_source_change(dev, HDMI);
+ break;
+ case VIVID_CID_DISPLAY_PRESENT:
+ if (dev->output_type[dev->output] != HDMI)
+ break;
+
+ dev->display_present[dev->output] = ctrl->val;
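+		/* Build a bitmask with one 'display present' bit per HDMI output */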
+ for (i = 0, j = 0; i < dev->num_outputs; i++)
+ if (dev->output_type[i] == HDMI)
+ display_present |=
+ dev->display_present[i] << j++;
+
+ __v4l2_ctrl_s_ctrl(dev->ctrl_tx_rxsense, display_present);
+
+ if (dev->edid_blocks) {
+ __v4l2_ctrl_s_ctrl(dev->ctrl_tx_edid_present,
+ display_present);
+ __v4l2_ctrl_s_ctrl(dev->ctrl_tx_hotplug,
+ display_present);
+ }
+
+ bus_idx = dev->cec_output2bus_map[dev->output];
+ if (!dev->cec_tx_adap[bus_idx])
+ break;
+
+ if (ctrl->val && dev->edid_blocks)
+ cec_s_phys_addr(dev->cec_tx_adap[bus_idx],
+ dev->cec_tx_adap[bus_idx]->phys_addr,
+ false);
+ else
+ cec_phys_addr_invalidate(dev->cec_tx_adap[bus_idx]);
+
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_vid_out_ctrl_ops = {
+ .s_ctrl = vivid_vid_out_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_crop_out = {
+ .ops = &vivid_vid_out_ctrl_ops,
+ .id = VIVID_CID_HAS_CROP_OUT,
+ .name = "Enable Output Cropping",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_compose_out = {
+ .ops = &vivid_vid_out_ctrl_ops,
+ .id = VIVID_CID_HAS_COMPOSE_OUT,
+ .name = "Enable Output Composing",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_has_scaler_out = {
+ .ops = &vivid_vid_out_ctrl_ops,
+ .id = VIVID_CID_HAS_SCALER_OUT,
+ .name = "Enable Output Scaler",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_display_present = {
+ .ops = &vivid_vid_out_ctrl_ops,
+ .id = VIVID_CID_DISPLAY_PRESENT,
+ .name = "Display Present",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+/* Streaming Controls */
+
+static int vivid_streaming_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_streaming);
+
+ switch (ctrl->id) {
+ case VIVID_CID_DQBUF_ERROR:
+ dev->dqbuf_error = true;
+ break;
+ case VIVID_CID_PERC_DROPPED:
+ dev->perc_dropped_buffers = ctrl->val;
+ break;
+ case VIVID_CID_QUEUE_SETUP_ERROR:
+ dev->queue_setup_error = true;
+ break;
+ case VIVID_CID_BUF_PREPARE_ERROR:
+ dev->buf_prepare_error = true;
+ break;
+ case VIVID_CID_START_STR_ERROR:
+ dev->start_streaming_error = true;
+ break;
+ case VIVID_CID_REQ_VALIDATE_ERROR:
+ dev->req_validate_error = true;
+ break;
+ case VIVID_CID_QUEUE_ERROR:
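+		/* Put every queue that is currently streaming into the error state */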
+ if (vb2_start_streaming_called(&dev->vb_vid_cap_q))
+ vb2_queue_error(&dev->vb_vid_cap_q);
+ if (vb2_start_streaming_called(&dev->vb_vbi_cap_q))
+ vb2_queue_error(&dev->vb_vbi_cap_q);
+ if (vb2_start_streaming_called(&dev->vb_vid_out_q))
+ vb2_queue_error(&dev->vb_vid_out_q);
+ if (vb2_start_streaming_called(&dev->vb_vbi_out_q))
+ vb2_queue_error(&dev->vb_vbi_out_q);
+ if (vb2_start_streaming_called(&dev->vb_sdr_cap_q))
+ vb2_queue_error(&dev->vb_sdr_cap_q);
+ break;
+ case VIVID_CID_SEQ_WRAP:
+ dev->seq_wrap = ctrl->val;
+ break;
+ case VIVID_CID_TIME_WRAP:
+ dev->time_wrap = ctrl->val;
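+		/*
+		 * Mode 1 ("64 Bit") starts the timestamp 16 seconds before the
+		 * 64-bit nanosecond counter wraps; mode 2 ("32 Bit") starts it
+		 * 16 seconds before the seconds part exceeds 31 bits.
+		 */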
+ if (dev->time_wrap == 1)
+ dev->time_wrap = (1ULL << 63) - NSEC_PER_SEC * 16ULL;
+ else if (dev->time_wrap == 2)
+ dev->time_wrap = ((1ULL << 31) - 16) * NSEC_PER_SEC;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_streaming_ctrl_ops = {
+ .s_ctrl = vivid_streaming_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_dqbuf_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_DQBUF_ERROR,
+ .name = "Inject V4L2_BUF_FLAG_ERROR",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_perc_dropped = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_PERC_DROPPED,
+ .name = "Percentage of Dropped Buffers",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .min = 0,
+ .max = 100,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_queue_setup_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_QUEUE_SETUP_ERROR,
+ .name = "Inject VIDIOC_REQBUFS Error",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_buf_prepare_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_BUF_PREPARE_ERROR,
+ .name = "Inject VIDIOC_QBUF Error",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_start_streaming_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_START_STR_ERROR,
+ .name = "Inject VIDIOC_STREAMON Error",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_queue_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_QUEUE_ERROR,
+ .name = "Inject Fatal Streaming Error",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+
+#ifdef CONFIG_MEDIA_CONTROLLER
+static const struct v4l2_ctrl_config vivid_ctrl_req_validate_error = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_REQ_VALIDATE_ERROR,
+ .name = "Inject req_validate() Error",
+ .type = V4L2_CTRL_TYPE_BUTTON,
+};
+#endif
+
+static const struct v4l2_ctrl_config vivid_ctrl_seq_wrap = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_SEQ_WRAP,
+ .name = "Wrap Sequence Number",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const char * const vivid_ctrl_time_wrap_strings[] = {
+ "None",
+ "64 Bit",
+ "32 Bit",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_time_wrap = {
+ .ops = &vivid_streaming_ctrl_ops,
+ .id = VIVID_CID_TIME_WRAP,
+ .name = "Wrap Timestamp",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_time_wrap_strings) - 2,
+ .qmenu = vivid_ctrl_time_wrap_strings,
+};
+
+
+/* SDTV Capture Controls */
+
+static int vivid_sdtv_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_sdtv_cap);
+
+ switch (ctrl->id) {
+ case VIVID_CID_STD_SIGNAL_MODE:
+ dev->std_signal_mode[dev->input] =
+ dev->ctrl_std_signal_mode->val;
+ if (dev->std_signal_mode[dev->input] == SELECTED_STD)
+ dev->query_std[dev->input] =
+ vivid_standard[dev->ctrl_standard->val];
+ v4l2_ctrl_activate(dev->ctrl_standard,
+ dev->std_signal_mode[dev->input] ==
+ SELECTED_STD);
+ vivid_update_quality(dev);
+ vivid_send_source_change(dev, TV);
+ vivid_send_source_change(dev, SVID);
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_sdtv_cap_ctrl_ops = {
+ .s_ctrl = vivid_sdtv_cap_s_ctrl,
+};
+
+static const char * const vivid_ctrl_std_signal_mode_strings[] = {
+ "Current Standard",
+ "No Signal",
+ "No Lock",
+ "",
+ "Selected Standard",
+ "Cycle Through All Standards",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_std_signal_mode = {
+ .ops = &vivid_sdtv_cap_ctrl_ops,
+ .id = VIVID_CID_STD_SIGNAL_MODE,
+ .name = "Standard Signal Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = ARRAY_SIZE(vivid_ctrl_std_signal_mode_strings) - 2,
+ .menu_skip_mask = 1 << 3,
+ .qmenu = vivid_ctrl_std_signal_mode_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_standard = {
+ .ops = &vivid_sdtv_cap_ctrl_ops,
+ .id = VIVID_CID_STANDARD,
+ .name = "Standard",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = 14,
+ .qmenu = vivid_ctrl_standard_strings,
+};
+
+
+
+/* Radio Receiver Controls */
+
+static int vivid_radio_rx_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_radio_rx);
+
+ switch (ctrl->id) {
+ case VIVID_CID_RADIO_SEEK_MODE:
+ dev->radio_rx_hw_seek_mode = ctrl->val;
+ break;
+ case VIVID_CID_RADIO_SEEK_PROG_LIM:
+ dev->radio_rx_hw_seek_prog_lim = ctrl->val;
+ break;
+ case VIVID_CID_RADIO_RX_RDS_RBDS:
+ dev->rds_gen.use_rbds = ctrl->val;
+ break;
+ case VIVID_CID_RADIO_RX_RDS_BLOCKIO:
+ dev->radio_rx_rds_controls = ctrl->val;
+ dev->radio_rx_caps &= ~V4L2_CAP_READWRITE;
+ dev->radio_rx_rds_use_alternates = false;
+ if (!dev->radio_rx_rds_controls) {
+ dev->radio_rx_caps |= V4L2_CAP_READWRITE;
+ __v4l2_ctrl_s_ctrl(dev->radio_rx_rds_pty, 0);
+ __v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ta, 0);
+ __v4l2_ctrl_s_ctrl(dev->radio_rx_rds_tp, 0);
+ __v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ms, 0);
+ __v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_psname, "");
+ __v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_radiotext, "");
+ }
+ v4l2_ctrl_activate(dev->radio_rx_rds_pty, dev->radio_rx_rds_controls);
+ v4l2_ctrl_activate(dev->radio_rx_rds_psname, dev->radio_rx_rds_controls);
+ v4l2_ctrl_activate(dev->radio_rx_rds_radiotext, dev->radio_rx_rds_controls);
+ v4l2_ctrl_activate(dev->radio_rx_rds_ta, dev->radio_rx_rds_controls);
+ v4l2_ctrl_activate(dev->radio_rx_rds_tp, dev->radio_rx_rds_controls);
+ v4l2_ctrl_activate(dev->radio_rx_rds_ms, dev->radio_rx_rds_controls);
+ dev->radio_rx_dev.device_caps = dev->radio_rx_caps;
+ break;
+ case V4L2_CID_RDS_RECEPTION:
+ dev->radio_rx_rds_enabled = ctrl->val;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_radio_rx_ctrl_ops = {
+ .s_ctrl = vivid_radio_rx_s_ctrl,
+};
+
+static const char * const vivid_ctrl_radio_rds_mode_strings[] = {
+ "Block I/O",
+ "Controls",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_radio_rx_rds_blockio = {
+ .ops = &vivid_radio_rx_ctrl_ops,
+ .id = VIVID_CID_RADIO_RX_RDS_BLOCKIO,
+ .name = "RDS Rx I/O Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .qmenu = vivid_ctrl_radio_rds_mode_strings,
+ .max = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_radio_rx_rds_rbds = {
+ .ops = &vivid_radio_rx_ctrl_ops,
+ .id = VIVID_CID_RADIO_RX_RDS_RBDS,
+ .name = "Generate RBDS Instead of RDS",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+static const char * const vivid_ctrl_radio_hw_seek_mode_strings[] = {
+ "Bounded",
+ "Wrap Around",
+ "Both",
+ NULL,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_radio_hw_seek_mode = {
+ .ops = &vivid_radio_rx_ctrl_ops,
+ .id = VIVID_CID_RADIO_SEEK_MODE,
+ .name = "Radio HW Seek Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = 2,
+ .qmenu = vivid_ctrl_radio_hw_seek_mode_strings,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_radio_hw_seek_prog_lim = {
+ .ops = &vivid_radio_rx_ctrl_ops,
+ .id = VIVID_CID_RADIO_SEEK_PROG_LIM,
+ .name = "Radio Programmable HW Seek",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .step = 1,
+};
+
+
+/* Radio Transmitter Controls */
+
+static int vivid_radio_tx_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_radio_tx);
+
+ switch (ctrl->id) {
+ case VIVID_CID_RADIO_TX_RDS_BLOCKIO:
+ dev->radio_tx_rds_controls = ctrl->val;
+ dev->radio_tx_caps &= ~V4L2_CAP_READWRITE;
+ if (!dev->radio_tx_rds_controls)
+ dev->radio_tx_caps |= V4L2_CAP_READWRITE;
+ dev->radio_tx_dev.device_caps = dev->radio_tx_caps;
+ break;
+ case V4L2_CID_RDS_TX_PTY:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_pty, ctrl->val);
+ break;
+ case V4L2_CID_RDS_TX_PS_NAME:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_psname, ctrl->p_new.p_char);
+ break;
+ case V4L2_CID_RDS_TX_RADIO_TEXT:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_radiotext, ctrl->p_new.p_char);
+ break;
+ case V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ta, ctrl->val);
+ break;
+ case V4L2_CID_RDS_TX_TRAFFIC_PROGRAM:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_tp, ctrl->val);
+ break;
+ case V4L2_CID_RDS_TX_MUSIC_SPEECH:
+ if (dev->radio_rx_rds_controls)
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ms, ctrl->val);
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_radio_tx_ctrl_ops = {
+ .s_ctrl = vivid_radio_tx_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_radio_tx_rds_blockio = {
+ .ops = &vivid_radio_tx_ctrl_ops,
+ .id = VIVID_CID_RADIO_TX_RDS_BLOCKIO,
+ .name = "RDS Tx I/O Mode",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .qmenu = vivid_ctrl_radio_rds_mode_strings,
+ .max = 1,
+ .def = 1,
+};
+
+
+/* SDR Capture Controls */
+
+static int vivid_sdr_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_sdr_cap);
+
+ switch (ctrl->id) {
+ case VIVID_CID_SDR_CAP_FM_DEVIATION:
+ dev->sdr_fm_deviation = ctrl->val;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_sdr_cap_ctrl_ops = {
+ .s_ctrl = vivid_sdr_cap_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_sdr_cap_fm_deviation = {
+ .ops = &vivid_sdr_cap_ctrl_ops,
+ .id = VIVID_CID_SDR_CAP_FM_DEVIATION,
+ .name = "FM Deviation",
+ .type = V4L2_CTRL_TYPE_INTEGER,
+ .min = 100,
+ .max = 200000,
+ .def = 75000,
+ .step = 1,
+};
+
+/* Metadata Capture Control */
+
+static int vivid_meta_cap_s_ctrl(struct v4l2_ctrl *ctrl)
+{
+ struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev,
+ ctrl_hdl_meta_cap);
+
+ switch (ctrl->id) {
+ case VIVID_CID_META_CAP_GENERATE_PTS:
+ dev->meta_pts = ctrl->val;
+ break;
+ case VIVID_CID_META_CAP_GENERATE_SCR:
+ dev->meta_scr = ctrl->val;
+ break;
+ }
+ return 0;
+}
+
+static const struct v4l2_ctrl_ops vivid_meta_cap_ctrl_ops = {
+ .s_ctrl = vivid_meta_cap_s_ctrl,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_meta_has_pts = {
+ .ops = &vivid_meta_cap_ctrl_ops,
+ .id = VIVID_CID_META_CAP_GENERATE_PTS,
+ .name = "Generate PTS",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_meta_has_src_clk = {
+ .ops = &vivid_meta_cap_ctrl_ops,
+ .id = VIVID_CID_META_CAP_GENERATE_SCR,
+ .name = "Generate SCR",
+ .type = V4L2_CTRL_TYPE_BOOLEAN,
+ .max = 1,
+ .def = 1,
+ .step = 1,
+};
+
+static const struct v4l2_ctrl_config vivid_ctrl_class = {
+ .ops = &vivid_user_gen_ctrl_ops,
+ .flags = V4L2_CTRL_FLAG_READ_ONLY | V4L2_CTRL_FLAG_WRITE_ONLY,
+ .id = VIVID_CID_VIVID_CLASS,
+ .name = "Vivid Controls",
+ .type = V4L2_CTRL_TYPE_CTRL_CLASS,
+};
+
+int vivid_create_controls(struct vivid_dev *dev, bool show_ccs_cap,
+ bool show_ccs_out, bool no_error_inj,
+ bool has_sdtv, bool has_hdmi)
+{
+ struct v4l2_ctrl_handler *hdl_user_gen = &dev->ctrl_hdl_user_gen;
+ struct v4l2_ctrl_handler *hdl_user_vid = &dev->ctrl_hdl_user_vid;
+ struct v4l2_ctrl_handler *hdl_user_aud = &dev->ctrl_hdl_user_aud;
+ struct v4l2_ctrl_handler *hdl_streaming = &dev->ctrl_hdl_streaming;
+ struct v4l2_ctrl_handler *hdl_sdtv_cap = &dev->ctrl_hdl_sdtv_cap;
+ struct v4l2_ctrl_handler *hdl_loop_cap = &dev->ctrl_hdl_loop_cap;
+ struct v4l2_ctrl_handler *hdl_fb = &dev->ctrl_hdl_fb;
+ struct v4l2_ctrl_handler *hdl_vid_cap = &dev->ctrl_hdl_vid_cap;
+ struct v4l2_ctrl_handler *hdl_vid_out = &dev->ctrl_hdl_vid_out;
+ struct v4l2_ctrl_handler *hdl_vbi_cap = &dev->ctrl_hdl_vbi_cap;
+ struct v4l2_ctrl_handler *hdl_vbi_out = &dev->ctrl_hdl_vbi_out;
+ struct v4l2_ctrl_handler *hdl_radio_rx = &dev->ctrl_hdl_radio_rx;
+ struct v4l2_ctrl_handler *hdl_radio_tx = &dev->ctrl_hdl_radio_tx;
+ struct v4l2_ctrl_handler *hdl_sdr_cap = &dev->ctrl_hdl_sdr_cap;
+ struct v4l2_ctrl_handler *hdl_meta_cap = &dev->ctrl_hdl_meta_cap;
+ struct v4l2_ctrl_handler *hdl_meta_out = &dev->ctrl_hdl_meta_out;
+ struct v4l2_ctrl_handler *hdl_tch_cap = &dev->ctrl_hdl_touch_cap;
+
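+	/* Not const: .max and .qmenu are filled in at runtime from the DV timings list */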
+ struct v4l2_ctrl_config vivid_ctrl_dv_timings = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_DV_TIMINGS,
+ .name = "DV Timings",
+ .type = V4L2_CTRL_TYPE_MENU,
+ };
+ int i;
+
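+	/* The second argument to v4l2_ctrl_handler_init() is only a sizing hint */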
+ v4l2_ctrl_handler_init(hdl_user_gen, 10);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_user_vid, 9);
+ v4l2_ctrl_new_custom(hdl_user_vid, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_user_aud, 2);
+ v4l2_ctrl_new_custom(hdl_user_aud, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_streaming, 8);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_sdtv_cap, 2);
+ v4l2_ctrl_new_custom(hdl_sdtv_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_loop_cap, 1);
+ v4l2_ctrl_new_custom(hdl_loop_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_fb, 1);
+ v4l2_ctrl_new_custom(hdl_fb, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_vid_cap, 55);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_vid_out, 26);
+ if (!no_error_inj || dev->has_fb || dev->num_hdmi_outputs)
+ v4l2_ctrl_new_custom(hdl_vid_out, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_vbi_cap, 21);
+ v4l2_ctrl_new_custom(hdl_vbi_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_vbi_out, 19);
+ if (!no_error_inj)
+ v4l2_ctrl_new_custom(hdl_vbi_out, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_radio_rx, 17);
+ v4l2_ctrl_new_custom(hdl_radio_rx, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_radio_tx, 17);
+ v4l2_ctrl_new_custom(hdl_radio_tx, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_sdr_cap, 19);
+ v4l2_ctrl_new_custom(hdl_sdr_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_meta_cap, 2);
+ v4l2_ctrl_new_custom(hdl_meta_cap, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_meta_out, 2);
+ v4l2_ctrl_new_custom(hdl_meta_out, &vivid_ctrl_class, NULL);
+ v4l2_ctrl_handler_init(hdl_tch_cap, 2);
+ v4l2_ctrl_new_custom(hdl_tch_cap, &vivid_ctrl_class, NULL);
+
+ /* User Controls */
+ dev->volume = v4l2_ctrl_new_std(hdl_user_aud, NULL,
+ V4L2_CID_AUDIO_VOLUME, 0, 255, 1, 200);
+ dev->mute = v4l2_ctrl_new_std(hdl_user_aud, NULL,
+ V4L2_CID_AUDIO_MUTE, 0, 1, 1, 0);
+ if (dev->has_vid_cap) {
+ dev->brightness = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_BRIGHTNESS, 0, 255, 1, 128);
+ for (i = 0; i < MAX_INPUTS; i++)
+ dev->input_brightness[i] = 128;
+ dev->contrast = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_CONTRAST, 0, 255, 1, 128);
+ dev->saturation = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_SATURATION, 0, 255, 1, 128);
+ dev->hue = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_HUE, -128, 128, 1, 0);
+ v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_HFLIP, 0, 1, 1, 0);
+ v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_VFLIP, 0, 1, 1, 0);
+ dev->autogain = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_AUTOGAIN, 0, 1, 1, 1);
+ dev->gain = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_GAIN, 0, 255, 1, 100);
+ dev->alpha = v4l2_ctrl_new_std(hdl_user_vid, &vivid_user_vid_ctrl_ops,
+ V4L2_CID_ALPHA_COMPONENT, 0, 255, 1, 0);
+ }
+ dev->button = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_button, NULL);
+ dev->int32 = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_int32, NULL);
+ dev->int64 = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_int64, NULL);
+ dev->boolean = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_boolean, NULL);
+ dev->menu = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_menu, NULL);
+ dev->string = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_string, NULL);
+ dev->bitmask = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_bitmask, NULL);
+ dev->int_menu = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_int_menu, NULL);
+ dev->ro_int32 = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_ro_int32, NULL);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_area, NULL);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_u32_array, NULL);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_u32_dyn_array, NULL);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_u16_matrix, NULL);
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_u8_4d_array, NULL);
+ dev->pixel_array = v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_u8_pixel_array, NULL);
+
+ if (dev->has_vid_cap) {
+ /* Image Processing Controls */
+ struct v4l2_ctrl_config vivid_ctrl_test_pattern = {
+ .ops = &vivid_vid_cap_ctrl_ops,
+ .id = VIVID_CID_TEST_PATTERN,
+ .name = "Test Pattern",
+ .type = V4L2_CTRL_TYPE_MENU,
+ .max = TPG_PAT_NOISE,
+ .qmenu = tpg_pattern_strings,
+ };
+
+ dev->test_pattern = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_test_pattern, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_perc_fill, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_hor_movement, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_vert_movement, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_osd_mode, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_show_border, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_show_square, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_hflip, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_vflip, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_insert_sav, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_insert_eav, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_insert_hdmi_video_guard_band, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_reduced_fps, NULL);
+ if (show_ccs_cap) {
+ dev->ctrl_has_crop_cap = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_has_crop_cap, NULL);
+ dev->ctrl_has_compose_cap = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_has_compose_cap, NULL);
+ dev->ctrl_has_scaler_cap = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_has_scaler_cap, NULL);
+ }
+
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_tstamp_src, NULL);
+ dev->colorspace = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_colorspace, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_xfer_func, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_ycbcr_enc, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_hsv_enc, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_quantization, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_alpha_mode, NULL);
+ }
+
+ if (dev->has_vid_out && show_ccs_out) {
+ dev->ctrl_has_crop_out = v4l2_ctrl_new_custom(hdl_vid_out,
+ &vivid_ctrl_has_crop_out, NULL);
+ dev->ctrl_has_compose_out = v4l2_ctrl_new_custom(hdl_vid_out,
+ &vivid_ctrl_has_compose_out, NULL);
+ dev->ctrl_has_scaler_out = v4l2_ctrl_new_custom(hdl_vid_out,
+ &vivid_ctrl_has_scaler_out, NULL);
+ }
+
+ /*
+ * Testing this driver with v4l2-compliance will trigger the error
+ * injection controls, and after that nothing will work as expected.
+	 * So we have a module option to drop these error-injecting controls,
+	 * allowing us to run v4l2-compliance again.
+ */
+ if (!no_error_inj) {
+ v4l2_ctrl_new_custom(hdl_user_gen, &vivid_ctrl_disconnect, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_dqbuf_error, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_perc_dropped, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_queue_setup_error, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_buf_prepare_error, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_start_streaming_error, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_queue_error, NULL);
+#ifdef CONFIG_MEDIA_CONTROLLER
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_req_validate_error, NULL);
+#endif
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_seq_wrap, NULL);
+ v4l2_ctrl_new_custom(hdl_streaming, &vivid_ctrl_time_wrap, NULL);
+ }
+
+ if (has_sdtv && (dev->has_vid_cap || dev->has_vbi_cap)) {
+ if (dev->has_vid_cap)
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_std_aspect_ratio, NULL);
+ dev->ctrl_std_signal_mode = v4l2_ctrl_new_custom(hdl_sdtv_cap,
+ &vivid_ctrl_std_signal_mode, NULL);
+ dev->ctrl_standard = v4l2_ctrl_new_custom(hdl_sdtv_cap,
+ &vivid_ctrl_standard, NULL);
+ if (dev->ctrl_std_signal_mode)
+ v4l2_ctrl_cluster(2, &dev->ctrl_std_signal_mode);
+ if (dev->has_raw_vbi_cap)
+ v4l2_ctrl_new_custom(hdl_vbi_cap, &vivid_ctrl_vbi_cap_interlaced, NULL);
+ }
+
+ if (dev->num_hdmi_inputs) {
+ s64 hdmi_input_mask = GENMASK(dev->num_hdmi_inputs - 1, 0);
+
+ dev->ctrl_dv_timings_signal_mode = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_dv_timings_signal_mode, NULL);
+
+ vivid_ctrl_dv_timings.max = dev->query_dv_timings_size - 1;
+ vivid_ctrl_dv_timings.qmenu =
+ (const char * const *)dev->query_dv_timings_qmenu;
+ dev->ctrl_dv_timings = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_dv_timings, NULL);
+ if (dev->ctrl_dv_timings_signal_mode)
+ v4l2_ctrl_cluster(2, &dev->ctrl_dv_timings_signal_mode);
+
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_dv_timings_aspect_ratio, NULL);
+ v4l2_ctrl_new_custom(hdl_vid_cap, &vivid_ctrl_max_edid_blocks, NULL);
+ dev->real_rgb_range_cap = v4l2_ctrl_new_custom(hdl_vid_cap,
+ &vivid_ctrl_limited_rgb_range, NULL);
+ dev->rgb_range_cap = v4l2_ctrl_new_std_menu(hdl_vid_cap,
+ &vivid_vid_cap_ctrl_ops,
+ V4L2_CID_DV_RX_RGB_RANGE, V4L2_DV_RGB_RANGE_FULL,
+ 0, V4L2_DV_RGB_RANGE_AUTO);
+ dev->ctrl_rx_power_present = v4l2_ctrl_new_std(hdl_vid_cap,
+ NULL, V4L2_CID_DV_RX_POWER_PRESENT, 0, hdmi_input_mask,
+ 0, hdmi_input_mask);
+
+ }
+ if (dev->num_hdmi_outputs) {
+ s64 hdmi_output_mask = GENMASK(dev->num_hdmi_outputs - 1, 0);
+
+ /*
+ * We aren't doing anything with this at the moment, but
+	 * HDMI outputs typically have these controls.
+ */
+ dev->ctrl_tx_rgb_range = v4l2_ctrl_new_std_menu(hdl_vid_out, NULL,
+ V4L2_CID_DV_TX_RGB_RANGE, V4L2_DV_RGB_RANGE_FULL,
+ 0, V4L2_DV_RGB_RANGE_AUTO);
+ dev->ctrl_tx_mode = v4l2_ctrl_new_std_menu(hdl_vid_out, NULL,
+ V4L2_CID_DV_TX_MODE, V4L2_DV_TX_MODE_HDMI,
+ 0, V4L2_DV_TX_MODE_HDMI);
+ dev->ctrl_display_present = v4l2_ctrl_new_custom(hdl_vid_out,
+ &vivid_ctrl_display_present, NULL);
+ dev->ctrl_tx_hotplug = v4l2_ctrl_new_std(hdl_vid_out,
+ NULL, V4L2_CID_DV_TX_HOTPLUG, 0, hdmi_output_mask,
+ 0, hdmi_output_mask);
+ dev->ctrl_tx_rxsense = v4l2_ctrl_new_std(hdl_vid_out,
+ NULL, V4L2_CID_DV_TX_RXSENSE, 0, hdmi_output_mask,
+ 0, hdmi_output_mask);
+ dev->ctrl_tx_edid_present = v4l2_ctrl_new_std(hdl_vid_out,
+ NULL, V4L2_CID_DV_TX_EDID_PRESENT, 0, hdmi_output_mask,
+ 0, hdmi_output_mask);
+ }
+ if ((dev->has_vid_cap && dev->has_vid_out) ||
+ (dev->has_vbi_cap && dev->has_vbi_out))
+ v4l2_ctrl_new_custom(hdl_loop_cap, &vivid_ctrl_loop_video, NULL);
+
+ if (dev->has_fb)
+ v4l2_ctrl_new_custom(hdl_fb, &vivid_ctrl_clear_fb, NULL);
+
+ if (dev->has_radio_rx) {
+ v4l2_ctrl_new_custom(hdl_radio_rx, &vivid_ctrl_radio_hw_seek_mode, NULL);
+ v4l2_ctrl_new_custom(hdl_radio_rx, &vivid_ctrl_radio_hw_seek_prog_lim, NULL);
+ v4l2_ctrl_new_custom(hdl_radio_rx, &vivid_ctrl_radio_rx_rds_blockio, NULL);
+ v4l2_ctrl_new_custom(hdl_radio_rx, &vivid_ctrl_radio_rx_rds_rbds, NULL);
+ v4l2_ctrl_new_std(hdl_radio_rx, &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RECEPTION, 0, 1, 1, 1);
+ dev->radio_rx_rds_pty = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_PTY, 0, 31, 1, 0);
+ dev->radio_rx_rds_psname = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_PS_NAME, 0, 8, 8, 0);
+ dev->radio_rx_rds_radiotext = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_RADIO_TEXT, 0, 64, 64, 0);
+ dev->radio_rx_rds_ta = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT, 0, 1, 1, 0);
+ dev->radio_rx_rds_tp = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_TRAFFIC_PROGRAM, 0, 1, 1, 0);
+ dev->radio_rx_rds_ms = v4l2_ctrl_new_std(hdl_radio_rx,
+ &vivid_radio_rx_ctrl_ops,
+ V4L2_CID_RDS_RX_MUSIC_SPEECH, 0, 1, 1, 1);
+ }
+ if (dev->has_radio_tx) {
+ v4l2_ctrl_new_custom(hdl_radio_tx,
+ &vivid_ctrl_radio_tx_rds_blockio, NULL);
+ dev->radio_tx_rds_pi = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_PI, 0, 0xffff, 1, 0x8088);
+ dev->radio_tx_rds_pty = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_PTY, 0, 31, 1, 3);
+ dev->radio_tx_rds_psname = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_PS_NAME, 0, 8, 8, 0);
+ if (dev->radio_tx_rds_psname)
+ v4l2_ctrl_s_ctrl_string(dev->radio_tx_rds_psname, "VIVID-TX");
+ dev->radio_tx_rds_radiotext = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_RADIO_TEXT, 0, 64 * 2, 64, 0);
+ if (dev->radio_tx_rds_radiotext)
+ v4l2_ctrl_s_ctrl_string(dev->radio_tx_rds_radiotext,
+ "This is a VIVID default Radio Text template text, change at will");
+ dev->radio_tx_rds_mono_stereo = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_MONO_STEREO, 0, 1, 1, 1);
+ dev->radio_tx_rds_art_head = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_ARTIFICIAL_HEAD, 0, 1, 1, 0);
+ dev->radio_tx_rds_compressed = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_COMPRESSED, 0, 1, 1, 0);
+ dev->radio_tx_rds_dyn_pty = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_DYNAMIC_PTY, 0, 1, 1, 0);
+ dev->radio_tx_rds_ta = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT, 0, 1, 1, 0);
+ dev->radio_tx_rds_tp = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_TRAFFIC_PROGRAM, 0, 1, 1, 1);
+ dev->radio_tx_rds_ms = v4l2_ctrl_new_std(hdl_radio_tx,
+ &vivid_radio_tx_ctrl_ops,
+ V4L2_CID_RDS_TX_MUSIC_SPEECH, 0, 1, 1, 1);
+ }
+ if (dev->has_sdr_cap) {
+ v4l2_ctrl_new_custom(hdl_sdr_cap,
+ &vivid_ctrl_sdr_cap_fm_deviation, NULL);
+ }
+ if (dev->has_meta_cap) {
+ v4l2_ctrl_new_custom(hdl_meta_cap,
+ &vivid_ctrl_meta_has_pts, NULL);
+ v4l2_ctrl_new_custom(hdl_meta_cap,
+ &vivid_ctrl_meta_has_src_clk, NULL);
+ }
+
+ if (hdl_user_gen->error)
+ return hdl_user_gen->error;
+ if (hdl_user_vid->error)
+ return hdl_user_vid->error;
+ if (hdl_user_aud->error)
+ return hdl_user_aud->error;
+ if (hdl_streaming->error)
+ return hdl_streaming->error;
+ if (hdl_sdr_cap->error)
+ return hdl_sdr_cap->error;
+ if (hdl_loop_cap->error)
+ return hdl_loop_cap->error;
+
+ if (dev->autogain)
+ v4l2_ctrl_auto_cluster(2, &dev->autogain, 0, true);
+
+ if (dev->has_vid_cap) {
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_user_vid, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_user_aud, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_streaming, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_sdtv_cap, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_loop_cap, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_cap, hdl_fb, NULL, false);
+ if (hdl_vid_cap->error)
+ return hdl_vid_cap->error;
+ dev->vid_cap_dev.ctrl_handler = hdl_vid_cap;
+ }
+ if (dev->has_vid_out) {
+ v4l2_ctrl_add_handler(hdl_vid_out, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_out, hdl_user_aud, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_out, hdl_streaming, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vid_out, hdl_fb, NULL, false);
+ if (hdl_vid_out->error)
+ return hdl_vid_out->error;
+ dev->vid_out_dev.ctrl_handler = hdl_vid_out;
+ }
+ if (dev->has_vbi_cap) {
+ v4l2_ctrl_add_handler(hdl_vbi_cap, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vbi_cap, hdl_streaming, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vbi_cap, hdl_sdtv_cap, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vbi_cap, hdl_loop_cap, NULL, false);
+ if (hdl_vbi_cap->error)
+ return hdl_vbi_cap->error;
+ dev->vbi_cap_dev.ctrl_handler = hdl_vbi_cap;
+ }
+ if (dev->has_vbi_out) {
+ v4l2_ctrl_add_handler(hdl_vbi_out, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_vbi_out, hdl_streaming, NULL, false);
+ if (hdl_vbi_out->error)
+ return hdl_vbi_out->error;
+ dev->vbi_out_dev.ctrl_handler = hdl_vbi_out;
+ }
+ if (dev->has_radio_rx) {
+ v4l2_ctrl_add_handler(hdl_radio_rx, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_radio_rx, hdl_user_aud, NULL, false);
+ if (hdl_radio_rx->error)
+ return hdl_radio_rx->error;
+ dev->radio_rx_dev.ctrl_handler = hdl_radio_rx;
+ }
+ if (dev->has_radio_tx) {
+ v4l2_ctrl_add_handler(hdl_radio_tx, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_radio_tx, hdl_user_aud, NULL, false);
+ if (hdl_radio_tx->error)
+ return hdl_radio_tx->error;
+ dev->radio_tx_dev.ctrl_handler = hdl_radio_tx;
+ }
+ if (dev->has_sdr_cap) {
+ v4l2_ctrl_add_handler(hdl_sdr_cap, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_sdr_cap, hdl_streaming, NULL, false);
+ if (hdl_sdr_cap->error)
+ return hdl_sdr_cap->error;
+ dev->sdr_cap_dev.ctrl_handler = hdl_sdr_cap;
+ }
+ if (dev->has_meta_cap) {
+ v4l2_ctrl_add_handler(hdl_meta_cap, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_meta_cap, hdl_streaming, NULL, false);
+ if (hdl_meta_cap->error)
+ return hdl_meta_cap->error;
+ dev->meta_cap_dev.ctrl_handler = hdl_meta_cap;
+ }
+ if (dev->has_meta_out) {
+ v4l2_ctrl_add_handler(hdl_meta_out, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_meta_out, hdl_streaming, NULL, false);
+ if (hdl_meta_out->error)
+ return hdl_meta_out->error;
+ dev->meta_out_dev.ctrl_handler = hdl_meta_out;
+ }
+ if (dev->has_touch_cap) {
+ v4l2_ctrl_add_handler(hdl_tch_cap, hdl_user_gen, NULL, false);
+ v4l2_ctrl_add_handler(hdl_tch_cap, hdl_streaming, NULL, false);
+ if (hdl_tch_cap->error)
+ return hdl_tch_cap->error;
+ dev->touch_cap_dev.ctrl_handler = hdl_tch_cap;
+ }
+ return 0;
+}
+
+void vivid_free_controls(struct vivid_dev *dev)
+{
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_vid_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_vid_out);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_vbi_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_vbi_out);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_radio_rx);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_radio_tx);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_sdr_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_user_gen);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_user_vid);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_user_aud);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_streaming);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_sdtv_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_loop_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_fb);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_meta_cap);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_meta_out);
+ v4l2_ctrl_handler_free(&dev->ctrl_hdl_touch_cap);
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-ctrls.h b/drivers/media/test-drivers/vivid/vivid-ctrls.h
new file mode 100644
index 000000000..6fad5f5d0
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-ctrls.h
@@ -0,0 +1,22 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-ctrls.h - control support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_CTRLS_H_
+#define _VIVID_CTRLS_H_
+
+enum vivid_hw_seek_modes {
+ VIVID_HW_SEEK_BOUNDED,
+ VIVID_HW_SEEK_WRAP,
+ VIVID_HW_SEEK_BOTH,
+};
+
+int vivid_create_controls(struct vivid_dev *dev, bool show_ccs_cap,
+ bool show_ccs_out, bool no_error_inj,
+ bool has_sdtv, bool has_hdmi);
+void vivid_free_controls(struct vivid_dev *dev);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-cap.c b/drivers/media/test-drivers/vivid/vivid-kthread-cap.c
new file mode 100644
index 000000000..690daada7
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-cap.c
@@ -0,0 +1,1015 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-kthread-cap.c - video/vbi capture thread support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/init.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+#include <linux/font.h>
+#include <linux/mutex.h>
+#include <linux/videodev2.h>
+#include <linux/kthread.h>
+#include <linux/freezer.h>
+#include <linux/random.h>
+#include <linux/v4l2-dv-timings.h>
+#include <linux/jiffies.h>
+#include <asm/div64.h>
+#include <media/videobuf2-vmalloc.h>
+#include <media/v4l2-dv-timings.h>
+#include <media/v4l2-ioctl.h>
+#include <media/v4l2-fh.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-rect.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+#include "vivid-vid-cap.h"
+#include "vivid-vid-out.h"
+#include "vivid-radio-common.h"
+#include "vivid-radio-rx.h"
+#include "vivid-radio-tx.h"
+#include "vivid-sdr-cap.h"
+#include "vivid-vbi-cap.h"
+#include "vivid-vbi-out.h"
+#include "vivid-osd.h"
+#include "vivid-ctrls.h"
+#include "vivid-kthread-cap.h"
+#include "vivid-meta-cap.h"
+
+static inline v4l2_std_id vivid_get_std_cap(const struct vivid_dev *dev)
+{
+ if (vivid_is_sdtv_cap(dev))
+ return dev->std_cap[dev->input];
+ return 0;
+}
+
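+/*
+ * Decide, for a single pixel inside the output overlay window, whether the
+ * capture buffer keeps the looped video pixel (*cap on entry) or the
+ * framebuffer overlay pixel (*osd), based on the output bitmap, the clip
+ * rectangles and the framebuffer blending flags (chromakey, alpha).
+ */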
+static void copy_pix(struct vivid_dev *dev, int win_y, int win_x,
+ u16 *cap, const u16 *osd)
+{
+ u16 out;
+ int left = dev->overlay_out_left;
+ int top = dev->overlay_out_top;
+ int fb_x = win_x + left;
+ int fb_y = win_y + top;
+ int i;
+
+ out = *cap;
+ *cap = *osd;
+ if (dev->bitmap_out) {
+ const u8 *p = dev->bitmap_out;
+ unsigned stride = (dev->compose_out.width + 7) / 8;
+
+ win_x -= dev->compose_out.left;
+ win_y -= dev->compose_out.top;
+ if (!(p[stride * win_y + win_x / 8] & (1 << (win_x & 7))))
+ return;
+ }
+
+ for (i = 0; i < dev->clipcount_out; i++) {
+ struct v4l2_rect *r = &dev->clips_out[i].c;
+
+ if (fb_y >= r->top && fb_y < r->top + r->height &&
+ fb_x >= r->left && fb_x < r->left + r->width)
+ return;
+ }
+ if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_CHROMAKEY) &&
+ *osd != dev->chromakey_out)
+ return;
+ if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_SRC_CHROMAKEY) &&
+ out == dev->chromakey_out)
+ return;
+ if (dev->fmt_cap->alpha_mask) {
+ if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_GLOBAL_ALPHA) &&
+ dev->global_alpha_out)
+ return;
+ if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_LOCAL_ALPHA) &&
+ *cap & dev->fmt_cap->alpha_mask)
+ return;
+ if ((dev->fbuf_out_flags & V4L2_FBUF_FLAG_LOCAL_INV_ALPHA) &&
+ !(*cap & dev->fmt_cap->alpha_mask))
+ return;
+ }
+ *cap = out;
+}
+
+static void blend_line(struct vivid_dev *dev, unsigned y_offset, unsigned x_offset,
+ u8 *vcapbuf, const u8 *vosdbuf,
+ unsigned width, unsigned pixsize)
+{
+ unsigned x;
+
+ for (x = 0; x < width; x++, vcapbuf += pixsize, vosdbuf += pixsize) {
+ copy_pix(dev, y_offset, x_offset + x,
+ (u16 *)vcapbuf, (const u16 *)vosdbuf);
+ }
+}
+
+static void scale_line(const u8 *src, u8 *dst, unsigned srcw, unsigned dstw, unsigned twopixsize)
+{
+ /* Coarse scaling with Bresenham */
+ unsigned int_part;
+ unsigned fract_part;
+ unsigned src_x = 0;
+ unsigned error = 0;
+ unsigned x;
+
+ /*
+ * We always combine two pixels to prevent color bleed in the packed
+ * yuv case.
+ */
+ srcw /= 2;
+ dstw /= 2;
+ int_part = srcw / dstw;
+ fract_part = srcw % dstw;
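+	/*
+	 * The integer part advances the source position by a fixed number of
+	 * pixel pairs for every destination pixel pair; the fractional
+	 * remainder accumulates in 'error' and adds one extra source step
+	 * each time it reaches dstw, spreading the remainder evenly across
+	 * the line.
+	 */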
+ for (x = 0; x < dstw; x++, dst += twopixsize) {
+ memcpy(dst, src + src_x * twopixsize, twopixsize);
+ src_x += int_part;
+ error += fract_part;
+ if (error >= dstw) {
+ error -= dstw;
+ src_x++;
+ }
+ }
+}
+
+/*
+ * Precalculate the rectangles needed to perform video looping:
+ *
+ * The nominal pipeline is that the video output buffer is cropped by
+ * crop_out, scaled to compose_out, overlaid with the output overlay,
+ * cropped on the capture side by crop_cap and scaled again to the video
+ * capture buffer using compose_cap.
+ *
+ * To keep things efficient we calculate the intersection of compose_out
+ * and crop_cap (since that's the only part of the video that will
+ * actually end up in the capture buffer), then determine the corresponding
+ * parts of the video output buffer and of the video capture buffer, so we
+ * can scale the video straight from the output buffer to the capture
+ * buffer without any intermediate steps.
+ *
+ * If we need to deal with an output overlay, then there is no choice and
+ * that intermediate step still has to be taken. For the output overlay
+ * support we calculate the intersection of the framebuffer and the overlay
+ * window (which may be partially or wholly outside of the framebuffer
+ * itself) and the intersection of that with loop_vid_copy (i.e. the part of
+ * the actual looped video that will be overlaid). The result is calculated
+ * both in framebuffer coordinates (loop_fb_copy) and compose_out coordinates
+ * (loop_vid_overlay). Finally calculate the part of the capture buffer that
+ * will receive that overlaid video.
+ */
+static void vivid_precalc_copy_rects(struct vivid_dev *dev)
+{
+ /* Framebuffer rectangle */
+ struct v4l2_rect r_fb = {
+ 0, 0, dev->display_width, dev->display_height
+ };
+ /* Overlay window rectangle in framebuffer coordinates */
+ struct v4l2_rect r_overlay = {
+ dev->overlay_out_left, dev->overlay_out_top,
+ dev->compose_out.width, dev->compose_out.height
+ };
+
+ v4l2_rect_intersect(&dev->loop_vid_copy, &dev->crop_cap, &dev->compose_out);
+
+ dev->loop_vid_out = dev->loop_vid_copy;
+ v4l2_rect_scale(&dev->loop_vid_out, &dev->compose_out, &dev->crop_out);
+ dev->loop_vid_out.left += dev->crop_out.left;
+ dev->loop_vid_out.top += dev->crop_out.top;
+
+ dev->loop_vid_cap = dev->loop_vid_copy;
+ v4l2_rect_scale(&dev->loop_vid_cap, &dev->crop_cap, &dev->compose_cap);
+
+ dprintk(dev, 1,
+ "loop_vid_copy: %dx%d@%dx%d loop_vid_out: %dx%d@%dx%d loop_vid_cap: %dx%d@%dx%d\n",
+ dev->loop_vid_copy.width, dev->loop_vid_copy.height,
+ dev->loop_vid_copy.left, dev->loop_vid_copy.top,
+ dev->loop_vid_out.width, dev->loop_vid_out.height,
+ dev->loop_vid_out.left, dev->loop_vid_out.top,
+ dev->loop_vid_cap.width, dev->loop_vid_cap.height,
+ dev->loop_vid_cap.left, dev->loop_vid_cap.top);
+
+ v4l2_rect_intersect(&r_overlay, &r_fb, &r_overlay);
+
+ /* shift r_overlay to the same origin as compose_out */
+ r_overlay.left += dev->compose_out.left - dev->overlay_out_left;
+ r_overlay.top += dev->compose_out.top - dev->overlay_out_top;
+
+ v4l2_rect_intersect(&dev->loop_vid_overlay, &r_overlay, &dev->loop_vid_copy);
+ dev->loop_fb_copy = dev->loop_vid_overlay;
+
+ /* shift dev->loop_fb_copy back again to the fb origin */
+ dev->loop_fb_copy.left -= dev->compose_out.left - dev->overlay_out_left;
+ dev->loop_fb_copy.top -= dev->compose_out.top - dev->overlay_out_top;
+
+ dev->loop_vid_overlay_cap = dev->loop_vid_overlay;
+ v4l2_rect_scale(&dev->loop_vid_overlay_cap, &dev->crop_cap, &dev->compose_cap);
+
+ dprintk(dev, 1,
+ "loop_fb_copy: %dx%d@%dx%d loop_vid_overlay: %dx%d@%dx%d loop_vid_overlay_cap: %dx%d@%dx%d\n",
+ dev->loop_fb_copy.width, dev->loop_fb_copy.height,
+ dev->loop_fb_copy.left, dev->loop_fb_copy.top,
+ dev->loop_vid_overlay.width, dev->loop_vid_overlay.height,
+ dev->loop_vid_overlay.left, dev->loop_vid_overlay.top,
+ dev->loop_vid_overlay_cap.width, dev->loop_vid_overlay_cap.height,
+ dev->loop_vid_overlay_cap.left, dev->loop_vid_overlay_cap.top);
+}
+
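+/*
+ * Return the start address of plane p in the buffer. If the format stores
+ * all planes in a single memory buffer (tpg_g_buffers() == 1), the planes
+ * follow each other in buffer 0 and the offset of plane p is the sum of
+ * the sizes of the planes before it.
+ */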
+static void *plane_vaddr(struct tpg_data *tpg, struct vivid_buffer *buf,
+ unsigned p, unsigned bpl[TPG_MAX_PLANES], unsigned h)
+{
+ unsigned i;
+ void *vbuf;
+
+ if (p == 0 || tpg_g_buffers(tpg) > 1)
+ return vb2_plane_vaddr(&buf->vb.vb2_buf, p);
+ vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+ for (i = 0; i < p; i++)
+ vbuf += bpl[i] * h / tpg->vdownsampling[i];
+ return vbuf;
+}
+
+static noinline_for_stack int vivid_copy_buffer(struct vivid_dev *dev, unsigned p,
+ u8 *vcapbuf, struct vivid_buffer *vid_cap_buf)
+{
+ bool blank = dev->must_blank[vid_cap_buf->vb.vb2_buf.index];
+ struct tpg_data *tpg = &dev->tpg;
+ struct vivid_buffer *vid_out_buf = NULL;
+ unsigned vdiv = dev->fmt_out->vdownsampling[p];
+ unsigned twopixsize = tpg_g_twopixelsize(tpg, p);
+ unsigned img_width = tpg_hdiv(tpg, p, dev->compose_cap.width);
+ unsigned img_height = dev->compose_cap.height;
+ unsigned stride_cap = tpg->bytesperline[p];
+ unsigned stride_out = dev->bytesperline_out[p];
+ unsigned stride_osd = dev->display_byte_stride;
+ unsigned hmax = (img_height * tpg->perc_fill) / 100;
+ u8 *voutbuf;
+ u8 *vosdbuf = NULL;
+ unsigned y;
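+	/*
+	 * Per-pixel blending of the output overlay is only needed if a
+	 * bitmap, clip rectangles or framebuffer blending flags are in
+	 * effect; otherwise the overlay line can be copied as-is.
+	 */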
+ bool blend = dev->bitmap_out || dev->clipcount_out || dev->fbuf_out_flags;
+ /* Coarse scaling with Bresenham */
+ unsigned vid_out_int_part;
+ unsigned vid_out_fract_part;
+ unsigned vid_out_y = 0;
+ unsigned vid_out_error = 0;
+ unsigned vid_overlay_int_part = 0;
+ unsigned vid_overlay_fract_part = 0;
+ unsigned vid_overlay_y = 0;
+ unsigned vid_overlay_error = 0;
+ unsigned vid_cap_left = tpg_hdiv(tpg, p, dev->loop_vid_cap.left);
+ unsigned vid_cap_right;
+ bool quick;
+
+ vid_out_int_part = dev->loop_vid_out.height / dev->loop_vid_cap.height;
+ vid_out_fract_part = dev->loop_vid_out.height % dev->loop_vid_cap.height;
+
+ if (!list_empty(&dev->vid_out_active))
+ vid_out_buf = list_entry(dev->vid_out_active.next,
+ struct vivid_buffer, list);
+ if (vid_out_buf == NULL)
+ return -ENODATA;
+
+ vid_cap_buf->vb.field = vid_out_buf->vb.field;
+
+ voutbuf = plane_vaddr(tpg, vid_out_buf, p,
+ dev->bytesperline_out, dev->fmt_out_rect.height);
+ if (p < dev->fmt_out->buffers)
+ voutbuf += vid_out_buf->vb.vb2_buf.planes[p].data_offset;
+ voutbuf += tpg_hdiv(tpg, p, dev->loop_vid_out.left) +
+ (dev->loop_vid_out.top / vdiv) * stride_out;
+ vcapbuf += tpg_hdiv(tpg, p, dev->compose_cap.left) +
+ (dev->compose_cap.top / vdiv) * stride_cap;
+
+ if (dev->loop_vid_copy.width == 0 || dev->loop_vid_copy.height == 0) {
+ /*
+ * If there is nothing to copy, then just fill the capture window
+ * with black.
+ */
+ for (y = 0; y < hmax / vdiv; y++, vcapbuf += stride_cap)
+ memcpy(vcapbuf, tpg->black_line[p], img_width);
+ return 0;
+ }
+
+ if (dev->overlay_out_enabled &&
+ dev->loop_vid_overlay.width && dev->loop_vid_overlay.height) {
+ vosdbuf = dev->video_vbase;
+ vosdbuf += (dev->loop_fb_copy.left * twopixsize) / 2 +
+ dev->loop_fb_copy.top * stride_osd;
+ vid_overlay_int_part = dev->loop_vid_overlay.height /
+ dev->loop_vid_overlay_cap.height;
+ vid_overlay_fract_part = dev->loop_vid_overlay.height %
+ dev->loop_vid_overlay_cap.height;
+ }
+
+ vid_cap_right = tpg_hdiv(tpg, p, dev->loop_vid_cap.left + dev->loop_vid_cap.width);
+ /* quick is true if no video scaling is needed */
+ quick = dev->loop_vid_out.width == dev->loop_vid_cap.width;
+
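+	/*
+	 * cur_scaled_line remembers which output line is cached in
+	 * dev->scaled_line so it can be reused when consecutive capture
+	 * lines map to the same output line; start with an out-of-range
+	 * value so the first line is always scaled.
+	 */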
+ dev->cur_scaled_line = dev->loop_vid_out.height;
+ for (y = 0; y < hmax; y += vdiv, vcapbuf += stride_cap) {
+ /* osdline is true if this line requires overlay blending */
+ bool osdline = vosdbuf && y >= dev->loop_vid_overlay_cap.top &&
+ y < dev->loop_vid_overlay_cap.top + dev->loop_vid_overlay_cap.height;
+
+ /*
+ * If this line of the capture buffer doesn't get any video, then
+ * just fill with black.
+ */
+ if (y < dev->loop_vid_cap.top ||
+ y >= dev->loop_vid_cap.top + dev->loop_vid_cap.height) {
+ memcpy(vcapbuf, tpg->black_line[p], img_width);
+ continue;
+ }
+
+ /* fill the left border with black */
+ if (dev->loop_vid_cap.left)
+ memcpy(vcapbuf, tpg->black_line[p], vid_cap_left);
+
+ /* fill the right border with black */
+ if (vid_cap_right < img_width)
+ memcpy(vcapbuf + vid_cap_right, tpg->black_line[p],
+ img_width - vid_cap_right);
+
+ if (quick && !osdline) {
+ memcpy(vcapbuf + vid_cap_left,
+ voutbuf + vid_out_y * stride_out,
+ tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
+ goto update_vid_out_y;
+ }
+ if (dev->cur_scaled_line == vid_out_y) {
+ memcpy(vcapbuf + vid_cap_left, dev->scaled_line,
+ tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
+ goto update_vid_out_y;
+ }
+ if (!osdline) {
+ scale_line(voutbuf + vid_out_y * stride_out, dev->scaled_line,
+ tpg_hdiv(tpg, p, dev->loop_vid_out.width),
+ tpg_hdiv(tpg, p, dev->loop_vid_cap.width),
+ tpg_g_twopixelsize(tpg, p));
+ } else {
+ /*
+ * Offset in bytes within loop_vid_copy to the start of the
+ * loop_vid_overlay rectangle.
+ */
+ unsigned offset =
+ ((dev->loop_vid_overlay.left - dev->loop_vid_copy.left) *
+ twopixsize) / 2;
+ u8 *osd = vosdbuf + vid_overlay_y * stride_osd;
+
+ scale_line(voutbuf + vid_out_y * stride_out, dev->blended_line,
+ dev->loop_vid_out.width, dev->loop_vid_copy.width,
+ tpg_g_twopixelsize(tpg, p));
+ if (blend)
+ blend_line(dev, vid_overlay_y + dev->loop_vid_overlay.top,
+ dev->loop_vid_overlay.left,
+ dev->blended_line + offset, osd,
+ dev->loop_vid_overlay.width, twopixsize / 2);
+ else
+ memcpy(dev->blended_line + offset,
+ osd, (dev->loop_vid_overlay.width * twopixsize) / 2);
+ scale_line(dev->blended_line, dev->scaled_line,
+ dev->loop_vid_copy.width, dev->loop_vid_cap.width,
+ tpg_g_twopixelsize(tpg, p));
+ }
+ dev->cur_scaled_line = vid_out_y;
+ memcpy(vcapbuf + vid_cap_left, dev->scaled_line,
+ tpg_hdiv(tpg, p, dev->loop_vid_cap.width));
+
+update_vid_out_y:
+ if (osdline) {
+ vid_overlay_y += vid_overlay_int_part;
+ vid_overlay_error += vid_overlay_fract_part;
+ if (vid_overlay_error >= dev->loop_vid_overlay_cap.height) {
+ vid_overlay_error -= dev->loop_vid_overlay_cap.height;
+ vid_overlay_y++;
+ }
+ }
+ vid_out_y += vid_out_int_part;
+ vid_out_error += vid_out_fract_part;
+ if (vid_out_error >= dev->loop_vid_cap.height / vdiv) {
+ vid_out_error -= dev->loop_vid_cap.height / vdiv;
+ vid_out_y++;
+ }
+ }
+
+ if (!blank)
+ return 0;
+ for (; y < img_height; y += vdiv, vcapbuf += stride_cap)
+ memcpy(vcapbuf, tpg->contrast_line[p], img_width);
+ return 0;
+}
+
+static void vivid_fillbuff(struct vivid_dev *dev, struct vivid_buffer *buf)
+{
+ struct tpg_data *tpg = &dev->tpg;
+ unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_cap) ? 2 : 1;
+ unsigned line_height = 16 / factor;
+ bool is_tv = vivid_is_sdtv_cap(dev);
+ bool is_60hz = is_tv && (dev->std_cap[dev->input] & V4L2_STD_525_60);
+ unsigned p;
+ int line = 1;
+ u8 *basep[TPG_MAX_PLANES][2];
+ unsigned ms;
+ char str[100];
+ s32 gain;
+ bool is_loop = false;
+
+ if (dev->loop_video && dev->can_loop_video &&
+ ((vivid_is_svid_cap(dev) &&
+ !VIVID_INVALID_SIGNAL(dev->std_signal_mode[dev->input])) ||
+ (vivid_is_hdmi_cap(dev) &&
+ !VIVID_INVALID_SIGNAL(dev->dv_timings_signal_mode[dev->input]))))
+ is_loop = true;
+
+ buf->vb.sequence = dev->vid_cap_seq_count;
+ v4l2_ctrl_s_ctrl(dev->ro_int32, buf->vb.sequence & 0xff);
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE) {
+ /*
+ * 60 Hz standards start with the bottom field, 50 Hz standards
+ * with the top field. So if the 0-based seq_count is even,
+ * then the field is TOP for 50 Hz and BOTTOM for 60 Hz
+ * standards.
+ */
+ buf->vb.field = ((dev->vid_cap_seq_count & 1) ^ is_60hz) ?
+ V4L2_FIELD_BOTTOM : V4L2_FIELD_TOP;
+ /*
+ * The sequence counter counts frames, not fields. So divide
+ * by two.
+ */
+ buf->vb.sequence /= 2;
+ } else {
+ buf->vb.field = dev->field_cap;
+ }
+ tpg_s_field(tpg, buf->vb.field,
+ dev->field_cap == V4L2_FIELD_ALTERNATE);
+ tpg_s_perc_fill_blank(tpg, dev->must_blank[buf->vb.vb2_buf.index]);
+
+ vivid_precalc_copy_rects(dev);
+
+ for (p = 0; p < tpg_g_planes(tpg); p++) {
+ void *vbuf = plane_vaddr(tpg, buf, p,
+ tpg->bytesperline, tpg->buf_height);
+
+ /*
+ * The first plane of a multiplanar format has a non-zero
+ * data_offset. This helps testing whether the application
+ * correctly supports non-zero data offsets.
+ */
+ if (p < tpg_g_buffers(tpg) && dev->fmt_cap->data_offset[p]) {
+ memset(vbuf, dev->fmt_cap->data_offset[p] & 0xff,
+ dev->fmt_cap->data_offset[p]);
+ vbuf += dev->fmt_cap->data_offset[p];
+ }
+ tpg_calc_text_basep(tpg, basep, p, vbuf);
+ if (!is_loop || vivid_copy_buffer(dev, p, vbuf, buf))
+ tpg_fill_plane_buffer(tpg, vivid_get_std_cap(dev),
+ p, vbuf);
+ }
+ dev->must_blank[buf->vb.vb2_buf.index] = false;
+
+	/* Update the stream time; only do so at the start of a new frame. */
+ if (dev->field_cap != V4L2_FIELD_ALTERNATE ||
+ (dev->vid_cap_seq_count & 1) == 0)
+ dev->ms_vid_cap =
+ jiffies_to_msecs(jiffies - dev->jiffies_vid_cap);
+
+ ms = dev->ms_vid_cap;
+ if (dev->osd_mode <= 1) {
+ snprintf(str, sizeof(str), " %02d:%02d:%02d:%03d %u%s",
+ (ms / (60 * 60 * 1000)) % 24,
+ (ms / (60 * 1000)) % 60,
+ (ms / 1000) % 60,
+ ms % 1000,
+ buf->vb.sequence,
+ (dev->field_cap == V4L2_FIELD_ALTERNATE) ?
+ (buf->vb.field == V4L2_FIELD_TOP ?
+ " top" : " bottom") : "");
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ }
+ if (dev->osd_mode == 0) {
+ snprintf(str, sizeof(str), " %dx%d, input %d ",
+ dev->src_rect.width, dev->src_rect.height, dev->input);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+
+ gain = v4l2_ctrl_g_ctrl(dev->gain);
+ mutex_lock(dev->ctrl_hdl_user_vid.lock);
+ snprintf(str, sizeof(str),
+ " brightness %3d, contrast %3d, saturation %3d, hue %d ",
+ dev->brightness->cur.val,
+ dev->contrast->cur.val,
+ dev->saturation->cur.val,
+ dev->hue->cur.val);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ snprintf(str, sizeof(str),
+ " autogain %d, gain %3d, alpha 0x%02x ",
+ dev->autogain->cur.val, gain, dev->alpha->cur.val);
+ mutex_unlock(dev->ctrl_hdl_user_vid.lock);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ mutex_lock(dev->ctrl_hdl_user_aud.lock);
+ snprintf(str, sizeof(str),
+ " volume %3d, mute %d ",
+ dev->volume->cur.val, dev->mute->cur.val);
+ mutex_unlock(dev->ctrl_hdl_user_aud.lock);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ mutex_lock(dev->ctrl_hdl_user_gen.lock);
+ snprintf(str, sizeof(str), " int32 %d, ro_int32 %d, int64 %lld, bitmask %08x ",
+ dev->int32->cur.val,
+ dev->ro_int32->cur.val,
+ *dev->int64->p_cur.p_s64,
+ dev->bitmask->cur.val);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ snprintf(str, sizeof(str), " boolean %d, menu %s, string \"%s\" ",
+ dev->boolean->cur.val,
+ dev->menu->qmenu[dev->menu->cur.val],
+ dev->string->p_cur.p_char);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ snprintf(str, sizeof(str), " integer_menu %lld, value %d ",
+ dev->int_menu->qmenu_int[dev->int_menu->cur.val],
+ dev->int_menu->cur.val);
+ mutex_unlock(dev->ctrl_hdl_user_gen.lock);
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ if (dev->button_pressed) {
+ dev->button_pressed--;
+ snprintf(str, sizeof(str), " button pressed!");
+ tpg_gen_text(tpg, basep, line++ * line_height, 16, str);
+ }
+ if (dev->osd[0]) {
+ if (vivid_is_hdmi_cap(dev)) {
+ snprintf(str, sizeof(str),
+ " OSD \"%s\"", dev->osd);
+ tpg_gen_text(tpg, basep, line++ * line_height,
+ 16, str);
+ }
+ if (dev->osd_jiffies &&
+ time_is_before_jiffies(dev->osd_jiffies + 5 * HZ)) {
+ dev->osd[0] = 0;
+ dev->osd_jiffies = 0;
+ }
+ }
+ }
+}
+
+/*
+ * Return true if this pixel coordinate is a valid video pixel.
+ */
+static bool valid_pix(struct vivid_dev *dev, int win_y, int win_x, int fb_y, int fb_x)
+{
+ int i;
+
+ if (dev->bitmap_cap) {
+ /*
+ * Only if the corresponding bit in the bitmap is set can
+ * the video pixel be shown. Coordinates are relative to
+ * the overlay window set by VIDIOC_S_FMT.
+ */
+ const u8 *p = dev->bitmap_cap;
+ unsigned stride = (dev->compose_cap.width + 7) / 8;
+
+ if (!(p[stride * win_y + win_x / 8] & (1 << (win_x & 7))))
+ return false;
+ }
+
+ for (i = 0; i < dev->clipcount_cap; i++) {
+ /*
+ * Only if the framebuffer coordinate is not in any of the
+		 * clip rectangles will the video pixel be shown.
+ */
+ struct v4l2_rect *r = &dev->clips_cap[i].c;
+
+ if (fb_y >= r->top && fb_y < r->top + r->height &&
+ fb_x >= r->left && fb_x < r->left + r->width)
+ return false;
+ }
+ return true;
+}
+
+/*
+ * Draw the image into the overlay buffer.
+ * Note that the combination of overlay and multiplanar is not supported.
+ */
+static void vivid_overlay(struct vivid_dev *dev, struct vivid_buffer *buf)
+{
+ struct tpg_data *tpg = &dev->tpg;
+ unsigned pixsize = tpg_g_twopixelsize(tpg, 0) / 2;
+ void *vbase = dev->fb_vbase_cap;
+ void *vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+ unsigned img_width = dev->compose_cap.width;
+ unsigned img_height = dev->compose_cap.height;
+ unsigned stride = tpg->bytesperline[0];
+ /* if quick is true, then valid_pix() doesn't have to be called */
+ bool quick = dev->bitmap_cap == NULL && dev->clipcount_cap == 0;
+ int x, y, w, out_x = 0;
+
+ /*
+	 * Overlay is only supported for formats that have a twopixelsize
+	 * of at least 2. Warn and bail out if that's not the case.
+ */
+ if (WARN_ON(pixsize == 0))
+ return;
+ if ((dev->overlay_cap_field == V4L2_FIELD_TOP ||
+ dev->overlay_cap_field == V4L2_FIELD_BOTTOM) &&
+ dev->overlay_cap_field != buf->vb.field)
+ return;
+
+ vbuf += dev->compose_cap.left * pixsize + dev->compose_cap.top * stride;
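+	/*
+	 * Clip the overlay window horizontally against the framebuffer:
+	 * if it starts left of the framebuffer, skip the first out_x
+	 * pixels of each source line, otherwise limit the width to what
+	 * still fits in the framebuffer.
+	 */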
+ x = dev->overlay_cap_left;
+ w = img_width;
+ if (x < 0) {
+ out_x = -x;
+ w = w - out_x;
+ x = 0;
+ } else {
+ w = dev->fb_cap.fmt.width - x;
+ if (w > img_width)
+ w = img_width;
+ }
+ if (w <= 0)
+ return;
+ if (dev->overlay_cap_top >= 0)
+ vbase += dev->overlay_cap_top * dev->fb_cap.fmt.bytesperline;
+ for (y = dev->overlay_cap_top;
+ y < dev->overlay_cap_top + (int)img_height;
+ y++, vbuf += stride) {
+ int px;
+
+ if (y < 0 || y > dev->fb_cap.fmt.height)
+ continue;
+ if (quick) {
+ memcpy(vbase + x * pixsize,
+ vbuf + out_x * pixsize, w * pixsize);
+ vbase += dev->fb_cap.fmt.bytesperline;
+ continue;
+ }
+ for (px = 0; px < w; px++) {
+ if (!valid_pix(dev, y - dev->overlay_cap_top,
+ px + out_x, y, px + x))
+ continue;
+ memcpy(vbase + (px + x) * pixsize,
+ vbuf + (px + out_x) * pixsize,
+ pixsize);
+ }
+ vbase += dev->fb_cap.fmt.bytesperline;
+ }
+}
+
+static void vivid_cap_update_frame_period(struct vivid_dev *dev)
+{
+ u64 f_period;
+
+ f_period = (u64)dev->timeperframe_vid_cap.numerator * 1000000000;
+ if (WARN_ON(dev->timeperframe_vid_cap.denominator == 0))
+ dev->timeperframe_vid_cap.denominator = 1;
+ do_div(f_period, dev->timeperframe_vid_cap.denominator);
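+	/*
+	 * In V4L2_FIELD_ALTERNATE mode each buffer carries a single field,
+	 * so buffers are produced at twice the frame rate and the period
+	 * per buffer is half the frame period.
+	 */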
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ f_period >>= 1;
+ /*
+	 * If the timestamp source is "End of Frame", then offset the
+	 * timestamp by 0.9 of the frame period.
+ */
+ dev->cap_frame_eof_offset = f_period * 9;
+ do_div(dev->cap_frame_eof_offset, 10);
+ dev->cap_frame_period = f_period;
+}
+
+static noinline_for_stack void vivid_thread_vid_cap_tick(struct vivid_dev *dev,
+ int dropped_bufs)
+{
+ struct vivid_buffer *vid_cap_buf = NULL;
+ struct vivid_buffer *vbi_cap_buf = NULL;
+ struct vivid_buffer *meta_cap_buf = NULL;
+ u64 f_time = 0;
+
+ dprintk(dev, 1, "Video Capture Thread Tick\n");
+
+ while (dropped_bufs-- > 1)
+ tpg_update_mv_count(&dev->tpg,
+ dev->field_cap == V4L2_FIELD_NONE ||
+ dev->field_cap == V4L2_FIELD_ALTERNATE);
+
+ /* Drop a certain percentage of buffers. */
+ if (dev->perc_dropped_buffers &&
+ prandom_u32_max(100) < dev->perc_dropped_buffers)
+ goto update_mv;
+
+ spin_lock(&dev->slock);
+ if (!list_empty(&dev->vid_cap_active)) {
+ vid_cap_buf = list_entry(dev->vid_cap_active.next, struct vivid_buffer, list);
+ list_del(&vid_cap_buf->list);
+ }
+ if (!list_empty(&dev->vbi_cap_active)) {
+ if (dev->field_cap != V4L2_FIELD_ALTERNATE ||
+ (dev->vbi_cap_seq_count & 1)) {
+ vbi_cap_buf = list_entry(dev->vbi_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&vbi_cap_buf->list);
+ }
+ }
+ if (!list_empty(&dev->meta_cap_active)) {
+ meta_cap_buf = list_entry(dev->meta_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&meta_cap_buf->list);
+ }
+
+ spin_unlock(&dev->slock);
+
+ if (!vid_cap_buf && !vbi_cap_buf && !meta_cap_buf)
+ goto update_mv;
+
+ f_time = ktime_get_ns() + dev->time_wrap_offset;
+
+ if (vid_cap_buf) {
+ v4l2_ctrl_request_setup(vid_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_cap);
+ /* Fill buffer */
+ vivid_fillbuff(dev, vid_cap_buf);
+ dprintk(dev, 1, "filled buffer %d\n",
+ vid_cap_buf->vb.vb2_buf.index);
+
+ /* Handle overlay */
+ if (dev->overlay_cap_owner && dev->fb_cap.base &&
+ dev->fb_cap.fmt.pixelformat == dev->fmt_cap->fourcc)
+ vivid_overlay(dev, vid_cap_buf);
+
+ v4l2_ctrl_request_complete(vid_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_cap);
+ vb2_buffer_done(&vid_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "vid_cap buffer %d done\n",
+ vid_cap_buf->vb.vb2_buf.index);
+
+ vid_cap_buf->vb.vb2_buf.timestamp = f_time;
+ if (!dev->tstamp_src_is_soe)
+ vid_cap_buf->vb.vb2_buf.timestamp += dev->cap_frame_eof_offset;
+ }
+
+ if (vbi_cap_buf) {
+ u64 vbi_period;
+
+ v4l2_ctrl_request_setup(vbi_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_cap);
+ if (vbi_cap_buf->vb.vb2_buf.type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE)
+ vivid_sliced_vbi_cap_process(dev, vbi_cap_buf);
+ else
+ vivid_raw_vbi_cap_process(dev, vbi_cap_buf);
+ v4l2_ctrl_request_complete(vbi_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_cap);
+ vb2_buffer_done(&vbi_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "vbi_cap %d done\n",
+ vbi_cap_buf->vb.vb2_buf.index);
+
+		/* If capturing VBI, offset the timestamp by 0.05 of the frame period. */
+ vbi_period = dev->cap_frame_period * 5;
+ do_div(vbi_period, 100);
+ vbi_cap_buf->vb.vb2_buf.timestamp = f_time + dev->cap_frame_eof_offset + vbi_period;
+ }
+
+ if (meta_cap_buf) {
+ v4l2_ctrl_request_setup(meta_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_cap);
+ vivid_meta_cap_fillbuff(dev, meta_cap_buf, f_time);
+ v4l2_ctrl_request_complete(meta_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_cap);
+ vb2_buffer_done(&meta_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "meta_cap %d done\n",
+ meta_cap_buf->vb.vb2_buf.index);
+ meta_cap_buf->vb.vb2_buf.timestamp = f_time + dev->cap_frame_eof_offset;
+ }
+
+ dev->dqbuf_error = false;
+
+update_mv:
+ /* Update the test pattern movement counters */
+ tpg_update_mv_count(&dev->tpg, dev->field_cap == V4L2_FIELD_NONE ||
+ dev->field_cap == V4L2_FIELD_ALTERNATE);
+}
+
+static int vivid_thread_vid_cap(void *data)
+{
+ struct vivid_dev *dev = data;
+ u64 numerators_since_start;
+ u64 buffers_since_start;
+ u64 next_jiffies_since_start;
+ unsigned long jiffies_since_start;
+ unsigned long cur_jiffies;
+ unsigned wait_jiffies;
+ unsigned numerator;
+ unsigned denominator;
+ int dropped_bufs;
+
+ dprintk(dev, 1, "Video Capture Thread Start\n");
+
+ set_freezable();
+
+ /* Resets frame counters */
+ dev->cap_seq_offset = 0;
+ dev->cap_seq_count = 0;
+ dev->cap_seq_resync = false;
+ dev->jiffies_vid_cap = jiffies;
+ dev->cap_stream_start = ktime_get_ns();
+ if (dev->time_wrap)
+ dev->time_wrap_offset = dev->time_wrap - dev->cap_stream_start;
+ else
+ dev->time_wrap_offset = 0;
+ vivid_cap_update_frame_period(dev);
+
+ for (;;) {
+ try_to_freeze();
+ if (kthread_should_stop())
+ break;
+
+ if (!mutex_trylock(&dev->mutex)) {
+ schedule();
+ continue;
+ }
+
+ cur_jiffies = jiffies;
+ if (dev->cap_seq_resync) {
+ dev->jiffies_vid_cap = cur_jiffies;
+ dev->cap_seq_offset = dev->cap_seq_count + 1;
+ dev->cap_seq_count = 0;
+ dev->cap_stream_start += dev->cap_frame_period *
+ dev->cap_seq_offset;
+ vivid_cap_update_frame_period(dev);
+ dev->cap_seq_resync = false;
+ }
+ numerator = dev->timeperframe_vid_cap.numerator;
+ denominator = dev->timeperframe_vid_cap.denominator;
+
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ denominator *= 2;
+
+ /* Calculate the number of jiffies since we started streaming */
+ jiffies_since_start = cur_jiffies - dev->jiffies_vid_cap;
+ /* Get the number of buffers streamed since the start */
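+		/*
+		 * The frame period in jiffies is HZ * numerator / denominator,
+		 * so this divides the elapsed jiffies by that period, rounding
+		 * to the nearest buffer by adding half the divisor first.
+		 */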
+ buffers_since_start = (u64)jiffies_since_start * denominator +
+ (HZ * numerator) / 2;
+ do_div(buffers_since_start, HZ * numerator);
+
+ /*
+		 * After more than 0xf0000000 jiffies (rounded down to a
+		 * multiple of 'jiffies-per-day' to ease the jiffies_to_msecs
+		 * calculation) have passed since we started streaming, reset
+		 * the counters and keep track of the sequence offset.
+ */
+ if (jiffies_since_start > JIFFIES_RESYNC) {
+ dev->jiffies_vid_cap = cur_jiffies;
+ dev->cap_seq_offset = buffers_since_start;
+ buffers_since_start = 0;
+ }
+ dropped_bufs = buffers_since_start + dev->cap_seq_offset - dev->cap_seq_count;
+ dev->cap_seq_count = buffers_since_start + dev->cap_seq_offset;
+ dev->vid_cap_seq_count = dev->cap_seq_count - dev->vid_cap_seq_start;
+ dev->vbi_cap_seq_count = dev->cap_seq_count - dev->vbi_cap_seq_start;
+ dev->meta_cap_seq_count = dev->cap_seq_count - dev->meta_cap_seq_start;
+
+ vivid_thread_vid_cap_tick(dev, dropped_bufs);
+
+ /*
+ * Calculate the number of 'numerators' streamed since we started,
+ * including the current buffer.
+ */
+ numerators_since_start = ++buffers_since_start * numerator;
+
+ /* And the number of jiffies since we started */
+ jiffies_since_start = jiffies - dev->jiffies_vid_cap;
+
+ mutex_unlock(&dev->mutex);
+
+ /*
+ * Calculate when that next buffer is supposed to start
+ * in jiffies since we started streaming.
+ */
+ next_jiffies_since_start = numerators_since_start * HZ +
+ denominator / 2;
+ do_div(next_jiffies_since_start, denominator);
+ /* If it is in the past, then just schedule asap */
+ if (next_jiffies_since_start < jiffies_since_start)
+ next_jiffies_since_start = jiffies_since_start;
+
+ wait_jiffies = next_jiffies_since_start - jiffies_since_start;
+ while (time_is_after_jiffies(cur_jiffies + wait_jiffies) &&
+ !kthread_should_stop())
+ schedule();
+ }
+ dprintk(dev, 1, "Video Capture Thread End\n");
+ return 0;
+}
+
+static void vivid_grab_controls(struct vivid_dev *dev, bool grab)
+{
+ v4l2_ctrl_grab(dev->ctrl_has_crop_cap, grab);
+ v4l2_ctrl_grab(dev->ctrl_has_compose_cap, grab);
+ v4l2_ctrl_grab(dev->ctrl_has_scaler_cap, grab);
+}
+
+int vivid_start_generating_vid_cap(struct vivid_dev *dev, bool *pstreaming)
+{
+ dprintk(dev, 1, "%s\n", __func__);
+
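+	/*
+	 * If the capture thread is already running (another capture queue
+	 * started streaming first), only record where this stream's
+	 * sequence numbering starts.
+	 */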
+ if (dev->kthread_vid_cap) {
+ u32 seq_count = dev->cap_seq_count + dev->seq_wrap * 128;
+
+ if (pstreaming == &dev->vid_cap_streaming)
+ dev->vid_cap_seq_start = seq_count;
+ else if (pstreaming == &dev->vbi_cap_streaming)
+ dev->vbi_cap_seq_start = seq_count;
+ else
+ dev->meta_cap_seq_start = seq_count;
+ *pstreaming = true;
+ return 0;
+ }
+
+ /* Resets frame counters */
+ tpg_init_mv_count(&dev->tpg);
+
+ dev->vid_cap_seq_start = dev->seq_wrap * 128;
+ dev->vbi_cap_seq_start = dev->seq_wrap * 128;
+ dev->meta_cap_seq_start = dev->seq_wrap * 128;
+
+ dev->kthread_vid_cap = kthread_run(vivid_thread_vid_cap, dev,
+ "%s-vid-cap", dev->v4l2_dev.name);
+
+ if (IS_ERR(dev->kthread_vid_cap)) {
+ int err = PTR_ERR(dev->kthread_vid_cap);
+
+ dev->kthread_vid_cap = NULL;
+ v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
+ return err;
+ }
+ *pstreaming = true;
+ vivid_grab_controls(dev, true);
+
+ dprintk(dev, 1, "returning from %s\n", __func__);
+ return 0;
+}
+
+void vivid_stop_generating_vid_cap(struct vivid_dev *dev, bool *pstreaming)
+{
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->kthread_vid_cap == NULL)
+ return;
+
+ *pstreaming = false;
+ if (pstreaming == &dev->vid_cap_streaming) {
+ /* Release all active buffers */
+ while (!list_empty(&dev->vid_cap_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->vid_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_cap);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "vid_cap buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (pstreaming == &dev->vbi_cap_streaming) {
+ while (!list_empty(&dev->vbi_cap_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->vbi_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_cap);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "vbi_cap buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (pstreaming == &dev->meta_cap_streaming) {
+ while (!list_empty(&dev->meta_cap_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->meta_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_cap);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "meta_cap buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (dev->vid_cap_streaming || dev->vbi_cap_streaming ||
+ dev->meta_cap_streaming)
+ return;
+
+ /* shutdown control thread */
+ vivid_grab_controls(dev, false);
+ kthread_stop(dev->kthread_vid_cap);
+ dev->kthread_vid_cap = NULL;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-cap.h b/drivers/media/test-drivers/vivid/vivid-kthread-cap.h
new file mode 100644
index 000000000..0f4301530
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-cap.h
@@ -0,0 +1,14 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-kthread-cap.h - video/vbi capture thread support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_KTHREAD_CAP_H_
+#define _VIVID_KTHREAD_CAP_H_
+
+int vivid_start_generating_vid_cap(struct vivid_dev *dev, bool *pstreaming);
+void vivid_stop_generating_vid_cap(struct vivid_dev *dev, bool *pstreaming);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-out.c b/drivers/media/test-drivers/vivid/vivid-kthread-out.c
new file mode 100644
index 000000000..0833e021b
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-out.c
@@ -0,0 +1,357 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-kthread-out.c - video/vbi output thread support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/init.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+#include <linux/font.h>
+#include <linux/mutex.h>
+#include <linux/videodev2.h>
+#include <linux/kthread.h>
+#include <linux/freezer.h>
+#include <linux/random.h>
+#include <linux/v4l2-dv-timings.h>
+#include <linux/jiffies.h>
+#include <asm/div64.h>
+#include <media/videobuf2-vmalloc.h>
+#include <media/v4l2-dv-timings.h>
+#include <media/v4l2-ioctl.h>
+#include <media/v4l2-fh.h>
+#include <media/v4l2-event.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+#include "vivid-vid-cap.h"
+#include "vivid-vid-out.h"
+#include "vivid-radio-common.h"
+#include "vivid-radio-rx.h"
+#include "vivid-radio-tx.h"
+#include "vivid-sdr-cap.h"
+#include "vivid-vbi-cap.h"
+#include "vivid-vbi-out.h"
+#include "vivid-osd.h"
+#include "vivid-ctrls.h"
+#include "vivid-kthread-out.h"
+#include "vivid-meta-out.h"
+
+static void vivid_thread_vid_out_tick(struct vivid_dev *dev)
+{
+ struct vivid_buffer *vid_out_buf = NULL;
+ struct vivid_buffer *vbi_out_buf = NULL;
+ struct vivid_buffer *meta_out_buf = NULL;
+
+ dprintk(dev, 1, "Video Output Thread Tick\n");
+
+ /* Drop a certain percentage of buffers. */
+ if (dev->perc_dropped_buffers &&
+ prandom_u32_max(100) < dev->perc_dropped_buffers)
+ return;
+
+ spin_lock(&dev->slock);
+ /*
+	 * Only dequeue a buffer if there is at least one more pending.
+ * This makes video loopback possible.
+ */
+ if (!list_empty(&dev->vid_out_active) &&
+ !list_is_singular(&dev->vid_out_active)) {
+ vid_out_buf = list_entry(dev->vid_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&vid_out_buf->list);
+ }
+ if (!list_empty(&dev->vbi_out_active) &&
+ (dev->field_out != V4L2_FIELD_ALTERNATE ||
+ (dev->vbi_out_seq_count & 1))) {
+ vbi_out_buf = list_entry(dev->vbi_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&vbi_out_buf->list);
+ }
+ if (!list_empty(&dev->meta_out_active)) {
+ meta_out_buf = list_entry(dev->meta_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&meta_out_buf->list);
+ }
+ spin_unlock(&dev->slock);
+
+ if (!vid_out_buf && !vbi_out_buf && !meta_out_buf)
+ return;
+
+ if (vid_out_buf) {
+ v4l2_ctrl_request_setup(vid_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_out);
+ v4l2_ctrl_request_complete(vid_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_out);
+ vid_out_buf->vb.sequence = dev->vid_out_seq_count;
+ if (dev->field_out == V4L2_FIELD_ALTERNATE) {
+ /*
+ * The sequence counter counts frames, not fields.
+ * So divide by two.
+ */
+ vid_out_buf->vb.sequence /= 2;
+ }
+ vid_out_buf->vb.vb2_buf.timestamp =
+ ktime_get_ns() + dev->time_wrap_offset;
+ vb2_buffer_done(&vid_out_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "vid_out buffer %d done\n",
+ vid_out_buf->vb.vb2_buf.index);
+ }
+
+ if (vbi_out_buf) {
+ v4l2_ctrl_request_setup(vbi_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_out);
+ v4l2_ctrl_request_complete(vbi_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_out);
+ if (dev->stream_sliced_vbi_out)
+ vivid_sliced_vbi_out_process(dev, vbi_out_buf);
+
+ vbi_out_buf->vb.sequence = dev->vbi_out_seq_count;
+ vbi_out_buf->vb.vb2_buf.timestamp =
+ ktime_get_ns() + dev->time_wrap_offset;
+ vb2_buffer_done(&vbi_out_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "vbi_out buffer %d done\n",
+ vbi_out_buf->vb.vb2_buf.index);
+ }
+ if (meta_out_buf) {
+ v4l2_ctrl_request_setup(meta_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_out);
+ v4l2_ctrl_request_complete(meta_out_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_out);
+ vivid_meta_out_process(dev, meta_out_buf);
+ meta_out_buf->vb.sequence = dev->meta_out_seq_count;
+ meta_out_buf->vb.vb2_buf.timestamp =
+ ktime_get_ns() + dev->time_wrap_offset;
+ vb2_buffer_done(&meta_out_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "meta_out buffer %d done\n",
+ meta_out_buf->vb.vb2_buf.index);
+ }
+
+ dev->dqbuf_error = false;
+}
+
+static int vivid_thread_vid_out(void *data)
+{
+ struct vivid_dev *dev = data;
+ u64 numerators_since_start;
+ u64 buffers_since_start;
+ u64 next_jiffies_since_start;
+ unsigned long jiffies_since_start;
+ unsigned long cur_jiffies;
+ unsigned wait_jiffies;
+ unsigned numerator;
+ unsigned denominator;
+
+ dprintk(dev, 1, "Video Output Thread Start\n");
+
+ set_freezable();
+
+ /* Resets frame counters */
+ dev->out_seq_offset = 0;
+ dev->out_seq_count = 0;
+ dev->jiffies_vid_out = jiffies;
+ dev->out_seq_resync = false;
+ if (dev->time_wrap)
+ dev->time_wrap_offset = dev->time_wrap - ktime_get_ns();
+ else
+ dev->time_wrap_offset = 0;
+
+ for (;;) {
+ try_to_freeze();
+ if (kthread_should_stop())
+ break;
+
+ if (!mutex_trylock(&dev->mutex)) {
+ schedule();
+ continue;
+ }
+
+ cur_jiffies = jiffies;
+ if (dev->out_seq_resync) {
+ dev->jiffies_vid_out = cur_jiffies;
+ dev->out_seq_offset = dev->out_seq_count + 1;
+ dev->out_seq_count = 0;
+ dev->out_seq_resync = false;
+ }
+ numerator = dev->timeperframe_vid_out.numerator;
+ denominator = dev->timeperframe_vid_out.denominator;
+
+ if (dev->field_out == V4L2_FIELD_ALTERNATE)
+ denominator *= 2;
+
+ /* Calculate the number of jiffies since we started streaming */
+ jiffies_since_start = cur_jiffies - dev->jiffies_vid_out;
+ /* Get the number of buffers streamed since the start */
+ buffers_since_start = (u64)jiffies_since_start * denominator +
+ (HZ * numerator) / 2;
+ do_div(buffers_since_start, HZ * numerator);
+
+ /*
+		 * After more than 0xf0000000 jiffies (rounded down to a
+		 * multiple of 'jiffies-per-day' to ease the jiffies_to_msecs
+		 * calculation) have passed since we started streaming, reset
+		 * the counters and keep track of the sequence offset.
+ */
+ if (jiffies_since_start > JIFFIES_RESYNC) {
+ dev->jiffies_vid_out = cur_jiffies;
+ dev->out_seq_offset = buffers_since_start;
+ buffers_since_start = 0;
+ }
+ dev->out_seq_count = buffers_since_start + dev->out_seq_offset;
+ dev->vid_out_seq_count = dev->out_seq_count - dev->vid_out_seq_start;
+ dev->vbi_out_seq_count = dev->out_seq_count - dev->vbi_out_seq_start;
+ dev->meta_out_seq_count = dev->out_seq_count - dev->meta_out_seq_start;
+
+ vivid_thread_vid_out_tick(dev);
+ mutex_unlock(&dev->mutex);
+
+ /*
+ * Calculate the number of 'numerators' streamed since we started,
+ * not including the current buffer.
+ */
+ numerators_since_start = buffers_since_start * numerator;
+
+ /* And the number of jiffies since we started */
+ jiffies_since_start = jiffies - dev->jiffies_vid_out;
+
+ /* Increase by the 'numerator' of one buffer */
+ numerators_since_start += numerator;
+ /*
+ * Calculate when that next buffer is supposed to start
+ * in jiffies since we started streaming.
+ */
+ next_jiffies_since_start = numerators_since_start * HZ +
+ denominator / 2;
+ do_div(next_jiffies_since_start, denominator);
+ /* If it is in the past, then just schedule asap */
+ if (next_jiffies_since_start < jiffies_since_start)
+ next_jiffies_since_start = jiffies_since_start;
+
+ wait_jiffies = next_jiffies_since_start - jiffies_since_start;
+ while (time_is_after_jiffies(cur_jiffies + wait_jiffies) &&
+ !kthread_should_stop())
+ schedule();
+ }
+ dprintk(dev, 1, "Video Output Thread End\n");
+ return 0;
+}
+
+static void vivid_grab_controls(struct vivid_dev *dev, bool grab)
+{
+ v4l2_ctrl_grab(dev->ctrl_has_crop_out, grab);
+ v4l2_ctrl_grab(dev->ctrl_has_compose_out, grab);
+ v4l2_ctrl_grab(dev->ctrl_has_scaler_out, grab);
+ v4l2_ctrl_grab(dev->ctrl_tx_mode, grab);
+ v4l2_ctrl_grab(dev->ctrl_tx_rgb_range, grab);
+}
+
+int vivid_start_generating_vid_out(struct vivid_dev *dev, bool *pstreaming)
+{
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->kthread_vid_out) {
+ u32 seq_count = dev->out_seq_count + dev->seq_wrap * 128;
+
+ if (pstreaming == &dev->vid_out_streaming)
+ dev->vid_out_seq_start = seq_count;
+ else if (pstreaming == &dev->vbi_out_streaming)
+ dev->vbi_out_seq_start = seq_count;
+ else
+ dev->meta_out_seq_start = seq_count;
+ *pstreaming = true;
+ return 0;
+ }
+
+ /* Resets frame counters */
+ dev->jiffies_vid_out = jiffies;
+ dev->vid_out_seq_start = dev->seq_wrap * 128;
+ dev->vbi_out_seq_start = dev->seq_wrap * 128;
+ dev->meta_out_seq_start = dev->seq_wrap * 128;
+
+ dev->kthread_vid_out = kthread_run(vivid_thread_vid_out, dev,
+ "%s-vid-out", dev->v4l2_dev.name);
+
+ if (IS_ERR(dev->kthread_vid_out)) {
+ int err = PTR_ERR(dev->kthread_vid_out);
+
+ dev->kthread_vid_out = NULL;
+ v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
+ return err;
+ }
+ *pstreaming = true;
+ vivid_grab_controls(dev, true);
+
+ dprintk(dev, 1, "returning from %s\n", __func__);
+ return 0;
+}
+
+void vivid_stop_generating_vid_out(struct vivid_dev *dev, bool *pstreaming)
+{
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->kthread_vid_out == NULL)
+ return;
+
+ *pstreaming = false;
+ if (pstreaming == &dev->vid_out_streaming) {
+ /* Release all active buffers */
+ while (!list_empty(&dev->vid_out_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->vid_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vid_out);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "vid_out buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (pstreaming == &dev->vbi_out_streaming) {
+ while (!list_empty(&dev->vbi_out_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->vbi_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_vbi_out);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "vbi_out buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (pstreaming == &dev->meta_out_streaming) {
+ while (!list_empty(&dev->meta_out_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->meta_out_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_meta_out);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "meta_out buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+ }
+
+ if (dev->vid_out_streaming || dev->vbi_out_streaming ||
+ dev->meta_out_streaming)
+ return;
+
+ /* shutdown control thread */
+ vivid_grab_controls(dev, false);
+ kthread_stop(dev->kthread_vid_out);
+ dev->kthread_vid_out = NULL;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-out.h b/drivers/media/test-drivers/vivid/vivid-kthread-out.h
new file mode 100644
index 000000000..d5bcf44bb
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-out.h
@@ -0,0 +1,14 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-kthread-out.h - video/vbi output thread support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_KTHREAD_OUT_H_
+#define _VIVID_KTHREAD_OUT_H_
+
+int vivid_start_generating_vid_out(struct vivid_dev *dev, bool *pstreaming);
+void vivid_stop_generating_vid_out(struct vivid_dev *dev, bool *pstreaming);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-touch.c b/drivers/media/test-drivers/vivid/vivid-kthread-touch.c
new file mode 100644
index 000000000..fa711ee36
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-touch.c
@@ -0,0 +1,191 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-kthread-touch.c - touch capture thread support functions.
+ *
+ */
+
+#include <linux/freezer.h>
+#include <linux/jiffies.h>
+#include "vivid-core.h"
+#include "vivid-kthread-touch.h"
+#include "vivid-touch-cap.h"
+
+static noinline_for_stack void vivid_thread_tch_cap_tick(struct vivid_dev *dev,
+ int dropped_bufs)
+{
+ struct vivid_buffer *tch_cap_buf = NULL;
+
+ spin_lock(&dev->slock);
+ if (!list_empty(&dev->touch_cap_active)) {
+ tch_cap_buf = list_entry(dev->touch_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&tch_cap_buf->list);
+ }
+
+ spin_unlock(&dev->slock);
+
+ if (tch_cap_buf) {
+ v4l2_ctrl_request_setup(tch_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_touch_cap);
+
+ vivid_fillbuff_tch(dev, tch_cap_buf);
+ v4l2_ctrl_request_complete(tch_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_touch_cap);
+ vb2_buffer_done(&tch_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dprintk(dev, 2, "touch_cap buffer %d done\n",
+ tch_cap_buf->vb.vb2_buf.index);
+
+ tch_cap_buf->vb.vb2_buf.timestamp = ktime_get_ns() + dev->time_wrap_offset;
+ }
+ dev->dqbuf_error = false;
+}
+
+static int vivid_thread_touch_cap(void *data)
+{
+ struct vivid_dev *dev = data;
+ u64 numerators_since_start;
+ u64 buffers_since_start;
+ u64 next_jiffies_since_start;
+ unsigned long jiffies_since_start;
+ unsigned long cur_jiffies;
+ unsigned int wait_jiffies;
+ unsigned int numerator;
+ unsigned int denominator;
+ int dropped_bufs;
+
+ dprintk(dev, 1, "Touch Capture Thread Start\n");
+
+ set_freezable();
+
+ /* Resets frame counters */
+ dev->touch_cap_seq_offset = 0;
+ dev->touch_cap_seq_count = 0;
+ dev->touch_cap_seq_resync = false;
+ dev->jiffies_touch_cap = jiffies;
+ if (dev->time_wrap)
+ dev->time_wrap_offset = dev->time_wrap - ktime_get_ns();
+ else
+ dev->time_wrap_offset = 0;
+
+ for (;;) {
+ try_to_freeze();
+ if (kthread_should_stop())
+ break;
+
+ if (!mutex_trylock(&dev->mutex)) {
+ schedule();
+ continue;
+ }
+ cur_jiffies = jiffies;
+ if (dev->touch_cap_seq_resync) {
+ dev->jiffies_touch_cap = cur_jiffies;
+ dev->touch_cap_seq_offset = dev->touch_cap_seq_count + 1;
+ dev->touch_cap_seq_count = 0;
+			dev->touch_cap_seq_resync = false;
+ }
+ denominator = dev->timeperframe_tch_cap.denominator;
+ numerator = dev->timeperframe_tch_cap.numerator;
+
+ /* Calculate the number of jiffies since we started streaming */
+ jiffies_since_start = cur_jiffies - dev->jiffies_touch_cap;
+ /* Get the number of buffers streamed since the start */
+ buffers_since_start = (u64)jiffies_since_start * denominator +
+ (HZ * numerator) / 2;
+ do_div(buffers_since_start, HZ * numerator);
+
+ /*
+		 * After more than 0xf0000000 jiffies (rounded down to a
+		 * multiple of 'jiffies-per-day' to ease the jiffies_to_msecs
+		 * calculation) have passed since we started streaming, reset
+		 * the counters and keep track of the sequence offset.
+ */
+ if (jiffies_since_start > JIFFIES_RESYNC) {
+ dev->jiffies_touch_cap = cur_jiffies;
+			dev->touch_cap_seq_offset = buffers_since_start;
+ buffers_since_start = 0;
+ }
+ dropped_bufs = buffers_since_start + dev->touch_cap_seq_offset - dev->touch_cap_seq_count;
+ dev->touch_cap_seq_count = buffers_since_start + dev->touch_cap_seq_offset;
+ dev->touch_cap_with_seq_wrap_count =
+ dev->touch_cap_seq_count - dev->touch_cap_seq_start;
+
+ vivid_thread_tch_cap_tick(dev, dropped_bufs);
+
+ /*
+ * Calculate the number of 'numerators' streamed
+ * since we started, including the current buffer.
+ */
+ numerators_since_start = ++buffers_since_start * numerator;
+
+ /* And the number of jiffies since we started */
+ jiffies_since_start = jiffies - dev->jiffies_touch_cap;
+
+ mutex_unlock(&dev->mutex);
+
+ /*
+ * Calculate when that next buffer is supposed to start
+ * in jiffies since we started streaming.
+ */
+ next_jiffies_since_start = numerators_since_start * HZ +
+ denominator / 2;
+ do_div(next_jiffies_since_start, denominator);
+ /* If it is in the past, then just schedule asap */
+ if (next_jiffies_since_start < jiffies_since_start)
+ next_jiffies_since_start = jiffies_since_start;
+
+ wait_jiffies = next_jiffies_since_start - jiffies_since_start;
+ while (time_is_after_jiffies(cur_jiffies + wait_jiffies) &&
+ !kthread_should_stop())
+ schedule();
+ }
+ dprintk(dev, 1, "Touch Capture Thread End\n");
+ return 0;
+}
+
+int vivid_start_generating_touch_cap(struct vivid_dev *dev)
+{
+ if (dev->kthread_touch_cap) {
+ dev->touch_cap_streaming = true;
+ return 0;
+ }
+
+ dev->touch_cap_seq_start = dev->seq_wrap * 128;
+ dev->kthread_touch_cap = kthread_run(vivid_thread_touch_cap, dev,
+ "%s-tch-cap", dev->v4l2_dev.name);
+
+ if (IS_ERR(dev->kthread_touch_cap)) {
+ int err = PTR_ERR(dev->kthread_touch_cap);
+
+ dev->kthread_touch_cap = NULL;
+ v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
+ return err;
+ }
+ dev->touch_cap_streaming = true;
+ dprintk(dev, 1, "returning from %s\n", __func__);
+ return 0;
+}
+
+void vivid_stop_generating_touch_cap(struct vivid_dev *dev)
+{
+ if (!dev->kthread_touch_cap)
+ return;
+
+ dev->touch_cap_streaming = false;
+
+ while (!list_empty(&dev->touch_cap_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->touch_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_touch_cap);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ dprintk(dev, 2, "touch_cap buffer %d done\n",
+ buf->vb.vb2_buf.index);
+ }
+
+ kthread_stop(dev->kthread_touch_cap);
+ dev->kthread_touch_cap = NULL;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-kthread-touch.h b/drivers/media/test-drivers/vivid/vivid-kthread-touch.h
new file mode 100644
index 000000000..ecf79b462
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-kthread-touch.h
@@ -0,0 +1,13 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-kthread-touch.h - touch capture thread support functions.
+ *
+ */
+
+#ifndef _VIVID_KTHREAD_TOUCH_CAP_H_
+#define _VIVID_KTHREAD_TOUCH_CAP_H_
+
+int vivid_start_generating_touch_cap(struct vivid_dev *dev);
+void vivid_stop_generating_touch_cap(struct vivid_dev *dev);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-meta-cap.c b/drivers/media/test-drivers/vivid/vivid-meta-cap.c
new file mode 100644
index 000000000..780f96860
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-meta-cap.c
@@ -0,0 +1,201 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-meta-cap.c - meta capture support functions.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/videodev2.h>
+#include <media/v4l2-common.h>
+#include <linux/usb/video.h>
+
+#include "vivid-core.h"
+#include "vivid-kthread-cap.h"
+#include "vivid-meta-cap.h"
+
+static int meta_cap_queue_setup(struct vb2_queue *vq, unsigned int *nbuffers,
+ unsigned int *nplanes, unsigned int sizes[],
+ struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ unsigned int size = sizeof(struct vivid_uvc_meta_buf);
+
+ if (!vivid_is_webcam(dev))
+ return -EINVAL;
+
+ if (*nplanes) {
+ if (sizes[0] < size)
+ return -EINVAL;
+ } else {
+ sizes[0] = size;
+ }
+
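+	/* Make sure at least two buffers are allocated in total. */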
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = 1;
+ return 0;
+}
+
+static int meta_cap_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ unsigned int size = sizeof(struct vivid_uvc_meta_buf);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void meta_cap_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->meta_cap_active);
+ spin_unlock(&dev->slock);
+}
+
+static int meta_cap_start_streaming(struct vb2_queue *vq, unsigned int count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ dprintk(dev, 1, "%s\n", __func__);
+ dev->meta_cap_seq_count = 0;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_cap(dev,
+ &dev->meta_cap_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp,
+ &dev->meta_cap_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void meta_cap_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_cap(dev, &dev->meta_cap_streaming);
+}
+
+static void meta_cap_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_meta_cap);
+}
+
+const struct vb2_ops vivid_meta_cap_qops = {
+ .queue_setup = meta_cap_queue_setup,
+ .buf_prepare = meta_cap_buf_prepare,
+ .buf_queue = meta_cap_buf_queue,
+ .start_streaming = meta_cap_start_streaming,
+ .stop_streaming = meta_cap_stop_streaming,
+ .buf_request_complete = meta_cap_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vidioc_enum_fmt_meta_cap(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_webcam(dev))
+ return -EINVAL;
+
+ if (f->index > 0)
+ return -EINVAL;
+
+ f->type = V4L2_BUF_TYPE_META_CAPTURE;
+ f->pixelformat = V4L2_META_FMT_UVC;
+ return 0;
+}
+
+int vidioc_g_fmt_meta_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_meta_format *meta = &f->fmt.meta;
+
+ if (!vivid_is_webcam(dev) || !dev->has_meta_cap)
+ return -EINVAL;
+
+ meta->dataformat = V4L2_META_FMT_UVC;
+ meta->buffersize = sizeof(struct vivid_uvc_meta_buf);
+ return 0;
+}
+
+void vivid_meta_cap_fillbuff(struct vivid_dev *dev,
+ struct vivid_buffer *buf, u64 soe)
+{
+ struct vivid_uvc_meta_buf *meta = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+ int buf_off = 0;
+
+ buf->vb.sequence = dev->meta_cap_seq_count;
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ buf->vb.sequence /= 2;
+ memset(meta, 1, vb2_plane_size(&buf->vb.vb2_buf, 0));
+
+ meta->ns = ktime_get_ns();
+ meta->sof = buf->vb.sequence * 30;
+ meta->length = sizeof(*meta) - offsetof(struct vivid_uvc_meta_buf, length);
+ meta->flags = UVC_STREAM_EOH | UVC_STREAM_EOF;
+
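+	/* The UVC Frame ID bit toggles every frame: set it on even frames. */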
+ if ((buf->vb.sequence % 2) == 0)
+ meta->flags |= UVC_STREAM_FID;
+
+ dprintk(dev, 2, "%s ns:%llu sof:%4d len:%u flags: 0x%02x",
+ __func__, meta->ns, meta->sof, meta->length, meta->flags);
+ if (dev->meta_pts) {
+ meta->flags |= UVC_STREAM_PTS;
+ meta->buf[0] = div_u64(soe, VIVID_META_CLOCK_UNIT);
+ buf_off = 4;
+ dprintk(dev, 2, " pts: %u\n", *(__u32 *)(meta->buf));
+ }
+
+ if (dev->meta_scr) {
+ meta->flags |= UVC_STREAM_SCR;
+ meta->buf[buf_off] = div_u64((soe + dev->cap_frame_eof_offset),
+ VIVID_META_CLOCK_UNIT);
+
+ meta->buf[buf_off + 4] = (buf->vb.sequence * 30) % 1000;
+ dprintk(dev, 2, " stc: %u, sof counter: %u\n",
+ *(__u32 *)(meta->buf + buf_off),
+ *(__u16 *)(meta->buf + buf_off + 4));
+ }
+ dprintk(dev, 2, "\n");
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-meta-cap.h b/drivers/media/test-drivers/vivid/vivid-meta-cap.h
new file mode 100644
index 000000000..4670d00d1
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-meta-cap.h
@@ -0,0 +1,29 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-meta-cap.h - meta capture support functions.
+ */
+#ifndef _VIVID_META_CAP_H_
+#define _VIVID_META_CAP_H_
+
+#define VIVID_META_CLOCK_UNIT 10 /* 100 MHz */
+
+struct vivid_uvc_meta_buf {
+ __u64 ns;
+ __u16 sof;
+ __u8 length;
+ __u8 flags;
+ __u8 buf[10]; /* PTS(4)+STC(4)+SOF(2) */
+} __packed;
+
+void vivid_meta_cap_fillbuff(struct vivid_dev *dev,
+ struct vivid_buffer *buf, u64 soe);
+
+int vidioc_enum_fmt_meta_cap(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f);
+
+int vidioc_g_fmt_meta_cap(struct file *file, void *priv,
+ struct v4l2_format *f);
+
+extern const struct vb2_ops vivid_meta_cap_qops;
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-meta-out.c b/drivers/media/test-drivers/vivid/vivid-meta-out.c
new file mode 100644
index 000000000..95835b52b
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-meta-out.c
@@ -0,0 +1,175 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-meta-out.c - meta output support functions.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/videodev2.h>
+#include <media/v4l2-common.h>
+#include <linux/usb/video.h>
+
+#include "vivid-core.h"
+#include "vivid-kthread-out.h"
+#include "vivid-meta-out.h"
+
+static int meta_out_queue_setup(struct vb2_queue *vq, unsigned int *nbuffers,
+ unsigned int *nplanes, unsigned int sizes[],
+ struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ unsigned int size = sizeof(struct vivid_meta_out_buf);
+
+ if (!vivid_is_webcam(dev))
+ return -EINVAL;
+
+ if (*nplanes) {
+ if (sizes[0] < size)
+ return -EINVAL;
+ } else {
+ sizes[0] = size;
+ }
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = 1;
+ return 0;
+}
+
+static int meta_out_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ unsigned int size = sizeof(struct vivid_meta_out_buf);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void meta_out_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->meta_out_active);
+ spin_unlock(&dev->slock);
+}
+
+static int meta_out_start_streaming(struct vb2_queue *vq, unsigned int count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ dprintk(dev, 1, "%s\n", __func__);
+ dev->meta_out_seq_count = 0;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_out(dev,
+ &dev->meta_out_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp,
+ &dev->meta_out_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void meta_out_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_out(dev, &dev->meta_out_streaming);
+}
+
+static void meta_out_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_meta_out);
+}
+
+const struct vb2_ops vivid_meta_out_qops = {
+ .queue_setup = meta_out_queue_setup,
+ .buf_prepare = meta_out_buf_prepare,
+ .buf_queue = meta_out_buf_queue,
+ .start_streaming = meta_out_start_streaming,
+ .stop_streaming = meta_out_stop_streaming,
+ .buf_request_complete = meta_out_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vidioc_enum_fmt_meta_out(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_webcam(dev))
+ return -EINVAL;
+
+ if (f->index > 0)
+ return -EINVAL;
+
+ f->type = V4L2_BUF_TYPE_META_OUTPUT;
+ f->pixelformat = V4L2_META_FMT_VIVID;
+ return 0;
+}
+
+int vidioc_g_fmt_meta_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_meta_format *meta = &f->fmt.meta;
+
+ if (!vivid_is_webcam(dev) || !dev->has_meta_out)
+ return -EINVAL;
+
+ meta->dataformat = V4L2_META_FMT_VIVID;
+ meta->buffersize = sizeof(struct vivid_meta_out_buf);
+ return 0;
+}
+
+void vivid_meta_out_process(struct vivid_dev *dev,
+ struct vivid_buffer *buf)
+{
+ struct vivid_meta_out_buf *meta = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+
+ v4l2_ctrl_s_ctrl(dev->brightness, meta->brightness);
+ v4l2_ctrl_s_ctrl(dev->contrast, meta->contrast);
+ v4l2_ctrl_s_ctrl(dev->saturation, meta->saturation);
+ v4l2_ctrl_s_ctrl(dev->hue, meta->hue);
+
+ dprintk(dev, 2, " %s brightness %u contrast %u saturation %u hue %d\n",
+ __func__, meta->brightness, meta->contrast,
+ meta->saturation, meta->hue);
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-meta-out.h b/drivers/media/test-drivers/vivid/vivid-meta-out.h
new file mode 100644
index 000000000..0c639b7c2
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-meta-out.h
@@ -0,0 +1,25 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-meta-out.h - meta output support functions.
+ */
+#ifndef _VIVID_META_OUT_H_
+#define _VIVID_META_OUT_H_
+
+struct vivid_meta_out_buf {
+ u16 brightness;
+ u16 contrast;
+ u16 saturation;
+ s16 hue;
+};
+
+void vivid_meta_out_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+int vidioc_enum_fmt_meta_out(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f);
+int vidioc_g_fmt_meta_out(struct file *file, void *priv,
+ struct v4l2_format *f);
+int vidioc_s_fmt_meta_out(struct file *file, void *priv,
+ struct v4l2_format *f);
+
+extern const struct vb2_ops vivid_meta_out_qops;
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-osd.c b/drivers/media/test-drivers/vivid/vivid-osd.c
new file mode 100644
index 000000000..ec25edc67
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-osd.c
@@ -0,0 +1,388 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-osd.c - osd support for testing overlays.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/module.h>
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/init.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+#include <linux/font.h>
+#include <linux/mutex.h>
+#include <linux/videodev2.h>
+#include <linux/kthread.h>
+#include <linux/freezer.h>
+#include <linux/fb.h>
+#include <media/videobuf2-vmalloc.h>
+#include <media/v4l2-device.h>
+#include <media/v4l2-ioctl.h>
+#include <media/v4l2-ctrls.h>
+#include <media/v4l2-fh.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-common.h>
+
+#include "vivid-core.h"
+#include "vivid-osd.h"
+
+#define MAX_OSD_WIDTH 720
+#define MAX_OSD_HEIGHT 576
+
+/*
+ * Order: white, yellow, cyan, green, magenta, red, blue, black,
+ * and same again with the alpha bit set (if any)
+ */
+static const u16 rgb555[16] = {
+ 0x7fff, 0x7fe0, 0x03ff, 0x03e0, 0x7c1f, 0x7c00, 0x001f, 0x0000,
+ 0xffff, 0xffe0, 0x83ff, 0x83e0, 0xfc1f, 0xfc00, 0x801f, 0x8000
+};
+
+static const u16 rgb565[16] = {
+ 0xffff, 0xffe0, 0x07ff, 0x07e0, 0xf81f, 0xf800, 0x001f, 0x0000,
+ 0xffff, 0xffe0, 0x07ff, 0x07e0, 0xf81f, 0xf800, 0x001f, 0x0000
+};
+
+void vivid_clear_fb(struct vivid_dev *dev)
+{
+ void *p = dev->video_vbase;
+ const u16 *rgb = rgb555;
+ unsigned x, y;
+
+ if (dev->fb_defined.green.length == 6)
+ rgb = rgb565;
+
+ for (y = 0; y < dev->display_height; y++) {
+ u16 *d = p;
+
+ for (x = 0; x < dev->display_width; x++)
+ d[x] = rgb[(y / 16 + x / 16) % 16];
+ p += dev->display_byte_stride;
+ }
+}
+
+/* --------------------------------------------------------------------- */
+
+static int vivid_fb_ioctl(struct fb_info *info, unsigned cmd, unsigned long arg)
+{
+ struct vivid_dev *dev = (struct vivid_dev *)info->par;
+
+ switch (cmd) {
+ case FBIOGET_VBLANK: {
+ struct fb_vblank vblank;
+
+ memset(&vblank, 0, sizeof(vblank));
+ vblank.flags = FB_VBLANK_HAVE_COUNT | FB_VBLANK_HAVE_VCOUNT |
+ FB_VBLANK_HAVE_VSYNC;
+ vblank.count = 0;
+ vblank.vcount = 0;
+ vblank.hcount = 0;
+ if (copy_to_user((void __user *)arg, &vblank, sizeof(vblank)))
+ return -EFAULT;
+ return 0;
+ }
+
+ default:
+ dprintk(dev, 1, "Unknown ioctl %08x\n", cmd);
+ return -EINVAL;
+ }
+ return 0;
+}
+
+/* Framebuffer device handling */
+
+static int vivid_fb_set_var(struct vivid_dev *dev, struct fb_var_screeninfo *var)
+{
+ dprintk(dev, 1, "vivid_fb_set_var\n");
+
+ if (var->bits_per_pixel != 16) {
+ dprintk(dev, 1, "vivid_fb_set_var - Invalid bpp\n");
+ return -EINVAL;
+ }
+ dev->display_byte_stride = var->xres * dev->bytes_per_pixel;
+
+ return 0;
+}
+
+static int vivid_fb_get_fix(struct vivid_dev *dev, struct fb_fix_screeninfo *fix)
+{
+ dprintk(dev, 1, "vivid_fb_get_fix\n");
+ memset(fix, 0, sizeof(struct fb_fix_screeninfo));
+ strscpy(fix->id, "vioverlay fb", sizeof(fix->id));
+ fix->smem_start = dev->video_pbase;
+ fix->smem_len = dev->video_buffer_size;
+ fix->type = FB_TYPE_PACKED_PIXELS;
+ fix->visual = FB_VISUAL_TRUECOLOR;
+ fix->xpanstep = 1;
+ fix->ypanstep = 1;
+ fix->ywrapstep = 0;
+ fix->line_length = dev->display_byte_stride;
+ fix->accel = FB_ACCEL_NONE;
+ return 0;
+}
+
+/* Check the requested display mode, returning -EINVAL if we can't handle it. */
+
+static int _vivid_fb_check_var(struct fb_var_screeninfo *var, struct vivid_dev *dev)
+{
+ dprintk(dev, 1, "vivid_fb_check_var\n");
+
+ var->bits_per_pixel = 16;
+ if (var->green.length == 5) {
+ var->red.offset = 10;
+ var->red.length = 5;
+ var->green.offset = 5;
+ var->green.length = 5;
+ var->blue.offset = 0;
+ var->blue.length = 5;
+ var->transp.offset = 15;
+ var->transp.length = 1;
+ } else {
+ var->red.offset = 11;
+ var->red.length = 5;
+ var->green.offset = 5;
+ var->green.length = 6;
+ var->blue.offset = 0;
+ var->blue.length = 5;
+ var->transp.offset = 0;
+ var->transp.length = 0;
+ }
+ var->xoffset = var->yoffset = 0;
+ var->left_margin = var->upper_margin = 0;
+ var->nonstd = 0;
+
+ var->vmode &= ~FB_VMODE_MASK;
+ var->vmode |= FB_VMODE_NONINTERLACED;
+
+ /* Dummy values */
+ var->hsync_len = 24;
+ var->vsync_len = 2;
+ var->pixclock = 84316;
+ var->right_margin = 776;
+ var->lower_margin = 591;
+ return 0;
+}
+
+static int vivid_fb_check_var(struct fb_var_screeninfo *var, struct fb_info *info)
+{
+ struct vivid_dev *dev = (struct vivid_dev *) info->par;
+
+ dprintk(dev, 1, "vivid_fb_check_var\n");
+ return _vivid_fb_check_var(var, dev);
+}
+
+static int vivid_fb_pan_display(struct fb_var_screeninfo *var, struct fb_info *info)
+{
+ return 0;
+}
+
+static int vivid_fb_set_par(struct fb_info *info)
+{
+ int rc = 0;
+ struct vivid_dev *dev = (struct vivid_dev *) info->par;
+
+ dprintk(dev, 1, "vivid_fb_set_par\n");
+
+ rc = vivid_fb_set_var(dev, &info->var);
+ vivid_fb_get_fix(dev, &info->fix);
+ return rc;
+}
+
+static int vivid_fb_setcolreg(unsigned regno, unsigned red, unsigned green,
+ unsigned blue, unsigned transp,
+ struct fb_info *info)
+{
+ u32 color, *palette;
+
+ if (regno >= info->cmap.len)
+ return -EINVAL;
+
+ color = ((transp & 0xFF00) << 16) | ((red & 0xFF00) << 8) |
+ (green & 0xFF00) | ((blue & 0xFF00) >> 8);
+ if (regno >= 16)
+ return -EINVAL;
+
+ palette = info->pseudo_palette;
+ if (info->var.bits_per_pixel == 16) {
+ switch (info->var.green.length) {
+ case 6:
+ color = (red & 0xf800) |
+ ((green & 0xfc00) >> 5) |
+ ((blue & 0xf800) >> 11);
+ break;
+ case 5:
+ color = ((red & 0xf800) >> 1) |
+ ((green & 0xf800) >> 6) |
+ ((blue & 0xf800) >> 11) |
+ (transp ? 0x8000 : 0);
+ break;
+ }
+ }
+ palette[regno] = color;
+ return 0;
+}
+
+/* We don't really support blanking: every blank mode is accepted and
+ ignored, leaving the OSD untouched. */
+static int vivid_fb_blank(int blank_mode, struct fb_info *info)
+{
+ struct vivid_dev *dev = (struct vivid_dev *)info->par;
+
+ dprintk(dev, 1, "Set blanking mode : %d\n", blank_mode);
+ switch (blank_mode) {
+ case FB_BLANK_UNBLANK:
+ break;
+ case FB_BLANK_NORMAL:
+ case FB_BLANK_HSYNC_SUSPEND:
+ case FB_BLANK_VSYNC_SUSPEND:
+ case FB_BLANK_POWERDOWN:
+ break;
+ }
+ return 0;
+}
+
+static const struct fb_ops vivid_fb_ops = {
+ .owner = THIS_MODULE,
+ .fb_check_var = vivid_fb_check_var,
+ .fb_set_par = vivid_fb_set_par,
+ .fb_setcolreg = vivid_fb_setcolreg,
+ .fb_fillrect = cfb_fillrect,
+ .fb_copyarea = cfb_copyarea,
+ .fb_imageblit = cfb_imageblit,
+ .fb_cursor = NULL,
+ .fb_ioctl = vivid_fb_ioctl,
+ .fb_pan_display = vivid_fb_pan_display,
+ .fb_blank = vivid_fb_blank,
+};
+
+/* Initialization */
+
+
+/* Setup our initial video mode */
+static int vivid_fb_init_vidmode(struct vivid_dev *dev)
+{
+ struct v4l2_rect start_window;
+
+ /* Color mode */
+
+ dev->bits_per_pixel = 16;
+ dev->bytes_per_pixel = dev->bits_per_pixel / 8;
+
+ start_window.width = MAX_OSD_WIDTH;
+ start_window.left = 0;
+
+ dev->display_byte_stride = start_window.width * dev->bytes_per_pixel;
+
+ /* Vertical size & position */
+
+ start_window.height = MAX_OSD_HEIGHT;
+ start_window.top = 0;
+
+ dev->display_width = start_window.width;
+ dev->display_height = start_window.height;
+
+ /* Generate a valid fb_var_screeninfo */
+
+ dev->fb_defined.xres = dev->display_width;
+ dev->fb_defined.yres = dev->display_height;
+ dev->fb_defined.xres_virtual = dev->display_width;
+ dev->fb_defined.yres_virtual = dev->display_height;
+ dev->fb_defined.bits_per_pixel = dev->bits_per_pixel;
+ dev->fb_defined.vmode = FB_VMODE_NONINTERLACED;
+ dev->fb_defined.left_margin = start_window.left + 1;
+ dev->fb_defined.upper_margin = start_window.top + 1;
+ dev->fb_defined.accel_flags = FB_ACCEL_NONE;
+ dev->fb_defined.nonstd = 0;
+ /* set default to 1:5:5:5 */
+ dev->fb_defined.green.length = 5;
+
+ /* We've filled in most of the data, let the usual mode check
+ routine fill in the rest. */
+ _vivid_fb_check_var(&dev->fb_defined, dev);
+
+ /* Generate valid fb_fix_screeninfo */
+
+ vivid_fb_get_fix(dev, &dev->fb_fix);
+
+ /* Generate valid fb_info */
+
+ dev->fb_info.node = -1;
+ dev->fb_info.flags = FBINFO_FLAG_DEFAULT;
+ dev->fb_info.par = dev;
+ dev->fb_info.var = dev->fb_defined;
+ dev->fb_info.fix = dev->fb_fix;
+ dev->fb_info.screen_base = (u8 __iomem *)dev->video_vbase;
+ dev->fb_info.fbops = &vivid_fb_ops;
+
+ /* Supply some monitor specs. Bogus values will do for now */
+ dev->fb_info.monspecs.hfmin = 8000;
+ dev->fb_info.monspecs.hfmax = 70000;
+ dev->fb_info.monspecs.vfmin = 10;
+ dev->fb_info.monspecs.vfmax = 100;
+
+ /* Allocate color map */
+ if (fb_alloc_cmap(&dev->fb_info.cmap, 256, 1)) {
+ pr_err("abort, unable to alloc cmap\n");
+ return -ENOMEM;
+ }
+
+ /* Allocate the pseudo palette */
+ dev->fb_info.pseudo_palette = kmalloc_array(16, sizeof(u32), GFP_KERNEL);
+
+ return dev->fb_info.pseudo_palette ? 0 : -ENOMEM;
+}
+
+/* Release any memory we've grabbed */
+void vivid_fb_release_buffers(struct vivid_dev *dev)
+{
+ if (dev->video_vbase == NULL)
+ return;
+
+ /* Release cmap */
+ if (dev->fb_info.cmap.len)
+ fb_dealloc_cmap(&dev->fb_info.cmap);
+
+ /* Release pseudo palette */
+ kfree(dev->fb_info.pseudo_palette);
+ kfree(dev->video_vbase);
+}
+
+/* Initialize the specified card */
+
+int vivid_fb_init(struct vivid_dev *dev)
+{
+ int ret;
+
+ dev->video_buffer_size = MAX_OSD_HEIGHT * MAX_OSD_WIDTH * 2;
+ dev->video_vbase = kzalloc(dev->video_buffer_size, GFP_KERNEL);
+ if (dev->video_vbase == NULL)
+ return -ENOMEM;
+ dev->video_pbase = virt_to_phys(dev->video_vbase);
+
+ pr_info("Framebuffer at 0x%lx, mapped to 0x%p, size %dk\n",
+ dev->video_pbase, dev->video_vbase,
+ dev->video_buffer_size / 1024);
+
+ /* Set the startup video mode information */
+ ret = vivid_fb_init_vidmode(dev);
+ if (ret) {
+ vivid_fb_release_buffers(dev);
+ return ret;
+ }
+
+ vivid_clear_fb(dev);
+
+ /* Register the framebuffer */
+ if (register_framebuffer(&dev->fb_info) < 0) {
+ vivid_fb_release_buffers(dev);
+ return -EINVAL;
+ }
+
+ /* Set the card to the requested mode */
+ vivid_fb_set_par(&dev->fb_info);
+ return 0;
+
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-osd.h b/drivers/media/test-drivers/vivid/vivid-osd.h
new file mode 100644
index 000000000..f9ac1af25
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-osd.h
@@ -0,0 +1,15 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-osd.h - output overlay support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_OSD_H_
+#define _VIVID_OSD_H_
+
+int vivid_fb_init(struct vivid_dev *dev);
+void vivid_fb_release_buffers(struct vivid_dev *dev);
+void vivid_clear_fb(struct vivid_dev *dev);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-common.c b/drivers/media/test-drivers/vivid/vivid-radio-common.c
new file mode 100644
index 000000000..138c7bce6
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-common.c
@@ -0,0 +1,177 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-radio-common.c - common radio rx/tx support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/delay.h>
+#include <linux/videodev2.h>
+
+#include "vivid-core.h"
+#include "vivid-ctrls.h"
+#include "vivid-radio-common.h"
+#include "vivid-rds-gen.h"
+
+/*
+ * These functions are shared between the vivid receiver and transmitter
+ * since both use the same frequency bands.
+ */
+
+const struct v4l2_frequency_band vivid_radio_bands[TOT_BANDS] = {
+ /* Band FM */
+ {
+ .type = V4L2_TUNER_RADIO,
+ .index = 0,
+ .capability = V4L2_TUNER_CAP_LOW | V4L2_TUNER_CAP_STEREO |
+ V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = FM_FREQ_RANGE_LOW,
+ .rangehigh = FM_FREQ_RANGE_HIGH,
+ .modulation = V4L2_BAND_MODULATION_FM,
+ },
+ /* Band AM */
+ {
+ .type = V4L2_TUNER_RADIO,
+ .index = 1,
+ .capability = V4L2_TUNER_CAP_LOW | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = AM_FREQ_RANGE_LOW,
+ .rangehigh = AM_FREQ_RANGE_HIGH,
+ .modulation = V4L2_BAND_MODULATION_AM,
+ },
+ /* Band SW */
+ {
+ .type = V4L2_TUNER_RADIO,
+ .index = 2,
+ .capability = V4L2_TUNER_CAP_LOW | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = SW_FREQ_RANGE_LOW,
+ .rangehigh = SW_FREQ_RANGE_HIGH,
+ .modulation = V4L2_BAND_MODULATION_AM,
+ },
+};
+
+/*
+ * Initialize the RDS generator. If we can loop, then the RDS generator
+ * is set up with the values from the RDS TX controls, otherwise it
+ * will fill in standard values using one of two alternates.
+ */
+void vivid_radio_rds_init(struct vivid_dev *dev)
+{
+ struct vivid_rds_gen *rds = &dev->rds_gen;
+ bool alt = dev->radio_rx_rds_use_alternates;
+
+ /* Do nothing, blocks will be filled by the transmitter */
+ if (dev->radio_rds_loop && !dev->radio_tx_rds_controls)
+ return;
+
+ if (dev->radio_rds_loop) {
+ v4l2_ctrl_lock(dev->radio_tx_rds_pi);
+ rds->picode = dev->radio_tx_rds_pi->cur.val;
+ rds->pty = dev->radio_tx_rds_pty->cur.val;
+ rds->mono_stereo = dev->radio_tx_rds_mono_stereo->cur.val;
+ rds->art_head = dev->radio_tx_rds_art_head->cur.val;
+ rds->compressed = dev->radio_tx_rds_compressed->cur.val;
+ rds->dyn_pty = dev->radio_tx_rds_dyn_pty->cur.val;
+ rds->ta = dev->radio_tx_rds_ta->cur.val;
+ rds->tp = dev->radio_tx_rds_tp->cur.val;
+ rds->ms = dev->radio_tx_rds_ms->cur.val;
+ strscpy(rds->psname,
+ dev->radio_tx_rds_psname->p_cur.p_char,
+ sizeof(rds->psname));
+ strscpy(rds->radiotext,
+ dev->radio_tx_rds_radiotext->p_cur.p_char + alt * 64,
+ sizeof(rds->radiotext));
+ v4l2_ctrl_unlock(dev->radio_tx_rds_pi);
+ } else {
+ vivid_rds_gen_fill(rds, dev->radio_rx_freq, alt);
+ }
+ if (dev->radio_rx_rds_controls) {
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_pty, rds->pty);
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ta, rds->ta);
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_tp, rds->tp);
+ v4l2_ctrl_s_ctrl(dev->radio_rx_rds_ms, rds->ms);
+ v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_psname, rds->psname);
+ v4l2_ctrl_s_ctrl_string(dev->radio_rx_rds_radiotext, rds->radiotext);
+ if (!dev->radio_rds_loop)
+ dev->radio_rx_rds_use_alternates = !dev->radio_rx_rds_use_alternates;
+ }
+ vivid_rds_generate(rds);
+}
+
+/*
+ * Calculate the emulated signal quality taking into account the frequency
+ * the transmitter is using.
+ */
+static void vivid_radio_calc_sig_qual(struct vivid_dev *dev)
+{
+ int mod = 16000;
+ int delta = 800;
+ int sig_qual, sig_qual_tx = mod;
+
+ /*
+ * For SW and FM there is a channel every 1000 kHz, for AM there is one
+ * every 100 kHz.
+ */
+ if (dev->radio_rx_freq <= AM_FREQ_RANGE_HIGH) {
+ mod /= 10;
+ delta /= 10;
+ }
+ sig_qual = (dev->radio_rx_freq + delta) % mod - delta;
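+ /*
+ * e.g. an FM frequency exactly on a 1 MHz multiple gives 0 (best
+ * quality); one that is 50 kHz (800 units of 62.5 Hz) off-channel
+ * gives +-800, the edge of usable reception.
+ */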
+ if (dev->has_radio_tx)
+ sig_qual_tx = dev->radio_rx_freq - dev->radio_tx_freq;
+ if (abs(sig_qual_tx) <= abs(sig_qual)) {
+ sig_qual = sig_qual_tx;
+ /*
+ * Zero the internal rds buffer if we are going to loop
+ * rds blocks.
+ */
+ if (!dev->radio_rds_loop && !dev->radio_tx_rds_controls)
+ memset(dev->rds_gen.data, 0,
+ sizeof(dev->rds_gen.data));
+ dev->radio_rds_loop = dev->radio_rx_freq >= FM_FREQ_RANGE_LOW;
+ } else {
+ dev->radio_rds_loop = false;
+ }
+ if (dev->radio_rx_freq <= AM_FREQ_RANGE_HIGH)
+ sig_qual *= 10;
+ dev->radio_rx_sig_qual = sig_qual;
+}
+
+int vivid_radio_g_frequency(struct file *file, const unsigned *pfreq, struct v4l2_frequency *vf)
+{
+ if (vf->tuner != 0)
+ return -EINVAL;
+ vf->frequency = *pfreq;
+ return 0;
+}
+
+int vivid_radio_s_frequency(struct file *file, unsigned *pfreq, const struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned freq;
+ unsigned band;
+
+ if (vf->tuner != 0)
+ return -EINVAL;
+
+ if (vf->frequency >= (FM_FREQ_RANGE_LOW + SW_FREQ_RANGE_HIGH) / 2)
+ band = BAND_FM;
+ else if (vf->frequency <= (AM_FREQ_RANGE_HIGH + SW_FREQ_RANGE_LOW) / 2)
+ band = BAND_AM;
+ else
+ band = BAND_SW;
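+ /*
+ * i.e. requests at or above the SW/FM midpoint of 45050 kHz select FM,
+ * those at or below the AM/SW midpoint of 2005 kHz select AM, the rest SW.
+ */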
+
+ freq = clamp_t(u32, vf->frequency, vivid_radio_bands[band].rangelow,
+ vivid_radio_bands[band].rangehigh);
+ *pfreq = freq;
+
+ /*
+ * For both receiver and transmitter recalculate the signal quality
+ * (since that depends on both frequencies) and re-init the rds
+ * generator.
+ */
+ vivid_radio_calc_sig_qual(dev);
+ vivid_radio_rds_init(dev);
+ return 0;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-common.h b/drivers/media/test-drivers/vivid/vivid-radio-common.h
new file mode 100644
index 000000000..30a9900e5
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-common.h
@@ -0,0 +1,28 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-radio-common.h - common radio rx/tx support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_RADIO_COMMON_H_
+#define _VIVID_RADIO_COMMON_H_
+
+/* The supported radio frequency ranges, in units of 62.5 Hz (kHz * 16) */
+#define FM_FREQ_RANGE_LOW (64000U * 16U)
+#define FM_FREQ_RANGE_HIGH (108000U * 16U)
+#define AM_FREQ_RANGE_LOW (520U * 16U)
+#define AM_FREQ_RANGE_HIGH (1710U * 16U)
+#define SW_FREQ_RANGE_LOW (2300U * 16U)
+#define SW_FREQ_RANGE_HIGH (26100U * 16U)
+
+enum { BAND_FM, BAND_AM, BAND_SW, TOT_BANDS };
+
+extern const struct v4l2_frequency_band vivid_radio_bands[TOT_BANDS];
+
+int vivid_radio_g_frequency(struct file *file, const unsigned *freq, struct v4l2_frequency *vf);
+int vivid_radio_s_frequency(struct file *file, unsigned *freq, const struct v4l2_frequency *vf);
+
+void vivid_radio_rds_init(struct vivid_dev *dev);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-rx.c b/drivers/media/test-drivers/vivid/vivid-radio-rx.c
new file mode 100644
index 000000000..8bd09589f
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-rx.c
@@ -0,0 +1,278 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-radio-rx.c - radio receiver support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/delay.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <linux/sched/signal.h>
+
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+
+#include "vivid-core.h"
+#include "vivid-ctrls.h"
+#include "vivid-radio-common.h"
+#include "vivid-rds-gen.h"
+#include "vivid-radio-rx.h"
+
+ssize_t vivid_radio_rx_read(struct file *file, char __user *buf,
+ size_t size, loff_t *offset)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rds_data *data = dev->rds_gen.data;
+ bool use_alternates;
+ ktime_t timestamp;
+ unsigned blk;
+ int perc;
+ int i;
+
+ if (dev->radio_rx_rds_controls)
+ return -EINVAL;
+ if (size < sizeof(*data))
+ return 0;
+ size = sizeof(*data) * (size / sizeof(*data));
+
+ if (mutex_lock_interruptible(&dev->mutex))
+ return -ERESTARTSYS;
+ if (dev->radio_rx_rds_owner &&
+ file->private_data != dev->radio_rx_rds_owner) {
+ mutex_unlock(&dev->mutex);
+ return -EBUSY;
+ }
+ if (dev->radio_rx_rds_owner == NULL) {
+ vivid_radio_rds_init(dev);
+ dev->radio_rx_rds_owner = file->private_data;
+ }
+
+retry:
+ timestamp = ktime_sub(ktime_get(), dev->radio_rds_init_time);
+ blk = ktime_divns(timestamp, VIVID_RDS_NSEC_PER_BLK);
+ use_alternates = (blk % VIVID_RDS_GEN_BLOCKS) & 1;
+
+ if (dev->radio_rx_rds_last_block == 0 ||
+ dev->radio_rx_rds_use_alternates != use_alternates) {
+ dev->radio_rx_rds_use_alternates = use_alternates;
+ /* Re-init the RDS generator */
+ vivid_radio_rds_init(dev);
+ }
+ if (blk >= dev->radio_rx_rds_last_block + VIVID_RDS_GEN_BLOCKS)
+ dev->radio_rx_rds_last_block = blk - VIVID_RDS_GEN_BLOCKS + 1;
+
+ /*
+ * No data is available if there hasn't been time to get new data,
+ * or if the RDS receiver has been disabled, or if we use the data
+ * from the RDS transmitter and that RDS transmitter has been disabled,
+ * or if the signal quality is too weak.
+ */
+ if (blk == dev->radio_rx_rds_last_block || !dev->radio_rx_rds_enabled ||
+ (dev->radio_rds_loop && !(dev->radio_tx_subchans & V4L2_TUNER_SUB_RDS)) ||
+ abs(dev->radio_rx_sig_qual) > 200) {
+ mutex_unlock(&dev->mutex);
+ if (file->f_flags & O_NONBLOCK)
+ return -EWOULDBLOCK;
+ if (msleep_interruptible(20) && signal_pending(current))
+ return -EINTR;
+ if (mutex_lock_interruptible(&dev->mutex))
+ return -ERESTARTSYS;
+ goto retry;
+ }
+
+ /* abs(dev->radio_rx_sig_qual) <= 200, map that to a 0-50% range */
+ perc = abs(dev->radio_rx_sig_qual) / 4;
+
+ for (i = 0; i < size && blk > dev->radio_rx_rds_last_block;
+ dev->radio_rx_rds_last_block++) {
+ unsigned data_blk = dev->radio_rx_rds_last_block % VIVID_RDS_GEN_BLOCKS;
+ struct v4l2_rds_data rds = data[data_blk];
+
+ if (data_blk == 0 && dev->radio_rds_loop)
+ vivid_radio_rds_init(dev);
+ if (perc && prandom_u32_max(100) < perc) {
+ switch (prandom_u32_max(4)) {
+ case 0:
+ rds.block |= V4L2_RDS_BLOCK_CORRECTED;
+ break;
+ case 1:
+ rds.block |= V4L2_RDS_BLOCK_INVALID;
+ break;
+ case 2:
+ rds.block |= V4L2_RDS_BLOCK_ERROR;
+ rds.lsb = get_random_u8();
+ rds.msb = get_random_u8();
+ break;
+ case 3: /* Skip block altogether */
+ if (i)
+ continue;
+ /*
+ * Must make sure at least one block is
+ * returned, otherwise the application
+ * might think that end-of-file occurred.
+ */
+ break;
+ }
+ }
+ if (copy_to_user(buf + i, &rds, sizeof(rds))) {
+ i = -EFAULT;
+ break;
+ }
+ i += sizeof(rds);
+ }
+ mutex_unlock(&dev->mutex);
+ return i;
+}
+
+__poll_t vivid_radio_rx_poll(struct file *file, struct poll_table_struct *wait)
+{
+ return EPOLLIN | EPOLLRDNORM | v4l2_ctrl_poll(file, wait);
+}
+
+int vivid_radio_rx_enum_freq_bands(struct file *file, void *fh, struct v4l2_frequency_band *band)
+{
+ if (band->tuner != 0)
+ return -EINVAL;
+
+ if (band->index >= TOT_BANDS)
+ return -EINVAL;
+
+ *band = vivid_radio_bands[band->index];
+ return 0;
+}
+
+int vivid_radio_rx_s_hw_freq_seek(struct file *file, void *fh, const struct v4l2_hw_freq_seek *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned low, high;
+ unsigned freq;
+ unsigned spacing;
+ unsigned band;
+
+ if (a->tuner)
+ return -EINVAL;
+ if (a->wrap_around && dev->radio_rx_hw_seek_mode == VIVID_HW_SEEK_BOUNDED)
+ return -EINVAL;
+
+ if (!a->wrap_around && dev->radio_rx_hw_seek_mode == VIVID_HW_SEEK_WRAP)
+ return -EINVAL;
+ if (!a->rangelow ^ !a->rangehigh)
+ return -EINVAL;
+
+ if (file->f_flags & O_NONBLOCK)
+ return -EWOULDBLOCK;
+
+ if (a->rangelow) {
+ for (band = 0; band < TOT_BANDS; band++)
+ if (a->rangelow >= vivid_radio_bands[band].rangelow &&
+ a->rangehigh <= vivid_radio_bands[band].rangehigh)
+ break;
+ if (band == TOT_BANDS)
+ return -EINVAL;
+ if (!dev->radio_rx_hw_seek_prog_lim &&
+ (a->rangelow != vivid_radio_bands[band].rangelow ||
+ a->rangehigh != vivid_radio_bands[band].rangehigh))
+ return -EINVAL;
+ low = a->rangelow;
+ high = a->rangehigh;
+ } else {
+ for (band = 0; band < TOT_BANDS; band++)
+ if (dev->radio_rx_freq >= vivid_radio_bands[band].rangelow &&
+ dev->radio_rx_freq <= vivid_radio_bands[band].rangehigh)
+ break;
+ if (band == TOT_BANDS)
+ return -EINVAL;
+ low = vivid_radio_bands[band].rangelow;
+ high = vivid_radio_bands[band].rangehigh;
+ }
+ spacing = band == BAND_AM ? 1600 : 16000;
+ freq = clamp(dev->radio_rx_freq, low, high);
+
+ if (a->seek_upward) {
+ freq = spacing * (freq / spacing) + spacing;
+ if (freq > high) {
+ if (!a->wrap_around)
+ return -ENODATA;
+ freq = spacing * (low / spacing) + spacing;
+ if (freq >= dev->radio_rx_freq)
+ return -ENODATA;
+ }
+ } else {
+ freq = spacing * ((freq + spacing - 1) / spacing) - spacing;
+ if (freq < low) {
+ if (!a->wrap_around)
+ return -ENODATA;
+ freq = spacing * ((high + spacing - 1) / spacing) - spacing;
+ if (freq <= dev->radio_rx_freq)
+ return -ENODATA;
+ }
+ }
+ return 0;
+}
+
+int vivid_radio_rx_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ int delta = 800;
+ int sig_qual;
+
+ if (vt->index > 0)
+ return -EINVAL;
+
+ strscpy(vt->name, "AM/FM/SW Receiver", sizeof(vt->name));
+ vt->capability = V4L2_TUNER_CAP_LOW | V4L2_TUNER_CAP_STEREO |
+ V4L2_TUNER_CAP_FREQ_BANDS | V4L2_TUNER_CAP_RDS |
+ (dev->radio_rx_rds_controls ?
+ V4L2_TUNER_CAP_RDS_CONTROLS :
+ V4L2_TUNER_CAP_RDS_BLOCK_IO) |
+ (dev->radio_rx_hw_seek_prog_lim ?
+ V4L2_TUNER_CAP_HWSEEK_PROG_LIM : 0);
+ switch (dev->radio_rx_hw_seek_mode) {
+ case VIVID_HW_SEEK_BOUNDED:
+ vt->capability |= V4L2_TUNER_CAP_HWSEEK_BOUNDED;
+ break;
+ case VIVID_HW_SEEK_WRAP:
+ vt->capability |= V4L2_TUNER_CAP_HWSEEK_WRAP;
+ break;
+ case VIVID_HW_SEEK_BOTH:
+ vt->capability |= V4L2_TUNER_CAP_HWSEEK_WRAP |
+ V4L2_TUNER_CAP_HWSEEK_BOUNDED;
+ break;
+ }
+ vt->rangelow = AM_FREQ_RANGE_LOW;
+ vt->rangehigh = FM_FREQ_RANGE_HIGH;
+ sig_qual = dev->radio_rx_sig_qual;
+ vt->signal = abs(sig_qual) > delta ? 0 :
+ 0xffff - ((unsigned)abs(sig_qual) * 0xffff) / delta;
+ vt->afc = sig_qual > delta ? 0 : sig_qual;
+ if (abs(sig_qual) > delta)
+ vt->rxsubchans = 0;
+ else if (dev->radio_rx_freq < FM_FREQ_RANGE_LOW || vt->signal < 0x8000)
+ vt->rxsubchans = V4L2_TUNER_SUB_MONO;
+ else if (dev->radio_rds_loop && !(dev->radio_tx_subchans & V4L2_TUNER_SUB_STEREO))
+ vt->rxsubchans = V4L2_TUNER_SUB_MONO;
+ else
+ vt->rxsubchans = V4L2_TUNER_SUB_STEREO;
+ if (dev->radio_rx_rds_enabled &&
+ (!dev->radio_rds_loop || (dev->radio_tx_subchans & V4L2_TUNER_SUB_RDS)) &&
+ dev->radio_rx_freq >= FM_FREQ_RANGE_LOW && vt->signal >= 0xc000)
+ vt->rxsubchans |= V4L2_TUNER_SUB_RDS;
+ if (dev->radio_rx_rds_controls)
+ vivid_radio_rds_init(dev);
+ vt->audmode = dev->radio_rx_audmode;
+ return 0;
+}
+
+int vivid_radio_rx_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (vt->index)
+ return -EINVAL;
+ dev->radio_rx_audmode = vt->audmode >= V4L2_TUNER_MODE_STEREO;
+ return 0;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-rx.h b/drivers/media/test-drivers/vivid/vivid-radio-rx.h
new file mode 100644
index 000000000..c9c7849f6
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-rx.h
@@ -0,0 +1,19 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-radio-rx.h - radio receiver support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_RADIO_RX_H_
+#define _VIVID_RADIO_RX_H_
+
+ssize_t vivid_radio_rx_read(struct file *, char __user *, size_t, loff_t *);
+__poll_t vivid_radio_rx_poll(struct file *file, struct poll_table_struct *wait);
+
+int vivid_radio_rx_enum_freq_bands(struct file *file, void *fh, struct v4l2_frequency_band *band);
+int vivid_radio_rx_s_hw_freq_seek(struct file *file, void *fh, const struct v4l2_hw_freq_seek *a);
+int vivid_radio_rx_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt);
+int vivid_radio_rx_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-tx.c b/drivers/media/test-drivers/vivid/vivid-radio-tx.c
new file mode 100644
index 000000000..049d40b94
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-tx.c
@@ -0,0 +1,128 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-radio-tx.c - radio transmitter support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/sched/signal.h>
+#include <linux/delay.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+
+#include "vivid-core.h"
+#include "vivid-ctrls.h"
+#include "vivid-radio-common.h"
+#include "vivid-radio-tx.h"
+
+ssize_t vivid_radio_tx_write(struct file *file, const char __user *buf,
+ size_t size, loff_t *offset)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rds_data *data = dev->rds_gen.data;
+ ktime_t timestamp;
+ unsigned blk;
+ int i;
+
+ if (dev->radio_tx_rds_controls)
+ return -EINVAL;
+
+ if (size < sizeof(*data))
+ return -EINVAL;
+ size = sizeof(*data) * (size / sizeof(*data));
+
+ if (mutex_lock_interruptible(&dev->mutex))
+ return -ERESTARTSYS;
+ if (dev->radio_tx_rds_owner &&
+ file->private_data != dev->radio_tx_rds_owner) {
+ mutex_unlock(&dev->mutex);
+ return -EBUSY;
+ }
+ dev->radio_tx_rds_owner = file->private_data;
+
+retry:
+ timestamp = ktime_sub(ktime_get(), dev->radio_rds_init_time);
+ blk = ktime_divns(timestamp, VIVID_RDS_NSEC_PER_BLK);
+ if (blk - VIVID_RDS_GEN_BLOCKS >= dev->radio_tx_rds_last_block)
+ dev->radio_tx_rds_last_block = blk - VIVID_RDS_GEN_BLOCKS + 1;
+
+ /*
+ * No data can be written if there hasn't been time to transmit the
+ * previous data, or if the RDS transmitter has been disabled.
+ */
+ if (blk == dev->radio_tx_rds_last_block ||
+ !(dev->radio_tx_subchans & V4L2_TUNER_SUB_RDS)) {
+ mutex_unlock(&dev->mutex);
+ if (file->f_flags & O_NONBLOCK)
+ return -EWOULDBLOCK;
+ if (msleep_interruptible(20) && signal_pending(current))
+ return -EINTR;
+ if (mutex_lock_interruptible(&dev->mutex))
+ return -ERESTARTSYS;
+ goto retry;
+ }
+
+ for (i = 0; i < size && blk > dev->radio_tx_rds_last_block;
+ dev->radio_tx_rds_last_block++) {
+ unsigned data_blk = dev->radio_tx_rds_last_block % VIVID_RDS_GEN_BLOCKS;
+ struct v4l2_rds_data rds;
+
+ if (copy_from_user(&rds, buf + i, sizeof(rds))) {
+ i = -EFAULT;
+ break;
+ }
+ i += sizeof(rds);
+ if (!dev->radio_rds_loop)
+ continue;
+ if ((rds.block & V4L2_RDS_BLOCK_MSK) == V4L2_RDS_BLOCK_INVALID ||
+ (rds.block & V4L2_RDS_BLOCK_ERROR))
+ continue;
+ rds.block &= V4L2_RDS_BLOCK_MSK;
+ data[data_blk] = rds;
+ }
+ mutex_unlock(&dev->mutex);
+ return i;
+}
+
+__poll_t vivid_radio_tx_poll(struct file *file, struct poll_table_struct *wait)
+{
+ return EPOLLOUT | EPOLLWRNORM | v4l2_ctrl_poll(file, wait);
+}
+
+int vidioc_g_modulator(struct file *file, void *fh, struct v4l2_modulator *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (a->index > 0)
+ return -EINVAL;
+
+ strscpy(a->name, "AM/FM/SW Transmitter", sizeof(a->name));
+ a->capability = V4L2_TUNER_CAP_LOW | V4L2_TUNER_CAP_STEREO |
+ V4L2_TUNER_CAP_FREQ_BANDS | V4L2_TUNER_CAP_RDS |
+ (dev->radio_tx_rds_controls ?
+ V4L2_TUNER_CAP_RDS_CONTROLS :
+ V4L2_TUNER_CAP_RDS_BLOCK_IO);
+ a->rangelow = AM_FREQ_RANGE_LOW;
+ a->rangehigh = FM_FREQ_RANGE_HIGH;
+ a->txsubchans = dev->radio_tx_subchans;
+ return 0;
+}
+
+int vidioc_s_modulator(struct file *file, void *fh, const struct v4l2_modulator *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (a->index)
+ return -EINVAL;
+ if (a->txsubchans & ~0x13)
+ return -EINVAL;
+ dev->radio_tx_subchans = a->txsubchans;
+ return 0;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-radio-tx.h b/drivers/media/test-drivers/vivid/vivid-radio-tx.h
new file mode 100644
index 000000000..c2bf1e7e6
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-radio-tx.h
@@ -0,0 +1,17 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-radio-tx.h - radio transmitter support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_RADIO_TX_H_
+#define _VIVID_RADIO_TX_H_
+
+ssize_t vivid_radio_tx_write(struct file *, const char __user *, size_t, loff_t *);
+__poll_t vivid_radio_tx_poll(struct file *file, struct poll_table_struct *wait);
+
+int vidioc_g_modulator(struct file *file, void *fh, struct v4l2_modulator *a);
+int vidioc_s_modulator(struct file *file, void *fh, const struct v4l2_modulator *a);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-rds-gen.c b/drivers/media/test-drivers/vivid/vivid-rds-gen.c
new file mode 100644
index 000000000..c57771119
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-rds-gen.c
@@ -0,0 +1,157 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-rds-gen.c - rds (radio data system) generator support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/kernel.h>
+#include <linux/ktime.h>
+#include <linux/string.h>
+#include <linux/videodev2.h>
+
+#include "vivid-rds-gen.h"
+
+static u8 vivid_get_di(const struct vivid_rds_gen *rds, unsigned grp)
+{
+ switch (grp) {
+ case 0:
+ return (rds->dyn_pty << 2) | (grp & 3);
+ case 1:
+ return (rds->compressed << 2) | (grp & 3);
+ case 2:
+ return (rds->art_head << 2) | (grp & 3);
+ case 3:
+ return (rds->mono_stereo << 2) | (grp & 3);
+ }
+ return 0;
+}
+
+/*
+ * This RDS generator creates 57 RDS groups (one group == four RDS blocks).
+ * Groups 0-3, 22-25 and 44-47 (spaced 22 groups apart) are filled with a
+ * standard 0B group containing the PI code and PS name.
+ *
+ * Groups 4-19 and 26-41 use group 2A for the radio text.
+ *
+ * Group 56 contains the time (group 4A).
+ *
+ * All remaining groups use a filler group 15B block that just repeats
+ * the PI and PTY codes.
+ */
+void vivid_rds_generate(struct vivid_rds_gen *rds)
+{
+ struct v4l2_rds_data *data = rds->data;
+ unsigned grp;
+ unsigned idx;
+ struct tm tm;
+ unsigned date;
+ unsigned time;
+ int l;
+
+ for (grp = 0; grp < VIVID_RDS_GEN_GROUPS; grp++, data += VIVID_RDS_GEN_BLKS_PER_GRP) {
+ data[0].lsb = rds->picode & 0xff;
+ data[0].msb = rds->picode >> 8;
+ data[0].block = V4L2_RDS_BLOCK_A | (V4L2_RDS_BLOCK_A << 3);
+ data[1].lsb = rds->pty << 5;
+ data[1].msb = (rds->pty >> 3) | (rds->tp << 2);
+ data[1].block = V4L2_RDS_BLOCK_B | (V4L2_RDS_BLOCK_B << 3);
+ data[3].block = V4L2_RDS_BLOCK_D | (V4L2_RDS_BLOCK_D << 3);
+
+ switch (grp) {
+ case 0 ... 3:
+ case 22 ... 25:
+ case 44 ... 47: /* Group 0B */
+ idx = (grp % 22) % 4;
+ data[1].lsb |= (rds->ta << 4) | (rds->ms << 3);
+ data[1].lsb |= vivid_get_di(rds, idx);
+ data[1].msb |= 1 << 3;
+ data[2].lsb = rds->picode & 0xff;
+ data[2].msb = rds->picode >> 8;
+ data[2].block = V4L2_RDS_BLOCK_C_ALT | (V4L2_RDS_BLOCK_C_ALT << 3);
+ data[3].lsb = rds->psname[2 * idx + 1];
+ data[3].msb = rds->psname[2 * idx];
+ break;
+ case 4 ... 19:
+ case 26 ... 41: /* Group 2A */
+ idx = ((grp - 4) % 22) % 16;
+ data[1].lsb |= idx;
+ data[1].msb |= 4 << 3;
+ data[2].msb = rds->radiotext[4 * idx];
+ data[2].lsb = rds->radiotext[4 * idx + 1];
+ data[2].block = V4L2_RDS_BLOCK_C | (V4L2_RDS_BLOCK_C << 3);
+ data[3].msb = rds->radiotext[4 * idx + 2];
+ data[3].lsb = rds->radiotext[4 * idx + 3];
+ break;
+ case 56:
+ /*
+ * Group 4A
+ *
+ * Uses the algorithm from Annex G of the RDS standard
+ * EN 50067:1998 to convert a UTC date to an RDS Modified
+ * Julian Day.
+ */
+ time64_to_tm(ktime_get_real_seconds(), 0, &tm);
+ l = tm.tm_mon <= 1;
+ date = 14956 + tm.tm_mday + ((tm.tm_year - l) * 1461) / 4 +
+ ((tm.tm_mon + 2 + l * 12) * 306001) / 10000;
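+ /*
+ * e.g. 1 Jan 2000 (tm_year 100, tm_mon 0, tm_mday 1) gives
+ * 14956 + 1 + 36159 + 428 = 51544, the Modified Julian Day
+ * of that date.
+ */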
+ time = (tm.tm_hour << 12) |
+ (tm.tm_min << 6) |
+ (sys_tz.tz_minuteswest >= 0 ? 0x20 : 0) |
+ (abs(sys_tz.tz_minuteswest) / 30);
+ data[1].lsb &= ~3;
+ data[1].lsb |= date >> 15;
+ data[1].msb |= 8 << 3;
+ data[2].lsb = (date << 1) & 0xfe;
+ data[2].lsb |= (time >> 16) & 1;
+ data[2].msb = (date >> 7) & 0xff;
+ data[2].block = V4L2_RDS_BLOCK_C | (V4L2_RDS_BLOCK_C << 3);
+ data[3].lsb = time & 0xff;
+ data[3].msb = (time >> 8) & 0xff;
+ break;
+ default: /* Group 15B */
+ data[1].lsb |= (rds->ta << 4) | (rds->ms << 3);
+ data[1].lsb |= vivid_get_di(rds, grp % 22);
+ data[1].msb |= 0x1f << 3;
+ data[2].lsb = rds->picode & 0xff;
+ data[2].msb = rds->picode >> 8;
+ data[2].block = V4L2_RDS_BLOCK_C_ALT | (V4L2_RDS_BLOCK_C_ALT << 3);
+ data[3].lsb = rds->pty << 5;
+ data[3].lsb |= (rds->ta << 4) | (rds->ms << 3);
+ data[3].lsb |= vivid_get_di(rds, grp % 22);
+ data[3].msb |= rds->pty >> 3;
+ data[3].msb |= 0x1f << 3;
+ break;
+ }
+ }
+}
+
+void vivid_rds_gen_fill(struct vivid_rds_gen *rds, unsigned freq,
+ bool alt)
+{
+ /* Alternate PTY between Info and Weather */
+ if (rds->use_rbds) {
+ rds->picode = 0x2e75; /* 'KLNX' call sign */
+ rds->pty = alt ? 29 : 2;
+ } else {
+ rds->picode = 0x8088;
+ rds->pty = alt ? 16 : 3;
+ }
+ rds->mono_stereo = true;
+ rds->art_head = false;
+ rds->compressed = false;
+ rds->dyn_pty = false;
+ rds->tp = true;
+ rds->ta = alt;
+ rds->ms = true;
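+ /*
+ * e.g. tuning to 100 MHz (1600000 units of 62.5 Hz) yields the
+ * PS name "100000.0": the frequency in kHz with one decimal.
+ */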
+ snprintf(rds->psname, sizeof(rds->psname), "%6d.%1d",
+ (freq / 16) % 1000000, (((freq & 0xf) * 10) / 16) % 10);
+ if (alt)
+ strscpy(rds->radiotext,
+ " The Radio Data System can switch between different Radio Texts ",
+ sizeof(rds->radiotext));
+ else
+ strscpy(rds->radiotext,
+ "An example of Radio Text as transmitted by the Radio Data System",
+ sizeof(rds->radiotext));
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-rds-gen.h b/drivers/media/test-drivers/vivid/vivid-rds-gen.h
new file mode 100644
index 000000000..35ac57423
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-rds-gen.h
@@ -0,0 +1,42 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-rds-gen.h - rds (radio data system) generator support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_RDS_GEN_H_
+#define _VIVID_RDS_GEN_H_
+
+/*
+ * It takes almost exactly 5 seconds to transmit 57 RDS groups.
+ * Each group has 4 blocks and each block has a payload of 16 bits + a
+ * block identification. The driver will generate the contents of these
+ * 57 groups only when necessary and it will just be played continuously.
+ */
+#define VIVID_RDS_GEN_GROUPS 57
+#define VIVID_RDS_GEN_BLKS_PER_GRP 4
+#define VIVID_RDS_GEN_BLOCKS (VIVID_RDS_GEN_BLKS_PER_GRP * VIVID_RDS_GEN_GROUPS)
+#define VIVID_RDS_NSEC_PER_BLK (u32)(5ull * NSEC_PER_SEC / VIVID_RDS_GEN_BLOCKS)
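+/* 57 groups * 4 blocks = 228 blocks, i.e. one block roughly every 21.9 ms. */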
+
+struct vivid_rds_gen {
+ struct v4l2_rds_data data[VIVID_RDS_GEN_BLOCKS];
+ bool use_rbds;
+ u16 picode;
+ u8 pty;
+ bool mono_stereo;
+ bool art_head;
+ bool compressed;
+ bool dyn_pty;
+ bool ta;
+ bool tp;
+ bool ms;
+ char psname[8 + 1];
+ char radiotext[64 + 1];
+};
+
+void vivid_rds_gen_fill(struct vivid_rds_gen *rds, unsigned freq,
+ bool use_alternate);
+void vivid_rds_generate(struct vivid_rds_gen *rds);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-sdr-cap.c b/drivers/media/test-drivers/vivid/vivid-sdr-cap.c
new file mode 100644
index 000000000..0ae5628b8
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-sdr-cap.c
@@ -0,0 +1,574 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-sdr-cap.c - software defined radio support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/delay.h>
+#include <linux/kthread.h>
+#include <linux/freezer.h>
+#include <linux/math64.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+#include <linux/fixp-arith.h>
+#include <linux/jiffies.h>
+
+#include "vivid-core.h"
+#include "vivid-ctrls.h"
+#include "vivid-sdr-cap.h"
+
+/* stream formats */
+struct vivid_format {
+ u32 pixelformat;
+ u32 buffersize;
+};
+
+/* format descriptions for SDR capture */
+static const struct vivid_format formats[] = {
+ {
+ .pixelformat = V4L2_SDR_FMT_CU8,
+ .buffersize = SDR_CAP_SAMPLES_PER_BUF * 2,
+ }, {
+ .pixelformat = V4L2_SDR_FMT_CS8,
+ .buffersize = SDR_CAP_SAMPLES_PER_BUF * 2,
+ },
+};
+
+static const struct v4l2_frequency_band bands_adc[] = {
+ {
+ .tuner = 0,
+ .type = V4L2_TUNER_ADC,
+ .index = 0,
+ .capability = V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = 300000,
+ .rangehigh = 300000,
+ },
+ {
+ .tuner = 0,
+ .type = V4L2_TUNER_ADC,
+ .index = 1,
+ .capability = V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = 900001,
+ .rangehigh = 2800000,
+ },
+ {
+ .tuner = 0,
+ .type = V4L2_TUNER_ADC,
+ .index = 2,
+ .capability = V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = 3200000,
+ .rangehigh = 3200000,
+ },
+};
+
+/* ADC band midpoints */
+#define BAND_ADC_0 ((bands_adc[0].rangehigh + bands_adc[1].rangelow) / 2)
+#define BAND_ADC_1 ((bands_adc[1].rangehigh + bands_adc[2].rangelow) / 2)
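+/* i.e. 600000 Hz and 3000000 Hz respectively */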
+
+static const struct v4l2_frequency_band bands_fm[] = {
+ {
+ .tuner = 1,
+ .type = V4L2_TUNER_RF,
+ .index = 0,
+ .capability = V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS,
+ .rangelow = 50000000,
+ .rangehigh = 2000000000,
+ },
+};
+
+static void vivid_thread_sdr_cap_tick(struct vivid_dev *dev)
+{
+ struct vivid_buffer *sdr_cap_buf = NULL;
+
+ dprintk(dev, 1, "SDR Capture Thread Tick\n");
+
+ /* Drop a certain percentage of buffers. */
+ if (dev->perc_dropped_buffers &&
+ prandom_u32_max(100) < dev->perc_dropped_buffers)
+ return;
+
+ spin_lock(&dev->slock);
+ if (!list_empty(&dev->sdr_cap_active)) {
+ sdr_cap_buf = list_entry(dev->sdr_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&sdr_cap_buf->list);
+ }
+ spin_unlock(&dev->slock);
+
+ if (sdr_cap_buf) {
+ sdr_cap_buf->vb.sequence = dev->sdr_cap_with_seq_wrap_count;
+ v4l2_ctrl_request_setup(sdr_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_sdr_cap);
+ v4l2_ctrl_request_complete(sdr_cap_buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_sdr_cap);
+ vivid_sdr_cap_process(dev, sdr_cap_buf);
+ sdr_cap_buf->vb.vb2_buf.timestamp =
+ ktime_get_ns() + dev->time_wrap_offset;
+ vb2_buffer_done(&sdr_cap_buf->vb.vb2_buf, dev->dqbuf_error ?
+ VB2_BUF_STATE_ERROR : VB2_BUF_STATE_DONE);
+ dev->dqbuf_error = false;
+ }
+}
+
+static int vivid_thread_sdr_cap(void *data)
+{
+ struct vivid_dev *dev = data;
+ u64 samples_since_start;
+ u64 buffers_since_start;
+ u64 next_jiffies_since_start;
+ unsigned long jiffies_since_start;
+ unsigned long cur_jiffies;
+ unsigned wait_jiffies;
+
+ dprintk(dev, 1, "SDR Capture Thread Start\n");
+
+ set_freezable();
+
+ /* Resets frame counters */
+ dev->sdr_cap_seq_offset = 0;
+ dev->sdr_cap_seq_count = 0;
+ dev->jiffies_sdr_cap = jiffies;
+ dev->sdr_cap_seq_resync = false;
+ if (dev->time_wrap)
+ dev->time_wrap_offset = dev->time_wrap - ktime_get_ns();
+ else
+ dev->time_wrap_offset = 0;
+
+ for (;;) {
+ try_to_freeze();
+ if (kthread_should_stop())
+ break;
+
+ if (!mutex_trylock(&dev->mutex)) {
+ schedule();
+ continue;
+ }
+
+ cur_jiffies = jiffies;
+ if (dev->sdr_cap_seq_resync) {
+ dev->jiffies_sdr_cap = cur_jiffies;
+ dev->sdr_cap_seq_offset = dev->sdr_cap_seq_count + 1;
+ dev->sdr_cap_seq_count = 0;
+ dev->sdr_cap_seq_resync = false;
+ }
+ /* Calculate the number of jiffies since we started streaming */
+ jiffies_since_start = cur_jiffies - dev->jiffies_sdr_cap;
+ /* Get the number of buffers streamed since the start */
+ buffers_since_start =
+ (u64)jiffies_since_start * dev->sdr_adc_freq +
+ (HZ * SDR_CAP_SAMPLES_PER_BUF) / 2;
+ do_div(buffers_since_start, HZ * SDR_CAP_SAMPLES_PER_BUF);
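+ /*
+ * i.e. buffers = round(elapsed_seconds * sdr_adc_freq /
+ * SDR_CAP_SAMPLES_PER_BUF); adding half the divisor first rounds
+ * to the nearest integer.
+ */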
+
+ /*
+ * After more than 0xf0000000 jiffies (rounded down to a multiple of
+ * 'jiffies-per-day' to ease the jiffies_to_msecs calculation) have
+ * passed since we started streaming, reset the counters and keep
+ * track of the sequence offset.
+ */
+ if (jiffies_since_start > JIFFIES_RESYNC) {
+ dev->jiffies_sdr_cap = cur_jiffies;
+ dev->sdr_cap_seq_offset = buffers_since_start;
+ buffers_since_start = 0;
+ }
+ dev->sdr_cap_seq_count =
+ buffers_since_start + dev->sdr_cap_seq_offset;
+ dev->sdr_cap_with_seq_wrap_count = dev->sdr_cap_seq_count - dev->sdr_cap_seq_start;
+
+ vivid_thread_sdr_cap_tick(dev);
+ mutex_unlock(&dev->mutex);
+
+ /*
+ * Calculate the number of samples streamed since we started,
+ * not including the current buffer.
+ */
+ samples_since_start = buffers_since_start * SDR_CAP_SAMPLES_PER_BUF;
+
+ /* And the number of jiffies since we started */
+ jiffies_since_start = jiffies - dev->jiffies_sdr_cap;
+
+ /* Increase by the number of samples in one buffer */
+ samples_since_start += SDR_CAP_SAMPLES_PER_BUF;
+ /*
+ * Calculate when that next buffer is supposed to start
+ * in jiffies since we started streaming.
+ */
+ next_jiffies_since_start = samples_since_start * HZ +
+ dev->sdr_adc_freq / 2;
+ do_div(next_jiffies_since_start, dev->sdr_adc_freq);
+ /* If it is in the past, then just schedule asap */
+ if (next_jiffies_since_start < jiffies_since_start)
+ next_jiffies_since_start = jiffies_since_start;
+
+ wait_jiffies = next_jiffies_since_start - jiffies_since_start;
+ while (time_is_after_jiffies(cur_jiffies + wait_jiffies) &&
+ !kthread_should_stop())
+ schedule();
+ }
+ dprintk(dev, 1, "SDR Capture Thread End\n");
+ return 0;
+}
+
+static int sdr_cap_queue_setup(struct vb2_queue *vq,
+ unsigned *nbuffers, unsigned *nplanes,
+ unsigned sizes[], struct device *alloc_devs[])
+{
+ /* 2 = max 16-bit sample returned */
+ sizes[0] = SDR_CAP_SAMPLES_PER_BUF * 2;
+ *nplanes = 1;
+ return 0;
+}
+
+static int sdr_cap_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ unsigned size = SDR_CAP_SAMPLES_PER_BUF * 2;
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void sdr_cap_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->sdr_cap_active);
+ spin_unlock(&dev->slock);
+}
+
+static int sdr_cap_start_streaming(struct vb2_queue *vq, unsigned count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err = 0;
+
+ dprintk(dev, 1, "%s\n", __func__);
+ dev->sdr_cap_seq_start = dev->seq_wrap * 128;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else if (dev->kthread_sdr_cap == NULL) {
+ dev->kthread_sdr_cap = kthread_run(vivid_thread_sdr_cap, dev,
+ "%s-sdr-cap", dev->v4l2_dev.name);
+
+ if (IS_ERR(dev->kthread_sdr_cap)) {
+ v4l2_err(&dev->v4l2_dev, "kernel_thread() failed\n");
+ err = PTR_ERR(dev->kthread_sdr_cap);
+ dev->kthread_sdr_cap = NULL;
+ }
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp, &dev->sdr_cap_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void sdr_cap_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ if (dev->kthread_sdr_cap == NULL)
+ return;
+
+ while (!list_empty(&dev->sdr_cap_active)) {
+ struct vivid_buffer *buf;
+
+ buf = list_entry(dev->sdr_cap_active.next,
+ struct vivid_buffer, list);
+ list_del(&buf->list);
+ v4l2_ctrl_request_complete(buf->vb.vb2_buf.req_obj.req,
+ &dev->ctrl_hdl_sdr_cap);
+ vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
+ }
+
+ /* shutdown control thread */
+ kthread_stop(dev->kthread_sdr_cap);
+ dev->kthread_sdr_cap = NULL;
+}
+
+static void sdr_cap_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_sdr_cap);
+}
+
+const struct vb2_ops vivid_sdr_cap_qops = {
+ .queue_setup = sdr_cap_queue_setup,
+ .buf_prepare = sdr_cap_buf_prepare,
+ .buf_queue = sdr_cap_buf_queue,
+ .start_streaming = sdr_cap_start_streaming,
+ .stop_streaming = sdr_cap_stop_streaming,
+ .buf_request_complete = sdr_cap_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vivid_sdr_enum_freq_bands(struct file *file, void *fh,
+ struct v4l2_frequency_band *band)
+{
+ switch (band->tuner) {
+ case 0:
+ if (band->index >= ARRAY_SIZE(bands_adc))
+ return -EINVAL;
+ *band = bands_adc[band->index];
+ return 0;
+ case 1:
+ if (band->index >= ARRAY_SIZE(bands_fm))
+ return -EINVAL;
+ *band = bands_fm[band->index];
+ return 0;
+ default:
+ return -EINVAL;
+ }
+}
+
+int vivid_sdr_g_frequency(struct file *file, void *fh,
+ struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ switch (vf->tuner) {
+ case 0:
+ vf->frequency = dev->sdr_adc_freq;
+ vf->type = V4L2_TUNER_ADC;
+ return 0;
+ case 1:
+ vf->frequency = dev->sdr_fm_freq;
+ vf->type = V4L2_TUNER_RF;
+ return 0;
+ default:
+ return -EINVAL;
+ }
+}
+
+int vivid_sdr_s_frequency(struct file *file, void *fh,
+ const struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned freq = vf->frequency;
+ unsigned band;
+
+ switch (vf->tuner) {
+ case 0:
+ if (vf->type != V4L2_TUNER_ADC)
+ return -EINVAL;
+ if (freq < BAND_ADC_0)
+ band = 0;
+ else if (freq < BAND_ADC_1)
+ band = 1;
+ else
+ band = 2;
+
+ freq = clamp_t(unsigned, freq,
+ bands_adc[band].rangelow,
+ bands_adc[band].rangehigh);
+
+ if (vb2_is_streaming(&dev->vb_sdr_cap_q) &&
+ freq != dev->sdr_adc_freq) {
+ /* resync the thread's timings */
+ dev->sdr_cap_seq_resync = true;
+ }
+ dev->sdr_adc_freq = freq;
+ return 0;
+ case 1:
+ if (vf->type != V4L2_TUNER_RF)
+ return -EINVAL;
+ dev->sdr_fm_freq = clamp_t(unsigned, freq,
+ bands_fm[0].rangelow,
+ bands_fm[0].rangehigh);
+ return 0;
+ default:
+ return -EINVAL;
+ }
+}
+
+int vivid_sdr_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt)
+{
+ switch (vt->index) {
+ case 0:
+ strscpy(vt->name, "ADC", sizeof(vt->name));
+ vt->type = V4L2_TUNER_ADC;
+ vt->capability =
+ V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS;
+ vt->rangelow = bands_adc[0].rangelow;
+ vt->rangehigh = bands_adc[2].rangehigh;
+ return 0;
+ case 1:
+ strscpy(vt->name, "RF", sizeof(vt->name));
+ vt->type = V4L2_TUNER_RF;
+ vt->capability =
+ V4L2_TUNER_CAP_1HZ | V4L2_TUNER_CAP_FREQ_BANDS;
+ vt->rangelow = bands_fm[0].rangelow;
+ vt->rangehigh = bands_fm[0].rangehigh;
+ return 0;
+ default:
+ return -EINVAL;
+ }
+}
+
+int vivid_sdr_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt)
+{
+ if (vt->index > 1)
+ return -EINVAL;
+ return 0;
+}
+
+int vidioc_enum_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_fmtdesc *f)
+{
+ if (f->index >= ARRAY_SIZE(formats))
+ return -EINVAL;
+ f->pixelformat = formats[f->index].pixelformat;
+ return 0;
+}
+
+int vidioc_g_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ f->fmt.sdr.pixelformat = dev->sdr_pixelformat;
+ f->fmt.sdr.buffersize = dev->sdr_buffersize;
+ return 0;
+}
+
+int vidioc_s_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct vb2_queue *q = &dev->vb_sdr_cap_q;
+ int i;
+
+ if (vb2_is_busy(q))
+ return -EBUSY;
+
+ for (i = 0; i < ARRAY_SIZE(formats); i++) {
+ if (formats[i].pixelformat == f->fmt.sdr.pixelformat) {
+ dev->sdr_pixelformat = formats[i].pixelformat;
+ dev->sdr_buffersize = formats[i].buffersize;
+ f->fmt.sdr.buffersize = formats[i].buffersize;
+ return 0;
+ }
+ }
+ dev->sdr_pixelformat = formats[0].pixelformat;
+ dev->sdr_buffersize = formats[0].buffersize;
+ f->fmt.sdr.pixelformat = formats[0].pixelformat;
+ f->fmt.sdr.buffersize = formats[0].buffersize;
+ return 0;
+}
+
+int vidioc_try_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f)
+{
+ int i;
+
+ for (i = 0; i < ARRAY_SIZE(formats); i++) {
+ if (formats[i].pixelformat == f->fmt.sdr.pixelformat) {
+ f->fmt.sdr.buffersize = formats[i].buffersize;
+ return 0;
+ }
+ }
+ f->fmt.sdr.pixelformat = formats[0].pixelformat;
+ f->fmt.sdr.buffersize = formats[0].buffersize;
+ return 0;
+}
+
+#define FIXP_N (15)
+#define FIXP_FRAC (1 << FIXP_N)
+#define FIXP_2PI ((int)(2 * 3.141592653589 * FIXP_FRAC))
+#define M_100000PI (3.14159 * 100000)
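+/*
+ * Fixed point helpers for the tone generator below: phase angles are kept
+ * in Q15 fixed point, so FIXP_FRAC == 1 << 15 == 32768 and
+ * FIXP_2PI == (int)(2 * pi * 32768) == 205887 corresponds to one full period.
+ */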
+
+void vivid_sdr_cap_process(struct vivid_dev *dev, struct vivid_buffer *buf)
+{
+ u8 *vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+ unsigned long i;
+ unsigned long plane_size = vb2_plane_size(&buf->vb.vb2_buf, 0);
+ s64 s64tmp;
+ s32 src_phase_step;
+ s32 mod_phase_step;
+ s32 fixp_i;
+ s32 fixp_q;
+
+ /* calculate phase step */
+ #define BEEP_FREQ 1000 /* 1kHz beep */
+ src_phase_step = DIV_ROUND_CLOSEST(FIXP_2PI * BEEP_FREQ,
+ dev->sdr_adc_freq);
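+	/*
+	 * src_phase_step is the phase increment per complex sample of the
+	 * 1 kHz beep, e.g. at a 300 kHz ADC rate it is about
+	 * 205887 * 1000 / 300000 ~= 686, i.e. ~300 samples per beep period.
+	 */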
+
+ for (i = 0; i < plane_size; i += 2) {
+ mod_phase_step = fixp_cos32_rad(dev->sdr_fixp_src_phase,
+ FIXP_2PI) >> (31 - FIXP_N);
+
+ dev->sdr_fixp_src_phase += src_phase_step;
+ s64tmp = (s64) mod_phase_step * dev->sdr_fm_deviation;
+ dev->sdr_fixp_mod_phase += div_s64(s64tmp, M_100000PI);
+
+ /*
+		 * Reduce the phase angle to [0, 2*PI] in order to avoid
+		 * variable overflow and to keep it suitable for the cosine
+		 * implementation used, which does not support negative angles.
+ */
+ dev->sdr_fixp_src_phase %= FIXP_2PI;
+ dev->sdr_fixp_mod_phase %= FIXP_2PI;
+
+ if (dev->sdr_fixp_mod_phase < 0)
+ dev->sdr_fixp_mod_phase += FIXP_2PI;
+
+ fixp_i = fixp_cos32_rad(dev->sdr_fixp_mod_phase, FIXP_2PI);
+ fixp_q = fixp_sin32_rad(dev->sdr_fixp_mod_phase, FIXP_2PI);
+
+		/*
+		 * Normalize fraction values represented with 32 bit precision
+		 * to fixed point representation with FIXP_N bits.
+		 */
+ fixp_i >>= (31 - FIXP_N);
+ fixp_q >>= (31 - FIXP_N);
+
+ switch (dev->sdr_pixelformat) {
+ case V4L2_SDR_FMT_CU8:
+ /* convert 'fixp float' to u8 [0, +255] */
+ /* u8 = X * 127.5 + 127.5; X is float [-1.0, +1.0] */
+ fixp_i = fixp_i * 1275 + FIXP_FRAC * 1275;
+ fixp_q = fixp_q * 1275 + FIXP_FRAC * 1275;
+ *vbuf++ = DIV_ROUND_CLOSEST(fixp_i, FIXP_FRAC * 10);
+ *vbuf++ = DIV_ROUND_CLOSEST(fixp_q, FIXP_FRAC * 10);
+ break;
+ case V4L2_SDR_FMT_CS8:
+ /* convert 'fixp float' to s8 [-128, +127] */
+ /* s8 = X * 127.5 - 0.5; X is float [-1.0, +1.0] */
+ fixp_i = fixp_i * 1275 - FIXP_FRAC * 5;
+ fixp_q = fixp_q * 1275 - FIXP_FRAC * 5;
+ *vbuf++ = DIV_ROUND_CLOSEST(fixp_i, FIXP_FRAC * 10);
+ *vbuf++ = DIV_ROUND_CLOSEST(fixp_q, FIXP_FRAC * 10);
+ break;
+ default:
+ break;
+ }
+ }
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-sdr-cap.h b/drivers/media/test-drivers/vivid/vivid-sdr-cap.h
new file mode 100644
index 000000000..813c9248e
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-sdr-cap.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-sdr-cap.h - software defined radio support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_SDR_CAP_H_
+#define _VIVID_SDR_CAP_H_
+
+int vivid_sdr_enum_freq_bands(struct file *file, void *fh, struct v4l2_frequency_band *band);
+int vivid_sdr_g_frequency(struct file *file, void *fh, struct v4l2_frequency *vf);
+int vivid_sdr_s_frequency(struct file *file, void *fh, const struct v4l2_frequency *vf);
+int vivid_sdr_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt);
+int vivid_sdr_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt);
+int vidioc_enum_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_fmtdesc *f);
+int vidioc_g_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f);
+int vidioc_s_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f);
+int vidioc_try_fmt_sdr_cap(struct file *file, void *fh, struct v4l2_format *f);
+void vivid_sdr_cap_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+
+extern const struct vb2_ops vivid_sdr_cap_qops;
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-touch-cap.c b/drivers/media/test-drivers/vivid/vivid-touch-cap.c
new file mode 100644
index 000000000..6cc32eb54
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-touch-cap.c
@@ -0,0 +1,341 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-touch-cap.c - touch support functions.
+ */
+
+#include "vivid-core.h"
+#include "vivid-kthread-touch.h"
+#include "vivid-vid-common.h"
+#include "vivid-touch-cap.h"
+
+static int touch_cap_queue_setup(struct vb2_queue *vq, unsigned int *nbuffers,
+ unsigned int *nplanes, unsigned int sizes[],
+ struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ struct v4l2_pix_format *f = &dev->tch_format;
+ unsigned int size = f->sizeimage;
+
+ if (*nplanes) {
+ if (sizes[0] < size)
+ return -EINVAL;
+ } else {
+ sizes[0] = size;
+ }
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = 1;
+ return 0;
+}
+
+static int touch_cap_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct v4l2_pix_format *f = &dev->tch_format;
+ unsigned int size = f->sizeimage;
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void touch_cap_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ vbuf->field = V4L2_FIELD_NONE;
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->touch_cap_active);
+ spin_unlock(&dev->slock);
+}
+
+static int touch_cap_start_streaming(struct vb2_queue *vq, unsigned int count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ dev->touch_cap_seq_count = 0;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_touch_cap(dev);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp,
+ &dev->touch_cap_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void touch_cap_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ vivid_stop_generating_touch_cap(dev);
+}
+
+static void touch_cap_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_touch_cap);
+}
+
+const struct vb2_ops vivid_touch_cap_qops = {
+ .queue_setup = touch_cap_queue_setup,
+ .buf_prepare = touch_cap_buf_prepare,
+ .buf_queue = touch_cap_buf_queue,
+ .start_streaming = touch_cap_start_streaming,
+ .stop_streaming = touch_cap_stop_streaming,
+ .buf_request_complete = touch_cap_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vivid_enum_fmt_tch(struct file *file, void *priv, struct v4l2_fmtdesc *f)
+{
+ if (f->index)
+ return -EINVAL;
+
+ f->pixelformat = V4L2_TCH_FMT_DELTA_TD16;
+ return 0;
+}
+
+int vivid_g_fmt_tch(struct file *file, void *priv, struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ f->fmt.pix = dev->tch_format;
+ return 0;
+}
+
+int vivid_g_fmt_tch_mplane(struct file *file, void *priv, struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_format sp_fmt;
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ sp_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ sp_fmt.fmt.pix = dev->tch_format;
+ fmt_sp2mp(&sp_fmt, f);
+ return 0;
+}
+
+int vivid_g_parm_tch(struct file *file, void *priv,
+ struct v4l2_streamparm *parm)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (parm->type != (dev->multiplanar ?
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE :
+ V4L2_BUF_TYPE_VIDEO_CAPTURE))
+ return -EINVAL;
+
+ parm->parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ parm->parm.capture.timeperframe = dev->timeperframe_tch_cap;
+ parm->parm.capture.readbuffers = 1;
+ return 0;
+}
+
+int vivid_enum_input_tch(struct file *file, void *priv, struct v4l2_input *inp)
+{
+ if (inp->index)
+ return -EINVAL;
+
+ inp->type = V4L2_INPUT_TYPE_TOUCH;
+ strscpy(inp->name, "Vivid Touch", sizeof(inp->name));
+ inp->capabilities = 0;
+ return 0;
+}
+
+int vivid_g_input_tch(struct file *file, void *priv, unsigned int *i)
+{
+ *i = 0;
+ return 0;
+}
+
+int vivid_set_touch(struct vivid_dev *dev, unsigned int i)
+{
+ struct v4l2_pix_format *f = &dev->tch_format;
+
+ if (i)
+ return -EINVAL;
+
+ f->pixelformat = V4L2_TCH_FMT_DELTA_TD16;
+ f->width = VIVID_TCH_WIDTH;
+ f->height = VIVID_TCH_HEIGHT;
+ f->field = V4L2_FIELD_NONE;
+ f->colorspace = V4L2_COLORSPACE_RAW;
+ f->bytesperline = f->width * sizeof(s16);
+ f->sizeimage = f->width * f->height * sizeof(s16);
+ return 0;
+}
+
+int vivid_s_input_tch(struct file *file, void *priv, unsigned int i)
+{
+ return vivid_set_touch(video_drvdata(file), i);
+}
+
+static void vivid_fill_buff_noise(__s16 *tch_buf, int size)
+{
+ int i;
+
+	/* Fill ~10% of the values with noise in [-3, 3], zero the others */
+ for (i = 0; i < size; i++) {
+ unsigned int rand = get_random_u32();
+
+ if (rand % 10)
+ tch_buf[i] = 0;
+ else
+ tch_buf[i] = (rand / 10) % 7 - 3;
+ }
+}
+
+static inline int get_random_pressure(void)
+{
+ return prandom_u32_max(VIVID_PRESSURE_LIMIT);
+}
+
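+/*
+ * Simulate a single touch: write the strongest pressure value at @index,
+ * a weaker value at its four edge neighbours and a weaker one still at its
+ * four diagonal neighbours (bounds permitting).
+ */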
+static void vivid_tch_buf_set(struct v4l2_pix_format *f,
+ __s16 *tch_buf,
+ int index)
+{
+ unsigned int x = index % f->width;
+ unsigned int y = index / f->width;
+ unsigned int offset = VIVID_MIN_PRESSURE;
+
+ tch_buf[index] = offset + get_random_pressure();
+ offset /= 2;
+ if (x)
+ tch_buf[index - 1] = offset + get_random_pressure();
+ if (x < f->width - 1)
+ tch_buf[index + 1] = offset + get_random_pressure();
+ if (y)
+ tch_buf[index - f->width] = offset + get_random_pressure();
+ if (y < f->height - 1)
+ tch_buf[index + f->width] = offset + get_random_pressure();
+ offset /= 2;
+ if (x && y)
+ tch_buf[index - 1 - f->width] = offset + get_random_pressure();
+ if (x < f->width - 1 && y)
+ tch_buf[index + 1 - f->width] = offset + get_random_pressure();
+ if (x && y < f->height - 1)
+ tch_buf[index - 1 + f->width] = offset + get_random_pressure();
+ if (x < f->width - 1 && y < f->height - 1)
+ tch_buf[index + 1 + f->width] = offset + get_random_pressure();
+}
+
+void vivid_fillbuff_tch(struct vivid_dev *dev, struct vivid_buffer *buf)
+{
+ struct v4l2_pix_format *f = &dev->tch_format;
+ int size = f->width * f->height;
+ int x, y, xstart, ystart, offset_x, offset_y;
+ unsigned int test_pattern, test_pat_idx, rand;
+
+ __s16 *tch_buf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+
+ buf->vb.sequence = dev->touch_cap_with_seq_wrap_count;
+ test_pattern = (buf->vb.sequence / TCH_SEQ_COUNT) % TEST_CASE_MAX;
+ test_pat_idx = buf->vb.sequence % TCH_SEQ_COUNT;
+
+ vivid_fill_buff_noise(tch_buf, size);
+
+ if (test_pat_idx >= TCH_PATTERN_COUNT)
+ return;
+
+ if (test_pat_idx == 0)
+ dev->tch_pat_random = get_random_u32();
+ rand = dev->tch_pat_random;
+
+ switch (test_pattern) {
+ case SINGLE_TAP:
+ if (test_pat_idx == 2)
+ vivid_tch_buf_set(f, tch_buf, rand % size);
+ break;
+ case DOUBLE_TAP:
+ if (test_pat_idx == 2 || test_pat_idx == 4)
+ vivid_tch_buf_set(f, tch_buf, rand % size);
+ break;
+ case TRIPLE_TAP:
+ if (test_pat_idx == 2 || test_pat_idx == 4 || test_pat_idx == 6)
+ vivid_tch_buf_set(f, tch_buf, rand % size);
+ break;
+ case MOVE_LEFT_TO_RIGHT:
+ vivid_tch_buf_set(f, tch_buf,
+ (rand % f->height) * f->width +
+ test_pat_idx *
+ (f->width / TCH_PATTERN_COUNT));
+ break;
+ case ZOOM_IN:
+ x = f->width / 2;
+ y = f->height / 2;
+ offset_x = ((TCH_PATTERN_COUNT - 1 - test_pat_idx) * x) /
+ TCH_PATTERN_COUNT;
+ offset_y = ((TCH_PATTERN_COUNT - 1 - test_pat_idx) * y) /
+ TCH_PATTERN_COUNT;
+ vivid_tch_buf_set(f, tch_buf,
+ (x - offset_x) + f->width * (y - offset_y));
+ vivid_tch_buf_set(f, tch_buf,
+ (x + offset_x) + f->width * (y + offset_y));
+ break;
+ case ZOOM_OUT:
+ x = f->width / 2;
+ y = f->height / 2;
+ offset_x = (test_pat_idx * x) / TCH_PATTERN_COUNT;
+ offset_y = (test_pat_idx * y) / TCH_PATTERN_COUNT;
+ vivid_tch_buf_set(f, tch_buf,
+ (x - offset_x) + f->width * (y - offset_y));
+ vivid_tch_buf_set(f, tch_buf,
+ (x + offset_x) + f->width * (y + offset_y));
+ break;
+ case PALM_PRESS:
+ for (x = 0; x < f->width; x++)
+ for (y = f->height / 2; y < f->height; y++)
+ tch_buf[x + f->width * y] = VIVID_MIN_PRESSURE +
+ get_random_pressure();
+ break;
+ case MULTIPLE_PRESS:
+ /* 16 pressure points */
+ for (y = 0; y < 4; y++) {
+ for (x = 0; x < 4; x++) {
+ ystart = (y * f->height) / 4 + f->height / 8;
+ xstart = (x * f->width) / 4 + f->width / 8;
+ vivid_tch_buf_set(f, tch_buf,
+ ystart * f->width + xstart);
+ }
+ }
+ break;
+ }
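+	/*
+	 * The TD16 format carries little endian 16-bit deltas, so convert
+	 * from CPU to little endian order on big endian hosts.
+	 */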
+#ifdef __BIG_ENDIAN__
+ for (x = 0; x < size; x++)
+ tch_buf[x] = (__force s16)__cpu_to_le16((u16)tch_buf[x]);
+#endif
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-touch-cap.h b/drivers/media/test-drivers/vivid/vivid-touch-cap.h
new file mode 100644
index 000000000..07e514046
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-touch-cap.h
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-touch-cap.h - touch support functions.
+ */
+#ifndef _VIVID_TOUCH_CAP_H_
+#define _VIVID_TOUCH_CAP_H_
+
+#define VIVID_TCH_HEIGHT 12
+#define VIVID_TCH_WIDTH 21
+#define VIVID_MIN_PRESSURE 180
+#define VIVID_PRESSURE_LIMIT 40
+#define TCH_SEQ_COUNT 16
+#define TCH_PATTERN_COUNT 12
+
+enum vivid_tch_test {
+ SINGLE_TAP,
+ DOUBLE_TAP,
+ TRIPLE_TAP,
+ MOVE_LEFT_TO_RIGHT,
+ ZOOM_IN,
+ ZOOM_OUT,
+ PALM_PRESS,
+ MULTIPLE_PRESS,
+ TEST_CASE_MAX
+};
+
+extern const struct vb2_ops vivid_touch_cap_qops;
+
+int vivid_enum_fmt_tch(struct file *file, void *priv, struct v4l2_fmtdesc *f);
+int vivid_g_fmt_tch(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_g_fmt_tch_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_enum_input_tch(struct file *file, void *priv, struct v4l2_input *inp);
+int vivid_g_input_tch(struct file *file, void *priv, unsigned int *i);
+int vivid_s_input_tch(struct file *file, void *priv, unsigned int i);
+void vivid_fillbuff_tch(struct vivid_dev *dev, struct vivid_buffer *buf);
+int vivid_set_touch(struct vivid_dev *dev, unsigned int i);
+int vivid_g_parm_tch(struct file *file, void *priv,
+ struct v4l2_streamparm *parm);
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-cap.c b/drivers/media/test-drivers/vivid/vivid-vbi-cap.c
new file mode 100644
index 000000000..b65b02eee
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-cap.c
@@ -0,0 +1,361 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vbi-cap.c - vbi capture support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/videodev2.h>
+#include <media/v4l2-common.h>
+
+#include "vivid-core.h"
+#include "vivid-kthread-cap.h"
+#include "vivid-vbi-cap.h"
+#include "vivid-vbi-gen.h"
+
+static void vivid_sliced_vbi_cap_fill(struct vivid_dev *dev, unsigned seqnr)
+{
+ struct vivid_vbi_gen_data *vbi_gen = &dev->vbi_gen;
+ bool is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+
+ vivid_vbi_gen_sliced(vbi_gen, is_60hz, seqnr);
+
+ if (!is_60hz) {
+ if (dev->loop_video) {
+ if (dev->vbi_out_have_wss) {
+ vbi_gen->data[12].data[0] = dev->vbi_out_wss[0];
+ vbi_gen->data[12].data[1] = dev->vbi_out_wss[1];
+ } else {
+ vbi_gen->data[12].id = 0;
+ }
+ } else {
+ switch (tpg_g_video_aspect(&dev->tpg)) {
+ case TPG_VIDEO_ASPECT_14X9_CENTRE:
+ vbi_gen->data[12].data[0] = 0x01;
+ break;
+ case TPG_VIDEO_ASPECT_16X9_CENTRE:
+ vbi_gen->data[12].data[0] = 0x0b;
+ break;
+ case TPG_VIDEO_ASPECT_16X9_ANAMORPHIC:
+ vbi_gen->data[12].data[0] = 0x07;
+ break;
+ case TPG_VIDEO_ASPECT_4X3:
+ default:
+ vbi_gen->data[12].data[0] = 0x08;
+ break;
+ }
+ }
+ } else if (dev->loop_video && is_60hz) {
+ if (dev->vbi_out_have_cc[0]) {
+ vbi_gen->data[0].data[0] = dev->vbi_out_cc[0][0];
+ vbi_gen->data[0].data[1] = dev->vbi_out_cc[0][1];
+ } else {
+ vbi_gen->data[0].id = 0;
+ }
+ if (dev->vbi_out_have_cc[1]) {
+ vbi_gen->data[1].data[0] = dev->vbi_out_cc[1][0];
+ vbi_gen->data[1].data[1] = dev->vbi_out_cc[1][1];
+ } else {
+ vbi_gen->data[1].id = 0;
+ }
+ }
+}
+
+static void vivid_g_fmt_vbi_cap(struct vivid_dev *dev, struct v4l2_vbi_format *vbi)
+{
+ bool is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+
+ vbi->sampling_rate = 27000000;
+ vbi->offset = 24;
+ vbi->samples_per_line = 1440;
+ vbi->sample_format = V4L2_PIX_FMT_GREY;
+ vbi->start[0] = is_60hz ? V4L2_VBI_ITU_525_F1_START + 9 : V4L2_VBI_ITU_625_F1_START + 5;
+ vbi->start[1] = is_60hz ? V4L2_VBI_ITU_525_F2_START + 9 : V4L2_VBI_ITU_625_F2_START + 5;
+ vbi->count[0] = vbi->count[1] = is_60hz ? 12 : 18;
+ vbi->flags = dev->vbi_cap_interlaced ? V4L2_VBI_INTERLACED : 0;
+ vbi->reserved[0] = 0;
+ vbi->reserved[1] = 0;
+}
+
+void vivid_raw_vbi_cap_process(struct vivid_dev *dev, struct vivid_buffer *buf)
+{
+ struct v4l2_vbi_format vbi;
+ u8 *vbuf = vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+
+ vivid_g_fmt_vbi_cap(dev, &vbi);
+ buf->vb.sequence = dev->vbi_cap_seq_count;
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ buf->vb.sequence /= 2;
+
+ vivid_sliced_vbi_cap_fill(dev, buf->vb.sequence);
+
+ memset(vbuf, 0x10, vb2_plane_size(&buf->vb.vb2_buf, 0));
+
+ if (!VIVID_INVALID_SIGNAL(dev->std_signal_mode[dev->input]))
+ vivid_vbi_gen_raw(&dev->vbi_gen, &vbi, vbuf);
+}
+
+
+void vivid_sliced_vbi_cap_process(struct vivid_dev *dev,
+ struct vivid_buffer *buf)
+{
+ struct v4l2_sliced_vbi_data *vbuf =
+ vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+
+ buf->vb.sequence = dev->vbi_cap_seq_count;
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ buf->vb.sequence /= 2;
+
+ vivid_sliced_vbi_cap_fill(dev, buf->vb.sequence);
+
+ memset(vbuf, 0, vb2_plane_size(&buf->vb.vb2_buf, 0));
+ if (!VIVID_INVALID_SIGNAL(dev->std_signal_mode[dev->input])) {
+ unsigned i;
+
+ for (i = 0; i < 25; i++)
+ vbuf[i] = dev->vbi_gen.data[i];
+ }
+}
+
+static int vbi_cap_queue_setup(struct vb2_queue *vq,
+ unsigned *nbuffers, unsigned *nplanes,
+ unsigned sizes[], struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ bool is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+ unsigned size = vq->type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE ?
+ 36 * sizeof(struct v4l2_sliced_vbi_data) :
+ 1440 * 2 * (is_60hz ? 12 : 18);
+
+ if (!vivid_is_sdtv_cap(dev))
+ return -EINVAL;
+
+ sizes[0] = size;
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = 1;
+ return 0;
+}
+
+static int vbi_cap_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ bool is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+ unsigned size = vb->vb2_queue->type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE ?
+ 36 * sizeof(struct v4l2_sliced_vbi_data) :
+ 1440 * 2 * (is_60hz ? 12 : 18);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void vbi_cap_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->vbi_cap_active);
+ spin_unlock(&dev->slock);
+}
+
+static int vbi_cap_start_streaming(struct vb2_queue *vq, unsigned count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ dprintk(dev, 1, "%s\n", __func__);
+ dev->vbi_cap_seq_count = 0;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_cap(dev, &dev->vbi_cap_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp, &dev->vbi_cap_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void vbi_cap_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_cap(dev, &dev->vbi_cap_streaming);
+}
+
+static void vbi_cap_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_vbi_cap);
+}
+
+const struct vb2_ops vivid_vbi_cap_qops = {
+ .queue_setup = vbi_cap_queue_setup,
+ .buf_prepare = vbi_cap_buf_prepare,
+ .buf_queue = vbi_cap_buf_queue,
+ .start_streaming = vbi_cap_start_streaming,
+ .stop_streaming = vbi_cap_stop_streaming,
+ .buf_request_complete = vbi_cap_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vidioc_g_fmt_vbi_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_vbi_format *vbi = &f->fmt.vbi;
+
+ if (!vivid_is_sdtv_cap(dev) || !dev->has_raw_vbi_cap)
+ return -EINVAL;
+
+ vivid_g_fmt_vbi_cap(dev, vbi);
+ return 0;
+}
+
+int vidioc_s_fmt_vbi_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ int ret = vidioc_g_fmt_vbi_cap(file, priv, f);
+
+ if (ret)
+ return ret;
+ if (f->type != V4L2_BUF_TYPE_VBI_CAPTURE && vb2_is_busy(&dev->vb_vbi_cap_q))
+ return -EBUSY;
+ return 0;
+}
+
+void vivid_fill_service_lines(struct v4l2_sliced_vbi_format *vbi, u32 service_set)
+{
+ vbi->io_size = sizeof(struct v4l2_sliced_vbi_data) * 36;
+ vbi->service_set = service_set;
+ memset(vbi->service_lines, 0, sizeof(vbi->service_lines));
+ memset(vbi->reserved, 0, sizeof(vbi->reserved));
+
+ if (vbi->service_set == 0)
+ return;
+
+ if (vbi->service_set & V4L2_SLICED_CAPTION_525) {
+ vbi->service_lines[0][21] = V4L2_SLICED_CAPTION_525;
+ vbi->service_lines[1][21] = V4L2_SLICED_CAPTION_525;
+ }
+ if (vbi->service_set & V4L2_SLICED_WSS_625) {
+ unsigned i;
+
+ for (i = 7; i <= 18; i++)
+ vbi->service_lines[0][i] =
+ vbi->service_lines[1][i] = V4L2_SLICED_TELETEXT_B;
+ vbi->service_lines[0][23] = V4L2_SLICED_WSS_625;
+ }
+}
+
+int vidioc_g_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+
+ if (!vivid_is_sdtv_cap(dev) || !dev->has_sliced_vbi_cap)
+ return -EINVAL;
+
+ vivid_fill_service_lines(vbi, dev->service_set_cap);
+ return 0;
+}
+
+int vidioc_try_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+ bool is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+ u32 service_set = vbi->service_set;
+
+ if (!vivid_is_sdtv_cap(dev) || !dev->has_sliced_vbi_cap)
+ return -EINVAL;
+
+ service_set &= is_60hz ? V4L2_SLICED_CAPTION_525 :
+ V4L2_SLICED_WSS_625 | V4L2_SLICED_TELETEXT_B;
+ vivid_fill_service_lines(vbi, service_set);
+ return 0;
+}
+
+int vidioc_s_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+ int ret = vidioc_try_fmt_sliced_vbi_cap(file, fh, fmt);
+
+ if (ret)
+ return ret;
+ if (fmt->type != V4L2_BUF_TYPE_SLICED_VBI_CAPTURE && vb2_is_busy(&dev->vb_vbi_cap_q))
+ return -EBUSY;
+ dev->service_set_cap = vbi->service_set;
+ return 0;
+}
+
+int vidioc_g_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_sliced_vbi_cap *cap)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+ bool is_60hz;
+
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ is_60hz = dev->std_cap[dev->input] & V4L2_STD_525_60;
+ if (!vivid_is_sdtv_cap(dev) || !dev->has_sliced_vbi_cap ||
+ cap->type != V4L2_BUF_TYPE_SLICED_VBI_CAPTURE)
+ return -EINVAL;
+ } else {
+ is_60hz = dev->std_out & V4L2_STD_525_60;
+ if (!vivid_is_svid_out(dev) || !dev->has_sliced_vbi_out ||
+ cap->type != V4L2_BUF_TYPE_SLICED_VBI_OUTPUT)
+ return -EINVAL;
+ }
+
+ cap->service_set = is_60hz ? V4L2_SLICED_CAPTION_525 :
+ V4L2_SLICED_WSS_625 | V4L2_SLICED_TELETEXT_B;
+ if (is_60hz) {
+ cap->service_lines[0][21] = V4L2_SLICED_CAPTION_525;
+ cap->service_lines[1][21] = V4L2_SLICED_CAPTION_525;
+ } else {
+ unsigned i;
+
+ for (i = 7; i <= 18; i++)
+ cap->service_lines[0][i] =
+ cap->service_lines[1][i] = V4L2_SLICED_TELETEXT_B;
+ cap->service_lines[0][23] = V4L2_SLICED_WSS_625;
+ }
+ return 0;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-cap.h b/drivers/media/test-drivers/vivid/vivid-vbi-cap.h
new file mode 100644
index 000000000..91d2de013
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-cap.h
@@ -0,0 +1,28 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vbi-cap.h - vbi capture support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VBI_CAP_H_
+#define _VIVID_VBI_CAP_H_
+
+void vivid_fill_time_of_day_packet(u8 *packet);
+void vivid_raw_vbi_cap_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+void vivid_sliced_vbi_cap_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+void vivid_sliced_vbi_out_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+int vidioc_g_fmt_vbi_cap(struct file *file, void *priv,
+ struct v4l2_format *f);
+int vidioc_s_fmt_vbi_cap(struct file *file, void *priv,
+ struct v4l2_format *f);
+int vidioc_g_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt);
+int vidioc_try_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt);
+int vidioc_s_fmt_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_format *fmt);
+int vidioc_g_sliced_vbi_cap(struct file *file, void *fh, struct v4l2_sliced_vbi_cap *cap);
+
+void vivid_fill_service_lines(struct v4l2_sliced_vbi_format *vbi, u32 service_set);
+
+extern const struct vb2_ops vivid_vbi_cap_qops;
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-gen.c b/drivers/media/test-drivers/vivid/vivid-vbi-gen.c
new file mode 100644
index 000000000..a141369a7
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-gen.c
@@ -0,0 +1,311 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vbi-gen.c - vbi generator support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/ktime.h>
+#include <linux/string.h>
+#include <linux/videodev2.h>
+
+#include "vivid-vbi-gen.h"
+
+static void wss_insert(u8 *wss, u32 val, unsigned size)
+{
+ while (size--)
+ *wss++ = (val & (1 << size)) ? 0xc0 : 0x10;
+}
+
+static void vivid_vbi_gen_wss_raw(const struct v4l2_sliced_vbi_data *data,
+ u8 *buf, unsigned sampling_rate)
+{
+ const unsigned rate = 5000000; /* WSS has a 5 MHz transmission rate */
+ u8 wss[29 + 24 + 24 + 24 + 18 + 18] = { 0 };
+ const unsigned zero = 0x07;
+ const unsigned one = 0x38;
+ unsigned bit = 0;
+ u16 wss_data;
+ int i;
+
+ wss_insert(wss + bit, 0x1f1c71c7, 29); bit += 29;
+ wss_insert(wss + bit, 0x1e3c1f, 24); bit += 24;
+
+ wss_data = (data->data[1] << 8) | data->data[0];
+ for (i = 0; i <= 13; i++, bit += 6)
+ wss_insert(wss + bit, (wss_data & (1 << i)) ? one : zero, 6);
+
+ for (i = 0, bit = 0; bit < sizeof(wss); bit++) {
+ unsigned n = ((bit + 1) * sampling_rate) / rate;
+
+ while (i < n)
+ buf[i++] = wss[bit];
+ }
+}
+
+static void vivid_vbi_gen_teletext_raw(const struct v4l2_sliced_vbi_data *data,
+ u8 *buf, unsigned sampling_rate)
+{
+ const unsigned rate = 6937500 / 10; /* Teletext has a 6.9375 MHz transmission rate */
+ u8 teletext[45] = { 0x55, 0x55, 0x27 };
+ unsigned bit = 0;
+ int i;
+
+ memcpy(teletext + 3, data->data, sizeof(teletext) - 3);
+ /* prevents 32 bit overflow */
+ sampling_rate /= 10;
+
+ for (i = 0, bit = 0; bit < sizeof(teletext) * 8; bit++) {
+ unsigned n = ((bit + 1) * sampling_rate) / rate;
+ u8 val = (teletext[bit / 8] & (1 << (bit & 7))) ? 0xc0 : 0x10;
+
+ while (i < n)
+ buf[i++] = val;
+ }
+}
+
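+/*
+ * Encode one Closed Caption byte: each of the seven data bits is written as
+ * two identical half-bit cells (LSB first), followed by two cells carrying
+ * an odd parity bit.
+ */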
+static void cc_insert(u8 *cc, u8 ch)
+{
+ unsigned tot = 0;
+ unsigned i;
+
+ for (i = 0; i < 7; i++) {
+ cc[2 * i] = cc[2 * i + 1] = (ch & (1 << i)) ? 1 : 0;
+ tot += cc[2 * i];
+ }
+ cc[14] = cc[15] = !(tot & 1);
+}
+
+#define CC_PREAMBLE_BITS (14 + 4 + 2)
+
+static void vivid_vbi_gen_cc_raw(const struct v4l2_sliced_vbi_data *data,
+ u8 *buf, unsigned sampling_rate)
+{
+ const unsigned rate = 1000000; /* CC has a 1 MHz transmission rate */
+
+ u8 cc[CC_PREAMBLE_BITS + 2 * 16] = {
+ /* Clock run-in: 7 cycles */
+ 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
+ /* 2 cycles of 0 */
+ 0, 0, 0, 0,
+ /* Start bit of 1 (each bit is two cycles) */
+ 1, 1
+ };
+ unsigned bit, i;
+
+ cc_insert(cc + CC_PREAMBLE_BITS, data->data[0]);
+ cc_insert(cc + CC_PREAMBLE_BITS + 16, data->data[1]);
+
+ for (i = 0, bit = 0; bit < sizeof(cc); bit++) {
+ unsigned n = ((bit + 1) * sampling_rate) / rate;
+
+ while (i < n)
+ buf[i++] = cc[bit] ? 0xc0 : 0x10;
+ }
+}
+
+void vivid_vbi_gen_raw(const struct vivid_vbi_gen_data *vbi,
+ const struct v4l2_vbi_format *vbi_fmt, u8 *buf)
+{
+ unsigned idx;
+
+ for (idx = 0; idx < 25; idx++) {
+ const struct v4l2_sliced_vbi_data *data = vbi->data + idx;
+ unsigned start_2nd_field;
+ unsigned line = data->line;
+ u8 *linebuf = buf;
+
+ start_2nd_field = (data->id & V4L2_SLICED_VBI_525) ? 263 : 313;
+ if (data->field)
+ line += start_2nd_field;
+ line -= vbi_fmt->start[data->field];
+
+ if (vbi_fmt->flags & V4L2_VBI_INTERLACED)
+ linebuf += (line * 2 + data->field) *
+ vbi_fmt->samples_per_line;
+ else
+ linebuf += (line + data->field * vbi_fmt->count[0]) *
+ vbi_fmt->samples_per_line;
+ if (data->id == V4L2_SLICED_CAPTION_525)
+ vivid_vbi_gen_cc_raw(data, linebuf, vbi_fmt->sampling_rate);
+ else if (data->id == V4L2_SLICED_WSS_625)
+ vivid_vbi_gen_wss_raw(data, linebuf, vbi_fmt->sampling_rate);
+ else if (data->id == V4L2_SLICED_TELETEXT_B)
+ vivid_vbi_gen_teletext_raw(data, linebuf, vbi_fmt->sampling_rate);
+ }
+}
+
+static const u8 vivid_cc_sequence1[30] = {
+ 0x14, 0x20, /* Resume Caption Loading */
+ 'H', 'e',
+ 'l', 'l',
+ 'o', ' ',
+ 'w', 'o',
+ 'r', 'l',
+ 'd', '!',
+ 0x14, 0x2f, /* End of Caption */
+};
+
+static const u8 vivid_cc_sequence2[30] = {
+ 0x14, 0x20, /* Resume Caption Loading */
+ 'C', 'l',
+ 'o', 's',
+ 'e', 'd',
+ ' ', 'c',
+ 'a', 'p',
+ 't', 'i',
+ 'o', 'n',
+ 's', ' ',
+ 't', 'e',
+ 's', 't',
+ 0x14, 0x2f, /* End of Caption */
+};
+
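+/*
+ * Return @val with bit 7 set up as an odd parity bit, as required for
+ * Closed Caption data bytes.
+ */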
+static u8 calc_parity(u8 val)
+{
+ unsigned i;
+ unsigned tot = 0;
+
+ for (i = 0; i < 7; i++)
+ tot += (val & (1 << i)) ? 1 : 0;
+ return val | ((tot & 1) ? 0 : 0x80);
+}
+
+static void vivid_vbi_gen_set_time_of_day(u8 *packet)
+{
+ struct tm tm;
+ u8 checksum, i;
+
+ time64_to_tm(ktime_get_real_seconds(), 0, &tm);
+ packet[0] = calc_parity(0x07);
+ packet[1] = calc_parity(0x01);
+ packet[2] = calc_parity(0x40 | tm.tm_min);
+ packet[3] = calc_parity(0x40 | tm.tm_hour);
+ packet[4] = calc_parity(0x40 | tm.tm_mday);
+ if (tm.tm_mday == 1 && tm.tm_mon == 2 &&
+ sys_tz.tz_minuteswest > tm.tm_min + tm.tm_hour * 60)
+ packet[4] = calc_parity(0x60 | tm.tm_mday);
+ packet[5] = calc_parity(0x40 | (1 + tm.tm_mon));
+ packet[6] = calc_parity(0x40 | (1 + tm.tm_wday));
+ packet[7] = calc_parity(0x40 | ((tm.tm_year - 90) & 0x3f));
+ packet[8] = calc_parity(0x0f);
+ for (checksum = i = 0; i <= 8; i++)
+ checksum += packet[i] & 0x7f;
+ packet[9] = calc_parity(0x100 - checksum);
+ checksum = 0;
+ packet[10] = calc_parity(0x07);
+ packet[11] = calc_parity(0x04);
+ if (sys_tz.tz_minuteswest >= 0)
+ packet[12] = calc_parity(0x40 | ((sys_tz.tz_minuteswest / 60) & 0x1f));
+ else
+ packet[12] = calc_parity(0x40 | ((24 + sys_tz.tz_minuteswest / 60) & 0x1f));
+ packet[13] = calc_parity(0);
+ packet[14] = calc_parity(0x0f);
+ for (checksum = 0, i = 10; i <= 14; i++)
+ checksum += packet[i] & 0x7f;
+ packet[15] = calc_parity(0x100 - checksum);
+}
+
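+/* Teletext Hamming 8/4 code table, indexed by the nibble to encode */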
+static const u8 hamming[16] = {
+ 0x15, 0x02, 0x49, 0x5e, 0x64, 0x73, 0x38, 0x2f,
+ 0xd0, 0xc7, 0x8c, 0x9b, 0xa1, 0xb6, 0xfd, 0xea
+};
+
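+/*
+ * Fill a 42-byte Teletext B packet: two Hamming 8/4 coded address bytes
+ * followed by 40 payload bytes. For row 0 the payload starts with the
+ * Hamming coded page number and subcode.
+ */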
+static void vivid_vbi_gen_teletext(u8 *packet, unsigned line, unsigned frame)
+{
+ unsigned offset = 2;
+ unsigned i;
+
+ packet[0] = hamming[1 + ((line & 1) << 3)];
+ packet[1] = hamming[line >> 1];
+ memset(packet + 2, 0x20, 40);
+ if (line == 0) {
+ /* subcode */
+ packet[2] = hamming[frame % 10];
+ packet[3] = hamming[frame / 10];
+ packet[4] = hamming[0];
+ packet[5] = hamming[0];
+ packet[6] = hamming[0];
+ packet[7] = hamming[0];
+ packet[8] = hamming[0];
+ packet[9] = hamming[1];
+ offset = 10;
+ }
+ packet += offset;
+ memcpy(packet, "Page: 100 Row: 10", 17);
+ packet[7] = '0' + frame / 10;
+ packet[8] = '0' + frame % 10;
+ packet[15] = '0' + line / 10;
+ packet[16] = '0' + line % 10;
+ for (i = 0; i < 42 - offset; i++)
+ packet[i] = calc_parity(packet[i]);
+}
+
+void vivid_vbi_gen_sliced(struct vivid_vbi_gen_data *vbi,
+ bool is_60hz, unsigned seqnr)
+{
+ struct v4l2_sliced_vbi_data *data0 = vbi->data;
+ struct v4l2_sliced_vbi_data *data1 = vbi->data + 1;
+ unsigned frame = seqnr % 60;
+
+ memset(vbi->data, 0, sizeof(vbi->data));
+
+ if (!is_60hz) {
+ unsigned i;
+
+ for (i = 0; i <= 11; i++) {
+ data0->id = V4L2_SLICED_TELETEXT_B;
+ data0->line = 7 + i;
+ vivid_vbi_gen_teletext(data0->data, i, frame);
+ data0++;
+ }
+ data0->id = V4L2_SLICED_WSS_625;
+ data0->line = 23;
+ /* 4x3 video aspect ratio */
+ data0->data[0] = 0x08;
+ data0++;
+ for (i = 0; i <= 11; i++) {
+ data0->id = V4L2_SLICED_TELETEXT_B;
+ data0->field = 1;
+ data0->line = 7 + i;
+ vivid_vbi_gen_teletext(data0->data, 12 + i, frame);
+ data0++;
+ }
+ return;
+ }
+
+ data0->id = V4L2_SLICED_CAPTION_525;
+ data0->line = 21;
+ data1->id = V4L2_SLICED_CAPTION_525;
+ data1->field = 1;
+ data1->line = 21;
+
+ if (frame < 15) {
+ data0->data[0] = calc_parity(vivid_cc_sequence1[2 * frame]);
+ data0->data[1] = calc_parity(vivid_cc_sequence1[2 * frame + 1]);
+ } else if (frame >= 30 && frame < 45) {
+ frame -= 30;
+ data0->data[0] = calc_parity(vivid_cc_sequence2[2 * frame]);
+ data0->data[1] = calc_parity(vivid_cc_sequence2[2 * frame + 1]);
+ } else {
+ data0->data[0] = calc_parity(0);
+ data0->data[1] = calc_parity(0);
+ }
+
+ frame = seqnr % (30 * 60);
+ switch (frame) {
+ case 0:
+ vivid_vbi_gen_set_time_of_day(vbi->time_of_day_packet);
+ fallthrough;
+ case 1 ... 7:
+ data1->data[0] = vbi->time_of_day_packet[frame * 2];
+ data1->data[1] = vbi->time_of_day_packet[frame * 2 + 1];
+ break;
+ default:
+ data1->data[0] = calc_parity(0);
+ data1->data[1] = calc_parity(0);
+ break;
+ }
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-gen.h b/drivers/media/test-drivers/vivid/vivid-vbi-gen.h
new file mode 100644
index 000000000..2657a7f55
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-gen.h
@@ -0,0 +1,21 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vbi-gen.h - vbi generator support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VBI_GEN_H_
+#define _VIVID_VBI_GEN_H_
+
+struct vivid_vbi_gen_data {
+ struct v4l2_sliced_vbi_data data[25];
+ u8 time_of_day_packet[16];
+};
+
+void vivid_vbi_gen_sliced(struct vivid_vbi_gen_data *vbi,
+ bool is_60hz, unsigned seqnr);
+void vivid_vbi_gen_raw(const struct vivid_vbi_gen_data *vbi,
+ const struct v4l2_vbi_format *vbi_fmt, u8 *buf);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-out.c b/drivers/media/test-drivers/vivid/vivid-vbi-out.c
new file mode 100644
index 000000000..cd5647690
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-out.c
@@ -0,0 +1,250 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vbi-out.c - vbi output support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/videodev2.h>
+#include <media/v4l2-common.h>
+
+#include "vivid-core.h"
+#include "vivid-kthread-out.h"
+#include "vivid-vbi-out.h"
+#include "vivid-vbi-cap.h"
+
+static int vbi_out_queue_setup(struct vb2_queue *vq,
+ unsigned *nbuffers, unsigned *nplanes,
+ unsigned sizes[], struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ bool is_60hz = dev->std_out & V4L2_STD_525_60;
+ unsigned size = vq->type == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT ?
+ 36 * sizeof(struct v4l2_sliced_vbi_data) :
+ 1440 * 2 * (is_60hz ? 12 : 18);
+
+ if (!vivid_is_svid_out(dev))
+ return -EINVAL;
+
+ sizes[0] = size;
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = 1;
+ return 0;
+}
+
+static int vbi_out_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ bool is_60hz = dev->std_out & V4L2_STD_525_60;
+ unsigned size = vb->vb2_queue->type == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT ?
+ 36 * sizeof(struct v4l2_sliced_vbi_data) :
+ 1440 * 2 * (is_60hz ? 12 : 18);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ if (vb2_plane_size(vb, 0) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane (%lu < %u)\n",
+ __func__, vb2_plane_size(vb, 0), size);
+ return -EINVAL;
+ }
+ vb2_set_plane_payload(vb, 0, size);
+
+ return 0;
+}
+
+static void vbi_out_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->vbi_out_active);
+ spin_unlock(&dev->slock);
+}
+
+static int vbi_out_start_streaming(struct vb2_queue *vq, unsigned count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ dprintk(dev, 1, "%s\n", __func__);
+ dev->vbi_out_seq_count = 0;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_out(dev, &dev->vbi_out_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp, &dev->vbi_out_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void vbi_out_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_out(dev, &dev->vbi_out_streaming);
+ dev->vbi_out_have_wss = false;
+ dev->vbi_out_have_cc[0] = false;
+ dev->vbi_out_have_cc[1] = false;
+}
+
+static void vbi_out_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_vbi_out);
+}
+
+const struct vb2_ops vivid_vbi_out_qops = {
+ .queue_setup = vbi_out_queue_setup,
+ .buf_prepare = vbi_out_buf_prepare,
+ .buf_queue = vbi_out_buf_queue,
+ .start_streaming = vbi_out_start_streaming,
+ .stop_streaming = vbi_out_stop_streaming,
+ .buf_request_complete = vbi_out_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+int vidioc_g_fmt_vbi_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_vbi_format *vbi = &f->fmt.vbi;
+ bool is_60hz = dev->std_out & V4L2_STD_525_60;
+
+ if (!vivid_is_svid_out(dev) || !dev->has_raw_vbi_out)
+ return -EINVAL;
+
+ vbi->sampling_rate = 25000000;
+ vbi->offset = 24;
+ vbi->samples_per_line = 1440;
+ vbi->sample_format = V4L2_PIX_FMT_GREY;
+ vbi->start[0] = is_60hz ? V4L2_VBI_ITU_525_F1_START + 9 : V4L2_VBI_ITU_625_F1_START + 5;
+ vbi->start[1] = is_60hz ? V4L2_VBI_ITU_525_F2_START + 9 : V4L2_VBI_ITU_625_F2_START + 5;
+ vbi->count[0] = vbi->count[1] = is_60hz ? 12 : 18;
+ vbi->flags = dev->vbi_cap_interlaced ? V4L2_VBI_INTERLACED : 0;
+ vbi->reserved[0] = 0;
+ vbi->reserved[1] = 0;
+ return 0;
+}
+
+int vidioc_s_fmt_vbi_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ int ret = vidioc_g_fmt_vbi_out(file, priv, f);
+
+ if (ret)
+ return ret;
+ if (vb2_is_busy(&dev->vb_vbi_out_q))
+ return -EBUSY;
+ dev->stream_sliced_vbi_out = false;
+ dev->vbi_out_dev.queue->type = V4L2_BUF_TYPE_VBI_OUTPUT;
+ return 0;
+}
+
+int vidioc_g_fmt_sliced_vbi_out(struct file *file, void *fh, struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+
+ if (!vivid_is_svid_out(dev) || !dev->has_sliced_vbi_out)
+ return -EINVAL;
+
+ vivid_fill_service_lines(vbi, dev->service_set_out);
+ return 0;
+}
+
+int vidioc_try_fmt_sliced_vbi_out(struct file *file, void *fh, struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+ bool is_60hz = dev->std_out & V4L2_STD_525_60;
+ u32 service_set = vbi->service_set;
+
+ if (!vivid_is_svid_out(dev) || !dev->has_sliced_vbi_out)
+ return -EINVAL;
+
+ service_set &= is_60hz ? V4L2_SLICED_CAPTION_525 :
+ V4L2_SLICED_WSS_625 | V4L2_SLICED_TELETEXT_B;
+ vivid_fill_service_lines(vbi, service_set);
+ return 0;
+}
+
+int vidioc_s_fmt_sliced_vbi_out(struct file *file, void *fh,
+ struct v4l2_format *fmt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_sliced_vbi_format *vbi = &fmt->fmt.sliced;
+ int ret = vidioc_try_fmt_sliced_vbi_out(file, fh, fmt);
+
+ if (ret)
+ return ret;
+ if (vb2_is_busy(&dev->vb_vbi_out_q))
+ return -EBUSY;
+ dev->service_set_out = vbi->service_set;
+ dev->stream_sliced_vbi_out = true;
+ dev->vbi_out_dev.queue->type = V4L2_BUF_TYPE_SLICED_VBI_OUTPUT;
+ return 0;
+}
+
+void vivid_sliced_vbi_out_process(struct vivid_dev *dev,
+ struct vivid_buffer *buf)
+{
+ struct v4l2_sliced_vbi_data *vbi =
+ vb2_plane_vaddr(&buf->vb.vb2_buf, 0);
+ unsigned elems =
+ vb2_get_plane_payload(&buf->vb.vb2_buf, 0) / sizeof(*vbi);
+
+ dev->vbi_out_have_cc[0] = false;
+ dev->vbi_out_have_cc[1] = false;
+ dev->vbi_out_have_wss = false;
+ while (elems--) {
+ switch (vbi->id) {
+ case V4L2_SLICED_CAPTION_525:
+ if ((dev->std_out & V4L2_STD_525_60) && vbi->line == 21) {
+ dev->vbi_out_have_cc[!!vbi->field] = true;
+ dev->vbi_out_cc[!!vbi->field][0] = vbi->data[0];
+ dev->vbi_out_cc[!!vbi->field][1] = vbi->data[1];
+ }
+ break;
+ case V4L2_SLICED_WSS_625:
+ if ((dev->std_out & V4L2_STD_625_50) &&
+ vbi->field == 0 && vbi->line == 23) {
+ dev->vbi_out_have_wss = true;
+ dev->vbi_out_wss[0] = vbi->data[0];
+ dev->vbi_out_wss[1] = vbi->data[1];
+ }
+ break;
+ }
+ vbi++;
+ }
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-vbi-out.h b/drivers/media/test-drivers/vivid/vivid-vbi-out.h
new file mode 100644
index 000000000..76584940c
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vbi-out.h
@@ -0,0 +1,22 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vbi-out.h - vbi output support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VBI_OUT_H_
+#define _VIVID_VBI_OUT_H_
+
+void vivid_sliced_vbi_out_process(struct vivid_dev *dev, struct vivid_buffer *buf);
+int vidioc_g_fmt_vbi_out(struct file *file, void *priv,
+ struct v4l2_format *f);
+int vidioc_s_fmt_vbi_out(struct file *file, void *priv,
+ struct v4l2_format *f);
+int vidioc_g_fmt_sliced_vbi_out(struct file *file, void *fh, struct v4l2_format *fmt);
+int vidioc_try_fmt_sliced_vbi_out(struct file *file, void *fh, struct v4l2_format *fmt);
+int vidioc_s_fmt_sliced_vbi_out(struct file *file, void *fh, struct v4l2_format *fmt);
+
+extern const struct vb2_ops vivid_vbi_out_qops;
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-cap.c b/drivers/media/test-drivers/vivid/vivid-vid-cap.c
new file mode 100644
index 000000000..c0999581c
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-cap.c
@@ -0,0 +1,1968 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vid-cap.c - video capture support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/sched.h>
+#include <linux/vmalloc.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+#include <media/v4l2-rect.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+#include "vivid-kthread-cap.h"
+#include "vivid-vid-cap.h"
+
+static const struct vivid_fmt formats_ovl[] = {
+ {
+ .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XRGB555, /* gggbbbbb arrrrrgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ARGB555, /* gggbbbbb arrrrrgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+};
+
+/* The number of discrete webcam framesizes */
+#define VIVID_WEBCAM_SIZES 6
+/* The number of discrete webcam frameintervals */
+#define VIVID_WEBCAM_IVALS (VIVID_WEBCAM_SIZES * 2)
+
+/* Sizes must be in increasing order */
+static const struct v4l2_frmsize_discrete webcam_sizes[VIVID_WEBCAM_SIZES] = {
+ { 320, 180 },
+ { 640, 360 },
+ { 640, 480 },
+ { 1280, 720 },
+ { 1920, 1080 },
+ { 3840, 2160 },
+};
+
+/*
+ * Intervals are listed from the longest to the shortest frame interval
+ * (i.e. in increasing frame rate order) and there must be twice as many
+ * elements in this array as there are in webcam_sizes.
+ */
+static const struct v4l2_fract webcam_intervals[VIVID_WEBCAM_IVALS] = {
+ { 1, 1 },
+ { 1, 2 },
+ { 1, 4 },
+ { 1, 5 },
+ { 1, 10 },
+ { 2, 25 },
+ { 1, 15 },
+ { 1, 25 },
+ { 1, 30 },
+ { 1, 40 },
+ { 1, 50 },
+ { 1, 60 },
+};
+
+static int vid_cap_queue_setup(struct vb2_queue *vq,
+ unsigned *nbuffers, unsigned *nplanes,
+ unsigned sizes[], struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ unsigned buffers = tpg_g_buffers(&dev->tpg);
+ unsigned h = dev->fmt_cap_rect.height;
+ unsigned p;
+
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE) {
+ /*
+ * You cannot use read() with FIELD_ALTERNATE since the field
+ * information (TOP/BOTTOM) cannot be passed back to the user.
+ */
+ if (vb2_fileio_is_active(vq))
+ return -EINVAL;
+ }
+
+ if (dev->queue_setup_error) {
+ /*
+ * Error injection: test what happens if queue_setup() returns
+ * an error.
+ */
+ dev->queue_setup_error = false;
+ return -EINVAL;
+ }
+ if (*nplanes) {
+ /*
+ * Check if the number of requested planes match
+		 * Check if the number of requested planes matches
+		 * the number of buffers in the current format. You can't mix them.
+ if (*nplanes != buffers)
+ return -EINVAL;
+ for (p = 0; p < buffers; p++) {
+ if (sizes[p] < tpg_g_line_width(&dev->tpg, p) * h +
+ dev->fmt_cap->data_offset[p])
+ return -EINVAL;
+ }
+ } else {
+ for (p = 0; p < buffers; p++)
+ sizes[p] = (tpg_g_line_width(&dev->tpg, p) * h) /
+ dev->fmt_cap->vdownsampling[p] +
+ dev->fmt_cap->data_offset[p];
+ }
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = buffers;
+
+ dprintk(dev, 1, "%s: count=%d\n", __func__, *nbuffers);
+ for (p = 0; p < buffers; p++)
+ dprintk(dev, 1, "%s: size[%u]=%u\n", __func__, p, sizes[p]);
+
+ return 0;
+}
+
+static int vid_cap_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ unsigned long size;
+ unsigned buffers = tpg_g_buffers(&dev->tpg);
+ unsigned p;
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (WARN_ON(NULL == dev->fmt_cap))
+ return -EINVAL;
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+ for (p = 0; p < buffers; p++) {
+ size = (tpg_g_line_width(&dev->tpg, p) *
+ dev->fmt_cap_rect.height) /
+ dev->fmt_cap->vdownsampling[p] +
+ dev->fmt_cap->data_offset[p];
+
+ if (vb2_plane_size(vb, p) < size) {
+ dprintk(dev, 1, "%s data will not fit into plane %u (%lu < %lu)\n",
+ __func__, p, vb2_plane_size(vb, p), size);
+ return -EINVAL;
+ }
+
+ vb2_set_plane_payload(vb, p, size);
+ vb->planes[p].data_offset = dev->fmt_cap->data_offset[p];
+ }
+
+ return 0;
+}
+
+static void vid_cap_buf_finish(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct v4l2_timecode *tc = &vbuf->timecode;
+ unsigned fps = 25;
+ unsigned seq = vbuf->sequence;
+
+ if (!vivid_is_sdtv_cap(dev))
+ return;
+
+ /*
+ * Set the timecode. Rarely used, so it is interesting to
+ * test this.
+ */
+ vbuf->flags |= V4L2_BUF_FLAG_TIMECODE;
+ if (dev->std_cap[dev->input] & V4L2_STD_525_60)
+ fps = 30;
+ tc->type = (fps == 30) ? V4L2_TC_TYPE_30FPS : V4L2_TC_TYPE_25FPS;
+ tc->flags = 0;
+ tc->frames = seq % fps;
+ tc->seconds = (seq / fps) % 60;
+ tc->minutes = (seq / (60 * fps)) % 60;
+ tc->hours = (seq / (60 * 60 * fps)) % 24;
+}
+
+static void vid_cap_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->vid_cap_active);
+ spin_unlock(&dev->slock);
+}
+
+static int vid_cap_start_streaming(struct vb2_queue *vq, unsigned count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ unsigned i;
+ int err;
+
+ if (vb2_is_streaming(&dev->vb_vid_out_q))
+ dev->can_loop_video = vivid_vid_can_loop(dev);
+
+ dev->vid_cap_seq_count = 0;
+ dprintk(dev, 1, "%s\n", __func__);
+ for (i = 0; i < VIDEO_MAX_FRAME; i++)
+ dev->must_blank[i] = tpg_g_perc_fill(&dev->tpg) < 100;
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_cap(dev, &dev->vid_cap_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp, &dev->vid_cap_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void vid_cap_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_cap(dev, &dev->vid_cap_streaming);
+ dev->can_loop_video = false;
+}
+
+static void vid_cap_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_vid_cap);
+}
+
+const struct vb2_ops vivid_vid_cap_qops = {
+ .queue_setup = vid_cap_queue_setup,
+ .buf_prepare = vid_cap_buf_prepare,
+ .buf_finish = vid_cap_buf_finish,
+ .buf_queue = vid_cap_buf_queue,
+ .start_streaming = vid_cap_start_streaming,
+ .stop_streaming = vid_cap_stop_streaming,
+ .buf_request_complete = vid_cap_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+/*
+ * Determine the 'picture' quality based on the current TV frequency: either
+ * COLOR for a good 'signal', GRAY (grayscale picture) for a slightly off
+ * signal or NOISE for no signal.
+ */
+void vivid_update_quality(struct vivid_dev *dev)
+{
+ unsigned freq_modulus;
+
+ if (dev->loop_video && (vivid_is_svid_cap(dev) || vivid_is_hdmi_cap(dev))) {
+ /*
+ * The 'noise' will only be replaced by the actual video
+ * if the output video matches the input video settings.
+ */
+ tpg_s_quality(&dev->tpg, TPG_QUAL_NOISE, 0);
+ return;
+ }
+ if (vivid_is_hdmi_cap(dev) &&
+ VIVID_INVALID_SIGNAL(dev->dv_timings_signal_mode[dev->input])) {
+ tpg_s_quality(&dev->tpg, TPG_QUAL_NOISE, 0);
+ return;
+ }
+ if (vivid_is_sdtv_cap(dev) &&
+ VIVID_INVALID_SIGNAL(dev->std_signal_mode[dev->input])) {
+ tpg_s_quality(&dev->tpg, TPG_QUAL_NOISE, 0);
+ return;
+ }
+ if (!vivid_is_tv_cap(dev)) {
+ tpg_s_quality(&dev->tpg, TPG_QUAL_COLOR, 0);
+ return;
+ }
+
+ /*
+ * There is a fake channel every 6 MHz at 49.25, 55.25, etc.
+ * From +/- 0.25 MHz around the channel there is color, and from
+ * +/- 1 MHz there is grayscale (chroma is lost).
+ * Everywhere else it is just noise.
+ */
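+	/*
+	 * Example: tuned to 49.25 MHz (tv_freq == 49.25 * 16 == 788) this gives
+	 * freq_modulus == (788 - 676) % 96 == 16, which falls inside the 12..20
+	 * window below, so the picture is shown in color.
+	 */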
+ freq_modulus = (dev->tv_freq - 676 /* (43.25-1) * 16 */) % (6 * 16);
+ if (freq_modulus > 2 * 16) {
+ tpg_s_quality(&dev->tpg, TPG_QUAL_NOISE,
+ next_pseudo_random32(dev->tv_freq ^ 0x55) & 0x3f);
+ return;
+ }
+ if (freq_modulus < 12 /*0.75 * 16*/ || freq_modulus > 20 /*1.25 * 16*/)
+ tpg_s_quality(&dev->tpg, TPG_QUAL_GRAY, 0);
+ else
+ tpg_s_quality(&dev->tpg, TPG_QUAL_COLOR, 0);
+}
+
+/*
+ * Get the current picture quality and the associated afc value.
+ */
+static enum tpg_quality vivid_get_quality(struct vivid_dev *dev, s32 *afc)
+{
+ unsigned freq_modulus;
+
+ if (afc)
+ *afc = 0;
+ if (tpg_g_quality(&dev->tpg) == TPG_QUAL_COLOR ||
+ tpg_g_quality(&dev->tpg) == TPG_QUAL_NOISE)
+ return tpg_g_quality(&dev->tpg);
+
+ /*
+ * There is a fake channel every 6 MHz at 49.25, 55.25, etc.
+ * From +/- 0.25 MHz around the channel there is color, and from
+ * +/- 1 MHz there is grayscale (chroma is lost).
+ * Everywhere else it is just gray.
+ */
+ freq_modulus = (dev->tv_freq - 676 /* (43.25-1) * 16 */) % (6 * 16);
+ if (afc)
+ *afc = freq_modulus - 1 * 16;
+ return TPG_QUAL_GRAY;
+}
+
+enum tpg_video_aspect vivid_get_video_aspect(const struct vivid_dev *dev)
+{
+ if (vivid_is_sdtv_cap(dev))
+ return dev->std_aspect_ratio[dev->input];
+
+ if (vivid_is_hdmi_cap(dev))
+ return dev->dv_timings_aspect_ratio[dev->input];
+
+ return TPG_VIDEO_ASPECT_IMAGE;
+}
+
+static enum tpg_pixel_aspect vivid_get_pixel_aspect(const struct vivid_dev *dev)
+{
+ if (vivid_is_sdtv_cap(dev))
+ return (dev->std_cap[dev->input] & V4L2_STD_525_60) ?
+ TPG_PIXEL_ASPECT_NTSC : TPG_PIXEL_ASPECT_PAL;
+
+ if (vivid_is_hdmi_cap(dev) &&
+ dev->src_rect.width == 720 && dev->src_rect.height <= 576)
+ return dev->src_rect.height == 480 ?
+ TPG_PIXEL_ASPECT_NTSC : TPG_PIXEL_ASPECT_PAL;
+
+ return TPG_PIXEL_ASPECT_SQUARE;
+}
+
+/*
+ * Called whenever the format has to be reset which can occur when
+ * changing inputs, standard, timings, etc.
+ */
+void vivid_update_format_cap(struct vivid_dev *dev, bool keep_controls)
+{
+ struct v4l2_bt_timings *bt = &dev->dv_timings_cap[dev->input].bt;
+ u32 dims[V4L2_CTRL_MAX_DIMS] = {};
+ unsigned size;
+ u64 pixelclock;
+
+ switch (dev->input_type[dev->input]) {
+ case WEBCAM:
+ default:
+ dev->src_rect.width = webcam_sizes[dev->webcam_size_idx].width;
+ dev->src_rect.height = webcam_sizes[dev->webcam_size_idx].height;
+ dev->timeperframe_vid_cap = webcam_intervals[dev->webcam_ival_idx];
+ dev->field_cap = V4L2_FIELD_NONE;
+ tpg_s_rgb_range(&dev->tpg, V4L2_DV_RGB_RANGE_AUTO);
+ break;
+ case TV:
+ case SVID:
+ dev->field_cap = dev->tv_field_cap;
+ dev->src_rect.width = 720;
+ if (dev->std_cap[dev->input] & V4L2_STD_525_60) {
+ dev->src_rect.height = 480;
+ dev->timeperframe_vid_cap = (struct v4l2_fract) { 1001, 30000 };
+ dev->service_set_cap = V4L2_SLICED_CAPTION_525;
+ } else {
+ dev->src_rect.height = 576;
+ dev->timeperframe_vid_cap = (struct v4l2_fract) { 1000, 25000 };
+ dev->service_set_cap = V4L2_SLICED_WSS_625 | V4L2_SLICED_TELETEXT_B;
+ }
+ tpg_s_rgb_range(&dev->tpg, V4L2_DV_RGB_RANGE_AUTO);
+ break;
+ case HDMI:
+ dev->src_rect.width = bt->width;
+ dev->src_rect.height = bt->height;
+ size = V4L2_DV_BT_FRAME_WIDTH(bt) * V4L2_DV_BT_FRAME_HEIGHT(bt);
+ if (dev->reduced_fps && can_reduce_fps(bt)) {
+ pixelclock = div_u64(bt->pixelclock * 1000, 1001);
+ bt->flags |= V4L2_DV_FL_REDUCED_FPS;
+ } else {
+ pixelclock = bt->pixelclock;
+ bt->flags &= ~V4L2_DV_FL_REDUCED_FPS;
+ }
+ dev->timeperframe_vid_cap = (struct v4l2_fract) {
+ size / 100, (u32)pixelclock / 100
+ };
+ if (bt->interlaced)
+ dev->field_cap = V4L2_FIELD_ALTERNATE;
+ else
+ dev->field_cap = V4L2_FIELD_NONE;
+
+ /*
+ * We can be called from within s_ctrl, in that case we can't
+ * set/get controls. Luckily we don't need to in that case.
+ */
+ if (keep_controls || !dev->colorspace)
+ break;
+ if (bt->flags & V4L2_DV_FL_IS_CE_VIDEO) {
+ if (bt->width == 720 && bt->height <= 576)
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_170M);
+ else
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_709);
+ v4l2_ctrl_s_ctrl(dev->real_rgb_range_cap, 1);
+ } else {
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_SRGB);
+ v4l2_ctrl_s_ctrl(dev->real_rgb_range_cap, 0);
+ }
+ tpg_s_rgb_range(&dev->tpg, v4l2_ctrl_g_ctrl(dev->rgb_range_cap));
+ break;
+ }
+ vfree(dev->bitmap_cap);
+ dev->bitmap_cap = NULL;
+ vivid_update_quality(dev);
+ tpg_reset_source(&dev->tpg, dev->src_rect.width, dev->src_rect.height, dev->field_cap);
+ dev->crop_cap = dev->src_rect;
+ dev->crop_bounds_cap = dev->src_rect;
+ if (dev->bitmap_cap &&
+ (dev->compose_cap.width != dev->crop_cap.width ||
+ dev->compose_cap.height != dev->crop_cap.height)) {
+ vfree(dev->bitmap_cap);
+ dev->bitmap_cap = NULL;
+ }
+ dev->compose_cap = dev->crop_cap;
+ if (V4L2_FIELD_HAS_T_OR_B(dev->field_cap))
+ dev->compose_cap.height /= 2;
+ dev->fmt_cap_rect = dev->compose_cap;
+ tpg_s_video_aspect(&dev->tpg, vivid_get_video_aspect(dev));
+ tpg_s_pixel_aspect(&dev->tpg, vivid_get_pixel_aspect(dev));
+ tpg_update_mv_step(&dev->tpg);
+
+ /*
+ * We can be called from within s_ctrl, in that case we can't
+ * modify controls. Luckily we don't need to in that case.
+ */
+ if (keep_controls)
+ return;
+
+ dims[0] = roundup(dev->src_rect.width, PIXEL_ARRAY_DIV);
+ dims[1] = roundup(dev->src_rect.height, PIXEL_ARRAY_DIV);
+ v4l2_ctrl_modify_dimensions(dev->pixel_array, dims);
+}
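
To make the HDMI branch above concrete, this stand-alone sketch (using assumed CEA-861 1080p60 numbers, not part of the driver) reproduces the timeperframe computation: total frame size and pixel clock, both scaled down by 100, give the frame period.

#include <stdio.h>

int main(void)
{
	/* 1920x1080p60 (CEA-861): total frame 2200 x 1125, pixel clock 148.5 MHz */
	unsigned int total_w = 1920 + 88 + 44 + 148;	/* width + h front porch/sync/back porch */
	unsigned int total_h = 1080 + 4 + 5 + 36;	/* height + v front porch/sync/back porch */
	unsigned long long pixelclock = 148500000ULL;
	unsigned int size = total_w * total_h;

	/* timeperframe = (size / 100) / (pixelclock / 100) seconds per frame */
	printf("numerator=%u denominator=%llu -> %.3f fps\n",
	       size / 100, pixelclock / 100,
	       (double)(pixelclock / 100) / (size / 100));
	return 0;
}

For these timings the fraction reduces to exactly 1/60 of a second per frame.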
+
+/* Map the field to something that is valid for the current input */
+static enum v4l2_field vivid_field_cap(struct vivid_dev *dev, enum v4l2_field field)
+{
+ if (vivid_is_sdtv_cap(dev)) {
+ switch (field) {
+ case V4L2_FIELD_INTERLACED_TB:
+ case V4L2_FIELD_INTERLACED_BT:
+ case V4L2_FIELD_SEQ_TB:
+ case V4L2_FIELD_SEQ_BT:
+ case V4L2_FIELD_TOP:
+ case V4L2_FIELD_BOTTOM:
+ case V4L2_FIELD_ALTERNATE:
+ return field;
+ case V4L2_FIELD_INTERLACED:
+ default:
+ return V4L2_FIELD_INTERLACED;
+ }
+ }
+ if (vivid_is_hdmi_cap(dev))
+ return dev->dv_timings_cap[dev->input].bt.interlaced ?
+ V4L2_FIELD_ALTERNATE : V4L2_FIELD_NONE;
+ return V4L2_FIELD_NONE;
+}
+
+static unsigned vivid_colorspace_cap(struct vivid_dev *dev)
+{
+ if (!dev->loop_video || vivid_is_webcam(dev) || vivid_is_tv_cap(dev))
+ return tpg_g_colorspace(&dev->tpg);
+ return dev->colorspace_out;
+}
+
+static unsigned vivid_xfer_func_cap(struct vivid_dev *dev)
+{
+ if (!dev->loop_video || vivid_is_webcam(dev) || vivid_is_tv_cap(dev))
+ return tpg_g_xfer_func(&dev->tpg);
+ return dev->xfer_func_out;
+}
+
+static unsigned vivid_ycbcr_enc_cap(struct vivid_dev *dev)
+{
+ if (!dev->loop_video || vivid_is_webcam(dev) || vivid_is_tv_cap(dev))
+ return tpg_g_ycbcr_enc(&dev->tpg);
+ return dev->ycbcr_enc_out;
+}
+
+static unsigned int vivid_hsv_enc_cap(struct vivid_dev *dev)
+{
+ if (!dev->loop_video || vivid_is_webcam(dev) || vivid_is_tv_cap(dev))
+ return tpg_g_hsv_enc(&dev->tpg);
+ return dev->hsv_enc_out;
+}
+
+static unsigned vivid_quantization_cap(struct vivid_dev *dev)
+{
+ if (!dev->loop_video || vivid_is_webcam(dev) || vivid_is_tv_cap(dev))
+ return tpg_g_quantization(&dev->tpg);
+ return dev->quantization_out;
+}
+
+int vivid_g_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ unsigned p;
+
+ mp->width = dev->fmt_cap_rect.width;
+ mp->height = dev->fmt_cap_rect.height;
+ mp->field = dev->field_cap;
+ mp->pixelformat = dev->fmt_cap->fourcc;
+ mp->colorspace = vivid_colorspace_cap(dev);
+ mp->xfer_func = vivid_xfer_func_cap(dev);
+ if (dev->fmt_cap->color_enc == TGP_COLOR_ENC_HSV)
+ mp->hsv_enc = vivid_hsv_enc_cap(dev);
+ else
+ mp->ycbcr_enc = vivid_ycbcr_enc_cap(dev);
+ mp->quantization = vivid_quantization_cap(dev);
+ mp->num_planes = dev->fmt_cap->buffers;
+ for (p = 0; p < mp->num_planes; p++) {
+ mp->plane_fmt[p].bytesperline = tpg_g_bytesperline(&dev->tpg, p);
+ mp->plane_fmt[p].sizeimage =
+ (tpg_g_line_width(&dev->tpg, p) * mp->height) /
+ dev->fmt_cap->vdownsampling[p] +
+ dev->fmt_cap->data_offset[p];
+ }
+ return 0;
+}
+
+int vivid_try_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ struct v4l2_plane_pix_format *pfmt = mp->plane_fmt;
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct vivid_fmt *fmt;
+ unsigned bytesperline, max_bpl;
+ unsigned factor = 1;
+ unsigned w, h;
+ unsigned p;
+ bool user_set_csc = !!(mp->flags & V4L2_PIX_FMT_FLAG_SET_CSC);
+
+ fmt = vivid_get_format(dev, mp->pixelformat);
+ if (!fmt) {
+ dprintk(dev, 1, "Fourcc format (0x%08x) unknown.\n",
+ mp->pixelformat);
+ mp->pixelformat = V4L2_PIX_FMT_YUYV;
+ fmt = vivid_get_format(dev, mp->pixelformat);
+ }
+
+ mp->field = vivid_field_cap(dev, mp->field);
+ if (vivid_is_webcam(dev)) {
+ const struct v4l2_frmsize_discrete *sz =
+ v4l2_find_nearest_size(webcam_sizes,
+ VIVID_WEBCAM_SIZES, width,
+ height, mp->width, mp->height);
+
+ w = sz->width;
+ h = sz->height;
+ } else if (vivid_is_sdtv_cap(dev)) {
+ w = 720;
+ h = (dev->std_cap[dev->input] & V4L2_STD_525_60) ? 480 : 576;
+ } else {
+ w = dev->src_rect.width;
+ h = dev->src_rect.height;
+ }
+ if (V4L2_FIELD_HAS_T_OR_B(mp->field))
+ factor = 2;
+ if (vivid_is_webcam(dev) ||
+ (!dev->has_scaler_cap && !dev->has_crop_cap && !dev->has_compose_cap)) {
+ mp->width = w;
+ mp->height = h / factor;
+ } else {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height * factor };
+
+ v4l2_rect_set_min_size(&r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&r, &vivid_max_rect);
+ if (dev->has_scaler_cap && !dev->has_compose_cap) {
+ struct v4l2_rect max_r = { 0, 0, MAX_ZOOM * w, MAX_ZOOM * h };
+
+ v4l2_rect_set_max_size(&r, &max_r);
+ } else if (!dev->has_scaler_cap && dev->has_crop_cap && !dev->has_compose_cap) {
+ v4l2_rect_set_max_size(&r, &dev->src_rect);
+ } else if (!dev->has_scaler_cap && !dev->has_crop_cap) {
+ v4l2_rect_set_min_size(&r, &dev->src_rect);
+ }
+ mp->width = r.width;
+ mp->height = r.height / factor;
+ }
+
+ /* This driver supports custom bytesperline values */
+
+ mp->num_planes = fmt->buffers;
+ for (p = 0; p < fmt->buffers; p++) {
+ /* Calculate the minimum supported bytesperline value */
+ bytesperline = (mp->width * fmt->bit_depth[p]) >> 3;
+ /* Calculate the maximum supported bytesperline value */
+ max_bpl = (MAX_ZOOM * MAX_WIDTH * fmt->bit_depth[p]) >> 3;
+
+ if (pfmt[p].bytesperline > max_bpl)
+ pfmt[p].bytesperline = max_bpl;
+ if (pfmt[p].bytesperline < bytesperline)
+ pfmt[p].bytesperline = bytesperline;
+
+ pfmt[p].sizeimage = (pfmt[p].bytesperline * mp->height) /
+ fmt->vdownsampling[p] + fmt->data_offset[p];
+
+ memset(pfmt[p].reserved, 0, sizeof(pfmt[p].reserved));
+ }
+ for (p = fmt->buffers; p < fmt->planes; p++)
+ pfmt[0].sizeimage += (pfmt[0].bytesperline * mp->height *
+ (fmt->bit_depth[p] / fmt->vdownsampling[p])) /
+ (fmt->bit_depth[0] / fmt->vdownsampling[0]);
+
+ if (!user_set_csc || !v4l2_is_colorspace_valid(mp->colorspace))
+ mp->colorspace = vivid_colorspace_cap(dev);
+
+ if (!user_set_csc || !v4l2_is_xfer_func_valid(mp->xfer_func))
+ mp->xfer_func = vivid_xfer_func_cap(dev);
+
+ if (fmt->color_enc == TGP_COLOR_ENC_HSV) {
+ if (!user_set_csc || !v4l2_is_hsv_enc_valid(mp->hsv_enc))
+ mp->hsv_enc = vivid_hsv_enc_cap(dev);
+ } else if (fmt->color_enc == TGP_COLOR_ENC_YCBCR) {
+ if (!user_set_csc || !v4l2_is_ycbcr_enc_valid(mp->ycbcr_enc))
+ mp->ycbcr_enc = vivid_ycbcr_enc_cap(dev);
+ } else {
+ mp->ycbcr_enc = vivid_ycbcr_enc_cap(dev);
+ }
+
+ if (fmt->color_enc == TGP_COLOR_ENC_YCBCR ||
+ fmt->color_enc == TGP_COLOR_ENC_RGB) {
+ if (!user_set_csc || !v4l2_is_quant_valid(mp->quantization))
+ mp->quantization = vivid_quantization_cap(dev);
+ } else {
+ mp->quantization = vivid_quantization_cap(dev);
+ }
+
+ memset(mp->reserved, 0, sizeof(mp->reserved));
+ return 0;
+}
+
+int vivid_s_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rect *crop = &dev->crop_cap;
+ struct v4l2_rect *compose = &dev->compose_cap;
+ struct vb2_queue *q = &dev->vb_vid_cap_q;
+ int ret = vivid_try_fmt_vid_cap(file, priv, f);
+ unsigned factor = 1;
+ unsigned p;
+ unsigned i;
+
+ if (ret < 0)
+ return ret;
+
+ if (vb2_is_busy(q)) {
+ dprintk(dev, 1, "%s device busy\n", __func__);
+ return -EBUSY;
+ }
+
+ if (dev->overlay_cap_owner && dev->fb_cap.fmt.pixelformat != mp->pixelformat) {
+ dprintk(dev, 1, "overlay is active, can't change pixelformat\n");
+ return -EBUSY;
+ }
+
+ dev->fmt_cap = vivid_get_format(dev, mp->pixelformat);
+ if (V4L2_FIELD_HAS_T_OR_B(mp->field))
+ factor = 2;
+
+ /* Note: the webcam input doesn't support scaling, cropping or composing */
+
+ if (!vivid_is_webcam(dev) &&
+ (dev->has_scaler_cap || dev->has_crop_cap || dev->has_compose_cap)) {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height };
+
+ if (dev->has_scaler_cap) {
+ if (dev->has_compose_cap)
+ v4l2_rect_map_inside(compose, &r);
+ else
+ *compose = r;
+ if (dev->has_crop_cap && !dev->has_compose_cap) {
+ struct v4l2_rect min_r = {
+ 0, 0,
+ r.width / MAX_ZOOM,
+ factor * r.height / MAX_ZOOM
+ };
+ struct v4l2_rect max_r = {
+ 0, 0,
+ r.width * MAX_ZOOM,
+ factor * r.height * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(crop, &min_r);
+ v4l2_rect_set_max_size(crop, &max_r);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ } else if (dev->has_crop_cap) {
+ struct v4l2_rect min_r = {
+ 0, 0,
+ compose->width / MAX_ZOOM,
+ factor * compose->height / MAX_ZOOM
+ };
+ struct v4l2_rect max_r = {
+ 0, 0,
+ compose->width * MAX_ZOOM,
+ factor * compose->height * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(crop, &min_r);
+ v4l2_rect_set_max_size(crop, &max_r);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ }
+ } else if (dev->has_crop_cap && !dev->has_compose_cap) {
+ r.height *= factor;
+ v4l2_rect_set_size_to(crop, &r);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ r = *crop;
+ r.height /= factor;
+ v4l2_rect_set_size_to(compose, &r);
+ } else if (!dev->has_crop_cap) {
+ v4l2_rect_map_inside(compose, &r);
+ } else {
+ r.height *= factor;
+ v4l2_rect_set_max_size(crop, &r);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ compose->top *= factor;
+ compose->height *= factor;
+ v4l2_rect_set_size_to(compose, crop);
+ v4l2_rect_map_inside(compose, &r);
+ compose->top /= factor;
+ compose->height /= factor;
+ }
+ } else if (vivid_is_webcam(dev)) {
+ /* Guaranteed to be a match */
+ for (i = 0; i < ARRAY_SIZE(webcam_sizes); i++)
+ if (webcam_sizes[i].width == mp->width &&
+ webcam_sizes[i].height == mp->height)
+ break;
+ dev->webcam_size_idx = i;
+ if (dev->webcam_ival_idx >= 2 * (VIVID_WEBCAM_SIZES - i))
+ dev->webcam_ival_idx = 2 * (VIVID_WEBCAM_SIZES - i) - 1;
+ vivid_update_format_cap(dev, false);
+ } else {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height };
+
+ v4l2_rect_set_size_to(compose, &r);
+ r.height *= factor;
+ v4l2_rect_set_size_to(crop, &r);
+ }
+
+ dev->fmt_cap_rect.width = mp->width;
+ dev->fmt_cap_rect.height = mp->height;
+ tpg_s_buf_height(&dev->tpg, mp->height);
+ tpg_s_fourcc(&dev->tpg, dev->fmt_cap->fourcc);
+ for (p = 0; p < tpg_g_buffers(&dev->tpg); p++)
+ tpg_s_bytesperline(&dev->tpg, p, mp->plane_fmt[p].bytesperline);
+ dev->field_cap = mp->field;
+ if (dev->field_cap == V4L2_FIELD_ALTERNATE)
+ tpg_s_field(&dev->tpg, V4L2_FIELD_TOP, true);
+ else
+ tpg_s_field(&dev->tpg, dev->field_cap, false);
+ tpg_s_crop_compose(&dev->tpg, &dev->crop_cap, &dev->compose_cap);
+ if (vivid_is_sdtv_cap(dev))
+ dev->tv_field_cap = mp->field;
+ tpg_update_mv_step(&dev->tpg);
+ dev->tpg.colorspace = mp->colorspace;
+ dev->tpg.xfer_func = mp->xfer_func;
+ if (dev->fmt_cap->color_enc == TGP_COLOR_ENC_YCBCR)
+ dev->tpg.ycbcr_enc = mp->ycbcr_enc;
+ else
+ dev->tpg.hsv_enc = mp->hsv_enc;
+ dev->tpg.quantization = mp->quantization;
+
+ return 0;
+}
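
For illustration, a minimal user-space sketch of driving this handler through VIDIOC_S_FMT (assuming a single-planar vivid instance at the hypothetical path /dev/video0; error handling trimmed):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_format fmt;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 1280;
	fmt.fmt.pix.height = 720;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	fmt.fmt.pix.field = V4L2_FIELD_ANY;
	if (ioctl(fd, VIDIOC_S_FMT, &fmt) == 0)
		/* the driver may adjust the size depending on input/scaler/crop/compose */
		printf("got %ux%u, %u bytes/line, %u bytes/image\n",
		       fmt.fmt.pix.width, fmt.fmt.pix.height,
		       fmt.fmt.pix.bytesperline, fmt.fmt.pix.sizeimage);
	close(fd);
	return 0;
}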
+
+int vidioc_g_fmt_vid_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_g_fmt_vid_cap(file, priv, f);
+}
+
+int vidioc_try_fmt_vid_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_try_fmt_vid_cap(file, priv, f);
+}
+
+int vidioc_s_fmt_vid_cap_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_s_fmt_vid_cap(file, priv, f);
+}
+
+int vidioc_g_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_g_fmt_vid_cap);
+}
+
+int vidioc_try_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_try_fmt_vid_cap);
+}
+
+int vidioc_s_fmt_vid_cap(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_s_fmt_vid_cap);
+}
+
+int vivid_vid_cap_g_selection(struct file *file, void *priv,
+ struct v4l2_selection *sel)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->has_crop_cap && !dev->has_compose_cap)
+ return -ENOTTY;
+ if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ return -EINVAL;
+ if (vivid_is_webcam(dev))
+ return -ENODATA;
+
+ sel->r.left = sel->r.top = 0;
+ switch (sel->target) {
+ case V4L2_SEL_TGT_CROP:
+ if (!dev->has_crop_cap)
+ return -EINVAL;
+ sel->r = dev->crop_cap;
+ break;
+ case V4L2_SEL_TGT_CROP_DEFAULT:
+ case V4L2_SEL_TGT_CROP_BOUNDS:
+ if (!dev->has_crop_cap)
+ return -EINVAL;
+ sel->r = dev->src_rect;
+ break;
+ case V4L2_SEL_TGT_COMPOSE_BOUNDS:
+ if (!dev->has_compose_cap)
+ return -EINVAL;
+ sel->r = vivid_max_rect;
+ break;
+ case V4L2_SEL_TGT_COMPOSE:
+ if (!dev->has_compose_cap)
+ return -EINVAL;
+ sel->r = dev->compose_cap;
+ break;
+ case V4L2_SEL_TGT_COMPOSE_DEFAULT:
+ if (!dev->has_compose_cap)
+ return -EINVAL;
+ sel->r = dev->fmt_cap_rect;
+ break;
+ default:
+ return -EINVAL;
+ }
+ return 0;
+}
+
+int vivid_vid_cap_s_selection(struct file *file, void *fh, struct v4l2_selection *s)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rect *crop = &dev->crop_cap;
+ struct v4l2_rect *compose = &dev->compose_cap;
+ unsigned orig_compose_w = compose->width;
+ unsigned orig_compose_h = compose->height;
+ unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_cap) ? 2 : 1;
+ int ret;
+
+ if (!dev->has_crop_cap && !dev->has_compose_cap)
+ return -ENOTTY;
+ if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ return -EINVAL;
+ if (vivid_is_webcam(dev))
+ return -ENODATA;
+
+ switch (s->target) {
+ case V4L2_SEL_TGT_CROP:
+ if (!dev->has_crop_cap)
+ return -EINVAL;
+ ret = vivid_vid_adjust_sel(s->flags, &s->r);
+ if (ret)
+ return ret;
+ v4l2_rect_set_min_size(&s->r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&s->r, &dev->src_rect);
+ v4l2_rect_map_inside(&s->r, &dev->crop_bounds_cap);
+ s->r.top /= factor;
+ s->r.height /= factor;
+ if (dev->has_scaler_cap) {
+ struct v4l2_rect fmt = dev->fmt_cap_rect;
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ s->r.width * MAX_ZOOM,
+ s->r.height * MAX_ZOOM
+ };
+ struct v4l2_rect min_rect = {
+ 0, 0,
+ s->r.width / MAX_ZOOM,
+ s->r.height / MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(&fmt, &min_rect);
+ if (!dev->has_compose_cap)
+ v4l2_rect_set_max_size(&fmt, &max_rect);
+ if (!v4l2_rect_same_size(&dev->fmt_cap_rect, &fmt) &&
+ vb2_is_busy(&dev->vb_vid_cap_q))
+ return -EBUSY;
+ if (dev->has_compose_cap) {
+ v4l2_rect_set_min_size(compose, &min_rect);
+ v4l2_rect_set_max_size(compose, &max_rect);
+ v4l2_rect_map_inside(compose, &fmt);
+ }
+ dev->fmt_cap_rect = fmt;
+ tpg_s_buf_height(&dev->tpg, fmt.height);
+ } else if (dev->has_compose_cap) {
+ struct v4l2_rect fmt = dev->fmt_cap_rect;
+
+ v4l2_rect_set_min_size(&fmt, &s->r);
+ if (!v4l2_rect_same_size(&dev->fmt_cap_rect, &fmt) &&
+ vb2_is_busy(&dev->vb_vid_cap_q))
+ return -EBUSY;
+ dev->fmt_cap_rect = fmt;
+ tpg_s_buf_height(&dev->tpg, fmt.height);
+ v4l2_rect_set_size_to(compose, &s->r);
+ v4l2_rect_map_inside(compose, &dev->fmt_cap_rect);
+ } else {
+ if (!v4l2_rect_same_size(&s->r, &dev->fmt_cap_rect) &&
+ vb2_is_busy(&dev->vb_vid_cap_q))
+ return -EBUSY;
+ v4l2_rect_set_size_to(&dev->fmt_cap_rect, &s->r);
+ v4l2_rect_set_size_to(compose, &s->r);
+ v4l2_rect_map_inside(compose, &dev->fmt_cap_rect);
+ tpg_s_buf_height(&dev->tpg, dev->fmt_cap_rect.height);
+ }
+ s->r.top *= factor;
+ s->r.height *= factor;
+ *crop = s->r;
+ break;
+ case V4L2_SEL_TGT_COMPOSE:
+ if (!dev->has_compose_cap)
+ return -EINVAL;
+ ret = vivid_vid_adjust_sel(s->flags, &s->r);
+ if (ret)
+ return ret;
+ v4l2_rect_set_min_size(&s->r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&s->r, &dev->fmt_cap_rect);
+ if (dev->has_scaler_cap) {
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ dev->src_rect.width * MAX_ZOOM,
+ (dev->src_rect.height / factor) * MAX_ZOOM
+ };
+
+ v4l2_rect_set_max_size(&s->r, &max_rect);
+ if (dev->has_crop_cap) {
+ struct v4l2_rect min_rect = {
+ 0, 0,
+ s->r.width / MAX_ZOOM,
+ (s->r.height * factor) / MAX_ZOOM
+ };
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ s->r.width * MAX_ZOOM,
+ (s->r.height * factor) * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(crop, &min_rect);
+ v4l2_rect_set_max_size(crop, &max_rect);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ }
+ } else if (dev->has_crop_cap) {
+ s->r.top *= factor;
+ s->r.height *= factor;
+ v4l2_rect_set_max_size(&s->r, &dev->src_rect);
+ v4l2_rect_set_size_to(crop, &s->r);
+ v4l2_rect_map_inside(crop, &dev->crop_bounds_cap);
+ s->r.top /= factor;
+ s->r.height /= factor;
+ } else {
+ v4l2_rect_set_size_to(&s->r, &dev->src_rect);
+ s->r.height /= factor;
+ }
+ v4l2_rect_map_inside(&s->r, &dev->fmt_cap_rect);
+ *compose = s->r;
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ if (dev->bitmap_cap && (compose->width != orig_compose_w ||
+ compose->height != orig_compose_h)) {
+ vfree(dev->bitmap_cap);
+ dev->bitmap_cap = NULL;
+ }
+ tpg_s_crop_compose(&dev->tpg, crop, compose);
+ return 0;
+}
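
A hedged user-space sketch of cropping through this handler with VIDIOC_S_SELECTION (device path and rectangle are illustrative; requires a non-webcam input with cropping enabled):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_selection sel;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&sel, 0, sizeof(sel));
	sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	sel.target = V4L2_SEL_TGT_CROP;
	sel.r.left = 0;
	sel.r.top = 0;
	sel.r.width = 640;
	sel.r.height = 480;
	if (ioctl(fd, VIDIOC_S_SELECTION, &sel) == 0)
		/* the driver writes back the possibly adjusted rectangle */
		printf("crop adjusted to %ux%u@(%d,%d)\n",
		       sel.r.width, sel.r.height, sel.r.left, sel.r.top);
	close(fd);
	return 0;
}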
+
+int vivid_vid_cap_g_pixelaspect(struct file *file, void *priv,
+ int type, struct v4l2_fract *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
+ return -EINVAL;
+
+ switch (vivid_get_pixel_aspect(dev)) {
+ case TPG_PIXEL_ASPECT_NTSC:
+ f->numerator = 11;
+ f->denominator = 10;
+ break;
+ case TPG_PIXEL_ASPECT_PAL:
+ f->numerator = 54;
+ f->denominator = 59;
+ break;
+ default:
+ break;
+ }
+ return 0;
+}
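
The pixel aspect computed here is typically reported to applications via VIDIOC_CROPCAP; a small sketch (illustrative device path, assuming a single-planar capture node):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_cropcap cap;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&cap, 0, sizeof(cap));
	cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(fd, VIDIOC_CROPCAP, &cap) == 0)
		/* e.g. 54/59 on a PAL SDTV input, 1/1 for HDMI or webcam inputs */
		printf("pixel aspect %u/%u\n",
		       cap.pixelaspect.numerator, cap.pixelaspect.denominator);
	close(fd);
	return 0;
}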
+
+int vidioc_enum_fmt_vid_overlay(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct vivid_fmt *fmt;
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ if (f->index >= ARRAY_SIZE(formats_ovl))
+ return -EINVAL;
+
+ fmt = &formats_ovl[f->index];
+
+ f->pixelformat = fmt->fourcc;
+ return 0;
+}
+
+int vidioc_g_fmt_vid_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_cap;
+ struct v4l2_window *win = &f->fmt.win;
+ unsigned clipcount = win->clipcount;
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ win->w.top = dev->overlay_cap_top;
+ win->w.left = dev->overlay_cap_left;
+ win->w.width = compose->width;
+ win->w.height = compose->height;
+ win->field = dev->overlay_cap_field;
+ win->clipcount = dev->clipcount_cap;
+ if (clipcount > dev->clipcount_cap)
+ clipcount = dev->clipcount_cap;
+ if (dev->bitmap_cap == NULL)
+ win->bitmap = NULL;
+ else if (win->bitmap) {
+ if (copy_to_user(win->bitmap, dev->bitmap_cap,
+ ((compose->width + 7) / 8) * compose->height))
+ return -EFAULT;
+ }
+ if (clipcount && win->clips)
+ memcpy(win->clips, dev->clips_cap,
+ clipcount * sizeof(dev->clips_cap[0]));
+ return 0;
+}
+
+int vidioc_try_fmt_vid_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_cap;
+ struct v4l2_window *win = &f->fmt.win;
+ int i, j;
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ win->w.left = clamp_t(int, win->w.left,
+ -dev->fb_cap.fmt.width, dev->fb_cap.fmt.width);
+ win->w.top = clamp_t(int, win->w.top,
+ -dev->fb_cap.fmt.height, dev->fb_cap.fmt.height);
+ win->w.width = compose->width;
+ win->w.height = compose->height;
+ if (win->field != V4L2_FIELD_BOTTOM && win->field != V4L2_FIELD_TOP)
+ win->field = V4L2_FIELD_ANY;
+ win->chromakey = 0;
+ win->global_alpha = 0;
+ if (win->clipcount && !win->clips)
+ win->clipcount = 0;
+ if (win->clipcount > MAX_CLIPS)
+ win->clipcount = MAX_CLIPS;
+ if (win->clipcount) {
+ memcpy(dev->try_clips_cap, win->clips,
+ win->clipcount * sizeof(dev->clips_cap[0]));
+ for (i = 0; i < win->clipcount; i++) {
+ struct v4l2_rect *r = &dev->try_clips_cap[i].c;
+
+ r->top = clamp_t(s32, r->top, 0, dev->fb_cap.fmt.height - 1);
+ r->height = clamp_t(s32, r->height, 1, dev->fb_cap.fmt.height - r->top);
+ r->left = clamp_t(u32, r->left, 0, dev->fb_cap.fmt.width - 1);
+ r->width = clamp_t(u32, r->width, 1, dev->fb_cap.fmt.width - r->left);
+ }
+ /*
+ * Yeah, so sue me, it's an O(n^2) algorithm. But n is a small
+ * number and it's typically a one-time deal.
+ */
+ for (i = 0; i < win->clipcount - 1; i++) {
+ struct v4l2_rect *r1 = &dev->try_clips_cap[i].c;
+
+ for (j = i + 1; j < win->clipcount; j++) {
+ struct v4l2_rect *r2 = &dev->try_clips_cap[j].c;
+
+ if (v4l2_rect_overlap(r1, r2))
+ return -EINVAL;
+ }
+ }
+ memcpy(win->clips, dev->try_clips_cap,
+ win->clipcount * sizeof(dev->clips_cap[0]));
+ }
+ return 0;
+}
+
+int vidioc_s_fmt_vid_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_cap;
+ struct v4l2_window *win = &f->fmt.win;
+ int ret = vidioc_try_fmt_vid_overlay(file, priv, f);
+ unsigned bitmap_size = ((compose->width + 7) / 8) * compose->height;
+ unsigned clips_size = win->clipcount * sizeof(dev->clips_cap[0]);
+ void *new_bitmap = NULL;
+
+ if (ret)
+ return ret;
+
+ if (win->bitmap) {
+ new_bitmap = vzalloc(bitmap_size);
+
+ if (new_bitmap == NULL)
+ return -ENOMEM;
+ if (copy_from_user(new_bitmap, win->bitmap, bitmap_size)) {
+ vfree(new_bitmap);
+ return -EFAULT;
+ }
+ }
+
+ dev->overlay_cap_top = win->w.top;
+ dev->overlay_cap_left = win->w.left;
+ dev->overlay_cap_field = win->field;
+ vfree(dev->bitmap_cap);
+ dev->bitmap_cap = new_bitmap;
+ dev->clipcount_cap = win->clipcount;
+ if (dev->clipcount_cap)
+ memcpy(dev->clips_cap, dev->try_clips_cap, clips_size);
+ return 0;
+}
+
+int vivid_vid_cap_overlay(struct file *file, void *fh, unsigned i)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ if (i && dev->fb_vbase_cap == NULL)
+ return -EINVAL;
+
+ if (i && dev->fb_cap.fmt.pixelformat != dev->fmt_cap->fourcc) {
+ dprintk(dev, 1, "mismatch between overlay and video capture pixelformats\n");
+ return -EINVAL;
+ }
+
+ if (dev->overlay_cap_owner && dev->overlay_cap_owner != fh)
+ return -EBUSY;
+ dev->overlay_cap_owner = i ? fh : NULL;
+ return 0;
+}
+
+int vivid_vid_cap_g_fbuf(struct file *file, void *fh,
+ struct v4l2_framebuffer *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ *a = dev->fb_cap;
+ a->capability = V4L2_FBUF_CAP_BITMAP_CLIPPING |
+ V4L2_FBUF_CAP_LIST_CLIPPING;
+ a->flags = V4L2_FBUF_FLAG_PRIMARY;
+ a->fmt.field = V4L2_FIELD_NONE;
+ a->fmt.colorspace = V4L2_COLORSPACE_SRGB;
+ a->fmt.priv = 0;
+ return 0;
+}
+
+int vivid_vid_cap_s_fbuf(struct file *file, void *fh,
+ const struct v4l2_framebuffer *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct vivid_fmt *fmt;
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+
+ if (!capable(CAP_SYS_ADMIN) && !capable(CAP_SYS_RAWIO))
+ return -EPERM;
+
+ if (dev->overlay_cap_owner)
+ return -EBUSY;
+
+ if (a->base == NULL) {
+ dev->fb_cap.base = NULL;
+ dev->fb_vbase_cap = NULL;
+ return 0;
+ }
+
+ if (a->fmt.width < 48 || a->fmt.height < 32)
+ return -EINVAL;
+ fmt = vivid_get_format(dev, a->fmt.pixelformat);
+ if (!fmt || !fmt->can_do_overlay)
+ return -EINVAL;
+ if (a->fmt.bytesperline < (a->fmt.width * fmt->bit_depth[0]) / 8)
+ return -EINVAL;
+ if (a->fmt.bytesperline > a->fmt.sizeimage / a->fmt.height)
+ return -EINVAL;
+
+ /*
+ * Only support the framebuffer of one of the vivid instances.
+ * Anything else is rejected.
+ */
+ if (!vivid_validate_fb(a))
+ return -EINVAL;
+
+ dev->fb_vbase_cap = phys_to_virt((unsigned long)a->base);
+ dev->fb_cap = *a;
+ dev->overlay_cap_left = clamp_t(int, dev->overlay_cap_left,
+ -dev->fb_cap.fmt.width, dev->fb_cap.fmt.width);
+ dev->overlay_cap_top = clamp_t(int, dev->overlay_cap_top,
+ -dev->fb_cap.fmt.height, dev->fb_cap.fmt.height);
+ return 0;
+}
+
+static const struct v4l2_audio vivid_audio_inputs[] = {
+ { 0, "TV", V4L2_AUDCAP_STEREO },
+ { 1, "Line-In", V4L2_AUDCAP_STEREO },
+};
+
+int vidioc_enum_input(struct file *file, void *priv,
+ struct v4l2_input *inp)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (inp->index >= dev->num_inputs)
+ return -EINVAL;
+
+ inp->type = V4L2_INPUT_TYPE_CAMERA;
+ switch (dev->input_type[inp->index]) {
+ case WEBCAM:
+ snprintf(inp->name, sizeof(inp->name), "Webcam %u",
+ dev->input_name_counter[inp->index]);
+ inp->capabilities = 0;
+ break;
+ case TV:
+ snprintf(inp->name, sizeof(inp->name), "TV %u",
+ dev->input_name_counter[inp->index]);
+ inp->type = V4L2_INPUT_TYPE_TUNER;
+ inp->std = V4L2_STD_ALL;
+ if (dev->has_audio_inputs)
+ inp->audioset = (1 << ARRAY_SIZE(vivid_audio_inputs)) - 1;
+ inp->capabilities = V4L2_IN_CAP_STD;
+ break;
+ case SVID:
+ snprintf(inp->name, sizeof(inp->name), "S-Video %u",
+ dev->input_name_counter[inp->index]);
+ inp->std = V4L2_STD_ALL;
+ if (dev->has_audio_inputs)
+ inp->audioset = (1 << ARRAY_SIZE(vivid_audio_inputs)) - 1;
+ inp->capabilities = V4L2_IN_CAP_STD;
+ break;
+ case HDMI:
+ snprintf(inp->name, sizeof(inp->name), "HDMI %u",
+ dev->input_name_counter[inp->index]);
+ inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
+ if (dev->edid_blocks == 0 ||
+ dev->dv_timings_signal_mode[dev->input] == NO_SIGNAL)
+ inp->status |= V4L2_IN_ST_NO_SIGNAL;
+ else if (dev->dv_timings_signal_mode[dev->input] == NO_LOCK ||
+ dev->dv_timings_signal_mode[dev->input] == OUT_OF_RANGE)
+ inp->status |= V4L2_IN_ST_NO_H_LOCK;
+ break;
+ }
+ if (dev->sensor_hflip)
+ inp->status |= V4L2_IN_ST_HFLIP;
+ if (dev->sensor_vflip)
+ inp->status |= V4L2_IN_ST_VFLIP;
+ if (dev->input == inp->index && vivid_is_sdtv_cap(dev)) {
+ if (dev->std_signal_mode[dev->input] == NO_SIGNAL) {
+ inp->status |= V4L2_IN_ST_NO_SIGNAL;
+ } else if (dev->std_signal_mode[dev->input] == NO_LOCK) {
+ inp->status |= V4L2_IN_ST_NO_H_LOCK;
+ } else if (vivid_is_tv_cap(dev)) {
+ switch (tpg_g_quality(&dev->tpg)) {
+ case TPG_QUAL_GRAY:
+ inp->status |= V4L2_IN_ST_COLOR_KILL;
+ break;
+ case TPG_QUAL_NOISE:
+ inp->status |= V4L2_IN_ST_NO_H_LOCK;
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ return 0;
+}
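
Enumerating the inputs exposed by this handler from user space is a simple loop; a minimal sketch (illustrative device path):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_input inp;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	for (unsigned int i = 0; ; i++) {
		memset(&inp, 0, sizeof(inp));
		inp.index = i;
		if (ioctl(fd, VIDIOC_ENUMINPUT, &inp) < 0)
			break;
		printf("%u: %s (status 0x%x)\n", i, inp.name, inp.status);
	}
	close(fd);
	return 0;
}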
+
+int vidioc_g_input(struct file *file, void *priv, unsigned *i)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ *i = dev->input;
+ return 0;
+}
+
+int vidioc_s_input(struct file *file, void *priv, unsigned i)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_bt_timings *bt = &dev->dv_timings_cap[dev->input].bt;
+ unsigned brightness;
+
+ if (i >= dev->num_inputs)
+ return -EINVAL;
+
+ if (i == dev->input)
+ return 0;
+
+ if (vb2_is_busy(&dev->vb_vid_cap_q) ||
+ vb2_is_busy(&dev->vb_vbi_cap_q) ||
+ vb2_is_busy(&dev->vb_meta_cap_q))
+ return -EBUSY;
+
+ dev->input = i;
+ dev->vid_cap_dev.tvnorms = 0;
+ if (dev->input_type[i] == TV || dev->input_type[i] == SVID) {
+ dev->tv_audio_input = (dev->input_type[i] == TV) ? 0 : 1;
+ dev->vid_cap_dev.tvnorms = V4L2_STD_ALL;
+ }
+ dev->vbi_cap_dev.tvnorms = dev->vid_cap_dev.tvnorms;
+ dev->meta_cap_dev.tvnorms = dev->vid_cap_dev.tvnorms;
+ vivid_update_format_cap(dev, false);
+
+ if (dev->colorspace) {
+ switch (dev->input_type[i]) {
+ case WEBCAM:
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_SRGB);
+ break;
+ case TV:
+ case SVID:
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_170M);
+ break;
+ case HDMI:
+ if (bt->flags & V4L2_DV_FL_IS_CE_VIDEO) {
+ if (dev->src_rect.width == 720 && dev->src_rect.height <= 576)
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_170M);
+ else
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_709);
+ } else {
+ v4l2_ctrl_s_ctrl(dev->colorspace, VIVID_CS_SRGB);
+ }
+ break;
+ }
+ }
+
+ /*
+ * Modify the brightness range depending on the input.
+ * This makes it easy to use vivid to test if applications can
+ * handle control range modifications and is also how this is
+ * typically used in practice as different inputs may be hooked
+ * up to different receivers with different control ranges.
+ */
+ brightness = 128 * i + dev->input_brightness[i];
+ v4l2_ctrl_modify_range(dev->brightness,
+ 128 * i, 255 + 128 * i, 1, 128 + 128 * i);
+ v4l2_ctrl_s_ctrl(dev->brightness, brightness);
+
+ /* Restore per-input states. */
+ v4l2_ctrl_activate(dev->ctrl_dv_timings_signal_mode,
+ vivid_is_hdmi_cap(dev));
+ v4l2_ctrl_activate(dev->ctrl_dv_timings, vivid_is_hdmi_cap(dev) &&
+ dev->dv_timings_signal_mode[dev->input] ==
+ SELECTED_DV_TIMINGS);
+ v4l2_ctrl_activate(dev->ctrl_std_signal_mode, vivid_is_sdtv_cap(dev));
+ v4l2_ctrl_activate(dev->ctrl_standard, vivid_is_sdtv_cap(dev) &&
+ dev->std_signal_mode[dev->input]);
+
+ if (vivid_is_hdmi_cap(dev)) {
+ v4l2_ctrl_s_ctrl(dev->ctrl_dv_timings_signal_mode,
+ dev->dv_timings_signal_mode[dev->input]);
+ v4l2_ctrl_s_ctrl(dev->ctrl_dv_timings,
+ dev->query_dv_timings[dev->input]);
+ } else if (vivid_is_sdtv_cap(dev)) {
+ v4l2_ctrl_s_ctrl(dev->ctrl_std_signal_mode,
+ dev->std_signal_mode[dev->input]);
+ v4l2_ctrl_s_ctrl(dev->ctrl_standard,
+ dev->std_signal_mode[dev->input]);
+ }
+
+ return 0;
+}
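
To observe the per-input brightness range shift described in the comment above, an application can switch inputs and re-query the control; a sketch assuming the device exposes at least two inputs (path illustrative):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_query_ext_ctrl qc;
	unsigned int input = 1;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	if (ioctl(fd, VIDIOC_S_INPUT, &input) == 0) {
		memset(&qc, 0, sizeof(qc));
		qc.id = V4L2_CID_BRIGHTNESS;
		if (ioctl(fd, VIDIOC_QUERY_EXT_CTRL, &qc) == 0)
			/* for input 1 the range above works out to 128..383, default 256 */
			printf("brightness range %lld..%lld, default %lld\n",
			       qc.minimum, qc.maximum, qc.default_value);
	}
	close(fd);
	return 0;
}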
+
+int vidioc_enumaudio(struct file *file, void *fh, struct v4l2_audio *vin)
+{
+ if (vin->index >= ARRAY_SIZE(vivid_audio_inputs))
+ return -EINVAL;
+ *vin = vivid_audio_inputs[vin->index];
+ return 0;
+}
+
+int vidioc_g_audio(struct file *file, void *fh, struct v4l2_audio *vin)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_sdtv_cap(dev))
+ return -EINVAL;
+ *vin = vivid_audio_inputs[dev->tv_audio_input];
+ return 0;
+}
+
+int vidioc_s_audio(struct file *file, void *fh, const struct v4l2_audio *vin)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_sdtv_cap(dev))
+ return -EINVAL;
+ if (vin->index >= ARRAY_SIZE(vivid_audio_inputs))
+ return -EINVAL;
+ dev->tv_audio_input = vin->index;
+ return 0;
+}
+
+int vivid_video_g_frequency(struct file *file, void *fh, struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (vf->tuner != 0)
+ return -EINVAL;
+ vf->frequency = dev->tv_freq;
+ return 0;
+}
+
+int vivid_video_s_frequency(struct file *file, void *fh, const struct v4l2_frequency *vf)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (vf->tuner != 0)
+ return -EINVAL;
+ dev->tv_freq = clamp_t(unsigned, vf->frequency, MIN_TV_FREQ, MAX_TV_FREQ);
+ if (vivid_is_tv_cap(dev))
+ vivid_update_quality(dev);
+ return 0;
+}
+
+int vivid_video_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (vt->index != 0)
+ return -EINVAL;
+ if (vt->audmode > V4L2_TUNER_MODE_LANG1_LANG2)
+ return -EINVAL;
+ dev->tv_audmode = vt->audmode;
+ return 0;
+}
+
+int vivid_video_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ enum tpg_quality qual;
+
+ if (vt->index != 0)
+ return -EINVAL;
+
+ vt->capability = V4L2_TUNER_CAP_NORM | V4L2_TUNER_CAP_STEREO |
+ V4L2_TUNER_CAP_LANG1 | V4L2_TUNER_CAP_LANG2;
+ vt->audmode = dev->tv_audmode;
+ vt->rangelow = MIN_TV_FREQ;
+ vt->rangehigh = MAX_TV_FREQ;
+ qual = vivid_get_quality(dev, &vt->afc);
+ if (qual == TPG_QUAL_COLOR)
+ vt->signal = 0xffff;
+ else if (qual == TPG_QUAL_GRAY)
+ vt->signal = 0x8000;
+ else
+ vt->signal = 0;
+ if (qual == TPG_QUAL_NOISE) {
+ vt->rxsubchans = 0;
+ } else if (qual == TPG_QUAL_GRAY) {
+ vt->rxsubchans = V4L2_TUNER_SUB_MONO;
+ } else {
+ unsigned int channel_nr = dev->tv_freq / (6 * 16);
+ unsigned int options =
+ (dev->std_cap[dev->input] & V4L2_STD_NTSC_M) ? 4 : 3;
+
+ switch (channel_nr % options) {
+ case 0:
+ vt->rxsubchans = V4L2_TUNER_SUB_MONO;
+ break;
+ case 1:
+ vt->rxsubchans = V4L2_TUNER_SUB_STEREO;
+ break;
+ case 2:
+ if (dev->std_cap[dev->input] & V4L2_STD_NTSC_M)
+ vt->rxsubchans = V4L2_TUNER_SUB_MONO | V4L2_TUNER_SUB_SAP;
+ else
+ vt->rxsubchans = V4L2_TUNER_SUB_LANG1 | V4L2_TUNER_SUB_LANG2;
+ break;
+ case 3:
+ vt->rxsubchans = V4L2_TUNER_SUB_STEREO | V4L2_TUNER_SUB_SAP;
+ break;
+ }
+ }
+ strscpy(vt->name, "TV Tuner", sizeof(vt->name));
+ return 0;
+}
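
A user-space sketch that tunes the emulated tuner onto a fake channel and reads back the signal/afc values produced by this handler (frequencies in 1/16-MHz units; assumes the current input is a TV tuner input; device path illustrative):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_frequency vf;
	struct v4l2_tuner vt;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&vf, 0, sizeof(vf));
	vf.tuner = 0;
	vf.type = V4L2_TUNER_ANALOG_TV;
	vf.frequency = 884;		/* 55.25 MHz, right on a fake channel */
	ioctl(fd, VIDIOC_S_FREQUENCY, &vf);

	memset(&vt, 0, sizeof(vt));
	vt.index = 0;
	if (ioctl(fd, VIDIOC_G_TUNER, &vt) == 0)
		printf("signal 0x%x afc %d rxsubchans 0x%x\n",
		       vt.signal, vt.afc, vt.rxsubchans);
	close(fd);
	return 0;
}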
+
+/* Must remain in sync with the vivid_ctrl_standard_strings array */
+const v4l2_std_id vivid_standard[] = {
+ V4L2_STD_NTSC_M,
+ V4L2_STD_NTSC_M_JP,
+ V4L2_STD_NTSC_M_KR,
+ V4L2_STD_NTSC_443,
+ V4L2_STD_PAL_BG | V4L2_STD_PAL_H,
+ V4L2_STD_PAL_I,
+ V4L2_STD_PAL_DK,
+ V4L2_STD_PAL_M,
+ V4L2_STD_PAL_N,
+ V4L2_STD_PAL_Nc,
+ V4L2_STD_PAL_60,
+ V4L2_STD_SECAM_B | V4L2_STD_SECAM_G | V4L2_STD_SECAM_H,
+ V4L2_STD_SECAM_DK,
+ V4L2_STD_SECAM_L,
+ V4L2_STD_SECAM_LC,
+ V4L2_STD_UNKNOWN
+};
+
+/* Must remain in sync with the vivid_standard array */
+const char * const vivid_ctrl_standard_strings[] = {
+ "NTSC-M",
+ "NTSC-M-JP",
+ "NTSC-M-KR",
+ "NTSC-443",
+ "PAL-BGH",
+ "PAL-I",
+ "PAL-DK",
+ "PAL-M",
+ "PAL-N",
+ "PAL-Nc",
+ "PAL-60",
+ "SECAM-BGH",
+ "SECAM-DK",
+ "SECAM-L",
+ "SECAM-Lc",
+ NULL,
+};
+
+int vidioc_querystd(struct file *file, void *priv, v4l2_std_id *id)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned int last = dev->query_std_last[dev->input];
+
+ if (!vivid_is_sdtv_cap(dev))
+ return -ENODATA;
+ if (dev->std_signal_mode[dev->input] == NO_SIGNAL ||
+ dev->std_signal_mode[dev->input] == NO_LOCK) {
+ *id = V4L2_STD_UNKNOWN;
+ return 0;
+ }
+ if (vivid_is_tv_cap(dev) && tpg_g_quality(&dev->tpg) == TPG_QUAL_NOISE) {
+ *id = V4L2_STD_UNKNOWN;
+ } else if (dev->std_signal_mode[dev->input] == CURRENT_STD) {
+ *id = dev->std_cap[dev->input];
+ } else if (dev->std_signal_mode[dev->input] == SELECTED_STD) {
+ *id = dev->query_std[dev->input];
+ } else {
+ *id = vivid_standard[last];
+ dev->query_std_last[dev->input] =
+ (last + 1) % ARRAY_SIZE(vivid_standard);
+ }
+
+ return 0;
+}
+
+int vivid_vid_cap_s_std(struct file *file, void *priv, v4l2_std_id id)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_sdtv_cap(dev))
+ return -ENODATA;
+ if (dev->std_cap[dev->input] == id)
+ return 0;
+ if (vb2_is_busy(&dev->vb_vid_cap_q) || vb2_is_busy(&dev->vb_vbi_cap_q))
+ return -EBUSY;
+ dev->std_cap[dev->input] = id;
+ vivid_update_format_cap(dev, false);
+ return 0;
+}
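
Selecting one of the standards listed above on a TV or S-Video input is a single ioctl; a minimal sketch (illustrative device path):

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	v4l2_std_id std = V4L2_STD_PAL_BG;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	/* fails with ENODATA unless the current input is an SDTV input */
	ioctl(fd, VIDIOC_S_STD, &std);
	close(fd);
	return 0;
}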
+
+static void find_aspect_ratio(u32 width, u32 height,
+ u32 *num, u32 *denom)
+{
+ if (!(height % 3) && ((height * 4 / 3) == width)) {
+ *num = 4;
+ *denom = 3;
+ } else if (!(height % 9) && ((height * 16 / 9) == width)) {
+ *num = 16;
+ *denom = 9;
+ } else if (!(height % 10) && ((height * 16 / 10) == width)) {
+ *num = 16;
+ *denom = 10;
+ } else if (!(height % 4) && ((height * 5 / 4) == width)) {
+ *num = 5;
+ *denom = 4;
+ } else if (!(height % 9) && ((height * 15 / 9) == width)) {
+ *num = 15;
+ *denom = 9;
+ } else { /* default to 16:9 */
+ *num = 16;
+ *denom = 9;
+ }
+}
+
+static bool valid_cvt_gtf_timings(struct v4l2_dv_timings *timings)
+{
+ struct v4l2_bt_timings *bt = &timings->bt;
+ u32 total_h_pixel;
+ u32 total_v_lines;
+ u32 h_freq;
+
+ if (!v4l2_valid_dv_timings(timings, &vivid_dv_timings_cap,
+ NULL, NULL))
+ return false;
+
+ total_h_pixel = V4L2_DV_BT_FRAME_WIDTH(bt);
+ total_v_lines = V4L2_DV_BT_FRAME_HEIGHT(bt);
+
+ h_freq = (u32)bt->pixelclock / total_h_pixel;
+
+ if (bt->standards == 0 || (bt->standards & V4L2_DV_BT_STD_CVT)) {
+ if (v4l2_detect_cvt(total_v_lines, h_freq, bt->vsync, bt->width,
+ bt->polarities, bt->interlaced, timings))
+ return true;
+ }
+
+ if (bt->standards == 0 || (bt->standards & V4L2_DV_BT_STD_GTF)) {
+ struct v4l2_fract aspect_ratio;
+
+ find_aspect_ratio(bt->width, bt->height,
+ &aspect_ratio.numerator,
+ &aspect_ratio.denominator);
+ if (v4l2_detect_gtf(total_v_lines, h_freq, bt->vsync,
+ bt->polarities, bt->interlaced,
+ aspect_ratio, timings))
+ return true;
+ }
+ return false;
+}
+
+int vivid_vid_cap_s_dv_timings(struct file *file, void *_fh,
+ struct v4l2_dv_timings *timings)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_hdmi_cap(dev))
+ return -ENODATA;
+ if (!v4l2_find_dv_timings_cap(timings, &vivid_dv_timings_cap,
+ 0, NULL, NULL) &&
+ !valid_cvt_gtf_timings(timings))
+ return -EINVAL;
+
+ if (v4l2_match_dv_timings(timings, &dev->dv_timings_cap[dev->input],
+ 0, false))
+ return 0;
+ if (vb2_is_busy(&dev->vb_vid_cap_q))
+ return -EBUSY;
+
+ dev->dv_timings_cap[dev->input] = *timings;
+ vivid_update_format_cap(dev, false);
+ return 0;
+}
+
+int vidioc_query_dv_timings(struct file *file, void *_fh,
+ struct v4l2_dv_timings *timings)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned int input = dev->input;
+ unsigned int last = dev->query_dv_timings_last[input];
+
+ if (!vivid_is_hdmi_cap(dev))
+ return -ENODATA;
+ if (dev->dv_timings_signal_mode[input] == NO_SIGNAL ||
+ dev->edid_blocks == 0)
+ return -ENOLINK;
+ if (dev->dv_timings_signal_mode[input] == NO_LOCK)
+ return -ENOLCK;
+ if (dev->dv_timings_signal_mode[input] == OUT_OF_RANGE) {
+ timings->bt.pixelclock = vivid_dv_timings_cap.bt.max_pixelclock * 2;
+ return -ERANGE;
+ }
+ if (dev->dv_timings_signal_mode[input] == CURRENT_DV_TIMINGS) {
+ *timings = dev->dv_timings_cap[input];
+ } else if (dev->dv_timings_signal_mode[input] ==
+ SELECTED_DV_TIMINGS) {
+ *timings =
+ v4l2_dv_timings_presets[dev->query_dv_timings[input]];
+ } else {
+ *timings =
+ v4l2_dv_timings_presets[last];
+ dev->query_dv_timings_last[input] =
+ (last + 1) % dev->query_dv_timings_size;
+ }
+ return 0;
+}
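
For an HDMI input, the detected timings can be read back with VIDIOC_QUERY_DV_TIMINGS; a hedged sketch (device path illustrative, assumes the current input is HDMI with a signal present):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_dv_timings t;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&t, 0, sizeof(t));
	if (ioctl(fd, VIDIOC_QUERY_DV_TIMINGS, &t) == 0)
		printf("%ux%u%s, pixelclock %llu Hz\n",
		       t.bt.width, t.bt.height,
		       t.bt.interlaced ? "i" : "p",
		       t.bt.pixelclock);
	close(fd);
	return 0;
}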
+
+int vidioc_s_edid(struct file *file, void *_fh,
+ struct v4l2_edid *edid)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ u16 phys_addr;
+ u32 display_present = 0;
+ unsigned int i, j;
+ int ret;
+
+ memset(edid->reserved, 0, sizeof(edid->reserved));
+ if (edid->pad >= dev->num_inputs)
+ return -EINVAL;
+ if (dev->input_type[edid->pad] != HDMI || edid->start_block)
+ return -EINVAL;
+ if (edid->blocks == 0) {
+ dev->edid_blocks = 0;
+ v4l2_ctrl_s_ctrl(dev->ctrl_tx_edid_present, 0);
+ v4l2_ctrl_s_ctrl(dev->ctrl_tx_hotplug, 0);
+ phys_addr = CEC_PHYS_ADDR_INVALID;
+ goto set_phys_addr;
+ }
+ if (edid->blocks > dev->edid_max_blocks) {
+ edid->blocks = dev->edid_max_blocks;
+ return -E2BIG;
+ }
+ phys_addr = cec_get_edid_phys_addr(edid->edid, edid->blocks * 128, NULL);
+ ret = v4l2_phys_addr_validate(phys_addr, &phys_addr, NULL);
+ if (ret)
+ return ret;
+
+ if (vb2_is_busy(&dev->vb_vid_cap_q))
+ return -EBUSY;
+
+ dev->edid_blocks = edid->blocks;
+ memcpy(dev->edid, edid->edid, edid->blocks * 128);
+
+ for (i = 0, j = 0; i < dev->num_outputs; i++)
+ if (dev->output_type[i] == HDMI)
+ display_present |=
+ dev->display_present[i] << j++;
+
+ v4l2_ctrl_s_ctrl(dev->ctrl_tx_edid_present, display_present);
+ v4l2_ctrl_s_ctrl(dev->ctrl_tx_hotplug, display_present);
+
+set_phys_addr:
+ /* TODO: a proper hotplug detect cycle should be emulated here */
+ cec_s_phys_addr(dev->cec_rx_adap, phys_addr, false);
+
+ for (i = 0; i < MAX_OUTPUTS && dev->cec_tx_adap[i]; i++)
+ cec_s_phys_addr(dev->cec_tx_adap[i],
+ dev->display_present[i] ?
+ v4l2_phys_addr_for_input(phys_addr, i + 1) :
+ CEC_PHYS_ADDR_INVALID,
+ false);
+ return 0;
+}
+
+int vidioc_enum_framesizes(struct file *file, void *fh,
+ struct v4l2_frmsizeenum *fsize)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_webcam(dev) && !dev->has_scaler_cap)
+ return -EINVAL;
+ if (vivid_get_format(dev, fsize->pixel_format) == NULL)
+ return -EINVAL;
+ if (vivid_is_webcam(dev)) {
+ if (fsize->index >= ARRAY_SIZE(webcam_sizes))
+ return -EINVAL;
+ fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
+ fsize->discrete = webcam_sizes[fsize->index];
+ return 0;
+ }
+ if (fsize->index)
+ return -EINVAL;
+ fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
+ fsize->stepwise.min_width = MIN_WIDTH;
+ fsize->stepwise.max_width = MAX_WIDTH * MAX_ZOOM;
+ fsize->stepwise.step_width = 2;
+ fsize->stepwise.min_height = MIN_HEIGHT;
+ fsize->stepwise.max_height = MAX_HEIGHT * MAX_ZOOM;
+ fsize->stepwise.step_height = 2;
+ return 0;
+}
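
A user-space sketch that enumerates what this handler reports: the discrete webcam sizes, or the stepwise range when a scaler is present (device path and pixel format illustrative):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_frmsizeenum fsize;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	for (unsigned int i = 0; ; i++) {
		memset(&fsize, 0, sizeof(fsize));
		fsize.index = i;
		fsize.pixel_format = V4L2_PIX_FMT_YUYV;
		if (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) < 0)
			break;
		if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE)
			printf("%ux%u\n", fsize.discrete.width, fsize.discrete.height);
		else
			printf("%u..%u x %u..%u\n",
			       fsize.stepwise.min_width, fsize.stepwise.max_width,
			       fsize.stepwise.min_height, fsize.stepwise.max_height);
	}
	close(fd);
	return 0;
}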
+
+/* timeperframe is arbitrary and continuous */
+int vidioc_enum_frameintervals(struct file *file, void *priv,
+ struct v4l2_frmivalenum *fival)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct vivid_fmt *fmt;
+ int i;
+
+ fmt = vivid_get_format(dev, fival->pixel_format);
+ if (!fmt)
+ return -EINVAL;
+
+ if (!vivid_is_webcam(dev)) {
+ if (fival->index)
+ return -EINVAL;
+ if (fival->width < MIN_WIDTH || fival->width > MAX_WIDTH * MAX_ZOOM)
+ return -EINVAL;
+ if (fival->height < MIN_HEIGHT || fival->height > MAX_HEIGHT * MAX_ZOOM)
+ return -EINVAL;
+ fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
+ fival->discrete = dev->timeperframe_vid_cap;
+ return 0;
+ }
+
+ for (i = 0; i < ARRAY_SIZE(webcam_sizes); i++)
+ if (fival->width == webcam_sizes[i].width &&
+ fival->height == webcam_sizes[i].height)
+ break;
+ if (i == ARRAY_SIZE(webcam_sizes))
+ return -EINVAL;
+ if (fival->index >= 2 * (VIVID_WEBCAM_SIZES - i))
+ return -EINVAL;
+ fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
+ fival->discrete = webcam_intervals[fival->index];
+ return 0;
+}
+
+int vivid_vid_cap_g_parm(struct file *file, void *priv,
+ struct v4l2_streamparm *parm)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (parm->type != (dev->multiplanar ?
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE :
+ V4L2_BUF_TYPE_VIDEO_CAPTURE))
+ return -EINVAL;
+
+ parm->parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ parm->parm.capture.timeperframe = dev->timeperframe_vid_cap;
+ parm->parm.capture.readbuffers = 1;
+ return 0;
+}
+
+int vivid_vid_cap_s_parm(struct file *file, void *priv,
+ struct v4l2_streamparm *parm)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ unsigned ival_sz = 2 * (VIVID_WEBCAM_SIZES - dev->webcam_size_idx);
+ struct v4l2_fract tpf;
+ unsigned i;
+
+ if (parm->type != (dev->multiplanar ?
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE :
+ V4L2_BUF_TYPE_VIDEO_CAPTURE))
+ return -EINVAL;
+ if (!vivid_is_webcam(dev))
+ return vivid_vid_cap_g_parm(file, priv, parm);
+
+ tpf = parm->parm.capture.timeperframe;
+
+ if (tpf.denominator == 0)
+ tpf = webcam_intervals[ival_sz - 1];
+ for (i = 0; i < ival_sz; i++)
+ if (V4L2_FRACT_COMPARE(tpf, >=, webcam_intervals[i]))
+ break;
+ if (i == ival_sz)
+ i = ival_sz - 1;
+ dev->webcam_ival_idx = i;
+ tpf = webcam_intervals[dev->webcam_ival_idx];
+
+ /* resync the thread's timings */
+ dev->cap_seq_resync = true;
+ dev->timeperframe_vid_cap = tpf;
+ parm->parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
+ parm->parm.capture.timeperframe = tpf;
+ parm->parm.capture.readbuffers = 1;
+ return 0;
+}
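
On a webcam input, the frame interval is requested via VIDIOC_S_PARM and snapped by the handler above to the nearest supported interval; a minimal sketch (assumes the webcam input is selected; device path illustrative):

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_streamparm parm;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0)
		return 1;
	memset(&parm, 0, sizeof(parm));
	parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	parm.parm.capture.timeperframe.numerator = 1;
	parm.parm.capture.timeperframe.denominator = 15;	/* request 15 fps */
	if (ioctl(fd, VIDIOC_S_PARM, &parm) == 0)
		printf("granted %u/%u s per frame\n",
		       parm.parm.capture.timeperframe.numerator,
		       parm.parm.capture.timeperframe.denominator);
	close(fd);
	return 0;
}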
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-cap.h b/drivers/media/test-drivers/vivid/vivid-vid-cap.h
new file mode 100644
index 000000000..1e422a59e
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-cap.h
@@ -0,0 +1,59 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vid-cap.h - video capture support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VID_CAP_H_
+#define _VIVID_VID_CAP_H_
+
+void vivid_update_quality(struct vivid_dev *dev);
+void vivid_update_format_cap(struct vivid_dev *dev, bool keep_controls);
+enum tpg_video_aspect vivid_get_video_aspect(const struct vivid_dev *dev);
+
+extern const v4l2_std_id vivid_standard[];
+extern const char * const vivid_ctrl_standard_strings[];
+
+extern const struct vb2_ops vivid_vid_cap_qops;
+
+int vivid_g_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_try_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_s_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_g_fmt_vid_cap_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_cap_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_cap_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_g_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_vid_cap_g_selection(struct file *file, void *priv, struct v4l2_selection *sel);
+int vivid_vid_cap_s_selection(struct file *file, void *fh, struct v4l2_selection *s);
+int vivid_vid_cap_g_pixelaspect(struct file *file, void *priv, int type, struct v4l2_fract *f);
+int vidioc_enum_fmt_vid_overlay(struct file *file, void *priv, struct v4l2_fmtdesc *f);
+int vidioc_g_fmt_vid_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_vid_cap_overlay(struct file *file, void *fh, unsigned i);
+int vivid_vid_cap_g_fbuf(struct file *file, void *fh, struct v4l2_framebuffer *a);
+int vivid_vid_cap_s_fbuf(struct file *file, void *fh, const struct v4l2_framebuffer *a);
+int vidioc_enum_input(struct file *file, void *priv, struct v4l2_input *inp);
+int vidioc_g_input(struct file *file, void *priv, unsigned *i);
+int vidioc_s_input(struct file *file, void *priv, unsigned i);
+int vidioc_enumaudio(struct file *file, void *fh, struct v4l2_audio *vin);
+int vidioc_g_audio(struct file *file, void *fh, struct v4l2_audio *vin);
+int vidioc_s_audio(struct file *file, void *fh, const struct v4l2_audio *vin);
+int vivid_video_g_frequency(struct file *file, void *fh, struct v4l2_frequency *vf);
+int vivid_video_s_frequency(struct file *file, void *fh, const struct v4l2_frequency *vf);
+int vivid_video_s_tuner(struct file *file, void *fh, const struct v4l2_tuner *vt);
+int vivid_video_g_tuner(struct file *file, void *fh, struct v4l2_tuner *vt);
+int vidioc_querystd(struct file *file, void *priv, v4l2_std_id *id);
+int vivid_vid_cap_s_std(struct file *file, void *priv, v4l2_std_id id);
+int vivid_vid_cap_s_dv_timings(struct file *file, void *_fh, struct v4l2_dv_timings *timings);
+int vidioc_query_dv_timings(struct file *file, void *_fh, struct v4l2_dv_timings *timings);
+int vidioc_s_edid(struct file *file, void *_fh, struct v4l2_edid *edid);
+int vidioc_enum_framesizes(struct file *file, void *fh, struct v4l2_frmsizeenum *fsize);
+int vidioc_enum_frameintervals(struct file *file, void *priv, struct v4l2_frmivalenum *fival);
+int vivid_vid_cap_g_parm(struct file *file, void *priv, struct v4l2_streamparm *parm);
+int vivid_vid_cap_s_parm(struct file *file, void *priv, struct v4l2_streamparm *parm);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-common.c b/drivers/media/test-drivers/vivid/vivid-vid-common.c
new file mode 100644
index 000000000..38d788b5c
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-common.c
@@ -0,0 +1,1075 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vid-common.c - common video support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/sched.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+
+const struct v4l2_dv_timings_cap vivid_dv_timings_cap = {
+ .type = V4L2_DV_BT_656_1120,
+ /* keep this initialization for compatibility with GCC < 4.4.6 */
+ .reserved = { 0 },
+ V4L2_INIT_BT_TIMINGS(16, MAX_WIDTH, 16, MAX_HEIGHT, 14000000, 775000000,
+ V4L2_DV_BT_STD_CEA861 | V4L2_DV_BT_STD_DMT |
+ V4L2_DV_BT_STD_CVT | V4L2_DV_BT_STD_GTF,
+ V4L2_DV_BT_CAP_PROGRESSIVE | V4L2_DV_BT_CAP_INTERLACED)
+};
+
+/* ------------------------------------------------------------------
+ Basic structures
+ ------------------------------------------------------------------*/
+
+struct vivid_fmt vivid_formats[] = {
+ {
+ .fourcc = V4L2_PIX_FMT_YUYV,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 1,
+ .buffers = 1,
+ .data_offset = { PLANE0_DATA_OFFSET },
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_UYVY,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YVYU,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_VYUY,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV422P,
+ .vdownsampling = { 1, 1, 1 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV420,
+ .vdownsampling = { 1, 2, 2 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YVU420,
+ .vdownsampling = { 1, 2, 2 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV12,
+ .vdownsampling = { 1, 2 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV21,
+ .vdownsampling = { 1, 2 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV16,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV61,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV24,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV42,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 16 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV555, /* uuuvvvvv ayyyyyuu */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x8000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV565, /* uuuvvvvv yyyyyuuu */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV444, /* uuuuvvvv aaaayyyy */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0xf000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV32, /* ayuv */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x000000ff,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_AYUV32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x000000ff,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XYUV32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_VUYA32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0xff000000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_VUYX32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUVA32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0xff000000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUVX32,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_GREY,
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .color_enc = TGP_COLOR_ENC_LUMA,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_Y10,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_LUMA,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_Y12,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_LUMA,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_Y16,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_LUMA,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_Y16_BE,
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .color_enc = TGP_COLOR_ENC_LUMA,
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB332, /* rrrgggbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB444, /* ggggbbbb xxxxrrrr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XRGB444, /* ggggbbbb xxxxrrrr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ARGB444, /* ggggbbbb aaaarrrr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x00f0,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBX444, /* bbbbxxxx rrrrgggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBA444, /* bbbbaaaa rrrrgggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x00f0,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XBGR444, /* ggggrrrr xxxxbbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ABGR444, /* ggggrrrr aaaabbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x00f0,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRX444, /* rrrrxxxx bbbbgggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRA444, /* rrrraaaa bbbbgggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x00f0,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb xrrrrrgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XRGB555, /* gggbbbbb xrrrrrgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ARGB555, /* gggbbbbb arrrrrgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ .alpha_mask = 0x8000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBX555, /* ggbbbbbx rrrrrggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBA555, /* ggbbbbba rrrrrggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ .alpha_mask = 0x8000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XBGR555, /* gggrrrrr xbbbbbgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ABGR555, /* gggrrrrr abbbbbgg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ .alpha_mask = 0x8000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRX555, /* ggrrrrrx bbbbbggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRA555, /* ggrrrrra bbbbbggg */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .can_do_overlay = true,
+ .alpha_mask = 0x8000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB555X, /* xrrrrrgg gggbbbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XRGB555X, /* xrrrrrgg gggbbbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ARGB555X, /* arrrrrgg gggbbbbb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x0080,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 24 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 24 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGR666, /* bbbbbbgg ggggrrrr rrxxxxxx */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGB32, /* xrgb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGR32, /* bgrx */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XRGB32, /* xrgb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_XBGR32, /* bgrx */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ARGB32, /* argb */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x000000ff,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_ABGR32, /* bgra */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0xff000000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBX32, /* rgbx */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRX32, /* xbgr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_RGBA32, /* rgba */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0x000000ff,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_BGRA32, /* abgr */
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ .alpha_mask = 0xff000000,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SBGGR8, /* Bayer BG/GR */
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGBRG8, /* Bayer GB/RG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGRBG8, /* Bayer GR/BG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SRGGB8, /* Bayer RG/GB */
+ .vdownsampling = { 1 },
+ .bit_depth = { 8 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SBGGR10, /* Bayer BG/GR */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGBRG10, /* Bayer GB/RG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGRBG10, /* Bayer GR/BG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SRGGB10, /* Bayer RG/GB */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SBGGR12, /* Bayer BG/GR */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGBRG12, /* Bayer GB/RG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGRBG12, /* Bayer GR/BG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SRGGB12, /* Bayer RG/GB */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SBGGR16, /* Bayer BG/GR */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGBRG16, /* Bayer GB/RG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SGRBG16, /* Bayer GR/BG */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_SRGGB16, /* Bayer RG/GB */
+ .vdownsampling = { 1 },
+ .bit_depth = { 16 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_HSV24, /* HSV 24bits */
+ .color_enc = TGP_COLOR_ENC_HSV,
+ .vdownsampling = { 1 },
+ .bit_depth = { 24 },
+ .planes = 1,
+ .buffers = 1,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_HSV32, /* HSV 32bits */
+ .color_enc = TGP_COLOR_ENC_HSV,
+ .vdownsampling = { 1 },
+ .bit_depth = { 32 },
+ .planes = 1,
+ .buffers = 1,
+ },
+
+ /* Multiplanar formats */
+
+ {
+ .fourcc = V4L2_PIX_FMT_NV16M,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 2,
+ .data_offset = { PLANE0_DATA_OFFSET, 0 },
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV61M,
+ .vdownsampling = { 1, 1 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 2,
+ .data_offset = { 0, PLANE0_DATA_OFFSET },
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV420M,
+ .vdownsampling = { 1, 2, 2 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YVU420M,
+ .vdownsampling = { 1, 2, 2 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV12M,
+ .vdownsampling = { 1, 2 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 2,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_NV21M,
+ .vdownsampling = { 1, 2 },
+ .bit_depth = { 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 2,
+ .buffers = 2,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV422M,
+ .vdownsampling = { 1, 1, 1 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YVU422M,
+ .vdownsampling = { 1, 1, 1 },
+ .bit_depth = { 8, 4, 4 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YUV444M,
+ .vdownsampling = { 1, 1, 1 },
+ .bit_depth = { 8, 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+ {
+ .fourcc = V4L2_PIX_FMT_YVU444M,
+ .vdownsampling = { 1, 1, 1 },
+ .bit_depth = { 8, 8, 8 },
+ .color_enc = TGP_COLOR_ENC_YCBCR,
+ .planes = 3,
+ .buffers = 3,
+ },
+};
+
+/* There are this many multiplanar formats in the list */
+#define VIVID_MPLANAR_FORMATS 10
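+
+/*
+ * Note: the multiplanar formats must come last in vivid_formats[] above,
+ * since vivid_enum_fmt_vid() relies on that ordering to hide them on
+ * single-planar device instances by enumerating VIVID_MPLANAR_FORMATS
+ * fewer entries.
+ */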
+
+const struct vivid_fmt *vivid_get_format(struct vivid_dev *dev, u32 pixelformat)
+{
+ const struct vivid_fmt *fmt;
+ unsigned k;
+
+ for (k = 0; k < ARRAY_SIZE(vivid_formats); k++) {
+ fmt = &vivid_formats[k];
+ if (fmt->fourcc == pixelformat)
+ if (fmt->buffers == 1 || dev->multiplanar)
+ return fmt;
+ }
+
+ return NULL;
+}
+
+bool vivid_vid_can_loop(struct vivid_dev *dev)
+{
+ if (dev->src_rect.width != dev->sink_rect.width ||
+ dev->src_rect.height != dev->sink_rect.height)
+ return false;
+ if (dev->fmt_cap->fourcc != dev->fmt_out->fourcc)
+ return false;
+ if (dev->field_cap != dev->field_out)
+ return false;
+ /*
+ * While this can be supported, it is just too much work
+ * to actually implement.
+ */
+ if (dev->field_cap == V4L2_FIELD_SEQ_TB ||
+ dev->field_cap == V4L2_FIELD_SEQ_BT)
+ return false;
+ if (vivid_is_svid_cap(dev) && vivid_is_svid_out(dev)) {
+ if (!(dev->std_cap[dev->input] & V4L2_STD_525_60) !=
+ !(dev->std_out & V4L2_STD_525_60))
+ return false;
+ return true;
+ }
+ if (vivid_is_hdmi_cap(dev) && vivid_is_hdmi_out(dev))
+ return true;
+ return false;
+}
+
+void vivid_send_source_change(struct vivid_dev *dev, unsigned type)
+{
+ struct v4l2_event ev = {
+ .type = V4L2_EVENT_SOURCE_CHANGE,
+ .u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
+ };
+ unsigned i;
+
+ for (i = 0; i < dev->num_inputs; i++) {
+ ev.id = i;
+ if (dev->input_type[i] == type) {
+ if (video_is_registered(&dev->vid_cap_dev) && dev->has_vid_cap)
+ v4l2_event_queue(&dev->vid_cap_dev, &ev);
+ if (video_is_registered(&dev->vbi_cap_dev) && dev->has_vbi_cap)
+ v4l2_event_queue(&dev->vbi_cap_dev, &ev);
+ }
+ }
+}
+
+/*
+ * Conversion function that converts a single-planar format to a
+ * single-plane multiplanar format.
+ */
+void fmt_sp2mp(const struct v4l2_format *sp_fmt, struct v4l2_format *mp_fmt)
+{
+ struct v4l2_pix_format_mplane *mp = &mp_fmt->fmt.pix_mp;
+ struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
+ const struct v4l2_pix_format *pix = &sp_fmt->fmt.pix;
+ bool is_out = sp_fmt->type == V4L2_BUF_TYPE_VIDEO_OUTPUT;
+
+ memset(mp->reserved, 0, sizeof(mp->reserved));
+ mp_fmt->type = is_out ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE :
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ mp->width = pix->width;
+ mp->height = pix->height;
+ mp->pixelformat = pix->pixelformat;
+ mp->field = pix->field;
+ mp->colorspace = pix->colorspace;
+ mp->xfer_func = pix->xfer_func;
+ /* Also copies hsv_enc */
+ mp->ycbcr_enc = pix->ycbcr_enc;
+ mp->quantization = pix->quantization;
+ mp->num_planes = 1;
+ mp->flags = pix->flags;
+ ppix->sizeimage = pix->sizeimage;
+ ppix->bytesperline = pix->bytesperline;
+ memset(ppix->reserved, 0, sizeof(ppix->reserved));
+}
+
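+/*
+ * Helper for the single-planar ioctl handlers (see vivid-vid-cap.c and
+ * vivid-vid-out.c): convert the single-planar format to its multiplanar
+ * equivalent, call the given multiplanar format function and copy the
+ * result back into the single-planar format.
+ */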
+int fmt_sp2mp_func(struct file *file, void *priv,
+ struct v4l2_format *f, fmtfunc func)
+{
+ struct v4l2_format fmt;
+ struct v4l2_pix_format_mplane *mp = &fmt.fmt.pix_mp;
+ struct v4l2_plane_pix_format *ppix = &mp->plane_fmt[0];
+ struct v4l2_pix_format *pix = &f->fmt.pix;
+ int ret;
+
+ /* Converts to a mplane format */
+ fmt_sp2mp(f, &fmt);
+ /* Passes it to the generic mplane format function */
+ ret = func(file, priv, &fmt);
+ /* Copies back the mplane data to the single plane format */
+ pix->width = mp->width;
+ pix->height = mp->height;
+ pix->pixelformat = mp->pixelformat;
+ pix->field = mp->field;
+ pix->colorspace = mp->colorspace;
+ pix->xfer_func = mp->xfer_func;
+ /* Also copies hsv_enc */
+ pix->ycbcr_enc = mp->ycbcr_enc;
+ pix->quantization = mp->quantization;
+ pix->sizeimage = ppix->sizeimage;
+ pix->bytesperline = ppix->bytesperline;
+ pix->flags = mp->flags;
+ return ret;
+}
+
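+/*
+ * Adjust a selection rectangle according to the V4L2_SEL_FLAG_GE/LE
+ * constraint flags: width and height are forced to even values in the
+ * 2x2 to MAX_WIDTHxMAX_HEIGHT range and left/top are moved so the
+ * rectangle fits. -ERANGE is returned if the constraints cannot be met,
+ * in particular when both flags are set and the size would have to change.
+ */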
+int vivid_vid_adjust_sel(unsigned flags, struct v4l2_rect *r)
+{
+ unsigned w = r->width;
+ unsigned h = r->height;
+
+ /* sanitize w and h in case someone passes ~0 as the value */
+ w &= 0xffff;
+ h &= 0xffff;
+ if (!(flags & V4L2_SEL_FLAG_LE)) {
+ w++;
+ h++;
+ if (w < 2)
+ w = 2;
+ if (h < 2)
+ h = 2;
+ }
+ if (!(flags & V4L2_SEL_FLAG_GE)) {
+ if (w > MAX_WIDTH)
+ w = MAX_WIDTH;
+ if (h > MAX_HEIGHT)
+ h = MAX_HEIGHT;
+ }
+ w = w & ~1;
+ h = h & ~1;
+ if (w < 2 || h < 2)
+ return -ERANGE;
+ if (w > MAX_WIDTH || h > MAX_HEIGHT)
+ return -ERANGE;
+ if (r->top < 0)
+ r->top = 0;
+ if (r->left < 0)
+ r->left = 0;
+ /* sanitize left and top in case someone passes ~0 as the value */
+ r->left &= 0xfffe;
+ r->top &= 0xfffe;
+ if (r->left + w > MAX_WIDTH)
+ r->left = MAX_WIDTH - w;
+ if (r->top + h > MAX_HEIGHT)
+ r->top = MAX_HEIGHT - h;
+ if ((flags & (V4L2_SEL_FLAG_GE | V4L2_SEL_FLAG_LE)) ==
+ (V4L2_SEL_FLAG_GE | V4L2_SEL_FLAG_LE) &&
+ (r->width != w || r->height != h))
+ return -ERANGE;
+ r->width = w;
+ r->height = h;
+ return 0;
+}
+
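+/*
+ * Illustrative userspace sketch (not part of the driver): the CSC flags
+ * reported below tell applications that they may request specific
+ * colorimetry on a capture format by setting V4L2_PIX_FMT_FLAG_SET_CSC,
+ * roughly:
+ *
+ *	struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
+ *
+ *	ioctl(fd, VIDIOC_G_FMT, &fmt);
+ *	fmt.fmt.pix.flags |= V4L2_PIX_FMT_FLAG_SET_CSC;
+ *	fmt.fmt.pix.colorspace = V4L2_COLORSPACE_REC709;
+ *	fmt.fmt.pix.xfer_func = V4L2_XFER_FUNC_709;
+ *	ioctl(fd, VIDIOC_S_FMT, &fmt);
+ */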
+int vivid_enum_fmt_vid(struct file *file, void *priv,
+ struct v4l2_fmtdesc *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct vivid_fmt *fmt;
+
+ if (f->index >= ARRAY_SIZE(vivid_formats) -
+ (dev->multiplanar ? 0 : VIVID_MPLANAR_FORMATS))
+ return -EINVAL;
+
+ fmt = &vivid_formats[f->index];
+
+ f->pixelformat = fmt->fourcc;
+
+ if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE &&
+ f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
+ return 0;
+ /*
+ * For capture devices, we support the CSC API.
+ * We allow userspace to:
+ * 1. set the colorspace
+ * 2. set the xfer_func
+ * 3. set the ycbcr_enc on YUV formats
+ * 4. set the hsv_enc on HSV formats
+ * 5. set the quantization on YUV and RGB formats
+ */
+ f->flags |= V4L2_FMT_FLAG_CSC_COLORSPACE;
+ f->flags |= V4L2_FMT_FLAG_CSC_XFER_FUNC;
+
+ if (fmt->color_enc == TGP_COLOR_ENC_YCBCR) {
+ f->flags |= V4L2_FMT_FLAG_CSC_YCBCR_ENC;
+ f->flags |= V4L2_FMT_FLAG_CSC_QUANTIZATION;
+ } else if (fmt->color_enc == TGP_COLOR_ENC_HSV) {
+ f->flags |= V4L2_FMT_FLAG_CSC_HSV_ENC;
+ } else if (fmt->color_enc == TGP_COLOR_ENC_RGB) {
+ f->flags |= V4L2_FMT_FLAG_CSC_QUANTIZATION;
+ }
+
+ return 0;
+}
+
+int vidioc_g_std(struct file *file, void *priv, v4l2_std_id *id)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ if (!vivid_is_sdtv_cap(dev))
+ return -ENODATA;
+ *id = dev->std_cap[dev->input];
+ } else {
+ if (!vivid_is_svid_out(dev))
+ return -ENODATA;
+ *id = dev->std_out;
+ }
+ return 0;
+}
+
+int vidioc_g_dv_timings(struct file *file, void *_fh,
+ struct v4l2_dv_timings *timings)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ if (!vivid_is_hdmi_cap(dev))
+ return -ENODATA;
+ *timings = dev->dv_timings_cap[dev->input];
+ } else {
+ if (!vivid_is_hdmi_out(dev))
+ return -ENODATA;
+ *timings = dev->dv_timings_out;
+ }
+ return 0;
+}
+
+int vidioc_enum_dv_timings(struct file *file, void *_fh,
+ struct v4l2_enum_dv_timings *timings)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ if (!vivid_is_hdmi_cap(dev))
+ return -ENODATA;
+ } else {
+ if (!vivid_is_hdmi_out(dev))
+ return -ENODATA;
+ }
+ return v4l2_enum_dv_timings_cap(timings, &vivid_dv_timings_cap,
+ NULL, NULL);
+}
+
+int vidioc_dv_timings_cap(struct file *file, void *_fh,
+ struct v4l2_dv_timings_cap *cap)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ if (!vivid_is_hdmi_cap(dev))
+ return -ENODATA;
+ } else {
+ if (!vivid_is_hdmi_out(dev))
+ return -ENODATA;
+ }
+ *cap = vivid_dv_timings_cap;
+ return 0;
+}
+
+int vidioc_g_edid(struct file *file, void *_fh,
+ struct v4l2_edid *edid)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct video_device *vdev = video_devdata(file);
+ struct cec_adapter *adap;
+
+ memset(edid->reserved, 0, sizeof(edid->reserved));
+ if (vdev->vfl_dir == VFL_DIR_RX) {
+ if (edid->pad >= dev->num_inputs)
+ return -EINVAL;
+ if (dev->input_type[edid->pad] != HDMI)
+ return -EINVAL;
+ adap = dev->cec_rx_adap;
+ } else {
+ unsigned int bus_idx;
+
+ if (edid->pad >= dev->num_outputs)
+ return -EINVAL;
+ if (dev->output_type[edid->pad] != HDMI)
+ return -EINVAL;
+ if (!dev->display_present[edid->pad])
+ return -ENODATA;
+ bus_idx = dev->cec_output2bus_map[edid->pad];
+ adap = dev->cec_tx_adap[bus_idx];
+ }
+ if (edid->start_block == 0 && edid->blocks == 0) {
+ edid->blocks = dev->edid_blocks;
+ return 0;
+ }
+ if (dev->edid_blocks == 0)
+ return -ENODATA;
+ if (edid->start_block >= dev->edid_blocks)
+ return -EINVAL;
+ if (edid->blocks > dev->edid_blocks - edid->start_block)
+ edid->blocks = dev->edid_blocks - edid->start_block;
+ if (adap)
+ v4l2_set_edid_phys_addr(dev->edid, dev->edid_blocks * 128, adap->phys_addr);
+ memcpy(edid->edid, dev->edid + edid->start_block * 128, edid->blocks * 128);
+ return 0;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-common.h b/drivers/media/test-drivers/vivid/vivid-vid-common.h
new file mode 100644
index 000000000..d908d9725
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-common.h
@@ -0,0 +1,38 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vid-common.h - common video support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VID_COMMON_H_
+#define _VIVID_VID_COMMON_H_
+
+typedef int (*fmtfunc)(struct file *file, void *priv, struct v4l2_format *f);
+
+/*
+ * Conversion function that converts a single-planar format to a
+ * single-plane multiplanar format.
+ */
+void fmt_sp2mp(const struct v4l2_format *sp_fmt, struct v4l2_format *mp_fmt);
+int fmt_sp2mp_func(struct file *file, void *priv,
+ struct v4l2_format *f, fmtfunc func);
+
+extern const struct v4l2_dv_timings_cap vivid_dv_timings_cap;
+
+const struct vivid_fmt *vivid_get_format(struct vivid_dev *dev, u32 pixelformat);
+
+bool vivid_vid_can_loop(struct vivid_dev *dev);
+void vivid_send_source_change(struct vivid_dev *dev, unsigned type);
+
+int vivid_vid_adjust_sel(unsigned flags, struct v4l2_rect *r);
+
+int vivid_enum_fmt_vid(struct file *file, void *priv, struct v4l2_fmtdesc *f);
+int vidioc_g_std(struct file *file, void *priv, v4l2_std_id *id);
+int vidioc_g_dv_timings(struct file *file, void *_fh, struct v4l2_dv_timings *timings);
+int vidioc_enum_dv_timings(struct file *file, void *_fh, struct v4l2_enum_dv_timings *timings);
+int vidioc_dv_timings_cap(struct file *file, void *_fh, struct v4l2_dv_timings_cap *cap);
+int vidioc_g_edid(struct file *file, void *_fh, struct v4l2_edid *edid);
+int vidioc_subscribe_event(struct v4l2_fh *fh, const struct v4l2_event_subscription *sub);
+
+#endif
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-out.c b/drivers/media/test-drivers/vivid/vivid-vid-out.c
new file mode 100644
index 000000000..9f731f085
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-out.c
@@ -0,0 +1,1206 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * vivid-vid-out.c - video output support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#include <linux/errno.h>
+#include <linux/kernel.h>
+#include <linux/sched.h>
+#include <linux/videodev2.h>
+#include <linux/v4l2-dv-timings.h>
+#include <media/v4l2-common.h>
+#include <media/v4l2-event.h>
+#include <media/v4l2-dv-timings.h>
+#include <media/v4l2-rect.h>
+
+#include "vivid-core.h"
+#include "vivid-vid-common.h"
+#include "vivid-kthread-out.h"
+#include "vivid-vid-out.h"
+
+static int vid_out_queue_setup(struct vb2_queue *vq,
+ unsigned *nbuffers, unsigned *nplanes,
+ unsigned sizes[], struct device *alloc_devs[])
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ const struct vivid_fmt *vfmt = dev->fmt_out;
+ unsigned planes = vfmt->buffers;
+ unsigned h = dev->fmt_out_rect.height;
+ unsigned int size = dev->bytesperline_out[0] * h + vfmt->data_offset[0];
+ unsigned p;
+
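+	/*
+	 * For formats with more color-component planes than memory buffers
+	 * (planes > buffers) the extra planes are stored in the first buffer,
+	 * so their size is added to the size of plane 0.
+	 */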
+ for (p = vfmt->buffers; p < vfmt->planes; p++)
+ size += dev->bytesperline_out[p] * h / vfmt->vdownsampling[p] +
+ vfmt->data_offset[p];
+
+ if (dev->field_out == V4L2_FIELD_ALTERNATE) {
+ /*
+ * You cannot use write() with FIELD_ALTERNATE since the field
+ * information (TOP/BOTTOM) cannot be passed to the kernel.
+ */
+ if (vb2_fileio_is_active(vq))
+ return -EINVAL;
+ }
+
+ if (dev->queue_setup_error) {
+ /*
+ * Error injection: test what happens if queue_setup() returns
+ * an error.
+ */
+ dev->queue_setup_error = false;
+ return -EINVAL;
+ }
+
+ if (*nplanes) {
+ /*
+		 * Check if the number of requested planes matches the number
+		 * of planes in the current format. You can't mix that.
+ */
+ if (*nplanes != planes)
+ return -EINVAL;
+ if (sizes[0] < size)
+ return -EINVAL;
+ for (p = 1; p < planes; p++) {
+ if (sizes[p] < dev->bytesperline_out[p] * h +
+ vfmt->data_offset[p])
+ return -EINVAL;
+ }
+ } else {
+ for (p = 0; p < planes; p++)
+ sizes[p] = p ? dev->bytesperline_out[p] * h +
+ vfmt->data_offset[p] : size;
+ }
+
+ if (vq->num_buffers + *nbuffers < 2)
+ *nbuffers = 2 - vq->num_buffers;
+
+ *nplanes = planes;
+
+ dprintk(dev, 1, "%s: count=%d\n", __func__, *nbuffers);
+ for (p = 0; p < planes; p++)
+ dprintk(dev, 1, "%s: size[%u]=%u\n", __func__, p, sizes[p]);
+ return 0;
+}
+
+static int vid_out_buf_out_validate(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (dev->field_out != V4L2_FIELD_ALTERNATE)
+ vbuf->field = dev->field_out;
+ else if (vbuf->field != V4L2_FIELD_TOP &&
+ vbuf->field != V4L2_FIELD_BOTTOM)
+ return -EINVAL;
+ return 0;
+}
+
+static int vid_out_buf_prepare(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ const struct vivid_fmt *vfmt = dev->fmt_out;
+ unsigned int planes = vfmt->buffers;
+ unsigned int h = dev->fmt_out_rect.height;
+ unsigned int size = dev->bytesperline_out[0] * h;
+ unsigned p;
+
+ for (p = vfmt->buffers; p < vfmt->planes; p++)
+ size += dev->bytesperline_out[p] * h / vfmt->vdownsampling[p];
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ if (WARN_ON(NULL == dev->fmt_out))
+ return -EINVAL;
+
+ if (dev->buf_prepare_error) {
+ /*
+ * Error injection: test what happens if buf_prepare() returns
+ * an error.
+ */
+ dev->buf_prepare_error = false;
+ return -EINVAL;
+ }
+
+ for (p = 0; p < planes; p++) {
+ if (p)
+ size = dev->bytesperline_out[p] * h;
+ size += vb->planes[p].data_offset;
+
+ if (vb2_get_plane_payload(vb, p) < size) {
+ dprintk(dev, 1, "%s the payload is too small for plane %u (%lu < %u)\n",
+ __func__, p, vb2_get_plane_payload(vb, p), size);
+ return -EINVAL;
+ }
+ }
+
+ return 0;
+}
+
+static void vid_out_buf_queue(struct vb2_buffer *vb)
+{
+ struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+ struct vivid_buffer *buf = container_of(vbuf, struct vivid_buffer, vb);
+
+ dprintk(dev, 1, "%s\n", __func__);
+
+ spin_lock(&dev->slock);
+ list_add_tail(&buf->list, &dev->vid_out_active);
+ spin_unlock(&dev->slock);
+}
+
+static int vid_out_start_streaming(struct vb2_queue *vq, unsigned count)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+ int err;
+
+ if (vb2_is_streaming(&dev->vb_vid_cap_q))
+ dev->can_loop_video = vivid_vid_can_loop(dev);
+
+ dev->vid_out_seq_count = 0;
+ dprintk(dev, 1, "%s\n", __func__);
+ if (dev->start_streaming_error) {
+ dev->start_streaming_error = false;
+ err = -EINVAL;
+ } else {
+ err = vivid_start_generating_vid_out(dev, &dev->vid_out_streaming);
+ }
+ if (err) {
+ struct vivid_buffer *buf, *tmp;
+
+ list_for_each_entry_safe(buf, tmp, &dev->vid_out_active, list) {
+ list_del(&buf->list);
+ vb2_buffer_done(&buf->vb.vb2_buf,
+ VB2_BUF_STATE_QUEUED);
+ }
+ }
+ return err;
+}
+
+/* abort streaming and wait for last buffer */
+static void vid_out_stop_streaming(struct vb2_queue *vq)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vq);
+
+ dprintk(dev, 1, "%s\n", __func__);
+ vivid_stop_generating_vid_out(dev, &dev->vid_out_streaming);
+ dev->can_loop_video = false;
+}
+
+static void vid_out_buf_request_complete(struct vb2_buffer *vb)
+{
+ struct vivid_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
+
+ v4l2_ctrl_request_complete(vb->req_obj.req, &dev->ctrl_hdl_vid_out);
+}
+
+const struct vb2_ops vivid_vid_out_qops = {
+ .queue_setup = vid_out_queue_setup,
+ .buf_out_validate = vid_out_buf_out_validate,
+ .buf_prepare = vid_out_buf_prepare,
+ .buf_queue = vid_out_buf_queue,
+ .start_streaming = vid_out_start_streaming,
+ .stop_streaming = vid_out_stop_streaming,
+ .buf_request_complete = vid_out_buf_request_complete,
+ .wait_prepare = vb2_ops_wait_prepare,
+ .wait_finish = vb2_ops_wait_finish,
+};
+
+/*
+ * Called whenever the format has to be reset, which can occur when
+ * changing outputs, standard, timings, etc.
+ */
+void vivid_update_format_out(struct vivid_dev *dev)
+{
+ struct v4l2_bt_timings *bt = &dev->dv_timings_out.bt;
+ unsigned size, p;
+ u64 pixelclock;
+
+ switch (dev->output_type[dev->output]) {
+ case SVID:
+ default:
+ dev->field_out = dev->tv_field_out;
+ dev->sink_rect.width = 720;
+ if (dev->std_out & V4L2_STD_525_60) {
+ dev->sink_rect.height = 480;
+ dev->timeperframe_vid_out = (struct v4l2_fract) { 1001, 30000 };
+ dev->service_set_out = V4L2_SLICED_CAPTION_525;
+ } else {
+ dev->sink_rect.height = 576;
+ dev->timeperframe_vid_out = (struct v4l2_fract) { 1000, 25000 };
+ dev->service_set_out = V4L2_SLICED_WSS_625 | V4L2_SLICED_TELETEXT_B;
+ }
+ dev->colorspace_out = V4L2_COLORSPACE_SMPTE170M;
+ break;
+ case HDMI:
+ dev->sink_rect.width = bt->width;
+ dev->sink_rect.height = bt->height;
+ size = V4L2_DV_BT_FRAME_WIDTH(bt) * V4L2_DV_BT_FRAME_HEIGHT(bt);
+
+ if (can_reduce_fps(bt) && (bt->flags & V4L2_DV_FL_REDUCED_FPS))
+ pixelclock = div_u64(bt->pixelclock * 1000, 1001);
+ else
+ pixelclock = bt->pixelclock;
+
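+		/*
+		 * The frame period is the total frame size (including
+		 * blanking) divided by the pixel clock; numerator and
+		 * denominator are both scaled down by 100, which leaves
+		 * the ratio unchanged.
+		 */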
+ dev->timeperframe_vid_out = (struct v4l2_fract) {
+ size / 100, (u32)pixelclock / 100
+ };
+ if (bt->interlaced)
+ dev->field_out = V4L2_FIELD_ALTERNATE;
+ else
+ dev->field_out = V4L2_FIELD_NONE;
+ if (!dev->dvi_d_out && (bt->flags & V4L2_DV_FL_IS_CE_VIDEO)) {
+ if (bt->width == 720 && bt->height <= 576)
+ dev->colorspace_out = V4L2_COLORSPACE_SMPTE170M;
+ else
+ dev->colorspace_out = V4L2_COLORSPACE_REC709;
+ } else {
+ dev->colorspace_out = V4L2_COLORSPACE_SRGB;
+ }
+ break;
+ }
+ dev->xfer_func_out = V4L2_XFER_FUNC_DEFAULT;
+ dev->ycbcr_enc_out = V4L2_YCBCR_ENC_DEFAULT;
+ dev->hsv_enc_out = V4L2_HSV_ENC_180;
+ dev->quantization_out = V4L2_QUANTIZATION_DEFAULT;
+ dev->compose_out = dev->sink_rect;
+ dev->compose_bounds_out = dev->sink_rect;
+ dev->crop_out = dev->compose_out;
+ if (V4L2_FIELD_HAS_T_OR_B(dev->field_out))
+ dev->crop_out.height /= 2;
+ dev->fmt_out_rect = dev->crop_out;
+ for (p = 0; p < dev->fmt_out->planes; p++)
+ dev->bytesperline_out[p] =
+ (dev->sink_rect.width * dev->fmt_out->bit_depth[p]) / 8;
+}
+
+/* Map the field to something that is valid for the current output */
+static enum v4l2_field vivid_field_out(struct vivid_dev *dev, enum v4l2_field field)
+{
+ if (vivid_is_svid_out(dev)) {
+ switch (field) {
+ case V4L2_FIELD_INTERLACED_TB:
+ case V4L2_FIELD_INTERLACED_BT:
+ case V4L2_FIELD_SEQ_TB:
+ case V4L2_FIELD_SEQ_BT:
+ case V4L2_FIELD_ALTERNATE:
+ return field;
+ case V4L2_FIELD_INTERLACED:
+ default:
+ return V4L2_FIELD_INTERLACED;
+ }
+ }
+ if (vivid_is_hdmi_out(dev))
+ return dev->dv_timings_out.bt.interlaced ? V4L2_FIELD_ALTERNATE :
+ V4L2_FIELD_NONE;
+ return V4L2_FIELD_NONE;
+}
+
+static enum tpg_pixel_aspect vivid_get_pixel_aspect(const struct vivid_dev *dev)
+{
+ if (vivid_is_svid_out(dev))
+ return (dev->std_out & V4L2_STD_525_60) ?
+ TPG_PIXEL_ASPECT_NTSC : TPG_PIXEL_ASPECT_PAL;
+
+ if (vivid_is_hdmi_out(dev) &&
+ dev->sink_rect.width == 720 && dev->sink_rect.height <= 576)
+ return dev->sink_rect.height == 480 ?
+ TPG_PIXEL_ASPECT_NTSC : TPG_PIXEL_ASPECT_PAL;
+
+ return TPG_PIXEL_ASPECT_SQUARE;
+}
+
+int vivid_g_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ const struct vivid_fmt *fmt = dev->fmt_out;
+ unsigned p;
+
+ mp->width = dev->fmt_out_rect.width;
+ mp->height = dev->fmt_out_rect.height;
+ mp->field = dev->field_out;
+ mp->pixelformat = fmt->fourcc;
+ mp->colorspace = dev->colorspace_out;
+ mp->xfer_func = dev->xfer_func_out;
+ mp->ycbcr_enc = dev->ycbcr_enc_out;
+ mp->quantization = dev->quantization_out;
+ mp->num_planes = fmt->buffers;
+ for (p = 0; p < mp->num_planes; p++) {
+ mp->plane_fmt[p].bytesperline = dev->bytesperline_out[p];
+ mp->plane_fmt[p].sizeimage =
+ mp->plane_fmt[p].bytesperline * mp->height +
+ fmt->data_offset[p];
+ }
+ for (p = fmt->buffers; p < fmt->planes; p++) {
+ unsigned stride = dev->bytesperline_out[p];
+
+ mp->plane_fmt[0].sizeimage +=
+ (stride * mp->height) / fmt->vdownsampling[p];
+ }
+ return 0;
+}
+
+int vivid_try_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_bt_timings *bt = &dev->dv_timings_out.bt;
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ struct v4l2_plane_pix_format *pfmt = mp->plane_fmt;
+ const struct vivid_fmt *fmt;
+ unsigned bytesperline, max_bpl;
+ unsigned factor = 1;
+ unsigned w, h;
+ unsigned p;
+
+ fmt = vivid_get_format(dev, mp->pixelformat);
+ if (!fmt) {
+ dprintk(dev, 1, "Fourcc format (0x%08x) unknown.\n",
+ mp->pixelformat);
+ mp->pixelformat = V4L2_PIX_FMT_YUYV;
+ fmt = vivid_get_format(dev, mp->pixelformat);
+ }
+
+ mp->field = vivid_field_out(dev, mp->field);
+ if (vivid_is_svid_out(dev)) {
+ w = 720;
+ h = (dev->std_out & V4L2_STD_525_60) ? 480 : 576;
+ } else {
+ w = dev->sink_rect.width;
+ h = dev->sink_rect.height;
+ }
+ if (V4L2_FIELD_HAS_T_OR_B(mp->field))
+ factor = 2;
+ if (!dev->has_scaler_out && !dev->has_crop_out && !dev->has_compose_out) {
+ mp->width = w;
+ mp->height = h / factor;
+ } else {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height * factor };
+
+ v4l2_rect_set_min_size(&r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&r, &vivid_max_rect);
+ if (dev->has_scaler_out && !dev->has_crop_out) {
+ struct v4l2_rect max_r = { 0, 0, MAX_ZOOM * w, MAX_ZOOM * h };
+
+ v4l2_rect_set_max_size(&r, &max_r);
+ } else if (!dev->has_scaler_out && dev->has_compose_out && !dev->has_crop_out) {
+ v4l2_rect_set_max_size(&r, &dev->sink_rect);
+ } else if (!dev->has_scaler_out && !dev->has_compose_out) {
+ v4l2_rect_set_min_size(&r, &dev->sink_rect);
+ }
+ mp->width = r.width;
+ mp->height = r.height / factor;
+ }
+
+ /* This driver supports custom bytesperline values */
+
+ mp->num_planes = fmt->buffers;
+ for (p = 0; p < fmt->buffers; p++) {
+ /* Calculate the minimum supported bytesperline value */
+ bytesperline = (mp->width * fmt->bit_depth[p]) >> 3;
+ /* Calculate the maximum supported bytesperline value */
+ max_bpl = (MAX_ZOOM * MAX_WIDTH * fmt->bit_depth[p]) >> 3;
+
+ if (pfmt[p].bytesperline > max_bpl)
+ pfmt[p].bytesperline = max_bpl;
+ if (pfmt[p].bytesperline < bytesperline)
+ pfmt[p].bytesperline = bytesperline;
+
+ pfmt[p].sizeimage = (pfmt[p].bytesperline * mp->height) /
+ fmt->vdownsampling[p] + fmt->data_offset[p];
+
+ memset(pfmt[p].reserved, 0, sizeof(pfmt[p].reserved));
+ }
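+	/*
+	 * Color-component planes that share the first buffer (planes >
+	 * buffers) contribute to the sizeimage of plane 0, scaled from
+	 * plane 0's bytesperline by their relative bit depth and vertical
+	 * downsampling.
+	 */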
+ for (p = fmt->buffers; p < fmt->planes; p++)
+ pfmt[0].sizeimage += (pfmt[0].bytesperline * mp->height *
+ (fmt->bit_depth[p] / fmt->vdownsampling[p])) /
+ (fmt->bit_depth[0] / fmt->vdownsampling[0]);
+
+ mp->xfer_func = V4L2_XFER_FUNC_DEFAULT;
+ mp->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
+ mp->quantization = V4L2_QUANTIZATION_DEFAULT;
+ if (vivid_is_svid_out(dev)) {
+ mp->colorspace = V4L2_COLORSPACE_SMPTE170M;
+ } else if (dev->dvi_d_out || !(bt->flags & V4L2_DV_FL_IS_CE_VIDEO)) {
+ mp->colorspace = V4L2_COLORSPACE_SRGB;
+ if (dev->dvi_d_out)
+ mp->quantization = V4L2_QUANTIZATION_LIM_RANGE;
+ } else if (bt->width == 720 && bt->height <= 576) {
+ mp->colorspace = V4L2_COLORSPACE_SMPTE170M;
+ } else if (mp->colorspace != V4L2_COLORSPACE_SMPTE170M &&
+ mp->colorspace != V4L2_COLORSPACE_REC709 &&
+ mp->colorspace != V4L2_COLORSPACE_OPRGB &&
+ mp->colorspace != V4L2_COLORSPACE_BT2020 &&
+ mp->colorspace != V4L2_COLORSPACE_SRGB) {
+ mp->colorspace = V4L2_COLORSPACE_REC709;
+ }
+ memset(mp->reserved, 0, sizeof(mp->reserved));
+ return 0;
+}
+
+int vivid_s_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct v4l2_pix_format_mplane *mp = &f->fmt.pix_mp;
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rect *crop = &dev->crop_out;
+ struct v4l2_rect *compose = &dev->compose_out;
+ struct vb2_queue *q = &dev->vb_vid_out_q;
+ int ret = vivid_try_fmt_vid_out(file, priv, f);
+ unsigned factor = 1;
+ unsigned p;
+
+ if (ret < 0)
+ return ret;
+
+ if (vb2_is_busy(q) &&
+ (vivid_is_svid_out(dev) ||
+ mp->width != dev->fmt_out_rect.width ||
+ mp->height != dev->fmt_out_rect.height ||
+ mp->pixelformat != dev->fmt_out->fourcc ||
+ mp->field != dev->field_out)) {
+ dprintk(dev, 1, "%s device busy\n", __func__);
+ return -EBUSY;
+ }
+
+ /*
+ * Allow for changing the colorspace on the fly. Useful for testing
+ * purposes, and it is something that HDMI transmitters are able
+ * to do.
+ */
+ if (vb2_is_busy(q))
+ goto set_colorspace;
+
+ dev->fmt_out = vivid_get_format(dev, mp->pixelformat);
+ if (V4L2_FIELD_HAS_T_OR_B(mp->field))
+ factor = 2;
+
+ if (dev->has_scaler_out || dev->has_crop_out || dev->has_compose_out) {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height };
+
+ if (dev->has_scaler_out) {
+ if (dev->has_crop_out)
+ v4l2_rect_map_inside(crop, &r);
+ else
+ *crop = r;
+ if (dev->has_compose_out && !dev->has_crop_out) {
+ struct v4l2_rect min_r = {
+ 0, 0,
+ r.width / MAX_ZOOM,
+ factor * r.height / MAX_ZOOM
+ };
+ struct v4l2_rect max_r = {
+ 0, 0,
+ r.width * MAX_ZOOM,
+ factor * r.height * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(compose, &min_r);
+ v4l2_rect_set_max_size(compose, &max_r);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ } else if (dev->has_compose_out) {
+ struct v4l2_rect min_r = {
+ 0, 0,
+ crop->width / MAX_ZOOM,
+ factor * crop->height / MAX_ZOOM
+ };
+ struct v4l2_rect max_r = {
+ 0, 0,
+ crop->width * MAX_ZOOM,
+ factor * crop->height * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(compose, &min_r);
+ v4l2_rect_set_max_size(compose, &max_r);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ }
+ } else if (dev->has_compose_out && !dev->has_crop_out) {
+ v4l2_rect_set_size_to(crop, &r);
+ r.height *= factor;
+ v4l2_rect_set_size_to(compose, &r);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ } else if (!dev->has_compose_out) {
+ v4l2_rect_map_inside(crop, &r);
+ r.height /= factor;
+ v4l2_rect_set_size_to(compose, &r);
+ } else {
+ r.height *= factor;
+ v4l2_rect_set_max_size(compose, &r);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ crop->top *= factor;
+ crop->height *= factor;
+ v4l2_rect_set_size_to(crop, compose);
+ v4l2_rect_map_inside(crop, &r);
+ crop->top /= factor;
+ crop->height /= factor;
+ }
+ } else {
+ struct v4l2_rect r = { 0, 0, mp->width, mp->height };
+
+ v4l2_rect_set_size_to(crop, &r);
+ r.height /= factor;
+ v4l2_rect_set_size_to(compose, &r);
+ }
+
+ dev->fmt_out_rect.width = mp->width;
+ dev->fmt_out_rect.height = mp->height;
+ for (p = 0; p < mp->num_planes; p++)
+ dev->bytesperline_out[p] = mp->plane_fmt[p].bytesperline;
+ for (p = dev->fmt_out->buffers; p < dev->fmt_out->planes; p++)
+ dev->bytesperline_out[p] =
+ (dev->bytesperline_out[0] * dev->fmt_out->bit_depth[p]) /
+ dev->fmt_out->bit_depth[0];
+ dev->field_out = mp->field;
+ if (vivid_is_svid_out(dev))
+ dev->tv_field_out = mp->field;
+
+set_colorspace:
+ dev->colorspace_out = mp->colorspace;
+ dev->xfer_func_out = mp->xfer_func;
+ dev->ycbcr_enc_out = mp->ycbcr_enc;
+ dev->quantization_out = mp->quantization;
+ if (dev->loop_video) {
+ vivid_send_source_change(dev, SVID);
+ vivid_send_source_change(dev, HDMI);
+ }
+ return 0;
+}
+
+int vidioc_g_fmt_vid_out_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_g_fmt_vid_out(file, priv, f);
+}
+
+int vidioc_try_fmt_vid_out_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_try_fmt_vid_out(file, priv, f);
+}
+
+int vidioc_s_fmt_vid_out_mplane(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->multiplanar)
+ return -ENOTTY;
+ return vivid_s_fmt_vid_out(file, priv, f);
+}
+
+int vidioc_g_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_g_fmt_vid_out);
+}
+
+int vidioc_try_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_try_fmt_vid_out);
+}
+
+int vidioc_s_fmt_vid_out(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (dev->multiplanar)
+ return -ENOTTY;
+ return fmt_sp2mp_func(file, priv, f, vivid_s_fmt_vid_out);
+}
+
+int vivid_vid_out_g_selection(struct file *file, void *priv,
+ struct v4l2_selection *sel)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!dev->has_crop_out && !dev->has_compose_out)
+ return -ENOTTY;
+ if (sel->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
+ return -EINVAL;
+
+ sel->r.left = sel->r.top = 0;
+ switch (sel->target) {
+ case V4L2_SEL_TGT_CROP:
+ if (!dev->has_crop_out)
+ return -EINVAL;
+ sel->r = dev->crop_out;
+ break;
+ case V4L2_SEL_TGT_CROP_DEFAULT:
+ if (!dev->has_crop_out)
+ return -EINVAL;
+ sel->r = dev->fmt_out_rect;
+ break;
+ case V4L2_SEL_TGT_CROP_BOUNDS:
+ if (!dev->has_crop_out)
+ return -EINVAL;
+ sel->r = vivid_max_rect;
+ break;
+ case V4L2_SEL_TGT_COMPOSE:
+ if (!dev->has_compose_out)
+ return -EINVAL;
+ sel->r = dev->compose_out;
+ break;
+ case V4L2_SEL_TGT_COMPOSE_DEFAULT:
+ case V4L2_SEL_TGT_COMPOSE_BOUNDS:
+ if (!dev->has_compose_out)
+ return -EINVAL;
+ sel->r = dev->sink_rect;
+ break;
+ default:
+ return -EINVAL;
+ }
+ return 0;
+}
+
+int vivid_vid_out_s_selection(struct file *file, void *fh, struct v4l2_selection *s)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ struct v4l2_rect *crop = &dev->crop_out;
+ struct v4l2_rect *compose = &dev->compose_out;
+ unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_out) ? 2 : 1;
+ int ret;
+
+ if (!dev->has_crop_out && !dev->has_compose_out)
+ return -ENOTTY;
+ if (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
+ return -EINVAL;
+
+ switch (s->target) {
+ case V4L2_SEL_TGT_CROP:
+ if (!dev->has_crop_out)
+ return -EINVAL;
+ ret = vivid_vid_adjust_sel(s->flags, &s->r);
+ if (ret)
+ return ret;
+ v4l2_rect_set_min_size(&s->r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&s->r, &dev->fmt_out_rect);
+ if (dev->has_scaler_out) {
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ dev->sink_rect.width * MAX_ZOOM,
+ (dev->sink_rect.height / factor) * MAX_ZOOM
+ };
+
+ v4l2_rect_set_max_size(&s->r, &max_rect);
+ if (dev->has_compose_out) {
+ struct v4l2_rect min_rect = {
+ 0, 0,
+ s->r.width / MAX_ZOOM,
+ (s->r.height * factor) / MAX_ZOOM
+ };
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ s->r.width * MAX_ZOOM,
+ (s->r.height * factor) * MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(compose, &min_rect);
+ v4l2_rect_set_max_size(compose, &max_rect);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ }
+ } else if (dev->has_compose_out) {
+ s->r.top *= factor;
+ s->r.height *= factor;
+ v4l2_rect_set_max_size(&s->r, &dev->sink_rect);
+ v4l2_rect_set_size_to(compose, &s->r);
+ v4l2_rect_map_inside(compose, &dev->compose_bounds_out);
+ s->r.top /= factor;
+ s->r.height /= factor;
+ } else {
+ v4l2_rect_set_size_to(&s->r, &dev->sink_rect);
+ s->r.height /= factor;
+ }
+ v4l2_rect_map_inside(&s->r, &dev->fmt_out_rect);
+ *crop = s->r;
+ break;
+ case V4L2_SEL_TGT_COMPOSE:
+ if (!dev->has_compose_out)
+ return -EINVAL;
+ ret = vivid_vid_adjust_sel(s->flags, &s->r);
+ if (ret)
+ return ret;
+ v4l2_rect_set_min_size(&s->r, &vivid_min_rect);
+ v4l2_rect_set_max_size(&s->r, &dev->sink_rect);
+ v4l2_rect_map_inside(&s->r, &dev->compose_bounds_out);
+ s->r.top /= factor;
+ s->r.height /= factor;
+ if (dev->has_scaler_out) {
+ struct v4l2_rect fmt = dev->fmt_out_rect;
+ struct v4l2_rect max_rect = {
+ 0, 0,
+ s->r.width * MAX_ZOOM,
+ s->r.height * MAX_ZOOM
+ };
+ struct v4l2_rect min_rect = {
+ 0, 0,
+ s->r.width / MAX_ZOOM,
+ s->r.height / MAX_ZOOM
+ };
+
+ v4l2_rect_set_min_size(&fmt, &min_rect);
+ if (!dev->has_crop_out)
+ v4l2_rect_set_max_size(&fmt, &max_rect);
+ if (!v4l2_rect_same_size(&dev->fmt_out_rect, &fmt) &&
+ vb2_is_busy(&dev->vb_vid_out_q))
+ return -EBUSY;
+ if (dev->has_crop_out) {
+ v4l2_rect_set_min_size(crop, &min_rect);
+ v4l2_rect_set_max_size(crop, &max_rect);
+ }
+ dev->fmt_out_rect = fmt;
+ } else if (dev->has_crop_out) {
+ struct v4l2_rect fmt = dev->fmt_out_rect;
+
+ v4l2_rect_set_min_size(&fmt, &s->r);
+ if (!v4l2_rect_same_size(&dev->fmt_out_rect, &fmt) &&
+ vb2_is_busy(&dev->vb_vid_out_q))
+ return -EBUSY;
+ dev->fmt_out_rect = fmt;
+ v4l2_rect_set_size_to(crop, &s->r);
+ v4l2_rect_map_inside(crop, &dev->fmt_out_rect);
+ } else {
+ if (!v4l2_rect_same_size(&s->r, &dev->fmt_out_rect) &&
+ vb2_is_busy(&dev->vb_vid_out_q))
+ return -EBUSY;
+ v4l2_rect_set_size_to(&dev->fmt_out_rect, &s->r);
+ v4l2_rect_set_size_to(crop, &s->r);
+ crop->height /= factor;
+ v4l2_rect_map_inside(crop, &dev->fmt_out_rect);
+ }
+ s->r.top *= factor;
+ s->r.height *= factor;
+ if (dev->bitmap_out && (compose->width != s->r.width ||
+ compose->height != s->r.height)) {
+ vfree(dev->bitmap_out);
+ dev->bitmap_out = NULL;
+ }
+ *compose = s->r;
+ break;
+ default:
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+int vivid_vid_out_g_pixelaspect(struct file *file, void *priv,
+ int type, struct v4l2_fract *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (type != V4L2_BUF_TYPE_VIDEO_OUTPUT)
+ return -EINVAL;
+
+ switch (vivid_get_pixel_aspect(dev)) {
+ case TPG_PIXEL_ASPECT_NTSC:
+ f->numerator = 11;
+ f->denominator = 10;
+ break;
+ case TPG_PIXEL_ASPECT_PAL:
+ f->numerator = 54;
+ f->denominator = 59;
+ break;
+ default:
+ break;
+ }
+ return 0;
+}
+
+int vidioc_g_fmt_vid_out_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_out;
+ struct v4l2_window *win = &f->fmt.win;
+ unsigned clipcount = win->clipcount;
+
+ if (!dev->has_fb)
+ return -EINVAL;
+ win->w.top = dev->overlay_out_top;
+ win->w.left = dev->overlay_out_left;
+ win->w.width = compose->width;
+ win->w.height = compose->height;
+ win->clipcount = dev->clipcount_out;
+ win->field = V4L2_FIELD_ANY;
+ win->chromakey = dev->chromakey_out;
+ win->global_alpha = dev->global_alpha_out;
+ if (clipcount > dev->clipcount_out)
+ clipcount = dev->clipcount_out;
+ if (dev->bitmap_out == NULL)
+ win->bitmap = NULL;
+ else if (win->bitmap) {
+ if (copy_to_user(win->bitmap, dev->bitmap_out,
+ ((dev->compose_out.width + 7) / 8) * dev->compose_out.height))
+ return -EFAULT;
+ }
+ if (clipcount && win->clips)
+ memcpy(win->clips, dev->clips_out,
+ clipcount * sizeof(dev->clips_out[0]));
+ return 0;
+}
+
+int vidioc_try_fmt_vid_out_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_out;
+ struct v4l2_window *win = &f->fmt.win;
+ int i, j;
+
+ if (!dev->has_fb)
+ return -EINVAL;
+ win->w.left = clamp_t(int, win->w.left,
+ -dev->display_width, dev->display_width);
+ win->w.top = clamp_t(int, win->w.top,
+ -dev->display_height, dev->display_height);
+ win->w.width = compose->width;
+ win->w.height = compose->height;
+ /*
+ * It makes no sense for an OSD to overlay only top or bottom fields,
+ * so always set this to ANY.
+ */
+ win->field = V4L2_FIELD_ANY;
+ if (win->clipcount && !win->clips)
+ win->clipcount = 0;
+ if (win->clipcount > MAX_CLIPS)
+ win->clipcount = MAX_CLIPS;
+ if (win->clipcount) {
+ memcpy(dev->try_clips_out, win->clips,
+ win->clipcount * sizeof(dev->clips_out[0]));
+ for (i = 0; i < win->clipcount; i++) {
+ struct v4l2_rect *r = &dev->try_clips_out[i].c;
+
+ r->top = clamp_t(s32, r->top, 0, dev->display_height - 1);
+ r->height = clamp_t(s32, r->height, 1, dev->display_height - r->top);
+ r->left = clamp_t(u32, r->left, 0, dev->display_width - 1);
+ r->width = clamp_t(u32, r->width, 1, dev->display_width - r->left);
+ }
+ /*
+ * Yeah, so sue me, it's an O(n^2) algorithm. But n is a small
+ * number and it's typically a one-time deal.
+ */
+ for (i = 0; i < win->clipcount - 1; i++) {
+ struct v4l2_rect *r1 = &dev->try_clips_out[i].c;
+
+ for (j = i + 1; j < win->clipcount; j++) {
+ struct v4l2_rect *r2 = &dev->try_clips_out[j].c;
+
+ if (v4l2_rect_overlap(r1, r2))
+ return -EINVAL;
+ }
+ }
+ memcpy(win->clips, dev->try_clips_out,
+ win->clipcount * sizeof(dev->clips_out[0]));
+ }
+ return 0;
+}
+
+int vidioc_s_fmt_vid_out_overlay(struct file *file, void *priv,
+ struct v4l2_format *f)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const struct v4l2_rect *compose = &dev->compose_out;
+ struct v4l2_window *win = &f->fmt.win;
+ int ret = vidioc_try_fmt_vid_out_overlay(file, priv, f);
+ unsigned bitmap_size = ((compose->width + 7) / 8) * compose->height;
+ unsigned clips_size = win->clipcount * sizeof(dev->clips_out[0]);
+ void *new_bitmap = NULL;
+
+ if (ret)
+ return ret;
+
+ if (win->bitmap) {
+ new_bitmap = vzalloc(bitmap_size);
+
+ if (!new_bitmap)
+ return -ENOMEM;
+ if (copy_from_user(new_bitmap, win->bitmap, bitmap_size)) {
+ vfree(new_bitmap);
+ return -EFAULT;
+ }
+ }
+
+ dev->overlay_out_top = win->w.top;
+ dev->overlay_out_left = win->w.left;
+ vfree(dev->bitmap_out);
+ dev->bitmap_out = new_bitmap;
+ dev->clipcount_out = win->clipcount;
+ if (dev->clipcount_out)
+ memcpy(dev->clips_out, dev->try_clips_out, clips_size);
+ dev->chromakey_out = win->chromakey;
+ dev->global_alpha_out = win->global_alpha;
+ return ret;
+}
+
+int vivid_vid_out_overlay(struct file *file, void *fh, unsigned i)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (i && !dev->fmt_out->can_do_overlay) {
+ dprintk(dev, 1, "unsupported output format for output overlay\n");
+ return -EINVAL;
+ }
+
+ dev->overlay_out_enabled = i;
+ return 0;
+}
+
+int vivid_vid_out_g_fbuf(struct file *file, void *fh,
+ struct v4l2_framebuffer *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ a->capability = V4L2_FBUF_CAP_EXTERNOVERLAY |
+ V4L2_FBUF_CAP_BITMAP_CLIPPING |
+ V4L2_FBUF_CAP_LIST_CLIPPING |
+ V4L2_FBUF_CAP_CHROMAKEY |
+ V4L2_FBUF_CAP_SRC_CHROMAKEY |
+ V4L2_FBUF_CAP_GLOBAL_ALPHA |
+ V4L2_FBUF_CAP_LOCAL_ALPHA |
+ V4L2_FBUF_CAP_LOCAL_INV_ALPHA;
+ a->flags = V4L2_FBUF_FLAG_OVERLAY | dev->fbuf_out_flags;
+ a->base = (void *)dev->video_pbase;
+ a->fmt.width = dev->display_width;
+ a->fmt.height = dev->display_height;
+ if (dev->fb_defined.green.length == 5)
+ a->fmt.pixelformat = V4L2_PIX_FMT_ARGB555;
+ else
+ a->fmt.pixelformat = V4L2_PIX_FMT_RGB565;
+ a->fmt.bytesperline = dev->display_byte_stride;
+ a->fmt.sizeimage = a->fmt.height * a->fmt.bytesperline;
+ a->fmt.field = V4L2_FIELD_NONE;
+ a->fmt.colorspace = V4L2_COLORSPACE_SRGB;
+ a->fmt.priv = 0;
+ return 0;
+}
+
+int vivid_vid_out_s_fbuf(struct file *file, void *fh,
+ const struct v4l2_framebuffer *a)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+ const unsigned chroma_flags = V4L2_FBUF_FLAG_CHROMAKEY |
+ V4L2_FBUF_FLAG_SRC_CHROMAKEY;
+ const unsigned alpha_flags = V4L2_FBUF_FLAG_GLOBAL_ALPHA |
+ V4L2_FBUF_FLAG_LOCAL_ALPHA |
+ V4L2_FBUF_FLAG_LOCAL_INV_ALPHA;
+
+ if ((a->flags & chroma_flags) == chroma_flags)
+ return -EINVAL;
+ switch (a->flags & alpha_flags) {
+ case 0:
+ case V4L2_FBUF_FLAG_GLOBAL_ALPHA:
+ case V4L2_FBUF_FLAG_LOCAL_ALPHA:
+ case V4L2_FBUF_FLAG_LOCAL_INV_ALPHA:
+ break;
+ default:
+ return -EINVAL;
+ }
+ dev->fbuf_out_flags &= ~(chroma_flags | alpha_flags);
+ dev->fbuf_out_flags |= a->flags & (chroma_flags | alpha_flags);
+ return 0;
+}
+
+static const struct v4l2_audioout vivid_audio_outputs[] = {
+ { 0, "Line-Out 1" },
+ { 1, "Line-Out 2" },
+};
+
+int vidioc_enum_output(struct file *file, void *priv,
+ struct v4l2_output *out)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (out->index >= dev->num_outputs)
+ return -EINVAL;
+
+ out->type = V4L2_OUTPUT_TYPE_ANALOG;
+ switch (dev->output_type[out->index]) {
+ case SVID:
+ snprintf(out->name, sizeof(out->name), "S-Video %u",
+ dev->output_name_counter[out->index]);
+ out->std = V4L2_STD_ALL;
+ if (dev->has_audio_outputs)
+ out->audioset = (1 << ARRAY_SIZE(vivid_audio_outputs)) - 1;
+ out->capabilities = V4L2_OUT_CAP_STD;
+ break;
+ case HDMI:
+ snprintf(out->name, sizeof(out->name), "HDMI %u",
+ dev->output_name_counter[out->index]);
+ out->capabilities = V4L2_OUT_CAP_DV_TIMINGS;
+ break;
+ }
+ return 0;
+}
+
+int vidioc_g_output(struct file *file, void *priv, unsigned *o)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ *o = dev->output;
+ return 0;
+}
+
+int vidioc_s_output(struct file *file, void *priv, unsigned o)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (o >= dev->num_outputs)
+ return -EINVAL;
+
+ if (o == dev->output)
+ return 0;
+
+ if (vb2_is_busy(&dev->vb_vid_out_q) ||
+ vb2_is_busy(&dev->vb_vbi_out_q) ||
+ vb2_is_busy(&dev->vb_meta_out_q))
+ return -EBUSY;
+
+ dev->output = o;
+ dev->tv_audio_output = 0;
+ if (dev->output_type[o] == SVID)
+ dev->vid_out_dev.tvnorms = V4L2_STD_ALL;
+ else
+ dev->vid_out_dev.tvnorms = 0;
+
+ dev->vbi_out_dev.tvnorms = dev->vid_out_dev.tvnorms;
+ dev->meta_out_dev.tvnorms = dev->vid_out_dev.tvnorms;
+ vivid_update_format_out(dev);
+
+ v4l2_ctrl_activate(dev->ctrl_display_present, vivid_is_hdmi_out(dev));
+ if (vivid_is_hdmi_out(dev))
+ v4l2_ctrl_s_ctrl(dev->ctrl_display_present,
+ dev->display_present[dev->output]);
+
+ return 0;
+}
+
+int vidioc_enumaudout(struct file *file, void *fh, struct v4l2_audioout *vout)
+{
+ if (vout->index >= ARRAY_SIZE(vivid_audio_outputs))
+ return -EINVAL;
+ *vout = vivid_audio_outputs[vout->index];
+ return 0;
+}
+
+int vidioc_g_audout(struct file *file, void *fh, struct v4l2_audioout *vout)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_svid_out(dev))
+ return -EINVAL;
+ *vout = vivid_audio_outputs[dev->tv_audio_output];
+ return 0;
+}
+
+int vidioc_s_audout(struct file *file, void *fh, const struct v4l2_audioout *vout)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_svid_out(dev))
+ return -EINVAL;
+ if (vout->index >= ARRAY_SIZE(vivid_audio_outputs))
+ return -EINVAL;
+ dev->tv_audio_output = vout->index;
+ return 0;
+}
+
+int vivid_vid_out_s_std(struct file *file, void *priv, v4l2_std_id id)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (!vivid_is_svid_out(dev))
+ return -ENODATA;
+ if (dev->std_out == id)
+ return 0;
+ if (vb2_is_busy(&dev->vb_vid_out_q) || vb2_is_busy(&dev->vb_vbi_out_q))
+ return -EBUSY;
+ dev->std_out = id;
+ vivid_update_format_out(dev);
+ return 0;
+}
+
+static bool valid_cvt_gtf_timings(struct v4l2_dv_timings *timings)
+{
+ struct v4l2_bt_timings *bt = &timings->bt;
+
+ if ((bt->standards & (V4L2_DV_BT_STD_CVT | V4L2_DV_BT_STD_GTF)) &&
+ v4l2_valid_dv_timings(timings, &vivid_dv_timings_cap, NULL, NULL))
+ return true;
+
+ return false;
+}
+
+int vivid_vid_out_s_dv_timings(struct file *file, void *_fh,
+ struct v4l2_dv_timings *timings)
+{
+	struct vivid_dev *dev = video_drvdata(file);
+
+	if (!vivid_is_hdmi_out(dev))
+ return -ENODATA;
+ if (!v4l2_find_dv_timings_cap(timings, &vivid_dv_timings_cap,
+ 0, NULL, NULL) &&
+ !valid_cvt_gtf_timings(timings))
+ return -EINVAL;
+ if (v4l2_match_dv_timings(timings, &dev->dv_timings_out, 0, true))
+ return 0;
+ if (vb2_is_busy(&dev->vb_vid_out_q))
+ return -EBUSY;
+ dev->dv_timings_out = *timings;
+ vivid_update_format_out(dev);
+ return 0;
+}
+
+int vivid_vid_out_g_parm(struct file *file, void *priv,
+ struct v4l2_streamparm *parm)
+{
+ struct vivid_dev *dev = video_drvdata(file);
+
+ if (parm->type != (dev->multiplanar ?
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE :
+ V4L2_BUF_TYPE_VIDEO_OUTPUT))
+ return -EINVAL;
+
+ parm->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
+ parm->parm.output.timeperframe = dev->timeperframe_vid_out;
+ parm->parm.output.writebuffers = 1;
+
+ return 0;
+}
+
+int vidioc_subscribe_event(struct v4l2_fh *fh,
+ const struct v4l2_event_subscription *sub)
+{
+ switch (sub->type) {
+ case V4L2_EVENT_SOURCE_CHANGE:
+ if (fh->vdev->vfl_dir == VFL_DIR_RX)
+ return v4l2_src_change_event_subscribe(fh, sub);
+ break;
+ default:
+ return v4l2_ctrl_subscribe_event(fh, sub);
+ }
+ return -EINVAL;
+}
diff --git a/drivers/media/test-drivers/vivid/vivid-vid-out.h b/drivers/media/test-drivers/vivid/vivid-vid-out.h
new file mode 100644
index 000000000..8d56314f4
--- /dev/null
+++ b/drivers/media/test-drivers/vivid/vivid-vid-out.h
@@ -0,0 +1,44 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * vivid-vid-out.h - video output support functions.
+ *
+ * Copyright 2014 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
+ */
+
+#ifndef _VIVID_VID_OUT_H_
+#define _VIVID_VID_OUT_H_
+
+extern const struct vb2_ops vivid_vid_out_qops;
+
+void vivid_update_format_out(struct vivid_dev *dev);
+
+int vivid_g_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_try_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_s_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_g_fmt_vid_out_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_out_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_out_mplane(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_g_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_out(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_vid_out_g_selection(struct file *file, void *priv, struct v4l2_selection *sel);
+int vivid_vid_out_s_selection(struct file *file, void *fh, struct v4l2_selection *s);
+int vivid_vid_out_g_pixelaspect(struct file *file, void *priv, int type, struct v4l2_fract *f);
+int vidioc_enum_fmt_vid_out_overlay(struct file *file, void *priv, struct v4l2_fmtdesc *f);
+int vidioc_g_fmt_vid_out_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_try_fmt_vid_out_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vidioc_s_fmt_vid_out_overlay(struct file *file, void *priv, struct v4l2_format *f);
+int vivid_vid_out_overlay(struct file *file, void *fh, unsigned i);
+int vivid_vid_out_g_fbuf(struct file *file, void *fh, struct v4l2_framebuffer *a);
+int vivid_vid_out_s_fbuf(struct file *file, void *fh, const struct v4l2_framebuffer *a);
+int vidioc_enum_output(struct file *file, void *priv, struct v4l2_output *out);
+int vidioc_g_output(struct file *file, void *priv, unsigned *i);
+int vidioc_s_output(struct file *file, void *priv, unsigned i);
+int vidioc_enumaudout(struct file *file, void *fh, struct v4l2_audioout *vout);
+int vidioc_g_audout(struct file *file, void *fh, struct v4l2_audioout *vout);
+int vidioc_s_audout(struct file *file, void *fh, const struct v4l2_audioout *vout);
+int vivid_vid_out_s_std(struct file *file, void *priv, v4l2_std_id id);
+int vivid_vid_out_s_dv_timings(struct file *file, void *_fh, struct v4l2_dv_timings *timings);
+int vivid_vid_out_g_parm(struct file *file, void *priv, struct v4l2_streamparm *parm);
+
+#endif