From 76cb841cb886eef6b3bee341a2266c76578724ad Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Mon, 6 May 2024 03:02:30 +0200 Subject: Adding upstream version 4.19.249. Signed-off-by: Daniel Baumann --- Documentation/media/uapi/v4l/app-pri.rst | 30 + Documentation/media/uapi/v4l/async.rst | 9 + Documentation/media/uapi/v4l/audio.rst | 97 + Documentation/media/uapi/v4l/bayer.svg | 29 + Documentation/media/uapi/v4l/biblio.rst | 381 + Documentation/media/uapi/v4l/buffer.rst | 786 ++ Documentation/media/uapi/v4l/capture-example.rst | 13 + Documentation/media/uapi/v4l/capture.c.rst | 664 ++ Documentation/media/uapi/v4l/colorspaces-defs.rst | 175 + .../media/uapi/v4l/colorspaces-details.rst | 805 +++ Documentation/media/uapi/v4l/colorspaces.rst | 163 + Documentation/media/uapi/v4l/common-defs.rst | 13 + Documentation/media/uapi/v4l/common.rst | 46 + Documentation/media/uapi/v4l/compat.rst | 18 + Documentation/media/uapi/v4l/constraints.svg | 10 + Documentation/media/uapi/v4l/control.rst | 505 ++ Documentation/media/uapi/v4l/crop.rst | 317 + Documentation/media/uapi/v4l/crop.svg | 282 + Documentation/media/uapi/v4l/depth-formats.rst | 16 + Documentation/media/uapi/v4l/dev-capture.rst | 104 + Documentation/media/uapi/v4l/dev-codec.rst | 36 + Documentation/media/uapi/v4l/dev-effect.rst | 21 + Documentation/media/uapi/v4l/dev-event.rst | 47 + Documentation/media/uapi/v4l/dev-meta.rst | 60 + Documentation/media/uapi/v4l/dev-osd.rst | 150 + Documentation/media/uapi/v4l/dev-output.rst | 101 + Documentation/media/uapi/v4l/dev-overlay.rst | 321 + Documentation/media/uapi/v4l/dev-radio.rst | 52 + Documentation/media/uapi/v4l/dev-raw-vbi.rst | 299 + Documentation/media/uapi/v4l/dev-rds.rst | 184 + Documentation/media/uapi/v4l/dev-sdr.rst | 107 + Documentation/media/uapi/v4l/dev-sliced-vbi.rst | 665 ++ Documentation/media/uapi/v4l/dev-subdev.rst | 456 ++ Documentation/media/uapi/v4l/dev-teletext.rst | 34 + Documentation/media/uapi/v4l/dev-touch.rst | 56 + Documentation/media/uapi/v4l/devices.rst | 28 + Documentation/media/uapi/v4l/diff-v4l.rst | 686 ++ Documentation/media/uapi/v4l/dmabuf.rst | 162 + Documentation/media/uapi/v4l/dv-timings.rst | 38 + Documentation/media/uapi/v4l/extended-controls.rst | 3883 ++++++++++ Documentation/media/uapi/v4l/field-order.rst | 160 + Documentation/media/uapi/v4l/fieldseq_bt.svg | 2613 +++++++ Documentation/media/uapi/v4l/fieldseq_tb.svg | 2610 +++++++ Documentation/media/uapi/v4l/format.rst | 92 + Documentation/media/uapi/v4l/func-close.rst | 49 + Documentation/media/uapi/v4l/func-ioctl.rst | 62 + Documentation/media/uapi/v4l/func-mmap.rst | 141 + Documentation/media/uapi/v4l/func-munmap.rst | 58 + Documentation/media/uapi/v4l/func-open.rst | 83 + Documentation/media/uapi/v4l/func-poll.rst | 116 + Documentation/media/uapi/v4l/func-read.rst | 133 + Documentation/media/uapi/v4l/func-select.rst | 120 + Documentation/media/uapi/v4l/func-write.rst | 84 + Documentation/media/uapi/v4l/hist-v4l2.rst | 1367 ++++ Documentation/media/uapi/v4l/hsv-formats.rst | 19 + Documentation/media/uapi/v4l/io.rst | 51 + .../media/uapi/v4l/libv4l-introduction.rst | 184 + Documentation/media/uapi/v4l/libv4l.rst | 13 + Documentation/media/uapi/v4l/meta-formats.rst | 17 + Documentation/media/uapi/v4l/mmap.rst | 285 + Documentation/media/uapi/v4l/nv12mt.svg | 450 ++ Documentation/media/uapi/v4l/nv12mt_example.svg | 1589 ++++ Documentation/media/uapi/v4l/open.rst | 158 + Documentation/media/uapi/v4l/pipeline.dot | 12 + Documentation/media/uapi/v4l/pixfmt-compressed.rst | 104 + 
Documentation/media/uapi/v4l/pixfmt-grey.rst | 44 + Documentation/media/uapi/v4l/pixfmt-indexed.rst | 47 + Documentation/media/uapi/v4l/pixfmt-intro.rst | 51 + Documentation/media/uapi/v4l/pixfmt-inzi.rst | 82 + Documentation/media/uapi/v4l/pixfmt-m420.rst | 126 + Documentation/media/uapi/v4l/pixfmt-meta-uvc.rst | 51 + .../media/uapi/v4l/pixfmt-meta-vsp1-hgo.rst | 168 + .../media/uapi/v4l/pixfmt-meta-vsp1-hgt.rst | 120 + Documentation/media/uapi/v4l/pixfmt-nv12.rst | 129 + Documentation/media/uapi/v4l/pixfmt-nv12m.rst | 144 + Documentation/media/uapi/v4l/pixfmt-nv12mt.rst | 60 + Documentation/media/uapi/v4l/pixfmt-nv16.rst | 153 + Documentation/media/uapi/v4l/pixfmt-nv16m.rst | 157 + Documentation/media/uapi/v4l/pixfmt-nv24.rst | 95 + Documentation/media/uapi/v4l/pixfmt-packed-hsv.rst | 157 + Documentation/media/uapi/v4l/pixfmt-packed-rgb.rst | 863 +++ Documentation/media/uapi/v4l/pixfmt-packed-yuv.rst | 198 + Documentation/media/uapi/v4l/pixfmt-reserved.rst | 263 + Documentation/media/uapi/v4l/pixfmt-rgb.rst | 23 + Documentation/media/uapi/v4l/pixfmt-sdr-cs08.rst | 30 + Documentation/media/uapi/v4l/pixfmt-sdr-cs14le.rst | 34 + Documentation/media/uapi/v4l/pixfmt-sdr-cu08.rst | 30 + Documentation/media/uapi/v4l/pixfmt-sdr-cu16le.rst | 34 + .../media/uapi/v4l/pixfmt-sdr-pcu16be.rst | 55 + .../media/uapi/v4l/pixfmt-sdr-pcu18be.rst | 55 + .../media/uapi/v4l/pixfmt-sdr-pcu20be.rst | 54 + Documentation/media/uapi/v4l/pixfmt-sdr-ru12le.rst | 32 + .../media/uapi/v4l/pixfmt-srggb10-ipu3.rst | 335 + Documentation/media/uapi/v4l/pixfmt-srggb10.rst | 76 + .../media/uapi/v4l/pixfmt-srggb10alaw8.rst | 24 + .../media/uapi/v4l/pixfmt-srggb10dpcm8.rst | 28 + Documentation/media/uapi/v4l/pixfmt-srggb10p.rst | 74 + Documentation/media/uapi/v4l/pixfmt-srggb12.rst | 77 + Documentation/media/uapi/v4l/pixfmt-srggb12p.rst | 86 + Documentation/media/uapi/v4l/pixfmt-srggb14p.rst | 127 + Documentation/media/uapi/v4l/pixfmt-srggb16.rst | 69 + Documentation/media/uapi/v4l/pixfmt-srggb8.rst | 54 + Documentation/media/uapi/v4l/pixfmt-tch-td08.rst | 52 + Documentation/media/uapi/v4l/pixfmt-tch-td16.rst | 67 + Documentation/media/uapi/v4l/pixfmt-tch-tu08.rst | 50 + Documentation/media/uapi/v4l/pixfmt-tch-tu16.rst | 66 + Documentation/media/uapi/v4l/pixfmt-uv8.rst | 47 + Documentation/media/uapi/v4l/pixfmt-uyvy.rst | 110 + .../media/uapi/v4l/pixfmt-v4l2-mplane.rst | 111 + Documentation/media/uapi/v4l/pixfmt-v4l2.rst | 150 + Documentation/media/uapi/v4l/pixfmt-vyuy.rst | 108 + Documentation/media/uapi/v4l/pixfmt-y10.rst | 65 + Documentation/media/uapi/v4l/pixfmt-y10b.rst | 33 + Documentation/media/uapi/v4l/pixfmt-y10p.rst | 33 + Documentation/media/uapi/v4l/pixfmt-y12.rst | 65 + Documentation/media/uapi/v4l/pixfmt-y12i.rst | 36 + Documentation/media/uapi/v4l/pixfmt-y16-be.rst | 69 + Documentation/media/uapi/v4l/pixfmt-y16.rst | 69 + Documentation/media/uapi/v4l/pixfmt-y41p.rst | 151 + Documentation/media/uapi/v4l/pixfmt-y8i.rst | 66 + Documentation/media/uapi/v4l/pixfmt-yuv410.rst | 127 + Documentation/media/uapi/v4l/pixfmt-yuv411p.rst | 115 + Documentation/media/uapi/v4l/pixfmt-yuv420.rst | 143 + Documentation/media/uapi/v4l/pixfmt-yuv420m.rst | 152 + Documentation/media/uapi/v4l/pixfmt-yuv422m.rst | 141 + Documentation/media/uapi/v4l/pixfmt-yuv422p.rst | 129 + Documentation/media/uapi/v4l/pixfmt-yuv444m.rst | 141 + Documentation/media/uapi/v4l/pixfmt-yuyv.rst | 118 + Documentation/media/uapi/v4l/pixfmt-yvyu.rst | 108 + Documentation/media/uapi/v4l/pixfmt-z16.rst | 66 + Documentation/media/uapi/v4l/pixfmt.rst | 37 + 
Documentation/media/uapi/v4l/planar-apis.rst | 61 + Documentation/media/uapi/v4l/querycap.rst | 34 + Documentation/media/uapi/v4l/rw.rst | 47 + Documentation/media/uapi/v4l/sdr-formats.rst | 22 + .../media/uapi/v4l/selection-api-configuration.rst | 137 + .../media/uapi/v4l/selection-api-examples.rst | 84 + .../media/uapi/v4l/selection-api-intro.rst | 28 + .../media/uapi/v4l/selection-api-targets.rst | 20 + .../media/uapi/v4l/selection-api-vs-crop-api.rst | 39 + Documentation/media/uapi/v4l/selection-api.rst | 16 + Documentation/media/uapi/v4l/selection.svg | 1151 +++ Documentation/media/uapi/v4l/selections-common.rst | 23 + Documentation/media/uapi/v4l/standard.rst | 185 + Documentation/media/uapi/v4l/streaming-par.rst | 33 + Documentation/media/uapi/v4l/subdev-formats.rst | 7567 ++++++++++++++++++++ .../uapi/v4l/subdev-image-processing-crop.svg | 302 + .../uapi/v4l/subdev-image-processing-full.svg | 742 ++ ...ubdev-image-processing-scaling-multi-source.svg | 540 ++ Documentation/media/uapi/v4l/tch-formats.rst | 18 + Documentation/media/uapi/v4l/tuner.rst | 85 + Documentation/media/uapi/v4l/user-func.rst | 81 + Documentation/media/uapi/v4l/userp.rst | 121 + .../media/uapi/v4l/v4l2-selection-flags.rst | 44 + .../media/uapi/v4l/v4l2-selection-targets.rst | 74 + Documentation/media/uapi/v4l/v4l2.rst | 408 ++ Documentation/media/uapi/v4l/v4l2grab-example.rst | 17 + Documentation/media/uapi/v4l/v4l2grab.c.rst | 169 + Documentation/media/uapi/v4l/vbi_525.svg | 813 +++ Documentation/media/uapi/v4l/vbi_625.svg | 862 +++ Documentation/media/uapi/v4l/vbi_hsync.svg | 313 + Documentation/media/uapi/v4l/video.rst | 68 + Documentation/media/uapi/v4l/videodev.rst | 9 + .../media/uapi/v4l/vidioc-create-bufs.rst | 122 + Documentation/media/uapi/v4l/vidioc-cropcap.rst | 136 + .../media/uapi/v4l/vidioc-dbg-g-chip-info.rst | 160 + .../media/uapi/v4l/vidioc-dbg-g-register.rst | 164 + .../media/uapi/v4l/vidioc-decoder-cmd.rst | 210 + Documentation/media/uapi/v4l/vidioc-dqevent.rst | 390 + .../media/uapi/v4l/vidioc-dv-timings-cap.rst | 164 + .../media/uapi/v4l/vidioc-encoder-cmd.rst | 161 + .../media/uapi/v4l/vidioc-enum-dv-timings.rst | 107 + Documentation/media/uapi/v4l/vidioc-enum-fmt.rst | 130 + .../media/uapi/v4l/vidioc-enum-frameintervals.rst | 201 + .../media/uapi/v4l/vidioc-enum-framesizes.rst | 210 + .../media/uapi/v4l/vidioc-enum-freq-bands.rst | 143 + Documentation/media/uapi/v4l/vidioc-enumaudio.rst | 55 + .../media/uapi/v4l/vidioc-enumaudioout.rst | 60 + Documentation/media/uapi/v4l/vidioc-enuminput.rst | 235 + Documentation/media/uapi/v4l/vidioc-enumoutput.rst | 158 + Documentation/media/uapi/v4l/vidioc-enumstd.rst | 360 + Documentation/media/uapi/v4l/vidioc-expbuf.rst | 168 + Documentation/media/uapi/v4l/vidioc-g-audio.rst | 128 + Documentation/media/uapi/v4l/vidioc-g-audioout.rst | 101 + Documentation/media/uapi/v4l/vidioc-g-crop.rst | 112 + Documentation/media/uapi/v4l/vidioc-g-ctrl.rst | 99 + .../media/uapi/v4l/vidioc-g-dv-timings.rst | 296 + Documentation/media/uapi/v4l/vidioc-g-edid.rst | 147 + .../media/uapi/v4l/vidioc-g-enc-index.rst | 149 + .../media/uapi/v4l/vidioc-g-ext-ctrls.rst | 374 + Documentation/media/uapi/v4l/vidioc-g-fbuf.rst | 355 + Documentation/media/uapi/v4l/vidioc-g-fmt.rst | 156 + .../media/uapi/v4l/vidioc-g-frequency.rst | 105 + Documentation/media/uapi/v4l/vidioc-g-input.rst | 64 + Documentation/media/uapi/v4l/vidioc-g-jpegcomp.rst | 127 + .../media/uapi/v4l/vidioc-g-modulator.rst | 195 + Documentation/media/uapi/v4l/vidioc-g-output.rst | 66 + 
Documentation/media/uapi/v4l/vidioc-g-parm.rst | 264 + Documentation/media/uapi/v4l/vidioc-g-priority.rst | 93 + .../media/uapi/v4l/vidioc-g-selection.rst | 193 + .../media/uapi/v4l/vidioc-g-sliced-vbi-cap.rst | 195 + Documentation/media/uapi/v4l/vidioc-g-std.rst | 74 + Documentation/media/uapi/v4l/vidioc-g-tuner.rst | 469 ++ Documentation/media/uapi/v4l/vidioc-log-status.rst | 49 + Documentation/media/uapi/v4l/vidioc-overlay.rst | 54 + .../media/uapi/v4l/vidioc-prepare-buf.rst | 61 + Documentation/media/uapi/v4l/vidioc-qbuf.rst | 155 + .../media/uapi/v4l/vidioc-query-dv-timings.rst | 87 + Documentation/media/uapi/v4l/vidioc-querybuf.rst | 80 + Documentation/media/uapi/v4l/vidioc-querycap.rst | 274 + Documentation/media/uapi/v4l/vidioc-queryctrl.rst | 550 ++ Documentation/media/uapi/v4l/vidioc-querystd.rst | 70 + Documentation/media/uapi/v4l/vidioc-reqbufs.rst | 105 + .../media/uapi/v4l/vidioc-s-hw-freq-seek.rst | 140 + Documentation/media/uapi/v4l/vidioc-streamon.rst | 106 + .../uapi/v4l/vidioc-subdev-enum-frame-interval.rst | 113 + .../uapi/v4l/vidioc-subdev-enum-frame-size.rst | 118 + .../uapi/v4l/vidioc-subdev-enum-mbus-code.rst | 91 + .../media/uapi/v4l/vidioc-subdev-g-crop.rst | 118 + .../media/uapi/v4l/vidioc-subdev-g-fmt.rst | 147 + .../uapi/v4l/vidioc-subdev-g-frame-interval.rst | 110 + .../media/uapi/v4l/vidioc-subdev-g-selection.rst | 118 + .../media/uapi/v4l/vidioc-subscribe-event.rst | 116 + Documentation/media/uapi/v4l/yuv-formats.rst | 56 + 224 files changed, 53176 insertions(+) create mode 100644 Documentation/media/uapi/v4l/app-pri.rst create mode 100644 Documentation/media/uapi/v4l/async.rst create mode 100644 Documentation/media/uapi/v4l/audio.rst create mode 100644 Documentation/media/uapi/v4l/bayer.svg create mode 100644 Documentation/media/uapi/v4l/biblio.rst create mode 100644 Documentation/media/uapi/v4l/buffer.rst create mode 100644 Documentation/media/uapi/v4l/capture-example.rst create mode 100644 Documentation/media/uapi/v4l/capture.c.rst create mode 100644 Documentation/media/uapi/v4l/colorspaces-defs.rst create mode 100644 Documentation/media/uapi/v4l/colorspaces-details.rst create mode 100644 Documentation/media/uapi/v4l/colorspaces.rst create mode 100644 Documentation/media/uapi/v4l/common-defs.rst create mode 100644 Documentation/media/uapi/v4l/common.rst create mode 100644 Documentation/media/uapi/v4l/compat.rst create mode 100644 Documentation/media/uapi/v4l/constraints.svg create mode 100644 Documentation/media/uapi/v4l/control.rst create mode 100644 Documentation/media/uapi/v4l/crop.rst create mode 100644 Documentation/media/uapi/v4l/crop.svg create mode 100644 Documentation/media/uapi/v4l/depth-formats.rst create mode 100644 Documentation/media/uapi/v4l/dev-capture.rst create mode 100644 Documentation/media/uapi/v4l/dev-codec.rst create mode 100644 Documentation/media/uapi/v4l/dev-effect.rst create mode 100644 Documentation/media/uapi/v4l/dev-event.rst create mode 100644 Documentation/media/uapi/v4l/dev-meta.rst create mode 100644 Documentation/media/uapi/v4l/dev-osd.rst create mode 100644 Documentation/media/uapi/v4l/dev-output.rst create mode 100644 Documentation/media/uapi/v4l/dev-overlay.rst create mode 100644 Documentation/media/uapi/v4l/dev-radio.rst create mode 100644 Documentation/media/uapi/v4l/dev-raw-vbi.rst create mode 100644 Documentation/media/uapi/v4l/dev-rds.rst create mode 100644 Documentation/media/uapi/v4l/dev-sdr.rst create mode 100644 Documentation/media/uapi/v4l/dev-sliced-vbi.rst create mode 100644 
Documentation/media/uapi/v4l/dev-subdev.rst create mode 100644 Documentation/media/uapi/v4l/dev-teletext.rst create mode 100644 Documentation/media/uapi/v4l/dev-touch.rst create mode 100644 Documentation/media/uapi/v4l/devices.rst create mode 100644 Documentation/media/uapi/v4l/diff-v4l.rst create mode 100644 Documentation/media/uapi/v4l/dmabuf.rst create mode 100644 Documentation/media/uapi/v4l/dv-timings.rst create mode 100644 Documentation/media/uapi/v4l/extended-controls.rst create mode 100644 Documentation/media/uapi/v4l/field-order.rst create mode 100644 Documentation/media/uapi/v4l/fieldseq_bt.svg create mode 100644 Documentation/media/uapi/v4l/fieldseq_tb.svg create mode 100644 Documentation/media/uapi/v4l/format.rst create mode 100644 Documentation/media/uapi/v4l/func-close.rst create mode 100644 Documentation/media/uapi/v4l/func-ioctl.rst create mode 100644 Documentation/media/uapi/v4l/func-mmap.rst create mode 100644 Documentation/media/uapi/v4l/func-munmap.rst create mode 100644 Documentation/media/uapi/v4l/func-open.rst create mode 100644 Documentation/media/uapi/v4l/func-poll.rst create mode 100644 Documentation/media/uapi/v4l/func-read.rst create mode 100644 Documentation/media/uapi/v4l/func-select.rst create mode 100644 Documentation/media/uapi/v4l/func-write.rst create mode 100644 Documentation/media/uapi/v4l/hist-v4l2.rst create mode 100644 Documentation/media/uapi/v4l/hsv-formats.rst create mode 100644 Documentation/media/uapi/v4l/io.rst create mode 100644 Documentation/media/uapi/v4l/libv4l-introduction.rst create mode 100644 Documentation/media/uapi/v4l/libv4l.rst create mode 100644 Documentation/media/uapi/v4l/meta-formats.rst create mode 100644 Documentation/media/uapi/v4l/mmap.rst create mode 100644 Documentation/media/uapi/v4l/nv12mt.svg create mode 100644 Documentation/media/uapi/v4l/nv12mt_example.svg create mode 100644 Documentation/media/uapi/v4l/open.rst create mode 100644 Documentation/media/uapi/v4l/pipeline.dot create mode 100644 Documentation/media/uapi/v4l/pixfmt-compressed.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-grey.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-indexed.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-intro.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-inzi.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-m420.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-meta-uvc.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-meta-vsp1-hgo.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-meta-vsp1-hgt.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv12.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv12m.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv12mt.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv16m.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-nv24.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-packed-hsv.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-packed-rgb.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-packed-yuv.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-reserved.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-rgb.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-cs08.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-cs14le.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-cu08.rst create mode 100644 
Documentation/media/uapi/v4l/pixfmt-sdr-cu16le.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-pcu16be.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-pcu18be.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-pcu20be.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-sdr-ru12le.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb10-ipu3.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb10.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb10alaw8.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb10dpcm8.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb10p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb12.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb12p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb14p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-srggb8.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-tch-td08.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-tch-td16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-tch-tu08.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-tch-tu16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-uv8.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-uyvy.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-v4l2-mplane.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-v4l2.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-vyuy.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y10.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y10b.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y10p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y12.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y12i.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y16-be.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y41p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-y8i.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv410.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv411p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv420.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv420m.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv422m.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv422p.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuv444m.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yuyv.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-yvyu.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt-z16.rst create mode 100644 Documentation/media/uapi/v4l/pixfmt.rst create mode 100644 Documentation/media/uapi/v4l/planar-apis.rst create mode 100644 Documentation/media/uapi/v4l/querycap.rst create mode 100644 Documentation/media/uapi/v4l/rw.rst create mode 100644 Documentation/media/uapi/v4l/sdr-formats.rst create mode 100644 Documentation/media/uapi/v4l/selection-api-configuration.rst create mode 100644 Documentation/media/uapi/v4l/selection-api-examples.rst create mode 100644 Documentation/media/uapi/v4l/selection-api-intro.rst create mode 100644 Documentation/media/uapi/v4l/selection-api-targets.rst create mode 100644 Documentation/media/uapi/v4l/selection-api-vs-crop-api.rst create mode 100644 
Documentation/media/uapi/v4l/selection-api.rst create mode 100644 Documentation/media/uapi/v4l/selection.svg create mode 100644 Documentation/media/uapi/v4l/selections-common.rst create mode 100644 Documentation/media/uapi/v4l/standard.rst create mode 100644 Documentation/media/uapi/v4l/streaming-par.rst create mode 100644 Documentation/media/uapi/v4l/subdev-formats.rst create mode 100644 Documentation/media/uapi/v4l/subdev-image-processing-crop.svg create mode 100644 Documentation/media/uapi/v4l/subdev-image-processing-full.svg create mode 100644 Documentation/media/uapi/v4l/subdev-image-processing-scaling-multi-source.svg create mode 100644 Documentation/media/uapi/v4l/tch-formats.rst create mode 100644 Documentation/media/uapi/v4l/tuner.rst create mode 100644 Documentation/media/uapi/v4l/user-func.rst create mode 100644 Documentation/media/uapi/v4l/userp.rst create mode 100644 Documentation/media/uapi/v4l/v4l2-selection-flags.rst create mode 100644 Documentation/media/uapi/v4l/v4l2-selection-targets.rst create mode 100644 Documentation/media/uapi/v4l/v4l2.rst create mode 100644 Documentation/media/uapi/v4l/v4l2grab-example.rst create mode 100644 Documentation/media/uapi/v4l/v4l2grab.c.rst create mode 100644 Documentation/media/uapi/v4l/vbi_525.svg create mode 100644 Documentation/media/uapi/v4l/vbi_625.svg create mode 100644 Documentation/media/uapi/v4l/vbi_hsync.svg create mode 100644 Documentation/media/uapi/v4l/video.rst create mode 100644 Documentation/media/uapi/v4l/videodev.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-create-bufs.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-cropcap.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-dbg-g-chip-info.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-dbg-g-register.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-decoder-cmd.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-dqevent.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-dv-timings-cap.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-encoder-cmd.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enum-dv-timings.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enum-fmt.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enum-frameintervals.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enum-framesizes.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enum-freq-bands.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enumaudio.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enumaudioout.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enuminput.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enumoutput.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-enumstd.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-expbuf.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-audio.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-audioout.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-crop.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-ctrl.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-dv-timings.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-edid.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-enc-index.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-ext-ctrls.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-fbuf.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-fmt.rst create 
mode 100644 Documentation/media/uapi/v4l/vidioc-g-frequency.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-input.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-jpegcomp.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-modulator.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-output.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-parm.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-priority.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-selection.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-sliced-vbi-cap.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-std.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-g-tuner.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-log-status.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-overlay.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-prepare-buf.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-qbuf.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-query-dv-timings.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-querybuf.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-querycap.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-queryctrl.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-querystd.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-reqbufs.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-s-hw-freq-seek.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-streamon.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-enum-frame-interval.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-enum-frame-size.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-enum-mbus-code.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-g-crop.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-g-fmt.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-g-frame-interval.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subdev-g-selection.rst create mode 100644 Documentation/media/uapi/v4l/vidioc-subscribe-event.rst create mode 100644 Documentation/media/uapi/v4l/yuv-formats.rst (limited to 'Documentation/media/uapi/v4l') diff --git a/Documentation/media/uapi/v4l/app-pri.rst b/Documentation/media/uapi/v4l/app-pri.rst new file mode 100644 index 000000000..a8c41a7ec --- /dev/null +++ b/Documentation/media/uapi/v4l/app-pri.rst @@ -0,0 +1,30 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _app-pri: + +******************** +Application Priority +******************** + +When multiple applications share a device it may be desirable to assign +them different priorities. Contrary to the traditional "rm -rf /" school +of thought a video recording application could for example block other +applications from changing video controls or switching the current TV +channel. Another objective is to permit low priority applications +working in background, which can be preempted by user controlled +applications and automatically regain control of the device at a later +time. + +Since these features cannot be implemented entirely in user space V4L2 +defines the :ref:`VIDIOC_G_PRIORITY ` and +:ref:`VIDIOC_S_PRIORITY ` ioctls to request and +query the access priority associate with a file descriptor. Opening a +device assigns a medium priority, compatible with earlier versions of +V4L2 and drivers not supporting these ioctls. 
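+
+A minimal sketch of requesting a higher priority, assuming ``fd`` is an
+open video device file descriptor and record priority is wanted:
+
+.. code-block:: c
+
+    enum v4l2_priority prio;
+
+    /* Query the access priority currently associated with fd. */
+    if (-1 == ioctl(fd, VIDIOC_G_PRIORITY, &prio)) {
+        perror("VIDIOC_G_PRIORITY");
+        exit(EXIT_FAILURE);
+    }
+
+    /* Ask for record priority so other applications cannot switch
+       inputs or change controls behind our back. */
+    prio = V4L2_PRIORITY_RECORD;
+
+    if (-1 == ioctl(fd, VIDIOC_S_PRIORITY, &prio)) {
+        perror("VIDIOC_S_PRIORITY");
+        exit(EXIT_FAILURE);
+    }
+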
Applications requiring a +different priority will usually call :ref:`VIDIOC_S_PRIORITY +` after verifying the device with the +:ref:`VIDIOC_QUERYCAP` ioctl. + +Ioctls changing driver properties, such as +:ref:`VIDIOC_S_INPUT `, return an ``EBUSY`` error code +after another application obtained higher priority. diff --git a/Documentation/media/uapi/v4l/async.rst b/Documentation/media/uapi/v4l/async.rst new file mode 100644 index 000000000..5affc0adb --- /dev/null +++ b/Documentation/media/uapi/v4l/async.rst @@ -0,0 +1,9 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _async: + +**************** +Asynchronous I/O +**************** + +This method is not defined yet. diff --git a/Documentation/media/uapi/v4l/audio.rst b/Documentation/media/uapi/v4l/audio.rst new file mode 100644 index 000000000..5ec99a280 --- /dev/null +++ b/Documentation/media/uapi/v4l/audio.rst @@ -0,0 +1,97 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _audio: + +************************ +Audio Inputs and Outputs +************************ + +Audio inputs and outputs are physical connectors of a device. Video +capture devices have inputs, output devices have outputs, zero or more +each. Radio devices have no audio inputs or outputs. They have exactly +one tuner which in fact *is* an audio source, but this API associates +tuners with video inputs or outputs only, and radio devices have none of +these. [#f1]_ A connector on a TV card to loop back the received audio +signal to a sound card is not considered an audio output. + +Audio and video inputs and outputs are associated. Selecting a video +source also selects an audio source. This is most evident when the video +and audio source is a tuner. Further audio connectors can combine with +more than one video input or output. Assumed two composite video inputs +and two audio inputs exist, there may be up to four valid combinations. +The relation of video and audio connectors is defined in the +``audioset`` field of the respective struct +:c:type:`v4l2_input` or struct +:c:type:`v4l2_output`, where each bit represents the index +number, starting at zero, of one audio input or output. + +To learn about the number and attributes of the available inputs and +outputs applications can enumerate them with the +:ref:`VIDIOC_ENUMAUDIO` and +:ref:`VIDIOC_ENUMAUDOUT ` ioctl, respectively. +The struct :c:type:`v4l2_audio` returned by the +:ref:`VIDIOC_ENUMAUDIO` ioctl also contains signal +:status information applicable when the current audio input is queried. + +The :ref:`VIDIOC_G_AUDIO ` and +:ref:`VIDIOC_G_AUDOUT ` ioctls report the current +audio input and output, respectively. + +.. note:: + + Note that, unlike :ref:`VIDIOC_G_INPUT ` and + :ref:`VIDIOC_G_OUTPUT ` these ioctls return a + structure as :ref:`VIDIOC_ENUMAUDIO` and + :ref:`VIDIOC_ENUMAUDOUT ` do, not just an index. + +To select an audio input and change its properties applications call the +:ref:`VIDIOC_S_AUDIO ` ioctl. To select an audio +output (which presently has no changeable properties) applications call +the :ref:`VIDIOC_S_AUDOUT ` ioctl. + +Drivers must implement all audio input ioctls when the device has +multiple selectable audio inputs, all audio output ioctls when the +device has multiple selectable audio outputs. When the device has any +audio inputs or outputs the driver must set the ``V4L2_CAP_AUDIO`` flag +in the struct :c:type:`v4l2_capability` returned by +the :ref:`VIDIOC_QUERYCAP` ioctl. + + +Example: Information about the current audio input +================================================== + +.. 
code-block:: c + + struct v4l2_audio audio; + + memset(&audio, 0, sizeof(audio)); + + if (-1 == ioctl(fd, VIDIOC_G_AUDIO, &audio)) { + perror("VIDIOC_G_AUDIO"); + exit(EXIT_FAILURE); + } + + printf("Current input: %s\\n", audio.name); + + +Example: Switching to the first audio input +=========================================== + +.. code-block:: c + + struct v4l2_audio audio; + + memset(&audio, 0, sizeof(audio)); /* clear audio.mode, audio.reserved */ + + audio.index = 0; + + if (-1 == ioctl(fd, VIDIOC_S_AUDIO, &audio)) { + perror("VIDIOC_S_AUDIO"); + exit(EXIT_FAILURE); + } + +.. [#f1] + Actually struct :c:type:`v4l2_audio` ought to have a + ``tuner`` field like struct :c:type:`v4l2_input`, not + only making the API more consistent but also permitting radio devices + with multiple tuners. diff --git a/Documentation/media/uapi/v4l/bayer.svg b/Documentation/media/uapi/v4l/bayer.svg new file mode 100644 index 000000000..c395113d1 --- /dev/null +++ b/Documentation/media/uapi/v4l/bayer.svg @@ -0,0 +1,29 @@ + +image/svg+xmlB +G +G +R +BGGR +B +G +G +R +GBRG +B +G +G +R +RGGB +B +G +G +R +GRBG + diff --git a/Documentation/media/uapi/v4l/biblio.rst b/Documentation/media/uapi/v4l/biblio.rst new file mode 100644 index 000000000..386d6cf83 --- /dev/null +++ b/Documentation/media/uapi/v4l/biblio.rst @@ -0,0 +1,381 @@ +.. -*- coding: utf-8; mode: rst -*- + +********** +References +********** + + +.. _cea608: + +CEA 608-E +========= + + +:title: CEA-608-E R-2014 "Line 21 Data Services" + +:author: Consumer Electronics Association (http://www.ce.org) + +.. _en300294: + +EN 300 294 +========== + + +:title: EN 300 294 "625-line television Wide Screen Signalling (WSS)" + +:author: European Telecommunication Standards Institute (http://www.etsi.org) + +.. _ets300231: + +ETS 300 231 +=========== + + +:title: ETS 300 231 "Specification of the domestic video Programme Delivery Control system (PDC)" + +:author: European Telecommunication Standards Institute (http://www.etsi.org) + +.. _ets300706: + +ETS 300 706 +=========== + + +:title: ETS 300 706 "Enhanced Teletext specification" + +:author: European Telecommunication Standards Institute (http://www.etsi.org) + +.. _mpeg2part1: + +ISO 13818-1 +=========== + + +:title: ITU-T Rec. H.222.0 | ISO/IEC 13818-1 "Information technology — Generic coding of moving pictures and associated audio information: Systems" + +:author: International Telecommunication Union (http://www.itu.ch), International Organisation for Standardisation (http://www.iso.ch) + +.. _mpeg2part2: + +ISO 13818-2 +=========== + + +:title: ITU-T Rec. H.262 | ISO/IEC 13818-2 "Information technology — Generic coding of moving pictures and associated audio information: Video" + +:author: International Telecommunication Union (http://www.itu.ch), International Organisation for Standardisation (http://www.iso.ch) + +.. _itu470: + +ITU BT.470 +========== + + +:title: ITU-R Recommendation BT.470-6 "Conventional Television Systems" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. _itu601: + +ITU BT.601 +========== + + +:title: ITU-R Recommendation BT.601-5 "Studio Encoding Parameters of Digital Television for Standard 4:3 and Wide-Screen 16:9 Aspect Ratios" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. _itu653: + +ITU BT.653 +========== + + +:title: ITU-R Recommendation BT.653-3 "Teletext systems" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. 
_itu709: + +ITU BT.709 +========== + + +:title: ITU-R Recommendation BT.709-5 "Parameter values for the HDTV standards for production and international programme exchange" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. _itu1119: + +ITU BT.1119 +=========== + + +:title: ITU-R Recommendation BT.1119 "625-line television Wide Screen Signalling (WSS)" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. _jfif: + +JFIF +==== + + +:title: JPEG File Interchange Format +:subtitle: Version 1.02 + +:author: Independent JPEG Group (http://www.ijg.org) + +.. _itu-t81: + +ITU-T.81 +======== + + +:title: ITU-T Recommendation T.81 "Information Technology — Digital Compression and Coding of Continous-Tone Still Images — Requirements and Guidelines" + +:author: International Telecommunication Union (http://www.itu.int) + +.. _w3c-jpeg-jfif: + +W3C JPEG JFIF +============= + + +:title: JPEG JFIF + +:author: The World Wide Web Consortium (http://www.w3.org) + +.. _smpte12m: + +SMPTE 12M +========= + + +:title: SMPTE 12M-1999 "Television, Audio and Film - Time and Control Code" + +:author: Society of Motion Picture and Television Engineers (http://www.smpte.org) + +.. _smpte170m: + +SMPTE 170M +========== + + +:title: SMPTE 170M-1999 "Television - Composite Analog Video Signal - NTSC for Studio Applications" + +:author: Society of Motion Picture and Television Engineers (http://www.smpte.org) + +.. _smpte240m: + +SMPTE 240M +========== + + +:title: SMPTE 240M-1999 "Television - Signal Parameters - 1125-Line High-Definition Production" + +:author: Society of Motion Picture and Television Engineers (http://www.smpte.org) + +.. _smpte431: + +SMPTE RP 431-2 +============== + + +:title: SMPTE RP 431-2:2011 "D-Cinema Quality - Reference Projector and Environment" + +:author: Society of Motion Picture and Television Engineers (http://www.smpte.org) + +.. _smpte2084: + +SMPTE ST 2084 +============= + + +:title: SMPTE ST 2084:2014 "High Dynamic Range Electro-Optical Transfer Function of Master Reference Displays" + +:author: Society of Motion Picture and Television Engineers (http://www.smpte.org) + +.. _srgb: + +sRGB +==== + + +:title: IEC 61966-2-1 ed1.0 "Multimedia systems and equipment - Colour measurement and management - Part 2-1: Colour management - Default RGB colour space - sRGB" + +:author: International Electrotechnical Commission (http://www.iec.ch) + +.. _sycc: + +sYCC +==== + + +:title: IEC 61966-2-1-am1 ed1.0 "Amendment 1 - Multimedia systems and equipment - Colour measurement and management - Part 2-1: Colour management - Default RGB colour space - sRGB" + +:author: International Electrotechnical Commission (http://www.iec.ch) + +.. _xvycc: + +xvYCC +===== + + +:title: IEC 61966-2-4 ed1.0 "Multimedia systems and equipment - Colour measurement and management - Part 2-4: Colour management - Extended-gamut YCC colour space for video applications - xvYCC" + +:author: International Electrotechnical Commission (http://www.iec.ch) + +.. _oprgb: + +opRGB +===== + + +:title: IEC 61966-2-5 "Multimedia systems and equipment - Colour measurement and management - Part 2-5: Colour management - Optional RGB colour space - opRGB" + +:author: International Electrotechnical Commission (http://www.iec.ch) + +.. 
_itu2020: + +ITU BT.2020 +=========== + + +:title: ITU-R Recommendation BT.2020 (08/2012) "Parameter values for ultra-high definition television systems for production and international programme exchange" + +:author: International Telecommunication Union (http://www.itu.ch) + +.. _tech3213: + +EBU Tech 3213 +============= + + +:title: E.B.U. Standard for Chromaticity Tolerances for Studio Monitors" + +:author: European Broadcast Union (http://www.ebu.ch) + +.. _iec62106: + +IEC 62106 +========= + + +:title: Specification of the radio data system (RDS) for VHF/FM sound broadcasting in the frequency range from 87,5 to 108,0 MHz + +:author: International Electrotechnical Commission (http://www.iec.ch) + +.. _nrsc4: + +NRSC-4-B +======== + + +:title: NRSC-4-B: United States RBDS Standard + +:author: National Radio Systems Committee (http://www.nrscstandards.org) + +.. _iso12232: + +ISO 12232:2006 +============== + + +:title: Photography — Digital still cameras — Determination of exposure index, ISO speed ratings, standard output sensitivity, and recommended exposure index + +:author: International Organization for Standardization (http://www.iso.org) + +.. _cea861: + +CEA-861-E +========= + + +:title: A DTV Profile for Uncompressed High Speed Digital Interfaces + +:author: Consumer Electronics Association (http://www.ce.org) + +.. _vesadmt: + +VESA DMT +======== + + +:title: VESA and Industry Standards and Guidelines for Computer Display Monitor Timing (DMT) + +:author: Video Electronics Standards Association (http://www.vesa.org) + +.. _vesaedid: + +EDID +==== + + +:title: VESA Enhanced Extended Display Identification Data Standard +:subtitle: Release A, Revision 2 + +:author: Video Electronics Standards Association (http://www.vesa.org) + +.. _hdcp: + +HDCP +==== + + +:title: High-bandwidth Digital Content Protection System +:subtitle: Revision 1.3 + +:author: Digital Content Protection LLC (http://www.digital-cp.com) + +.. _hdmi: + +HDMI +==== + + +:title: High-Definition Multimedia Interface +:subtitle: Specification Version 1.4a + +:author: HDMI Licensing LLC (http://www.hdmi.org) + +.. _hdmi2: + +HDMI2 +===== + +:title: High-Definition Multimedia Interface +:subtitle: Specification Version 2.0 + +:author: HDMI Licensing LLC (http://www.hdmi.org) + +.. _dp: + +DP +== + + +:title: VESA DisplayPort Standard +:subtitle: Version 1, Revision 2 + +:author: Video Electronics Standards Association (http://www.vesa.org) + +.. _poynton: + +poynton +======= + + +:title: Digital Video and HDTV, Algorithms and Interfaces + +:author: Charles Poynton + +.. _colimg: + +colimg +====== + + +:title: Color Imaging: Fundamentals and Applications + +:author: Erik Reinhard et al. diff --git a/Documentation/media/uapi/v4l/buffer.rst b/Documentation/media/uapi/v4l/buffer.rst new file mode 100644 index 000000000..e2c85ddc9 --- /dev/null +++ b/Documentation/media/uapi/v4l/buffer.rst @@ -0,0 +1,786 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _buffer: + +******* +Buffers +******* + +A buffer contains data exchanged by application and driver using one of +the Streaming I/O methods. In the multi-planar API, the data is held in +planes, while the buffer structure acts as a container for the planes. +Only pointers to buffers (planes) are exchanged, the data itself is not +copied. These pointers, together with meta-information like timestamps +or field parity, are stored in a struct :c:type:`v4l2_buffer`, +argument to the :ref:`VIDIOC_QUERYBUF`, +:ref:`VIDIOC_QBUF ` and +:ref:`VIDIOC_DQBUF ` ioctl. 
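+
+A minimal single-planar sketch of this exchange, assuming ``fd`` is a
+video capture device for which memory mapped buffers have already been
+allocated with :ref:`VIDIOC_REQBUFS` and streaming has been started:
+
+.. code-block:: c
+
+    struct v4l2_buffer buf;
+
+    memset(&buf, 0, sizeof(buf));
+    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    buf.memory = V4L2_MEMORY_MMAP;
+    buf.index  = 0;
+
+    /* Query the buffer state, e.g. to learn its length and offset. */
+    if (-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf)) {
+        perror("VIDIOC_QUERYBUF");
+        exit(EXIT_FAILURE);
+    }
+
+    /* Hand the buffer to the driver ... */
+    if (-1 == ioctl(fd, VIDIOC_QBUF, &buf)) {
+        perror("VIDIOC_QBUF");
+        exit(EXIT_FAILURE);
+    }
+
+    /* ... and take it back once it has been filled. The dequeued
+       structure carries the timestamp, field order and payload size. */
+    if (-1 == ioctl(fd, VIDIOC_DQBUF, &buf)) {
+        perror("VIDIOC_DQBUF");
+        exit(EXIT_FAILURE);
+    }
+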
In the multi-planar API, +some plane-specific members of struct :c:type:`v4l2_buffer`, +such as pointers and sizes for each plane, are stored in struct +struct :c:type:`v4l2_plane` instead. In that case, struct +struct :c:type:`v4l2_buffer` contains an array of plane structures. + +Dequeued video buffers come with timestamps. The driver decides at which +part of the frame and with which clock the timestamp is taken. Please +see flags in the masks ``V4L2_BUF_FLAG_TIMESTAMP_MASK`` and +``V4L2_BUF_FLAG_TSTAMP_SRC_MASK`` in :ref:`buffer-flags`. These flags +are always valid and constant across all buffers during the whole video +stream. Changes in these flags may take place as a side effect of +:ref:`VIDIOC_S_INPUT ` or +:ref:`VIDIOC_S_OUTPUT ` however. The +``V4L2_BUF_FLAG_TIMESTAMP_COPY`` timestamp type which is used by e.g. on +mem-to-mem devices is an exception to the rule: the timestamp source +flags are copied from the OUTPUT video buffer to the CAPTURE video +buffer. + + +Interactions between formats, controls and buffers +================================================== + +V4L2 exposes parameters that influence the buffer size, or the way data is +laid out in the buffer. Those parameters are exposed through both formats and +controls. One example of such a control is the ``V4L2_CID_ROTATE`` control +that modifies the direction in which pixels are stored in the buffer, as well +as the buffer size when the selected format includes padding at the end of +lines. + +The set of information needed to interpret the content of a buffer (e.g. the +pixel format, the line stride, the tiling orientation or the rotation) is +collectively referred to in the rest of this section as the buffer layout. + +Controls that can modify the buffer layout shall set the +``V4L2_CTRL_FLAG_MODIFY_LAYOUT`` flag. + +Modifying formats or controls that influence the buffer size or layout require +the stream to be stopped. Any attempt at such a modification while the stream +is active shall cause the ioctl setting the format or the control to return +the ``EBUSY`` error code. In that case drivers shall also set the +``V4L2_CTRL_FLAG_GRABBED`` flag when calling +:c:func:`VIDIOC_QUERYCTRL` or :c:func:`VIDIOC_QUERY_EXT_CTRL` for such a +control while the stream is active. + +.. note:: + + The :c:func:`VIDIOC_S_SELECTION` ioctl can, depending on the hardware (for + instance if the device doesn't include a scaler), modify the format in + addition to the selection rectangle. Similarly, the + :c:func:`VIDIOC_S_INPUT`, :c:func:`VIDIOC_S_OUTPUT`, :c:func:`VIDIOC_S_STD` + and :c:func:`VIDIOC_S_DV_TIMINGS` ioctls can also modify the format and + selection rectangles. When those ioctls result in a buffer size or layout + change, drivers shall handle that condition as they would handle it in the + :c:func:`VIDIOC_S_FMT` ioctl in all cases described in this section. + +Controls that only influence the buffer layout can be modified at any time +when the stream is stopped. As they don't influence the buffer size, no +special handling is needed to synchronize those controls with buffer +allocation and the ``V4L2_CTRL_FLAG_GRABBED`` flag is cleared once the +stream is stopped. + +Formats and controls that influence the buffer size interact with buffer +allocation. The simplest way to handle this is for drivers to always require +buffers to be reallocated in order to change those formats or controls. 
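+
+A short sketch of how such a rejection surfaces to an application,
+assuming ``fd`` is a device exposing the ``V4L2_CID_ROTATE`` control
+described above:
+
+.. code-block:: c
+
+    struct v4l2_control ctrl;
+
+    memset(&ctrl, 0, sizeof(ctrl));
+    ctrl.id    = V4L2_CID_ROTATE;
+    ctrl.value = 90;
+
+    if (-1 == ioctl(fd, VIDIOC_S_CTRL, &ctrl)) {
+        if (EBUSY == errno) {
+            /* The control would change the buffer size or layout
+               while the stream is active or buffers are still
+               allocated; stop streaming and free the buffers,
+               then retry. */
+            fprintf(stderr, "rotation is currently locked\n");
+        } else {
+            perror("VIDIOC_S_CTRL");
+            exit(EXIT_FAILURE);
+        }
+    }
+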
In +that case, to perform such changes, userspace applications shall first stop +the video stream with the :c:func:`VIDIOC_STREAMOFF` ioctl if it is running +and free all buffers with the :c:func:`VIDIOC_REQBUFS` ioctl if they are +allocated. After freeing all buffers the ``V4L2_CTRL_FLAG_GRABBED`` flag +for controls is cleared. The format or controls can then be modified, and +buffers shall then be reallocated and the stream restarted. A typical ioctl +sequence is + + #. VIDIOC_STREAMOFF + #. VIDIOC_REQBUFS(0) + #. VIDIOC_S_EXT_CTRLS + #. VIDIOC_S_FMT + #. VIDIOC_REQBUFS(n) + #. VIDIOC_QBUF + #. VIDIOC_STREAMON + +The second :c:func:`VIDIOC_REQBUFS` call will take the new format and control +value into account to compute the buffer size to allocate. Applications can +also retrieve the size by calling the :c:func:`VIDIOC_G_FMT` ioctl if needed. + +.. note:: + + The API doesn't mandate the above order for control (3.) and format (4.) + changes. Format and controls can be set in a different order, or even + interleaved, depending on the device and use case. For instance some + controls might behave differently for different pixel formats, in which + case the format might need to be set first. + +When reallocation is required, any attempt to modify format or controls that +influences the buffer size while buffers are allocated shall cause the format +or control set ioctl to return the ``EBUSY`` error. Any attempt to queue a +buffer too small for the current format or controls shall cause the +:c:func:`VIDIOC_QBUF` ioctl to return a ``EINVAL`` error. + +Buffer reallocation is an expensive operation. To avoid that cost, drivers can +(and are encouraged to) allow format or controls that influence the buffer +size to be changed with buffers allocated. In that case, a typical ioctl +sequence to modify format and controls is + + #. VIDIOC_STREAMOFF + #. VIDIOC_S_EXT_CTRLS + #. VIDIOC_S_FMT + #. VIDIOC_QBUF + #. VIDIOC_STREAMON + +For this sequence to operate correctly, queued buffers need to be large enough +for the new format or controls. Drivers shall return a ``ENOSPC`` error in +response to format change (:c:func:`VIDIOC_S_FMT`) or control changes +(:c:func:`VIDIOC_S_CTRL` or :c:func:`VIDIOC_S_EXT_CTRLS`) if buffers too small +for the new format are currently queued. As a simplification, drivers are +allowed to return a ``EBUSY`` error from these ioctls if any buffer is +currently queued, without checking the queued buffers sizes. + +Additionally, drivers shall return a ``EINVAL`` error from the +:c:func:`VIDIOC_QBUF` ioctl if the buffer being queued is too small for the +current format or controls. Together, these requirements ensure that queued +buffers will always be large enough for the configured format and controls. + +Userspace applications can query the buffer size required for a given format +and controls by first setting the desired control values and then trying the +desired format. The :c:func:`VIDIOC_TRY_FMT` ioctl will return the required +buffer size. + + #. VIDIOC_S_EXT_CTRLS(x) + #. VIDIOC_TRY_FMT() + #. VIDIOC_S_EXT_CTRLS(y) + #. VIDIOC_TRY_FMT() + +The :c:func:`VIDIOC_CREATE_BUFS` ioctl can then be used to allocate buffers +based on the queried sizes (for instance by allocating a set of buffers large +enough for all the desired formats and controls, or by allocating separate set +of appropriately sized buffers for each use case). + + +.. c:type:: v4l2_buffer + +struct v4l2_buffer +================== + +.. tabularcolumns:: |p{2.8cm}|p{2.5cm}|p{1.3cm}|p{10.5cm}| + +.. 
cssclass:: longtable + +.. flat-table:: struct v4l2_buffer + :header-rows: 0 + :stub-columns: 0 + :widths: 1 2 1 10 + + * - __u32 + - ``index`` + - + - Number of the buffer, set by the application except when calling + :ref:`VIDIOC_DQBUF `, then it is set by the + driver. This field can range from zero to the number of buffers + allocated with the :ref:`VIDIOC_REQBUFS` ioctl + (struct :c:type:`v4l2_requestbuffers` + ``count``), plus any buffers allocated with + :ref:`VIDIOC_CREATE_BUFS` minus one. + * - __u32 + - ``type`` + - + - Type of the buffer, same as struct + :c:type:`v4l2_format` ``type`` or struct + :c:type:`v4l2_requestbuffers` ``type``, set + by the application. See :c:type:`v4l2_buf_type` + * - __u32 + - ``bytesused`` + - + - The number of bytes occupied by the data in the buffer. It depends + on the negotiated data format and may change with each buffer for + compressed variable size data like JPEG images. Drivers must set + this field when ``type`` refers to a capture stream, applications + when it refers to an output stream. If the application sets this + to 0 for an output stream, then ``bytesused`` will be set to the + size of the buffer (see the ``length`` field of this struct) by + the driver. For multiplanar formats this field is ignored and the + ``planes`` pointer is used instead. + * - __u32 + - ``flags`` + - + - Flags set by the application or driver, see :ref:`buffer-flags`. + * - __u32 + - ``field`` + - + - Indicates the field order of the image in the buffer, see + :c:type:`v4l2_field`. This field is not used when the buffer + contains VBI data. Drivers must set it when ``type`` refers to a + capture stream, applications when it refers to an output stream. + * - struct timeval + - ``timestamp`` + - + - For capture streams this is time when the first data byte was + captured, as returned by the :c:func:`clock_gettime()` function + for the relevant clock id; see ``V4L2_BUF_FLAG_TIMESTAMP_*`` in + :ref:`buffer-flags`. For output streams the driver stores the + time at which the last data byte was actually sent out in the + ``timestamp`` field. This permits applications to monitor the + drift between the video and system clock. For output streams that + use ``V4L2_BUF_FLAG_TIMESTAMP_COPY`` the application has to fill + in the timestamp which will be copied by the driver to the capture + stream. + * - struct :c:type:`v4l2_timecode` + - ``timecode`` + - + - When ``type`` is ``V4L2_BUF_TYPE_VIDEO_CAPTURE`` and the + ``V4L2_BUF_FLAG_TIMECODE`` flag is set in ``flags``, this + structure contains a frame timecode. In + :c:type:`V4L2_FIELD_ALTERNATE ` mode the top and + bottom field contain the same timecode. Timecodes are intended to + help video editing and are typically recorded on video tapes, but + also embedded in compressed formats like MPEG. This field is + independent of the ``timestamp`` and ``sequence`` fields. + * - __u32 + - ``sequence`` + - + - Set by the driver, counting the frames (not fields!) in sequence. + This field is set for both input and output devices. + * - :cspan:`3` + + In :c:type:`V4L2_FIELD_ALTERNATE ` mode the top and + bottom field have the same sequence number. The count starts at + zero and includes dropped or repeated frames. A dropped frame was + received by an input device but could not be stored due to lack of + free buffer space. A repeated frame was displayed again by an + output device because the application did not pass new data in + time. + + .. note:: + + This may count the frames received e.g. 
over USB, without + taking into account the frames dropped by the remote hardware due + to limited compression throughput or bus bandwidth. These devices + identify by not enumerating any video standards, see + :ref:`standard`. + + * - __u32 + - ``memory`` + - + - This field must be set by applications and/or drivers in + accordance with the selected I/O method. See :c:type:`v4l2_memory` + * - union + - ``m`` + * - + - __u32 + - ``offset`` + - For the single-planar API and when ``memory`` is + ``V4L2_MEMORY_MMAP`` this is the offset of the buffer from the + start of the device memory. The value is returned by the driver + and apart of serving as parameter to the + :ref:`mmap() ` function not useful for applications. + See :ref:`mmap` for details + * - + - unsigned long + - ``userptr`` + - For the single-planar API and when ``memory`` is + ``V4L2_MEMORY_USERPTR`` this is a pointer to the buffer (casted to + unsigned long type) in virtual memory, set by the application. See + :ref:`userp` for details. + * - + - struct v4l2_plane + - ``*planes`` + - When using the multi-planar API, contains a userspace pointer to + an array of struct :c:type:`v4l2_plane`. The size of + the array should be put in the ``length`` field of this + struct :c:type:`v4l2_buffer` structure. + * - + - int + - ``fd`` + - For the single-plane API and when ``memory`` is + ``V4L2_MEMORY_DMABUF`` this is the file descriptor associated with + a DMABUF buffer. + * - __u32 + - ``length`` + - + - Size of the buffer (not the payload) in bytes for the + single-planar API. This is set by the driver based on the calls to + :ref:`VIDIOC_REQBUFS` and/or + :ref:`VIDIOC_CREATE_BUFS`. For the + multi-planar API the application sets this to the number of + elements in the ``planes`` array. The driver will fill in the + actual number of valid elements in that array. + * - __u32 + - ``reserved2`` + - + - A place holder for future extensions. Drivers and applications + must set this to 0. + * - __u32 + - ``reserved`` + - + - A place holder for future extensions. Drivers and applications + must set this to 0. + + + +.. c:type:: v4l2_plane + +struct v4l2_plane +================= + +.. tabularcolumns:: |p{3.5cm}|p{3.5cm}|p{3.5cm}|p{7.0cm}| + +.. cssclass:: longtable + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 1 1 1 2 + + * - __u32 + - ``bytesused`` + - + - The number of bytes occupied by data in the plane (its payload). + Drivers must set this field when ``type`` refers to a capture + stream, applications when it refers to an output stream. If the + application sets this to 0 for an output stream, then + ``bytesused`` will be set to the size of the plane (see the + ``length`` field of this struct) by the driver. + + .. note:: + + Note that the actual image data starts at ``data_offset`` + which may not be 0. + * - __u32 + - ``length`` + - + - Size in bytes of the plane (not its payload). This is set by the + driver based on the calls to + :ref:`VIDIOC_REQBUFS` and/or + :ref:`VIDIOC_CREATE_BUFS`. + * - union + - ``m`` + - + - + * - + - __u32 + - ``mem_offset`` + - When the memory type in the containing struct + :c:type:`v4l2_buffer` is ``V4L2_MEMORY_MMAP``, this + is the value that should be passed to :ref:`mmap() `, + similar to the ``offset`` field in struct + :c:type:`v4l2_buffer`. + * - + - unsigned long + - ``userptr`` + - When the memory type in the containing struct + :c:type:`v4l2_buffer` is ``V4L2_MEMORY_USERPTR``, + this is a userspace pointer to the memory allocated for this plane + by an application. 
+ * - + - int + - ``fd`` + - When the memory type in the containing struct + :c:type:`v4l2_buffer` is ``V4L2_MEMORY_DMABUF``, + this is a file descriptor associated with a DMABUF buffer, similar + to the ``fd`` field in struct :c:type:`v4l2_buffer`. + * - __u32 + - ``data_offset`` + - + - Offset in bytes to video data in the plane. Drivers must set this + field when ``type`` refers to a capture stream, applications when + it refers to an output stream. + + .. note:: + + That data_offset is included in ``bytesused``. So the + size of the image in the plane is ``bytesused``-``data_offset`` + at offset ``data_offset`` from the start of the plane. + * - __u32 + - ``reserved[11]`` + - + - Reserved for future use. Should be zeroed by drivers and + applications. + + + +.. c:type:: v4l2_buf_type + +enum v4l2_buf_type +================== + +.. cssclass:: longtable + +.. tabularcolumns:: |p{7.2cm}|p{0.6cm}|p{9.7cm}| + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 4 1 9 + + * - ``V4L2_BUF_TYPE_VIDEO_CAPTURE`` + - 1 + - Buffer of a single-planar video capture stream, see + :ref:`capture`. + * - ``V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE`` + - 9 + - Buffer of a multi-planar video capture stream, see + :ref:`capture`. + * - ``V4L2_BUF_TYPE_VIDEO_OUTPUT`` + - 2 + - Buffer of a single-planar video output stream, see + :ref:`output`. + * - ``V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE`` + - 10 + - Buffer of a multi-planar video output stream, see :ref:`output`. + * - ``V4L2_BUF_TYPE_VIDEO_OVERLAY`` + - 3 + - Buffer for video overlay, see :ref:`overlay`. + * - ``V4L2_BUF_TYPE_VBI_CAPTURE`` + - 4 + - Buffer of a raw VBI capture stream, see :ref:`raw-vbi`. + * - ``V4L2_BUF_TYPE_VBI_OUTPUT`` + - 5 + - Buffer of a raw VBI output stream, see :ref:`raw-vbi`. + * - ``V4L2_BUF_TYPE_SLICED_VBI_CAPTURE`` + - 6 + - Buffer of a sliced VBI capture stream, see :ref:`sliced`. + * - ``V4L2_BUF_TYPE_SLICED_VBI_OUTPUT`` + - 7 + - Buffer of a sliced VBI output stream, see :ref:`sliced`. + * - ``V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY`` + - 8 + - Buffer for video output overlay (OSD), see :ref:`osd`. + * - ``V4L2_BUF_TYPE_SDR_CAPTURE`` + - 11 + - Buffer for Software Defined Radio (SDR) capture stream, see + :ref:`sdr`. + * - ``V4L2_BUF_TYPE_SDR_OUTPUT`` + - 12 + - Buffer for Software Defined Radio (SDR) output stream, see + :ref:`sdr`. + * - ``V4L2_BUF_TYPE_META_CAPTURE`` + - 13 + - Buffer for metadata capture, see :ref:`metadata`. + + + +.. _buffer-flags: + +Buffer Flags +============ + +.. tabularcolumns:: |p{7.0cm}|p{2.2cm}|p{8.3cm}| + +.. cssclass:: longtable + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 3 1 4 + + * .. _`V4L2-BUF-FLAG-MAPPED`: + + - ``V4L2_BUF_FLAG_MAPPED`` + - 0x00000001 + - The buffer resides in device memory and has been mapped into the + application's address space, see :ref:`mmap` for details. + Drivers set or clear this flag when the + :ref:`VIDIOC_QUERYBUF`, + :ref:`VIDIOC_QBUF` or + :ref:`VIDIOC_DQBUF ` ioctl is called. Set by the + driver. + * .. _`V4L2-BUF-FLAG-QUEUED`: + + - ``V4L2_BUF_FLAG_QUEUED`` + - 0x00000002 + - Internally drivers maintain two buffer queues, an incoming and + outgoing queue. When this flag is set, the buffer is currently on + the incoming queue. It automatically moves to the outgoing queue + after the buffer has been filled (capture devices) or displayed + (output devices). Drivers set or clear this flag when the + ``VIDIOC_QUERYBUF`` ioctl is called. 
After (successful) calling + the ``VIDIOC_QBUF``\ ioctl it is always set and after + ``VIDIOC_DQBUF`` always cleared. + * .. _`V4L2-BUF-FLAG-DONE`: + + - ``V4L2_BUF_FLAG_DONE`` + - 0x00000004 + - When this flag is set, the buffer is currently on the outgoing + queue, ready to be dequeued from the driver. Drivers set or clear + this flag when the ``VIDIOC_QUERYBUF`` ioctl is called. After + calling the ``VIDIOC_QBUF`` or ``VIDIOC_DQBUF`` it is always + cleared. Of course a buffer cannot be on both queues at the same + time, the ``V4L2_BUF_FLAG_QUEUED`` and ``V4L2_BUF_FLAG_DONE`` flag + are mutually exclusive. They can be both cleared however, then the + buffer is in "dequeued" state, in the application domain so to + say. + * .. _`V4L2-BUF-FLAG-ERROR`: + + - ``V4L2_BUF_FLAG_ERROR`` + - 0x00000040 + - When this flag is set, the buffer has been dequeued successfully, + although the data might have been corrupted. This is recoverable, + streaming may continue as normal and the buffer may be reused + normally. Drivers set this flag when the ``VIDIOC_DQBUF`` ioctl is + called. + * .. _`V4L2-BUF-FLAG-KEYFRAME`: + + - ``V4L2_BUF_FLAG_KEYFRAME`` + - 0x00000008 + - Drivers set or clear this flag when calling the ``VIDIOC_DQBUF`` + ioctl. It may be set by video capture devices when the buffer + contains a compressed image which is a key frame (or field), i. e. + can be decompressed on its own. Also known as an I-frame. + Applications can set this bit when ``type`` refers to an output + stream. + * .. _`V4L2-BUF-FLAG-PFRAME`: + + - ``V4L2_BUF_FLAG_PFRAME`` + - 0x00000010 + - Similar to ``V4L2_BUF_FLAG_KEYFRAME`` this flags predicted frames + or fields which contain only differences to a previous key frame. + Applications can set this bit when ``type`` refers to an output + stream. + * .. _`V4L2-BUF-FLAG-BFRAME`: + + - ``V4L2_BUF_FLAG_BFRAME`` + - 0x00000020 + - Similar to ``V4L2_BUF_FLAG_KEYFRAME`` this flags a bi-directional + predicted frame or field which contains only the differences + between the current frame and both the preceding and following key + frames to specify its content. Applications can set this bit when + ``type`` refers to an output stream. + * .. _`V4L2-BUF-FLAG-TIMECODE`: + + - ``V4L2_BUF_FLAG_TIMECODE`` + - 0x00000100 + - The ``timecode`` field is valid. Drivers set or clear this flag + when the ``VIDIOC_DQBUF`` ioctl is called. Applications can set + this bit and the corresponding ``timecode`` structure when + ``type`` refers to an output stream. + * .. _`V4L2-BUF-FLAG-PREPARED`: + + - ``V4L2_BUF_FLAG_PREPARED`` + - 0x00000400 + - The buffer has been prepared for I/O and can be queued by the + application. Drivers set or clear this flag when the + :ref:`VIDIOC_QUERYBUF`, + :ref:`VIDIOC_PREPARE_BUF `, + :ref:`VIDIOC_QBUF` or + :ref:`VIDIOC_DQBUF ` ioctl is called. + * .. _`V4L2-BUF-FLAG-NO-CACHE-INVALIDATE`: + + - ``V4L2_BUF_FLAG_NO_CACHE_INVALIDATE`` + - 0x00000800 + - Caches do not have to be invalidated for this buffer. Typically + applications shall use this flag if the data captured in the + buffer is not going to be touched by the CPU, instead the buffer + will, probably, be passed on to a DMA-capable hardware unit for + further processing or output. + * .. _`V4L2-BUF-FLAG-NO-CACHE-CLEAN`: + + - ``V4L2_BUF_FLAG_NO_CACHE_CLEAN`` + - 0x00001000 + - Caches do not have to be cleaned for this buffer. 
Typically
+ applications shall use this flag for output buffers if the data in
+ this buffer has not been created by the CPU but by some
+ DMA-capable unit, in which case caches have not been used.
+ * .. _`V4L2-BUF-FLAG-LAST`:
+
+ - ``V4L2_BUF_FLAG_LAST``
+ - 0x00100000
+ - Last buffer produced by the hardware. mem2mem codec drivers set
+ this flag on the capture queue for the last buffer when the
+ :ref:`VIDIOC_QUERYBUF` or
+ :ref:`VIDIOC_DQBUF <VIDIOC_QBUF>` ioctl is called. Due to
+ hardware limitations, the last buffer may be empty. In this case
+ the driver will set the ``bytesused`` field to 0, regardless of
+ the format. Any subsequent call to the
+ :ref:`VIDIOC_DQBUF <VIDIOC_QBUF>` ioctl will not block anymore,
+ but return an ``EPIPE`` error code.
+ * .. _`V4L2-BUF-FLAG-TIMESTAMP-MASK`:
+
+ - ``V4L2_BUF_FLAG_TIMESTAMP_MASK``
+ - 0x0000e000
+ - Mask for timestamp types below. To test the timestamp type, mask
+ out bits not belonging to timestamp type by performing a logical
+ and operation with buffer flags and timestamp mask.
+ * .. _`V4L2-BUF-FLAG-TIMESTAMP-UNKNOWN`:
+
+ - ``V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN``
+ - 0x00000000
+ - Unknown timestamp type. This type is used by drivers before Linux
+ 3.9 and may be either monotonic (see below) or realtime (wall
+ clock). The monotonic clock has been favoured in embedded systems,
+ whereas most of the drivers use the realtime clock. Both kinds
+ of timestamps are available in user space via
+ :c:func:`clock_gettime` using clock IDs ``CLOCK_MONOTONIC``
+ and ``CLOCK_REALTIME``, respectively.
+ * .. _`V4L2-BUF-FLAG-TIMESTAMP-MONOTONIC`:
+
+ - ``V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC``
+ - 0x00002000
+ - The buffer timestamp has been taken from the ``CLOCK_MONOTONIC``
+ clock. To access the same clock outside V4L2, use
+ :c:func:`clock_gettime`.
+ * .. _`V4L2-BUF-FLAG-TIMESTAMP-COPY`:
+
+ - ``V4L2_BUF_FLAG_TIMESTAMP_COPY``
+ - 0x00004000
+ - The CAPTURE buffer timestamp has been taken from the corresponding
+ OUTPUT buffer. This flag applies only to mem2mem devices.
+ * .. _`V4L2-BUF-FLAG-TSTAMP-SRC-MASK`:
+
+ - ``V4L2_BUF_FLAG_TSTAMP_SRC_MASK``
+ - 0x00070000
+ - Mask for timestamp sources below. The timestamp source defines the
+ point of time the timestamp is taken in relation to the frame.
+ Logical 'and' operation between the ``flags`` field and
+ ``V4L2_BUF_FLAG_TSTAMP_SRC_MASK`` produces the value of the
+ timestamp source. Applications must set the timestamp source when
+ ``type`` refers to an output stream and
+ ``V4L2_BUF_FLAG_TIMESTAMP_COPY`` is set.
+ * .. _`V4L2-BUF-FLAG-TSTAMP-SRC-EOF`:
+
+ - ``V4L2_BUF_FLAG_TSTAMP_SRC_EOF``
+ - 0x00000000
+ - End Of Frame. The buffer timestamp has been taken when the last
+ pixel of the frame has been received or the last pixel of the
+ frame has been transmitted. In practice, software generated
+ timestamps will typically be read from the clock a small amount of
+ time after the last pixel has been received or transmitted,
+ depending on the system and other activity in it.
+ * .. _`V4L2-BUF-FLAG-TSTAMP-SRC-SOE`:
+
+ - ``V4L2_BUF_FLAG_TSTAMP_SRC_SOE``
+ - 0x00010000
+ - Start Of Exposure. The buffer timestamp has been taken when the
+ exposure of the frame has begun. This is only valid for the
+ ``V4L2_BUF_TYPE_VIDEO_CAPTURE`` buffer type.
+
+
+
+.. c:type:: v4l2_memory
+
+enum v4l2_memory
+================
+
+.. tabularcolumns:: |p{6.6cm}|p{2.2cm}|p{8.7cm}|
+
+.. flat-table::
+ :header-rows: 0
+ :stub-columns: 0
+ :widths: 3 1 4
+
+ * - ``V4L2_MEMORY_MMAP``
+ - 1
+ - The buffer is used for :ref:`memory mapping <mmap>` I/O.
+ * - ``V4L2_MEMORY_USERPTR`` + - 2 + - The buffer is used for :ref:`user pointer ` I/O. + * - ``V4L2_MEMORY_OVERLAY`` + - 3 + - [to do] + * - ``V4L2_MEMORY_DMABUF`` + - 4 + - The buffer is used for :ref:`DMA shared buffer ` I/O. + + + +Timecodes +========= + +The struct :c:type:`v4l2_timecode` structure is designed to hold a +:ref:`smpte12m` or similar timecode. (struct +struct :c:type:`timeval` timestamps are stored in struct +:c:type:`v4l2_buffer` field ``timestamp``.) + + +.. c:type:: v4l2_timecode + +struct v4l2_timecode +-------------------- + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 1 1 2 + + * - __u32 + - ``type`` + - Frame rate the timecodes are based on, see :ref:`timecode-type`. + * - __u32 + - ``flags`` + - Timecode flags, see :ref:`timecode-flags`. + * - __u8 + - ``frames`` + - Frame count, 0 ... 23/24/29/49/59, depending on the type of + timecode. + * - __u8 + - ``seconds`` + - Seconds count, 0 ... 59. This is a binary, not BCD number. + * - __u8 + - ``minutes`` + - Minutes count, 0 ... 59. This is a binary, not BCD number. + * - __u8 + - ``hours`` + - Hours count, 0 ... 29. This is a binary, not BCD number. + * - __u8 + - ``userbits``\ [4] + - The "user group" bits from the timecode. + + + +.. _timecode-type: + +Timecode Types +-------------- + +.. tabularcolumns:: |p{6.6cm}|p{2.2cm}|p{8.7cm}| + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 3 1 4 + + * - ``V4L2_TC_TYPE_24FPS`` + - 1 + - 24 frames per second, i. e. film. + * - ``V4L2_TC_TYPE_25FPS`` + - 2 + - 25 frames per second, i. e. PAL or SECAM video. + * - ``V4L2_TC_TYPE_30FPS`` + - 3 + - 30 frames per second, i. e. NTSC video. + * - ``V4L2_TC_TYPE_50FPS`` + - 4 + - + * - ``V4L2_TC_TYPE_60FPS`` + - 5 + - + + + +.. _timecode-flags: + +Timecode Flags +-------------- + +.. tabularcolumns:: |p{6.6cm}|p{1.4cm}|p{9.5cm}| + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 3 1 4 + + * - ``V4L2_TC_FLAG_DROPFRAME`` + - 0x0001 + - Indicates "drop frame" semantics for counting frames in 29.97 fps + material. When set, frame numbers 0 and 1 at the start of each + minute, except minutes 0, 10, 20, 30, 40, 50 are omitted from the + count. + * - ``V4L2_TC_FLAG_COLORFRAME`` + - 0x0002 + - The "color frame" flag. + * - ``V4L2_TC_USERBITS_field`` + - 0x000C + - Field mask for the "binary group flags". + * - ``V4L2_TC_USERBITS_USERDEFINED`` + - 0x0000 + - Unspecified format. + * - ``V4L2_TC_USERBITS_8BITCHARS`` + - 0x0008 + - 8-bit ISO characters. diff --git a/Documentation/media/uapi/v4l/capture-example.rst b/Documentation/media/uapi/v4l/capture-example.rst new file mode 100644 index 000000000..ac1cd057e --- /dev/null +++ b/Documentation/media/uapi/v4l/capture-example.rst @@ -0,0 +1,13 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _capture-example: + +********************* +Video Capture Example +********************* + + +.. toctree:: + :maxdepth: 1 + + capture.c diff --git a/Documentation/media/uapi/v4l/capture.c.rst b/Documentation/media/uapi/v4l/capture.c.rst new file mode 100644 index 000000000..56525a0fb --- /dev/null +++ b/Documentation/media/uapi/v4l/capture.c.rst @@ -0,0 +1,664 @@ +.. -*- coding: utf-8; mode: rst -*- + +file: media/v4l/capture.c +========================= + +.. code-block:: c + + /* + * V4L2 video capture example + * + * This program can be used and distributed without restrictions. 
+ *
+ * This program is provided with the V4L2 API
+ * see https://linuxtv.org/docs.php for more information
+ */
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+ #include <assert.h>
+
+ #include <getopt.h>             /* getopt_long() */
+
+ #include <fcntl.h>              /* low-level i/o */
+ #include <unistd.h>
+ #include <errno.h>
+ #include <sys/stat.h>
+ #include <sys/types.h>
+ #include <sys/time.h>
+ #include <sys/mman.h>
+ #include <sys/ioctl.h>
+
+ #include <linux/videodev2.h>
+
+ #define CLEAR(x) memset(&(x), 0, sizeof(x))
+
+ enum io_method {
+ IO_METHOD_READ,
+ IO_METHOD_MMAP,
+ IO_METHOD_USERPTR,
+ };
+
+ struct buffer {
+ void *start;
+ size_t length;
+ };
+
+ static char *dev_name;
+ static enum io_method io = IO_METHOD_MMAP;
+ static int fd = -1;
+ struct buffer *buffers;
+ static unsigned int n_buffers;
+ static int out_buf;
+ static int force_format;
+ static int frame_count = 70;
+
+ static void errno_exit(const char *s)
+ {
+ fprintf(stderr, "%s error %d, %s\\n", s, errno, strerror(errno));
+ exit(EXIT_FAILURE);
+ }
+
+ static int xioctl(int fh, int request, void *arg)
+ {
+ int r;
+
+ do {
+ r = ioctl(fh, request, arg);
+ } while (-1 == r && EINTR == errno);
+
+ return r;
+ }
+
+ static void process_image(const void *p, int size)
+ {
+ if (out_buf)
+ fwrite(p, size, 1, stdout);
+
+ fflush(stderr);
+ fprintf(stderr, ".");
+ fflush(stdout);
+ }
+
+ static int read_frame(void)
+ {
+ struct v4l2_buffer buf;
+ unsigned int i;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ if (-1 == read(fd, buffers[0].start, buffers[0].length)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("read");
+ }
+ }
+
+ process_image(buffers[0].start, buffers[0].length);
+ break;
+
+ case IO_METHOD_MMAP:
+ CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+
+ assert(buf.index < n_buffers);
+
+ process_image(buffers[buf.index].start, buf.bytesused);
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ break;
+
+ case IO_METHOD_USERPTR:
+ CLEAR(buf);
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) {
+ switch (errno) {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+
+ for (i = 0; i < n_buffers; ++i)
+ if (buf.m.userptr == (unsigned long)buffers[i].start
+ && buf.length == buffers[i].length)
+ break;
+
+ assert(i < n_buffers);
+
+ process_image((void *)buf.m.userptr, buf.bytesused);
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ break;
+ }
+
+ return 1;
+ }
+
+ static void mainloop(void)
+ {
+ unsigned int count;
+
+ count = frame_count;
+
+ while (count-- > 0) {
+ for (;;) {
+ fd_set fds;
+ struct timeval tv;
+ int r;
+
+ FD_ZERO(&fds);
+ FD_SET(fd, &fds);
+
+ /* Timeout. */
+ tv.tv_sec = 2;
+ tv.tv_usec = 0;
+
+ r = select(fd + 1, &fds, NULL, NULL, &tv);
+
+ if (-1 == r) {
+ if (EINTR == errno)
+ continue;
+ errno_exit("select");
+ }
+
+ if (0 == r) {
+ fprintf(stderr, "select timeout\\n");
+ exit(EXIT_FAILURE);
+ }
+
+ if (read_frame())
+ break;
+ /* EAGAIN - continue select loop. */
+ }
+ }
+ }
+
+ static void stop_capturing(void)
+ {
+ enum v4l2_buf_type type;
+
+ switch (io) {
+ case IO_METHOD_READ:
+ /* Nothing to do.
*/ + break; + + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMOFF, &type)) + errno_exit("VIDIOC_STREAMOFF"); + break; + } + } + + static void start_capturing(void) + { + unsigned int i; + enum v4l2_buf_type type; + + switch (io) { + case IO_METHOD_READ: + /* Nothing to do. */ + break; + + case IO_METHOD_MMAP: + for (i = 0; i < n_buffers; ++i) { + struct v4l2_buffer buf; + + CLEAR(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + errno_exit("VIDIOC_QBUF"); + } + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMON, &type)) + errno_exit("VIDIOC_STREAMON"); + break; + + case IO_METHOD_USERPTR: + for (i = 0; i < n_buffers; ++i) { + struct v4l2_buffer buf; + + CLEAR(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.userptr = (unsigned long)buffers[i].start; + buf.length = buffers[i].length; + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + errno_exit("VIDIOC_QBUF"); + } + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMON, &type)) + errno_exit("VIDIOC_STREAMON"); + break; + } + } + + static void uninit_device(void) + { + unsigned int i; + + switch (io) { + case IO_METHOD_READ: + free(buffers[0].start); + break; + + case IO_METHOD_MMAP: + for (i = 0; i < n_buffers; ++i) + if (-1 == munmap(buffers[i].start, buffers[i].length)) + errno_exit("munmap"); + break; + + case IO_METHOD_USERPTR: + for (i = 0; i < n_buffers; ++i) + free(buffers[i].start); + break; + } + + free(buffers); + } + + static void init_read(unsigned int buffer_size) + { + buffers = calloc(1, sizeof(*buffers)); + + if (!buffers) { + fprintf(stderr, "Out of memory\\n"); + exit(EXIT_FAILURE); + } + + buffers[0].length = buffer_size; + buffers[0].start = malloc(buffer_size); + + if (!buffers[0].start) { + fprintf(stderr, "Out of memory\\n"); + exit(EXIT_FAILURE); + } + } + + static void init_mmap(void) + { + struct v4l2_requestbuffers req; + + CLEAR(req); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_MMAP; + + if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) { + if (EINVAL == errno) { + fprintf(stderr, "%s does not support " + "memory mappingn", dev_name); + exit(EXIT_FAILURE); + } else { + errno_exit("VIDIOC_REQBUFS"); + } + } + + if (req.count < 2) { + fprintf(stderr, "Insufficient buffer memory on %s\\n", + dev_name); + exit(EXIT_FAILURE); + } + + buffers = calloc(req.count, sizeof(*buffers)); + + if (!buffers) { + fprintf(stderr, "Out of memory\\n"); + exit(EXIT_FAILURE); + } + + for (n_buffers = 0; n_buffers < req.count; ++n_buffers) { + struct v4l2_buffer buf; + + CLEAR(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = n_buffers; + + if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)) + errno_exit("VIDIOC_QUERYBUF"); + + buffers[n_buffers].length = buf.length; + buffers[n_buffers].start = + mmap(NULL /* start anywhere */, + buf.length, + PROT_READ | PROT_WRITE /* required */, + MAP_SHARED /* recommended */, + fd, buf.m.offset); + + if (MAP_FAILED == buffers[n_buffers].start) + errno_exit("mmap"); + } + } + + static void init_userp(unsigned int buffer_size) + { + struct v4l2_requestbuffers req; + + CLEAR(req); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_USERPTR; + + if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) { + if (EINVAL == errno) { + 
fprintf(stderr, "%s does not support " + "user pointer i/on", dev_name); + exit(EXIT_FAILURE); + } else { + errno_exit("VIDIOC_REQBUFS"); + } + } + + buffers = calloc(4, sizeof(*buffers)); + + if (!buffers) { + fprintf(stderr, "Out of memory\\n"); + exit(EXIT_FAILURE); + } + + for (n_buffers = 0; n_buffers < 4; ++n_buffers) { + buffers[n_buffers].length = buffer_size; + buffers[n_buffers].start = malloc(buffer_size); + + if (!buffers[n_buffers].start) { + fprintf(stderr, "Out of memory\\n"); + exit(EXIT_FAILURE); + } + } + } + + static void init_device(void) + { + struct v4l2_capability cap; + struct v4l2_cropcap cropcap; + struct v4l2_crop crop; + struct v4l2_format fmt; + unsigned int min; + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) { + if (EINVAL == errno) { + fprintf(stderr, "%s is no V4L2 device\\n", + dev_name); + exit(EXIT_FAILURE); + } else { + errno_exit("VIDIOC_QUERYCAP"); + } + } + + if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { + fprintf(stderr, "%s is no video capture device\\n", + dev_name); + exit(EXIT_FAILURE); + } + + switch (io) { + case IO_METHOD_READ: + if (!(cap.capabilities & V4L2_CAP_READWRITE)) { + fprintf(stderr, "%s does not support read i/o\\n", + dev_name); + exit(EXIT_FAILURE); + } + break; + + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + if (!(cap.capabilities & V4L2_CAP_STREAMING)) { + fprintf(stderr, "%s does not support streaming i/o\\n", + dev_name); + exit(EXIT_FAILURE); + } + break; + } + + + /* Select video input, video standard and tune here. */ + + + CLEAR(cropcap); + + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + crop.c = cropcap.defrect; /* reset to default */ + + if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { + switch (errno) { + case EINVAL: + /* Cropping not supported. */ + break; + default: + /* Errors ignored. */ + break; + } + } + } else { + /* Errors ignored. */ + } + + + CLEAR(fmt); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (force_format) { + fmt.fmt.pix.width = 640; + fmt.fmt.pix.height = 480; + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; + + if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) + errno_exit("VIDIOC_S_FMT"); + + /* Note VIDIOC_S_FMT may change width and height. */ + } else { + /* Preserve original settings as set by v4l2-ctl for example */ + if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) + errno_exit("VIDIOC_G_FMT"); + } + + /* Buggy driver paranoia. 
*/ + min = fmt.fmt.pix.width * 2; + if (fmt.fmt.pix.bytesperline < min) + fmt.fmt.pix.bytesperline = min; + min = fmt.fmt.pix.bytesperline * fmt.fmt.pix.height; + if (fmt.fmt.pix.sizeimage < min) + fmt.fmt.pix.sizeimage = min; + + switch (io) { + case IO_METHOD_READ: + init_read(fmt.fmt.pix.sizeimage); + break; + + case IO_METHOD_MMAP: + init_mmap(); + break; + + case IO_METHOD_USERPTR: + init_userp(fmt.fmt.pix.sizeimage); + break; + } + } + + static void close_device(void) + { + if (-1 == close(fd)) + errno_exit("close"); + + fd = -1; + } + + static void open_device(void) + { + struct stat st; + + if (-1 == stat(dev_name, &st)) { + fprintf(stderr, "Cannot identify '%s': %d, %s\\n", + dev_name, errno, strerror(errno)); + exit(EXIT_FAILURE); + } + + if (!S_ISCHR(st.st_mode)) { + fprintf(stderr, "%s is no devicen", dev_name); + exit(EXIT_FAILURE); + } + + fd = open(dev_name, O_RDWR /* required */ | O_NONBLOCK, 0); + + if (-1 == fd) { + fprintf(stderr, "Cannot open '%s': %d, %s\\n", + dev_name, errno, strerror(errno)); + exit(EXIT_FAILURE); + } + } + + static void usage(FILE *fp, int argc, char **argv) + { + fprintf(fp, + "Usage: %s [options]\\n\\n" + "Version 1.3\\n" + "Options:\\n" + "-d | --device name Video device name [%s]n" + "-h | --help Print this messagen" + "-m | --mmap Use memory mapped buffers [default]n" + "-r | --read Use read() callsn" + "-u | --userp Use application allocated buffersn" + "-o | --output Outputs stream to stdoutn" + "-f | --format Force format to 640x480 YUYVn" + "-c | --count Number of frames to grab [%i]n" + "", + argv[0], dev_name, frame_count); + } + + static const char short_options[] = "d:hmruofc:"; + + static const struct option + long_options[] = { + { "device", required_argument, NULL, 'd' }, + { "help", no_argument, NULL, 'h' }, + { "mmap", no_argument, NULL, 'm' }, + { "read", no_argument, NULL, 'r' }, + { "userp", no_argument, NULL, 'u' }, + { "output", no_argument, NULL, 'o' }, + { "format", no_argument, NULL, 'f' }, + { "count", required_argument, NULL, 'c' }, + { 0, 0, 0, 0 } + }; + + int main(int argc, char **argv) + { + dev_name = "/dev/video0"; + + for (;;) { + int idx; + int c; + + c = getopt_long(argc, argv, + short_options, long_options, &idx); + + if (-1 == c) + break; + + switch (c) { + case 0: /* getopt_long() flag */ + break; + + case 'd': + dev_name = optarg; + break; + + case 'h': + usage(stdout, argc, argv); + exit(EXIT_SUCCESS); + + case 'm': + io = IO_METHOD_MMAP; + break; + + case 'r': + io = IO_METHOD_READ; + break; + + case 'u': + io = IO_METHOD_USERPTR; + break; + + case 'o': + out_buf++; + break; + + case 'f': + force_format++; + break; + + case 'c': + errno = 0; + frame_count = strtol(optarg, NULL, 0); + if (errno) + errno_exit(optarg); + break; + + default: + usage(stderr, argc, argv); + exit(EXIT_FAILURE); + } + } + + open_device(); + init_device(); + start_capturing(); + mainloop(); + stop_capturing(); + uninit_device(); + close_device(); + fprintf(stderr, "\\n"); + return 0; + } diff --git a/Documentation/media/uapi/v4l/colorspaces-defs.rst b/Documentation/media/uapi/v4l/colorspaces-defs.rst new file mode 100644 index 000000000..16e46bec8 --- /dev/null +++ b/Documentation/media/uapi/v4l/colorspaces-defs.rst @@ -0,0 +1,175 @@ +.. -*- coding: utf-8; mode: rst -*- + +**************************** +Defining Colorspaces in V4L2 +**************************** + +In V4L2 colorspaces are defined by four values. 
The first is the +colorspace identifier (enum :c:type:`v4l2_colorspace`) +which defines the chromaticities, the default transfer function, the +default Y'CbCr encoding and the default quantization method. The second +is the transfer function identifier (enum +:c:type:`v4l2_xfer_func`) to specify non-standard +transfer functions. The third is the Y'CbCr encoding identifier (enum +:c:type:`v4l2_ycbcr_encoding`) to specify +non-standard Y'CbCr encodings and the fourth is the quantization +identifier (enum :c:type:`v4l2_quantization`) to +specify non-standard quantization methods. Most of the time only the +colorspace field of struct :c:type:`v4l2_pix_format` +or struct :c:type:`v4l2_pix_format_mplane` +needs to be filled in. + +.. _hsv-colorspace: + +On :ref:`HSV formats ` the *Hue* is defined as the angle on +the cylindrical color representation. Usually this angle is measured in +degrees, i.e. 0-360. When we map this angle value into 8 bits, there are +two basic ways to do it: Divide the angular value by 2 (0-179), or use the +whole range, 0-255, dividing the angular value by 1.41. The enum +:c:type:`v4l2_hsv_encoding` specifies which encoding is used. + +.. note:: The default R'G'B' quantization is full range for all + colorspaces. HSV formats are always full range. + +.. tabularcolumns:: |p{6.0cm}|p{11.5cm}| + +.. c:type:: v4l2_colorspace + +.. flat-table:: V4L2 Colorspaces + :header-rows: 1 + :stub-columns: 0 + + * - Identifier + - Details + * - ``V4L2_COLORSPACE_DEFAULT`` + - The default colorspace. This can be used by applications to let + the driver fill in the colorspace. + * - ``V4L2_COLORSPACE_SMPTE170M`` + - See :ref:`col-smpte-170m`. + * - ``V4L2_COLORSPACE_REC709`` + - See :ref:`col-rec709`. + * - ``V4L2_COLORSPACE_SRGB`` + - See :ref:`col-srgb`. + * - ``V4L2_COLORSPACE_OPRGB`` + - See :ref:`col-oprgb`. + * - ``V4L2_COLORSPACE_BT2020`` + - See :ref:`col-bt2020`. + * - ``V4L2_COLORSPACE_DCI_P3`` + - See :ref:`col-dcip3`. + * - ``V4L2_COLORSPACE_SMPTE240M`` + - See :ref:`col-smpte-240m`. + * - ``V4L2_COLORSPACE_470_SYSTEM_M`` + - See :ref:`col-sysm`. + * - ``V4L2_COLORSPACE_470_SYSTEM_BG`` + - See :ref:`col-sysbg`. + * - ``V4L2_COLORSPACE_JPEG`` + - See :ref:`col-jpeg`. + * - ``V4L2_COLORSPACE_RAW`` + - The raw colorspace. This is used for raw image capture where the + image is minimally processed and is using the internal colorspace + of the device. The software that processes an image using this + 'colorspace' will have to know the internals of the capture + device. + + + +.. c:type:: v4l2_xfer_func + +.. tabularcolumns:: |p{5.5cm}|p{12.0cm}| + +.. flat-table:: V4L2 Transfer Function + :header-rows: 1 + :stub-columns: 0 + + * - Identifier + - Details + * - ``V4L2_XFER_FUNC_DEFAULT`` + - Use the default transfer function as defined by the colorspace. + * - ``V4L2_XFER_FUNC_709`` + - Use the Rec. 709 transfer function. + * - ``V4L2_XFER_FUNC_SRGB`` + - Use the sRGB transfer function. + * - ``V4L2_XFER_FUNC_OPRGB`` + - Use the opRGB transfer function. + * - ``V4L2_XFER_FUNC_SMPTE240M`` + - Use the SMPTE 240M transfer function. + * - ``V4L2_XFER_FUNC_NONE`` + - Do not use a transfer function (i.e. use linear RGB values). + * - ``V4L2_XFER_FUNC_DCI_P3`` + - Use the DCI-P3 transfer function. + * - ``V4L2_XFER_FUNC_SMPTE2084`` + - Use the SMPTE 2084 transfer function. See :ref:`xf-smpte-2084`. + + + +.. c:type:: v4l2_ycbcr_encoding + +.. tabularcolumns:: |p{6.5cm}|p{11.0cm}| + +.. 
flat-table:: V4L2 Y'CbCr Encodings + :header-rows: 1 + :stub-columns: 0 + + * - Identifier + - Details + * - ``V4L2_YCBCR_ENC_DEFAULT`` + - Use the default Y'CbCr encoding as defined by the colorspace. + * - ``V4L2_YCBCR_ENC_601`` + - Use the BT.601 Y'CbCr encoding. + * - ``V4L2_YCBCR_ENC_709`` + - Use the Rec. 709 Y'CbCr encoding. + * - ``V4L2_YCBCR_ENC_XV601`` + - Use the extended gamut xvYCC BT.601 encoding. + * - ``V4L2_YCBCR_ENC_XV709`` + - Use the extended gamut xvYCC Rec. 709 encoding. + * - ``V4L2_YCBCR_ENC_BT2020`` + - Use the default non-constant luminance BT.2020 Y'CbCr encoding. + * - ``V4L2_YCBCR_ENC_BT2020_CONST_LUM`` + - Use the constant luminance BT.2020 Yc'CbcCrc encoding. + * - ``V4L2_YCBCR_ENC_SMPTE_240M`` + - Use the SMPTE 240M Y'CbCr encoding. + + + +.. c:type:: v4l2_hsv_encoding + +.. tabularcolumns:: |p{6.5cm}|p{11.0cm}| + +.. flat-table:: V4L2 HSV Encodings + :header-rows: 1 + :stub-columns: 0 + + * - Identifier + - Details + * - ``V4L2_HSV_ENC_180`` + - For the Hue, each LSB is two degrees. + * - ``V4L2_HSV_ENC_256`` + - For the Hue, the 360 degrees are mapped into 8 bits, i.e. each + LSB is roughly 1.41 degrees. + + + +.. c:type:: v4l2_quantization + +.. tabularcolumns:: |p{6.5cm}|p{11.0cm}| + +.. flat-table:: V4L2 Quantization Methods + :header-rows: 1 + :stub-columns: 0 + + * - Identifier + - Details + * - ``V4L2_QUANTIZATION_DEFAULT`` + - Use the default quantization encoding as defined by the + colorspace. This is always full range for R'G'B' and HSV. + It is usually limited range for Y'CbCr. + * - ``V4L2_QUANTIZATION_FULL_RANGE`` + - Use the full range quantization encoding. I.e. the range [0…1] is + mapped to [0…255] (with possible clipping to [1…254] to avoid the + 0x00 and 0xff values). Cb and Cr are mapped from [-0.5…0.5] to + [0…255] (with possible clipping to [1…254] to avoid the 0x00 and + 0xff values). + * - ``V4L2_QUANTIZATION_LIM_RANGE`` + - Use the limited range quantization encoding. I.e. the range [0…1] + is mapped to [16…235]. Cb and Cr are mapped from [-0.5…0.5] to + [16…240]. Limited Range cannot be used with HSV. diff --git a/Documentation/media/uapi/v4l/colorspaces-details.rst b/Documentation/media/uapi/v4l/colorspaces-details.rst new file mode 100644 index 000000000..ca7176cae --- /dev/null +++ b/Documentation/media/uapi/v4l/colorspaces-details.rst @@ -0,0 +1,805 @@ +.. -*- coding: utf-8; mode: rst -*- + +******************************** +Detailed Colorspace Descriptions +******************************** + + +.. _col-smpte-170m: + +Colorspace SMPTE 170M (V4L2_COLORSPACE_SMPTE170M) +================================================= + +The :ref:`smpte170m` standard defines the colorspace used by NTSC and +PAL and by SDTV in general. The default transfer function is +``V4L2_XFER_FUNC_709``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_601``. The default Y'CbCr quantization is limited +range. The chromaticities of the primary colors and the white reference +are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: SMPTE 170M Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.630 + - 0.340 + * - Green + - 0.310 + - 0.595 + * - Blue + - 0.155 + - 0.070 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + +The red, green and blue chromaticities are also often referred to as the +SMPTE C set, so this colorspace is sometimes called SMPTE C as well. + +The transfer function defined for SMPTE 170M is the same as the one +defined in Rec. 709. + +.. 
math:: + + L' = -1.099(-L)^{0.45} + 0.099 \text{, for } L \le-0.018 + + L' = 4.5L \text{, for } -0.018 < L < 0.018 + + L' = 1.099L^{0.45} - 0.099 \text{, for } L \ge 0.018 + +Inverse Transfer function: + +.. math:: + + L = -\left( \frac{L' - 0.099}{-1.099} \right) ^{\frac{1}{0.45}} \text{, for } L' \le -0.081 + + L = \frac{L'}{4.5} \text{, for } -0.081 < L' < 0.081 + + L = \left(\frac{L' + 0.099}{1.099}\right)^{\frac{1}{0.45} } \text{, for } L' \ge 0.081 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_601`` encoding: + +.. math:: + + Y' = 0.2990R' + 0.5870G' + 0.1140B' + + Cb = -0.1687R' - 0.3313G' + 0.5B' + + Cr = 0.5R' - 0.4187G' - 0.0813B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. This conversion to Y'CbCr is identical to the one defined in +the :ref:`itu601` standard and this colorspace is sometimes called +BT.601 as well, even though BT.601 does not mention any color primaries. + +The default quantization is limited range, but full range is possible +although rarely seen. + + +.. _col-rec709: + +Colorspace Rec. 709 (V4L2_COLORSPACE_REC709) +============================================ + +The :ref:`itu709` standard defines the colorspace used by HDTV in +general. The default transfer function is ``V4L2_XFER_FUNC_709``. The +default Y'CbCr encoding is ``V4L2_YCBCR_ENC_709``. The default Y'CbCr +quantization is limited range. The chromaticities of the primary colors +and the white reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: Rec. 709 Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.640 + - 0.330 + * - Green + - 0.300 + - 0.600 + * - Blue + - 0.150 + - 0.060 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + +The full name of this standard is Rec. ITU-R BT.709-5. + +Transfer function. Normally L is in the range [0…1], but for the +extended gamut xvYCC encoding values outside that range are allowed. + +.. math:: + + L' = -1.099(-L)^{0.45} + 0.099 \text{, for } L \le -0.018 + + L' = 4.5L \text{, for } -0.018 < L < 0.018 + + L' = 1.099L^{0.45} - 0.099 \text{, for } L \ge 0.018 + +Inverse Transfer function: + +.. math:: + + L = -\left( \frac{L' - 0.099}{-1.099} \right)^\frac{1}{0.45} \text{, for } L' \le -0.081 + + L = \frac{L'}{4.5}\text{, for } -0.081 < L' < 0.081 + + L = \left(\frac{L' + 0.099}{1.099}\right)^{\frac{1}{0.45} } \text{, for } L' \ge 0.081 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_709`` encoding: + +.. math:: + + Y' = 0.2126R' + 0.7152G' + 0.0722B' + + Cb = -0.1146R' - 0.3854G' + 0.5B' + + Cr = 0.5R' - 0.4542G' - 0.0458B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. + +The default quantization is limited range, but full range is possible +although rarely seen. + +The ``V4L2_YCBCR_ENC_709`` encoding described above is the default for +this colorspace, but it can be overridden with ``V4L2_YCBCR_ENC_601``, +in which case the BT.601 Y'CbCr encoding is used. + +Two additional extended gamut Y'CbCr encodings are also possible with +this colorspace: + +The xvYCC 709 encoding (``V4L2_YCBCR_ENC_XV709``, :ref:`xvycc`) is +similar to the Rec. 709 encoding, but it allows for R', G' and B' values +that are outside the range [0…1]. The resulting Y', Cb and Cr values are +scaled and offset according to the limited range formula: + +.. 
math:: + + Y' = \frac{219}{256} * (0.2126R' + 0.7152G' + 0.0722B') + \frac{16}{256} + + Cb = \frac{224}{256} * (-0.1146R' - 0.3854G' + 0.5B') + + Cr = \frac{224}{256} * (0.5R' - 0.4542G' - 0.0458B') + +The xvYCC 601 encoding (``V4L2_YCBCR_ENC_XV601``, :ref:`xvycc`) is +similar to the BT.601 encoding, but it allows for R', G' and B' values +that are outside the range [0…1]. The resulting Y', Cb and Cr values are +scaled and offset according to the limited range formula: + +.. math:: + + Y' = \frac{219}{256} * (0.2990R' + 0.5870G' + 0.1140B') + \frac{16}{256} + + Cb = \frac{224}{256} * (-0.1687R' - 0.3313G' + 0.5B') + + Cr = \frac{224}{256} * (0.5R' - 0.4187G' - 0.0813B') + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5] and quantized without further scaling or offsets. +The non-standard xvYCC 709 or xvYCC 601 encodings can be +used by selecting ``V4L2_YCBCR_ENC_XV709`` or ``V4L2_YCBCR_ENC_XV601``. +As seen by the xvYCC formulas these encodings always use limited range quantization, +there is no full range variant. The whole point of these extended gamut encodings +is that values outside the limited range are still valid, although they +map to R', G' and B' values outside the [0…1] range and are therefore outside +the Rec. 709 colorspace gamut. + + +.. _col-srgb: + +Colorspace sRGB (V4L2_COLORSPACE_SRGB) +====================================== + +The :ref:`srgb` standard defines the colorspace used by most webcams +and computer graphics. The default transfer function is +``V4L2_XFER_FUNC_SRGB``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_601``. The default Y'CbCr quantization is limited range. + +Note that the :ref:`sycc` standard specifies full range quantization, +however all current capture hardware supported by the kernel convert +R'G'B' to limited range Y'CbCr. So choosing full range as the default +would break how applications interpret the quantization range. + +The chromaticities of the primary colors and the white reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: sRGB Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.640 + - 0.330 + * - Green + - 0.300 + - 0.600 + * - Blue + - 0.150 + - 0.060 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + +These chromaticities are identical to the Rec. 709 colorspace. + +Transfer function. Note that negative values for L are only used by the +Y'CbCr conversion. + +.. math:: + + L' = -1.055(-L)^{\frac{1}{2.4} } + 0.055\text{, for }L < -0.0031308 + + L' = 12.92L\text{, for }-0.0031308 \le L \le 0.0031308 + + L' = 1.055L ^{\frac{1}{2.4} } - 0.055\text{, for }0.0031308 < L \le 1 + +Inverse Transfer function: + +.. math:: + + L = -((-L' + 0.055) / 1.055) ^{2.4}\text{, for }L' < -0.04045 + + L = L' / 12.92\text{, for }-0.04045 \le L' \le 0.04045 + + L = ((L' + 0.055) / 1.055) ^{2.4}\text{, for }L' > 0.04045 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_601`` encoding as defined by :ref:`sycc`: + +.. math:: + + Y' = 0.2990R' + 0.5870G' + 0.1140B' + + Cb = -0.1687R' - 0.3313G' + 0.5B' + + Cr = 0.5R' - 0.4187G' - 0.0813B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. This transform is identical to one defined in SMPTE +170M/BT.601. The Y'CbCr quantization is limited range. + + +.. 
_col-oprgb: + +Colorspace opRGB (V4L2_COLORSPACE_OPRGB) +=============================================== + +The :ref:`oprgb` standard defines the colorspace used by computer +graphics that use the opRGB colorspace. The default transfer function is +``V4L2_XFER_FUNC_OPRGB``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_601``. The default Y'CbCr quantization is limited +range. + +Note that the :ref:`oprgb` standard specifies full range quantization, +however all current capture hardware supported by the kernel convert +R'G'B' to limited range Y'CbCr. So choosing full range as the default +would break how applications interpret the quantization range. + +The chromaticities of the primary colors and the white reference are: + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: opRGB Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.6400 + - 0.3300 + * - Green + - 0.2100 + - 0.7100 + * - Blue + - 0.1500 + - 0.0600 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + + +Transfer function: + +.. math:: + + L' = L ^{\frac{1}{2.19921875}} + +Inverse Transfer function: + +.. math:: + + L = L'^{(2.19921875)} + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_601`` encoding: + +.. math:: + + Y' = 0.2990R' + 0.5870G' + 0.1140B' + + Cb = -0.1687R' - 0.3313G' + 0.5B' + + Cr = 0.5R' - 0.4187G' - 0.0813B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. This transform is identical to one defined in SMPTE +170M/BT.601. The Y'CbCr quantization is limited range. + + +.. _col-bt2020: + +Colorspace BT.2020 (V4L2_COLORSPACE_BT2020) +=========================================== + +The :ref:`itu2020` standard defines the colorspace used by Ultra-high +definition television (UHDTV). The default transfer function is +``V4L2_XFER_FUNC_709``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_BT2020``. The default Y'CbCr quantization is limited range. +The chromaticities of the primary colors and the white reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: BT.2020 Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.708 + - 0.292 + * - Green + - 0.170 + - 0.797 + * - Blue + - 0.131 + - 0.046 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + + +Transfer function (same as Rec. 709): + +.. math:: + + L' = 4.5L\text{, for }0 \le L < 0.018 + + L' = 1.099L ^{0.45} - 0.099\text{, for } 0.018 \le L \le 1 + +Inverse Transfer function: + +.. math:: + + L = L' / 4.5\text{, for } L' < 0.081 + + L = \left( \frac{L' + 0.099}{1.099}\right) ^{\frac{1}{0.45} }\text{, for } L' \ge 0.081 + +Please note that while Rec. 709 is defined as the default transfer function +by the :ref:`itu2020` standard, in practice this colorspace is often used +with the :ref:`xf-smpte-2084`. In particular Ultra HD Blu-ray discs use +this combination. + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_BT2020`` encoding: + +.. math:: + + Y' = 0.2627R' + 0.6780G' + 0.0593B' + + Cb = -0.1396R' - 0.3604G' + 0.5B' + + Cr = 0.5R' - 0.4598G' - 0.0402B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. The Y'CbCr quantization is limited range. + +There is also an alternate constant luminance R'G'B' to Yc'CbcCrc +(``V4L2_YCBCR_ENC_BT2020_CONST_LUM``) encoding: + +Luma: + +.. 
math:: + :nowrap: + + \begin{align*} + Yc' = (0.2627R + 0.6780G + 0.0593B)'& \\ + B' - Yc' \le 0:& \\ + &Cbc = (B' - Yc') / 1.9404 \\ + B' - Yc' > 0: & \\ + &Cbc = (B' - Yc') / 1.5816 \\ + R' - Yc' \le 0:& \\ + &Crc = (R' - Y') / 1.7184 \\ + R' - Yc' > 0:& \\ + &Crc = (R' - Y') / 0.9936 + \end{align*} + +Yc' is clamped to the range [0…1] and Cbc and Crc are clamped to the +range [-0.5…0.5]. The Yc'CbcCrc quantization is limited range. + + +.. _col-dcip3: + +Colorspace DCI-P3 (V4L2_COLORSPACE_DCI_P3) +========================================== + +The :ref:`smpte431` standard defines the colorspace used by cinema +projectors that use the DCI-P3 colorspace. The default transfer function +is ``V4L2_XFER_FUNC_DCI_P3``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_709``. The default Y'CbCr quantization is limited range. + +.. note:: + + Note that this colorspace standard does not specify a + Y'CbCr encoding since it is not meant to be encoded to Y'CbCr. So this + default Y'CbCr encoding was picked because it is the HDTV encoding. + +The chromaticities of the primary colors and the white reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: DCI-P3 Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.6800 + - 0.3200 + * - Green + - 0.2650 + - 0.6900 + * - Blue + - 0.1500 + - 0.0600 + * - White Reference + - 0.3140 + - 0.3510 + + + +Transfer function: + +.. math:: + + L' = L^{\frac{1}{2.6}} + +Inverse Transfer function: + +.. math:: + + L = L'^{(2.6)} + +Y'CbCr encoding is not specified. V4L2 defaults to Rec. 709. + + +.. _col-smpte-240m: + +Colorspace SMPTE 240M (V4L2_COLORSPACE_SMPTE240M) +================================================= + +The :ref:`smpte240m` standard was an interim standard used during the +early days of HDTV (1988-1998). It has been superseded by Rec. 709. The +default transfer function is ``V4L2_XFER_FUNC_SMPTE240M``. The default +Y'CbCr encoding is ``V4L2_YCBCR_ENC_SMPTE240M``. The default Y'CbCr +quantization is limited range. The chromaticities of the primary colors +and the white reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: SMPTE 240M Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.630 + - 0.340 + * - Green + - 0.310 + - 0.595 + * - Blue + - 0.155 + - 0.070 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + +These chromaticities are identical to the SMPTE 170M colorspace. + +Transfer function: + +.. math:: + + L' = 4L\text{, for } 0 \le L < 0.0228 + + L' = 1.1115L ^{0.45} - 0.1115\text{, for } 0.0228 \le L \le 1 + +Inverse Transfer function: + +.. math:: + + L = \frac{L'}{4}\text{, for } 0 \le L' < 0.0913 + + L = \left( \frac{L' + 0.1115}{1.1115}\right) ^{\frac{1}{0.45} }\text{, for } L' \ge 0.0913 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_SMPTE240M`` encoding: + +.. math:: + + Y' = 0.2122R' + 0.7013G' + 0.0865B' + + Cb = -0.1161R' - 0.3839G' + 0.5B' + + Cr = 0.5R' - 0.4451G' - 0.0549B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the +range [-0.5…0.5]. The Y'CbCr quantization is limited range. + + +.. _col-sysm: + +Colorspace NTSC 1953 (V4L2_COLORSPACE_470_SYSTEM_M) +=================================================== + +This standard defines the colorspace used by NTSC in 1953. In practice +this colorspace is obsolete and SMPTE 170M should be used instead. 
The +default transfer function is ``V4L2_XFER_FUNC_709``. The default Y'CbCr +encoding is ``V4L2_YCBCR_ENC_601``. The default Y'CbCr quantization is +limited range. The chromaticities of the primary colors and the white +reference are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: NTSC 1953 Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.67 + - 0.33 + * - Green + - 0.21 + - 0.71 + * - Blue + - 0.14 + - 0.08 + * - White Reference (C) + - 0.310 + - 0.316 + + +.. note:: + + This colorspace uses Illuminant C instead of D65 as the white + reference. To correctly convert an image in this colorspace to another + that uses D65 you need to apply a chromatic adaptation algorithm such as + the Bradford method. + +The transfer function was never properly defined for NTSC 1953. The Rec. +709 transfer function is recommended in the literature: + +.. math:: + + L' = 4.5L\text{, for } 0 \le L < 0.018 + + L' = 1.099L ^{0.45} - 0.099\text{, for } 0.018 \le L \le 1 + +Inverse Transfer function: + +.. math:: + + L = \frac{L'}{4.5} \text{, for } L' < 0.081 + + L = \left( \frac{L' + 0.099}{1.099}\right) ^{\frac{1}{0.45} }\text{, for } L' \ge 0.081 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_601`` encoding: + +.. math:: + + Y' = 0.2990R' + 0.5870G' + 0.1140B' + + Cb = -0.1687R' - 0.3313G' + 0.5B' + + Cr = 0.5R' - 0.4187G' - 0.0813B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. The Y'CbCr quantization is limited range. This transform is +identical to one defined in SMPTE 170M/BT.601. + + +.. _col-sysbg: + +Colorspace EBU Tech. 3213 (V4L2_COLORSPACE_470_SYSTEM_BG) +========================================================= + +The :ref:`tech3213` standard defines the colorspace used by PAL/SECAM +in 1975. In practice this colorspace is obsolete and SMPTE 170M should +be used instead. The default transfer function is +``V4L2_XFER_FUNC_709``. The default Y'CbCr encoding is +``V4L2_YCBCR_ENC_601``. The default Y'CbCr quantization is limited +range. The chromaticities of the primary colors and the white reference +are: + + + +.. tabularcolumns:: |p{4.4cm}|p{4.4cm}|p{8.7cm}| + +.. flat-table:: EBU Tech. 3213 Chromaticities + :header-rows: 1 + :stub-columns: 0 + :widths: 1 1 2 + + * - Color + - x + - y + * - Red + - 0.64 + - 0.33 + * - Green + - 0.29 + - 0.60 + * - Blue + - 0.15 + - 0.06 + * - White Reference (D65) + - 0.3127 + - 0.3290 + + + +The transfer function was never properly defined for this colorspace. +The Rec. 709 transfer function is recommended in the literature: + +.. math:: + + L' = 4.5L\text{, for } 0 \le L < 0.018 + + L' = 1.099L ^{0.45} - 0.099\text{, for } 0.018 \le L \le 1 + +Inverse Transfer function: + +.. math:: + + L = \frac{L'}{4.5} \text{, for } L' < 0.081 + + L = \left(\frac{L' + 0.099}{1.099} \right) ^{\frac{1}{0.45} }\text{, for } L' \ge 0.081 + +The luminance (Y') and color difference (Cb and Cr) are obtained with +the following ``V4L2_YCBCR_ENC_601`` encoding: + +.. math:: + + Y' = 0.2990R' + 0.5870G' + 0.1140B' + + Cb = -0.1687R' - 0.3313G' + 0.5B' + + Cr = 0.5R' - 0.4187G' - 0.0813B' + +Y' is clamped to the range [0…1] and Cb and Cr are clamped to the range +[-0.5…0.5]. The Y'CbCr quantization is limited range. This transform is +identical to one defined in SMPTE 170M/BT.601. + + +.. 
_col-jpeg: + +Colorspace JPEG (V4L2_COLORSPACE_JPEG) +====================================== + +This colorspace defines the colorspace used by most (Motion-)JPEG +formats. The chromaticities of the primary colors and the white +reference are identical to sRGB. The transfer function use is +``V4L2_XFER_FUNC_SRGB``. The Y'CbCr encoding is ``V4L2_YCBCR_ENC_601`` +with full range quantization where Y' is scaled to [0…255] and Cb/Cr are +scaled to [-128…128] and then clipped to [-128…127]. + +.. note:: + + The JPEG standard does not actually store colorspace + information. So if something other than sRGB is used, then the driver + will have to set that information explicitly. Effectively + ``V4L2_COLORSPACE_JPEG`` can be considered to be an abbreviation for + ``V4L2_COLORSPACE_SRGB``, ``V4L2_YCBCR_ENC_601`` and + ``V4L2_QUANTIZATION_FULL_RANGE``. + +*************************************** +Detailed Transfer Function Descriptions +*************************************** + +.. _xf-smpte-2084: + +Transfer Function SMPTE 2084 (V4L2_XFER_FUNC_SMPTE2084) +======================================================= + +The :ref:`smpte2084` standard defines the transfer function used by +High Dynamic Range content. + +Constants: + m1 = (2610 / 4096) / 4 + + m2 = (2523 / 4096) * 128 + + c1 = 3424 / 4096 + + c2 = (2413 / 4096) * 32 + + c3 = (2392 / 4096) * 32 + +Transfer function: + L' = ((c1 + c2 * L\ :sup:`m1`) / (1 + c3 * L\ :sup:`m1`))\ :sup:`m2` + +Inverse Transfer function: + L = (max(L':sup:`1/m2` - c1, 0) / (c2 - c3 * + L'\ :sup:`1/m2`))\ :sup:`1/m1` + +Take care when converting between this transfer function and non-HDR transfer +functions: the linear RGB values [0…1] of HDR content map to a luminance range +of 0 to 10000 cd/m\ :sup:`2` whereas the linear RGB values of non-HDR (aka +Standard Dynamic Range or SDR) map to a luminance range of 0 to 100 cd/m\ :sup:`2`. + +To go from SDR to HDR you will have to divide L by 100 first. To go in the other +direction you will have to multiply L by 100. Of course, this clamps all +luminance values over 100 cd/m\ :sup:`2` to 100 cd/m\ :sup:`2`. + +There are better methods, see e.g. :ref:`colimg` for more in-depth information +about this. diff --git a/Documentation/media/uapi/v4l/colorspaces.rst b/Documentation/media/uapi/v4l/colorspaces.rst new file mode 100644 index 000000000..322eb94c1 --- /dev/null +++ b/Documentation/media/uapi/v4l/colorspaces.rst @@ -0,0 +1,163 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _colorspaces: + +*********** +Colorspaces +*********** + +'Color' is a very complex concept and depends on physics, chemistry and +biology. Just because you have three numbers that describe the 'red', +'green' and 'blue' components of the color of a pixel does not mean that +you can accurately display that color. A colorspace defines what it +actually *means* to have an RGB value of e.g. (255, 0, 0). That is, +which color should be reproduced on the screen in a perfectly calibrated +environment. + +In order to do that we first need to have a good definition of color, +i.e. some way to uniquely and unambiguously define a color so that +someone else can reproduce it. Human color vision is trichromatic since +the human eye has color receptors that are sensitive to three different +wavelengths of light. Hence the need to use three numbers to describe +color. Be glad you are not a mantis shrimp as those are sensitive to 12 +different wavelengths, so instead of RGB we would be using the +ABCDEFGHIJKL colorspace... 
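+
+As a small aside for readers who want to experiment: the SMPTE 2084 (PQ)
+transfer function quoted in the previous chapter can be checked numerically
+with a few lines of C. The sketch below is purely illustrative; the
+``pq_encode()`` and ``pq_decode()`` helpers are made-up names, not part of
+any V4L2 interface, and the only assumptions are double precision arithmetic
+and the constants given in that section (link with ``-lm``).
+
+.. code-block:: c
+
+    /* Evaluate the SMPTE 2084 (PQ) transfer function and its inverse,
+     * using the constants quoted in the transfer function section.
+     * Linear light 1.0 corresponds to 10000 cd/m^2.
+     */
+    #include <math.h>
+    #include <stdio.h>
+
+    static const double m1 = (2610.0 / 4096.0) / 4.0;
+    static const double m2 = (2523.0 / 4096.0) * 128.0;
+    static const double c1 = 3424.0 / 4096.0;
+    static const double c2 = (2413.0 / 4096.0) * 32.0;
+    static const double c3 = (2392.0 / 4096.0) * 32.0;
+
+    /* Linear light L in [0..1] to PQ-encoded L'. */
+    static double pq_encode(double L)
+    {
+            double Lm1 = pow(L, m1);
+
+            return pow((c1 + c2 * Lm1) / (1.0 + c3 * Lm1), m2);
+    }
+
+    /* PQ-encoded L' back to linear light. */
+    static double pq_decode(double Lp)
+    {
+            double p = pow(Lp, 1.0 / m2);
+            double num = p - c1;
+
+            if (num < 0.0)
+                    num = 0.0;
+            return pow(num / (c2 - c3 * p), 1.0 / m1);
+    }
+
+    int main(void)
+    {
+            /* 100 cd/m^2 (SDR reference white) is 0.01 on the PQ scale;
+             * the encoded value comes out at roughly 0.51.
+             */
+            double sdr_white = 0.01;
+
+            printf("PQ(0.01)   = %f\n", pq_encode(sdr_white));
+            printf("round trip = %f\n", pq_decode(pq_encode(sdr_white)));
+            return 0;
+    }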
+ +Color exists only in the eye and brain and is the result of how strongly +color receptors are stimulated. This is based on the Spectral Power +Distribution (SPD) which is a graph showing the intensity (radiant +power) of the light at wavelengths covering the visible spectrum as it +enters the eye. The science of colorimetry is about the relationship +between the SPD and color as perceived by the human brain. + +Since the human eye has only three color receptors it is perfectly +possible that different SPDs will result in the same stimulation of +those receptors and are perceived as the same color, even though the SPD +of the light is different. + +In the 1920s experiments were devised to determine the relationship +between SPDs and the perceived color and that resulted in the CIE 1931 +standard that defines spectral weighting functions that model the +perception of color. Specifically that standard defines functions that +can take an SPD and calculate the stimulus for each color receptor. +After some further mathematical transforms these stimuli are known as +the *CIE XYZ tristimulus* values and these X, Y and Z values describe a +color as perceived by a human unambiguously. These X, Y and Z values are +all in the range [0…1]. + +The Y value in the CIE XYZ colorspace corresponds to luminance. Often +the CIE XYZ colorspace is transformed to the normalized CIE xyY +colorspace: + +x = X / (X + Y + Z) + +y = Y / (X + Y + Z) + +The x and y values are the chromaticity coordinates and can be used to +define a color without the luminance component Y. It is very confusing +to have such similar names for these colorspaces. Just be aware that if +colors are specified with lower case 'x' and 'y', then the CIE xyY +colorspace is used. Upper case 'X' and 'Y' refer to the CIE XYZ +colorspace. Also, y has nothing to do with luminance. Together x and y +specify a color, and Y the luminance. That is really all you need to +remember from a practical point of view. At the end of this section you +will find reading resources that go into much more detail if you are +interested. + +A monitor or TV will reproduce colors by emitting light at three +different wavelengths, the combination of which will stimulate the color +receptors in the eye and thus cause the perception of color. +Historically these wavelengths were defined by the red, green and blue +phosphors used in the displays. These *color primaries* are part of what +defines a colorspace. + +Different display devices will have different primaries and some +primaries are more suitable for some display technologies than others. +This has resulted in a variety of colorspaces that are used for +different display technologies or uses. To define a colorspace you need +to define the three color primaries (these are typically defined as x, y +chromaticity coordinates from the CIE xyY colorspace) but also the white +reference: that is the color obtained when all three primaries are at +maximum power. This determines the relative power or energy of the +primaries. This is usually chosen to be close to daylight which has been +defined as the CIE D65 Illuminant. + +To recapitulate: the CIE XYZ colorspace uniquely identifies colors. +Other colorspaces are defined by three chromaticity coordinates defined +in the CIE xyY colorspace. Based on those a 3x3 matrix can be +constructed that transforms CIE XYZ colors to colors in the new +colorspace. 
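+
+As a concrete example of the relationships described above, the sketch below
+applies a 3x3 RGB-to-XYZ matrix and then derives the xy chromaticity
+coordinates. The matrix is the commonly published linear sRGB to CIE XYZ
+(D65) matrix; it is quoted here purely as an illustration and is not defined
+by V4L2. For a pure red pixel the computed chromaticity lands on the
+sRGB/Rec. 709 red primary (x = 0.640, y = 0.330) listed in the tables earlier
+in this document.
+
+.. code-block:: c
+
+    /* Map a linear RGB triple to CIE XYZ with a 3x3 matrix and compute the
+     * xy chromaticity coordinates, as described in the text above.
+     */
+    #include <stdio.h>
+
+    /* Commonly published linear sRGB to XYZ (D65) matrix, illustrative only. */
+    static const double srgb_to_xyz[3][3] = {
+            { 0.4124, 0.3576, 0.1805 },
+            { 0.2126, 0.7152, 0.0722 },
+            { 0.0193, 0.1192, 0.9505 },
+    };
+
+    static void rgb_to_xyz(const double rgb[3], double xyz[3])
+    {
+            int i;
+
+            for (i = 0; i < 3; i++)
+                    xyz[i] = srgb_to_xyz[i][0] * rgb[0] +
+                             srgb_to_xyz[i][1] * rgb[1] +
+                             srgb_to_xyz[i][2] * rgb[2];
+    }
+
+    int main(void)
+    {
+            double rgb[3] = { 1.0, 0.0, 0.0 };      /* pure red, linear light */
+            double xyz[3];
+            double sum;
+
+            rgb_to_xyz(rgb, xyz);
+            sum = xyz[0] + xyz[1] + xyz[2];
+
+            /* x and y should come out close to the sRGB red primary
+             * (0.640, 0.330); Y is the luminance of pure red.
+             */
+            printf("x = %.3f  y = %.3f  Y = %.4f\n",
+                   xyz[0] / sum, xyz[1] / sum, xyz[1]);
+            return 0;
+    }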
+ +Both the CIE XYZ and the RGB colorspace that are derived from the +specific chromaticity primaries are linear colorspaces. But neither the +eye, nor display technology is linear. Doubling the values of all +components in the linear colorspace will not be perceived as twice the +intensity of the color. So each colorspace also defines a transfer +function that takes a linear color component value and transforms it to +the non-linear component value, which is a closer match to the +non-linear performance of both the eye and displays. Linear component +values are denoted RGB, non-linear are denoted as R'G'B'. In general +colors used in graphics are all R'G'B', except in openGL which uses +linear RGB. Special care should be taken when dealing with openGL to +provide linear RGB colors or to use the built-in openGL support to apply +the inverse transfer function. + +The final piece that defines a colorspace is a function that transforms +non-linear R'G'B' to non-linear Y'CbCr. This function is determined by +the so-called luma coefficients. There may be multiple possible Y'CbCr +encodings allowed for the same colorspace. Many encodings of color +prefer to use luma (Y') and chroma (CbCr) instead of R'G'B'. Since the +human eye is more sensitive to differences in luminance than in color +this encoding allows one to reduce the amount of color information +compared to the luma data. Note that the luma (Y') is unrelated to the Y +in the CIE XYZ colorspace. Also note that Y'CbCr is often called YCbCr +or YUV even though these are strictly speaking wrong. + +Sometimes people confuse Y'CbCr as being a colorspace. This is not +correct, it is just an encoding of an R'G'B' color into luma and chroma +values. The underlying colorspace that is associated with the R'G'B' +color is also associated with the Y'CbCr color. + +The final step is how the RGB, R'G'B' or Y'CbCr values are quantized. +The CIE XYZ colorspace where X, Y and Z are in the range [0…1] describes +all colors that humans can perceive, but the transform to another +colorspace will produce colors that are outside the [0…1] range. Once +clamped to the [0…1] range those colors can no longer be reproduced in +that colorspace. This clamping is what reduces the extent or gamut of +the colorspace. How the range of [0…1] is translated to integer values +in the range of [0…255] (or higher, depending on the color depth) is +called the quantization. This is *not* part of the colorspace +definition. In practice RGB or R'G'B' values are full range, i.e. they +use the full [0…255] range. Y'CbCr values on the other hand are limited +range with Y' using [16…235] and Cb and Cr using [16…240]. + +Unfortunately, in some cases limited range RGB is also used where the +components use the range [16…235]. And full range Y'CbCr also exists +using the [0…255] range. + +In order to correctly interpret a color you need to know the +quantization range, whether it is R'G'B' or Y'CbCr, the used Y'CbCr +encoding and the colorspace. From that information you can calculate the +corresponding CIE XYZ color and map that again to whatever colorspace +your display device uses. + +The colorspace definition itself consists of the three chromaticity +primaries, the white reference chromaticity, a transfer function and the +luma coefficients needed to transform R'G'B' to Y'CbCr. While some +colorspace standards correctly define all four, quite often the +colorspace standard only defines some, and you have to rely on other +standards for the missing pieces. 
The fact that colorspaces are often a +mix of different standards also led to very confusing naming conventions +where the name of a standard was used to name a colorspace when in fact +that standard was part of various other colorspaces as well. + +If you want to read more about colors and colorspaces, then the +following resources are useful: :ref:`poynton` is a good practical +book for video engineers, :ref:`colimg` has a much broader scope and +describes many more aspects of color (physics, chemistry, biology, +etc.). The +`http://www.brucelindbloom.com `__ +website is an excellent resource, especially with respect to the +mathematics behind colorspace conversions. The wikipedia +`CIE 1931 colorspace `__ +article is also very useful. diff --git a/Documentation/media/uapi/v4l/common-defs.rst b/Documentation/media/uapi/v4l/common-defs.rst new file mode 100644 index 000000000..39058216b --- /dev/null +++ b/Documentation/media/uapi/v4l/common-defs.rst @@ -0,0 +1,13 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _common-defs: + +****************************************************** +Common definitions for V4L2 and V4L2 subdev interfaces +****************************************************** + + +.. toctree:: + :maxdepth: 1 + + selections-common diff --git a/Documentation/media/uapi/v4l/common.rst b/Documentation/media/uapi/v4l/common.rst new file mode 100644 index 000000000..5f93e7112 --- /dev/null +++ b/Documentation/media/uapi/v4l/common.rst @@ -0,0 +1,46 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _common: + +################### +Common API Elements +################### +Programming a V4L2 device consists of these steps: + +- Opening the device + +- Changing device properties, selecting a video and audio input, video + standard, picture brightness a. o. + +- Negotiating a data format + +- Negotiating an input/output method + +- The actual input/output loop + +- Closing the device + +In practice most steps are optional and can be executed out of order. It +depends on the V4L2 device type, you can read about the details in +:ref:`devices`. In this chapter we will discuss the basic concepts +applicable to all devices. + + +.. toctree:: + :maxdepth: 1 + + open + querycap + app-pri + video + audio + tuner + standard + dv-timings + control + extended-controls + format + planar-apis + selection-api + crop + streaming-par diff --git a/Documentation/media/uapi/v4l/compat.rst b/Documentation/media/uapi/v4l/compat.rst new file mode 100644 index 000000000..8b5e1cebd --- /dev/null +++ b/Documentation/media/uapi/v4l/compat.rst @@ -0,0 +1,18 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _compat: + +******* +Changes +******* + +The following chapters document the evolution of the V4L2 API, errata or +extensions. They are also intended to help application and driver +writers to port or update their code. + + +.. toctree:: + :maxdepth: 1 + + diff-v4l + hist-v4l2 diff --git a/Documentation/media/uapi/v4l/constraints.svg b/Documentation/media/uapi/v4l/constraints.svg new file mode 100644 index 000000000..7e5d7185c --- /dev/null +++ b/Documentation/media/uapi/v4l/constraints.svg @@ -0,0 +1,10 @@ + +image/svg+xmlV4L2_SEL_FLAG_GE +ORIGINAL +V4L2_SEL_FLAG_LE + diff --git a/Documentation/media/uapi/v4l/control.rst b/Documentation/media/uapi/v4l/control.rst new file mode 100644 index 000000000..c1e6adbe8 --- /dev/null +++ b/Documentation/media/uapi/v4l/control.rst @@ -0,0 +1,505 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. 
_control: + +************* +User Controls +************* + +Devices typically have a number of user-settable controls such as +brightness, saturation and so on, which would be presented to the user +on a graphical user interface. But, different devices will have +different controls available, and furthermore, the range of possible +values, and the default value will vary from device to device. The +control ioctls provide the information and a mechanism to create a nice +user interface for these controls that will work correctly with any +device. + +All controls are accessed using an ID value. V4L2 defines several IDs +for specific purposes. Drivers can also implement their own custom +controls using ``V4L2_CID_PRIVATE_BASE`` [#f1]_ and higher values. The +pre-defined control IDs have the prefix ``V4L2_CID_``, and are listed in +:ref:`control-id`. The ID is used when querying the attributes of a +control, and when getting or setting the current value. + +Generally applications should present controls to the user without +assumptions about their purpose. Each control comes with a name string +the user is supposed to understand. When the purpose is non-intuitive +the driver writer should provide a user manual, a user interface plug-in +or a driver specific panel application. Predefined IDs were introduced +to change a few controls programmatically, for example to mute a device +during a channel switch. + +Drivers may enumerate different controls after switching the current +video input or output, tuner or modulator, or audio input or output. +Different in the sense of other bounds, another default and current +value, step size or other menu items. A control with a certain *custom* +ID can also change name and type. + +If a control is not applicable to the current configuration of the +device (for example, it doesn't apply to the current video input) +drivers set the ``V4L2_CTRL_FLAG_INACTIVE`` flag. + +Control values are stored globally, they do not change when switching +except to stay within the reported bounds. They also do not change e. g. +when the device is opened or closed, when the tuner radio frequency is +changed or generally never without application request. + +V4L2 specifies an event mechanism to notify applications when controls +change value (see +:ref:`VIDIOC_SUBSCRIBE_EVENT`, event +``V4L2_EVENT_CTRL``), panel applications might want to make use of that +in order to always reflect the correct control value. + +All controls use machine endianness. + + +.. _control-id: + +Control IDs +=========== + +``V4L2_CID_BASE`` + First predefined ID, equal to ``V4L2_CID_BRIGHTNESS``. + +``V4L2_CID_USER_BASE`` + Synonym of ``V4L2_CID_BASE``. + +``V4L2_CID_BRIGHTNESS`` ``(integer)`` + Picture brightness, or more precisely, the black level. + +``V4L2_CID_CONTRAST`` ``(integer)`` + Picture contrast or luma gain. + +``V4L2_CID_SATURATION`` ``(integer)`` + Picture color saturation or chroma gain. + +``V4L2_CID_HUE`` ``(integer)`` + Hue or color balance. + +``V4L2_CID_AUDIO_VOLUME`` ``(integer)`` + Overall audio volume. Note some drivers also provide an OSS or ALSA + mixer interface. + +``V4L2_CID_AUDIO_BALANCE`` ``(integer)`` + Audio stereo balance. Minimum corresponds to all the way left, + maximum to right. + +``V4L2_CID_AUDIO_BASS`` ``(integer)`` + Audio bass adjustment. + +``V4L2_CID_AUDIO_TREBLE`` ``(integer)`` + Audio treble adjustment. + +``V4L2_CID_AUDIO_MUTE`` ``(boolean)`` + Mute audio, i. e. set the volume to zero, however without affecting + ``V4L2_CID_AUDIO_VOLUME``. 
Like ALSA drivers, V4L2 drivers must mute + at load time to avoid excessive noise. Actually the entire device + should be reset to a low power consumption state. + +``V4L2_CID_AUDIO_LOUDNESS`` ``(boolean)`` + Loudness mode (bass boost). + +``V4L2_CID_BLACK_LEVEL`` ``(integer)`` + Another name for brightness (not a synonym of + ``V4L2_CID_BRIGHTNESS``). This control is deprecated and should not + be used in new drivers and applications. + +``V4L2_CID_AUTO_WHITE_BALANCE`` ``(boolean)`` + Automatic white balance (cameras). + +``V4L2_CID_DO_WHITE_BALANCE`` ``(button)`` + This is an action control. When set (the value is ignored), the + device will do a white balance and then hold the current setting. + Contrast this with the boolean ``V4L2_CID_AUTO_WHITE_BALANCE``, + which, when activated, keeps adjusting the white balance. + +``V4L2_CID_RED_BALANCE`` ``(integer)`` + Red chroma balance. + +``V4L2_CID_BLUE_BALANCE`` ``(integer)`` + Blue chroma balance. + +``V4L2_CID_GAMMA`` ``(integer)`` + Gamma adjust. + +``V4L2_CID_WHITENESS`` ``(integer)`` + Whiteness for grey-scale devices. This is a synonym for + ``V4L2_CID_GAMMA``. This control is deprecated and should not be + used in new drivers and applications. + +``V4L2_CID_EXPOSURE`` ``(integer)`` + Exposure (cameras). [Unit?] + +``V4L2_CID_AUTOGAIN`` ``(boolean)`` + Automatic gain/exposure control. + +``V4L2_CID_GAIN`` ``(integer)`` + Gain control. + + Primarily used to control gain on e.g. TV tuners but also on + webcams. Most devices control only digital gain with this control + but on some this could include analogue gain as well. Devices that + recognise the difference between digital and analogue gain use + controls ``V4L2_CID_DIGITAL_GAIN`` and ``V4L2_CID_ANALOGUE_GAIN``. + +``V4L2_CID_HFLIP`` ``(boolean)`` + Mirror the picture horizontally. + +``V4L2_CID_VFLIP`` ``(boolean)`` + Mirror the picture vertically. + +.. _v4l2-power-line-frequency: + +``V4L2_CID_POWER_LINE_FREQUENCY`` ``(enum)`` + Enables a power line frequency filter to avoid flicker. Possible + values for ``enum v4l2_power_line_frequency`` are: + ``V4L2_CID_POWER_LINE_FREQUENCY_DISABLED`` (0), + ``V4L2_CID_POWER_LINE_FREQUENCY_50HZ`` (1), + ``V4L2_CID_POWER_LINE_FREQUENCY_60HZ`` (2) and + ``V4L2_CID_POWER_LINE_FREQUENCY_AUTO`` (3). + +``V4L2_CID_HUE_AUTO`` ``(boolean)`` + Enables automatic hue control by the device. The effect of setting + ``V4L2_CID_HUE`` while automatic hue control is enabled is + undefined, drivers should ignore such request. + +``V4L2_CID_WHITE_BALANCE_TEMPERATURE`` ``(integer)`` + This control specifies the white balance settings as a color + temperature in Kelvin. A driver should have a minimum of 2800 + (incandescent) to 6500 (daylight). For more information about color + temperature see + `Wikipedia `__. + +``V4L2_CID_SHARPNESS`` ``(integer)`` + Adjusts the sharpness filters in a camera. The minimum value + disables the filters, higher values give a sharper picture. + +``V4L2_CID_BACKLIGHT_COMPENSATION`` ``(integer)`` + Adjusts the backlight compensation in a camera. The minimum value + disables backlight compensation. + +``V4L2_CID_CHROMA_AGC`` ``(boolean)`` + Chroma automatic gain control. + +``V4L2_CID_CHROMA_GAIN`` ``(integer)`` + Adjusts the Chroma gain control (for use when chroma AGC is + disabled). + +``V4L2_CID_COLOR_KILLER`` ``(boolean)`` + Enable the color killer (i. e. force a black & white image in case + of a weak video signal). + +.. _v4l2-colorfx: + +``V4L2_CID_COLORFX`` ``(enum)`` + Selects a color effect. 
The following values are defined: + + + +.. tabularcolumns:: |p{5.5cm}|p{12cm}| + +.. flat-table:: + :header-rows: 0 + :stub-columns: 0 + :widths: 11 24 + + * - ``V4L2_COLORFX_NONE`` + - Color effect is disabled. + * - ``V4L2_COLORFX_ANTIQUE`` + - An aging (old photo) effect. + * - ``V4L2_COLORFX_ART_FREEZE`` + - Frost color effect. + * - ``V4L2_COLORFX_AQUA`` + - Water color, cool tone. + * - ``V4L2_COLORFX_BW`` + - Black and white. + * - ``V4L2_COLORFX_EMBOSS`` + - Emboss, the highlights and shadows replace light/dark boundaries + and low contrast areas are set to a gray background. + * - ``V4L2_COLORFX_GRASS_GREEN`` + - Grass green. + * - ``V4L2_COLORFX_NEGATIVE`` + - Negative. + * - ``V4L2_COLORFX_SEPIA`` + - Sepia tone. + * - ``V4L2_COLORFX_SKETCH`` + - Sketch. + * - ``V4L2_COLORFX_SKIN_WHITEN`` + - Skin whiten. + * - ``V4L2_COLORFX_SKY_BLUE`` + - Sky blue. + * - ``V4L2_COLORFX_SOLARIZATION`` + - Solarization, the image is partially reversed in tone, only color + values above or below a certain threshold are inverted. + * - ``V4L2_COLORFX_SILHOUETTE`` + - Silhouette (outline). + * - ``V4L2_COLORFX_VIVID`` + - Vivid colors. + * - ``V4L2_COLORFX_SET_CBCR`` + - The Cb and Cr chroma components are replaced by fixed coefficients + determined by ``V4L2_CID_COLORFX_CBCR`` control. + + + +``V4L2_CID_COLORFX_CBCR`` ``(integer)`` + Determines the Cb and Cr coefficients for ``V4L2_COLORFX_SET_CBCR`` + color effect. Bits [7:0] of the supplied 32 bit value are + interpreted as Cr component, bits [15:8] as Cb component and bits + [31:16] must be zero. + +``V4L2_CID_AUTOBRIGHTNESS`` ``(boolean)`` + Enable Automatic Brightness. + +``V4L2_CID_ROTATE`` ``(integer)`` + Rotates the image by specified angle. Common angles are 90, 270 and + 180. Rotating the image to 90 and 270 will reverse the height and + width of the display window. It is necessary to set the new height + and width of the picture using the + :ref:`VIDIOC_S_FMT ` ioctl according to the + rotation angle selected. + +``V4L2_CID_BG_COLOR`` ``(integer)`` + Sets the background color on the current output device. Background + color needs to be specified in the RGB24 format. The supplied 32 bit + value is interpreted as bits 0-7 Red color information, bits 8-15 + Green color information, bits 16-23 Blue color information and bits + 24-31 must be zero. + +``V4L2_CID_ILLUMINATORS_1 V4L2_CID_ILLUMINATORS_2`` ``(boolean)`` + Switch on or off the illuminator 1 or 2 of the device (usually a + microscope). + +``V4L2_CID_MIN_BUFFERS_FOR_CAPTURE`` ``(integer)`` + This is a read-only control that can be read by the application and + used as a hint to determine the number of CAPTURE buffers to pass to + REQBUFS. The value is the minimum number of CAPTURE buffers that is + necessary for hardware to work. + +``V4L2_CID_MIN_BUFFERS_FOR_OUTPUT`` ``(integer)`` + This is a read-only control that can be read by the application and + used as a hint to determine the number of OUTPUT buffers to pass to + REQBUFS. The value is the minimum number of OUTPUT buffers that is + necessary for hardware to work. + +.. _v4l2-alpha-component: + +``V4L2_CID_ALPHA_COMPONENT`` ``(integer)`` + Sets the alpha color component. When a capture device (or capture + queue of a mem-to-mem device) produces a frame format that includes + an alpha component (e.g. + :ref:`packed RGB image formats `) and the alpha value + is not defined by the device or the mem-to-mem input data this + control lets you select the alpha component value of all pixels. 
+ When an output device (or output queue of a mem-to-mem device) + consumes a frame format that doesn't include an alpha component and + the device supports alpha channel processing this control lets you + set the alpha component value of all pixels for further processing + in the device. + +``V4L2_CID_LASTP1`` + End of the predefined control IDs (currently + ``V4L2_CID_ALPHA_COMPONENT`` + 1). + +``V4L2_CID_PRIVATE_BASE`` + ID of the first custom (driver specific) control. Applications + depending on particular custom controls should check the driver name + and version, see :ref:`querycap`. + +Applications can enumerate the available controls with the +:ref:`VIDIOC_QUERYCTRL` and +:ref:`VIDIOC_QUERYMENU ` ioctls, get and set a +control value with the :ref:`VIDIOC_G_CTRL ` and +:ref:`VIDIOC_S_CTRL ` ioctls. Drivers must implement +``VIDIOC_QUERYCTRL``, ``VIDIOC_G_CTRL`` and ``VIDIOC_S_CTRL`` when the +device has one or more controls, ``VIDIOC_QUERYMENU`` when it has one or +more menu type controls. + + +.. _enum_all_controls: + +Example: Enumerating all controls +================================= + +.. code-block:: c + + struct v4l2_queryctrl queryctrl; + struct v4l2_querymenu querymenu; + + static void enumerate_menu(__u32 id) + { + printf(" Menu items:\\n"); + + memset(&querymenu, 0, sizeof(querymenu)); + querymenu.id = id; + + for (querymenu.index = queryctrl.minimum; + querymenu.index <= queryctrl.maximum; + querymenu.index++) { + if (0 == ioctl(fd, VIDIOC_QUERYMENU, &querymenu)) { + printf(" %s\\n", querymenu.name); + } + } + } + + memset(&queryctrl, 0, sizeof(queryctrl)); + + queryctrl.id = V4L2_CTRL_FLAG_NEXT_CTRL; + while (0 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) { + if (!(queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)) { + printf("Control %s\\n", queryctrl.name); + + if (queryctrl.type == V4L2_CTRL_TYPE_MENU) + enumerate_menu(queryctrl.id); + } + + queryctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL; + } + if (errno != EINVAL) { + perror("VIDIOC_QUERYCTRL"); + exit(EXIT_FAILURE); + } + +Example: Enumerating all controls including compound controls +============================================================= + +.. code-block:: c + + struct v4l2_query_ext_ctrl query_ext_ctrl; + + memset(&query_ext_ctrl, 0, sizeof(query_ext_ctrl)); + + query_ext_ctrl.id = V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND; + while (0 == ioctl(fd, VIDIOC_QUERY_EXT_CTRL, &query_ext_ctrl)) { + if (!(query_ext_ctrl.flags & V4L2_CTRL_FLAG_DISABLED)) { + printf("Control %s\\n", query_ext_ctrl.name); + + if (query_ext_ctrl.type == V4L2_CTRL_TYPE_MENU) + enumerate_menu(query_ext_ctrl.id); + } + + query_ext_ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND; + } + if (errno != EINVAL) { + perror("VIDIOC_QUERY_EXT_CTRL"); + exit(EXIT_FAILURE); + } + +Example: Enumerating all user controls (old style) +================================================== + +.. 
code-block:: c

+    memset(&queryctrl, 0, sizeof(queryctrl));
+
+    for (queryctrl.id = V4L2_CID_BASE;
+         queryctrl.id < V4L2_CID_LASTP1;
+         queryctrl.id++) {
+        if (0 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+            if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+                continue;
+
+            printf("Control %s\\n", queryctrl.name);
+
+            if (queryctrl.type == V4L2_CTRL_TYPE_MENU)
+                enumerate_menu(queryctrl.id);
+        } else {
+            if (errno == EINVAL)
+                continue;
+
+            perror("VIDIOC_QUERYCTRL");
+            exit(EXIT_FAILURE);
+        }
+    }
+
+    for (queryctrl.id = V4L2_CID_PRIVATE_BASE;;
+         queryctrl.id++) {
+        if (0 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+            if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
+                continue;
+
+            printf("Control %s\\n", queryctrl.name);
+
+            if (queryctrl.type == V4L2_CTRL_TYPE_MENU)
+                enumerate_menu(queryctrl.id);
+        } else {
+            if (errno == EINVAL)
+                break;
+
+            perror("VIDIOC_QUERYCTRL");
+            exit(EXIT_FAILURE);
+        }
+    }
+
+
+Example: Changing controls
+==========================
+
+.. code-block:: c
+
+    struct v4l2_queryctrl queryctrl;
+    struct v4l2_control control;
+
+    memset(&queryctrl, 0, sizeof(queryctrl));
+    queryctrl.id = V4L2_CID_BRIGHTNESS;
+
+    if (-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+        if (errno != EINVAL) {
+            perror("VIDIOC_QUERYCTRL");
+            exit(EXIT_FAILURE);
+        } else {
+            printf("V4L2_CID_BRIGHTNESS is not supported\\n");
+        }
+    } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+        printf("V4L2_CID_BRIGHTNESS is not supported\\n");
+    } else {
+        memset(&control, 0, sizeof(control));
+        control.id = V4L2_CID_BRIGHTNESS;
+        control.value = queryctrl.default_value;
+
+        if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)) {
+            perror("VIDIOC_S_CTRL");
+            exit(EXIT_FAILURE);
+        }
+    }
+
+    memset(&control, 0, sizeof(control));
+    control.id = V4L2_CID_CONTRAST;
+
+    if (0 == ioctl(fd, VIDIOC_G_CTRL, &control)) {
+        control.value += 1;
+
+        /* The driver may clamp the value or return ERANGE, ignored here */
+
+        if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)
+            && errno != ERANGE) {
+            perror("VIDIOC_S_CTRL");
+            exit(EXIT_FAILURE);
+        }
+        /* Ignore if V4L2_CID_CONTRAST is unsupported */
+    } else if (errno != EINVAL) {
+        perror("VIDIOC_G_CTRL");
+        exit(EXIT_FAILURE);
+    }
+
+    control.id = V4L2_CID_AUDIO_MUTE;
+    control.value = 1; /* silence */
+
+    /* Errors ignored */
+    ioctl(fd, VIDIOC_S_CTRL, &control);
+
+.. [#f1]
+   The use of ``V4L2_CID_PRIVATE_BASE`` is problematic because different
+   drivers may use the same ``V4L2_CID_PRIVATE_BASE`` ID for different
+   controls. This makes it hard to programmatically set such controls
+   since the meaning of the control with that ID is driver dependent. In
+   order to resolve this drivers use unique IDs and the
+   ``V4L2_CID_PRIVATE_BASE`` IDs are mapped to those unique IDs by the
+   kernel. Consider these ``V4L2_CID_PRIVATE_BASE`` IDs as aliases to
+   the real IDs.
+
+   Many applications today still use the ``V4L2_CID_PRIVATE_BASE`` IDs
+   instead of using :ref:`VIDIOC_QUERYCTRL` with
+   the ``V4L2_CTRL_FLAG_NEXT_CTRL`` flag to enumerate all IDs, so
+   support for ``V4L2_CID_PRIVATE_BASE`` is still around.
diff --git a/Documentation/media/uapi/v4l/crop.rst b/Documentation/media/uapi/v4l/crop.rst
new file mode 100644
index 000000000..45e8a895a
--- /dev/null
+++ b/Documentation/media/uapi/v4l/crop.rst
@@ -0,0 +1,317 @@
+.. -*- coding: utf-8; mode: rst -*-
+
+.. _crop:
+
+*****************************************************
+Image Cropping, Insertion and Scaling -- the CROP API
+*****************************************************
+
+.. 
note:: + + The CROP API is mostly superseded by the newer :ref:`SELECTION API + `. The new API should be preferred in most cases, + with the exception of pixel aspect ratio detection, which is + implemented by :ref:`VIDIOC_CROPCAP ` and has no + equivalent in the SELECTION API. See :ref:`selection-vs-crop` for a + comparison of the two APIs. + +Some video capture devices can sample a subsection of the picture and +shrink or enlarge it to an image of arbitrary size. We call these +abilities cropping and scaling. Some video output devices can scale an +image up or down and insert it at an arbitrary scan line and horizontal +offset into a video signal. + +Applications can use the following API to select an area in the video +signal, query the default area and the hardware limits. + +.. note:: + + Despite their name, the :ref:`VIDIOC_CROPCAP `, + :ref:`VIDIOC_G_CROP ` and :ref:`VIDIOC_S_CROP + ` ioctls apply to input as well as output devices. + +Scaling requires a source and a target. On a video capture or overlay +device the source is the video signal, and the cropping ioctls determine +the area actually sampled. The target are images read by the application +or overlaid onto the graphics screen. Their size (and position for an +overlay) is negotiated with the :ref:`VIDIOC_G_FMT ` +and :ref:`VIDIOC_S_FMT ` ioctls. + +On a video output device the source are the images passed in by the +application, and their size is again negotiated with the +:ref:`VIDIOC_G_FMT ` and :ref:`VIDIOC_S_FMT ` +ioctls, or may be encoded in a compressed video stream. The target is +the video signal, and the cropping ioctls determine the area where the +images are inserted. + +Source and target rectangles are defined even if the device does not +support scaling or the :ref:`VIDIOC_G_CROP ` and +:ref:`VIDIOC_S_CROP ` ioctls. Their size (and position +where applicable) will be fixed in this case. + +.. note:: + + All capture and output devices that support the CROP or SELECTION + API will also support the :ref:`VIDIOC_CROPCAP ` + ioctl. + +Cropping Structures +=================== + + +.. _crop-scale: + +.. kernel-figure:: crop.svg + :alt: crop.svg + :align: center + + Image Cropping, Insertion and Scaling + + The cropping, insertion and scaling process + + + +For capture devices the coordinates of the top left corner, width and +height of the area which can be sampled is given by the ``bounds`` +substructure of the struct :c:type:`v4l2_cropcap` returned +by the :ref:`VIDIOC_CROPCAP ` ioctl. To support a wide +range of hardware this specification does not define an origin or units. +However by convention drivers should horizontally count unscaled samples +relative to 0H (the leading edge of the horizontal sync pulse, see +:ref:`vbi-hsync`). Vertically ITU-R line numbers of the first field +(see ITU R-525 line numbering for :ref:`525 lines ` and for +:ref:`625 lines `), multiplied by two if the driver +can capture both fields. + +The top left corner, width and height of the source rectangle, that is +the area actually sampled, is given by struct +:c:type:`v4l2_crop` using the same coordinate system as +struct :c:type:`v4l2_cropcap`. Applications can use the +:ref:`VIDIOC_G_CROP ` and :ref:`VIDIOC_S_CROP ` +ioctls to get and set this rectangle. It must lie completely within the +capture boundaries and the driver may further adjust the requested size +and/or position according to hardware limitations. 
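+
+The following is an illustrative sketch only (error handling is kept to
+a minimum and ``fd`` is assumed to be an open video capture device): the
+application requests a source rectangle of half the width and height of
+the ``bounds`` area, centered within it, and then reads back whatever
+rectangle the driver actually selected:
+
+.. code-block:: c
+
+    struct v4l2_cropcap cropcap;
+    struct v4l2_crop crop;
+
+    memset (&cropcap, 0, sizeof (cropcap));
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+        perror ("VIDIOC_CROPCAP");
+        exit (EXIT_FAILURE);
+    }
+
+    /* Request half the width and height of the bounds area,
+       centered within it. */
+
+    memset (&crop, 0, sizeof (crop));
+    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    crop.c.width = cropcap.bounds.width / 2;
+    crop.c.height = cropcap.bounds.height / 2;
+    crop.c.left = cropcap.bounds.left + cropcap.bounds.width / 4;
+    crop.c.top = cropcap.bounds.top + cropcap.bounds.height / 4;
+
+    /* EINVAL means cropping is not supported at all. */
+
+    if (-1 == ioctl (fd, VIDIOC_S_CROP, &crop)
+        && errno != EINVAL) {
+        perror ("VIDIOC_S_CROP");
+        exit (EXIT_FAILURE);
+    }
+
+    /* The driver may have aligned or clamped the rectangle;
+       read back what was actually selected. */
+
+    if (0 == ioctl (fd, VIDIOC_G_CROP, &crop))
+        printf ("sampling %ux%u at (%d,%d)\n",
+                crop.c.width, crop.c.height, crop.c.left, crop.c.top);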
+ +Each capture device has a default source rectangle, given by the +``defrect`` substructure of struct +:c:type:`v4l2_cropcap`. The center of this rectangle +shall align with the center of the active picture area of the video +signal, and cover what the driver writer considers the complete picture. +Drivers shall reset the source rectangle to the default when the driver +is first loaded, but not later. + +For output devices these structures and ioctls are used accordingly, +defining the *target* rectangle where the images will be inserted into +the video signal. + + +Scaling Adjustments +=================== + +Video hardware can have various cropping, insertion and scaling +limitations. It may only scale up or down, support only discrete scaling +factors, or have different scaling abilities in horizontal and vertical +direction. Also it may not support scaling at all. At the same time the +struct :c:type:`v4l2_crop` rectangle may have to be aligned, +and both the source and target rectangles may have arbitrary upper and +lower size limits. In particular the maximum ``width`` and ``height`` in +struct :c:type:`v4l2_crop` may be smaller than the struct +:c:type:`v4l2_cropcap`. ``bounds`` area. Therefore, as +usual, drivers are expected to adjust the requested parameters and +return the actual values selected. + +Applications can change the source or the target rectangle first, as +they may prefer a particular image size or a certain area in the video +signal. If the driver has to adjust both to satisfy hardware +limitations, the last requested rectangle shall take priority, and the +driver should preferably adjust the opposite one. The +:ref:`VIDIOC_TRY_FMT ` ioctl however shall not change +the driver state and therefore only adjust the requested rectangle. + +Suppose scaling on a video capture device is restricted to a factor 1:1 +or 2:1 in either direction and the target image size must be a multiple +of 16 × 16 pixels. The source cropping rectangle is set to defaults, +which are also the upper limit in this example, of 640 × 400 pixels at +offset 0, 0. An application requests an image size of 300 × 225 pixels, +assuming video will be scaled down from the "full picture" accordingly. +The driver sets the image size to the closest possible values 304 × 224, +then chooses the cropping rectangle closest to the requested size, that +is 608 × 224 (224 × 2:1 would exceed the limit 400). The offset 0, 0 is +still valid, thus unmodified. Given the default cropping rectangle +reported by :ref:`VIDIOC_CROPCAP ` the application can +easily propose another offset to center the cropping rectangle. + +Now the application may insist on covering an area using a picture +aspect ratio closer to the original request, so it asks for a cropping +rectangle of 608 × 456 pixels. The present scaling factors limit +cropping to 640 × 384, so the driver returns the cropping size 608 × 384 +and adjusts the image size to closest possible 304 × 192. + + +Examples +======== + +Source and target rectangles shall remain unchanged across closing and +reopening a device, such that piping data into or out of a device will +work without special preparations. More advanced applications should +ensure the parameters are suitable before starting I/O. + +.. note:: + + On the next two examples, a video capture device is assumed; + change ``V4L2_BUF_TYPE_VIDEO_CAPTURE`` for other types of device. + +Example: Resetting the cropping parameters +========================================== + +.. 
code-block:: c

+    struct v4l2_cropcap cropcap;
+    struct v4l2_crop crop;
+
+    memset (&cropcap, 0, sizeof (cropcap));
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+        perror ("VIDIOC_CROPCAP");
+        exit (EXIT_FAILURE);
+    }
+
+    memset (&crop, 0, sizeof (crop));
+    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    crop.c = cropcap.defrect;
+
+    /* Ignore if cropping is not supported (EINVAL). */
+
+    if (-1 == ioctl (fd, VIDIOC_S_CROP, &crop)
+        && errno != EINVAL) {
+        perror ("VIDIOC_S_CROP");
+        exit (EXIT_FAILURE);
+    }
+
+
+Example: Simple downscaling
+===========================
+
+.. code-block:: c
+
+    struct v4l2_cropcap cropcap;
+    struct v4l2_format format;
+
+    reset_cropping_parameters ();
+
+    /* Query the default rectangle so we know the "full picture" size. */
+
+    memset (&cropcap, 0, sizeof (cropcap));
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+        perror ("VIDIOC_CROPCAP");
+        exit (EXIT_FAILURE);
+    }
+
+    /* Scale down to 1/4 size of full picture. */
+
+    memset (&format, 0, sizeof (format)); /* defaults */
+
+    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    format.fmt.pix.width = cropcap.defrect.width >> 1;
+    format.fmt.pix.height = cropcap.defrect.height >> 1;
+    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
+
+    if (-1 == ioctl (fd, VIDIOC_S_FMT, &format)) {
+        perror ("VIDIOC_S_FMT");
+        exit (EXIT_FAILURE);
+    }
+
+    /* We could check the actual image size now, the actual scaling factor
+       or if the driver can scale at all. */
+
+Example: Selecting an output area
+=================================
+
+.. note:: This example assumes an output device.
+
+.. code-block:: c
+
+    struct v4l2_cropcap cropcap;
+    struct v4l2_crop crop;
+
+    memset (&cropcap, 0, sizeof (cropcap));
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+
+    if (-1 == ioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+        perror ("VIDIOC_CROPCAP");
+        exit (EXIT_FAILURE);
+    }
+
+    memset (&crop, 0, sizeof (crop));
+
+    crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+    crop.c = cropcap.defrect;
+
+    /* Scale the width and height to 50 % of their original size
+       and center the output. */
+
+    crop.c.width /= 2;
+    crop.c.height /= 2;
+    crop.c.left += crop.c.width / 2;
+    crop.c.top += crop.c.height / 2;
+
+    /* Ignore if cropping is not supported (EINVAL). */
+
+    if (-1 == ioctl (fd, VIDIOC_S_CROP, &crop)
+        && errno != EINVAL) {
+        perror ("VIDIOC_S_CROP");
+        exit (EXIT_FAILURE);
+    }
+
+Example: Current scaling factor and pixel aspect
+================================================
+
+.. note:: This example assumes a video capture device.
+
+.. code-block:: c
+
+    struct v4l2_cropcap cropcap;
+    struct v4l2_crop crop;
+    struct v4l2_format format;
+    double hscale, vscale;
+    double aspect;
+    int dwidth, dheight;
+
+    memset (&cropcap, 0, sizeof (cropcap));
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+        perror ("VIDIOC_CROPCAP");
+        exit (EXIT_FAILURE);
+    }
+
+    memset (&crop, 0, sizeof (crop));
+    crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_G_CROP, &crop)) {
+        if (errno != EINVAL) {
+            perror ("VIDIOC_G_CROP");
+            exit (EXIT_FAILURE);
+        }
+
+        /* Cropping not supported. */
+        crop.c = cropcap.defrect;
+    }
+
+    memset (&format, 0, sizeof (format));
+    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    if (-1 == ioctl (fd, VIDIOC_G_FMT, &format)) {
+        perror ("VIDIOC_G_FMT");
+        exit (EXIT_FAILURE);
+    }
+
+    /* The scaling applied by the driver. */
+
+    hscale = format.fmt.pix.width / (double) crop.c.width;
+    vscale = format.fmt.pix.height / (double) crop.c.height;
+
+    aspect = cropcap.pixelaspect.numerator /
+             (double) cropcap.pixelaspect.denominator;
+    aspect = aspect * hscale / vscale;
+
+    /* Devices following ITU-R BT.601 do not capture
+       square pixels. 
For playback on a computer monitor + we should scale the images to this size. */ + + dwidth = format.fmt.pix.width / aspect; + dheight = format.fmt.pix.height; diff --git a/Documentation/media/uapi/v4l/crop.svg b/Documentation/media/uapi/v4l/crop.svg new file mode 100644 index 000000000..3878fe4c4 --- /dev/null +++ b/Documentation/media/uapi/v4l/crop.svg @@ -0,0 +1,282 @@ + + + +image/svg+xmlv4l2_cropcap.bounds +v4l2_cropcap.defrect +v4l2_crop.c +v4l2_format + + diff --git a/Documentation/media/uapi/v4l/depth-formats.rst b/Documentation/media/uapi/v4l/depth-formats.rst new file mode 100644 index 000000000..d1641e968 --- /dev/null +++ b/Documentation/media/uapi/v4l/depth-formats.rst @@ -0,0 +1,16 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _depth-formats: + +************* +Depth Formats +************* + +Depth data provides distance to points, mapped onto the image plane + + +.. toctree:: + :maxdepth: 1 + + pixfmt-inzi + pixfmt-z16 diff --git a/Documentation/media/uapi/v4l/dev-capture.rst b/Documentation/media/uapi/v4l/dev-capture.rst new file mode 100644 index 000000000..4218742ab --- /dev/null +++ b/Documentation/media/uapi/v4l/dev-capture.rst @@ -0,0 +1,104 @@ +.. -*- coding: utf-8; mode: rst -*- + +.. _capture: + +*********************** +Video Capture Interface +*********************** + +Video capture devices sample an analog video signal and store the +digitized images in memory. Today nearly all devices can capture at full +25 or 30 frames/second. With this interface applications can control the +capture process and move images from the driver into user space. + +Conventionally V4L2 video capture devices are accessed through character +device special files named ``/dev/video`` and ``/dev/video0`` to +``/dev/video63`` with major number 81 and minor numbers 0 to 63. +``/dev/video`` is typically a symbolic link to the preferred video +device. + +.. note:: The same device file names are used for video output devices. + + +Querying Capabilities +===================== + +Devices supporting the video capture interface set the +``V4L2_CAP_VIDEO_CAPTURE`` or ``V4L2_CAP_VIDEO_CAPTURE_MPLANE`` flag in +the ``capabilities`` field of struct +:c:type:`v4l2_capability` returned by the +:ref:`VIDIOC_QUERYCAP` ioctl. As secondary device +functions they may also support the :ref:`video overlay ` +(``V4L2_CAP_VIDEO_OVERLAY``) and the :ref:`raw VBI capture ` +(``V4L2_CAP_VBI_CAPTURE``) interface. At least one of the read/write or +streaming I/O methods must be supported. Tuners and audio inputs are +optional. + + +Supplemental Functions +====================== + +Video capture devices shall support :ref:`audio input