-----BEGIN PGP SIGNATURE-----
 
 iQIcBAABAgAGBQJcgUDlAAoJEAhfPr2O5OEV2kIP/AiHMkMGi/fXmwzN0tFjYkim
 39t6rodj6rT/oMib4XvW55GjQy5sdXwz+1jE+kZA5imbUvt6YzUXFBzIBOGOIF0n
 1MukKa7M6ragnm2yR+42ucBr3jcuc91/keeVzWgP2cgeZeKUlBHme+rECYnwqDdT
 9rcG4U2XL0Wolbm4lAispaWYIYoOURvPeryJ244vlPmch5/2nmXbG7AgNlfJsAw4
 NFmdHBWxLeyB8F95ToikhuNlTWrsvdVHPHbDaDPwioSulZ1vw+lu4CHRd1uZo2iH
 W0INE65ukgyenzTDbmnj5/oWCqV4KRTs8A2x6eimz+wG/60jWQjDiBLSzhxjBH7x
 alrwhxnW3bD31ZUCkmaGd1+3txvLf+Lup9lLX3GCBKA45dW9pzVCLfxSfNaKKlTL
 0xCYSMxl5xbl8TL6hHxK7/n+LsButgTRWIoJpqkM9uPrljwzznpgqJvARqSuHEKJ
 3Tvnkc2DZsmlM8L02i929BsrsoTncm6wBBVlCJzhL0VNaOuL7yJVzXhrw7b/dZZw
 IZu6cH5RrZhIQR4y1UPlaEZoidUGvR0+K997AsURIHJA0RolWE5eI2JHSE86EX8S
 bzG5SChkQmbpYt5OXQvg5VxvqVElx/5/tamcHe/rKwaAwaG9aI9HICgP2e0Zaoce
 YOMJUpcHtSY5Fedk8P1a
 =tD1x
 -----END PGP SIGNATURE-----

Merge tag 'media/v5.1-1' of git://git.kernel.org/pub/scm/linux/kernel/git/mchehab/linux-media

Pull media updates from Mauro Carvalho Chehab:

 - remove sensor drivers that got converted from soc_camera

 - remaining soc_camera drivers got moved to staging

 - some documentation cleanups and improvements

 - the imx staging driver now supports imx7

 - the ov9640, mt9m001 and mt9m111 got converted from soc_camera

 - the vim2m driver now does what an m2m convert driver expects to do

 - epoll() fixes on media subsystems

 - several driver fixes, typos, cleanups and improvements

* tag 'media/v5.1-1' of git://git.kernel.org/pub/scm/linux/kernel/git/mchehab/linux-media: (346 commits)
  media: dvb/earth-pt1: fix wrong initialization for demod blocks
  media: vim2m: Address some coding style issues
  media: vim2m: don't use BUG()
  media: vim2m: speedup passthrough copy
  media: vim2m: add an horizontal scaler
  media: vim2m: don't accept YUYV anymore as output format
  media: vim2m: add vertical linear scaler
  media: vim2m: better handle cap/out buffers with different sizes
  media: vim2m: use different framesizes for bayer formats
  media: vim2m: add support for VIDIOC_ENUM_FRAMESIZES
  media: vim2m: ensure that width is multiple of two
  media: vim2m: improve debug messages
  media: vim2m: add bayer capture formats
  media: a few more typos at staging, pci, platform, radio and usb
  media: Documentation: fix several typos
  media: staging: fix several typos
  media: include: fix several typos
  media: common: fix several typos
  media: v4l2-core: fix several typos
  media: usb: fix several typos
  ...
This commit is contained in:
Linus Torvalds 2019-03-09 14:45:54 -08:00
Parent 36011ddc78 15d90a6ae9
Commit 96a6de1a54
536 changed files with 15297 additions and 15373 deletions

@ -48,7 +48,16 @@ are numbered as follows.
TXA source 10
TXB source 11
The digital output port nodes must contain at least one endpoint.
The digital output port nodes, when present, shall contain at least one
endpoint. Each of those endpoints shall contain the data-lanes property as
described in video-interfaces.txt.
Required source endpoint properties:
- data-lanes: an array of physical data lane indexes
The accepted value(s) for this property depends on which of the two
sources are described. For TXA 1, 2 or 4 data lanes can be described
while for TXB only 1 data lane is valid. See video-interfaces.txt
for detailed description.
Ports are optional if they are not connected to anything at the hardware level.

@ -0,0 +1,20 @@
* Melexis MLX90640 FIR Sensor
Melexis MLX90640 FIR sensor support which allows recording of thermal data
with 32x24 resolution excluding 2 lines of coefficient data that is used by
userspace to render processed frames.
Required Properties:
- compatible : Must be "melexis,mlx90640"
- reg : i2c address of the device
Example:
i2c0@1c22000 {
...
mlx90640@33 {
compatible = "melexis,mlx90640";
reg = <0x33>;
};
...
};

@ -0,0 +1,38 @@
MT9M001: 1/2-Inch Megapixel Digital Image Sensor
The MT9M001 is an SXGA-format with a 1/2-inch CMOS active-pixel digital
image sensor. It is programmable through I2C interface.
Required Properties:
- compatible: shall be "onnn,mt9m001".
- clocks: reference to the master clock into sensor
Optional Properties:
- reset-gpios: GPIO handle which is connected to the reset pin of the chip.
Active low.
- standby-gpios: GPIO handle which is connected to the standby pin of the chip.
Active high.
The device node must contain one 'port' child node with one 'endpoint' child
sub-node for its digital output video port, in accordance with the video
interface bindings defined in:
Documentation/devicetree/bindings/media/video-interfaces.txt
Example:
&i2c1 {
camera-sensor@5d {
compatible = "onnn,mt9m001";
reg = <0x5d>;
reset-gpios = <&gpio0 0 GPIO_ACTIVE_LOW>;
standby-gpios = <&gpio0 1 GPIO_ACTIVE_HIGH>;
clocks = <&camera_clk>;
port {
mt9m001_out: endpoint {
remote-endpoint = <&vcap_in>;
};
};
};
};

@ -26,9 +26,9 @@ Example:
&i2c1 {
...
ov5645: ov5645@78 {
ov5645: ov5645@3c {
compatible = "ovti,ov5645";
reg = <0x78>;
reg = <0x3c>;
enable-gpios = <&gpio1 6 GPIO_ACTIVE_HIGH>;
reset-gpios = <&gpio5 20 GPIO_ACTIVE_LOW>;
@ -37,7 +37,7 @@ Example:
clocks = <&clks 200>;
clock-names = "xclk";
clock-frequency = <23880000>;
clock-frequency = <24000000>;
vdddo-supply = <&camera_dovdd_1v8>;
vdda-supply = <&camera_avdd_2v8>;

@ -0,0 +1,45 @@
Freescale i.MX7 CMOS Sensor Interface
=====================================
csi node
--------
This is device node for the CMOS Sensor Interface (CSI) which enables the chip
to connect directly to external CMOS image sensors.
Required properties:
- compatible : "fsl,imx7-csi";
- reg : base address and length of the register set for the device;
- interrupts : should contain CSI interrupt;
- clocks : list of clock specifiers, see
Documentation/devicetree/bindings/clock/clock-bindings.txt for details;
- clock-names : must contain "axi", "mclk" and "dcic" entries, matching
entries in the clock property;
The device node shall contain one 'port' child node with one child 'endpoint'
node, according to the bindings defined in:
Documentation/devicetree/bindings/media/video-interfaces.txt.
In the following example a remote endpoint is a video multiplexer.
example:
csi: csi@30710000 {
#address-cells = <1>;
#size-cells = <0>;
compatible = "fsl,imx7-csi";
reg = <0x30710000 0x10000>;
interrupts = <GIC_SPI 7 IRQ_TYPE_LEVEL_HIGH>;
clocks = <&clks IMX7D_CLK_DUMMY>,
<&clks IMX7D_CSI_MCLK_ROOT_CLK>,
<&clks IMX7D_CLK_DUMMY>;
clock-names = "axi", "mclk", "dcic";
port {
csi_from_csi_mux: endpoint {
remote-endpoint = <&csi_mux_to_csi>;
};
};
};

@ -0,0 +1,90 @@
Freescale i.MX7 Mipi CSI2
=========================
mipi_csi2 node
--------------
This is the device node for the MIPI CSI-2 receiver core in i.MX7 SoC. It is
compatible with previous version of Samsung D-phy.
Required properties:
- compatible : "fsl,imx7-mipi-csi2";
- reg : base address and length of the register set for the device;
- interrupts : should contain MIPI CSIS interrupt;
- clocks : list of clock specifiers, see
Documentation/devicetree/bindings/clock/clock-bindings.txt for details;
- clock-names : must contain "pclk", "wrap" and "phy" entries, matching
entries in the clock property;
- power-domains : a phandle to the power domain, see
Documentation/devicetree/bindings/power/power_domain.txt for details.
- reset-names : should include following entry "mrst";
- resets : a list of phandle, should contain reset entry of
reset-names;
- phy-supply : from the generic phy bindings, a phandle to a regulator that
provides power to MIPI CSIS core;
Optional properties:
- clock-frequency : The IP's main (system bus) clock frequency in Hz, default
value when this property is not specified is 166 MHz;
- fsl,csis-hs-settle : differential receiver (HS-RX) settle time;
The device node should contain two 'port' child nodes with one child 'endpoint'
node, according to the bindings defined in:
Documentation/devicetree/bindings/media/video-interfaces.txt.
The following are properties specific to those nodes.
port node
---------
- reg : (required) can take the values 0 or 1, where 0 shall be
related to the sink port and port 1 shall be the source
one;
endpoint node
-------------
- data-lanes : (required) an array specifying active physical MIPI-CSI2
data input lanes and their mapping to logical lanes; this
shall only be applied to port 0 (sink port), the array's
content is unused only its length is meaningful,
in this case the maximum length supported is 2;
example:
mipi_csi: mipi-csi@30750000 {
#address-cells = <1>;
#size-cells = <0>;
compatible = "fsl,imx7-mipi-csi2";
reg = <0x30750000 0x10000>;
interrupts = <GIC_SPI 25 IRQ_TYPE_LEVEL_HIGH>;
clocks = <&clks IMX7D_IPG_ROOT_CLK>,
<&clks IMX7D_MIPI_CSI_ROOT_CLK>,
<&clks IMX7D_MIPI_DPHY_ROOT_CLK>;
clock-names = "pclk", "wrap", "phy";
clock-frequency = <166000000>;
power-domains = <&pgc_mipi_phy>;
phy-supply = <&reg_1p0d>;
resets = <&src IMX7_RESET_MIPI_PHY_MRST>;
reset-names = "mrst";
fsl,csis-hs-settle = <3>;
port@0 {
reg = <0>;
mipi_from_sensor: endpoint {
remote-endpoint = <&ov2680_to_mipi>;
data-lanes = <1>;
};
};
port@1 {
reg = <1>;
mipi_vc0_to_csi_mux: endpoint {
remote-endpoint = <&csi_mux_from_mipi_vc0>;
};
};
};

@ -66,6 +66,15 @@ vcodec_dec: vcodec@16000000 {
"vencpll",
"venc_lt_sel",
"vdec_bus_clk_src";
assigned-clocks = <&topckgen CLK_TOP_VENC_LT_SEL>,
<&topckgen CLK_TOP_CCI400_SEL>,
<&topckgen CLK_TOP_VDEC_SEL>,
<&apmixedsys CLK_APMIXED_VCODECPLL>,
<&apmixedsys CLK_APMIXED_VENCPLL>;
assigned-clock-parents = <&topckgen CLK_TOP_VCODECPLL_370P5>,
<&topckgen CLK_TOP_UNIVPLL_D2>,
<&topckgen CLK_TOP_VCODECPLL>;
assigned-clock-rates = <0>, <0>, <0>, <1482000000>, <800000000>;
};
vcodec_enc: vcodec@18002000 {
@ -105,4 +114,8 @@ vcodec_dec: vcodec@16000000 {
"venc_sel",
"venc_lt_sel_src",
"venc_lt_sel";
assigned-clocks = <&topckgen CLK_TOP_VENC_SEL>,
<&topckgen CLK_TOP_VENC_LT_SEL>;
assigned-clock-parents = <&topckgen CLK_TOP_VENCPLL_D2>,
<&topckgen CLK_TOP_UNIVPLL1_D2>;
};

@ -7,12 +7,13 @@ family of devices.
Each VIN instance has a single parallel input that supports RGB and YUV video,
with both external synchronization and BT.656 synchronization for the latter.
Depending on the instance the VIN input is connected to external SoC pins, or
on Gen3 platforms to a CSI-2 receiver.
on Gen3 and RZ/G2 platforms to a CSI-2 receiver.
- compatible: Must be one or more of the following
- "renesas,vin-r8a7743" for the R8A7743 device
- "renesas,vin-r8a7744" for the R8A7744 device
- "renesas,vin-r8a7745" for the R8A7745 device
- "renesas,vin-r8a774c0" for the R8A774C0 device
- "renesas,vin-r8a7778" for the R8A7778 device
- "renesas,vin-r8a7779" for the R8A7779 device
- "renesas,vin-r8a7790" for the R8A7790 device
@ -61,10 +62,10 @@ The per-board settings Gen2 platforms:
- data-enable-active: polarity of CLKENB signal, see [1] for
description. Default is active high.
The per-board settings Gen3 platforms:
The per-board settings Gen3 and RZ/G2 platforms:
Gen3 platforms can support both a single connected parallel input source
from external SoC pins (port@0) and/or multiple parallel input sources
Gen3 and RZ/G2 platforms can support both a single connected parallel input
source from external SoC pins (port@0) and/or multiple parallel input sources
from local SoC CSI-2 receivers (port@1) depending on SoC.
- renesas,id - ID number of the VIN, VINx in the documentation.

@ -2,8 +2,9 @@ Renesas R-Car Frame Compression Processor (FCP)
-----------------------------------------------
The FCP is a companion module of video processing modules in the Renesas R-Car
Gen3 SoCs. It provides data compression and decompression, data caching, and
conversion of AXI transactions in order to reduce the memory bandwidth.
Gen3 and RZ/G2 SoCs. It provides data compression and decompression, data
caching, and conversion of AXI transactions in order to reduce the memory
bandwidth.
There are three types of FCP: FCP for Codec (FCPC), FCP for VSP (FCPV) and FCP
for FDP (FCPF). Their configuration and behaviour depend on the module they

@ -2,12 +2,13 @@ Renesas R-Car MIPI CSI-2
------------------------
The R-Car CSI-2 receiver device provides MIPI CSI-2 capabilities for the
Renesas R-Car family of devices. It is used in conjunction with the
Renesas R-Car and RZ/G2 family of devices. It is used in conjunction with the
R-Car VIN module, which provides the video capture capabilities.
Mandatory properties
--------------------
- compatible: Must be one or more of the following
- "renesas,r8a774c0-csi2" for the R8A774C0 device.
- "renesas,r8a7795-csi2" for the R8A7795 device.
- "renesas,r8a7796-csi2" for the R8A7796 device.
- "renesas,r8a77965-csi2" for the R8A77965 device.

@ -2,13 +2,13 @@
The VSP is a video processing engine that supports up-/down-scaling, alpha
blending, color space conversion and various other image processing features.
It can be found in the Renesas R-Car second generation SoCs.
It can be found in the Renesas R-Car Gen2, R-Car Gen3, RZ/G1, and RZ/G2 SoCs.
Required properties:
- compatible: Must contain one of the following values
- "renesas,vsp1" for the R-Car Gen2 VSP1
- "renesas,vsp2" for the R-Car Gen3 VSP2
- "renesas,vsp1" for the R-Car Gen2 and RZ/G1 VSP1
- "renesas,vsp2" for the R-Car Gen3 and RZ/G2 VSP2
- reg: Base address and length of the registers block for the VSP.
- interrupts: VSP interrupt specifier.

@ -0,0 +1,26 @@
* Silicon Labs FM Radio receiver
The Silicon Labs Si470x is family of FM radio receivers with receive power scan
supporting 76-108 MHz, programmable through an I2C interface.
Some of them includes an RDS encoder.
Required Properties:
- compatible: Should contain "silabs,si470x"
- reg: the I2C address of the device
Optional Properties:
- interrupts : The interrupt number
- reset-gpios: GPIO specifier for the chips reset line
Example:
&i2c2 {
si470x@63 {
compatible = "silabs,si470x";
reg = <0x63>;
interrupt-parent = <&gpj2>;
interrupts = <4 IRQ_TYPE_EDGE_FALLING>;
reset-gpios = <&gpj2 5 GPIO_ACTIVE_HIGH>;
};
};

@ -6,8 +6,9 @@ Allwinner V3s SoC features a CSI module(CSI1) with parallel interface.
Required properties:
- compatible: value must be one of:
* "allwinner,sun6i-a31-csi"
* "allwinner,sun8i-h3-csi", "allwinner,sun6i-a31-csi"
* "allwinner,sun8i-h3-csi"
* "allwinner,sun8i-v3s-csi"
* "allwinner,sun50i-a64-csi"
- reg: base address and size of the memory-mapped region.
- interrupts: interrupt associated to this IP
- clocks: phandles to the clocks feeding the CSI

@ -125,7 +125,7 @@ https://linuxtv.org/wiki/index.php/DVB_USB
2004-12-26
- refactored the dibusb-driver, splitted into separate files
- refactored the dibusb-driver, split into separate files
- i2c-probing enabled
2004-12-06

@ -12,7 +12,7 @@ Digital TV devices are implemented by several different drivers:
- Frontend drivers that are usually implemented as two separate drivers:
- A tuner driver that implements the logic with commands the part of the
hardware with is reponsible to tune into a digital TV transponder or
hardware with is responsible to tune into a digital TV transponder or
physical channel. The output of a tuner is usually a baseband or
Intermediate Frequency (IF) signal;

@ -328,7 +328,7 @@ Statistics collect
On almost all frontend hardware, the bit and byte counts are stored by
the hardware after a certain amount of time or after the total bit/block
counter reaches a certain value (usually programable), for example, on
counter reaches a certain value (usually programmable), for example, on
every 1000 ms or after receiving 1,000,000 bits.
So, if you read the registers too soon, you'll end by reading the same

@ -60,7 +60,7 @@ Drivers initialize entity pads by calling
Drivers register entities with a media device by calling
:c:func:`media_device_register_entity()`
and unregistred by calling
and unregistered by calling
:c:func:`media_device_unregister_entity()`.
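A minimal kernel-side sketch of that sequence (not part of the patch shown here; the ``mydrv`` names are illustrative): the pads are initialised first, then the entity is registered with an already registered :c:type:`media_device`.

.. code-block:: c

    #include <media/media-device.h>
    #include <media/media-entity.h>

    static struct media_pad mydrv_pad = { .flags = MEDIA_PAD_FL_SOURCE };

    /* Register one single-pad source entity with a registered media_device. */
    static int mydrv_register_entity(struct media_device *mdev,
                                     struct media_entity *entity)
    {
            int ret;

            entity->name = "mydrv source";
            entity->function = MEDIA_ENT_F_IO_V4L;

            ret = media_entity_pads_init(entity, 1, &mydrv_pad);
            if (ret)
                    return ret;

            return media_device_register_entity(mdev, entity);
    }

    /* Tear-down mirrors the registration above. */
    static void mydrv_unregister_entity(struct media_entity *entity)
    {
            media_device_unregister_entity(entity);
    }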
Interfaces

@ -93,7 +93,7 @@ You can iterate over all registered devices as follows:
int err;
/* Find driver 'ivtv' on the PCI bus.
pci_bus_type is a global. For USB busses use usb_bus_type. */
pci_bus_type is a global. For USB buses use usb_bus_type. */
drv = driver_find("ivtv", &pci_bus_type);
/* iterate over all ivtv device instances */
err = driver_for_each_device(drv, NULL, p, callback);

@ -11,7 +11,7 @@ hardware: most devices have multiple ICs, export multiple device nodes in
Especially the fact that V4L2 drivers have to setup supporting ICs to
do audio/video muxing/encoding/decoding makes it more complex than most.
Usually these ICs are connected to the main bridge driver through one or
more I2C busses, but other busses can also be used. Such devices are
more I2C buses, but other buses can also be used. Such devices are
called 'sub-devices'.
For a long time the framework was limited to the video_device struct for

@ -23,7 +23,7 @@ device data.
You also need a way to go from the low-level struct to :c:type:`v4l2_subdev`.
For the common i2c_client struct the i2c_set_clientdata() call is used to store
a :c:type:`v4l2_subdev` pointer, for other busses you may have to use other
a :c:type:`v4l2_subdev` pointer, for other buses you may have to use other
methods.
Bridges might also need to store per-subdev private data, such as a pointer to
@ -33,7 +33,7 @@ provides host private data for that purpose that can be accessed with
From the bridge driver perspective, you load the sub-device module and somehow
obtain the :c:type:`v4l2_subdev` pointer. For i2c devices this is easy: you call
``i2c_get_clientdata()``. For other busses something similar needs to be done.
``i2c_get_clientdata()``. For other buses something similar needs to be done.
Helper functions exists for sub-devices on an I2C bus that do most of this
tricky work for you.
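A hedged sketch of that round trip (not part of the patch shown here; the ``foo`` names are placeholders). ``v4l2_i2c_subdev_init()`` stores the sub-device pointer with ``i2c_set_clientdata()``, so the bridge driver can later recover it from the ``i2c_client``:

.. code-block:: c

    #include <linux/i2c.h>
    #include <media/v4l2-common.h>
    #include <media/v4l2-subdev.h>

    struct foo_state {
            struct v4l2_subdev sd;
    };

    static const struct v4l2_subdev_ops foo_ops;

    /* Sub-device driver probe: bind the v4l2_subdev to the i2c_client. */
    static int foo_probe(struct i2c_client *client)
    {
            struct foo_state *state;

            state = devm_kzalloc(&client->dev, sizeof(*state), GFP_KERNEL);
            if (!state)
                    return -ENOMEM;

            v4l2_i2c_subdev_init(&state->sd, client, &foo_ops);
            return 0;
    }

    /* Bridge driver side: recover the sub-device from the client. */
    static struct v4l2_subdev *foo_get_subdev(struct i2c_client *client)
    {
            return i2c_get_clientdata(client);
    }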

@ -60,6 +60,9 @@ ignore symbol RC_PROTO_SHARP
ignore symbol RC_PROTO_XMP
ignore symbol RC_PROTO_CEC
ignore symbol RC_PROTO_IMON
ignore symbol RC_PROTO_RCMM12
ignore symbol RC_PROTO_RCMM24
ignore symbol RC_PROTO_RCMM32
# Undocumented macros

@ -57,7 +57,7 @@ Description
This ioctl call asks the Audio Device to bypass the Audio decoder and
forward the stream without decoding. This mode shall be used if streams
that cant be handled by the Digial TV system shall be decoded. Dolby
that cant be handled by the Digital TV system shall be decoded. Dolby
DigitalTM streams are automatically forwarded by the Digital TV subsystem if
the hardware can handle it.

@ -39,7 +39,7 @@ Description
-----------
CA_SET_DESCR is used for feeding descrambler CA slots with descrambling
keys (refered as control words).
keys (referred as control words).
Return Value
------------

@ -61,7 +61,7 @@ the device is closed.
Applications call the ``DMX_DQBUF`` ioctl to dequeue a filled
(capturing) buffer from the driver's outgoing queue.
They just set the ``index`` field withe the buffer ID to be queued.
They just set the ``index`` field with the buffer ID to be queued.
When ``DMX_DQBUF`` is called with a pointer to struct :c:type:`dmx_buffer`,
the driver fills the remaining fields or returns an error code.
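As a hedged illustration (not part of the patch shown here), the dequeue step can look as follows, assuming the demux file descriptor already has streaming buffers set up:

.. code-block:: c

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/dvb/dmx.h>

    static int demux_dqbuf(int demux_fd, struct dmx_buffer *buf)
    {
            memset(buf, 0, sizeof(*buf));

            /* On success the driver fills index, bytesused and offset. */
            return ioctl(demux_fd, DMX_DQBUF, buf);
    }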

@ -44,7 +44,7 @@ with supports all digital TV delivery systems.
struct :c:type:`dvb_frontend_parameters`.
2. Don't use DVB API version 3 calls on hardware with supports
newer standards. Such API provides no suport or a very limited
newer standards. Such API provides no support or a very limited
support to new standards and/or new hardware.
3. Nowadays, most frontends support multiple delivery systems.

@ -202,7 +202,7 @@ If video_blank is set video will be blanked out if the channel is
changed or if playback is stopped. Otherwise, the last picture will be
displayed. play_state indicates if the video is currently frozen,
stopped, or being played back. The stream_source corresponds to the
seleted source for the video stream. It can come either from the
selected source for the video stream. It can come either from the
demultiplexer or from memory. The video_format indicates the aspect
ratio (one of 4:3 or 16:9) of the currently played video stream.
Finally, display_format corresponds to the selected cropping mode in

@ -363,7 +363,7 @@ various documents with a single copy that is included in the collection,
provided that you follow the rules of this License for verbatim copying
of each of the documents in all other respects.
You may extract a single document from such a collection, and dispbibute
You may extract a single document from such a collection, and distribute
it individually under this License, provided you insert a copy of this
License into the extracted document, and follow this License in all
other respects regarding verbatim copying of that document.

@ -164,7 +164,7 @@ Types and flags used to represent the media graph elements
* - ``MEDIA_ENT_F_PROC_VIDEO_PIXEL_ENC_CONV``
- Video pixel encoding converter. An entity capable of pixel
enconding conversion must have at least one sink pad and one
encoding conversion must have at least one sink pad and one
source pad, and convert the encoding of pixels received on
its sink pad(s) to a different encoding output on its source
pad(s). Pixel encoding conversion includes but isn't limited

@ -91,7 +91,7 @@ A request must contain at least one buffer, otherwise ``ENOENT`` is returned.
A queued request cannot be modified anymore.
.. caution::
For :ref:`memory-to-memory devices <codec>` you can use requests only for
For :ref:`memory-to-memory devices <mem2mem>` you can use requests only for
output buffers, not for capture buffers. Attempting to add a capture buffer
to a request will result in an ``EACCES`` error.
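A hedged userspace sketch of that rule (not part of the patch shown here): the request carries only an OUTPUT buffer, while CAPTURE buffers are queued normally. The ``media_fd``, ``video_fd`` and buffer index are assumed to exist already:

.. code-block:: c

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/media.h>
    #include <linux/videodev2.h>

    static int queue_output_in_request(int media_fd, int video_fd, __u32 index)
    {
            struct v4l2_buffer buf;
            int request_fd;

            /* Allocate an empty request on the media device. */
            if (ioctl(media_fd, MEDIA_IOC_REQUEST_ALLOC, &request_fd) < 0)
                    return -1;

            memset(&buf, 0, sizeof(buf));
            buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = index;
            buf.flags = V4L2_BUF_FLAG_REQUEST_FD;   /* bind the buffer to the request */
            buf.request_fd = request_fd;

            if (ioctl(video_fd, VIDIOC_QBUF, &buf) < 0)
                    return -1;

            /* Hand the whole request over to the driver. */
            return ioctl(request_fd, MEDIA_REQUEST_IOC_QUEUE, 0);
    }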
@ -152,7 +152,7 @@ if it had just been allocated.
Example for a Codec Device
--------------------------
For use-cases such as :ref:`codecs <codec>`, the request API can be used
For use-cases such as :ref:`codecs <mem2mem>`, the request API can be used
to associate specific controls to
be applied by the driver for the OUTPUT buffer, allowing user-space
to queue many such buffers in advance. It can also take advantage of requests'

@ -385,7 +385,7 @@ the remote via /dev/input/event devices.
- ``KEY_CHANNELDOWN``
- Decrease channel sequencially
- Decrease channel sequentially
- CHANNEL - / CHANNEL DOWN / DOWN
@ -393,7 +393,7 @@ the remote via /dev/input/event devices.
- ``KEY_CHANNELUP``
- Increase channel sequencially
- Increase channel sequentially
- CHANNEL + / CHANNEL UP / UP

@ -230,8 +230,7 @@ struct v4l2_buffer
* - struct :c:type:`v4l2_timecode`
- ``timecode``
-
- When ``type`` is ``V4L2_BUF_TYPE_VIDEO_CAPTURE`` and the
``V4L2_BUF_FLAG_TIMECODE`` flag is set in ``flags``, this
- When the ``V4L2_BUF_FLAG_TIMECODE`` flag is set in ``flags``, this
structure contains a frame timecode. In
:c:type:`V4L2_FIELD_ALTERNATE <v4l2_field>` mode the top and
bottom field contain the same timecode. Timecodes are intended to
@ -714,10 +713,10 @@ enum v4l2_memory
Timecodes
=========
The struct :c:type:`v4l2_timecode` structure is designed to hold a
:ref:`smpte12m` or similar timecode. (struct
struct :c:type:`timeval` timestamps are stored in struct
:c:type:`v4l2_buffer` field ``timestamp``.)
The :c:type:`v4l2_buffer_timecode` structure is designed to hold a
:ref:`smpte12m` or similar timecode.
(struct :c:type:`timeval` timestamps are stored in the struct
:c:type:`v4l2_buffer` ``timestamp`` field.)
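A short hedged sketch of that rule (not part of the patch shown here): only read the timecode of a dequeued buffer when ``V4L2_BUF_FLAG_TIMECODE`` is set:

.. code-block:: c

    #include <stdio.h>
    #include <linux/videodev2.h>

    static void print_timecode(const struct v4l2_buffer *buf)
    {
            const struct v4l2_timecode *tc = &buf->timecode;

            if (!(buf->flags & V4L2_BUF_FLAG_TIMECODE))
                    return;

            /* Hours, minutes, seconds and frames of an SMPTE 12M-style code. */
            printf("%02u:%02u:%02u.%02u\n",
                   tc->hours, tc->minutes, tc->seconds, tc->frames);
    }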
.. c:type:: v4l2_timecode

@ -46,6 +46,17 @@ applicable to all devices.
dv-timings
control
extended-controls
ext-ctrls-camera
ext-ctrls-flash
ext-ctrls-image-source
ext-ctrls-image-process
ext-ctrls-codec
ext-ctrls-jpeg
ext-ctrls-dv
ext-ctrls-rf-tuner
ext-ctrls-fm-tx
ext-ctrls-fm-rx
ext-ctrls-detect
format
planar-apis
selection-api

@ -499,7 +499,7 @@ Example: Changing controls
.. [#f1]
The use of ``V4L2_CID_PRIVATE_BASE`` is problematic because different
drivers may use the same ``V4L2_CID_PRIVATE_BASE`` ID for different
controls. This makes it hard to programatically set such controls
controls. This makes it hard to programmatically set such controls
since the meaning of the control with that ID is driver dependent. In
order to resolve this drivers use unique IDs and the
``V4L2_CID_PRIVATE_BASE`` IDs are mapped to those unique IDs by the

@ -1,28 +0,0 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _effect:
************************
Effect Devices Interface
************************
.. note::
This interface has been be suspended from the V4L2 API.
The implementation for such effects should be done
via mem2mem devices.
A V4L2 video effect device can do image effects, filtering, or combine
two or more images or image streams. For example video transitions or
wipes. Applications send data to be processed and receive the result
data either with :ref:`read() <func-read>` and
:ref:`write() <func-write>` functions, or through the streaming I/O
mechanism.
[to do]

@ -7,37 +7,36 @@
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _codec:
.. _mem2mem:
***************
Codec Interface
***************
********************************
Video Memory-To-Memory Interface
********************************
A V4L2 codec can compress, decompress, transform, or otherwise convert
video data from one format into another format, in memory. Typically
such devices are memory-to-memory devices (i.e. devices with the
``V4L2_CAP_VIDEO_M2M`` or ``V4L2_CAP_VIDEO_M2M_MPLANE`` capability set).
A V4L2 memory-to-memory device can compress, decompress, transform, or
otherwise convert video data from one format into another format, in memory.
Such memory-to-memory devices set the ``V4L2_CAP_VIDEO_M2M`` or
``V4L2_CAP_VIDEO_M2M_MPLANE`` capability. Examples of memory-to-memory
devices are codecs, scalers, deinterlacers or format converters (i.e.
converting from YUV to RGB).
A memory-to-memory video node acts just like a normal video node, but it
supports both output (sending frames from memory to the codec hardware)
and capture (receiving the processed frames from the codec hardware into
supports both output (sending frames from memory to the hardware)
and capture (receiving the processed frames from the hardware into
memory) stream I/O. An application will have to setup the stream I/O for
both sides and finally call :ref:`VIDIOC_STREAMON <VIDIOC_STREAMON>`
for both capture and output to start the codec.
Video compression codecs use the MPEG controls to setup their codec
parameters
.. note::
The MPEG controls actually support many more codecs than
just MPEG. See :ref:`mpeg-controls`.
for both capture and output to start the hardware.
Memory-to-memory devices function as a shared resource: you can
open the video node multiple times, each application setting up their
own codec properties that are local to the file handle, and each can use
own properties that are local to the file handle, and each can use
it independently from the others. The driver will arbitrate access to
the codec and reprogram it whenever another file handler gets access.
the hardware and reprogram it whenever another file handler gets access.
This is different from the usual video node behavior where the video
properties are global to the device (i.e. changing something through one
file handle is visible through another file handle).
One of the most common memory-to-memory device is the codec. Codecs
are more complicated than most and require additional setup for
their codec parameters. This is done through codec controls.
See :ref:`mpeg-controls`.
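A minimal, hedged userspace sketch of that flow (not part of the patch shown here): one format per queue, then ``VIDIOC_STREAMON`` on both the OUTPUT and CAPTURE sides. Buffer allocation and queueing are omitted, and the resolutions and pixel formats are only illustrative:

.. code-block:: c

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static int m2m_start(int fd)
    {
            struct v4l2_format fmt;
            enum v4l2_buf_type out = V4L2_BUF_TYPE_VIDEO_OUTPUT;
            enum v4l2_buf_type cap = V4L2_BUF_TYPE_VIDEO_CAPTURE;

            /* OUTPUT queue: frames the application feeds to the hardware. */
            memset(&fmt, 0, sizeof(fmt));
            fmt.type = out;
            fmt.fmt.pix.width = 640;
            fmt.fmt.pix.height = 480;
            fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
            if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
                    return -1;

            /* CAPTURE queue: processed frames coming back from the hardware. */
            memset(&fmt, 0, sizeof(fmt));
            fmt.type = cap;
            fmt.fmt.pix.width = 640;
            fmt.fmt.pix.height = 480;
            fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565;
            if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
                    return -1;

            /* Processing only starts once both sides are streaming. */
            if (ioctl(fd, VIDIOC_STREAMON, &out) < 0)
                    return -1;
            return ioctl(fd, VIDIOC_STREAMON, &cap);
    }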

@ -1,41 +0,0 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _ttx:
******************
Teletext Interface
******************
This interface was aimed at devices receiving and demodulating Teletext
data [:ref:`ets300706`, :ref:`itu653`], evaluating the Teletext
packages and storing formatted pages in cache memory. Such devices are
usually implemented as microcontrollers with serial interface
(I\ :sup:`2`\ C) and could be found on old TV cards, dedicated Teletext
decoding cards and home-brew devices connected to the PC parallel port.
The Teletext API was designed by Martin Buck. It was defined in the
kernel header file ``linux/videotext.h``, the specification is available
from
`ftp://ftp.gwdg.de/pub/linux/misc/videotext/ <ftp://ftp.gwdg.de/pub/linux/misc/videotext/>`__.
(Videotext is the name of the German public television Teletext
service.)
Eventually the Teletext API was integrated into the V4L API with
character device file names ``/dev/vtx0`` to ``/dev/vtx31``, device
major number 81, minor numbers 192 to 223.
However, teletext decoders were quickly replaced by more generic VBI
demodulators and those dedicated teletext decoders no longer exist. For
many years the vtx devices were still around, even though nobody used
them. So the decision was made to finally remove support for the
Teletext API in kernel 2.6.37.
Modern devices all use the :ref:`raw <raw-vbi>` or
:ref:`sliced` VBI API.

@ -21,11 +21,9 @@ Interfaces
dev-overlay
dev-output
dev-osd
dev-codec
dev-effect
dev-mem2mem
dev-raw-vbi
dev-sliced-vbi
dev-teletext
dev-radio
dev-rds
dev-sdr

@ -0,0 +1,508 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _camera-controls:
************************
Camera Control Reference
************************
The Camera class includes controls for mechanical (or equivalent
digital) features of a device such as controllable lenses or sensors.
.. _camera-control-id:
Camera Control IDs
==================
``V4L2_CID_CAMERA_CLASS (class)``
The Camera class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
.. _v4l2-exposure-auto-type:
``V4L2_CID_EXPOSURE_AUTO``
(enum)
enum v4l2_exposure_auto_type -
Enables automatic adjustments of the exposure time and/or iris
aperture. The effect of manual changes of the exposure time or iris
aperture while these features are enabled is undefined, drivers
should ignore such requests. Possible values are:
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_EXPOSURE_AUTO``
- Automatic exposure time, automatic iris aperture.
* - ``V4L2_EXPOSURE_MANUAL``
- Manual exposure time, manual iris.
* - ``V4L2_EXPOSURE_SHUTTER_PRIORITY``
- Manual exposure time, auto iris.
* - ``V4L2_EXPOSURE_APERTURE_PRIORITY``
- Auto exposure time, manual iris.
``V4L2_CID_EXPOSURE_ABSOLUTE (integer)``
Determines the exposure time of the camera sensor. The exposure time
is limited by the frame interval. Drivers should interpret the
values as 100 µs units, where the value 1 stands for 1/10000th of a
second, 10000 for 1 second and 100000 for 10 seconds.
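A hedged sketch of using these two controls together (not part of the patch shown here): switch to manual exposure, then program a 20 ms exposure time (200 units of 100 µs). The file descriptor is assumed to be an opened video or sub-device node:

.. code-block:: c

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static int set_manual_exposure(int fd)
    {
            struct v4l2_ext_control ctrl[2];
            struct v4l2_ext_controls ctrls;

            memset(ctrl, 0, sizeof(ctrl));
            ctrl[0].id = V4L2_CID_EXPOSURE_AUTO;
            ctrl[0].value = V4L2_EXPOSURE_MANUAL;
            ctrl[1].id = V4L2_CID_EXPOSURE_ABSOLUTE;
            ctrl[1].value = 200;    /* 200 * 100 us = 20 ms */

            memset(&ctrls, 0, sizeof(ctrls));
            ctrls.which = V4L2_CTRL_WHICH_CUR_VAL;
            ctrls.count = 2;
            ctrls.controls = ctrl;

            /* The driver applies the set as a whole or reports error_idx. */
            return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }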
``V4L2_CID_EXPOSURE_AUTO_PRIORITY (boolean)``
When ``V4L2_CID_EXPOSURE_AUTO`` is set to ``AUTO`` or
``APERTURE_PRIORITY``, this control determines if the device may
dynamically vary the frame rate. By default this feature is disabled
(0) and the frame rate must remain constant.
``V4L2_CID_AUTO_EXPOSURE_BIAS (integer menu)``
Determines the automatic exposure compensation, it is effective only
when ``V4L2_CID_EXPOSURE_AUTO`` control is set to ``AUTO``,
``SHUTTER_PRIORITY`` or ``APERTURE_PRIORITY``. It is expressed in
terms of EV, drivers should interpret the values as 0.001 EV units,
where the value 1000 stands for +1 EV.
Increasing the exposure compensation value is equivalent to
decreasing the exposure value (EV) and will increase the amount of
light at the image sensor. The camera performs the exposure
compensation by adjusting absolute exposure time and/or aperture.
.. _v4l2-exposure-metering:
``V4L2_CID_EXPOSURE_METERING``
(enum)
enum v4l2_exposure_metering -
Determines how the camera measures the amount of light available for
the frame exposure. Possible values are:
.. tabularcolumns:: |p{8.5cm}|p{9.0cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_EXPOSURE_METERING_AVERAGE``
- Use the light information coming from the entire frame and average
giving no weighting to any particular portion of the metered area.
* - ``V4L2_EXPOSURE_METERING_CENTER_WEIGHTED``
- Average the light information coming from the entire frame giving
priority to the center of the metered area.
* - ``V4L2_EXPOSURE_METERING_SPOT``
- Measure only very small area at the center of the frame.
* - ``V4L2_EXPOSURE_METERING_MATRIX``
- A multi-zone metering. The light intensity is measured in several
points of the frame and the results are combined. The algorithm of
the zones selection and their significance in calculating the
final value is device dependent.
``V4L2_CID_PAN_RELATIVE (integer)``
This control turns the camera horizontally by the specified amount.
The unit is undefined. A positive value moves the camera to the
right (clockwise when viewed from above), a negative value to the
left. A value of zero does not cause motion. This is a write-only
control.
``V4L2_CID_TILT_RELATIVE (integer)``
This control turns the camera vertically by the specified amount.
The unit is undefined. A positive value moves the camera up, a
negative value down. A value of zero does not cause motion. This is
a write-only control.
``V4L2_CID_PAN_RESET (button)``
When this control is set, the camera moves horizontally to the
default position.
``V4L2_CID_TILT_RESET (button)``
When this control is set, the camera moves vertically to the default
position.
``V4L2_CID_PAN_ABSOLUTE (integer)``
This control turns the camera horizontally to the specified
position. Positive values move the camera to the right (clockwise
when viewed from above), negative values to the left. Drivers should
interpret the values as arc seconds, with valid values between -180
* 3600 and +180 * 3600 inclusive.
``V4L2_CID_TILT_ABSOLUTE (integer)``
This control turns the camera vertically to the specified position.
Positive values move the camera up, negative values down. Drivers
should interpret the values as arc seconds, with valid values
between -180 * 3600 and +180 * 3600 inclusive.
``V4L2_CID_FOCUS_ABSOLUTE (integer)``
This control sets the focal point of the camera to the specified
position. The unit is undefined. Positive values set the focus
closer to the camera, negative values towards infinity.
``V4L2_CID_FOCUS_RELATIVE (integer)``
This control moves the focal point of the camera by the specified
amount. The unit is undefined. Positive values move the focus closer
to the camera, negative values towards infinity. This is a
write-only control.
``V4L2_CID_FOCUS_AUTO (boolean)``
Enables continuous automatic focus adjustments. The effect of manual
focus adjustments while this feature is enabled is undefined,
drivers should ignore such requests.
``V4L2_CID_AUTO_FOCUS_START (button)``
Starts single auto focus process. The effect of setting this control
when ``V4L2_CID_FOCUS_AUTO`` is set to ``TRUE`` (1) is undefined,
drivers should ignore such requests.
``V4L2_CID_AUTO_FOCUS_STOP (button)``
Aborts automatic focusing started with ``V4L2_CID_AUTO_FOCUS_START``
control. It is effective only when the continuous autofocus is
disabled, that is when ``V4L2_CID_FOCUS_AUTO`` control is set to
``FALSE`` (0).
.. _v4l2-auto-focus-status:
``V4L2_CID_AUTO_FOCUS_STATUS (bitmask)``
The automatic focus status. This is a read-only control.
Setting ``V4L2_LOCK_FOCUS`` lock bit of the ``V4L2_CID_3A_LOCK``
control may stop updates of the ``V4L2_CID_AUTO_FOCUS_STATUS``
control value.
.. tabularcolumns:: |p{6.5cm}|p{11.0cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_AUTO_FOCUS_STATUS_IDLE``
- Automatic focus is not active.
* - ``V4L2_AUTO_FOCUS_STATUS_BUSY``
- Automatic focusing is in progress.
* - ``V4L2_AUTO_FOCUS_STATUS_REACHED``
- Focus has been reached.
* - ``V4L2_AUTO_FOCUS_STATUS_FAILED``
- Automatic focus has failed, the driver will not transition from
this state until another action is performed by an application.
.. _v4l2-auto-focus-range:
``V4L2_CID_AUTO_FOCUS_RANGE``
(enum)
enum v4l2_auto_focus_range -
Determines auto focus distance range for which lens may be adjusted.
.. tabularcolumns:: |p{6.5cm}|p{11.0cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_AUTO_FOCUS_RANGE_AUTO``
- The camera automatically selects the focus range.
* - ``V4L2_AUTO_FOCUS_RANGE_NORMAL``
- Normal distance range, limited for best automatic focus
performance.
* - ``V4L2_AUTO_FOCUS_RANGE_MACRO``
- Macro (close-up) auto focus. The camera will use its minimum
possible distance for auto focus.
* - ``V4L2_AUTO_FOCUS_RANGE_INFINITY``
- The lens is set to focus on an object at infinite distance.
``V4L2_CID_ZOOM_ABSOLUTE (integer)``
Specify the objective lens focal length as an absolute value. The
zoom unit is driver-specific and its value should be a positive
integer.
``V4L2_CID_ZOOM_RELATIVE (integer)``
Specify the objective lens focal length relatively to the current
value. Positive values move the zoom lens group towards the
telephoto direction, negative values towards the wide-angle
direction. The zoom unit is driver-specific. This is a write-only
control.
``V4L2_CID_ZOOM_CONTINUOUS (integer)``
Move the objective lens group at the specified speed until it
reaches physical device limits or until an explicit request to stop
the movement. A positive value moves the zoom lens group towards the
telephoto direction. A value of zero stops the zoom lens group
movement. A negative value moves the zoom lens group towards the
wide-angle direction. The zoom speed unit is driver-specific.
``V4L2_CID_IRIS_ABSOLUTE (integer)``
This control sets the camera's aperture to the specified value. The
unit is undefined. Larger values open the iris wider, smaller values
close it.
``V4L2_CID_IRIS_RELATIVE (integer)``
This control modifies the camera's aperture by the specified amount.
The unit is undefined. Positive values open the iris one step
further, negative values close it one step further. This is a
write-only control.
``V4L2_CID_PRIVACY (boolean)``
Prevent video from being acquired by the camera. When this control
is set to ``TRUE`` (1), no image can be captured by the camera.
Common means to enforce privacy are mechanical obturation of the
sensor and firmware image processing, but the device is not
restricted to these methods. Devices that implement the privacy
control must support read access and may support write access.
``V4L2_CID_BAND_STOP_FILTER (integer)``
Switch the band-stop filter of a camera sensor on or off, or specify
its strength. Such band-stop filters can be used, for example, to
filter out the fluorescent light component.
.. _v4l2-auto-n-preset-white-balance:
``V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE``
(enum)
enum v4l2_auto_n_preset_white_balance -
Sets white balance to automatic, manual or a preset. The presets
determine color temperature of the light as a hint to the camera for
white balance adjustments resulting in most accurate color
representation. The following white balance presets are listed in
order of increasing color temperature.
.. tabularcolumns:: |p{7.0 cm}|p{10.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_WHITE_BALANCE_MANUAL``
- Manual white balance.
* - ``V4L2_WHITE_BALANCE_AUTO``
- Automatic white balance adjustments.
* - ``V4L2_WHITE_BALANCE_INCANDESCENT``
- White balance setting for incandescent (tungsten) lighting. It
generally cools down the colors and corresponds approximately to
2500...3500 K color temperature range.
* - ``V4L2_WHITE_BALANCE_FLUORESCENT``
- White balance preset for fluorescent lighting. It corresponds
approximately to 4000...5000 K color temperature.
* - ``V4L2_WHITE_BALANCE_FLUORESCENT_H``
- With this setting the camera will compensate for fluorescent H
lighting.
* - ``V4L2_WHITE_BALANCE_HORIZON``
- White balance setting for horizon daylight. It corresponds
approximately to 5000 K color temperature.
* - ``V4L2_WHITE_BALANCE_DAYLIGHT``
- White balance preset for daylight (with clear sky). It corresponds
approximately to 5000...6500 K color temperature.
* - ``V4L2_WHITE_BALANCE_FLASH``
- With this setting the camera will compensate for the flash light.
It slightly warms up the colors and corresponds roughly to
5000...5500 K color temperature.
* - ``V4L2_WHITE_BALANCE_CLOUDY``
- White balance preset for moderately overcast sky. This option
corresponds approximately to 6500...8000 K color temperature
range.
* - ``V4L2_WHITE_BALANCE_SHADE``
- White balance preset for shade or heavily overcast sky. It
corresponds approximately to 9000...10000 K color temperature.
.. _v4l2-wide-dynamic-range:
``V4L2_CID_WIDE_DYNAMIC_RANGE (boolean)``
Enables or disables the camera's wide dynamic range feature. This
feature allows to obtain clear images in situations where intensity
of the illumination varies significantly throughout the scene, i.e.
there are simultaneously very dark and very bright areas. It is most
commonly realized in cameras by combining two subsequent frames with
different exposure times. [#f1]_
.. _v4l2-image-stabilization:
``V4L2_CID_IMAGE_STABILIZATION (boolean)``
Enables or disables image stabilization.
``V4L2_CID_ISO_SENSITIVITY (integer menu)``
Determines ISO equivalent of an image sensor indicating the sensor's
sensitivity to light. The numbers are expressed in arithmetic scale,
as per :ref:`iso12232` standard, where doubling the sensor
sensitivity is represented by doubling the numerical ISO value.
Applications should interpret the values as standard ISO values
multiplied by 1000, e.g. control value 800 stands for ISO 0.8.
Drivers will usually support only a subset of standard ISO values.
The effect of setting this control while the
``V4L2_CID_ISO_SENSITIVITY_AUTO`` control is set to a value other
than ``V4L2_CID_ISO_SENSITIVITY_MANUAL`` is undefined, drivers
should ignore such requests.
.. _v4l2-iso-sensitivity-auto-type:
``V4L2_CID_ISO_SENSITIVITY_AUTO``
(enum)
enum v4l2_iso_sensitivity_type -
Enables or disables automatic ISO sensitivity adjustments.
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_CID_ISO_SENSITIVITY_MANUAL``
- Manual ISO sensitivity.
* - ``V4L2_CID_ISO_SENSITIVITY_AUTO``
- Automatic ISO sensitivity adjustments.
.. _v4l2-scene-mode:
``V4L2_CID_SCENE_MODE``
(enum)
enum v4l2_scene_mode -
This control allows to select scene programs as the camera automatic
modes optimized for common shooting scenes. Within these modes the
camera determines best exposure, aperture, focusing, light metering,
white balance and equivalent sensitivity. The controls of those
parameters are influenced by the scene mode control. An exact
behavior in each mode is subject to the camera specification.
When the scene mode feature is not used, this control should be set
to ``V4L2_SCENE_MODE_NONE`` to make sure the other possibly related
controls are accessible. The following scene programs are defined:
.. tabularcolumns:: |p{6.0cm}|p{11.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_SCENE_MODE_NONE``
- The scene mode feature is disabled.
* - ``V4L2_SCENE_MODE_BACKLIGHT``
- Backlight. Compensates for dark shadows when light is coming from
behind a subject, also by automatically turning on the flash.
* - ``V4L2_SCENE_MODE_BEACH_SNOW``
- Beach and snow. This mode compensates for all-white or bright
scenes, which tend to look gray and low contrast, when camera's
automatic exposure is based on an average scene brightness. To
compensate, this mode automatically slightly overexposes the
frames. The white balance may also be adjusted to compensate for
the fact that reflected snow looks bluish rather than white.
* - ``V4L2_SCENE_MODE_CANDLELIGHT``
- Candle light. The camera generally raises the ISO sensitivity and
lowers the shutter speed. This mode compensates for relatively
close subject in the scene. The flash is disabled in order to
preserve the ambiance of the light.
* - ``V4L2_SCENE_MODE_DAWN_DUSK``
- Dawn and dusk. Preserves the colors seen in low natural light
before dusk and after down. The camera may turn off the flash, and
automatically focus at infinity. It will usually boost saturation
and lower the shutter speed.
* - ``V4L2_SCENE_MODE_FALL_COLORS``
- Fall colors. Increases saturation and adjusts white balance for
color enhancement. Pictures of autumn leaves get saturated reds
and yellows.
* - ``V4L2_SCENE_MODE_FIREWORKS``
- Fireworks. Long exposure times are used to capture the expanding
burst of light from a firework. The camera may invoke image
stabilization.
* - ``V4L2_SCENE_MODE_LANDSCAPE``
- Landscape. The camera may choose a small aperture to provide deep
depth of field and long exposure duration to help capture detail
in dim light conditions. The focus is fixed at infinity. Suitable
for distant and wide scenery.
* - ``V4L2_SCENE_MODE_NIGHT``
- Night, also known as Night Landscape. Designed for low light
conditions, it preserves detail in the dark areas without blowing
out bright objects. The camera generally sets itself to a
medium-to-high ISO sensitivity, with a relatively long exposure
time, and turns flash off. As such, there will be increased image
noise and the possibility of blurred image.
* - ``V4L2_SCENE_MODE_PARTY_INDOOR``
- Party and indoor. Designed to capture indoor scenes that are lit
by indoor background lighting as well as the flash. The camera
usually increases ISO sensitivity, and adjusts exposure for the
low light conditions.
* - ``V4L2_SCENE_MODE_PORTRAIT``
- Portrait. The camera adjusts the aperture so that the depth of
field is reduced, which helps to isolate the subject against a
smooth background. Most cameras recognize the presence of faces in
the scene and focus on them. The color hue is adjusted to enhance
skin tones. The intensity of the flash is often reduced.
* - ``V4L2_SCENE_MODE_SPORTS``
- Sports. Significantly increases ISO and uses a fast shutter speed
to freeze motion of rapidly-moving subjects. Increased image noise
may be seen in this mode.
* - ``V4L2_SCENE_MODE_SUNSET``
- Sunset. Preserves deep hues seen in sunsets and sunrises. It bumps
up the saturation.
* - ``V4L2_SCENE_MODE_TEXT``
- Text. It applies extra contrast and sharpness, it is typically a
black-and-white mode optimized for readability. Automatic focus
may be switched to close-up mode and this setting may also involve
some lens-distortion correction.
``V4L2_CID_3A_LOCK (bitmask)``
This control locks or unlocks the automatic focus, exposure and
white balance. The automatic adjustments can be paused independently
by setting the corresponding lock bit to 1. The camera then retains
the settings until the lock bit is cleared. The following lock bits
are defined:
When a given algorithm is not enabled, drivers should ignore
requests to lock it and should return no error. An example might be
an application setting bit ``V4L2_LOCK_WHITE_BALANCE`` when the
``V4L2_CID_AUTO_WHITE_BALANCE`` control is set to ``FALSE``. The
value of this control may be changed by exposure, white balance or
focus controls.
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_LOCK_EXPOSURE``
- Automatic exposure adjustments lock.
* - ``V4L2_LOCK_WHITE_BALANCE``
- Automatic white balance adjustments lock.
* - ``V4L2_LOCK_FOCUS``
- Automatic focus lock.
``V4L2_CID_PAN_SPEED (integer)``
This control turns the camera horizontally at the specific speed.
The unit is undefined. A positive value moves the camera to the
right (clockwise when viewed from above), a negative value to the
left. A value of zero stops the motion if one is in progress and has
no effect otherwise.
``V4L2_CID_TILT_SPEED (integer)``
This control turns the camera vertically at the specified speed. The
unit is undefined. A positive value moves the camera up, a negative
value down. A value of zero stops the motion if one is in progress
and has no effect otherwise.
.. [#f1]
This control may be changed to a menu control in the future, if more
options are required.

The diff for one file is not shown because it is too large.

@ -0,0 +1,71 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _detect-controls:
************************
Detect Control Reference
************************
The Detect class includes controls for common features of various motion
or object detection capable devices.
.. _detect-control-id:
Detect Control IDs
==================
``V4L2_CID_DETECT_CLASS (class)``
The Detect class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
``V4L2_CID_DETECT_MD_MODE (menu)``
Sets the motion detection mode.
.. tabularcolumns:: |p{7.5cm}|p{10.0cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_DETECT_MD_MODE_DISABLED``
- Disable motion detection.
* - ``V4L2_DETECT_MD_MODE_GLOBAL``
- Use a single motion detection threshold.
* - ``V4L2_DETECT_MD_MODE_THRESHOLD_GRID``
- The image is divided into a grid, each cell with its own motion
detection threshold. These thresholds are set through the
``V4L2_CID_DETECT_MD_THRESHOLD_GRID`` matrix control.
* - ``V4L2_DETECT_MD_MODE_REGION_GRID``
- The image is divided into a grid, each cell with its own region
value that specifies which per-region motion detection thresholds
should be used. Each region has its own thresholds. How these
per-region thresholds are set up is driver-specific. The region
values for the grid are set through the
``V4L2_CID_DETECT_MD_REGION_GRID`` matrix control.
``V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD (integer)``
Sets the global motion detection threshold to be used with the
``V4L2_DETECT_MD_MODE_GLOBAL`` motion detection mode.
``V4L2_CID_DETECT_MD_THRESHOLD_GRID (__u16 matrix)``
Sets the motion detection thresholds for each cell in the grid. To
be used with the ``V4L2_DETECT_MD_MODE_THRESHOLD_GRID`` motion
detection mode. Matrix element (0, 0) represents the cell at the
top-left of the grid.
``V4L2_CID_DETECT_MD_REGION_GRID (__u8 matrix)``
Sets the motion detection region value for each cell in the grid. To
be used with the ``V4L2_DETECT_MD_MODE_REGION_GRID`` motion
detection mode. Matrix element (0, 0) represents the cell at the
top-left of the grid.
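A hedged sketch tying these controls together (not part of the patch shown here): select the global motion detection mode and program the single threshold it uses. The threshold value is only illustrative, since valid ranges are driver specific:

.. code-block:: c

    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static int enable_global_md(int fd)
    {
            struct v4l2_ext_control ctrl[2];
            struct v4l2_ext_controls ctrls;

            memset(ctrl, 0, sizeof(ctrl));
            ctrl[0].id = V4L2_CID_DETECT_MD_MODE;
            ctrl[0].value = V4L2_DETECT_MD_MODE_GLOBAL;
            ctrl[1].id = V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD;
            ctrl[1].value = 100;    /* illustrative threshold */

            memset(&ctrls, 0, sizeof(ctrls));
            ctrls.which = V4L2_CTRL_WHICH_CUR_VAL;
            ctrls.count = 2;
            ctrls.controls = ctrl;

            return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }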

@ -0,0 +1,166 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _dv-controls:
*******************************
Digital Video Control Reference
*******************************
The Digital Video control class is intended to control receivers and
transmitters for `VGA <http://en.wikipedia.org/wiki/Vga>`__,
`DVI <http://en.wikipedia.org/wiki/Digital_Visual_Interface>`__
(Digital Visual Interface), HDMI (:ref:`hdmi`) and DisplayPort
(:ref:`dp`). These controls are generally expected to be private to
the receiver or transmitter subdevice that implements them, so they are
only exposed on the ``/dev/v4l-subdev*`` device node.
.. note::
Note that these devices can have multiple input or output pads which are
hooked up to e.g. HDMI connectors. Even though the subdevice will
receive or transmit video from/to only one of those pads, the other pads
can still be active when it comes to EDID (Extended Display
Identification Data, :ref:`vesaedid`) and HDCP (High-bandwidth Digital
Content Protection System, :ref:`hdcp`) processing, allowing the
device to do the fairly slow EDID/HDCP handling in advance. This allows
for quick switching between connectors.
These pads appear in several of the controls in this section as
bitmasks, one bit for each pad. Bit 0 corresponds to pad 0, bit 1 to pad
1, etc. The maximum value of the control is the set of valid pads.
.. _dv-control-id:
Digital Video Control IDs
=========================
``V4L2_CID_DV_CLASS (class)``
The Digital Video class descriptor.
``V4L2_CID_DV_TX_HOTPLUG (bitmask)``
Many connectors have a hotplug pin which is high if EDID information
is available from the source. This control shows the state of the
hotplug pin as seen by the transmitter. Each bit corresponds to an
output pad on the transmitter. If an output pad does not have an
associated hotplug pin, then the bit for that pad will be 0. This
read-only control is applicable to DVI-D, HDMI and DisplayPort
connectors.
``V4L2_CID_DV_TX_RXSENSE (bitmask)``
Rx Sense is the detection of pull-ups on the TMDS clock lines. This
normally means that the sink has left/entered standby (i.e. the
transmitter can sense that the receiver is ready to receive video).
Each bit corresponds to an output pad on the transmitter. If an
output pad does not have an associated Rx Sense, then the bit for
that pad will be 0. This read-only control is applicable to DVI-D
and HDMI devices.
``V4L2_CID_DV_TX_EDID_PRESENT (bitmask)``
When the transmitter sees the hotplug signal from the receiver it
will attempt to read the EDID. If set, then the transmitter has read
at least the first block (= 128 bytes). Each bit corresponds to an
output pad on the transmitter. If an output pad does not support
EDIDs, then the bit for that pad will be 0. This read-only control
is applicable to VGA, DVI-A/D, HDMI and DisplayPort connectors.
``V4L2_CID_DV_TX_MODE``
(enum)
enum v4l2_dv_tx_mode -
HDMI transmitters can transmit in DVI-D mode (just video) or in HDMI
mode (video + audio + auxiliary data). This control selects which
mode to use: V4L2_DV_TX_MODE_DVI_D or V4L2_DV_TX_MODE_HDMI.
This control is applicable to HDMI connectors.
``V4L2_CID_DV_TX_RGB_RANGE``
(enum)
enum v4l2_dv_rgb_range -
Select the quantization range for RGB output. V4L2_DV_RANGE_AUTO
follows the RGB quantization range specified in the standard for the
video interface (ie. :ref:`cea861` for HDMI).
V4L2_DV_RANGE_LIMITED and V4L2_DV_RANGE_FULL override the
standard to be compatible with sinks that have not implemented the
standard correctly (unfortunately quite common for HDMI and DVI-D).
Full range allows all possible values to be used whereas limited
range sets the range to (16 << (N-8)) - (235 << (N-8)) where N is
the number of bits per component. This control is applicable to VGA,
DVI-A/D, HDMI and DisplayPort connectors.
``V4L2_CID_DV_TX_IT_CONTENT_TYPE``
(enum)
enum v4l2_dv_it_content_type -
Configures the IT Content Type of the transmitted video. This
information is sent over HDMI and DisplayPort connectors as part of
the AVI InfoFrame. The term 'IT Content' is used for content that
originates from a computer as opposed to content from a TV broadcast
or an analog source. The enum v4l2_dv_it_content_type defines
the possible content types:
.. tabularcolumns:: |p{7.0cm}|p{10.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_DV_IT_CONTENT_TYPE_GRAPHICS``
- Graphics content. Pixel data should be passed unfiltered and
without analog reconstruction.
* - ``V4L2_DV_IT_CONTENT_TYPE_PHOTO``
- Photo content. The content is derived from digital still pictures.
The content should be passed through with minimal scaling and
picture enhancements.
* - ``V4L2_DV_IT_CONTENT_TYPE_CINEMA``
- Cinema content.
* - ``V4L2_DV_IT_CONTENT_TYPE_GAME``
- Game content. Audio and video latency should be minimized.
* - ``V4L2_DV_IT_CONTENT_TYPE_NO_ITC``
- No IT Content information is available and the ITC bit in the AVI
InfoFrame is set to 0.
``V4L2_CID_DV_RX_POWER_PRESENT (bitmask)``
Detects whether the receiver receives power from the source (e.g.
HDMI carries 5V on one of the pins). This is often used to power an
eeprom which contains EDID information, such that the source can
read the EDID even if the sink is in standby/power off. Each bit
corresponds to an input pad on the receiver. If an input pad
cannot detect whether power is present, then the bit for that pad
will be 0. This read-only control is applicable to DVI-D, HDMI and
DisplayPort connectors.
``V4L2_CID_DV_RX_RGB_RANGE``
(enum)
enum v4l2_dv_rgb_range -
Select the quantization range for RGB input. V4L2_DV_RANGE_AUTO
follows the RGB quantization range specified in the standard for the
video interface (ie. :ref:`cea861` for HDMI).
V4L2_DV_RANGE_LIMITED and V4L2_DV_RANGE_FULL override the
standard to be compatible with sources that have not implemented the
standard correctly (unfortunately quite common for HDMI and DVI-D).
Full range allows all possible values to be used whereas limited
range sets the range to (16 << (N-8)) - (235 << (N-8)) where N is
the number of bits per component. This control is applicable to VGA,
DVI-A/D, HDMI and DisplayPort connectors.
``V4L2_CID_DV_RX_IT_CONTENT_TYPE``
(enum)
enum v4l2_dv_it_content_type -
Reads the IT Content Type of the received video. This information is
sent over HDMI and DisplayPort connectors as part of the AVI
InfoFrame. The term 'IT Content' is used for content that originates
from a computer as opposed to content from a TV broadcast or an
analog source. See ``V4L2_CID_DV_TX_IT_CONTENT_TYPE`` for the
available content types.


@ -0,0 +1,192 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _flash-controls:
***********************
Flash Control Reference
***********************
The V4L2 flash controls are intended to provide generic access to flash
controller devices. Flash controller devices are typically used in
digital cameras.
The interface can support both LED and xenon flash devices. As of
writing this, there is no xenon flash driver using this interface.
.. _flash-controls-use-cases:
Supported use cases
===================
Unsynchronised LED flash (software strobe)
------------------------------------------
Unsynchronised LED flash is controlled directly by the host, as is the
sensor. The flash must be enabled by the host before the exposure of the
image starts and disabled once it ends. The host is fully responsible
for the timing of the flash.
Example of such device: Nokia N900.
Synchronised LED flash (hardware strobe)
----------------------------------------
The synchronised LED flash is pre-programmed by the host (power and
timeout) but controlled by the sensor through a strobe signal from the
sensor to the flash.
The sensor controls the flash duration and timing. This information
typically must be made available to the sensor.
LED flash as torch
------------------
LED flash may be used as a torch in conjunction with another use case
involving the camera, or individually.
.. _flash-control-id:
Flash Control IDs
-----------------
``V4L2_CID_FLASH_CLASS (class)``
The FLASH class descriptor.
``V4L2_CID_FLASH_LED_MODE (menu)``
Defines the mode of the flash LED, the high-power white LED attached
to the flash controller. Setting this control may not be possible in
presence of some faults. See V4L2_CID_FLASH_FAULT.
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_FLASH_LED_MODE_NONE``
- Off.
* - ``V4L2_FLASH_LED_MODE_FLASH``
- Flash mode.
* - ``V4L2_FLASH_LED_MODE_TORCH``
- Torch mode. See V4L2_CID_FLASH_TORCH_INTENSITY.
``V4L2_CID_FLASH_STROBE_SOURCE (menu)``
Defines the source of the flash LED strobe.
.. tabularcolumns:: |p{7.0cm}|p{10.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_FLASH_STROBE_SOURCE_SOFTWARE``
- The flash strobe is triggered by using the
V4L2_CID_FLASH_STROBE control.
* - ``V4L2_FLASH_STROBE_SOURCE_EXTERNAL``
- The flash strobe is triggered by an external source. Typically
this is a sensor, which makes it possible to synchronise the
flash strobe start to exposure start.
``V4L2_CID_FLASH_STROBE (button)``
Strobe flash. Valid when V4L2_CID_FLASH_LED_MODE is set to
V4L2_FLASH_LED_MODE_FLASH and V4L2_CID_FLASH_STROBE_SOURCE
is set to V4L2_FLASH_STROBE_SOURCE_SOFTWARE. Setting this
control may not be possible in presence of some faults. See
V4L2_CID_FLASH_FAULT.
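Putting the software strobe pieces together, one plausible sequence is: select flash mode, select the software strobe source, program a timeout and intensity, then press the strobe button. The sketch below illustrates this; the device path, timeout and intensity values are invented and the valid ranges and units must be taken from the actual driver.

.. code-block:: c

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    /* Helper: set one 32-bit control via the extended control interface. */
    static int set_ctrl(int fd, unsigned int id, int value)
    {
            struct v4l2_ext_control ctrl = { .id = id, .value = value };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }

    int main(void)
    {
            int fd = open("/dev/v4l-subdev0", O_RDWR); /* placeholder flash subdev */

            if (fd < 0)
                    return 1;

            set_ctrl(fd, V4L2_CID_FLASH_LED_MODE, V4L2_FLASH_LED_MODE_FLASH);
            set_ctrl(fd, V4L2_CID_FLASH_STROBE_SOURCE,
                     V4L2_FLASH_STROBE_SOURCE_SOFTWARE);
            /* Example values; ranges and exact units are driver-specific. */
            set_ctrl(fd, V4L2_CID_FLASH_TIMEOUT, 100000);
            set_ctrl(fd, V4L2_CID_FLASH_INTENSITY, 500);
            /* Button control: writing it triggers the strobe. */
            set_ctrl(fd, V4L2_CID_FLASH_STROBE, 1);
            return 0;
    }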
``V4L2_CID_FLASH_STROBE_STOP (button)``
Stop flash strobe immediately.
``V4L2_CID_FLASH_STROBE_STATUS (boolean)``
Strobe status: whether the flash is strobing at the moment or not.
This is a read-only control.
``V4L2_CID_FLASH_TIMEOUT (integer)``
Hardware timeout for flash. The flash strobe is stopped after this
period of time has passed from the start of the strobe.
``V4L2_CID_FLASH_INTENSITY (integer)``
Intensity of the flash strobe when the flash LED is in flash mode
(V4L2_FLASH_LED_MODE_FLASH). The unit should be milliamps (mA)
if possible.
``V4L2_CID_FLASH_TORCH_INTENSITY (integer)``
Intensity of the flash LED in torch mode
(V4L2_FLASH_LED_MODE_TORCH). The unit should be milliamps (mA)
if possible. Setting this control may not be possible in presence of
some faults. See V4L2_CID_FLASH_FAULT.
``V4L2_CID_FLASH_INDICATOR_INTENSITY (integer)``
Intensity of the indicator LED. The indicator LED may be fully
independent of the flash LED. The unit should be microamps (uA) if
possible.
``V4L2_CID_FLASH_FAULT (bitmask)``
Faults related to the flash. The faults tell about specific problems
in the flash chip itself or the LEDs attached to it. Faults may
prevent further use of some of the flash controls. In particular,
V4L2_CID_FLASH_LED_MODE is set to V4L2_FLASH_LED_MODE_NONE
if the fault affects the flash LED. Exactly which faults have such
an effect is chip dependent. Reading the faults resets the control
and returns the chip to a usable state if possible.
.. tabularcolumns:: |p{8.0cm}|p{9.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_FLASH_FAULT_OVER_VOLTAGE``
- Flash controller voltage to the flash LED has exceeded the limit
specific to the flash controller.
* - ``V4L2_FLASH_FAULT_TIMEOUT``
- The flash strobe was still on when the timeout set by the user ---
V4L2_CID_FLASH_TIMEOUT control --- has expired. Not all flash
controllers may set this in all such conditions.
* - ``V4L2_FLASH_FAULT_OVER_TEMPERATURE``
- The flash controller has overheated.
* - ``V4L2_FLASH_FAULT_SHORT_CIRCUIT``
- The short circuit protection of the flash controller has been
triggered.
* - ``V4L2_FLASH_FAULT_OVER_CURRENT``
- Current in the LED power supply has exceeded the limit specific to
the flash controller.
* - ``V4L2_FLASH_FAULT_INDICATOR``
- The flash controller has detected a short or open circuit
condition on the indicator LED.
* - ``V4L2_FLASH_FAULT_UNDER_VOLTAGE``
- Flash controller voltage to the flash LED has been below the
minimum limit specific to the flash controller.
* - ``V4L2_FLASH_FAULT_INPUT_VOLTAGE``
- The input voltage of the flash controller is below the limit under
which strobing the flash at full current will not be possible. The
condition persists until this flag is no longer set.
* - ``V4L2_FLASH_FAULT_LED_OVER_TEMPERATURE``
- The temperature of the LED has exceeded its allowed upper limit.
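The fault bits listed above can be tested individually after reading the control (which, as noted, also resets the faults where possible). A minimal reading sketch with a placeholder sub-device path:

.. code-block:: c

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    int main(void)
    {
            int fd = open("/dev/v4l-subdev0", O_RDWR); /* placeholder flash subdev */
            struct v4l2_ext_control ctrl = { .id = V4L2_CID_FLASH_FAULT };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            if (fd < 0 || ioctl(fd, VIDIOC_G_EXT_CTRLS, &ctrls) < 0)
                    return 1;

            if (ctrl.value & V4L2_FLASH_FAULT_TIMEOUT)
                    printf("strobe ended by timeout\n");
            if (ctrl.value & V4L2_FLASH_FAULT_OVER_TEMPERATURE)
                    printf("flash controller overheated\n");
            if (ctrl.value & V4L2_FLASH_FAULT_SHORT_CIRCUIT)
                    printf("short circuit protection triggered\n");
            return 0;
    }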
``V4L2_CID_FLASH_CHARGE (boolean)``
Enable or disable charging of the xenon flash capacitor.
``V4L2_CID_FLASH_READY (boolean)``
Is the flash ready to strobe? Xenon flashes require their capacitors
charged before strobing. LED flashes often require a cooldown period
after strobe during which another strobe will not be possible. This
is a read-only control.


@ -0,0 +1,95 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _fm-rx-controls:
*****************************
FM Receiver Control Reference
*****************************
The FM Receiver (FM_RX) class includes controls for common features of
FM Reception capable devices.
.. _fm-rx-control-id:
FM_RX Control IDs
=================
``V4L2_CID_FM_RX_CLASS (class)``
The FM_RX class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
``V4L2_CID_RDS_RECEPTION (boolean)``
Enables/disables RDS reception by the radio tuner
``V4L2_CID_RDS_RX_PTY (integer)``
Gets RDS Programme Type field. This encodes up to 31 pre-defined
programme types.
``V4L2_CID_RDS_RX_PS_NAME (string)``
Gets the Programme Service name (PS_NAME). It is intended for
static display on a receiver. It is the primary aid to listeners in
programme service identification and selection. In Annex E of
:ref:`iec62106`, the RDS specification, there is a full
description of the correct character encoding for Programme Service
name strings. Also from RDS specification, PS is usually a single
eight character text. However, it is also possible to find receivers
which can scroll strings sized as 8 x N characters. So, this control
must be configured with steps of 8 characters. As a result, it must
always contain a string whose size is a multiple of 8.
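String controls such as the PS name are accessed through the extended control interface with a caller-supplied buffer. A rough sketch, assuming a hypothetical ``/dev/radio0`` node and a generously sized buffer:

.. code-block:: c

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    int main(void)
    {
            char ps_name[65] = "";                  /* 8 x N characters plus NUL */
            int fd = open("/dev/radio0", O_RDWR);   /* placeholder radio node */
            struct v4l2_ext_control ctrl = {
                    .id = V4L2_CID_RDS_RX_PS_NAME,
                    .size = sizeof(ps_name),
                    .string = ps_name,
            };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            if (fd < 0 || ioctl(fd, VIDIOC_G_EXT_CTRLS, &ctrls) < 0)
                    return 1;

            printf("PS: %s\n", ps_name);
            return 0;
    }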
``V4L2_CID_RDS_RX_RADIO_TEXT (string)``
Gets the Radio Text info. It is a textual description of what is
being broadcasted. RDS Radio Text can be applied when broadcaster
wishes to transmit longer PS names, programme-related information or
any other text. In these cases, RadioText can be used in addition to
``V4L2_CID_RDS_RX_PS_NAME``. The encoding for Radio Text strings is
also fully described in Annex E of :ref:`iec62106`. The length of
Radio Text strings depends on which RDS Block is being used to
transmit it, either 32 (2A block) or 64 (2B block). However, it is
also possible to find receivers which can scroll strings sized as 32
x N or 64 x N characters. So, this control must be configured with
steps of 32 or 64 characters. As a result, it must always contain a
string whose size is a multiple of 32 or 64.
``V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT (boolean)``
If set, then a traffic announcement is in progress.
``V4L2_CID_RDS_RX_TRAFFIC_PROGRAM (boolean)``
If set, then the tuned programme carries traffic announcements.
``V4L2_CID_RDS_RX_MUSIC_SPEECH (boolean)``
If set, then this channel broadcasts music. If cleared, then it
broadcasts speech. If the transmitter doesn't make this distinction,
then it will be set.
``V4L2_CID_TUNE_DEEMPHASIS``
(enum)
enum v4l2_deemphasis -
Configures the de-emphasis value for reception. A de-emphasis filter
is applied to the received signal to compensate for the pre-emphasis
applied at the transmitter, attenuating the high audio
frequencies. Depending on the region, a time constant of either 50
or 75 useconds is used. The enum v4l2_deemphasis defines possible
values for de-emphasis. Here they are:
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_DEEMPHASIS_DISABLED``
- No de-emphasis is applied.
* - ``V4L2_DEEMPHASIS_50_uS``
- A de-emphasis of 50 uS is used.
* - ``V4L2_DEEMPHASIS_75_uS``
- A de-emphasis of 75 uS is used.


@ -0,0 +1,188 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _fm-tx-controls:
********************************
FM Transmitter Control Reference
********************************
The FM Transmitter (FM_TX) class includes controls for common features
of FM transmissions capable devices. Currently this class includes
parameters for audio compression, pilot tone generation, audio deviation
limiter, RDS transmission and tuning power features.
.. _fm-tx-control-id:
FM_TX Control IDs
=================
``V4L2_CID_FM_TX_CLASS (class)``
The FM_TX class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
``V4L2_CID_RDS_TX_DEVIATION (integer)``
Configures RDS signal frequency deviation level in Hz. The range and
step are driver-specific.
``V4L2_CID_RDS_TX_PI (integer)``
Sets the RDS Programme Identification field for transmission.
``V4L2_CID_RDS_TX_PTY (integer)``
Sets the RDS Programme Type field for transmission. This encodes up
to 31 pre-defined programme types.
``V4L2_CID_RDS_TX_PS_NAME (string)``
Sets the Programme Service name (PS_NAME) for transmission. It is
intended for static display on a receiver. It is the primary aid to
listeners in programme service identification and selection. In
Annex E of :ref:`iec62106`, the RDS specification, there is a full
description of the correct character encoding for Programme Service
name strings. Also from RDS specification, PS is usually a single
eight character text. However, it is also possible to find receivers
which can scroll strings sized as 8 x N characters. So, this control
must be configured with steps of 8 characters. As a result, it must
always contain a string whose size is a multiple of 8.
``V4L2_CID_RDS_TX_RADIO_TEXT (string)``
Sets the Radio Text info for transmission. It is a textual
description of what is being broadcasted. RDS Radio Text can be
applied when broadcaster wishes to transmit longer PS names,
programme-related information or any other text. In these cases,
RadioText should be used in addition to ``V4L2_CID_RDS_TX_PS_NAME``.
The encoding for Radio Text strings is also fully described in Annex
E of :ref:`iec62106`. The length of Radio Text strings depends on
which RDS Block is being used to transmit it, either 32 (2A block)
or 64 (2B block). However, it is also possible to find receivers
which can scroll strings sized as 32 x N or 64 x N characters. So,
this control must be configured with steps of 32 or 64 characters.
As a result, it must always contain a string whose size is a multiple of
32 or 64.
``V4L2_CID_RDS_TX_MONO_STEREO (boolean)``
Sets the Mono/Stereo bit of the Decoder Identification code. If set,
then the audio was recorded as stereo.
``V4L2_CID_RDS_TX_ARTIFICIAL_HEAD (boolean)``
Sets the
`Artificial Head <http://en.wikipedia.org/wiki/Artificial_head>`__
bit of the Decoder Identification code. If set, then the audio was
recorded using an artificial head.
``V4L2_CID_RDS_TX_COMPRESSED (boolean)``
Sets the Compressed bit of the Decoder Identification code. If set,
then the audio is compressed.
``V4L2_CID_RDS_TX_DYNAMIC_PTY (boolean)``
Sets the Dynamic PTY bit of the Decoder Identification code. If set,
then the PTY code is dynamically switched.
``V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT (boolean)``
If set, then a traffic announcement is in progress.
``V4L2_CID_RDS_TX_TRAFFIC_PROGRAM (boolean)``
If set, then the tuned programme carries traffic announcements.
``V4L2_CID_RDS_TX_MUSIC_SPEECH (boolean)``
If set, then this channel broadcasts music. If cleared, then it
broadcasts speech. If the transmitter doesn't make this distinction,
then it should be set.
``V4L2_CID_RDS_TX_ALT_FREQS_ENABLE (boolean)``
If set, then transmit alternate frequencies.
``V4L2_CID_RDS_TX_ALT_FREQS (__u32 array)``
The alternate frequencies in kHz units. The RDS standard allows for
up to 25 frequencies to be defined. Drivers may support fewer
frequencies so check the array size.
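The list is passed as a ``__u32`` array through the ``p_u32`` pointer of the extended control structure, with ``size`` given in bytes. A sketch with two invented frequencies and a placeholder device path, enabling transmission of the list first:

.. code-block:: c

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    int main(void)
    {
            __u32 alt_freqs[2] = { 98500, 102300 }; /* kHz, made-up values */
            int fd = open("/dev/radio0", O_RDWR);   /* placeholder FM TX node */
            struct v4l2_ext_control en = {
                    .id = V4L2_CID_RDS_TX_ALT_FREQS_ENABLE,
                    .value = 1,
            };
            struct v4l2_ext_controls en_ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &en,
            };
            struct v4l2_ext_control ctrl = {
                    .id = V4L2_CID_RDS_TX_ALT_FREQS,
                    .size = sizeof(alt_freqs),      /* size in bytes */
                    .p_u32 = alt_freqs,
            };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            if (fd < 0)
                    return 1;

            /* Enable transmission of the list, then set the frequencies. */
            ioctl(fd, VIDIOC_S_EXT_CTRLS, &en_ctrls);
            ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
            return 0;
    }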
``V4L2_CID_AUDIO_LIMITER_ENABLED (boolean)``
Enables or disables the audio deviation limiter feature. The limiter
is useful when trying to maximize the audio volume, minimize
receiver-generated distortion and prevent overmodulation.
``V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (integer)``
Sets the audio deviation limiter feature release time. Unit is in
useconds. Step and range are driver-specific.
``V4L2_CID_AUDIO_LIMITER_DEVIATION (integer)``
Configures audio frequency deviation level in Hz. The range and step
are driver-specific.
``V4L2_CID_AUDIO_COMPRESSION_ENABLED (boolean)``
Enables or disables the audio compression feature. This feature
amplifies signals below the threshold by a fixed gain and compresses
audio signals above the threshold by the ratio of Threshold/(Gain +
Threshold).
``V4L2_CID_AUDIO_COMPRESSION_GAIN (integer)``
Sets the gain for audio compression feature. It is a dB value. The
range and step are driver-specific.
``V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (integer)``
Sets the threshold level for the audio compression feature. It is a dB
value. The range and step are driver-specific.
``V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (integer)``
Sets the attack time for audio compression feature. It is a useconds
value. The range and step are driver-specific.
``V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (integer)``
Sets the release time for audio compression feature. It is a
useconds value. The range and step are driver-specific.
``V4L2_CID_PILOT_TONE_ENABLED (boolean)``
Enables or disables the pilot tone generation feature.
``V4L2_CID_PILOT_TONE_DEVIATION (integer)``
Configures pilot tone frequency deviation level. Unit is in Hz. The
range and step are driver-specific.
``V4L2_CID_PILOT_TONE_FREQUENCY (integer)``
Configures pilot tone frequency value. Unit is in Hz. The range and
step are driver-specific.
``V4L2_CID_TUNE_PREEMPHASIS``
(enum)
enum v4l2_preemphasis -
Configures the pre-emphasis value for broadcasting. A pre-emphasis
filter is applied to the broadcast to accentuate the high audio
frequencies. Depending on the region, a time constant of either 50
or 75 useconds is used. The enum v4l2_preemphasis defines possible
values for pre-emphasis. Here they are:
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_PREEMPHASIS_DISABLED``
- No pre-emphasis is applied.
* - ``V4L2_PREEMPHASIS_50_uS``
- A pre-emphasis of 50 uS is used.
* - ``V4L2_PREEMPHASIS_75_uS``
- A pre-emphasis of 75 uS is used.
``V4L2_CID_TUNE_POWER_LEVEL (integer)``
Sets the output power level for signal transmission. Unit is in
dBuV. Range and step are driver-specific.
``V4L2_CID_TUNE_ANTENNA_CAPACITOR (integer)``
This selects the value of antenna tuning capacitor manually or
automatically if set to zero. Unit, range and step are
driver-specific.
For more details about RDS specification, refer to :ref:`iec62106`
document, from CENELEC.


@ -0,0 +1,63 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _image-process-controls:
*******************************
Image Process Control Reference
*******************************
The Image Process control class is intended for low-level control of
image processing functions. Unlike ``V4L2_CID_IMAGE_SOURCE_CLASS``, the
controls in this class affect processing the image, and do not control
capturing of it.
.. _image-process-control-id:
Image Process Control IDs
=========================
``V4L2_CID_IMAGE_PROC_CLASS (class)``
The IMAGE_PROC class descriptor.
``V4L2_CID_LINK_FREQ (integer menu)``
Data bus frequency. Together with the media bus pixel code, bus type
(clock cycles per sample), the data bus frequency defines the pixel
rate (``V4L2_CID_PIXEL_RATE``) in the pixel array (or possibly
elsewhere, if the device is not an image sensor). The frame rate can
be calculated from the pixel clock, image width and height and
horizontal and vertical blanking. While the pixel rate control may
be defined elsewhere than in the subdev containing the pixel array,
the frame rate cannot be obtained from that information. This is
because only on the pixel array can it be assumed that the vertical
and horizontal blanking information is exact: no other blanking is
allowed in the pixel array. The selection of frame rate is performed
by selecting the desired horizontal and vertical blanking. The unit
of this control is Hz.
``V4L2_CID_PIXEL_RATE (64-bit integer)``
Pixel rate in the source pads of the subdev. This control is
read-only and its unit is pixels / second.
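In other words, the frame rate follows from ``pixel_rate / ((width + hblank) * (height + vblank))``. A small arithmetic sketch with invented numbers; a 1920x1080 readout at 148.5 Mpixels/s with 280 pixels of horizontal and 45 lines of vertical blanking works out to 60 fps:

.. code-block:: c

    #include <stdio.h>

    /*
     * Frame rate from pixel rate and blanking: every line is
     * (width + hblank) pixels long and every frame is
     * (height + vblank) lines long.
     */
    static double frame_rate(double pixel_rate, unsigned int width,
                             unsigned int height, unsigned int hblank,
                             unsigned int vblank)
    {
            return pixel_rate / ((double)(width + hblank) * (height + vblank));
    }

    int main(void)
    {
            /* Invented example: 1080p sensor readout at 148.5 Mpixels/s. */
            printf("%.2f fps\n",
                   frame_rate(148500000.0, 1920, 1080, 280, 45));
            return 0;
    }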
``V4L2_CID_TEST_PATTERN (menu)``
Some capture/display/sensor devices have the capability to generate
test pattern images. These hardware specific test patterns can be
used to test if a device is working properly.
``V4L2_CID_DEINTERLACING_MODE (menu)``
The video deinterlacing mode (such as Bob, Weave, ...). The menu items are
driver specific and are documented in :ref:`v4l-drivers`.
``V4L2_CID_DIGITAL_GAIN (integer)``
Digital gain is the value by which all colour components
are multiplied. Typically the digital gain applied is the
control value divided by e.g. 0x100, meaning that to get no
digital gain the control value needs to be 0x100. The no-gain
configuration is also typically the default.
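Assuming the 0x100 convention mentioned above, a gain of 1.5x corresponds to a control value of 0x180. A tiny conversion sketch; the fixed-point base is an assumption that must be checked against the control's actual default and step:

.. code-block:: c

    #include <stdio.h>

    /* Convert a linear gain factor to a control value, assuming that a
     * control value of 'base' (typically 0x100) means "no digital gain".
     */
    static int digital_gain_code(double gain, int base)
    {
            return (int)(gain * base + 0.5);
    }

    int main(void)
    {
            printf("1.0x -> 0x%x\n", digital_gain_code(1.0, 0x100)); /* 0x100 */
            printf("1.5x -> 0x%x\n", digital_gain_code(1.5, 0x100)); /* 0x180 */
            return 0;
    }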


@ -0,0 +1,57 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _image-source-controls:
******************************
Image Source Control Reference
******************************
The Image Source control class is intended for low-level control of
image source devices such as image sensors. The devices feature an
analogue to digital converter and a bus transmitter to transmit the
image data out of the device.
.. _image-source-control-id:
Image Source Control IDs
========================
``V4L2_CID_IMAGE_SOURCE_CLASS (class)``
The IMAGE_SOURCE class descriptor.
``V4L2_CID_VBLANK (integer)``
Vertical blanking. The idle period after every frame during which no
image data is produced. The unit of vertical blanking is a line.
Every line has the length of the image width plus horizontal blanking, at
the pixel rate defined by the ``V4L2_CID_PIXEL_RATE`` control in the
same sub-device.
``V4L2_CID_HBLANK (integer)``
Horizontal blanking. The idle period after every line of image data
during which no image data is produced. The unit of horizontal
blanking is pixels.
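Together with ``V4L2_CID_PIXEL_RATE``, the blanking controls let an application compute the frame period. A rough sketch that reads the three controls from a hypothetical sensor sub-device; the active width and height are assumed here rather than queried:

.. code-block:: c

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static __s64 get_ctrl(int fd, unsigned int id)
    {
            struct v4l2_ext_control ctrl = { .id = id };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            if (ioctl(fd, VIDIOC_G_EXT_CTRLS, &ctrls) < 0)
                    return -1;
            /* V4L2_CID_PIXEL_RATE is a 64-bit control, the others are 32-bit. */
            return id == V4L2_CID_PIXEL_RATE ? ctrl.value64 : ctrl.value;
    }

    int main(void)
    {
            int fd = open("/dev/v4l-subdev0", O_RDWR); /* placeholder sensor subdev */
            unsigned int width = 1920, height = 1080;  /* assumed active size */

            if (fd < 0)
                    return 1;

            __s64 pixel_rate = get_ctrl(fd, V4L2_CID_PIXEL_RATE);
            __s64 hblank = get_ctrl(fd, V4L2_CID_HBLANK);
            __s64 vblank = get_ctrl(fd, V4L2_CID_VBLANK);

            if (pixel_rate > 0 && hblank >= 0 && vblank >= 0)
                    printf("frame period: %.3f ms\n",
                           1e3 * (width + hblank) * (height + vblank) / pixel_rate);
            return 0;
    }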
``V4L2_CID_ANALOGUE_GAIN (integer)``
Analogue gain is gain affecting all colour components in the pixel
matrix. The gain operation is performed in the analogue domain
before A/D conversion.
``V4L2_CID_TEST_PATTERN_RED (integer)``
Test pattern red colour component.
``V4L2_CID_TEST_PATTERN_GREENR (integer)``
Test pattern green (next to red) colour component.
``V4L2_CID_TEST_PATTERN_BLUE (integer)``
Test pattern blue colour component.
``V4L2_CID_TEST_PATTERN_GREENB (integer)``
Test pattern green (next to blue) colour component.


@ -0,0 +1,113 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _jpeg-controls:
**********************
JPEG Control Reference
**********************
The JPEG class includes controls for common features of JPEG encoders
and decoders. Currently it includes features for codecs implementing the
progressive baseline DCT compression process with Huffman entropy
coding.
.. _jpeg-control-id:
JPEG Control IDs
================
``V4L2_CID_JPEG_CLASS (class)``
The JPEG class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
``V4L2_CID_JPEG_CHROMA_SUBSAMPLING (menu)``
The chroma subsampling factors describe how each component of an
input image is sampled, with respect to the maximum sample rate in each
spatial dimension. See :ref:`itu-t81`, clause A.1.1. for more
details. The ``V4L2_CID_JPEG_CHROMA_SUBSAMPLING`` control determines
how Cb and Cr components are downsampled after converting an input
image from RGB to Y'CbCr color space.
.. tabularcolumns:: |p{7.0cm}|p{10.5cm}|
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_444``
- No chroma subsampling, each pixel has Y, Cr and Cb values.
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_422``
- Horizontally subsample Cr, Cb components by a factor of 2.
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_420``
- Subsample Cr, Cb components horizontally and vertically by 2.
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_411``
- Horizontally subsample Cr, Cb components by a factor of 4.
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_410``
- Subsample Cr, Cb components horizontally by 4 and vertically by 2.
* - ``V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY``
- Use only luminance component.
``V4L2_CID_JPEG_RESTART_INTERVAL (integer)``
The restart interval determines an interval of inserting RSTm
markers (m = 0..7). The purpose of these markers is to additionally
reinitialize the encoder process, in order to process blocks of an
image independently. For the lossy compression processes the restart
interval unit is MCU (Minimum Coded Unit) and its value is contained
in DRI (Define Restart Interval) marker. If
``V4L2_CID_JPEG_RESTART_INTERVAL`` control is set to 0, DRI and RSTm
markers will not be inserted.
.. _jpeg-quality-control:
``V4L2_CID_JPEG_COMPRESSION_QUALITY (integer)``
The ``V4L2_CID_JPEG_COMPRESSION_QUALITY`` control determines the trade-off
between image quality and size. It provides a simpler method for
applications to control image quality, without a need for direct
reconfiguration of luminance and chrominance quantization tables. In
cases where a driver uses quantization tables configured directly by
an application, using interfaces defined elsewhere, the
``V4L2_CID_JPEG_COMPRESSION_QUALITY`` control should be set by the
driver to 0.
The value range of this control is driver-specific. Only positive,
non-zero values are meaningful. The recommended range is 1 - 100,
where larger values correspond to better image quality.
.. _jpeg-active-marker-control:
``V4L2_CID_JPEG_ACTIVE_MARKER (bitmask)``
Specify which JPEG markers are included in the compressed stream. This
control is valid only for encoders.
.. flat-table::
:header-rows: 0
:stub-columns: 0
* - ``V4L2_JPEG_ACTIVE_MARKER_APP0``
- Application data segment APP\ :sub:`0`.
* - ``V4L2_JPEG_ACTIVE_MARKER_APP1``
- Application data segment APP\ :sub:`1`.
* - ``V4L2_JPEG_ACTIVE_MARKER_COM``
- Comment segment.
* - ``V4L2_JPEG_ACTIVE_MARKER_DQT``
- Quantization tables segment.
* - ``V4L2_JPEG_ACTIVE_MARKER_DHT``
- Huffman tables segment.
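The marker bits listed above are simply OR'ed together. A sketch asking a hypothetical encoder to emit only the APP0, DQT and DHT segments:

.. code-block:: c

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    int main(void)
    {
            int fd = open("/dev/video0", O_RDWR); /* placeholder JPEG encoder */
            struct v4l2_ext_control ctrl = {
                    .id = V4L2_CID_JPEG_ACTIVE_MARKER,
                    .value = V4L2_JPEG_ACTIVE_MARKER_APP0 |
                             V4L2_JPEG_ACTIVE_MARKER_DQT |
                             V4L2_JPEG_ACTIVE_MARKER_DHT,
            };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            if (fd < 0)
                    return 1;
            return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls) ? 1 : 0;
    }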
For more details about JPEG specification, refer to :ref:`itu-t81`,
:ref:`jfif`, :ref:`w3c-jpeg-jfif`.


@ -0,0 +1,96 @@
.. Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GFDL-1.1-or-later WITH no-invariant-sections
.. _rf-tuner-controls:
**************************
RF Tuner Control Reference
**************************
The RF Tuner (RF_TUNER) class includes controls for common features of
devices having an RF tuner.
In this context, an RF tuner is the radio receiver circuit between the antenna
and the demodulator. It receives radio frequency (RF) from the antenna and
converts that received signal to a lower intermediate frequency (IF) or
baseband frequency (BB). Tuners that can do baseband output are often
called Zero-IF tuners. Older tuners were typically simple PLL tuners
inside a metal box, while newer ones are highly integrated chips
without a metal box, so-called "silicon tuners". These controls are mostly
applicable to the new feature-rich silicon tuners, simply because older
tuners do not have many adjustable features.
For more information about RF tuners see
`Tuner (radio) <http://en.wikipedia.org/wiki/Tuner_%28radio%29>`__
and `RF front end <http://en.wikipedia.org/wiki/RF_front_end>`__
from Wikipedia.
.. _rf-tuner-control-id:
RF_TUNER Control IDs
====================
``V4L2_CID_RF_TUNER_CLASS (class)``
The RF_TUNER class descriptor. Calling
:ref:`VIDIOC_QUERYCTRL` for this control will
return a description of this control class.
``V4L2_CID_RF_TUNER_BANDWIDTH_AUTO (boolean)``
Enables/disables tuner radio channel bandwidth configuration. In
automatic mode bandwidth configuration is performed by the driver.
``V4L2_CID_RF_TUNER_BANDWIDTH (integer)``
Filter(s) on the tuner signal path are used to filter the signal according
to the receiving party's needs. The driver configures the filters to fulfill
the desired bandwidth requirement. Used when
V4L2_CID_RF_TUNER_BANDWIDTH_AUTO is not set. Unit is in Hz. The
range and step are driver-specific.
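For example, to request a specific channel bandwidth an application would first switch off the automatic mode and then set the bandwidth value. A sketch with an invented 8 MHz value and a placeholder SDR device path:

.. code-block:: c

    #include <fcntl.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    static int set_ctrl(int fd, unsigned int id, int value)
    {
            struct v4l2_ext_control ctrl = { .id = id, .value = value };
            struct v4l2_ext_controls ctrls = {
                    .which = V4L2_CTRL_WHICH_CUR_VAL,
                    .count = 1,
                    .controls = &ctrl,
            };

            return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }

    int main(void)
    {
            int fd = open("/dev/swradio0", O_RDWR); /* placeholder SDR node */

            if (fd < 0)
                    return 1;

            set_ctrl(fd, V4L2_CID_RF_TUNER_BANDWIDTH_AUTO, 0);   /* manual mode */
            set_ctrl(fd, V4L2_CID_RF_TUNER_BANDWIDTH, 8000000);  /* 8 MHz */
            return 0;
    }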
``V4L2_CID_RF_TUNER_LNA_GAIN_AUTO (boolean)``
Enables/disables LNA automatic gain control (AGC)
``V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO (boolean)``
Enables/disables mixer automatic gain control (AGC)
``V4L2_CID_RF_TUNER_IF_GAIN_AUTO (boolean)``
Enables/disables IF automatic gain control (AGC)
``V4L2_CID_RF_TUNER_RF_GAIN (integer)``
The RF amplifier is the very first amplifier on the receiver signal
path, just right after the antenna input. The difference between the
LNA gain and the RF gain in this document is that the LNA gain is
integrated in the tuner chip while the RF gain is a separate chip.
There may be both RF and LNA gain controls in the same device. The
range and step are driver-specific.
``V4L2_CID_RF_TUNER_LNA_GAIN (integer)``
LNA (low noise amplifier) gain is the first gain stage on the RF tuner
signal path. It is located very close to the tuner antenna input. Used
when ``V4L2_CID_RF_TUNER_LNA_GAIN_AUTO`` is not set. See
``V4L2_CID_RF_TUNER_RF_GAIN`` to understand how RF gain and LNA gain
differ from each other. The range and step are
driver-specific.
``V4L2_CID_RF_TUNER_MIXER_GAIN (integer)``
Mixer gain is the second gain stage on the RF tuner signal path. It is
located inside the mixer block, where the RF signal is down-converted by the
mixer. Used when ``V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO`` is not set.
The range and step are driver-specific.
``V4L2_CID_RF_TUNER_IF_GAIN (integer)``
IF gain is the last gain stage on the RF tuner signal path. It is
located at the output of the RF tuner. It controls the signal level of the
intermediate frequency output or baseband output. Used when
``V4L2_CID_RF_TUNER_IF_GAIN_AUTO`` is not set. The range and step
are driver-specific.
``V4L2_CID_RF_TUNER_PLL_LOCK (boolean)``
Is the synthesizer PLL locked? The RF tuner is receiving the given frequency
when this control is set. This is a read-only control.



@ -19,8 +19,8 @@ These formats are used for the :ref:`metadata` interface only.
.. toctree::
:maxdepth: 1
pixfmt-meta-intel-ipu3
pixfmt-meta-d4xx
pixfmt-meta-intel-ipu3
pixfmt-meta-uvc
pixfmt-meta-vsp1-hgo
pixfmt-meta-vsp1-hgt


@ -73,7 +73,7 @@ Compressed Formats
- 'MG2S'
- MPEG-2 parsed slice data, as extracted from the MPEG-2 bitstream.
This format is adapted for stateless video decoders that implement a
MPEG-2 pipeline (using the :ref:`codec` and :ref:`media-request-api`).
MPEG-2 pipeline (using the :ref:`mem2mem` and :ref:`media-request-api`).
Metadata associated with the frame to decode is required to be passed
through the ``V4L2_CID_MPEG_VIDEO_MPEG2_SLICE_PARAMS`` control and
quantization matrices can optionally be specified through the


@ -1,4 +1,27 @@
.. -*- coding: utf-8; mode: rst -*-
.. This file is dual-licensed: you can use it either under the terms
.. of the GPL 2.0 or the GFDL 1.1+ license, at your option. Note that this
.. dual licensing only applies to this file, and not this project as a
.. whole.
..
.. a) This file is free software; you can redistribute it and/or
.. modify it under the terms of the GNU General Public License version
.. 2.0 as published by the Free Software Foundation.
..
.. This file is distributed in the hope that it will be useful,
.. but WITHOUT ANY WARRANTY; without even the implied warranty of
.. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
.. GNU General Public License version 2.0 for more details.
..
.. Or, alternatively,
..
.. b) Permission is granted to copy, distribute and/or modify this
.. document under the terms of the GNU Free Documentation License,
.. Version 1.1 or any later version published by the Free Software
.. Foundation, with no Invariant Sections, no Front-Cover Texts
.. and no Back-Cover Texts. A copy of the license is included at
.. Documentation/media/uapi/fdl-appendix.rst.
..
.. TODO: replace it to GPL-2.0 OR GFDL-1.1-or-later WITH no-invariant-sections
.. _v4l2-meta-fmt-params:
.. _v4l2-meta-fmt-stat-3a:
@ -7,21 +30,22 @@
V4L2_META_FMT_IPU3_PARAMS ('ip3p'), V4L2_META_FMT_IPU3_3A ('ip3s')
******************************************************************
.. c:type:: ipu3_uapi_stats_3a
.. ipu3_uapi_stats_3a
3A statistics
=============
For IPU3 ImgU, the 3A statistics accelerators collect different statistics over
an input bayer frame. Those statistics, defined in data struct :c:type:`ipu3_uapi_stats_3a`,
are obtained from "ipu3-imgu 3a stat" metadata capture video node, which are then
passed to user space for statistics analysis using :c:type:`v4l2_meta_format` interface.
The IPU3 ImgU 3A statistics accelerators collect different statistics over
an input Bayer frame. Those statistics are obtained from the "ipu3-imgu [01] 3a
stat" metadata capture video nodes, using the :c:type:`v4l2_meta_format`
interface. They are formatted as described by the :c:type:`ipu3_uapi_stats_3a`
structure.
The statistics collected are AWB (Auto-white balance) RGBS (Red, Green, Blue and
Saturation measure) cells, AWB filter response, AF (Auto-focus) filter response,
and AE (Auto-exposure) histogram.
struct :c:type:`ipu3_uapi_4a_config` saves configurable parameters for all above.
The struct :c:type:`ipu3_uapi_4a_config` saves all configurable parameters.
.. code-block:: c
@ -37,105 +61,14 @@ struct :c:type:`ipu3_uapi_4a_config` saves configurable parameters for all above
struct ipu3_uapi_ff_status stats_3a_status;
};
.. c:type:: ipu3_uapi_params
.. ipu3_uapi_params
Pipeline parameters
===================
IPU3 pipeline has a number of image processing stages, each of which takes a
set of parameters as input. The major stages of pipelines are shown here:
Raw pixels -> Bayer Downscaling -> Optical Black Correction ->
Linearization -> Lens Shading Correction -> White Balance / Exposure /
Focus Apply -> Bayer Noise Reduction -> ANR -> Demosaicing -> Color
Correction Matrix -> Gamma correction -> Color Space Conversion ->
Chroma Down Scaling -> Chromatic Noise Reduction -> Total Color
Correction -> XNR3 -> TNR -> DDR
The table below presents a description of the above algorithms.
======================== =======================================================
Name Description
======================== =======================================================
Optical Black Correction Optical Black Correction block subtracts a pre-defined
value from the respective pixel values to obtain better
image quality.
Defined in :c:type:`ipu3_uapi_obgrid_param`.
Linearization This algo block uses linearization parameters to
address non-linearity sensor effects. The lookup table
is defined in
:c:type:`ipu3_uapi_isp_lin_vmem_params`.
SHD Lens shading correction is used to correct spatial
non-uniformity of the pixel response due to optical
lens shading. This is done by applying a different gain
for each pixel. The gain, black level etc are
configured in :c:type:`ipu3_uapi_shd_config_static`.
BNR Bayer noise reduction block removes image noise by
applying a bilateral filter.
See :c:type:`ipu3_uapi_bnr_static_config` for details.
ANR Advanced Noise Reduction is a block based algorithm
that performs noise reduction in the Bayer domain. The
convolution matrix etc can be found in
:c:type:`ipu3_uapi_anr_config`.
Demosaicing Demosaicing converts raw sensor data in Bayer format
into RGB (Red, Green, Blue) presentation. Then add
outputs of estimation of Y channel for following stream
processing by Firmware. The struct is defined as
:c:type:`ipu3_uapi_dm_config`. (TODO)
Color Correction Color Correction algo transforms sensor specific color
space to the standard "sRGB" color space. This is done
by applying 3x3 matrix defined in
:c:type:`ipu3_uapi_ccm_mat_config`.
Gamma correction Gamma correction :c:type:`ipu3_uapi_gamma_config` is a
basic non-linear tone mapping correction that is
applied per pixel for each pixel component.
CSC Color space conversion transforms each pixel from the
RGB primary presentation to YUV (Y: brightness,
UV: Luminance) presentation. This is done by applying
a 3x3 matrix defined in
:c:type:`ipu3_uapi_csc_mat_config`
CDS Chroma down sampling
After the CSC is performed, the Chroma Down Sampling
is applied for a UV plane down sampling by a factor
of 2 in each direction for YUV 4:2:0 using a 4x2
configurable filter :c:type:`ipu3_uapi_cds_params`.
CHNR Chroma noise reduction
This block processes only the chrominance pixels and
performs noise reduction by cleaning the high
frequency noise.
See struct :c:type:`ipu3_uapi_yuvp1_chnr_config`.
TCC Total color correction as defined in struct
:c:type:`ipu3_uapi_yuvp2_tcc_static_config`.
XNR3 eXtreme Noise Reduction V3 is the third revision of
noise reduction algorithm used to improve image
quality. This removes the low frequency noise in the
captured image. Two related structs are being defined,
:c:type:`ipu3_uapi_isp_xnr3_params` for ISP data memory
and :c:type:`ipu3_uapi_isp_xnr3_vmem_params` for vector
memory.
TNR Temporal Noise Reduction block compares successive
frames in time to remove anomalies / noise in pixel
values. :c:type:`ipu3_uapi_isp_tnr3_vmem_params` and
:c:type:`ipu3_uapi_isp_tnr3_params` are defined for ISP
vector and data memory respectively.
======================== =======================================================
A few stages of the pipeline will be executed by firmware running on the ISP
processor, while many others will use a set of fixed hardware blocks also
called accelerator cluster (ACC) to crunch pixel data and produce statistics.
ACC parameters of individual algorithms, as defined by
:c:type:`ipu3_uapi_acc_param`, can be chosen to be applied by the user
space through struct :c:type:`ipu3_uapi_flags` embedded in
:c:type:`ipu3_uapi_params` structure. For parameters that are configured as
not enabled by the user space, the corresponding structs are ignored by the
driver, in which case the existing configuration of the algorithm will be
preserved.
The pipeline parameters are passed to the "ipu3-imgu [01] parameters" metadata
output video nodes, using the :c:type:`v4l2_meta_format` interface. They are
formatted as described by the :c:type:`ipu3_uapi_params` structure.
Both 3A statistics and pipeline parameters described here are closely tied to
the underlying camera sub-system (CSS) APIs. They are usually consumed and
@ -143,13 +76,6 @@ produced by dedicated user space libraries that comprise the important tuning
tools, thus freeing the developers from being bothered with the low level
hardware and algorithm details.
It should be noted that IPU3 DMA operations require the addresses of all data
structures (that includes both input and output) to be aligned on 32 byte
boundaries.
The meta data :c:type:`ipu3_uapi_params` will be sent to "ipu3-imgu parameters"
video node in ``V4L2_BUF_TYPE_META_CAPTURE`` format.
.. code-block:: c
struct ipu3_uapi_params {


@ -190,6 +190,170 @@ component of each pixel in one 16 or 32 bit word.
- Cr\ :sub:`2`
- Cr\ :sub:`1`
- Cr\ :sub:`0`
-
* .. _V4L2-PIX-FMT-AYUV32:
- ``V4L2_PIX_FMT_AYUV32``
- 'AYUV'
- a\ :sub:`7`
- a\ :sub:`6`
- a\ :sub:`5`
- a\ :sub:`4`
- a\ :sub:`3`
- a\ :sub:`2`
- a\ :sub:`1`
- a\ :sub:`0`
- Y'\ :sub:`7`
- Y'\ :sub:`6`
- Y'\ :sub:`5`
- Y'\ :sub:`4`
- Y'\ :sub:`3`
- Y'\ :sub:`2`
- Y'\ :sub:`1`
- Y'\ :sub:`0`
- Cb\ :sub:`7`
- Cb\ :sub:`6`
- Cb\ :sub:`5`
- Cb\ :sub:`4`
- Cb\ :sub:`3`
- Cb\ :sub:`2`
- Cb\ :sub:`1`
- Cb\ :sub:`0`
- Cr\ :sub:`7`
- Cr\ :sub:`6`
- Cr\ :sub:`5`
- Cr\ :sub:`4`
- Cr\ :sub:`3`
- Cr\ :sub:`2`
- Cr\ :sub:`1`
- Cr\ :sub:`0`
-
* .. _V4L2-PIX-FMT-XYUV32:
- ``V4L2_PIX_FMT_XYUV32``
- 'XYUV'
-
-
-
-
-
-
-
-
- Y'\ :sub:`7`
- Y'\ :sub:`6`
- Y'\ :sub:`5`
- Y'\ :sub:`4`
- Y'\ :sub:`3`
- Y'\ :sub:`2`
- Y'\ :sub:`1`
- Y'\ :sub:`0`
- Cb\ :sub:`7`
- Cb\ :sub:`6`
- Cb\ :sub:`5`
- Cb\ :sub:`4`
- Cb\ :sub:`3`
- Cb\ :sub:`2`
- Cb\ :sub:`1`
- Cb\ :sub:`0`
- Cr\ :sub:`7`
- Cr\ :sub:`6`
- Cr\ :sub:`5`
- Cr\ :sub:`4`
- Cr\ :sub:`3`
- Cr\ :sub:`2`
- Cr\ :sub:`1`
- Cr\ :sub:`0`
-
* .. _V4L2-PIX-FMT-VUYA32:
- ``V4L2_PIX_FMT_VUYA32``
- 'VUYA'
- Cr\ :sub:`7`
- Cr\ :sub:`6`
- Cr\ :sub:`5`
- Cr\ :sub:`4`
- Cr\ :sub:`3`
- Cr\ :sub:`2`
- Cr\ :sub:`1`
- Cr\ :sub:`0`
- Cb\ :sub:`7`
- Cb\ :sub:`6`
- Cb\ :sub:`5`
- Cb\ :sub:`4`
- Cb\ :sub:`3`
- Cb\ :sub:`2`
- Cb\ :sub:`1`
- Cb\ :sub:`0`
- Y'\ :sub:`7`
- Y'\ :sub:`6`
- Y'\ :sub:`5`
- Y'\ :sub:`4`
- Y'\ :sub:`3`
- Y'\ :sub:`2`
- Y'\ :sub:`1`
- Y'\ :sub:`0`
- a\ :sub:`7`
- a\ :sub:`6`
- a\ :sub:`5`
- a\ :sub:`4`
- a\ :sub:`3`
- a\ :sub:`2`
- a\ :sub:`1`
- a\ :sub:`0`
-
* .. _V4L2-PIX-FMT-VUYX32:
- ``V4L2_PIX_FMT_VUYX32``
- 'VUYX'
- Cr\ :sub:`7`
- Cr\ :sub:`6`
- Cr\ :sub:`5`
- Cr\ :sub:`4`
- Cr\ :sub:`3`
- Cr\ :sub:`2`
- Cr\ :sub:`1`
- Cr\ :sub:`0`
- Cb\ :sub:`7`
- Cb\ :sub:`6`
- Cb\ :sub:`5`
- Cb\ :sub:`4`
- Cb\ :sub:`3`
- Cb\ :sub:`2`
- Cb\ :sub:`1`
- Cb\ :sub:`0`
- Y'\ :sub:`7`
- Y'\ :sub:`6`
- Y'\ :sub:`5`
- Y'\ :sub:`4`
- Y'\ :sub:`3`
- Y'\ :sub:`2`
- Y'\ :sub:`1`
- Y'\ :sub:`0`
-
-
-
-
-
-
-
-
.. raw:: latex
@ -202,4 +366,8 @@ component of each pixel in one 16 or 32 bit word.
#) The value of a = alpha bits is undefined when reading from the driver,
ignored when writing to the driver, except when alpha blending has
been negotiated for a :ref:`Video Overlay <overlay>` or
:ref:`Video Output Overlay <osd>`.
:ref:`Video Output Overlay <osd>` for the formats Y444, YUV555 and
YUV4. However, for formats AYUV32 and VUYA32, the alpha component is
expected to contain a meaningful value that can be used by drivers
and applications. And, the formats XYUV32 and VUYX32 contain undefined
alpha values that must be ignored by all applications and drivers.


@ -75,15 +75,15 @@ Media Bus Pixel Codes
---------------------
The media bus pixel codes describe image formats as flowing over
physical busses (both between separate physical components and inside
physical buses (both between separate physical components and inside
SoC devices). This should not be confused with the V4L2 pixel formats
that describe, using four character codes, image formats as stored in
memory.
While there is a relationship between image formats on busses and image
While there is a relationship between image formats on buses and image
formats in memory (a raw Bayer image won't be magically converted to
JPEG just by storing it to memory), there is no one-to-one
correspondance between them.
correspondence between them.
Packed RGB Formats


@ -213,7 +213,7 @@ union holding separate parameters for input and output devices.
.. _parm-caps:
.. flat-table:: Streaming Parameters Capabilites
.. flat-table:: Streaming Parameters Capabilities
:header-rows: 0
:stub-columns: 0
:widths: 3 1 4


@ -43,10 +43,7 @@ Applications can optionally call the :ref:`VIDIOC_PREPARE_BUF` ioctl to
pass ownership of the buffer to the driver before actually enqueuing it,
using the :ref:`VIDIOC_QBUF <VIDIOC_QBUF>` ioctl, and to prepare it for future I/O. Such
preparations may include cache invalidation or cleaning. Performing them
in advance saves time during the actual I/O. In case such cache
operations are not required, the application can use one of
``V4L2_BUF_FLAG_NO_CACHE_INVALIDATE`` and
``V4L2_BUF_FLAG_NO_CACHE_CLEAN`` flags to skip the respective step.
in advance saves time during the actual I/O.
The struct :c:type:`v4l2_buffer` structure is specified in
:ref:`buffer`.


@ -123,7 +123,7 @@ then ``EINVAL`` will be returned.
:ref:`VIDIOC_STREAMOFF <VIDIOC_STREAMON>` or calling :ref:`VIDIOC_REQBUFS`
the check for this will be reset.
For :ref:`memory-to-memory devices <codec>` you can specify the
For :ref:`memory-to-memory devices <mem2mem>` you can specify the
``request_fd`` only for output buffers, not for capture buffers. Attempting
to specify this for a capture buffer will result in an ``EACCES`` error.


@ -85,7 +85,7 @@ same card listens there is much higher...
For problems with sound: There are a lot of different systems used
for TV sound all over the world. And there are also different chips
which decode the audio signal. Reports about sound problems ("stereo
does'nt work") are pretty useless unless you include some details
doesn't work") are pretty useless unless you include some details
about your hardware and the TV sound scheme used in your country (or
at least the country you are living in).
@ -771,7 +771,7 @@ Identifying:
- Lifeview.com.tw states (Feb. 2002):
"The FlyVideo2000 and FlyVideo2000s product name have renamed to FlyVideo98."
Their Bt8x8 cards are listed as discontinued.
- Flyvideo 2000S was probably sold as Flyvideo 3000 in some contries(Europe?).
- Flyvideo 2000S was probably sold as Flyvideo 3000 in some countries(Europe?).
The new Flyvideo 2000/3000 are SAA7130/SAA7134 based.
"Flyvideo II" had been the name for the 848 cards, nowadays (in Germany)


@ -24,12 +24,12 @@ memory. Various dedicated DMA channels exist for both video capture and
display paths. During transfer, the IDMAC is also capable of vertical
image flip, 8x8 block transfer (see IRT description), pixel component
re-ordering (for example UYVY to YUYV) within the same colorspace, and
even packed <--> planar conversion. It can also perform a simple
de-interlacing by interleaving even and odd lines during transfer
packed <--> planar conversion. The IDMAC can also perform a simple
de-interlacing by interweaving even and odd lines during transfer
(without motion compensation which requires the VDIC).
The CSI is the backend capture unit that interfaces directly with
camera sensors over Parallel, BT.656/1120, and MIPI CSI-2 busses.
camera sensors over Parallel, BT.656/1120, and MIPI CSI-2 buses.
The IC handles color-space conversion, resizing (downscaling and
upscaling), horizontal flip, and 90/270 degree rotation operations.
@ -175,15 +175,21 @@ via the SMFC and an IDMAC channel, bypassing IC pre-processing. This
source pad is routed to a capture device node, with a node name of the
format "ipuX_csiY capture".
Note that since the IDMAC source pad makes use of an IDMAC channel, it
can do pixel reordering within the same colorspace. For example, the
sink pad can take UYVY2X8, but the IDMAC source pad can output YUYV2X8.
If the sink pad is receiving YUV, the output at the capture device can
also be converted to a planar YUV format such as YUV420.
Note that since the IDMAC source pad makes use of an IDMAC channel,
pixel reordering within the same colorspace can be carried out by the
IDMAC channel. For example, if the CSI sink pad is receiving in UYVY
order, the capture device linked to the IDMAC source pad can capture
in YUYV order. Also, if the CSI sink pad is receiving a packed YUV
format, the capture device can capture a planar YUV format such as
YUV420.
It will also perform simple de-interlace without motion compensation,
which is activated if the sink pad's field type is an interlaced type,
and the IDMAC source pad field type is set to none.
The IDMAC channel at the IDMAC source pad also supports simple
interweave without motion compensation, which is activated if the source
pad's field type is sequential top-bottom or bottom-top, and the
requested capture interface field type is set to interlaced (t-b, b-t,
or unqualified interlaced). The capture interface will enforce the same
field order as the source pad field order (interlaced-bt if source pad
is seq-bt, interlaced-tb if source pad is seq-tb).
This subdev can generate the following event when enabling the second
IDMAC source pad:
@ -201,7 +207,7 @@ The CSI supports cropping the incoming raw sensor frames. This is
implemented in the ipuX_csiY entities at the sink pad, using the
crop selection subdev API.
The CSI also supports fixed divide-by-two downscaling indepently in
The CSI also supports fixed divide-by-two downscaling independently in
width and height. This is implemented in the ipuX_csiY entities at
the sink pad, using the compose selection subdev API.
@ -325,14 +331,14 @@ ipuX_vdic
The VDIC carries out motion compensated de-interlacing, with three
motion compensation modes: low, medium, and high motion. The mode is
specified with the menu control V4L2_CID_DEINTERLACING_MODE. It has
two sink pads and a single source pad.
specified with the menu control V4L2_CID_DEINTERLACING_MODE. The VDIC
has two sink pads and a single source pad.
The direct sink pad receives from an ipuX_csiY direct pad. With this
link the VDIC can only operate in high motion mode.
When the IDMAC sink pad is activated, it receives from an output
or mem2mem device node. With this pipeline, it can also operate
or mem2mem device node. With this pipeline, the VDIC can also operate
in low and medium modes, because these modes require receiving
frames from memory buffers. Note that an output or mem2mem device
is not implemented yet, so this sink pad currently has no links.
@ -345,8 +351,8 @@ ipuX_ic_prp
This is the IC pre-processing entity. It acts as a router, routing
data from its sink pad to one or both of its source pads.
It has a single sink pad. The sink pad can receive from the ipuX_csiY
direct pad, or from ipuX_vdic.
This entity has a single sink pad. The sink pad can receive from the
ipuX_csiY direct pad, or from ipuX_vdic.
This entity has two source pads. One source pad routes to the
pre-process encode task entity (ipuX_ic_prpenc), the other to the
@ -369,8 +375,8 @@ color-space conversion, resizing (downscaling and upscaling),
horizontal and vertical flip, and 90/270 degree rotation. Flip
and rotation are provided via standard V4L2 controls.
Like the ipuX_csiY IDMAC source, it can also perform simple de-interlace
without motion compensation, and pixel reordering.
Like the ipuX_csiY IDMAC source, this entity also supports simple
de-interlace without motion compensation, and pixel reordering.
ipuX_ic_prpvf
-------------
@ -380,18 +386,18 @@ pad from ipuX_ic_prp, and a single source pad. The source pad is routed
to a capture device node, with a node name of the format
"ipuX_ic_prpvf capture".
It is identical in operation to ipuX_ic_prpenc, with the same resizing
and CSC operations and flip/rotation controls. It will receive and
process de-interlaced frames from the ipuX_vdic if ipuX_ic_prp is
This entity is identical in operation to ipuX_ic_prpenc, with the same
resizing and CSC operations and flip/rotation controls. It will receive
and process de-interlaced frames from the ipuX_vdic if ipuX_ic_prp is
receiving from ipuX_vdic.
Like the ipuX_csiY IDMAC source, it can perform simple de-interlace
without motion compensation. However, note that if the ipuX_vdic is
included in the pipeline (ipuX_ic_prp is receiving from ipuX_vdic),
it's not possible to use simple de-interlace in ipuX_ic_prpvf, since
the ipuX_vdic has already carried out de-interlacing (with motion
compensation) and therefore the field type output from ipuX_ic_prp can
only be none.
Like the ipuX_csiY IDMAC source, this entity supports simple
interweaving without motion compensation. However, note that if the
ipuX_vdic is included in the pipeline (ipuX_ic_prp is receiving from
ipuX_vdic), it's not possible to use interweave in ipuX_ic_prpvf,
since the ipuX_vdic has already carried out de-interlacing (with
motion compensation) and therefore the field type output from
ipuX_vdic can only be none (progressive).
Capture Pipelines
-----------------
@ -516,10 +522,33 @@ On the SabreAuto, an on-board ADV7180 SD decoder is connected to the
parallel bus input on the internal video mux to IPU1 CSI0.
The following example configures a pipeline to capture from the ADV7180
video decoder, assuming NTSC 720x480 input signals, with Motion
Compensated de-interlacing. Pad field types assume the adv7180 outputs
"interlaced". $outputfmt can be any format supported by the ipu1_ic_prpvf
entity at its output pad:
video decoder, assuming NTSC 720x480 input signals, using simple
interweave (unconverted and without motion compensation). The adv7180
must output sequential or alternating fields (field type 'seq-bt' for
NTSC, or 'alternate'):
.. code-block:: none
# Setup links
media-ctl -l "'adv7180 3-0021':0 -> 'ipu1_csi0_mux':1[1]"
media-ctl -l "'ipu1_csi0_mux':2 -> 'ipu1_csi0':0[1]"
media-ctl -l "'ipu1_csi0':2 -> 'ipu1_csi0 capture':0[1]"
# Configure pads
media-ctl -V "'adv7180 3-0021':0 [fmt:UYVY2X8/720x480 field:seq-bt]"
media-ctl -V "'ipu1_csi0_mux':2 [fmt:UYVY2X8/720x480]"
media-ctl -V "'ipu1_csi0':2 [fmt:AYUV32/720x480]"
# Configure "ipu1_csi0 capture" interface (assumed at /dev/video4)
v4l2-ctl -d4 --set-fmt-video=field=interlaced_bt
Streaming can then begin on /dev/video4. The v4l2-ctl tool can also be
used to select any supported YUV pixelformat on /dev/video4.
This example configures a pipeline to capture from the ADV7180
video decoder, assuming PAL 720x576 input signals, with Motion
Compensated de-interlacing. The adv7180 must output sequential or
alternating fields (field type 'seq-tb' for PAL, or 'alternate').
$outputfmt can be any format supported by the ipu1_ic_prpvf entity
at its output pad:
.. code-block:: none
@ -531,11 +560,11 @@ entity at its output pad:
media-ctl -l "'ipu1_ic_prp':2 -> 'ipu1_ic_prpvf':0[1]"
media-ctl -l "'ipu1_ic_prpvf':1 -> 'ipu1_ic_prpvf capture':0[1]"
# Configure pads
media-ctl -V "'adv7180 3-0021':0 [fmt:UYVY2X8/720x480]"
media-ctl -V "'ipu1_csi0_mux':2 [fmt:UYVY2X8/720x480 field:interlaced]"
media-ctl -V "'ipu1_csi0':1 [fmt:AYUV32/720x480 field:interlaced]"
media-ctl -V "'ipu1_vdic':2 [fmt:AYUV32/720x480 field:none]"
media-ctl -V "'ipu1_ic_prp':2 [fmt:AYUV32/720x480 field:none]"
media-ctl -V "'adv7180 3-0021':0 [fmt:UYVY2X8/720x576 field:seq-tb]"
media-ctl -V "'ipu1_csi0_mux':2 [fmt:UYVY2X8/720x576]"
media-ctl -V "'ipu1_csi0':1 [fmt:AYUV32/720x576]"
media-ctl -V "'ipu1_vdic':2 [fmt:AYUV32/720x576 field:none]"
media-ctl -V "'ipu1_ic_prp':2 [fmt:AYUV32/720x576 field:none]"
media-ctl -V "'ipu1_ic_prpvf':1 [fmt:$outputfmt field:none]"
Streaming can then begin on the capture device node at


@ -0,0 +1,162 @@
.. SPDX-License-Identifier: GPL-2.0
i.MX7 Video Capture Driver
==========================
Introduction
------------
The i.MX7, contrary to the i.MX5/6 family, does not contain an Image Processing
Unit (IPU); because of that, the capabilities to perform operations or
manipulations on the captured frames are less feature rich.
For image capture the i.MX7 has three units:
- CMOS Sensor Interface (CSI)
- Video Multiplexer
- MIPI CSI-2 Receiver
.. code-block:: none
MIPI Camera Input ---> MIPI CSI-2 --- > |\
| \
| \
| M |
| U | ------> CSI ---> Capture
| X |
| /
Parallel Camera Input ----------------> | /
|/
For additional information, please refer to the latest versions of the i.MX7
reference manual [#f1]_.
Entities
--------
imx7-mipi-csi2
--------------
This is the MIPI CSI-2 receiver entity. It has one sink pad to receive the pixel
data from a MIPI CSI-2 camera sensor. It has one source pad, corresponding to
virtual channel 0. This module is compliant with a previous version of the Samsung
D-PHY, and supports two D-PHY Rx data lanes.
csi_mux
-------
This is the video multiplexer. It has two sink pads, used to select either a
camera sensor with a parallel interface or MIPI CSI-2 virtual channel 0, and a
single source pad that routes to the CSI.
csi
---
The CSI enables the chip to connect directly to an external CMOS image sensor.
The CSI can interface directly with parallel and MIPI CSI-2 buses. It has a
256 x 64 FIFO to store received image pixel data and embedded DMA controllers
that transfer data from the FIFO over the AHB bus.
This entity has one sink pad that receives from the csi_mux entity and a single
source pad that routes video frames directly to memory buffers. This pad is
routed to a capture device node.
Usage Notes
-----------
To aid in configuration and for backward compatibility with V4L2 applications
that access controls only from video device nodes, the capture device interfaces
inherit controls from the active entities in the current pipeline, so controls
can be accessed either directly from the subdev or from the active capture
device interface. For example, the sensor controls are available either from the
sensor subdevs or from the active capture device.
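The controls that a capture device node inherits can be inspected and set with
v4l2-ctl. The sketch below is only an example: it assumes the capture node is
/dev/video0 (as in the topology shown later) and uses test_pattern as an
illustrative control name; the controls actually available depend on the
sensor driver:
.. code-block:: none
# List the controls inherited by the capture device node
v4l2-ctl -d /dev/video0 --list-ctrls
# Set an inherited sensor control (test_pattern is only an example)
v4l2-ctl -d /dev/video0 --set-ctrl=test_pattern=1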
Warp7 with OV2680
-----------------
On this platform an OV2680 MIPI CSI-2 module is connected to the internal MIPI
CSI-2 receiver. The following example configures a video capture pipeline with
an output resolution of 800x600 and the 10-bit BGGR Bayer format:
.. code-block:: none
# Setup links
media-ctl -l "'ov2680 1-0036':0 -> 'imx7-mipi-csis.0':0[1]"
media-ctl -l "'imx7-mipi-csis.0':1 -> 'csi_mux':1[1]"
media-ctl -l "'csi_mux':2 -> 'csi':0[1]"
media-ctl -l "'csi':1 -> 'csi capture':0[1]"
# Configure pads for pipeline
media-ctl -V "'ov2680 1-0036':0 [fmt:SBGGR10_1X10/800x600 field:none]"
media-ctl -V "'csi_mux':1 [fmt:SBGGR10_1X10/800x600 field:none]"
media-ctl -V "'csi_mux':2 [fmt:SBGGR10_1X10/800x600 field:none]"
media-ctl -V "'imx7-mipi-csis.0':0 [fmt:SBGGR10_1X10/800x600 field:none]"
media-ctl -V "'csi':0 [fmt:SBGGR10_1X10/800x600 field:none]"
After this, streaming can start. The v4l2-ctl tool can be used to select any of
the resolutions supported by the sensor. The resulting media controller topology
on this platform looks as follows:
.. code-block:: none
root@imx7s-warp:~# media-ctl -p
Media controller API version 4.17.0
Media device information
------------------------
driver imx-media
model imx-media
serial
bus info
hw revision 0x0
driver version 4.17.0
Device topology
- entity 1: csi (2 pads, 2 links)
type V4L2 subdev subtype Unknown flags 0
device node name /dev/v4l-subdev0
pad0: Sink
[fmt:SBGGR10_1X10/800x600 field:none]
<- "csi_mux":2 [ENABLED]
pad1: Source
[fmt:SBGGR10_1X10/800x600 field:none]
-> "csi capture":0 [ENABLED]
- entity 4: csi capture (1 pad, 1 link)
type Node subtype V4L flags 0
device node name /dev/video0
pad0: Sink
<- "csi":1 [ENABLED]
- entity 10: csi_mux (3 pads, 2 links)
type V4L2 subdev subtype Unknown flags 0
device node name /dev/v4l-subdev1
pad0: Sink
[fmt:unknown/0x0]
pad1: Sink
[fmt:unknown/800x600 field:none]
<- "imx7-mipi-csis.0":1 [ENABLED]
pad2: Source
[fmt:unknown/800x600 field:none]
-> "csi":0 [ENABLED]
- entity 14: imx7-mipi-csis.0 (2 pads, 2 links)
type V4L2 subdev subtype Unknown flags 0
device node name /dev/v4l-subdev2
pad0: Sink
[fmt:SBGGR10_1X10/800x600 field:none]
<- "ov2680 1-0036":0 [ENABLED]
pad1: Source
[fmt:SBGGR10_1X10/800x600 field:none]
-> "csi_mux":1 [ENABLED]
- entity 17: ov2680 1-0036 (1 pad, 1 link)
type V4L2 subdev subtype Sensor flags 0
device node name /dev/v4l-subdev3
pad0: Source
[fmt:SBGGR10_1X10/800x600 field:none]
-> "imx7-mipi-csis.0":0 [ENABLED]
References
----------
.. [#f1] https://www.nxp.com/docs/en/reference-manual/IMX7SRM.pdf


@ -44,6 +44,7 @@ For more details see the file COPYING in the source distribution of Linux.
davinci-vpbe
fimc
imx
imx7
ipu3
ivtv
max2175


@ -1,3 +1,5 @@
.. SPDX-License-Identifier: GPL-2.0
.. include:: <isonum.txt>
===============================================================
@ -355,10 +357,157 @@ https://chromium.googlesource.com/chromiumos/platform/arc-camera/+/master/
The source can be located under hal/intel directory.
Overview of IPU3 pipeline
=========================
The IPU3 pipeline has a number of image processing stages, each of which takes
a set of parameters as input. The major stages of the pipeline are shown here:
.. kernel-render:: DOT
:alt: IPU3 ImgU Pipeline
:caption: IPU3 ImgU Pipeline Diagram
digraph "IPU3 ImgU" {
node [shape=box]
splines="ortho"
rankdir="LR"
a [label="Raw pixels"]
b [label="Bayer Downscaling"]
c [label="Optical Black Correction"]
d [label="Linearization"]
e [label="Lens Shading Correction"]
f [label="White Balance / Exposure / Focus Apply"]
g [label="Bayer Noise Reduction"]
h [label="ANR"]
i [label="Demosaicing"]
j [label="Color Correction Matrix"]
k [label="Gamma correction"]
l [label="Color Space Conversion"]
m [label="Chroma Down Scaling"]
n [label="Chromatic Noise Reduction"]
o [label="Total Color Correction"]
p [label="XNR3"]
q [label="TNR"]
r [label="DDR"]
{ rank=same; a -> b -> c -> d -> e -> f }
{ rank=same; g -> h -> i -> j -> k -> l }
{ rank=same; m -> n -> o -> p -> q -> r }
a -> g -> m [style=invis, weight=10]
f -> g
l -> m
}
The table below presents a description of the above algorithms.
======================== =======================================================
Name Description
======================== =======================================================
Optical Black Correction Optical Black Correction block subtracts a pre-defined
value from the respective pixel values to obtain better
image quality.
Defined in :c:type:`ipu3_uapi_obgrid_param`.
Linearization This block uses linearization parameters to address
non-linear sensor effects. The lookup table is defined
in :c:type:`ipu3_uapi_isp_lin_vmem_params`.
SHD Lens shading correction is used to correct spatial
non-uniformity of the pixel response due to optical
lens shading. This is done by applying a different gain
for each pixel. The gain, black level, etc. are
configured in :c:type:`ipu3_uapi_shd_config_static`.
BNR Bayer noise reduction block removes image noise by
applying a bilateral filter.
See :c:type:`ipu3_uapi_bnr_static_config` for details.
ANR Advanced Noise Reduction is a block-based algorithm
that performs noise reduction in the Bayer domain. The
convolution matrix etc can be found in
:c:type:`ipu3_uapi_anr_config`.
DM Demosaicing converts raw sensor data in Bayer format
into an RGB (Red, Green, Blue) representation. It also
outputs an estimate of the Y channel for the subsequent
processing stages performed by the firmware. The struct
is defined as :c:type:`ipu3_uapi_dm_config`.
Color Correction The Color Correction algorithm transforms the sensor
specific color space to the standard "sRGB" color space.
This is done by applying a 3x3 matrix defined in
:c:type:`ipu3_uapi_ccm_mat_config`.
Gamma correction Gamma correction :c:type:`ipu3_uapi_gamma_config` is a
basic non-linear tone mapping correction that is
applied per pixel for each pixel component.
CSC Color space conversion transforms each pixel from the
RGB primary representation to the YUV (Y: luminance,
UV: chrominance) representation. This is done by applying
a 3x3 matrix defined in
:c:type:`ipu3_uapi_csc_mat_config`.
CDS Chroma down sampling
After the CSC is performed, chroma down sampling
reduces the UV plane by a factor of 2 in each
direction to produce YUV 4:2:0, using a 4x2
configurable filter :c:type:`ipu3_uapi_cds_params`.
CHNR Chroma noise reduction
This block processes only the chrominance pixels and
performs noise reduction by cleaning the high
frequency noise.
See struct :c:type:`ipu3_uapi_yuvp1_chnr_config`.
TCC Total color correction as defined in struct
:c:type:`ipu3_uapi_yuvp2_tcc_static_config`.
XNR3 eXtreme Noise Reduction V3 is the third revision of
noise reduction algorithm used to improve image
quality. This removes the low frequency noise in the
captured image. Two related structs are defined,
:c:type:`ipu3_uapi_isp_xnr3_params` for ISP data memory
and :c:type:`ipu3_uapi_isp_xnr3_vmem_params` for vector
memory.
TNR Temporal Noise Reduction block compares successive
frames in time to remove anomalies / noise in pixel
values. :c:type:`ipu3_uapi_isp_tnr3_vmem_params` and
:c:type:`ipu3_uapi_isp_tnr3_params` are defined for ISP
vector and data memory respectively.
======================== =======================================================
Other frequently encountered acronyms not listed in the above table:
ACC
Accelerator cluster
AWB_FR
Auto white balance filter response statistics
BDS
Bayer downscaler parameters
CCM
Color correction matrix coefficients
IEFd
Image enhancement filter directed
Obgrid
Optical black level compensation
OSYS
Output system configuration
ROI
Region of interest
YDS
Y down sampling
YTM
Y-tone mapping
A few stages of the pipeline will be executed by firmware running on the ISP
processor, while many others will use a set of fixed hardware blocks also
called accelerator cluster (ACC) to crunch pixel data and produce statistics.
ACC parameters for the individual algorithms, as defined by
:c:type:`ipu3_uapi_acc_param`, can be selectively applied by user space
through the struct :c:type:`ipu3_uapi_flags` embedded in the
:c:type:`ipu3_uapi_params` structure. For parameters that user space leaves
disabled, the corresponding structs are ignored by the driver and the
existing configuration of that algorithm is preserved.
References
==========
.. [#f5] include/uapi/linux/intel-ipu3.h
.. [#f5] drivers/staging/media/ipu3/include/intel-ipu3.h
.. [#f1] https://github.com/intel/nvt


@ -18,7 +18,7 @@ Global video workflow
---------------------
a) QCI stopped
Initialy, the QCI interface is stopped.
Initially, the QCI interface is stopped.
When a buffer is queued (pxa_videobuf_ops->buf_queue), the QCI starts.
b) QCI started


@ -123,7 +123,7 @@ The considerations to split the driver in this particular way are as follows:
- representing CSIPHY and CSID modules by a separate sub-device for each module
allows to model the hardware links between these modules;
- representing VFE by a separate sub-devices for each input interface allows
to use the input interfaces concurently and independently as this is
to use the input interfaces concurrently and independently as this is
supported by the hardware;
- representing ISPIF by a number of sub-devices equal to the number of CSID
sub-devices allows to create linear media controller pipelines when using two


@ -2140,8 +2140,9 @@ F: drivers/media/platform/s5p-cec/
F: Documentation/devicetree/bindings/media/s5p-cec.txt
ARM/SAMSUNG S5P SERIES JPEG CODEC SUPPORT
M: Andrzej Pietrasiewicz <andrzej.p@samsung.com>
M: Andrzej Pietrasiewicz <andrzejtp2010@gmail.com>
M: Jacek Anaszewski <jacek.anaszewski@gmail.com>
M: Sylwester Nawrocki <s.nawrocki@samsung.com>
L: linux-arm-kernel@lists.infradead.org
L: linux-media@vger.kernel.org
S: Maintained
@ -7852,7 +7853,6 @@ M: Yong Zhi <yong.zhi@intel.com>
M: Sakari Ailus <sakari.ailus@linux.intel.com>
M: Bingbu Cao <bingbu.cao@intel.com>
R: Tian Shu Qiu <tian.shu.qiu@intel.com>
R: Jian Xu Zheng <jian.xu.zheng@intel.com>
L: linux-media@vger.kernel.org
S: Maintained
F: drivers/media/pci/intel/ipu3/
@ -9526,6 +9526,17 @@ T: git git://linuxtv.org/media_tree.git
S: Maintained
F: drivers/media/platform/imx-pxp.[ch]
MEDIA DRIVERS FOR FREESCALE IMX7
M: Rui Miguel Silva <rmfrfs@gmail.com>
L: linux-media@vger.kernel.org
T: git git://linuxtv.org/media_tree.git
S: Maintained
F: Documentation/devicetree/bindings/media/imx7-csi.txt
F: Documentation/devicetree/bindings/media/imx7-mipi-csi2.txt
F: Documentation/media/v4l-drivers/imx7.rst
F: drivers/staging/media/imx/imx7-media-csi.c
F: drivers/staging/media/imx/imx7-mipi-csis.c
MEDIA DRIVERS FOR HELENE
M: Abylay Ospan <aospan@netup.ru>
L: linux-media@vger.kernel.org
@ -11441,6 +11452,19 @@ S: Maintained
F: drivers/media/i2c/ov7740.c
F: Documentation/devicetree/bindings/media/i2c/ov7740.txt
OMNIVISION OV9640 SENSOR DRIVER
M: Petr Cvek <petrcvekcz@gmail.com>
L: linux-media@vger.kernel.org
S: Maintained
F: drivers/media/i2c/ov9640.*
OMNIVISION OV8856 SENSOR DRIVER
M: Ben Kao <ben.kao@intel.com>
L: linux-media@vger.kernel.org
T: git git://linuxtv.org/media_tree.git
S: Maintained
F: drivers/media/i2c/ov8856.c
OMNIVISION OV9650 SENSOR DRIVER
M: Sakari Ailus <sakari.ailus@linux.intel.com>
R: Akinobu Mita <akinobu.mita@gmail.com>
@ -12585,6 +12609,7 @@ L: linux-media@vger.kernel.org
T: git git://linuxtv.org/media_tree.git
S: Odd Fixes
F: drivers/media/usb/pwc/*
F: include/trace/events/pwc.h
PWM FAN DRIVER
M: Kamil Debski <kamil@wypas.org>
@ -16764,6 +16789,11 @@ M: David Härdeman <david@hardeman.nu>
S: Maintained
F: drivers/media/rc/winbond-cir.c
RCMM REMOTE CONTROLS DECODER
M: Patrick Lerda <patrick9876@free.fr>
S: Maintained
F: drivers/media/rc/ir-rcmm-decoder.c
WINSYSTEMS EBC-C384 WATCHDOG DRIVER
M: William Breathitt Gray <vilhelm.gray@gmail.com>
L: linux-watchdog@vger.kernel.org


@ -5,6 +5,7 @@
* Copyright (C) 2008 Magnus Damm
*/
#include <linux/clkdev.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/platform_device.h>
#include <linux/interrupt.h>


@ -277,9 +277,10 @@ void ipu_cpmem_set_uv_offset(struct ipuv3_channel *ch, u32 u_off, u32 v_off)
}
EXPORT_SYMBOL_GPL(ipu_cpmem_set_uv_offset);
void ipu_cpmem_interlaced_scan(struct ipuv3_channel *ch, int stride)
void ipu_cpmem_interlaced_scan(struct ipuv3_channel *ch, int stride,
u32 pixelformat)
{
u32 ilo, sly;
u32 ilo, sly, sluv;
if (stride < 0) {
stride = -stride;
@ -290,9 +291,30 @@ void ipu_cpmem_interlaced_scan(struct ipuv3_channel *ch, int stride)
sly = (stride * 2) - 1;
switch (pixelformat) {
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
sluv = stride / 2 - 1;
break;
case V4L2_PIX_FMT_NV12:
sluv = stride - 1;
break;
case V4L2_PIX_FMT_YUV422P:
sluv = stride - 1;
break;
case V4L2_PIX_FMT_NV16:
sluv = stride * 2 - 1;
break;
default:
sluv = 0;
break;
}
ipu_ch_param_write_field(ch, IPU_FIELD_SO, 1);
ipu_ch_param_write_field(ch, IPU_FIELD_ILO, ilo);
ipu_ch_param_write_field(ch, IPU_FIELD_SLY, sly);
if (sluv)
ipu_ch_param_write_field(ch, IPU_FIELD_SLUV, sluv);
};
EXPORT_SYMBOL_GPL(ipu_cpmem_interlaced_scan);


@ -325,12 +325,21 @@ static int mbus_code_to_bus_cfg(struct ipu_csi_bus_config *cfg, u32 mbus_code,
return 0;
}
/* translate alternate field mode based on given standard */
static inline enum v4l2_field
ipu_csi_translate_field(enum v4l2_field field, v4l2_std_id std)
{
return (field != V4L2_FIELD_ALTERNATE) ? field :
((std & V4L2_STD_525_60) ?
V4L2_FIELD_SEQ_BT : V4L2_FIELD_SEQ_TB);
}
/*
* Fill a CSI bus config struct from mbus_config and mbus_framefmt.
*/
static int fill_csi_bus_cfg(struct ipu_csi_bus_config *csicfg,
struct v4l2_mbus_config *mbus_cfg,
struct v4l2_mbus_framefmt *mbus_fmt)
const struct v4l2_mbus_config *mbus_cfg,
const struct v4l2_mbus_framefmt *mbus_fmt)
{
int ret;
@ -374,22 +383,76 @@ static int fill_csi_bus_cfg(struct ipu_csi_bus_config *csicfg,
return 0;
}
static int
ipu_csi_set_bt_interlaced_codes(struct ipu_csi *csi,
const struct v4l2_mbus_framefmt *infmt,
const struct v4l2_mbus_framefmt *outfmt,
v4l2_std_id std)
{
enum v4l2_field infield, outfield;
bool swap_fields;
/* get translated field type of input and output */
infield = ipu_csi_translate_field(infmt->field, std);
outfield = ipu_csi_translate_field(outfmt->field, std);
/*
* Write the H-V-F codes the CSI will match against the
* incoming data for start/end of active and blanking
* field intervals. If input and output field types are
* sequential but not the same (one is SEQ_BT and the other
* is SEQ_TB), swap the F-bit so that the CSI will capture
* field 1 lines before field 0 lines.
*/
swap_fields = (V4L2_FIELD_IS_SEQUENTIAL(infield) &&
V4L2_FIELD_IS_SEQUENTIAL(outfield) &&
infield != outfield);
if (!swap_fields) {
/*
* Field0BlankEnd = 110, Field0BlankStart = 010
* Field0ActiveEnd = 100, Field0ActiveStart = 000
* Field1BlankEnd = 111, Field1BlankStart = 011
* Field1ActiveEnd = 101, Field1ActiveStart = 001
*/
ipu_csi_write(csi, 0x40596 | CSI_CCIR_ERR_DET_EN,
CSI_CCIR_CODE_1);
ipu_csi_write(csi, 0xD07DF, CSI_CCIR_CODE_2);
} else {
dev_dbg(csi->ipu->dev, "capture field swap\n");
/* same as above but with F-bit inverted */
ipu_csi_write(csi, 0xD07DF | CSI_CCIR_ERR_DET_EN,
CSI_CCIR_CODE_1);
ipu_csi_write(csi, 0x40596, CSI_CCIR_CODE_2);
}
ipu_csi_write(csi, 0xFF0000, CSI_CCIR_CODE_3);
return 0;
}
int ipu_csi_init_interface(struct ipu_csi *csi,
struct v4l2_mbus_config *mbus_cfg,
struct v4l2_mbus_framefmt *mbus_fmt)
const struct v4l2_mbus_config *mbus_cfg,
const struct v4l2_mbus_framefmt *infmt,
const struct v4l2_mbus_framefmt *outfmt)
{
struct ipu_csi_bus_config cfg;
unsigned long flags;
u32 width, height, data = 0;
v4l2_std_id std;
int ret;
ret = fill_csi_bus_cfg(&cfg, mbus_cfg, mbus_fmt);
ret = fill_csi_bus_cfg(&cfg, mbus_cfg, infmt);
if (ret < 0)
return ret;
/* set default sensor frame width and height */
width = mbus_fmt->width;
height = mbus_fmt->height;
width = infmt->width;
height = infmt->height;
if (infmt->field == V4L2_FIELD_ALTERNATE)
height *= 2;
/* Set the CSI_SENS_CONF register remaining fields */
data |= cfg.data_width << CSI_SENS_CONF_DATA_WIDTH_SHIFT |
@ -416,42 +479,22 @@ int ipu_csi_init_interface(struct ipu_csi *csi,
ipu_csi_write(csi, 0xFF0000, CSI_CCIR_CODE_3);
break;
case IPU_CSI_CLK_MODE_CCIR656_INTERLACED:
if (mbus_fmt->width == 720 && mbus_fmt->height == 576) {
/*
* PAL case
*
* Field0BlankEnd = 0x6, Field0BlankStart = 0x2,
* Field0ActiveEnd = 0x4, Field0ActiveStart = 0
* Field1BlankEnd = 0x7, Field1BlankStart = 0x3,
* Field1ActiveEnd = 0x5, Field1ActiveStart = 0x1
*/
height = 625; /* framelines for PAL */
ipu_csi_write(csi, 0x40596 | CSI_CCIR_ERR_DET_EN,
CSI_CCIR_CODE_1);
ipu_csi_write(csi, 0xD07DF, CSI_CCIR_CODE_2);
ipu_csi_write(csi, 0xFF0000, CSI_CCIR_CODE_3);
} else if (mbus_fmt->width == 720 && mbus_fmt->height == 480) {
/*
* NTSC case
*
* Field0BlankEnd = 0x7, Field0BlankStart = 0x3,
* Field0ActiveEnd = 0x5, Field0ActiveStart = 0x1
* Field1BlankEnd = 0x6, Field1BlankStart = 0x2,
* Field1ActiveEnd = 0x4, Field1ActiveStart = 0
*/
height = 525; /* framelines for NTSC */
ipu_csi_write(csi, 0xD07DF | CSI_CCIR_ERR_DET_EN,
CSI_CCIR_CODE_1);
ipu_csi_write(csi, 0x40596, CSI_CCIR_CODE_2);
ipu_csi_write(csi, 0xFF0000, CSI_CCIR_CODE_3);
if (width == 720 && height == 480) {
std = V4L2_STD_NTSC;
height = 525;
} else if (width == 720 && height == 576) {
std = V4L2_STD_PAL;
height = 625;
} else {
dev_err(csi->ipu->dev,
"Unsupported CCIR656 interlaced video mode\n");
spin_unlock_irqrestore(&csi->lock, flags);
return -EINVAL;
"Unsupported interlaced video mode\n");
ret = -EINVAL;
goto out_unlock;
}
ret = ipu_csi_set_bt_interlaced_codes(csi, infmt, outfmt, std);
if (ret)
goto out_unlock;
break;
case IPU_CSI_CLK_MODE_CCIR1120_PROGRESSIVE_DDR:
case IPU_CSI_CLK_MODE_CCIR1120_PROGRESSIVE_SDR:
@ -476,9 +519,10 @@ int ipu_csi_init_interface(struct ipu_csi *csi,
dev_dbg(csi->ipu->dev, "CSI_ACT_FRM_SIZE = 0x%08X\n",
ipu_csi_read(csi, CSI_ACT_FRM_SIZE));
out_unlock:
spin_unlock_irqrestore(&csi->lock, flags);
return 0;
return ret;
}
EXPORT_SYMBOL_GPL(ipu_csi_init_interface);


@ -38,6 +38,7 @@ static __poll_t cec_poll(struct file *filp,
struct cec_adapter *adap = fh->adap;
__poll_t res = 0;
poll_wait(filp, &fh->wait, poll);
if (!cec_is_registered(adap))
return EPOLLERR | EPOLLHUP;
mutex_lock(&adap->lock);
@ -48,7 +49,6 @@ static __poll_t cec_poll(struct file *filp,
res |= EPOLLIN | EPOLLRDNORM;
if (fh->total_queued_events)
res |= EPOLLPRI;
poll_wait(filp, &fh->wait, poll);
mutex_unlock(&adap->lock);
return res;
}


@ -105,7 +105,7 @@ void saa7146_buffer_finish(struct saa7146_dev *dev,
}
q->curr->vb.state = state;
v4l2_get_timestamp(&q->curr->vb.ts);
q->curr->vb.ts = ktime_get_ns();
wake_up(&q->curr->vb.done);
q->curr = NULL;


@ -54,10 +54,7 @@ static int saa7146_i2c_msg_prepare(const struct i2c_msg *m, int num, __le32 *op)
/* loop through all messages */
for(i = 0; i < num; i++) {
/* insert the address of the i2c-slave.
note: we get 7 bit i2c-addresses,
so we have to perform a translation */
addr = (m[i].addr*2) + ( (0 != (m[i].flags & I2C_M_RD)) ? 1 : 0);
addr = i2c_8bit_addr_from_msg(&m[i]);
h1 = op_count/3; h2 = op_count%3;
op[h1] |= cpu_to_le32( (u8)addr << ((3-h2)*8));
op[h1] |= cpu_to_le32(SAA7146_I2C_START << ((3-h2)*2));


@ -796,7 +796,7 @@ static int vidioc_s_fmt_vid_overlay(struct file *file, void *__fh, struct v4l2_f
return -EFAULT;
}
/* vv->ov.fh is used to indicate that we have valid overlay informations, too */
/* vv->ov.fh is used to indicate that we have valid overlay information, too */
vv->ov.fh = fh;
/* check if our current overlay is active */


@ -311,7 +311,7 @@ int sms_board_led_feedback(struct smscore_device_t *coredev, int led)
int board_id = smscore_get_board_id(coredev);
struct sms_board *board = sms_get_board(board_id);
/* dont touch GPIO if LEDs are already set */
/* don't touch GPIO if LEDs are already set */
if (smscore_led_state(coredev, -1) == led)
return 0;


@ -750,7 +750,7 @@ struct sms_stats {
u32 num_of_corrected_mpe_tlbs;/* Number of MPE tables which were
corrected by MPE RS decoding */
/* Common params */
u32 ber_error_count; /* Number of errornous SYNC bits. */
u32 ber_error_count; /* Number of erroneous SYNC bits. */
u32 ber_bit_count; /* Total number of SYNC bits. */
/* Interface information */


@ -246,6 +246,10 @@ bool tpg_s_fourcc(struct tpg_data *tpg, u32 fourcc)
case V4L2_PIX_FMT_YUV555:
case V4L2_PIX_FMT_YUV565:
case V4L2_PIX_FMT_YUV32:
case V4L2_PIX_FMT_AYUV32:
case V4L2_PIX_FMT_XYUV32:
case V4L2_PIX_FMT_VUYA32:
case V4L2_PIX_FMT_VUYX32:
tpg->color_enc = TGP_COLOR_ENC_YCBCR;
break;
case V4L2_PIX_FMT_YUV420M:
@ -372,6 +376,10 @@ bool tpg_s_fourcc(struct tpg_data *tpg, u32 fourcc)
case V4L2_PIX_FMT_ARGB32:
case V4L2_PIX_FMT_ABGR32:
case V4L2_PIX_FMT_YUV32:
case V4L2_PIX_FMT_AYUV32:
case V4L2_PIX_FMT_XYUV32:
case V4L2_PIX_FMT_VUYA32:
case V4L2_PIX_FMT_VUYX32:
case V4L2_PIX_FMT_HSV32:
tpg->twopixelsize[0] = 2 * 4;
break;
@ -1267,10 +1275,12 @@ static void gen_twopix(struct tpg_data *tpg,
case V4L2_PIX_FMT_RGB32:
case V4L2_PIX_FMT_XRGB32:
case V4L2_PIX_FMT_HSV32:
case V4L2_PIX_FMT_XYUV32:
alpha = 0;
/* fall through */
case V4L2_PIX_FMT_YUV32:
case V4L2_PIX_FMT_ARGB32:
case V4L2_PIX_FMT_AYUV32:
buf[0][offset] = alpha;
buf[0][offset + 1] = r_y_h;
buf[0][offset + 2] = g_u_s;
@ -1278,9 +1288,11 @@ static void gen_twopix(struct tpg_data *tpg,
break;
case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_XBGR32:
case V4L2_PIX_FMT_VUYX32:
alpha = 0;
/* fall through */
case V4L2_PIX_FMT_ABGR32:
case V4L2_PIX_FMT_VUYA32:
buf[0][offset] = b_v;
buf[0][offset + 1] = g_u_s;
buf[0][offset + 2] = r_y_h;


@ -499,9 +499,9 @@ static int __vb2_queue_free(struct vb2_queue *q, unsigned int buffers)
pr_info(" buf_init: %u buf_cleanup: %u buf_prepare: %u buf_finish: %u\n",
vb->cnt_buf_init, vb->cnt_buf_cleanup,
vb->cnt_buf_prepare, vb->cnt_buf_finish);
pr_info(" buf_queue: %u buf_done: %u buf_request_complete: %u\n",
vb->cnt_buf_queue, vb->cnt_buf_done,
vb->cnt_buf_request_complete);
pr_info(" buf_out_validate: %u buf_queue: %u buf_done: %u buf_request_complete: %u\n",
vb->cnt_buf_out_validate, vb->cnt_buf_queue,
vb->cnt_buf_done, vb->cnt_buf_request_complete);
pr_info(" alloc: %u put: %u prepare: %u finish: %u mmap: %u\n",
vb->cnt_mem_alloc, vb->cnt_mem_put,
vb->cnt_mem_prepare, vb->cnt_mem_finish,
@ -934,7 +934,7 @@ void vb2_buffer_done(struct vb2_buffer *vb, enum vb2_buffer_state state)
/* sync buffers */
for (plane = 0; plane < vb->num_planes; ++plane)
call_void_memop(vb, finish, vb->planes[plane].mem_priv);
vb->synced = false;
vb->synced = 0;
}
spin_lock_irqsave(&q->done_lock, flags);
@ -1041,6 +1041,7 @@ static int __prepare_userptr(struct vb2_buffer *vb)
if (vb->planes[plane].mem_priv) {
if (!reacquired) {
reacquired = true;
vb->copied_timestamp = 0;
call_void_vb_qop(vb, buf_cleanup, vb);
}
call_void_memop(vb, put_userptr, vb->planes[plane].mem_priv);
@ -1165,6 +1166,7 @@ static int __prepare_dmabuf(struct vb2_buffer *vb)
if (!reacquired) {
reacquired = true;
vb->copied_timestamp = 0;
call_void_vb_qop(vb, buf_cleanup, vb);
}
@ -1196,6 +1198,9 @@ static int __prepare_dmabuf(struct vb2_buffer *vb)
* userspace knows sooner rather than later if the dma-buf map fails.
*/
for (plane = 0; plane < vb->num_planes; ++plane) {
if (vb->planes[plane].dbuf_mapped)
continue;
ret = call_memop(vb, map_dmabuf, vb->planes[plane].mem_priv);
if (ret) {
dprintk(1, "failed to map dmabuf for plane %d\n",
@ -1274,6 +1279,14 @@ static int __buf_prepare(struct vb2_buffer *vb)
return 0;
WARN_ON(vb->synced);
if (q->is_output) {
ret = call_vb_qop(vb, buf_out_validate, vb);
if (ret) {
dprintk(1, "buffer validation failed\n");
return ret;
}
}
vb->state = VB2_BUF_STATE_PREPARING;
switch (q->memory) {
@ -1302,8 +1315,8 @@ static int __buf_prepare(struct vb2_buffer *vb)
for (plane = 0; plane < vb->num_planes; ++plane)
call_void_memop(vb, prepare, vb->planes[plane].mem_priv);
vb->synced = true;
vb->prepared = true;
vb->synced = 1;
vb->prepared = 1;
vb->state = orig_state;
return 0;
@ -1520,6 +1533,14 @@ int vb2_core_qbuf(struct vb2_queue *q, unsigned int index, void *pb,
return -EINVAL;
}
if (q->is_output && !vb->prepared) {
ret = call_vb_qop(vb, buf_out_validate, vb);
if (ret) {
dprintk(1, "buffer validation failed\n");
return ret;
}
}
media_request_object_init(&vb->req_obj);
/* Make sure the request is in a safe state for updating. */
@ -1750,7 +1771,6 @@ EXPORT_SYMBOL_GPL(vb2_wait_for_all_buffers);
static void __vb2_dqbuf(struct vb2_buffer *vb)
{
struct vb2_queue *q = vb->vb2_queue;
unsigned int i;
/* nothing to do if the buffer is already dequeued */
if (vb->state == VB2_BUF_STATE_DEQUEUED)
@ -1758,14 +1778,6 @@ static void __vb2_dqbuf(struct vb2_buffer *vb)
vb->state = VB2_BUF_STATE_DEQUEUED;
/* unmap DMABUF buffer */
if (q->memory == VB2_MEMORY_DMABUF)
for (i = 0; i < vb->num_planes; ++i) {
if (!vb->planes[i].dbuf_mapped)
continue;
call_void_memop(vb, unmap_dmabuf, vb->planes[i].mem_priv);
vb->planes[i].dbuf_mapped = 0;
}
call_void_bufop(q, init_buffer, vb);
}
@ -1792,7 +1804,7 @@ int vb2_core_dqbuf(struct vb2_queue *q, unsigned int *pindex, void *pb,
}
call_void_vb_qop(vb, buf_finish, vb);
vb->prepared = false;
vb->prepared = 0;
if (pindex)
*pindex = vb->index;
@ -1916,12 +1928,12 @@ static void __vb2_queue_cancel(struct vb2_queue *q)
for (plane = 0; plane < vb->num_planes; ++plane)
call_void_memop(vb, finish,
vb->planes[plane].mem_priv);
vb->synced = false;
vb->synced = 0;
}
if (vb->prepared) {
call_void_vb_qop(vb, buf_finish, vb);
vb->prepared = false;
vb->prepared = 0;
}
__vb2_dqbuf(vb);
@ -1932,6 +1944,7 @@ static void __vb2_queue_cancel(struct vb2_queue *q)
if (vb->request)
media_request_put(vb->request);
vb->request = NULL;
vb->copied_timestamp = 0;
}
}
@ -2278,6 +2291,8 @@ __poll_t vb2_core_poll(struct vb2_queue *q, struct file *file,
if (q->is_output && !(req_events & (EPOLLOUT | EPOLLWRNORM)))
return 0;
poll_wait(file, &q->done_wq, wait);
/*
* Start file I/O emulator only if streaming API has not been used yet.
*/
@ -2329,8 +2344,6 @@ __poll_t vb2_core_poll(struct vb2_queue *q, struct file *file,
*/
if (q->last_buffer_dequeued)
return EPOLLIN | EPOLLRDNORM;
poll_wait(file, &q->done_wq, wait);
}
/*


@ -3,7 +3,7 @@
*
* Copyright (C) 2010 Samsung Electronics
*
* Author: Andrzej Pietrasiewicz <andrzej.p@samsung.com>
* Author: Andrzej Pietrasiewicz <andrzejtp2010@gmail.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@ -67,7 +67,7 @@ static int vb2_dma_sg_alloc_compacted(struct vb2_dma_sg_buf *buf,
int i;
order = get_order(size);
/* Dont over allocate*/
/* Don't over allocate*/
if ((PAGE_SIZE << order) > size)
order--;


@ -121,7 +121,7 @@ static void vb2_common_vm_close(struct vm_area_struct *vma)
}
/*
* vb2_common_vm_ops - common vm_ops used for tracking refcount of mmaped
* vb2_common_vm_ops - common vm_ops used for tracking refcount of mmapped
* video buffers
*/
const struct vm_operations_struct vb2_common_vm_ops = {


@ -143,7 +143,7 @@ static void __copy_timestamp(struct vb2_buffer *vb, const void *pb)
* and the timecode field and flag if needed.
*/
if (q->copy_timestamp)
vb->timestamp = timeval_to_ns(&b->timestamp);
vb->timestamp = v4l2_timeval_to_ns(&b->timestamp);
vbuf->flags |= b->flags & V4L2_BUF_FLAG_TIMECODE;
if (b->flags & V4L2_BUF_FLAG_TIMECODE)
vbuf->timecode = b->timecode;
@ -409,6 +409,15 @@ static int vb2_queue_or_prepare_buf(struct vb2_queue *q, struct media_device *md
*/
if (WARN_ON(!q->ops->buf_request_complete))
return -EINVAL;
/*
* Make sure this op is implemented by the driver for the output queue.
* It's easy to forget this callback, but is it important to correctly
* validate the 'field' value at QBUF time.
*/
if (WARN_ON((q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) &&
!q->ops->buf_out_validate))
return -EINVAL;
if (vb->state != VB2_BUF_STATE_DEQUEUED) {
dprintk(1, "%s: buffer is not in dequeued state\n", opname);
@ -567,7 +576,7 @@ static int __fill_vb2_buffer(struct vb2_buffer *vb, struct vb2_plane *planes)
struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
unsigned int plane;
if (!vb->vb2_queue->is_output || !vb->vb2_queue->copy_timestamp)
if (!vb->vb2_queue->copy_timestamp)
vb->timestamp = 0;
for (plane = 0; plane < vb->num_planes; ++plane) {
@ -589,6 +598,19 @@ static const struct vb2_buf_ops v4l2_buf_ops = {
.copy_timestamp = __copy_timestamp,
};
int vb2_find_timestamp(const struct vb2_queue *q, u64 timestamp,
unsigned int start_idx)
{
unsigned int i;
for (i = start_idx; i < q->num_buffers; i++)
if (q->bufs[i]->copied_timestamp &&
q->bufs[i]->timestamp == timestamp)
return i;
return -1;
}
EXPORT_SYMBOL_GPL(vb2_find_timestamp);
/*
* vb2_querybuf() - query video buffer information
* @q: videobuf queue
@ -846,16 +868,14 @@ EXPORT_SYMBOL_GPL(vb2_queue_release);
__poll_t vb2_poll(struct vb2_queue *q, struct file *file, poll_table *wait)
{
struct video_device *vfd = video_devdata(file);
__poll_t req_events = poll_requested_events(wait);
__poll_t res = 0;
if (test_bit(V4L2_FL_USES_V4L2_FH, &vfd->flags)) {
struct v4l2_fh *fh = file->private_data;
poll_wait(file, &fh->wait, wait);
if (v4l2_event_pending(fh))
res = EPOLLPRI;
else if (req_events & EPOLLPRI)
poll_wait(file, &fh->wait, wait);
}
return res | vb2_core_poll(q, file, wait);


@ -1195,13 +1195,13 @@ static __poll_t dvb_demux_poll(struct file *file, poll_table *wait)
struct dmxdev_filter *dmxdevfilter = file->private_data;
__poll_t mask = 0;
poll_wait(file, &dmxdevfilter->buffer.queue, wait);
if ((!dmxdevfilter) || dmxdevfilter->dev->exit)
return EPOLLERR;
if (dvb_vb2_is_streaming(&dmxdevfilter->vb2_ctx))
return dvb_vb2_poll(&dmxdevfilter->vb2_ctx, file, wait);
poll_wait(file, &dmxdevfilter->buffer.queue, wait);
if (dmxdevfilter->state != DMXDEV_STATE_GO &&
dmxdevfilter->state != DMXDEV_STATE_DONE &&
dmxdevfilter->state != DMXDEV_STATE_TIMEDOUT)
@ -1346,13 +1346,13 @@ static __poll_t dvb_dvr_poll(struct file *file, poll_table *wait)
dprintk("%s\n", __func__);
poll_wait(file, &dmxdev->dvr_buffer.queue, wait);
if (dmxdev->exit)
return EPOLLERR;
if (dvb_vb2_is_streaming(&dmxdev->dvr_vb2_ctx))
return dvb_vb2_poll(&dmxdev->dvr_vb2_ctx, file, wait);
poll_wait(file, &dmxdev->dvr_buffer.queue, wait);
if (((file->f_flags & O_ACCMODE) == O_RDONLY) ||
dmxdev->may_do_mmap) {
if (dmxdev->dvr_buffer.error)


@ -1797,6 +1797,8 @@ static __poll_t dvb_ca_en50221_io_poll(struct file *file, poll_table *wait)
dprintk("%s\n", __func__);
poll_wait(file, &ca->wait_queue, wait);
if (dvb_ca_en50221_io_read_condition(ca, &result, &slot) == 1)
mask |= EPOLLIN;
@ -1804,9 +1806,6 @@ static __poll_t dvb_ca_en50221_io_poll(struct file *file, poll_table *wait)
if (mask)
return mask;
/* wait for something to happen */
poll_wait(file, &ca->wait_queue, wait);
if (dvb_ca_en50221_io_read_condition(ca, &result, &slot) == 1)
mask |= EPOLLIN;


@ -1596,7 +1596,7 @@ static bool is_dvbv3_delsys(u32 delsys)
*
* Provides emulation for delivery systems that are compatible with the old
* DVBv3 call. Among its usages, it provices support for ISDB-T, and allows
* using a DVB-S2 only frontend just like it were a DVB-S, if the frontent
* using a DVB-S2 only frontend just like it were a DVB-S, if the frontend
* parameters are compatible with DVB-S spec.
*/
static int emulate_delivery_system(struct dvb_frontend *fe, u32 delsys)


@ -898,7 +898,7 @@ EXPORT_SYMBOL(dvb_unregister_adapter);
/* if the miracle happens and "generic_usercopy()" is included into
the kernel, then this can vanish. please don't make the mistake and
define this as video_usercopy(). this will introduce a dependecy
define this as video_usercopy(). this will introduce a dependency
to the v4l "videodev.o" module, which is unnecessary for some
cards (ie. the budget dvb-cards don't need the v4l module...) */
int dvb_usercopy(struct file *file,


@ -2947,7 +2947,7 @@ static int cxd2841er_sleep_tc_to_active_t(struct cxd2841er_priv *priv,
((priv->flags & CXD2841ER_ASCOT) ? 0x01 : 0x00), 0x01);
/* Set SLV-T Bank : 0x18 */
cxd2841er_write_reg(priv, I2C_SLVT, 0x00, 0x18);
/* Pre-RS BER moniter setting */
/* Pre-RS BER monitor setting */
cxd2841er_set_reg_bits(priv, I2C_SLVT, 0x36, 0x40, 0x07);
/* FEC Auto Recovery setting */
cxd2841er_set_reg_bits(priv, I2C_SLVT, 0x30, 0x01, 0x01);


@ -2459,7 +2459,7 @@ static int dib0090_tune(struct dvb_frontend *fe)
state->current_standard = state->fe->dtv_property_cache.delivery_system;
ret = 20;
state->calibrate = CAPTRIM_CAL; /* captrim serach now */
state->calibrate = CAPTRIM_CAL; /* captrim search now */
}
else if (*tune_state == CT_TUNER_STEP_0) { /* Warning : because of captrim cal, if you change this step, change it also in _cal.c file because it is the step following captrim cal state machine */


@ -369,7 +369,7 @@ static int dib7000m_sad_calib(struct dib7000m_state *state)
{
/* internal */
// dib7000m_write_word(state, 928, (3 << 14) | (1 << 12) | (524 << 0)); // sampling clock of the SAD is writting in set_bandwidth
// dib7000m_write_word(state, 928, (3 << 14) | (1 << 12) | (524 << 0)); // sampling clock of the SAD is writing in set_bandwidth
dib7000m_write_word(state, 929, (0 << 1) | (0 << 0));
dib7000m_write_word(state, 930, 776); // 0.625*3.3 / 4096
@ -928,7 +928,7 @@ static void dib7000m_set_channel(struct dib7000m_state *state, struct dtv_fronte
}
state->div_sync_wait = (value * 3) / 2 + 32; // add 50% SFN margin + compensate for one DVSY-fifo TODO
/* deactive the possibility of diversity reception if extended interleave - not for 7000MC */
/* deactivate the possibility of diversity reception if extended interleave - not for 7000MC */
/* P_dvsy_sync_mode = 0, P_dvsy_sync_enable=1, P_dvcb_comb_mode=2 */
if (1 == 1 || state->revision > 0x4000)
state->div_force_off = 0;


@ -94,7 +94,7 @@ enum dib7000p_power_mode {
DIB7000P_POWER_INTERFACE_ONLY,
};
/* dib7090 specific fonctions */
/* dib7090 specific functions */
static int dib7090_set_output_mode(struct dvb_frontend *fe, int mode);
static int dib7090_set_diversity_in(struct dvb_frontend *fe, int onoff);
static void dib7090_setDibTxMux(struct dib7000p_state *state, int mode);
@ -319,7 +319,7 @@ static void dib7000p_set_adc_state(struct dib7000p_state *state, enum dibx000_ad
dib7000p_write_word(state, 1925, reg | (1 << 4) | (1 << 2)); /* en_slowAdc = 1 & reset_sladc = 1 */
reg = dib7000p_read_word(state, 1925); /* read acces to make it works... strange ... */
reg = dib7000p_read_word(state, 1925); /* read access to make it works... strange ... */
msleep(200);
dib7000p_write_word(state, 1925, reg & ~(1 << 4)); /* en_slowAdc = 1 & reset_sladc = 0 */
@ -1101,7 +1101,7 @@ static void dib7000p_set_channel(struct dib7000p_state *state,
else
state->div_sync_wait = (value * 3) / 2 + state->cfg.diversity_delay;
/* deactive the possibility of diversity reception if extended interleaver */
/* deactivate the possibility of diversity reception if extended interleaver */
state->div_force_off = !1 && ch->transmission_mode != TRANSMISSION_MODE_8K;
dib7000p_set_diversity_in(&state->demod, state->div_state);
@ -2378,7 +2378,7 @@ static int dib7090_tuner_xfer(struct i2c_adapter *i2c_adap, struct i2c_msg msg[]
}
}
if (apb_address != 0) /* R/W acces via APB */
if (apb_address != 0) /* R/W access via APB */
return dib7090p_rw_on_apb(i2c_adap, msg, num, apb_address);
else /* R/W access via SERPAR */
return w7090p_tuner_rw_serpar(i2c_adap, msg, num);


@ -564,7 +564,7 @@ static int dib8000_set_adc_state(struct dib8000_state *state, enum dibx000_adc_s
dib8000_write_word(state, 1925, reg |
(1<<4) | (1<<2));
/* read acces to make it works... strange ... */
/* read access to make it works... strange ... */
reg = dib8000_read_word(state, 1925);
msleep(20);
/* en_slowAdc = 1 & reset_sladc = 0 */
@ -1091,7 +1091,7 @@ static int dib8000_reset(struct dvb_frontend *fe)
if ((state->revision != 0x8090) &&
(dib8000_set_output_mode(fe, OUTMODE_HIGH_Z) != 0))
dprintk("OUTPUT_MODE could not be resetted.\n");
dprintk("OUTPUT_MODE could not be reset.\n");
state->current_agc = NULL;
@ -1867,7 +1867,7 @@ static int dib8096p_tuner_xfer(struct i2c_adapter *i2c_adap,
}
}
if (apb_address != 0) /* R/W acces via APB */
if (apb_address != 0) /* R/W access via APB */
return dib8096p_rw_on_apb(i2c_adap, msg, num, apb_address);
else /* R/W access via SERPAR */
return dib8096p_tuner_rw_serpar(i2c_adap, msg, num);
@ -3082,7 +3082,7 @@ static int dib8000_tune(struct dvb_frontend *fe)
state->autosearch_state = AS_DONE;
*tune_state = CT_DEMOD_STOP; /* else we are done here */
break;
case 2: /* Succes */
case 2: /* Success */
state->status = FE_STATUS_FFT_SUCCESS; /* signal to the upper layer, that there was a channel found and the parameters can be read */
*tune_state = CT_DEMOD_STEP_3;
if (state->autosearch_state == AS_SEARCHING_GUARD)
@ -3193,10 +3193,10 @@ static int dib8000_tune(struct dvb_frontend *fe)
case CT_DEMOD_STEP_6: /* (36) if there is an input (diversity) */
if ((state->fe[1] != NULL) && (state->output_mode != OUTMODE_DIVERSITY)) {
/* if there is a diversity fe in input and this fe is has not already failled : wait here until this this fe has succedeed or failled */
/* if there is a diversity fe in input and this fe is has not already failed : wait here until this this fe has succedeed or failed */
if (dib8000_get_status(state->fe[1]) <= FE_STATUS_STD_SUCCESS) /* Something is locked on the input fe */
*tune_state = CT_DEMOD_STEP_8; /* go for mpeg */
else if (dib8000_get_status(state->fe[1]) >= FE_STATUS_TUNE_TIME_TOO_SHORT) { /* fe in input failled also, break the current one */
else if (dib8000_get_status(state->fe[1]) >= FE_STATUS_TUNE_TIME_TOO_SHORT) { /* fe in input failed also, break the current one */
*tune_state = CT_DEMOD_STOP; /* else we are done here ; step 8 will close the loops and exit */
dib8000_viterbi_state(state, 1); /* start viterbi chandec */
dib8000_set_isdbt_loop_params(state, LOOP_TUNE_2);


@ -1020,7 +1020,7 @@ static int dib9000_risc_apb_access_read(struct dib9000_state *state, u32 address
if (address >= 1024 || !state->platform.risc.fw_is_running)
return -EINVAL;
/* dprintk( "APB access thru rd fw %d %x\n", address, attribute); */
/* dprintk( "APB access through rd fw %d %x\n", address, attribute); */
mb[0] = (u16) address;
mb[1] = len / 2;
@ -1050,7 +1050,7 @@ static int dib9000_risc_apb_access_write(struct dib9000_state *state, u32 addres
if (len > 18)
return -EINVAL;
/* dprintk( "APB access thru wr fw %d %x\n", address, attribute); */
/* dprintk( "APB access through wr fw %d %x\n", address, attribute); */
mb[0] = (u16)address;
for (i = 0; i + 1 < len; i += 2)


@ -67,7 +67,7 @@
* (2 bytes). The DAP can operate in 3 modes:
* (1) only short
* (2) only long
* (3) both long and short but short preferred and long only when necesarry
* (3) both long and short but short preferred and long only when necessary
*
* These modes must be selected compile time via compile switches.
* Compile switch settings for the different modes:
@ -112,14 +112,14 @@
* + single master mode means no use of repeated starts
* + multi master mode means use of repeated starts
* Default is single master.
* Default can be overriden by setting the compile switch DRXDAP_SINGLE_MASTER.
* Default can be overridden by setting the compile switch DRXDAP_SINGLE_MASTER.
*
* Slave:
* Single/multi master selected via the flags in the FASI protocol.
* + single master means remember memory address between i2c packets
* + multimaster means flush memory address between i2c packets
* Default is single master, DAP FASI changes multi-master setting silently
* into single master setting. This cannot be overrriden.
* into single master setting. This cannot be overridden.
*
*/
/* set default */
@ -139,7 +139,7 @@
* In single master mode, data can be written by sending the register address
* first, then two or four bytes of data in the next packet.
* Because the device address plus a register address equals five bytes,
* the mimimum chunk size must be five.
* the minimum chunk size must be five.
* If ten-bit I2C device addresses are used, the minimum chunk size must be six,
* because the I2C device address will then occupy two bytes when writing.
*


@ -94,7 +94,7 @@ int drxbsp_i2c_term(void);
* \param r_count The number of bytes to read
* \param r_data The array to read the data from
* \return int Return status.
* \retval 0 Succes.
* \retval 0 Success.
* \retval -EIO Failure.
* \retval -EINVAL Parameter 'wcount' is not zero but parameter
* 'wdata' contains NULL.
@ -986,7 +986,7 @@ struct drx_filter_info {
* \struct struct drx_channel * \brief The set of parameters describing a single channel.
*
* Used by DRX_CTRL_SET_CHANNEL and DRX_CTRL_GET_CHANNEL.
* Only certain fields need to be used for a specfic standard.
* Only certain fields need to be used for a specific standard.
*
*/
struct drx_channel {
@ -1606,7 +1606,7 @@ struct drx_version_list {
DRX_AUD_I2S_MATRIX_B_MONO,
/*< B sound only, stereo or mono */
DRX_AUD_I2S_MATRIX_STEREO,
/*< A+B sound, transparant */
/*< A+B sound, transparent */
DRX_AUD_I2S_MATRIX_MONO /*< A+B mixed to mono sum, (L+R)/2 */};
/*
@ -1870,7 +1870,7 @@ struct drx_reg_dump {
/*< current power management mode */
/* Tuner */
u8 tuner_port_nr; /*< nr of I2C port to wich tuner is */
u8 tuner_port_nr; /*< nr of I2C port to which tuner is */
s32 tuner_min_freq_rf;
/*< minimum RF input frequency, in kHz */
s32 tuner_max_freq_rf;


@ -380,10 +380,10 @@ DEFINES
*/
/*****************************************************************************/
/* Audio block 0x103 is write only. To avoid shadowing in driver accessing */
/* RAM adresses directly. This must be READ ONLY to avoid problems. */
/* Writing to the interface adresses is more than only writing the RAM */
/* locations */
/* Audio block 0x103 is write only. To avoid shadowing in driver accessing */
/* RAM addresses directly. This must be READ ONLY to avoid problems. */
/* Writing to the interface addresses are more than only writing the RAM */
/* locations */
/*****************************************************************************/
/*
* \brief RAM location of MODUS registers
@ -656,8 +656,8 @@ static struct drxj_data drxj_data_g = {
false, /* flag: true=bypass */
ATV_TOP_VID_PEAK__PRE, /* shadow of ATV_TOP_VID_PEAK__A */
ATV_TOP_NOISE_TH__PRE, /* shadow of ATV_TOP_NOISE_TH__A */
true, /* flag CVBS ouput enable */
false, /* flag SIF ouput enable */
true, /* flag CVBS output enable */
false, /* flag SIF output enable */
DRXJ_SIF_ATTENUATION_0DB, /* current SIF att setting */
{ /* qam_rf_agc_cfg */
DRX_STANDARD_ITU_B, /* standard */
@ -832,7 +832,7 @@ static struct drx_common_attr drxj_default_comm_attr_g = {
false, /* If true mirror frequency spectrum */
{
/* MPEG output configuration */
true, /* If true, enable MPEG ouput */
true, /* If true, enable MPEG output */
false, /* If true, insert RS byte */
false, /* If true, parallel out otherwise serial */
false, /* If true, invert DATA signals */
@ -848,7 +848,7 @@ static struct drx_common_attr drxj_default_comm_attr_g = {
DRX_MPEG_STR_WIDTH_1 /* MPEG Start width in clock cycles */
},
/* Initilisations below can be omitted, they require no user input and
are initialy 0, NULL or false. The compiler will initialize them to these
are initially 0, NULL or false. The compiler will initialize them to these
values when omitted. */
false, /* is_opened */
@ -869,7 +869,7 @@ static struct drx_common_attr drxj_default_comm_attr_g = {
DRX_POWER_UP,
/* Tuner */
1, /* nr of I2C port to wich tuner is */
1, /* nr of I2C port to which tuner is */
0L, /* minimum RF input frequency, in kHz */
0L, /* maximum RF input frequency, in kHz */
false, /* Rf Agc Polarity */
@ -1656,7 +1656,7 @@ static int drxdap_fasi_write_block(struct i2c_device_addr *dev_addr,
sequense will be visible: (1) write address {i2c addr,
4 bytes chip address} (2) write data {i2c addr, 4 bytes data }
(3) write address (4) write data etc...
Address must be rewriten because HI is reset after data transport and
Address must be rewritten because HI is reset after data transport and
expects an address.
*/
todo = (block_size < datasize ? block_size : datasize);
@ -1820,7 +1820,7 @@ static int drxdap_fasi_write_reg32(struct i2c_device_addr *dev_addr,
* \param wdata Data to write
* \param rdata Buffer for data to read
* \return int
* \retval 0 Succes
* \retval 0 Success
* \retval -EIO Timeout, I2C error, illegal bank
*
* 16 bits register read modify write access using short addressing format only.
@ -1897,7 +1897,7 @@ static int drxj_dap_read_modify_write_reg16(struct i2c_device_addr *dev_addr,
* \param addr
* \param data
* \return int
* \retval 0 Succes
* \retval 0 Success
* \retval -EIO Timeout, I2C error, illegal bank
*
* 16 bits register read access via audio token ring interface.
@ -2004,7 +2004,7 @@ static int drxj_dap_read_reg16(struct i2c_device_addr *dev_addr,
* \param addr
* \param data
* \return int
* \retval 0 Succes
* \retval 0 Success
* \retval -EIO Timeout, I2C error, illegal bank
*
* 16 bits register write access via audio token ring interface.
@ -2094,7 +2094,7 @@ static int drxj_dap_write_reg16(struct i2c_device_addr *dev_addr,
* \param datasize size of data buffer in bytes
* \param data pointer to data buffer
* \return int
* \retval 0 Succes
* \retval 0 Success
* \retval -EIO Timeout, I2C error, illegal bank
*
*/
@ -2338,7 +2338,7 @@ hi_command(struct i2c_device_addr *dev_addr, const struct drxj_hi_cmd *cmd, u16
if ((cmd->cmd) == SIO_HI_RA_RAM_CMD_RESET)
msleep(1);
/* Detect power down to ommit reading result */
/* Detect power down to omit reading result */
powerdown_cmd = (bool) ((cmd->cmd == SIO_HI_RA_RAM_CMD_CONFIG) &&
(((cmd->
param5) & SIO_HI_RA_RAM_PAR_5_CFG_SLEEP__M)
@ -2754,7 +2754,7 @@ ctrl_set_cfg_mpeg_output(struct drx_demod_instance *demod, struct drx_cfg_mpeg_o
common_attr = (struct drx_common_attr *) demod->my_common_attr;
if (cfg_data->enable_mpeg_output == true) {
/* quick and dirty patch to set MPEG incase current std is not
/* quick and dirty patch to set MPEG in case current std is not
producing MPEG */
switch (ext_attr->standard) {
case DRX_STANDARD_8VSB:
@ -2894,7 +2894,7 @@ ctrl_set_cfg_mpeg_output(struct drx_demod_instance *demod, struct drx_cfg_mpeg_o
break;
default:
break;
} /* swtich (standard) */
} /* switch (standard) */
/* Check insertion of the Reed-Solomon parity bytes */
rc = drxj_dap_read_reg16(dev_addr, FEC_OC_MODE__A, &fec_oc_reg_mode, 0);
@ -4127,7 +4127,7 @@ rw_error:
* \param datasize size of data buffer in bytes
* \param data pointer to data buffer
* \return int
* \retval 0 Succes
* \retval 0 Success
* \retval -EIO Timeout, I2C error, illegal bank
*
*/
@ -8989,7 +8989,7 @@ qam64auto(struct drx_demod_instance *demod,
((jiffies_to_msecs(jiffies) - start_time) <
(DRXJ_QAM_MAX_WAITTIME + timeout_ofs))
);
/* Returning control to apllication ... */
/* Returning control to application ... */
return 0;
rw_error:
@ -9309,7 +9309,7 @@ get_qamrs_err_count(struct i2c_device_addr *dev_addr,
return -EINVAL;
/* all reported errors are received in the */
/* most recently finished measurment period */
/* most recently finished measurement period */
/* no of pre RS bit errors */
rc = drxj_dap_read_reg16(dev_addr, FEC_RS_NR_BIT_ERRORS__A, &nr_bit_errors, 0);
if (rc != 0) {
@ -9689,7 +9689,7 @@ rw_error:
(3) SIF AGC (used to amplify the output signal in case input to low)
The SIF AGC is now coupled to the RF/IF AGCs.
The SIF AGC is needed for both SIF ouput and the internal SIF signal to
The SIF AGC is needed for both SIF output and the internal SIF signal to
the AUD block.
RF and IF AGCs DACs are part of AFE, Video and SIF AGC DACs are part of
@ -9702,11 +9702,11 @@ rw_error:
later on because of the schedule)
Several HW/SCU "settings" can be used for ATV. The standard selection
will reset most of these settings. To avoid that the end user apllication
will reset most of these settings. To avoid that the end user application
has to perform these settings each time the ATV or FM standards is
selected the driver will shadow these settings. This enables the end user
to perform the settings only once after a drx_open(). The driver must
write the shadow settings to HW/SCU incase:
write the shadow settings to HW/SCU in case:
( setstandard FM/ATV) ||
( settings have changed && FM/ATV standard is active)
The shadow settings will be stored in the device specific data container.
@ -9908,7 +9908,7 @@ rw_error:
#define IMPULSE_COSINE_ALPHA_0_5 { 2, 0, -2, -2, 2, 5, 2, -10, -20, -14, 20, 74, 125, 145} /*sqrt raised-cosine filter with alpha=0.5 */
#define IMPULSE_COSINE_ALPHA_RO_0_5 { 0, 0, 1, 2, 3, 0, -7, -15, -16, 0, 34, 77, 114, 128} /*full raised-cosine filter with alpha=0.5 (receiver only) */
/* Coefficients for the nyquist fitler (total: 27 taps) */
/* Coefficients for the nyquist filter (total: 27 taps) */
#define NYQFILTERLEN 27
static int ctrl_set_oob(struct drx_demod_instance *demod, struct drxoob *oob_param)


@ -49,7 +49,7 @@ INCLUDES
#if ((DRXDAP_SINGLE_MASTER == 0) && (DRXDAPFASI_LONG_ADDR_ALLOWED == 0))
#error "Multi master mode and short addressing only is an illegal combination"
*; /* Generate a fatal compiler error to make sure it stops here,
this is necesarry because not all compilers stop after a #error. */
this is necessary because not all compilers stop after a #error. */
#endif
/*-------------------------------------------------------------------------
@ -203,7 +203,7 @@ struct drxj_agc_status {
* /struct drxjrs_errors
* Available failure information in DRXJ_FEC_RS.
*
* Container for errors that are received in the most recently finished measurment period
* Container for errors that are received in the most recently finished measurement period
*
*/
struct drxjrs_errors {
@ -405,7 +405,7 @@ struct drxj_cfg_atv_output {
*
*/
struct drxj_data {
/* device capabilties (determined during drx_open()) */
/* device capabilities (determined during drx_open()) */
bool has_lna; /*< true if LNA (aka PGA) present */
bool has_oob; /*< true if OOB supported */
bool has_ntsc; /*< true if NTSC supported */
@ -455,7 +455,7 @@ struct drxj_cfg_atv_output {
/* IQM fs frequecy shift and inversion */
u32 iqm_fs_rate_ofs; /*< frequency shifter setting after setchannel */
bool pos_image; /*< Ture: positive image */
bool pos_image; /*< True: positive image */
/* IQM RC frequecy shift */
u32 iqm_rc_rate_ofs; /*< frequency shifter setting after setchannel */
@ -468,8 +468,8 @@ struct drxj_cfg_atv_output {
bool phase_correction_bypass;/*< flag: true=bypass */
s16 atv_top_vid_peak; /*< shadow of ATV_TOP_VID_PEAK__A */
u16 atv_top_noise_th; /*< shadow of ATV_TOP_NOISE_TH__A */
bool enable_cvbs_output; /*< flag CVBS ouput enable */
bool enable_sif_output; /*< flag SIF ouput enable */
bool enable_cvbs_output; /*< flag CVBS output enable */
bool enable_sif_output; /*< flag SIF output enable */
enum drxjsif_attenuation sif_attenuation;
/*< current SIF att setting */
/* Agc configuration for QAM and VSB */


@ -890,7 +890,7 @@ u8 DRXD_StartDiversityEnd[] = {
/* End demod, combining RF in and diversity in, MPEG TS out */
WR16(B_FE_CF_REG_IMP_VAL__A, 0x0), /* disable impulse noise cruncher */
WR16(B_FE_AD_REG_INVEXT__A, 0x0), /* clock inversion (for sohard board) */
WR16(B_CP_REG_BR_STR_DEL__A, 10), /* apperently no mb delay matching is best */
WR16(B_CP_REG_BR_STR_DEL__A, 10), /* apparently no mb delay matching is best */
WR16(B_EQ_REG_RC_SEL_CAR__A, B_EQ_REG_RC_SEL_CAR_DIV_ON | /* org = 0x81 combining enabled */
B_EQ_REG_RC_SEL_CAR_MEAS_A_CC |

Просмотреть файл

@ -1144,6 +1144,8 @@ static int EnableAndResetMB(struct drxd_state *state)
static int InitCC(struct drxd_state *state)
{
int status = 0;
if (state->osc_clock_freq == 0 ||
state->osc_clock_freq > 20000 ||
(state->osc_clock_freq % 4000) != 0) {
@ -1151,14 +1153,17 @@ static int InitCC(struct drxd_state *state)
return -1;
}
Write16(state, CC_REG_OSC_MODE__A, CC_REG_OSC_MODE_M20, 0);
Write16(state, CC_REG_PLL_MODE__A, CC_REG_PLL_MODE_BYPASS_PLL |
CC_REG_PLL_MODE_PUMP_CUR_12, 0);
Write16(state, CC_REG_REF_DIVIDE__A, state->osc_clock_freq / 4000, 0);
Write16(state, CC_REG_PWD_MODE__A, CC_REG_PWD_MODE_DOWN_PLL, 0);
Write16(state, CC_REG_UPDATE__A, CC_REG_UPDATE_KEY, 0);
status |= Write16(state, CC_REG_OSC_MODE__A, CC_REG_OSC_MODE_M20, 0);
status |= Write16(state, CC_REG_PLL_MODE__A,
CC_REG_PLL_MODE_BYPASS_PLL |
CC_REG_PLL_MODE_PUMP_CUR_12, 0);
status |= Write16(state, CC_REG_REF_DIVIDE__A,
state->osc_clock_freq / 4000, 0);
status |= Write16(state, CC_REG_PWD_MODE__A, CC_REG_PWD_MODE_DOWN_PLL,
0);
status |= Write16(state, CC_REG_UPDATE__A, CC_REG_UPDATE_KEY, 0);
return 0;
return status;
}
static int ResetECOD(struct drxd_state *state)
@ -1312,7 +1317,10 @@ static int SC_SendCommand(struct drxd_state *state, u16 cmd)
int status = 0, ret;
u16 errCode;
Write16(state, SC_RA_RAM_CMD__A, cmd, 0);
status = Write16(state, SC_RA_RAM_CMD__A, cmd, 0);
if (status < 0)
return status;
SC_WaitForReady(state);
ret = Read16(state, SC_RA_RAM_CMD_ADDR__A, &errCode, 0);
@ -1339,9 +1347,9 @@ static int SC_ProcStartCommand(struct drxd_state *state,
break;
}
SC_WaitForReady(state);
Write16(state, SC_RA_RAM_CMD_ADDR__A, subCmd, 0);
Write16(state, SC_RA_RAM_PARAM1__A, param1, 0);
Write16(state, SC_RA_RAM_PARAM0__A, param0, 0);
status |= Write16(state, SC_RA_RAM_CMD_ADDR__A, subCmd, 0);
status |= Write16(state, SC_RA_RAM_PARAM1__A, param1, 0);
status |= Write16(state, SC_RA_RAM_PARAM0__A, param0, 0);
SC_SendCommand(state, SC_RA_RAM_CMD_PROC_START);
} while (0);


@ -24,7 +24,7 @@
* @microcode_name: Name of the firmware file with the microcode
* @qam_demod_parameter_count: The number of parameters used for the command
* to set the demodulator parameters. All
* firmwares are using the 2-parameter commmand.
* firmwares are using the 2-parameter command.
* An exception is the ``drxk_a3.mc`` firmware,
* which uses the 4-parameter command.
* A value of 0 (default) or lower indicates that


@ -723,7 +723,7 @@ static int init_state(struct drxk_state *state)
state->m_drxk_state = DRXK_UNINITIALIZED;
/* MPEG output configuration */
state->m_enable_mpeg_output = true; /* If TRUE; enable MPEG ouput */
state->m_enable_mpeg_output = true; /* If TRUE; enable MPEG output */
state->m_insert_rs_byte = false; /* If TRUE; insert RS byte */
state->m_invert_data = false; /* If TRUE; invert DATA signals */
state->m_invert_err = false; /* If TRUE; invert ERR signal */
@ -3870,7 +3870,7 @@ static int set_dvbt(struct drxk_state *state, u16 intermediate_freqk_hz,
goto error;
}
#else
/* Set Priorty high */
/* Set Priority high */
transmission_params |= OFDM_SC_RA_RAM_OP_PARAM_PRIO_HI;
status = write16(state, OFDM_EC_SB_PRIOR__A, OFDM_EC_SB_PRIOR_HI);
if (status < 0)
@ -3901,7 +3901,7 @@ static int set_dvbt(struct drxk_state *state, u16 intermediate_freqk_hz,
}
/*
* SAW filter selection: normaly not necesarry, but if wanted
* SAW filter selection: normally not necessary, but if wanted
* the application can select a SAW filter via the driver by
* using UIOs
*/
@ -5423,7 +5423,7 @@ static int qam_demodulator_command(struct drxk_state *state,
set_param_parameters[3] |= (QAM_MIRROR_AUTO_ON);
/* Env parameters */
/* check for LOCKRANGE Extented */
/* check for LOCKRANGE Extended */
/* set_param_parameters[3] |= QAM_LOCKRANGE_NORMAL; */
status = scu_command(state,


@ -914,7 +914,7 @@ static int ds3000_set_frontend(struct dvb_frontend *fe)
/* ds3000 global reset */
ds3000_writereg(state, 0x07, 0x80);
ds3000_writereg(state, 0x07, 0x00);
/* ds3000 build-in uC reset */
/* ds3000 built-in uC reset */
ds3000_writereg(state, 0xb2, 0x01);
/* ds3000 software reset */
ds3000_writereg(state, 0x00, 0x01);
@ -1023,7 +1023,7 @@ static int ds3000_set_frontend(struct dvb_frontend *fe)
/* ds3000 out of software reset */
ds3000_writereg(state, 0x00, 0x00);
/* start ds3000 build-in uC */
/* start ds3000 built-in uC */
ds3000_writereg(state, 0xb2, 0x00);
if (fe->ops.tuner_ops.get_frequency) {


@ -98,7 +98,7 @@ static int isl6421_set_voltage(struct dvb_frontend *fe,
if (ret != 2)
return -EIO;
/* Store off status now incase future commands fail */
/* Store off status now in case future commands fail */
isl6421->is_off = is_off;
/* On overflow, the device will try again after 900 ms (typically) */

Просмотреть файл

@ -1685,7 +1685,10 @@ static int lgdt3306a_read_signal_strength(struct dvb_frontend *fe,
case QAM_256:
case QAM_AUTO:
/* need to know actual modulation to set proper SNR baseline */
lgdt3306a_read_reg(state, 0x00a6, &val);
ret = lgdt3306a_read_reg(state, 0x00a6, &val);
if (lg_chkerr(ret))
goto fail;
if(val & 0x04)
ref_snr = 2800; /* QAM-256 28dB */
else

Some files were not shown because too many files changed in this diff.