V4L/DVB (12534): soc-camera: V4L2 API compliant scaling (S_FMT) and cropping (S_CROP)

The initial soc-camera scaling and cropping implementation turned out to be
non-compliant with the V4L2 API, e.g., it expected the user to specify cropping
in output window pixels, instead of input window pixels. This patch converts
the soc-camera core and all drivers to comply with the standard.

Signed-off-by: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Signed-off-by: Mauro Carvalho Chehab <mchehab@redhat.com>
This commit is contained in:
Guennadi Liakhovetski 2009-08-25 11:50:46 -03:00 коммит произвёл Mauro Carvalho Chehab
Родитель 0166b74374
Коммит 6a6c878672
14 изменённых файлов: 1573 добавлений и 682 удалений

Просмотреть файл

@ -116,5 +116,45 @@ functionality.
struct soc_camera_device also links to an array of struct soc_camera_data_format,
listing pixel formats, supported by the camera.
VIDIOC_S_CROP and VIDIOC_S_FMT behaviour
----------------------------------------
Above user ioctls modify image geometry as follows:
VIDIOC_S_CROP: sets location and sizes of the sensor window. Unit is one sensor
pixel. Changing sensor window sizes preserves any scaling factors, therefore
user window sizes change as well.
VIDIOC_S_FMT: sets user window. Should preserve previously set sensor window as
much as possible by modifying scaling factors. If the sensor window cannot be
preserved precisely, it may be changed too.
In soc-camera there are two locations where scaling and cropping can take
place: in the camera driver and in the host driver. User ioctls are first passed
to the host driver, which then generally passes them down to the camera driver.
It is more efficient to perform scaling and cropping in the camera driver to
save camera bus bandwidth and maximise the framerate. However, if the camera
driver failed to set the required parameters with sufficient precision, the host
driver may decide to also use its own scaling and cropping to fulfill the user's
request.
Camera drivers are interfaced to the soc-camera core and to host drivers over
the v4l2-subdev API, which is purely functional: it doesn't pass any data.
Therefore all camera drivers shall reply to .g_fmt() requests with their current
output geometry. This is necessary to correctly configure the camera bus.
.s_fmt() and .try_fmt() have to be implemented too. Sensor window and scaling
factors have to be maintained by camera drivers internally. According to the
V4L2 API all capture drivers must support the VIDIOC_CROPCAP ioctl, hence we
rely on camera drivers implementing .cropcap(). If the camera driver does not
support cropping, it may choose to not implement .s_crop(), but to enable
cropping support by the camera host driver, at least the .g_crop() method must be
implemented.
User window geometry is kept in .user_width and .user_height fields in struct
soc_camera_device and used by the soc-camera core and host drivers. The core
updates these fields upon successful completion of a .s_fmt() call, but if these
fields change elsewhere, e.g., during .s_crop() processing, the host driver is
responsible for updating them.
--
Author: Guennadi Liakhovetski <g.liakhovetski@gmx.de>

Просмотреть файл

@ -39,6 +39,13 @@
#define MT9M001_GLOBAL_GAIN 0x35
#define MT9M001_CHIP_ENABLE 0xF1
#define MT9M001_MAX_WIDTH 1280
#define MT9M001_MAX_HEIGHT 1024
#define MT9M001_MIN_WIDTH 48
#define MT9M001_MIN_HEIGHT 32
#define MT9M001_COLUMN_SKIP 20
#define MT9M001_ROW_SKIP 12
static const struct soc_camera_data_format mt9m001_colour_formats[] = {
/* Order important: first natively supported,
* second supported with a GPIO extender */
@ -70,6 +77,8 @@ static const struct soc_camera_data_format mt9m001_monochrome_formats[] = {
struct mt9m001 {
struct v4l2_subdev subdev;
struct v4l2_rect rect; /* Sensor window */
__u32 fourcc;
int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */
unsigned char autoexposure;
};
@ -196,13 +205,31 @@ static unsigned long mt9m001_query_bus_param(struct soc_camera_device *icd)
static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
struct v4l2_rect *rect = &a->c;
struct i2c_client *client = sd->priv;
struct mt9m001 *mt9m001 = to_mt9m001(client);
struct v4l2_rect rect = a->c;
struct soc_camera_device *icd = client->dev.platform_data;
int ret;
const u16 hblank = 9, vblank = 25;
if (mt9m001->fourcc == V4L2_PIX_FMT_SBGGR8 ||
mt9m001->fourcc == V4L2_PIX_FMT_SBGGR16)
/*
* Bayer format - even number of rows for simplicity,
* but let the user play with the top row.
*/
rect.height = ALIGN(rect.height, 2);
/* Datasheet requirement: see register description */
rect.width = ALIGN(rect.width, 2);
rect.left = ALIGN(rect.left, 2);
soc_camera_limit_side(&rect.left, &rect.width,
MT9M001_COLUMN_SKIP, MT9M001_MIN_WIDTH, MT9M001_MAX_WIDTH);
soc_camera_limit_side(&rect.top, &rect.height,
MT9M001_ROW_SKIP, MT9M001_MIN_HEIGHT, MT9M001_MAX_HEIGHT);
/* Blanking and start values - default... */
ret = reg_write(client, MT9M001_HORIZONTAL_BLANKING, hblank);
if (!ret)
@ -211,46 +238,98 @@ static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
/* The caller provides a supported format, as verified per
* call to icd->try_fmt() */
if (!ret)
ret = reg_write(client, MT9M001_COLUMN_START, rect->left);
ret = reg_write(client, MT9M001_COLUMN_START, rect.left);
if (!ret)
ret = reg_write(client, MT9M001_ROW_START, rect->top);
ret = reg_write(client, MT9M001_ROW_START, rect.top);
if (!ret)
ret = reg_write(client, MT9M001_WINDOW_WIDTH, rect->width - 1);
ret = reg_write(client, MT9M001_WINDOW_WIDTH, rect.width - 1);
if (!ret)
ret = reg_write(client, MT9M001_WINDOW_HEIGHT,
rect->height + icd->y_skip_top - 1);
rect.height + icd->y_skip_top - 1);
if (!ret && mt9m001->autoexposure) {
ret = reg_write(client, MT9M001_SHUTTER_WIDTH,
rect->height + icd->y_skip_top + vblank);
rect.height + icd->y_skip_top + vblank);
if (!ret) {
const struct v4l2_queryctrl *qctrl =
soc_camera_find_qctrl(icd->ops,
V4L2_CID_EXPOSURE);
icd->exposure = (524 + (rect->height + icd->y_skip_top +
icd->exposure = (524 + (rect.height + icd->y_skip_top +
vblank - 1) *
(qctrl->maximum - qctrl->minimum)) /
1048 + qctrl->minimum;
}
}
if (!ret)
mt9m001->rect = rect;
return ret;
}
static int mt9m001_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
	struct mt9m001 *mt9m001 = to_mt9m001(sd->priv);

	/* Return the cached sensor window - the hardware is not re-read */
	a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	a->c = mt9m001->rect;

	return 0;
}
static int mt9m001_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = MT9M001_COLUMN_SKIP;
a->bounds.top = MT9M001_ROW_SKIP;
a->bounds.width = MT9M001_MAX_WIDTH;
a->bounds.height = MT9M001_MAX_HEIGHT;
a->defrect = a->bounds;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int mt9m001_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
	struct mt9m001 *mt9m001 = to_mt9m001(sd->priv);
	struct v4l2_pix_format *pix = &f->fmt.pix;

	/* No scaling: the output geometry equals the sensor window */
	pix->width		= mt9m001->rect.width;
	pix->height		= mt9m001->rect.height;
	pix->pixelformat	= mt9m001->fourcc;
	pix->field		= V4L2_FIELD_NONE;
	pix->colorspace		= V4L2_COLORSPACE_SRGB;

	return 0;
}
static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
struct soc_camera_device *icd = client->dev.platform_data;
struct mt9m001 *mt9m001 = to_mt9m001(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_crop a = {
.c = {
.left = icd->rect_current.left,
.top = icd->rect_current.top,
.width = f->fmt.pix.width,
.height = f->fmt.pix.height,
.left = mt9m001->rect.left,
.top = mt9m001->rect.top,
.width = pix->width,
.height = pix->height,
},
};
int ret;
/* No support for scaling so far, just crop. TODO: use skipping */
return mt9m001_s_crop(sd, &a);
ret = mt9m001_s_crop(sd, &a);
if (!ret) {
pix->width = mt9m001->rect.width;
pix->height = mt9m001->rect.height;
mt9m001->fourcc = pix->pixelformat;
}
return ret;
}
static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
@ -259,9 +338,14 @@ static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
struct soc_camera_device *icd = client->dev.platform_data;
struct v4l2_pix_format *pix = &f->fmt.pix;
v4l_bound_align_image(&pix->width, 48, 1280, 1,
&pix->height, 32 + icd->y_skip_top,
1024 + icd->y_skip_top, 0, 0);
v4l_bound_align_image(&pix->width, MT9M001_MIN_WIDTH,
MT9M001_MAX_WIDTH, 1,
&pix->height, MT9M001_MIN_HEIGHT + icd->y_skip_top,
MT9M001_MAX_HEIGHT + icd->y_skip_top, 0, 0);
if (pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
pix->pixelformat == V4L2_PIX_FMT_SBGGR16)
pix->height = ALIGN(pix->height - 1, 2);
return 0;
}
@ -472,11 +556,11 @@ static int mt9m001_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl)
if (ctrl->value) {
const u16 vblank = 25;
if (reg_write(client, MT9M001_SHUTTER_WIDTH,
icd->rect_current.height +
mt9m001->rect.height +
icd->y_skip_top + vblank) < 0)
return -EIO;
qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE);
icd->exposure = (524 + (icd->rect_current.height +
icd->exposure = (524 + (mt9m001->rect.height +
icd->y_skip_top + vblank - 1) *
(qctrl->maximum - qctrl->minimum)) /
1048 + qctrl->minimum;
@ -548,6 +632,8 @@ static int mt9m001_video_probe(struct soc_camera_device *icd,
if (flags & SOCAM_DATAWIDTH_8)
icd->num_formats++;
mt9m001->fourcc = icd->formats->fourcc;
dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data,
data == 0x8431 ? "C12STM" : "C12ST");
@ -556,10 +642,9 @@ static int mt9m001_video_probe(struct soc_camera_device *icd,
static void mt9m001_video_remove(struct soc_camera_device *icd)
{
struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd));
struct soc_camera_link *icl = to_soc_camera_link(icd);
dev_dbg(&client->dev, "Video %x removed: %p, %p\n", client->addr,
dev_dbg(&icd->dev, "Video removed: %p, %p\n",
icd->dev.parent, icd->vdev);
if (icl->free_bus)
icl->free_bus(icl);
@ -578,8 +663,11 @@ static struct v4l2_subdev_core_ops mt9m001_subdev_core_ops = {
static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = {
.s_stream = mt9m001_s_stream,
.s_fmt = mt9m001_s_fmt,
.g_fmt = mt9m001_g_fmt,
.try_fmt = mt9m001_try_fmt,
.s_crop = mt9m001_s_crop,
.g_crop = mt9m001_g_crop,
.cropcap = mt9m001_cropcap,
};
static struct v4l2_subdev_ops mt9m001_subdev_ops = {
@ -621,15 +709,13 @@ static int mt9m001_probe(struct i2c_client *client,
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9m001_ops;
icd->rect_max.left = 20;
icd->rect_max.top = 12;
icd->rect_max.width = 1280;
icd->rect_max.height = 1024;
icd->rect_current.left = 20;
icd->rect_current.top = 12;
icd->width_min = 48;
icd->height_min = 32;
icd->y_skip_top = 1;
mt9m001->rect.left = MT9M001_COLUMN_SKIP;
mt9m001->rect.top = MT9M001_ROW_SKIP;
mt9m001->rect.width = MT9M001_MAX_WIDTH;
mt9m001->rect.height = MT9M001_MAX_HEIGHT;
/* Simulated autoexposure. If enabled, we calculate shutter width
* ourselves in the driver based on vertical blanking and frame width */
mt9m001->autoexposure = 1;

Просмотреть файл

@ -194,7 +194,7 @@ static int mt9m111_reg_read(struct i2c_client *client, const u16 reg)
ret = reg_page_map_set(client, reg);
if (!ret)
ret = swab16(i2c_smbus_read_word_data(client, (reg & 0xff)));
ret = swab16(i2c_smbus_read_word_data(client, reg & 0xff));
dev_dbg(&client->dev, "read reg.%03x -> %04x\n", reg, ret);
return ret;
@ -257,8 +257,8 @@ static int mt9m111_setup_rect(struct i2c_client *client,
int width = rect->width;
int height = rect->height;
if ((mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8)
|| (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16))
if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 ||
mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16)
is_raw_format = 1;
else
is_raw_format = 0;
@ -395,23 +395,85 @@ static int mt9m111_set_bus_param(struct soc_camera_device *icd, unsigned long f)
return 0;
}
static int mt9m111_make_rect(struct i2c_client *client,
struct v4l2_rect *rect)
{
struct mt9m111 *mt9m111 = to_mt9m111(client);
if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 ||
mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) {
/* Bayer format - even size lengths */
rect->width = ALIGN(rect->width, 2);
rect->height = ALIGN(rect->height, 2);
/* Let the user play with the starting pixel */
}
/* FIXME: the datasheet doesn't specify minimum sizes */
soc_camera_limit_side(&rect->left, &rect->width,
MT9M111_MIN_DARK_COLS, 2, MT9M111_MAX_WIDTH);
soc_camera_limit_side(&rect->top, &rect->height,
MT9M111_MIN_DARK_ROWS, 2, MT9M111_MAX_HEIGHT);
return mt9m111_setup_rect(client, rect);
}
static int mt9m111_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
struct v4l2_rect *rect = &a->c;
struct v4l2_rect rect = a->c;
struct i2c_client *client = sd->priv;
struct mt9m111 *mt9m111 = to_mt9m111(client);
int ret;
dev_dbg(&client->dev, "%s left=%d, top=%d, width=%d, height=%d\n",
__func__, rect->left, rect->top, rect->width,
rect->height);
__func__, rect.left, rect.top, rect.width, rect.height);
ret = mt9m111_setup_rect(client, rect);
ret = mt9m111_make_rect(client, &rect);
if (!ret)
mt9m111->rect = *rect;
mt9m111->rect = rect;
return ret;
}
static int mt9m111_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
	struct mt9m111 *mt9m111 = to_mt9m111(sd->priv);

	/* Return the cached sensor window - the hardware is not re-read */
	a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	a->c = mt9m111->rect;

	return 0;
}
static int mt9m111_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = MT9M111_MIN_DARK_COLS;
a->bounds.top = MT9M111_MIN_DARK_ROWS;
a->bounds.width = MT9M111_MAX_WIDTH;
a->bounds.height = MT9M111_MAX_HEIGHT;
a->defrect = a->bounds;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int mt9m111_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
	struct mt9m111 *mt9m111 = to_mt9m111(sd->priv);
	struct v4l2_pix_format *pix = &f->fmt.pix;

	/* No scaling: the output geometry equals the sensor window */
	pix->width		= mt9m111->rect.width;
	pix->height		= mt9m111->rect.height;
	pix->pixelformat	= mt9m111->pixfmt;
	pix->field		= V4L2_FIELD_NONE;
	pix->colorspace		= V4L2_COLORSPACE_SRGB;

	return 0;
}
static int mt9m111_set_pixfmt(struct i2c_client *client, u32 pixfmt)
{
struct mt9m111 *mt9m111 = to_mt9m111(client);
@ -478,7 +540,7 @@ static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
__func__, pix->pixelformat, rect.left, rect.top, rect.width,
rect.height);
ret = mt9m111_setup_rect(client, &rect);
ret = mt9m111_make_rect(client, &rect);
if (!ret)
ret = mt9m111_set_pixfmt(client, pix->pixelformat);
if (!ret)
@ -489,11 +551,27 @@ static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
static int mt9m111_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct v4l2_pix_format *pix = &f->fmt.pix;
bool bayer = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
pix->pixelformat == V4L2_PIX_FMT_SBGGR16;
/*
* With Bayer format enforce even side lengths, but let the user play
* with the starting pixel
*/
if (pix->height > MT9M111_MAX_HEIGHT)
pix->height = MT9M111_MAX_HEIGHT;
else if (pix->height < 2)
pix->height = 2;
else if (bayer)
pix->height = ALIGN(pix->height, 2);
if (pix->width > MT9M111_MAX_WIDTH)
pix->width = MT9M111_MAX_WIDTH;
else if (pix->width < 2)
pix->width = 2;
else if (bayer)
pix->width = ALIGN(pix->width, 2);
return 0;
}
@ -906,8 +984,11 @@ static struct v4l2_subdev_core_ops mt9m111_subdev_core_ops = {
static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = {
.s_fmt = mt9m111_s_fmt,
.g_fmt = mt9m111_g_fmt,
.try_fmt = mt9m111_try_fmt,
.s_crop = mt9m111_s_crop,
.g_crop = mt9m111_g_crop,
.cropcap = mt9m111_cropcap,
};
static struct v4l2_subdev_ops mt9m111_subdev_ops = {
@ -949,16 +1030,13 @@ static int mt9m111_probe(struct i2c_client *client,
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9m111_ops;
icd->rect_max.left = MT9M111_MIN_DARK_COLS;
icd->rect_max.top = MT9M111_MIN_DARK_ROWS;
icd->rect_max.width = MT9M111_MAX_WIDTH;
icd->rect_max.height = MT9M111_MAX_HEIGHT;
icd->rect_current.left = icd->rect_max.left;
icd->rect_current.top = icd->rect_max.top;
icd->width_min = MT9M111_MIN_DARK_ROWS;
icd->height_min = MT9M111_MIN_DARK_COLS;
icd->y_skip_top = 0;
mt9m111->rect.left = MT9M111_MIN_DARK_COLS;
mt9m111->rect.top = MT9M111_MIN_DARK_ROWS;
mt9m111->rect.width = MT9M111_MAX_WIDTH;
mt9m111->rect.height = MT9M111_MAX_HEIGHT;
ret = mt9m111_video_probe(icd, client);
if (ret) {
icd->ops = NULL;

Просмотреть файл

@ -47,7 +47,7 @@
#define MT9T031_MAX_HEIGHT 1536
#define MT9T031_MAX_WIDTH 2048
#define MT9T031_MIN_HEIGHT 2
#define MT9T031_MIN_WIDTH 2
#define MT9T031_MIN_WIDTH 18
#define MT9T031_HORIZONTAL_BLANK 142
#define MT9T031_VERTICAL_BLANK 25
#define MT9T031_COLUMN_SKIP 32
@ -69,10 +69,11 @@ static const struct soc_camera_data_format mt9t031_colour_formats[] = {
struct mt9t031 {
struct v4l2_subdev subdev;
struct v4l2_rect rect; /* Sensor window */
int model; /* V4L2_IDENT_MT9T031* codes from v4l2-chip-ident.h */
unsigned char autoexposure;
u16 xskip;
u16 yskip;
unsigned char autoexposure;
};
static struct mt9t031 *to_mt9t031(const struct i2c_client *client)
@ -218,55 +219,67 @@ static unsigned long mt9t031_query_bus_param(struct soc_camera_device *icd)
return soc_camera_apply_sensor_flags(icl, MT9T031_BUS_PARAM);
}
/* Round up minima and round down maxima */
static void recalculate_limits(struct soc_camera_device *icd,
u16 xskip, u16 yskip)
/* target must be _even_ */
static u16 mt9t031_skip(s32 *source, s32 target, s32 max)
{
icd->rect_max.left = (MT9T031_COLUMN_SKIP + xskip - 1) / xskip;
icd->rect_max.top = (MT9T031_ROW_SKIP + yskip - 1) / yskip;
icd->width_min = (MT9T031_MIN_WIDTH + xskip - 1) / xskip;
icd->height_min = (MT9T031_MIN_HEIGHT + yskip - 1) / yskip;
icd->rect_max.width = MT9T031_MAX_WIDTH / xskip;
icd->rect_max.height = MT9T031_MAX_HEIGHT / yskip;
unsigned int skip;
if (*source < target + target / 2) {
*source = target;
return 1;
}
skip = min(max, *source + target / 2) / target;
if (skip > 8)
skip = 8;
*source = target * skip;
return skip;
}
/* rect is the sensor rectangle, the caller guarantees parameter validity */
static int mt9t031_set_params(struct soc_camera_device *icd,
struct v4l2_rect *rect, u16 xskip, u16 yskip)
{
struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd));
struct mt9t031 *mt9t031 = to_mt9t031(client);
int ret;
u16 xbin, ybin, width, height, left, top;
u16 xbin, ybin;
const u16 hblank = MT9T031_HORIZONTAL_BLANK,
vblank = MT9T031_VERTICAL_BLANK;
width = rect->width * xskip;
height = rect->height * yskip;
left = rect->left * xskip;
top = rect->top * yskip;
xbin = min(xskip, (u16)3);
ybin = min(yskip, (u16)3);
dev_dbg(&client->dev, "xskip %u, width %u/%u, yskip %u, height %u/%u\n",
xskip, width, rect->width, yskip, height, rect->height);
/* Could just do roundup(rect->left, [xy]bin * 2); but this is cheaper */
/*
* Could just do roundup(rect->left, [xy]bin * 2); but this is cheaper.
* There is always a valid suitably aligned value. The worst case is
* xbin = 3, width = 2048. Then we will start at 36, the last read out
* pixel will be 2083, which is < 2085 - first black pixel.
*
* MT9T031 datasheet imposes window left border alignment, depending on
* the selected xskip. Failing to conform to this requirement produces
* dark horizontal stripes in the image. However, even obeying to this
* requirement doesn't eliminate the stripes in all configurations. They
* appear "locally reproducibly," but can differ between tests under
* different lighting conditions.
*/
switch (xbin) {
case 1:
rect->left &= ~1;
break;
case 2:
left = (left + 3) & ~3;
rect->left &= ~3;
break;
case 3:
left = roundup(left, 6);
rect->left = rect->left > roundup(MT9T031_COLUMN_SKIP, 6) ?
(rect->left / 6) * 6 : roundup(MT9T031_COLUMN_SKIP, 6);
}
switch (ybin) {
case 2:
top = (top + 3) & ~3;
break;
case 3:
top = roundup(top, 6);
}
rect->top &= ~1;
dev_dbg(&client->dev, "skip %u:%u, rect %ux%u@%u:%u\n",
xskip, yskip, rect->width, rect->height, rect->left, rect->top);
/* Disable register update, reconfigure atomically */
ret = reg_set(client, MT9T031_OUTPUT_CONTROL, 1);
@ -287,27 +300,29 @@ static int mt9t031_set_params(struct soc_camera_device *icd,
ret = reg_write(client, MT9T031_ROW_ADDRESS_MODE,
((ybin - 1) << 4) | (yskip - 1));
}
dev_dbg(&client->dev, "new physical left %u, top %u\n", left, top);
dev_dbg(&client->dev, "new physical left %u, top %u\n",
rect->left, rect->top);
/* The caller provides a supported format, as guaranteed by
* icd->try_fmt_cap(), soc_camera_s_crop() and soc_camera_cropcap() */
if (ret >= 0)
ret = reg_write(client, MT9T031_COLUMN_START, left);
ret = reg_write(client, MT9T031_COLUMN_START, rect->left);
if (ret >= 0)
ret = reg_write(client, MT9T031_ROW_START, top);
ret = reg_write(client, MT9T031_ROW_START, rect->top);
if (ret >= 0)
ret = reg_write(client, MT9T031_WINDOW_WIDTH, width - 1);
ret = reg_write(client, MT9T031_WINDOW_WIDTH, rect->width - 1);
if (ret >= 0)
ret = reg_write(client, MT9T031_WINDOW_HEIGHT,
height + icd->y_skip_top - 1);
rect->height + icd->y_skip_top - 1);
if (ret >= 0 && mt9t031->autoexposure) {
ret = set_shutter(client, height + icd->y_skip_top + vblank);
ret = set_shutter(client,
rect->height + icd->y_skip_top + vblank);
if (ret >= 0) {
const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank;
const struct v4l2_queryctrl *qctrl =
soc_camera_find_qctrl(icd->ops,
V4L2_CID_EXPOSURE);
icd->exposure = (shutter_max / 2 + (height +
icd->exposure = (shutter_max / 2 + (rect->height +
icd->y_skip_top + vblank - 1) *
(qctrl->maximum - qctrl->minimum)) /
shutter_max + qctrl->minimum;
@ -318,27 +333,72 @@ static int mt9t031_set_params(struct soc_camera_device *icd,
if (ret >= 0)
ret = reg_clear(client, MT9T031_OUTPUT_CONTROL, 1);
if (ret >= 0) {
mt9t031->rect = *rect;
mt9t031->xskip = xskip;
mt9t031->yskip = yskip;
}
return ret < 0 ? ret : 0;
}
static int mt9t031_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
struct v4l2_rect *rect = &a->c;
struct v4l2_rect rect = a->c;
struct i2c_client *client = sd->priv;
struct mt9t031 *mt9t031 = to_mt9t031(client);
struct soc_camera_device *icd = client->dev.platform_data;
/* Make sure we don't exceed sensor limits */
if (rect->left + rect->width > icd->rect_max.left + icd->rect_max.width)
rect->left = icd->rect_max.width + icd->rect_max.left -
rect->width;
rect.width = ALIGN(rect.width, 2);
rect.height = ALIGN(rect.height, 2);
if (rect->top + rect->height > icd->rect_max.height + icd->rect_max.top)
rect->top = icd->rect_max.height + icd->rect_max.top -
rect->height;
soc_camera_limit_side(&rect.left, &rect.width,
MT9T031_COLUMN_SKIP, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH);
/* CROP - no change in scaling, or in limits */
return mt9t031_set_params(icd, rect, mt9t031->xskip, mt9t031->yskip);
soc_camera_limit_side(&rect.top, &rect.height,
MT9T031_ROW_SKIP, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT);
return mt9t031_set_params(icd, &rect, mt9t031->xskip, mt9t031->yskip);
}
static int mt9t031_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
	struct mt9t031 *mt9t031 = to_mt9t031(sd->priv);

	/* Return the cached sensor window - the hardware is not re-read */
	a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	a->c = mt9t031->rect;

	return 0;
}
static int mt9t031_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = MT9T031_COLUMN_SKIP;
a->bounds.top = MT9T031_ROW_SKIP;
a->bounds.width = MT9T031_MAX_WIDTH;
a->bounds.height = MT9T031_MAX_HEIGHT;
a->defrect = a->bounds;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int mt9t031_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
	struct mt9t031 *mt9t031 = to_mt9t031(sd->priv);
	struct v4l2_pix_format *pix = &f->fmt.pix;

	/*
	 * The user window is the sensor window scaled down by the current
	 * horizontal and vertical skip factors.
	 */
	pix->width		= mt9t031->rect.width / mt9t031->xskip;
	pix->height		= mt9t031->rect.height / mt9t031->yskip;
	pix->pixelformat	= V4L2_PIX_FMT_SGRBG10;
	pix->field		= V4L2_FIELD_NONE;
	pix->colorspace		= V4L2_COLORSPACE_SRGB;

	return 0;
}
static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
@ -346,40 +406,25 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
struct i2c_client *client = sd->priv;
struct mt9t031 *mt9t031 = to_mt9t031(client);
struct soc_camera_device *icd = client->dev.platform_data;
int ret;
struct v4l2_pix_format *pix = &f->fmt.pix;
u16 xskip, yskip;
struct v4l2_rect rect = {
.left = icd->rect_current.left,
.top = icd->rect_current.top,
.width = f->fmt.pix.width,
.height = f->fmt.pix.height,
};
struct v4l2_rect rect = mt9t031->rect;
/*
* try_fmt has put rectangle within limits.
* S_FMT - use binning and skipping for scaling, recalculate
* limits, used for cropping
* try_fmt has put width and height within limits.
* S_FMT: use binning and skipping for scaling
*/
/* Is this more optimal than just a division? */
for (xskip = 8; xskip > 1; xskip--)
if (rect.width * xskip <= MT9T031_MAX_WIDTH)
break;
xskip = mt9t031_skip(&rect.width, pix->width, MT9T031_MAX_WIDTH);
yskip = mt9t031_skip(&rect.height, pix->height, MT9T031_MAX_HEIGHT);
for (yskip = 8; yskip > 1; yskip--)
if (rect.height * yskip <= MT9T031_MAX_HEIGHT)
break;
recalculate_limits(icd, xskip, yskip);
ret = mt9t031_set_params(icd, &rect, xskip, yskip);
if (!ret) {
mt9t031->xskip = xskip;
mt9t031->yskip = yskip;
}
return ret;
/* mt9t031_set_params() doesn't change width and height */
return mt9t031_set_params(icd, &rect, xskip, yskip);
}
/*
* If a user window larger than sensor window is requested, we'll increase the
* sensor window.
*/
static int mt9t031_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct v4l2_pix_format *pix = &f->fmt.pix;
@ -620,12 +665,12 @@ static int mt9t031_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl)
if (ctrl->value) {
const u16 vblank = MT9T031_VERTICAL_BLANK;
const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank;
if (set_shutter(client, icd->rect_current.height +
if (set_shutter(client, mt9t031->rect.height +
icd->y_skip_top + vblank) < 0)
return -EIO;
qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE);
icd->exposure = (shutter_max / 2 +
(icd->rect_current.height +
(mt9t031->rect.height +
icd->y_skip_top + vblank - 1) *
(qctrl->maximum - qctrl->minimum)) /
shutter_max + qctrl->minimum;
@ -645,12 +690,6 @@ static int mt9t031_video_probe(struct i2c_client *client)
struct mt9t031 *mt9t031 = to_mt9t031(client);
s32 data;
/* We must have a parent by now. And it cannot be a wrong one.
* So this entire test is completely redundant. */
if (!icd->dev.parent ||
to_soc_camera_host(icd->dev.parent)->nr != icd->iface)
return -ENODEV;
/* Enable the chip */
data = reg_write(client, MT9T031_CHIP_ENABLE, 1);
dev_dbg(&client->dev, "write: %d\n", data);
@ -688,8 +727,11 @@ static struct v4l2_subdev_core_ops mt9t031_subdev_core_ops = {
static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = {
.s_stream = mt9t031_s_stream,
.s_fmt = mt9t031_s_fmt,
.g_fmt = mt9t031_g_fmt,
.try_fmt = mt9t031_try_fmt,
.s_crop = mt9t031_s_crop,
.g_crop = mt9t031_g_crop,
.cropcap = mt9t031_cropcap,
};
static struct v4l2_subdev_ops mt9t031_subdev_ops = {
@ -731,15 +773,13 @@ static int mt9t031_probe(struct i2c_client *client,
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9t031_ops;
icd->rect_max.left = MT9T031_COLUMN_SKIP;
icd->rect_max.top = MT9T031_ROW_SKIP;
icd->rect_current.left = icd->rect_max.left;
icd->rect_current.top = icd->rect_max.top;
icd->width_min = MT9T031_MIN_WIDTH;
icd->rect_max.width = MT9T031_MAX_WIDTH;
icd->height_min = MT9T031_MIN_HEIGHT;
icd->rect_max.height = MT9T031_MAX_HEIGHT;
icd->y_skip_top = 0;
mt9t031->rect.left = MT9T031_COLUMN_SKIP;
mt9t031->rect.top = MT9T031_ROW_SKIP;
mt9t031->rect.width = MT9T031_MAX_WIDTH;
mt9t031->rect.height = MT9T031_MAX_HEIGHT;
/* Simulated autoexposure. If enabled, we calculate shutter width
* ourselves in the driver based on vertical blanking and frame width */
mt9t031->autoexposure = 1;

Просмотреть файл

@ -55,6 +55,13 @@ MODULE_PARM_DESC(sensor_type, "Sensor type: \"colour\" or \"monochrome\"");
/* Progressive scan, master, defaults */
#define MT9V022_CHIP_CONTROL_DEFAULT 0x188
#define MT9V022_MAX_WIDTH 752
#define MT9V022_MAX_HEIGHT 480
#define MT9V022_MIN_WIDTH 48
#define MT9V022_MIN_HEIGHT 32
#define MT9V022_COLUMN_SKIP 1
#define MT9V022_ROW_SKIP 4
static const struct soc_camera_data_format mt9v022_colour_formats[] = {
/* Order important: first natively supported,
* second supported with a GPIO extender */
@ -86,6 +93,8 @@ static const struct soc_camera_data_format mt9v022_monochrome_formats[] = {
struct mt9v022 {
struct v4l2_subdev subdev;
struct v4l2_rect rect; /* Sensor window */
__u32 fourcc;
int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */
u16 chip_control;
};
@ -250,44 +259,101 @@ static unsigned long mt9v022_query_bus_param(struct soc_camera_device *icd)
static int mt9v022_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
struct v4l2_rect *rect = &a->c;
struct i2c_client *client = sd->priv;
struct mt9v022 *mt9v022 = to_mt9v022(client);
struct v4l2_rect rect = a->c;
struct soc_camera_device *icd = client->dev.platform_data;
int ret;
/* Bayer format - even size lengths */
if (mt9v022->fourcc == V4L2_PIX_FMT_SBGGR8 ||
mt9v022->fourcc == V4L2_PIX_FMT_SBGGR16) {
rect.width = ALIGN(rect.width, 2);
rect.height = ALIGN(rect.height, 2);
/* Let the user play with the starting pixel */
}
soc_camera_limit_side(&rect.left, &rect.width,
MT9V022_COLUMN_SKIP, MT9V022_MIN_WIDTH, MT9V022_MAX_WIDTH);
soc_camera_limit_side(&rect.top, &rect.height,
MT9V022_ROW_SKIP, MT9V022_MIN_HEIGHT, MT9V022_MAX_HEIGHT);
/* Like in example app. Contradicts the datasheet though */
ret = reg_read(client, MT9V022_AEC_AGC_ENABLE);
if (ret >= 0) {
if (ret & 1) /* Autoexposure */
ret = reg_write(client, MT9V022_MAX_TOTAL_SHUTTER_WIDTH,
rect->height + icd->y_skip_top + 43);
rect.height + icd->y_skip_top + 43);
else
ret = reg_write(client, MT9V022_TOTAL_SHUTTER_WIDTH,
rect->height + icd->y_skip_top + 43);
rect.height + icd->y_skip_top + 43);
}
/* Setup frame format: defaults apart from width and height */
if (!ret)
ret = reg_write(client, MT9V022_COLUMN_START, rect->left);
ret = reg_write(client, MT9V022_COLUMN_START, rect.left);
if (!ret)
ret = reg_write(client, MT9V022_ROW_START, rect->top);
ret = reg_write(client, MT9V022_ROW_START, rect.top);
if (!ret)
/* Default 94, Phytec driver says:
* "width + horizontal blank >= 660" */
ret = reg_write(client, MT9V022_HORIZONTAL_BLANKING,
rect->width > 660 - 43 ? 43 :
660 - rect->width);
rect.width > 660 - 43 ? 43 :
660 - rect.width);
if (!ret)
ret = reg_write(client, MT9V022_VERTICAL_BLANKING, 45);
if (!ret)
ret = reg_write(client, MT9V022_WINDOW_WIDTH, rect->width);
ret = reg_write(client, MT9V022_WINDOW_WIDTH, rect.width);
if (!ret)
ret = reg_write(client, MT9V022_WINDOW_HEIGHT,
rect->height + icd->y_skip_top);
rect.height + icd->y_skip_top);
if (ret < 0)
return ret;
dev_dbg(&client->dev, "Frame %ux%u pixel\n", rect->width, rect->height);
dev_dbg(&client->dev, "Frame %ux%u pixel\n", rect.width, rect.height);
mt9v022->rect = rect;
return 0;
}
static int mt9v022_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
	struct mt9v022 *mt9v022 = to_mt9v022(sd->priv);

	/* Return the cached sensor window - the hardware is not re-read */
	a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	a->c = mt9v022->rect;

	return 0;
}
static int mt9v022_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = MT9V022_COLUMN_SKIP;
a->bounds.top = MT9V022_ROW_SKIP;
a->bounds.width = MT9V022_MAX_WIDTH;
a->bounds.height = MT9V022_MAX_HEIGHT;
a->defrect = a->bounds;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int mt9v022_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
	struct mt9v022 *mt9v022 = to_mt9v022(sd->priv);
	struct v4l2_pix_format *pix = &f->fmt.pix;

	/* No scaling: the output geometry equals the sensor window */
	pix->width		= mt9v022->rect.width;
	pix->height		= mt9v022->rect.height;
	pix->pixelformat	= mt9v022->fourcc;
	pix->field		= V4L2_FIELD_NONE;
	pix->colorspace		= V4L2_COLORSPACE_SRGB;

	return 0;
}
@ -296,16 +362,16 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
struct mt9v022 *mt9v022 = to_mt9v022(client);
struct soc_camera_device *icd = client->dev.platform_data;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_crop a = {
.c = {
.left = icd->rect_current.left,
.top = icd->rect_current.top,
.left = mt9v022->rect.left,
.top = mt9v022->rect.top,
.width = pix->width,
.height = pix->height,
},
};
int ret;
/* The caller provides a supported format, as verified per call to
* icd->try_fmt(), datawidth is from our supported format list */
@ -328,7 +394,14 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
}
/* No support for scaling on this camera, just crop. */
return mt9v022_s_crop(sd, &a);
ret = mt9v022_s_crop(sd, &a);
if (!ret) {
pix->width = mt9v022->rect.width;
pix->height = mt9v022->rect.height;
mt9v022->fourcc = pix->pixelformat;
}
return ret;
}
static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
@ -336,10 +409,13 @@ static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
struct i2c_client *client = sd->priv;
struct soc_camera_device *icd = client->dev.platform_data;
struct v4l2_pix_format *pix = &f->fmt.pix;
int align = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
pix->pixelformat == V4L2_PIX_FMT_SBGGR16;
v4l_bound_align_image(&pix->width, 48, 752, 2 /* ? */,
&pix->height, 32 + icd->y_skip_top,
480 + icd->y_skip_top, 0, 0);
v4l_bound_align_image(&pix->width, MT9V022_MIN_WIDTH,
MT9V022_MAX_WIDTH, align,
&pix->height, MT9V022_MIN_HEIGHT + icd->y_skip_top,
MT9V022_MAX_HEIGHT + icd->y_skip_top, align, 0);
return 0;
}
@ -669,6 +745,8 @@ static int mt9v022_video_probe(struct soc_camera_device *icd,
if (flags & SOCAM_DATAWIDTH_8)
icd->num_formats++;
mt9v022->fourcc = icd->formats->fourcc;
dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n",
data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ?
"monochrome" : "colour");
@ -679,10 +757,9 @@ ei2c:
static void mt9v022_video_remove(struct soc_camera_device *icd)
{
struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd));
struct soc_camera_link *icl = to_soc_camera_link(icd);
dev_dbg(&client->dev, "Video %x removed: %p, %p\n", client->addr,
dev_dbg(&icd->dev, "Video removed: %p, %p\n",
icd->dev.parent, icd->vdev);
if (icl->free_bus)
icl->free_bus(icl);
@ -701,8 +778,11 @@ static struct v4l2_subdev_core_ops mt9v022_subdev_core_ops = {
static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = {
.s_stream = mt9v022_s_stream,
.s_fmt = mt9v022_s_fmt,
.g_fmt = mt9v022_g_fmt,
.try_fmt = mt9v022_try_fmt,
.s_crop = mt9v022_s_crop,
.g_crop = mt9v022_g_crop,
.cropcap = mt9v022_cropcap,
};
static struct v4l2_subdev_ops mt9v022_subdev_ops = {
@ -745,16 +825,13 @@ static int mt9v022_probe(struct i2c_client *client,
mt9v022->chip_control = MT9V022_CHIP_CONTROL_DEFAULT;
icd->ops = &mt9v022_ops;
icd->rect_max.left = 1;
icd->rect_max.top = 4;
icd->rect_max.width = 752;
icd->rect_max.height = 480;
icd->rect_current.left = 1;
icd->rect_current.top = 4;
icd->width_min = 48;
icd->height_min = 32;
icd->y_skip_top = 1;
mt9v022->rect.left = MT9V022_COLUMN_SKIP;
mt9v022->rect.top = MT9V022_ROW_SKIP;
mt9v022->rect.width = MT9V022_MAX_WIDTH;
mt9v022->rect.height = MT9V022_MAX_HEIGHT;
ret = mt9v022_video_probe(icd, client);
if (ret) {
icd->ops = NULL;

Просмотреть файл

@ -126,7 +126,7 @@ static int mx1_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
{
struct soc_camera_device *icd = vq->priv_data;
*size = icd->rect_current.width * icd->rect_current.height *
*size = icd->user_width * icd->user_height *
((icd->current_fmt->depth + 7) >> 3);
if (!*count)
@ -178,12 +178,12 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq,
buf->inwork = 1;
if (buf->fmt != icd->current_fmt ||
vb->width != icd->rect_current.width ||
vb->height != icd->rect_current.height ||
vb->width != icd->user_width ||
vb->height != icd->user_height ||
vb->field != field) {
buf->fmt = icd->current_fmt;
vb->width = icd->rect_current.width;
vb->height = icd->rect_current.height;
vb->width = icd->user_width;
vb->height = icd->user_height;
vb->field = field;
vb->state = VIDEOBUF_NEEDS_INIT;
}

Просмотреть файл

@ -220,7 +220,7 @@ static int mx3_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
if (!mx3_cam->idmac_channel[0])
return -EINVAL;
*size = icd->rect_current.width * icd->rect_current.height * bpp;
*size = icd->user_width * icd->user_height * bpp;
if (!*count)
*count = 32;
@ -241,7 +241,7 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq,
struct mx3_camera_buffer *buf =
container_of(vb, struct mx3_camera_buffer, vb);
/* current_fmt _must_ always be set */
size_t new_size = icd->rect_current.width * icd->rect_current.height *
size_t new_size = icd->user_width * icd->user_height *
((icd->current_fmt->depth + 7) >> 3);
int ret;
@ -251,12 +251,12 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq,
*/
if (buf->fmt != icd->current_fmt ||
vb->width != icd->rect_current.width ||
vb->height != icd->rect_current.height ||
vb->width != icd->user_width ||
vb->height != icd->user_height ||
vb->field != field) {
buf->fmt = icd->current_fmt;
vb->width = icd->rect_current.width;
vb->height = icd->rect_current.height;
vb->width = icd->user_width;
vb->height = icd->user_height;
vb->field = field;
if (vb->state != VIDEOBUF_NEEDS_INIT)
free_buffer(vq, buf);
@ -354,9 +354,9 @@ static void mx3_videobuf_queue(struct videobuf_queue *vq,
/* This is the configuration of one sg-element */
video->out_pixel_fmt = fourcc_to_ipu_pix(data_fmt->fourcc);
video->out_width = icd->rect_current.width;
video->out_height = icd->rect_current.height;
video->out_stride = icd->rect_current.width;
video->out_width = icd->user_width;
video->out_height = icd->user_height;
video->out_stride = icd->user_width;
#ifdef DEBUG
/* helps to see what DMA actually has written */
@ -541,7 +541,7 @@ static bool channel_change_requested(struct soc_camera_device *icd,
/* Do buffers have to be re-allocated or channel re-configured? */
return ichan && rect->width * rect->height >
icd->rect_current.width * icd->rect_current.height;
icd->user_width * icd->user_height;
}
static int test_platform_param(struct mx3_camera_dev *mx3_cam,
@ -589,8 +589,8 @@ static int test_platform_param(struct mx3_camera_dev *mx3_cam,
*flags |= SOCAM_DATAWIDTH_4;
break;
default:
dev_info(mx3_cam->soc_host.v4l2_dev.dev, "Unsupported bus width %d\n",
buswidth);
dev_warn(mx3_cam->soc_host.v4l2_dev.dev,
"Unsupported bus width %d\n", buswidth);
return -EINVAL;
}
@ -605,8 +605,7 @@ static int mx3_camera_try_bus_param(struct soc_camera_device *icd,
unsigned long bus_flags, camera_flags;
int ret = test_platform_param(mx3_cam, depth, &bus_flags);
dev_dbg(icd->dev.parent, "requested bus width %d bit: %d\n",
depth, ret);
dev_dbg(icd->dev.parent, "request bus width %d bit: %d\n", depth, ret);
if (ret < 0)
return ret;
@ -727,13 +726,13 @@ passthrough:
}
static void configure_geometry(struct mx3_camera_dev *mx3_cam,
struct v4l2_rect *rect)
unsigned int width, unsigned int height)
{
u32 ctrl, width_field, height_field;
/* Setup frame size - this cannot be changed on-the-fly... */
width_field = rect->width - 1;
height_field = rect->height - 1;
width_field = width - 1;
height_field = height - 1;
csi_reg_write(mx3_cam, width_field | (height_field << 16), CSI_SENS_FRM_SIZE);
csi_reg_write(mx3_cam, width_field << 16, CSI_FLASH_STROBE_1);
@ -745,11 +744,6 @@ static void configure_geometry(struct mx3_camera_dev *mx3_cam,
ctrl = csi_reg_read(mx3_cam, CSI_OUT_FRM_CTRL) & 0xffff0000;
/* Sensor does the cropping */
csi_reg_write(mx3_cam, ctrl | 0 | (0 << 8), CSI_OUT_FRM_CTRL);
/*
* No need to free resources here if we fail, we'll see if we need to
* do this next time we are called
*/
}
static int acquire_dma_channel(struct mx3_camera_dev *mx3_cam)
@ -786,6 +780,22 @@ static int acquire_dma_channel(struct mx3_camera_dev *mx3_cam)
return 0;
}
/*
* FIXME: learn to use stride != width, then we can keep stride properly aligned
* and support arbitrary (even) widths.
*/
static inline void stride_align(__s32 *width)
{
if (((*width + 7) & ~7) < 4096)
*width = (*width + 7) & ~7;
else
*width = *width & ~7;
}
/*
* As long as we don't implement host-side cropping and scaling, we can use
* default g_crop and cropcap from soc_camera.c
*/
static int mx3_camera_set_crop(struct soc_camera_device *icd,
struct v4l2_crop *a)
{
@ -793,20 +803,51 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct mx3_camera_dev *mx3_cam = ici->priv;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
struct v4l2_pix_format *pix = &f.fmt.pix;
int ret;
/*
* We now know pixel formats and can decide upon DMA-channel(s)
* So far only direct camera-to-memory is supported
*/
if (channel_change_requested(icd, rect)) {
int ret = acquire_dma_channel(mx3_cam);
soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
soc_camera_limit_side(&rect->top, &rect->height, 0, 2, 4096);
ret = v4l2_subdev_call(sd, video, s_crop, a);
if (ret < 0)
return ret;
/* The capture device might have changed its output */
ret = v4l2_subdev_call(sd, video, g_fmt, &f);
if (ret < 0)
return ret;
if (pix->width & 7) {
/* Ouch! We can only handle 8-byte aligned width... */
stride_align(&pix->width);
ret = v4l2_subdev_call(sd, video, s_fmt, &f);
if (ret < 0)
return ret;
}
configure_geometry(mx3_cam, rect);
if (pix->width != icd->user_width || pix->height != icd->user_height) {
/*
* We now know pixel formats and can decide upon DMA-channel(s)
* So far only direct camera-to-memory is supported
*/
if (channel_change_requested(icd, rect)) {
int ret = acquire_dma_channel(mx3_cam);
if (ret < 0)
return ret;
}
return v4l2_subdev_call(sd, video, s_crop, a);
configure_geometry(mx3_cam, pix->width, pix->height);
}
dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n",
pix->width, pix->height);
icd->user_width = pix->width;
icd->user_height = pix->height;
return ret;
}
static int mx3_camera_set_fmt(struct soc_camera_device *icd,
@ -817,12 +858,6 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_rect rect = {
.left = icd->rect_current.left,
.top = icd->rect_current.top,
.width = pix->width,
.height = pix->height,
};
int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
@ -832,6 +867,9 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
return -EINVAL;
}
stride_align(&pix->width);
dev_dbg(icd->dev.parent, "Set format %dx%d\n", pix->width, pix->height);
ret = acquire_dma_channel(mx3_cam);
if (ret < 0)
return ret;
@ -842,7 +880,7 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
* mxc_v4l2_s_fmt()
*/
configure_geometry(mx3_cam, &rect);
configure_geometry(mx3_cam, pix->width, pix->height);
ret = v4l2_subdev_call(sd, video, s_fmt, f);
if (!ret) {
@ -850,6 +888,8 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
icd->current_fmt = xlate->host_fmt;
}
dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height);
return ret;
}

Просмотреть файл

@ -382,11 +382,10 @@ struct regval_list {
};
struct ov772x_color_format {
char *name;
__u32 fourcc;
u8 dsp3;
u8 com3;
u8 com7;
const struct soc_camera_data_format *format;
u8 dsp3;
u8 com3;
u8 com7;
};
struct ov772x_win_size {
@ -481,43 +480,43 @@ static const struct soc_camera_data_format ov772x_fmt_lists[] = {
*/
static const struct ov772x_color_format ov772x_cfmts[] = {
{
SETFOURCC(YUYV),
.format = &ov772x_fmt_lists[0],
.dsp3 = 0x0,
.com3 = SWAP_YUV,
.com7 = OFMT_YUV,
},
{
SETFOURCC(YVYU),
.format = &ov772x_fmt_lists[1],
.dsp3 = UV_ON,
.com3 = SWAP_YUV,
.com7 = OFMT_YUV,
},
{
SETFOURCC(UYVY),
.format = &ov772x_fmt_lists[2],
.dsp3 = 0x0,
.com3 = 0x0,
.com7 = OFMT_YUV,
},
{
SETFOURCC(RGB555),
.format = &ov772x_fmt_lists[3],
.dsp3 = 0x0,
.com3 = SWAP_RGB,
.com7 = FMT_RGB555 | OFMT_RGB,
},
{
SETFOURCC(RGB555X),
.format = &ov772x_fmt_lists[4],
.dsp3 = 0x0,
.com3 = 0x0,
.com7 = FMT_RGB555 | OFMT_RGB,
},
{
SETFOURCC(RGB565),
.format = &ov772x_fmt_lists[5],
.dsp3 = 0x0,
.com3 = SWAP_RGB,
.com7 = FMT_RGB565 | OFMT_RGB,
},
{
SETFOURCC(RGB565X),
.format = &ov772x_fmt_lists[6],
.dsp3 = 0x0,
.com3 = 0x0,
.com7 = FMT_RGB565 | OFMT_RGB,
@ -648,8 +647,8 @@ static int ov772x_s_stream(struct v4l2_subdev *sd, int enable)
ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0);
dev_dbg(&client->dev,
"format %s, win %s\n", priv->fmt->name, priv->win->name);
dev_dbg(&client->dev, "format %s, win %s\n",
priv->fmt->format->name, priv->win->name);
return 0;
}
@ -818,7 +817,7 @@ static int ov772x_set_params(struct i2c_client *client,
*/
priv->fmt = NULL;
for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) {
if (pixfmt == ov772x_cfmts[i].fourcc) {
if (pixfmt == ov772x_cfmts[i].format->fourcc) {
priv->fmt = ov772x_cfmts + i;
break;
}
@ -955,6 +954,56 @@ ov772x_set_fmt_error:
return ret;
}
static int ov772x_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
a->c.left = 0;
a->c.top = 0;
a->c.width = VGA_WIDTH;
a->c.height = VGA_HEIGHT;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
return 0;
}
static int ov772x_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = 0;
a->bounds.top = 0;
a->bounds.width = VGA_WIDTH;
a->bounds.height = VGA_HEIGHT;
a->defrect = a->bounds;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int ov772x_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
struct ov772x_priv *priv = to_ov772x(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
if (!priv->win || !priv->fmt) {
u32 width = VGA_WIDTH, height = VGA_HEIGHT;
int ret = ov772x_set_params(client, &width, &height,
V4L2_PIX_FMT_YUYV);
if (ret < 0)
return ret;
}
f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
pix->width = priv->win->width;
pix->height = priv->win->height;
pix->pixelformat = priv->fmt->format->fourcc;
pix->colorspace = priv->fmt->format->colorspace;
pix->field = V4L2_FIELD_NONE;
return 0;
}
static int ov772x_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
@ -1060,8 +1109,11 @@ static struct v4l2_subdev_core_ops ov772x_subdev_core_ops = {
static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = {
.s_stream = ov772x_s_stream,
.g_fmt = ov772x_g_fmt,
.s_fmt = ov772x_s_fmt,
.try_fmt = ov772x_try_fmt,
.cropcap = ov772x_cropcap,
.g_crop = ov772x_g_crop,
};
static struct v4l2_subdev_ops ov772x_subdev_ops = {
@ -1110,8 +1162,6 @@ static int ov772x_probe(struct i2c_client *client,
v4l2_i2c_subdev_init(&priv->subdev, client, &ov772x_subdev_ops);
icd->ops = &ov772x_ops;
icd->rect_max.width = MAX_WIDTH;
icd->rect_max.height = MAX_HEIGHT;
ret = ov772x_video_probe(icd, client);
if (ret) {

Просмотреть файл

@ -225,6 +225,10 @@ struct pxa_camera_dev {
u32 save_cicr[5];
};
struct pxa_cam {
unsigned long flags;
};
static const char *pxa_cam_driver_description = "PXA_Camera";
static unsigned int vid_limit = 16; /* Video memory limit, in Mb */
@ -239,7 +243,7 @@ static int pxa_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size);
*size = roundup(icd->rect_current.width * icd->rect_current.height *
*size = roundup(icd->user_width * icd->user_height *
((icd->current_fmt->depth + 7) >> 3), 8);
if (0 == *count)
@ -443,12 +447,12 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq,
buf->inwork = 1;
if (buf->fmt != icd->current_fmt ||
vb->width != icd->rect_current.width ||
vb->height != icd->rect_current.height ||
vb->width != icd->user_width ||
vb->height != icd->user_height ||
vb->field != field) {
buf->fmt = icd->current_fmt;
vb->width = icd->rect_current.width;
vb->height = icd->rect_current.height;
vb->width = icd->user_width;
vb->height = icd->user_height;
vb->field = field;
vb->state = VIDEOBUF_NEEDS_INIT;
}
@ -839,7 +843,7 @@ static u32 mclk_get_divisor(struct platform_device *pdev,
struct pxa_camera_dev *pcdev)
{
unsigned long mclk = pcdev->mclk;
struct device *dev = pcdev->soc_host.v4l2_dev.dev;
struct device *dev = &pdev->dev;
u32 div;
unsigned long lcdclk;
@ -1040,13 +1044,100 @@ static int test_platform_param(struct pxa_camera_dev *pcdev,
return 0;
}
static void pxa_camera_setup_cicr(struct soc_camera_device *icd,
unsigned long flags, __u32 pixfmt)
{
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct pxa_camera_dev *pcdev = ici->priv;
unsigned long dw, bpp;
u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0;
/* Datawidth is now guaranteed to be equal to one of the three values.
* We fix bit-per-pixel equal to data-width... */
switch (flags & SOCAM_DATAWIDTH_MASK) {
case SOCAM_DATAWIDTH_10:
dw = 4;
bpp = 0x40;
break;
case SOCAM_DATAWIDTH_9:
dw = 3;
bpp = 0x20;
break;
default:
/* Actually it can only be 8 now,
* default is just to silence compiler warnings */
case SOCAM_DATAWIDTH_8:
dw = 2;
bpp = 0;
}
if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN)
cicr4 |= CICR4_PCLK_EN;
if (pcdev->platform_flags & PXA_CAMERA_MCLK_EN)
cicr4 |= CICR4_MCLK_EN;
if (flags & SOCAM_PCLK_SAMPLE_FALLING)
cicr4 |= CICR4_PCP;
if (flags & SOCAM_HSYNC_ACTIVE_LOW)
cicr4 |= CICR4_HSP;
if (flags & SOCAM_VSYNC_ACTIVE_LOW)
cicr4 |= CICR4_VSP;
cicr0 = __raw_readl(pcdev->base + CICR0);
if (cicr0 & CICR0_ENB)
__raw_writel(cicr0 & ~CICR0_ENB, pcdev->base + CICR0);
cicr1 = CICR1_PPL_VAL(icd->user_width - 1) | bpp | dw;
switch (pixfmt) {
case V4L2_PIX_FMT_YUV422P:
pcdev->channels = 3;
cicr1 |= CICR1_YCBCR_F;
/*
* Normally, pxa bus wants as input UYVY format. We allow all
* reorderings of the YUV422 format, as no processing is done,
* and the YUV stream is just passed through without any
* transformation. Note that UYVY is the only format that
* should be used if pxa framebuffer Overlay2 is used.
*/
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_VYUY:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
cicr1 |= CICR1_COLOR_SP_VAL(2);
break;
case V4L2_PIX_FMT_RGB555:
cicr1 |= CICR1_RGB_BPP_VAL(1) | CICR1_RGBT_CONV_VAL(2) |
CICR1_TBIT | CICR1_COLOR_SP_VAL(1);
break;
case V4L2_PIX_FMT_RGB565:
cicr1 |= CICR1_COLOR_SP_VAL(1) | CICR1_RGB_BPP_VAL(2);
break;
}
cicr2 = 0;
cicr3 = CICR3_LPF_VAL(icd->user_height - 1) |
CICR3_BFW_VAL(min((unsigned short)255, icd->y_skip_top));
cicr4 |= pcdev->mclk_divisor;
__raw_writel(cicr1, pcdev->base + CICR1);
__raw_writel(cicr2, pcdev->base + CICR2);
__raw_writel(cicr3, pcdev->base + CICR3);
__raw_writel(cicr4, pcdev->base + CICR4);
/* CIF interrupts are not used, only DMA */
cicr0 = (cicr0 & CICR0_ENB) | (pcdev->platform_flags & PXA_CAMERA_MASTER ?
CICR0_SIM_MP : (CICR0_SL_CAP_EN | CICR0_SIM_SP));
cicr0 |= CICR0_DMAEN | CICR0_IRQ_MASK;
__raw_writel(cicr0, pcdev->base + CICR0);
}
static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
{
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct pxa_camera_dev *pcdev = ici->priv;
unsigned long dw, bpp, bus_flags, camera_flags, common_flags;
u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0;
unsigned long bus_flags, camera_flags, common_flags;
int ret = test_platform_param(pcdev, icd->buswidth, &bus_flags);
struct pxa_cam *cam = icd->host_priv;
if (ret < 0)
return ret;
@ -1084,87 +1175,13 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
common_flags &= ~SOCAM_PCLK_SAMPLE_FALLING;
}
cam->flags = common_flags;
ret = icd->ops->set_bus_param(icd, common_flags);
if (ret < 0)
return ret;
/* Datawidth is now guaranteed to be equal to one of the three values.
* We fix bit-per-pixel equal to data-width... */
switch (common_flags & SOCAM_DATAWIDTH_MASK) {
case SOCAM_DATAWIDTH_10:
dw = 4;
bpp = 0x40;
break;
case SOCAM_DATAWIDTH_9:
dw = 3;
bpp = 0x20;
break;
default:
/* Actually it can only be 8 now,
* default is just to silence compiler warnings */
case SOCAM_DATAWIDTH_8:
dw = 2;
bpp = 0;
}
if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN)
cicr4 |= CICR4_PCLK_EN;
if (pcdev->platform_flags & PXA_CAMERA_MCLK_EN)
cicr4 |= CICR4_MCLK_EN;
if (common_flags & SOCAM_PCLK_SAMPLE_FALLING)
cicr4 |= CICR4_PCP;
if (common_flags & SOCAM_HSYNC_ACTIVE_LOW)
cicr4 |= CICR4_HSP;
if (common_flags & SOCAM_VSYNC_ACTIVE_LOW)
cicr4 |= CICR4_VSP;
cicr0 = __raw_readl(pcdev->base + CICR0);
if (cicr0 & CICR0_ENB)
__raw_writel(cicr0 & ~CICR0_ENB, pcdev->base + CICR0);
cicr1 = CICR1_PPL_VAL(icd->rect_current.width - 1) | bpp | dw;
switch (pixfmt) {
case V4L2_PIX_FMT_YUV422P:
pcdev->channels = 3;
cicr1 |= CICR1_YCBCR_F;
/*
* Normally, pxa bus wants as input UYVY format. We allow all
* reorderings of the YUV422 format, as no processing is done,
* and the YUV stream is just passed through without any
* transformation. Note that UYVY is the only format that
* should be used if pxa framebuffer Overlay2 is used.
*/
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_VYUY:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
cicr1 |= CICR1_COLOR_SP_VAL(2);
break;
case V4L2_PIX_FMT_RGB555:
cicr1 |= CICR1_RGB_BPP_VAL(1) | CICR1_RGBT_CONV_VAL(2) |
CICR1_TBIT | CICR1_COLOR_SP_VAL(1);
break;
case V4L2_PIX_FMT_RGB565:
cicr1 |= CICR1_COLOR_SP_VAL(1) | CICR1_RGB_BPP_VAL(2);
break;
}
cicr2 = 0;
cicr3 = CICR3_LPF_VAL(icd->rect_current.height - 1) |
CICR3_BFW_VAL(min((unsigned short)255, icd->y_skip_top));
cicr4 |= pcdev->mclk_divisor;
__raw_writel(cicr1, pcdev->base + CICR1);
__raw_writel(cicr2, pcdev->base + CICR2);
__raw_writel(cicr3, pcdev->base + CICR3);
__raw_writel(cicr4, pcdev->base + CICR4);
/* CIF interrupts are not used, only DMA */
cicr0 = (cicr0 & CICR0_ENB) | (pcdev->platform_flags & PXA_CAMERA_MASTER ?
CICR0_SIM_MP : (CICR0_SL_CAP_EN | CICR0_SIM_SP));
cicr0 |= CICR0_DMAEN | CICR0_IRQ_MASK;
__raw_writel(cicr0, pcdev->base + CICR0);
pxa_camera_setup_cicr(icd, common_flags, pixfmt);
return 0;
}
@ -1230,6 +1247,7 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx,
{
struct device *dev = icd->dev.parent;
int formats = 0, buswidth, ret;
struct pxa_cam *cam;
buswidth = required_buswidth(icd->formats + idx);
@ -1240,6 +1258,16 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx,
if (ret < 0)
return 0;
if (!icd->host_priv) {
cam = kzalloc(sizeof(*cam), GFP_KERNEL);
if (!cam)
return -ENOMEM;
icd->host_priv = cam;
} else {
cam = icd->host_priv;
}
switch (icd->formats[idx].fourcc) {
case V4L2_PIX_FMT_UYVY:
formats++;
@ -1284,6 +1312,19 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx,
return formats;
}
static void pxa_camera_put_formats(struct soc_camera_device *icd)
{
kfree(icd->host_priv);
icd->host_priv = NULL;
}
static int pxa_camera_check_frame(struct v4l2_pix_format *pix)
{
/* limit to pxa hardware capabilities */
return pix->height < 32 || pix->height > 2048 || pix->width < 48 ||
pix->width > 2048 || (pix->width & 0x01);
}
static int pxa_camera_set_crop(struct soc_camera_device *icd,
struct v4l2_crop *a)
{
@ -1296,6 +1337,9 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
.master_clock = pcdev->mclk,
.pixel_clock_max = pcdev->ciclk / 4,
};
struct v4l2_format f;
struct v4l2_pix_format *pix = &f.fmt.pix, pix_tmp;
struct pxa_cam *cam = icd->host_priv;
int ret;
/* If PCLK is used to latch data from the sensor, check sense */
@ -1309,7 +1353,37 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
if (ret < 0) {
dev_warn(dev, "Failed to crop to %ux%u@%u:%u\n",
rect->width, rect->height, rect->left, rect->top);
} else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) {
return ret;
}
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = v4l2_subdev_call(sd, video, g_fmt, &f);
if (ret < 0)
return ret;
pix_tmp = *pix;
if (pxa_camera_check_frame(pix)) {
/*
* Camera cropping produced a frame beyond our capabilities.
* FIXME: just extract a subframe, that we can process.
*/
v4l_bound_align_image(&pix->width, 48, 2048, 1,
&pix->height, 32, 2048, 0,
icd->current_fmt->fourcc == V4L2_PIX_FMT_YUV422P ?
4 : 0);
ret = v4l2_subdev_call(sd, video, s_fmt, &f);
if (ret < 0)
return ret;
if (pxa_camera_check_frame(pix)) {
dev_warn(icd->dev.parent,
"Inconsistent state. Use S_FMT to repair\n");
return -EINVAL;
}
}
if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) {
if (sense.pixel_clock > sense.pixel_clock_max) {
dev_err(dev,
"pixel clock %lu set by the camera too high!",
@ -1319,6 +1393,11 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
recalculate_fifo_timeout(pcdev, sense.pixel_clock);
}
icd->user_width = pix->width;
icd->user_height = pix->height;
pxa_camera_setup_cicr(icd, cam->flags, icd->current_fmt->fourcc);
return ret;
}
@ -1359,6 +1438,11 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
if (ret < 0) {
dev_warn(dev, "Failed to configure for format %x\n",
pix->pixelformat);
} else if (pxa_camera_check_frame(pix)) {
dev_warn(dev,
"Camera driver produced an unsupported frame %dx%d\n",
pix->width, pix->height);
ret = -EINVAL;
} else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) {
if (sense.pixel_clock > sense.pixel_clock_max) {
dev_err(dev,
@ -1402,7 +1486,7 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
*/
v4l_bound_align_image(&pix->width, 48, 2048, 1,
&pix->height, 32, 2048, 0,
xlate->host_fmt->fourcc == V4L2_PIX_FMT_YUV422P ? 4 : 0);
pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0);
pix->bytesperline = pix->width *
DIV_ROUND_UP(xlate->host_fmt->depth, 8);
@ -1412,7 +1496,7 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
pix->pixelformat = xlate->cam_fmt->fourcc;
/* limit to sensor capabilities */
ret = v4l2_subdev_call(sd, video, try_fmt, f);
pix->pixelformat = xlate->host_fmt->fourcc;
pix->pixelformat = pixfmt;
field = pix->field;
@ -1525,6 +1609,7 @@ static struct soc_camera_host_ops pxa_soc_camera_host_ops = {
.resume = pxa_camera_resume,
.set_crop = pxa_camera_set_crop,
.get_formats = pxa_camera_get_formats,
.put_formats = pxa_camera_put_formats,
.set_fmt = pxa_camera_set_fmt,
.try_fmt = pxa_camera_try_fmt,
.init_videobuf = pxa_camera_init_videobuf,

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -278,6 +278,9 @@ static void soc_camera_free_user_formats(struct soc_camera_device *icd)
icd->user_formats = NULL;
}
#define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \
((x) >> 24) & 0xff
/* Called with .vb_lock held */
static int soc_camera_set_fmt(struct soc_camera_file *icf,
struct v4l2_format *f)
@ -287,6 +290,9 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf,
struct v4l2_pix_format *pix = &f->fmt.pix;
int ret;
dev_dbg(&icd->dev, "S_FMT(%c%c%c%c, %ux%u)\n",
pixfmtstr(pix->pixelformat), pix->width, pix->height);
/* We always call try_fmt() before set_fmt() or set_crop() */
ret = ici->ops->try_fmt(icd, f);
if (ret < 0)
@ -302,17 +308,17 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf,
return -EINVAL;
}
icd->rect_current.width = pix->width;
icd->rect_current.height = pix->height;
icf->vb_vidq.field =
icd->field = pix->field;
icd->user_width = pix->width;
icd->user_height = pix->height;
icf->vb_vidq.field =
icd->field = pix->field;
if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
dev_warn(&icd->dev, "Attention! Wrong buf-type %d\n",
f->type);
dev_dbg(&icd->dev, "set width: %d height: %d\n",
icd->rect_current.width, icd->rect_current.height);
icd->user_width, icd->user_height);
/* set physical bus parameters */
return ici->ops->set_bus_param(icd, pix->pixelformat);
@ -355,8 +361,8 @@ static int soc_camera_open(struct file *file)
struct v4l2_format f = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.fmt.pix = {
.width = icd->rect_current.width,
.height = icd->rect_current.height,
.width = icd->user_width,
.height = icd->user_height,
.field = icd->field,
.pixelformat = icd->current_fmt->fourcc,
.colorspace = icd->current_fmt->colorspace,
@ -557,8 +563,8 @@ static int soc_camera_g_fmt_vid_cap(struct file *file, void *priv,
WARN_ON(priv != file->private_data);
pix->width = icd->rect_current.width;
pix->height = icd->rect_current.height;
pix->width = icd->user_width;
pix->height = icd->user_height;
pix->field = icf->vb_vidq.field;
pix->pixelformat = icd->current_fmt->fourcc;
pix->bytesperline = pix->width *
@ -722,17 +728,9 @@ static int soc_camera_cropcap(struct file *file, void *fh,
{
struct soc_camera_file *icf = file->private_data;
struct soc_camera_device *icd = icf->icd;
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->bounds = icd->rect_max;
a->defrect.left = icd->rect_max.left;
a->defrect.top = icd->rect_max.top;
a->defrect.width = DEFAULT_WIDTH;
a->defrect.height = DEFAULT_HEIGHT;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
return ici->ops->cropcap(icd, a);
}
static int soc_camera_g_crop(struct file *file, void *fh,
@ -740,11 +738,14 @@ static int soc_camera_g_crop(struct file *file, void *fh,
{
struct soc_camera_file *icf = file->private_data;
struct soc_camera_device *icd = icf->icd;
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
int ret;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->c = icd->rect_current;
mutex_lock(&icf->vb_vidq.vb_lock);
ret = ici->ops->get_crop(icd, a);
mutex_unlock(&icf->vb_vidq.vb_lock);
return 0;
return ret;
}
/*
@ -759,49 +760,33 @@ static int soc_camera_s_crop(struct file *file, void *fh,
struct soc_camera_file *icf = file->private_data;
struct soc_camera_device *icd = icf->icd;
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct v4l2_rect rect = a->c;
struct v4l2_rect *rect = &a->c;
struct v4l2_crop current_crop;
int ret;
if (a->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
return -EINVAL;
dev_dbg(&icd->dev, "S_CROP(%ux%u@%u:%u)\n",
rect->width, rect->height, rect->left, rect->top);
/* Cropping is allowed during a running capture, guard consistency */
mutex_lock(&icf->vb_vidq.vb_lock);
/* If get_crop fails, we'll let host and / or client drivers decide */
ret = ici->ops->get_crop(icd, &current_crop);
/* Prohibit window size change with initialised buffers */
if (icf->vb_vidq.bufs[0] && (rect.width != icd->rect_current.width ||
rect.height != icd->rect_current.height)) {
if (icf->vb_vidq.bufs[0] && !ret &&
(a->c.width != current_crop.c.width ||
a->c.height != current_crop.c.height)) {
dev_err(&icd->dev,
"S_CROP denied: queue initialised and sizes differ\n");
ret = -EBUSY;
goto unlock;
} else {
ret = ici->ops->set_crop(icd, a);
}
if (rect.width > icd->rect_max.width)
rect.width = icd->rect_max.width;
if (rect.width < icd->width_min)
rect.width = icd->width_min;
if (rect.height > icd->rect_max.height)
rect.height = icd->rect_max.height;
if (rect.height < icd->height_min)
rect.height = icd->height_min;
if (rect.width + rect.left > icd->rect_max.width + icd->rect_max.left)
rect.left = icd->rect_max.width + icd->rect_max.left -
rect.width;
if (rect.height + rect.top > icd->rect_max.height + icd->rect_max.top)
rect.top = icd->rect_max.height + icd->rect_max.top -
rect.height;
ret = ici->ops->set_crop(icd, a);
if (!ret)
icd->rect_current = rect;
unlock:
mutex_unlock(&icf->vb_vidq.vb_lock);
return ret;
@ -926,6 +911,8 @@ static int soc_camera_probe(struct device *dev)
struct soc_camera_host *ici = to_soc_camera_host(dev->parent);
struct soc_camera_link *icl = to_soc_camera_link(icd);
struct device *control = NULL;
struct v4l2_subdev *sd;
struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
int ret;
dev_info(dev, "Probing %s\n", dev_name(dev));
@ -982,7 +969,6 @@ static int soc_camera_probe(struct device *dev)
if (ret < 0)
goto eiufmt;
icd->rect_current = icd->rect_max;
icd->field = V4L2_FIELD_ANY;
/* ..._video_start() will create a device node, so we have to protect */
@ -992,9 +978,15 @@ static int soc_camera_probe(struct device *dev)
if (ret < 0)
goto evidstart;
/* Try to improve our guess of a reasonable window format */
sd = soc_camera_to_subdev(icd);
if (!v4l2_subdev_call(sd, video, g_fmt, &f)) {
icd->user_width = f.fmt.pix.width;
icd->user_height = f.fmt.pix.height;
}
/* Do we have to sysfs_remove_link() before device_unregister()? */
if (to_soc_camera_control(icd) &&
sysfs_create_link(&icd->dev.kobj, &to_soc_camera_control(icd)->kobj,
if (sysfs_create_link(&icd->dev.kobj, &to_soc_camera_control(icd)->kobj,
"control"))
dev_warn(&icd->dev, "Failed creating the control symlink\n");
@ -1103,6 +1095,25 @@ static void dummy_release(struct device *dev)
{
}
/*
 * Fallback host op: forward VIDIOC_CROPCAP straight to the client
 * subdevice when the host driver did not install its own handler.
 */
static int default_cropcap(struct soc_camera_device *icd,
			   struct v4l2_cropcap *a)
{
	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, cropcap, a);
}
/*
 * Fallback host op: forward VIDIOC_G_CROP straight to the client
 * subdevice when the host driver did not install its own handler.
 */
static int default_g_crop(struct soc_camera_device *icd, struct v4l2_crop *a)
{
	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, g_crop, a);
}
/*
 * Fallback host op: forward VIDIOC_S_CROP straight to the client
 * subdevice when the host driver did not install its own handler.
 */
static int default_s_crop(struct soc_camera_device *icd, struct v4l2_crop *a)
{
	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, s_crop, a);
}
int soc_camera_host_register(struct soc_camera_host *ici)
{
struct soc_camera_host *ix;
@ -1111,7 +1122,6 @@ int soc_camera_host_register(struct soc_camera_host *ici)
if (!ici || !ici->ops ||
!ici->ops->try_fmt ||
!ici->ops->set_fmt ||
!ici->ops->set_crop ||
!ici->ops->set_bus_param ||
!ici->ops->querycap ||
!ici->ops->init_videobuf ||
@ -1122,6 +1132,13 @@ int soc_camera_host_register(struct soc_camera_host *ici)
!ici->v4l2_dev.dev)
return -EINVAL;
if (!ici->ops->set_crop)
ici->ops->set_crop = default_s_crop;
if (!ici->ops->get_crop)
ici->ops->get_crop = default_g_crop;
if (!ici->ops->cropcap)
ici->ops->cropcap = default_cropcap;
mutex_lock(&list_lock);
list_for_each_entry(ix, &hosts, list) {
if (ix->nr == ici->nr) {
@ -1321,6 +1338,9 @@ static int __devinit soc_camera_pdrv_probe(struct platform_device *pdev)
if (ret < 0)
goto escdevreg;
icd->user_width = DEFAULT_WIDTH;
icd->user_height = DEFAULT_HEIGHT;
return 0;
escdevreg:

Просмотреть файл

@ -127,10 +127,6 @@ static int soc_camera_platform_probe(struct platform_device *pdev)
/* Set the control device reference */
dev_set_drvdata(&icd->dev, &pdev->dev);
icd->width_min = 0;
icd->rect_max.width = p->format.width;
icd->height_min = 0;
icd->rect_max.height = p->format.height;
icd->y_skip_top = 0;
icd->ops = &soc_camera_platform_ops;

Просмотреть файл

@ -715,8 +715,88 @@ tw9910_set_fmt_error:
return ret;
}
static int tw9910_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{
struct i2c_client *client = sd->priv;
struct tw9910_priv *priv = to_tw9910(client);
if (!priv->scale) {
int ret;
struct v4l2_crop crop = {
.c = {
.left = 0,
.top = 0,
.width = 640,
.height = 480,
},
};
ret = tw9910_s_crop(sd, &crop);
if (ret < 0)
return ret;
}
a->c.left = 0;
a->c.top = 0;
a->c.width = priv->scale->width;
a->c.height = priv->scale->height;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
return 0;
}
static int tw9910_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
{
a->bounds.left = 0;
a->bounds.top = 0;
a->bounds.width = 768;
a->bounds.height = 576;
a->defrect.left = 0;
a->defrect.top = 0;
a->defrect.width = 640;
a->defrect.height = 480;
a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
a->pixelaspect.numerator = 1;
a->pixelaspect.denominator = 1;
return 0;
}
static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
struct tw9910_priv *priv = to_tw9910(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
if (!priv->scale) {
int ret;
struct v4l2_crop crop = {
.c = {
.left = 0,
.top = 0,
.width = 640,
.height = 480,
},
};
ret = tw9910_s_crop(sd, &crop);
if (ret < 0)
return ret;
}
f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
pix->width = priv->scale->width;
pix->height = priv->scale->height;
pix->pixelformat = V4L2_PIX_FMT_VYUY;
pix->colorspace = V4L2_COLORSPACE_SMPTE170M;
pix->field = V4L2_FIELD_INTERLACED;
return 0;
}
static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
struct tw9910_priv *priv = to_tw9910(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
/* See tw9910_s_crop() - no proper cropping support */
struct v4l2_crop a = {
@ -741,8 +821,8 @@ static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
ret = tw9910_s_crop(sd, &a);
if (!ret) {
pix->width = a.c.width;
pix->height = a.c.height;
pix->width = priv->scale->width;
pix->height = priv->scale->height;
}
return ret;
}
@ -838,8 +918,11 @@ static struct v4l2_subdev_core_ops tw9910_subdev_core_ops = {
static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = {
.s_stream = tw9910_s_stream,
.g_fmt = tw9910_g_fmt,
.s_fmt = tw9910_s_fmt,
.try_fmt = tw9910_try_fmt,
.cropcap = tw9910_cropcap,
.g_crop = tw9910_g_crop,
.s_crop = tw9910_s_crop,
};
@ -852,20 +935,6 @@ static struct v4l2_subdev_ops tw9910_subdev_ops = {
* i2c_driver function
*/
/* This is called during probe, so, setting rect_max is Ok here: scale == 1 */
static void limit_to_scale(struct soc_camera_device *icd,
			   const struct tw9910_scale_ctrl *scale)
{
	/* Widen the supported window bounds to cover this scale entry:
	 * grow the maximum, shrink the minimum. */
	if (icd->rect_max.width < scale->width)
		icd->rect_max.width = scale->width;
	if (icd->width_min > scale->width)
		icd->width_min = scale->width;
	if (icd->rect_max.height < scale->height)
		icd->rect_max.height = scale->height;
	if (icd->height_min > scale->height)
		icd->height_min = scale->height;
}
static int tw9910_probe(struct i2c_client *client,
const struct i2c_device_id *did)
@ -876,8 +945,7 @@ static int tw9910_probe(struct i2c_client *client,
struct i2c_adapter *adapter =
to_i2c_adapter(client->dev.parent);
struct soc_camera_link *icl;
const struct tw9910_scale_ctrl *scale;
int i, ret;
int ret;
if (!icd) {
dev_err(&client->dev, "TW9910: missing soc-camera data!\n");
@ -908,22 +976,6 @@ static int tw9910_probe(struct i2c_client *client,
icd->ops = &tw9910_ops;
icd->iface = info->link.bus_id;
/*
* set width and height
*/
icd->rect_max.width = tw9910_ntsc_scales[0].width; /* set default */
icd->width_min = tw9910_ntsc_scales[0].width;
icd->rect_max.height = tw9910_ntsc_scales[0].height;
icd->height_min = tw9910_ntsc_scales[0].height;
scale = tw9910_ntsc_scales;
for (i = 0; i < ARRAY_SIZE(tw9910_ntsc_scales); i++)
limit_to_scale(icd, scale + i);
scale = tw9910_pal_scales;
for (i = 0; i < ARRAY_SIZE(tw9910_pal_scales); i++)
limit_to_scale(icd, scale + i);
ret = tw9910_video_probe(icd, client);
if (ret) {
icd->ops = NULL;

Просмотреть файл

@ -22,8 +22,8 @@ struct soc_camera_device {
struct list_head list;
struct device dev;
struct device *pdev; /* Platform device */
struct v4l2_rect rect_current; /* Current window */
struct v4l2_rect rect_max; /* Maximum window */
s32 user_width;
s32 user_height;
unsigned short width_min;
unsigned short height_min;
unsigned short y_skip_top; /* Lines to skip at the top */
@ -76,6 +76,8 @@ struct soc_camera_host_ops {
int (*get_formats)(struct soc_camera_device *, int,
struct soc_camera_format_xlate *);
void (*put_formats)(struct soc_camera_device *);
int (*cropcap)(struct soc_camera_device *, struct v4l2_cropcap *);
int (*get_crop)(struct soc_camera_device *, struct v4l2_crop *);
int (*set_crop)(struct soc_camera_device *, struct v4l2_crop *);
int (*set_fmt)(struct soc_camera_device *, struct v4l2_format *);
int (*try_fmt)(struct soc_camera_device *, struct v4l2_format *);
@ -277,6 +279,21 @@ static inline unsigned long soc_camera_bus_param_compatible(
common_flags;
}
/*
 * Clamp a 1-D window (offset + length) to the valid range:
 * length is forced into [length_min, length_max], then the offset is
 * adjusted so the window stays within
 * [start_min, start_min + length_max].
 */
static inline void soc_camera_limit_side(unsigned int *start,
		unsigned int *length, unsigned int start_min,
		unsigned int length_min, unsigned int length_max)
{
	unsigned int len = *length;
	unsigned int pos = *start;
	unsigned int start_max;

	if (len < length_min)
		len = length_min;
	else if (len > length_max)
		len = length_max;

	/* Largest offset that still keeps the window inside the bounds */
	start_max = start_min + length_max - len;
	if (pos < start_min)
		pos = start_min;
	else if (pos > start_max)
		pos = start_max;

	*length = len;
	*start = pos;
}
extern unsigned long soc_camera_apply_sensor_flags(struct soc_camera_link *icl,
unsigned long flags);