patch-2.4.0-test11 linux/drivers/usb/ov511.c
- Lines: 710
- Date: Sun Nov 12 20:45:19 2000
- Orig file: v2.4.0-test10/linux/drivers/usb/ov511.c
- Orig date: Sun Oct 8 10:50:30 2000
diff -u --recursive --new-file v2.4.0-test10/linux/drivers/usb/ov511.c linux/drivers/usb/ov511.c
@@ -30,7 +30,7 @@
* Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
-static const char version[] = "1.25";
+static const char version[] = "1.28";
#define __NO_VERSION__
@@ -51,8 +51,6 @@
#include "ov511.h"
-#undef OV511_GBR422 /* Experimental -- sets the 7610 to GBR422 */
-
#define OV511_I2C_RETRIES 3
/* Video Size 640 x 480 x 3 bytes for RGB */
@@ -60,8 +58,6 @@
#define MAX_DATA_SIZE (MAX_FRAME_SIZE + sizeof(struct timeval))
#define GET_SEGSIZE(p) ((p) == VIDEO_PALETTE_GREY ? 256 : 384)
-#define GET_DEPTH(p) ((p) == VIDEO_PALETTE_GREY ? 8 : \
- ((p) == VIDEO_PALETTE_YUV422 ? 16 : 24))
/* PARAMETER VARIABLES: */
static int autoadjust = 1; /* CCD dynamically changes exposure, etc... */
@@ -74,7 +70,7 @@
* 5=highly repetitive mesgs
* NOTE: This should be changed to 0, 1, or 2 for production kernels
*/
-static int debug = 3;
+static int debug = 0;
/* Fix vertical misalignment of red and blue at 640x480 */
static int fix_rgb_offset = 0;
@@ -112,6 +108,13 @@
/* Display test pattern - doesn't work yet either */
static int testpat = 0;
+/* Setting this to 1 will make the sensor output GBR422 instead of YUV420. Only
+ * affects RGB24 mode. */
+static int sensor_gbr = 0;
+
+/* Dump raw pixel data, in one of 3 formats. See ov511_dumppix() for details. */
+static int dumppix = 0;
+
MODULE_PARM(autoadjust, "i");
MODULE_PARM_DESC(autoadjust, "CCD dynamically changes exposure");
MODULE_PARM(debug, "i");
@@ -138,6 +141,10 @@
MODULE_PARM_DESC(compress, "Turn on compression (not functional yet)");
MODULE_PARM(testpat, "i");
MODULE_PARM_DESC(testpat, "Replace image with vertical bar testpattern (only partially working)");
+MODULE_PARM(sensor_gbr, "i");
+MODULE_PARM_DESC(sensor_gbr, "Make sensor output GBR422 rather than YUV420");
+MODULE_PARM(dumppix, "i");
+MODULE_PARM_DESC(dumppix, "Dump raw pixel data, in one of 3 formats. See ov511_dumppix() for details");
MODULE_AUTHOR("Mark McClelland <mwm@i.am> & Bret Wallach & Orion Sky Lawlor <olawlor@acm.org> & Kevin Moore & Charl P. Botha <cpbotha@ieee.org> & Claudio Matsuoka <claudio@conectiva.com>");
MODULE_DESCRIPTION("OV511 USB Camera Driver");
@@ -167,6 +174,15 @@
{ -1, NULL }
};
+static __devinitdata struct usb_device_id device_table [] = {
+ { idVendor: 0x05a9, idProduct: 0x0511 }, /* OV511 */
+ { idVendor: 0x05a9, idProduct: 0xA511 }, /* OV511+ */
+ { idVendor: 0x0813, idProduct: 0x0002 }, /* Intel Play Me2Cam OV511+ */
+ { } /* Terminating entry */
+};
+
+MODULE_DEVICE_TABLE (usb, device_table);
+
#if defined(CONFIG_PROC_FS) && defined(CONFIG_VIDEO_PROC_FS)
static struct palette_list plist[] = {
{ VIDEO_PALETTE_GREY, "GREY" },
@@ -357,6 +373,7 @@
ov511->bridge == BRG_OV511PLUS ? "OV511+" :
"unknown");
out += sprintf (out, "sensor : %s\n",
+ ov511->sensor == SEN_OV6620 ? "OV6620" :
ov511->sensor == SEN_OV7610 ? "OV7610" :
ov511->sensor == SEN_OV7620 ? "OV7620" :
ov511->sensor == SEN_OV7620AE ? "OV7620AE" :
@@ -448,7 +465,6 @@
}
#endif /* CONFIG_PROC_FS && CONFIG_VIDEO_PROC_FS */
-
/**********************************************************************
*
* Camera interface
@@ -700,7 +716,7 @@
{
int rc;
- PDEBUG(3, "Reset: type=0x%X", reset_type);
+ PDEBUG(4, "Reset: type=0x%X", reset_type);
rc = ov511_reg_write(dev, OV511_REG_SYSTEM_RESET, reset_type);
rc = ov511_reg_write(dev, OV511_REG_SYSTEM_RESET, 0);
@@ -829,14 +845,21 @@
} else if ((ov511->sensor == SEN_OV7620)
|| (ov511->sensor == SEN_OV7620AE)) {
#if 0
- cur_con = ov511_i2c_read(dev, OV7610_REG_CNT);
- cur_brt = ov511_i2c_read(dev, OV7610_REG_BRT);
- // DEBUG_CODE
- PDEBUG(1, "con=%d brt=%d", ov511_i2c_read(dev, OV7610_REG_CNT),
- ov511_i2c_read(dev, OV7610_REG_BRT));
+ int cur_sat, new_sat, tmp;
- if (ov511_i2c_write(dev, OV7610_REG_CNT, p->contrast >> 8) < 0)
+ cur_sat = ov511_i2c_read(dev, OV7610_REG_BLUE);
+
+ tmp = (p->hue >> 8) - cur_sat;
+ new_sat = (tmp < 0) ? (-tmp) | 0x80 : tmp;
+
+ PDEBUG(1, "cur=%d target=%d diff=%d", cur_sat, p->hue >> 8, tmp);
+
+ if (ov511_i2c_write(dev, OV7610_REG_BLUE, new_sat) < 0)
return -EIO;
+
+ // DEBUG_CODE
+ PDEBUG(1, "hue=%d", ov511_i2c_read(dev, OV7610_REG_BLUE));
+
#endif
}
@@ -882,6 +905,23 @@
return 0;
}
+/* Returns number of bits per pixel (regardless of where they are located; planar or
+ * not), or zero for unsupported format.
+ */
+static int ov511_get_depth(int palette)
+{
+ switch (palette) {
+ case VIDEO_PALETTE_GREY: return 8;
+ case VIDEO_PALETTE_RGB565: return 16;
+ case VIDEO_PALETTE_RGB24: return 24;
+ case VIDEO_PALETTE_YUV422: return 16;
+ case VIDEO_PALETTE_YUYV: return 16;
+ case VIDEO_PALETTE_YUV420: return 24;
+ case VIDEO_PALETTE_YUV422P: return 24; /* Planar */
+ default: return 0; /* Invalid format */
+ }
+}
+
/* LNCNT values fixed by Lawrence Glaister <lg@jfm.bc.ca> */
static struct mode_list mlist[] = {
/* W H C PXCNT LNCNT PXDIV LNDIV M420 COMA COML */
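As a usage illustration only (frame_bytes() below is hypothetical, not part of the driver; it just shows how the bits-per-pixel value returned by the new helper translates into a buffer size):

        /* Sketch: derive a frame buffer size from the palette.
         * ov511_get_depth() is the helper added in the hunk above;
         * frame_bytes() itself is illustrative, not driver code. */
        static int frame_bytes(int palette, int width, int height)
        {
                int depth = ov511_get_depth(palette);   /* bits per pixel, 0 = unsupported */

                if (!depth)
                        return -EINVAL;

                return (width * height * depth) >> 3;   /* e.g. 640 * 480 * 24 / 8 = 921600 */
        }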
@@ -917,6 +957,12 @@
if (ov511_stop(ov511->dev) < 0)
return -EIO;
+ /* Dumppix only works with RGB24 */
+ if (dumppix && (mode != VIDEO_PALETTE_RGB24)) {
+ err("dumppix only supported with RGB 24");
+ return -EINVAL;
+ }
+
if (mode == VIDEO_PALETTE_GREY) {
ov511_reg_write(dev, 0x16, 0x00);
if (ov511->sensor == SEN_OV7610
@@ -953,9 +999,9 @@
break;
case SEN_OV6620:
hwsbase = 0x38;
- hwebase = 0x39;
- vwsbase = 0x03;
- vwebase = 0x04;
+ hwebase = 0x3a;
+ vwsbase = 0x05;
+ vwebase = 0x06;
break;
case SEN_OV7620:
hwsbase = 0x2c;
@@ -1039,13 +1085,16 @@
/* Calculate and set the clock divisor */
clock = ((sub_flag ? ov511->subw * ov511->subh : width * height)
* (mlist[i].color ? 3 : 2) / 2) / 66000;
+#if 0
+ clock *= cams;
+#endif
ov511_i2c_write(dev, 0x11, clock);
-#ifdef OV511_GBR422
- ov511_i2c_write(dev, 0x12, mlist[i].common_A | 0x08);
-#else
- ov511_i2c_write(dev, 0x12, mlist[i].common_A | (testpat?0x02:0x00));
-#endif
+ /* We only have code to convert GBR -> RGB24 */
+ if ((mode == VIDEO_PALETTE_RGB24) && sensor_gbr)
+ ov511_i2c_write(dev, 0x12, mlist[i].common_A | (testpat?0x0a:0x08));
+ else
+ ov511_i2c_write(dev, 0x12, mlist[i].common_A | (testpat?0x02:0x00));
/* 7620/6620 don't have register 0x35, so play it safe */
if (ov511->sensor == SEN_OV7610 ||
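Worked example of the arithmetic in the hunk above, nothing more: at 640x480 in a colour mode the driver computes 640 * 480 * 3 / 2 = 460800, and 460800 / 66000 = 6 after integer division, so 6 is written to sensor register 0x11; the #if 0 block would additionally scale that value by the number of cameras.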
@@ -1113,7 +1162,7 @@
static inline void
ov511_move_420_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
- int rowPixels, unsigned char * rgb)
+ int rowPixels, unsigned char * rgb, int bits)
{
const int rvScale = 91881;
const int guScale = -22553;
@@ -1134,14 +1183,32 @@
yTL *= yScale; yTR *= yScale;
yBL *= yScale; yBR *= yScale;
- /* Write out top two pixels */
- rgb[0] = LIMIT(b+yTL); rgb[1] = LIMIT(g+yTL); rgb[2] = LIMIT(r+yTL);
- rgb[3] = LIMIT(b+yTR); rgb[4] = LIMIT(g+yTR); rgb[5] = LIMIT(r+yTR);
-
- /* Skip down to next line to write out bottom two pixels */
- rgb += 3 * rowPixels;
- rgb[0] = LIMIT(b+yBL); rgb[1] = LIMIT(g+yBL); rgb[2] = LIMIT(r+yBL);
- rgb[3] = LIMIT(b+yBR); rgb[4] = LIMIT(g+yBR); rgb[5] = LIMIT(r+yBR);
+ if (bits == 24) {
+ /* Write out top two pixels */
+ rgb[0] = LIMIT(b+yTL); rgb[1] = LIMIT(g+yTL); rgb[2] = LIMIT(r+yTL);
+ rgb[3] = LIMIT(b+yTR); rgb[4] = LIMIT(g+yTR); rgb[5] = LIMIT(r+yTR);
+
+ /* Skip down to next line to write out bottom two pixels */
+ rgb += 3 * rowPixels;
+ rgb[0] = LIMIT(b+yBL); rgb[1] = LIMIT(g+yBL); rgb[2] = LIMIT(r+yBL);
+ rgb[3] = LIMIT(b+yBR); rgb[4] = LIMIT(g+yBR); rgb[5] = LIMIT(r+yBR);
+ } else if (bits == 16) {
+ /* Write out top two pixels */
+ rgb[0] = ((LIMIT(b+yTL) >> 3) & 0x1F) | ((LIMIT(g+yTL) << 3) & 0xE0);
+ rgb[1] = ((LIMIT(g+yTL) >> 5) & 0x07) | (LIMIT(r+yTL) & 0xF8);
+
+ rgb[2] = ((LIMIT(b+yTR) >> 3) & 0x1F) | ((LIMIT(g+yTR) << 3) & 0xE0);
+ rgb[3] = ((LIMIT(g+yTR) >> 5) & 0x07) | (LIMIT(r+yTR) & 0xF8);
+
+ /* Skip down to next line to write out bottom two pixels */
+ rgb += 2 * rowPixels;
+
+ rgb[0] = ((LIMIT(b+yBL) >> 3) & 0x1F) | ((LIMIT(g+yBL) << 3) & 0xE0);
+ rgb[1] = ((LIMIT(g+yBL) >> 5) & 0x07) | (LIMIT(r+yBL) & 0xF8);
+
+ rgb[2] = ((LIMIT(b+yBR) >> 3) & 0x1F) | ((LIMIT(g+yBR) << 3) & 0xE0);
+ rgb[3] = ((LIMIT(g+yBR) >> 5) & 0x07) | (LIMIT(r+yBR) & 0xF8);
+ }
}
/*
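For reference, the byte pair written per pixel in the new 16-bit branch above is little-endian RGB565. A standalone sketch of the same packing (pack_rgb565() is illustrative, not a driver function; r, g, b are assumed already clamped to 0..255):

        /* Illustrative only: equivalent to the rgb[0]/rgb[1] stores above. */
        static void pack_rgb565(unsigned char r, unsigned char g, unsigned char b,
                                unsigned char *out)
        {
                unsigned short pix = ((r & 0xF8) << 8) |        /* R7..R3 -> bits 15..11 */
                                     ((g & 0xFC) << 3) |        /* G7..G2 -> bits 10..5  */
                                     (b >> 3);                  /* B7..B3 -> bits 4..0   */

                out[0] = pix & 0xFF;            /* low byte:  G4..G2 | B7..B3 */
                out[1] = (pix >> 8) & 0xFF;     /* high byte: R7..R3 | G7..G5 */
        }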
@@ -1167,7 +1234,7 @@
* Note that the U and V data in one segment represents a 16 x 16 pixel
* area, but the Y data represents a 32 x 8 pixel area.
*
- * If OV511_DUMPPIX is defined, _parse_data just dumps the incoming segments,
+ * If dumppix module param is set, _parse_data just dumps the incoming segments,
* verbatim, in order, into the frame. When used with vidcat -f ppm -s 640x480
* this puts the data on the standard output and can be analyzed with the
* parseppm.c utility I wrote. That's a much faster way for figuring out how
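To make the segment layout described in that comment concrete, here is an illustrative view of one 384-byte colour segment. The struct is a sketch inferred from GET_SEGSIZE() and the pointer offsets in the parsing code (Y accessed at pIn0 + 128, V at pIn + 64); the driver itself works on raw byte pointers:

        /* Sketch only -- not a type used by the driver. */
        struct ov511_segment_sketch {
                unsigned char u[64];    /* 8 x 8 U samples, covering a 16 x 16 pixel area */
                unsigned char v[64];    /* 8 x 8 V samples, covering a 16 x 16 pixel area */
                unsigned char y[256];   /* luma for a 32 x 8 pixel area */
        };                              /* 384 bytes == GET_SEGSIZE() for colour modes */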
@@ -1177,14 +1244,8 @@
#define HDIV 8
#define WDIV (256/HDIV)
-#undef OV511_DUMPPIX
-
-/* #define this and OV511_DUMPPIX to disable parsing of UV data */
-#undef OV511_FORCE_MONO
-
-#ifdef OV511_GBR422
static void
-ov511_parse_data_rgb24(unsigned char *pIn0, unsigned char *pOut0,
+ov511_parse_gbr422_to_rgb24(unsigned char *pIn0, unsigned char *pOut0,
int iOutY, int iOutUV, int iHalf, int iWidth)
{
int k, l, m;
@@ -1231,14 +1292,12 @@
}
}
-#else
-
static void
-ov511_parse_data_rgb24(unsigned char *pIn0, unsigned char *pOut0,
- int iOutY, int iOutUV, int iHalf, int iWidth)
+ov511_parse_yuv420_to_rgb(unsigned char *pIn0, unsigned char *pOut0,
+ int iOutY, int iOutUV, int iHalf, int iWidth, int bits)
{
-#ifndef OV511_DUMPPIX
int k, l, m;
+ int bytes = bits >> 3;
unsigned char *pIn;
unsigned char *pOut, *pOut1;
@@ -1251,11 +1310,11 @@
for (l = 0; l < 8; l++) {
for (m = 0; m < 8; m++) {
*pOut1 = *pIn++;
- pOut1 += 3;
+ pOut1 += bytes;
}
- pOut1 += (iWidth - 8) * 3;
+ pOut1 += (iWidth - 8) * bytes;
}
- pOut += 8 * 3;
+ pOut += 8 * bytes;
}
}
@@ -1265,16 +1324,16 @@
for (l = 0; l < 4; l++) {
for (m=0; m<8; m++) {
int y00 = *(pOut);
- int y01 = *(pOut+3);
- int y10 = *(pOut+iWidth*3);
- int y11 = *(pOut+iWidth*3+3);
+ int y01 = *(pOut+bytes);
+ int y10 = *(pOut+iWidth*bytes);
+ int y11 = *(pOut+iWidth*bytes+bytes);
int v = *(pIn+64) - 128;
int u = *pIn++ - 128;
ov511_move_420_block(y00, y01, y10, y11, u, v, iWidth,
- pOut);
- pOut += 6;
+ pOut, bits);
+ pOut += 2 * bytes;
}
- pOut += (iWidth*2 - 16) * 3;
+ pOut += (iWidth*2 - 16) * bytes;
}
/* Just copy the other UV rows */
@@ -1282,9 +1341,9 @@
for (m = 0; m < 8; m++) {
*pOut++ = *(pIn + 64);
*pOut = *pIn++;
- pOut += 5;
+ pOut += 2 * bytes - 1;
}
- pOut += (iWidth*2 - 16) * 3;
+ pOut += (iWidth*2 - 16) * bytes;
}
/* Calculate values if it's the second half */
@@ -1302,72 +1361,65 @@
int v = *pOut1 - 128;
int u = *(pOut1+1) - 128;
ov511_move_420_block(y00, y01, y10,
- y11, u, v, iWidth, pOut1);
- pOut1 += 6;
+ y11, u, v, iWidth, pOut1, bits);
+ pOut1 += 2 * bytes;
}
- pOut1 += (iWidth*2 - 8) * 3;
+ pOut1 += (iWidth*2 - 8) * bytes;
pIn += 8;
}
- pOut += 8 * 3;
+ pOut += 8 * bytes;
}
}
-#else
+}
-#ifndef OV511_FORCE_MONO
- /* Just dump pix data straight out for debug */
- int i, j;
-
- pOut0 += iOutY;
- for (i = 0; i < HDIV; i++) {
- for (j = 0; j < WDIV; j++) {
- *pOut0++ = *pIn0++;
- *pOut0++ = *pIn0++;
- *pOut0++ = *pIn0++;
- }
- pOut0 += (iWidth - WDIV) * 3;
- }
-#else
-
-#if 1
- /* This converts the Y data to "black-and-white" RGB data */
- /* Useful for experimenting with compression */
- int k, l, m;
+static void
+ov511_dumppix(unsigned char *pIn0, unsigned char *pOut0,
+ int iOutY, int iOutUV, int iHalf, int iWidth)
+{
+ int i, j, k;
unsigned char *pIn, *pOut, *pOut1;
- pIn = pIn0 + 128;
- pOut = pOut0 + iOutY;
- for (k = 0; k < 4; k++) {
- pOut1 = pOut;
- for (l = 0; l < 8; l++) {
- for (m = 0; m < 8; m++) {
- *pOut1++ = *pIn;
- *pOut1++ = *pIn;
- *pOut1++ = *pIn++;
+ switch (dumppix) {
+ case 1: /* Just dump YUV data straight out for debug */
+ pOut0 += iOutY;
+ for (i = 0; i < HDIV; i++) {
+ for (j = 0; j < WDIV; j++) {
+ *pOut0++ = *pIn0++;
+ *pOut0++ = *pIn0++;
+ *pOut0++ = *pIn0++;
}
- pOut1 += (iWidth - 8) * 3;
+ pOut0 += (iWidth - WDIV) * 3;
}
- pOut += 8 * 3;
- }
-#else
- /* This will dump the Y channel data stream as-is */
- int count;
- unsigned char *pIn, *pOut;
-
- pIn = pIn0 + 128;
- pOut = pOut0 + output_offset;
- for (count = 0; count < 256; count++) {
- *pOut++ = *pIn;
- *pOut++ = *pIn;
- *pOut++ = *pIn++;
- output_offset += 3;
- }
-#endif
-
-#endif
-
-#endif
+ break;
+ case 2: /* This converts the Y data to "black-and-white" RGB data */
+ /* Useful for experimenting with compression */
+ pIn = pIn0 + 128;
+ pOut = pOut0 + iOutY;
+ for (i = 0; i < 4; i++) {
+ pOut1 = pOut;
+ for (j = 0; j < 8; j++) {
+ for (k = 0; k < 8; k++) {
+ *pOut1++ = *pIn;
+ *pOut1++ = *pIn;
+ *pOut1++ = *pIn++;
+ }
+ pOut1 += (iWidth - 8) * 3;
+ }
+ pOut += 8 * 3;
+ }
+ break;
+ case 3: /* This will dump only the Y channel data stream as-is */
+ pIn = pIn0 + 128;
+ pOut = pOut0 + output_offset;
+ for (i = 0; i < 256; i++) {
+ *pOut++ = *pIn;
+ *pOut++ = *pIn;
+ *pOut++ = *pIn++;
+ output_offset += 3;
+ }
+ break;
+ } /* End switch (dumppix) */
}
-#endif
/* This converts YUV420 segments to YUYV */
static void
@@ -1662,6 +1714,11 @@
/* Frame start */
PDEBUG(4, "Frame start, framenum = %d", ov511->curframe);
+#if 0
+ /* Make sure no previous data carries over; necessary
+ * for compression experimentation */
+ memset(frame->data, 0, MAX_DATA_SIZE);
+#endif
output_offset = 0;
/* Check to see if it's a snapshot frame */
@@ -1740,8 +1797,19 @@
ov511_parse_data_grey (pData, pOut, iOutY, frame->width);
break;
case VIDEO_PALETTE_RGB24:
- ov511_parse_data_rgb24 (pData, pOut, iOutY, iOutUV,
- iY & 1, frame->width);
+ if (dumppix)
+ ov511_dumppix(pData, pOut, iOutY, iOutUV,
+ iY & 1, frame->width);
+ else if (sensor_gbr)
+ ov511_parse_gbr422_to_rgb24(pData, pOut, iOutY, iOutUV,
+ iY & 1, frame->width);
+ else
+ ov511_parse_yuv420_to_rgb(pData, pOut, iOutY, iOutUV,
+ iY & 1, frame->width, 24);
+ break;
+ case VIDEO_PALETTE_RGB565:
+ ov511_parse_yuv420_to_rgb(pData, pOut, iOutY, iOutUV,
+ iY & 1, frame->width, 16);
break;
case VIDEO_PALETTE_YUV422:
case VIDEO_PALETTE_YUYV:
@@ -1782,11 +1850,20 @@
static void ov511_isoc_irq(struct urb *urb)
{
int len;
- struct usb_ov511 *ov511 = urb->context;
+ struct usb_ov511 *ov511;
struct ov511_sbuf *sbuf;
- if (!ov511->dev)
+ if (!urb->context) {
+ PDEBUG(4, "no context");
+ return;
+ }
+
+ ov511 = (struct usb_ov511 *) urb->context;
+
+ if (!ov511->dev || !ov511->user) {
+ PDEBUG(4, "no device, or not open");
return;
+ }
if (!ov511->streaming) {
PDEBUG(4, "hmmm... not streaming, but got interrupt");
@@ -1805,6 +1882,8 @@
/* Move to the next sbuf */
ov511->cursbuf = (ov511->cursbuf + 1) % OV511_NUMSBUF;
+ urb->dev = ov511->dev;
+
return;
}
@@ -1872,6 +1951,7 @@
ov511->sbuf[n].urb->next = ov511->sbuf[n+1].urb;
for (n = 0; n < OV511_NUMSBUF; n++) {
+ ov511->sbuf[n].urb->dev = ov511->dev;
err = usb_submit_urb(ov511->sbuf[n].urb);
if (err)
err("init isoc: usb_submit_urb(%d) ret %d", n, err);
@@ -2079,7 +2159,7 @@
static int ov511_open(struct video_device *dev, int flags)
{
struct usb_ov511 *ov511 = (struct usb_ov511 *)dev;
- int err = 0;
+ int err;
MOD_INC_USE_COUNT;
PDEBUG(4, "opening");
@@ -2174,8 +2254,8 @@
b.audios = 0;
b.maxwidth = ov511->maxwidth;
b.maxheight = ov511->maxheight;
- b.minwidth = 32;
- b.minheight = 16;
+ b.minwidth = 160;
+ b.minheight = 120;
if (copy_to_user(arg, &b, sizeof(b)))
return -EFAULT;
@@ -2237,12 +2317,7 @@
if (copy_from_user(&p, arg, sizeof(p)))
return -EFAULT;
- if (p.palette != VIDEO_PALETTE_GREY &&
- p.palette != VIDEO_PALETTE_RGB24 &&
- p.palette != VIDEO_PALETTE_YUV422 &&
- p.palette != VIDEO_PALETTE_YUYV &&
- p.palette != VIDEO_PALETTE_YUV420 &&
- p.palette != VIDEO_PALETTE_YUV422P)
+ if (!ov511_get_depth(p.palette))
return -EINVAL;
if (ov7610_set_picture(ov511, &p))
@@ -2373,7 +2448,7 @@
case VIDIOCMCAPTURE:
{
struct video_mmap vm;
- int ret;
+ int ret, depth;
if (copy_from_user((void *)&vm, (void *)arg, sizeof(vm)))
return -EFAULT;
@@ -2382,19 +2457,21 @@
PDEBUG(4, "frame: %d, size: %dx%d, format: %d",
vm.frame, vm.width, vm.height, vm.format);
- if (vm.format != VIDEO_PALETTE_RGB24 &&
- vm.format != VIDEO_PALETTE_YUV422 &&
- vm.format != VIDEO_PALETTE_YUYV &&
- vm.format != VIDEO_PALETTE_YUV420 &&
- vm.format != VIDEO_PALETTE_YUV422P &&
- vm.format != VIDEO_PALETTE_GREY)
+ depth = ov511_get_depth(vm.format);
+ if (!depth) {
+ err("VIDIOCMCAPTURE: invalid format (%d)", vm.format);
return -EINVAL;
+ }
- if ((vm.frame != 0) && (vm.frame != 1))
+ if ((vm.frame != 0) && (vm.frame != 1)) {
+ err("VIDIOCMCAPTURE: invalid frame (%d)", vm.frame);
return -EINVAL;
-
- if (vm.width > ov511->maxwidth || vm.height > ov511->maxheight)
+ }
+
+ if (vm.width > ov511->maxwidth || vm.height > ov511->maxheight) {
+ err("VIDIOCMCAPTURE: requested dimensions too big");
return -EINVAL;
+ }
if (ov511->frame[vm.frame].grabstate == FRAME_GRABBING)
return -EBUSY;
@@ -2422,7 +2499,7 @@
ov511->frame[vm.frame].format = vm.format;
ov511->frame[vm.frame].sub_flag = ov511->sub_flag;
ov511->frame[vm.frame].segsize = GET_SEGSIZE(vm.format);
- ov511->frame[vm.frame].depth = GET_DEPTH(vm.format);
+ ov511->frame[vm.frame].depth = depth;
/* Mark it as ready */
ov511->frame[vm.frame].grabstate = FRAME_READY;
@@ -2481,6 +2558,13 @@
goto redo;
}
case FRAME_DONE:
+ if (ov511->snap_enabled && !ov511->frame[frame].snapshot) {
+ int ret;
+ if ((ret = ov511_new_frame(ov511, frame)) < 0)
+ return ret;
+ goto redo;
+ }
+
ov511->frame[frame].grabstate = FRAME_UNUSED;
/* Reset the hardware snapshot button */
@@ -2657,7 +2741,7 @@
PDEBUG(4, "mmap: %ld (%lX) bytes", size, size);
- if (size > (((2 * MAX_DATA_SIZE) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1)))
+ if (size > (((OV511_NUMFRAMES * MAX_DATA_SIZE) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1)))
return -EINVAL;
pos = (unsigned long)ov511->fbuf;
@@ -3057,7 +3141,7 @@
init_waitqueue_head(&ov511->wq);
- if (video_register_device(&ov511->vdev, VFL_TYPE_GRABBER) == -1) {
+ if (video_register_device(&ov511->vdev, VFL_TYPE_GRABBER) < 0) {
err("video_register_device failed");
return -EBUSY;
}
@@ -3144,7 +3228,9 @@
*
***************************************************************************/
-static void* ov511_probe(struct usb_device *dev, unsigned int ifnum)
+static void * __devinit
+ov511_probe(struct usb_device *dev, unsigned int ifnum,
+ const struct usb_device_id *id)
{
struct usb_interface_descriptor *interface;
struct usb_ov511 *ov511;
@@ -3158,17 +3244,8 @@
interface = &dev->actconfig->interface[ifnum].altsetting[0];
- /* Is it an OV511/OV511+? */
- if (dev->descriptor.idVendor != 0x05a9
- && dev->descriptor.idVendor != 0x0813)
- return NULL;
- if (dev->descriptor.idProduct != 0x0511
- && dev->descriptor.idProduct != 0xA511
- && dev->descriptor.idProduct != 0x0002)
- return NULL;
-
/* Checking vendor/product should be enough, but what the hell */
- if (interface->bInterfaceClass != 0xFF)
+ if (interface->bInterfaceClass != 0xFF)
return NULL;
if (interface->bInterfaceSubClass != 0x00)
return NULL;
@@ -3263,7 +3340,8 @@
}
-static void ov511_disconnect(struct usb_device *dev, void *ptr)
+static void __devexit
+ov511_disconnect(struct usb_device *dev, void *ptr)
{
struct usb_ov511 *ov511 = (struct usb_ov511 *) ptr;
int n;
@@ -3321,10 +3399,10 @@
}
static struct usb_driver ov511_driver = {
- "ov511",
- ov511_probe,
- ov511_disconnect,
- { NULL, NULL }
+ name: "ov511",
+ id_table: device_table,
+ probe: ov511_probe,
+ disconnect: ov511_disconnect
};