V4L/DVB (10104): uvcvideo: Add support for video output devices

Extend the range of supported UVC devices by allowing video output devices
matching the following structure:

TT_STREAMING -> VC_PROCESSING_UNIT -> VC_EXTENSION_UNIT{0,n} -> OTT_*

Video output devices are reported with the V4L2_CAP_VIDEO_OUTPUT capability
flag and are subject to the same restrictions as video input devices.

Signed-off-by: Laurent Pinchart <laurent.pinchart@skynet.be>
Signed-off-by: Mauro Carvalho Chehab <mchehab@redhat.com>
Author: Laurent Pinchart, 2008-12-28 22:32:29 -03:00; committed by Mauro Carvalho Chehab
parent 538e7a004b
commit ff924203c9
5 changed files with 252 additions and 85 deletions
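
For context, a minimal userspace sketch (not part of this commit; the
/dev/video0 path is only an example) of how an application can tell the new
output devices apart from capture devices, using the V4L2_CAP_VIDEO_OUTPUT
flag that VIDIOC_QUERYCAP now reports for them:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_capability cap;
	int fd = open("/dev/video0", O_RDWR);

	if (fd < 0) {
		perror("open");
		return 1;
	}

	memset(&cap, 0, sizeof cap);
	if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		perror("VIDIOC_QUERYCAP");
		close(fd);
		return 1;
	}

	if (cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)
		printf("%s: video output device\n", cap.card);
	else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
		printf("%s: video capture device\n", cap.card);

	close(fd);
	return 0;
}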

drivers/media/video/uvc/uvc_driver.c

@@ -12,8 +12,8 @@
*/
/*
* This driver aims to support video input devices compliant with the 'USB
* Video Class' specification.
* This driver aims to support video input and output devices compliant with the
* 'USB Video Class' specification.
*
* The driver doesn't support the deprecated v4l1 interface. It implements the
* mmap capture method only, and doesn't do any image format conversion in
@@ -609,46 +609,55 @@ static int uvc_parse_streaming(struct uvc_device *dev,
}
/* Parse the header descriptor. */
if (buffer[2] == VS_OUTPUT_HEADER) {
uvc_trace(UVC_TRACE_DESCR, "device %d videostreaming interface "
"%d OUTPUT HEADER descriptor is not supported.\n",
dev->udev->devnum, alts->desc.bInterfaceNumber);
goto error;
} else if (buffer[2] == VS_INPUT_HEADER) {
p = buflen >= 5 ? buffer[3] : 0;
n = buflen >= 12 ? buffer[12] : 0;
switch (buffer[2]) {
case VS_OUTPUT_HEADER:
streaming->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
size = 9;
break;
if (buflen < 13 + p*n || buffer[2] != VS_INPUT_HEADER) {
uvc_trace(UVC_TRACE_DESCR, "device %d videostreaming "
"interface %d INPUT HEADER descriptor is "
"invalid.\n", dev->udev->devnum,
alts->desc.bInterfaceNumber);
goto error;
}
case VS_INPUT_HEADER:
streaming->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
size = 13;
break;
streaming->header.bNumFormats = p;
streaming->header.bEndpointAddress = buffer[6];
streaming->header.bmInfo = buffer[7];
streaming->header.bTerminalLink = buffer[8];
streaming->header.bStillCaptureMethod = buffer[9];
streaming->header.bTriggerSupport = buffer[10];
streaming->header.bTriggerUsage = buffer[11];
streaming->header.bControlSize = n;
streaming->header.bmaControls = kmalloc(p*n, GFP_KERNEL);
if (streaming->header.bmaControls == NULL) {
ret = -ENOMEM;
goto error;
}
memcpy(streaming->header.bmaControls, &buffer[13], p*n);
} else {
default:
uvc_trace(UVC_TRACE_DESCR, "device %d videostreaming interface "
"%d HEADER descriptor not found.\n", dev->udev->devnum,
alts->desc.bInterfaceNumber);
goto error;
}
p = buflen >= 4 ? buffer[3] : 0;
n = buflen >= size ? buffer[size-1] : 0;
if (buflen < size + p*n) {
uvc_trace(UVC_TRACE_DESCR, "device %d videostreaming "
"interface %d HEADER descriptor is invalid.\n",
dev->udev->devnum, alts->desc.bInterfaceNumber);
goto error;
}
streaming->header.bNumFormats = p;
streaming->header.bEndpointAddress = buffer[6];
if (buffer[2] == VS_INPUT_HEADER) {
streaming->header.bmInfo = buffer[7];
streaming->header.bTerminalLink = buffer[8];
streaming->header.bStillCaptureMethod = buffer[9];
streaming->header.bTriggerSupport = buffer[10];
streaming->header.bTriggerUsage = buffer[11];
} else {
streaming->header.bTerminalLink = buffer[7];
}
streaming->header.bControlSize = n;
streaming->header.bmaControls = kmalloc(p*n, GFP_KERNEL);
if (streaming->header.bmaControls == NULL) {
ret = -ENOMEM;
goto error;
}
memcpy(streaming->header.bmaControls, &buffer[size], p*n);
buflen -= buffer[0];
buffer += buffer[0];
@@ -1258,6 +1267,26 @@ static int uvc_scan_chain_entity(struct uvc_video_device *video,
list_add_tail(&entity->chain, &video->iterms);
break;
case TT_STREAMING:
if (uvc_trace_param & UVC_TRACE_PROBE)
printk(" <- IT %d\n", entity->id);
if (!UVC_ENTITY_IS_ITERM(entity)) {
uvc_trace(UVC_TRACE_DESCR, "Unsupported input "
"terminal %u.\n", entity->id);
return -1;
}
if (video->sterm != NULL) {
uvc_trace(UVC_TRACE_DESCR, "Found multiple streaming "
"entities in chain.\n");
return -1;
}
list_add_tail(&entity->chain, &video->iterms);
video->sterm = entity;
break;
default:
uvc_trace(UVC_TRACE_DESCR, "Unsupported entity type "
"0x%04x found in chain.\n", UVC_ENTITY_TYPE(entity));
@@ -1368,6 +1397,10 @@ static int uvc_scan_chain(struct uvc_video_device *video)
entity = video->oterm;
uvc_trace(UVC_TRACE_PROBE, "Scanning UVC chain: OT %d", entity->id);
if (UVC_ENTITY_TYPE(entity) == TT_STREAMING)
video->sterm = entity;
id = entity->output.bSourceID;
while (id != 0) {
prev = entity;
@@ -1396,8 +1429,11 @@ static int uvc_scan_chain(struct uvc_video_device *video)
return id;
}
/* Initialize the video buffers queue. */
uvc_queue_init(&video->queue);
if (video->sterm == NULL) {
uvc_trace(UVC_TRACE_DESCR, "No streaming entity found in "
"chain.\n");
return -1;
}
return 0;
}
@@ -1408,7 +1444,8 @@ static int uvc_scan_chain(struct uvc_video_device *video)
* The driver currently supports a single video device per control interface
* only. The terminal and units must match the following structure:
*
* ITT_CAMERA -> VC_PROCESSING_UNIT -> VC_EXTENSION_UNIT{0,n} -> TT_STREAMING
* ITT_* -> VC_PROCESSING_UNIT -> VC_EXTENSION_UNIT{0,n} -> TT_STREAMING
* TT_STREAMING -> VC_PROCESSING_UNIT -> VC_EXTENSION_UNIT{0,n} -> OTT_*
*
* The Extension Units, if present, must have a single input pin. The
* Processing Unit and Extension Units can be in any order. Additional
@@ -1425,7 +1462,7 @@ static int uvc_register_video(struct uvc_device *dev)
list_for_each_entry(term, &dev->entities, list) {
struct uvc_streaming *streaming;
if (UVC_ENTITY_TYPE(term) != TT_STREAMING)
if (!UVC_ENTITY_IS_TERM(term) || !UVC_ENTITY_IS_OTERM(term))
continue;
memset(&dev->video, 0, sizeof dev->video);
@@ -1438,7 +1475,8 @@ static int uvc_register_video(struct uvc_device *dev)
continue;
list_for_each_entry(streaming, &dev->streaming, list) {
if (streaming->header.bTerminalLink == term->id) {
if (streaming->header.bTerminalLink ==
dev->video.sterm->id) {
dev->video.streaming = streaming;
found = 1;
break;
@@ -1464,6 +1502,9 @@ static int uvc_register_video(struct uvc_device *dev)
printk(" -> %d).\n", dev->video.oterm->id);
}
/* Initialize the video buffers queue. */
uvc_queue_init(&dev->video.queue, dev->video.streaming->type);
/* Initialize the streaming interface with default streaming
* parameters.
*/

drivers/media/video/uvc/uvc_queue.c

@@ -79,12 +79,13 @@
*
*/
void uvc_queue_init(struct uvc_video_queue *queue)
void uvc_queue_init(struct uvc_video_queue *queue, enum v4l2_buf_type type)
{
mutex_init(&queue->mutex);
spin_lock_init(&queue->irqlock);
INIT_LIST_HEAD(&queue->mainqueue);
INIT_LIST_HEAD(&queue->irqqueue);
queue->type = type;
}
/*
@@ -132,7 +133,7 @@ int uvc_alloc_buffers(struct uvc_video_queue *queue, unsigned int nbuffers,
queue->buffer[i].buf.index = i;
queue->buffer[i].buf.m.offset = i * bufsize;
queue->buffer[i].buf.length = buflength;
queue->buffer[i].buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue->buffer[i].buf.type = queue->type;
queue->buffer[i].buf.sequence = 0;
queue->buffer[i].buf.field = V4L2_FIELD_NONE;
queue->buffer[i].buf.memory = V4L2_MEMORY_MMAP;
@@ -226,7 +227,7 @@ int uvc_queue_buffer(struct uvc_video_queue *queue,
uvc_trace(UVC_TRACE_CAPTURE, "Queuing buffer %u.\n", v4l2_buf->index);
if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
if (v4l2_buf->type != queue->type ||
v4l2_buf->memory != V4L2_MEMORY_MMAP) {
uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer type (%u) "
"and/or memory (%u).\n", v4l2_buf->type,
@@ -249,6 +250,13 @@ int uvc_queue_buffer(struct uvc_video_queue *queue,
goto done;
}
if (v4l2_buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
v4l2_buf->bytesused > buf->buf.length) {
uvc_trace(UVC_TRACE_CAPTURE, "[E] Bytes used out of bounds.\n");
ret = -EINVAL;
goto done;
}
spin_lock_irqsave(&queue->irqlock, flags);
if (queue->flags & UVC_QUEUE_DISCONNECTED) {
spin_unlock_irqrestore(&queue->irqlock, flags);
@@ -256,7 +264,11 @@ int uvc_queue_buffer(struct uvc_video_queue *queue,
goto done;
}
buf->state = UVC_BUF_STATE_QUEUED;
buf->buf.bytesused = 0;
if (v4l2_buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
buf->buf.bytesused = 0;
else
buf->buf.bytesused = v4l2_buf->bytesused;
list_add_tail(&buf->stream, &queue->mainqueue);
list_add_tail(&buf->queue, &queue->irqqueue);
spin_unlock_irqrestore(&queue->irqlock, flags);
@@ -289,7 +301,7 @@ int uvc_dequeue_buffer(struct uvc_video_queue *queue,
struct uvc_buffer *buf;
int ret = 0;
if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
if (v4l2_buf->type != queue->type ||
v4l2_buf->memory != V4L2_MEMORY_MMAP) {
uvc_trace(UVC_TRACE_CAPTURE, "[E] Invalid buffer type (%u) "
"and/or memory (%u).\n", v4l2_buf->type,
@@ -397,6 +409,7 @@ int uvc_queue_enable(struct uvc_video_queue *queue, int enable)
}
queue->sequence = 0;
queue->flags |= UVC_QUEUE_STREAMING;
queue->buf_used = 0;
} else {
uvc_queue_cancel(queue, 0);
INIT_LIST_HEAD(&queue->mainqueue);
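
With the queue changes above, output applications are expected to set
bytesused themselves when queuing a buffer, and the new bounds check rejects
values larger than the buffer length. A hedged usage sketch (not part of the
patch), assuming fd, index and nbytes come from the caller's usual
VIDIOC_REQBUFS/mmap setup:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int queue_output_buffer(int fd, unsigned int index, unsigned int nbytes)
{
	struct v4l2_buffer buf;

	memset(&buf, 0, sizeof buf);
	buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.index = index;
	/* Amount of valid data already written to the mmap()ed buffer. */
	buf.bytesused = nbytes;

	return ioctl(fd, VIDIOC_QBUF, &buf);
}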

drivers/media/video/uvc/uvc_v4l2.c

@@ -110,7 +110,7 @@ static int uvc_v4l2_try_format(struct uvc_video_device *video,
int ret = 0;
__u8 *fcc;
if (fmt->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (fmt->type != video->streaming->type)
return -EINVAL;
fcc = (__u8 *)&fmt->fmt.pix.pixelformat;
@@ -216,7 +216,7 @@ static int uvc_v4l2_get_format(struct uvc_video_device *video,
struct uvc_format *format = video->streaming->cur_format;
struct uvc_frame *frame = video->streaming->cur_frame;
if (fmt->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (fmt->type != video->streaming->type)
return -EINVAL;
if (format == NULL || frame == NULL)
@@ -242,7 +242,7 @@ static int uvc_v4l2_set_format(struct uvc_video_device *video,
struct uvc_frame *frame;
int ret;
if (fmt->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (fmt->type != video->streaming->type)
return -EINVAL;
if (uvc_queue_streaming(&video->queue))
@@ -264,7 +264,7 @@ static int uvc_v4l2_get_streamparm(struct uvc_video_device *video,
{
uint32_t numerator, denominator;
if (parm->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (parm->type != video->streaming->type)
return -EINVAL;
numerator = video->streaming->ctrl.dwFrameInterval;
@@ -272,13 +272,21 @@ static int uvc_v4l2_get_streamparm(struct uvc_video_device *video,
uvc_simplify_fraction(&numerator, &denominator, 8, 333);
memset(parm, 0, sizeof *parm);
parm->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
parm->parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
parm->parm.capture.capturemode = 0;
parm->parm.capture.timeperframe.numerator = numerator;
parm->parm.capture.timeperframe.denominator = denominator;
parm->parm.capture.extendedmode = 0;
parm->parm.capture.readbuffers = 0;
parm->type = video->streaming->type;
if (video->streaming->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
parm->parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
parm->parm.capture.capturemode = 0;
parm->parm.capture.timeperframe.numerator = numerator;
parm->parm.capture.timeperframe.denominator = denominator;
parm->parm.capture.extendedmode = 0;
parm->parm.capture.readbuffers = 0;
} else {
parm->parm.output.capability = V4L2_CAP_TIMEPERFRAME;
parm->parm.output.outputmode = 0;
parm->parm.output.timeperframe.numerator = numerator;
parm->parm.output.timeperframe.denominator = denominator;
}
return 0;
}
@@ -288,24 +296,27 @@ static int uvc_v4l2_set_streamparm(struct uvc_video_device *video,
{
struct uvc_frame *frame = video->streaming->cur_frame;
struct uvc_streaming_control probe;
struct v4l2_fract timeperframe;
uint32_t interval;
int ret;
if (parm->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (parm->type != video->streaming->type)
return -EINVAL;
if (uvc_queue_streaming(&video->queue))
return -EBUSY;
if (parm->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
timeperframe = parm->parm.capture.timeperframe;
else
timeperframe = parm->parm.output.timeperframe;
memcpy(&probe, &video->streaming->ctrl, sizeof probe);
interval = uvc_fraction_to_interval(
parm->parm.capture.timeperframe.numerator,
parm->parm.capture.timeperframe.denominator);
interval = uvc_fraction_to_interval(timeperframe.numerator,
timeperframe.denominator);
uvc_trace(UVC_TRACE_FORMAT, "Setting frame interval to %u/%u (%u).\n",
parm->parm.capture.timeperframe.numerator,
parm->parm.capture.timeperframe.denominator,
interval);
timeperframe.numerator, timeperframe.denominator, interval);
probe.dwFrameInterval = uvc_try_frame_interval(frame, interval);
/* Probe the device with the new settings. */
@@ -315,11 +326,15 @@ static int uvc_v4l2_set_streamparm(struct uvc_video_device *video,
memcpy(&video->streaming->ctrl, &probe, sizeof probe);
/* Return the actual frame period. */
parm->parm.capture.timeperframe.numerator = probe.dwFrameInterval;
parm->parm.capture.timeperframe.denominator = 10000000;
uvc_simplify_fraction(&parm->parm.capture.timeperframe.numerator,
&parm->parm.capture.timeperframe.denominator,
8, 333);
timeperframe.numerator = probe.dwFrameInterval;
timeperframe.denominator = 10000000;
uvc_simplify_fraction(&timeperframe.numerator,
&timeperframe.denominator, 8, 333);
if (parm->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
parm->parm.capture.timeperframe = timeperframe;
else
parm->parm.output.timeperframe = timeperframe;
return 0;
}
@@ -476,8 +491,12 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
strncpy(cap->bus_info, video->dev->udev->bus->bus_name,
sizeof cap->bus_info);
cap->version = DRIVER_VERSION_NUMBER;
cap->capabilities = V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING;
if (video->streaming->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
cap->capabilities = V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING;
else
cap->capabilities = V4L2_CAP_VIDEO_OUTPUT
| V4L2_CAP_STREAMING;
break;
}
@@ -655,7 +674,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
struct v4l2_fmtdesc *fmt = arg;
struct uvc_format *format;
if (fmt->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
if (fmt->type != video->streaming->type ||
fmt->index >= video->streaming->nformats)
return -EINVAL;
@@ -794,7 +813,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
struct v4l2_cropcap *ccap = arg;
struct uvc_frame *frame = video->streaming->cur_frame;
if (ccap->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (ccap->type != video->streaming->type)
return -EINVAL;
ccap->bounds.left = 0;
@@ -820,7 +839,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
unsigned int bufsize =
video->streaming->ctrl.dwMaxVideoFrameSize;
if (rb->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
if (rb->type != video->streaming->type ||
rb->memory != V4L2_MEMORY_MMAP)
return -EINVAL;
@@ -840,7 +859,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
{
struct v4l2_buffer *buf = arg;
if (buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (buf->type != video->streaming->type)
return -EINVAL;
if (!uvc_has_privileges(handle))
@@ -866,7 +885,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
{
int *type = arg;
if (*type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (*type != video->streaming->type)
return -EINVAL;
if (!uvc_has_privileges(handle))
@@ -881,7 +900,7 @@ static int uvc_v4l2_do_ioctl(struct file *file, unsigned int cmd, void *arg)
{
int *type = arg;
if (*type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (*type != video->streaming->type)
return -EINVAL;
if (!uvc_has_privileges(handle))
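
The uvc_v4l2_set_streamparm() changes above route frame-interval requests
through parm.output for output devices. A usage sketch under that assumption
(not from the patch; fd is an open output node and the 30 fps request is only
an example; the driver writes back the interval it actually selected):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int set_output_frame_rate(int fd)
{
	struct v4l2_streamparm parm;

	memset(&parm, 0, sizeof parm);
	parm.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	parm.parm.output.timeperframe.numerator = 1;
	parm.parm.output.timeperframe.denominator = 30;

	if (ioctl(fd, VIDIOC_S_PARM, &parm) < 0)
		return -1;

	/* The returned timeperframe holds the simplified actual frame period. */
	return parm.parm.output.timeperframe.denominator;
}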

drivers/media/video/uvc/uvc_video.c

@@ -453,6 +453,34 @@ static void uvc_video_decode_end(struct uvc_video_device *video,
}
}
static int uvc_video_encode_header(struct uvc_video_device *video,
struct uvc_buffer *buf, __u8 *data, int len)
{
data[0] = 2; /* Header length */
data[1] = UVC_STREAM_EOH | UVC_STREAM_EOF
| (video->last_fid & UVC_STREAM_FID);
return 2;
}
static int uvc_video_encode_data(struct uvc_video_device *video,
struct uvc_buffer *buf, __u8 *data, int len)
{
struct uvc_video_queue *queue = &video->queue;
unsigned int nbytes;
void *mem;
/* Copy video data to the URB buffer. */
mem = queue->mem + buf->buf.m.offset + queue->buf_used;
nbytes = min((unsigned int)len, buf->buf.bytesused - queue->buf_used);
nbytes = min(video->bulk.max_payload_size - video->bulk.payload_size,
nbytes);
memcpy(data, mem, nbytes);
queue->buf_used += nbytes;
return nbytes;
}
/* ------------------------------------------------------------------------
* URB handling
*/
@@ -559,6 +587,48 @@ static void uvc_video_decode_bulk(struct urb *urb,
}
}
static void uvc_video_encode_bulk(struct urb *urb,
struct uvc_video_device *video, struct uvc_buffer *buf)
{
u8 *mem = urb->transfer_buffer;
int len = video->urb_size, ret;
if (buf == NULL) {
urb->transfer_buffer_length = 0;
return;
}
/* If the URB is the first of its payload, add the header. */
if (video->bulk.header_size == 0) {
ret = uvc_video_encode_header(video, buf, mem, len);
video->bulk.header_size = ret;
video->bulk.payload_size += ret;
mem += ret;
len -= ret;
}
/* Process video data. */
ret = uvc_video_encode_data(video, buf, mem, len);
video->bulk.payload_size += ret;
len -= ret;
if (buf->buf.bytesused == video->queue.buf_used ||
video->bulk.payload_size == video->bulk.max_payload_size) {
if (buf->buf.bytesused == video->queue.buf_used) {
video->queue.buf_used = 0;
buf->state = UVC_BUF_STATE_DONE;
uvc_queue_next_buffer(&video->queue, buf);
video->last_fid ^= UVC_STREAM_FID;
}
video->bulk.header_size = 0;
video->bulk.payload_size = 0;
}
urb->transfer_buffer_length = video->urb_size - len;
}
static void uvc_video_complete(struct urb *urb)
{
struct uvc_video_device *video = urb->context;
@@ -756,7 +826,15 @@ static int uvc_init_video_bulk(struct uvc_video_device *video,
if (uvc_alloc_urb_buffers(video, size) < 0)
return -ENOMEM;
pipe = usb_rcvbulkpipe(video->dev->udev, ep->desc.bEndpointAddress);
if (usb_endpoint_dir_in(&ep->desc))
pipe = usb_rcvbulkpipe(video->dev->udev,
ep->desc.bEndpointAddress);
else
pipe = usb_sndbulkpipe(video->dev->udev,
ep->desc.bEndpointAddress);
if (video->streaming->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
size = 0;
for (i = 0; i < UVC_URBS; ++i) {
urb = usb_alloc_urb(0, gfp_flags);
@@ -977,12 +1055,22 @@ int uvc_video_init(struct uvc_video_device *video)
atomic_set(&video->active, 0);
/* Select the video decoding function */
if (video->dev->quirks & UVC_QUIRK_BUILTIN_ISIGHT)
video->decode = uvc_video_decode_isight;
else if (video->streaming->intf->num_altsetting > 1)
video->decode = uvc_video_decode_isoc;
else
video->decode = uvc_video_decode_bulk;
if (video->streaming->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
if (video->dev->quirks & UVC_QUIRK_BUILTIN_ISIGHT)
video->decode = uvc_video_decode_isight;
else if (video->streaming->intf->num_altsetting > 1)
video->decode = uvc_video_decode_isoc;
else
video->decode = uvc_video_decode_bulk;
} else {
if (video->streaming->intf->num_altsetting == 1)
video->decode = uvc_video_encode_bulk;
else {
uvc_printk(KERN_INFO, "Isochronous endpoints are not "
"supported for video output devices.\n");
return -EINVAL;
}
}
return 0;
}

drivers/media/video/uvc/uvcvideo.h

@@ -529,6 +529,7 @@ struct uvc_streaming {
__u16 maxpsize;
struct uvc_streaming_header header;
enum v4l2_buf_type type;
unsigned int nformats;
struct uvc_format *format;
@@ -564,12 +565,15 @@ struct uvc_buffer {
#define UVC_QUEUE_DROP_INCOMPLETE (1 << 2)
struct uvc_video_queue {
enum v4l2_buf_type type;
void *mem;
unsigned int flags;
__u32 sequence;
unsigned int count;
unsigned int buf_size;
unsigned int buf_used;
struct uvc_buffer buffer[UVC_MAX_VIDEO_BUFFERS];
struct mutex mutex; /* protects buffers and mainqueue */
spinlock_t irqlock; /* protects irqqueue */
@@ -584,8 +588,9 @@ struct uvc_video_device {
atomic_t active;
unsigned int frozen : 1;
struct list_head iterms;
struct uvc_entity *oterm;
struct list_head iterms; /* Input terminals */
struct uvc_entity *oterm; /* Output terminal */
struct uvc_entity *sterm; /* USB streaming terminal */
struct uvc_entity *processing;
struct uvc_entity *selector;
struct list_head extensions;
@@ -726,7 +731,8 @@ extern struct uvc_driver uvc_driver;
extern void uvc_delete(struct kref *kref);
/* Video buffers queue management. */
extern void uvc_queue_init(struct uvc_video_queue *queue);
extern void uvc_queue_init(struct uvc_video_queue *queue,
enum v4l2_buf_type type);
extern int uvc_alloc_buffers(struct uvc_video_queue *queue,
unsigned int nbuffers, unsigned int buflength);
extern int uvc_free_buffers(struct uvc_video_queue *queue);