Device Drivers ---> <*> Multimedia support ---> [*] Video capture adapters ---> [*] V4L USB devices ---> <M> GSPCA based webcams ---> <M> ZC3XX USB Camera Driver
The help text for "GSPCA based webcams" shows CONFIG_USB_GSPCA,
and the help text for "ZC3XX USB Camera Driver" shows CONFIG_USB_GSPCA_ZC3XX.
Searching the Makefile for these two options turns up two lines:
obj-$(CONFIG_USB_GSPCA_ZC3XX) += gspca_zc3xx.o
obj-$(CONFIG_USB_GSPCA) += gspca_main.o
and, further down:
gspca_zc3xx-objs := zc3xx.o
gspca_main-objs := gspca.o
Then look under drivers/media/video/gspca/ for zc3xx.c and gspca.c --
those are the sources.
Building them as modules produces gspca_zc3xx.ko and gspca_main.ko. --------- Copy them onto the board and load them:
[root@FriendlyARM plg]# insmod gspca_main.ko
gspca: main v2.7.0 registered
If you load gspca_zc3xx.ko first, you get an error. --------- That error is quite friendly: if you don't know how the two modules are related, deliberately triggering it tells you.
[root@FriendlyARM plg]# insmod gspca_zc3xx.ko
gspca: probing 0ac8:301b
zc3xx: probe 2wr ov vga 0x0000
zc3xx: probe sensor -> 0011
zc3xx: Find Sensor HV7131R(c)
gspca: probe ok
usbcore: registered new interface driver zc3xx
zc3xx: registered
And this is the error when gspca_zc3xx.ko is loaded without gspca_main.ko:
[root@FriendlyARM plg]# insmod gspca_zc3xx.ko
gspca_zc3xx: Unknown symbol gspca_frame_add
gspca_zc3xx: Unknown symbol gspca_debug
gspca_zc3xx: Unknown symbol gspca_disconnect
gspca_zc3xx: Unknown symbol gspca_dev_probe
insmod: cannot insert 'gspca_zc3xx.ko': unknown symbol in module or invalid parameter
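The load-order requirement comes from symbol dependencies: gspca_main.ko exports functions such as gspca_dev_probe and gspca_frame_add (EXPORT_SYMBOL(gspca_dev_probe) appears at the end of gspca_dev_probe() later in this article), and gspca_zc3xx.ko links against them; modprobe would resolve the order automatically from modules.dep. A minimal sketch of the same situation with two hypothetical modules (all names here are invented for illustration):

/* provider.c -- plays the role of gspca_main.ko: exports a symbol */
#include <linux/module.h>
#include <linux/kernel.h>

int provider_add(int a, int b)		/* hypothetical exported helper */
{
	return a + b;
}
EXPORT_SYMBOL(provider_add);

static int __init provider_init(void)
{
	pr_info("provider: loaded\n");
	return 0;
}
static void __exit provider_exit(void)
{
	pr_info("provider: unloaded\n");
}
module_init(provider_init);
module_exit(provider_exit);
MODULE_LICENSE("GPL");

/* consumer.c -- plays the role of gspca_zc3xx.ko: uses the exported symbol.
 * "insmod consumer.ko" fails with "Unknown symbol provider_add" unless
 * provider.ko was loaded first. */
#include <linux/module.h>
#include <linux/kernel.h>

extern int provider_add(int a, int b);

static int __init consumer_init(void)
{
	pr_info("consumer: 2 + 3 = %d\n", provider_add(2, 3));
	return 0;
}
static void __exit consumer_exit(void)
{
}
module_init(consumer_init);
module_exit(consumer_exit);
MODULE_LICENSE("GPL");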
/*********************************************************************************************************************************************************/
Next, starting from the moment the USB camera is plugged in and ending where images are handed to the application, let's read the source of this "universal" webcam driver and see how its French author ties V4L2, the gspca framework, and so many camera sub-drivers together.
In short: each vendor sub-driver, zc3xx.c for example, calls usb_register(&usb_driver), and the usb_driver's id_table lists that vendor's own vendor IDs and product IDs.
At USB probe time the sub-driver calls gspca_dev_probe() in gspca.c, which registers a V4L2 driver and creates /dev/videoX; from then on user space operates the camera through the V4L2 interface that gspca implements.
When the USB camera (a zc0301pl here) is plugged in, usbcore learns the device's details through enumeration, including the two IDs, and compares them against the usb_device_id tables of the USB drivers registered on the bus (zc3xx.c among them). On a match it calls that driver's probe function, sd_probe(), handing it the device's resources (the usb_interface) as an argument.
For example, the zc0301pl has idVendor=0ac8, idProduct=301b, which is in the table below (a toy sketch of how this table lookup works follows the table):
zc3xx.c
(sd_probe() passes this sd_desc to gspca_dev_probe(intf, id, &sd_desc, sizeof(struct sd), THIS_MODULE).)
static const struct sd_desc sd_desc = {
.name = MODULE_NAME,
.ctrls = sd_ctrls,
.nctrls = ARRAY_SIZE(sd_ctrls),
.config = sd_config,
.init = sd_init,
.start = sd_start,
.stop0 = sd_stop0,
.pkt_scan = sd_pkt_scan,
.querymenu = sd_querymenu,
.get_jcomp = sd_get_jcomp,
.set_jcomp = sd_set_jcomp,
};
static const __devinitdata struct usb_device_id device_table[] = {
{USB_DEVICE(0x041e, 0x041e)},
{USB_DEVICE(0x041e, 0x4017)},
{USB_DEVICE(0x041e, 0x401c), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x041e, 0x401e)},
{USB_DEVICE(0x041e, 0x401f)},
{USB_DEVICE(0x041e, 0x4022)},
{USB_DEVICE(0x041e, 0x4029)},
{USB_DEVICE(0x041e, 0x4034), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x041e, 0x4035), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x041e, 0x4036)},
{USB_DEVICE(0x041e, 0x403a)},
{USB_DEVICE(0x041e, 0x4051), .driver_info = SENSOR_TAS5130C_VF0250},
{USB_DEVICE(0x041e, 0x4053), .driver_info = SENSOR_TAS5130C_VF0250},
{USB_DEVICE(0x0458, 0x7007)},
{USB_DEVICE(0x0458, 0x700c)},
{USB_DEVICE(0x0458, 0x700f)},
{USB_DEVICE(0x0461, 0x0a00)},
{USB_DEVICE(0x046d, 0x089d), .driver_info = SENSOR_MC501CB},
{USB_DEVICE(0x046d, 0x08a0)},
{USB_DEVICE(0x046d, 0x08a1)},
{USB_DEVICE(0x046d, 0x08a2)},
{USB_DEVICE(0x046d, 0x08a3)},
{USB_DEVICE(0x046d, 0x08a6)},
{USB_DEVICE(0x046d, 0x08a7)},
{USB_DEVICE(0x046d, 0x08a9)},
{USB_DEVICE(0x046d, 0x08aa)},
{USB_DEVICE(0x046d, 0x08ac)},
{USB_DEVICE(0x046d, 0x08ad)},
#if !defined CONFIG_USB_ZC0301 && !defined CONFIG_USB_ZC0301_MODULE
{USB_DEVICE(0x046d, 0x08ae)},
#endif
{USB_DEVICE(0x046d, 0x08af)},
{USB_DEVICE(0x046d, 0x08b9)},
{USB_DEVICE(0x046d, 0x08d7)},
{USB_DEVICE(0x046d, 0x08d9)},
{USB_DEVICE(0x046d, 0x08d8)},
{USB_DEVICE(0x046d, 0x08da)},
{USB_DEVICE(0x046d, 0x08dd), .driver_info = SENSOR_MC501CB},
{USB_DEVICE(0x0471, 0x0325), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x0471, 0x0326), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x0471, 0x032d), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x0471, 0x032e), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x055f, 0xc005)},
{USB_DEVICE(0x055f, 0xd003)},
{USB_DEVICE(0x055f, 0xd004)},
{USB_DEVICE(0x0698, 0x2003)},
{USB_DEVICE(0x0ac8, 0x0301), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x0ac8, 0x0302), .driver_info = SENSOR_PAS106},
{USB_DEVICE(0x0ac8, 0x301b)},//for my usb camera
{USB_DEVICE(0x0ac8, 0x303b)},
{USB_DEVICE(0x0ac8, 0x305b), .driver_info = SENSOR_TAS5130C_VF0250},
{USB_DEVICE(0x0ac8, 0x307b)},
{USB_DEVICE(0x10fd, 0x0128)},
{USB_DEVICE(0x10fd, 0x804d)},
{USB_DEVICE(0x10fd, 0x8050)},
{} /* end of entry */
};
#undef DVNAME
MODULE_DEVICE_TABLE(usb, device_table);
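Conceptually, the matching that usbcore performs against this table is just a walk over the entries comparing idVendor and idProduct; the real usb_match_id() also honours match_flags, device class, interface class and so on. A stand-alone toy version of the idea (not kernel code, entries abbreviated):

#include <stdio.h>

struct id { unsigned short vendor, product; unsigned long driver_info; };

static const struct id table[] = {
	{ 0x0ac8, 0x0301, 1 },		/* e.g. an entry carrying driver_info (SENSOR_PAS106) */
	{ 0x0ac8, 0x301b, 0 },		/* the camera used in this article */
	{ 0, 0, 0 }			/* end of table */
};

/* walk the table looking for an exact VID/PID match */
static const struct id *match(unsigned short v, unsigned short p)
{
	const struct id *e;

	for (e = table; e->vendor || e->product; e++)
		if (e->vendor == v && e->product == p)
			return e;
	return NULL;			/* this driver does not claim the device */
}

int main(void)
{
	const struct id *e = match(0x0ac8, 0x301b);

	if (e)
		printf("matched, driver_info=%lu -> usbcore calls this driver's probe()\n",
		       e->driver_info);
	else
		printf("no match -> this driver's probe() is not called\n");
	return 0;
}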
/* -- device connect -- */
static int sd_probe(struct usb_interface *intf,
const struct usb_device_id *id)
{
return gspca_dev_probe(intf, id, &sd_desc, sizeof(struct sd),
THIS_MODULE);
}
/* USB driver */
static struct usb_driver sd_driver = {
.name = MODULE_NAME,
.id_table = device_table,
.probe = sd_probe,
.disconnect = gspca_disconnect,
#ifdef CONFIG_PM
.suspend = gspca_suspend,
.resume = gspca_resume,
#endif
};
static int __init sd_mod_init(void)
{
int ret;
ret = usb_register(&sd_driver);
if (ret < 0)
return ret;
PDEBUG(D_PROBE, "registered");
return 0;
}
static void __exit sd_mod_exit(void)
{
usb_deregister(&sd_driver);
PDEBUG(D_PROBE, "deregistered");
}
module_init(sd_mod_init);
module_exit(sd_mod_exit);
An important parameter here is struct sd_desc sd_desc.
Now follow gspca_dev_probe():
gspca.c
(Inside gspca_dev_probe() a struct gspca_dev *gspca_dev is declared; the structure itself, from gspca.h, is listed after the function.)
/*
* probe and create a new gspca device
*
* This function must be called by the sub-driver when it is
* called for probing a new device.
*/
int gspca_dev_probe(struct usb_interface *intf,
const struct usb_device_id *id,
const struct sd_desc *sd_desc,
int dev_size,
struct module *module)
{
struct usb_interface_descriptor *interface;
struct gspca_dev *gspca_dev;
struct usb_device *dev = interface_to_usbdev(intf);
int ret;
PDEBUG(D_PROBE, "probing %04x:%04x", id->idVendor, id->idProduct);
/* we don't handle multi-config cameras */
if (dev->descriptor.bNumConfigurations != 1)
return -ENODEV;
interface = &intf->cur_altsetting->desc;
if (interface->bInterfaceNumber > 0)
return -ENODEV;
/* create the device */
if (dev_size < sizeof *gspca_dev)
dev_size = sizeof *gspca_dev;
gspca_dev = kzalloc(dev_size, GFP_KERNEL);
if (!gspca_dev) {
err("couldn't kzalloc gspca struct");
return -ENOMEM;
}
gspca_dev->usb_buf = kmalloc(USB_BUF_SZ, GFP_KERNEL);
if (!gspca_dev->usb_buf) {
err("out of memory");
ret = -ENOMEM;
goto out;
}
gspca_dev->dev = dev;
gspca_dev->iface = interface->bInterfaceNumber;
gspca_dev->nbalt = intf->num_altsetting;
gspca_dev->sd_desc = sd_desc;
gspca_dev->nbufread = 2;
gspca_dev->empty_packet = -1; /* don't check the empty packets */
/* configure the subdriver and initialize the USB device */
ret = sd_desc->config(gspca_dev, id);
if (ret < 0)
goto out;
ret = sd_desc->init(gspca_dev);
if (ret < 0)
goto out;
ret = gspca_set_alt0(gspca_dev);
if (ret < 0)
goto out;
gspca_set_default_mode(gspca_dev);
mutex_init(&gspca_dev->usb_lock);
mutex_init(&gspca_dev->read_lock);
mutex_init(&gspca_dev->queue_lock);
init_waitqueue_head(&gspca_dev->wq);
/* init video stuff */
memcpy(&gspca_dev->vdev, &gspca_template, sizeof gspca_template);
gspca_dev->vdev.parent = &intf->dev;
gspca_dev->module = module;
gspca_dev->present = 1;
ret = video_register_device(&gspca_dev->vdev,
VFL_TYPE_GRABBER,
-1);
if (ret < 0) {
err("video_register_device err %d", ret);
goto out;
}
usb_set_intfdata(intf, gspca_dev);
PDEBUG(D_PROBE, "probe ok");
return 0;
out:
kfree(gspca_dev->usb_buf);
kfree(gspca_dev);
return ret;
}
EXPORT_SYMBOL(gspca_dev_probe);
gspca.h
struct gspca_dev {
struct video_device vdev; /* !! must be the first item */
struct module *module; /* subdriver handling the device */
struct usb_device *dev;
struct file *capt_file; /* file doing video capture */
struct cam cam; /* device information */
const struct sd_desc *sd_desc; /* subdriver description */
unsigned ctrl_dis; /* disabled controls (bit map) */
#define USB_BUF_SZ 64
__u8 *usb_buf; /* buffer for USB exchanges */
struct urb *urb[MAX_NURBS];
__u8 *frbuf; /* buffer for nframes */
struct gspca_frame frame[GSPCA_MAX_FRAMES];
__u32 frsz; /* frame size */
char nframes; /* number of frames */
char fr_i; /* frame being filled */
char fr_q; /* next frame to queue */
char fr_o; /* next frame to dequeue */
signed char fr_queue[GSPCA_MAX_FRAMES]; /* frame queue */
__u8 last_packet_type;
__s8 empty_packet; /* if (-1) don't check empty packets */
__u8 streaming;
__u8 curr_mode; /* current camera mode */
__u32 pixfmt; /* current mode parameters */
__u16 width;
__u16 height;
__u32 sequence; /* frame sequence number */
wait_queue_head_t wq; /* wait queue */
struct mutex usb_lock; /* usb exchange protection */
struct mutex read_lock; /* read protection */
struct mutex queue_lock; /* ISOC queue protection */
#ifdef CONFIG_PM
char frozen; /* suspend - resume */
#endif
char users; /* number of opens */
char present; /* device connected */
char nbufread; /* number of buffers for read() */
char nurbs; /* number of allocated URBs */
char memory; /* memory type (V4L2_MEMORY_xxx) */
__u8 iface; /* USB interface number */
__u8 alt; /* USB alternate setting */
__u8 nbalt; /* number of USB alternate settings */
u16 pkt_size; /* ISOC packet size */
};
gspca_dev_probe() then fills the members of gspca_dev from its parameters and allocates memory for some of them.
Next it calls sd_desc->config(gspca_dev, id) and sd_desc->init(gspca_dev); both callbacks are, of course, defined in zc3xx.c (a rough sketch of what they look like follows).
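For orientation only: sd_config typically tells gspca which resolutions and pixel formats the sensor can deliver and identifies the sensor, while sd_init brings the hardware into a known state. The following is a much-simplified sketch in the style of a gspca sub-driver; the mode table values and the comments are illustrative, not the real zc3xx.c contents:

/* illustrative sketch only -- not the real zc3xx.c code */
static const struct v4l2_pix_format vga_mode[] = {
	{320, 240, V4L2_PIX_FMT_JPEG, V4L2_FIELD_NONE,
		.bytesperline = 320,
		.sizeimage = 320 * 240 * 3 / 8 + 590,
		.colorspace = V4L2_COLORSPACE_JPEG,
		.priv = 1},
	{640, 480, V4L2_PIX_FMT_JPEG, V4L2_FIELD_NONE,
		.bytesperline = 640,
		.sizeimage = 640 * 480 * 3 / 8 + 590,
		.colorspace = V4L2_COLORSPACE_JPEG,
		.priv = 0},
};

/* reached from gspca_dev_probe() through sd_desc->config */
static int sd_config(struct gspca_dev *gspca_dev,
			const struct usb_device_id *id)
{
	struct cam *cam = &gspca_dev->cam;

	cam->cam_mode = vga_mode;		/* advertise the supported modes */
	cam->nmodes = ARRAY_SIZE(vga_mode);
	/* a real sub-driver also reads id->driver_info and/or pokes registers
	 * over USB here to identify the sensor -- compare the
	 * "Find Sensor HV7131R" line in the insmod log above */
	return 0;
}

/* reached from gspca_dev_probe() through sd_desc->init */
static int sd_init(struct gspca_dev *gspca_dev)
{
	/* put the bridge/sensor into a known idle state; in zc3xx.c this is
	 * a long series of register writes done through gspca_dev->usb_buf */
	return 0;
}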
After that it executes memcpy(&gspca_dev->vdev, &gspca_template, sizeof gspca_template);
The first argument, gspca_dev->vdev, is a struct video_device:
v4l2-dev.h
struct video_device{
/* device ops */
const struct v4l2_file_operations *fops;
/* sysfs */
struct device dev; /* v4l device */
struct cdev *cdev; /* character device */
/* Set either parent or v4l2_dev if your driver uses v4l2_device */
struct device *parent; /* device parent */
struct v4l2_device *v4l2_dev; /* v4l2_device parent */
/* device info */
char name[32];
int vfl_type;
/* 'minor' is set to -1 if the registration failed */
int minor;
u16 num;
/* use bitops to set/clear/test flags */
unsigned long flags;
/* attribute to differentiate multiple indices on one physical device */
int index;
int debug; /* Activates debug level*/
/* Video standard vars */
v4l2_std_id tvnorms; /* Supported tv norms */
v4l2_std_id current_norm; /* Current tvnorm */
/* callbacks */
void (*release)(struct video_device *vdev);
/* ioctl callbacks */
const struct v4l2_ioctl_ops *ioctl_ops;
};
The second argument, gspca_template, is defined as:
gspca.c
static struct video_device gspca_template = {
.name = "gspca main driver",
.fops = &dev_fops,
.ioctl_ops = &dev_ioctl_ops,
.release = gspca_release,
.minor = -1,
};
As the name suggests, gspca_template is a template: the memcpy() gives the members of gspca_dev->vdev their initial values. The two most important are the function-pointer sets .fops = &dev_fops and .ioctl_ops = &dev_ioctl_ops, defined as follows:
gspca.c
static struct v4l2_file_operations dev_fops = {
.owner = THIS_MODULE,
.open = dev_open,
.release = dev_close,
.read = dev_read,
.mmap = dev_mmap,
.unlocked_ioctl = video_ioctl2,
.poll = dev_poll,
};
static const struct v4l2_ioctl_ops dev_ioctl_ops = {
.vidioc_querycap = vidioc_querycap,
.vidioc_dqbuf = vidioc_dqbuf,
.vidioc_qbuf = vidioc_qbuf,
.vidioc_enum_fmt_vid_cap = vidioc_enum_fmt_vid_cap,
.vidioc_try_fmt_vid_cap = vidioc_try_fmt_vid_cap,
.vidioc_g_fmt_vid_cap = vidioc_g_fmt_vid_cap,
.vidioc_s_fmt_vid_cap = vidioc_s_fmt_vid_cap,
.vidioc_streamon = vidioc_streamon,
.vidioc_queryctrl = vidioc_queryctrl,
.vidioc_g_ctrl = vidioc_g_ctrl,
.vidioc_s_ctrl = vidioc_s_ctrl,
.vidioc_g_audio = vidioc_g_audio,
.vidioc_s_audio = vidioc_s_audio,
.vidioc_enumaudio = vidioc_enumaudio,
.vidioc_querymenu = vidioc_querymenu,
.vidioc_enum_input = vidioc_enum_input,
.vidioc_g_input = vidioc_g_input,
.vidioc_s_input = vidioc_s_input,
.vidioc_reqbufs = vidioc_reqbufs,
.vidioc_querybuf = vidioc_querybuf,
.vidioc_streamoff = vidioc_streamoff,
.vidioc_g_jpegcomp = vidioc_g_jpegcomp,
.vidioc_s_jpegcomp = vidioc_s_jpegcomp,
.vidioc_g_parm = vidioc_g_parm,
.vidioc_s_parm = vidioc_s_parm,
.vidioc_enum_framesizes = vidioc_enum_framesizes,
#ifdef CONFIG_VIDEO_ADV_DEBUG
.vidioc_g_register = vidioc_g_register,
.vidioc_s_register = vidioc_s_register,
#endif
.vidioc_g_chip_ident = vidioc_g_chip_ident,
#ifdef CONFIG_VIDEO_V4L1_COMPAT
.vidiocgmbuf = vidiocgmbuf,
#endif
};
That is as far as we need to follow the memcpy(). Back in gspca_dev_probe(), the next call is
video_register_device(&gspca_dev->vdev, VFL_TYPE_GRABBER, -1);
Its first argument is gspca_dev->vdev, a struct video_device whose dev_fops and dev_ioctl_ops have just been filled in. Follow the call:
v4l2-dev.c
int video_register_device(struct video_device *vdev, int type, int nr)
{
return __video_register_device(vdev, type, nr, 1);
}
static int __video_register_device(struct video_device *vdev, int type, int nr, int warn_if_nr_in_use){ int i = 0; int ret; int minor_offset = 0; int minor_cnt = VIDEO_NUM_DEVICES; const char *name_base; void *priv = video_get_drvdata(vdev); /* A minor value of -1 marks this video device as never having been registered */ vdev->minor = -1; /* the release callback MUST be present */ WARN_ON(!vdev->release); if (!vdev->release) return -EINVAL; /* Part 1: check device type */ switch (type) { case VFL_TYPE_GRABBER: name_base = "video"; break; case VFL_TYPE_VTX: name_base = "vtx"; break; case VFL_TYPE_VBI: name_base = "vbi"; break; case VFL_TYPE_RADIO: name_base = "radio"; break; default: printk(KERN_ERR "%s called with unknown type: %d\n", __func__, type); return -EINVAL; } vdev->vfl_type = type; vdev->cdev = NULL; if (vdev->v4l2_dev && vdev->v4l2_dev->dev) vdev->parent = vdev->v4l2_dev->dev; /* Part 2: find a free minor, device node number and device index. */#ifdef CONFIG_VIDEO_FIXED_MINOR_RANGES /* Keep the ranges for the first four types for historical * reasons. * Newer devices (not yet in place) should use the range * of 128-191 and just pick the first free minor there * (new style). */ switch (type) { case VFL_TYPE_GRABBER: minor_offset = 0; minor_cnt = 64; break; case VFL_TYPE_RADIO: minor_offset = 64; minor_cnt = 64; break; case VFL_TYPE_VTX: minor_offset = 192; minor_cnt = 32; break; case VFL_TYPE_VBI: minor_offset = 224; minor_cnt = 32; break; default: minor_offset = 128; minor_cnt = 64; break; }#endif /* Pick a device node number */ mutex_lock(&videodev_lock); nr = devnode_find(vdev, nr == -1 ? 0 : nr, minor_cnt); if (nr == minor_cnt) nr = devnode_find(vdev, 0, minor_cnt); if (nr == minor_cnt) { printk(KERN_ERR "could not get a free device node number\n"); mutex_unlock(&videodev_lock); return -ENFILE; }#ifdef CONFIG_VIDEO_FIXED_MINOR_RANGES /* 1-on-1 mapping of device node number to minor number */ i = nr;#else /* The device node number and minor numbers are independent, so we just find the first free minor number. */ for (i = 0; i < VIDEO_NUM_DEVICES; i++) if (video_device[i] == NULL) break; if (i == VIDEO_NUM_DEVICES) { mutex_unlock(&videodev_lock); printk(KERN_ERR "could not get a free minor\n"); return -ENFILE; }#endif vdev->minor = i + minor_offset; vdev->num = nr; devnode_set(vdev); /* Should not happen since we thought this minor was free */ WARN_ON(video_device[vdev->minor] != NULL); vdev->index = get_index(vdev); mutex_unlock(&videodev_lock); /* Part 3: Initialize the character device */ vdev->cdev = cdev_alloc(); if (vdev->cdev == NULL) { ret = -ENOMEM; goto cleanup; } if (vdev->fops->unlocked_ioctl) vdev->cdev->ops = &v4l2_unlocked_fops; else vdev->cdev->ops = &v4l2_fops; vdev->cdev->owner = vdev->fops->owner; ret = cdev_add(vdev->cdev, MKDEV(VIDEO_MAJOR, vdev->minor), 1); if (ret < 0) { printk(KERN_ERR "%s: cdev_add failed\n", __func__); kfree(vdev->cdev); vdev->cdev = NULL; goto cleanup; } /* Part 4: register the device with sysfs */ memset(&vdev->dev, 0, sizeof(vdev->dev)); /* The memset above cleared the device's drvdata, so put back the copy we made earlier. 
*/ video_set_drvdata(vdev, priv); vdev->dev.class = &video_class;//note vdev->dev.devt = MKDEV(VIDEO_MAJOR, vdev->minor);//note if (vdev->parent) vdev->dev.parent = vdev->parent; dev_set_name(&vdev->dev, "%s%d", name_base, vdev->num); ret = device_register(&vdev->dev);//note if (ret < 0) { printk(KERN_ERR "%s: device_register failed\n", __func__); goto cleanup; } /* Register the release callback that will be called when the last reference to the device goes away. */ vdev->dev.release = v4l2_device_release; if (nr != -1 && nr != vdev->num && warn_if_nr_in_use) printk(KERN_WARNING "%s: requested %s%d, got %s%d\n", __func__, name_base, nr, name_base, vdev->num); /* Part 5: Activate this minor. The char device can now be used. */ mutex_lock(&videodev_lock); video_device[vdev->minor] = vdev; mutex_unlock(&videodev_lock); return 0;cleanup: mutex_lock(&videodev_lock); if (vdev->cdev) cdev_del(vdev->cdev); devnode_clear(vdev); mutex_unlock(&videodev_lock); /* Mark this video device as never having been registered. */ vdev->minor = -1; return ret;}
__video_register_device() registers the character device and points its file_operations at vdev->cdev->ops = &v4l2_unlocked_fops; it also creates the device node, e.g. /dev/video0. I'm not entirely clear how the node itself gets created ---- device_register(&vdev->dev),
register_chrdev, register_chrdev_region, alloc_chrdev_region --- udev, class, bus, driver, device.
The file_operations of this character device is:
v4l2-dev.c
static const struct file_operations v4l2_unlocked_fops = {
.owner = THIS_MODULE,
.read = v4l2_read,
.write = v4l2_write,
.open = v4l2_open,
.get_unmapped_area = v4l2_get_unmapped_area,
.mmap = v4l2_mmap,
.unlocked_ioctl = v4l2_unlocked_ioctl,
#ifdef CONFIG_COMPAT
.compat_ioctl = v4l2_compat_ioctl32,
#endif
.release = v4l2_release,
.poll = v4l2_poll,
.llseek = no_llseek,
};
When user space opens /dev/video0, the open of this character device's v4l2_unlocked_fops is called; its source:
v4l2-dev.c
static int v4l2_open(struct inode *inode, struct file *filp)
{
struct video_device *vdev;
int ret = 0;
/* Check if the video device is available */
mutex_lock(&videodev_lock);
vdev = video_devdata(filp);
/* return ENODEV if the video device has been removed
already or if it is not registered anymore. */
if (vdev == NULL || video_is_unregistered(vdev)) {
mutex_unlock(&videodev_lock);
return -ENODEV;
}
/* and increase the device refcount */
video_get(vdev);
mutex_unlock(&videodev_lock);
if (vdev->fops->open)
ret = vdev->fops->open(filp);//
/* decrease the refcount in case of an error */
if (ret)
video_put(vdev);
return ret;
}
At the line ret = vdev->fops->open(filp) you can see it jumps to vdev->fops->open(), i.e. the open() of the struct v4l2_file_operations dev_fops shown earlier. Following that open() into gspca.c: it increments a use count and does little else.
When user space calls ioctl on /dev/video0, the v4l2_unlocked_ioctl of the character device's v4l2_unlocked_fops runs, which then calls video_ioctl2() set in dev_fops in gspca.c. Its source:
v4l2-ioctl.c
long video_ioctl2(struct file *file, unsigned int cmd, unsigned long arg)
{
/* Handles IOCTL */
err = __video_do_ioctl(file, cmd, parg);
}
The real work happens in __video_do_ioctl() (also in v4l2-ioctl.c); searching for the case labels is the quickest way to find what each command does:
static long __video_do_ioctl(struct file *file, unsigned int cmd, void *arg){ struct video_device *vfd = video_devdata(file); const struct v4l2_ioctl_ops *ops = vfd->ioctl_ops; void *fh = file->private_data; long ret = -EINVAL; if ((vfd->debug & V4L2_DEBUG_IOCTL) && !(vfd->debug & V4L2_DEBUG_IOCTL_ARG)) { v4l_print_ioctl(vfd->name, cmd); printk(KERN_CONT "\n"); } if (ops == NULL) { printk(KERN_WARNING "videodev: \"%s\" has no ioctl_ops.\n", vfd->name); return -EINVAL; }#ifdef CONFIG_VIDEO_V4L1_COMPAT /*********************************************************** Handles calls to the obsoleted V4L1 API Due to the nature of VIDIOCGMBUF, each driver that supports V4L1 should implement its own handler for this ioctl. ***********************************************************/ /* --- streaming capture ------------------------------------- */ if (cmd == VIDIOCGMBUF) { struct video_mbuf *p = arg; if (!ops->vidiocgmbuf) return ret; ret = ops->vidiocgmbuf(file, fh, p); if (!ret) dbgarg(cmd, "size=%d, frames=%d, offsets=0x%08lx\n", p->size, p->frames, (unsigned long)p->offsets); return ret; } /******************************************************** All other V4L1 calls are handled by v4l1_compat module. Those calls will be translated into V4L2 calls, and __video_do_ioctl will be called again, with one or more V4L2 ioctls. ********************************************************/ if (_IOC_TYPE(cmd) == 'v' && _IOC_NR(cmd) < BASE_VIDIOCPRIVATE) return v4l_compat_translate_ioctl(file, cmd, arg, __video_do_ioctl);#endif switch (cmd) { /* --- capabilities ------------------------------------------ */ case VIDIOC_QUERYCAP: { struct v4l2_capability *cap = (struct v4l2_capability *)arg; if (!ops->vidioc_querycap) break; ret = ops->vidioc_querycap(file, fh, cap); if (!ret) dbgarg(cmd, "driver=%s, card=%s, bus=%s, " "version=0x%08x, " "capabilities=0x%08x\n", cap->driver, cap->card, cap->bus_info, cap->version, cap->capabilities); break; } /* --- priority ------------------------------------------ */ case VIDIOC_G_PRIORITY: { enum v4l2_priority *p = arg; if (!ops->vidioc_g_priority) break; ret = ops->vidioc_g_priority(file, fh, p); if (!ret) dbgarg(cmd, "priority is %d\n", *p); break; } case VIDIOC_S_PRIORITY: { enum v4l2_priority *p = arg; if (!ops->vidioc_s_priority) break; dbgarg(cmd, "setting priority to %d\n", *p); ret = ops->vidioc_s_priority(file, fh, *p); break; } /* --- capture ioctls ---------------------------------------- */ case VIDIOC_ENUM_FMT: { struct v4l2_fmtdesc *f = arg; switch (f->type) { case V4L2_BUF_TYPE_VIDEO_CAPTURE: if (ops->vidioc_enum_fmt_vid_cap) ret = ops->vidioc_enum_fmt_vid_cap(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OVERLAY: if (ops->vidioc_enum_fmt_vid_overlay) ret = ops->vidioc_enum_fmt_vid_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT: if (ops->vidioc_enum_fmt_vid_out) ret = ops->vidioc_enum_fmt_vid_out(file, fh, f); break; case V4L2_BUF_TYPE_PRIVATE: if (ops->vidioc_enum_fmt_type_private) ret = ops->vidioc_enum_fmt_type_private(file, fh, f); break; default: break; } if (!ret) dbgarg(cmd, "index=%d, type=%d, flags=%d, " "pixelformat=%c%c%c%c, description='%s'\n", f->index, f->type, f->flags, (f->pixelformat & 0xff), (f->pixelformat >> 8) & 0xff, (f->pixelformat >> 16) & 0xff, (f->pixelformat >> 24) & 0xff, f->description); break; } case VIDIOC_G_FMT: { struct v4l2_format *f = (struct v4l2_format *)arg; /* FIXME: Should be one dump per type */ dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names)); switch (f->type) { case 
V4L2_BUF_TYPE_VIDEO_CAPTURE: if (ops->vidioc_g_fmt_vid_cap) ret = ops->vidioc_g_fmt_vid_cap(file, fh, f); if (!ret) v4l_print_pix_fmt(vfd, &f->fmt.pix); break; case V4L2_BUF_TYPE_VIDEO_OVERLAY: if (ops->vidioc_g_fmt_vid_overlay) ret = ops->vidioc_g_fmt_vid_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT: if (ops->vidioc_g_fmt_vid_out) ret = ops->vidioc_g_fmt_vid_out(file, fh, f); if (!ret) v4l_print_pix_fmt(vfd, &f->fmt.pix); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY: if (ops->vidioc_g_fmt_vid_out_overlay) ret = ops->vidioc_g_fmt_vid_out_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VBI_CAPTURE: if (ops->vidioc_g_fmt_vbi_cap) ret = ops->vidioc_g_fmt_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_VBI_OUTPUT: if (ops->vidioc_g_fmt_vbi_out) ret = ops->vidioc_g_fmt_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE: if (ops->vidioc_g_fmt_sliced_vbi_cap) ret = ops->vidioc_g_fmt_sliced_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT: if (ops->vidioc_g_fmt_sliced_vbi_out) ret = ops->vidioc_g_fmt_sliced_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_PRIVATE: if (ops->vidioc_g_fmt_type_private) ret = ops->vidioc_g_fmt_type_private(file, fh, f); break; } break; } case VIDIOC_S_FMT: { struct v4l2_format *f = (struct v4l2_format *)arg; /* FIXME: Should be one dump per type */ dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names)); switch (f->type) { case V4L2_BUF_TYPE_VIDEO_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.pix); v4l_print_pix_fmt(vfd, &f->fmt.pix); if (ops->vidioc_s_fmt_vid_cap) ret = ops->vidioc_s_fmt_vid_cap(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OVERLAY: CLEAR_AFTER_FIELD(f, fmt.win); if (ops->vidioc_s_fmt_vid_overlay) ret = ops->vidioc_s_fmt_vid_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT: CLEAR_AFTER_FIELD(f, fmt.pix); v4l_print_pix_fmt(vfd, &f->fmt.pix); if (ops->vidioc_s_fmt_vid_out) ret = ops->vidioc_s_fmt_vid_out(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY: CLEAR_AFTER_FIELD(f, fmt.win); if (ops->vidioc_s_fmt_vid_out_overlay) ret = ops->vidioc_s_fmt_vid_out_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VBI_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.vbi); if (ops->vidioc_s_fmt_vbi_cap) ret = ops->vidioc_s_fmt_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_VBI_OUTPUT: CLEAR_AFTER_FIELD(f, fmt.vbi); if (ops->vidioc_s_fmt_vbi_out) ret = ops->vidioc_s_fmt_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.sliced); if (ops->vidioc_s_fmt_sliced_vbi_cap) ret = ops->vidioc_s_fmt_sliced_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT: CLEAR_AFTER_FIELD(f, fmt.sliced); if (ops->vidioc_s_fmt_sliced_vbi_out) ret = ops->vidioc_s_fmt_sliced_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_PRIVATE: /* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */ if (ops->vidioc_s_fmt_type_private) ret = ops->vidioc_s_fmt_type_private(file, fh, f); break; } break; } case VIDIOC_TRY_FMT: { struct v4l2_format *f = (struct v4l2_format *)arg; /* FIXME: Should be one dump per type */ dbgarg(cmd, "type=%s\n", prt_names(f->type, v4l2_type_names)); switch (f->type) { case V4L2_BUF_TYPE_VIDEO_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.pix); if (ops->vidioc_try_fmt_vid_cap) ret = ops->vidioc_try_fmt_vid_cap(file, fh, f); if (!ret) v4l_print_pix_fmt(vfd, &f->fmt.pix); break; case V4L2_BUF_TYPE_VIDEO_OVERLAY: CLEAR_AFTER_FIELD(f, fmt.win); if (ops->vidioc_try_fmt_vid_overlay) ret = ops->vidioc_try_fmt_vid_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT: 
CLEAR_AFTER_FIELD(f, fmt.pix); if (ops->vidioc_try_fmt_vid_out) ret = ops->vidioc_try_fmt_vid_out(file, fh, f); if (!ret) v4l_print_pix_fmt(vfd, &f->fmt.pix); break; case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY: CLEAR_AFTER_FIELD(f, fmt.win); if (ops->vidioc_try_fmt_vid_out_overlay) ret = ops->vidioc_try_fmt_vid_out_overlay(file, fh, f); break; case V4L2_BUF_TYPE_VBI_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.vbi); if (ops->vidioc_try_fmt_vbi_cap) ret = ops->vidioc_try_fmt_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_VBI_OUTPUT: CLEAR_AFTER_FIELD(f, fmt.vbi); if (ops->vidioc_try_fmt_vbi_out) ret = ops->vidioc_try_fmt_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE: CLEAR_AFTER_FIELD(f, fmt.sliced); if (ops->vidioc_try_fmt_sliced_vbi_cap) ret = ops->vidioc_try_fmt_sliced_vbi_cap(file, fh, f); break; case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT: CLEAR_AFTER_FIELD(f, fmt.sliced); if (ops->vidioc_try_fmt_sliced_vbi_out) ret = ops->vidioc_try_fmt_sliced_vbi_out(file, fh, f); break; case V4L2_BUF_TYPE_PRIVATE: /* CLEAR_AFTER_FIELD(f, fmt.raw_data); <- does nothing */ if (ops->vidioc_try_fmt_type_private) ret = ops->vidioc_try_fmt_type_private(file, fh, f); break; } break; } /* FIXME: Those buf reqs could be handled here, with some changes on videobuf to allow its header to be included at videodev2.h or being merged at videodev2. */ case VIDIOC_REQBUFS: { struct v4l2_requestbuffers *p = arg; if (!ops->vidioc_reqbufs) break; ret = check_fmt(ops, p->type); if (ret) break; if (p->type < V4L2_BUF_TYPE_PRIVATE) CLEAR_AFTER_FIELD(p, memory); ret = ops->vidioc_reqbufs(file, fh, p); dbgarg(cmd, "count=%d, type=%s, memory=%s\n", p->count, prt_names(p->type, v4l2_type_names), prt_names(p->memory, v4l2_memory_names)); break; } case VIDIOC_QUERYBUF: { struct v4l2_buffer *p = arg; if (!ops->vidioc_querybuf) break; ret = check_fmt(ops, p->type); if (ret) break; ret = ops->vidioc_querybuf(file, fh, p); if (!ret) dbgbuf(cmd, vfd, p); break; } case VIDIOC_QBUF: { struct v4l2_buffer *p = arg; if (!ops->vidioc_qbuf) break; ret = check_fmt(ops, p->type); if (ret) break; ret = ops->vidioc_qbuf(file, fh, p); if (!ret) dbgbuf(cmd, vfd, p); break; } case VIDIOC_DQBUF: { struct v4l2_buffer *p = arg; if (!ops->vidioc_dqbuf) break; ret = check_fmt(ops, p->type); if (ret) break; ret = ops->vidioc_dqbuf(file, fh, p); if (!ret) dbgbuf(cmd, vfd, p); break; } case VIDIOC_OVERLAY: { int *i = arg; if (!ops->vidioc_overlay) break; dbgarg(cmd, "value=%d\n", *i); ret = ops->vidioc_overlay(file, fh, *i); break; } case VIDIOC_G_FBUF: { struct v4l2_framebuffer *p = arg; if (!ops->vidioc_g_fbuf) break; ret = ops->vidioc_g_fbuf(file, fh, arg); if (!ret) { dbgarg(cmd, "capability=0x%x, flags=%d, base=0x%08lx\n", p->capability, p->flags, (unsigned long)p->base); v4l_print_pix_fmt(vfd, &p->fmt); } break; } case VIDIOC_S_FBUF: { struct v4l2_framebuffer *p = arg; if (!ops->vidioc_s_fbuf) break; dbgarg(cmd, "capability=0x%x, flags=%d, base=0x%08lx\n", p->capability, p->flags, (unsigned long)p->base); v4l_print_pix_fmt(vfd, &p->fmt); ret = ops->vidioc_s_fbuf(file, fh, arg); break; } case VIDIOC_STREAMON: { enum v4l2_buf_type i = *(int *)arg; if (!ops->vidioc_streamon) break; dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names)); ret = ops->vidioc_streamon(file, fh, i); break; } case VIDIOC_STREAMOFF: { enum v4l2_buf_type i = *(int *)arg; if (!ops->vidioc_streamoff) break; dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names)); ret = ops->vidioc_streamoff(file, fh, i); break; } /* ---------- tv norms ---------- */ case VIDIOC_ENUMSTD: { 
struct v4l2_standard *p = arg; v4l2_std_id id = vfd->tvnorms, curr_id = 0; unsigned int index = p->index, i, j = 0; const char *descr = ""; /* Return norm array in a canonical way */ for (i = 0; i <= index && id; i++) { /* last std value in the standards array is 0, so this while always ends there since (id & 0) == 0. */ while ((id & standards[j].std) != standards[j].std) j++; curr_id = standards[j].std; descr = standards[j].descr; j++; if (curr_id == 0) break; if (curr_id != V4L2_STD_PAL && curr_id != V4L2_STD_SECAM && curr_id != V4L2_STD_NTSC) id &= ~curr_id; } if (i <= index) return -EINVAL; v4l2_video_std_construct(p, curr_id, descr); dbgarg(cmd, "index=%d, id=0x%Lx, name=%s, fps=%d/%d, " "framelines=%d\n", p->index, (unsigned long long)p->id, p->name, p->frameperiod.numerator, p->frameperiod.denominator, p->framelines); ret = 0; break; } case VIDIOC_G_STD: { v4l2_std_id *id = arg; ret = 0; /* Calls the specific handler */ if (ops->vidioc_g_std) ret = ops->vidioc_g_std(file, fh, id); else if (vfd->current_norm) *id = vfd->current_norm; else ret = -EINVAL; if (!ret) dbgarg(cmd, "std=0x%08Lx\n", (long long unsigned)*id); break; } case VIDIOC_S_STD: { v4l2_std_id *id = arg, norm; dbgarg(cmd, "std=%08Lx\n", (long long unsigned)*id); norm = (*id) & vfd->tvnorms; if (vfd->tvnorms && !norm) /* Check if std is supported */ break; /* Calls the specific handler */ if (ops->vidioc_s_std) ret = ops->vidioc_s_std(file, fh, &norm); else ret = -EINVAL; /* Updates standard information */ if (ret >= 0) vfd->current_norm = norm; break; } case VIDIOC_QUERYSTD: { v4l2_std_id *p = arg; if (!ops->vidioc_querystd) break; ret = ops->vidioc_querystd(file, fh, arg); if (!ret) dbgarg(cmd, "detected std=%08Lx\n", (unsigned long long)*p); break; } /* ------ input switching ---------- */ /* FIXME: Inputs can be handled inside videodev2 */ case VIDIOC_ENUMINPUT: { struct v4l2_input *p = arg; if (!ops->vidioc_enum_input) break; ret = ops->vidioc_enum_input(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, type=%d, " "audioset=%d, " "tuner=%d, std=%08Lx, status=%d\n", p->index, p->name, p->type, p->audioset, p->tuner, (unsigned long long)p->std, p->status); break; } case VIDIOC_G_INPUT: { unsigned int *i = arg; if (!ops->vidioc_g_input) break; ret = ops->vidioc_g_input(file, fh, i); if (!ret) dbgarg(cmd, "value=%d\n", *i); break; } case VIDIOC_S_INPUT: { unsigned int *i = arg; if (!ops->vidioc_s_input) break; dbgarg(cmd, "value=%d\n", *i); ret = ops->vidioc_s_input(file, fh, *i); break; } /* ------ output switching ---------- */ case VIDIOC_ENUMOUTPUT: { struct v4l2_output *p = arg; if (!ops->vidioc_enum_output) break; ret = ops->vidioc_enum_output(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, type=%d, " "audioset=0x%x, " "modulator=%d, std=0x%08Lx\n", p->index, p->name, p->type, p->audioset, p->modulator, (unsigned long long)p->std); break; } case VIDIOC_G_OUTPUT: { unsigned int *i = arg; if (!ops->vidioc_g_output) break; ret = ops->vidioc_g_output(file, fh, i); if (!ret) dbgarg(cmd, "value=%d\n", *i); break; } case VIDIOC_S_OUTPUT: { unsigned int *i = arg; if (!ops->vidioc_s_output) break; dbgarg(cmd, "value=%d\n", *i); ret = ops->vidioc_s_output(file, fh, *i); break; } /* --- controls ---------------------------------------------- */ case VIDIOC_QUERYCTRL: { struct v4l2_queryctrl *p = arg; if (!ops->vidioc_queryctrl) break; ret = ops->vidioc_queryctrl(file, fh, p); if (!ret) dbgarg(cmd, "id=0x%x, type=%d, name=%s, min/max=%d/%d, " "step=%d, default=%d, flags=0x%08x\n", p->id, p->type, p->name, 
p->minimum, p->maximum, p->step, p->default_value, p->flags); else dbgarg(cmd, "id=0x%x\n", p->id); break; } case VIDIOC_G_CTRL: { struct v4l2_control *p = arg; if (ops->vidioc_g_ctrl) ret = ops->vidioc_g_ctrl(file, fh, p); else if (ops->vidioc_g_ext_ctrls) { struct v4l2_ext_controls ctrls; struct v4l2_ext_control ctrl; ctrls.ctrl_class = V4L2_CTRL_ID2CLASS(p->id); ctrls.count = 1; ctrls.controls = &ctrl; ctrl.id = p->id; ctrl.value = p->value; if (check_ext_ctrls(&ctrls, 1)) { ret = ops->vidioc_g_ext_ctrls(file, fh, &ctrls); if (ret == 0) p->value = ctrl.value; } } else break; if (!ret) dbgarg(cmd, "id=0x%x, value=%d\n", p->id, p->value); else dbgarg(cmd, "id=0x%x\n", p->id); break; } case VIDIOC_S_CTRL: { struct v4l2_control *p = arg; struct v4l2_ext_controls ctrls; struct v4l2_ext_control ctrl; if (!ops->vidioc_s_ctrl && !ops->vidioc_s_ext_ctrls) break; dbgarg(cmd, "id=0x%x, value=%d\n", p->id, p->value); if (ops->vidioc_s_ctrl) { ret = ops->vidioc_s_ctrl(file, fh, p); break; } if (!ops->vidioc_s_ext_ctrls) break; ctrls.ctrl_class = V4L2_CTRL_ID2CLASS(p->id); ctrls.count = 1; ctrls.controls = &ctrl; ctrl.id = p->id; ctrl.value = p->value; if (check_ext_ctrls(&ctrls, 1)) ret = ops->vidioc_s_ext_ctrls(file, fh, &ctrls); break; } case VIDIOC_G_EXT_CTRLS: { struct v4l2_ext_controls *p = arg; p->error_idx = p->count; if (!ops->vidioc_g_ext_ctrls) break; if (check_ext_ctrls(p, 0)) ret = ops->vidioc_g_ext_ctrls(file, fh, p); v4l_print_ext_ctrls(cmd, vfd, p, !ret); break; } case VIDIOC_S_EXT_CTRLS: { struct v4l2_ext_controls *p = arg; p->error_idx = p->count; if (!ops->vidioc_s_ext_ctrls) break; v4l_print_ext_ctrls(cmd, vfd, p, 1); if (check_ext_ctrls(p, 0)) ret = ops->vidioc_s_ext_ctrls(file, fh, p); break; } case VIDIOC_TRY_EXT_CTRLS: { struct v4l2_ext_controls *p = arg; p->error_idx = p->count; if (!ops->vidioc_try_ext_ctrls) break; v4l_print_ext_ctrls(cmd, vfd, p, 1); if (check_ext_ctrls(p, 0)) ret = ops->vidioc_try_ext_ctrls(file, fh, p); break; } case VIDIOC_QUERYMENU: { struct v4l2_querymenu *p = arg; if (!ops->vidioc_querymenu) break; ret = ops->vidioc_querymenu(file, fh, p); if (!ret) dbgarg(cmd, "id=0x%x, index=%d, name=%s\n", p->id, p->index, p->name); else dbgarg(cmd, "id=0x%x, index=%d\n", p->id, p->index); break; } /* --- audio ---------------------------------------------- */ case VIDIOC_ENUMAUDIO: { struct v4l2_audio *p = arg; if (!ops->vidioc_enumaudio) break; ret = ops->vidioc_enumaudio(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, capability=0x%x, " "mode=0x%x\n", p->index, p->name, p->capability, p->mode); else dbgarg(cmd, "index=%d\n", p->index); break; } case VIDIOC_G_AUDIO: { struct v4l2_audio *p = arg; if (!ops->vidioc_g_audio) break; ret = ops->vidioc_g_audio(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, capability=0x%x, " "mode=0x%x\n", p->index, p->name, p->capability, p->mode); else dbgarg(cmd, "index=%d\n", p->index); break; } case VIDIOC_S_AUDIO: { struct v4l2_audio *p = arg; if (!ops->vidioc_s_audio) break; dbgarg(cmd, "index=%d, name=%s, capability=0x%x, " "mode=0x%x\n", p->index, p->name, p->capability, p->mode); ret = ops->vidioc_s_audio(file, fh, p); break; } case VIDIOC_ENUMAUDOUT: { struct v4l2_audioout *p = arg; if (!ops->vidioc_enumaudout) break; dbgarg(cmd, "Enum for index=%d\n", p->index); ret = ops->vidioc_enumaudout(file, fh, p); if (!ret) dbgarg2("index=%d, name=%s, capability=%d, " "mode=%d\n", p->index, p->name, p->capability, p->mode); break; } case VIDIOC_G_AUDOUT: { struct v4l2_audioout *p = arg; if (!ops->vidioc_g_audout) 
break; ret = ops->vidioc_g_audout(file, fh, p); if (!ret) dbgarg2("index=%d, name=%s, capability=%d, " "mode=%d\n", p->index, p->name, p->capability, p->mode); break; } case VIDIOC_S_AUDOUT: { struct v4l2_audioout *p = arg; if (!ops->vidioc_s_audout) break; dbgarg(cmd, "index=%d, name=%s, capability=%d, " "mode=%d\n", p->index, p->name, p->capability, p->mode); ret = ops->vidioc_s_audout(file, fh, p); break; } case VIDIOC_G_MODULATOR: { struct v4l2_modulator *p = arg; if (!ops->vidioc_g_modulator) break; ret = ops->vidioc_g_modulator(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, " "capability=%d, rangelow=%d," " rangehigh=%d, txsubchans=%d\n", p->index, p->name, p->capability, p->rangelow, p->rangehigh, p->txsubchans); break; } case VIDIOC_S_MODULATOR: { struct v4l2_modulator *p = arg; if (!ops->vidioc_s_modulator) break; dbgarg(cmd, "index=%d, name=%s, capability=%d, " "rangelow=%d, rangehigh=%d, txsubchans=%d\n", p->index, p->name, p->capability, p->rangelow, p->rangehigh, p->txsubchans); ret = ops->vidioc_s_modulator(file, fh, p); break; } case VIDIOC_G_CROP: { struct v4l2_crop *p = arg; if (!ops->vidioc_g_crop) break; dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names)); ret = ops->vidioc_g_crop(file, fh, p); if (!ret) dbgrect(vfd, "", &p->c); break; } case VIDIOC_S_CROP: { struct v4l2_crop *p = arg; if (!ops->vidioc_s_crop) break; dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names)); dbgrect(vfd, "", &p->c); ret = ops->vidioc_s_crop(file, fh, p); break; } case VIDIOC_CROPCAP: { struct v4l2_cropcap *p = arg; /*FIXME: Should also show v4l2_fract pixelaspect */ if (!ops->vidioc_cropcap) break; dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names)); ret = ops->vidioc_cropcap(file, fh, p); if (!ret) { dbgrect(vfd, "bounds ", &p->bounds); dbgrect(vfd, "defrect ", &p->defrect); } break; } case VIDIOC_G_JPEGCOMP: { struct v4l2_jpegcompression *p = arg; if (!ops->vidioc_g_jpegcomp) break; ret = ops->vidioc_g_jpegcomp(file, fh, p); if (!ret) dbgarg(cmd, "quality=%d, APPn=%d, " "APP_len=%d, COM_len=%d, " "jpeg_markers=%d\n", p->quality, p->APPn, p->APP_len, p->COM_len, p->jpeg_markers); break; } case VIDIOC_S_JPEGCOMP: { struct v4l2_jpegcompression *p = arg; if (!ops->vidioc_g_jpegcomp) break; dbgarg(cmd, "quality=%d, APPn=%d, APP_len=%d, " "COM_len=%d, jpeg_markers=%d\n", p->quality, p->APPn, p->APP_len, p->COM_len, p->jpeg_markers); ret = ops->vidioc_s_jpegcomp(file, fh, p); break; } case VIDIOC_G_ENC_INDEX: { struct v4l2_enc_idx *p = arg; if (!ops->vidioc_g_enc_index) break; ret = ops->vidioc_g_enc_index(file, fh, p); if (!ret) dbgarg(cmd, "entries=%d, entries_cap=%d\n", p->entries, p->entries_cap); break; } case VIDIOC_ENCODER_CMD: { struct v4l2_encoder_cmd *p = arg; if (!ops->vidioc_encoder_cmd) break; ret = ops->vidioc_encoder_cmd(file, fh, p); if (!ret) dbgarg(cmd, "cmd=%d, flags=%x\n", p->cmd, p->flags); break; } case VIDIOC_TRY_ENCODER_CMD: { struct v4l2_encoder_cmd *p = arg; if (!ops->vidioc_try_encoder_cmd) break; ret = ops->vidioc_try_encoder_cmd(file, fh, p); if (!ret) dbgarg(cmd, "cmd=%d, flags=%x\n", p->cmd, p->flags); break; } case VIDIOC_G_PARM: { struct v4l2_streamparm *p = arg; if (ops->vidioc_g_parm) { ret = check_fmt(ops, p->type); if (ret) break; ret = ops->vidioc_g_parm(file, fh, p); } else { v4l2_std_id std = vfd->current_norm; if (p->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; ret = 0; if (ops->vidioc_g_std) ret = ops->vidioc_g_std(file, fh, &std); else if (std == 0) ret = -EINVAL; if (ret == 0) v4l2_video_std_frame_period(std, 
&p->parm.capture.timeperframe); } dbgarg(cmd, "type=%d\n", p->type); break; } case VIDIOC_S_PARM: { struct v4l2_streamparm *p = arg; if (!ops->vidioc_s_parm) break; ret = check_fmt(ops, p->type); if (ret) break; dbgarg(cmd, "type=%d\n", p->type); ret = ops->vidioc_s_parm(file, fh, p); break; } case VIDIOC_G_TUNER: { struct v4l2_tuner *p = arg; if (!ops->vidioc_g_tuner) break; ret = ops->vidioc_g_tuner(file, fh, p); if (!ret) dbgarg(cmd, "index=%d, name=%s, type=%d, " "capability=0x%x, rangelow=%d, " "rangehigh=%d, signal=%d, afc=%d, " "rxsubchans=0x%x, audmode=%d\n", p->index, p->name, p->type, p->capability, p->rangelow, p->rangehigh, p->signal, p->afc, p->rxsubchans, p->audmode); break; } case VIDIOC_S_TUNER: { struct v4l2_tuner *p = arg; if (!ops->vidioc_s_tuner) break; dbgarg(cmd, "index=%d, name=%s, type=%d, " "capability=0x%x, rangelow=%d, " "rangehigh=%d, signal=%d, afc=%d, " "rxsubchans=0x%x, audmode=%d\n", p->index, p->name, p->type, p->capability, p->rangelow, p->rangehigh, p->signal, p->afc, p->rxsubchans, p->audmode); ret = ops->vidioc_s_tuner(file, fh, p); break; } case VIDIOC_G_FREQUENCY: { struct v4l2_frequency *p = arg; if (!ops->vidioc_g_frequency) break; ret = ops->vidioc_g_frequency(file, fh, p); if (!ret) dbgarg(cmd, "tuner=%d, type=%d, frequency=%d\n", p->tuner, p->type, p->frequency); break; } case VIDIOC_S_FREQUENCY: { struct v4l2_frequency *p = arg; if (!ops->vidioc_s_frequency) break; dbgarg(cmd, "tuner=%d, type=%d, frequency=%d\n", p->tuner, p->type, p->frequency); ret = ops->vidioc_s_frequency(file, fh, p); break; } case VIDIOC_G_SLICED_VBI_CAP: { struct v4l2_sliced_vbi_cap *p = arg; if (!ops->vidioc_g_sliced_vbi_cap) break; /* Clear up to type, everything after type is zerod already */ memset(p, 0, offsetof(struct v4l2_sliced_vbi_cap, type)); dbgarg(cmd, "type=%s\n", prt_names(p->type, v4l2_type_names)); ret = ops->vidioc_g_sliced_vbi_cap(file, fh, p); if (!ret) dbgarg2("service_set=%d\n", p->service_set); break; } case VIDIOC_LOG_STATUS: { if (!ops->vidioc_log_status) break; ret = ops->vidioc_log_status(file, fh); break; }#ifdef CONFIG_VIDEO_ADV_DEBUG case VIDIOC_DBG_G_REGISTER: { struct v4l2_dbg_register *p = arg; if (!capable(CAP_SYS_ADMIN)) ret = -EPERM; else if (ops->vidioc_g_register) ret = ops->vidioc_g_register(file, fh, p); break; } case VIDIOC_DBG_S_REGISTER: { struct v4l2_dbg_register *p = arg; if (!capable(CAP_SYS_ADMIN)) ret = -EPERM; else if (ops->vidioc_s_register) ret = ops->vidioc_s_register(file, fh, p); break; }#endif case VIDIOC_DBG_G_CHIP_IDENT: { struct v4l2_dbg_chip_ident *p = arg; if (!ops->vidioc_g_chip_ident) break; p->ident = V4L2_IDENT_NONE; p->revision = 0; ret = ops->vidioc_g_chip_ident(file, fh, p); if (!ret) dbgarg(cmd, "chip_ident=%u, revision=0x%x\n", p->ident, p->revision); break; } case VIDIOC_S_HW_FREQ_SEEK: { struct v4l2_hw_freq_seek *p = arg; if (!ops->vidioc_s_hw_freq_seek) break; dbgarg(cmd, "tuner=%d, type=%d, seek_upward=%d, wrap_around=%d\n", p->tuner, p->type, p->seek_upward, p->wrap_around); ret = ops->vidioc_s_hw_freq_seek(file, fh, p); break; } case VIDIOC_ENUM_FRAMESIZES: { struct v4l2_frmsizeenum *p = arg; if (!ops->vidioc_enum_framesizes) break; ret = ops->vidioc_enum_framesizes(file, fh, p); dbgarg(cmd, "index=%d, pixelformat=%c%c%c%c, type=%d ", p->index, (p->pixel_format & 0xff), (p->pixel_format >> 8) & 0xff, (p->pixel_format >> 16) & 0xff, (p->pixel_format >> 24) & 0xff, p->type); switch (p->type) { case V4L2_FRMSIZE_TYPE_DISCRETE: dbgarg3("width = %d, height=%d\n", p->discrete.width, p->discrete.height); 
break; case V4L2_FRMSIZE_TYPE_STEPWISE: dbgarg3("min %dx%d, max %dx%d, step %dx%d\n", p->stepwise.min_width, p->stepwise.min_height, p->stepwise.step_width, p->stepwise.step_height, p->stepwise.max_width, p->stepwise.max_height); break; case V4L2_FRMSIZE_TYPE_CONTINUOUS: dbgarg3("continuous\n"); break; default: dbgarg3("- Unknown type!\n"); } break; } case VIDIOC_ENUM_FRAMEINTERVALS: { struct v4l2_frmivalenum *p = arg; if (!ops->vidioc_enum_frameintervals) break; ret = ops->vidioc_enum_frameintervals(file, fh, p); dbgarg(cmd, "index=%d, pixelformat=%d, width=%d, height=%d, type=%d ", p->index, p->pixel_format, p->width, p->height, p->type); switch (p->type) { case V4L2_FRMIVAL_TYPE_DISCRETE: dbgarg2("fps=%d/%d\n", p->discrete.numerator, p->discrete.denominator); break; case V4L2_FRMIVAL_TYPE_STEPWISE: dbgarg2("min=%d/%d, max=%d/%d, step=%d/%d\n", p->stepwise.min.numerator, p->stepwise.min.denominator, p->stepwise.max.numerator, p->stepwise.max.denominator, p->stepwise.step.numerator, p->stepwise.step.denominator); break; case V4L2_FRMIVAL_TYPE_CONTINUOUS: dbgarg2("continuous\n"); break; default: dbgarg2("- Unknown type!\n"); } break; } default: { if (!ops->vidioc_default) break; ret = ops->vidioc_default(file, fh, cmd, arg); break; } } /* switch */ if (vfd->debug & V4L2_DEBUG_IOCTL_ARG) { if (ret < 0) { v4l_print_ioctl(vfd->name, cmd); printk(KERN_CONT " error %ld\n", ret); } } return ret;}
The ioctl switch has far too many cases to go through one by one; here are a few examples.
/**********************************************************************************************************************************************/
case VIDIOC_STREAMON: //start video capture
{
enum v4l2_buf_type i = *(int *)arg;
if (!ops->vidioc_streamon)
break;
dbgarg(cmd, "type=%s\n", prt_names(i, v4l2_type_names));
ret = ops->vidioc_streamon(file, fh, i);
break;
}
This dispatches to vidioc_streamon() of the struct v4l2_ioctl_ops dev_ioctl_ops defined in gspca.c (shown earlier), which calls gspca_init_transfer(gspca_dev);
This function is the crucial one. It:
1. Calls create_urbs(gspca_dev, ep), which allocates and initializes the URBs roughly like this (a fuller sketch of the isochronous setup follows step 3):
urb = usb_alloc_urb(npkt, GFP_KERNEL);
urb->transfer_buffer = usb_buffer_alloc(gspca_dev->dev, bsize, GFP_KERNEL, &urb->transfer_dma); //buffer the data is read back into
urb->pipe = usb_rcvisocpipe(gspca_dev->dev, ep->desc.bEndpointAddress); //isochronous transfer (usb_rcvbulkpipe for bulk)
urb->interval = ep->desc.bInterval;
urb->complete = isoc_irq; //completion callback invoked when the URB finishes
2. Calls functions in zc3xx.c to set up the camera for streaming:
gspca_dev->sd_desc->isoc_init(gspca_dev);
and to start capturing from the camera:
gspca_dev->sd_desc->start(gspca_dev);
3. Submits the URBs:
usb_submit_urb(gspca_dev->urb[n], GFP_KERNEL);
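create_urbs() also has to fill in the per-packet isochronous descriptors, which the excerpt above omits. A condensed sketch of what allocating one isochronous URB looks like with that era's USB API; npkt and bsize here are example values derived from the endpoint, error handling is left out, and psize ignores the high-bandwidth bits that the real code handles:

/* condensed sketch of isochronous URB setup (error paths omitted) */
struct urb *urb;
int i, npkt = 32;				/* packets per URB (example value) */
int psize = le16_to_cpu(ep->desc.wMaxPacketSize);
int bsize = psize * npkt;

urb = usb_alloc_urb(npkt, GFP_KERNEL);
urb->transfer_buffer = usb_buffer_alloc(gspca_dev->dev, bsize,
					GFP_KERNEL, &urb->transfer_dma);
urb->dev = gspca_dev->dev;
urb->context = gspca_dev;
urb->pipe = usb_rcvisocpipe(gspca_dev->dev, ep->desc.bEndpointAddress);
urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
urb->interval = ep->desc.bInterval;
urb->transfer_buffer_length = bsize;
urb->number_of_packets = npkt;
urb->complete = isoc_irq;			/* completion callback */
for (i = 0; i < npkt; i++) {
	urb->iso_frame_desc[i].length = psize;	/* one slot per (micro)frame */
	urb->iso_frame_desc[i].offset = psize * i;
}
/* later, usb_submit_urb(urb, GFP_KERNEL) hands it to the host controller */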
/**********************************************************************************************************************************************/
case VIDIOC_QBUF: //queue an empty video buffer into the incoming (capture) buffer queue
{
struct v4l2_buffer *p = arg;
if (!ops->vidioc_qbuf)
break;
ret = check_fmt(ops, p->type);
if (ret)
break;
ret = ops->vidioc_qbuf(file, fh, p);
if (!ret)
dbgbuf(cmd, vfd, p);
break;
}
Much the same as above: ops->vidioc_qbuf resolves to vidioc_qbuf() in gspca.c, which executes:
/* put the buffer in the 'queued' queue */
i = gspca_dev->fr_q;
gspca_dev->fr_queue[i] = index;
gspca_dev->fr_q = (i + 1) % gspca_dev->nframes;
This involves these members of gspca_dev (a small stand-alone demo of the index ring follows the list):
__u8 *frbuf; /* buffer for nframes */
struct gspca_frame frame[GSPCA_MAX_FRAMES];
__u32 frsz; /* frame size */
char nframes; /* number of frames */
char fr_i; /* frame being filled */
char fr_q; /* next frame to queue */
char fr_o; /* next frame to dequeue */
signed char fr_queue[GSPCA_MAX_FRAMES]; /* frame queue */
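The three indices just implement a small ring of frame indices. The following stand-alone user-space snippet mimics the fr_q / fr_i / fr_o bookkeeping so the rotation is easier to see; it is illustrative, not driver code, and for simplicity the ring here wraps at nframes rather than GSPCA_MAX_FRAMES:

#include <stdio.h>

#define GSPCA_MAX_FRAMES 16

static signed char fr_queue[GSPCA_MAX_FRAMES];	/* ring of frame indices */
static int fr_q, fr_i, fr_o, nframes = 4;	/* queue / fill / dequeue positions */

static void qbuf(int index)			/* VIDIOC_QBUF side */
{
	fr_queue[fr_q] = index;
	fr_q = (fr_q + 1) % nframes;
}

static int frame_done(void)			/* URB side: a frame was filled */
{
	int index = fr_queue[fr_i];
	fr_i = (fr_i + 1) % nframes;
	return index;
}

static int dqbuf(void)				/* VIDIOC_DQBUF side */
{
	int index = fr_queue[fr_o];
	fr_o = (fr_o + 1) % nframes;
	return index;
}

int main(void)
{
	int i;

	for (i = 0; i < nframes; i++)		/* user space queues all 4 buffers */
		qbuf(i);
	for (i = 0; i < 6; i++) {		/* capture 6 frames */
		int done = frame_done();	/* filled by the URB handler */
		int got = dqbuf();		/* what user space dequeues */
		printf("filled frame %d, dequeued frame %d\n", done, got);
		qbuf(got);			/* user space re-queues it */
	}
	return 0;
}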
If the above still feels hazy, a user-space example makes it clear; for the demo refer to http://blog.csdn.net/Sasoritattoo/article/details/6225486
struct v4l2_buffer tV4L2buf;
memset(&tV4L2buf, 0, sizeof(struct v4l2_buffer));
tV4L2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
tV4L2buf.memory = V4L2_MEMORY_MMAP;
tV4L2buf.index = i; //the number of the kernel-space video buffer to queue into the incoming queue
iret = ioctl(fd_usbcam, VIDIOC_QBUF, &tV4L2buf);
Clear now? The video buffers (and their queue) are implemented in the kernel; user space only specifies which buffer in that queue to use. Once the device starts capturing, the video data lands in the corresponding buffer of the incoming queue.
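One way to convince yourself of this from user space is to ask the kernel about a buffer's state after queueing it: VIDIOC_QUERYBUF returns the flags the driver keeps for that index. The fragment below assumes the fd_usbcam and i variables from the demo above:

struct v4l2_buffer chk;

memset(&chk, 0, sizeof(chk));
chk.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
chk.memory = V4L2_MEMORY_MMAP;
chk.index = i;					/* the index we just queued */
if (ioctl(fd_usbcam, VIDIOC_QUERYBUF, &chk) == 0)
	printf("buffer %d: queued=%d done=%d\n", chk.index,
	       !!(chk.flags & V4L2_BUF_FLAG_QUEUED),
	       !!(chk.flags & V4L2_BUF_FLAG_DONE));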
/**********************************************************************************************************************************************/
case VIDIOC_DQBUF: //take, from the outgoing queue, a buffer that already holds one frame of video data
{
struct v4l2_buffer *p = arg;
if (!ops->vidioc_dqbuf)
break;
ret = check_fmt(ops, p->type);
if (ret)
break;
ret = ops->vidioc_dqbuf(file, fh, p);
if (!ret)
dbgbuf(cmd, vfd, p);
break;
}
/**********************************************************************************************************************************************/
Now look at the mmap implementation, in gspca.c:
static int dev_mmap(struct file *file, struct vm_area_struct *vma)
{
struct gspca_dev *gspca_dev = file->private_data;
struct gspca_frame *frame;
struct page *page;
unsigned long addr, start, size;
int i, ret;
start = vma->vm_start;
size = vma->vm_end - vma->vm_start;
PDEBUG(D_STREAM, "mmap start:%08x size:%d", (int) start, (int) size);
if (mutex_lock_interruptible(&gspca_dev->queue_lock))
return -ERESTARTSYS;
if (!gspca_dev->present) {
ret = -ENODEV;
goto out;
}
if (gspca_dev->capt_file != file) {
ret = -EINVAL;
goto out;
}
frame = NULL;
for (i = 0; i < gspca_dev->nframes; ++i) {
if (gspca_dev->frame[i].v4l2_buf.memory != V4L2_MEMORY_MMAP) {
PDEBUG(D_STREAM, "mmap bad memory type");
break;
}
if ((gspca_dev->frame[i].v4l2_buf.m.offset >> PAGE_SHIFT)
== vma->vm_pgoff) {
frame = &gspca_dev->frame[i];
break;
}
}
if (frame == NULL) {
PDEBUG(D_STREAM, "mmap no frame buffer found");
ret = -EINVAL;
goto out;
}
#ifdef CONFIG_VIDEO_V4L1_COMPAT
/* v4l1 maps all the buffers */
if (i != 0
|| size != frame->v4l2_buf.length * gspca_dev->nframes)
#endif
if (size != frame->v4l2_buf.length) {
PDEBUG(D_STREAM, "mmap bad size");
ret = -EINVAL;
goto out;
}
/*
* - VM_IO marks the area as being a mmaped region for I/O to a
* device. It also prevents the region from being core dumped.
*/
vma->vm_flags |= VM_IO;
addr = (unsigned long) frame->data;
while (size > 0) {
page = vmalloc_to_page((void *) addr);
ret = vm_insert_page(vma, start, page);
if (ret < 0)
goto out;
start += PAGE_SIZE;
addr += PAGE_SIZE;
size -= PAGE_SIZE;
}
vma->vm_ops = &gspca_vm_ops;
vma->vm_private_data = frame;
gspca_vm_open(vma);
ret = 0;
out:
mutex_unlock(&gspca_dev->queue_lock);
return ret;
}
In short, the key lines are:
start = vma->vm_start;
size = vma->vm_end - vma->vm_start;
page = vmalloc_to_page((void *) addr);
vm_insert_page(vma, start, page);
This mmap implementation differs from both approaches described in LDD3.
In LDD3, either the mmap() method itself does almost nothing and a nopage()/fault handler, invoked automatically on each page fault, calls vmalloc_to_page() and returns the page;
or the mmap() method calls remap_pfn_range() once to build the VMA's page tables in one go.
Here, dev_mmap() itself calls vmalloc_to_page() and then vm_insert_page(), which, as its name suggests, fills in the corresponding page-table entries of the VMA.
So although this mmap does not use remap_pfn_range(), it still builds all the page tables at mmap() time in one pass, rather than mapping one page per fault. (For contrast, a hypothetical fault-handler variant is sketched below.)
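For comparison with the LDD3 approach mentioned above, here is a rough sketch of what the same mapping would look like if it were done lazily with a fault handler instead of pre-filling the page tables in mmap(). This is hypothetical code, not anything in gspca:

/* hypothetical lazy-mapping variant, in the LDD3 nopage/fault style */
static int frame_vm_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
{
	struct gspca_frame *frame = vma->vm_private_data;
	unsigned long offset = (vmf->pgoff - vma->vm_pgoff) << PAGE_SHIFT;
	struct page *page;

	if (offset >= frame->v4l2_buf.length)
		return VM_FAULT_SIGBUS;
	page = vmalloc_to_page(frame->data + offset);	/* one page per fault */
	get_page(page);
	vmf->page = page;
	return 0;
}

static const struct vm_operations_struct frame_vm_ops_lazy = {
	.fault = frame_vm_fault,
};

/* the mmap() method would then only do:
 *	vma->vm_ops = &frame_vm_ops_lazy;
 *	vma->vm_private_data = frame;
 * and let each page fault populate the page tables one page at a time,
 * instead of the vm_insert_page() loop used by dev_mmap() above. */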
/**********************************************************************************************************************************************/
In other words, V4L2 creates the device node (a character device) and exposes the device interface to user space; whenever user space touches the device, V4L2 eventually dispatches into one of these two sets of functions in gspca.c:
struct v4l2_file_operations dev_fops
struct v4l2_ioctl_ops dev_ioctl_ops
For example, when the user calls open(), V4L2 invokes the open() of dev_fops in gspca.c.
When the user calls ioctl(), V4L2 invokes the matching handler in dev_ioctl_ops in gspca.c.
The overall flow: user space first issues ioctl(VIDIOC_REQBUFS) to have the driver allocate video buffers (several of them, forming a queue, starting at gspca_dev->frame[0].data); then ioctl(VIDIOC_QUERYBUF) to query information about those buffers; then mmap(); then ioctl(VIDIOC_STREAMON), which initializes the URBs and starts the camera capturing; then ioctl(VIDIOC_QBUF) to tell the driver which buffer of the queue to use; then ioctl(VIDIOC_DQBUF) to get back a buffer the driver has filled with image data, which user space then reads. A self-contained version of this sequence is sketched below.
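Here is that sequence written out as one small self-contained user-space program. The device node (/dev/video0), resolution and pixel format (YUYV 320x240) are assumptions for illustration (a zc3xx camera actually delivers JPEG frames through gspca), and error handling is kept minimal:

/* minimal V4L2 capture sketch: S_FMT -> REQBUFS -> QUERYBUF/mmap -> QBUF ->
 * STREAMON -> DQBUF -> QBUF ... -> STREAMOFF */
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/time.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

#define NBUF 4

int main(void)
{
	struct v4l2_format fmt;
	struct v4l2_requestbuffers rb;
	struct v4l2_buffer buf;
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	void *mem[NBUF];
	size_t len[NBUF];
	int fd, i;

	fd = open("/dev/video0", O_RDWR);
	if (fd < 0) { perror("open"); return 1; }

	memset(&fmt, 0, sizeof(fmt));		/* VIDIOC_S_FMT: data format */
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 320;
	fmt.fmt.pix.height = 240;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	fmt.fmt.pix.field = V4L2_FIELD_ANY;
	ioctl(fd, VIDIOC_S_FMT, &fmt);

	memset(&rb, 0, sizeof(rb));		/* VIDIOC_REQBUFS: kernel buffers */
	rb.count = NBUF;
	rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	rb.memory = V4L2_MEMORY_MMAP;
	ioctl(fd, VIDIOC_REQBUFS, &rb);

	for (i = 0; i < NBUF; i++) {		/* QUERYBUF + mmap + QBUF */
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		ioctl(fd, VIDIOC_QUERYBUF, &buf);
		len[i] = buf.length;
		mem[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
			      MAP_SHARED, fd, buf.m.offset);
		ioctl(fd, VIDIOC_QBUF, &buf);	/* queue every buffer up front */
	}

	ioctl(fd, VIDIOC_STREAMON, &type);	/* URBs are created here */

	for (i = 0; i < 10; i++) {		/* grab 10 frames */
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		ioctl(fd, VIDIOC_DQBUF, &buf);	/* blocks until a frame is done */
		printf("frame %d: buffer %d, %u bytes\n",
		       i, buf.index, buf.bytesused);
		/* image data is at mem[buf.index], buf.bytesused bytes long */
		ioctl(fd, VIDIOC_QBUF, &buf);	/* hand the buffer back */
	}

	ioctl(fd, VIDIOC_STREAMOFF, &type);
	for (i = 0; i < NBUF; i++)
		munmap(mem[i], len[i]);
	close(fd);
	return 0;
}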
The following may contain mistakes:
The gspca_dev structure maintains a queue using these members:
atomic_t fr_q; /* next frame to queue */
atomic_t fr_i; /* frame being filled */
signed char fr_queue[GSPCA_MAX_FRAMES]; /* frame queue */
char nframes; /* number of frames */
u8 fr_o; /* next frame to dequeue */
vidioc_qbuf and vidioc_dqbuf then decide which buffer gets queued or dequeued -- really, which buffer index gets queued or dequeued -- as follows (code slightly simplified):
In vidioc_qbuf:
(user space passes down an index first);
fr_queue[fr_q] = index; //store the index being queued in fr_queue[fr_q]
fr_q = (fr_q + 1) % GSPCA_MAX_FRAMES; //fr_q cycles from 0 to 15; GSPCA_MAX_FRAMES defaults to 16; initially fr_q = 0
frame[index].v4l2_buf.flags |= V4L2_BUF_FLAG_QUEUED; //mark frame index as queued
frame[index].v4l2_buf.flags &= ~V4L2_BUF_FLAG_DONE; //clear frame index's done flag
In vidioc_dqbuf:
index = fr_queue[fr_o]; //take the index to dequeue from fr_queue[fr_o]
fr_o = (fr_o + 1) % GSPCA_MAX_FRAMES; //fr_o cycles from 0 to GSPCA_MAX_FRAMES-1; initially fr_o = 0
frame[index].v4l2_buf.flags &= ~V4L2_BUF_FLAG_DONE; //clear frame index's done flag
memcpy(v4l2_buf, &frame->v4l2_buf, sizeof *v4l2_buf);
The queue is also touched in gspca_frame_add() (in gspca.c), reached from fill_frame() (gspca.c) via pkt_scan() (zc3xx.c) when a URB completes.
If the packet is the last one of an image, then:
index = fr_queue[fr_i];
gspca_dev->image = frame[index]->data;
i = (fr_i + 1) % GSPCA_MAX_FRAMES;
atomic_set(fr_i, i);
Now let's trace the data path by following the sequence of V4L2 calls an application uses to operate the camera.
The ioctls used, in order, inside init_v4l2():
ioctl(vd->fd, VIDIOC_QUERYCAP, &vd->cap); //query the device's capabilities
ioctl(vd->fd, VIDIOC_S_FMT, &vd->fmt); //set the video data format
ioctl(vd->fd, VIDIOC_S_PARM, setfps); //number of frames per second
ioctl(vd->fd, VIDIOC_REQBUFS, &vd->rb); //ask the V4L2 driver to allocate video buffers (several, forming a queue) in kernel space
ioctl(vd->fd, VIDIOC_QUERYBUF, &vd->buf); //query info about the buffers just allocated, in preparation for the following mmap()
mmap();
for(i=0;i<n;i++){
vd->buf.index = i; //select buffer i
ioctl(vd->fd, VIDIOC_QBUF, &vd->buf); //queue buffer i; all n allocated buffers get queued. From then on the driver maintains the queue, so later you don't have to pick an index yourself -- the buffer that was just dequeued is simply queued again, as below
}
while(!stop){
ioctl(vd->fd, VIDIOC_STREAMON, &type); //start the camera grabbing image data
ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf); //dequeue the first buffer in the queue, which already holds data (filled at URB completion; it usually takes several URB completions before a buffer becomes dequeueable)
memcpy(vd->tmpbuffer, vd->mem[vd->buf.index], vd->buf.bytesused); //copy the image data
ioctl(vd->fd, VIDIOC_QBUF, &vd->buf); //re-queue the buffer just dequeued (without this, only the 4 frames queued at init exist, and once they are consumed the queue is empty)
}
Analysis:
When user space has the driver run vidioc_reqbufs, n frames are created in the kernel -- mjpg-streamer's default is 4 (NB_BUFFER=4) -- with start addresses gspca_dev->frame[i].data.
When user space calls mmap() (mjpg-streamer does so 4 times), the kernel's 4 frame[i].data buffers are mapped to vd->mem[i] in user space.
When user space has the driver run vidioc_qbuf, the chosen index is queued, so frame[index] is queued and frame[index].data becomes the destination for image data.
When user space has the driver run vidioc_streamon, the isochronous URBs are created and their completion handler is set to isoc_irq.
When user space has the driver run vidioc_dqbuf, the call blocks inside that function until the USB completion arrives.
In the completion handler, data is copied out of the URB into gspca_dev->image (which at this point points to frame[index].data):
memcpy(gspca_dev->image + gspca_dev->image_len, data, len);
The waiting thread is then woken up and can read the data from vd->mem[index].
Since a single URB transfer generally cannot carry a whole image, several URB packets go into one image (a condensed sketch of this completion path follows).
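The completion side then looks roughly like this: the handler walks the packets of the finished URB and hands each payload to the sub-driver's pkt_scan(), which calls gspca_frame_add() to append the bytes to frame[index].data until the end of an image is seen. This is a condensed sketch, not verbatim gspca code, and the pkt_scan() signature shown matches later gspca versions (older ones also receive the current frame as an argument):

/* condensed sketch of the isochronous completion path */
static void isoc_irq_sketch(struct urb *urb)
{
	struct gspca_dev *gspca_dev = urb->context;
	int i, st;

	for (i = 0; i < urb->number_of_packets; i++) {
		u8 *data = (u8 *) urb->transfer_buffer
				+ urb->iso_frame_desc[i].offset;
		int len = urb->iso_frame_desc[i].actual_length;

		if (urb->iso_frame_desc[i].status != 0 || len == 0)
			continue;		/* skip bad or empty packets */
		/* zc3xx.c's pkt_scan() decides whether this payload starts a
		 * new image, continues one, or ends one; it calls
		 * gspca_frame_add(), which appends the bytes to
		 * frame[index].data and, on the last packet, wakes up the
		 * process sleeping in vidioc_dqbuf() */
		gspca_dev->sd_desc->pkt_scan(gspca_dev, data, len);
	}
	st = usb_submit_urb(urb, GFP_ATOMIC);	/* resubmit for the next data */
	if (st < 0)
		pr_err("usb_submit_urb() ret %d\n", st);
}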
Also, struct v4l2_buffer in the driver does not store image data; it is the interface the driver offers user space, so that mmap(), ioctl(vd->fd, VIDIOC_QBUF, &vd->buf) and ioctl(vd->fd, VIDIOC_DQBUF, &vd->buf) can pass parameters down to the driver -- and, through some of its fields, the driver can return information to user space as well.
struct v4l2_buffer {
__u32 index;
enum v4l2_buf_type type;
__u32 bytesused;
__u32 flags;
enum v4l2_field field;
struct timeval timestamp;
struct v4l2_timecode timecode;
__u32 sequence;
/* memory location */
enum v4l2_memory memory;
union {
__u32 offset;
unsigned long userptr;
} m;
__u32 length;
__u32 input;
__u32 reserved;
};
By the way, why allocate so many frames (4)? Wouldn't one be enough, since only one frame buffer is in use at any moment? Is it to even out memory wear? More likely it is simple multi-buffering: with several buffers the driver can keep filling the next frame while user space is still reading the previous one, so frames are not dropped.
/**********************************************************************************************************************************************/
gspca.c plays the central role: it implements the V4L2 function pointers that handle user interaction (open, mmap, ioctl, ...); through zc3xx.c's sd_probe it gets hold of the usb_device and submits URBs to fetch image data; and it calls functions in zc3xx.c to configure the camera and start capture (also done over USB).
zc3xx.c implements the hardware-setup functions and calls usb_register() so it can be matched during enumeration; its probe then calls gspca_dev_probe() in gspca.c and hands it the usb_device to deal with.