1、Open the V4L (Video for Linux) device

int deviceHandle;
const char* deviceName = "/dev/video0";
deviceHandle = open (deviceName, O_RDWR);
if (deviceHandle == -1)
{       // failed to open the device
}


On success, open() returns a file descriptor; on failure, it returns -1.
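The snippets in the steps below assume at least the following headers; as a rough sketch, the V4L1 definitions live in linux/videodev.h on older kernels:

#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev.h>     /* V4L1 API definitions */
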
2、Query the device capabilities (optional)
This step can be skipped, but if the program is intended to run on different machines and devices as a general-purpose tool, it is best to perform it.

struct video_capability capability;
if (ioctl (deviceHandle, VIDIOCGCAP, &capability) != -1)
{       // query was successful
}
else
{       // query failed
}


At this point the video_capability structure has been filled in, and you can check for capture support with:

if ((capability.type & VID_TYPE_CAPTURE) != 0)
{       // device can capture video
}
else
{       // this device cannot capture video, exit
}


The full list of video_capability fields can be found in the V4L API documentation.
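For example, a minimal sketch (field names taken from the V4L1 struct video_capability) that prints a few of those fields:

// print some of the capabilities reported by the driver
printf ("device name : %s\n", capability.name);
printf ("channels    : %d\n", capability.channels);
printf ("max size    : %d x %d\n", capability.maxwidth, capability.maxheight);
printf ("min size    : %d x %d\n", capability.minwidth, capability.minheight);
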
3、Enumerate the available channels of the video source (optional)
If the number of channels is fixed, this step can be skipped. To enumerate the channels you must already have queried the device capabilities in step 2; then continue as follows:

struct video_channel queryChannel;
int i = 0;
while (i < capability.channels)
{
        queryChannel.channel = i;
        if (ioctl (deviceHandle, VIDIOCGCHAN, &queryChannel) != -1)
        {       // ioctl success, queryChannel contains information about this channel
                printf ("%d. %s\n", queryChannel.channel, queryChannel.name);
        }
        else
        {       // ioctl failure
        }
        ++ i;
}
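
Beyond the name, struct video_channel also reports what kind of source the channel is. As a sketch, assuming the V4L1 flag and type constants, the loop body above could additionally print:

// inside the loop above, after a successful VIDIOCGCHAN
if ((queryChannel.flags & VIDEO_VC_TUNER) != 0)
        printf ("   channel %d has a tuner\n", queryChannel.channel);
if (queryChannel.type == VIDEO_TYPE_CAMERA)
        printf ("   channel %d is a camera input\n", queryChannel.channel);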


4、Set the channel properties (optional)
If you do not care which channel the video source comes from, this step can be skipped.

struct video_channel selectedChannel;
selectedChannel.channel = channelNumber;        // channelNumber is chosen by the user
selectedChannel.norm = VIDEO_MODE_NTSC;         // or VIDEO_MODE_PAL / VIDEO_MODE_AUTO
if (ioctl (deviceHandle, VIDIOCSCHAN, &selectedChannel) == -1)
{       // could not set channel
}
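
Since channelNumber is supplied by the user, a hedged sketch of a sanity check against the capability queried in step 2, before issuing VIDIOCSCHAN:

// hypothetical guard before filling selectedChannel
if (channelNumber < 0 || channelNumber >= capability.channels)
{       // the requested channel does not exist on this device
        fprintf (stderr, "channel %d out of range (device has %d)\n",
                 channelNumber, capability.channels);
}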


5、Set the width and height of the captured image (optional)
If the default image size is acceptable, this step can be skipped. Not every device supports scaling, so test the capability from step 2 first; assuming step 2 has been done, continue as follows:

if ((capability.type & VID_TYPE_SCALES) != 0)   // note: in the author's testing this check behaved oddly: both operands were nonzero, yet the AND came out 0
{       // supports the ability to scale captured images
        struct video_window captureWindow;
        captureWindow.x = 0;
        captureWindow.y = 0;
        captureWindow.width = width;
        captureWindow.height = height;
        captureWindow.chromakey = 0;
        captureWindow.flags = 0;
        captureWindow.clips = NULL;
        captureWindow.clipcount = 0;

        if (ioctl (deviceHandle, VIDIOCSWIN, &captureWindow) == -1)
        {       // could not set window values for capture
        }
}
// at this point the desired image size has been requested, but some devices do not support scaling, so the request does not always take effect
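
Since the driver reports its limits in step 2, here is a small sketch of clamping the requested size to the device's range before filling captureWindow:

// clamp the requested size to what the device reports it can do
if (width  > capability.maxwidth)  width  = capability.maxwidth;
if (width  < capability.minwidth)  width  = capability.minwidth;
if (height > capability.maxheight) height = capability.maxheight;
if (height < capability.minheight) height = capability.minheight;
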
6、Get the actual size of the captured image
Because some devices do not support scaling, the request above may not have taken effect, so it is worth querying the actual image size, as follows:

int width;
int height;
struct video_window captureWindow;
if (ioctl (deviceHandle, VIDIOCGWIN, &captureWindow) == -1)
{       // could not obtain specifics of capture window
}
width = captureWindow.width;
height = captureWindow.height;


7、Set the palette and bit depth of the captured image (optional)
First get the current defaults, then change only the fields you want to alter:

// get image properties
struct video_picture imageProperties;
if (ioctl (deviceHandle, VIDIOCGPICT, &imageProperties) != -1)
{       // successfully retrieved the default image properties
        // the following values request 8-bit grayscale
        imageProperties.depth = 8;
        imageProperties.palette = VIDEO_PALETTE_GREY;
        if (ioctl (deviceHandle, VIDIOCSPICT, &imageProperties) == -1)
        {       // failed to set the image properties (the device does not support this change)
        }
}


The depth and palette values correspond as follows:

imageProperties.depth   format        imageProperties.palette
8                       8-bit  GREY   VIDEO_PALETTE_GREY
15                      15-bit RGB    VIDEO_PALETTE_RGB555
16                      16-bit RGB    VIDEO_PALETTE_RGB565
24                      24-bit RGB    VIDEO_PALETTE_RGB24
32                      32-bit RGB    VIDEO_PALETTE_RGB32

8、Get the actual depth and palette of the captured image

int depth;
int palette;
struct video_picture imageProperties;
if (ioctl (deviceHandle, VIDIOCGPICT, &imageProperties) == -1)
{       // failed to retrieve default image properties
}
depth = imageProperties.depth;
palette = imageProperties.palette;

For example, if the program requires 24-bit RGB, it can check for that format as follows:

if ((depth != 24) || (palette != VIDEO_PALETTE_RGB24))
{       // not a format our program supports
}
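
As a small worked example (assuming a packed pixel layout, which the API does not guarantee for every palette), the number of bytes occupied by one captured frame follows from the values queried above:

// bytes per frame for a packed format: width * height * bytes-per-pixel
int bytesPerPixel = (depth + 7) / 8;            // e.g. 24-bit RGB -> 3 bytes
int bytesPerFrame = width * height * bytesPerPixel;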



9、Set up memory mapping to map the hardware video buffers into the process address space
First, obtain the information needed for the mapping:
struct video_mbuf memoryBuffer;
if (ioctl (deviceHandle, VIDIOCGMBUF, &memoryBuffer) == -1)
{       // failed to retrieve information about capture memory space
}
The video_mbuf structure holds the size of the memory-mapped region, the number of frames the capture device buffers, and the offset of each frame within the region.
Second, obtain the start address of the memory-mapped area:
// obtain memory mapped area
char* memoryMap;
memoryMap = (char*)mmap (0, memoryBuffer.size, PROT_READ | PROT_WRITE, MAP_SHARED, deviceHandle, 0);
if (memoryMap == MAP_FAILED)
{       // failed to map the capture memory
}
The start address of the mapping plus the per-frame offsets gives the address of each buffered frame:
Buffered Frame 0 is located at: memoryMap + memoryBuffer.offsets[0]
Buffered Frame 1 is located at: memoryMap + memoryBuffer.offsets[1]
Buffered Frame 2 is located at: memoryMap + memoryBuffer.offsets[2]
etc...
The number of buffered frames is stored in memoryBuffer.frames.
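As a sketch, assuming memoryMap and memoryBuffer are visible at this point (e.g. file scope), a hypothetical helper for this address arithmetic:

// address of buffered frame i (hypothetical convenience helper)
char* FrameAddress (int i)
{
        return memoryMap + memoryBuffer.offsets[i];
}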
Each buffer used during capture is described by a video_mmap structure; allocate and fill one per frame:
// allocate structures
struct video_mmap* mmaps;
mmaps = (struct video_mmap*)(malloc (memoryBuffer.frames * sizeof (struct video_mmap)));
// fill out the fields
int i = 0;
while (i < memoryBuffer.frames)
{
        mmaps[i].frame = i;
        mmaps[i].width = width;
        mmaps[i].height = height;
        mmaps[i].format = palette;
        ++ i;
}
// width, height and palette come from the earlier setup steps

10、Capture frames through the memory-mapped buffers
The following code issues a capture request for every frame except the last:
int i = 0;
while (i < (memoryBuffer.frames-1))
{
        if (ioctl (deviceHandle, VIDIOCMCAPTURE, &mmaps[i]) == -1)
        {       // capture request failed
        }
        ++ i;
}
Keep an index that tracks the buffer currently being captured into:
int bufferIndex;
bufferIndex = memoryBuffer.frames-1;
The index starts at the last frame because it is the only buffer that has not yet been queued for capture; NextFrame() below first queues it, then waits on the oldest pending buffer.
Now write a function that drives the capture loop and returns the address of the frame that has just finished capturing:
char* NextFrame()
{
        // send a request to begin capturing into the currently indexed buffer
        if (ioctl (deviceHandle, VIDIOCMCAPTURE, &mmaps[bufferIndex]) == -1)
        {       // capture request failed
        }
        // move bufferIndex to the next frame
        ++ bufferIndex;
        if (bufferIndex == memoryBuffer.frames)
        {       // bufferIndex is indexing beyond the last buffer
                // set it to index the first buffer
                bufferIndex = 0;
        }
        // wait for the currently indexed frame to complete capture
        if (ioctl (deviceHandle, VIDIOCSYNC, &mmaps[bufferIndex]) == -1)
        {       // sync request failed
        }
        // return the address of the frame data for the current buffer index
        return (memoryMap + memoryBuffer.offsets[bufferIndex]);
}
// each successful call to NextFrame() returns the address of the newly captured frame
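A minimal usage sketch (processFrame is a hypothetical routine standing in for whatever the application does with the pixel data):

// grab and process a fixed number of frames
int n = 0;
while (n < 100)
{
        char* frameData = NextFrame ();                 // address of the latest captured frame
        processFrame (frameData, width, height);        // hypothetical consumer of the raw pixels
        ++ n;
}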
11、Clean up
// free the video_mmap structures
free (mmaps);
// unmap the capture memory
munmap (memoryMap, memoryBuffer.size);

12、Close the video device
close (deviceHandle);
 


(jacky)