Implementing Real-Time Video Chat with Sockets

A video chat works like this: grab frame buffers from the camera, convert each frame to NSData, and send that data over the network; the receiving side converts the NSData back into an image. Both sides continuously send, receive, and render frames, and the result is live video chat.
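
The socket layer itself is not shown here. One common approach is to prefix every frame with its byte length so the receiver can split the TCP stream back into frames; a minimal sketch over NSOutputStream might look like the following (the -sendFrameData:overStream: name and the stream setup are assumptions, not part of the original):

// Length-prefixed framing: a 4-byte big-endian length, then the frame bytes,
// so the receiver knows where each frame ends in the TCP stream.
- (void)sendFrameData:(NSData *)frameData overStream:(NSOutputStream *)stream {
    uint32_t length = CFSwapInt32HostToBig((uint32_t)frameData.length);
    NSMutableData *packet = [NSMutableData dataWithBytes:&length length:sizeof(length)];
    [packet appendData:frameData];

    NSUInteger written = 0;
    while (written < packet.length) {
        NSInteger n = [stream write:(const uint8_t *)packet.bytes + written
                          maxLength:packet.length - written];
        if (n <= 0) break; // stream closed or errored; drop this frame
        written += n;
    }
}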

First, create the video input and output:

    NSError *error = nil;

    // Set up the video input. -getFrontCamera is a helper that returns the
    // front-facing camera (sketched below); the default camera would be
    // [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo].
    AVCaptureDevice *videoDevice = [self getFrontCamera];

    // Create a device input with the device and add it to the session.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (videoInput == nil) {
        NSLog(@"Failed to create video input: %@", error);
        return;
    }

    // Set up the video output. Frames arrive as 32-bit BGRA pixel buffers.
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    _videoOutput.alwaysDiscardsLateVideoFrames = NO;
    _videoOutput.videoSettings =
    [NSDictionary dictionaryWithObject:
     [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];


    // Create the session.
    _capSession = [[AVCaptureSession alloc] init];
    [_capSession addInput:videoInput];
    //[_capSession addInput:audioInput];
    [_capSession addOutput:_videoOutput];
    //[_capSession addOutput:_audioOutput];

    // A low preset keeps frames small enough to push over a socket.
    _capSession.sessionPreset = AVCaptureSessionPresetLow;

    // Deliver sample buffers on a dedicated serial queue.
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    //[_audioOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
    [_capSession startRunning];
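
The -getFrontCamera helper called during setup is not defined in the original; a minimal version, assuming the device-enumeration API of that era, could be:

// Returns the front-facing camera, or nil if the device has none.
- (AVCaptureDevice *)getFrontCamera {
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == AVCaptureDevicePositionFront) {
            return device;
        }
    }
    return nil;
}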

Camera frames are delivered through the AVCaptureVideoDataOutputSampleBufferDelegate callback:

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // NOTE: this wraps the CMSampleBufferRef *pointer* in NSData, which only
    // works when sender and receiver run in the same process (a local demo).
    // For a real socket the pixel bytes themselves must be serialized; see
    // the sketch below.
    NSData *data = [NSData dataWithBytes:&sampleBuffer length:sizeof(sampleBuffer)];
    [self recieveVideoFromData:data];
}
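
For transmission between two devices, the NSData has to carry the actual pixel bytes plus enough metadata to rebuild the frame. A hedged sketch, using a made-up wire format of three uint32 fields (width, height, bytesPerRow) followed by the raw BGRA rows (-dataFromSampleBuffer: is a hypothetical name):

// Hypothetical wire format: 3 x uint32 header (width, height, bytesPerRow),
// then the raw BGRA pixel rows.
- (NSData *)dataFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint32_t header[3] = {
        (uint32_t)CVPixelBufferGetWidth(imageBuffer),
        (uint32_t)CVPixelBufferGetHeight(imageBuffer),
        (uint32_t)CVPixelBufferGetBytesPerRow(imageBuffer)
    };
    NSMutableData *data = [NSMutableData dataWithBytes:header length:sizeof(header)];
    [data appendBytes:CVPixelBufferGetBaseAddress(imageBuffer)
               length:header[1] * header[2]]; // height * bytesPerRow

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return data;
}

Even at AVCaptureSessionPresetLow (192×144 on older iPhones) a raw BGRA frame is roughly 110 KB, so a real app would compress each frame (for example with UIImageJPEGRepresentation) or use a proper video codec before writing it to the socket.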

Once data is received, convert it back into an image:

- (void)recieveVideoFromData:(NSData *)data{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    // Unwrap the CMSampleBufferRef pointer (in-process demo only; see above).
    CMSampleBufferRef sampleBuffer = NULL;
    [data getBytes:&sampleBuffer length:sizeof(sampleBuffer)];

    // Lock the pixel buffer and read its geometry.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Draw the BGRA bytes into a bitmap context and snapshot it as a CGImage.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Rotate to portrait and display on the main thread.
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                     withObject:image waitUntilDone:YES];

    [pool drain];
}
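
The receiving counterpart of that hypothetical wire format decodes the header and draws the raw BGRA bytes straight into a bitmap context, mirroring the conversion above (-imageFromFrameData: is likewise an assumed name):

// Counterpart to -dataFromSampleBuffer:, decoding the hypothetical
// width/height/bytesPerRow + raw BGRA format back into a UIImage.
- (UIImage *)imageFromFrameData:(NSData *)data {
    uint32_t header[3];
    [data getBytes:header length:sizeof(header)];
    const uint8_t *pixels = (const uint8_t *)data.bytes + sizeof(header);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate((void *)pixels,
                                                 header[0], header[1], 8, header[2], colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0
                                   orientation:UIImageOrientationRight];
    CGImageRelease(cgImage);
    return image;
}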