The frameworks used are:

MediaPlayer.framework, QuartzCore.framework, CoreVideo.framework, CoreMedia.framework, AVFoundation.framework
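Linking these frameworks is not enough on its own; the implementation file also needs the matching headers. A minimal sketch of the imports (the MediaPlayer import is only needed if you also play the finished movie back):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import <QuartzCore/QuartzCore.h>
#import <MediaPlayer/MediaPlayer.h>   // only needed for playback of the result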

The code is as follows:

- (void)testCompressionSession
{
    // Source images to be stitched together into the movie.
    NSArray *imageArr = [NSArray arrayWithObjects:(__bridge id)[[UIImage imageNamed:@"114.png"] CGImage],
                         (__bridge id)[[UIImage imageNamed:@"114.png"] CGImage],
                         (__bridge id)[[UIImage imageNamed:@"placeholderImage.png"] CGImage],
                         (__bridge id)[[UIImage imageNamed:@"dfd.png"] CGImage],
                         (__bridge id)[[UIImage imageNamed:@"viewbg.png"] CGImage], nil];

    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    // Delete any leftover output file from a previous run.
    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"I can't add this input");

    [videoWriter addInput:writerInput];

    // Begin the writing session at time zero.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie
    CGImageRef theImage = [[UIImage imageNamed:@"114.png"] CGImage]; // demo image; the loop below uses imageArr instead

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            // Each image is appended 40 times at 20 fps, i.e. 2 seconds per image.
            if (++frame >= imageArr.count * 40)
            {
                [writerInput markAsFinished];
                // finishWriting is synchronous; on iOS 6+ finishWritingWithCompletionHandler: is preferred.
                [videoWriter finishWriting];
                break;
            }

            int idx = frame / 40;

            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:(__bridge CGImageRef)([imageArr objectAtIndex:idx]) size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success: %d", frame);
                CFRelease(buffer);
            }
        }
    }];

    NSLog(@"outside for loop");
}
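MediaPlayer.framework appears in the framework list above but is never used by the writing code itself; presumably it is there so the finished file can be played back. A minimal playback sketch, assuming the code lives in a UIViewController subclass (this helper is not part of the original code):

- (void)playGeneratedMovie
{
    NSString *moviePath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    MPMoviePlayerViewController *playerVC = [[MPMoviePlayerViewController alloc]
                                             initWithContentURL:[NSURL fileURLWithPath:moviePath]];
    // presentMoviePlayerViewControllerAnimated: is a UIViewController category from MediaPlayer.framework.
    [self presentMoviePlayerViewControllerAnimated:playerVC];
}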

 

 

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    // Draw the source image into the pixel buffer's backing memory.
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // The caller is responsible for releasing the returned buffer (see CFRelease in the writer loop).
    return pxbuffer;
}
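One caveat about the bitmap context above: it assumes each row of the pixel buffer is packed at exactly 4 * size.width bytes, but Core Video may pad rows for alignment. A safer variant, shown as a sketch, asks the buffer for its actual stride instead:

size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                             bytesPerRow, rgbColorSpace,
                                             kCGImageAlphaPremultipliedFirst);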