// Render a Quartz Composer composition off-screen and append each frame to a
// QuickTime movie via QTKit. NOTE: QTKit's addImage: path encodes H.264 in
// software on the CPU, which is why this cannot keep up in real time — see the
// AVFoundation version below for the hardware-accelerated approach.
qRenderer = [[QCRenderer alloc] initOffScreenWithSize:NSMakeSize(720, 480)
                                           colorSpace:CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)
                                          composition:[QCComposition compositionWithFile:qcPatchPath]];

// Compress appended frames with the H.264 codec (four-char code "avc1").
imageAttrs = [NSDictionary dictionaryWithObjectsAndKeys:@"avc1", QTAddImageCodecType, nil];

// Open the output movie file for writing; check the result instead of
// silently continuing with a nil movie object.
NSError *movieError = nil;
qtMovie = [[QTMovie alloc] initToWritableFile:outputVideoFile error:&movieError];
if (!qtMovie) {
    NSLog(@"Could not create movie file: %@", movieError);
}

long fps = 25;
frameNum = 0;
NSTimeInterval renderingTime = 0;
NSTimeInterval frameInc = (1.0 / fps);
NSTimeInterval myMovieDuration = 70;
NSImage *myImage;

while (renderingTime <= myMovieDuration) {
    if (![qRenderer renderAtTime:renderingTime arguments:NULL])
        NSLog(@"Rendering failed at time %.3fs", renderingTime);

    // -snapshotImage returns an AUTORELEASED image (no "alloc"/"copy"/"new"
    // in the name), so we must not release it ourselves — the original code's
    // explicit [myImage release] here was an over-release.
    myImage = [qRenderer snapshotImage];
    [qtMovie addImage:myImage
          forDuration:QTMakeTimeWithTimeInterval(frameInc)
       withAttributes:imageAttrs];

    frameNum++;
    renderingTime = frameNum * frameInc; // recompute from frame count to avoid drift
}

[qtMovie updateMovieFile];
[qRenderer release];
[qtMovie release];
它可以工作,但我的应用程序无法在我的新MacBook Pro上实时执行此操作,而我知道QuickTime Broadcaster可以在同一台计算机上实时地以H.264编码图像,其质量甚至比我得到的更高.
所以为什么 ?这有什么问题?这是硬件管理问题(多核线程,GPU,……)还是我遗漏了什么?让我先说一下,我是Apple开发领域的新手(2周练习),包括Objective-C,Cocoa,Xcode,QuickTime和Quartz Composer库等.
谢谢你的帮助
解决方法 AVFoundation是一种将Quartz Composer动画渲染为H.264视频流的更有效方法.
// Render the same composition, but encode through AVAssetWriter, which hands
// H.264 compression to the hardware encoder instead of doing it in software.
size_t width = 640;
size_t height = 480;
const char *outputFile = "/tmp/arabesque.mp4";

QCComposition *composition = [QCComposition compositionWithFile:@"/System/Library/Screen Savers/Arabesque.qtz"];
QCRenderer *renderer = [[QCRenderer alloc] initOffScreenWithSize:NSMakeSize(width, height)
                                                      colorSpace:CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)
                                                     composition:composition];

// AVAssetWriter refuses to overwrite an existing file, so remove any old one.
unlink(outputFile);

NSError *writerError = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:@(outputFile)]
                                                       fileType:AVFileTypeMPEG4
                                                          error:&writerError];
if (!videoWriter) {
    NSLog(@"Could not create asset writer: %@", writerError);
}

NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                 AVVideoWidthKey  : @(width),
                                 AVVideoHeightKey : @(height) };
AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                 outputSettings:videoSettings];
[videoWriter addInput:writerInput];
[writerInput release]; // the writer retains its input (MRC code)

// The adaptor lets us feed CVPixelBuffers straight to the encoder, skipping
// any NSImage round-trip.
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                     sourcePixelBufferAttributes:NULL];

int framesPerSecond = 30;
int totalDuration = 30; // seconds
int totalFrameCount = framesPerSecond * totalDuration;

[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

__block long frameNumber = 0;
dispatch_queue_t workQueue = dispatch_queue_create("com.example.work-queue", DISPATCH_QUEUE_SERIAL);

NSLog(@"Starting.");
// The writer input pulls frames from us whenever the encoder is ready.
[writerInput requestMediaDataWhenReadyOnQueue:workQueue usingBlock:^{
    while ([writerInput isReadyForMoreMediaData]) {
        NSTimeInterval frameTime = (NSTimeInterval)frameNumber / framesPerSecond;
        if (![renderer renderAtTime:frameTime arguments:NULL]) {
            NSLog(@"Rendering failed at time %.3fs", frameTime);
            break;
        }

        // "create" in the method name means we own the returned buffer
        // (Core Foundation Create Rule) — hence the CFRelease below.
        CVPixelBufferRef frame = (CVPixelBufferRef)[renderer createSnapshotImageOfType:@"CVPixelBuffer"];
        [pixelBufferAdaptor appendPixelBuffer:frame
                         withPresentationTime:CMTimeMake(frameNumber, framesPerSecond)];
        CFRelease(frame);

        frameNumber++;
        if (frameNumber >= totalFrameCount) {
            [writerInput markAsFinished];
            [videoWriter finishWriting];
            [videoWriter release];
            [renderer release];
            NSLog(@"Rendered %ld frames.", frameNumber);
            break;
        }
    }
}];
在我的测试中,这大约是使用QTKit的发布代码的两倍.最大的改进似乎来自于将H.264编码传递给GPU而不是在软件中执行.从快速浏览一下配置文件看,剩下的瓶颈似乎是组合物本身的渲染,并将渲染后的数据从GPU读回到像素缓冲区.显然,你的作文的复杂性会对此产生一些影响.
可以通过使用QCRenderer提供CVOpenGLBufferRefs快照的能力来进一步优化这一点,CVOpenGLBufferRefs可以将帧的数据保留在GPU上而不是将其读回以将其传递给编码器.虽然我对此并不太了解.
总结以上是内存溢出为你收集整理的cocoa – 为什么我的基于QTKit的图像编码应用程序如此之慢?全部内容,希望文章能够帮你解决cocoa – 为什么我的基于QTKit的图像编码应用程序如此之慢?所遇到的程序开发问题。
如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)