Any other ideas for placing the current view's screen between the video files are also welcome.
To split the video file:
NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Affagogato" ofType:@"mp4"]];
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];

for (int i = 0; i < 2; i++) {
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:anAsset
                                                                           presetName:AVAssetExportPresetLowQuality];
    // Find a file name in the temporary directory that does not exist yet.
    NSString *filePath = nil;
    NSUInteger count = 0;
    do {
        filePath = NSTemporaryDirectory();
        NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%i", (int)count] : @"";
        filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output%@.mov", numberString]];
        count++;
    } while ([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

    exportSession.outputURL = [NSURL fileURLWithPath:filePath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;

    CMTimeRange range;
    if (i == 0) {
        // First clip: seconds 0 to 10.
        CMTime start = CMTimeMakeWithSeconds(0.0, 600);
        CMTime duration = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, duration);
    } else {
        // The original post left this branch incomplete; assume the second clip
        // runs from 10 s to the end of the asset.
        CMTime start = CMTimeMakeWithSeconds(10.0, 600);
        range = CMTimeRangeMake(start, CMTimeSubtract(anAsset.duration, start));
    }
    exportSession.timeRange = range;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exportSession Tag:i];
        });
    }];
}
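The completion handler above calls an exportDidFinish:Tag: method that is not shown in the question; the body below is an assumption (as is the videoPathArray property it writes to), but a minimal handler that checks the export status and records the output path could look like this:

// Hypothetical helper, not part of the original question: records each exported
// segment so it can be merged later with AVMutableComposition.
- (void)exportDidFinish:(AVAssetExportSession *)session Tag:(int)tag {
    if (session.status == AVAssetExportSessionStatusCompleted) {
        // videoPathArray is assumed to be an NSMutableArray property on this class.
        [videoPathArray addObject:[session.outputURL path]];
        NSLog(@"Segment %d exported to %@", tag, session.outputURL);
    } else if (session.status == AVAssetExportSessionStatusFailed) {
        NSLog(@"Segment %d failed: %@", tag, session.error);
    }
}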
To create a video from images:
CGRect rect = CGRectMake(0, 0, 320, 480);
view = [[UIView alloc] initWithFrame:rect];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
NSString *path = [documentsDirectory stringByAppendingPathComponent:[@"video2" stringByAppendingString:@".mov"]];
CGSize size = self.view.frame.size;

NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString *)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:480] forKey:(NSString *)kCVPixelBufferHeightKey];

NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                               nil];
AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                      outputSettings:videoSettings] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                      sourcePixelBufferAttributes:nil];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];

// Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;
// Convert the UIImage to a CGImage and wrap it in a pixel buffer.
xPixel = 0;
yPixel = 250;
buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

for (int i = 0; i < 2; i++) {
    if ([writerInput isReadyForMoreMediaData]) {
        //NSLog(@"inside for loop %d", i);
        for (int pframetime = 1; pframetime <= 2; pframetime++) {
            CMTime frameTime = CMTimeMake(pframetime, 25);
            CMTime lastTime = CMTimeMake(i, 1); // i is from the loop above
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);
            if (i == 0)
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
            else
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"2.jpeg"] CGImage]];
            while (![writerInput isReadyForMoreMediaData]) {
                [NSThread sleepForTimeInterval:0.05];
            }
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        }
        if (buffer)
            CVBufferRelease(buffer);
        //[NSThread sleepForTimeInterval:0.1];
    }
}

[writerInput markAsFinished];
[videoWriter finishWriting];
[videoPathArray addObject:path];

// Finish the session:
[videoWriter release];
[writerInput release];
CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
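The writer code above relies on a pixelBufferFromCGImage: helper that is not included in the question. A common implementation, sketched here under the assumption of a fixed 320x480 32ARGB buffer to match the attributes dictionary above, renders the CGImage into a freshly created pixel buffer:

// Assumed helper (not from the original post): renders a CGImage into a
// 32ARGB CVPixelBuffer sized 320x480 to match the writer settings.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], (id)kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 320, 480,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, 320, 480, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    // Draw the source image into the buffer's backing memory.
    CGContextDrawImage(context, CGRectMake(0, 0, 320, 480), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}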
For merging the video files I tried the code below, but it does not work here: there are some blank screens between the videos.
AVMutableComposition *mixComposition = [AVMutableComposition composition];

NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];

NSString *video_inputFilePath1 = [videoPathArray objectAtIndex:1];
NSURL *video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1];
NSString *video_inputFilePath2 = [videoPathArray objectAtIndex:0];
NSURL *video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2];
NSString *video_inputFilePath3 = [videoPathArray objectAtIndex:2];
NSURL *video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3];

NSString *outputFileName = @"outputFile.mov";
NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, outputFileName];
NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

CMTime nextClipStartTime = kCMTimeZero;

AVURLAsset *videoAsset1 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl1 options:nil];
AVURLAsset *videoAsset2 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl2 options:nil];
AVURLAsset *videoAsset3 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl3 options:nil];

CMTimeRange video_timeRange1 = CMTimeRangeMake(kCMTimeZero, videoAsset1.duration);
AVMutableCompositionTrack *a_compositionVideoTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack1 insertTimeRange:video_timeRange1
                                  ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                   atTime:nextClipStartTime
                                    error:nil];

CMTimeRange video_timeRange3 = CMTimeRangeMake(nextClipStartTime, videoAsset3.duration);
[a_compositionVideoTrack1 insertTimeRange:video_timeRange3
                                  ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                   atTime:videoAsset1.duration
                                    error:nil];

CMTimeRange video_timeRange2 = CMTimeRangeMake(nextClipStartTime, videoAsset1.duration);
[a_compositionVideoTrack1 insertTimeRange:video_timeRange2
                                  ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                   atTime:videoAsset1.duration
                                    error:nil];

AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                       presetName:AVAssetExportPresetLowQuality];
_assetExport.shouldOptimizeForNetworkUse = YES;
_assetExport.outputFileType = @"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;

Solution: Try watching the session called "Working with Media in AV Foundation" on the Apple developer portal. It shows how to do what you are describing.
https://developer.apple.com/videos/wwdc/2011/
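One likely cause of the blank screens in the merge code above is that the clips are inserted at fixed times that do not line up back to back. A minimal sketch of gap-free concatenation, assuming the three AVURLAssets from the question, keeps a running insertion point and advances it by each clip's duration:

// Sketch only: insert each clip exactly where the previous one ends so no gaps appear.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime cursor = kCMTimeZero;
NSArray *assets = [NSArray arrayWithObjects:videoAsset1, videoAsset3, videoAsset2, nil];
for (AVURLAsset *asset in assets) {
    AVAssetTrack *sourceTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                        ofTrack:sourceTrack
                         atTime:cursor
                          error:nil];
    // Advance the insertion point so the next clip starts where this one ends.
    cursor = CMTimeAdd(cursor, asset.duration);
}
// Export 'composition' with an AVAssetExportSession as in the question.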