- (void)MergeAndSave_internal {
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderScale = 1.0;

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

    NSLog(@"%@", videoPathArray);

    float time = 0;
    CMTime startTime = kCMTimeZero;

    for (int i = 0; i < videoPathArray.count; i++) {
        AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]]
                                                      options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                                                          forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
        NSError *error = nil;
        BOOL ok = NO;

        AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

        CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
        CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
        CGAffineTransform transform = sourceVideoTrack.preferredTransform;

        videoComposition.renderSize = sourceVideoTrack.naturalSize;
        if (size.width > size.height) {
            [layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
        } else {
            float s = size.width / size.height;
            CGAffineTransform newe = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s, s));
            float x = (size.height - size.width * s) / 2;
            CGAffineTransform newer = CGAffineTransformConcat(newe, CGAffineTransformMakeTranslation(x, 0));
            [layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
        }

        if (i == 0) {
            [compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
        }

        ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration])
                                            ofTrack:sourceVideoTrack
                                             atTime:startTime
                                              error:&error];
        ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration])
                                            ofTrack:sourceAudioTrack
                                             atTime:startTime
                                              error:nil];

        if (!ok) {
            [radialView4 setHidden:YES];
            NSLog(@"Export Failed: %@", [[self.exportSession error] localizedDescription]);
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error"
                                                            message:@"Something Went Wrong :("
                                                           delegate:nil
                                                  cancelButtonTitle:@"Ok"
                                                  otherButtonTitles:nil];
            [alert show];
            [radialView4 setHidden:YES];
            break;
        }

        startTime = CMTimeAdd(startTime, [sourceAsset duration]);
    }

    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    instruction.timeRange = compositionVideoTrack.timeRange;
    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
                               [NSString stringWithFormat:@"RampMergedVideo.mov"]];
    unlink([myPathDocs UTF8String]);
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export Failed: %@", [exporter error]);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Export successfully");
                    break;
                default:
                    break;
            }
            if (exporter.status != AVAssetExportSessionStatusCompleted) {
                NSLog(@"Retry export");
            }
        });
    }];
}
But the video looks broken when it is saved to the device and played back in QuickTime Player. I think the problem is in the CGAffineTransform handling. Can anyone advise?
Here is the broken frame that appears in the middle of the video (screenshot not included here).
Solution: You have not set the videoComposition on the AVAssetExportSession. Try adding exporter.videoComposition = videoComposition;. I haven't tried this, but it should work.
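For reference, a minimal sketch of the export setup with that one line added, assuming composition, videoComposition, and url are built exactly as in the question above:

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    // The missing line: attach the video composition so the layer instructions
    // (transforms, frame duration, render size) are actually applied on export.
    exporter.videoComposition = videoComposition;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // Handle AVAssetExportSessionStatusCompleted / Failed / Cancelled as in the question.
    }];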