ios – 为什么用AVCaptureSession,AVAssetWriter和AVPlayer口吃录制和重放视频?

ios – 为什么用AVCaptureSession,AVAssetWriter和AVPlayer口吃录制和重放视频?,第1张

概述:我正在尝试创建一个显示相机预览的应用程序,然后在某些情况下开始使用语音输入录制此内容,最后重放录制的影片.

我已经编写了预览/录制/重放的类和管理协调的控制器.

似乎这些功能在独立调用时工作得很好,但是我不能让它们一起工作:当重放视频时,声音会运行,但图像需要大约五秒钟才能显示然后断断续续.

这是我的代码:

预览:

// Builds the capture session: front camera (falling back to the default
// video device), default microphone, a live preview layer, and video/audio
// sample-buffer outputs sharing one serial delegate queue.
// NOTE: this file uses manual retain/release (pre-ARC).
- (void)createSession
{
    _session = [[AVCaptureSession alloc] init];

    // Prefer the front camera; fall back to whatever the default video device is.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device)
        device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error)
        [_session addInput:_cVideoInput];

    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error)
        [_session addInput:_cAudioInput];

    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    // Pixel-format settings come from a bundled plist; assumes VIDEO_SETTINGS
    // points at a valid AVCaptureVideoDataOutput settings file — TODO confirm.
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    // One serial queue serves both delegates, so audio and video callbacks
    // never run concurrently with each other.
    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];
    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);
    // The outputs retain the queue; drop our reference here.
    dispatch_release(queue);

    [_session startRunning];
}

// Tears the session down in (roughly) reverse order of construction:
// stop, detach the preview layer, detach the delegates, release the
// inputs/outputs, then strip and release the session itself.
- (void)deleteSession
{
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    [_audioOutput release];
    _audioOutput = nil;
    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;
    [_cVideoInput release];
    _cVideoInput = nil;

    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
        [_session removeInput:input];
    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}

记录:

// Creates the AVAssetWriter (QuickTime container) with a mono AAC audio
// input and a 640x480 H.264 video input, deleting any previous file at
// the target path first.
// NOTE: this file uses manual retain/release (pre-ARC).
- (void)createWriter
{
    NSString *file = [self file];
    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file]
                                        fileType:AVFileTypeQuickTimeMovie
                                           error:&error] retain];
    if (error)
    {
        [_writer release];
        _writer = nil;
        NSLog(@"%@", error);
        return;
    }

    // Mono AAC @ 44.1 kHz, 64 kbps.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];
    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                       outputSettings:settings] retain];
    // Fed from a live capture source: without this flag the writer input
    // services data lazily, stalls, and drops frames — the likely cause of
    // the stuttering playback described above.
    _wAudioInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:_wAudioInput];

    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];
    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                       outputSettings:settings] retain];
    _wVideoInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:_wVideoInput];
}

// Releases the writer and both of its inputs.
- (void)deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;
    [_wAudioInput release];
    _wAudioInput = nil;
    [_writer release];
    _writer = nil;
}

// Appends one audio sample buffer; buffers arriving while the input is
// busy are silently dropped.
- (void)RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}

// Appends one video sample buffer; buffers arriving while the input is
// busy are silently dropped.
- (void)RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}

播放:

// One-shot KVO callback for the player item's "status" key: the observer
// is removed on the FIRST status change, whatever it is — if the item is
// still AVPlayerItemStatusUnknown at that point, playback will never be
// retried. NOTE(review): consider keeping the observer until the status
// becomes ReadyToPlay or Failed.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context
{
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];
    switch (item.status)
    {
        case AVPlayerItemStatusReadyToPlay:
            [_player seekToTime:kCMTimeZero];
            [_player play];
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(finishPlaying:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}

// End-of-playback cleanup: stop, detach and release the layer and player,
// and unsubscribe from the end-of-item notification (registered by the
// KVO callback above). Scoped to this notification name only, so other
// subscriptions held by self survive.
- (void)finishPlaying:(NSNotification *)notification
{
    [_player pause];
    [_playerLayer removeFromSuperlayer];
    [_playerLayer release];
    _playerLayer = nil;
    [_player release];
    _player = nil;
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVPlayerItemDidPlayToEndTimeNotification
                                                  object:nil];
}

// Starts playback of the movie at `path`, layering an AVPlayerLayer over
// this view and observing the item's status to begin playing when ready.
- (void)play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];
    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    // Rotate 90° about the z-axis and mirror vertically — presumably to
    // undo the front camera's native orientation. NOTE(review): the axis
    // arguments were corrupted in the original source; (0, 0, 1) is the
    // only meaningful rotation axis for a 2D layer — confirm on device.
    _playerLayer.transform =
        CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];
    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}
解决方法:我遇到过一个类似的问题,虽然下面的做法没有完全解决它,但确实帮助很大:
如果您还没有准备再次写入,请尝试让设备稍微休眠,然后重新评估是否可以写入数据,而不是仅丢失示例缓冲区.

// Appends one video sample buffer while recording; if the writer input is
// momentarily busy, retries the same buffer ~50 ms later instead of
// dropping it.
//
// Two fixes over the original answer's performSelector:afterDelay: version:
//  1. performSelector:afterDelay: needs a run loop, which GCD capture-
//     delegate queues do not run — the retry would never fire there.
//     dispatch_after works on any queue.
//  2. The capture pipeline recycles its sample buffers, so the buffer must
//     be CFRetain'd for the deferred call and CFReleased afterwards
//     (a re-deferred call takes its own retain, so the pairs balance).
- (void)RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;

    if (!_wVideoInput.readyForMoreMediaData && _isRecording)
    {
        CFRetain(sampleBuffer);
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.05 * NSEC_PER_SEC)),
                       dispatch_get_main_queue(),
                       ^{
                           [self RecordingVideoWithBuffer:sampleBuffer];
                           CFRelease(sampleBuffer);
                       });
        return;
    }

    [_wVideoInput appendSampleBuffer:sampleBuffer];
}

如果您不使用ARC,直接传 sampleBuffer 即可;但在ARC下需要添加 __bridge 转换.

编辑:我使用performSelector并返回而不是使用NSThread睡眠的while循环,因为它是非阻塞的.阻塞很糟糕.

总结

以上是内存溢出为你收集整理的ios – 为什么用AVCaptureSession,AVAssetWriter和AVPlayer口吃录制和重放视频?全部内容,希望文章能够帮你解决ios – 为什么用AVCaptureSession,AVAssetWriter和AVPlayer口吃录制和重放视频?所遇到的程序开发问题。

如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。

欢迎分享,转载请注明来源:内存溢出

原文地址: http://outofmemory.cn/web/999470.html

(0)
打赏 微信扫一扫 微信扫一扫 支付宝扫一扫 支付宝扫一扫
上一篇 2022-05-21
下一篇 2022-05-21

发表评论

登录后才能评论

评论列表(0条)

保存