- Requirement
Our project uses hybrid development, and shooting short videos on the uni-app side is not really practical. To get a WeChat-style capture experience, we go with a native plugin.
- Approach
Step 1: one AVCaptureSession and one AVCaptureVideoPreviewLayer (consider swapping in AVPreView for compatibility).
Step 2: recording needs both video & audio, so each needs its own AVCaptureDeviceInput, plus the corresponding AVCaptureVideoDataOutput and AVCaptureAudioDataOutput.
Step 3: in the delegate, tell the video and audio outputs apart and write the corresponding CMSampleBufferRef to the video file.
Step 4: writing the file uses an AVAssetWriter; video & audio each need an AVAssetWriterInput, which are added to the AVAssetWriter.
Step 5: CMSampleBufferRefs keep arriving and the AVAssetWriter keeps writing, until recording stops.
On to the code
I won't repeat the Step 1 initialization here; if you're curious, you can look back at my earlier posts.
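For completeness, here is a minimal sketch of what that Step 1 setup usually looks like. Property names such as `previewLayer`, the session preset, and the view it is attached to are assumptions for illustration, not the exact code from the earlier posts:

```objc
// Minimal Step 1 sketch (assumed names); session + preview layer
self.session = [[AVCaptureSession alloc] init];
if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
    self.session.sessionPreset = AVCaptureSessionPresetHigh;
}

// Default video device (back camera); this is also the `device` used in Step 2 below
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

// Preview layer bound to the session, filling the hosting view
self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:self.previewLayer];
```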
Step 2: two AVCaptureDeviceInputs and two data outputs, with the outputs' sample buffer delegates set

```objc
// `device` is the video capture device (the camera obtained in Step 1)
self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
if (error) {
    NSLog(@"Failed to create videoInput from the device, error: %@", error);
    return;
}
// Add the video input to the session
if ([self.session canAddInput:self.videoInput]) {
    [self.session addInput:self.videoInput];
}
[self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.videoOutput]) {
    [self.session addOutput:self.videoOutput];
}

// Audio side
AVCaptureDevice *adevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:adevice error:&error];
if ([self.session canAddInput:self.audioInput]) {
    [self.session addInput:self.audioInput];
}
[self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.audioOutput]) {
    [self.session addOutput:self.audioOutput];
}

// Video output (lazy getter)
- (AVCaptureVideoDataOutput *)videoOutput {
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    }
    return _videoOutput;
}

// Audio output (lazy getter)
- (AVCaptureAudioDataOutput *)audioOutput {
    if (!_audioOutput) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    }
    return _audioOutput;
}
```
Step 3: start the session and handle the CMSampleBufferRefs inside the delegate

```objc
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool {
        // Video frames
        if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]) {
            if (!self.manager.outputVideoFormatDescription) {
                @synchronized(self) {
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputVideoFormatDescription = formatDescription;
                }
            } else {
                @synchronized(self) {
                    if (self.manager.state == StateRecording) {
                        [self.manager appendBuffer:sampleBuffer type:AVMediaTypeVideo];
                    }
                }
            }
        }
        // Audio samples
        if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]) {
            if (!self.manager.outputAudioFormatDescription) {
                @synchronized(self) {
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputAudioFormatDescription = formatDescription;
                }
            }
            @synchronized(self) {
                if (self.manager.state == StateRecording) {
                    [self.manager appendBuffer:sampleBuffer type:AVMediaTypeAudio];
                }
            }
        }
    }
}
```
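The delegate above only starts firing once the session is running; the original snippets don't show that call. A minimal sketch, assuming a `startSession` helper on the same class:

```objc
// Start the capture session off the main thread; startRunning blocks until the session is up.
// The helper name and queue choice are assumptions, not the original code.
- (void)startSession {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        if (!self.session.isRunning) {
            [self.session startRunning];
        }
    });
}
```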
Step 4: the AVAssetWriter and its corresponding inputs

```objc
// Initialize the writer
self.writer = [AVAssetWriter assetWriterWithURL:_videoUrl fileType:AVFileTypeMPEG4 error:nil];

_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
// expectsMediaDataInRealTime must be YES, because the data comes from the capture session in real time
_videoInput.expectsMediaDataInRealTime = YES;

_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:_audioSettings];
_audioInput.expectsMediaDataInRealTime = YES;

if ([_writer canAddInput:_videoInput]) {
    [_writer addInput:_videoInput];
}
if ([_writer canAddInput:_audioInput]) {
    [_writer addInput:_audioInput];
}
```
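The `_videoSettings` and `_audioSettings` dictionaries are not shown above. As the closing notes point out, bit rate and frame rate drive quality and file size; a plausible configuration (every value here is an illustrative assumption, not the project's actual settings) might look like this:

```objc
// Illustrative settings only -- tune bit rate, frame rate and dimensions to your project
NSDictionary *videoCompression = @{
    AVVideoAverageBitRateKey: @(2 * 1000 * 1000),     // ~2 Mbps average bit rate
    AVVideoExpectedSourceFrameRateKey: @(30)          // 30 fps expected from the capture session
};
_videoSettings = @{
    AVVideoCodecKey: AVVideoCodecTypeH264,            // AVVideoCodecH264 on iOS < 11
    AVVideoWidthKey: @(720),
    AVVideoHeightKey: @(1280),
    AVVideoCompressionPropertiesKey: videoCompression
};

_audioSettings = @{
    AVFormatIDKey: @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey: @(1),
    AVSampleRateKey: @(44100),
    AVEncoderBitRateKey: @(64000)
};
```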
Step 5: the CMSampleBufferRefs from Step 3 are written to the video file through the AVAssetWriter

```objc
- (void)appendBuffer:(CMSampleBufferRef)buffer type:(NSString *)mediaType {
    if (buffer == NULL) {
        NSLog(@"empty sampleBuffer");
        return;
    }
    @synchronized (self) {
        if (self.state < StateRecording) {
            NSLog(@"not ready yet");
            return;
        }
    }
    CFRetain(buffer);
    dispatch_async(self.queue, ^{
        @autoreleasepool {
            @synchronized (self) {
                if (self.state > StateFinish) {
                    CFRelease(buffer);
                    return;
                }
            }
            // Start the writer session at the first video frame's timestamp
            if (!self.canWrite && mediaType == AVMediaTypeVideo) {
                [self.writer startWriting];
                [self.writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(buffer)];
                self.canWrite = YES;
            }
            // Progress timer must be scheduled on the main run loop
            if (!self.timer) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    self.timer = [NSTimer scheduledTimerWithTimeInterval:TIMER_INTERVAL
                                                                  target:self
                                                                selector:@selector(updateProgress)
                                                                userInfo:nil
                                                                 repeats:YES];
                    [[NSRunLoop currentRunLoop] addTimer:self.timer forMode:NSDefaultRunLoopMode];
                });
            }
            // Append video data
            if (mediaType == AVMediaTypeVideo) {
                if (self.videoInput.readyForMoreMediaData) {
                    BOOL success = [self.videoInput appendSampleBuffer:buffer];
                    if (!success) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            }
            // Append audio data
            if (mediaType == AVMediaTypeAudio) {
                if (self.audioInput.readyForMoreMediaData) {
                    BOOL success = [self.audioInput appendSampleBuffer:buffer];
                    if (!success) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            }
            CFRelease(buffer);
        }
    });
}
```
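The `stop:` method called above isn't shown in the original either. Ending the recording ultimately has to mark both writer inputs as finished and call `finishWritingWithCompletionHandler:`; a rough sketch, with the state transition and timer handling simplified and assumed:

```objc
// Rough sketch of finishing the recording; state handling is an assumption
- (void)stop:(void (^)(void))completion {
    dispatch_async(self.queue, ^{
        @synchronized (self) {
            self.state = StateFinish;   // assumed transition: stop accepting new buffers
        }
        if (self.writer.status == AVAssetWriterStatusWriting) {
            [self.videoInput markAsFinished];
            [self.audioInput markAsFinished];
            [self.writer finishWritingWithCompletionHandler:^{
                // _videoUrl now points at the finished MP4 file
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) completion();
                });
            }];
        } else {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (completion) completion();
            });
        }
    });
}
```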
Closing notes:
- When configuring the AVAssetWriterInput's video settings, design them around your own needs; the bit rate and frame rate in particular affect the quality and size of the recorded video, so follow your project's requirements (see the illustrative settings sketched under Step 4).
- If the recorded video's orientation is wrong, there are three places you can adjust it (see the sketch after this list):
1. set videoOrientation on the preview layer's connection
2. set videoOrientation on the AVCaptureOutput's connection
3. set a transform on the video AVAssetWriterInput, e.g. a rotation of M_PI/2
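A hedged sketch of those three adjustments; the names follow the code above (`previewLayer` comes from the assumed Step 1 sketch), and portrait orientation with a 90-degree transform is just one common combination:

```objc
// 1. Orientation on the preview layer's connection
self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;

// 2. Orientation on the capture output's connection
AVCaptureConnection *videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
if (videoConnection.isVideoOrientationSupported) {
    videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
}

// 3. Rotate the written video via the writer input's transform, e.g. 90 degrees
_videoInput.transform = CGAffineTransformMakeRotation(M_PI / 2);
```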