- Requirement
The company uses hybrid development, and recording short videos on the uni-app side isn't really practical. To get a WeChat-style recording experience, a native plugin it is.
- Approach
Step 1: one AVCaptureSession and one AVCaptureVideoPreviewLayer (consider swapping in an AVPreView for compatibility)
Step 2: recording needs both video & audio, so each needs its own AVCaptureDeviceInput, plus the matching AVCaptureVideoDataOutput and AVCaptureAudioDataOutput
Step 3: in the delegate, tell video and audio apart by their output, and write the corresponding CMSampleBufferRef into the video file
Step 4: writing the file uses AVAssetWriter; video & audio each need an AVAssetWriterInput, both added to the AVAssetWriter
Step 5: CMSampleBufferRefs keep arriving and the AVAssetWriter keeps writing, until recording stops
- Now for the main course: the code
I'll skip the step 1 initialization here; feel free to look it up in my earlier posts.
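For readers who don't want to dig through those posts, here is a minimal sketch of what that step 1 setup usually looks like (session plus preview layer); the property names such as self.session and self.previewLayer, the preset, and the container view parameter are assumptions, not the original code:

```objc
#import <AVFoundation/AVFoundation.h>

// Minimal sketch of step 1 (assumed property names; not the original implementation).
- (void)setupSessionAndPreviewInView:(UIView *)containerView {
    self.session = [[AVCaptureSession alloc] init];
    if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        self.session.sessionPreset = AVCaptureSessionPresetHigh;
    }

    // Preview layer rendering whatever the session captures (step 1's AVCaptureVideoPreviewLayer).
    self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.previewLayer.frame = containerView.bounds;
    [containerView.layer insertSublayer:self.previewLayer atIndex:0];
}
```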
Step 2: two AVCaptureDeviceInputs and two Outputs, with the Output delegates set
```objc
self.videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
if (error) {
    NSLog(@"Failed to create videoInput for the capture device, error: %@", error);
    return;
}
// Add the device input to the session
if ([self.session canAddInput:self.videoInput]) {
    [self.session addInput:self.videoInput];
}
[self.videoOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.videoOutput]) {
    [self.session addOutput:self.videoOutput];
}

// Audio side
AVCaptureDevice *adevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:adevice error:&error];
if ([self.session canAddInput:self.audioInput]) {
    [self.session addInput:self.audioInput];
}
[self.audioOutput setSampleBufferDelegate:self queue:self.videoQueue];
if ([self.session canAddOutput:self.audioOutput]) {
    [self.session addOutput:self.audioOutput];
}

// Video output (lazy getter)
- (AVCaptureVideoDataOutput *)videoOutput {
    if (!_videoOutput) {
        _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        _videoOutput.alwaysDiscardsLateVideoFrames = YES;
    }
    return _videoOutput;
}

// Audio output (lazy getter)
- (AVCaptureAudioDataOutput *)audioOutput {
    if (!_audioOutput) {
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    }
    return _audioOutput;
}
```
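Both outputs above deliver their sample buffers on self.videoQueue. A quick sketch of how that queue and the `device` passed to the videoInput might be created (the queue label is illustrative, not from the original):

```objc
// Serial queue for the sample-buffer delegate callbacks (label is illustrative).
self.videoQueue = dispatch_queue_create("com.demo.videoCaptureQueue", DISPATCH_QUEUE_SERIAL);

// The `device` used for videoInput above, e.g. the default camera.
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
```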
Step 3: start the Session and work with the CMSampleBufferRef inside the delegate callbacks
```objc
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    @autoreleasepool {
        // Video
        if (connection == [self.videoOutput connectionWithMediaType:AVMediaTypeVideo]) {
            if (!self.manager.outputVideoFormatDescription) {
                @synchronized(self) {
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputVideoFormatDescription = formatDescription;
                }
            } else {
                @synchronized(self) {
                    if (self.manager.state == StateRecording) {
                        [self.manager appendBuffer:sampleBuffer type:AVMediaTypeVideo];
                    }
                }
            }
        }

        // Audio
        if (connection == [self.audioOutput connectionWithMediaType:AVMediaTypeAudio]) {
            if (!self.manager.outputAudioFormatDescription) {
                @synchronized(self) {
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    self.manager.outputAudioFormatDescription = formatDescription;
                }
            }
            @synchronized(self) {
                if (self.manager.state == StateRecording) {
                    [self.manager appendBuffer:sampleBuffer type:AVMediaTypeAudio];
                }
            }
        }
    }
}
```
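The delegate above only fires once the session is actually running. A minimal sketch of kicking it off, reusing self.videoQueue since -startRunning is a blocking call:

```objc
// Start the capture session off the main thread; -startRunning blocks until capture is up.
dispatch_async(self.videoQueue, ^{
    if (!self.session.isRunning) {
        [self.session startRunning];
    }
});
```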
Step 4: the AVAssetWriter and its corresponding Inputs
```objc
// Initialize the writer
self.writer = [AVAssetWriter assetWriterWithURL:_videoUrl fileType:AVFileTypeMPEG4 error:nil];

_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
// expectsMediaDataInRealTime must be YES, since the data comes from the capture session in real time
_videoInput.expectsMediaDataInRealTime = YES;

_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:_audioSettings];
_audioInput.expectsMediaDataInRealTime = YES;

if ([_writer canAddInput:_videoInput]) {
    [_writer addInput:_videoInput];
}
if ([_writer canAddInput:_audioInput]) {
    [_writer addInput:_audioInput];
}
```
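The _videoSettings and _audioSettings dictionaries referenced above aren't shown in the original; here is an illustrative example of what they might contain (every concrete number is an assumption, to be tuned per project as noted in the closing notes):

```objc
// Illustrative output settings -- the concrete numbers are assumptions, not project values.
_videoSettings = @{
    AVVideoCodecKey  : AVVideoCodecTypeH264,
    AVVideoWidthKey  : @(720),
    AVVideoHeightKey : @(1280),
    AVVideoCompressionPropertiesKey : @{
        AVVideoAverageBitRateKey          : @(720 * 1280 * 3),  // bitrate drives file size and quality
        AVVideoExpectedSourceFrameRateKey : @(30),
        AVVideoProfileLevelKey            : AVVideoProfileLevelH264HighAutoLevel
    }
};
_audioSettings = @{
    AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey : @(1),
    AVSampleRateKey       : @(44100),
    AVEncoderBitRateKey   : @(64000)
};
```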
Step 5: feed the CMSampleBufferRef from step 3 into the video file through the AVAssetWriter
```objc
- (void)appendBuffer:(CMSampleBufferRef)buffer type:(NSString *)mediaType {
    if (buffer == NULL) {
        NSLog(@"empty sampleBuffer");
        return;
    }
    @synchronized (self) {
        if (self.state < StateRecording) {
            NSLog(@"not ready yet");
            return;
        }
    }
    CFRetain(buffer);
    dispatch_async(self.queue, ^{
        @autoreleasepool {
            @synchronized (self) {
                if (self.state > StateFinish) {
                    CFRelease(buffer);
                    return;
                }
            }
            // Start the writer session on the first video frame
            if (!self.canWrite && [mediaType isEqualToString:AVMediaTypeVideo]) {
                [self.writer startWriting];
                [self.writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(buffer)];
                self.canWrite = YES;
            }
            // Start the progress timer on the main run loop
            if (!self.timer) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    self.timer = [NSTimer scheduledTimerWithTimeInterval:TIMER_INTERVAL
                                                                  target:self
                                                                selector:@selector(updateProgress)
                                                                userInfo:nil
                                                                 repeats:YES];
                    [[NSRunLoop currentRunLoop] addTimer:self.timer forMode:NSDefaultRunLoopMode];
                });
            }
            // Append video samples
            if ([mediaType isEqualToString:AVMediaTypeVideo]) {
                if (self.videoInput.readyForMoreMediaData) {
                    BOOL success = [self.videoInput appendSampleBuffer:buffer];
                    if (!success) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            }
            // Append audio samples
            if ([mediaType isEqualToString:AVMediaTypeAudio]) {
                if (self.audioInput.readyForMoreMediaData) {
                    BOOL success = [self.audioInput appendSampleBuffer:buffer];
                    if (!success) {
                        @synchronized (self) {
                            [self stop:^{}];
                            [self destroy];
                        }
                    }
                }
            }
            CFRelease(buffer);
        }
    });
}
```
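The stop: and destroy methods called above aren't shown in the original; a minimal sketch of what finishing could look like, assuming the same writer/input property names:

```objc
// Minimal sketch of stopping (assumed property names; not the original stop:/destroy).
- (void)stop:(void (^)(void))completion {
    dispatch_async(self.queue, ^{
        if (self.writer.status == AVAssetWriterStatusWriting) {
            [self.videoInput markAsFinished];
            [self.audioInput markAsFinished];
            [self.writer finishWritingWithCompletionHandler:^{
                // The finished movie now sits at the URL the writer was created with.
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) completion();
                });
            }];
        }
    });
}
```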
- Closing notes:
- When setting the video properties on the AVAssetWriterInput, design them to fit your own needs: the bitrate and frame rate settings affect the quality and size of the recorded video, so go by your project's requirements.
- If the video orientation comes out wrong, there are three places to adjust (see the sketch after this list):
1. set videoOrientation on the preview layer's connection
2. set videoOrientation on the AVCaptureOutput's connection
3. set a transform on the video AVAssetWriterInput, e.g. a rotation of M_PI/2
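A sketch of those three adjustments, assuming the property names used earlier (previewLayer, videoOutput, and the writer's _videoInput):

```objc
// 1. Orientation on the preview layer's connection
self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;

// 2. Orientation on the capture output's connection
AVCaptureConnection *connection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
if (connection.isVideoOrientationSupported) {
    connection.videoOrientation = AVCaptureVideoOrientationPortrait;
}

// 3. Transform on the writer's video input, e.g. a 90-degree rotation
_videoInput.transform = CGAffineTransformMakeRotation(M_PI_2);
```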