
iOS Video Recording Implementation (WeChat-Style)

Project demo
Tip: I suggest working through my demo project alongside this article. If it helps you, please give it a star, thanks~

The goal is a WeChat-style capture button: tap to take a photo, long-press to record a video.
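Before diving into the capture pipeline, here is a minimal sketch of how a single press can drive both behaviours, deciding between photo and video by how long the press lasted. The names captureButton, pressStartDate, handlePress:, startRecording, stopRecording and takePhotoWithImage: are assumptions for illustration, not taken from the demo:

    // e.g. in viewDidLoad: one long-press recognizer handles both tap and hold
    UILongPressGestureRecognizer *press =
        [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePress:)];
    press.minimumPressDuration = 0.0;   // fire on touch-down so a quick tap is also seen
    [self.captureButton addGestureRecognizer:press];

- (void)handlePress:(UILongPressGestureRecognizer *)gesture {
    if (gesture.state == UIGestureRecognizerStateBegan) {
        self.pressStartDate = [NSDate date];
        [self startRecording];                       // start appending sample buffers
    } else if (gesture.state == UIGestureRecognizerStateEnded ||
               gesture.state == UIGestureRecognizerStateCancelled) {
        NSTimeInterval pressed = -[self.pressStartDate timeIntervalSinceNow];
        [self stopRecording];
        if (pressed < 0.3) {
            // Very short press: treat it as a tap and keep a captured frame as the photo.
            [self takePhotoWithImage:self.shootImage];
        }
    }
}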
Import the framework

#import <AVFoundation/AVFoundation.h>

When you are ready to start capturing, create the video and audio inputs and outputs, as shown below:

    NSArray *devicesVideo = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *devicesAudio = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];

    self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:devicesVideo[0] error:nil];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:devicesAudio[0] error:nil];
    self.videoDevice = devicesVideo[0];

    self.videoDataOut = [[AVCaptureVideoDataOutput alloc] init];
    self.videoDataOut.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    self.videoDataOut.alwaysDiscardsLateVideoFrames = YES;
    [self.videoDataOut setSampleBufferDelegate:self queue:self.recodingQueue];

    self.audioDataOut = [[AVCaptureAudioDataOutput alloc] init];
    [self.audioDataOut setSampleBufferDelegate:self queue:self.recodingQueue];

While recording, the live picture is shown through an AVCaptureVideoPreviewLayer instance; it is built on top of an AVCaptureSession to which the inputs and outputs created above are added:

    self.videoSession = [[AVCaptureSession alloc] init];

    if ([self.videoSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        self.videoSession.sessionPreset = AVCaptureSessionPreset1280x720;
    }
    if ([self.videoSession canAddInput:self.videoInput]) {
        [self.videoSession addInput:self.videoInput];
    }
    if ([self.videoSession canAddInput:audioInput]) {
        [self.videoSession addInput:audioInput];
    }
    if ([self.videoSession canAddOutput:self.videoDataOut]) {
        [self.videoSession addOutput:self.videoDataOut];
    }
    if ([self.videoSession canAddOutput:self.audioDataOut]) {
        [self.videoSession addOutput:self.audioDataOut];
    }
    self.videoPreLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.videoSession];
    self.videoPreLayer.frame = self.videoView.bounds;
    self.videoPreLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.videoView.layer addSublayer:self.videoPreLayer];
    [self.videoSession startRunning];
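
One practical note the article does not cover: the preview stays black and the delegate never fires unless the app has camera and microphone permission. A minimal sketch of requesting access up front (along with the usual NSCameraUsageDescription / NSMicrophoneUsageDescription entries in Info.plist); this is an addition, not part of the original demo:

    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (!granted) { NSLog(@"Camera access denied"); }
    }];
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
        if (!granted) { NSLog(@"Microphone access denied"); }
    }];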

After startRunning is called, the live camera picture is already visible on screen and the delegate method starts firing continuously. To actually record, the writer objects have to be created first.
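
The assetUrl mentioned below is just a local file URL that the writer records into; a minimal sketch of how it might be built (the file name here is an assumption):

    // Hypothetical example: record into the temporary directory.
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"record.mp4"];
    NSURL *assetUrl = [NSURL fileURLWithPath:path];
    // AVAssetWriter fails if a file already exists at the URL, so remove any leftover first.
    [[NSFileManager defaultManager] removeItemAtURL:assetUrl error:nil];

With a URL in hand, the writer and its inputs are configured like this: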

    //assetUrl is the local file URL to save to; while recording, the live data is written to this file
    _assetWriter = [AVAssetWriter assetWriterWithURL:assetUrl fileType:AVFileTypeMPEG4 error:nil];
    //Capture comes in landscape by default, so width and height are swapped here and the input is rotated 90 degrees below
    NSDictionary *outputSettings = @{
                                     AVVideoCodecKey : AVVideoCodecH264,
                                     AVVideoWidthKey : @(self.recordH),
                                     AVVideoHeightKey : @(self.recordW),
                                     AVVideoScalingModeKey:AVVideoScalingModeResizeAspectFill,
                                     //                          AVVideoCompressionPropertiesKey:codecSettings
                                     };
    _assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    _assetWriterVideoInput.expectsMediaDataInRealTime = YES;

    // Rotate the writer input 90 degrees so the recorded file comes out in portrait.
    CGFloat angle = M_PI / 2.0;
    _assetWriterVideoInput.transform = CGAffineTransformMakeRotation(angle);


    NSDictionary *audioOutputSettings = @{
                                          AVFormatIDKey:@(kAudioFormatMPEG4AAC),
                                          AVEncoderBitRateKey:@(64000),
                                          AVSampleRateKey:@(44100),
                                          AVNumberOfChannelsKey:@(1),
                                          };

    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _assetWriterAudioInput.expectsMediaDataInRealTime = YES;


    NSDictionary *SPBADictionary = @{
                                     (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                     (__bridge NSString *)kCVPixelBufferWidthKey : @(self.recordW),
                                     (__bridge NSString *)kCVPixelBufferHeightKey  : @(self.recordH),
                                     (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : ((__bridge NSNumber *)kCFBooleanTrue)
                                     };
    _assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterVideoInput sourcePixelBufferAttributes:SPBADictionary];
    if ([_assetWriter canAddInput:_assetWriterVideoInput]) {
        [_assetWriter addInput:_assetWriterVideoInput];
    }else {
        NSLog(@"不能新增視訊writer的input \(assetWriterVideoInput)");
    }
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
        [_assetWriter addInput:_assetWriterAudioInput];
    }else {
        NSLog(@"不能新增視訊writer的input \(assetWriterVideoInput)");
    }

While the camera is in use, the delegate method fires continuously; inside it we can turn the video recording on and off:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    //This grabs one frame of camera data; when the long press turns out to be very short, we use this image as the photo
    UIImage * image = [self imageFromSampleBuffer:sampleBuffer];
    if (image && !self.isGetShootImg) {
        self.shootImage = image;
    }
    if (!_recoding) return;

    @autoreleasepool {
        _currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
        if (_assetWriter.status != AVAssetWriterStatusWriting) {
            [_assetWriter startWriting];
            [_assetWriter startSessionAtSourceTime:_currentSampleTime];
        }
        if (captureOutput == _videoDataOut) {
            if (_assetWriterPixelBufferInput.assetWriterInput.isReadyForMoreMediaData) {
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                BOOL success = [_assetWriterPixelBufferInput appendPixelBuffer:pixelBuffer withPresentationTime:_currentSampleTime];
                if (!success) {
                    NSLog(@"Pixel Buffer沒有append成功");
                }
            }
        }
        if (captureOutput == _audioDataOut) {
            // Mirror the video path: only append when the input can accept more data.
            if (_assetWriterAudioInput.isReadyForMoreMediaData) {
                [_assetWriterAudioInput appendSampleBuffer:sampleBuffer];
            }
        }
    }
}
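
The article stops at appending data; finishing the file is not shown. As a rough sketch (again an assumption, not the demo's code), stopping could clear the recording flag, mark the inputs as finished, and let the writer finalize the MP4 asynchronously:

- (void)stopRecording {
    _recoding = NO;
    if (_assetWriter.status == AVAssetWriterStatusWriting) {
        [_assetWriterVideoInput markAsFinished];
        [_assetWriterAudioInput markAsFinished];
        [_assetWriter finishWritingWithCompletionHandler:^{
            // The file at assetUrl is now complete and can be played back or uploaded.
        }];
    }
}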

Note: for the tap-to-photo feature, I originally tried taking a screenshot when the tap ended, but the area covered by self.videoPreLayer came out as a black background, so I switched to grabbing a frame from the sample buffer instead. If you have a better suggestion, please leave a comment.
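
For reference, the imageFromSampleBuffer: helper used in the delegate method is not shown above. A minimal sketch of one common way to implement it for kCVPixelFormatType_32BGRA buffers (the demo may do this differently):

- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!pixelBuffer) return nil;
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *context = [CIContext contextWithOptions:nil];
    CGRect rect = CGRectMake(0, 0,
                             CVPixelBufferGetWidth(pixelBuffer),
                             CVPixelBufferGetHeight(pixelBuffer));
    CGImageRef cgImage = [context createCGImage:ciImage fromRect:rect];
    if (!cgImage) return nil;
    // Rotate to portrait so the photo matches the recorded video's orientation.
    UIImage *image = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(cgImage);
    return image;
}

Since this runs for every frame, the CIContext should really be created once and reused rather than rebuilt on each call.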