
Recording Short Videos in iOS Development

Invocation
- (void)setupConfigWithVideo {
    NSString *unUserInfo = nil;
    if (TARGET_IPHONE_SIMULATOR) {
        unUserInfo = @"This device does not support this feature";
    }
    AVAuthorizationStatus videoAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (videoAuthStatus == AVAuthorizationStatusRestricted || videoAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"Camera access is restricted";
    }
    AVAuthorizationStatus audioAuthStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    if (audioAuthStatus == AVAuthorizationStatusRestricted || audioAuthStatus == AVAuthorizationStatusDenied) {
        unUserInfo = @"Microphone access is restricted";
    }

    // Note: ALAuthorizationStatus is a separate enum from AVAuthorizationStatus,
    // so compare it against its own constants.
    ALAuthorizationStatus author = [ALAssetsLibrary authorizationStatus];
    if (author == ALAuthorizationStatusRestricted || author == ALAuthorizationStatusDenied) {
        unUserInfo = @"Photo library access is restricted";
    }

    if (unUserInfo != nil) {
        [self alertWithClick:unUserInfo];
    } else {
        [self pushWithTakeVideo];
    }
}
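The checks above only catch the Restricted and Denied states. When the status is still AVAuthorizationStatusNotDetermined, you can prompt the user up front. A minimal sketch, assuming this flow; the method name and alert message are mine, and the hop back to the main queue is needed because the completion handlers may run on an arbitrary queue:

- (void)requestCaptureAccess {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL videoGranted) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL audioGranted) {
            // Hop back to the main queue before touching UI.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (videoGranted && audioGranted) {
                    [self pushWithTakeVideo];
                } else {
                    [self alertWithClick:@"Camera or microphone access was denied"];
                }
            });
        }];
    }];
}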

- (void)alertWithClick:(NSString *)msg {

    UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Notice" message:msg preferredStyle:UIAlertControllerStyleAlert];

    UIAlertAction *action = [UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
        [SVProgressHUD dismiss];
    }];

    [alert addAction:action];
    [self presentViewController:alert animated:YES completion:nil];
}
Presenting the capture screen
- (void)pushWithTakeVideo {
    WBTakeVideoViewController *videoVC = [[WBTakeVideoViewController alloc] init];
    videoVC.delegate = self;
    videoVC.takeDelegate = self;
    [self presentViewController:videoVC animated:YES completion:nil];
}
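For reference, the presenting controller adopts both protocols declared in the header below. A minimal sketch of the three callbacks; the log statements are placeholders for whatever preview or upload logic you need:

// In the presenting view controller (conforms to WBVideoViewControllerDelegate and TakeVideoDelegate)
- (void)videoViewController:(WBTakeVideoViewController *)videoController didRecordVideo:(WBVideoModel *)videoModel {
    NSLog(@"Recorded video at %@", videoModel.videoAbsolutePath);
}

- (void)videoViewControllerDidCancel:(WBTakeVideoViewController *)videoController {
    NSLog(@"Recording cancelled");
}

- (void)takeVideoDelegateAction:(NSString *)videoPath {
    // Called once the file is written (and saved to the photo album when savePhotoAlbum is YES).
    NSLog(@"Video ready at %@", videoPath);
}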
WBTakeVideoViewController.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
@protocol TakeVideoDelegate <NSObject>
- (void)takeVideoDelegateAction:(NSString *)videoPath;
@end


@protocol WBVideoViewControllerDelegate;
@interface WBTakeVideoViewController : UIViewController

// Declared weak (rather than assign) so the controller cannot message a deallocated delegate.
@property (nonatomic, weak) id<WBVideoViewControllerDelegate> delegate;

@property (nonatomic, assign) BOOL savePhotoAlbum;  // whether to save the recording to the photo album

@property (nonatomic, weak) id <TakeVideoDelegate> takeDelegate;

@end


//  Recording delegate
@protocol WBVideoViewControllerDelegate <NSObject>

@required
- (void)videoViewController:(WBTakeVideoViewController *)videoController didRecordVideo:(WBVideoModel *)videoModel;  // recording finished

@optional
- (void)videoViewControllerDidCancel:(WBTakeVideoViewController *)videoController;  // cancelled

@end
WBTakeVideoViewController.m
#import "WBTakeVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <Photos/Photos.h>
#import "WBVideoConfig.h"
#import "WBVideoSupport.h"

@interface WBTakeVideoViewController () <WBControllerBarDelegate,AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate>

@property (nonatomic, strong) UIView *videoView;
@property (nonatomic, strong) UIView *holdView;
@property (nonatomic, strong) UIButton  *holdBut;
@property (nonatomic, strong) UIView *actionView;

@end

static WBTakeVideoViewController *__currentVC = nil;
@implementation WBTakeVideoViewController
{
    dispatch_queue_t _recoding_queue;

    AVCaptureSession *_videoSession;  // coordinates data flow between input and output devices
    AVCaptureVideoPreviewLayer *_videoPreLayer;  // camera preview layer
    AVCaptureDevice *_videoDevice;  // input device (camera, microphone, etc.)

    AVCaptureVideoDataOutput *_videoDataOut;
    AVCaptureAudioDataOutput *_audioDataOut;

    WBControllerBar *_ctrlBar;  // control bar

    AVAssetWriter *_assetWriter;
    AVAssetWriterInputPixelBufferAdaptor *_assetWriterPixelBufferInput;
    AVAssetWriterInput *_assetWriterVideoInput;
    AVAssetWriterInput *_assetWriterAudioInput;
    CMTime _currentSampleTime;
    BOOL _recoding;

    WBFocusView *_focusView;
    UILabel *_statusInfo;
    UILabel *_cancelInfo;

    WBVideoModel *_currentRecord;
    BOOL _currentRecordIsCancel;
}


- (void)viewDidLoad {
    [super viewDidLoad];

    __currentVC = self;

    [self setupSubView];

    self.view.backgroundColor = THENEBLACK;
    [self viewDidComplete];
    [self setupWithVideo];

    _savePhotoAlbum = YES;

    UIView *topView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 64)];
    topView.backgroundColor = THENEBLACK;
    [self.view addSubview:topView];

    UIButton *left = [UIButton buttonWithType:UIButtonTypeCustom];
    left.frame = CGRectMake(10, 20, 40, 30);
    left.titleLabel.font = [UIFont systemFontOfSize:14.0f];  // UIButton has no font property; set the titleLabel's font

    [left setTitle:@"Cancel" forState:UIControlStateNormal];
    [left addTarget:self action:@selector(leftAction) forControlEvents:UIControlEventTouchUpInside];
    [topView addSubview:left];
}

- (void)leftAction {
    [self dismissViewControllerAnimated:YES completion:nil];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
}


- (void)endAnimation {
    [UIView animateWithDuration:0.3 animations:^{
        self.view.backgroundColor = [UIColor clearColor];
        self.actionView.transform = CGAffineTransformTranslate(CGAffineTransformIdentity, 0, ScreenHeight);
    } completion:^(BOOL finished) {
        [self closeView];
    }];
}

- (void)closeView {
    [_videoSession stopRunning];
    [_videoPreLayer removeFromSuperlayer];
    _videoPreLayer = nil;
    [_videoView removeFromSuperview];
    _videoView = nil;

    _videoDevice = nil;
    _videoDataOut = nil;
    _assetWriter = nil;
    _assetWriterAudioInput = nil;
    _assetWriterVideoInput = nil;
    _assetWriterPixelBufferInput = nil;
    __currentVC = nil;
}



// Add subviews: video view, control bar, zoom hint label

- (void)setupSubView {

    /* self.view config */
    self.view.backgroundColor = [UIColor whiteColor];

    if ([self.navigationController respondsToSelector:@selector(interactivePopGestureRecognizer)]) {
        self.navigationController.interactivePopGestureRecognizer.enabled = NO;
    }

    _actionView = [[UIView alloc] initWithFrame:[WBVideoConfig viewFrame]];
    [self.view addSubview:_actionView];
    _actionView.clipsToBounds = YES;

    CGSize videoViewSize = [WBVideoConfig videoViewDefaultSize];

    _videoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, videoViewSize.height)];
    [self.actionView addSubview:_videoView];

    // Control bar below the video
    _ctrlBar = [[WBControllerBar alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, ScreenHeight)];
    [_ctrlBar setupSubViews];
    _ctrlBar.delegate = self;
    [self.view addSubview:_ctrlBar];
    [_ctrlBar mas_makeConstraints:^(MASConstraintMaker *make) {
        make.left.right.mas_equalTo(0);
        make.height.mas_equalTo(ScreenHeight/2);
        make.top.mas_equalTo(self.videoView.mas_bottom);
    }];

    // Single tap focuses; double tap zooms. The single tap waits for the double tap to fail.
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(focusAction:)];
    tapGesture.delaysTouchesBegan = YES;
    [_videoView addGestureRecognizer:tapGesture];

    UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(zoomVideo:)];
    doubleTapGesture.numberOfTapsRequired = 2;
    doubleTapGesture.numberOfTouchesRequired = 1;
    doubleTapGesture.delaysTouchesBegan = YES;
    [_videoView addGestureRecognizer:doubleTapGesture];
    [tapGesture requireGestureRecognizerToFail:doubleTapGesture];

    _focusView = [[WBFocusView alloc] initWithFrame:CGRectMake(0, 0, 60, 60)];
    _focusView.backgroundColor = [UIColor clearColor];

    _statusInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, CGRectGetMaxY(_videoView.frame) - 30, _videoView.frame.size.width, 20)];
    _statusInfo.textAlignment = NSTextAlignmentCenter;
    _statusInfo.font = [UIFont systemFontOfSize:14.0];
    _statusInfo.textColor = [UIColor whiteColor];
    _statusInfo.hidden = YES;
    [self.actionView addSubview:_statusInfo];

    _cancelInfo = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 120, 24)];
    _cancelInfo.center = _videoView.center;
    _cancelInfo.textAlignment = NSTextAlignmentCenter;
    _cancelInfo.hidden = YES;
    [self.actionView addSubview:_cancelInfo];

    [_actionView sendSubviewToBack:_videoView];
}


- (void)setupWithVideo {
    _recoding_queue = dispatch_queue_create("com.wbsmallvideo.queue", DISPATCH_QUEUE_SERIAL);
    NSArray *deviceVideo = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    NSArray *deviceAudio = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo[0] error:nil];
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio[0] error:nil];

    _videoDevice = deviceVideo[0];

    _videoDataOut = [[AVCaptureVideoDataOutput alloc] init];
    _videoDataOut.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    _videoDataOut.alwaysDiscardsLateVideoFrames = YES;
    [_videoDataOut setSampleBufferDelegate:self queue:_recoding_queue];

    _audioDataOut = [[AVCaptureAudioDataOutput alloc] init];
    [_audioDataOut setSampleBufferDelegate:self queue:_recoding_queue];

    _videoSession = [[AVCaptureSession alloc] init];
    if ([_videoSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
        _videoSession.sessionPreset = AVCaptureSessionPreset640x480;
    }

    if ([_videoSession canAddInput:videoInput]) {
        [_videoSession addInput:videoInput];
    }
    if ([_videoSession canAddInput:audioInput]) {
        [_videoSession addInput:audioInput];
    }
    if ([_videoSession canAddOutput:_videoDataOut]) {
        [_videoSession addOutput:_videoDataOut];
    }
    if ([_videoSession canAddOutput:_audioDataOut]) {
        [_videoSession addOutput:_audioDataOut];
    }

    CGFloat viewWidth = CGRectGetWidth(self.videoView.frame);
    _videoPreLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_videoSession];
    _videoPreLayer.frame = CGRectMake(0, -CGRectGetMinY(_videoView.frame), viewWidth, viewWidth * wbVideo_w_h);
    _videoPreLayer.position = CGPointMake(viewWidth/2, CGRectGetHeight(_videoView.frame)/2);
    _videoPreLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [_videoView.layer addSublayer:_videoPreLayer];

    [_videoSession startRunning];
}



//  Zoom hint label
- (void)viewDidComplete {

    UILabel *zoomLab = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 200, 20)];
    zoomLab.center = CGPointMake(self.videoView.center.x, CGRectGetMaxY(self.videoView.frame) - 50);
    zoomLab.font = [UIFont boldSystemFontOfSize:14];
    zoomLab.text = @"Double-tap to zoom";
    zoomLab.textColor = [UIColor whiteColor];
    zoomLab.textAlignment = NSTextAlignmentCenter;
    [_videoView addSubview:zoomLab];
    [_videoView bringSubviewToFront:zoomLab];

    wbdispatch_after(1.6, ^{
        [zoomLab removeFromSuperview];
    });
}

- (void)focusInPointAtVideoView:(CGPoint)point {
    // Convert the view coordinate into the capture device's (0,0)-(1,1) coordinate space.
    CGPoint cameraPoint = [_videoPreLayer captureDevicePointOfInterestForPoint:point];
    _focusView.center = point;
    [_videoView addSubview:_focusView];
    [_videoView bringSubviewToFront:_focusView];
    [_focusView focusing];

    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        if ([_videoDevice isFocusPointOfInterestSupported]) {
            _videoDevice.focusPointOfInterest = cameraPoint;
        }
        if ([_videoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            _videoDevice.focusMode = AVCaptureFocusModeAutoFocus;
        }
        if ([_videoDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            _videoDevice.exposureMode = AVCaptureExposureModeAutoExpose;
        }
        if ([_videoDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
            _videoDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
        }
        [_videoDevice unlockForConfiguration];
    }
    if (error) {
        NSLog(@"Focus failed: %@", error);
    }
    wbdispatch_after(1.0, ^{
        [_focusView removeFromSuperview];
    });
}


#pragma mark - Actions --
- (void)focusAction:(UITapGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:_videoView];
    [self focusInPointAtVideoView:point];
}

// Double tap toggles between 1x and 2x zoom.
- (void)zoomVideo:(UITapGestureRecognizer *)gesture {
    NSError *error = nil;
    if ([_videoDevice lockForConfiguration:&error]) {
        CGFloat zoom = (_videoDevice.videoZoomFactor == 2.0) ? 1.0 : 2.0;
        _videoDevice.videoZoomFactor = zoom;
        [_videoDevice unlockForConfiguration];
    }
}

#pragma mark - controllerBarDelegate

- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar {

    _currentRecord = [WBVideoUtil createNewVideo];
    _currentRecordIsCancel = NO;

    NSURL *outURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];
    [self createWriter:outURL];

    _statusInfo.textColor = THEMEGREEN;
    _statusInfo.text = @"↑ Swipe up to cancel";
    _statusInfo.hidden = NO;
    wbdispatch_after(0.5, ^{
        _statusInfo.hidden = YES;
    });

    _recoding = YES;
    NSLog(@"Video recording started");

    UIBarButtonItem *leftItem = [[UIBarButtonItem alloc] initWithTitle:@"Cancel" style:UIBarButtonItemStylePlain target:self action:@selector(leftItemAction)];
    self.navigationItem.leftBarButtonItem = leftItem;  // set on self.navigationItem, not the navigation controller's own navigationItem
}

- (void)leftItemAction {
    [self dismissViewControllerAnimated:YES completion:nil];
}

- (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar {
    _recoding = NO;
    [self saveVideo:^(NSURL *outFileURL) {
        if (_delegate) {
            [_delegate videoViewController:self didRecordVideo:_currentRecord];
            [self endAnimation];
        }
    }];

    NSLog(@"Video recording finished");
}

- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason {
    _currentRecordIsCancel = YES;
    _recoding = NO;
    if (reason == WBRecordCancelReasonTimeShort) {
        [WBVideoConfig showHinInfo:@"Recording too short" inView:_videoView frame:CGRectMake(0, CGRectGetHeight(_videoView.frame)/3*2, CGRectGetWidth(_videoView.frame), 20) timeLong:1.0];
    }
    NSLog(@"Current recording cancelled");
}

- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar {
    if (!_cancelInfo.hidden) {
        return;
    }
    _cancelInfo.text = @"Release to cancel";
    _cancelInfo.hidden = NO;
    wbdispatch_after(0.5, ^{
        _cancelInfo.hidden = YES;
    });
}

- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar {
//    _topSlideView.isRecoding = YES;
//    NSLog(@"Another second of video recorded");
}

- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar {
//    NSLog(@"Recording screen closed");
    if (_delegate && [_delegate respondsToSelector:@selector(videoViewControllerDidCancel:)]) {
        [_delegate videoViewControllerDidCancel:self];
    }
    [self endAnimation];
}


//  Create the asset writer and its video/audio inputs
- (void)createWriter:(NSURL *)assetUrl {
    NSError *error = nil;
    _assetWriter = [AVAssetWriter assetWriterWithURL:assetUrl fileType:AVFileTypeQuickTimeMovie error:&error];
    if (error) {
        NSLog(@"Failed to create AVAssetWriter: %@", [error localizedDescription]);
        return;
    }

    int videoWidth = [WBVideoConfig defualtVideoSize].width;
    int videoHeight = [WBVideoConfig defualtVideoSize].height;

    // Width and height are deliberately swapped here: the sample buffers arrive in
    // landscape orientation, and the 90° transform below rotates the track to portrait.
    NSDictionary *outputSettings = @{
                                     AVVideoCodecKey : AVVideoCodecH264,
                                     AVVideoWidthKey : @(videoHeight),
                                     AVVideoHeightKey : @(videoWidth),
                                     AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
                                     // AVVideoCompressionPropertiesKey : codecSettings,
                                     };
    _assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
    _assetWriterVideoInput.expectsMediaDataInRealTime = YES;  // data arrives in real time
    _assetWriterVideoInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);

    NSDictionary *audioOutputSettings = @{
                                          AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                          AVEncoderBitRateKey : @(64000),
                                          AVSampleRateKey : @(44100),
                                          AVNumberOfChannelsKey : @(1),
                                          };

    _assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _assetWriterAudioInput.expectsMediaDataInRealTime = YES;

    NSDictionary *SPBADictionary = @{
                                     (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                     (__bridge NSString *)kCVPixelBufferWidthKey : @(videoWidth),
                                     (__bridge NSString *)kCVPixelBufferHeightKey : @(videoHeight),
                                     (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : ((__bridge NSNumber *)kCFBooleanTrue)
                                     };
    _assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterVideoInput sourcePixelBufferAttributes:SPBADictionary];
    if ([_assetWriter canAddInput:_assetWriterVideoInput]) {
        [_assetWriter addInput:_assetWriterVideoInput];
    } else {
        NSLog(@"Could not add video input %@ to the asset writer", _assetWriterVideoInput);
    }
    if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
        [_assetWriter addInput:_assetWriterAudioInput];
    } else {
        NSLog(@"Could not add audio input %@ to the asset writer", _assetWriterAudioInput);
    }

    NSLog(@"_assetWriter status = %ld", (long)_assetWriter.status);
}




- (void)saveVideo:(void (^)(NSURL *outFileURL))completion {

    if (_recoding) return;

    if (!_recoding_queue) {
        if (completion) completion(nil);
        return;
    }

    dispatch_async(_recoding_queue, ^{
        NSURL *outputFileURL = [NSURL fileURLWithPath:_currentRecord.videoAbsolutePath];

        [_assetWriter finishWritingWithCompletionHandler:^{

            if (_currentRecordIsCancel) return;

            //  Save a thumbnail for the recording
            [WBVideoUtil saveThumImageWithVideoURL:outputFileURL second:1];

            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(outputFileURL);
                });
            }
            if (_savePhotoAlbum) {
                BOOL ios8Later = [[[UIDevice currentDevice] systemVersion] floatValue] >= 8;
                if (ios8Later) {
                    // iOS 8+: use the Photos framework
                    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
                        [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
                    } completionHandler:^(BOOL success, NSError * _Nullable error) {
                        if (!error && success) {
                            [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                        } else {
                            NSLog(@"Failed to save to the photo album: %@", error);
                        }
                    }];
                } else {
                    // Pre-iOS 8: fall back to AssetsLibrary
                    [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
                        if (!error) {
                            [self.takeDelegate takeVideoDelegateAction:_currentRecord.videoAbsolutePath];
                            NSLog(@"Saved to the photo album.");
                        } else {
                            NSLog(@"Failed to save to the photo album: %@", error);
                        }
                    }];
                }
            }
        }];
    });
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

    if (!_recoding) return;

    @autoreleasepool {
        _currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
        // Start the writer session on the first buffer that arrives after recording begins.
        if (_assetWriter.status != AVAssetWriterStatusWriting) {
            [_assetWriter startWriting];
            [_assetWriter startSessionAtSourceTime:_currentSampleTime];
        }
        if (captureOutput == _videoDataOut) {
            if (_assetWriterPixelBufferInput.assetWriterInput.isReadyForMoreMediaData) {
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                BOOL success = [_assetWriterPixelBufferInput appendPixelBuffer:pixelBuffer withPresentationTime:_currentSampleTime];
                if (!success) {
                    NSLog(@"Failed to append pixel buffer");
                }
            }
        }
        if (captureOutput == _audioDataOut) {
            if (_assetWriterAudioInput.isReadyForMoreMediaData) {
                [_assetWriterAudioInput appendSampleBuffer:sampleBuffer];
            }
        }
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

}

// AVCaptureFileOutputRecordingDelegate callback; only relevant if an AVCaptureMovieFileOutput is added to the session.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) {
        [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (error) {
                    // error
                } else {
                    // success
                }
            });
        }];
    }
    NSLog(@"recordEnd");
}

@end
WBVideoConfig.h
#import <Foundation/Foundation.h>


// 視訊錄製 時長
#define wbRecordTime        10.0

// 視訊的長寬按比例
#define wbVideo_w_h (4.0/3)

// 視訊預設 寬的解析度  高 = kzVideoWidthPX / kzVideo_w_h
#define wbVideoWidthPX  [UIScreen mainScreen].bounds.size.height

//控制條高度 小螢幕時
#define wbControViewHeight  120.0

// 視訊儲存路徑
#define wbVideoDicName      @"wbSmailVideo"

extern void wbdispatch_after(float time, dispatch_block_t block);

@interface WBVideoConfig : NSObject
+ (CGRect)viewFrame;

//  視訊view的尺寸
+ (CGSize)videoViewDefaultSize;
//  預設視訊解析度
+ (CGSize)defualtVideoSize;

+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time;


@end


// Video model
@interface WBVideoModel : NSObject

/// Absolute local path of the full video
@property (nonatomic, copy) NSString *videoAbsolutePath;
/// Path of the thumbnail
@property (nonatomic, copy) NSString *thumAbsolutePath;
// Recording time
//@property (nonatomic, strong) NSDate *recordTime;
@end

// Video recording utilities
@interface WBVideoUtil : NSObject


// Save a thumbnail
// @param videoUrl path of the video
// @param second   which second to take the thumbnail from

+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second;

// Create a new model object
+ (WBVideoModel *)createNewVideo;


// Whether a video already exists
+ (BOOL)existVideo;

// Delete a video
+ (void)deleteVideo:(NSString *)videoPath;

//+ (NSString *)getVideoPath;

@end

WBVideoConfig.m
void wbdispatch_after(float time, dispatch_block_t block) {
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(time * NSEC_PER_SEC)), dispatch_get_main_queue(), block);
}

@implementation WBVideoConfig
+ (CGRect)viewFrame {
    return CGRectMake(0, 0, ScreenWidth, ScreenHeight/2);
}

+ (CGSize)videoViewDefaultSize {
    return CGSizeMake(ScreenWidth, ScreenHeight/2);
}
+ (CGSize)defualtVideoSize {
    return CGSizeMake(wbVideoWidthPX, wbVideoWidthPX/wbVideo_w_h);
}
+ (void)showHinInfo:(NSString *)text inView:(UIView *)superView frame:(CGRect)frame timeLong:(NSTimeInterval)time {
    UILabel *zoomLab = [[UILabel alloc] initWithFrame:frame];
    zoomLab.font = [UIFont boldSystemFontOfSize:15.0];
    zoomLab.text = text;
    zoomLab.textColor = [UIColor whiteColor];
    zoomLab.textAlignment = NSTextAlignmentCenter;
    [superView addSubview:zoomLab];
    [superView bringSubviewToFront:zoomLab];
    wbdispatch_after(1.6, ^{
        [zoomLab removeFromSuperview];
    });
}
@end


@implementation WBVideoModel

+ (instancetype)modelWithPath:(NSString *)videoPath thumPath:(NSString *)thumPath recordTime:(NSDate *)recordTime {
    WBVideoModel *model = [[WBVideoModel alloc] init];
    model.videoAbsolutePath = videoPath;
    model.thumAbsolutePath = thumPath;
    return model;
}
@end


@implementation WBVideoUtil
+ (void)saveThumImageWithVideoURL:(NSURL *)videoUrl second:(int64_t)second {
    NSString *videoPath = [videoUrl.absoluteString stringByReplacingOccurrencesOfString:@"file://" withString:@""];
    NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString:@"JPG"];

    // Local file URL of the video
    NSURL *fileURL = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:fileURL options:nil];

    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES;

    // Grab the frame at `second` seconds (timescale 1, matching the parameter's meaning).
    CMTime time = CMTimeMake(second, 1);

    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];

    if (error) {
        NSLog(@"Failed to generate thumbnail: %@", error);
        return;
    }

    UIImage *shotImage = [[UIImage alloc] initWithCGImage:image];
    NSData *imgData = UIImageJPEGRepresentation(shotImage, 1.0);
    BOOL isok = [imgData writeToFile:thumPath atomically:YES];
    NSLog(@"Thumbnail written: %d", isok);
    CGImageRelease(image);
}

+ (WBVideoModel *)createNewVideo {
    WBVideoModel *model = [[WBVideoModel alloc] init];  // the posted snippet omitted this declaration
    model.videoAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.MOV"];
    model.thumAbsolutePath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.JPG"];
    // Remove any leftover files from a previous recording.
    unlink([model.videoAbsolutePath UTF8String]);
    unlink([model.thumAbsolutePath UTF8String]);
    return model;
}

+ (NSString *)getDocumentSubPath {
    NSString *documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    return [documentPath stringByAppendingPathComponent:wbVideoDicName];
}


+ (void)deleteVideo:(NSString *)videoPath {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSError *error = nil;
    [fileManager removeItemAtPath:videoPath error:&error];
    if (error) {
        NSLog(@"Failed to delete video: %@", error);
    }
    NSString *thumPath = [videoPath stringByReplacingOccurrencesOfString:@"MOV" withString:@"JPG"];
    NSError *error2 = nil;
    [fileManager removeItemAtPath:thumPath error:&error2];
    if (error2) {
        NSLog(@"Failed to delete thumbnail: %@", error2);
    }
}
@end
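The header declares + (BOOL)existVideo;, but the posted implementation omits it. A minimal sketch to add inside WBVideoUtil, assuming it only needs to check the fixed path that createNewVideo writes to:

// Hypothetical implementation; the original source is not shown.
+ (BOOL)existVideo {
    NSString *videoPath = [NSHomeDirectory() stringByAppendingString:@"/Documents/test.MOV"];
    return [[NSFileManager defaultManager] fileExistsAtPath:videoPath];
}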
WBVideoSupport.h
#import <UIKit/UIKit.h>
#import "WBVideoConfig.h"
@class WBVideoModel;

//************* 點選錄製的按鈕 ****************
@interface WBRecordBtn : UILabel
- (instancetype)initWithFrame:(CGRect)frame;
@end

//************* 聚焦的方框 ****************
@interface WBFocusView : UIView
- (void)focusing;
@end

//************* 錄視訊下部的控制條 ****************
typedef NS_ENUM(NSUInteger, WBRecordCancelReason) {
WBRecordCancelReasonDefault,
WBRecordCancelReasonTimeShort,
WBRecordCancelReasonUnknown,
};

@class WBControllerBar;
@protocol WBControllerBarDelegate <NSObject>
@optional

- (void)ctrollVideoDidStart:(WBControllerBar *)controllerBar;

- (void)ctrollVideoDidEnd:(WBControllerBar *)controllerBar;

- (void)ctrollVideoDidCancel:(WBControllerBar *)controllerBar reason:(WBRecordCancelReason)reason;

- (void)ctrollVideoWillCancel:(WBControllerBar *)controllerBar;

- (void)ctrollVideoDidRecordSEC:(WBControllerBar *)controllerBar;

- (void)ctrollVideoDidClose:(WBControllerBar *)controllerBar;

- (void)ctrollVideoOpenVideoList:(WBControllerBar *)controllerBar;

@end

//************* 錄視訊下部的控制條 ****************
@interface WBControllerBar : UIView <UIGestureRecognizerDelegate>

@property (nonatomic, assign) id<WBControllerBarDelegate> delegate;
- (void)setupSubViews;
@end
WBVideoSupport.m
#import "WBVideoSupport.h"
#import "WBVideoConfig.h"
#pragma mark - Custom View --

@implementation WBRecordBtn {
UITapGestureRecognizer *_tapGesture;
}

- (instancetype)initWithFrame:(CGRect)frame{
if (self = [super initWithFrame:frame]) {
    [self setupRoundButton];
    self.layer.cornerRadius = 40.0f;
    self.layer.masksToBounds = YES;
    self.userInteractionEnabled = YES;
}
return self;
}

- (void)setupRoundButton {
self.backgroundColor = [UIColor clearColor];

CGFloat width = self.frame.size.width;
UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:self.bounds cornerRadius:width/2];

CAShapeLayer *trackLayer = [CAShapeLayer layer];
trackLayer.frame = self.bounds;
trackLayer.strokeColor = THEMEGREEN.CGColor;
trackLayer.fillColor = [UIColor clearColor].CGColor;
trackLayer.opacity = 1.0;
trackLayer.lineCap = kCALineCapRound;
trackLayer.lineWidth = 2.0;
trackLayer.path = path.CGPath;
[self.layer addSublayer:trackLayer];

    CATextLayer *textLayer = [CATextLayer layer];
    textLayer.string = @"按住拍";
    textLayer.frame = CGRectMake(0, 0, 120, 30);
    textLayer.position = CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2);
    UIFont *font = [UIFont boldSystemFontOfSize:22];
    CFStringRef fontName = (__bridge CFStringRef)font.fontName;
    CGFontRef fontRef = CGFontCreateWithFontName(fontName);
    textLayer.font = fontRef;
    textLayer.fontSize = font.pointSize;
    CGFontRelease(fontRef);
    textLayer.contentsScale = [UIScreen mainScreen].scale;
    textLayer.foregroundColor = THEMEGREEN.CGColor;
    textLayer.alignmentMode = kCAAlignmentCenter;
    textLayer.wrapped = YES;
    [trackLayer addSublayer:textLayer];
    CAGradientLayer *gradLayer = [CAGradientLayer layer];
gradLayer.frame = self.bounds;
[self.layer addSublayer:gradLayer];
gradLayer.mask = trackLayer;
}
@end

@implementation WBFocusView {
    CGFloat _width;
    CGFloat _height;
}
- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        _width = CGRectGetWidth(frame);
        _height = _width;
    }
    return self;
}

- (void)focusing {
    [UIView animateWithDuration:0.5 animations:^{
        self.transform = CGAffineTransformScale(CGAffineTransformIdentity, 0.8, 0.8);
    } completion:^(BOOL finished) {
        self.transform = CGAffineTransformIdentity;
    }];
}

// Draw the focus square with a short tick at the midpoint of each edge.
- (void)drawRect:(CGRect)rect {
    [super drawRect:rect];
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetStrokeColorWithColor(context, THEMEGREEN.CGColor);
    CGContextSetLineWidth(context, 1.0);

    CGFloat len = 4;

    CGContextAddRect(context, self.bounds);

    CGContextMoveToPoint(context, 0, _height/2);
    CGContextAddLineToPoint(context, len, _height/2);
    CGContextMoveToPoint(context, _width/2, _height);
    CGContextAddLineToPoint(context, _width/2, _height - len);
    CGContextMoveToPoint(context, _width, _height/2);
    CGContextAddLineToPoint(context, _width - len, _height/2);
    CGContextMoveToPoint(context, _width/2, 0);
    CGContextAddLineToPoint(context, _width/2, len);
    CGContextDrawPath(context, kCGPathStroke);
}
@end

//------  separator  ------

@implementation WBControllerBar {
    WBRecordBtn *_startBtn;
    UILongPressGestureRecognizer *_longPress;
    UIView *_progressLine;
    BOOL _touchIsInside;
    BOOL _recording;
    NSTimer *_timer;
    NSTimeInterval _surplusTime;
    BOOL _videoDidEnd;
}

- (void)setupSubViews {
    [self layoutIfNeeded];

    _startBtn = [[WBRecordBtn alloc] initWithFrame:CGRectMake(0, 100, 200, 100)];
    _startBtn.text = @"Hold to record";
    _startBtn.textAlignment = NSTextAlignmentCenter;
    _startBtn.textColor = [UIColor whiteColor];

    CAShapeLayer *solidLine = [CAShapeLayer layer];
    CGMutablePathRef solidPath = CGPathCreateMutable();
    solidLine.lineWidth = 2.0f;
    solidLine.strokeColor = THEMEGREEN.CGColor;
    solidLine.fillColor = [UIColor clearColor].CGColor;
    CGPathAddEllipseInRect(solidPath, nil, CGRectMake(1, 1, 132, 132));
    solidLine.path = solidPath;
    CGPathRelease(solidPath);
    [_startBtn.layer addSublayer:solidLine];

    [self addSubview:_startBtn];
    [_startBtn mas_makeConstraints:^(MASConstraintMaker *make) {
        make.centerX.mas_equalTo(self.mas_centerX);
        make.centerY.mas_equalTo(self.mas_centerY);
        make.height.width.mas_equalTo(135);
    }];

    _longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(longpressAction:)];
    _longPress.minimumPressDuration = 0.01;
    _longPress.delegate = self;
    [self addGestureRecognizer:_longPress];

    _progressLine = [[UIView alloc] initWithFrame:CGRectMake(0, 0, ScreenWidth, 4)];
    _progressLine.backgroundColor = THEMEGREEN;
    _progressLine.hidden = YES;
    [self addSubview:_progressLine];
    _surplusTime = wbRecordTime;
}

- (void)startRecordSet {
    _startBtn.alpha = 1.0;

    _progressLine.frame = CGRectMake(0, 0, self.bounds.size.width, 2);
    _progressLine.backgroundColor = THEMEGREEN;
    _progressLine.hidden = NO;

    _surplusTime = wbRecordTime;
    _recording = YES;

    _videoDidEnd = NO;

    if (_timer == nil) {
        _timer = [NSTimer timerWithTimeInterval:1.0 target:self selector:@selector(recordTimerAction) userInfo:nil repeats:YES];
        [[NSRunLoop currentRunLoop] addTimer:_timer forMode:NSDefaultRunLoopMode];
    }
    [_timer fire];

    [UIView animateWithDuration:0.4 animations:^{
        _startBtn.alpha = 0.0;
        _startBtn.transform = CGAffineTransformScale(CGAffineTransformIdentity, 2.0, 2.0);
    } completion:^(BOOL finished) {
        if (finished) {
            _startBtn.transform = CGAffineTransformIdentity;
        }
    }];
}

- (void)endRecordSet {
    _progressLine.hidden = YES;
    [_timer invalidate];
    _timer = nil;
    _recording = NO;
    _startBtn.alpha = 1;
}

#pragma mark - UIGestureRecognizerDelegate
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    if (gestureRecognizer == _longPress) {
        if (_surplusTime <= 0) return NO;

        // Only begin when the touch lands within roughly one button-width of the button's center.
        CGPoint point = [gestureRecognizer locationInView:self];
        CGPoint startBtnCent = _startBtn.center;

        CGFloat dx = point.x - startBtnCent.x;
        CGFloat dy = point.y - startBtnCent.y;

        CGFloat startWidth = _startBtn.bounds.size.width;
        if ((dx * dx) + (dy * dy) < (startWidth * startWidth)) {
            return YES;
        }
        return NO;
    }
    return YES;
}

#pragma mark - Actions --
- (void)longpressAction:(UILongPressGestureRecognizer *)gesture {
    CGPoint point = [gesture locationInView:self];
    _touchIsInside = point.y >= 0;
    switch (gesture.state) {
        case UIGestureRecognizerStateBegan: {
            [self videoStartAction];
        }
            break;
        case UIGestureRecognizerStateChanged: {
            if (!_touchIsInside) {
                _progressLine.backgroundColor = THEMEGREEN;
                if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoWillCancel:)]) {
                    [_delegate ctrollVideoWillCancel:self];
                }
            } else {
                _progressLine.backgroundColor = THEMEGREEN;
            }
        }
            break;
        case UIGestureRecognizerStateEnded: {
            [self endRecordSet];
            // Cancel if the finger slid off the bar, or treat as too short if under one second was recorded.
            if (!_touchIsInside || wbRecordTime - _surplusTime <= 1) {
                WBRecordCancelReason reason = WBRecordCancelReasonTimeShort;
                if (!_touchIsInside) {
                    reason = WBRecordCancelReasonDefault;
                }
                [self videoCancelAction:reason];
            } else {
                [self videoEndAction];
            }
        }
            break;
        case UIGestureRecognizerStateCancelled:
            break;
        default:
            break;
    }
}

- (void)videoStartAction {
    [self startRecordSet];
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidStart:)]) {
        [_delegate ctrollVideoDidStart:self];
    }
}

- (void)videoCancelAction:(WBRecordCancelReason)reason {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidCancel:reason:)]) {
        [_delegate ctrollVideoDidCancel:self reason:reason];
    }
}

- (void)videoEndAction {

    if (_videoDidEnd) return;

    _videoDidEnd = YES;
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidEnd:)]) {
        [_delegate ctrollVideoDidEnd:self];
    }
}

- (void)videoListAction {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoOpenVideoList:)]) {
        [_delegate ctrollVideoOpenVideoList:self];
    }
}

- (void)videoCloseAction {
    if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidClose:)]) {
        [_delegate ctrollVideoDidClose:self];
    }
}

// Fired once per second: shrink the progress line and count down the remaining time.
- (void)recordTimerAction {
    CGFloat reduceLen = self.bounds.size.width/wbRecordTime;
    CGFloat oldLineLen = _progressLine.frame.size.width;
    CGRect oldFrame = _progressLine.frame;

    [UIView animateWithDuration:1.0 delay:0.0 options:UIViewAnimationOptionCurveLinear animations:^{
        _progressLine.frame = CGRectMake(oldFrame.origin.x, oldFrame.origin.y, oldLineLen - reduceLen, oldFrame.size.height);
        _progressLine.center = CGPointMake(self.bounds.size.width/2, _progressLine.bounds.size.height/2);
    } completion:^(BOOL finished) {
        _surplusTime--;
        if (_recording) {
            if (_delegate && [_delegate respondsToSelector:@selector(ctrollVideoDidRecordSEC:)]) {
                [_delegate ctrollVideoDidRecordSEC:self];
            }
        }
        if (_surplusTime <= 0.0) {
            [self endRecordSet];
            [self videoEndAction];
        }
    }];
}

@end

A few things to watch for if you upload the recorded videos to Polyv (保利威視):

1. If the upload succeeds but the video shows a green screen in the Polyv console, check the resolution of the footage you recorded; this is usually caused by a resolution that is too low.
2. If playback looks blurry, configure the playback quality levels in the Polyv console (typically smooth → SD → HD).
3. If you cannot retrieve the returned mp4 address after uploading, check the admin console to confirm the video was actually uploaded.
4. Sometimes a video URL exists but tapping play shows nothing, because Polyv transcodes videos after upload. You have to handle this yourself: fetch the first frame from the remote URL and check whether the resulting image is nil. If it is nil, do something else (for example, show a "video is transcoding" hint); if not, play the video directly. A sketch of that check follows this list.
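For point 4, a minimal sketch of the first-frame check. The method name is a hypothetical helper of mine; AVAssetImageGenerator accepts a remote URL here, but copyCGImageAtTime: blocks while it fetches data, so call this off the main thread in real code:

- (void)checkTranscodeStateForURL:(NSURL *)remoteURL {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:remoteURL options:nil];
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES;

    // Try to copy the frame at the 1-second mark; this returns NULL if the asset is unreadable.
    CGImageRef cgImage = [gen copyCGImageAtTime:CMTimeMake(1, 1) actualTime:NULL error:NULL];
    UIImage *firstFrame = cgImage ? [[UIImage alloc] initWithCGImage:cgImage] : nil;
    if (cgImage) CGImageRelease(cgImage);

    if (firstFrame == nil) {
        // Still transcoding (or the URL is bad): show a hint instead of playing.
        NSLog(@"The video is still transcoding; show a hint instead of playing.");
    } else {
        NSLog(@"First frame available; safe to start playback.");
    }
}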