//
//  XHCaptureHelper.m
//  MessageDisplayExample
//
//  Created by 曾宪华 on 14-5-25.
//  Copyright (c) 2014 Hi, I'm 曾宪华 (@xhzengAIB). Formerly a senior mobile
//  engineer at YY Inc. and co-founder of the 拍立秀 app; fond of simple,
//  rational things. QQ: 543413507  Homepage: http://zengxianhua.com
//  All rights reserved.
//

#import "XHCaptureHelper.h"

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

// Adopt AVCaptureVideoDataOutputSampleBufferDelegate here so that
// -setSampleBufferDelegate:queue: below compiles without a warning.
@interface XHCaptureHelper () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (nonatomic, copy) DidOutputSampleBufferBlock didOutputSampleBuffer;
@property (nonatomic, strong) dispatch_queue_t captureSessionQueue;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDeviceInput *captureInput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *captureOutput;

@end

@implementation XHCaptureHelper

- (void)setDidOutputSampleBufferHandle:(DidOutputSampleBufferBlock)didOutputSampleBuffer {
    self.didOutputSampleBuffer = didOutputSampleBuffer;
}

- (void)showCaptureOnView:(UIView *)preview {
    // -startRunning blocks, so start the session on the session queue, then
    // hop back to the main queue for the layer work UIKit requires.
    dispatch_async(self.captureSessionQueue, ^{
        [self.captureSession startRunning];
        dispatch_async(dispatch_get_main_queue(), ^{
            self.captureVideoPreviewLayer.frame = preview.bounds;
            [preview.layer addSublayer:self.captureVideoPreviewLayer];
        });
    });
}

#pragma mark - Properties

- (AVCaptureSession *)captureSession {
    if (!_captureSession) {
        _captureSession = [[AVCaptureSession alloc] init];

        // Use the default camera as the session's input.
        AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *error = nil;
        _captureInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
        if ([_captureSession canAddInput:_captureInput]) {
            [_captureSession addInput:_captureInput];
        }

        // Deliver 32BGRA frames to the delegate on the session queue,
        // dropping frames that arrive while the delegate is still busy.
        _captureOutput = [[AVCaptureVideoDataOutput alloc] init];
        _captureOutput.alwaysDiscardsLateVideoFrames = YES;
        [_captureOutput setSampleBufferDelegate:self queue:self.captureSessionQueue];
        NSString *key = (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey;
        NSNumber *value = @(kCVPixelFormatType_32BGRA);
        _captureOutput.videoSettings = @{ key : value };
        if ([_captureSession canAddOutput:_captureOutput]) {
            [_captureSession addOutput:_captureOutput];
        }

        // Prefer the 960x540 iFrame preset on Retina devices when available;
        // NSOrderedSet is the original's proxy for "is this iOS 5".
        NSString *preset = nil;
        if (NSClassFromString(@"NSOrderedSet") &&
            [UIScreen mainScreen].scale > 1 &&
            [inputDevice supportsAVCaptureSessionPreset:AVCaptureSessionPresetiFrame960x540]) {
            preset = AVCaptureSessionPresetiFrame960x540;
        }
        if (!preset) {
            preset = AVCaptureSessionPresetMedium;
        }
        _captureSession.sessionPreset = preset;
    }
    return _captureSession;
}

- (AVCaptureVideoPreviewLayer *)captureVideoPreviewLayer {
    if (!_captureVideoPreviewLayer) {
        _captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    return _captureVideoPreviewLayer;
}

#pragma mark - Life Cycle

- (id)init {
    self = [super init];
    if (self) {
        // A serial queue keeps session setup and frame delivery ordered.
        _captureSessionQueue = dispatch_queue_create("com.HUAJIE.captureSessionQueue", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}

- (void)dealloc {
    // Stop the session before dismantling its input/output graph.
    [_captureSession stopRunning];
    if ([_captureSession.outputs containsObject:_captureOutput]) {
        [_captureSession removeOutput:_captureOutput];
    }
    _captureOutput = nil;
    _captureVideoPreviewLayer = nil;
    _captureSession = nil;
    _captureSessionQueue = nil;
}

#pragma mark - AVCaptureVideoDataOutputSampleBuffer Delegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // Forward each frame to the client-supplied block, if any.
    if (self.didOutputSampleBuffer) {
        self.didOutputSampleBuffer(sampleBuffer);
    }
}

@end
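// Usage sketch: a minimal example of driving this helper from a view
// controller. The host controller and its `previewView` outlet are assumed
// names, and the block signature (a single CMSampleBufferRef, matching the
// delegate callback above) is an assumption about DidOutputSampleBufferBlock:
//
//   XHCaptureHelper *captureHelper = [[XHCaptureHelper alloc] init];
//   [captureHelper setDidOutputSampleBufferHandle:^(CMSampleBufferRef sampleBuffer) {
//       // Frames arrive on the capture session queue, not the main queue.
//       // The buffer is only valid for the duration of this callback, so
//       // CFRetain it (and CFRelease it later) if it must outlive the block.
//   }];
//   [captureHelper showCaptureOnView:self.previewView];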