Add iOS-related code

zher52 2021-09-29 13:46:03 +08:00
parent 2ee0c1a205
commit 3e89107a9f
15 changed files with 1933 additions and 0 deletions

plugin.xml View File

@@ -40,5 +40,35 @@
</feature>
</config-file>
<source-file src="src/ios/CapturePlugin.m"/>
<header-file src="src/ios/SGRecord/SGMotionManager.h"/>
<source-file src="src/ios/SGRecord/SGMotionManager.m"/>
<header-file src="src/ios/SGRecord/SGRecordEncoder.h"/>
<source-file src="src/ios/SGRecord/SGRecordEncoder.m"/>
<header-file src="src/ios/SGRecord/SGRecordManager.h"/>
<source-file src="src/ios/SGRecord/SGRecordManager.m"/>
<header-file src="src/ios/SGRecord/SGRecordProgressView.h"/>
<source-file src="src/ios/SGRecord/SGRecordProgressView.m"/>
<header-file src="src/ios/SGRecord/SGRecordSuccessPreview.h"/>
<source-file src="src/ios/SGRecord/SGRecordSuccessPreview.m"/>
<header-file src="src/ios/SGRecord/SGRecordViewController.h"/>
<source-file src="src/ios/SGRecord/SGRecordViewController.m"/>
<header-file src="src/ios/SGRecord/UIButton+Convenience.h"/>
<source-file src="src/ios/SGRecord/UIButton+Convenience.m"/>
<framework src="AVFoundation.framework"/>
<framework src="AVKit.framework"/>
<framework src="CoreMotion.framework"/>
<framework src="MobileCoreServices.framework"/>
<preference name="CAMERA_USAGE_DESCRIPTION" default="This app requires access to your camera to take pictures" />
<config-file target="*-Info.plist" parent="NSCameraUsageDescription">
<string>$CAMERA_USAGE_DESCRIPTION</string>
</config-file>
<preference name="MICROPHONE_USAGE_DESCRIPTION" default="This app requires access to your microphone to take pictures" />
<config-file target="*-Info.plist" parent="NSMicrophoneUsageDescription">
<string>$MICROPHONE_USAGE_DESCRIPTION</string>
</config-file>
<preference name="PHOTO_LIBRARY_ADD_USAGE_DESCRIPTION" default="This app requires access to your photo library to save your pictures" />
<config-file target="*-Info.plist" parent="NSPhotoLibraryAddUsageDescription">
<string>$PHOTO_LIBRARY_ADD_USAGE_DESCRIPTION</string>
</config-file>
</platform>
</plugin>

src/ios/SGRecord/SGMotionManager.h View File

@@ -0,0 +1,44 @@
//
// SGMotionManager.h
// Short video recording
//
// Created by lihaohao on 2017/5/24.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@protocol SGMotionManagerDeviceOrientationDelegate<NSObject>
@optional
- (void)motionManagerDeviceOrientation:(UIDeviceOrientation)deviceOrientation;
@end
@interface SGMotionManager : NSObject
@property (nonatomic ,assign) UIDeviceOrientation deviceOrientation;
@property (nonatomic ,assign) AVCaptureVideoOrientation videoOrientation;
@property (nonatomic ,weak) id<SGMotionManagerDeviceOrientationDelegate>delegate;
/**
Shared SGMotionManager instance.
@return the singleton SGMotionManager
*/
+ (instancetype)sharedManager;
/**
Start device-motion updates.
*/
- (void)startDeviceMotionUpdates;
/**
Stop device-motion updates.
*/
- (void)stopDeviceMotionUpdates;
/**
@return the AVCaptureVideoOrientation matching the current device orientation
*/
- (AVCaptureVideoOrientation)currentVideoOrientation;
@end
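For reference, a minimal sketch of how this singleton is meant to be driven (MyCameraController is a hypothetical host class used only for illustration; in this commit the real consumer is SGRecordViewController, shown further down):

#import "SGMotionManager.h"

@interface MyCameraController : UIViewController <SGMotionManagerDeviceOrientationDelegate>
@end

@implementation MyCameraController
- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // Become the delegate and start gravity-based orientation tracking
    [SGMotionManager sharedManager].delegate = self;
    [[SGMotionManager sharedManager] startDeviceMotionUpdates];
}
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    // Stop updates when the screen goes away
    [SGMotionManager sharedManager].delegate = nil;
    [[SGMotionManager sharedManager] stopDeviceMotionUpdates];
}
- (void)motionManagerDeviceOrientation:(UIDeviceOrientation)deviceOrientation {
    // Called from the motion-update handler with the latest detected orientation
    NSLog(@"device orientation: %ld", (long)deviceOrientation);
}
@end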

src/ios/SGRecord/SGMotionManager.m View File

@@ -0,0 +1,106 @@
//
// SGMotionManager.m
//
//
// Created by lihaohao on 2017/5/24.
// Copyright © 2017 . All rights reserved.
//
#import "SGMotionManager.h"
#import <CoreMotion/CoreMotion.h>
#define MOTION_UPDATE_INTERVAL 1/15.0
@interface SGMotionManager()
@property (nonatomic ,strong) CMMotionManager *motionManager;
@end
@implementation SGMotionManager
+ (instancetype)sharedManager{
static SGMotionManager *manager = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
manager = [[SGMotionManager alloc]init];
});
return manager;
}
- (instancetype)init{
self = [super init];
if (self) {
[self motionManager];
}
return self;
}
- (CMMotionManager *)motionManager{
if (!_motionManager) {
_motionManager = [[CMMotionManager alloc]init];
_motionManager.deviceMotionUpdateInterval = MOTION_UPDATE_INTERVAL;
}
return _motionManager;
}
//
- (void)startDeviceMotionUpdates{
if (_motionManager.deviceMotionAvailable) {
[_motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
[self performSelectorOnMainThread:@selector(handleDeviceMotion:) withObject:motion waitUntilDone:YES];
}];
}
}
//
- (void)stopDeviceMotionUpdates{
[_motionManager stopDeviceMotionUpdates];
}
- (void)handleDeviceMotion:(CMDeviceMotion *)deviceMotion{
double x = deviceMotion.gravity.x;
double y = deviceMotion.gravity.y;
if (fabs(y) >= fabs(x))
{
if (y >= 0){
_deviceOrientation = UIDeviceOrientationPortraitUpsideDown;
_videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
//NSLog(@"UIDeviceOrientationPortraitUpsideDown--AVCaptureVideoOrientationPortraitUpsideDown");
}
else{
_deviceOrientation = UIDeviceOrientationPortrait;
_videoOrientation = AVCaptureVideoOrientationPortrait;
//NSLog(@"UIDeviceOrientationPortrait--AVCaptureVideoOrientationPortrait");
}
}
else{
if (x >= 0){
_deviceOrientation = UIDeviceOrientationLandscapeRight;
_videoOrientation = AVCaptureVideoOrientationLandscapeRight;
//NSLog(@"UIDeviceOrientationLandscapeRight--AVCaptureVideoOrientationLandscapeRight");
}
else{
_deviceOrientation = UIDeviceOrientationLandscapeLeft;
_videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
// NSLog(@"UIDeviceOrientationLandscapeLeft--AVCaptureVideoOrientationLandscapeLeft");
}
}
if (_delegate && [_delegate respondsToSelector:@selector(motionManagerDeviceOrientation:)]) {
[_delegate motionManagerDeviceOrientation:_deviceOrientation];
}
}
//
- (AVCaptureVideoOrientation)currentVideoOrientation{
AVCaptureVideoOrientation orientation;
switch ([SGMotionManager sharedManager].deviceOrientation) {
case UIDeviceOrientationPortrait:
orientation = AVCaptureVideoOrientationPortrait;
break;
case UIDeviceOrientationLandscapeRight:
orientation = AVCaptureVideoOrientationLandscapeLeft;
break;
case UIDeviceOrientationPortraitUpsideDown:
orientation = AVCaptureVideoOrientationPortraitUpsideDown;
break;
default:
orientation = AVCaptureVideoOrientationLandscapeRight;
break;
}
return orientation;
}
- (void)dealloc{
NSLog(@"%s",__func__);
}
@end

src/ios/SGRecord/SGRecordEncoder.h View File

@@ -0,0 +1,63 @@
//
// SGRecordEncoder.h
// Short video recording
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@interface SGRecordEncoder : NSObject
@property (nonatomic, readonly) NSString *path;
/**
SGRecordEncoder convenience constructor.
@param path output file path
@param cy video height in pixels
@param cx video width in pixels
@param ch number of audio channels
@param rate audio sample rate
@return an SGRecordEncoder instance
*/
+ (SGRecordEncoder*)encoderForPath:(NSString*) path Height:(NSInteger) cy width:(NSInteger) cx channels: (int) ch samples:(Float64) rate;
/**
Designated initializer.
@param path output file path
@param cy video height in pixels
@param cx video width in pixels
@param ch number of audio channels
@param rate audio sample rate
@return an SGRecordEncoder instance
*/
- (instancetype)initPath:(NSString*)path Height:(NSInteger)cy width:(NSInteger)cx channels: (int)ch samples:(Float64)rate;
/**
Finish writing the output file.
@param handler completion block invoked once the writer has finished
*/
- (void)finishWithCompletionHandler:(void (^)(void))handler;
/**
Append a sample buffer to the output file.
@param sampleBuffer the captured audio or video sample buffer
@param isVideo YES for video samples, NO for audio samples
@return YES if the frame was appended
*/
- (BOOL)encodeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo;
@end
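A minimal sketch of the intended call sequence (illustrative only; sampleBuffer and isVideo are assumed to come from the capture-output delegate callback that SGRecordManager sets up below):

#import "SGRecordEncoder.h"

// 360x640 video, mono audio at 44.1 kHz, written to a temporary file
NSString *videoPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"video.mp4"];
SGRecordEncoder *encoder = [SGRecordEncoder encoderForPath:videoPath
                                                    Height:640
                                                     width:360
                                                  channels:1
                                                   samples:44100];
// Inside the sample-buffer delegate callback, append every frame:
[encoder encodeFrame:sampleBuffer isVideo:isVideo];
// When recording stops, close the file:
[encoder finishWithCompletionHandler:^{
    NSLog(@"movie written to %@", encoder.path);
}];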

src/ios/SGRecord/SGRecordEncoder.m View File

@@ -0,0 +1,168 @@
//
// SGRecordEncoder.m
//
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 . All rights reserved.
//
#import "SGRecordEncoder.h"
#import "SGMotionManager.h"
@interface SGRecordEncoder()
@property (nonatomic, strong) AVAssetWriter *writer;//asset writer for the output file
@property (nonatomic, strong) AVAssetWriterInput *videoInput;//video writer input
@property (nonatomic, strong) AVAssetWriterInput *audioInput;//audio writer input
@property (nonatomic, strong) NSString *path;//output file path
@end
@implementation SGRecordEncoder
+ (SGRecordEncoder*)encoderForPath:(NSString*) path Height:(NSInteger) cy width:(NSInteger) cx channels: (int) ch samples:(Float64) rate{
return [[SGRecordEncoder alloc] initPath:path Height:cy width:cx channels:ch samples:rate];
}
- (instancetype)initPath:(NSString*)path Height:(NSInteger)cy width:(NSInteger)cx channels:(int)ch samples:(Float64) rate{
self = [super init];
if (self) {
self.path = path;
//Remove any leftover file at the target path
[[NSFileManager defaultManager] removeItemAtPath:self.path error:nil];
NSURL* url = [NSURL fileURLWithPath:self.path];
//Create an MP4 asset writer
_writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeMPEG4 error:nil];
//Lay the file out for progressive network playback
_writer.shouldOptimizeForNetworkUse = YES;
//Configure the video input
[self initVideoInputHeight:cy width:cx];
//Only configure audio when the sample rate and channel count are known
if (rate != 0 && ch != 0) {
//Configure the audio input
[self initAudioInputChannels:ch samples:rate];
}
}
return self;
}
//
- (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx {
//
NSInteger numPixels = cx * cy;
//
CGFloat bitsPerPixel = 6.0;
NSInteger bitsPerSecond = numPixels * bitsPerPixel;
//
NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey:@(bitsPerSecond),
AVVideoExpectedSourceFrameRateKey:@(30),
AVVideoMaxKeyFrameIntervalKey:@(30),
AVVideoProfileLevelKey:AVVideoProfileLevelH264BaselineAutoLevel };
NSDictionary* settings = @{AVVideoCodecKey:AVVideoCodecH264,
AVVideoScalingModeKey:AVVideoScalingModeResizeAspectFill,
AVVideoWidthKey:@(cx),
AVVideoHeightKey:@(cy),
AVVideoCompressionPropertiesKey:compressionProperties };
//
_videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
_videoInput.transform = [self transformFromCurrentVideoOrientationToOrientation:AVCaptureVideoOrientationPortrait];
//
_videoInput.expectsMediaDataInRealTime = YES;
//
if ([_writer canAddInput:_videoInput]) {
[_writer addInput:_videoInput];
}
}
//
- (void)initAudioInputChannels:(int)ch samples:(Float64)rate {
//AAC audio: 28 kbps per channel, mono, 22.05 kHz
NSDictionary *settings = @{AVEncoderBitRatePerChannelKey:@(28000),
AVFormatIDKey:@(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey:@(1),
AVSampleRateKey:@(22050) };
//
_audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings];
//
_audioInput.expectsMediaDataInRealTime = YES;
//Add the audio input to the writer if possible
if ([_writer canAddInput:_audioInput]) {
[_writer addInput:_audioInput];
}
}
//
- (void)finishWithCompletionHandler:(void (^)(void))handler {
[_writer finishWritingWithCompletionHandler: handler];
}
//
- (BOOL)encodeFrame:(CMSampleBufferRef) sampleBuffer isVideo:(BOOL)isVideo {
//
if (CMSampleBufferDataIsReady(sampleBuffer)) {
//Start the writer on the first video sample
if (_writer.status == AVAssetWriterStatusUnknown && isVideo) {
//Use this sample's presentation timestamp as the session start time
CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
//
[_writer startWriting];
[_writer startSessionAtSourceTime:startTime];
}
//
if (_writer.status == AVAssetWriterStatusFailed) {
NSLog(@"writer error %@", _writer.error.localizedDescription);
return NO;
}
//
if (isVideo) {
//
if (_videoInput.readyForMoreMediaData == YES) {
//
[_videoInput appendSampleBuffer:sampleBuffer];
return YES;
}
}else {
//
if (_audioInput.readyForMoreMediaData) {
//
[_audioInput appendSampleBuffer:sampleBuffer];
return YES;
}
}
}
return NO;
}
//
- (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation];
CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:[SGMotionManager sharedManager].videoOrientation];
CGFloat angleOffset;
angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
CGAffineTransform transform = CGAffineTransformMakeRotation(angleOffset);
return transform;
}
- (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGFloat angle = 0.0;
switch (orientation)
{
case AVCaptureVideoOrientationPortrait:
angle = 0.0;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
angle = M_PI;
break;
case AVCaptureVideoOrientationLandscapeRight:
angle = -M_PI_2;
break;
case AVCaptureVideoOrientationLandscapeLeft:
angle = M_PI_2;
break;
default:
break;
}
return angle;
}
@end

src/ios/SGRecord/SGRecordManager.h View File

@@ -0,0 +1,107 @@
//
// SGRecordManager.h
// Short video recording
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVCaptureVideoPreviewLayer.h>
#import <AVFoundation/AVFoundation.h>
@protocol SGRecordEngineDelegate <NSObject>
- (void)recordProgress:(CGFloat)progress;
@end
@interface SGRecordManager : NSObject
@property (atomic, assign, readonly) BOOL isCapturing;//recording in progress
@property (atomic, assign, readonly) BOOL isPaused;//whether recording is paused
@property (atomic, assign, readonly) CGFloat currentRecordTime;//current recording time in seconds
@property (atomic, assign) CGFloat maxRecordTime;//maximum recording time in seconds
@property (weak, nonatomic) id<SGRecordEngineDelegate>delegate;
@property (atomic, strong) NSString *videoPath;//video file path
@property (nonatomic, readonly,getter=isRunning) BOOL isRunning; // whether the capture session is running
/**
Preview layer that shows the camera feed.
@return an AVCaptureVideoPreviewLayer bound to the capture session
*/
- (AVCaptureVideoPreviewLayer *)previewLayer;
/**
Take a still photo.
@param callback invoked with the captured image
*/
- (void)takePhoto:(void(^)(UIImage *image))callback;
/**
Start the capture session.
*/
- (void)startUp;
/**
Stop the capture session and finish any in-progress file.
*/
- (void)shutdown;
/**
Begin recording.
*/
- (void) startCapture;
/**
Pause recording.
*/
- (void) pauseCapture;
/**
Stop recording.
@param isSuccess whether the recording should be kept
@param handler called with the first-frame image and the video file path
*/
- (void) stopCaptureWithStatus:(BOOL)isSuccess handler:(void (^)(UIImage *movieImage,NSString *path))handler;
/**
Resume a paused recording.
*/
- (void) resumeCapture;
/**
Turn the torch on.
*/
- (void)openFlashLight;
/**
Turn the torch off.
*/
- (void)closeFlashLight;
/**
Switch between the front and back camera.
@param isFront YES: front camera, NO: back camera
*/
- (void)changeCameraInputDeviceisFront:(BOOL)isFront;
/**
Convert a recorded .mov file to .mp4.
@param mediaURL source file URL
@param handler called with the first-frame image and the converted file path
*/
- (void)changeMovToMp4:(NSURL *)mediaURL dataBlock:(void (^)(UIImage *movieImage,NSString *path))handler;
/**
Set focus and exposure at a point of interest.
@param focusMode focus mode to apply
@param exposureMode exposure mode to apply
@param point point of interest in camera coordinates
*/
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point;
@end
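Putting the pieces together, a minimal sketch of how a host view controller might drive the manager (illustrative only; self is assumed to be a UIViewController that adopts SGRecordEngineDelegate):

SGRecordManager *manager = [[SGRecordManager alloc] init];
manager.delegate = self;                      // receives recordProgress:
manager.maxRecordTime = 10;                   // seconds
[manager previewLayer].frame = self.view.bounds;
[self.view.layer insertSublayer:[manager previewLayer] atIndex:0];
[manager startUp];                            // start the capture session
[manager startCapture];                       // begin writing the movie
// ... later, e.g. when the record button is released:
[manager stopCaptureWithStatus:YES handler:^(UIImage *movieImage, NSString *path) {
    NSLog(@"saved %@ (first frame: %@)", path, movieImage);
}];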

src/ios/SGRecord/SGRecordManager.m View File

@@ -0,0 +1,658 @@
//
// SGRecordManager.m
//
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 . All rights reserved.
//
#import "SGRecordManager.h"
#import "SGRecordEncoder.h"
#import <Photos/Photos.h>
#import "SGMotionManager.h"
#define VIDEO_WIDTH 360
#define VIDEO_HEIGHT 640
#define MAX_TIME 10
typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);
@interface SGRecordManager ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate, CAAnimationDelegate> {
CMTime _timeOffset;//accumulated time offset across pauses
CMTime _lastVideo;//presentation time of the last video sample
CMTime _lastAudio;//presentation time of the last audio sample
NSInteger _cx;//video width
NSInteger _cy;//video height
int _channels;//audio channel count
Float64 _samplerate;//audio sample rate
}
@property (strong, nonatomic) SGRecordEncoder *recordEncoder;//movie writer wrapper
@property (strong, nonatomic) AVCaptureSession *recordSession;//capture session
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;//camera preview layer
@property (strong, nonatomic) AVCaptureDeviceInput *backCameraInput;//back camera input
@property (strong, nonatomic) AVCaptureDeviceInput *frontCameraInput;//front camera input
@property (strong, nonatomic) AVCaptureDeviceInput *audioMicInput;//microphone input
@property (copy , nonatomic) dispatch_queue_t captureQueue;//serial queue for capture callbacks
@property (strong, nonatomic) AVCaptureConnection *audioConnection;//audio connection
@property (strong, nonatomic) AVCaptureConnection *videoConnection;//video connection
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoOutput;//video data output
@property (strong, nonatomic) AVCaptureAudioDataOutput *audioOutput;//audio data output
@property (strong, nonatomic) AVCaptureStillImageOutput *stillImageOutput;//still image output
@property (atomic, assign) BOOL isCapturing;//recording in progress
@property (atomic, assign) BOOL isPaused;//recording paused
@property (atomic, assign) BOOL discont;//discontinuity flag set while paused
@property (atomic, assign) CMTime startTime;//recording start time
@property (atomic, assign) CGFloat currentRecordTime;//current recording time in seconds
@end
@implementation SGRecordManager
- (instancetype)init {
self = [super init];
if (self) {
self.maxRecordTime = MAX_TIME;
}
return self;
}
#pragma mark - Take photo
//Capture a still image from the still image output
- (void)takePhoto:(void(^)(UIImage *image))callback{
AVCaptureConnection *connection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
if (connection.isVideoOrientationSupported) {
connection.videoOrientation = [[SGMotionManager sharedManager] currentVideoOrientation];
}
id takePictureSuccess = ^(CMSampleBufferRef sampleBuffer,NSError *error){
if (sampleBuffer == NULL) {
return ;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
UIImage *image = [[UIImage alloc]initWithData:imageData];
callback(image);
};
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:takePictureSuccess];
}
//
- (void)startUp {
NSLog(@"启动录制功能");
self.startTime = CMTimeMake(0, 0);
self.isCapturing = NO;
self.isPaused = NO;
self.discont = NO;
[self.recordSession startRunning];
}
//
- (void)shutdown {
_startTime = CMTimeMake(0, 0);
if (_recordSession) {
[_recordSession stopRunning];
}
[_recordEncoder finishWithCompletionHandler:^{
NSLog(@"录制完成");
}];
}
//
- (void) startCapture {
@synchronized(self) {
if (!self.isCapturing) {
NSLog(@"开始录制");
self.recordEncoder = nil;
self.isPaused = NO;
self.discont = NO;
_timeOffset = CMTimeMake(0, 0);
self.isCapturing = YES;
}
}
}
//
- (void) pauseCapture {
@synchronized(self) {
if (self.isCapturing) {
NSLog(@"暂停录制");
self.isPaused = YES;
self.discont = YES;
}
}
}
//
- (void) resumeCapture {
@synchronized(self) {
if (self.isPaused) {
NSLog(@"继续录制");
self.isPaused = NO;
}
}
}
//
- (void) stopCaptureWithStatus:(BOOL)isSuccess handler:(void (^)(UIImage *movieImage,NSString *path))handler {
@synchronized(self) {
if (self.isCapturing) {
NSString* path = self.recordEncoder.path;
// NSURL* url = [NSURL fileURLWithPath:path];
self.isCapturing = NO;
dispatch_async(_captureQueue, ^{
[self.recordEncoder finishWithCompletionHandler:^{
self.isCapturing = NO;
self.recordEncoder = nil;
self.startTime = CMTimeMake(0, 0);
self.currentRecordTime = 0;
if (!isSuccess) {
NSError *error;
[[NSFileManager defaultManager] removeItemAtPath:path error:&error];
NSLog(@"录制时间小于3秒,自动清理视频路径path:%@;error:%@",path,error);
return ;
}
if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate recordProgress:self.currentRecordTime/self.maxRecordTime];
});
}
//
// [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
// [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:url];
// } completionHandler:^(BOOL success, NSError * _Nullable error) {
// NSLog(@"保存成功");
// }];
[self movieToImageHandler:handler];
}];
});
}
}
}
//
- (void)movieToImageHandler:(void (^)(UIImage *movieImage,NSString *path))handler {
NSURL *url = [NSURL fileURLWithPath:self.videoPath];
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform = TRUE;
CMTime thumbTime = CMTimeMakeWithSeconds(0, 60);
generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
AVAssetImageGeneratorCompletionHandler generatorHandler =
^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error){
if (result == AVAssetImageGeneratorSucceeded) {
UIImage *thumbImg = [UIImage imageWithCGImage:im];
if (handler) {
dispatch_async(dispatch_get_main_queue(), ^{
handler(thumbImg,self.videoPath);
});
}
}
};
[generator generateCGImagesAsynchronouslyForTimes:
[NSArray arrayWithObject:[NSValue valueWithCMTime:thumbTime]] completionHandler:generatorHandler];
}
#pragma mark - setget
//
- (AVCaptureSession *)recordSession {
if (_recordSession == nil) {
_recordSession = [[AVCaptureSession alloc] init];
_recordSession.sessionPreset = AVCaptureSessionPresetHigh;
//
if ([_recordSession canAddInput:self.backCameraInput]) {
[_recordSession addInput:self.backCameraInput];
}
//
if ([_recordSession canAddInput:self.audioMicInput]) {
[_recordSession addInput:self.audioMicInput];
}
//
if ([_recordSession canAddOutput:self.videoOutput]) {
[_recordSession addOutput:self.videoOutput];
_cx = VIDEO_WIDTH;
_cy = VIDEO_HEIGHT;
}
//
if ([_recordSession canAddOutput:self.audioOutput]) {
[_recordSession addOutput:self.audioOutput];
}
//
if ([_recordSession canAddOutput:self.stillImageOutput]) {
[_recordSession addOutput:self.stillImageOutput];
}
//
self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
}
return _recordSession;
}
//
- (AVCaptureDeviceInput *)backCameraInput {
if (_backCameraInput == nil) {
NSError *error;
_backCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self backCamara] error:&error];
if (error) {
NSLog(@"获取后置摄像头失败~");
}
}
return _backCameraInput;
}
//
- (AVCaptureDeviceInput *)frontCameraInput {
if (_frontCameraInput == nil) {
NSError *error;
_frontCameraInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self frontCamara] error:&error];
if (error) {
NSLog(@"获取前置摄像头失败~");
}
}
return _frontCameraInput;
}
//
- (AVCaptureDeviceInput *)audioMicInput {
if (_audioMicInput == nil) {
AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error;
_audioMicInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:&error];
if (error) {
NSLog(@"获取麦克风失败~");
}
}
return _audioMicInput;
}
//
- (AVCaptureVideoDataOutput *)videoOutput {
if (_videoOutput == nil) {
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[_videoOutput setSampleBufferDelegate:self queue:self.captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
_videoOutput.videoSettings = setcapSettings;
}
return _videoOutput;
}
//
- (AVCaptureAudioDataOutput *)audioOutput {
if (_audioOutput == nil) {
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[_audioOutput setSampleBufferDelegate:self queue:self.captureQueue];
}
return _audioOutput;
}
//
- (AVCaptureStillImageOutput *)stillImageOutput{
if (_stillImageOutput == nil) {
_stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
_stillImageOutput.outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
}
return _stillImageOutput;
}
//
- (AVCaptureConnection *)videoConnection {
_videoConnection = [self.videoOutput connectionWithMediaType:AVMediaTypeVideo];
return _videoConnection;
}
//
- (AVCaptureConnection *)audioConnection {
if (_audioConnection == nil) {
_audioConnection = [self.audioOutput connectionWithMediaType:AVMediaTypeAudio];
}
return _audioConnection;
}
//layer
- (AVCaptureVideoPreviewLayer *)previewLayer {
if (_previewLayer == nil) {
//AVCaptureSession
AVCaptureVideoPreviewLayer *preview = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.recordSession];
//
preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
_previewLayer = preview;
}
return _previewLayer;
}
//
- (dispatch_queue_t)captureQueue {
if (_captureQueue == nil) {
_captureQueue = dispatch_queue_create("cn.qiuyouqun.im.wclrecordengine.capture", DISPATCH_QUEUE_SERIAL);
}
return _captureQueue;
}
- (BOOL)isRunning{
return _recordSession.isRunning;
}
#pragma mark - Camera switch animation
- (void)changeCameraAnimation {
CATransition *changeAnimation = [CATransition animation];
changeAnimation.delegate = self;
changeAnimation.duration = 0.45;
changeAnimation.type = @"oglFlip";
changeAnimation.subtype = kCATransitionFromRight;
changeAnimation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
[self.previewLayer addAnimation:changeAnimation forKey:@"changeAnimation"];
}
- (void)animationDidStart:(CAAnimation *)anim {
self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
[self.recordSession startRunning];
}
#pragma mark - Convert mov to mp4
- (void)changeMovToMp4:(NSURL *)mediaURL dataBlock:(void (^)(UIImage *movieImage,NSString *path))handler {
AVAsset *video = [AVAsset assetWithURL:mediaURL];
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:video presetName:AVAssetExportPreset1280x720];
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeMPEG4;
NSString * basePath=[self getVideoCachePath];
self.videoPath = [basePath stringByAppendingPathComponent:[self getUploadFile_type:@"video" fileType:@"mp4"]];
exportSession.outputURL = [NSURL fileURLWithPath:self.videoPath];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
[self movieToImageHandler:handler];
}];
}
#pragma mark - Camera devices
//
- (AVCaptureDevice *)frontCamara {
return [self cameraWithPosition:AVCaptureDevicePositionFront];
}
//
- (AVCaptureDevice *)backCamara {
return [self cameraWithPosition:AVCaptureDevicePositionBack];
}
//
- (void)changeCameraInputDeviceisFront:(BOOL)isFront {
//
[self.recordSession beginConfiguration];
if (isFront) {
[self.recordSession removeInput:self.backCameraInput];
if ([self.recordSession canAddInput:self.frontCameraInput]) {
// [self changeCameraAnimation];
[self.recordSession addInput:self.frontCameraInput];
}
}else {
[self.recordSession removeInput:self.frontCameraInput];
if ([self.recordSession canAddInput:self.backCameraInput]) {
// [self changeCameraAnimation];
[self.recordSession addInput:self.backCameraInput];
}
}
if (self.videoConnection.isVideoMirroringSupported) {
self.videoConnection.videoMirrored = isFront;
}
self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
[self.recordSession commitConfiguration];
}
//
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition) position {
//
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
//position
for (AVCaptureDevice *device in devices) {
if ([device position] == position) {
return device;
}
}
return nil;
}
#pragma mark -
#pragma mark - Focus
- (void)focus:(CGPoint)point{
CGPoint camaraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:camaraPoint];
}
#pragma mark -
#pragma mark - openFlashLight
/**
Turn the torch on.
*/
- (void)openFlashLight{
AVCaptureDevice *backCamara = [self backCamara];
if (!backCamara.flashAvailable) {
return;
}
if (backCamara.torchMode == AVCaptureTorchModeOff) {
[backCamara lockForConfiguration:nil];
backCamara.torchMode = AVCaptureTorchModeOn;
backCamara.flashMode = AVCaptureFlashModeOn;
[backCamara unlockForConfiguration];
}
}
#pragma mark -
#pragma mark - closeFlashLight
/**
Turn the torch off.
*/
- (void)closeFlashLight{
AVCaptureDevice *backCamara = [self backCamara];
if (!backCamara.flashAvailable) {
return;
}
if (backCamara.torchMode == AVCaptureTorchModeOn) {
[backCamara lockForConfiguration:nil];
backCamara.torchMode = AVCaptureTorchModeOff;
backCamara.flashMode = AVCaptureFlashModeOff;
[backCamara unlockForConfiguration];
}
}
/**
@param flashMode
*/
-(void)setFlashMode:(AVCaptureFlashMode )flashMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFlashModeSupported:flashMode]) {
[captureDevice setFlashMode:flashMode];
}
}];
}
/**
@param focusMode
*/
-(void)setFocusMode:(AVCaptureFocusMode )focusMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFocusModeSupported:focusMode]) {
[captureDevice setFocusMode:focusMode];
}
}];
}
/**
@param exposureMode
*/
-(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isExposureModeSupported:exposureMode]) {
[captureDevice setExposureMode:exposureMode];
}
}];
}
/**
@param focusMode
@param exposureMode
@param point
*/
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFocusModeSupported:focusMode]) {
[captureDevice setFocusMode:focusMode];
}
if ([captureDevice isFocusPointOfInterestSupported]) {
[captureDevice setFocusPointOfInterest:point];
}
if ([captureDevice isExposureModeSupported:exposureMode]) {
[captureDevice setExposureMode:exposureMode];
}
if ([captureDevice isExposurePointOfInterestSupported]) {
[captureDevice setExposurePointOfInterest:point];
}
}];
}
/**
@param propertyChange callback
*/
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
AVCaptureDevice *captureDevice= [self backCamara];
NSError *error;
/*
In order to set hardware properties on an AVCaptureDevice, such as focusMode and exposureMode, clients must first acquire a lock on the device. Clients should only hold the device lock if they require settable device properties to remain unchanged. Holding the device lock unnecessarily may degrade capture quality in other applications sharing the device.
*/
if ([captureDevice lockForConfiguration:&error]) {
propertyChange(captureDevice);
[captureDevice unlockForConfiguration];
}else{
NSLog(@"设置设备属性过程发生错误:error%@",error.localizedDescription);
}
}
//
- (NSString *)getVideoCachePath {
NSString *videoCache = [NSTemporaryDirectory() stringByAppendingPathComponent:@"videos"] ;
BOOL isDir = NO;
NSFileManager *fileManager = [NSFileManager defaultManager];
BOOL existed = [fileManager fileExistsAtPath:videoCache isDirectory:&isDir];
if ( !(isDir == YES && existed == YES) ) {
[fileManager createDirectoryAtPath:videoCache withIntermediateDirectories:YES attributes:nil error:nil];
}
return videoCache;
}
- (NSString *)getUploadFile_type:(NSString *)type fileType:(NSString *)fileType {
NSTimeInterval now = [[NSDate date] timeIntervalSince1970];
NSDateFormatter * formatter = [[NSDateFormatter alloc] init];
[formatter setDateFormat:@"HHmmss"];
NSDate * NowDate = [NSDate dateWithTimeIntervalSince1970:now];
NSString * timeStr = [formatter stringFromDate:NowDate];
NSString *fileName = [NSString stringWithFormat:@"%@_%@.%@",type,timeStr,fileType];
return fileName;
}
#pragma mark - Sample buffer delegate (audio & video)
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
BOOL isVideo = YES;
@synchronized(self) {
if (!self.isCapturing || self.isPaused) {
return;
}
if (captureOutput != self.videoOutput) {
isVideo = NO;
}
//Create the encoder on the first audio sample, so the audio format (sample rate, channels) is known
if ((self.recordEncoder == nil) && !isVideo) {
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString *videoName = [self getUploadFile_type:@"video" fileType:@"mp4"];
self.videoPath = [[self getVideoCachePath] stringByAppendingPathComponent:videoName];
self.recordEncoder = [SGRecordEncoder encoderForPath:self.videoPath Height:_cy width:_cx channels:_channels samples:_samplerate];
}
//
if (self.discont) {
if (isVideo) {
return;
}
self.discont = NO;
//
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = isVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid) {
if (_timeOffset.flags & kCMTimeFlags_Valid) {
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
if (_timeOffset.value == 0) {
_timeOffset = offset;
}else {
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
//Retain the sample buffer so it stays valid while it is re-timed and encoded
CFRetain(sampleBuffer);
if (_timeOffset.value > 0) {
CFRelease(sampleBuffer);
//Shift the timestamps back by the accumulated pause offset
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
//
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0) {
pts = CMTimeAdd(pts, dur);
}
if (isVideo) {
_lastVideo = pts;
}else {
_lastAudio = pts;
}
}
CMTime dur = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (self.startTime.value == 0) {
self.startTime = dur;
}
CMTime sub = CMTimeSubtract(dur, self.startTime);
self.currentRecordTime = CMTimeGetSeconds(sub);
if (self.currentRecordTime > self.maxRecordTime) {
if (self.currentRecordTime - self.maxRecordTime < 0.1) {
if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate recordProgress:self.currentRecordTime/self.maxRecordTime];
});
}
}
return;
}
if ([self.delegate respondsToSelector:@selector(recordProgress:)]) {
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate recordProgress:self.currentRecordTime/self.maxRecordTime];
});
}
//
[self.recordEncoder encodeFrame:sampleBuffer isVideo:isVideo];
CFRelease(sampleBuffer);
}
//Read the sample rate and channel count from the audio format description
- (void)setAudioFormat:(CMFormatDescriptionRef)fmt {
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
_samplerate = asbd->mSampleRate;
_channels = asbd->mChannelsPerFrame;
}
//Return a copy of the sample buffer with its timing shifted back by offset
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sample by:(CMTime)offset {
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++) {
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
@end

src/ios/SGRecord/SGRecordProgressView.h View File

@@ -0,0 +1,15 @@
//
// SGRecordProgressView.h
// Short video recording
//
// Created by lihaohao on 2017/5/23.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface SGRecordProgressView : UIButton
@property (nonatomic ,assign) CGFloat progress;
- (void)resetScale;
- (void)setScale;
@end
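A minimal sketch of the intended usage (illustrative; the real wiring is in SGRecordViewController further down):

SGRecordProgressView *recordButton =
    [[SGRecordProgressView alloc] initWithFrame:CGRectMake(0, 0, 78, 78)];
[self.view addSubview:recordButton];
[recordButton setScale];        // enlarge while recording
recordButton.progress = 0.5;    // redraws the progress ring at 50%
[recordButton resetScale];      // back to the idle size when recording ends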

src/ios/SGRecord/SGRecordProgressView.m View File

@@ -0,0 +1,74 @@
//
// SGRecordProgressView.m
//
//
// Created by lihaohao on 2017/5/23.
// Copyright © 2017 . All rights reserved.
//
#import "SGRecordProgressView.h"
#define SG_LINE_WIDTH 4
#define SPRING_DAMPING 50
#define SPRING_VELOCITY 29
@interface SGRecordProgressView()
@property (nonatomic ,strong) CALayer *centerlayer;
@end
@implementation SGRecordProgressView
- (instancetype)initWithFrame:(CGRect)frame{
self = [super initWithFrame:frame];
if (self) {
[self setupUI];
}
return self;
}
- (void)setupUI{
self.layer.cornerRadius = self.bounds.size.height / 2;
self.clipsToBounds = YES;
//
self.backgroundColor = [UIColor colorWithRed:255/255.0 green:255/255.0 blue:255/255.0 alpha:0.24];
CALayer *centerlayer = [CALayer layer];
centerlayer.backgroundColor = [UIColor whiteColor].CGColor;
centerlayer.position = self.center;
centerlayer.bounds = CGRectMake(0, 0, 116/2, 116/2);
centerlayer.cornerRadius = 116/4;
centerlayer.masksToBounds = YES;
[self.layer addSublayer:centerlayer];
_centerlayer = centerlayer;
}
- (void)resetScale{
[UIView animateWithDuration:0.25 animations:^{
_centerlayer.transform = CATransform3DIdentity;
self.transform = CGAffineTransformIdentity;
}];
}
- (void)setScale{
[UIView animateWithDuration:0.25 animations:^{
_centerlayer.transform = CATransform3DScale(_centerlayer.transform, 30/58.0, 30/58.0, 1);
self.transform = CGAffineTransformScale(self.transform, 172/156.0, 172/156.0);
}];
}
-(void)setProgress:(CGFloat)progress{
_progress = progress;
[self setNeedsDisplay];
}
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
// Drawing code
CGContextRef contexRef = UIGraphicsGetCurrentContext();// current drawing context
CGPoint ceterPoint = CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2); // arc center
CGFloat radius = self.bounds.size.height / 2 - SG_LINE_WIDTH/2;// arc radius
CGFloat startA = -M_PI_2; // start angle (12 o'clock)
CGFloat endA = -M_PI_2 + M_PI * 2 *_progress; // end angle derived from progress
// Build the progress arc (clockwise)
UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:ceterPoint radius:radius startAngle:startA endAngle:endA clockwise:YES];
CGContextSetLineWidth(contexRef, SG_LINE_WIDTH);// line width
[[UIColor colorWithRed:255/255.0 green:214/255.0 blue:34/255.0 alpha:1] setStroke];// stroke color
CGContextAddPath(contexRef, path.CGPath);// add the arc path
CGContextStrokePath(contexRef);// stroke it
}
@end

src/ios/SGRecord/SGRecordSuccessPreview.h View File

@@ -0,0 +1,25 @@
//
// SGRecordSuccessPreview.h
// Short video recording
//
// Created by lihaohao on 2017/5/22.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <MediaPlayer/MediaPlayer.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>
@interface SGRecordSuccessPreview : UIView
@property (nonatomic ,copy) void (^sendBlock) (UIImage *image, NSString *videoPath);
@property (nonatomic ,copy) void (^cancelBlcok) (void);
/**
Show a captured photo or a recorded video.
@param image still image to preview (nil when previewing a video)
@param videoPath recorded video path (nil when previewing a photo)
@param orientation capture orientation, used to pick the content mode
*/
- (void)setImage:(UIImage *)image videoPath:(NSString *)videoPath captureVideoOrientation:(AVCaptureVideoOrientation)orientation;
@end
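A minimal sketch of the intended usage, assuming recordedPath points at a finished clip (pass an image and a nil path to preview a photo instead). Note that the cancel block's property is spelled cancelBlcok in this commit:

SGRecordSuccessPreview *preview =
    [[SGRecordSuccessPreview alloc] initWithFrame:self.view.bounds];
[preview setSendBlock:^(UIImage *image, NSString *videoPath) {
    NSLog(@"send %@ / %@", image, videoPath);   // hand the result to the caller
}];
[preview setCancelBlcok:^{
    NSLog(@"retake");                           // tear the preview down and re-record
}];
[self.view addSubview:preview];
[preview setImage:nil
        videoPath:recordedPath
captureVideoOrientation:AVCaptureVideoOrientationPortrait];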

src/ios/SGRecord/SGRecordSuccessPreview.m View File

@@ -0,0 +1,115 @@
//
// SGRecordSuccessPreview.m
//
//
// Created by lihaohao on 2017/5/22.
// Copyright © 2017 . All rights reserved.
//
#import "SGRecordSuccessPreview.h"
#import "UIButton+Convenience.h"
@interface SGRecordSuccessPreview(){
float _width;
float _distance;
}
@property (nonatomic ,strong) UIButton *cancelButton;
@property (nonatomic ,strong) UIButton *sendButton;
@property (nonatomic ,strong) UIImage *image;//captured still image
@property (nonatomic ,copy) NSString *videoPath; //recorded video path
@property (nonatomic ,assign) BOOL isPhoto;//YES when previewing a photo
#if __IPHONE_OS_VERSION_MAX_ALLOWED > __IPHONE_8_4
@property (nonatomic ,strong) AVPlayerViewController *avPlayer;
#endif
@property (nonatomic ,assign) AVCaptureVideoOrientation orientation;
@end
@implementation SGRecordSuccessPreview
- (void)setImage:(UIImage *)image videoPath:(NSString *)videoPath captureVideoOrientation:(AVCaptureVideoOrientation)orientation{
_image = image;
_videoPath = videoPath;
_orientation = orientation;
self.backgroundColor = [UIColor blackColor];
if (_image && !videoPath) {
_isPhoto = YES;
}
[self setupUI];
}
- (void)setupUI{
if (_isPhoto) {
UIImageView *imageview = [[UIImageView alloc]initWithImage:_image];
imageview.frame = self.bounds;
if (_orientation == AVCaptureVideoOrientationLandscapeRight || _orientation ==AVCaptureVideoOrientationLandscapeLeft) {
imageview.contentMode = UIViewContentModeScaleAspectFit;
}
[self addSubview:imageview];
} else {
#if __IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_9_0
MPMoviePlayerController *mpPlayer = [[MPMoviePlayerController alloc]initWithContentURL:[NSURL fileURLWithPath:_videoPath]];
mpPlayer.view.frame = self.bounds;
mpPlayer.controlStyle = MPMovieControlStyleNone;
mpPlayer.movieSourceType = MPMovieSourceTypeFile;
mpPlayer.repeatMode = MPMovieRepeatModeOne;
[mpPlayer prepareToPlay];
[mpPlayer play];
[self addSubview:mpPlayer.view];
#else
AVPlayerViewController *avPlayer = [[AVPlayerViewController alloc]init];
avPlayer.view.frame = self.bounds;
avPlayer.showsPlaybackControls = NO;
avPlayer.videoGravity = AVLayerVideoGravityResizeAspect;
avPlayer.player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:_videoPath]];
[avPlayer.player play];
[self addSubview:avPlayer.view];
_avPlayer = avPlayer;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(replay) name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
#endif
}
_width = 148/2;
_distance = 120/2;
//
UIButton *cancelButton = [UIButton image:@"短视频_重拍" target:self action:@selector(cancel)];
cancelButton.bounds = CGRectMake(0, 0, _width, _width);
cancelButton.center = CGPointMake(self.center.x, self.bounds.size.height -_distance - _width/2);
[self addSubview:cancelButton];
_cancelButton = cancelButton;
//
UIButton *sendButton = [UIButton image:@"短视频_完成" target:self action:@selector(send)];
sendButton.bounds = CGRectMake(0, 0, _width, _width);
sendButton.center = CGPointMake(self.center.x, self.bounds.size.height - _distance - _width/2);
[self addSubview:sendButton];
_sendButton = sendButton;
}
-(void)layoutSubviews{
[super layoutSubviews];
NSLog(@"预览图");
[UIView animateWithDuration:0.25 animations:^{
_cancelButton.bounds = CGRectMake(0, 0, _width, _width);
_cancelButton.center = CGPointMake(self.bounds.size.width / 4, self.bounds.size.height -_distance - _width/2);
_sendButton.bounds = CGRectMake(0, 0, _width, _width);
_sendButton.center = CGPointMake(self.bounds.size.width / 4 * 3, self.bounds.size.height - _distance - _width/2);
}];
}
#if __IPHONE_OS_VERSION_MAX_ALLOWED > __IPHONE_8_4
- (void)replay{
if (_avPlayer) {
[_avPlayer.player seekToTime:CMTimeMake(0, 1)];
[_avPlayer.player play];
}
}
#endif
- (void)cancel{
if (self.cancelBlcok) {
self.cancelBlcok();
}
}
- (void)send{
if (self.sendBlock) {
self.sendBlock(_image, _videoPath);
}
}
- (void)dealloc{
[[NSNotificationCenter defaultCenter] removeObserver:self];
NSLog(@"%s",__func__);
}
@end

src/ios/SGRecord/SGRecordViewController.h View File

@@ -0,0 +1,13 @@
//
// SGRecordViewController.h
// Short video recording
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface SGRecordViewController : UIViewController
@end
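A minimal sketch of presenting the recorder, e.g. from the plugin class referenced in plugin.xml (CapturePlugin.m is not shown in this diff, so treat the presenting code as an assumption):

SGRecordViewController *recordVC = [[SGRecordViewController alloc] init];
recordVC.modalPresentationStyle = UIModalPresentationFullScreen;
// From a CDVPlugin subclass, self.viewController is the hosting view controller
[self.viewController presentViewController:recordVC animated:YES completion:nil];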

src/ios/SGRecord/SGRecordViewController.m View File

@@ -0,0 +1,459 @@
//
// SGRecordViewController.m
//
//
// Created by lihaohao on 2017/5/19.
// Copyright © 2017 . All rights reserved.
//
#import "SGRecordViewController.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import "SGRecordManager.h"
#import "SGRecordSuccessPreview.h"
#import "SGRecordProgressView.h"
#import "UIButton+Convenience.h"
#import "SGMotionManager.h"
#define WEAKSELF __weak typeof(self) weakSelf = self;
#define STRONGSELF __strong typeof(weakSelf) strongSelf = weakSelf;
#define TIMER_INTERVAL 0.5 //timer tick interval in seconds
#define RECORD_TIME 0.5 //long-press threshold before video recording starts (s)
#define VIDEO_MIN_TIME 3 //minimum video duration (s)
@interface SGRecordViewController ()<SGRecordEngineDelegate,UIGestureRecognizerDelegate,SGMotionManagerDeviceOrientationDelegate>
@property (nonatomic ,strong) SGRecordManager *recordManger;
@property (nonatomic ,assign) BOOL allowRecord;//whether recording is currently allowed
@property (nonatomic ,strong) NSTimer *timer;//long-press timer
@property (nonatomic ,assign) NSTimeInterval timeInterval;//accumulated press duration
@property (nonatomic ,assign) BOOL isEndRecord;//set once recording has been stopped automatically
@property (nonatomic ,strong) UIImageView *focusView;//focus indicator
@property (nonatomic ,strong) SGRecordSuccessPreview *preview;//photo/video preview shown after capture
@property (nonatomic ,strong) SGRecordProgressView *recordButton;//record button with progress ring
@property (nonatomic ,strong) UILabel *tipLabel;//"tap to shoot, long-press to record" hint
@property (nonatomic ,strong) UIButton *exitButton;//exit button
@property (nonatomic ,strong) UIButton *switchButton;//camera switch button
@property (nonatomic ,assign) UIDeviceOrientation lastDeviceOrientation;//last reported device orientation
@property (nonatomic ,strong) UILabel *alartLabel;//"recording too short" alert, shown when the clip is under 3s
@end
@implementation SGRecordViewController
#pragma mark -
#pragma mark -Life Cycle
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
self.view.backgroundColor = [UIColor grayColor];
self.title = @"拍摄或录像";
self.allowRecord = YES;
[self setupUI];
}
- (void)viewDidAppear:(BOOL)animated{
[super viewDidAppear:animated];
//Start listening for device orientation changes
[[SGMotionManager sharedManager] startDeviceMotionUpdates];
[SGMotionManager sharedManager].delegate = self;
#if __IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_9_0
[[UIApplication sharedApplication] setStatusBarHidden:YES];
#endif
if (_recordManger == nil) {
[self.recordManger previewLayer].frame = self.view.bounds;
[self.view.layer insertSublayer:[self.recordManger previewLayer] atIndex:0];
}
[self.recordManger startUp];
}
- (void)viewDidDisappear:(BOOL)animated{
[super viewDidDisappear:animated];
//Stop listening for device orientation changes
[SGMotionManager sharedManager].delegate = nil;
[[SGMotionManager sharedManager] stopDeviceMotionUpdates];
#if __IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_9_0
[[UIApplication sharedApplication] setStatusBarHidden:NO];
#endif
[self removeTimer];
[self.recordManger shutdown];
}
- (void)exitRecordController{
[self dismissViewControllerAnimated:YES completion:nil];
}
#if __IPHONE_OS_VERSION_MAX_ALLOWED > __IPHONE_8_4
- (BOOL)prefersStatusBarHidden{
return YES;
}
#endif
#pragma mark -
#pragma mark - Take photo
- (void)takephoto{
__weak __typeof__(self) weakSelf = self;
[self.recordManger takePhoto:^(UIImage *image) {
NSLog(@"拍照结束:%@",image);
//UIImageWriteToSavedPhotosAlbum(image, nil, nil, NULL);
__strong __typeof(self) strongSelf = weakSelf;
dispatch_async(dispatch_get_main_queue(), ^{
[strongSelf.recordManger shutdown];
[strongSelf.preview setImage:image videoPath:nil captureVideoOrientation:[[SGMotionManager sharedManager] currentVideoOrientation]];
});
}];
}
#pragma mark -
#pragma mark - Start / stop recording
- (void)startRecord{
if (self.recordManger.isCapturing) {
[self.recordManger resumeCapture];
}else {
[self.recordManger startCapture];
}
}
- (void)stopRecord:(BOOL)isSuccess{
__weak __typeof__(self) weakSelf = self;
_isEndRecord = NO;
[self.recordButton setProgress:0];
if (isSuccess) {
[self hideAllOperationViews];
} else {
[self showExitAndSwitchViews];
}
[self.recordManger stopCaptureWithStatus:isSuccess handler:^(UIImage *movieImage,NSString *filePath) {
NSLog(@"第一帧:image:%@",movieImage);
__strong __typeof(self) strongSelf = weakSelf;
dispatch_async(dispatch_get_main_queue(), ^{
[strongSelf.recordManger shutdown];
[strongSelf.preview setImage:nil videoPath:filePath captureVideoOrientation:[[SGMotionManager sharedManager] currentVideoOrientation]];
});
}];
}
#pragma mark -
#pragma mark - Send or retake
//
- (void)sendWithImage:(UIImage *)image videoPath:(NSString *)videoPath{
NSLog(@"发送");
[self exitRecordController];
}
//
- (void)cancel{
NSLog(@"重拍");
if (_preview) {
[_preview removeFromSuperview];
_preview = nil;
}
[self.recordButton resetScale];
[self.recordButton setEnabled:YES];
[self showAllOperationViews];
[self.recordManger startUp];
}
#pragma mark -
#pragma mark - setget
- (SGRecordManager *)recordManger {
if (!_recordManger) {
_recordManger = [[SGRecordManager alloc] init];
_recordManger.delegate = self;
}
return _recordManger;
}
- (NSTimer *)timer{
if (!_timer) {
_timer = [NSTimer scheduledTimerWithTimeInterval:TIMER_INTERVAL target:self selector:@selector(caculateTime) userInfo:nil repeats:YES];
}
return _timer;
}
- (UIImageView *)focusView{
if (!_focusView) {
_focusView = [[UIImageView alloc]initWithImage:[UIImage imageNamed:@"camera_focus_red"]];
_focusView.bounds = CGRectMake(0, 0, 40, 40);
[self.view addSubview:_focusView];
}
return _focusView;
}
- (SGRecordSuccessPreview *)preview{
if (!_preview) {
_preview = [[SGRecordSuccessPreview alloc]initWithFrame:self.view.bounds];
__weak __typeof__(self) weakSelf = self;
[_preview setSendBlock:^(UIImage *image,NSString *videoPath){
__strong __typeof(self) strongSelf = weakSelf;
[strongSelf sendWithImage:image videoPath:videoPath];
}];
[_preview setCancelBlcok:^{
__strong __typeof(self) strongSelf = weakSelf;
[strongSelf cancel];
}];
[self.view addSubview:_preview];
}
return _preview;
}
- (UILabel *)alartLabel{
if (!_alartLabel) {
_alartLabel = [[UILabel alloc]init];
_alartLabel.text = @"拍摄时间太短,不少于3s";
_alartLabel.font = [UIFont systemFontOfSize:15];
_alartLabel.textColor = [UIColor whiteColor];
_alartLabel.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.6];
_alartLabel.textAlignment = NSTextAlignmentCenter;
_alartLabel.layer.cornerRadius = 19;
_alartLabel.clipsToBounds = YES;
CGFloat width = [_alartLabel.text boundingRectWithSize:CGSizeMake(MAXFLOAT, 76/2) options:NSStringDrawingUsesLineFragmentOrigin attributes:@{NSFontAttributeName:[UIFont systemFontOfSize:15]} context:nil].size.width;
_alartLabel.bounds = CGRectMake(0, 0, width + 30, 76/2);
_alartLabel.center = CGPointMake(self.view.center.x, _tipLabel.center.y - _tipLabel.bounds.size.height/2 - 48/2 - _tipLabel.bounds.size.height/2);
[self.view addSubview:_alartLabel];
}
return _alartLabel;
}
#pragma mark -
#pragma mark -Set Up UI
- (void)setupUI{
//Exit button
UIButton *exitButton = [UIButton image:@"短视频_关闭" target:self action:@selector(exitRecordController)];
exitButton.frame = CGRectMake(5, 10, 44,44);
[self.view addSubview:exitButton];
_exitButton = exitButton;
//
SGRecordProgressView *recordButton = [[SGRecordProgressView alloc]initWithFrame:CGRectMake(0, 0, 156/2, 156/2)];
recordButton.center = CGPointMake(self.view.center.x, self.view.bounds.size.height - 97);
[recordButton addTarget:self action:@selector(toucheUpInsideOrOutSide:) forControlEvents:UIControlEventTouchUpInside | UIControlEventTouchUpOutside];
[recordButton addTarget:self action:@selector(touchDown:) forControlEvents:UIControlEventTouchDown];
[self.view addSubview:recordButton];
_recordButton = recordButton;
//Hint label: tap to take a photo, long-press to record
UILabel *tipLabel = [[UILabel alloc]init];
tipLabel.bounds = CGRectMake(0, 0, 200, 20);
tipLabel.center = CGPointMake(self.view.center.x, self.view.bounds.size.height - 160 - 13/2);
tipLabel.text = @"点击拍照,长按拍摄";
tipLabel.font = [UIFont systemFontOfSize:13];
tipLabel.textAlignment = NSTextAlignmentCenter;
tipLabel.textColor = [UIColor whiteColor];
[self.view addSubview:tipLabel];
_tipLabel = tipLabel;
//
UIButton *switchButton = [UIButton image:@"短视频_翻转"target:self action:@selector(switchCamara:)];
switchButton.frame = CGRectMake(self.view.bounds.size.width - 44 - 5 , 10, 44, 44);
[self.view addSubview:switchButton];
_switchButton = switchButton;
//
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapGesture:)];
tapGesture.delegate = self;
[self.view addGestureRecognizer:tapGesture];
}
#pragma mark -
#pragma mark - Tap to focus
- (void)tapGesture:(UITapGestureRecognizer *)tapGesture{
NSLog(@"点击屏幕");
if (!self.recordManger.isRunning) return;
CGPoint point = [tapGesture locationInView:self.view];
[self setFocusCursorWithPoint:point];
CGPoint camaraPoint = [self.recordManger.previewLayer captureDevicePointOfInterestForPoint:point];
[self.recordManger focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:camaraPoint];
}
/**
Show the focus indicator at a point.
@param point tap location in the view's coordinate space
*/
-(void)setFocusCursorWithPoint:(CGPoint)point{
self.focusView.center=point;
self.focusView.transform=CGAffineTransformMakeScale(1.5, 1.5);
self.focusView.alpha=1.0;
[UIView animateWithDuration:1.0 animations:^{
self.focusView.transform=CGAffineTransformIdentity;
} completion:^(BOOL finished) {
self.focusView.alpha=0;
}];
}
//
- (void)switchCamara:(UIButton *)button{
button.selected = !button.selected;
[self.recordManger changeCameraInputDeviceisFront:button.selected];
}
#pragma mark -
#pragma mark - Show / hide controls
//
- (void)showAllOperationViews{
dispatch_async(dispatch_get_main_queue(), ^{
[self.recordButton setHidden:NO];
[self.exitButton setHidden:NO];
[self.tipLabel setHidden:NO];
[self.switchButton setHidden:NO];
});
}
//
- (void)hideAllOperationViews{
dispatch_async(dispatch_get_main_queue(), ^{
[self.recordButton setHidden:YES];
[self.exitButton setHidden:YES];
[self.tipLabel setHidden:YES];
[self.switchButton setHidden:YES];
});
}
//Show the exit and camera-switch buttons
- (void)showExitAndSwitchViews{
dispatch_async(dispatch_get_main_queue(), ^{
[self.exitButton setHidden:NO];
[self.switchButton setHidden:NO];
});
}
//Hide the exit and camera-switch buttons
- (void)hideExitAndSwitchViews{
dispatch_async(dispatch_get_main_queue(), ^{
[self.exitButton setHidden:YES];
[self.switchButton setHidden:YES];
});
}
#pragma mark -
#pragma mark - Long-press timer
//
- (void)caculateTime{
_timeInterval += TIMER_INTERVAL;
NSLog(@"计时器:_timeInterval:%f",_timeInterval);
if (_timeInterval == RECORD_TIME) {
NSLog(@"开始录制视频");
[self.recordButton setScale];
[self startRecord];
} else if (_timeInterval >= RECORD_TIME + VIDEO_MIN_TIME) {
[self removeTimer];
}
}
//
- (void)touchDown:(UIButton *)button{
NSLog(@"按下按钮");
[self hideExitAndSwitchViews];
[self removeTimer];
[self timer];
}
//
- (void)toucheUpInsideOrOutSide:(UIButton *)button{
NSLog(@"抬起按钮:__timeInterval==:%f",_timeInterval);
[self removeTimer];
if (_timeInterval >= RECORD_TIME && _timeInterval < RECORD_TIME + VIDEO_MIN_TIME) {
//
NSLog(@"录制时间太短");
[self stopRecord:NO];
[self alart];//
[self.recordButton resetScale];
} else if (_timeInterval < RECORD_TIME) {
//
NSLog(@"拍照");
[self.recordButton setEnabled:NO];
[self hideAllOperationViews];
[self takephoto];
} else {
//
NSLog(@"结束录制");
if (!_isEndRecord) {
[self.recordButton setEnabled:NO];
[self stopRecord:YES];
}
}
_timeInterval = 0;
}
//
- (void)removeTimer{
if (_timer) {
[_timer invalidate];
_timer = nil;
}
}
#pragma mark -
#pragma mark - Camera / microphone authorization
- (void)authorizationStatus{
AVAuthorizationStatus videoStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
if (granted) {
NSLog(@"允许访问相机权限");
} else {
NSLog(@"不允许相机访问");
}
}];
AVAuthorizationStatus audioStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
if (granted) {
NSLog(@"允许麦克风权限");
} else {
NSLog(@"不允麦克风访问");
}
}];
}
#pragma mark -
#pragma mark -SGRecordEngineDelegate()
- (void)recordProgress:(CGFloat)progress{
NSLog(@"progress:%f",progress);
if (progress >= 0) {
[_recordButton setProgress:progress];
}
if ((int)progress == 1) {
_isEndRecord = YES;
[self stopRecord:YES];
}
}
#pragma mark -
#pragma mark -UIGestureRecognizerDelegate
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch{
CGPoint point = [touch locationInView:self.view];
if (point.y >= self.view.bounds.size.height - 190) {
return NO;
}
return YES;
}
#pragma mark -
#pragma mark - Rotation
-(BOOL)shouldAutorotate{
return NO;
}
#pragma mark -
#pragma mark -SGMotionManagerDeviceOrientationDelegate -->
-(void)motionManagerDeviceOrientation:(UIDeviceOrientation)deviceOrientation{
if (_lastDeviceOrientation == deviceOrientation) return;
CGFloat angle = 0;
switch (deviceOrientation) {
case UIDeviceOrientationPortrait:
angle = 0;
break;
case UIDeviceOrientationPortraitUpsideDown:
angle = M_PI;
break;
case UIDeviceOrientationLandscapeLeft:
angle = M_PI_2;
break;
case UIDeviceOrientationLandscapeRight:
angle = -M_PI_2;
break;
default:
break;
}
[UIView animateWithDuration:0.25 animations:^{
_exitButton.transform = CGAffineTransformRotate(CGAffineTransformIdentity, angle);
_switchButton.transform = CGAffineTransformRotate(CGAffineTransformIdentity, angle);
}];
_lastDeviceOrientation = deviceOrientation;
NSLog(@"deviceOrientation:%ld",(long)deviceOrientation);
}
- (void)alart{
[self.view bringSubviewToFront:self.alartLabel];
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
if (_alartLabel) {
[UIView animateWithDuration:0.25 animations:^{
_alartLabel.alpha = 0;
} completion:^(BOOL finished) {
[_alartLabel removeFromSuperview];
_alartLabel = nil;
}];
}
});
}
#pragma mark -
#pragma mark -dealloc
- (void)dealloc{
NSLog(@"%s",__func__);
}
@end

src/ios/SGRecord/UIButton+Convenience.h View File

@@ -0,0 +1,32 @@
//
// UIButton+Convenience.h
// Short video recording
//
// Created by lihaohao on 2017/5/22.
// Copyright © 2017 低调的魅力. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface UIButton (Convenience)
/**
Convenience constructor for an image button.
@param imageName name of the image asset
@param target target of the touch-up-inside action
@param action selector to call on the target
@return a configured UIButton
*/
+ (UIButton *)image:(NSString *)imageName target:(id)target action:(SEL)action;
/**
Convenience constructor for a title button.
@param title button title
@param target target of the touch-up-inside action
@param action selector to call on the target
@return a configured UIButton
*/
+ (UIButton *)title:(NSString *)title target:(id)target action:(SEL)action;
@end
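A minimal sketch of the two convenience constructors (the selectors are placeholders; the image name is one of the assets referenced elsewhere in this commit):

UIButton *closeButton = [UIButton image:@"短视频_关闭" target:self action:@selector(dismissTapped)];
UIButton *okButton    = [UIButton title:@"OK" target:self action:@selector(confirmTapped)];
[self.view addSubview:closeButton];
[self.view addSubview:okButton];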

src/ios/SGRecord/UIButton+Convenience.m View File

@@ -0,0 +1,24 @@
//
// UIButton+Convenience.m
//
//
// Created by lihaohao on 2017/5/22.
// Copyright © 2017 . All rights reserved.
//
#import "UIButton+Convenience.h"
@implementation UIButton (Convenience)
+ (UIButton *)image:(NSString *)imageName target:(id)target action:(SEL)action{
UIButton *button = [self buttonWithType:UIButtonTypeCustom];
[button setImage:[UIImage imageNamed:imageName] forState:UIControlStateNormal];
[button addTarget:target action:action forControlEvents:UIControlEventTouchUpInside];
return button;
}
+ (UIButton *)title:(NSString *)title target:(id)target action:(SEL)action{
UIButton *button = [self buttonWithType:UIButtonTypeCustom];
[button setTitle:title forState:UIControlStateNormal];
[button addTarget:target action:action forControlEvents:UIControlEventTouchUpInside];
return button;
}
@end