iOS Development: A Custom Camera Example (WeChat-Style)
There are plenty of custom-camera examples online; this is just a small demo I put together on short notice, for reference only.
It uses the following frameworks:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
Before using it, the relevant privacy permissions need to be added to Info.plist:
Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Camera Usage Description
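For reference, these display names correspond to the raw keys NSMicrophoneUsageDescription, NSPhotoLibraryUsageDescription and NSCameraUsageDescription. A minimal Info.plist fragment might look like this (the description strings below are placeholders, not from the original demo):
<key>NSCameraUsageDescription</key>
<string>Used to take photos and record video</string>
<key>NSMicrophoneUsageDescription</key>
<string>Used to record sound while filming</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>Used to save photos and videos to your library</string>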
I modeled this demo on WeChat: tap to take a photo, press and hold to record a video, and the video plays back as soon as recording finishes. For playback I wrapped up a simple player:
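The original post only includes the implementation file. Inferred from how the class is used below, the header would presumably look roughly like this (a sketch, not the original HAVPlayer.h):
#import <UIKit/UIKit.h>
@interface HAVPlayer : UIView
//URL of the video to play; assigning a new URL switches to it and starts playback
@property (nonatomic, strong) NSURL *videoUrl;
//create the player inside bgView and, if url is non-nil, start playing it
- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url;
//pause playback if it is currently running
- (void)stopPlayer;
@end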
The .m file:
#import "HAVPlayer.h"
#import <AVFoundation/AVFoundation.h>
@interface HAVPlayer ()
@property (nonatomic,strong) AVPlayer *player;//the player object
@end
@implementation HAVPlayer
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
// Drawing code
}
*/
- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url {
if (self = [self initWithFrame:frame]) {
//create the player layer
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
playerLayer.frame = self.bounds;
[self.layer addSublayer:playerLayer];
if (url) {
self.videoUrl = url;
}
[bgView addSubview:self];
}
return self;
}
- (void)dealloc {
[self removeAvPlayerNtf];
[self stopPlayer];
self.player = nil;
}
- (AVPlayer *)player {
if (!_player) {
_player = [AVPlayer playerWithPlayerItem:[self getAVPlayerItem]];
[self addAVPlayerNtf:_player.currentItem];
}
return _player;
}
- (AVPlayerItem *)getAVPlayerItem {
AVPlayerItem *playerItem=[AVPlayerItem playerItemWithURL:self.videoUrl];
return playerItem;
}
- (void)setVideoUrl:(NSURL *)videoUrl {
_videoUrl = videoUrl;
[self removeAvPlayerNtf];
[self nextPlayer];
}
- (void)nextPlayer {
[self.player seekToTime:CMTimeMakeWithSeconds(0, _player.currentItem.duration.timescale)];
[self.player replaceCurrentItemWithPlayerItem:[self getAVPlayerItem]];
[self addAVPlayerNtf:self.player.currentItem];
if (self.player.rate == 0) {
[self.player play];
}
}
- (void) addAVPlayerNtf:(AVPlayerItem *)playerItem {
//observe the status property
[playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
//observe the loadedTimeRanges (buffering) property
[playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.player.currentItem];
}
- (void)removeAvPlayerNtf {
AVPlayerItem *playerItem = self.player.currentItem;
[playerItem removeObserver:self forKeyPath:@"status"];
[playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
- (void)stopPlayer {
if (self.player.rate == 1) {
[self.player pause];//pause if it is currently playing
}
}
/**
* Observe the player status via KVO
*
* @param keyPath the observed key path
* @param object the observed object
* @param change the change dictionary
* @param context context
*/
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{
AVPlayerItem *playerItem = object;
if ([keyPath isEqualToString:@"status"]) {
AVPlayerStatus status= [[change objectForKey:@"new"] intValue];
if(status==AVPlayerStatusReadyToPlay){
NSLog(@"正在播放...,視頻總長度:%.2f",CMTimeGetSeconds(playerItem.duration));
}
}else if([keyPath isEqualToString:@"loadedTimeRanges"]){
NSArray *array=playerItem.loadedTimeRanges;
CMTimeRange timeRange = [array.firstObject CMTimeRangeValue];//the time range buffered in this pass
float startSeconds = CMTimeGetSeconds(timeRange.start);
float durationSeconds = CMTimeGetSeconds(timeRange.duration);
NSTimeInterval totalBuffer = startSeconds + durationSeconds;//total buffered duration
NSLog(@"Total buffered: %.2f",totalBuffer);
}
}
- (void)playbackFinished:(NSNotification *)ntf {
Plog(@"視頻播放完成");
[self.player seekToTime:CMTimeMake(0, 1)];
[self.player play];
}
@end
WeChat also shows a circular progress arc around the bottom button while it is held down; here is that progress view:
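Again, only the .m is shown. A plausible header, inferred from the properties and methods used below and in the camera controller (a sketch, not the original HProgressView.h):
#import <UIKit/UIKit.h>
@interface HProgressView : UIView
//total recording time in seconds; setting it resets the ring and starts the countdown animation
@property (nonatomic, assign) NSInteger timeMax;
//hide the ring and mark the progress as finished
- (void)clearProgress;
@end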
The .m file:
#import "HProgressView.h"
@interface HProgressView ()
/**
* Progress value, between 0 and 1.0
*/
@property (nonatomic,assign)CGFloat progressValue;
@property (nonatomic, assign) CGFloat currentTime;
@end
@implementation HProgressView
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
// Drawing code
CGContextRef ctx = UIGraphicsGetCurrentContext();//get the current graphics context
Plog(@"width = %f",self.frame.size.width);
CGPoint center = CGPointMake(self.frame.size.width/2.0, self.frame.size.width/2.0); //center of the circle
CGFloat radius = self.frame.size.width/2.0-5; //radius
CGFloat startA = - M_PI_2; //start angle of the arc
CGFloat endA = -M_PI_2 + M_PI * 2 * _progressValue; //end angle of the arc
UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:center radius:radius startAngle:startA endAngle:endA clockwise:YES];
CGContextSetLineWidth(ctx, 10); //line width
[[UIColor whiteColor] setStroke]; //stroke color
CGContextAddPath(ctx, path.CGPath); //add the path to the context
CGContextStrokePath(ctx); //stroke (render) the path
}
- (void)setTimeMax:(NSInteger)timeMax {
_timeMax = timeMax;
self.currentTime = 0;
self.progressValue = 0;
[self setNeedsDisplay];
self.hidden = NO;
[self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
}
- (void)clearProgress {
_currentTime = _timeMax;
self.hidden = YES;
}
- (void)startProgress {
_currentTime += 0.1;
if (_timeMax > _currentTime) {
_progressValue = _currentTime/_timeMax;
Plog(@"progress = %f",_progressValue);
[self setNeedsDisplay];
[self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
}
if (_timeMax <= _currentTime) {
[self clearProgress];
}
}
@end
Next comes the camera view controller. Since this was written in a hurry it uses a xib, so please don't use it as-is; here is the .m file:
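The controller's header isn't included in the post either. Judging from the usage example at the end, it presumably declares something like this (a sketch, not the original HVideoViewController.h):
#import <UIKit/UIKit.h>
@interface HVideoViewController : UIViewController
//maximum recording length in seconds (a value of 0 falls back to 60 in viewDidLoad)
@property (assign, nonatomic) NSInteger HSeconds;
//called on confirm with either an NSURL (the video saved to the photo library) or a UIImage (the photo)
@property (copy, nonatomic) void (^takeBlock)(id item);
@end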
#import "HVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "HAVPlayer.h"
#import "HProgressView.h"
#import <Foundation/Foundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);
@interface HVideoViewController ()<AVCaptureFileOutputRecordingDelegate>
//Tap to take a photo, press and hold to record video
@property (strong, nonatomic) IBOutlet UILabel *labelTipTitle;
//movie file output
@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
//still image output (unused)
//@property (strong,nonatomic) AVCaptureStillImageOutput *captureStillImageOutput;//still image output
//provides input data from the AVCaptureDevice
@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;
//background task identifier
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
@property (assign,nonatomic) UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier;
@property (weak, nonatomic) IBOutlet UIImageView *focusCursor; //focus cursor
//moves data between the input and output devices
@property(nonatomic)AVCaptureSession *session;
//preview layer that shows the captured image in real time
@property(nonatomic)AVCaptureVideoPreviewLayer *previewLayer;
@property (strong, nonatomic) IBOutlet UIButton *btnBack;
//re-record
@property (strong, nonatomic) IBOutlet UIButton *btnAfresh;
//confirm
@property (strong, nonatomic) IBOutlet UIButton *btnEnsure;
//switch camera
@property (strong, nonatomic) IBOutlet UIButton *btnCamera;
@property (strong, nonatomic) IBOutlet UIImageView *bgView;
//tracks the recording time; the default maximum is 60 seconds
@property (assign, nonatomic) NSInteger seconds;
//URL where the recorded video is saved
@property (strong, nonatomic) NSURL *saveVideoUrl;
//whether a focus animation is in progress
@property (assign, nonatomic) BOOL isFocus;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *afreshCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *ensureCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *backCenterX;
//video playback
@property (strong, nonatomic) HAVPlayer *player;
@property (strong, nonatomic) IBOutlet HProgressView *progressView;
//whether this is a video: YES means recording video, NO means taking a photo
@property (assign, nonatomic) BOOL isVideo;
@property (strong, nonatomic) UIImage *takeImage;
@property (strong, nonatomic) UIImageView *takeImageView;
@property (strong, nonatomic) IBOutlet UIImageView *imgRecord;
@end
//a press longer than this is treated as video, otherwise as a photo
#define TimeMax 1
@implementation HVideoViewController
-(void)dealloc{
[self removeNotification];
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"];
self.backCenterX.constant = -(SCREEN_WIDTH/2/2)-image.size.width/2/2;
self.progressView.layer.cornerRadius = self.progressView.frame.size.width/2;
if (self.HSeconds == 0) {
self.HSeconds = 60;
}
[self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4];
}
- (void)hiddenTipsLabel {
self.labelTipTitle.hidden = YES;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
[[UIApplication sharedApplication] setStatusBarHidden:YES];
[self customCamera];
[self.session startRunning];
}
-(void)viewDidAppear:(BOOL)animated{
[super viewDidAppear:animated];
}
-(void)viewDidDisappear:(BOOL)animated{
[super viewDidDisappear:animated];
[self.session stopRunning];
}
- (void)viewWillDisappear:(BOOL)animated {
[super viewWillDisappear:animated];
[[UIApplication sharedApplication] setStatusBarHidden:NO];
}
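//builds the capture session: high preset, camera and microphone inputs, a movie file output and a live preview layer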
- (void)customCamera {
//initialize the session that ties the inputs and outputs together
self.session = [[AVCaptureSession alloc] init];
//set the resolution (the highest the device supports)
if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
self.session.sessionPreset = AVCaptureSessionPresetHigh;
}
//get the back camera
AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
//get an audio input device
AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
//initialize the video device input
NSError *error = nil;
self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
if (error) {
Plog(@"取得設(shè)備輸入對象時(shí)出錯(cuò),錯(cuò)誤原因:%@",error.localizedDescription);
return;
}
//add the audio input
error = nil;
AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:&error];
if (error) {
NSLog(@"取得設(shè)備輸入對象時(shí)出錯(cuò),錯(cuò)誤原因:%@",error.localizedDescription);
return;
}
//output object
self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];//movie output
//add the input devices to the session
if ([self.session canAddInput:self.captureDeviceInput]) {
[self.session addInput:self.captureDeviceInput];
[self.session addInput:audioCaptureDeviceInput];
//enable video stabilization
AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported]) {
connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
}
}
//add the output device to the session
if ([self.session canAddOutput:self.captureMovieFileOutput]) {
[self.session addOutput:self.captureMovieFileOutput];
}
//create the video preview layer to show the camera feed in real time
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
self.previewLayer.frame = self.view.bounds;//CGRectMake(0, 0, self.view.width, self.view.height);
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//fill mode
[self.bgView.layer addSublayer:self.previewLayer];
[self addNotificationToCaptureDevice:captureDevice];
[self addGenstureRecognizer];
}
- (IBAction)onCancelAction:(UIButton *)sender {
[self dismissViewControllerAnimated:YES completion:^{
[Utility hideProgressDialog];
}];
}
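//touch down on the record button starts recording to a temporary movie file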
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
if ([[touches anyObject] view] == self.imgRecord) {
Plog(@"開始錄制");
//根據(jù)設(shè)備輸出獲得連接
AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeAudio];
//根據(jù)連接取得設(shè)備輸出的數(shù)據(jù)
if (![self.captureMovieFileOutput isRecording]) {
//如果支持多任務(wù)則開始多任務(wù)
if ([[UIDevice currentDevice] isMultitaskingSupported]) {
self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
}
if (self.saveVideoUrl) {
[[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
}
//keep the recording orientation in sync with the preview layer
connection.videoOrientation = [self.previewLayer connection].videoOrientation;
NSString *outputFilePath=[NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
NSLog(@"save path is :%@",outputFilePath);
NSURL *fileUrl=[NSURL fileURLWithPath:outputFilePath];
NSLog(@"fileUrl:%@",fileUrl);
[self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
} else {
[self.captureMovieFileOutput stopRecording];
}
}
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
if ([[touches anyObject] view] == self.imgRecord) {
Plog(@"結(jié)束觸摸");
if (!self.isVideo) {
[self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
} else {
[self endRecord];
}
}
}
- (void)endRecord {
[self.captureMovieFileOutput stopRecording];//stop recording
}
- (IBAction)onAfreshAction:(UIButton *)sender {
Plog(@"重新錄制");
[self recoverLayout];
}
- (IBAction)onEnsureAction:(UIButton *)sender {
Plog(@"確定 這里進(jìn)行保存或者發(fā)送出去");
if (self.saveVideoUrl) {
WS(weakSelf)
[Utility showProgressDialogText:@"Processing video..."];
ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
[assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
Plog(@"outputUrl:%@",weakSelf.saveVideoUrl);
[[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
if (weakSelf.lastBackgroundTaskIdentifier!= UIBackgroundTaskInvalid) {
[[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
}
if (error) {
Plog(@"保存視頻到相簿過程中發(fā)生錯(cuò)誤,錯(cuò)誤信息:%@",error.localizedDescription);
[Utility showAllTextDialog:KAppDelegate.window Text:@"保存視頻到相冊發(fā)生錯(cuò)誤"];
} else {
if (weakSelf.takeBlock) {
weakSelf.takeBlock(assetURL);
}
Plog(@"成功保存視頻到相簿.");
[weakSelf onCancelAction:nil];
}
}];
} else {
//photo
UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
if (self.takeBlock) {
self.takeBlock(self.takeImage);
}
[self onCancelAction:nil];
}
}
//switch between the front and back cameras
- (IBAction)onCameraAction:(UIButton *)sender {
Plog(@"Switch camera");
AVCaptureDevice *currentDevice=[self.captureDeviceInput device];
AVCaptureDevicePosition currentPosition=[currentDevice position];
[self removeNotificationFromCaptureDevice:currentDevice];
AVCaptureDevice *toChangeDevice;
AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;//front
if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
toChangePosition = AVCaptureDevicePositionBack;//back
}
toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
[self addNotificationToCaptureDevice:toChangeDevice];
//create the device input to switch to
AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil];
//always call beginConfiguration before changing the session configuration, and commitConfiguration once the changes are done
[self.session beginConfiguration];
//remove the old input
[self.session removeInput:self.captureDeviceInput];
//add the new input
if ([self.session canAddInput:toChangeDeviceInput]) {
[self.session addInput:toChangeDeviceInput];
self.captureDeviceInput = toChangeDeviceInput;
}
//commit the session configuration
[self.session commitConfiguration];
}
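//called once per second while recording: counts down self.seconds, flags the capture as video once TimeMax is exceeded, and stops recording when time runs out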
- (void)onStartTranscribe:(NSURL *)fileURL {
if ([self.captureMovieFileOutput isRecording]) {
-- self.seconds;
if (self.seconds > 0) {
if (self.HSeconds - self.seconds >= TimeMax && !self.isVideo) {
self.isVideo = YES;//held longer than TimeMax, so treat this as video recording
self.progressView.timeMax = self.seconds;
}
[self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
} else {
if ([self.captureMovieFileOutput isRecording]) {
[self.captureMovieFileOutput stopRecording];
}
}
}
}
#pragma mark - Movie file output delegate
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
Plog(@"開始錄制...");
self.seconds = self.HSeconds;
[self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
Plog(@"視頻錄制完成.");
[self changeLayout];
if (self.isVideo) {
self.saveVideoUrl = outputFileURL;
if (!self.player) {
self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
} else {
if (outputFileURL) {
self.player.videoUrl = outputFileURL;
self.player.hidden = NO;
}
}
} else {
//photo
self.saveVideoUrl = nil;
[self videoHandlePhoto:outputFileURL];
}
}
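//for a short press (photo), grab the first frame of the just-recorded movie and use it as the photo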
- (void)videoHandlePhoto:(NSURL *)url {
AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
imageGenerator.appliesPreferredTrackTransform = YES; // orient the snapshot correctly
NSError *error = nil;
CMTime time = CMTimeMake(0,30);//time at which to grab the thumbnail. CMTime is a struct describing movie time: the first argument is the time value and the second is the timescale (frames per second), so CMTimeMake can target a specific frame within a second
CMTime actualTime; //the time at which the thumbnail was actually generated
CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actualTime error:&error];
if (error) {
Plog(@"Failed to grab a frame from the video: %@",error.localizedDescription);
}
CMTimeShow(actualTime);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
if (image) {
Plog(@"視頻截取成功");
} else {
Plog(@"視頻截取失敗");
}
self.takeImage = image;//[UIImage imageWithCGImage:cgImage];
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
if (!self.takeImageView) {
self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
[self.bgView addSubview:self.takeImageView];
}
self.takeImageView.hidden = NO;
self.takeImageView.image = self.takeImage;
}
#pragma mark - Notifications
//register for notifications
- (void)setupObservers
{
NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
[notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
}
//cancel recording when the app goes into the background
- (void)applicationDidEnterBackground:(NSNotification *)notification {
[self onCancelAction:nil];
}
/**
* Add notifications for the capture device
*/
-(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
//note: subject-area change monitoring must be enabled on the device before the notification can be observed
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
captureDevice.subjectAreaChangeMonitoringEnabled=YES;
}];
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
//the subject area changed
[notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
-(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
[notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
/**
* Remove all notifications
*/
-(void)removeNotification{
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
[notificationCenter removeObserver:self];
}
-(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{
NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
//session runtime error
[notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}
/**
* Device connected
*
* @param notification the notification object
*/
-(void)deviceConnected:(NSNotification *)notification{
NSLog(@"Device connected...");
}
/**
* Device disconnected
*
* @param notification the notification object
*/
-(void)deviceDisconnected:(NSNotification *)notification{
NSLog(@"Device disconnected.");
}
/**
* Subject area changed
*
* @param notification the notification object
*/
-(void)areaChange:(NSNotification *)notification{
NSLog(@"Subject area changed...");
}
/**
* Session runtime error
*
* @param notification the notification object
*/
-(void)sessionRuntimeError:(NSNotification *)notification{
NSLog(@"A session error occurred.");
}
/**
* Get the camera at the specified position
*
* @param position camera position
*
* @return the camera device
*/
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *camera in cameras) {
if ([camera position] == position) {
return camera;
}
}
return nil;
}
/**
* Common helper for changing device properties
*
* @param propertyChange block that performs the property change
*/
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
AVCaptureDevice *captureDevice= [self.captureDeviceInput device];
NSError *error;
//note: always call lockForConfiguration: before changing device properties, and unlockForConfiguration when done
if ([captureDevice lockForConfiguration:&error]) {
//continuous auto white balance
if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
[captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
}
//enable automatic flash based on lighting conditions
if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
[captureDevice setFlashMode:AVCaptureFlashModeAuto];
}
propertyChange(captureDevice);
[captureDevice unlockForConfiguration];
}else{
NSLog(@"設(shè)置設(shè)備屬性過程發(fā)生錯(cuò)誤,錯(cuò)誤信息:%@",error.localizedDescription);
}
}
/**
* Set the flash mode
*
* @param flashMode flash mode
*/
-(void)setFlashMode:(AVCaptureFlashMode )flashMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFlashModeSupported:flashMode]) {
[captureDevice setFlashMode:flashMode];
}
}];
}
/**
* Set the focus mode
*
* @param focusMode focus mode
*/
-(void)setFocusMode:(AVCaptureFocusMode )focusMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isFocusModeSupported:focusMode]) {
[captureDevice setFocusMode:focusMode];
}
}];
}
/**
* Set the exposure mode
*
* @param exposureMode exposure mode
*/
-(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
if ([captureDevice isExposureModeSupported:exposureMode]) {
[captureDevice setExposureMode:exposureMode];
}
}];
}
/**
* Set the point of interest for focus and exposure
*
* @param point the point of interest
*/
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
[self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
// if ([captureDevice isFocusPointOfInterestSupported]) {
// [captureDevice setFocusPointOfInterest:point];
// }
// if ([captureDevice isExposurePointOfInterestSupported]) {
// [captureDevice setExposurePointOfInterest:point];
// }
if ([captureDevice isExposureModeSupported:exposureMode]) {
[captureDevice setExposureMode:exposureMode];
}
if ([captureDevice isFocusModeSupported:focusMode]) {
[captureDevice setFocusMode:focusMode];
}
}];
}
/**
* Add a tap gesture; tapping focuses at the tapped point
*/
-(void)addGenstureRecognizer{
UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
[self.bgView addGestureRecognizer:tapGesture];
}
-(void)tapScreen:(UITapGestureRecognizer *)tapGesture{
if ([self.session isRunning]) {
CGPoint point= [tapGesture locationInView:self.bgView];
//convert UI coordinates to camera coordinates
CGPoint cameraPoint= [self.previewLayer captureDevicePointOfInterestForPoint:point];
[self setFocusCursorWithPoint:point];
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
}
}
/**
* Position the focus cursor
*
* @param point cursor position
*/
-(void)setFocusCursorWithPoint:(CGPoint)point{
if (!self.isFocus) {
self.isFocus = YES;
self.focusCursor.center=point;
self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
self.focusCursor.alpha = 1.0;
[UIView animateWithDuration:0.5 animations:^{
self.focusCursor.transform = CGAffineTransformIdentity;
} completion:^(BOOL finished) {
[self performSelector:@selector(onHiddenFocusCurSorAction) withObject:nil afterDelay:0.5];
}];
}
}
- (void)onHiddenFocusCurSorAction {
self.focusCursor.alpha=0;
self.isFocus = NO;
}
//called when capturing finishes
- (void)changeLayout {
self.imgRecord.hidden = YES;
self.btnCamera.hidden = YES;
self.btnAfresh.hidden = NO;
self.btnEnsure.hidden = NO;
self.btnBack.hidden = YES;
if (self.isVideo) {
[self.progressView clearProgress];
}
self.afreshCenterX.constant = -(SCREEN_WIDTH/2/2);
self.ensureCenterX.constant = SCREEN_WIDTH/2/2;
[UIView animateWithDuration:0.25 animations:^{
[self.view layoutIfNeeded];
}];
self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
[self.session stopRunning];
}
//called when re-shooting
- (void)recoverLayout {
if (self.isVideo) {
self.isVideo = NO;
[self.player stopPlayer];
self.player.hidden = YES;
}
[self.session startRunning];
if (!self.takeImageView.hidden) {
self.takeImageView.hidden = YES;
}
// self.saveVideoUrl = nil;
self.afreshCenterX.constant = 0;
self.ensureCenterX.constant = 0;
self.imgRecord.hidden = NO;
self.btnCamera.hidden = NO;
self.btnAfresh.hidden = YES;
self.btnEnsure.hidden = YES;
self.btnBack.hidden = NO;
[UIView animateWithDuration:0.25 animations:^{
[self.view layoutIfNeeded];
}];
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
@end
Using it is also quite simple:
- (IBAction)onCameraAction:(UIButton *)sender {
//since this is a demo it uses a xib; adapt it to your own needs. It only demonstrates the approach, so don't drag it straight into a project
HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
ctrl.HSeconds = 30;//maximum recording length
ctrl.takeBlock = ^(id item) {
if ([item isKindOfClass:[NSURL class]]) {
NSURL *videoURL = item;
//video URL
} else {
//photo
}
};
[self presentViewController:ctrl animated:YES completion:nil];
}
The demo download is available here as well: iosCamera_jb51.rar
That's it. It's fairly simple, but I hope it helps. Thanks!
Screenshots of the result are attached as well:

That's all for this article. I hope it helps with your studies, and please keep supporting 腳本之家.