iOS WeChat-Style Camera: Photo Capture and Video Recording

Date: 2022-08-22 09:00:31

There are plenty of custom camera examples online; this is just a quick iOS custom camera I put together (a WeChat-style photo/video recording demo), for reference only:

The following frameworks are used:

#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

To use them, you need to add the corresponding usage descriptions to Info.plist:

Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
Privacy - Camera Usage Description
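
(The raw Info.plist keys behind these entries are NSCameraUsageDescription, NSMicrophoneUsageDescription and NSPhotoLibraryUsageDescription.) The demo itself never checks authorization before starting the capture session; below is a minimal sketch of how you might request camera and microphone access up front with AVFoundation. The helper name requestCaptureAccess is mine, not part of the demo:

#import <AVFoundation/AVFoundation.h>

// Not part of the original demo: request camera and microphone access up front.
// The helper name is illustrative only.
static void requestCaptureAccess(void (^completion)(BOOL cameraGranted, BOOL micGranted)) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL cameraGranted) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL micGranted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                completion(cameraGranted, micGranted); // deliver both results on the main queue
            });
        }];
    }];
}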

I modeled this demo on WeChat: tap to take a photo, press and hold to record a video, and the video plays back immediately after recording finishes. For playback I wrapped a simple player:

The .m file:

#import "HAVPlayer.h"
#import <AVFoundation/AVFoundation.h>
 
@interface HAVPlayer ()
 
@property (nonatomic,strong) AVPlayer *player;//the player object
 
@end
 
@implementation HAVPlayer
 
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
 // Drawing code
}
*/
 
- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url {
 if (self = [self initWithFrame:frame]) {
  //create the player layer
  AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
  playerLayer.frame = self.bounds;
 
  [self.layer addSublayer:playerLayer];
  if (url) {
   self.videoUrl = url;
  }
 
  [bgView addSubview:self];
 }
 return self;
}
 
- (void)dealloc {
 [self removeAvPlayerNtf];
 [self stopPlayer];
 self.player = nil;
}
 
- (AVPlayer *)player {
 if (!_player) {
  _player = [AVPlayer playerWithPlayerItem:[self getAVPlayerItem]];
  [self addAVPlayerNtf:_player.currentItem];
 
 }
 
 return _player;
}
 
- (AVPlayerItem *)getAVPlayerItem {
 AVPlayerItem *playerItem=[AVPlayerItem playerItemWithURL:self.videoUrl];
 return playerItem;
}
 
- (void)setVideoUrl:(NSURL *)videoUrl {
 _videoUrl = videoUrl;
 [self removeAvPlayerNtf];
 [self nextPlayer];
}
 
- (void)nextPlayer {
 [self.player seekToTime:CMTimeMakeWithSeconds(0, _player.currentItem.duration.timescale)];
 [self.player replaceCurrentItemWithPlayerItem:[self getAVPlayerItem]];
 [self addAVPlayerNtf:self.player.currentItem];
 if (self.player.rate == 0) {
  [self.player play];
 }
}
 
- (void) addAVPlayerNtf:(AVPlayerItem *)playerItem {
 //observe the status property
 [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];
 //observe the buffering progress
 [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil];
 
 [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:self.player.currentItem];
}
 
- (void)removeAvPlayerNtf {
 AVPlayerItem *playerItem = self.player.currentItem;
 [playerItem removeObserver:self forKeyPath:@"status"];
 [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"];
 [[NSNotificationCenter defaultCenter] removeObserver:self];
}
 
- (void)stopPlayer {
 if (self.player.rate == 1) {
  [self.player pause];//pause only if currently playing
 }
}
 
/**
 * Observe the player state via KVO
 *
 * @param keyPath observed key path
 * @param object observed object
 * @param change change dictionary
 * @param context context pointer
 */
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{
 AVPlayerItem *playerItem = object;
 if ([keyPath isEqualToString:@"status"]) {
  AVPlayerStatus status= [[change objectForKey:@"new"] intValue];
  if(status==AVPlayerStatusReadyToPlay){
   NSLog(@"正在播放...,视频总长度:%.2f",CMTimeGetSeconds(playerItem.duration));
  }
 }else if([keyPath isEqualToString:@"loadedTimeRanges"]){
  NSArray *array=playerItem.loadedTimeRanges;
  CMTimeRange timeRange = [array.firstObject CMTimeRangeValue];//buffered time range for this pass
  float startSeconds = CMTimeGetSeconds(timeRange.start);
  float durationSeconds = CMTimeGetSeconds(timeRange.duration);
  NSTimeInterval totalBuffer = startSeconds + durationSeconds;//total buffered duration
  NSLog(@"Total buffered: %.2f",totalBuffer);
 }
}
 
- (void)playbackFinished:(NSNotification *)ntf {
 Plog(@"视频播放完成");
 [self.player seekToTime:CMTimeMake(0, 1)];
 [self.player play];
}
 
@end
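
The article only shows HAVPlayer.m. Based on how the class is used from the view controller later on, its header presumably looks roughly like the following sketch (my reconstruction, not the author's actual file):

// HAVPlayer.h is not shown in the article; this is the interface implied by
// the .m above: a UIView subclass that wraps an AVPlayer.
#import <UIKit/UIKit.h>

@interface HAVPlayer : UIView

// URL of the video to play; assigning it switches to a new AVPlayerItem and starts playback.
@property (nonatomic, strong) NSURL *videoUrl;

// Creates the player view, adds it to bgView, and begins playing url.
- (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url;

// Pause playback if the player is currently playing.
- (void)stopPlayer;

@end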

In addition, like in WeChat, a circular arc progress ring appears around the button while you hold it down to record:

The .m file:

#import "HProgressView.h"
 
@interface HProgressView ()
 
/**
 * Progress value, between 0 and 1.0
 */
@property (nonatomic,assign)CGFloat progressValue;
 
@property (nonatomic, assign) CGFloat currentTime;
 
@end
 
@implementation HProgressView
 
 
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
 // Drawing code
 CGContextRef ctx = UIGraphicsGetCurrentContext();//get the current graphics context
 Plog(@"width = %f",self.frame.size.width);
 CGPoint center = CGPointMake(self.frame.size.width/2.0, self.frame.size.width/2.0); //center of the circle
 CGFloat radius = self.frame.size.width/2.0-5; //radius
 CGFloat startA = - M_PI_2; //start angle of the arc
 CGFloat endA = -M_PI_2 + M_PI * 2 * _progressValue; //end angle of the arc
 
 UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:center radius:radius startAngle:startA endAngle:endA clockwise:YES];
 
 CGContextSetLineWidth(ctx, 10); //line width
 [[UIColor whiteColor] setStroke]; //stroke color
 
 CGContextAddPath(ctx, path.CGPath); //add the path to the context
 
 CGContextStrokePath(ctx); //stroke (render) the path
}
 
- (void)setTimeMax:(NSInteger)timeMax {
 _timeMax = timeMax;
 self.currentTime = 0;
 self.progressValue = 0;
 [self setNeedsDisplay];
 self.hidden = NO;
 [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
}
 
- (void)clearProgress {
 _currentTime = _timeMax;
 self.hidden = YES;
}
 
- (void)startProgress {
 _currentTime += 0.1;
 if (_timeMax > _currentTime) {
  _progressValue = _currentTime/_timeMax;
  Plog(@"progress = %f",_progressValue);
  [self setNeedsDisplay];
  [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1];
 }
 
 if (_timeMax <= _currentTime) {
  [self clearProgress];
 
 }
}
 
@end
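
Again, only the .m is shown. Judging from the implementation above and the calls made from the view controller below, the HProgressView header is roughly this sketch:

// HProgressView.h is not shown either; inferred from the .m above and the
// calls made from the camera view controller.
#import <UIKit/UIKit.h>

@interface HProgressView : UIView

// Maximum recording time in seconds; setting it resets the arc and starts the countdown.
@property (nonatomic, assign) NSInteger timeMax;

// Stops the countdown and hides the view (called when recording finishes).
- (void)clearProgress;

@end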

Next up is the camera view controller. Since this was written in a hurry it uses a xib, so please don't drop it into a project as-is. Here is the .m file:

#import "HVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "HAVPlayer.h"
#import "HProgressView.h"
#import <Foundation/Foundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
 
typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice);
@interface HVideoViewController ()<AVCaptureFileOutputRecordingDelegate>
 
//"Tap to take a photo, press and hold to record" tip label
@property (strong, nonatomic) IBOutlet UILabel *labelTipTitle;
 
//movie (video) file output
@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
//still image output
//@property (strong,nonatomic) AVCaptureStillImageOutput *captureStillImageOutput;//photo output
//provides input data from the AVCaptureDevice
@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;
//background task identifier
@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
 
@property (assign,nonatomic) UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier;
 
@property (weak, nonatomic) IBOutlet UIImageView *focusCursor; //focus indicator
 
//coordinates the data flow between the input and output devices
@property(nonatomic)AVCaptureSession *session;
 
//preview layer that shows the captured image in real time
@property(nonatomic)AVCaptureVideoPreviewLayer *previewLayer;
 
@property (strong, nonatomic) IBOutlet UIButton *btnBack;
//re-record button
@property (strong, nonatomic) IBOutlet UIButton *btnAfresh;
//confirm button
@property (strong, nonatomic) IBOutlet UIButton *btnEnsure;
//camera switch button
@property (strong, nonatomic) IBOutlet UIButton *btnCamera;
 
@property (strong, nonatomic) IBOutlet UIImageView *bgView;
//remaining recording time; the maximum defaults to 60 seconds
@property (assign, nonatomic) NSInteger seconds;
 
//URL where the recorded video is saved
@property (strong, nonatomic) NSURL *saveVideoUrl;
 
//whether focusing is currently in progress
@property (assign, nonatomic) BOOL isFocus;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *afreshCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *ensureCenterX;
@property (strong, nonatomic) IBOutlet NSLayoutConstraint *backCenterX;
 
//video playback
@property (strong, nonatomic) HAVPlayer *player;
 
@property (strong, nonatomic) IBOutlet HProgressView *progressView;
 
//YES means video recording, NO means taking a photo
@property (assign, nonatomic) BOOL isVideo;
 
@property (strong, nonatomic) UIImage *takeImage;
@property (strong, nonatomic) UIImageView *takeImageView;
@property (strong, nonatomic) IBOutlet UIImageView *imgRecord;
 
 
@end
 
//a press longer than this (in seconds) counts as a video, otherwise a photo
#define TimeMax 1
 
@implementation HVideoViewController
 
 
-(void)dealloc{
 [self removeNotification];
 
 
}
 
- (void)viewDidLoad {
 [super viewDidLoad];
 // Do any additional setup after loading the view.
 
 UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"];
 self.backCenterX.constant = -(SCREEN_WIDTH/2/2)-image.size.width/2/2;
 
 self.progressView.layer.cornerRadius = self.progressView.frame.size.width/2;
 
 if (self.HSeconds == 0) {
  self.HSeconds = 60;
 }
 
 [self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4];
}
 
- (void)hiddenTipsLabel {
 self.labelTipTitle.hidden = YES;
}
 
- (void)didReceiveMemoryWarning {
 [super didReceiveMemoryWarning];
 // Dispose of any resources that can be recreated.
}
 
- (void)viewWillAppear:(BOOL)animated {
 [super viewWillAppear:animated];
 [[UIApplication sharedApplication] setStatusBarHidden:YES];
 [self customCamera];
 [self.session startRunning];
}
 
 
-(void)viewDidAppear:(BOOL)animated{
 [super viewDidAppear:animated];
}
 
-(void)viewDidDisappear:(BOOL)animated{
 [super viewDidDisappear:animated];
 [self.session stopRunning];
}
 
- (void)viewWillDisappear:(BOOL)animated {
 [super viewWillDisappear:animated];
 [[UIApplication sharedApplication] setStatusBarHidden:NO];
}
 
- (void)customCamera {
 
 //initialize the session, which ties the inputs and outputs together
 self.session = [[AVCaptureSession alloc] init];
 //set the resolution (highest the device supports)
 if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
  self.session.sessionPreset = AVCaptureSessionPresetHigh;
 }
 //get the back camera
 AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
 //get an audio input device
 AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
 
 //initialize the video input
 NSError *error = nil;
 self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
 if (error) {
  Plog(@"Error creating the video device input: %@",error.localizedDescription);
  return;
 }
 
 //add the audio input
 error = nil;
 AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:&error];
 if (error) {
  NSLog(@"Error creating the audio device input: %@",error.localizedDescription);
  return;
 }
 
 //output object
 self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];//movie file output
 
 //add the inputs to the session
 if ([self.session canAddInput:self.captureDeviceInput]) {
  [self.session addInput:self.captureDeviceInput];
  [self.session addInput:audioCaptureDeviceInput];
  //enable video stabilization
  AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
  if ([connection isVideoStabilizationSupported]) {
   connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
  }
 }
 
 //add the movie file output to the session
 if ([self.session canAddOutput:self.captureMovieFileOutput]) {
  [self.session addOutput:self.captureMovieFileOutput];
 }
 
 //create the video preview layer to show the live camera feed
 self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
 self.previewLayer.frame = self.view.bounds;//CGRectMake(0, 0, self.view.width, self.view.height);
 self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//fill mode
 [self.bgView.layer addSublayer:self.previewLayer];
 
 [self addNotificationToCaptureDevice:captureDevice];
 [self addGenstureRecognizer];
}
 
 
 
- (IBAction)onCancelAction:(UIButton *)sender {
 [self dismissViewControllerAnimated:YES completion:^{
  [Utility hideProgressDialog];
 }];
}
 
 
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
 if ([[touches anyObject] view] == self.imgRecord) {
  Plog(@"开始录制");
  //根据设备输出获得连接
  AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeAudio];
  //根据连接取得设备输出的数据
  if (![self.captureMovieFileOutput isRecording]) {
   //如果支持多任务则开始多任务
   if ([[UIDevice currentDevice] isMultitaskingSupported]) {
    self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
   }
   if (self.saveVideoUrl) {
    [[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
   }
   //预览图层和视频方向保持一致
   connection.videoOrientation = [self.previewLayer connection].videoOrientation;
   NSString *outputFielPath=[NSTemporaryDirectory() stringByAppendingString:@"myMovie.mov"];
   NSLog(@"save path is :%@",outputFielPath);
   NSURL *fileUrl=[NSURL fileURLWithPath:outputFielPath];
   NSLog(@"fileUrl:%@",fileUrl);
   [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
  } else {
   [self.captureMovieFileOutput stopRecording];
  }
 }
}
 
 
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
 if ([[touches anyObject] view] == self.imgRecord) {
  Plog(@"结束触摸");
  if (!self.isVideo) {
   [self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
  } else {
   [self endRecord];
  }
 }
}
 
- (void)endRecord {
 [self.captureMovieFileOutput stopRecording];//stop recording
}
 
- (IBAction)onAfreshAction:(UIButton *)sender {
 Plog(@"重新录制");
 [self recoverLayout];
}
 
- (IBAction)onEnsureAction:(UIButton *)sender {
 Plog(@"确定 这里进行保存或者发送出去");
 if (self.saveVideoUrl) {
  WS(weakSelf)
  [Utility showProgressDialogText:@"视频处理中..."];
  ALAssetsLibrary *assetsLibrary=[[ALAssetsLibrary alloc]init];
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
   Plog(@"outputUrl:%@",weakSelf.saveVideoUrl);
   [[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
   if (weakSelf.lastBackgroundTaskIdentifier!= UIBackgroundTaskInvalid) {
    [[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
   }
   if (error) {
    Plog(@"保存视频到相簿过程中发生错误,错误信息:%@",error.localizedDescription);
    [Utility showAllTextDialog:KAppDelegate.window Text:@"保存视频到相册发生错误"];
   } else {
    if (weakSelf.takeBlock) {
     weakSelf.takeBlock(assetURL);
    }
    Plog(@"成功保存视频到相簿.");
    [weakSelf onCancelAction:nil];
   }
  }];
 } else {
  //照片
  UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
  if (self.takeBlock) {
   self.takeBlock(self.takeImage);
  }
 
  [self onCancelAction:nil];
 }
}
 
//switch between the front and back cameras
- (IBAction)onCameraAction:(UIButton *)sender {
 Plog(@"Switch camera");
 AVCaptureDevice *currentDevice=[self.captureDeviceInput device];
 AVCaptureDevicePosition currentPosition=[currentDevice position];
 [self removeNotificationFromCaptureDevice:currentDevice];
 AVCaptureDevice *toChangeDevice;
 AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;//front
 if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
  toChangePosition = AVCaptureDevicePositionBack;//back
 }
 toChangeDevice=[self getCameraDeviceWithPosition:toChangePosition];
 [self addNotificationToCaptureDevice:toChangeDevice];
 //create the input object for the new device
 AVCaptureDeviceInput *toChangeDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:toChangeDevice error:nil];
 
 //always call beginConfiguration before changing the session configuration, and commit when done
 [self.session beginConfiguration];
 //remove the old input
 [self.session removeInput:self.captureDeviceInput];
 //add the new input
 if ([self.session canAddInput:toChangeDeviceInput]) {
  [self.session addInput:toChangeDeviceInput];
  self.captureDeviceInput = toChangeDeviceInput;
 }
 //commit the session configuration
 [self.session commitConfiguration];
}
 
- (void)onStartTranscribe:(NSURL *)fileURL {
 if ([self.captureMovieFileOutput isRecording]) {
  -- self.seconds;
  if (self.seconds > 0) {
   if (self.HSeconds - self.seconds >= TimeMax && !self.isVideo) {
    self.isVideo = YES;//a press longer than TimeMax means video recording
    self.progressView.timeMax = self.seconds;
   }
   [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
  } else {
   if ([self.captureMovieFileOutput isRecording]) {
    [self.captureMovieFileOutput stopRecording];
   }
  }
 }
}
 
 
#pragma mark - Video output delegate
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
 Plog(@"开始录制...");
 self.seconds = self.HSeconds;
 [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
}
 
 
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
 Plog(@"视频录制完成.");
 [self changeLayout];
 if (self.isVideo) {
  self.saveVideoUrl = outputFileURL;
  if (!self.player) {
   self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
  } else {
   if (outputFileURL) {
    self.player.videoUrl = outputFileURL;
    self.player.hidden = NO;
   }
  }
 } else {
  //photo
  self.saveVideoUrl = nil;
  [self videoHandlePhoto:outputFileURL];
 }
 
}
 
- (void)videoHandlePhoto:(NSURL *)url {
 AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
 AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
 imageGenerator.appliesPreferredTrackTransform = YES; // rotate the snapshot to the correct orientation
 NSError *error = nil;
 CMTime time = CMTimeMake(0,30);//time at which to grab the frame; CMTime describes movie time as value/timescale, so CMTimeMake(value, timescale) can address a specific frame
 CMTime actualTime; //the time at which the frame was actually generated
 CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actualTime error:&error];
 if (error) {
  Plog(@"Failed to grab a frame from the video: %@",error.localizedDescription);
 }
 CMTimeShow(actualTime);
 UIImage *image = [UIImage imageWithCGImage:cgImage];
 
 CGImageRelease(cgImage);
 if (image) {
  Plog(@"Video frame captured successfully");
 } else {
  Plog(@"Failed to capture a video frame");
 }
 
 
 self.takeImage = image;//[UIImage imageWithCGImage:cgImage];
 
 [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
 
 if (!self.takeImageView) {
  self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
  [self.bgView addSubview:self.takeImageView];
 }
 self.takeImageView.hidden = NO;
 self.takeImageView.image = self.takeImage;
}
 
#pragma mark - Notifications
 
//register notifications
- (void)setupObservers
{
 NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
 [notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
}
 
//cancel video recording when the app goes to the background
- (void)applicationDidEnterBackground:(NSNotification *)notification {
 [self onCancelAction:nil];
}
 
/**
 * Add notifications for the capture device
 */
-(void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice{
 //note: subject-area-change monitoring must be enabled before observing the notification
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
  captureDevice.subjectAreaChangeMonitoringEnabled=YES;
 }];
 NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
 //the subject area changed
 [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
-(void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice{
 NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
 [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}
/**
 * Remove all notifications
 */
-(void)removeNotification{
 NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
 [notificationCenter removeObserver:self];
}
 
-(void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession{
 NSNotificationCenter *notificationCenter= [NSNotificationCenter defaultCenter];
 //session runtime error
 [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}
 
/**
 * Device connected
 *
 * @param notification the notification object
 */
-(void)deviceConnected:(NSNotification *)notification{
 NSLog(@"Device connected...");
}
/**
 * Device disconnected
 *
 * @param notification the notification object
 */
-(void)deviceDisconnected:(NSNotification *)notification{
 NSLog(@"Device disconnected.");
}
/**
 * Subject area changed
 *
 * @param notification the notification object
 */
-(void)areaChange:(NSNotification *)notification{
 NSLog(@"Subject area changed...");
}
 
/**
 * Session runtime error
 *
 * @param notification the notification object
 */
-(void)sessionRuntimeError:(NSNotification *)notification{
 NSLog(@"The capture session hit a runtime error.");
}
 
 
 
/**
 * Get the camera at the specified position
 *
 * @param position camera position
 *
 * @return the camera device, or nil if none matches
 */
-(AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
 NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
 for (AVCaptureDevice *camera in cameras) {
  if ([camera position] == position) {
   return camera;
  }
 }
 return nil;
}
 
/**
 * Common helper for changing device properties
 *
 * @param propertyChange block that performs the property change
 */
-(void)changeDeviceProperty:(PropertyChangeBlock)propertyChange{
 AVCaptureDevice *captureDevice= [self.captureDeviceInput device];
 NSError *error;
 //note: call lockForConfiguration: before changing device properties, and unlockForConfiguration afterwards
 if ([captureDevice lockForConfiguration:&error]) {
  //continuous auto white balance
  if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
   [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
  }
  //turn the flash on automatically based on lighting conditions
  if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
   [captureDevice setFlashMode:AVCaptureFlashModeAuto];
  }
 
  propertyChange(captureDevice);
  [captureDevice unlockForConfiguration];
 }else{
  NSLog(@"Error while setting device properties: %@",error.localizedDescription);
 }
}
 
/**
 * Set the flash mode
 *
 * @param flashMode flash mode
 */
-(void)setFlashMode:(AVCaptureFlashMode )flashMode{
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
  if ([captureDevice isFlashModeSupported:flashMode]) {
   [captureDevice setFlashMode:flashMode];
  }
 }];
}
/**
 * Set the focus mode
 *
 * @param focusMode focus mode
 */
-(void)setFocusMode:(AVCaptureFocusMode )focusMode{
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
  if ([captureDevice isFocusModeSupported:focusMode]) {
   [captureDevice setFocusMode:focusMode];
  }
 }];
}
/**
 * Set the exposure mode
 *
 * @param exposureMode exposure mode
 */
-(void)setExposureMode:(AVCaptureExposureMode)exposureMode{
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
  if ([captureDevice isExposureModeSupported:exposureMode]) {
   [captureDevice setExposureMode:exposureMode];
  }
 }];
}
/**
 * Set focus and exposure at the given point
 *
 * @param point point of interest
 */
-(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
 [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
//  if ([captureDevice isFocusPointOfInterestSupported]) {
//   [captureDevice setFocusPointOfInterest:point];
//  }
//  if ([captureDevice isExposurePointOfInterestSupported]) {
//   [captureDevice setExposurePointOfInterest:point];
//  }
  if ([captureDevice isExposureModeSupported:exposureMode]) {
   [captureDevice setExposureMode:exposureMode];
  }
  if ([captureDevice isFocusModeSupported:focusMode]) {
   [captureDevice setFocusMode:focusMode];
  }
 }];
}
 
/**
 * Add a tap gesture; tapping focuses at the tapped point
 */
-(void)addGenstureRecognizer{
 UITapGestureRecognizer *tapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(tapScreen:)];
 [self.bgView addGestureRecognizer:tapGesture];
}
 
-(void)tapScreen:(UITapGestureRecognizer *)tapGesture{
 if ([self.session isRunning]) {
  CGPoint point= [tapGesture locationInView:self.bgView];
  //convert the UI coordinate to a camera (device) coordinate
  CGPoint cameraPoint= [self.previewLayer captureDevicePointOfInterestForPoint:point];
  [self setFocusCursorWithPoint:point];
  [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
 }
}
 
/**
 * Position the focus indicator
 *
 * @param point indicator position
 */
-(void)setFocusCursorWithPoint:(CGPoint)point{
 if (!self.isFocus) {
  self.isFocus = YES;
  self.focusCursor.center=point;
  self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
  self.focusCursor.alpha = 1.0;
  [UIView animateWithDuration:0.5 animations:^{
   self.focusCursor.transform = CGAffineTransformIdentity;
  } completion:^(BOOL finished) {
   [self performSelector:@selector(onHiddenFocusCurSorAction) withObject:nil afterDelay:0.5];
  }];
 }
}
 
- (void)onHiddenFocusCurSorAction {
 self.focusCursor.alpha=0;
 self.isFocus = NO;
}
 
//called when capturing finishes
- (void)changeLayout {
 self.imgRecord.hidden = YES;
 self.btnCamera.hidden = YES;
 self.btnAfresh.hidden = NO;
 self.btnEnsure.hidden = NO;
 self.btnBack.hidden = YES;
 if (self.isVideo) {
  [self.progressView clearProgress];
 }
 self.afreshCenterX.constant = -(SCREEN_WIDTH/2/2);
 self.ensureCenterX.constant = SCREEN_WIDTH/2/2;
 [UIView animateWithDuration:0.25 animations:^{
  [self.view layoutIfNeeded];
 }];
 
 self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
 self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
 [self.session stopRunning];
}
 
 
//called when re-shooting
- (void)recoverLayout {
 if (self.isVideo) {
  self.isVideo = NO;
  [self.player stopPlayer];
  self.player.hidden = YES;
 }
 [self.session startRunning];
 
 if (!self.takeImageView.hidden) {
  self.takeImageView.hidden = YES;
 }
// self.saveVideoUrl = nil;
 self.afreshCenterX.constant = 0;
 self.ensureCenterX.constant = 0;
 self.imgRecord.hidden = NO;
 self.btnCamera.hidden = NO;
 self.btnAfresh.hidden = YES;
 self.btnEnsure.hidden = YES;
 self.btnBack.hidden = NO;
 [UIView animateWithDuration:0.25 animations:^{
  [self.view layoutIfNeeded];
 }];
}
 
/*
#pragma mark - Navigation
 
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
 // Get the new view controller using [segue destinationViewController].
 // Pass the selected object to the new view controller.
}
*/
 
@end
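
The controller's header is not included in the article either. From the .m above and the usage snippet below, its public interface is presumably along these lines (a sketch):

// HVideoViewController.h is not shown; inferred from the .m above and the
// usage snippet that follows.
#import <UIKit/UIKit.h>

@interface HVideoViewController : UIViewController

// Maximum recording length in seconds (treated as 60 when left at 0).
@property (nonatomic, assign) NSInteger HSeconds;

// Callback invoked with an NSURL (the saved video asset) or a UIImage (the photo).
@property (nonatomic, copy) void (^takeBlock)(id item);

@end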

Using it is quite simple:

- (IBAction)onCameraAction:(UIButton *)sender {
 //since this is a demo it uses a xib; adapt it to your own needs and treat it only as a reference rather than dropping it straight into a project
 HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
 ctrl.HSeconds = 30;//maximum recording duration
 ctrl.takeBlock = ^(id item) {
  if ([item isKindOfClass:[NSURL class]]) {
   NSURL *videoURL = item;
   //video URL
 
  } else {
   //image
 
  }
 };
 [self presentViewController:ctrl animated:YES completion:nil];
}
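
Note that the code also references a few project-level helpers the article never defines: Plog, SCREEN_WIDTH, WS, Utility and KAppDelegate. The first three are common convenience macros; plausible definitions, inferred from the call sites, are sketched below (assumptions, not the author's code). Utility and KAppDelegate are the author's own HUD and app-delegate helpers, so replace those calls with your own.

// Assumed definitions for the undocumented macros -- inferred from the call
// sites above, not taken from the original project.
#define Plog(format, ...)  NSLog((format), ##__VA_ARGS__)            // debug logging
#define SCREEN_WIDTH       ([UIScreen mainScreen].bounds.size.width) // screen width in points
#define WS(weakSelf)       __weak __typeof(&*self) weakSelf = self;  // weak-self capture for blocks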

Here is the demo address as well (take it or leave it -_-\):

KJCamera

That's all. It's fairly simple, but I hope it helps. Thanks!


Original article: https://blog.csdn.net/qq_24874679/article/details/62886904