SoFunction
Updated on 2025-04-12

Implementing video recording on iOS with AVCaptureSession

This article shares complete example code that uses AVCaptureSession to record video, for your reference. The details are as follows.

#import "RecordingVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

@interface RecordingVideoViewController () <AVCaptureFileOutputRecordingDelegate>

// Session: coordinates data flow between the input and output devices.
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Device inputs: obtain data from an AVCaptureDevice.
@property (strong, nonatomic) AVCaptureDeviceInput *videoCaptureDeviceInput;
@property (strong, nonatomic) AVCaptureDeviceInput *audioCaptureDeviceInput;
// Movie file output stream.
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
// Preview layer showing the live camera feed.
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

// Container for the custom UI controls.
@property (strong, nonatomic) UIView *viewContainer;
// Focus cursor shown where the user taps.
@property (strong, nonatomic) UIImageView *focusCursor;
// Elapsed recording-time label.
@property (strong, nonatomic) UILabel *timeLabel;
// Switches between the front and rear cameras.
@property (strong, nonatomic) UIButton *switchCameraBtn;
// Cycles the zoom factor.
@property (strong, nonatomic) UIButton *scaleBtn;
// Drives the recording-time label once per second.
@property (strong, nonatomic) NSTimer *timer;

@end
 
@implementation RecordingVideoViewController {
 @private
  NSInteger _num;        // elapsed recording time, in seconds
  CGFloat _kCameraScale; // current zoom factor, cycles 1.0 -> 1.5 -> ... -> 3.0
}

// Lazily builds the control container: record button, time label, zoom button,
// camera-switch button and the focus cursor.
// NOTE(review): the published listing was garbled (receivers and Masonry
// attributes stripped); the layout below is a reconstruction — confirm the
// exact offsets against the original project.
- (UIView *)viewContainer {
  if (!_viewContainer) {
    _viewContainer = [[UIView alloc] initWithFrame:[UIScreen mainScreen].bounds];

    UIButton *takeButton = [UIButton buttonWithType:UIButtonTypeCustom];
    takeButton.backgroundColor = [UIColor redColor];
    [takeButton setTitle:@"start" forState:UIControlStateNormal];
    [takeButton addTarget:self action:@selector(takeButtonClick:) forControlEvents:UIControlEventTouchUpInside];

    _timeLabel = [[UILabel alloc] init];
    _timeLabel.textColor = [UIColor redColor];
    _timeLabel.textAlignment = NSTextAlignmentCenter;
    _timeLabel.font = [UIFont boldSystemFontOfSize:20];
    _timeLabel.text = @"00:00";

    _switchCameraBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [_switchCameraBtn setTitle:@"switch" forState:UIControlStateNormal];
    _switchCameraBtn.backgroundColor = [UIColor redColor];
    [_switchCameraBtn addTarget:self action:@selector(switchCameraBtnClick) forControlEvents:UIControlEventTouchUpInside];

    _scaleBtn = [UIButton buttonWithType:UIButtonTypeCustom];
    [_scaleBtn setTitle:@"1X" forState:UIControlStateNormal];
    _scaleBtn.backgroundColor = [UIColor redColor];
    [_scaleBtn addTarget:self action:@selector(scaleBtnClick:) forControlEvents:UIControlEventTouchUpInside];

    [_viewContainer addSubview:takeButton];
    [_viewContainer addSubview:_timeLabel];
    [_viewContainer addSubview:_scaleBtn];
    [_viewContainer addSubview:_switchCameraBtn];
    // Record button: bottom-center, above the home indicator area.
    [takeButton mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.centerX.mas_equalTo(_viewContainer);
      make.bottom.mas_equalTo(_viewContainer).offset(-64);
    }];
    // Time label: top-center strip.
    [_timeLabel mas_makeConstraints:^(MASConstraintMaker *make) {
      make.centerX.mas_equalTo(_viewContainer);
      make.height.mas_equalTo(@30);
      make.top.mas_equalTo(_viewContainer);
    }];
    // Zoom button: left edge.
    [_scaleBtn mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.left.mas_equalTo(_viewContainer).offset(10);
      make.centerY.mas_equalTo(_viewContainer);
    }];
    // Camera-switch button: right edge.
    [_switchCameraBtn mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(60, 40));
      make.centerY.mas_equalTo(_viewContainer);
      make.right.mas_equalTo(_viewContainer).offset(-10);
    }];

    // Focus cursor: hidden until the user taps to focus.
    _focusCursor = [[UIImageView alloc] init];
    kBorder(_focusCursor, 1, [UIColor yellowColor]);
    _focusCursor.alpha = 0;
    [_viewContainer addSubview:_focusCursor];
    [_focusCursor mas_makeConstraints:^(MASConstraintMaker *make) {
      make.size.mas_equalTo(CGSizeMake(40, 40));
      make.center.mas_equalTo(_viewContainer);
    }];
  }
  return _viewContainer;
}

// Builds the capture pipeline: session -> video/audio inputs -> movie file
// output, plus the preview layer, the custom controls and the focus gesture.
- (void)viewDidLoad {
  [super viewDidLoad];

  self.title = @"Video Recording";
  _kCameraScale = 1.0f;

  // Initialize the session and pick a 720p preset when available.
  _captureSession = [[AVCaptureSession alloc] init];
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
  }

  NSError *error = nil;

  // Video input from the rear camera.
  AVCaptureDevice *videoCaptureDevice = [self cameraDeviceWithPosition:AVCaptureDevicePositionBack];
  if (!videoCaptureDevice) {
    NSLog(@"Failed to get the rear camera!");
    return;
  }
  _videoCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoCaptureDevice error:&error];
  if (error) {
    NSLog(@"An error occurred while obtaining the input object of the video device");
    return;
  }

  // Audio input from the default microphone.
  AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
  _audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
  if (error) {
    NSLog(@"Error getting the input object of the audio device");
    return;
  }

  // Movie file output.
  _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

  // Add the device inputs to the session.
  if ([_captureSession canAddInput:_videoCaptureDeviceInput]) {
    [_captureSession addInput:_videoCaptureDeviceInput];
    [_captureSession addInput:_audioCaptureDeviceInput];
  }

  // Add the device output to the session.
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
    [_captureSession addOutput:_captureMovieFileOutput];

    // Video stabilization ("anti-shake").
    // BUG FIX: the original asked the AUDIO connection for stabilization —
    // stabilization lives on the VIDEO connection. It also queried the
    // connection before the output was added to the session, when no
    // connection exists yet; it must be configured after addOutput:.
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
      captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
  }

  // Preview layer showing the live camera feed.
  _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
  self.view.layer.masksToBounds = YES;
  _captureVideoPreviewLayer.frame = self.view.bounds;
  _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
  [self.view.layer addSublayer:_captureVideoPreviewLayer];

  // Custom controls sit above the preview layer.
  [self.view addSubview:self.viewContainer];

  // Tap-to-focus gesture.
  UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
  [self.viewContainer addGestureRecognizer:tapGesture];
}

- (void)viewDidAppear:(BOOL)animated {
  [super viewDidAppear:animated];
  // Start streaming frames once the view is on screen.
  [self.captureSession startRunning];
}

- (void)viewDidDisappear:(BOOL)animated {
  [super viewDidDisappear:animated];
  [self.captureSession stopRunning];
  // Invalidate the timer here, otherwise the run loop keeps self alive.
  [self.timer invalidate];
  self.timer = nil;
}

- (void)viewWillDisappear:(BOOL)animated {
  [super viewWillDisappear:animated];
  // Reset any zoom transform applied to the preview layer.
  [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(1, 1)];
}

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
}

// Start / stop recording. While idle, starts a new recording into a temp
// file and starts the on-screen timer; while recording, stops and pops.
- (void)takeButtonClick:(UIButton *)sender {
  if ([self.captureMovieFileOutput isRecording]) {
    [self.captureMovieFileOutput stopRecording];
    [self.navigationController popViewControllerAnimated:YES];
  } else {
    // Keep the recorded video's orientation in sync with the preview layer.
    AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation;

    // BUG FIX: the original appended an empty path component, producing a
    // directory path that cannot be recorded to.
    NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
    NSLog(@"%@", filePath);
    [self.captureMovieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:filePath] recordingDelegate:self];

    sender.backgroundColor = [UIColor greenColor];
    [sender setTitle:@"stop" forState:UIControlStateNormal];

    self.timer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(timeAction) userInfo:nil repeats:YES];
    [self.timer setFireDate:[NSDate distantPast]];
  }
}

// Swap the session's video input between the front and rear cameras.
- (void)switchCameraBtnClick {
  AVCaptureDevicePosition currentPosition = self.videoCaptureDeviceInput.device.position;
  AVCaptureDevicePosition toPosition;
  if (currentPosition == AVCaptureDevicePositionUnspecified ||
      currentPosition == AVCaptureDevicePositionFront) {
    toPosition = AVCaptureDevicePositionBack;
  } else {
    toPosition = AVCaptureDevicePositionFront;
  }

  AVCaptureDevice *toCaptureDevice = [self cameraDeviceWithPosition:toPosition];
  if (!toCaptureDevice) {
    NSLog(@"Failed to get the device to switch");
    return;
  }

  NSError *error = nil;
  AVCaptureDeviceInput *toVideoDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toCaptureDevice error:&error];
  if (error) {
    NSLog(@"Failed to get the input of the device to be switched");
    return;
  }

  // Session reconfiguration must be wrapped in begin/commitConfiguration.
  [self.captureSession beginConfiguration];
  [self.captureSession removeInput:self.videoCaptureDeviceInput];
  if ([self.captureSession canAddInput:toVideoDeviceInput]) {
    [self.captureSession addInput:toVideoDeviceInput];
    self.videoCaptureDeviceInput = toVideoDeviceInput;
  }
  [self.captureSession commitConfiguration];
}

// Tap gesture: animate the focus cursor at the tap point, then focus there.
- (void)tapScreen:(UITapGestureRecognizer *)tap {
  CGPoint point = [tap locationInView:self.viewContainer];

  // Convert the UI point into the camera's point-of-interest space (0..1).
  CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];

  // Focus-cursor animation: pop in at 1.5x, shrink to normal, fade out.
  self.focusCursor.center = point;
  self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
  self.focusCursor.alpha = 1.0f;
  [UIView animateWithDuration:1 animations:^{
    self.focusCursor.transform = CGAffineTransformIdentity;
  } completion:^(BOOL finished) {
    self.focusCursor.alpha = 0.0f;
  }];

  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}

/** Set the focus (and optionally exposure) point on the current camera. */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
  AVCaptureDevice *captureDevice = [self.videoCaptureDeviceInput device];
  NSError *error = nil;
  // Device configuration must be bracketed by lock/unlockForConfiguration.
  if ([captureDevice lockForConfiguration:&error]) {
    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:focusMode];
    }
    if ([captureDevice isFocusPointOfInterestSupported]) {
      [captureDevice setFocusPointOfInterest:point];
    }
    // Exposure (left disabled in the original article):
    //    if ([captureDevice isExposureModeSupported:exposureMode]) {
    //      [captureDevice setExposureMode:exposureMode];
    //    }
    //    if ([captureDevice isExposurePointOfInterestSupported]) {
    //      [captureDevice setExposurePointOfInterest:point];
    //    }
    // Flash mode (left disabled in the original article):
    //    if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
    //      [captureDevice setFlashMode:AVCaptureFlashModeAuto];
    //    }
    [captureDevice unlockForConfiguration];
  } else {
    NSLog(@"An error occurred during setting device attributes, error message: %@", error);
  }
}

// Cycle the zoom factor 1.0 -> 1.5 -> 2.0 -> 2.5 -> 3.0 -> 1.0 and mirror it
// on the preview layer with a short transform animation.
- (void)scaleBtnClick:(UIButton *)sender {
  _kCameraScale += 0.5;
  if (_kCameraScale > 3.0) {
    _kCameraScale = 1.0;
  }
  AVCaptureDevice *videoDevice = self.videoCaptureDeviceInput.device;
  NSError *error = nil;
  if ([videoDevice lockForConfiguration:&error]) {
    [videoDevice setVideoZoomFactor:_kCameraScale];
    [videoDevice unlockForConfiguration];

    [sender setTitle:[NSString stringWithFormat:@"%lgX", _kCameraScale] forState:UIControlStateNormal];

    [CATransaction begin];
    [CATransaction setAnimationDuration:0.25];
    [self.captureVideoPreviewLayer setAffineTransform:CGAffineTransformMakeScale(_kCameraScale, _kCameraScale)];
    [CATransaction commit];
  } else {
    // BUG FIX: the original was missing the terminating semicolon here.
    NSLog(@"Failed to modify the device attributes!");
  }
}

#pragma mark -------- AVCaptureFileOutputRecordingDelegate ----------

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
  NSLog(@"Start recording");
}

// When recording finishes, copy the movie file into the photo album.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
  NSLog(@"Recording ends");
  ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *saveError) {
    if (saveError) {
      NSLog(@"An error occurred during the process of saving video to the album, error message: %@", saveError);
    }
  }];
}

// Timer callback: advance and display the elapsed recording time as mm:ss.
- (void)timeAction {
  self.timeLabel.text = [NSString stringWithFormat:@"%.2ld:%.2ld", _num / 60, _num % 60];
  _num++;
}

/** Return the camera at the specified position, or nil if none exists. */
- (AVCaptureDevice *)cameraDeviceWithPosition:(AVCaptureDevicePosition)position {
  // NOTE(review): devicesWithMediaType: is deprecated since iOS 10;
  // AVCaptureDeviceDiscoverySession is the modern replacement.
  NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *camera in cameras) {
    if ([camera position] == position) {
      return camera;
    }
  }
  return nil;
}

@end

Reference code:

#import "VideoTestViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>

// Block type used to apply a change to a locked capture device.
typedef void (^PropertyChangeBlock)(AVCaptureDevice *captureDevice);

@interface VideoTestViewController () <AVCaptureFileOutputRecordingDelegate> // movie file output delegate

// Session: coordinates data flow between the input and output devices.
@property (strong, nonatomic) AVCaptureSession *captureSession;
// Current video input (obtains data from an AVCaptureDevice).
@property (strong, nonatomic) AVCaptureDeviceInput *captureDeviceInput;
// Movie file output stream.
@property (strong, nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;
// Preview layer showing the live camera feed.
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
// Whether rotation is allowed (rotation is disabled while recording).
@property (assign, nonatomic) BOOL enableRotation;
// Bounds before rotation. BUG FIX: was declared `CGRect *` — an `assign`
// struct property must store the struct by value, not a raw pointer.
@property (assign, nonatomic) CGRect lastBounds;
// Background task used to finish saving after the app is backgrounded.
@property (assign, nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
// Container for the custom UI controls.
@property (strong, nonatomic) UIView *viewContainer;
// Record button.
@property (strong, nonatomic) UIButton *takeButton;
// Focus cursor shown where the user taps.
@property (strong, nonatomic) UIImageView *focusCursor;

@end
 
@implementation VideoTestViewController

#pragma mark - Controller view methods

- (void)viewDidLoad {
  [super viewDidLoad];
}

// Builds the capture pipeline each time the view appears:
// session -> video/audio inputs -> movie file output -> preview layer.
- (void)viewWillAppear:(BOOL)animated {
  [super viewWillAppear:animated];

  // Initialize the session and pick a 720p preset when available.
  _captureSession = [[AVCaptureSession alloc] init];
  if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
  }

  // Rear camera.
  AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
  if (!captureDevice) {
    NSLog(@"There is a problem getting the rear camera.");
    return;
  }
  // Default microphone.
  AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];

  NSError *error = nil;
  // Video input.
  _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
  if (error) {
    NSLog(@"An error occurred while obtaining the device input object, error reason: %@", error.localizedDescription);
    return;
  }
  // Audio input.
  AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
  if (error) {
    NSLog(@"An error occurred while obtaining the device input object, error reason: %@", error.localizedDescription);
    return;
  }
  // Movie file output.
  _captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];

  // Add the device inputs to the session.
  if ([_captureSession canAddInput:_captureDeviceInput]) {
    [_captureSession addInput:_captureDeviceInput];
    [_captureSession addInput:audioCaptureDeviceInput];
    // Video stabilization.
    // NOTE(review): the connection only exists after the output has been
    // added to the session, so this has no effect here — configure it after
    // the addOutput: call below. Kept in the original position for parity
    // with the article's structure.
    AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
      captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
  }

  // Add the device output to the session.
  if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
    [_captureSession addOutput:_captureMovieFileOutput];
  }

  // Preview layer, inserted below the focus cursor so the cursor stays visible.
  _captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
  CALayer *layer = self.viewContainer.layer;
  layer.masksToBounds = YES;
  _captureVideoPreviewLayer.frame = layer.bounds;
  _captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill mode
  [layer insertSublayer:_captureVideoPreviewLayer below:self.focusCursor.layer];

  _enableRotation = YES;
  [self addNotificationToCaptureDevice:captureDevice];
  [self addGenstureRecognizer];
}

- (void)viewDidAppear:(BOOL)animated {
  [super viewDidAppear:animated];
  [self.captureSession startRunning];
}

- (void)viewDidDisappear:(BOOL)animated {
  [super viewDidDisappear:animated];
  [self.captureSession stopRunning];
}

- (void)didReceiveMemoryWarning {
  [super didReceiveMemoryWarning];
}

// Rotation is disallowed while recording (see takeButtonClick:).
- (BOOL)shouldAutorotate {
  return self.enableRotation;
}

// Adjust the preview layer's orientation when the screen rotates.
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration {
  AVCaptureConnection *captureConnection = [self.captureVideoPreviewLayer connection];
  captureConnection.videoOrientation = (AVCaptureVideoOrientation)toInterfaceOrientation;
}

// Reset the preview layer's size after rotation.
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation {
  _captureVideoPreviewLayer.frame = self.viewContainer.bounds;
}

- (void)dealloc {
  [self removeNotification];
}

#pragma mark - UI methods

#pragma mark Video recording
// Toggle recording. Starting a recording disables rotation and registers a
// background task so saving can finish if the app is backgrounded.
- (void)takeButtonClick:(UIButton *)sender {
  AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
  if (![self.captureMovieFileOutput isRecording]) {
    self.enableRotation = NO;
    // If multitasking is supported, start a background task.
    if ([[UIDevice currentDevice] isMultitaskingSupported]) {
      self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
    }
    // Keep the recorded video's orientation consistent with the preview.
    captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation;
    // BUG FIX: the original appended an empty string, yielding a directory
    // path that cannot be recorded to.
    NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"myMovie.mov"];
    NSLog(@"save path is :%@", outputFilePath);
    NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
    [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
  } else {
    [self.captureMovieFileOutput stopRecording]; // stop recording
  }
}

#pragma mark Switch front and rear cameras
// Swap the session's video input between the front and rear cameras and
// move the subject-area-change notification to the new device.
- (void)toggleButtonClick:(UIButton *)sender {
  AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
  AVCaptureDevicePosition currentPosition = [currentDevice position];
  [self removeNotificationFromCaptureDevice:currentDevice];
  AVCaptureDevice *toChangeDevice;
  AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;
  if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
    toChangePosition = AVCaptureDevicePositionBack;
  }
  toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
  [self addNotificationToCaptureDevice:toChangeDevice];
  AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:nil];

  // Session reconfiguration must be wrapped in begin/commitConfiguration.
  [self.captureSession beginConfiguration];
  [self.captureSession removeInput:self.captureDeviceInput];
  if ([self.captureSession canAddInput:toChangeDeviceInput]) {
    [self.captureSession addInput:toChangeDeviceInput];
    self.captureDeviceInput = toChangeDeviceInput;
  }
  [self.captureSession commitConfiguration];
}

#pragma mark - Video output delegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
  NSLog(@"Start recording...");
}

// When recording finishes, save the movie to the photo album inside the
// background task started in takeButtonClick:, then end that task.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
  NSLog(@"Video recording is complete.");
  self.enableRotation = YES;
  UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
  self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
  ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
  [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *saveError) {
    if (saveError) {
      NSLog(@"An error occurred during the process of saving video to the album, error message: %@", saveError.localizedDescription);
    }
    if (lastBackgroundTaskIdentifier != UIBackgroundTaskInvalid) {
      [[UIApplication sharedApplication] endBackgroundTask:lastBackgroundTaskIdentifier];
    }
    NSLog(@"Save the video to the album successfully.");
  }];
}

#pragma mark - Notifications
/**
 * Add notifications for the given input device.
 */
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice {
  // Subject-area monitoring must be enabled before observing the notification.
  [self changeDeviceProperty:^(AVCaptureDevice *device) {
    device.subjectAreaChangeMonitoringEnabled = YES;
  }];
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
  // The capture area has changed.
  [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice {
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
  [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

/**
 * Remove all notification observers for self.
 */
- (void)removeNotification {
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
  [notificationCenter removeObserver:self];
}

- (void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession {
  NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
  // A session runtime error occurred.
  [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}

/**
 * A device was connected.
 *
 * @param notification Notification object
 */
- (void)deviceConnected:(NSNotification *)notification {
  NSLog(@"The device is connected...");
}

/**
 * A device was disconnected.
 *
 * @param notification Notification object
 */
- (void)deviceDisconnected:(NSNotification *)notification {
  NSLog(@"The device is disconnected.");
}

/**
 * The capture subject area changed.
 *
 * @param notification Notification object
 */
- (void)areaChange:(NSNotification *)notification {
  NSLog(@"Capture area changes...");
}

/**
 * A session runtime error occurred.
 *
 * @param notification Notification object
 */
- (void)sessionRuntimeError:(NSNotification *)notification {
  NSLog(@"The session error occurred.");
}

#pragma mark - Private methods

/**
 * Get the camera at the specified position.
 *
 * @param position Camera position
 *
 * @return Camera device, or nil if none exists at that position.
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
  NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
  for (AVCaptureDevice *camera in cameras) {
    if ([camera position] == position) {
      return camera;
    }
  }
  return nil;
}

/**
 * Unified helper for changing device attributes: locks the device, applies
 * the block, then unlocks. All device configuration goes through here.
 *
 * @param propertyChange Block applying the property change
 */
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange {
  AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
  NSError *error;
  // lockForConfiguration: must precede any change; unlock when finished.
  if ([captureDevice lockForConfiguration:&error]) {
    propertyChange(captureDevice);
    [captureDevice unlockForConfiguration];
  } else {
    NSLog(@"An error occurred during setting device attributes, error message: %@", error.localizedDescription);
  }
}

/**
 * Set the flash mode.
 *
 * @param flashMode Flash mode
 */
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFlashModeSupported:flashMode]) {
      [captureDevice setFlashMode:flashMode];
    }
  }];
}

/**
 * Set the focus mode.
 *
 * @param focusMode Focus mode
 */
- (void)setFocusMode:(AVCaptureFocusMode)focusMode {
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:focusMode];
    }
  }];
}

/**
 * Set the exposure mode.
 *
 * @param exposureMode Exposure mode
 */
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isExposureModeSupported:exposureMode]) {
      [captureDevice setExposureMode:exposureMode];
    }
  }];
}

/**
 * Set the focus and exposure point.
 *
 * @param point Point of interest in camera coordinates
 */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
  [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
    if ([captureDevice isFocusModeSupported:focusMode]) {
      [captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
    }
    if ([captureDevice isFocusPointOfInterestSupported]) {
      [captureDevice setFocusPointOfInterest:point];
    }
    if ([captureDevice isExposureModeSupported:exposureMode]) {
      [captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
    }
    if ([captureDevice isExposurePointOfInterestSupported]) {
      [captureDevice setExposurePointOfInterest:point];
    }
  }];
}

/**
 * Add a tap gesture that focuses at the tapped point.
 */
- (void)addGenstureRecognizer {
  UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapScreen:)];
  [self.viewContainer addGestureRecognizer:tapGesture];
}

- (void)tapScreen:(UITapGestureRecognizer *)tapGesture {
  CGPoint point = [tapGesture locationInView:self.viewContainer];
  // Convert the UI point to the camera's point-of-interest space (0..1).
  CGPoint cameraPoint = [self.captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
  [self setFocusCursorWithPoint:point];
  [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
}

/**
 * Show the focus cursor at the given point: pop in at 1.5x scale, shrink to
 * normal, then fade out.
 *
 * @param point Cursor position
 */
- (void)setFocusCursorWithPoint:(CGPoint)point {
  self.focusCursor.center = point;
  self.focusCursor.transform = CGAffineTransformMakeScale(1.5, 1.5);
  self.focusCursor.alpha = 1.0;
  [UIView animateWithDuration:1.0 animations:^{
    self.focusCursor.transform = CGAffineTransformIdentity;
  } completion:^(BOOL finished) {
    self.focusCursor.alpha = 0;
  }];
}

@end

That is the entire content of this article. I hope it is helpful to your studies.