There are many examples of custom cameras on the Internet. Here is a quick demo I wrote for taking photos and recording videos, for reference only:
The following frameworks are used:
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
When using it, you need to add the relevant permission descriptions to your Info.plist:
Privacy - Camera Usage Description
Privacy - Microphone Usage Description
Privacy - Photo Library Usage Description
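For reference, here is roughly what those entries look like in the Info.plist source. The description strings below are placeholders; write your own:

<key>NSCameraUsageDescription</key>
<string>Used to take photos and record videos.</string>
<key>NSMicrophoneUsageDescription</key>
<string>Used to record audio for videos.</string>
<key>NSPhotoLibraryUsageDescription</key>
<string>Used to save the photos and videos you shoot.</string>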
I wrote this demo in the WeChat style: tap the button to take a photo, press and hold to record a video. When recording finishes, the video plays back immediately, so first here is a simple player:
The .m file (HAVPlayer.m):
#import "" #import <AVFoundation/> @interface HAVPlayer () @property (nonatomic,strong) AVPlayer *player;//Player object @end @implementation HAVPlayer /* // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code } */ - (instancetype)initWithFrame:(CGRect)frame withShowInView:(UIView *)bgView url:(NSURL *)url { if (self = [self initWithFrame:frame]) { //Create the player layer AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:]; = ; [ addSublayer:playerLayer]; if (url) { = url; } [bgView addSubview:self]; } return self; } - (void)dealloc { [self removeAvPlayerNtf]; [self stopPlayer]; = nil; } - (AVPlayer *)player { if (!_player) { _player = [AVPlayer playerWithPlayerItem:[self getAVPlayerItem]]; [self addAVPlayerNtf:_player.currentItem]; } return _player; } - (AVPlayerItem *)getAVPlayerItem { AVPlayerItem *playerItem=[AVPlayerItem playerItemWithURL:]; return playerItem; } - (void)setVideoUrl:(NSURL *)videoUrl { _videoUrl = videoUrl; [self removeAvPlayerNtf]; [self nextPlayer]; } - (void)nextPlayer { [ seekToTime:CMTimeMakeWithSeconds(0, _player.)]; [ replaceCurrentItemWithPlayerItem:[self getAVPlayerItem]]; [self addAVPlayerNtf:]; if ( == 0) { [ play]; } } - (void) addAVPlayerNtf:(AVPlayerItem *)playerItem { //Monitoring status attributes [playerItem addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil]; //Properties for monitoring network loading [playerItem addObserver:self forKeyPath:@"loadedTimeRanges" options:NSKeyValueObservingOptionNew context:nil]; [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:]; } - (void)removeAvPlayerNtf { AVPlayerItem *playerItem = ; [playerItem removeObserver:self forKeyPath:@"status"]; [playerItem removeObserver:self forKeyPath:@"loadedTimeRanges"]; [[NSNotificationCenter defaultCenter] removeObserver:self]; } - (void)stopPlayer { if ( == 1) { [ pause];//Stop if it is in playback } } /** * Monitor player status through KVO * * @param keyPath Monitoring properties * @param object monitor * @param change status change * @param context context */ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{ AVPlayerItem *playerItem = object; if ([keyPath isEqualToString:@"status"]) { AVPlayerStatus status= [[change objectForKey:@"new"] intValue]; if(status==AVPlayerStatusReadyToPlay){ NSLog(@"Playing..., total video length: %.2f",CMTimeGetSeconds()); } }else if([keyPath isEqualToString:@"loadedTimeRanges"]){ NSArray *array=; CMTimeRange timeRange = [ CMTimeRangeValue];//This buffer time range float startSeconds = CMTimeGetSeconds(); float durationSeconds = CMTimeGetSeconds(); NSTimeInterval totalBuffer = startSeconds + durationSeconds;//Total buffer length NSLog(@"Total buffer: %.2f",totalBuffer); } } - (void)playbackFinished:(NSNotification *)ntf { Plog(@"Video playback is completed"); [ seekToTime:CMTimeMake(0, 1)]; [ play]; } @end
In addition, as in WeChat, long-pressing the record button shows a circular progress ring:
The .m file (HProgressView.m):
#import "" @interface HProgressView () /** * Progress value between 0-1.0 */ @property (nonatomic,assign)CGFloat progressValue; @property (nonatomic, assign) CGFloat currentTime; @end @implementation HProgressView // Only override drawRect: if you perform custom drawing. // An empty implementation adversely affects performance during animation. - (void)drawRect:(CGRect)rect { // Drawing code CGContextRef ctx = UIGraphicsGetCurrentContext();//Get the context Plog(@"width = %f",); CGPoint center = CGPointMake(/2.0, /2.0); //Set the center position CGFloat radius = /2.0-5; //Set the radius CGFloat startA = - M_PI_2; //The starting point of the circle CGFloat endA = -M_PI_2 + M_PI * 2 * _progressValue; //The end point of the circle UIBezierPath *path = [UIBezierPath bezierPathWithArcCenter:center radius:radius startAngle:startA endAngle:endA clockwise:YES]; CGContextSetLineWidth(ctx, 10); //Set line width [[UIColor whiteColor] setStroke]; //Set the stroke color CGContextAddPath(ctx, ); //Add path to context CGContextStrokePath(ctx); //Render} - (void)setTimeMax:(NSInteger)timeMax { _timeMax = timeMax; = 0; = 0; [self setNeedsDisplay]; = NO; [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1]; } - (void)clearProgress { _currentTime = _timeMax; = YES; } - (void)startProgress { _currentTime += 0.1; if (_timeMax > _currentTime) { _progressValue = _currentTime/_timeMax; Plog(@"progress = %f",_progressValue); [self setNeedsDisplay]; [self performSelector:@selector(startProgress) withObject:nil afterDelay:0.1]; } if (_timeMax <= _currentTime) { [self clearProgress]; } } @end
Next is the camera controller. Since this was written in a hurry it uses a xib, so I'll just give the .m file code (HVideoViewController.m). Note that it relies on a few helpers that ship with the demo: the Plog logging macro, the WS() weak-self macro, the SCREEN_WIDTH macro, and a Utility class for HUDs.
#import "" #import <AVFoundation/> #import "" #import "" #import <Foundation/> #import <AssetsLibrary/> typedef void(^PropertyChangeBlock)(AVCaptureDevice *captureDevice); @interface HVideoViewController ()<AVCaptureFileOutputRecordingDelegate> //Touch to take a photo, press and hold the camera@property (strong, nonatomic) IBOutlet UILabel *labelTipTitle; //Video output stream@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput; //Picture output stream//@property (strong,nonatomic) AVCaptureStillImageOutput *captureStillImageOutput;//Photo output stream//Responsible for obtaining input data from AVCaptureDevice@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput; //Background task identification@property (assign,nonatomic) UIBackgroundTaskIdentifier backgroundTaskIdentifier; @property (assign,nonatomic) UIBackgroundTaskIdentifier lastBackgroundTaskIdentifier; @property (weak, nonatomic) IBOutlet UIImageView *focusCursor; //Focus on the cursor //Responsible for data transmission between input and output devices@property(nonatomic)AVCaptureSession *session; //Image preview layer, display the captured images in real time@property(nonatomic)AVCaptureVideoPreviewLayer *previewLayer; @property (strong, nonatomic) IBOutlet UIButton *btnBack; //Record@property (strong, nonatomic) IBOutlet UIButton *btnAfresh; //Sure@property (strong, nonatomic) IBOutlet UIButton *btnEnsure; //Camera switching@property (strong, nonatomic) IBOutlet UIButton *btnCamera; @property (strong, nonatomic) IBOutlet UIImageView *bgView; //Record recording time, default maximum of 60 seconds@property (assign, nonatomic) NSInteger seconds; //Record the path to save the video@property (strong, nonatomic) NSURL *saveVideoUrl; //Is it in focus?@property (assign, nonatomic) BOOL isFocus; @property (strong, nonatomic) IBOutlet NSLayoutConstraint *afreshCenterX; @property (strong, nonatomic) IBOutlet NSLayoutConstraint *ensureCenterX; @property (strong, nonatomic) IBOutlet NSLayoutConstraint *backCenterX; //Video playback@property (strong, nonatomic) HAVPlayer *player; @property (strong, nonatomic) IBOutlet HProgressView *progressView; // Whether it is a camera? YES means recording. NO means taking a photo.@property (assign, nonatomic) BOOL isVideo; @property (strong, nonatomic) UIImage *takeImage; @property (strong, nonatomic) UIImageView *takeImageView; @property (strong, nonatomic) IBOutlet UIImageView *imgRecord; @end //The time is greater than this, it is video, otherwise it is a photo#define TimeMax 1 @implementation HVideoViewController -(void)dealloc{ [self removeNotification]; } - (void)viewDidLoad { [super viewDidLoad]; // Do any additional setup after loading the view. UIImage *image = [UIImage imageNamed:@"sc_btn_take.png"]; = -(SCREEN_WIDTH/2/2)-/2/2; = /2; if ( == 0) { = 60; } [self performSelector:@selector(hiddenTipsLabel) withObject:nil afterDelay:4]; } - (void)hiddenTipsLabel { = YES; } - (void)didReceiveMemoryWarning { [super didReceiveMemoryWarning]; // Dispose of any resources that can be recreated. 
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:YES];
    [self customCamera];
    [self.session startRunning];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.session stopRunning];
}

- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
    [[UIApplication sharedApplication] setStatusBarHidden:NO];
}

- (void)customCamera {
    //Initialize the session that combines input and output
    self.session = [[AVCaptureSession alloc] init];
    //Set the resolution (the highest the device supports)
    if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        self.session.sessionPreset = AVCaptureSessionPresetHigh;
    }
    //Get the rear camera
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    //Audio input device
    AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    //Initialize the video input
    NSError *error = nil;
    self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        Plog(@"Error creating the video input: %@", error.localizedDescription);
        return;
    }
    //Audio input
    error = nil;
    AVCaptureDeviceInput *audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:&error];
    if (error) {
        NSLog(@"Error creating the audio input: %@", error.localizedDescription);
        return;
    }
    //Output object
    self.captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];//Video output
    //Add the inputs to the session
    if ([self.session canAddInput:self.captureDeviceInput]) {
        [self.session addInput:self.captureDeviceInput];
        [self.session addInput:audioCaptureDeviceInput];
        //Video stabilization
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if ([connection isVideoStabilizationSupported]) {
            connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
        }
    }
    //Add the output to the session (at the beginning it was a photo output object)
    if ([self.session canAddOutput:self.captureMovieFileOutput]) {
        [self.session addOutput:self.captureMovieFileOutput];
    }
    //Create the preview layer that shows the camera picture in real time
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.frame = self.view.bounds;
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//Fill mode
    [self.bgView.layer addSublayer:self.previewLayer];
    [self addNotificationToCaptureDevice:captureDevice];
    [self addGenstureRecognizer];
}

- (IBAction)onCancelAction:(UIButton *)sender {
    [self dismissViewControllerAnimated:YES completion:^{
        [Utility hideProgressDialog];
    }];
}

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        Plog(@"Start recording");
        //Get the video connection from the output (used to set the recording orientation)
        AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
        if (![self.captureMovieFileOutput isRecording]) {
            //If multitasking is supported, start a background task
            if ([[UIDevice currentDevice] isMultitaskingSupported]) {
                self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil];
            }
            if (self.saveVideoUrl) {
                [[NSFileManager defaultManager] removeItemAtURL:self.saveVideoUrl error:nil];
            }
            //Keep the recording orientation consistent with the preview layer
            connection.videoOrientation = [self.previewLayer connection].videoOrientation;
            NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingString:@"myMovie.mov"];
            NSLog(@"save path is :%@", outputFilePath);
            NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];
            NSLog(@"fileUrl:%@", fileUrl);
            [self.captureMovieFileOutput startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
        } else {
            [self.captureMovieFileOutput stopRecording];
        }
    }
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    if ([[touches anyObject] view] == self.imgRecord) {
        Plog(@"End touch");
        if (!self.isVideo) {
            [self performSelector:@selector(endRecord) withObject:nil afterDelay:0.3];
        } else {
            [self endRecord];
        }
    }
}

- (void)endRecord {
    [self.captureMovieFileOutput stopRecording];//Stop recording
}

- (IBAction)onAfreshAction:(UIButton *)sender {
    Plog(@"Re-record");
    [self recoverLayout];
}

- (IBAction)onEnsureAction:(UIButton *)sender {
    Plog(@"Confirm: save or send it here");
    if (self.saveVideoUrl) {
        WS(weakSelf)
        [Utility showProgressDialogText:@"Video processing..."];
        ALAssetsLibrary *assetsLibrary = [[ALAssetsLibrary alloc] init];
        [assetsLibrary writeVideoAtPathToSavedPhotosAlbum:self.saveVideoUrl completionBlock:^(NSURL *assetURL, NSError *error) {
            Plog(@"outputUrl:%@", assetURL);
            [[NSFileManager defaultManager] removeItemAtURL:weakSelf.saveVideoUrl error:nil];
            if (weakSelf.lastBackgroundTaskIdentifier != UIBackgroundTaskInvalid) {
                [[UIApplication sharedApplication] endBackgroundTask:weakSelf.lastBackgroundTaskIdentifier];
            }
            if (error) {
                Plog(@"Error saving the video to the album: %@", error.localizedDescription);
                [Utility showAllTextDialog:weakSelf.view Text:@"An error occurred when saving the video to the album"];
            } else {
                if (weakSelf.takeBlock) {
                    weakSelf.takeBlock(assetURL);
                }
                Plog(@"Saved the video to the album successfully.");
                [weakSelf onCancelAction:nil];
            }
        }];
    } else {
        //Photo
        UIImageWriteToSavedPhotosAlbum(self.takeImage, self, nil, nil);
        if (self.takeBlock) {
            self.takeBlock(self.takeImage);
        }
        [self onCancelAction:nil];
    }
}

//Switch between the front and rear cameras
- (IBAction)onCameraAction:(UIButton *)sender {
    Plog(@"Switch camera");
    AVCaptureDevice *currentDevice = [self.captureDeviceInput device];
    AVCaptureDevicePosition currentPosition = [currentDevice position];
    [self removeNotificationFromCaptureDevice:currentDevice];
    AVCaptureDevice *toChangeDevice;
    AVCaptureDevicePosition toChangePosition = AVCaptureDevicePositionFront;//Front
    if (currentPosition == AVCaptureDevicePositionUnspecified || currentPosition == AVCaptureDevicePositionFront) {
        toChangePosition = AVCaptureDevicePositionBack;//Back
    }
    toChangeDevice = [self getCameraDeviceWithPosition:toChangePosition];
    [self addNotificationToCaptureDevice:toChangeDevice];
    //Create the new input object
    AVCaptureDeviceInput *toChangeDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:toChangeDevice error:nil];
    //beginConfiguration must be called before changing the session configuration, and commitConfiguration afterwards
    [self.session beginConfiguration];
    //Remove the original input
    [self.session removeInput:self.captureDeviceInput];
    //Add the new input
    if ([self.session canAddInput:toChangeDeviceInput]) {
        [self.session addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    }
    //Commit the session configuration
    [self.session commitConfiguration];
}

- (void)onStartTranscribe:(NSURL *)fileURL {
    if ([self.captureMovieFileOutput isRecording]) {
        self.seconds--;
        if (self.seconds > 0) {
            if (self.timeMax - self.seconds >= TimeMax && !self.isVideo) {
                self.isVideo = YES;//A press longer than TimeMax means it is a video recording
                self.progressView.timeMax = self.seconds;
            }
            [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
        } else {
            if ([self.captureMovieFileOutput isRecording]) {
                [self.captureMovieFileOutput stopRecording];
            }
        }
    }
}

#pragma mark - Video output delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
    Plog(@"Start recording...");
    self.seconds = self.timeMax;
    [self performSelector:@selector(onStartTranscribe:) withObject:fileURL afterDelay:1.0];
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
    Plog(@"Video recording finished.");
    [self changeLayout];
    if (self.isVideo) {
        self.saveVideoUrl = outputFileURL;
        if (!self.player) {
            self.player = [[HAVPlayer alloc] initWithFrame:self.bgView.bounds withShowInView:self.bgView url:outputFileURL];
        } else {
            if (outputFileURL) {
                self.player.videoUrl = outputFileURL;
                self.player.hidden = NO;
            }
        }
    } else {
        //Photo
        self.saveVideoUrl = nil;
        [self videoHandlePhoto:outputFileURL];
    }
}

- (void)videoHandlePhoto:(NSURL *)url {
    AVURLAsset *urlSet = [AVURLAsset assetWithURL:url];
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];
    imageGenerator.appliesPreferredTrackTransform = YES;//Correct the orientation of the captured frame
    NSError *error = nil;
    //CMTime is a struct that represents movie time: the first argument is the value in frames, the second is frames per second. CMTimeMake(0, 30) grabs the first frame; use the same method to grab the frame at any given second.
    CMTime time = CMTimeMake(0, 30);
    CMTime actualTime;//The actual time of the generated frame
    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error) {
        Plog(@"Failed to capture a video frame: %@", error.localizedDescription);
    }
    CMTimeShow(actualTime);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    if (image) {
        Plog(@"Video frame captured successfully");
    } else {
        Plog(@"Video frame capture failed");
    }
    self.takeImage = image;
    [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
    if (!self.takeImageView) {
        self.takeImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
        [self.bgView addSubview:self.takeImageView];
    }
    self.takeImageView.hidden = NO;
    self.takeImageView.image = self.takeImage;
}

#pragma mark - Notifications
//Register notifications
- (void)setupObservers {
    NSNotificationCenter *notification = [NSNotificationCenter defaultCenter];
    [notification addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationWillResignActiveNotification object:[UIApplication sharedApplication]];
}

//Abort video recording when entering the background
- (void)applicationDidEnterBackground:(NSNotification *)notification {
    [self onCancelAction:nil];
}

/**
 *  Add notifications for the input device
 */
- (void)addNotificationToCaptureDevice:(AVCaptureDevice *)captureDevice {
    //Note: the device must allow subject-area-change monitoring before the notification can be observed
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        captureDevice.subjectAreaChangeMonitoringEnabled = YES;
    }];
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    //The capture area has changed
    [notificationCenter addObserver:self selector:@selector(areaChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

- (void)removeNotificationFromCaptureDevice:(AVCaptureDevice *)captureDevice {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:captureDevice];
}

/**
 *  Remove all notifications
 */
- (void)removeNotification {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    [notificationCenter removeObserver:self];
}

- (void)addNotificationToCaptureSession:(AVCaptureSession *)captureSession {
    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
    //A session error occurred
    [notificationCenter addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:captureSession];
}

/**
 *  The device was connected
 *
 *  @param notification Notification object
 */
- (void)deviceConnected:(NSNotification *)notification {
    NSLog(@"The device is connected...");
}

/**
 *  The device was disconnected
 *
 *  @param notification Notification object
 */
- (void)deviceDisconnected:(NSNotification *)notification {
    NSLog(@"The device is disconnected.");
}

/**
 *  The capture area changed
 *
 *  @param notification Notification object
 */
- (void)areaChange:(NSNotification *)notification {
    NSLog(@"The capture area changed...");
}

/**
 *  A session error occurred
 *
 *  @param notification Notification object
 */
- (void)sessionRuntimeError:(NSNotification *)notification {
    NSLog(@"A session error occurred.");
}

/**
 *  Get the camera at the specified position
 *
 *  @param position Camera position
 *
 *  @return Camera device
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}

/**
 *  Unified entry point for changing device properties
 *
 *  @param propertyChange the property change block
 */
- (void)changeDeviceProperty:(PropertyChangeBlock)propertyChange {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    //Note: lockForConfiguration: must be called before changing device properties, and unlockForConfiguration afterwards
    if ([captureDevice lockForConfiguration:&error]) {
        //Continuous auto white balance
        if ([captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
            [captureDevice setWhiteBalanceMode:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
        }
        //Fire the flash automatically depending on the environment
        if ([captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [captureDevice setFlashMode:AVCaptureFlashModeAuto];
        }
        propertyChange(captureDevice);
        [captureDevice unlockForConfiguration];
    } else {
        NSLog(@"An error occurred while setting device properties: %@", error.localizedDescription);
    }
}

/**
 *  Set the flash mode
 *
 *  @param flashMode flash mode
 */
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFlashModeSupported:flashMode]) {
            [captureDevice setFlashMode:flashMode];
        }
    }];
}

/**
 *  Set the focus mode
 *
 *  @param focusMode focus mode
 */
- (void)setFocusMode:(AVCaptureFocusMode)focusMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Set the exposure mode
 *
 *  @param exposureMode exposure mode
 */
- (void)setExposureMode:(AVCaptureExposureMode)exposureMode {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
    }];
}

/**
 *  Set focus and exposure at a point
 *
 *  @param point focus point
 */
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    [self changeDeviceProperty:^(AVCaptureDevice *captureDevice) {
//        if ([captureDevice isFocusPointOfInterestSupported]) {
//            [captureDevice setFocusPointOfInterest:point];
//        }
//        if ([captureDevice isExposurePointOfInterestSupported]) {
//            [captureDevice setExposurePointOfInterest:point];
//        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
    }];
}

/**
 *  Add a tap gesture; tapping focuses the camera
 */
- (void)addGenstureRecognizer {
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                                 action:@selector(tapScreen:)];
    [self.bgView addGestureRecognizer:tapGesture];
}

- (void)tapScreen:(UITapGestureRecognizer *)tapGesture {
    if ([self.session isRunning]) {
        CGPoint point = [tapGesture locationInView:self.bgView];
        //Convert UI coordinates to camera coordinates
        CGPoint cameraPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];
        [self setFocusCursorWithPoint:point];
        [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposureMode:AVCaptureExposureModeContinuousAutoExposure atPoint:cameraPoint];
    }
}

/**
 *  Position the focus cursor
 *
 *  @param point cursor position
 */
- (void)setFocusCursorWithPoint:(CGPoint)point {
    if (!self.isFocus) {
        self.isFocus = YES;
        self.focusCursor.center = point;
        self.focusCursor.transform = CGAffineTransformMakeScale(1.25, 1.25);
        self.focusCursor.alpha = 1.0;
        [UIView animateWithDuration:0.5 animations:^{
            self.focusCursor.transform = CGAffineTransformIdentity;
        } completion:^(BOOL finished) {
            [self performSelector:@selector(onHiddenFocusCurSorAction) withObject:nil afterDelay:0.5];
        }];
    }
}

- (void)onHiddenFocusCurSorAction {
    self.focusCursor.alpha = 0;
    self.isFocus = NO;
}

//Called when shooting finishes
- (void)changeLayout {
    self.imgRecord.hidden = YES;
    self.btnCamera.hidden = YES;
    self.btnAfresh.hidden = NO;
    self.btnEnsure.hidden = NO;
    self.btnBack.hidden = YES;
    if (self.isVideo) {
        [self.progressView clearProgress];
    }
    self.afreshCenterX.constant = -(SCREEN_WIDTH/2/2);
    self.ensureCenterX.constant = SCREEN_WIDTH/2/2;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
    self.lastBackgroundTaskIdentifier = self.backgroundTaskIdentifier;
    self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
    [self.session stopRunning];
}

//Called when re-shooting
- (void)recoverLayout {
    if (self.isVideo) {
        self.isVideo = NO;
        [self.player stopPlayer];
        self.player.hidden = YES;
    }
    [self.session startRunning];
    if (!self.takeImageView.hidden) {
        self.takeImageView.hidden = YES;
    }
    self.afreshCenterX.constant = 0;
    self.ensureCenterX.constant = 0;
    self.imgRecord.hidden = NO;
    self.btnCamera.hidden = NO;
    self.btnAfresh.hidden = YES;
    self.btnEnsure.hidden = YES;
    self.btnBack.hidden = NO;
    [UIView animateWithDuration:0.25 animations:^{
        [self.view layoutIfNeeded];
    }];
}

@end
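The controller's header is not shown either. Judging from the usage example below, it presumably exposes at least the maximum recording time and a result callback; a minimal inferred sketch (the names timeMax and takeBlock are inferred from context, not copied from the demo):

//HVideoViewController.h (inferred sketch)
#import <UIKit/UIKit.h>

@interface HVideoViewController : UIViewController

//Maximum recording time in seconds (defaults to 60 when left at 0)
@property (assign, nonatomic) NSInteger timeMax;

//Callback with the result: an NSURL (video saved to the album) or a UIImage (photo)
@property (copy, nonatomic) void (^takeBlock)(id item);

@end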
It's also quite easy to use:
- (IBAction)onCameraAction:(UIButton *)sender {
    //Er... since it is a demo it uses a xib. Adapt it to your own needs; this demo only provides an idea, so don't drag it into your project as-is.
    HVideoViewController *ctrl = [[NSBundle mainBundle] loadNibNamed:@"HVideoViewController" owner:nil options:nil].lastObject;
    ctrl.timeMax = 30;//Set the maximum recording time
    ctrl.takeBlock = ^(id item) {
        if ([item isKindOfClass:[NSURL class]]) {
            NSURL *videoURL = item;
            //Video url
        } else {
            //Picture
        }
    };
    [self presentViewController:ctrl animated:YES completion:nil];
}
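One thing worth doing before presenting the controller is checking camera permission, since a capture session just shows a black preview without it. A minimal sketch using AVFoundation's authorization API; this check is my addition, not part of the demo:

//Assumes AVFoundation.h is imported; place this before presenting HVideoViewController
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
    //Ask for camera access the first time
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                //Present HVideoViewController here
            });
        }
    }];
} else if (status == AVAuthorizationStatusAuthorized) {
    //Present HVideoViewController here
}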
Finally, the demo address. Don't touch it if you don't like it -_-\
KJCamera
That's it. The demo is fairly simple; I hope it helps. Thanks!