iOS QR code generation and scanning
More and more applications now include QR code features. In iOS development, many developers rely on third-party controls for scanning and generating QR codes, but in my experience their recognition rate is not great. I recently had the chance to study the native frameworks (Core Image and AVFoundation) and put together the following code.
Generate QR code
/**
 * @author Half full, 15-12-18
 *
 * @brief Generate a QR code image
 *
 * @param code   content to encode in the QR code
 * @param width  width of the QR code image
 * @param height height of the QR code image
 *
 * @return a UIImage object
 */
- (UIImage *)generateQRCode:(NSString *)code width:(CGFloat)width height:(CGFloat)height
{
    NSData *data = [code dataUsingEncoding:NSISOLatin1StringEncoding allowLossyConversion:NO];
    // Use the built-in Core Image filter to generate the code
    CIFilter *filter = [CIFilter filterWithName:@"CIQRCodeGenerator"];
    [filter setValue:data forKey:@"inputMessage"];
    [filter setValue:@"H" forKey:@"inputCorrectionLevel"]; // highest error-correction level
    CIImage *qrcodeImage = [filter outputImage];
    // The raw output is tiny, so scale it up to the requested size
    CGFloat scaleX = width / qrcodeImage.extent.size.width;
    CGFloat scaleY = height / qrcodeImage.extent.size.height;
    CIImage *transformedImage = [qrcodeImage imageByApplyingTransform:CGAffineTransformScale(CGAffineTransformIdentity, scaleX, scaleY)];
    return [UIImage imageWithCIImage:transformedImage];
}
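For reference, here is a minimal sketch of how this method might be called from a view controller; the qrImageView variable and the encoded string are placeholders for illustration only:

// Minimal usage sketch (qrImageView and the URL string are illustrative)
UIImageView *qrImageView = [[UIImageView alloc] initWithFrame:CGRectMake(20, 100, 200, 200)];
qrImageView.image = [self generateQRCode:@"https://www.example.com" width:200 height:200];
[self.view addSubview:qrImageView];

Note that an image created with imageWithCIImage: is rendered lazily; if you need to save it or process it further, draw it into a bitmap graphics context (or render it through a CIContext) first.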
Scan the QR code
#import <AVFoundation/AVFoundation.h>

// Size of the scan window and of the corner markers.
// (The original constant values were lost in formatting; these are reasonable defaults.)
static const float lightWidth = 250;
static const float lightHeight = 250;
static const float crossLineWidth = 2;
static const float crossLineHeight = 15;

@interface BBScanCodeViewController () <AVCaptureMetadataOutputObjectsDelegate>
{
    float leftWith;
    float topHeight;
}

@property (strong, nonatomic) AVCaptureDevice *captureDevice;
@property (strong, nonatomic) AVCaptureDeviceInput *captureInput;
@property (strong, nonatomic) AVCaptureMetadataOutput *captureOutput;
@property (strong, nonatomic) AVCaptureSession *captureSession;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *capturePreview;
@property (strong, nonatomic) UIButton *flashLightBtn;
@property (strong, nonatomic) UIImageView *lineImageView;

@end

@implementation BBScanCodeViewController

// delegate and isRectScan are declared in BBScanCodeViewController.h
@synthesize captureDevice = _captureDevice;
@synthesize captureInput = _captureInput;
@synthesize captureOutput = _captureOutput;
@synthesize capturePreview = _capturePreview;
@synthesize captureSession = _captureSession;
@synthesize delegate = _delegate;
@synthesize isRectScan = _isRectScan;
@synthesize lineImageView = _lineImageView;
@synthesize flashLightBtn = _flashLightBtn;

- (void)viewDidLoad
{
    [super viewDidLoad];
    CGRect screenRect = [UIScreen mainScreen].bounds;
    // Centre the scan window on the screen
    leftWith = (screenRect.size.width - lightWidth) / 2;
    topHeight = (screenRect.size.height - lightHeight) / 2;
#if !TARGET_IPHONE_SIMULATOR
    [self initScanCode];
#endif
    [self initLayer];
    [self initViewControl];
    // Listen for the app being sent to the background (home button pressed)
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willResignActiveNotification) name:UIApplicationWillResignActiveNotification object:nil];
    // Listen for the app becoming active again
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didBecomeActiveNotification) name:UIApplicationDidBecomeActiveNotification object:nil];
}

- (void)viewWillDisappear:(BOOL)animated
{
    [self stopScanCode];
    [super viewWillDisappear:animated];
}

- (void)willResignActiveNotification
{
    _flashLightBtn.selected = NO;
}

- (void)didBecomeActiveNotification
{
}

// Set up the on-screen controls, such as the flashlight button and the scan line.
- (void)initViewControl
{
    @autoreleasepool {
        _flashLightBtn = [UIButton buttonWithType:UIButtonTypeCustom];
        [_flashLightBtn setImage:[UIImage imageNamed:@""] forState:UIControlStateNormal];   // torch-off icon
        [_flashLightBtn setImage:[UIImage imageNamed:@""] forState:UIControlStateSelected]; // torch-on icon
        // The original frame values were lost in formatting; place the button below the scan window
        _flashLightBtn.frame = CGRectMake(leftWith, topHeight + lightHeight + 20, 40, 40);
        [_flashLightBtn addTarget:self action:@selector(systemFlashLight) forControlEvents:UIControlEventTouchUpInside];
        [self.view addSubview:_flashLightBtn];

        _lineImageView = [[UIImageView alloc] initWithImage:nil];
        _lineImageView.backgroundColor = [UIColor greenColor];
        _lineImageView.frame = CGRectMake(leftWith, topHeight, lightWidth, 2);
        [self.view addSubview:_lineImageView];
        [self scanLineAnimation];
    }
}

// Animate the scan line from the top of the scan window to the bottom.
- (void)scanLineAnimation
{
    [UIView beginAnimations:nil context:nil];
    [UIView setAnimationDuration:2.0]; // original duration value lost in formatting
    // Set the animation delegate and the selector called when the animation finishes
    [UIView setAnimationDelegate:self];
    [UIView setAnimationDidStopSelector:@selector(didViewAnimation)];
    _lineImageView.frame = CGRectMake(leftWith, topHeight + lightHeight - 2, lightWidth, 2);
    [UIView commitAnimations];
}

- (void)didViewAnimation
{
    // Reset the line to the top and run the animation again
    _lineImageView.frame = CGRectMake(leftWith, topHeight, lightWidth, 2);
    [self scanLineAnimation];
}

- (void)insertLayerWithFrame:(CGRect)frame withBackgroundColor:(UIColor *)backgroundColor
{
    @autoreleasepool {
        CALayer *layer = [CALayer layer];
        layer.backgroundColor = backgroundColor.CGColor;
        layer.frame = frame;
        [self.view.layer addSublayer:layer];
    }
}

// Draw the translucent mask around the scan window and the green corner markers.
- (void)initLayer
{
    // Common parameters
    UIColor *fillColor = [UIColor colorWithRed:0xae/255.0f green:0xae/255.0f blue:0xae/255.0f alpha:0.4];
    UIColor *crossColor = [UIColor greenColor];
    CGRect screenRect = [UIScreen mainScreen].bounds;
    // Four translucent strips around the scan window
    [self insertLayerWithFrame:CGRectMake(0, 0, leftWith, screenRect.size.height) withBackgroundColor:fillColor];
    [self insertLayerWithFrame:CGRectMake(leftWith, 0, lightWidth, topHeight) withBackgroundColor:fillColor];
    [self insertLayerWithFrame:CGRectMake(leftWith + lightWidth, 0, leftWith, screenRect.size.height) withBackgroundColor:fillColor];
    [self insertLayerWithFrame:CGRectMake(leftWith, topHeight + lightHeight, lightWidth, topHeight) withBackgroundColor:fillColor];
    // Corner markers: two short bars per corner
    [self insertLayerWithFrame:CGRectMake(leftWith, topHeight, crossLineWidth, crossLineHeight) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith, topHeight, crossLineHeight, crossLineWidth) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith + lightWidth - crossLineHeight, topHeight, crossLineHeight, crossLineWidth) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith + lightWidth - crossLineWidth, topHeight, crossLineWidth, crossLineHeight) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith, topHeight + lightHeight - crossLineHeight, crossLineWidth, crossLineHeight) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith, topHeight + lightHeight - crossLineWidth, crossLineHeight, crossLineWidth) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith + lightWidth - crossLineHeight, topHeight + lightHeight - crossLineWidth, crossLineHeight, crossLineWidth) withBackgroundColor:crossColor];
    [self insertLayerWithFrame:CGRectMake(leftWith + lightWidth - crossLineWidth, topHeight + lightHeight - crossLineHeight, crossLineWidth, crossLineHeight) withBackgroundColor:crossColor];
}

// Configure the capture session: camera input, metadata output and preview layer.
- (void)initScanCode
{
    @autoreleasepool {
        _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        _captureInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:nil];
        _captureOutput = [[AVCaptureMetadataOutput alloc] init];
        [_captureOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        if (_isRectScan) {
            // Restrict scanning to the scan window. rectOfInterest is normalised
            // and expressed as (y, x, height, width) relative to the preview.
            CGRect screenRect = [UIScreen mainScreen].bounds;
            [_captureOutput setRectOfInterest:CGRectMake(topHeight / screenRect.size.height,
                                                         leftWith / screenRect.size.width,
                                                         lightHeight / screenRect.size.height,
                                                         lightWidth / screenRect.size.width)];
        }
        _captureSession = [[AVCaptureSession alloc] init];
        [_captureSession setSessionPreset:AVCaptureSessionPresetHigh];
        if ([_captureSession canAddInput:_captureInput]) {
            [_captureSession addInput:_captureInput];
        }
        if ([_captureSession canAddOutput:_captureOutput]) {
            [_captureSession addOutput:_captureOutput];
        }
        // Only report QR codes (set after the output has been added to the session)
        _captureOutput.metadataObjectTypes = @[AVMetadataObjectTypeQRCode];
        _capturePreview = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
        _capturePreview.videoGravity = AVLayerVideoGravityResizeAspectFill;
        _capturePreview.frame = self.view.bounds;
        [self.view.layer insertSublayer:_capturePreview atIndex:0];
        [_captureSession startRunning];
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    if (metadataObjects != nil && [metadataObjects count] > 0) {
        AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
        NSString *scanCodeResult;
        if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
            [self stopScanCode];
            scanCodeResult = metadataObj.stringValue;
            // Pass the result back through the delegate
            if (_delegate && [_delegate respondsToSelector:@selector(scanCodeResultByViewController:withScanCodeResult:)]) {
                [_delegate scanCodeResultByViewController:self withScanCodeResult:scanCodeResult];
                [self.navigationController popViewControllerAnimated:YES];
            }
        } else {
            NSLog(@"Error in scan information!");
        }
    }
}

// Toggle the torch on and off.
- (void)systemFlashLight
{
#if !TARGET_IPHONE_SIMULATOR
    if ([_captureDevice hasTorch] && [_captureDevice hasFlash]) {
        [_captureSession beginConfiguration];
        [_captureDevice lockForConfiguration:nil];
        if (_captureDevice.torchMode == AVCaptureTorchModeOff) {
            _flashLightBtn.selected = YES;
            [_captureDevice setTorchMode:AVCaptureTorchModeOn];
            [_captureDevice setFlashMode:AVCaptureFlashModeOn];
        } else {
            _flashLightBtn.selected = NO;
            [_captureDevice setTorchMode:AVCaptureTorchModeOff];
            [_captureDevice setFlashMode:AVCaptureFlashModeOff];
        }
        [_captureDevice unlockForConfiguration];
        [_captureSession commitConfiguration];
    }
#else
    // CommonUtil and G_ALERTTITLE come from the author's project
    [CommonUtil showAlert:G_ALERTTITLE withMessage:@"The simulator cannot use the camera!"];
#endif
}

- (void)stopScanCode
{
    [_captureSession stopRunning];
    _captureSession = nil;
    _captureDevice = nil;
    _captureInput = nil;
    _captureOutput = nil;
    [_capturePreview removeFromSuperlayer];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}

@end
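To make the flow clearer, here is a rough sketch of the calling side. It assumes the header declares a delegate protocol, named BBScanCodeDelegate here purely for illustration, containing the scanCodeResultByViewController:withScanCodeResult: method used above, together with the delegate and isRectScan properties:

// Hypothetical caller; BBScanCodeDelegate is an assumed protocol name
@interface MyViewController () <BBScanCodeDelegate>
@end

@implementation MyViewController

- (void)startScan
{
    BBScanCodeViewController *scanVC = [[BBScanCodeViewController alloc] init];
    scanVC.delegate = self;
    scanVC.isRectScan = YES; // only decode codes inside the highlighted window
    [self.navigationController pushViewController:scanVC animated:YES];
}

// Called by BBScanCodeViewController once a QR code has been decoded
- (void)scanCodeResultByViewController:(BBScanCodeViewController *)viewController
                    withScanCodeResult:(NSString *)result
{
    NSLog(@"Scanned QR code content: %@", result);
}

@end

On iOS 10 and later you also need an NSCameraUsageDescription entry in Info.plist, otherwise the capture session cannot access the camera.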
Thank you for reading. I hope this helps, and thanks for your support of this site!