iOS QR code scanning Swift

Implement scanning a QR code with the camera, and recognizing a QR code from an image selected from the photo album.

 

1. Import frameworks

import AVFoundation  // QR code scanning
import CoreImage     // QR code recognition from images
import AudioToolbox  // system sound

2. Conform to the protocols

AVCaptureMetadataOutputObjectsDelegate // scanning QR codes with the camera
CALayerDelegate                        // drawing the mask layer
UINavigationControllerDelegate         // image picker controller
UIImagePickerControllerDelegate        // image picker controller
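
For context, a minimal sketch of a view controller declaration that conforms to these protocols. ScanViewController and the stored properties are assumed names, chosen to match the identifiers used in the code below; UIKit is assumed to be imported as well, and the methods in the next section live inside this class:

class ScanViewController: UIViewController,
                          AVCaptureMetadataOutputObjectsDelegate,
                          CALayerDelegate,
                          UINavigationControllerDelegate,
                          UIImagePickerControllerDelegate {

    var session: AVCaptureSession?     // capture session
    var maskLayer: CALayer?            // semi-transparent mask around the scan box
    var stringValue: String?           // the decoded QR code string
    var pickedImage: UIImage?          // image chosen from the photo library

    let scanFrameView = UIView()       // the scan box in the middle of the screen
    let scanline = UIView()            // the moving scan line
}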

3. Code implementation

//Set up the camera and capture session
    func setUpCamera() {
        //Create device
        guard let cameraDevice = AVCaptureDevice.default(for: .video) else {
            print("Camera not supported")
            return
        }
        
        //Create input, output stream
        let deviceInput: AVCaptureInput
        do {
            deviceInput = try AVCaptureDeviceInput(device: cameraDevice)
        } catch {
            print("Camera not supported")
            return
        }
        let deviceOutput = AVCaptureMetadataOutput()
        deviceOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
        
        //session settings
        let session = AVCaptureSession()
    
        //Capture quality
        if session.canSetSessionPreset(.high) {
            session.sessionPreset = .high
        }
        //Add input, output stream
        if session.canAddInput(deviceInput) {
            session.addInput(deviceInput)
        }
        if session.canAddOutput(deviceOutput) {
            session.addOutput(deviceOutput)
        }
        
        //Supported barcode formats (must be set after the output has been added to the session)
        deviceOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.ean13,
                                       AVMetadataObject.ObjectType.ean8,
                                       AVMetadataObject.ObjectType.code128,
                                       AVMetadataObject.ObjectType.qr]
        
        //Set preview layer
        let previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        previewLayer.frame = view.bounds
        view.layer.insertSublayer(previewLayer, at: 0)
        
        //Set the scanning area here (or calculate it with your own method, see the notes below)
        NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureInputPortFormatDescriptionDidChange, object: nil, queue: nil) { [weak self](noti) in
            guard let strongSelf = self else { return }
            deviceOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: strongSelf.scanFrameView.frame)
        }
        
        //mask
        let shadowLayer = CALayer()
        shadowLayer.frame = view.bounds
        shadowLayer.delegate = self
        view.layer.insertSublayer(shadowLayer, above: previewLayer)
        shadowLayer.setNeedsDisplay()
        
        self.maskLayer = shadowLayer
        self.session = session
    }
    
    //MARK: CALayerDelegate, draw the mask
    func draw(_ layer: CALayer, in ctx: CGContext) {
        if layer == maskLayer, let maskLayer = maskLayer {
            //Mask color (47/47/47 at 60% alpha)
            ctx.setFillColor(UIColor(red: 47/255.0, green: 47/255.0, blue: 47/255.0, alpha: 0.6).cgColor)
            ctx.fill(maskLayer.bounds)
            let scanFrame = view.convert(scanFrameView.frame, from: scanFrameView.superview)
            //Clear the scan box area in the middle
            ctx.clear(scanFrame)
        }
    }
    
    //Animate the scan line from the top to the bottom of the scan box
    func scanAction() {
        let startPoint = CGPoint(x: scanline.center.x, y: scanFrameView.frame.minY)
        let endPoint = CGPoint(x: scanline.center.x, y: scanFrameView.frame.maxY)
        let basicAnimation = CABasicAnimation(keyPath: "position")
        basicAnimation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
        basicAnimation.fromValue = NSValue(cgPoint: startPoint)
        basicAnimation.toValue = NSValue(cgPoint: endPoint)
        basicAnimation.duration = 4
        basicAnimation.repeatCount = .infinity
        basicAnimation.autoreverses = false
        scanline.layer.add(basicAnimation, forKey: nil)
    }
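
To actually start scanning, the session still has to be started, and camera permission requested first (NSCameraUsageDescription must be present in Info.plist). A minimal usage sketch, assuming it runs in viewDidLoad of the same controller; the wiring shown here is an assumption, not part of the original code:

    override func viewDidLoad() {
        super.viewDidLoad()
        //Ask for camera permission, then set up and start the session
        AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in
            DispatchQueue.main.async {
                guard granted, let self = self else { return }
                self.setUpCamera()
                self.scanAction()
                self.session?.startRunning()
            }
        }
    }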

Notes:

1. If the scanning area is not set, it defaults to the whole screen.

To customize the scan area, set the rectOfInterest property of AVCaptureMetadataOutput. rectOfInterest is a CGRect, but its four components do not follow the usual (x, y, width, height) order: they are (y, x, height, width), expressed as proportions of the capture area in the range 0 to 1.

There are two ways to set it: (1) calculate it with a custom method, or (2) let the system convert the rect, which must be done inside the AVCaptureInputPortFormatDescriptionDidChange notification handler, otherwise it has no effect. Remember to remove the observer in the deinit method.

//Custom method to calculate the scan box rect of interest
    func rectOfInterestByScanViewRect(rect: CGRect) -> CGRect {
        let width = self.view.frame.size.width
        let height = self.view.frame.size.height
        let x = rect.minY / height
        let y = rect.minX / width
        let w = rect.size.height / height
        let h = rect.size.width / width
        return CGRect(x: x, y: y, width: w, height: h)
    }
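
For illustration, the result of this method would then be assigned directly (deviceOutput and scanFrameView as in the code above):

    //Assign the calculated proportional rect; no notification is needed for this approach
    deviceOutput.rectOfInterest = rectOfInterestByScanViewRect(rect: scanFrameView.frame)
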
//System method implementation
    NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureInputPortFormatDescriptionDidChange, object: nil, queue: nil) { [weak self] (noti) in
        guard let strongSelf = self else { return }
        deviceOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: strongSelf.scanFrameView.frame)
    }

2. To create the mask around the scan area, one option is to place plain UIViews around the scan box (a minimal sketch follows below); another is to create a layer as shown above and clear the scan box area in the middle.
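
A rough sketch of the UIView option, assuming (as above) that the scan box is scanFrameView and that this runs once layout is finished; addMaskViews is a hypothetical helper name, not from the original code:

    //Four dimmed views laid out around the scan box form the mask
    func addMaskViews() {
        let maskColor = UIColor(red: 47/255.0, green: 47/255.0, blue: 47/255.0, alpha: 0.6)
        let scanFrame = view.convert(scanFrameView.frame, from: scanFrameView.superview)
        let bounds = view.bounds
        let frames = [
            CGRect(x: 0, y: 0, width: bounds.width, height: scanFrame.minY),                                     // top
            CGRect(x: 0, y: scanFrame.maxY, width: bounds.width, height: bounds.height - scanFrame.maxY),        // bottom
            CGRect(x: 0, y: scanFrame.minY, width: scanFrame.minX, height: scanFrame.height),                    // left
            CGRect(x: scanFrame.maxX, y: scanFrame.minY, width: bounds.width - scanFrame.maxX, height: scanFrame.height) // right
        ]
        for frame in frames {
            let maskView = UIView(frame: frame)
            maskView.backgroundColor = maskColor
            view.addSubview(maskView)
        }
    }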

When using the layer approach, remember to conform to the CALayerDelegate protocol, set the layer's delegate, and finally call setNeedsDisplay(); draw(_:in:) will not be called without setNeedsDisplay().

Also set the layer's delegate to nil in the deinit method, otherwise a crash may occur:

deinit {
        maskLayer?.delegate = nil
        NotificationCenter.default.removeObserver(self)
        print("deinit ~ \(self)")
    }

Recognizing the QR code with the camera

// MARK: - AVCaptureMetadataOutputObjectsDelegate
    //Delegate callback when a barcode is captured
    func metadataOutput(_ captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects.count > 0 {
            playSystemSound()
            //Stop scanning
            session?.stopRunning()
            //Take the first code that was recognized
            let metadataObject = metadataObjects.first as? AVMetadataMachineReadableCodeObject
            //The decoded string
            stringValue = metadataObject?.stringValue

            //do something with stringValue...
        }
    }
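
As one hypothetical example of the "do something" step (showResult is an assumed helper, not from the original code), the decoded string could be presented in an alert and scanning resumed afterwards:

    //Hypothetical result handling: show the decoded string, then resume scanning
    func showResult(_ value: String?) {
        let alert = UIAlertController(title: "Scan result", message: value ?? "No value", preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default) { [weak self] _ in
            self?.session?.startRunning()   // resume scanning once the alert is dismissed
        })
        present(alert, animated: true, completion: nil)
    }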

Recognizing a QR code from a photo in the album

//MARK: UIImagePickerControllerDelegate
    //Open local album
    func openLocalPhoto() {
        let imagePicker = UIImagePickerController()
        imagePicker.sourceType = .photoLibrary
        imagePicker.delegate = self
        self.present(imagePicker, animated: true, completion: nil)
    }
    
    //Finished picking
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        //Only handle photos
        let type = info[UIImagePickerController.InfoKey.mediaType] as? String
        if type == "public.image",
           let image = info[UIImagePickerController.InfoKey.originalImage] as? UIImage {
            //Keep the selected photo
            pickedImage = image
            print(image.size)
            picker.dismiss(animated: true) {
                self.scanQRCodeFromPhotoLibrary(image: image)
            }
        }
    }
    
    //Cancelled selection
    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        picker.dismiss(animated: true, completion: nil)
    }
    
    //Recognize a QR code in the image
    @objc func scanQRCodeFromPhotoLibrary(image: UIImage) {
        guard let cgImage = image.cgImage else { return }
        //Accuracy is set to high here, which may take slightly longer
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: CIImage(cgImage: cgImage)) ?? []
        //Several QR codes may be detected; only the first one (left-most / top-most) is used here
        for feature in features {
            if let qrFeature = feature as? CIQRCodeFeature {
                playSystemSound()
                session?.stopRunning()
                //The decoded string
                stringValue = qrFeature.messageString

                //do something...
                return
            }
        }
        //No QR code recognized

        //do something...
    }
    
    func playSystemSound() {
        DispatchQueue.global().async {
            //Play the success sound effect
            guard let url = Bundle.main.url(forResource: "scanSuccess.wav", withExtension: nil) else { return }
            var soundID: SystemSoundID = 0
            AudioServicesCreateSystemSoundID(url as CFURL, &soundID)
            AudioServicesPlaySystemSound(soundID)
        }
    }
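
Optionally, a vibration can accompany the sound; kSystemSoundID_Vibrate is a system-provided constant from AudioToolbox. This helper is an addition for illustration, not part of the original code:

    //Optional: vibrate on a successful scan
    func vibrateOnSuccess() {
        AudioServicesPlaySystemSound(kSystemSoundID_Vibrate)
    }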

 
