iOS 二維碼掃描 Swift
阿新 • • 發佈:2018-11-08
實現相機掃描二維碼, 相簿選取圖片識別二維碼
一. 匯入 framework
import AVFoundation//二維碼掃描
import CoreImage//二維碼識別
import AudioToolbox//系統音效
二. 服從協議
AVCaptureMetadataOutputObjectsDelegate//掃描二維碼
CALayerDelegate// CALayer 繪製
UINavigationControllerDelegate//圖片選擇控制器
UIImagePickerControllerDelegate
三.程式碼實現
// Configure the camera capture session, metadata output, preview layer and mask.
func setUpCamera() {
    // Create the capture device; bail out when no camera is available.
    guard let cameraDevice = AVCaptureDevice.default(for: .video) else {
        print("不支援攝像頭")
        return
    }
    // Create the input stream.
    let deviceInput: AVCaptureInput
    do {
        deviceInput = try AVCaptureDeviceInput(device: cameraDevice)
    } catch {
        print("不支援攝像頭")
        return
    }
    // Metadata output; deliver recognition callbacks on the main queue.
    let deviceOutput = AVCaptureMetadataOutput()
    deviceOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

    // Session setup. BUG FIX: the original only *queried*
    // canSetSessionPreset(_:) and discarded the Bool result, so the capture
    // quality preset was never actually assigned.
    let session = AVCaptureSession()
    if session.canSetSessionPreset(.high) {
        session.sessionPreset = .high
    }
    // Attach input and output streams.
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }
    if session.canAddOutput(deviceOutput) {
        session.addOutput(deviceOutput)
    }
    // Supported barcode formats (must be set *after* the output is added to
    // the session, otherwise setting metadataObjectTypes throws).
    deviceOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.ean13,
                                        AVMetadataObject.ObjectType.ean8,
                                        AVMetadataObject.ObjectType.code128,
                                        AVMetadataObject.ObjectType.qr]
    // Preview layer showing the camera feed behind everything else.
    let previewLayer = AVCaptureVideoPreviewLayer(session: session)
    previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    previewLayer.frame = view.bounds
    view.layer.insertSublayer(previewLayer, at: 0)
    // Restrict the scan area once the input port's format is known — before
    // this notification fires the rect conversion does not produce usable values.
    // NOTE(review): addObserver(forName:object:queue:using:) returns an opaque
    // token that must be passed to removeObserver(_:); the removeObserver(self)
    // call in deinit does NOT unregister block-based observers. Store this
    // token in a property and remove it explicitly in deinit.
    NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureInputPortFormatDescriptionDidChange, object: nil, queue: nil) { [weak self] _ in
        guard let strongSelf = self else { return }
        deviceOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: strongSelf.scanFrameView.frame)
    }
    // Dimming mask above the preview; its content is drawn in draw(_:in:).
    let shadowLayer = CALayer()
    shadowLayer.frame = view.bounds
    shadowLayer.delegate = self
    view.layer.insertSublayer(shadowLayer, above: previewLayer)
    shadowLayer.setNeedsDisplay()
    self.maskLayer = shadowLayer
    self.session = session
}

// MARK: CALayerDelegate — draw the dimming mask with a transparent scan window.
func draw(_ layer: CALayer, in ctx: CGContext) {
    guard layer == maskLayer, let maskFrame = maskLayer?.frame else { return }
    // BUG FIX: the original opened an image context with
    // UIGraphicsBeginImageContextWithOptions and never ended or used it,
    // leaking a bitmap context on every redraw — all drawing belongs in `ctx`.
    // Mask (dimming) color.
    ctx.setFillColor(UIColor.RGBColor(r: 47, g: 47, b: 47, alpha: 0.6).cgColor)
    ctx.fill(maskFrame)
    // Punch a clear hole where the scan frame sits.
    let scanFrame = view.convert(scanFrameView.frame, from: scanFrameView.superview)
    ctx.clear(scanFrame)
}

// Animate the scan line sweeping from the top to the bottom of the scan frame.
func scanAction() {
    let startPoint = CGPoint(x: scanline.center.x, y: scanFrameView.frame.minY)
    let endPoint = CGPoint(x: scanline.center.x, y: scanFrameView.frame.maxY)
    let basicAnimation = CABasicAnimation(keyPath: "position")
    basicAnimation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
    basicAnimation.fromValue = NSValue(cgPoint: startPoint)
    basicAnimation.toValue = NSValue(cgPoint: endPoint)
    basicAnimation.duration = 4
    basicAnimation.repeatCount = MAXFLOAT
    basicAnimation.autoreverses = false
    scanline.layer.add(basicAnimation, forKey: nil)
}
注意:
1. 掃描區域不設定的時候, 預設為整個螢幕;
自定義大小時設定 AVCaptureMetadataOutput 的 rectOfInterest 屬性, rectOfInterest 為 CGRect 型別, 但是它的四個值和傳統的不一樣,是(y,x,高,寬)且是比例值,取值範圍為0~1。
設定方法有兩種: (1) 自定義方法計算 (2) 系統方法計算, 此時要放在通知裡, 否則不起作用. 記得在 deinit 方法中移除觀察者. 注意: 以 addObserver(forName:object:queue:using:) 方式新增的 block 觀察者, 必須保存方法返回的 token 並呼叫 removeObserver(_:) 移除; removeObserver(self) 無法移除這類觀察者.
// Convert a scan-frame rect (in view coordinates) into the normalized,
// axis-swapped (y, x, height, width) form that AVCaptureMetadataOutput's
// rectOfInterest expects, where every component is a 0–1 fraction.
func rectOfInterestByScanViewRect(rect: CGRect) -> CGRect {
    let bounds = self.view.frame.size
    // Axes are swapped: the metadata output's x axis runs along the view's y axis.
    return CGRect(x: rect.minY / bounds.height,
                  y: rect.minX / bounds.width,
                  width: rect.height / bounds.height,
                  height: rect.width / bounds.width)
}
// System-method approach: let AVFoundation convert the scan frame into the
// normalized rect that rectOfInterest expects. This must run inside the
// AVCaptureInputPortFormatDescriptionDidChange handler — before that
// notification fires, the conversion does not take effect.
NotificationCenter.default.addObserver(forName: NSNotification.Name.AVCaptureInputPortFormatDescriptionDidChange, object: nil, queue: nil) { [weak self](noti) in
    guard let strongSelf = self else { return }
    deviceOutput.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: strongSelf.scanFrameView.frame)
}
2. 建立掃描區域周圍的蒙板時, 一種方法是在掃描框周圍新增 UIView, 也可以像上文中建立一個 layer, 然後去掉中間掃描框的範圍.
使用 layer 時要注意服從 CALayerDelegate 協議, 設定代理, 最後 setNeedsDisplay(), 不寫 setNeedsDisplay() 不會執行.
在deinit 方法中把 delegate 置為 nil, 否則可能 crash
// Tear down observers and break the layer-delegate link; a CALayer whose
// delegate has been deallocated can crash on a dangling reference.
deinit {
    NotificationCenter.default.removeObserver(self)
    maskLayer?.delegate = nil
    print("deinit ~ \(self)")
}
相機識別
// MARK: - AVCaptureMetadataOutputObjectsDelegate
/// Called (on the main queue, as configured in setUpCamera) whenever the
/// capture session recognizes one or more barcodes in the rect of interest.
func metadataOutput(_ captureOutput: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
    // BUG FIX: the original checked `count > 0` and then played the success
    // sound / stopped the session even when the cast to a machine-readable
    // code object failed (leaving stringValue nil). Only react to a code we
    // can actually decode; take the first one.
    guard let metadataobject = metadataObjects.first as? AVMetadataMachineReadableCodeObject else { return }
    playSystemSound()
    // Stop scanning so the same code is not reported repeatedly.
    session?.stopRunning()
    // The decoded payload string.
    stringValue = metadataobject.stringValue
    //todoSomething.....
}
相簿選取照片識別
//MARK: UIImagePickerControllerDelegate
/// Present the system photo-library picker.
func openLocalPhoto() {
    let picker = UIImagePickerController()
    picker.delegate = self
    picker.sourceType = .photoLibrary
    present(picker, animated: true, completion: nil)
}
/// Picker finished: store the chosen photo and hand it to the QR-code
/// detector once the picker has been dismissed.
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
    // BUG FIX: the original force-cast the media type (`as! String`) — a nil
    // entry would crash — and never dismissed the picker when the selection
    // was not an image, leaving it stuck on screen.
    guard let type = info[UIImagePickerController.InfoKey.mediaType] as? String,
          type == "public.image",
          let image = info[UIImagePickerController.InfoKey.originalImage] as? UIImage else {
        picker.dismiss(animated: true, completion: nil)
        return
    }
    pickedImage = image
    print(image.size)
    // Dismiss first, then scan, so the scan result is free to present UI.
    picker.dismiss(animated: true) {
        self.scanQRCodeFromPhotoLibrary(image: image)
    }
}
/// Cancel tapped: just dismiss the picker.
func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
    picker.dismiss(animated: true)
}
/// Detect and decode a QR code inside an image chosen from the photo library.
@objc func scanQRCodeFromPhotoLibrary(image: UIImage) {
    guard let cgImage = image.cgImage else { return }
    // High accuracy costs a little time but decodes more reliably.
    guard let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh]) else {
        // Detector itself could not be created.
        return
    }
    let features = detector.features(in: CIImage(cgImage: cgImage))
    // The image may contain several codes; take the first (left/top-most).
    for feature in features {
        if let qrFeature = feature as? CIQRCodeFeature {
            playSystemSound()
            session?.stopRunning()
            // The decoded payload string.
            stringValue = qrFeature.messageString
            //to do something.....
            return
        }
    }
    // BUG FIX: the original attached its "no QR code found" branch to the
    // `else` of detector *creation*, so an image containing zero QR features
    // fell through silently. Handle the not-found case here instead.
    //to do something...
}
/// Play the scan-success sound effect off the main thread.
func playSystemSound() {
    // Cache the created sound ID: the original called
    // AudioServicesCreateSystemSoundID on every invocation and never disposed
    // the result, leaking one system sound object per scan. The magic initial
    // value 8787 was also misleading — Create overwrites it unconditionally.
    // NOTE(review): access to the static is not synchronized; concurrent
    // first calls could race and create twice (same leak the original had
    // on *every* call) — acceptable here, but worth confirming.
    struct Cache { static var soundID: SystemSoundID = 0 }
    DispatchQueue.global().async {
        if Cache.soundID == 0 {
            // BUG FIX: the original force-unwrapped the URL (`url!`) and
            // crashed if the resource was missing from the bundle.
            guard let url = Bundle.main.url(forResource: "scanSuccess.wav", withExtension: nil) else { return }
            AudioServicesCreateSystemSoundID(url as CFURL, &Cache.soundID)
        }
        AudioServicesPlaySystemSound(Cache.soundID)
    }
}