When AR Meets Core ML

It's been almost a month since Apple's 2017 keynote, and I've been too busy to write anything. Today I want to share ARKit and Core ML, the frameworks Apple unveiled this year with such fanfare; they also point to where the internet is heading.

Enough preamble; here is today's result first:

(Screenshot: the classification result rendered in the AR scene)

Sorry, I don't know how to upload a video here, so a simple screenshot will have to do.
The demo uses Core ML to recognize a computer keyboard, then uses ARKit to display the result on screen: every time we tap an object, the corresponding classification result appears.
For now, here is the source code for us to study together; once I understand these frameworks more deeply, I'll write about AR and Core ML at length.
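
At a high level, the flow is: a tap triggers an ARKit hit test against feature points, the camera frame captured at that moment is handed to the Vision framework, Vision runs the Resnet50 Core ML model on it, and the top classification is rendered as 3D text at the hit point. To run it yourself you will need an ARKit-capable device (an A9 chip or newer), a camera-permission string (NSCameraUsageDescription) in Info.plist, and the Resnet50.mlmodel file (available from Apple's developer site) added to the Xcode project.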

import UIKit
import SceneKit
import ARKit
import Vision

class ViewController: UIViewController, ARSCNViewDelegate {

    var hitResult: ARHitTestResult!
    var resnetModel = Resnet50()
    var visionRequests = [VNRequest]()
    
    @IBOutlet var sceneView: ARSCNView!
    
    override func viewDidLoad() {
        super.viewDidLoad()
        
        // Set the delegate
        sceneView.delegate = self
        // Show FPS and timing statistics
        sceneView.showsStatistics = true
        // Create an empty scene
        let scene = SCNScene()
        // Attach the scene to the view
        sceneView.scene = scene
    
        // Register the tap gesture
        registerTapScreenRecognizer()
    }
    
    // Register a tap gesture on the scene view
    func registerTapScreenRecognizer() {
        let tapGes = UITapGestureRecognizer(target: self, action: #selector(tapScreen))
        self.sceneView.addGestureRecognizer(tapGes)
    }
    
    // Handle a tap on the screen
    @objc func tapScreen(tapGes: UITapGestureRecognizer) {
        guard let sceneView = tapGes.view as? ARSCNView else { return }
        // Hit-test at the tap location against detected feature points
        let touchLocation = tapGes.location(in: sceneView)
        guard let currentFrame = sceneView.session.currentFrame else { return }
        let hitResults = sceneView.hitTest(touchLocation, types: .featurePoint)
        guard let hitResult = hitResults.first else { return }
        self.hitResult = hitResult
        
        // The camera image captured at the moment of the tap
        let pixelBuffer = currentFrame.capturedImage
        performVisionRequest(pixelBuffer: pixelBuffer)
    }
    
    func performVisionRequest(pixelBuffer: CVPixelBuffer) {
        // Wrap the Core ML model for use with the Vision framework
        let visionModel = try! VNCoreMLModel(for: self.resnetModel.model)
        
        let request = VNCoreMLRequest(model: visionModel) { (request, error) in
            if error != nil { return }
            // Take the top classification for the frame captured at the tap
            guard let observation = request.results?.first as? VNClassificationObservation else { return }
            
            DispatchQueue.main.async {
                self.displayPredictions(text: observation.identifier)
            }
        }
        request.imageCropAndScaleOption = .centerCrop
        self.visionRequests = [request]
        
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .upMirrored, options: [:])
        
        // Classification can be slow, so run the request off the main thread
        DispatchQueue.global().async {
            try? imageRequestHandler.perform(self.visionRequests)
        }
    }
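    // Note: ARKit's capturedImage comes straight from the camera sensor and
    // is not rotated to match the UI orientation. Whether .upMirrored above
    // gives the best results depends on how the device is held; for a
    // portrait UI, .right is the orientation commonly passed to
    // VNImageRequestHandler.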
    
    // Display the prediction as 3D text at the hit point
    func displayPredictions(text: String) {
        let node = createText(text: text)
        node.position = SCNVector3(self.hitResult.worldTransform.columns.3.x,
                                   self.hitResult.worldTransform.columns.3.y,
                                   self.hitResult.worldTransform.columns.3.z)
        
        self.sceneView.scene.rootNode.addChildNode(node)
    }
    
    // Build a node containing a small red sphere plus the 3D label text
    func createText(text: String) -> SCNNode {
        let parentNode = SCNNode()
        
        // A small sphere marking the tapped point
        let sphere = SCNSphere(radius: 0.01)
        let sphereMaterial = SCNMaterial()
        sphereMaterial.diffuse.contents = UIColor.red
        sphere.firstMaterial = sphereMaterial
        let sphereNode = SCNNode(geometry: sphere)
        parentNode.addChildNode(sphereNode)
        
        // The classification label as flat 3D text
        let textGeo = SCNText(string: text, extrusionDepth: 0)
        textGeo.alignmentMode = kCAAlignmentCenter
        textGeo.firstMaterial?.diffuse.contents = UIColor.red
        textGeo.firstMaterial?.specular.contents = UIColor.white
        textGeo.firstMaterial?.isDoubleSided = true
        textGeo.font = UIFont(name: "Futura", size: 0.15)
        
        let textNode = SCNNode(geometry: textGeo)
        textNode.scale = SCNVector3Make(0.2, 0.2, 0.2)
        parentNode.addChildNode(textNode)
        
        return parentNode
    }
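    
    // A possible refinement (my addition, not in the original demo): SCNText
    // keeps its initial orientation, so the label can end up edge-on as the
    // user walks around it. Applying this constraint to textNode inside
    // createText(text:) would keep the label facing the camera.
    func billboard(_ node: SCNNode) {
        let constraint = SCNBillboardConstraint()
        constraint.freeAxes = .Y   // rotate about the vertical axis only
        node.constraints = [constraint]
    }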
    
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        
        // World tracking is configured here
        
        // Create a session configuration
        let configuration = ARWorldTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
    }
    
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        
        // Pause the view's session
        sceneView.session.pause()
    }
    
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Release any cached data, images, etc that aren't in use.
    }

    // MARK: - ARSCNViewDelegate
    
/*
    // Override to create and configure nodes for anchors added to the view's session.
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        let node = SCNNode()
     
        return node
    }
*/
    
    func session(_ session: ARSession, didFailWithError error: Error) {
        // Present an error message to the user
        
    }
    
    func sessionWasInterrupted(_ session: ARSession) {
        // Inform the user that the session has been interrupted, for example, by presenting an overlay
        
    }
    
    func sessionInterruptionEnded(_ session: ARSession) {
        // Reset tracking and/or remove existing anchors if consistent tracking is required
        
    }
}
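
The completion handler in performVisionRequest keeps only the first result. As a small variation (my own sketch, not part of the original demo), you could read the top few VNClassificationObservation results together with their confidence values, which makes it obvious when the model is unsure:

import Foundation
import Vision

// Sketch only: formats the top `count` classifications as a single line,
// e.g. "notebook 62%, laptop 21%, mouse 9%". Pass it the completed request.
func topLabels(from request: VNRequest, count: Int = 3) -> String {
    guard let observations = request.results as? [VNClassificationObservation] else {
        return "unknown"
    }
    return observations.prefix(count)
        .map { String(format: "%@ %.0f%%", $0.identifier, Double($0.confidence) * 100) }
        .joined(separator: ", ")
}

Calling self.displayPredictions(text: topLabels(from: request)) inside the completion handler would then show the top candidates instead of a single unqualified label.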

If you want to download the source, tap here: demo. You're welcome to learn from it.
