Swift ARKit - How to track a moving pet?

I need to write an application that can recognize a cat or a dog and measure its number of breaths per minute.
For this, I would like to sample the colors of 100 points from the cat's area every 100 milliseconds. These samples will then be processed by a Fourier transform that measures the cat's breathing rate from their cyclical color changes.
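
To make the plan concrete, here is a rough, untested sketch of the sampling and Fourier step I have in mind (the helper names averageBrightness and breathsPerMinute are my own, and it assumes ARKit's default bi-planar YCbCr capturedImage format, where plane 0 holds one luma byte per pixel):

import Accelerate
import CoreGraphics
import CoreVideo

/// Mean luma of a 10x10 grid (100 points) inside `rect`, given in pixel
/// coordinates of the captured image. Assumes ARKit's default bi-planar
/// YCbCr pixel format, where plane 0 is the luma plane.
func averageBrightness(in rect: CGRect, of pixelBuffer: CVPixelBuffer) -> Float {
    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }
    let base = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0)!
        .assumingMemoryBound(to: UInt8.self)
    let bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)
    let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)
    let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)
    var sum: Float = 0
    for gy in 0..<10 {
        for gx in 0..<10 {
            // Clamp each grid point so a box that spills off-screen is safe.
            let x = min(max(Int(rect.minX) + Int(rect.width) * gx / 10, 0), width - 1)
            let y = min(max(Int(rect.minY) + Int(rect.height) * gy / 10, 0), height - 1)
            sum += Float(base[y * bytesPerRow + x])
        }
    }
    return sum / 100
}

/// Breaths per minute from a brightness time series sampled at `sampleRate`
/// Hz (10 Hz for one sample every 100 ms). `samples.count` should be a
/// power of two; otherwise the DFT setup below returns nil.
func breathsPerMinute(from samples: [Float], sampleRate: Float) -> Float? {
    let n = samples.count
    guard n > 2,
          let dft = vDSP.DFT(count: n, direction: .forward,
                             transformType: .complexComplex, ofType: Float.self)
    else { return nil }
    // Remove the mean so the DC bin does not dominate the spectrum.
    let centered = vDSP.add(-vDSP.mean(samples), samples)
    var outReal = [Float](repeating: 0, count: n)
    var outImag = [Float](repeating: 0, count: n)
    dft.transform(inputReal: centered,
                  inputImaginary: [Float](repeating: 0, count: n),
                  outputReal: &outReal, outputImaginary: &outImag)
    // Strongest bin below Nyquist, converted to cycles per minute.
    var bestBin = 1
    var bestMag: Float = -1
    for bin in 1..<(n / 2) {
        let mag = outReal[bin] * outReal[bin] + outImag[bin] * outImag[bin]
        if mag > bestMag { bestMag = mag; bestBin = bin }
    }
    return Float(bestBin) * sampleRate / Float(n) * 60
}

At one sample every 100 ms the sample rate is 10 Hz, so a window of 128 samples (about 13 seconds) resolves the spectrum in steps of roughly 0.08 Hz, i.e. about 5 breaths per minute.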

The Vision framework can detect a cat or a dog in a camera snapshot, but I'm afraid that every 100 ms the ARAnchor will be placed somewhere else, and I won't be able to sample the same 100 points of the cat's area.
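
I also wonder whether tracking the detected bounding box in 2D image space, instead of re-detecting and dropping a new ARAnchor each time, would keep the sample region stable. Something like this untested sketch (CatRegionTracker is my own name) would seed a Vision tracker with the first animal observation and follow it from frame to frame:

import Vision
import CoreGraphics
import CoreVideo

/// Untested sketch: follows one detected animal's bounding box across
/// frames using Vision object tracking.
final class CatRegionTracker {
    private let sequenceHandler = VNSequenceRequestHandler()
    private var trackingRequest: VNTrackObjectRequest?

    /// Call once with the observation delivered by VNRecognizeAnimalsRequest.
    func start(with observation: VNDetectedObjectObservation) {
        let request = VNTrackObjectRequest(detectedObjectObservation: observation)
        request.trackingLevel = .accurate
        trackingRequest = request
    }

    /// Call for each captured frame; returns the tracked bounding box in
    /// normalized image coordinates, or nil once tracking is lost.
    func track(in pixelBuffer: CVPixelBuffer) -> CGRect? {
        guard let request = trackingRequest else { return nil }
        do {
            try sequenceHandler.perform([request], on: pixelBuffer)
        } catch {
            print("Tracking error: \(error.localizedDescription)")
            return nil
        }
        guard let tracked = request.results?.first as? VNDetectedObjectObservation,
              tracked.confidence > 0.3 else {
            trackingRequest = nil          // tracking lost; re-detect the cat
            return nil
        }
        request.inputObservation = tracked // feed this result into the next frame
        return tracked.boundingBox
    }
}

If the tracked box stays glued to the cat, I could sample my 100 points at fixed positions relative to that box instead of relative to an anchor.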

Here is my ViewController:

import UIKit
import SceneKit
import ARKit
import Vision

class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {
    
    var detectionAvailableUntil: Int64 = .zero
    @IBOutlet var sceneView: ARSCNView!
    private var viewportSize: CGSize!
    
    override func viewDidLoad() {
        super.viewDidLoad()
        sceneView.delegate = self
        viewportSize = sceneView.frame.size
    }
    
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        resetTracking()
    }
    
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        sceneView.session.pause()
    }
    
    private func resetTracking() {
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = []
        sceneView.session.run(configuration)
    }
    
    // VNRecognizeAnimalsRequest's initializer does not throw, so no
    // do/catch is needed around it.
    lazy var catRequest: VNRecognizeAnimalsRequest = {
        VNRecognizeAnimalsRequest { [weak self] request, error in
            self?.processCatDetections(for: request as! VNRecognizeAnimalsRequest, error: error)
        }
    }()
    
    func processCatDetections(for request: VNRecognizeAnimalsRequest, error: Error?) {
        guard error == nil else {
            print("Object detection error: \(error!.localizedDescription)")
            return
        }
        // Take the most confident observation and make sure it is labeled as a cat.
        guard let result = request.results?.first as? VNRecognizedObjectObservation,
              result.labels.contains(where: { $0.identifier == VNAnimalIdentifier.cat.rawValue }),
              result.confidence > 0.3,
              let currentFrame = self.sceneView.session.currentFrame else { return }
        // Map Vision's normalized bounding box into view coordinates.
        // Note: Vision rects have a lower-left origin, so depending on the
        // orientation handling a vertical flip may be needed here as well.
        let fromCameraImageToViewTransform = currentFrame.displayTransform(for: .portrait, viewportSize: self.viewportSize)
        let viewNormalizedBoundingBox = result.boundingBox.applying(fromCameraImageToViewTransform)
        let t = CGAffineTransform(scaleX: self.viewportSize.width, y: self.viewportSize.height)
        let viewBoundingBox = viewNormalizedBoundingBox.applying(t)
        let midPoint = CGPoint(x: viewBoundingBox.midX, y: viewBoundingBox.midY)
        // Hit-test the box center against ARKit feature points for a world position.
        let hitResults = self.sceneView.hitTest(midPoint, types: .featurePoint)
        guard let hit = hitResults.first else { return }
        let anchor = ARAnchor(name: "catAnchor", transform: hit.worldTransform)
        self.sceneView.session.add(anchor: anchor)
    }
    
    // This delegate method fires for every rendered frame (~60 times per
    // second), so the Vision request below runs far more often than the
    // intended once-per-100 ms sampling.
    func renderer(_ renderer: SCNSceneRenderer, willRenderScene scene: SCNScene, atTime time: TimeInterval) {
        guard let capturedImage = sceneView.session.currentFrame?.capturedImage else { return }
        let requestHandler = VNImageRequestHandler(cvPixelBuffer: capturedImage)
        
        do {
            try requestHandler.perform([self.catRequest])
        } catch {
            print("Error: Vision request failed: \(error.localizedDescription)")
        }
    }
    
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // Throttle: accept at most one new sphere per 500 ms.
        let now = Int64(Date().timeIntervalSince1970 * 1000)
        guard anchor.name == "catAnchor",
              now > self.detectionAvailableUntil else { return }
        print("catAnchor added")
        self.detectionAvailableUntil = now + 500
        let sphereNode = SCNNode(geometry: SCNSphere(radius: 0.01))
        sphereNode.geometry?.firstMaterial?.diffuse.contents = UIColor.red
        // `node` already carries the anchor's transform, so the sphere is
        // simply parented at the node's origin; re-applying anchor.transform
        // as the sphere's world transform would offset it twice.
        node.addChildNode(sphereNode)
    }
}

The problem is that the sphereNode is drawn at a different place on the cat's area (belly or head) for each detection.

Apple provides APIs for tracking a human body or face, so I suppose I need something like ARFaceAnchor or ARBodyAnchor, but for a cat.

If the app were able to track the cat itself, it would be able to keep retrieving the same points relative to the ARAnchor.

Is it possible?
