I’m currently working on an AR project that detects my hand OPEN and CLOSE using a CoreML model.
Right now, after it detects my open hand the first time, it will only detect it again if I move my hand out of the camera view and then back in.
I want it to detect my open hand every time I close my hand and open it again, with my hand staying inside the camera view the whole time.
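In other words, I think self.handDetect needs to be reset as soon as the classifier sees my hand closed while it is still in frame, not only when "no-hand" wins. The sketch below is roughly the behaviour I'm after (it is not my current code; handleTopPrediction and the "FIST-UB-RHand" label are made up, assuming the model could also recognise a closed hand in frame):

func handleTopPrediction(name: String, score: Float) {
    if name == "FIVE-UB-RHand" && score > 0.9 {
        if self.handDetect == false {
            self.handDetect = true   // open hand seen: trigger the AR content once
        }
    } else if name == "FIST-UB-RHand" && score > 0.9 {
        self.handDetect = false      // hand closed but still in frame: re-arm detection
    }
}

Here is my current code: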
func loopCoreMLUpdate() {
    // Keep running the CoreML/Vision classification on a background queue.
    dispatchQueueML.async {
        self.updateCoreML()
        self.loopCoreMLUpdate()
    }
}

func updateCoreML() {
    // Grab the current camera frame from the AR session; bail out if there is none yet.
    guard let pixbuff = arView.session.currentFrame?.capturedImage else { return }
    let ciImage = CIImage(cvPixelBuffer: pixbuff)
    // Run the Vision requests (the CoreML classification) on this frame.
    let imageRequestHandler = VNImageRequestHandler(ciImage: ciImage, options: [:])
    do {
        try imageRequestHandler.perform(self.visionRequests)
    } catch {
        print(error)
    }
}

func classificationCompleteHandler(request: VNRequest, error: Error?) {
    if let error = error {
        print("Error: " + error.localizedDescription)
        return
    }
    guard let observations = request.results else {
        print("No results")
        return
    }
    // Format the top 3 results as "identifier : confidence" lines.
    let classifications = observations[0...2]
        .compactMap({ $0 as? VNClassificationObservation })
        .map({ "\($0.identifier) \(String(format: " : %.2f", $0.confidence))" })
        .joined(separator: "\n")
    DispatchQueue.main.async {
        // Parse the top prediction back out of the formatted string.
        let topPrediction = classifications.components(separatedBy: "\n")[0]
        let topPredictionName = topPrediction.components(separatedBy: ":")[0].trimmingCharacters(in: .whitespaces)
        let topPredictionScore: Float? = Float(topPrediction.components(separatedBy: ":")[1].trimmingCharacters(in: .whitespaces))
        if topPredictionScore! > 0.9 && topPredictionName == "FIVE-UB-RHand" {
            // Only trigger once; handDetect stays true until it is reset below.
            if self.handDetect == false {
                print("Hand Detected.")
                self.handDetect = true
                let worldAnchor = try! Experience.loadStayHungry()
                self.arView.scene.anchors.append(worldAnchor)
            }
        } else {
            // handDetect is only reset when "no-hand" is the top prediction,
            // i.e. when my hand has left the frame.
            if topPredictionScore! > 0.01 && topPredictionName == "no-hand" {
                self.handDetect = false
            }
        }
    }
}