I have an app that uses iOS 13 body tracking to place some markers on a human body in real time using ARKit.
The following code allows me to draw the markers in real time and to get their coordinates in the screen.
/// ARSession delegate callback: draws a circular marker over each tracked
/// lower-body joint of the detected 2D body skeleton for the current frame.
///
/// Fixes over the original:
/// - Marker layers from the previous frame are removed first. The original
///   added new `CAShapeLayer`s on every frame without ever removing them,
///   leaking layers and leaving stale markers on screen.
/// - `CGColor(srgbRed:...)` components are documented as 0...1; the original
///   passed 255 and relied on implicit clamping.
/// - Iterates joint indices directly instead of `0...count - 1` index math.
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    // Tag used to find (and clear) our own marker layers on the next frame.
    let markerLayerName = "bodyJointMarker"

    // Remove the markers drawn for the previous frame so layers don't accumulate.
    arView.layer.sublayers?
        .filter { $0.name == markerLayerName }
        .forEach { $0.removeFromSuperlayer() }

    guard let detectedBody = frame.detectedBody,
          let interfaceOrientation = arView.window?.windowScene?.interfaceOrientation else { return }

    // Maps normalized image-space coordinates into the view's coordinate space.
    let transform = frame.displayTransform(for: interfaceOrientation,
                                           viewportSize: arView.frame.size)

    // Lower-body joints only (indices into ARSkeleton2D.jointLandmarks,
    // which are normalized [0, 1] image-space points).
    let lowerBodyJointIndices = [8, 9, 10, 11, 12, 13, 16]
    let jointLandmarks = detectedBody.skeleton.jointLandmarks

    for jointIndex in lowerBodyJointIndices {
        let landmark = jointLandmarks[jointIndex]

        // Normalized landmark -> normalized view point -> view coordinates.
        let normalizedCenter = CGPoint(x: CGFloat(landmark[0]),
                                       y: CGFloat(landmark[1])).applying(transform)
        let center = normalizedCenter.applying(
            CGAffineTransform.identity.scaledBy(x: arView.frame.width,
                                                y: arView.frame.height))

        let markerDiameter: CGFloat = 10
        let rect = CGRect(x: center.x - markerDiameter / 2,
                          y: center.y - markerDiameter / 2,
                          width: markerDiameter,
                          height: markerDiameter)

        let circleLayer = CAShapeLayer()
        circleLayer.name = markerLayerName
        // sRGB components must be in 0...1 (white marker).
        circleLayer.fillColor = .init(srgbRed: 1.0, green: 1.0, blue: 1.0, alpha: 1.0)
        circleLayer.path = UIBezierPath(ovalIn: rect).cgPath
        arView.layer.addSublayer(circleLayer)
    }
}
However, I can't find a way to obtain the real-world coordinates of these joints — to measure, for example, the hip displacement in cm during a squat. Is it even possible to translate those screen points to real-world coordinates using ARKit?
Thanks a lot