Google ML Kit를 사용해 보려고 한다.
구글은 관절(조인트) 간의 거리와 각도를 계산하는 예제 함수를 제공한다.
https://developers.google.com/ml-kit/vision/pose-detection/classifying-poses?hl=ko
/// Returns the angle (in degrees, in the range 0...180) formed at
/// `midLandmark` by the two segments running to `firstLandmark`
/// and `lastLandmark`.
///
/// - Parameters:
///   - firstLandmark: Landmark at one end of the joint (e.g. shoulder).
///   - midLandmark: Landmark at the vertex of the angle (e.g. elbow).
///   - lastLandmark: Landmark at the other end of the joint (e.g. wrist).
/// - Returns: The unsigned angle at `midLandmark`, never negative.
func angle(
  firstLandmark: PoseLandmark,
  midLandmark: PoseLandmark,
  lastLandmark: PoseLandmark
) -> CGFloat {
  let vertex = midLandmark.position
  // Direction of each segment, measured from the positive x-axis.
  let headingToLast = atan2(lastLandmark.position.y - vertex.y,
                            lastLandmark.position.x - vertex.x)
  let headingToFirst = atan2(firstLandmark.position.y - vertex.y,
                             firstLandmark.position.x - vertex.x)
  // Convert the signed radian difference to an unsigned degree value.
  var degrees = abs((headingToLast - headingToFirst) * 180.0 / .pi)
  // A reflex result is folded back to its smaller (<= 180°) counterpart.
  if degrees > 180.0 {
    degrees = 360.0 - degrees
  }
  return degrees
}
애플의 pose detection(Vision)에는 관절의 각도나 관절 간 거리에 따라 작동하는 함수가 제공되지 않는다. 위와 같은 함수를 직접 만들 수 있는지 확인이 필요하다.
/// Geometry helpers for pose-classification math.
extension CGPoint {
  /// Returns the point halfway between `p1` and `p2`.
  static func midPoint(p1: CGPoint, p2: CGPoint) -> CGPoint {
    let centerX = (p1.x + p2.x) / 2
    let centerY = (p1.y + p2.y) / 2
    return CGPoint(x: centerX, y: centerY)
  }

  /// Returns the Euclidean distance from this point to `point`.
  func distance(from point: CGPoint) -> CGFloat {
    let dx = point.x - x
    let dy = point.y - y
    return hypot(dx, dy)
  }
}
https://developer.apple.com/videos/play/wwdc2020/10653/