|
#include <cmath> |
|
#include "mediapipe/framework/calculator_framework.h" |
|
#include "mediapipe/framework/formats/landmark.pb.h" |
|
#include "mediapipe/framework/formats/rect.pb.h" |
|
|
|
namespace mediapipe |
|
{ |
|
|
|
namespace

{

// Input stream tags. These must match the "NORM_RECT:..." and
// "NORM_LANDMARKS:..." bindings in the graph config (see example below).
constexpr char normRectTag[] = "NORM_RECT";

constexpr char normalizedLandmarkListTag[] = "NORM_LANDMARKS";

} // namespace
|
|
|
// Graph config: |
|
// |
|
// node { |
|
// calculator: "HandGestureRecognitionCalculator" |
|
// input_stream: "NORM_LANDMARKS:scaled_landmarks" |
|
// input_stream: "NORM_RECT:hand_rect_for_next_frame" |
|
// } |
|
class HandGestureRecognitionCalculator : public CalculatorBase |
|
{ |
|
public: |
|
static ::mediapipe::Status GetContract(CalculatorContract *cc); |
|
::mediapipe::Status Open(CalculatorContext *cc) override; |
|
|
|
::mediapipe::Status Process(CalculatorContext *cc) override; |
|
|
|
private: |
|
float get_Euclidean_DistanceAB(float a_x, float a_y, float b_x, float b_y) |
|
{ |
|
float dist = std::pow(a_x - b_x, 2) + pow(a_y - b_y, 2); |
|
return std::sqrt(dist); |
|
} |
|
|
|
bool isThumbNearFirstFinger(NormalizedLandmark point1, NormalizedLandmark point2) |
|
{ |
|
float distance = this->get_Euclidean_DistanceAB(point1.x(), point1.y(), point2.x(), point2.y()); |
|
return distance < 0.1; |
|
} |
|
}; |
|
|
|
REGISTER_CALCULATOR(HandGestureRecognitionCalculator); |
|
|
|
// Declares the calculator's input contract: a NORM_LANDMARKS stream
// carrying NormalizedLandmarkList and a NORM_RECT stream carrying
// NormalizedRect. Both streams are mandatory.
::mediapipe::Status HandGestureRecognitionCalculator::GetContract(

    CalculatorContract *cc)

{

    // Landmark stream must be wired up in the graph config.
    RET_CHECK(cc->Inputs().HasTag(normalizedLandmarkListTag));

    cc->Inputs()
        .Tag(normalizedLandmarkListTag)
        .Set<mediapipe::NormalizedLandmarkList>();



    // Hand-rectangle stream must be wired up as well.
    RET_CHECK(cc->Inputs().HasTag(normRectTag));

    cc->Inputs()
        .Tag(normRectTag)
        .Set<NormalizedRect>();



    return ::mediapipe::OkStatus();

}
|
|
|
// Called once before the first Process() call.
::mediapipe::Status HandGestureRecognitionCalculator::Open(

    CalculatorContext *cc)

{

    // Output timestamps equal input timestamps (zero offset); lets the
    // framework propagate timestamp bounds without waiting on this node.
    cc->SetOffset(TimestampDiff(0));

    return ::mediapipe::OkStatus();

}
|
|
|
::mediapipe::Status HandGestureRecognitionCalculator::Process( |
|
CalculatorContext *cc) |
|
{ |
|
// hand closed (red) rectangle |
|
const auto rect = &(cc->Inputs().Tag(normRectTag).Get<NormalizedRect>()); |
|
float width = rect->width(); |
|
float height = rect->height(); |
|
|
|
if (width < 0.01 || height < 0.01) |
|
{ |
|
LOG(INFO) << "No Hand Detected"; |
|
return ::mediapipe::OkStatus(); |
|
} |
|
|
|
const auto &landmarkList = cc->Inputs() |
|
.Tag(normalizedLandmarkListTag) |
|
.Get<mediapipe::NormalizedLandmarkList>(); |
|
RET_CHECK_GT(landmarkList.landmark_size(), 0) << "Input landmark vector is empty."; |
|
|
|
// finger states |
|
bool thumbIsOpen = false; |
|
bool firstFingerIsOpen = false; |
|
bool secondFingerIsOpen = false; |
|
bool thirdFingerIsOpen = false; |
|
bool fourthFingerIsOpen = false; |
|
// |
|
|
|
float pseudoFixKeyPoint = landmarkList.landmark(2).x(); |
|
if (landmarkList.landmark(3).x() < pseudoFixKeyPoint && landmarkList.landmark(4).x() < pseudoFixKeyPoint) |
|
{ |
|
thumbIsOpen = true; |
|
} |
|
|
|
pseudoFixKeyPoint = landmarkList.landmark(6).y(); |
|
if (landmarkList.landmark(7).y() < pseudoFixKeyPoint && landmarkList.landmark(8).y() < pseudoFixKeyPoint) |
|
{ |
|
firstFingerIsOpen = true; |
|
} |
|
|
|
pseudoFixKeyPoint = landmarkList.landmark(10).y(); |
|
if (landmarkList.landmark(11).y() < pseudoFixKeyPoint && landmarkList.landmark(12).y() < pseudoFixKeyPoint) |
|
{ |
|
secondFingerIsOpen = true; |
|
} |
|
|
|
pseudoFixKeyPoint = landmarkList.landmark(14).y(); |
|
if (landmarkList.landmark(15).y() < pseudoFixKeyPoint && landmarkList.landmark(16).y() < pseudoFixKeyPoint) |
|
{ |
|
thirdFingerIsOpen = true; |
|
} |
|
|
|
pseudoFixKeyPoint = landmarkList.landmark(18).y(); |
|
if (landmarkList.landmark(19).y() < pseudoFixKeyPoint && landmarkList.landmark(20).y() < pseudoFixKeyPoint) |
|
{ |
|
fourthFingerIsOpen = true; |
|
} |
|
|
|
// Hand gesture recognition |
|
if (thumbIsOpen && firstFingerIsOpen && secondFingerIsOpen && thirdFingerIsOpen && fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "FIVE!"; |
|
} |
|
else if (!thumbIsOpen && firstFingerIsOpen && secondFingerIsOpen && thirdFingerIsOpen && fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "FOUR!"; |
|
} |
|
else if (thumbIsOpen && firstFingerIsOpen && secondFingerIsOpen && !thirdFingerIsOpen && !fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "TREE!"; |
|
} |
|
else if (thumbIsOpen && firstFingerIsOpen && !secondFingerIsOpen && !thirdFingerIsOpen && !fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "TWO!"; |
|
} |
|
else if (!thumbIsOpen && firstFingerIsOpen && !secondFingerIsOpen && !thirdFingerIsOpen && !fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "ONE!"; |
|
} |
|
else if (!thumbIsOpen && firstFingerIsOpen && secondFingerIsOpen && !thirdFingerIsOpen && !fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "YEAH!"; |
|
} |
|
else if (!thumbIsOpen && firstFingerIsOpen && !secondFingerIsOpen && !thirdFingerIsOpen && fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "ROCK!"; |
|
} |
|
else if (thumbIsOpen && firstFingerIsOpen && !secondFingerIsOpen && !thirdFingerIsOpen && fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "SPIDERMAN!"; |
|
} |
|
else if (!thumbIsOpen && !firstFingerIsOpen && !secondFingerIsOpen && !thirdFingerIsOpen && !fourthFingerIsOpen) |
|
{ |
|
LOG(INFO) << "FIST!"; |
|
} |
|
else if (!firstFingerIsOpen && secondFingerIsOpen && thirdFingerIsOpen && fourthFingerIsOpen && this->isThumbNearFirstFinger(landmarkList.landmark(4), landmarkList.landmark(8))) |
|
{ |
|
LOG(INFO) << "OK!"; |
|
} |
|
else |
|
{ |
|
LOG(INFO) << "Finger States: " << thumbIsOpen << firstFingerIsOpen << secondFingerIsOpen << thirdFingerIsOpen << fourthFingerIsOpen; |
|
LOG(INFO) << "___"; |
|
} |
|
|
|
return ::mediapipe::OkStatus(); |
|
} // namespace mediapipe |
|
|
|
} // namespace mediapipe |
Thanks a lot for such a nice tutorial — it is really helpful for the objective I want to achieve. Basically, I'm trying to build a sign-language use case using this approach, and I've had some success decoding a few signs, but I'm facing a few challenges and I really need your help to overcome them. A few hand signs, like G (https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcQ-5AXHWBUUzbmELozMjac_B2oX7AQKgm49FZ5Xv8AKHTRuFIa4), P (https://i.pinimg.com/originals/a2/15/b8/a215b8aaa0a8584fc564a5925bff81f9.png), and especially J (https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcS_4HEUv8Nxxis3E-a5SY1k8zalYspjmNHXYqr1Qe4z70k5lXsr) and Z (https://www.seekpng.com/png/detail/392-3923100_american-sign-language-british-sign-language-letter-z.png), are difficult. The challenge is that signs like G and P involve a horizontal orientation of the hand, so I'm not sure how to detect the horizontal orientation of fingers; and signs like J and Z involve movement, so again it is a grey area for me how to achieve movement-based gesture recognition using MediaPipe.
I've tried to detect vertical and horizontal orientation using the method below, but I'm not sure if this is the right way to do it — perhaps you can suggest a better and more stable approach for doing the same.
string getOrientation(NormalizedLandmark pt1)
{
string orientation = "";
if ((pt1.x() - pt1.y()) > .25)
{
orientation = "Vertical";
}
else if ((pt1.x() - pt1.y()) < .16)
{
orientation = "Horizontal";
}
return orientation;
}
Thanks again for your help.
Jitender