Skip to main content

Gesture Embedding and Classification

To recognize gestures, extend the landmark post-processing step with a gesture-embedding model followed by a classifier:

def lm_postprocess_with_gesture_classification(current_region, inference, previous_frame_regions,
                                               gesture_model, classifier_model,
                                               alpha=0.6, iou_threshold=0.4,
                                               crop_width=224, crop_height=224,
                                               gesture_labels=None):
    """Process landmark model output, then attach a gesture classification.

    Runs the standard landmark post-processing (temporal smoothing / IoU
    matching against the previous frame's regions), and — when the region
    has landmarks with a sufficiently confident landmark score — feeds them
    through the gesture embedding model and classifier, storing the result
    on the region.

    Parameters:
        current_region: hand region object updated in place; gains
            `gesture_name` and `gesture_confidence` attributes on success.
        inference: raw landmark-model inference output for this region.
        previous_frame_regions: regions from the previous frame, used for
            temporal smoothing by `lm_postprocess`.
        gesture_model: callable producing an embedding from
            (landmarks, handedness, world_landmarks) inputs.
        classifier_model: model consumed by `classify_gesture`.
        alpha: smoothing factor forwarded to `lm_postprocess`.
        iou_threshold: IoU matching threshold forwarded to `lm_postprocess`.
        crop_width, crop_height: crop size forwarded to `lm_postprocess`.
        gesture_labels: optional label list for the classifier; falls back
            to the module-level `gesture_labels` when None (backward
            compatible with the original global lookup).

    Returns:
        The same `current_region` object, possibly annotated with
        `gesture_name` and `gesture_confidence`.
    """
    # First, process landmarks as before (updates current_region in place).
    lm_postprocess(current_region, inference, previous_frame_regions,
                   alpha, iou_threshold, crop_width, crop_height)

    if gesture_labels is None:
        # Fall back to the module-level label list for backward compatibility.
        gesture_labels = globals()['gesture_labels']

    # Then extract a gesture embedding and classify, but only when landmarks
    # exist and the landmark score is confident enough to be meaningful.
    # getattr guards against regions that have no lm_score attribute at all.
    if hasattr(current_region, 'landmarks') and getattr(current_region, 'lm_score', 0) > 0.5:
        embedding = gesture_model([
            # 21 hand landmarks with (x, y, z) each — presumably the
            # MediaPipe hand topology; confirm against the model's spec.
            np.array(current_region.landmarks, dtype=np.float32).reshape(1, 21, 3),
            np.array([[current_region.handedness]], dtype=np.float32),
            None  # world_landmarks if available
        ])
        gesture_name, confidence = classify_gesture(embedding, classifier_model, gesture_labels)
        current_region.gesture_name = gesture_name
        current_region.gesture_confidence = confidence

    return current_region