Commit 07d7978 · Parent(s): fa2ca8f

Push hand tracking app.
pyproject.toml
CHANGED

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 
 [project]
-name = "
+name = "reachy_mini_hand_tracker_app"
 version = "0.1.0"
 description = "Add your description here"
 readme = "README.md"

@@ -12,7 +12,8 @@ requires-python = ">=3.8"
 # dependencies = ["reachy-mini"]
 dependencies = [
     "reachy-mini@git+https://github.com/pollen-robotics/reachy_mini",
-
+    "mediapipe",
+]
 
 [project.entry-points."reachy_mini_apps"]
-
+reachy_mini_hand_tracker = "reachy_mini_hand_tracker.main:HandTrackerApp"
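The new `[project.entry-points."reachy_mini_apps"]` table advertises the app class under a well-known group so the Reachy Mini runtime can find it by name. As a minimal sketch (how reachy-mini itself performs discovery is an assumption here; this only shows what the pyproject.toml table exposes through the standard library):

    # Sketch: enumerating apps registered under the "reachy_mini_apps" entry-point group.
    from importlib.metadata import entry_points

    # Python >= 3.10 signature; on 3.8/3.9 use entry_points()["reachy_mini_apps"] instead.
    for ep in entry_points(group="reachy_mini_apps"):
        print(ep.name, "->", ep.value)  # e.g. reachy_mini_hand_tracker -> reachy_mini_hand_tracker.main:HandTrackerApp
        app_cls = ep.load()             # imports the referenced module and returns the app class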
reachy_mini_app_example/main.py
DELETED

@@ -1,27 +0,0 @@
-import threading
-import time
-
-import numpy as np
-from reachy_mini import ReachyMiniApp
-from reachy_mini.reachy_mini import ReachyMini
-from scipy.spatial.transform import Rotation as R
-
-
-class ExampleApp(ReachyMiniApp):
-    def run(self, reachy_mini: ReachyMini, stop_event: threading.Event):
-        t0 = time.time()
-        while not stop_event.is_set():
-            t = time.time() - t0
-
-            target = np.deg2rad(30) * np.sin(2 * np.pi * 0.5 * t)
-
-            yaw = target
-            head = np.eye(4)
-            head[:3, :3] = R.from_euler("xyz", [0, 0, yaw], degrees=False).as_matrix()
-
-            reachy_mini.set_position(head=head, antennas=np.array([target, -target]))
-
-            time.sleep(0.01)
-            # if more than one second since last ping, print ping
-            if t % 1 < 0.01:
-                print("Ping")
{reachy_mini_app_example → reachy_mini_hand_tracker_app}/__init__.py
RENAMED

File without changes
reachy_mini_hand_tracker_app/hand_tracker.py
ADDED

@@ -0,0 +1,42 @@
+"""Hand Tracker using MediaPipe to detect hand positions in images."""
+
+import cv2
+import mediapipe as mp
+import numpy as np
+
+mp_drawing = mp.solutions.drawing_utils
+mp_drawing_styles = mp.solutions.drawing_styles
+mp_hands = mp.solutions.hands
+
+
+class HandTracker:
+    """Hand Tracker using MediaPipe Hands to detect hand positions."""
+
+    def __init__(self, nb_hands=1):
+        """Initialize the Hand Tracker."""
+        self.hands = mp_hands.Hands(
+            static_image_mode=True, max_num_hands=nb_hands, min_detection_confidence=0.5
+        )
+
+    def get_hands_positions(self, img):
+        """Get the positions of the hands in the image."""
+        img = cv2.flip(img, 1)
+
+        results = self.hands.process(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
+        if results.multi_hand_landmarks is not None:
+            palm_centers = []
+            for landmarks in results.multi_hand_landmarks:
+                middle_finger_pip_landmark = landmarks.landmark[
+                    mp_hands.HandLandmark.MIDDLE_FINGER_PIP
+                ]
+                palm_center = np.array(
+                    [middle_finger_pip_landmark.x, middle_finger_pip_landmark.y]
+                )
+
+                # Normalize the palm center to the range [-1, 1]
+                # Flip the x-axis
+                palm_center = [-(palm_center[0] - 0.5) * 2, (palm_center[1] - 0.5) * 2]
+                palm_centers.append(palm_center)
+
+            return palm_centers
+        return None
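`get_hands_positions` returns one `[x, y]` pair per detected hand, normalized to [-1, 1] with (0, 0) at the image center and the x-axis sign flipped, as the in-code comments note. A minimal standalone usage sketch, assuming a webcam at OpenCV device index 0 (the app itself uses `find_camera()` instead):

    # Sketch: run HandTracker on a single webcam frame. Device index 0 is an assumption.
    import cv2

    from reachy_mini_hand_tracker_app.hand_tracker import HandTracker

    tracker = HandTracker(nb_hands=1)
    cap = cv2.VideoCapture(0)
    ok, frame = cap.read()
    cap.release()

    if ok:
        positions = tracker.get_hands_positions(frame)
        if positions:
            x, y = positions[0]  # both in [-1, 1]; (0, 0) is the image center
            print(f"palm center: x={x:+.2f}, y={y:+.2f}")
        else:
            print("no hand detected")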
reachy_mini_hand_tracker_app/main.py
ADDED

@@ -0,0 +1,122 @@
+"""Reachy Mini Hand Tracker App
+
+This app makes Reachy Mini follow your hand.
+
+It uses the robot camera to detect the hand position and adjusts the head pose accordingly.
+It uses mediapipe to track the hand and OpenCV for image processing.
+"""
+
+import threading
+
+import cv2
+import numpy as np
+from reachy_mini import ReachyMini, ReachyMiniApp
+from reachy_mini.io.cam_utils import find_camera
+from scipy.spatial.transform import Rotation as R
+
+from reachy_mini_hand_tracker_app.hand_tracker import HandTracker
+
+
+class HandTrackerApp(ReachyMiniApp):
+    # Proportional gain for the controller
+    # Reduce/Increase to make the head movement smoother or more responsive)
+    kp = 0.2
+
+    # Maximum delta for the head position adjustments
+    # This limits how much the head can move in one iteration to prevent abrupt movements
+    max_delta = 0.3
+
+    # Proportional gains for the head height adjustment
+    # Adjust this value to control how much the head moves up/down based on vertical error
+    kz = 0.04
+
+    def run(self, reachy_mini: ReachyMini, stop_event: threading.Event):
+        cap = find_camera()
+        if cap is None:
+            raise RuntimeError("No camera found. Please connect a camera.")
+
+        hand_tracker = HandTracker()
+
+        head_pose = np.eye(4)
+        euler_rot = np.array([0.0, 0.0, 0.0])
+
+        while not stop_event.is_set():
+            success, img = cap.read()
+
+            if not success:
+                print("Failed to capture image from camera.")
+                continue
+
+            hands = hand_tracker.get_hands_positions(img)
+            if hands:
+                hand = hands[0]  # Assuming we only track the first detected hand
+                draw_hand(img, hand)
+
+                error = np.array([0, 0]) - hand
+                error = np.clip(
+                    error, -self.max_delta, self.max_delta
+                )  # Limit error to avoid extreme movements
+                euler_rot += np.array(
+                    [0.0, -self.kp * 0.1 * error[1], self.kp * error[0]]
+                )
+
+                head_pose[:3, :3] = R.from_euler(
+                    "xyz", euler_rot, degrees=False
+                ).as_matrix()
+                head_pose[:3, 3][2] = (
+                    error[1] * self.kz
+                )  # Adjust height based on vertical error
+
+                reachy_mini.set_target(head=head_pose)
+
+            cv2.imshow("Reachy Mini Hand Tracker App", img)
+            if cv2.waitKey(1) & 0xFF == ord("q"):
+                break
+
+
+def draw_hand(img, hand):
+    """Draw debug information on the image."""
+    h, w, _ = img.shape
+    draw_palm = [(-hand[0] + 1) / 2, (hand[1] + 1) / 2]  # [0, 1]
+    cv2.circle(
+        img,
+        (int(w - draw_palm[0] * w), int(draw_palm[1] * h)),
+        radius=5,
+        color=(0, 0, 255),
+        thickness=-1,
+    )
+
+    _target = [0.5, 0.5]
+    cv2.circle(
+        img,
+        (int(_target[0] * w), int(_target[1] * h)),
+        radius=5,
+        color=(255, 0, 0),
+        thickness=-1,
+    )
+
+    cv2.line(
+        img,
+        (int(draw_palm[0] * w), int(draw_palm[1] * h)),
+        (int(_target[0] * w), int(_target[1] * h)),
+        color=(0, 255, 0),
+        thickness=2,
+    )
+
+
+if __name__ == "__main__":
+    # You can run the app directly from this script
+    with ReachyMini() as mini:
+        app = HandTrackerApp()
+
+        stop = threading.Event()
+
+        try:
+            print("Running '{{ app_name }}' a ReachyMiniApp...")
+            print("Press Ctrl+C to stop the app.")
+            app.run(mini, stop)
+            print("App has stopped.")
+
+        except KeyboardInterrupt:
+            print("Stopping the app...")
+            stop.set()
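The loop in `HandTrackerApp.run` is a simple proportional controller on the normalized palm position: the 2-D error from the image center is clipped to `max_delta`, accumulated into pitch/yaw scaled by `kp`, and the vertical error also sets the head height through `kz`. A worked one-step sketch with the gains from the commit; the palm position used here is illustrative, not taken from the diff:

    # One iteration of the proportional update from HandTrackerApp.run, worked by hand.
    import numpy as np
    from scipy.spatial.transform import Rotation as R

    kp, kz, max_delta = 0.2, 0.04, 0.3
    euler_rot = np.array([0.0, 0.0, 0.0])  # roll, pitch, yaw (radians)

    hand = np.array([0.4, -0.5])  # illustrative normalized palm center
    error = np.clip(np.array([0, 0]) - hand, -max_delta, max_delta)    # -> [-0.3, 0.3]
    euler_rot += np.array([0.0, -kp * 0.1 * error[1], kp * error[0]])  # pitch -0.006, yaw -0.06

    head_pose = np.eye(4)
    head_pose[:3, :3] = R.from_euler("xyz", euler_rot).as_matrix()
    head_pose[2, 3] = error[1] * kz  # same element as head_pose[:3, 3][2]; here 0.012
    print(euler_rot, head_pose[2, 3])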