解决手识别偏移的问题,增加手跟踪(未完成)

This commit is contained in:
shen 2026-01-29 00:38:58 +08:00
parent f3104482c1
commit 98797842f4
5 changed files with 172 additions and 6 deletions

View File

@ -8,8 +8,8 @@ import numpy as np
from camera_info_manager import CameraInfoManager
from sensor_msgs.msg import CameraInfo
CAMERA_WIDTH = int(1280)
CAMERA_HEIGHT = int(480)
CAMERA_WIDTH = 1280
CAMERA_HEIGHT = 480
FRAME_WIDTH = int(CAMERA_WIDTH / 2)
FRAME_HEIGHT = int(CAMERA_HEIGHT / 2)

View File

@ -0,0 +1,156 @@
import rclpy
from rclpy.node import Node
from rclpy.action import ActionClient
import numpy as np
import time
from moveit_msgs.action import MoveGroup
from moveit_msgs.msg import Constraints, PositionConstraint
from geometry_msgs.msg import PointStamped, PoseStamped
from shape_msgs.msg import SolidPrimitive
import tf2_ros
import tf2_geometry_msgs
class HandFollowMoveIt(Node):
    """Follow a tracked hand with MoveIt.

    Subscribes to 3-D hand points on ``/hand_3d``, transforms them into the
    arm base frame via TF, and periodically sends position-constrained
    MoveGroup goals so the end-effector tracks the hand.
    """

    def __init__(self):
        super().__init__('hand_follow_moveit')
        # Action client used to request MoveIt motion plans.
        self.client = ActionClient(self, MoveGroup, '/move_action')
        self.last_sent = time.time()
        self.send_period = 0.3   # s between goals from update_hand (~3 Hz)
        self.p_prev = None       # previous filtered hand position (np.ndarray)
        self.plan_period = 1.0   # s, re-planning timer period
        self.min_delta = 0.05    # m, hand must move this far to re-plan
        self.z_min = 0.15        # m, lower z bound (keeps the arm off the table)
        self.z_max = 0.60        # m, upper z bound
        self.latest_hand = None  # last hand position in base frame (np.ndarray)
        self.last_target = None  # last target actually sent to MoveIt
        self.base_frame = 'openarm_body_link0'
        # ====== TF ======
        self.tf_buffer = tf2_ros.Buffer()
        self.tf_listener = tf2_ros.TransformListener(self.tf_buffer, self)
        # ====== subscriptions ======
        self.sub = self.create_subscription(
            PointStamped,
            '/hand_3d',
            self.hand_cb,
            10
        )
        self.timer = self.create_timer(self.plan_period, self.plan_timer)
        self.get_logger().info('Hand → MoveIt Planning node started')

    # ---------- callbacks ----------
    def hand_cb(self, msg):
        """Transform an incoming hand point into the base frame and cache it."""
        try:
            hand = self.tf_buffer.transform(
                msg,
                self.base_frame,
                timeout=rclpy.duration.Duration(seconds=0.1)
            )
        except Exception as exc:
            # TF may not be up yet (lookup/extrapolation errors are expected
            # at startup). Warn (throttled) instead of silently dropping
            # every sample, which previously made failures invisible.
            self.get_logger().warning(
                f'TF transform to {self.base_frame} failed: {exc}',
                throttle_duration_sec=2.0)
            return
        self.get_logger().debug(
            f'Hand in {self.base_frame}: '
            f'{hand.point.x:.3f}, {hand.point.y:.3f}, {hand.point.z:.3f}')
        self.latest_hand = np.array([
            hand.point.x,
            hand.point.y,
            hand.point.z
        ])

    def update_hand(self, p):
        """Filter a hand position and possibly send it as a goal.

        p: array-like [x, y, z] in the base frame. The caller's array is
        not mutated (the original clipped it in place).
        """
        # 1. Clamp to the reachable workspace (bounds tuned per robot).
        p = np.clip(np.asarray(p, dtype=float),
                    [0.25, -0.30, 0.25],
                    [0.55, 0.30, 0.30])
        # 2. Low-pass filter against jittery detections.
        if self.p_prev is None:
            self.p_prev = p
            return
        alpha = 0.2
        p = alpha * p + (1 - alpha) * self.p_prev
        # 3. Dead-band: ignore motion below 1.5 cm.
        if np.linalg.norm(p - self.p_prev) < 0.015:
            return
        # 4. Rate-limit goal sending.
        if time.time() - self.last_sent < self.send_period:
            return
        self.send_goal(p)
        self.p_prev = p
        self.last_sent = time.time()

    def send_goal(self, p):
        """Send a position-constrained MoveGroup goal for point p (base frame)."""
        pose = PoseStamped()
        # Use the configured base frame instead of a duplicated literal so a
        # single attribute controls the frame everywhere.
        pose.header.frame_id = self.base_frame
        pose.pose.position.x = float(p[0])
        pose.pose.position.y = float(p[1])
        pose.pose.position.z = float(p[2])
        pose.pose.orientation.w = 1.0  # identity orientation: position-only goal

        goal = MoveGroup.Goal()
        req = goal.request
        req.group_name = "left_arm"
        req.allowed_planning_time = 2.0
        req.max_velocity_scaling_factor = 0.2

        # Constrain the end-effector link to a 2 cm box around the target.
        pc = PositionConstraint()
        pc.header = pose.header
        pc.link_name = "openarm_left_link7"
        box = SolidPrimitive()
        box.type = SolidPrimitive.BOX
        box.dimensions = [0.02, 0.02, 0.02]
        pc.constraint_region.primitives.append(box)
        pc.constraint_region.primitive_poses.append(pose.pose)

        constraints = Constraints()
        constraints.position_constraints.append(pc)
        req.goal_constraints.append(constraints)

        # Bounded wait: an indefinite wait_for_server() inside a timer
        # callback would block the single-threaded executor forever if the
        # MoveGroup server is down.
        if not self.client.wait_for_server(timeout_sec=1.0):
            self.get_logger().warning('MoveGroup action server not available',
                                      throttle_duration_sec=5.0)
            return
        self.client.send_goal_async(goal)

    def plan_timer(self):
        """Periodic re-planning: send the latest hand position if it moved enough."""
        if self.latest_hand is None:
            return
        # Keep the target inside the allowed workspace height.
        target = self.latest_hand.copy()
        target[2] = np.clip(target[2], self.z_min, self.z_max)
        # Skip re-planning when the hand barely moved since the last goal.
        if (self.last_target is not None
                and np.linalg.norm(target - self.last_target) < self.min_delta):
            return
        self.last_target = target.copy()
        self.send_goal(target)
        self.get_logger().info(
            f'New target: {target[0]:.3f}, {target[1]:.3f}, {target[2]:.3f}')
def main():
    """Entry point: spin the node until interrupted, then shut down cleanly.

    Ctrl-C is caught so the node is destroyed and rclpy is shut down even on
    interrupt; the original leaked the node and raised through spin().
    """
    rclpy.init()
    node = HandFollowMoveIt()
    try:
        rclpy.spin(node)
    except KeyboardInterrupt:
        pass
    finally:
        node.destroy_node()
        rclpy.shutdown()


if __name__ == "__main__":
    main()

View File

@ -132,7 +132,7 @@ class StereoHandDepthNode(Node):
if results.multi_hand_landmarks:
wrist = results.multi_hand_landmarks[0].landmark[0]
u = int(wrist.x * w)
u = int(wrist.x * (w / 2)) # 注意这里是左图宽度
v = int(wrist.y * h)
p.header = msg.header
p.point.x = float(u)
@ -142,7 +142,7 @@ class StereoHandDepthNode(Node):
# 转灰度
left_gray = cv2.cvtColor(left_rect, cv2.COLOR_BGR2GRAY)
right_gray = cv2.cvtColor(right_rect, cv2.COLOR_BGR2GRAY)
# cv2.imshow('left', left_gray)
# 计算视差
disparity = self.stereo.compute(left_gray, right_gray).astype(np.float32) / 16.0
@ -150,11 +150,14 @@ class StereoHandDepthNode(Node):
# ----------- 这里先用“假手点”(画面中心)-----------
x = int(p.point.x)
y = int(p.point.y)
print(f'Wrist pixel: ({x}, {y})')
cv2.circle(left_gray, (x, y), 5, (255, 0, 0), -1)
# print("left_gray.shape:", left_gray.shape)
# cv2.imshow('left', left_gray)
# print(f'Wrist pixel: ({x}, {y})')
# ROI 取中值,稳很多
roi = disparity[y-3:y+3, x-3:x+3]
d = np.nanmedian(roi)
print(f'Disparity at wrist: {roi}')
# print(f'Disparity at wrist: {roi}')
if d <= 0:
return

View File

@ -6,6 +6,12 @@
<description>TODO: Package description</description>
<maintainer email="shen@todo.todo">shen</maintainer>
<license>TODO: License declaration</license>
<exec_depend>rclpy</exec_depend>
<exec_depend>sensor_msgs</exec_depend>
<exec_depend>cv_bridge</exec_depend>
<exec_depend>usb_cam</exec_depend>
<exec_depend>stereo_image_proc</exec_depend>
<test_depend>ament_copyright</test_depend>
<test_depend>ament_flake8</test_depend>

View File

@ -32,6 +32,7 @@ setup(
'camera_node = openarm_camera.camera:main',
'camera_pub_node = openarm_camera.camera_pub:main',
'stereo_hand_depth_node = openarm_camera.stereo_hand_depth_node:main',
'hand_to_moveit_planning = openarm_camera.hand_to_moveit_planning:main',
],
},
)