diff --git a/Trace/handeye_calibration.py b/Trace/handeye_calibration.py
index 4c1a388..b2f5626 100644
--- a/Trace/handeye_calibration.py
+++ b/Trace/handeye_calibration.py
@@ -1,23 +1,15 @@
-import os
-
 import numpy as np
 from scipy.spatial.transform import Rotation as R
 
-def vec2rpy(normal):
+def vec2rpy(normal,long_edge_direction):
     # 将法向量的反方向作为机械臂末端执行器的新Z轴
     z_axis = -normal / np.linalg.norm(normal) # 归一化并取反向作为Z轴
+    x_axis = long_edge_direction/np.linalg.norm(long_edge_direction)
 
-    # 尝试选择一个合适的X轴方向,尽量避免与Z轴共线
-    if abs(z_axis[2]) != 1:
-        x_axis = np.array([1, 0, 0])
-    else:
-        x_axis = np.array([0, 1, 0])
+    x_axis = x_axis-np.dot(x_axis,z_axis)*z_axis
+    x_axis = x_axis/np.linalg.norm(x_axis)
 
-    x_axis = x_axis - np.dot(x_axis, z_axis) * z_axis # 投影到垂直于Z轴的平面
-    x_axis = x_axis / np.linalg.norm(x_axis)
-
-    # 计算Y轴方向,确保它与X轴和Z轴正交
-    y_axis = np.cross(z_axis, x_axis)
+    y_axis = np.cross(z_axis,x_axis)
 
     # 构造旋转矩阵
     rotation_matrix = np.vstack([x_axis, y_axis, z_axis]).T
@@ -62,16 +54,23 @@ def R_matrix(x,y,z,u,v,w):
 
 
 # 图像识别结果:xyz和法向量
-def getPosition(x,y,z,a,b,c,rotation):
+def getPosition(x,y,z,a,b,c,rotation,points):
     target = np.asarray([x, y, z,1])
     camera2robot = np.loadtxt('./Trace/com_pose.txt', delimiter=' ') #相对目录且分隔符采用os.sep
     robot2base = rotation
     camera2base = robot2base @ camera2robot
     target_position = np.dot(camera2base, target)
+    corner_points_camera = np.asarray(points)
+    corner_points_base = np.dot(camera2base[:3, :3], corner_points_camera.T).T + camera2base[:3, 3]
+    edges = np.array([corner_points_base[i] - corner_points_base[i - 1] for i in range(len(corner_points_base))])
+    edge_lengths = np.linalg.norm(edges, axis=1)
+    max_edge_idx = np.argmax(edge_lengths)
+    long_edge_direction = edges[max_edge_idx] / edge_lengths[max_edge_idx] # 单位化方向向量
+
     angle = np.asarray([a,b,c])
     noraml = camera2base[:3, :3]@angle
-    noraml_base = vec2rpy(noraml)
+    noraml_base = vec2rpy(noraml,long_edge_direction)
     print(target_position, noraml_base)