import rospy
import cv2
import cv2.aruco as aruco
import numpy as np
from gazebo_msgs.msg import ModelState
from gazebo_msgs.srv import SetModelState
from geometry_msgs.msg import Twist


def image_callback(msg):
    try:
        rospy.wait_for_service('/gazebo/set_model_state')
        set_model_state = rospy.ServiceProxy('/gazebo/set_model_state', SetModelState)

        drone_model_state = ModelState()
        drone_model_state.model_name = 'drone_model'
        drone_model_state.reference_frame = ''  # empty string = global (world) frame
        drone_model_state.pose.position.x = 8.0
        drone_model_state.pose.position.z = 4.0
        drone_model_state.twist = Twist()
        drone_model_state.twist.linear.x = 0  # Zero the linear velocity in x
        drone_model_state.twist.linear.y = 0  # Zero the linear velocity in y
        drone_model_state.twist.linear.z = 0  # Zero the linear velocity in z
        cv_image = bridge.imgmsg_to_cv2(msg, "bgr8")
        resized_image = cv2.resize(cv_image, (400, 400))
        gray = cv2.cvtColor(resized_image, cv2.COLOR_BGR2GRAY)

        corners, ids, _ = aruco.detectMarkers(gray, aruco_dict, parameters=parameters)
        rospy.loginfo(ids)
        if ids is not None:
            # cv2.imshow('Aruco Marker Pose', cv_image)
            aruco.drawDetectedMarkers(gray, corners, borderColor=(0, 165, 100))

            # Estimate the pose of each detected marker
            rvecs, tvecs, _ = aruco.estimatePoseSingleMarkers(corners, marker_size, k, distortion_coefficients)

            # Only proceed if the marker we care about is among the detections
            if marker_id in ids:
                idx = np.where(ids == marker_id)[0][0]
                rvec = normalize_rvec(rvecs[idx].flatten())
                rospy.loginfo(f"Normalized rvec: {rvec}")

                R, _ = cv2.Rodrigues(rvec)
                rospy.loginfo(f"Rotation Matrix (R):\n{R}")

                # Apply axis transformation to match the world frame
                R_transformed = apply_axis_transformation(R)

                # Convert the transformed rotation matrix to a quaternion
                qw, qx, qy, qz = rotation_matrix_to_quaternion(R_transformed)

                # Log the pose of the detected marker
                rospy.loginfo(f"Marker ID: {marker_id}")
                rospy.loginfo(f"Quaternion (qx, qy, qz, qw): {qx}, {qy}, {qz}, {qw}")

                # Only the y component of the quaternion is applied; x, z and w stay fixed
                drone_model_state.pose.orientation.x = 0
                drone_model_state.pose.orientation.y = qy
                drone_model_state.pose.orientation.z = 0
                drone_model_state.pose.orientation.w = 1
        else:
            rospy.loginfo("Marker not detected")

        set_model_state(drone_model_state)
    except Exception as e:
        rospy.logerr(f"Error processing image: {e}")
        return
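

# ---------------------------------------------------------------------------
# The callback above uses several names defined elsewhere in the script
# (bridge, aruco_dict, parameters, marker_size, k, distortion_coefficients,
# marker_id) and three helper functions that are not shown. The sketch below
# is a minimal, assumed version of that missing context so the snippet runs
# end to end; the dictionary, marker size, marker ID, camera intrinsics and
# topic name are placeholders, not the original configuration, and the helper
# bodies are plausible implementations rather than the author's originals.
# ---------------------------------------------------------------------------
from cv_bridge import CvBridge
from sensor_msgs.msg import Image

bridge = CvBridge()
aruco_dict = aruco.Dictionary_get(aruco.DICT_4X4_50)   # assumed dictionary
parameters = aruco.DetectorParameters_create()
marker_size = 0.2                                      # marker edge length in metres (assumed)
marker_id = 0                                          # ID of the marker to track (assumed)
k = np.array([[600.0, 0.0, 200.0],                     # placeholder intrinsics for the 400x400 image
              [0.0, 600.0, 200.0],
              [0.0, 0.0, 1.0]])
distortion_coefficients = np.zeros(5)                  # assume an undistorted camera


def normalize_rvec(rvec):
    # Assumed behaviour: wrap the rotation angle of the Rodrigues vector into [-pi, pi]
    angle = np.linalg.norm(rvec)
    if angle < 1e-8:
        return rvec
    wrapped = (angle + np.pi) % (2.0 * np.pi) - np.pi
    return rvec / angle * wrapped


def apply_axis_transformation(R):
    # Assumed behaviour: re-express the camera-frame rotation in the world frame
    # (camera z forward -> x forward, camera x right -> -y, camera y down -> -z)
    axis_swap = np.array([[0, 0, 1],
                          [-1, 0, 0],
                          [0, -1, 0]])
    return axis_swap @ R


def rotation_matrix_to_quaternion(R):
    # Standard rotation-matrix-to-quaternion conversion (returned in w, x, y, z order),
    # valid for the well-conditioned case where trace(R) > -1
    qw = np.sqrt(max(0.0, 1.0 + R[0, 0] + R[1, 1] + R[2, 2])) / 2.0
    qx = (R[2, 1] - R[1, 2]) / (4.0 * qw)
    qy = (R[0, 2] - R[2, 0]) / (4.0 * qw)
    qz = (R[1, 0] - R[0, 1]) / (4.0 * qw)
    return qw, qx, qy, qz


if __name__ == '__main__':
    rospy.init_node('aruco_pose_to_gazebo')
    # The camera topic is an assumption; substitute the topic your drone camera publishes
    rospy.Subscriber('/camera/image_raw', Image, image_callback)
    rospy.spin()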