from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
from geometry_msgs.msg import Point
from control_msgs.msg import (
    FollowJointTrajectoryAction,
    FollowJointTrajectoryGoal,
    JointTrajectoryControllerState
)
from trajectory_msgs.msg import JointTrajectoryPoint
self._image_pub = rospy.Publisher("~output_image", Image, queue_size=1)
try:
    input_image = self._bridge.imgmsg_to_cv2(ros_image, "bgr8")
except CvBridgeError as e:
    rospy.logerr(e)
# Centre of the detected bounding box, in pixel coordinates
# (the Point() arguments are omitted from this excerpt).
object_center = Point(

# Shift the origin to the image centre and flip the y axis so that up is positive.
translated_point = Point()
translated_point.x = object_center.x - self._image_shape.x * 0.5
translated_point.y = -(object_center.y - self._image_shape.y * 0.5)

# Normalize both axes to the range [-1, 1].
normalized_point = Point()
normalized_point.x = translated_point.x / (self._image_shape.x * 0.5)
normalized_point.y = translated_point.y / (self._image_shape.y * 0.5)

return normalized_point
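As a quick sanity check of the translate-and-normalize steps above, the following standalone sketch runs the same arithmetic on an assumed 640x480 image with a detection centred at pixel (480, 120); both values are illustrative, not taken from the script.

# Illustrative check: the normalization maps pixel coordinates to [-1, 1],
# with (0, 0) at the image centre and y pointing up.
image_w, image_h = 640.0, 480.0   # assumed image size
cx, cy = 480.0, 120.0             # assumed object centre in pixels

tx = cx - image_w * 0.5           # 160.0: right of centre
ty = -(cy - image_h * 0.5)        # 120.0: above centre (y axis flipped)

print(tx / (image_w * 0.5))       # 0.5
print(ty / (image_h * 0.5))       # 0.5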
MIN_OBJECT_SIZE = 1000  # minimum bounding-box area (width * height in pixels)

# Threshold the image in HSV space to get a binary mask of the target colour.
hsv = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv, lower_color, upper_color)

# Extract contours from the mask. OpenCV 4 returns (contours, hierarchy):
contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# ...while OpenCV 3 returns (image, contours, hierarchy):
_, contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
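The two findContours calls above exist because the return signature changed between OpenCV releases: version 4 returns (contours, hierarchy), while version 3 returns (image, contours, hierarchy). A minimal compatibility wrapper is sketched below; the helper name is mine, not from the original script.

import cv2

def find_contours_compat(mask):
    # OpenCV 4.x: (contours, hierarchy); OpenCV 3.x: (image, contours, hierarchy).
    if int(cv2.__version__.split(".")[0]) >= 4:
        contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    else:
        _, contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    return contours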
for contour in contours:
    # Approximate each contour by its convex hull and take its bounding box.
    approx = cv2.convexHull(contour)
    rect = cv2.boundingRect(approx)

# Keep the bounding box with the largest area (rects is filled inside the
# loop in lines omitted from this excerpt).
rect = max(rects, key=(lambda x: x[2] * x[3]))

if rect[2] * rect[3] > MIN_OBJECT_SIZE:
    # Draw the detected region; rect is (x, y, w, h).
    cv2.rectangle(bgr_image,
                  (rect[0], rect[1]),
                  (rect[0] + rect[2], rect[1] + rect[3]),
                  (0, 0, 255), thickness=2)
# HSV range for orange (OpenCV hue is 0-179, saturation and value are 0-255).
lower_orange = np.array([5, 127, 127])
upper_orange = np.array([20, 255, 255])

# HSV range for blue.
lower_blue = np.array([100, 127, 127])
upper_blue = np.array([110, 255, 255])
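To see why those ranges fit, a BGR sample can be converted to HSV with OpenCV, which uses hue 0-179 and saturation/value 0-255. The sample colours below are illustrative assumptions, not values from the script.

import cv2
import numpy as np

orange_bgr = np.uint8([[[0, 128, 255]]])   # an orange tone, as a 1x1 BGR image
azure_bgr = np.uint8([[[255, 128, 0]]])    # a light blue tone

print(cv2.cvtColor(orange_bgr, cv2.COLOR_BGR2HSV))  # hue ~15, inside [5, 20]
print(cv2.cvtColor(azure_bgr, cv2.COLOR_BGR2HSV))   # hue ~105, inside [100, 110]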
rospy.logerr("Cascade file is not set")
gray = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)

# Downscale the image before detection to reduce processing time.
# Integer division keeps the size a valid (int, int) tuple for cv2.resize.
height, width = gray.shape[:2]
small_gray = cv2.resize(gray, (width // SCALE, height // SCALE))

small_faces = self._face_cascade.detectMultiScale(small_gray)
for small_face in small_faces:
    # Scale the detection back up to the original image coordinates.
    face = small_face * SCALE

    # Face region slice (the assignment target is omitted from this excerpt):
        face[1]:face[1] + face[3],
        face[0]:face[0] + face[2]]

    # Draw the detected face; remaining arguments are omitted from this excerpt.
    cv2.rectangle(bgr_image,
                  (face[0], face[1]),
                  (face[0] + face[2], face[1] + face[3]),
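The `_face_cascade` used above is an OpenCV Haar cascade classifier. A minimal sketch of loading and applying one is shown below; the XML path and image file are assumptions, so substitute the cascade shipped with your OpenCV install.

import cv2

# Assumed path to a frontal-face Haar cascade; adjust for your installation.
cascade_path = "/usr/share/opencv/haarcascades/haarcascade_frontalface_alt2.xml"
face_cascade = cv2.CascadeClassifier(cascade_path)
if face_cascade.empty():
    raise IOError("could not load cascade file: " + cascade_path)

gray = cv2.cvtColor(cv2.imread("sample.jpg"), cv2.COLOR_BGR2GRAY)  # assumed test image
faces = face_cascade.detectMultiScale(gray)  # array of (x, y, w, h) rectangles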
# Second argument of the SimpleActionClient constructor (the action name,
# on the preceding line, is omitted from this excerpt).
    FollowJointTrajectoryAction)

# Give up if the neck controller's action server does not come up in time.
if not self.__client.wait_for_server(rospy.Duration(5.0)):
    rospy.logerr("Action Server Not Found")
    rospy.signal_shutdown("Action Server not found")
# Subscribe to the neck controller state to read back the current joint angles.
self._state_sub = rospy.Subscriber(
    "/sciurus17/controller3/neck_controller/state",
    JointTrajectoryControllerState, self._state_callback)
# The controller state lists the joints as [yaw, pitch].
yaw_radian = state.actual.positions[0]
pitch_radian = state.actual.positions[1]
def set_angle(self, yaw_angle, pitch_angle, goal_secs=1.0e-9):
    # Build a trajectory goal that moves both neck joints to the target
    # angles within goal_secs seconds.
    goal = FollowJointTrajectoryGoal()
    goal.trajectory.joint_names = ["neck_yaw_joint", "neck_pitch_joint"]

    yawpoint = JointTrajectoryPoint()
    yawpoint.positions.append(yaw_angle)
    yawpoint.positions.append(pitch_angle)
    yawpoint.time_from_start = rospy.Duration(goal_secs)
    goal.trajectory.points.append(yawpoint)

    # Send the goal and wait only briefly so the call does not block.
    self.__client.send_goal(goal)
    self.__client.wait_for_result(rospy.Duration(0.1))
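Assuming `neck` is an instance of the class that defines `set_angle` (as in the shutdown hook just below), a call looks like this; the angle values are purely illustrative.

import math

# Command the neck to yaw +10 degrees and pitch -5 degrees over one second.
neck.set_angle(math.radians(10), math.radians(-5), goal_secs=1.0)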
# On shutdown, return the neck to the front over 3 seconds.
neck.set_angle(math.radians(0), math.radians(0), 3.0)

rospy.on_shutdown(hook_shutdown)
# Angles are handled in degrees here and converted to radians when sent.
INITIAL_YAW_ANGLE = 0
INITIAL_PITCH_ANGLE = 0

MIN_PITCH_ANGLE = -70

# Proportional gains for the yaw and pitch corrections.
OPERATION_GAIN_X = 5.0
OPERATION_GAIN_Y = 5.0

# Step size (degrees per loop pass) used when returning to the initial angles.
RESET_OPERATION_ANGLE = 3
# Wait until the first neck controller state message arrives.
while not neck.state_received():
    pass

yaw_angle = neck.get_current_yaw()
pitch_angle = neck.get_current_pitch()

detection_timestamp = rospy.Time.now()

while not rospy.is_shutdown():
    # Normalized object position, with (0, 0) at the image centre.
    object_position = object_tracker.get_object_position()

    if object_tracker.object_detected():
        detection_timestamp = rospy.Time.now()

    lost_time = rospy.Time.now() - detection_timestamp
    # React to the object being lost for more than a second
    # (the branch body is omitted from this excerpt).
    if lost_time.to_sec() > 1.0:
        pass

    # Proportional control: nudge the neck so the object moves toward
    # the image centre.
    if math.fabs(object_position.x) > THRESH_X:
        yaw_angle += -object_position.x * OPERATION_GAIN_X

    if math.fabs(object_position.y) > THRESH_Y:
        pitch_angle += object_position.y * OPERATION_GAIN_Y

    # Clamp the commands to the allowed range of motion.
    if yaw_angle > MAX_YAW_ANGLE:
        yaw_angle = MAX_YAW_ANGLE
    if yaw_angle < MIN_YAW_ANGLE:
        yaw_angle = MIN_YAW_ANGLE

    if pitch_angle > MAX_PITCH_ANGLE:
        pitch_angle = MAX_PITCH_ANGLE
    if pitch_angle < MIN_PITCH_ANGLE:
        pitch_angle = MIN_PITCH_ANGLE

    # Step the angles back toward their initial values by at most
    # RESET_OPERATION_ANGLE degrees per iteration.
    diff_yaw_angle = yaw_angle - INITIAL_YAW_ANGLE
    if math.fabs(diff_yaw_angle) > RESET_OPERATION_ANGLE:
        yaw_angle -= math.copysign(RESET_OPERATION_ANGLE, diff_yaw_angle)
    else:
        yaw_angle = INITIAL_YAW_ANGLE

    diff_pitch_angle = pitch_angle - INITIAL_PITCH_ANGLE
    if math.fabs(diff_pitch_angle) > RESET_OPERATION_ANGLE:
        pitch_angle -= math.copysign(RESET_OPERATION_ANGLE, diff_pitch_angle)
    else:
        pitch_angle = INITIAL_PITCH_ANGLE

    neck.set_angle(math.radians(yaw_angle), math.radians(pitch_angle))
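The copysign logic above moves an angle back toward its initial value by at most RESET_OPERATION_ANGLE degrees per loop pass. The standalone sketch below shows the same stepping in isolation; the helper name and numbers are mine, not from the script.

import math

def step_toward(angle, target, step):
    # Move angle toward target by at most `step`, mirroring the
    # copysign branch in the loop above.
    diff = angle - target
    if math.fabs(diff) > step:
        return angle - math.copysign(step, diff)
    return target

angle = 10.0
while angle != 0.0:
    angle = step_toward(angle, 0.0, 3.0)
    print(angle)   # 7.0, 4.0, 1.0, 0.0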
if __name__ == '__main__':
    rospy.init_node("head_camera_tracking")
Methods defined in the script:

    _image_callback(self, ros_image)
    get_object_position(self)
    object_detected(self)
    _detect_color_object(self, bgr_image, lower_color, upper_color)
    _detect_orange_object(self, bgr_image)
    _detect_blue_object(self, bgr_image)
    _detect_face(self, bgr_image)
    _state_callback(self, state)
    get_current_yaw(self)
    get_current_pitch(self)
    set_angle(self, yaw_angle, pitch_angle, goal_secs=1.0e-9)