perception_monitor.py
Go to the documentation of this file.
00001 #! /usr/bin/python
00002 
00003 import numpy as np, math
00004 import scipy.stats as stats
00005 from threading import RLock
00006 import random
00007 import sys
00008 import multiprocessing as mp
00009 
00010 import roslib; roslib.load_manifest('pr2_overhead_grasping')
00011 import rospy
00012 import tf
00013 
00014 from hrl_lib.msg import FloatArray
00015 from hrl_lib.rutils import GenericListener, ros_to_dict, RateCaller
00016 from tf.transformations import *
00017 from helpers import log, err, node_name, FileOperations
00018 import classifiers as cfiers
00019 
00020 from std_msgs.msg import Float64MultiArray
00021 from pr2_msgs.msg import AccelerometerState, PressureState
00022 from sensor_msgs.msg import JointState
00023 
00024 import threading
00025 import functools as ft
00026 import Queue
00027 import matplotlib.pyplot as plt
00028 
00029 import time, string
00030 
00031 # def pool_loading(fns):
00032 #     NUM_PROCESSES = 4
00033 #     pool = mp.Pool(NUM_PROCESSES)
00034 #     pool_result = pool.map(hard_load_pickle, fns) 
00035 
class FastGaussConvolve(object):
    """Bank of Gaussian-derivative convolution kernels.

    Precomputes, for derivative degrees 0-2 and every sigma in sigma_list,
    a kernel of filter_len taps sampled over
    [(filter_cen_delay - filter_beg_delay)/sigma, filter_cen_delay/sigma]
    and normalized so its absolute values sum to 1.  Kernels are indexed
    as self.kernels[degree][sigma_index].
    """

    def __init__(self, rate, filter_beg_delay, filter_cen_delay, sigma_list):
        self.filter_len = int(filter_beg_delay / rate)
        deriv_fns = [self.gauss, self.gauss_d1, self.gauss_d2]
        self.kernels = []
        for fn in deriv_fns:
            per_sigma = []
            for sigma in sigma_list:
                # Sample points of the (scaled) kernel support.
                support = np.linspace((filter_cen_delay - filter_beg_delay) / sigma,
                                      filter_cen_delay / sigma,
                                      self.filter_len)
                taps = np.array([fn(t) for t in support])
                # Normalize by the L1 norm so each kernel's taps' absolute
                # values sum to one.
                per_sigma.append(taps / np.sum(np.fabs(taps)))
            self.kernels.append(per_sigma)

    ##
    # Convolves the last values in the signal with the filter
    def convolve_pt(self, signal, deg, sigma_i):
        kernel = self.kernels[deg][sigma_i]
        return np.convolve(signal, kernel, mode='valid')[-1]

    ##
    # Convolves the whole signal, padding both ends with the mean of the
    # first 20 samples so the 'same'-mode boundaries stay stable, then
    # trims the padding back off.  Returns a plain list of len(signal).
    def convolve_signal(self, signal, deg, sigma_i):
        pad_len = 100
        baseline = sum(signal[0:20]) / 20.
        padded = np.concatenate(([baseline] * pad_len, signal, [baseline] * pad_len))
        filtered = np.convolve(padded, self.kernels[deg][sigma_i], mode='same')
        return list(filtered[pad_len:-pad_len])

    def gauss(self, x, sig=1.):
        # Gaussian density with standard deviation sig.
        norm_const = 1. / np.sqrt(2. * np.pi * sig ** 2)
        return norm_const * np.exp(-x ** 2 / (2 * sig ** 2))

    def gauss_d1(self, x, sig=1.):
        # First derivative of the Gaussian: (-x / sig^2) * g(x).
        norm_const = 1. / np.sqrt(2. * np.pi * sig ** 2)
        g = norm_const * np.exp(-x ** 2 / (2 * sig ** 2))
        return -x / sig ** 2 * g

    def gauss_d2(self, x, sig=1.):
        # Second derivative of the Gaussian: ((x^2 - sig^2) / sig^4) * g(x).
        norm_const = 1. / np.sqrt(2. * np.pi * sig ** 2)
        g = norm_const * np.exp(-x ** 2 / (2 * sig ** 2))
        return (x ** 2 - sig ** 2) / sig ** 4 * g
00096 
##
# Processes the AccelerometerState message, returning an average of the
# sample values and the timestamp in nanoseconds
#
# @param msg AccelometerState message
# @return (t, (x, y, z)) where t is the current ROS time in seconds and
#         the second element is a length-3 numpy array of the mean sample;
#         None if the message carries no samples
def accel_state_processor(msg):
    samples = msg.samples
    if not samples:
        return None
    totals = np.zeros(3)
    for samp in samples:
        totals += (samp.x, samp.y, samp.z)
    totals /= len(samples)
    return (rospy.Time.now().to_sec(), totals)
00114 
# Indices of the seven arm joints within the /joint_states name/position/
# velocity/effort arrays, ordered to match joint_nm_list below.
# NOTE(review): these offsets assume a fixed PR2 /joint_states layout —
# verify against the running robot before reuse.
r_jt_idx_list = [17, 18, 16, 20, 19, 21, 22]
l_jt_idx_list = [29, 30, 28, 32, 31, 33, 34]
# Joint name stems; full joint names are '<r|l>_<stem>_joint'.
joint_nm_list = ['shoulder_pan', 'shoulder_lift', 'upper_arm_roll',
                 'elbow_flex', 'forearm_roll', 'wrist_flex',
                 'wrist_roll']
##
# Callback for /joint_states topic. Extracts the current joint
# angles, velocities, or efforts for one arm.
#
# @param msg JointState message recieved from the /joint_states topic
# @param right_arm True for the right arm's joints, False for the left
# @param angles_velocities_efforts 1 for velocities, 2 for efforts,
#        anything else (default 0) for positions
# @return (t, ret) where t is the current ROS time in seconds and ret is
#         a length-7 numpy array ordered as joint_nm_list
# @raises RuntimeError if a joint name at the hard-coded index does not
#         match the expected '<r|l>_<stem>_joint' name
#
# The original duplicated the whole loop body for the left/right arms and
# passed a different value for the '%d' in the two error messages; the
# branches are unified here (reporting the loop index i in both cases).
def joints_state_processor(msg, right_arm=True, angles_velocities_efforts=0):
    idx_list = r_jt_idx_list if right_arm else l_jt_idx_list
    prefix = 'r_' if right_arm else 'l_'
    ret = np.array([0.] * 7)
    for i, nm in enumerate(joint_nm_list):
        idx = idx_list[i]
        expected = prefix + nm + '_joint'
        if msg.name[idx] != expected:
            raise RuntimeError('joint angle name does not match. Expected: %s, Actual: %s i: %d'%(expected, msg.name[idx], i))
        if angles_velocities_efforts == 1:
            ret[i] = msg.velocity[idx]
        elif angles_velocities_efforts == 2:
            ret[i] = msg.effort[idx]
        else:
            ret[i] = msg.position[idx]
    return (rospy.Time.now().to_sec(), ret)
00150 
##
# Extracts individual fingertip pressure electrode values from a
# PressureState message.
#
# @param msg PressureState message
# @param right_finger_tip True to read r_finger_tip, False for l_finger_tip
# @param indicies electrode indices to extract; None means all of them
# @return (t, ret) where t is the current ROS time in seconds and ret is
#         a numpy array of the selected electrode values
def pressure_state_processor(msg, right_finger_tip=True, indicies=None):
    # BUGFIX: the None default must be resolved before sizing the output;
    # the original called len(indicies) first and crashed with a TypeError
    # whenever indicies was left as None.
    if indicies is None:
        indicies = range(len(msg.r_finger_tip))
    ret = np.array([0.] * len(indicies))
    tip_vals = msg.r_finger_tip if right_finger_tip else msg.l_finger_tip
    for i, ind in enumerate(indicies):
        ret[i] = tip_vals[ind]
    return (rospy.Time.now().to_sec(), ret)
00161 
##
# Sums a set of fingertip pressure electrodes from a PressureState message.
#
# @param msg PressureState message
# @param right_finger_tip True to read r_finger_tip, False for l_finger_tip
# @param indicies electrode indices to sum; None sums over the index range
#        of r_finger_tip (matching the original behavior for both tips)
# @return (t, np.array([total])) with t the current ROS time in seconds
def pressure_state_sum_processor(msg, right_finger_tip=True, indicies=None):
    if indicies is None:
        indicies = range(len(msg.r_finger_tip))
    tip_vals = msg.r_finger_tip if right_finger_tip else msg.l_finger_tip
    total = 0.
    for ind in indicies:
        total += tip_vals[ind]
    return (rospy.Time.now().to_sec(), np.array([total]))
00172 
00173 ##
00174 # Monitors perception channels on the robot arms. Important: rate must be the same for both
00175 # data_capture and monitoring. Values are gathered timestep by timestep.
00176 #
00177 # Usecase:
00178 # apm = ArmPerceptionMonitor(0)
00179 # for trajectory in trajectories:
00180 #     apm.start_data_capture()
00181 #     trajectory.run()
00182 #     trajectory.wait_for_completion()
00183 #     apm.stop_data_capture()
00184 # mean_function, variance_function = apm.generate_model(...)
00185 # 
00186 class ArmPerceptionMonitor( ):
00187 
00188     ##
00189     # Initializes internal variables
00190     #
00191     # @param arm 0 if right, 1 if left
00192     # @param percept_mon_list list of perceptions to monitor; if None, do all
00193     def __init__(self, arm):
00194         self.load_parameters()
00195         ############## Classifiers #################
00196         self.impact_classifier = cfiers.classifiers_dict["small_refined_random_forest"]
00197         self.coll_type_classifier = cfiers.classifiers_dict["large_refined_random_forest"]
00198 
00199         log("Loading impact classifier")
00200         self.impact_classifier.load(self.I_RTREE_CLASSIFIER)
00201         log("Loading collision type classifier")
00202         self.coll_type_classifier.load(self.T_RTREE_CLASSIFIER)
00203 
00204         if arm == 0:
00205             self.armc = "r"
00206             self.is_right_arm = True
00207         else:
00208             self.armc = "l"
00209             self.is_right_arm = False
00210 
00211         self.perceptions = {}
00212 
00213         self.perception_names = [ "accelerometer",
00214                                   "joint_angles",
00215                                   "joint_velocities",
00216                                   "joint_efforts",
00217                                   "r_finger_periph_pressure",
00218                                   "r_finger_pad_pressure", 
00219                                   "l_finger_periph_pressure",
00220                                   "l_finger_pad_pressure",
00221                                   "gripper_pose"]
00222         self.perception_lengths = { "accelerometer" : 3,
00223                                   "joint_angles" : 7,
00224                                   "joint_velocities" : 7,
00225                                   "joint_efforts" : 7,
00226                                   "r_finger_periph_pressure" : 1,
00227                                   "r_finger_pad_pressure" : 1, 
00228                                   "l_finger_periph_pressure" : 1,
00229                                   "l_finger_pad_pressure" : 1,
00230                                   "gripper_pose" : 7}
00231 
00232         self.percept_mon_list = self.perception_names
00233 
00234         self.impact_fgc = FastGaussConvolve(self.SAMPLING_RATE, self.I_FILTER_BEG_DELAY,
00235                                      self.I_FILTER_CEN_DELAY, self.i_sigma_list)
00236         self.type_fgc = FastGaussConvolve(self.SAMPLING_RATE, self.T_FILTER_BEG_DELAY,
00237                                      self.T_FILTER_CEN_DELAY, self.t_sigma_list)
00238 
00239         self.perception_started = False
00240         # self.detect_ready = False
00241         self.fos = FileOperations()
00242 
00243     def load_parameters(self):
00244         self.SAMPLING_RATE = rospy.get_param("/overhead_grasping/data_sampling_rate")
00245         self.PERCEPTION_ORDER= rospy.get_param("/overhead_grasping/perception_order")
00246 
00247         # impact parameters
00248         self.I_FILTER_BEG_DELAY = rospy.get_param("/overhead_grasping/i_filter_beg_delay")
00249         self.I_FILTER_CEN_DELAY = rospy.get_param("/overhead_grasping/i_filter_cen_delay")
00250         self.I_MIN_SIGMA = rospy.get_param("/overhead_grasping/i_min_sigma")
00251         self.I_MAX_SIGMA = rospy.get_param("/overhead_grasping/i_max_sigma")
00252         self.I_NUM_SIGMA = rospy.get_param("/overhead_grasping/i_num_sigma")
00253         self.i_sigma_list = np.linspace(self.I_MIN_SIGMA,
00254                                         self.I_MAX_SIGMA,
00255                                         self.I_NUM_SIGMA)
00256         self.I_DEGREE_DICT = rospy.get_param("/overhead_grasping/i_degree_dict")
00257         self.i_instance_len = 0
00258         for k in self.PERCEPTION_ORDER:
00259             for coord in self.I_DEGREE_DICT[k]:
00260                 for deg in coord:
00261                     self.i_instance_len += self.I_NUM_SIGMA
00262         self.I_RTREE_CLASSIFIER = rospy.get_param("/overhead_grasping/i_rtree_classifier")
00263 
00264         # collision type parameters
00265         self.T_FILTER_BEG_DELAY = rospy.get_param("/overhead_grasping/t_filter_beg_delay")
00266         self.T_FILTER_CEN_DELAY = rospy.get_param("/overhead_grasping/t_filter_cen_delay")
00267         self.T_MIN_SIGMA = rospy.get_param("/overhead_grasping/t_min_sigma")
00268         self.T_MAX_SIGMA = rospy.get_param("/overhead_grasping/t_max_sigma")
00269         self.T_NUM_SIGMA = rospy.get_param("/overhead_grasping/t_num_sigma")
00270         self.t_sigma_list = np.linspace(self.T_MIN_SIGMA,
00271                                         self.T_MAX_SIGMA,
00272                                         self.T_NUM_SIGMA)
00273         self.T_DEGREE_DICT = rospy.get_param("/overhead_grasping/t_degree_dict")
00274         self.t_instance_len = 0
00275         for k in self.PERCEPTION_ORDER:
00276             for coord in self.T_DEGREE_DICT[k]:
00277                 for deg in coord:
00278                     self.t_instance_len += self.T_NUM_SIGMA
00279         self.T_RTREE_CLASSIFIER = rospy.get_param("/overhead_grasping/t_rtree_classifier")
00280 
00281         self.STATIC_SIGMA_INDEX = rospy.get_param("/overhead_grasping/static_sigma_index")
00282         self.STATIC_DERIV_THRESH = rospy.get_param("/overhead_grasping/static_deriv_thresh")
00283 
00284     ##
00285     # Initializes the listeners on the sensor topics.  This must
00286     # be started before any data can be collected from the arms.
00287     def activate_sensing(self, tf_listener=None):
00288 
00289         log("Initializing arm perception listeners")
00290         self.tf_listener = tf_listener
00291 
00292         if "accelerometer" in self.percept_mon_list:
00293             accel_listener = GenericListener("accel_mon_node", AccelerometerState, 
00294                                      "accelerometer/" + self.armc + "_gripper_motor",
00295                                      self.SAMPLING_RATE, accel_state_processor)
00296             self.perceptions["accelerometer"] = accel_listener.read
00297 
00298         if "joint_angles" in self.percept_mon_list:
00299             joint_angles_processor = ft.partial(joints_state_processor, 
00300                                                 right_arm=self.is_right_arm, 
00301                                                 angles_velocities_efforts=0)
00302             joint_angles_listener = GenericListener("joint_angles_mon_node", JointState, 
00303                                                "joint_states", self.SAMPLING_RATE, joint_angles_processor)
00304             self.perceptions["joint_angles"] = joint_angles_listener.read
00305 
00306         if "joint_velocities" in self.percept_mon_list:
00307             joint_velocities_processor = ft.partial(joints_state_processor, 
00308                                                 right_arm=self.is_right_arm, 
00309                                                 angles_velocities_efforts=1)
00310             joint_velocities_listener = GenericListener("joint_vels_mon_node", JointState, 
00311                                               "joint_states", self.SAMPLING_RATE, joint_velocities_processor)
00312             self.perceptions["joint_velocities"] = joint_velocities_listener.read
00313 
00314         if "joint_efforts" in self.percept_mon_list:
00315             joint_efforts_processor = ft.partial(joints_state_processor, 
00316                                                 right_arm=self.is_right_arm, 
00317                                                 angles_velocities_efforts=2)
00318             joint_efforts_listener = GenericListener("joint_efforts_mon_node", JointState, 
00319                                               "joint_states", self.SAMPLING_RATE, joint_efforts_processor)
00320             self.perceptions["joint_efforts"] = joint_efforts_listener.read
00321 
00322         if "r_finger_periph_pressure" in self.percept_mon_list:
00323             r_finger_periph_pressure_processor = ft.partial(pressure_state_sum_processor, 
00324                                             right_finger_tip=True, indicies=range(1,7))
00325             r_finger_periph_pressure_listener = GenericListener(self.armc + "_pressure_r_periph_mon_node", PressureState, 
00326                                                          "pressure/" + self.armc + "_gripper_motor", self.SAMPLING_RATE, 
00327                                                          r_finger_periph_pressure_processor)
00328             self.perceptions["r_finger_periph_pressure"] = r_finger_periph_pressure_listener.read
00329 
00330         if "r_finger_pad_pressure" in self.percept_mon_list:
00331             r_finger_pad_pressure_processor = ft.partial(pressure_state_sum_processor, 
00332                                             right_finger_tip=True, indicies=range(7,22))
00333             r_finger_pad_pressure_listener = GenericListener(self.armc + "_pressure_r_pad_mon_node", PressureState, 
00334                                                          "pressure/" + self.armc + "_gripper_motor", self.SAMPLING_RATE, 
00335                                                          r_finger_pad_pressure_processor)
00336             self.perceptions["r_finger_pad_pressure"] = r_finger_pad_pressure_listener.read
00337 
00338         if "l_finger_periph_pressure" in self.percept_mon_list:
00339             l_finger_periph_pressure_processor = ft.partial(pressure_state_sum_processor, 
00340                                             right_finger_tip=False, indicies=range(1,7))
00341             l_finger_periph_pressure_listener = GenericListener(self.armc + "_pressure_l_periph_mon_node", PressureState, 
00342                                                          "pressure/" + self.armc + "_gripper_motor", self.SAMPLING_RATE, 
00343                                                          l_finger_periph_pressure_processor)
00344             self.perceptions["l_finger_periph_pressure"] = l_finger_periph_pressure_listener.read
00345 
00346         if "l_finger_pad_pressure" in self.percept_mon_list:
00347             l_finger_pad_pressure_processor = ft.partial(pressure_state_sum_processor, 
00348                                             right_finger_tip=False, indicies=range(7,22))
00349             l_finger_pad_pressure_listener = GenericListener(self.armc + "_pressure_l_pad_mon_node", PressureState, 
00350                                                          "pressure/" + self.armc + "_gripper_motor", self.SAMPLING_RATE, 
00351                                                          l_finger_pad_pressure_processor)
00352             self.perceptions["l_finger_pad_pressure"] = l_finger_pad_pressure_listener.read
00353 
00354         for k in self.perceptions:
00355             # Make sure we have recieved a message first
00356             self.perceptions[k](allow_duplication=False, willing_to_wait=True)
00357             self.perceptions[k] = ft.partial(self.perceptions[k], willing_to_wait=False, 
00358                                              quiet=True,
00359                                              warn=False, allow_duplication=True)
00360 
00361         if "gripper_pose" in self.percept_mon_list:
00362             def gripper_pose_lookup():
00363                 if self.tf_listener is not None:
00364                     lct = self.tf_listener.getLatestCommonTime("/torso_lift_link", "/" + self.armc + "_wrist_roll_link")
00365                     pos, quat = self.tf_listener.lookupTransform("/torso_lift_link", "/" + self.armc + "_wrist_roll_link", lct)
00366                     return (lct.to_sec(), np.array(pos + quat))
00367                 else:
00368                     return (0., np.array([0.]*7))
00369 
00370             self.perceptions["gripper_pose"] = gripper_pose_lookup
00371 
00372 
00373         # all callbacks should return data in this format:
00374         # (t, np.array([x1, x2, ...]))
00375         # t is time in seconds
00376 
00377         self.clear_vars()
00378         self.perception_started = True
00379         
00380         log("Finished initialization")
00381 
00382     ##
00383     # Initialize variables
00384     def clear_vars(self):
00385         self.datasets = {}
00386         self.models = {}
00387         for k in self.perceptions:
00388             self.datasets[k] = []
00389             self.models[k] = {"mean" : None, "variance" : None}
00390 
00391     ##
00392     # Begin capturing peception data for all of the listeners
00393     #
00394     # @param duration If None, continue capturing until stop is called.
00395     #                 Else, stop capturing after duration seconds have passed.
00396     def start_data_capture(self, duration=None):
00397         if not self.perception_started:
00398             err("Perception hasn't been started!")
00399             return False
00400 
00401         self.logger = RateCaller(self._gather_perception, self.SAMPLING_RATE)
00402 
00403         for k in self.perceptions:
00404             self.datasets[k] += [[]]
00405         self.logger.run()
00406 
00407         if not duration is None:
00408             threading.Timer(self.stop_data_capture, duration)
00409 
00410         return True
00411 
00412     def _gather_perception(self):
00413         for k in self.perceptions:
00414             self.datasets[k][-1].append(self.perceptions[k]())
00415 
00416     ##
00417     # Stop capturing perception data.  Store output data in datasets list for
00418     # later statistics.
00419     def stop_data_capture(self):
00420         self.logger.stop()
00421         num_datapts = len(self.datasets[self.datasets.keys()[0]][-1])
00422         log("%d datapoints collected", num_datapts)
00423         return num_datapts
00424 
00425 
    ##
    # Offline feature extraction over recorded capture datasets.  For each
    # perception channel, every coordinate's time series is convolved with
    # Gaussian derivatives (degrees 0-2) at each sigma scale, once through
    # the impact filter bank and once through the collision-type bank.
    #
    # @param datasets dict: perception name -> list of capture streams,
    #        each stream a list of (timestamp, value-array) samples; only
    #        the first stream (data_list[0]) is processed here
    # @return dict: perception name -> {"impact_features", "type_features",
    #         "time", "i_sigma_list", "t_sigma_list"}; the feature entries
    #         are indexed [degree][sigma] and hold per-timestep tuples of
    #         coordinate values (Python 2 zip semantics throughout)
    def process_signals(self, datasets):
        
        models = {}

        # NOTE(review): shadows the builtin sum and neither name is used
        # below — looks like dead code; confirm before removing.
        sum, num = 0., 0.
        for p in datasets:
            models[p] = {}
            # sigs_proc[deg][sigma_index] accumulates one filtered signal
            # per coordinate of this channel.
            i_sigs_proc = [ [[] for x in range(self.I_NUM_SIGMA)] for y in range(3)]
            t_sigs_proc = [ [[] for x in range(self.T_NUM_SIGMA)] for y in range(3)]
            data_list = datasets[p]
            num_coords = len(data_list[0][0][1])
            times = None
            for coord in range(num_coords):
                stream = data_list[0]
                # Some recorded values arrive wrapped in a length-1 list or
                # array; unwrap them to scalars.
                def conv(v):
                    if type(v) == type([]) or type(v) == type(np.array([])):
                        return v[0]
                    return v
                # zip(*stream)[1] is the sequence of value arrays; zipping
                # again transposes so [coord] selects one coordinate's
                # time series.  (Relies on Python 2 zip returning a list.)
                signal = [conv(v) for v in zip(*zip(*stream)[1])[coord]]

                time_ind = np.array(zip(*stream)[0], dtype=np.float64)

                # normalize time indicies by offsetting to first value
                time_ind_beg = time_ind[0]
                for i, v in enumerate(time_ind):
                    time_ind[i] = float(time_ind[i] - time_ind_beg) 

                times = time_ind
                for deg in range(3):
                    for sigma_ind, sigma in enumerate(self.i_sigma_list):
                        s = self.impact_fgc.convolve_signal(signal, deg, sigma_ind)
                        i_sigs_proc[deg][sigma_ind].append(s)
                    for sigma_ind, sigma in enumerate(self.t_sigma_list):
                        s = self.type_fgc.convolve_signal(signal, deg, sigma_ind)
                        t_sigs_proc[deg][sigma_ind].append(s)

            if rospy.is_shutdown():
                sys.exit()

            # Re-group so features are indexed [deg][sigma][timestep] with
            # a tuple of coordinate values at each timestep.
            models[p]["impact_features"] = [ [[] for x in range(self.I_NUM_SIGMA)] for y in range(3)]
            models[p]["type_features"] = [ [[] for x in range(self.T_NUM_SIGMA)] for y in range(3)]
            for deg in range(3):
                for sigma in range(self.I_NUM_SIGMA):
                    models[p]["impact_features"][deg][sigma] = zip(*i_sigs_proc[deg][sigma])
                for sigma in range(self.T_NUM_SIGMA):
                    models[p]["type_features"][deg][sigma] = zip(*t_sigs_proc[deg][sigma])
            models[p]["time"] = times
            models[p]["i_sigma_list"] = self.i_sigma_list
            models[p]["t_sigma_list"] = self.t_sigma_list

        return models
00483 
00484     # def ready_collision_detection(self):
00485     #     # wait for the classifiers to finish loading
00486     #     self.impact_classifier.finish_loading()
00487     #     # self.coll_type_classifier.finish_loading()
00488     #     self.detect_ready = True
00489 
    ##
    # Begin monitoring peception data to make sure it doesn't deviate from
    # the model provided.  Starts a rate-driven sampling loop
    # (_data_spawn_thread) that fills per-channel ring buffers each tick and
    # runs either the dynamic classifier pipeline or the static check.
    #
    # @param dynamic_detection True: use the impact/collision-type
    #        classifiers; False: static detection over the channels chosen
    #        by static_collision_type
    # @param callback stored on self for use by the detection routines
    # @param static_collision_type forwarded to _setup_collision_type when
    #        dynamic_detection is False
    # @param debug if True, the tick loop prints buffer values and dynamic
    #        detection never triggers a collision
    # @return False if activate_sensing has not been called, else True
    def begin_collision_detection(self, dynamic_detection=False, 
                                  callback=None, static_collision_type=None,
                                  debug=False):

        if not self.perception_started:
            err("Perception hasn't been started!")
            return False
        # if not self.detect_ready:
        #     err("ready_collision_detection hasn't been called")
        #     return False

        self.is_dynamic = dynamic_detection
        self.callback = callback
        if not dynamic_detection:
            self._setup_collision_type(static_collision_type)
        self.collision_detected = False
        self.collision_finished = False
        self.debug = debug
        self.collision_type = "No collision"
        self.cur_iter = 0            # tick counter used for ring-buffer phase
        self.cur_inds = {}

        for k in self.perceptions:
            self.cur_inds[k] = 0

        # One square (filter_len x filter_len) ring buffer per channel
        # coordinate, for each filter bank.
        # NOTE(review): the square shape presumably stores one phase-shifted
        # copy of the sample window per column (see _monitor_data_collector)
        # — confirm intent.
        self.i_data_buffers = {}
        self.t_data_buffers = {}
        for k in self.perceptions:
            self.i_data_buffers[k] = [ np.zeros((self.impact_fgc.filter_len, self.impact_fgc.filter_len)) for i in range(self.perception_lengths[k])]
            self.t_data_buffers[k] = [ np.zeros((self.type_fgc.filter_len, self.type_fgc.filter_len)) for i in range(self.perception_lengths[k])]
        self.end_monitor_lock = threading.Lock()

        # lock1/lock2 coordinate _data_spawn_thread with DataThread (at most
        # one collector thread running, one waiting).
        self.data_thread_spawner_lock1 = threading.Lock()
        self.data_thread_spawner_lock2 = threading.Lock()
        self.data_thread_spawner = RateCaller(self._data_spawn_thread, self.SAMPLING_RATE ) 
        self.data_thread_spawner.run()

        self.beg_time = rospy.Time.now().to_sec()

        # Running totals for per-tick processing-time diagnostics.
        self.t_sum, self.t_num = 0., 0.
        self.normal_dict_counts = []
        self.temp_dict = {}
        for k in self.perceptions:
            self.temp_dict[k] = [[] for i in range(self.perception_lengths[k])]

        return True
00543 
00544     def _setup_collision_type(self, static_collision_type):
00545         if static_collision_type == "all":
00546             static_dict = {"accelerometer" : range(3),
00547                            "joint_angles" : range(7),
00548                            "joint_velocities" : range(7),
00549                            "joint_efforts" : range(7),
00550                            "r_finger_periph_pressure" : range(1),
00551                            "r_finger_pad_pressure" : range(1), 
00552                            "l_finger_periph_pressure" : range(1),
00553                            "l_finger_pad_pressure" : range(1),
00554                            "gripper_pose" : range(7)}
00555         elif static_collision_type in self.perception_names:
00556             static_dict = {static_collision_type : range(
00557                                     self.perception_lengths[static_collision_type])}
00558         elif static_collision_type == "pressure":
00559             static_dict = {"r_finger_periph_pressure" : range(1),
00560                            "r_finger_pad_pressure" : range(1), 
00561                            "l_finger_periph_pressure" : range(1),
00562                            "l_finger_pad_pressure" : range(1)}
00563         elif type(static_collision_type) == type({}):
00564             static_dict = static_collision_type
00565         else:
00566             err("Bad static_collision_type")
00567         self.static_dict = static_dict
00568 
    ##
    # Rate-callback that spawns at most one DataThread per tick, with at
    # most one additional tick queued.  lock1 admits a single waiter;
    # lock2 is held while a DataThread runs (DataThread.run cycles it on
    # completion).  Ticks arriving while both locks are taken fall through
    # and are dropped.
    def _data_spawn_thread(self):
        # only one thread allowed to wait at a time
        # all other threads will pass through
        if self.data_thread_spawner_lock1.acquire(False):
            if self.data_thread_spawner_lock2.acquire(True):
                # NOTE(review): this non-blocking re-acquire of lock1 looks
                # like a no-op since lock1 is already held here; presumably
                # it just guards the release — confirm intent.
                self.data_thread_spawner_lock1.acquire(False)
                self.data_thread_spawner_lock1.release()
                mt = self.DataThread(self)
                mt.start()
00578 
00579     class DataThread(threading.Thread):
00580         def __init__(self, apm):
00581             threading.Thread.__init__(self)
00582             self.apm = apm
00583 
00584         def run(self):
00585             self.apm._monitor_data_collector()
00586             self.apm.data_thread_spawner_lock2.acquire(False)
00587             self.apm.data_thread_spawner_lock2.release()
00588 
    ##
    # One monitoring tick (executed on a DataThread).  Reads every
    # perception channel, writes the newest value into each phase-shifted
    # ring-buffer column, then runs the detection or classification step
    # for this tick and accumulates timing diagnostics.
    def _monitor_data_collector(self):
        st_time = rospy.Time.now().to_sec()
        #########################################
        if rospy.is_shutdown():
            self._trigger_collision()
            self._finish_collision("rospy shutdown")
        # NOTE(review): add_data is never used below — confirm dead code.
        add_data = []
        for k in self.perceptions:
            # Latest values for this channel (timestamp discarded).
            p_vals = self.perceptions[k]()[1]
            for i, v in enumerate(p_vals):
                # populate impact collision buffers
                # Column b, row (b - cur_iter) % filter_len receives the
                # sample, so each column holds the window at a different
                # phase offset; presumably row (cur_iter % filter_len) is
                # then a chronologically-ordered window — confirm.
                for b in range(self.impact_fgc.filter_len - 1, -1, -1):
                    self.i_data_buffers[k][i][(
                        b - self.cur_iter) % self.impact_fgc.filter_len, b] = v
                    if self.debug:
                        if b == 0 and k == "joint_angles": # and ((i == 0) or (i == 1)):
                            print v,
                # populate collision type detection buffers
                for b in range(self.type_fgc.filter_len - 1, -1, -1):
                    self.t_data_buffers[k][i][(
                        b - self.cur_iter) % self.type_fgc.filter_len, b] = v

        if self.debug:
            print


        # Dynamic mode: detect first, classify once a collision is flagged.
        if self.is_dynamic:
            if not self.collision_detected:
                self._dynamic_collision_detection()
            else:
                self._dynamic_collision_classifying()
        else:
            self._static_collision_detection()
        self.cur_iter += 1
        #########################################
        # Accumulate per-tick processing time for diagnostics.
        end_time = rospy.Time.now().to_sec()
        self.t_sum += end_time - st_time
        self.t_num += 1.
00627 
    ##
    # Builds the impact feature vector from the current ring buffers and
    # asks the impact classifier whether a collision just occurred.
    # Does nothing until enough ticks have passed to fill one filter
    # window.  Feature order follows PERCEPTION_ORDER, then coordinate,
    # then degree (restricted by I_DEGREE_DICT), then sigma — this must
    # match the layout the classifier was trained on.
    def _dynamic_collision_detection(self):

        if self.cur_iter < self.impact_fgc.filter_len:
            return
        instance = np.zeros((self.i_instance_len, 1))
        i_place = 0
        for k in self.PERCEPTION_ORDER:
            for coord in range(len(self.i_data_buffers[k])):
                for deg in range(3):
                    # Only degrees enabled for this channel coordinate
                    # contribute features.
                    if deg not in self.I_DEGREE_DICT[k][coord]:
                        continue
                    for sigma_i, sigma_t in enumerate(self.i_sigma_list):
                        # Row (cur_iter % filter_len) is presumably the
                        # chronologically-aligned sample window for this
                        # buffer (see _monitor_data_collector) — confirm.
                        val = self.impact_fgc.convolve_pt(
                                self.i_data_buffers[k][coord][(
                                    self.cur_iter % self.impact_fgc.filter_len)], 
                                    deg, sigma_i)
                        instance[i_place, 0] = val
                        i_place += 1

        has_collided = self.impact_classifier.predict(instance)

        # Debug mode observes but never triggers a collision.
        if has_collided and not self.debug:
            if not self.collision_detected:
                self._trigger_collision()
00652 
00653 
00654     def _dynamic_collision_classifying(self):
00655 
00656         if self.cur_iter < self.type_fgc.filter_len:
00657             # TODO HANDLE THIS CASE
00658             return
00659         if ((self.cur_iter - self.coll_iter) > self.type_fgc.filter_len / 2):
00660             return
00661         instance = np.zeros((self.t_instance_len, 1))
00662         i_place = 0
00663         for k in self.PERCEPTION_ORDER:
00664             for coord in range(len(self.t_data_buffers[k])):
00665                 for deg in range(3):
00666                     if deg not in self.T_DEGREE_DICT[k][coord]:
00667                         continue
00668                     for sigma_i, sigma_t in enumerate(self.t_sigma_list):
00669                         val = self.type_fgc.convolve_pt(
00670                                 self.t_data_buffers[k][coord][(
00671                                     self.cur_iter % self.type_fgc.filter_len)], 
00672                                     deg, sigma_i)
00673                         instance[i_place, 0] = val
00674                         i_place += 1
00675 
00676         collision_class = self.coll_type_classifier.predict(instance)
00677 
00678         if collision_class == 1.:
00679             type = "Environmental collision"
00680         else:
00681             type = "Table collision"
00682         self._finish_collision(type)
00683 
00684 
00685     def _static_collision_detection(self):
00686         if self.cur_iter < self.impact_fgc.filter_len:
00687             return
00688         for k in self.PERCEPTION_ORDER:
00689             if k not in self.static_dict:
00690                 continue
00691             for coord in self.static_dict[k]:
00692                 if k == "gripper_pose" and coord >= 3:
00693                     # Quaternions are unreliable, this should be fixed
00694                     # at some point
00695                     if self.debug:
00696                         self.temp_dict[k][coord].append(0.)
00697                     continue
00698                 val = self.impact_fgc.convolve_pt(self.i_data_buffers[k][coord][self.cur_iter % self.impact_fgc.filter_len], 1, self.STATIC_SIGMA_INDEX)
00699                 if self.debug:
00700                     self.temp_dict[k][coord].append(val)
00701                 if (np.fabs(val) >= self.STATIC_DERIV_THRESH[k][coord] 
00702                     and not self.debug):
00703                     print "Collision:", val, "Thresh:", self.STATIC_DERIV_THRESH[k][coord]
00704                     self._trigger_collision()
00705                     self._finish_collision(k + " collision", coord)
00706                 # if k == "r_finger_pad_pressure" and random.randint(0,40) == 0:
00707                 #     print val
00708 
00709 
00710     def _trigger_collision(self):
00711         self.collision_detected = True
00712         self.coll_iter = self.cur_iter - self.impact_fgc.filter_len / 2
00713         if not self.callback is None:
00714             self.callback()
00715 
00716     def _finish_collision(self, type = "Unlabled", coord = 0):
00717         if not self.collision_finished:
00718             print '-' * 60
00719             print
00720             print '                       %s detected' % type
00721             print
00722             print '-' * 60
00723             log("%s reported" % type)
00724             self.collision_type = type
00725             self.collision_coord = coord
00726             if not self.callback is None:
00727                 self.callback()
00728             self.collision_finished = True
00729             self.end_collision_detection()
00730             
00731     ##
00732     # Stop capturing perception data.  Store output data in datasets list for
00733     # later statistics.
    def end_collision_detection(self):
        """Stop capturing perception data and tear down the data thread.

        Blocks until any in-progress collision handling completes (or rospy
        shuts down), then stops the data-thread spawner under the monitor
        lock. Safe to call when detection is already stopped (spawner None).
        """
        # wait for a detected collision to be fully classified/reported
        while (not self.collision_finished and self.collision_detected 
                and not rospy.is_shutdown()):
            rospy.sleep(0.01)
        with self.end_monitor_lock:
            if not self.data_thread_spawner is None:
                # timing diagnostics accumulated per monitor iteration
                print "Avg cvpt: %1.9f" % (self.t_sum / self.t_num)
                print "cvpt sum: %1.5f" % (self.t_sum)
                print self.t_num
                self.data_thread_spawner.stop()
                # NOTE(review): non-blocking acquire immediately followed by
                # release -- presumably intended to ensure these locks end up
                # released; release() on a lock this thread does not hold
                # raises for threading locks, so this likely relies on them
                # being RLocks already held by this thread. Confirm against
                # the spawner implementation.
                self.data_thread_spawner_lock1.acquire(False)
                self.data_thread_spawner_lock1.release()
                self.data_thread_spawner_lock2.acquire(False)
                self.data_thread_spawner_lock2.release()
                self.data_thread_spawner = None
            return
00750 
00751 def main(args):
00752 
00753     if args[1] == "static_test":
00754         log("Test 2")
00755         apm = ArmPerceptionMonitor(0, active = True,
00756                                    tf_listener = None)
00757         rospy.sleep(1.)
00758         apm.begin_collision_detection(dynamic_detection=False, static_collision_type="all")
00759         while not apm.collision_detected:
00760             rospy.sleep(0.01)
00761 
00762         log("Collision detected, exiting...")
00763 
00764         return 
00765 
00766     if args[1] == "test1":
00767         log("Test 1")
00768         apm = ArmPerceptionMonitor(0, active = True,
00769                                    tf_listener = None)
00770         apm.begin_collision_detection()
00771         rospy.sleep(8.)
00772         apm.end_collision_detection()
00773         return
00774 
00775     if args[1] == "test2":
00776         log("Test 2")
00777         tf_listener = tf.TransformListener()
00778         apm = ArmPerceptionMonitor(1, active=True,
00779                                    tf_listener=tf_listener)
00780         rospy.sleep(1.)
00781         apm.begin_collision_detection(dynamic_detection=False, static_collision_type="pressure",
00782                              debug=True)
00783         while not apm.collision_detected:
00784             rospy.sleep(0.01)
00785         # rospy.sleep(20.)
00786         apm.end_collision_detection()
00787         means = {}
00788         std_devs = {}
00789         for k in apm.temp_dict:
00790             means[k] = []
00791             std_devs[k] = []
00792             for c_i, coord in enumerate(apm.temp_dict[k]):
00793                 print "Perception %s, coord %d" % (k, c_i)
00794                 log("min: %1.3f, max: %1.3f, median: %1.3f, mean: %1.3f, std: %1.3f" % (np.min(coord), np.max(coord), np.median(coord), np.mean(np.fabs(coord)), np.std(coord)))
00795                 means[k].append(np.mean(coord))
00796                 std_devs[k].append(4 * np.std(coord))
00797         # print apm.temp_dict
00798         print means
00799         print std_devs
00800         return
00801 
if __name__ == '__main__':
    # Initialize the ROS node (anonymous so several monitor instances can
    # coexist) and propagate main()'s return value as the exit status.
    rospy.init_node(node_name, anonymous=True)
    sys.exit(main(sys.argv))


kelsey_sandbox
Author(s): kelsey
autogenerated on Wed Nov 27 2013 11:52:04