test08.py
import csv
import roslib; roslib.load_manifest('hai_sandbox')
from cv_bridge.cv_bridge import CvBridge, CvBridgeError
import rospy
import cv
import sys

import hrl_lib.rutils as ru
import hrl_lib.tf_utils as tfu
import tf.transformations as tr
import tf
import hrl_camera.ros_camera as cam
from sensor_msgs.msg import CameraInfo
import numpy as np
import hai_sandbox.features as fea
import os.path as pt
import hrl_lib.util as ut
import scipy.cluster.vq as vq

def csv_bag_names(fname):
    # Each row of the CSV lists one bag path; yield the rows one at a time.
    with open(fname) as csv_file:
        for bag_name in csv.reader(csv_file):
            yield bag_name

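# Usage sketch for csv_bag_names (illustrative only; 'bags.csv' is a
# hypothetical CSV listing one bag path per row). csv.reader returns each row
# as a list of strings, so the path itself is the first element.
def example_list_bag_names(fname='bags.csv'):
    for row in csv_bag_names(fname):
        print(row[0])
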
def gen_pkl_name(path_complete, ext):
    # Replace the bag file's extension with ext, keeping the same directory.
    path_bag, name_bag = pt.split(path_complete)
    root_name, _ = pt.splitext(name_bag)
    surf_path_complete = pt.join(path_bag, root_name + ext)
    return surf_path_complete

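# Quick check of gen_pkl_name behavior (sketch; the paths are hypothetical).
def example_gen_pkl_name():
    assert gen_pkl_name('/data/run1/trial.bag', '.surf_pkl') == '/data/run1/trial.surf_pkl'
    assert gen_pkl_name('/data/run1/trial.surf_pkl', '.surf_sm_pkl') == '/data/run1/trial.surf_sm_pkl'
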
def features_mat(features_list):
    # Stack every frame's descriptors as columns of one large feature matrix.
    features = []
    for msg_t, surf in features_list:
        features.append(np.matrix(surf[1]).T)
    return np.column_stack(tuple(features))

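# Shape sketch for features_mat (assumes each surf entry is a
# (keypoints, descriptors) pair, as the comments at the bottom of this file
# suggest). With 64-D SURF descriptors the result has one descriptor per column.
def example_features_mat():
    fake_list = [(0.0, (None, np.random.rand(10, 64).tolist()))]  # hypothetical data
    fmat = features_mat(fake_list)
    print(fmat.shape)  # (64, 10)
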
def features_mat_compress(fmat, k):
    # Summarize the columns of fmat with k k-means centers, seeded with k
    # randomly chosen columns. scipy's kmeans expects row vectors, hence the
    # transposes; kresults[0] is the resulting codebook.
    #k = max(int(round(fmat.shape[1] * percent)), 1)
    rospy.loginfo('compressing to %d centers' % k)
    center_indices = np.random.permutation(fmat.shape[1])[0:k]
    initial_centers = fmat[:, center_indices]
    kresults = vq.kmeans(np.array(fmat.T), np.array(initial_centers.T))
    return np.matrix(kresults[0]).T

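# Self-contained sketch of the compression on random data (not part of the
# original pipeline): 500 random 64-D descriptors reduced to 20 centers.
def example_features_mat_compress():
    fmat = np.matrix(np.random.rand(64, 500))
    centers = features_mat_compress(fmat, 20)
    print(centers.shape)  # (64, 20), or fewer columns if kmeans drops a cluster
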
def compress_pkl(surf_path_complete):
    features_list = ut.load_pickle(surf_path_complete)

    rospy.loginfo('making matrix')
    fmat = features_mat(features_list)

    rospy.loginfo('compressing')
    reduced_features = features_mat_compress(fmat, 1000)
    # Derive the output name from the function argument instead of relying on
    # the global path_complete defined in __main__.
    small_pickle_fname = gen_pkl_name(surf_path_complete, '.surf_sm_pkl')
    ut.save_pickle(reduced_features, small_pickle_fname)
    rospy.loginfo('saved to %s' % small_pickle_fname)

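# End-to-end sketch (hypothetical path): if /data/run1/trial.surf_pkl exists,
# this writes the 1000-center codebook to /data/run1/trial.surf_sm_pkl.
#   compress_pkl('/data/run1/trial.surf_pkl')
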
if __name__ == '__main__':
    ##
    # compress large pkls
    path_complete = sys.argv[1]
    #for path_complete in csv_bag_names(fname):
    surf_path_complete = gen_pkl_name(path_complete, ext=".surf_pkl")
    rospy.loginfo('loading %s' % surf_path_complete)
    compress_pkl(surf_path_complete)
    rospy.loginfo('done')
    exit()
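    # Invocation sketch (the bag path is hypothetical); the matching
    # trial.surf_pkl must already exist next to the bag:
    #   python test08.py /data/run1/trial.bag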
    # Earlier exploratory/scratch code, kept commented out below:
    #forearm_cam_l = '/l_forearm_cam/image_rect_color'
    #ws_l = '/wide_stereo/left/image_rect_color'
    #ws_r = '/wide_stereo/right/image_rect_color'
    #features_list = find_image_features(sys.argv[1], forearm_cam_l)
    #Find all features in all videos that we have
    #list_of_features_list = []

        #list_of_features_list.append(reduced_features)
        #break
    #ut.save_pickle(list_of_features_list, 'reduced_all_features.pkl')
    #pdb.set_trace()
    #Put all features into one large matrix, cluster...
    #for message_t, surf in list_of_features_list:
    #    keypoints, descriptors = surf

    #Kmeans wants row vectors
    #What happens if we have many duplicated points?

#whitened = vq.whiten(features)
#book = np.array((whitened[0],whitened[2]))
#vq.kmeans(whitened, book)

