robot_motion.py
import util as ut
import math as mt
import numpy as np
from StringIO import StringIO
import transforms2d as t2d
import opencv.cv as cv
import types

class RobotMotion:
    """
        Generates samples from the robot motion model; has the methods used in
        the particle filter.
        Use either:
            variances - a motion_var object
        or:
            rotvar, transvar - variance tuples used to build a motion_var
    """
    def __init__(self, variances=None, rotvar=None, transvar=None):
        if variances is not None:
            self.motion_var = variances
        elif (rotvar is not None) and (transvar is not None):
            self.motion_var = motion_var(rotvar, transvar)

    def predict_partial(self, odometry):
        return get_odom_sample(self.motion_var, odometry)

    def predict(self, odometry, particle):
        f = get_odom_sample(self.motion_var, odometry)
        return f(particle)

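# A hypothetical usage sketch (this helper is not part of the original API):
# build the model from explicit variances, then perturb one particle with one
# differential odometry reading.
def _example_predict():
    model    = RobotMotion(motion_var(rotvar=(mt.radians(5.0), mt.radians(5.0)),
                                      transvar=(0.02, 0.03)))
    odometry = t2d.Pose2D(0.1, 0.0, mt.radians(2.0))   # moved ~10cm, turned ~2 degrees
    particle = t2d.Pose2D(1.0, 2.0, 0.0)
    return model.predict(odometry, particle)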

def make_set(cov, start_state, num_particles):
    """ Init a bunch of particles to be exactly at start state """
    return get_odom_samples(cov, start_state, t2d.Pose2D(0.0, 0.0, 0.0), num_particles)


def make_set_gauss(cov, start_state, num_particles):
    """ Initialize a gaussian distribution of particles around the start state """
    sx   = start_state.pos[0,0]
    sy   = start_state.pos[1,0]
    # Mean is [x, y, angle] of the start state; cov is the matching 3x3 covariance
    mean = np.array([sx, sy, start_state.angle])
    def gen_pose(idx):
        sample = np.random.multivariate_normal(mean, cov)
        return t2d.Pose2D(sample[0], sample[1], sample[2])
    return map(gen_pose, range(num_particles))

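# Usage sketch (values are illustrative only): either start every particle
# exactly at the start pose, or spread them with a 3x3 [x, y, angle]
# covariance as make_set_gauss expects.
def _example_particle_sets():
    start = t2d.Pose2D(0.0, 0.0, 0.0)
    exact = make_set(motion_var(), start, 100)
    gauss = make_set_gauss(np.diag([0.01, 0.01, mt.radians(3.0) ** 2]), start, 100)
    return exact, gauss
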
####################################################################
#      Functions for sampling over 2D robot poses
####################################################################
class move2d:
    """
        Store 2d movements as 3 components:
            initial rotation
            translation
            final rotation
    """
    def __init__(self, rot1, trans, rot2):
        self.rot1 = rot1
        self.trans = trans
        self.rot2 = rot2

    def __str__(self):
        s = StringIO()
        print >>s, "( rot1:", self.rot1, ", trans:",
        print >>s, self.trans, ", rot2:", self.rot2, ")"
        return s.getvalue()

class motion_var:
    """ variances used for motion generation """
    def __init__(self, rotvar=(mt.radians(5), mt.radians(5)), transvar=(0.02, 0.03)):
        """
            rotvar   - a tuple of 2 floats:
                       angular error (radians) introduced per rotation,
                       angular error (radians) introduced per meter traveled

            transvar - a tuple of 2 floats:
                       distance error introduced per rotation,
                       distance error introduced per meter traveled
        """
        self.rotvar   = rotvar
        self.transvar = transvar

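# Sketch with made-up numbers: a base that drifts about 3 degrees per rotation
# and 1 degree per meter driven, with 1cm of translation error per rotation
# and 2cm per meter.
def _example_motion_var():
    return motion_var(rotvar=(mt.radians(3.0), mt.radians(1.0)),
                      transvar=(0.01, 0.02))
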
def p2d_to_move2d(odom):
    """
        Decompose an odometry reading into three components:
            initial rotation
            translation
            final rotation
        odom - a differential reading between this and the last time step
    """
    if (odom.pos == np.matrix([0.0, 0.0]).T).all():
        orot1 = 0.0
    else:
        orot1 = ut.standard_rad(ut.ang_of_vec(odom.pos) - odom.angle)

    otran = np.linalg.norm(odom.pos)
    orot2 = odom.angle - orot1
    return move2d(orot1, otran, orot2)

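# Sketch: decompose one (illustrative) differential reading and print its
# rotate-translate-rotate components via move2d.__str__.
def _example_decompose():
    step = p2d_to_move2d(t2d.Pose2D(0.2, 0.1, mt.radians(10.0)))
    print step
    return step
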

def get_odom_samples(cov, s, motion, num_particles):
    """ Draw num_particles pose samples from the robot motion model, starting from state s """
    sampler = get_odom_sample(cov, motion)
    particles = []
    for i in xrange(num_particles):
        sample = sampler(s)
        particles.append(sample)
    return particles

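# Sketch: draw 200 perturbed poses for the same 0.5m forward step, e.g. to
# visualize how widely the motion model spreads the particles.
def _example_spread():
    return get_odom_samples(motion_var(), t2d.Pose2D(0.0, 0.0, 0.0),
                            t2d.Pose2D(0.5, 0.0, 0.0), 200)
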

def get_odom_sample(motion_variances, motion):
    """
        Get a pose sampler using the odometry motion model (from Probabilistic Robotics p. 136).
        Use this function to get sample poses; ignore the others.
        motion           - odometry in p2d format
        motion_variances - motion variances (a motion_var object)

        Returns a function that takes a state s (p2d from time t-1) and returns
        a new p2d: a perturbed version of s composed with motion.
    """
    u_move2d = p2d_to_move2d(motion)
    # Partially applied motion
    def get_odom_sample_partial(s):
        # Sample noise for each of the three motion components
        srot1  = sample_rot1 (u_move2d, motion_variances)
        strans = sample_trans(u_move2d, motion_variances)
        srot2  = sample_rot2 (u_move2d, motion_variances)

        rot1  = ut.standard_rad(u_move2d.rot1  - srot1)
        trans =                 u_move2d.trans - strans
        rot2  =                 u_move2d.rot2  - srot2

        #print mt.degrees(rot1), trans, mt.degrees(rot2)
        # Calculate the new pose
        sx = s.pos[0,0]
        sy = s.pos[1,0]
        x = sx + trans * mt.cos(s.angle + rot1)
        y = sy + trans * mt.sin(s.angle + rot1)
        total_rot = ut.standard_rad(s.angle + rot1 + rot2)
        return t2d.Pose2D(x, y, total_rot)
    return get_odom_sample_partial

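# For reference, the sampler above follows the odometry motion model cited in
# the docstring, with the noise standard deviations built from the motion_var
# weights:
#     rot1'  = rot1  - sample(a1*|rot1|/(2*pi) + a2*|trans|)
#     trans' = trans - sample(b1*|rot1 + rot2| + b2*|trans|)
#     rot2'  = rot2  - sample(a1*|rot2|/(2*pi) + a2*|trans|)
# where sample(s) draws from a zero-mean gaussian with standard deviation s,
# (a1, a2) = rotvar and (b1, b2) = transvar.  The perturbed components are
# then composed onto the previous state s = (x, y, theta):
#     x'     = x + trans' * cos(theta + rot1')
#     y'     = y + trans' * sin(theta + rot1')
#     theta' = theta + rot1' + rot2'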

def sample_rot1(odom, odom_cov):
    rotvar   = odom_cov.rotvar
    rotvar_0 = rotvar[0] / (np.pi * 2.0)
    scale    = (rotvar_0 * abs(odom.rot1)) + (rotvar[1] * abs(odom.trans))

    if scale == 0.0:
        return 0.0
    else:
        return np.random.normal(scale=scale)


def sample_trans(odom, odom_cov):
    transvar   = odom_cov.transvar
    rot_comp   = transvar[0] * abs(odom.rot1 + odom.rot2)
    trans_comp = transvar[1] * abs(odom.trans)
    scale      = rot_comp + trans_comp
    if scale == 0.0:
        return 0.0
    else:
        return np.random.normal(scale=scale)


def sample_rot2(odom, odom_cov):
    rotvar   = odom_cov.rotvar
    rotvar_0 = rotvar[0] / (np.pi * 2.0)
    scale    = (rotvar_0 * abs(odom.rot2)) + (rotvar[1] * abs(odom.trans))
    if scale == 0.0:
        return 0.0
    else:
        return np.random.normal(scale=scale)


def draw_weighted_Pose2D(display, max_weight, particles):
    for p in particles:
        if type(p) is types.TupleType:
            part, weight = p
            rpos = part.pos
        else:
            part = p
            rpos = p.pos

        x = mt.cos(part.angle) * .07
        y = mt.sin(part.angle) * .07

        dir  = rpos.copy()
        dir[0,0] = dir[0,0] + x
        dir[1,0] = dir[1,0] + y

        pos  = display.to_screen(rpos)
        dirp = display.to_screen(dir)

        if type(p) is types.TupleType:
            color = round(255.0 * (weight / max_weight))
            cv.cvCircle(display.buffer, cv.cvPoint(int(pos[0,0]), int(pos[1,0])),
                        2, cv.cvScalar(255, 255 - color, 255), cv.CV_FILLED, cv.CV_AA)
            cv.cvCircle(display.buffer, cv.cvPoint(int(pos[0,0]), int(pos[1,0])),
                        2, cv.cvScalar(200, 200, 200), 8, cv.CV_AA)
        else:
            cv.cvCircle(display.buffer, cv.cvPoint(int(pos[0,0]), int(pos[1,0])),
                        2, cv.cvScalar(150, 150, 150), cv.CV_FILLED, cv.CV_AA)

        cv.cvLine(display.buffer, cv.cvPoint(int(pos[0,0]), int(pos[1,0])),
                                  cv.cvPoint(int(dirp[0,0]), int(dirp[1,0])),
                                  cv.cvScalar(100, 200, 100), 1, cv.CV_AA, 0)

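# Usage sketch (assumes a display object exposing .buffer and .to_screen as
# used above; particles, weights, and disp are hypothetical here):
#
#   weighted = zip(particles, weights)
#   draw_weighted_Pose2D(disp, max(weights), weighted)
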
#class odometry2d:
#    """ an odometry reading """
#    def init(self, rot, trans):
#        self.rot = rot
#        self.trans = trans
#  type params = RobotSampler.cov
#  and state = Pose2.t
#  and control = Pose2.t
#
#  let predict odom_cov u (* s *)=
#    let partial = RobotSampler.get_odom_sample u in
#      (fun s -> partial s odom_cov)
#
# Keep it simple for now and limit to 2D motion
#type state = Pose2.t
#type error_wts   = float*float
#type cov   = {rot1w: error_wts;
#              transw: error_wts;
#              rot2w: error_wts}

# Example covariance for a typical situation
#let image_motion_cov = {rot1w=(0.995,0.005); transw=(0.995,0.005); rot2w=(0.995,0.005)}
#let make_cov rot1 trans rot2 = {rot1w=rot1; transw=trans; rot2w=rot2}


if __name__ == "__main__":
    import nodes as nd
    import detection_appearance as da

    rotvar   = (0.8, 0.2)
    transvar = (0.1, 0.9)
    motion_model = RobotMotion(motion_var(rotvar=rotvar, transvar=transvar))

    cov = np.eye(3)
    app_model    = da.DetectionAppearance(cov)
    disp = nd.RobotDisp()

