00001 import numpy as np
00002 from scipy import interpolate
00003
00004
00005
00006
00007
def filter(alist, indices):
    """Return the elements of alist selected by the given index sequence.

    NOTE: shadows the builtin filter(); name kept for backward compatibility.
    """
    return [alist[idx] for idx in indices]
00013
00014
00015
00016
00017
00018
00019
def equalize_times(list_of_time_arrays):
    """Shift a collection of 2-D time arrays onto a common zero origin.

    Each array is assumed to hold times in its first row, earliest at
    [0, 0] and latest at [0, -1].  Every array is shifted by the global
    minimum start time.

    Returns (shifted_arrays, min_start, max_end).
    """
    starts = [times[0, 0] for times in list_of_time_arrays]
    ends = [times[0, -1] for times in list_of_time_arrays]

    min_start = np.min(starts)
    max_end = np.max(ends)

    # Subtracting the scalar broadcasts over each whole array.
    shifted = [times - min_start for times in list_of_time_arrays]
    return shifted, min_start, max_end
00039
00040
00041
00042
00043
def gradient(t, x):
    """Central-difference derivative of x with respect to t along axis 1.

    t holds times in its first row; x holds one signal per row.  The
    first and last columns are duplicated from their neighbors so the
    result has the same number of columns as x.
    """
    dx = x[:, 2:] - x[:, :-2]
    dt = t[0, 2:] - t[0, :-2]
    dx_dt = np.multiply(dx, 1 / dt)

    # Pad both edges by replicating the nearest computed column.
    return np.column_stack((dx_dt[:, 0], dx_dt, dx_dt[:, -1]))
00053
00054
00055
00056
00057
00058
00059
def interpolate_1d(x, y, xquery):
    """Linearly interpolate y(x) at the points xquery.

    Inputs may be numpy matrices (as the original .A1 usage implied) or
    any array-like; they are flattened to 1-D first.  Queries outside
    the range of x are clamped: a synthetic point just beyond the data
    carrying the edge y value is appended, so out-of-range queries get
    the nearest endpoint's value.

    Raises whatever interpolate.interp1d raises on invalid data.  (The
    original wrapped everything in a try/except that called pdb — never
    imported, so it raised NameError — printed the error, and returned
    None; that debugging scaffolding is removed so errors propagate.)
    """
    x = np.asarray(x).ravel()
    y = np.asarray(y).ravel()
    xquery = np.asarray(xquery).ravel()

    minx = np.min(x)
    maxx = np.max(x)
    minx_query = np.min(xquery)
    maxx_query = np.max(xquery)

    # Extend below the data range so interp1d never sees an
    # out-of-bounds query; the edge value is held constant.
    if minx_query <= minx:
        x = np.concatenate((np.array([minx_query - .01]), x))
        y = np.concatenate((np.array([y[0]]), y))

    # Same clamping above the data range.
    if maxx <= maxx_query:
        x = np.concatenate((x, np.array([maxx_query + .01])))
        y = np.concatenate((y, np.array([y[-1]])))

    f = interpolate.interp1d(x, y)
    return f(xquery)
00084
00085
00086
def histogram_get_bin_numb(n, min_index, bin_size, nbins):
    """Map the value n to a bin index for a histogram starting at min_index.

    A value landing exactly on the top edge (index == nbins) is folded
    into the last bin.  Values outside the range may yield negative
    indices or indices > nbins; callers are expected to filter those.
    """
    idx = int(np.floor((n - min_index) / bin_size))
    if idx == nbins:
        idx -= 1
    return idx
00092
00093
00094
00095
00096
00097
00098
00099
00100
def histogram(index_list_list, elements_list_list, bin_size, min_index=None, max_index=None):
    """Bin elements from several trials into fixed-width bins by index value.

    index_list_list: per-trial arrays of index values (e.g. times), used
        to choose the bin for each element.
    elements_list_list: per-signal lists of per-trial element lists;
        zip(*elements_list_list) regroups them into per-trial slices.
        NOTE(review): assumes trial ordering matches index_list_list —
        verify against callers.
    bin_size: bin width, in index units.
    min_index, max_index: histogram range; derived from the data when None.

    Returns (bins, edges): bins[k] is a list of per-signal element groups
    whose index fell into bin k; edges are the left bin boundaries from
    np.arange(min_index, max_index, bin_size).
    """
    if min_index is None:
        min_index = np.min(np.concatenate(index_list_list))
    if max_index is None:
        max_index = np.max(np.concatenate(index_list_list))

    index_range = (max_index - min_index)
    nbins = int(np.ceil(index_range / bin_size))
    bins = []
    for i in range(nbins):
        bins.append([])

    # element_list_slice pairs up trial number trial_number's element
    # lists across all signals.
    for trial_number, element_list_slice in enumerate(zip(*elements_list_list)):

        for i in range(len(element_list_slice[0])):
            bin_numb = histogram_get_bin_numb(index_list_list[trial_number][i], min_index, bin_size, nbins)
            elements = [el_list[i] for el_list in element_list_slice]
            # Skip out-of-range values; a value at the top edge is already
            # folded into the last bin by histogram_get_bin_numb, so
            # bin_numb == nbins cannot reach the append below.
            if bin_numb < 0 or bin_numb > nbins:
                continue
            bins[bin_numb].append(elements)

    return bins, np.arange(min_index, max_index, bin_size)
00126
00127
00128
00129
00130
00131
00132
00133
00134
00135
00136
00137
00138
00139
00140
00141
00142
00143
00144
00145
00146
00147
00148
00149
00150
00151
00152
00153
00154
00155
def signal_smooth(x, window_len=11, window='hamming'):
    """Smooth a 1-D signal by convolving it with a scaled window.

    The signal is padded at both ends with reflected copies of itself so
    the boundary transient is reduced; the padding is trimmed afterwards
    so the output has the same length as the input.

    x: 1-D numpy array.
    window_len: size of the smoothing window; if < 3 the input is
        returned unchanged.
    window: 'flat' (moving average), 'hanning', 'hamming', 'bartlett',
        or 'blackman'.

    Raises ValueError for non-1-D input, input shorter than the window,
    or an unknown window name.  (Fixes: Python-2-only raise syntax,
    eval() replaced by getattr, typo in the window error message.)
    """
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")

    if x.size < window_len:
        raise ValueError("Input vector needs to be bigger than window size.")

    if window_len < 3:
        return x

    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")

    # Pad with reflected segments from each end of the signal.
    s = np.r_[x[window_len:1:-1], x, x[-1:-window_len:-1]]

    if window == 'flat':  # moving average
        w = np.ones(window_len, 'd')
    else:
        # Look the window function up on numpy by name instead of eval().
        w = getattr(np, window)(window_len)

    y = np.convolve(w / w.sum(), s, mode='same')
    # Trim the padding so the result aligns with the original samples.
    return y[window_len - 1:-window_len + 1]
00183
00184
00185
00186
00187
00188
00189
00190
def signal_variance(x, y, window_len=10):
    """Windowed mean squared deviation of x around the reference curve y.

    For each position i, averages (x[j] - y[i])**2 over a window of
    window_len samples centered on i (clipped at the ends).  The last
    value is duplicated, so the returned list has len(x) + 1 entries —
    NOTE(review): that extra element looks intentional (callers may pair
    it with a one-longer axis) but verify against call sites.

    Raises ValueError when x and y differ in length.  (Fixes:
    Python-2-only raise syntax; window_len/2 becomes a float under
    Python 3 and breaks slicing, so use floor division.)
    """
    if len(x) != len(y):
        raise ValueError("Must have same length")

    variances = []  # renamed from 'vars' to avoid shadowing the builtin
    half = window_len // 2
    for i in range(len(x)):
        a = max(i - half, 0)
        b = min(i + half, len(x))
        total = 0.
        for xval in x[a:b]:
            total += (xval - y[i]) ** 2
        variances.append(total / (b - a))
    # Duplicate the final value (see note above).
    variances.append(variances[-1])
    return variances
00209
00210
00211
00212
00213
00214
00215
00216
00217
def signal_list_variance(x_list, means, window_len=10, num_samples=30, resample=1):
    """Estimate the spread of several signals around a shared mean curve.

    For every resample-th position i in means, draws up to num_samples
    values (num_samples // len(x_list) per signal, by strided slicing)
    from a window_len-wide window around i, accumulates their squared
    deviation from means[i], and records sqrt(sum) / n.

    NOTE(review): the statistic is sqrt of the squared-error sum divided
    by the sample count — not a textbook variance; preserved as-is.
    NOTE(review): if no sample is drawn at some i (all signals shorter
    than cura, or the stride rounds to 0), n stays 0 and this raises
    ZeroDivisionError, exactly like the original — confirm callers
    always supply long-enough signals.

    Fixes: the three divisions below used Python-2 integer division;
    under Python 3 they produce floats that break range()/slice steps,
    so they now use // explicitly (same results for int inputs).
    """
    spreads = []
    num_samples_in_mean = num_samples // len(x_list)
    for i in range(0, len(means), resample):
        cursum = 0.
        cura = max(i - window_len // 2, 0)
        curb = min(i + window_len // 2, len(means))
        # Stride that yields ~num_samples_in_mean samples per signal.
        step = (curb - cura) // num_samples_in_mean
        n = 0
        for x in x_list:
            if cura >= len(x):
                continue  # this signal ended before the window starts

            ccurb = curb
            cstep = step
            if ccurb >= len(x):
                # Window runs past this signal: clip and restride.
                ccurb = len(x)
                cstep = (ccurb - cura) // num_samples_in_mean
            if cstep > 0:
                for xval in x[cura:ccurb:cstep]:
                    cursum += (xval - means[i]) ** 2
                    n += 1
        spreads.append(np.sqrt(cursum) / n)
    return np.array(spreads)