UKFOld.H
/*! @file
  @author Lior Elazary
  @copyright GNU Public License (GPL v3)
  @section License
  @verbatim
  // ////////////////////////////////////////////////////////////////////////
  // The iLab Neuromorphic Robotics Toolkit (NRT) //
  // Copyright 2010-2012 by the University of Southern California (USC) //
  // and the iLab at USC. //
  // //
  // iLab - University of Southern California //
  // Hedco Neuroscience Building, Room HNB-10 //
  // Los Angeles, CA 90089-2520 - USA //
  // //
  // See http://ilab.usc.edu for information about this project. //
  // ////////////////////////////////////////////////////////////////////////
  // This file is part of The iLab Neuromorphic Robotics Toolkit. //
  // //
  // The iLab Neuromorphic Robotics Toolkit is free software: you can //
  // redistribute it and/or modify it under the terms of the GNU General //
  // Public License as published by the Free Software Foundation, either //
  // version 3 of the License, or (at your option) any later version. //
  // //
  // The iLab Neuromorphic Robotics Toolkit is distributed in the hope //
  // that it will be useful, but WITHOUT ANY WARRANTY; without even the //
  // implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR //
  // PURPOSE. See the GNU General Public License for more details. //
  // //
  // You should have received a copy of the GNU General Public License //
  // along with The iLab Neuromorphic Robotics Toolkit. If not, see //
  // <http://www.gnu.org/licenses/>. //
  // ////////////////////////////////////////////////////////////////////////
  @endverbatim */


#ifndef INCLUDE_NRT_PROBABILISTIC_BAYESFILTERS_UKFOLD_H
#define INCLUDE_NRT_PROBABILISTIC_BAYESFILTERS_UKFOLD_H

#include <Eigen/Dense>
#include <iostream>
#include <vector> // for the sigma-point containers
#include <cmath>  // for sqrt(), pow(), exp() and M_PI

// Include some definitions of no interest to the user
#include <nrt/Probabilistic/Types/GaussianPDFOld.H>
#include <Eigen/Cholesky>

namespace nrt
{
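  //! An Unscented Kalman Filter (UKF)
  /*! Maintains a Gaussian state estimate (StatePDF) and a predicted observation
      (ObsPDF). Both PDF types are assumed to expose itsMu/itsSigma members, a
      size() method and an ElemType wrapper, as used by GaussianPDFOld.H. The
      process and observation models are supplied by deriving from
      UKFOld::StateFunctions; an example call sequence is sketched in the
      comment at the end of this file. */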
  template <typename StatePDF, typename ObsPDF>
  class UKFOld
  {

    public:

      //! Base class for the user-supplied process and observation models
      class StateFunctions
      {
        public:
          virtual void PredictStateFunc(const typename StatePDF::ElemType&) = 0;
          virtual void PredictObsFunc(const typename StatePDF::ElemType&, const typename ObsPDF::ElemType&) = 0;
      };
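
      // A minimal sketch (not part of the original interface) of a user-defined
      // model for some concrete StatePDF and ObsPDF types. MyModel and the
      // in-place updates on the wrapped vectors are assumptions for illustration
      // only; the exact semantics of StatePDF::ElemType and ObsPDF::ElemType
      // come from GaussianPDFOld.H:
      //
      //   class MyModel : public UKFOld<StatePDF, ObsPDF>::StateFunctions
      //   {
      //     public:
      //       // Propagate one sigma-point state through the process model
      //       void PredictStateFunc(const StatePDF::ElemType & state) override
      //       { /* apply the motion model to the wrapped state vector */ }
      //
      //       // Map one sigma-point state to its expected observation
      //       void PredictObsFunc(const StatePDF::ElemType & state,
      //                           const ObsPDF::ElemType & obs) override
      //       { /* fill the wrapped observation vector from the state */ }
      //   };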

      UKFOld(StatePDF & state, StateFunctions & stateFunctions,
             double k = 0,        // secondary scaling parameter
             double alpha = 1e-3, // controls the spread of the sigma points around the mean
             double beta = 2      // used to incorporate prior knowledge of the distribution
            ) :
        itsState(state),
        itsObservation(ObsPDF()),
        itsStateFunctions(stateFunctions),
        itsK(k),
        itsAlpha(alpha),
        itsBeta(beta),
        itsUnstable(false) // assume we start with a stable covariance
      {
        itsNumStates = itsState.size();
        itsNumObservations = itsObservation.size();
        itsLambda = itsAlpha*itsAlpha*(itsNumStates+itsK) - itsNumStates;
        itsGamma = sqrt(itsNumStates + itsLambda);

        // Calculate the weights for the mean and covariance ahead of time
        // Weights for the central (mean) sigma point
        itsMuWeight = itsLambda/(itsNumStates + itsLambda);
        itsSigmaWeight = (itsLambda/(itsNumStates + itsLambda)) +
                         (1 - (itsAlpha*itsAlpha) + itsBeta);
        // Weight for the rest of the sigma points
        itsDefaultWeight = 1/(2*(itsNumStates + itsLambda));
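
        // For reference, these are the standard unscented-transform parameters
        // (with n = itsNumStates):
        //   lambda = alpha^2 * (n + k) - n,   gamma = sqrt(n + lambda)
        //   W0_mu  = lambda / (n + lambda)
        //   W0_cov = lambda / (n + lambda) + (1 - alpha^2 + beta)
        //   Wi     = 1 / (2 * (n + lambda)),  i = 1..2n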

        // Cache the constant part of the Gaussian likelihood normalizer,
        // 1/sqrt((2*pi)^m), where m is the observation dimension
        itsGaussNormalizer = 1.0/sqrt(pow(2.0*M_PI, itsNumObservations));

        // Initialize the observation based on the given state
        decltype(itsState.itsMu) currState = itsState.itsMu;
        decltype(itsObservation.itsMu) obs;
        itsStateFunctions.PredictObsFunc(typename StatePDF::ElemType(currState), typename ObsPDF::ElemType(obs));
        itsObservation.itsMu = obs;
      }

      virtual ~UKFOld()
      { }

      //! Get the state (const)
      StatePDF const & getState() const { return itsState; }

      //! Get the state
      const StatePDF & getState() { return itsState; }

      //! Get the observation (const)
      ObsPDF const & getObservation() const { return itsObservation; }

      //! Get the observation
      const ObsPDF & getObservation() { return itsObservation; }

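      //! Predict the state through the process model, adding the given process noise
      /*! Propagates the mean and the 2n sigma points through PredictStateFunc(),
          recombines them into a new state mean and covariance using the
          unscented-transform weights, and then adds stateNoise. */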
      void predictState(StatePDF & stateNoise)
      {
        if (itsUnstable) return;

        // Get the locations to sample from
        Eigen::MatrixXd sigmaLocations = getSigmaLocations(itsGamma);

        // Predict the new state by simulating the process at the sigma points
        // and then computing a new mean by a weighted sum

        // Simulate the mean
        decltype(itsState.itsMu) stateData = itsState.itsMu; // copy the state over

        itsNewStates.clear();
        itsStateFunctions.PredictStateFunc(typename StatePDF::ElemType(stateData)); // get the predicted state
        itsNewStates.push_back(stateData);

        // Simulate the + sigma locations
        for (int i = 0; i < itsNumStates; i++)
        {
          decltype(itsState.itsMu) stateData =
            itsState.itsMu + sigmaLocations.block(0, i, itsNumStates, 1);

          itsStateFunctions.PredictStateFunc(typename StatePDF::ElemType(stateData)); // get the predicted state
          itsNewStates.push_back(stateData);
        }

        // Simulate the - sigma locations
        for (int i = 0; i < itsNumStates; i++)
        {
          decltype(itsState.itsMu) stateData =
            itsState.itsMu - sigmaLocations.block(0, i, itsNumStates, 1);

          itsStateFunctions.PredictStateFunc(typename StatePDF::ElemType(stateData)); // get the predicted state
          itsNewStates.push_back(stateData);
        }

        // Compute the predicted mean state from the new states at the sigma point locations
        itsState.itsMu = itsNewStates[0] * itsMuWeight; // set the mean
        for (size_t i = 1; i < itsNewStates.size(); i++)
          itsState.itsMu += itsNewStates[i] * itsDefaultWeight;

        // Compute the predicted covariance in a similar manner
        Eigen::MatrixXd diff = itsNewStates[0] - itsState.itsMu;
        Eigen::MatrixXd variance = diff * diff.transpose();

        itsState.itsSigma = variance * itsSigmaWeight;
        for (size_t i = 1; i < itsNewStates.size(); i++)
        {
          Eigen::MatrixXd diff = itsNewStates[i] - itsState.itsMu;
          Eigen::MatrixXd variance = diff * diff.transpose();

          itsState.itsSigma += variance * itsDefaultWeight;
        }

        // Add the process noise
        itsState.itsMu += stateNoise.itsMu;
        itsState.itsSigma += stateNoise.itsSigma;
      }

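      //! Re-sample the sigma points around the current state estimate
      /*! Unlike predictState(), the sigma points are stored without being passed
          through the process model; this refreshes itsNewStates so that
          predictObservation() and update() work from the current state. */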
      void updateNewStates()
      {
        if (itsUnstable) return;

        // Get the locations to sample from
        Eigen::MatrixXd sigmaLocations = getSigmaLocations(itsGamma);

        // Store the mean and the sigma-point states (no process simulation here)

        // The mean
        decltype(itsState.itsMu) stateData = itsState.itsMu; // copy the state over
        itsNewStates.clear();
        itsNewStates.push_back(stateData);

        // The + sigma locations
        for (int i = 0; i < itsNumStates; i++)
        {
          decltype(itsState.itsMu) stateData =
            itsState.itsMu + sigmaLocations.block(0, i, itsNumStates, 1);
          itsNewStates.push_back(stateData);
        }

        // The - sigma locations
        for (int i = 0; i < itsNumStates; i++)
        {
          decltype(itsState.itsMu) stateData =
            itsState.itsMu - sigmaLocations.block(0, i, itsNumStates, 1);
          itsNewStates.push_back(stateData);
        }
      }

      //! Predict the observation (without the measurement noise)
      void predictObservation()
      {
        if (itsUnstable) return;

        // Predict the observation by simulating the measurement process at the
        // sigma points and then computing a new mean by a weighted sum

        // Compute the observation at each state location
        itsNewZ.clear();
        for (decltype(itsState.itsMu) & state : itsNewStates)
        {
          decltype(itsObservation.itsMu) obs;

          itsStateFunctions.PredictObsFunc(typename StatePDF::ElemType(state), typename ObsPDF::ElemType(obs));
          itsNewZ.push_back(obs);
        }

        // Predict the mean observation
        itsObservation.itsMu = itsNewZ[0] * itsMuWeight; // set the mean
        for (size_t i = 1; i < itsNewZ.size(); i++)
          itsObservation.itsMu += itsNewZ[i] * itsDefaultWeight;

        // Predict the measurement covariance
        Eigen::MatrixXd diff = itsNewZ[0] - itsObservation.itsMu;
        Eigen::MatrixXd variance = diff * diff.transpose();

        itsObservation.itsSigma = variance * itsSigmaWeight;
        for (size_t i = 1; i < itsNewZ.size(); i++)
        {
          Eigen::MatrixXd diff = itsNewZ[i] - itsObservation.itsMu;
          Eigen::MatrixXd variance = diff * diff.transpose();

          itsObservation.itsSigma += variance * itsDefaultWeight;
        }
      }

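      //! Likelihood of a measurement given the current predicted observation
      /*! Returns exp(-0.5 * nu' * S^-1 * nu) / sqrt((2*pi)^m * det(S)), where nu is
          the innovation z.itsMu - itsObservation.itsMu and S is the predicted
          observation covariance plus the measurement noise z.itsSigma;
          predictObservation() should be called first. */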
      double getLikelihood(const ObsPDF & z)
      {
        // Add the measurement noise
        Eigen::MatrixXd predictedSigma = itsObservation.itsSigma + z.itsSigma;

        double normalizer = itsGaussNormalizer * (1/sqrt(predictedSigma.determinant()));

        // Calculate the innovation
        Eigen::MatrixXd innov = z.itsMu - itsObservation.itsMu;

        try
        {
          Eigen::MatrixXd probMat = innov.transpose() * predictedSigma.inverse() * innov;
          return normalizer*exp(-0.5*probMat(0,0));
        } catch (...)
        {
          itsUnstable = true;
        }

        return 0;
      }

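      //! Fuse a measurement into the state estimate
      /*! Computes the state/observation cross-covariance from the stored sigma
          points, forms the Kalman gain, corrects the state mean and covariance,
          and returns the likelihood of the measurement (0 if the filter has
          become unstable). */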
      double update(const ObsPDF & z)
      {
        if (itsUnstable) return 0;

        // Calculate the cross-covariance between the state and the observation
        Eigen::MatrixXd Pxy(itsNumStates, itsNumObservations);
        Eigen::MatrixXd stateDiff = itsNewStates[0] - itsState.itsMu;
        Eigen::MatrixXd obsDiff = itsNewZ[0] - itsObservation.itsMu;

        Eigen::MatrixXd variance = stateDiff * obsDiff.transpose();
        Pxy = variance * itsSigmaWeight;

        for (size_t i = 1; i < itsNewStates.size(); ++i)
        {
          Eigen::MatrixXd stateDiff = itsNewStates[i] - itsState.itsMu;
          Eigen::MatrixXd obsDiff = itsNewZ[i] - itsObservation.itsMu;

          Eigen::MatrixXd variance = stateDiff * obsDiff.transpose();
          Pxy += variance * itsDefaultWeight;
        }

        // Add the measurement noise
        Eigen::MatrixXd predictedSigma = itsObservation.itsSigma + z.itsSigma;

        // Calculate the innovation
        Eigen::MatrixXd innov = z.itsMu - itsObservation.itsMu;

        try
        {
          // Calculate the Kalman gain
          Eigen::MatrixXd K = Pxy * predictedSigma.inverse();

          // Update the state and covariance
          itsState.itsMu += K * innov;
          itsState.itsSigma -= K * Pxy.transpose();
        } catch (...)
        {
          itsUnstable = true;
        }

        // Return the probability of this measurement
        try
        {
          Eigen::MatrixXd probMat = innov.transpose() * predictedSigma.inverse() * innov;
          double normalizer = itsGaussNormalizer * (1/sqrt(predictedSigma.determinant()));

          return normalizer*exp(-0.5*probMat(0,0));
        } catch (...)
        {
          itsUnstable = true;
        }
        return 0;
      }

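      //! Compute the sigma-point offsets from the current state covariance
      /*! Returns gamma * L, where L is the lower-triangular Cholesky factor of
          itsState.itsSigma; the columns of this matrix are added to and subtracted
          from the state mean to form the 2n+1 sigma points. */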
      Eigen::MatrixXd getSigmaLocations(double const gamma)
      {
        // Use the Cholesky decomposition for stability instead of a matrix square root
        Eigen::MatrixXd A = itsState.itsSigma;
        Eigen::MatrixXd sChol = A.llt().matrixU();

        Eigen::MatrixXd sigmaLoc = sChol.transpose()*gamma;

        return sigmaLoc;
      }

    protected:
      StatePDF itsState;
      ObsPDF itsObservation;

      StateFunctions& itsStateFunctions;

      // Scaling parameters that determine how far the sigma points are spread from the mean
      double itsK;
      double itsAlpha;

      // Used to incorporate higher-order (prior) knowledge about the distribution
      double itsBeta;

      int itsNumStates;       // the number of states we have
      int itsNumObservations; // the number of observations

      std::vector<decltype(itsState.itsMu)> itsNewStates;  // the predicted next states at the sigma locations
      std::vector<decltype(itsObservation.itsMu)> itsNewZ; // the predicted observations at the sigma locations

    private:

      double itsLambda;
      double itsGamma;
      double itsMuWeight;
      double itsSigmaWeight;
      double itsDefaultWeight;
      double itsGaussNormalizer;
      bool itsUnstable; // set to true when the covariance becomes numerically unstable

  };
}
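
/* Example (sketch only, not part of the toolkit): a possible predict/update cycle,
   assuming concrete GaussianPDFOld-style StatePDF and ObsPDF types and a user model
   MyModel derived from UKFOld<StatePDF, ObsPDF>::StateFunctions as sketched above;
   the names state, processNoise, myModel and measurement are hypothetical:

     StatePDF state;                     // initial mean and covariance
     StatePDF processNoise;              // additive process noise
     MyModel myModel;
     nrt::UKFOld<StatePDF, ObsPDF> ukf(state, myModel);

     // Each filter iteration:
     ukf.predictState(processNoise);     // propagate through the process model
     ukf.updateNewStates();              // re-sample sigma points around the prediction
     ukf.predictObservation();           // predict the expected measurement
     double p = ukf.update(measurement); // fuse the measurement; p is its likelihood
*/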

#endif // INCLUDE_NRT_PROBABILISTIC_BAYESFILTERS_UKFOLD_H