// tests/test-UKF.C -- iLab Neuromorphic Robotics Toolkit (NRT) 0.1
// (Doxygen page-navigation text removed; this is the UKF demo/test program.)
/*! @file
@author Unknown
@copyright GNU Public License (GPL v3)
@section License
@verbatim
// ////////////////////////////////////////////////////////////////////////
// The iLab Neuromorphic Robotics Toolkit (NRT) //
// Copyright 2010-2012 by the University of Southern California (USC) //
// and the iLab at USC. //
// //
// iLab - University of Southern California //
// Hedco Neurociences Building, Room HNB-10 //
// Los Angeles, Ca 90089-2520 - USA //
// //
// See http://ilab.usc.edu for information about this project. //
// ////////////////////////////////////////////////////////////////////////
// This file is part of The iLab Neuromorphic Robotics Toolkit. //
// //
// The iLab Neuromorphic Robotics Toolkit is free software: you can //
// redistribute it and/or modify it under the terms of the GNU General //
// Public License as published by the Free Software Foundation, either //
// version 3 of the License, or (at your option) any later version. //
// //
// The iLab Neuromorphic Robotics Toolkit is distributed in the hope //
// that it will be useful, but WITHOUT ANY WARRANTY; without even the //
// implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR //
// PURPOSE. See the GNU General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with The iLab Neuromorphic Robotics Toolkit. If not, see //
// <http://www.gnu.org/licenses/>. //
// ////////////////////////////////////////////////////////////////////////
@endverbatim */
#include <random>
// ######################################################################
// Setup the world
// ######################################################################
// Size of the simulated world (and of the display image), in pixels.
nrt::Dims<int> worldDims(640,480);
// Process-noise standard deviations used when simulating the real vehicle
// (see the vehicle-motion simulator below): jitter applied to the
// translational and angular velocities at each step.
double translationalNoise = 0.09;
double angularNoise = 0.002;
// Measurement-noise standard deviations for the simulated radar sensor:
// range (in pixel units, since the world is an image) and bearing (radians,
// as produced by atan2 / consumed by cos and sin).
double sensingRangeNoise = 15;
double sensingAngleNoise = 0.01;
// Shared random number generator: seeded once from the OS entropy source,
// then used for every noise draw in the simulation.
std::random_device rd;
std::mt19937 randomgen(rd());
// ######################################################################
// Define the dimensions of the system
// ######################################################################
// Our vehicle's state has two field groups:
// - A position field group which keeps track of the (x,y) coordinates as well as the orientation of the vehicle
// - A velocity field group which keeps track of the translational and angular velocity of the vehicle
NRT_CREATE_STATE_FIELD_GROUP(Position, (x)(y)(theta));
NRT_CREATE_STATE_FIELD_GROUP(Velocity, (translational)(angular));
// We observe our vehicle with a range and angle
NRT_CREATE_STATE_FIELD_GROUP(PolarPosition, (range)(angle));
// NOTE(review): the definitions of VehicleState and VehicleObservation, which
// compose these field groups into the state-vector types used throughout this
// file, are not visible in this extract -- presumably lost in extraction.
// ######################################################################
// Define the filter's model
// ######################################################################
struct FilterModel
{
struct ControlType { };
predict(nrt::StateVector<VehicleState> const & state, ControlType const & control)
{
double x = state.access<Position::x>();
double y = state.access<Position::y>();
double a = state.access<Position::theta>();
double dt = state.access<Velocity::translational>();
double da = state.access<Velocity::angular>();
nrt::StateVector<VehicleState> predictedPosition;
predictedPosition.access<Position::x>() = x + cos(a) * dt;
predictedPosition.access<Position::y>() = y + sin(a) * dt;
predictedPosition.access<Position::theta>() = a + da;
predictedPosition.access<Velocity::translational>() = dt;
predictedPosition.access<Velocity::angular>() = da;
if(predictedPosition.access<Position::theta>() > M_PI)
predictedPosition.access<Position::theta>() -= 2*M_PI;
if(predictedPosition.access<Position::theta>() < -M_PI)
predictedPosition.access<Position::theta>() += 2*M_PI;
return predictedPosition;
}
observe(nrt::StateVector<VehicleState> const & state)
{
double x = state.access<Position::x>();
double y = state.access<Position::y>();
observation.access<PolarPosition::range>() = sqrt(pow(x - worldDims.width()/2.0, 2) + pow(y - worldDims.height()/2.0, 2));
observation.access<PolarPosition::angle>() = atan2(y - worldDims.height() / 2.0, x - worldDims.width() / 2.0);
return observation;
}
};
// Some helper functions...
// Draw the ground-truth vehicle: circle + heading tick, plus a faded trail.
void drawVehicle(nrt::Image<nrt::PixRGB<nrt::byte>> & image, nrt::StateVector<VehicleState> const & currentPosition, nrt::PixRGBA<nrt::byte> const & color,
int thickness);
// Draw the filter's estimate: mean pose, covariance ellipse, and a trail.
void drawPrediction(nrt::Image<nrt::PixRGB<nrt::byte>> & image, nrt::GaussianPDF<VehicleState> const & currentPosition, nrt::PixRGBA<nrt::byte> const & color,
int thickness);
// Draw a polar measurement as a cross plus a ray from the radar station.
void drawMeasurement(nrt::Image<nrt::PixRGB<nrt::byte>> & image, nrt::StateVector<VehicleObservation> const & observation);
// NOTE(review): forward declarations for moveVehicle() and takeMeasurement(),
// which main() calls before their definitions, are not visible in this
// extract -- presumably lost in extraction.
int main(int argc, const char **argv)
{
// Create a new image display
nrt::Manager mgr(argc, argv);
std::shared_ptr<nrt::ImageSink> display(new nrt::ImageSink);
mgr.addSubComponent(display);
mgr.launch();
worldImage = nrt::Image<nrt::PixRGB<nrt::byte>>(worldDims, nrt::ImageInitPolicy::Zeros);
// Setup the initial vehicle state
initialState.mean<Position::x>() = worldDims.width() / 4;
initialState.mean<Position::y>() = worldDims.height() / 4;
initialState.mean<Position::theta>() = 0;
initialState.mean<Velocity::translational>() = 5.0;
initialState.mean<Velocity::angular>() = 0.05;
initialState.covariance<>() = Eigen::Matrix<double, 5, 5>::Identity() * 0.00001;
// Create a state vector to hold the real vehicle state
nrt::StateVector<VehicleState> vehicleState(initialState.mean<>());
// Setup the movement covariance
nrt::GaussianPDF<VehicleState> movementCovariance;
movementCovariance.covariance<Velocity::translational>() = pow(translationalNoise, 2.0)*2;
movementCovariance.covariance<Velocity::angular>() = pow(angularNoise, 2.0)*2;
// Setup the sensor covariance
sensorCovariance.covariance<PolarPosition::range>() = pow(sensingRangeNoise, 2.0);
sensorCovariance.covariance<PolarPosition::angle>() = pow(sensingAngleNoise, 2.0);
// Create the filter
ukfTracker(initialState, movementCovariance.covariance<>(), sensorCovariance.covariance<>());
nrt::Point2D<int> center(worldDims.width()/2, worldDims.height()/2);
nrt::drawCircle(worldImage, nrt::Circle<int>(center, 5), nrt::PixRGB<nrt::byte>(64,64,64), 3);
nrt::drawText(worldImage, center, "Radar Station", nrt::PixRGBA<nrt::byte>(255, 255, 255, 128),
for(int timestep = 0; timestep < 500; ++timestep)
{
nrt::Image<nrt::PixRGB<nrt::byte>> displayImage = worldImage;
// Move the actual vehicle
vehicleState = moveVehicle(vehicleState);
if(timestep % 10 == 0)
{
// Take a (noisy) measurement of the position of the vehicle
observation = takeMeasurement(vehicleState);
// Update with a measurement
ukfTracker.update(observation);
}
// Predict
ukfTracker.predict();
nrt::GaussianPDF<VehicleState> filteredState = ukfTracker.state();
NRT_INFO("---------------\nStep " << timestep <<
"\nPos [" << vehicleState.access<>().transpose() << "]" <<
"\nPred [" << filteredState.mean<>().transpose() << "]");
// Draw the world
drawVehicle(displayImage, vehicleState, nrt::PixRGBA<nrt::byte>(255, 0, 0, 128), 4);
drawPrediction(displayImage, filteredState, nrt::PixRGBA<nrt::byte>(0, 255, 0, 128), 2);
drawMeasurement(displayImage, observation);
display->out(nrt::GenericImage(displayImage));
usleep(100000);
}
return 0;
}
// ######################################################################
{
double x = currentPosition.access<Position::x>();
double y = currentPosition.access<Position::y>();
double a = currentPosition.access<Position::theta>();
double dt = currentPosition.access<Velocity::translational>();
double da = currentPosition.access<Velocity::angular>();
nextPosition.access<Position::x>() = x + cos(a) * dt;
nextPosition.access<Position::y>() = y + sin(a) * dt;
nextPosition.access<Position::theta>() = a + da;
nextPosition.access<Velocity::translational>() = std::normal_distribution<double>(dt, translationalNoise)(randomgen);
nextPosition.access<Velocity::angular>() = std::normal_distribution<double>(da, angularNoise)(randomgen);
if(nextPosition.access<Position::theta>() > M_PI)
nextPosition.access<Position::theta>() -= 2*M_PI;
if(nextPosition.access<Position::theta>() < -M_PI)
nextPosition.access<Position::theta>() += 2*M_PI;
return nextPosition;
}
// ######################################################################
//! Render the ground-truth vehicle into @a image.
/*! Draws a circle at the vehicle's position with a short line indicating its
    heading, and connects the previously drawn position to the current one
    with a dimmed trail segment, painted both into the persistent world image
    (so the trail accumulates) and into the current frame. */
void drawVehicle(nrt::Image<nrt::PixRGB<nrt::byte>> & image,
                 nrt::StateVector<VehicleState> const & currentPosition,
                 nrt::PixRGBA<nrt::byte> const & color,
                 int thickness)
{
  // Position drawn on the previous call; (-1,-1) means "none yet".
  static nrt::Point2D<int> lastpoint(-1,-1);

  double const heading = currentPosition.access<Position::theta>();
  nrt::Point2D<int> const here(currentPosition.access<Position::x>(),
                               currentPosition.access<Position::y>());

  // Vehicle body plus heading tick.
  nrt::drawCircle(image, nrt::Circle<int>(here, 5), color, thickness);
  nrt::Point2D<int> const tip = here + nrt::Point2D<int>(cos(heading)*10, sin(heading)*10);
  nrt::drawLine(image, nrt::Line<int>(here, tip), color, thickness);

  if(lastpoint != nrt::Point2D<int>(-1, -1))
  {
    // Trail segment, dimmer than the vehicle itself.
    nrt::PixRGBA<nrt::byte> trailColor = color;
    trailColor.alpha = 64;
    nrt::drawLine(worldImage, nrt::Line<int>(lastpoint, here), trailColor, 1);
    nrt::drawLine(image, nrt::Line<int>(lastpoint, here), trailColor, 1);
  }
  lastpoint = here;
}
// ######################################################################
//! Render the filter's current estimate into @a image.
/*! Draws the mean pose (circle + heading tick), the x/y covariance ellipse
    in cyan, and a dimmed trail segment from the previously drawn mean into
    both the persistent world image and the current frame. */
void drawPrediction(nrt::Image<nrt::PixRGB<nrt::byte>> & image,
                    nrt::GaussianPDF<VehicleState> const & currentPosition,
                    nrt::PixRGBA<nrt::byte> const & color,
                    int thickness)
{
  // Mean position drawn on the previous call; (-1,-1) means "none yet".
  static nrt::Point2D<int> lastpoint(-1,-1);

  double const heading = currentPosition.mean<Position::theta>();
  nrt::Point2D<int> const here(currentPosition.mean<Position::x>(),
                               currentPosition.mean<Position::y>());

  // Estimated pose: circle plus heading tick.
  nrt::drawCircle(image, nrt::Circle<int>(here, 5), color, thickness);
  nrt::Point2D<int> const tip = here + nrt::Point2D<int>(cos(heading)*10, sin(heading)*10);
  nrt::drawLine(image, nrt::Line<int>(here, tip), color, thickness);

  // Positional uncertainty, shown as the x/y covariance ellipse.
  nrt::Polygon<int> uncertainty(currentPosition.covarianceEllipse<Position::x, Position::y>());
  nrt::drawPolygon(image, uncertainty, nrt::PixRGBA<nrt::byte>(0, 255, 255, 255), 1);

  if(lastpoint != nrt::Point2D<int>(-1, -1))
  {
    // Trail segment, dimmer than the estimate itself.
    nrt::PixRGBA<nrt::byte> trailColor = color;
    trailColor.alpha = 64;
    nrt::drawLine(worldImage, nrt::Line<int>(lastpoint, here), trailColor, 1);
    nrt::drawLine(image, nrt::Line<int>(lastpoint, here), trailColor, 1);
  }
  lastpoint = here;
}
// ######################################################################
void drawMeasurement(nrt::Image<nrt::PixRGB<nrt::byte>> & image,
{
double r = observation.access<PolarPosition::range>();
double a = observation.access<PolarPosition::angle>();
nrt::Point2D<int> c(worldDims.width()/2.0 + r * cos(a), worldDims.height()/2.0 + r * sin(a));
nrt::drawCross(image, c, nrt::PixRGBA<nrt::byte>(255, 255, 255, 64), 30, 1);
nrt::drawLine(image, nrt::Line<int>(nrt::Point2D<int>(worldDims.width()/2, worldDims.height()/2), c), nrt::PixRGBA<nrt::byte>(255,255,255,16), 3);
nrt::drawCross(worldImage, c, nrt::PixRGBA<nrt::byte>(255, 255, 255, 16), 3, 1);
}
// ######################################################################
{
double x = vehicleState.access<Position::x>();
double y = vehicleState.access<Position::y>();
double randomRangeNoise = std::normal_distribution<double>(0, sensingRangeNoise)(randomgen);
double randomAngleNoise = std::normal_distribution<double>(0, sensingAngleNoise)(randomgen);
observation.access<PolarPosition::range>() =
sqrt(pow(x - worldDims.width()/2.0, 2) + pow(y - worldDims.height()/2.0, 2)) + randomRangeNoise;
observation.access<PolarPosition::angle>() =
atan2(y - worldDims.height() / 2.0, x - worldDims.width() / 2.0) + randomAngleNoise;
return observation;
}