diff --git a/paraview_wrapping/Slam.xml b/paraview_wrapping/Slam.xml
index 593d742efc57ccecc042a3d89f212307135f5241..901d4fc64e8c323865ec7087ea0314a0a7c15b97 100644
--- a/paraview_wrapping/Slam.xml
+++ b/paraview_wrapping/Slam.xml
@@ -366,28 +366,23 @@
       <IntVectorProperty
           name="Undistortion mode"
           command="SetUndistortion"
-          default_values="1"
+          default_values="2"
           number_of_elements="1">
         <EnumerationDomain name="enum">
           <Entry value="0" text="Disabled"/>
-          <Entry value="1" text="Approximated"/>
-          <Entry value="2" text="Optimized"/>
+          <Entry value="1" text="Once"/>
+          <Entry value="2" text="Refined"/>
         </EnumerationDomain>
         <Documentation>
-          Undistortion mode, to correct rolling shutter distortion during a
-          whole sweep during Lidar frame acquisition. This might be slower
-          and unstable in case of jumps.
+          Undistortion mode, to correct rolling shutter distortion during frame acquisition.
+          The undistortion should greatly improve the accuracy for smooth motions,
+          but might be unstable for high-frequency motions.
 
-          If DISABLED, SLAM will compute a rigid transform that will be applied
-          to the whole frame without taking into account the distortion due to
-          the lidar motion during a sweep.
+          DISABLED: no undistortion is performed.
 
-          If APPROXIMATED, current beginning sweep pose will be guessed using a
-          constant speed motion model from previous and current end sweep poses.
-          Then, linear undistortion between current begin and end poses is applied.
+          ONCE: undistortion is performed only once using estimated ego-motion.
 
-          If OPTIMIZED, both current begin and end sweep poses are optimized.
-          Then, linear undistortion between current begin and end poses is applied.
+          REFINED: undistortion is iteratively refined using optimized ego-motion.
         </Documentation>
       </IntVectorProperty>
 
diff --git a/paraview_wrapping/vtkSlam.cxx b/paraview_wrapping/vtkSlam.cxx
index eef6eb1c83f89e1f00a6723e8a22d7374931611c..99fb88ed9d9b04fb6e7705b5f0accc8c6200a4cd 100644
--- a/paraview_wrapping/vtkSlam.cxx
+++ b/paraview_wrapping/vtkSlam.cxx
@@ -648,8 +648,8 @@ void vtkSlam::SetUndistortion(int mode)
 {
   LidarSlam::UndistortionMode undistortion = static_cast<LidarSlam::UndistortionMode>(mode);
   if (undistortion != LidarSlam::UndistortionMode::NONE &&
-      undistortion != LidarSlam::UndistortionMode::APPROXIMATED &&
-      undistortion != LidarSlam::UndistortionMode::OPTIMIZED)
+      undistortion != LidarSlam::UndistortionMode::ONCE &&
+      undistortion != LidarSlam::UndistortionMode::REFINED)
   {
     vtkErrorMacro("Invalid undistortion mode (" << mode << "), ignoring setting.");
     return;
diff --git a/ros_wrapping/lidar_slam/params/slam_config.yaml b/ros_wrapping/lidar_slam/params/slam_config.yaml
index bdd230a26ef24cf51182609f1db9f6e46323d3bb..2043784c751342b4723a179ef46447639dbc5a57 100644
--- a/ros_wrapping/lidar_slam/params/slam_config.yaml
+++ b/ros_wrapping/lidar_slam/params/slam_config.yaml
@@ -92,20 +92,25 @@ slam:
   #     but should be more precise and rely less on constant motion hypothesis.
   ego_motion: 1
 
+
   # Undistortion mode, to correct rolling shutter distortion during frame acquisition.
-  # The undistortion should improve the accuracy, but the computation speed may
-  # decrease, and the result might be unstable in difficult situations.
-  #  0) No undistortion is performed :
+  # The undistortion should greatly improve the accuracy for smooth motions,
+  # but might be unstable for high-frequency motions.
+  #  0) NONE: no undistortion is performed :
   #     - End scan pose is optimized using rigid registration of raw scan and map.
-  #     - Raw input scan is added to maps.
-  #  1) Minimal undistortion is performed (might be unstable if jumps occur):
-  #     - Begin scan pose is linearly interpolated between previous and current end scan poses.
-  #     - End scan pose is optimized using rigid registration of undistorted scan and map.
+  #     - Raw input scan is added to map.
+  #  1) ONCE: undistortion is performed only once using estimated ego-motion:
+  #     - Begin and end scan poses are linearly interpolated using estimated ego-motion.
   #     - Scan is linearly undistorted between begin and end scan poses.
-  #  2) Ceres-optimized undistortion is performed (slower, and may be unstable):
-  #     - Both begin and end scan poses are optimized using registration of undistorted scan and map.
+  #     - Scan pose is iteratively optimized using rigid registration of undistorted scan and map.
+  #     - Undistorted scan is added to map.
+  #  2) REFINED: undistortion is iteratively refined using optimized ego-motion:
+  #     - Begin and end scan poses are linearly interpolated using ego-motion.
   #     - Scan is linearly undistorted between begin and end scan poses.
-  undistortion: 1
+  #     - Scan pose is optimized using rigid registration of undistorted scan and map.
+  #     - Iterate the three previous steps with updated ego-motion and poses.
+  #     - Undistorted scan is added to map.
+  undistortion: 2
 
   # Verbosity level :
   #  0) print errors, warnings or one time info
diff --git a/ros_wrapping/lidar_slam/src/LidarSlamNode.cxx b/ros_wrapping/lidar_slam/src/LidarSlamNode.cxx
index 6eddc70b3d3013f18389e25c9718b0241f559d86..b91436132ea4c8cb70b8e9994128a2e0cb3c22e2 100644
--- a/ros_wrapping/lidar_slam/src/LidarSlamNode.cxx
+++ b/ros_wrapping/lidar_slam/src/LidarSlamNode.cxx
@@ -555,11 +555,11 @@ void LidarSlamNode::SetSlamParameters()
   {
     LidarSlam::UndistortionMode undistortion = static_cast<LidarSlam::UndistortionMode>(undistortionMode);
     if (undistortion != LidarSlam::UndistortionMode::NONE &&
-        undistortion != LidarSlam::UndistortionMode::APPROXIMATED &&
-        undistortion != LidarSlam::UndistortionMode::OPTIMIZED)
+        undistortion != LidarSlam::UndistortionMode::ONCE &&
+        undistortion != LidarSlam::UndistortionMode::REFINED)
     {
-      ROS_ERROR_STREAM("Invalid undistortion mode (" << undistortion << "). Setting it to 'APPROXIMATED'.");
-      undistortion = LidarSlam::UndistortionMode::APPROXIMATED;
+      ROS_ERROR_STREAM("Invalid undistortion mode (" << undistortion << "). Setting it to 'REFINED'.");
+      undistortion = LidarSlam::UndistortionMode::REFINED;
     }
     LidarSlam.SetUndistortion(undistortion);
   }
diff --git a/slam_lib/include/LidarSlam/Enums.h b/slam_lib/include/LidarSlam/Enums.h
index b1c0ec6f17bfe3bd820358e7b12b8d74e54b04ae..9cfef2cfedca579249899da3d9fdfb0954d686af 100644
--- a/slam_lib/include/LidarSlam/Enums.h
+++ b/slam_lib/include/LidarSlam/Enums.h
@@ -35,21 +35,25 @@ enum Keypoint
 //! How to deal with undistortion
 enum UndistortionMode
 {
-  //! No undistortion is performed :
+  //! No undistortion is performed:
   //!  - End scan pose is optimized using rigid registration of raw scan and map.
-  //!  - Raw input scan is added to maps.
+  //!  - Raw input scan is added to map.
   NONE = 0,
 
-  //! Minimal undistortion is performed :
-  //!  - Begin scan pose is linearly interpolated between previous and current end scan poses.
-  //!  - End scan pose is optimized using rigid registration of undistorted scan and map.
+  //! Undistortion is performed only once using estimated ego-motion:
+  //!  - Begin and end scan poses are linearly interpolated using estimated ego-motion.
   //!  - Scan is linearly undistorted between begin and end scan poses.
-  APPROXIMATED = 1,
+  //!  - Scan pose is iteratively optimized using rigid registration of undistorted scan and map.
+  //!  - Undistorted scan is added to map.
+  ONCE = 1,
 
-  //! Ceres-optimized undistortion is performed :
-  //!  - Both begin and end scan poses are optimized using registration of undistorted scan and map.
+  //! Undistortion is iteratively refined using optimized ego-motion:
+  //!  - Begin and end scan poses are linearly interpolated using ego-motion.
   //!  - Scan is linearly undistorted between begin and end scan poses.
-  OPTIMIZED = 2
+  //!  - Scan pose is optimized using rigid registration of undistorted scan and map.
+  //!  - Iterate the three previous steps with updated ego-motion and poses.
+  //!  - Undistorted scan is added to map.
+  REFINED = 2
 };
 
 //------------------------------------------------------------------------------
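In both the ONCE and REFINED modes the correction itself is the same: the within-frame motion is linearly interpolated between the begin and end scan poses, and each point is shifted according to its point-wise 'time' field. Below is a minimal sketch of that correction, assuming the LinearTransformInterpolator, Utils::TransformPoint and Slam::Point/PointCloud interfaces used elsewhere in this patch; the UndistortScan helper is illustrative only and not part of the library.

```cpp
#include <Eigen/Geometry>
#include "LidarSlam/Slam.h"  // assumed to provide Slam::Point/PointCloud, LinearTransformInterpolator and Utils

namespace LidarSlam
{
// Illustrative helper: linearly undistort a scan expressed in BASE coordinates,
// given the BASE poses at the first and last point timestamps (time offsets
// relative to the frame header stamp).
void UndistortScan(const Slam::PointCloud& in, Slam::PointCloud& out,
                   const Eigen::Isometry3d& beginPose, double beginTime,
                   const Eigen::Isometry3d& endPose,   double endTime)
{
  LinearTransformInterpolator<double> interpolator;
  interpolator.SetTransforms(beginPose, endPose);
  interpolator.SetTimes(beginTime, endTime);

  // Move each point to where it would lie if the whole frame had been
  // acquired from a single reference BASE pose.
  out.clear();
  out.reserve(in.size());
  for (const Slam::Point& p : in)
    out.push_back(Utils::TransformPoint(p, interpolator(p.time)));
}
} // namespace LidarSlam
```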
diff --git a/slam_lib/include/LidarSlam/KeypointsRegistration.h b/slam_lib/include/LidarSlam/KeypointsRegistration.h
index ab48643ae59fd7393de6121c9c49bc2a5105428f..ac40c6d6a253b20b4531830a6b4c55c54c7d7ea2 100644
--- a/slam_lib/include/LidarSlam/KeypointsRegistration.h
+++ b/slam_lib/include/LidarSlam/KeypointsRegistration.h
@@ -143,21 +143,7 @@ public:
   //----------------------------------------------------------------------------
 
   // Init ICP-LM optimizer.
-  //  - if undistortion is NONE :
-  //    FirstPose is the only considered transform, rigidly optimized.
-  //    SecondPose is not even used, nor is time info.
-  //  - if undistortion is APPROXIMATED :
-  //    FirstPose and SecondPose are first used as fixed priors for linear undistortion during ICP,
-  //    then only SecondPose is rigidly optimized.
-  //  - if undistortion is OPTIMIZED :
-  //    FirstPose and SecondPose are first used as fixed priors for linear undistortion during ICP,
-  //    then are optimized by iteratively refining undistortion.
-  KeypointsRegistration(const Parameters& params,
-                        UndistortionMode undistortion,
-                        const Eigen::Isometry3d& firstPosePrior,
-                        const Eigen::Isometry3d& secondPosePrior = Eigen::Isometry3d::Identity(),
-                        double firstPoseTime = 0.,
-                        double secondPoseTime = 1.);
+  KeypointsRegistration(const Parameters& params, const Eigen::Isometry3d& posePrior);
 
   // Build point-to-neighborhood residuals
   MatchingResults BuildAndMatchResiduals(const PointCloud::Ptr& currPoints,
@@ -168,12 +154,9 @@ public:
   ceres::Solver::Summary Solve();
 
   // Get optimization results
-  Eigen::Isometry3d GetOptimizedFirstPose()  const { return Utils::XYZRPYtoIsometry(this->FirstPoseArray); }
-  Eigen::Isometry3d GetOptimizedSecondPose() const { return Utils::XYZRPYtoIsometry(this->SecondPoseArray); }
+  Eigen::Isometry3d GetOptimizedPose() const { return Utils::XYZRPYtoIsometry(this->PoseArray); }
 
   // Estimate registration error
-  // If undistortion is disabled, this error is estimated for FirstPose.
-  // If undistortion is enabled, this error is estimated for SecondPose.
   RegistrationError EstimateRegistrationError();
 
   //----------------------------------------------------------------------------
@@ -193,18 +176,8 @@ private:
   //    * A = (n*n.t) for a plane with n being the normal.
   //    * A is the symmetric variance-covariance matrix encoding the shape of
   //      the neighborhood for a blob.
-  // - time stores the time acquisition (used only if undistortion is enabled)
   // - weight attenuates the distance function for outliers
-  void AddIcpResidual(const Eigen::Matrix3d& A, const Eigen::Vector3d& P, const Eigen::Vector3d& X, double time, double weight = 1.);
-
-  // Helper to compute point positions according to undistortion mode.
-  // - pInit will be the initial position in sensor coordinates, on which we
-  //   need to apply the transforms to optimize. This position will be used
-  //   in optimization.
-  // - pFinal will be estimated using the given prior to correspond to the
-  //   approximate point location in global coordinates system. This transformed
-  //   position can be used to find approximate nearest neighbors in map.
-  void ComputePointInitAndFinalPose(const Point& p, Eigen::Vector3d& pInit, Eigen::Vector3d& pFinal) const;
+  void AddIcpResidual(const Eigen::Matrix3d& A, const Eigen::Vector3d& P, const Eigen::Vector3d& X, double weight = 1.);
 
   // Match the current keypoint with its neighborhood in the map / previous
   // frame to estimate P and A.
@@ -235,21 +208,14 @@ private:
 
   const Parameters Params;
 
-  const UndistortionMode Undistortion;
-
   // The problem to build and optimize
   ceres::Problem Problem;
 
   // Initialization of DoF to optimize
-  const Eigen::Isometry3d FirstPosePrior;   ///< Initial guess of the first pose to optimize
-  const Eigen::Isometry3d SecondPosePrior;  ///< Initial guess of the second pose to optimize (only used if undistortion is enabled)
+  const Eigen::Isometry3d PosePrior;  ///< Initial guess of the pose to optimize
 
   // DoF to optimize (= output)
-  Eigen::Vector6d FirstPoseArray;   ///< First pose to optimize (XYZRPY)
-  Eigen::Vector6d SecondPoseArray;  ///< Second pose to optimize (XYZRPY) (only used if undistortion is enabled)
-
-  // Frame pose interpolator (only used if undistortion is enabled)
-  const LinearTransformInterpolator<double> WithinFrameMotionPrior;
+  Eigen::Vector6d PoseArray;  ///< Pose parameters to optimize (XYZRPY)
 };
 
 } // end of LidarSlam namespace
\ No newline at end of file
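With undistortion moved out of the registration, the class now optimizes a single rigid 6-DoF pose. The sketch below mirrors the call sequence used in Slam.cxx; the keypoint cloud and kd-tree types are left as template parameters since their exact names are not part of this patch, and the RegisterOnce helper is illustrative only.

```cpp
#include <Eigen/Geometry>
#include "LidarSlam/Enums.h"
#include "LidarSlam/KeypointsRegistration.h"

namespace LidarSlam
{
// Illustrative single ICP-LM pass: match keypoints against the reference
// clouds, solve, and return the refined pose.
template <typename CloudPtr, typename KDTree>
Eigen::Isometry3d RegisterOnce(const KeypointsRegistration::Parameters& params,
                               const Eigen::Isometry3d& posePrior,
                               const CloudPtr& edges,  const KDTree& kdtreeEdges,
                               const CloudPtr& planes, const KDTree& kdtreePlanes)
{
  // The single pose prior replaces the former begin/end pose pair: the input
  // keypoints are expected to be already undistorted (or raw if undistortion
  // is disabled).
  KeypointsRegistration optim(params, posePrior);
  optim.BuildAndMatchResiduals(edges,  kdtreeEdges,  Keypoint::EDGE);
  optim.BuildAndMatchResiduals(planes, kdtreePlanes, Keypoint::PLANE);
  optim.Solve();
  return optim.GetOptimizedPose();
}
} // namespace LidarSlam
```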
diff --git a/slam_lib/include/LidarSlam/Slam.h b/slam_lib/include/LidarSlam/Slam.h
index 7f47a5d73226b09427f31d2cff5ecfb40df1c8a5..2c7240ace983adfce4ae159a2ea14ba19f5036fd 100644
--- a/slam_lib/include/LidarSlam/Slam.h
+++ b/slam_lib/include/LidarSlam/Slam.h
@@ -358,10 +358,10 @@ private:
   // Localization step.
   EgoMotionMode EgoMotion = EgoMotionMode::MOTION_EXTRAPOLATION;
 
-  // How the algorithm should undistort the lidar scans.
-  // The undistortion should improve the accuracy, but the computation speed
-  // may decrease, and the result might be unstable in difficult situations.
-  UndistortionMode Undistortion = UndistortionMode::APPROXIMATED;
+  // How to correct the rolling shutter distortion during frame acquisition.
+  // The undistortion should greatly improve the accuracy for smooth motions,
+  // but might be unstable for high-frequency motions.
+  UndistortionMode Undistortion = UndistortionMode::REFINED;
 
   // Indicate verbosity level to display more or less information :
   // 0: print errors, warnings or one time info
@@ -423,7 +423,9 @@ private:
   // **** UNDISTORTION ****
 
   // Transform interpolator to estimate the pose of the sensor within a lidar
-  // frame, using poses at the beginning and end of frame.
+  // frame, using the BASE poses at the beginning and end of the frame.
+  // This will be used to undistort the pointcloud and express its points
+  // relative to the same BASE pose at the frame header timestamp.
   // This will use the point-wise 'time' field, representing the time offset
   // in seconds to add to the frame header timestamp.
   LinearTransformInterpolator<double> WithinFrameMotion;
@@ -461,12 +463,16 @@ private:
   std::vector<PointCloud::Ptr> CurrentFrames;
 
   // Raw extracted keypoints, in BASE coordinates (no undistortion)
+  PointCloud::Ptr CurrentRawEdgesPoints;
+  PointCloud::Ptr CurrentRawPlanarsPoints;
+  PointCloud::Ptr CurrentRawBlobsPoints;
+  PointCloud::Ptr PreviousRawEdgesPoints;
+  PointCloud::Ptr PreviousRawPlanarsPoints;
+
+  // Extracted keypoints, in BASE coordinates (with undistortion if enabled)
   PointCloud::Ptr CurrentEdgesPoints;
   PointCloud::Ptr CurrentPlanarsPoints;
   PointCloud::Ptr CurrentBlobsPoints;
-  PointCloud::Ptr PreviousEdgesPoints;
-  PointCloud::Ptr PreviousPlanarsPoints;
-  PointCloud::Ptr PreviousBlobsPoints;
 
   // Extracted keypoints, in WORLD coordinates (with undistortion if enabled)
   PointCloud::Ptr CurrentWorldEdgesPoints;
@@ -599,10 +605,12 @@ private:
   // 'time' arg is the time offset in seconds to current frame header.stamp.
   Eigen::Isometry3d InterpolateScanPose(double time);
 
-  // Init undistortion process based on point-wise time field.
-  // Get current frame time field range, and update within frame motion interpolator.
+  // Init undistortion interpolator time bounds based on point-wise time field.
   void InitUndistortion();
 
+  // Update the undistortion interpolator pose bounds,
+  // and refine the undistortion of the current keypoints clouds.
+  void RefineUndistortion();
 };
 
 } // end of LidarSlam namespace
\ No newline at end of file
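Since WithinFrameMotion now stores the BASE motion relative to the BASE pose at the frame header timestamp (instead of WORLD poses), mapping a raw LIDAR point to WORLD coordinates requires composing it with Tworld and the BASE to LIDAR calibration, as the updated GetOutputFrame() does. A small illustrative helper, assuming only the interfaces visible in this patch:

```cpp
#include <Eigen/Geometry>
#include "LidarSlam/Slam.h"  // assumed to provide Slam::Point, LinearTransformInterpolator and Utils

namespace LidarSlam
{
// Illustrative only: express a raw LIDAR point in WORLD coordinates when
// undistortion is enabled, following the composition used in GetOutputFrame().
Slam::Point LidarPointToWorld(const Slam::Point& p,
                              const Eigen::Isometry3d& Tworld,
                              LinearTransformInterpolator<double> withinFrameMotion,
                              const Eigen::Isometry3d& baseToLidar)
{
  // Compose the relative within-frame motion with the WORLD pose and the
  // BASE->LIDAR calibration, so the interpolator maps LIDAR points to WORLD.
  withinFrameMotion.SetTransforms(Tworld * withinFrameMotion.GetH0() * baseToLidar,
                                  Tworld * withinFrameMotion.GetH1() * baseToLidar);
  return Utils::TransformPoint(p, withinFrameMotion(p.time));
}
} // namespace LidarSlam
```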
diff --git a/slam_lib/src/KeypointsRegistration.cxx b/slam_lib/src/KeypointsRegistration.cxx
index 4e97d60d1f23979e6c6546c625ea9808c279e99f..889c947af538782ae08f376fae2e2a6760b2d8c7 100644
--- a/slam_lib/src/KeypointsRegistration.cxx
+++ b/slam_lib/src/KeypointsRegistration.cxx
@@ -24,20 +24,12 @@ namespace LidarSlam
 
 //-----------------------------------------------------------------------------
 KeypointsRegistration::KeypointsRegistration(const KeypointsRegistration::Parameters& params,
-                                             UndistortionMode undistortion,
-                                             const Eigen::Isometry3d& firstPosePrior,
-                                             const Eigen::Isometry3d& secondPosePrior,
-                                             double firstPoseTime,
-                                             double secondPoseTime)
+                                             const Eigen::Isometry3d& posePrior)
   : Params(params)
-  , Undistortion(undistortion)
-  , FirstPosePrior(firstPosePrior)
-  , SecondPosePrior(secondPosePrior)
-  , WithinFrameMotionPrior(firstPosePrior, secondPosePrior, firstPoseTime, secondPoseTime)
+  , PosePrior(posePrior)
 {
-  // Convert isometries to 6D state vectors : X, Y, Z, rX, rY, rZ
-  this->FirstPoseArray  = Utils::IsometryToXYZRPY(firstPosePrior);
-  this->SecondPoseArray = Utils::IsometryToXYZRPY(secondPosePrior);
+  // Convert isometry to 6D state vector : X, Y, Z, rX, rY, rZ
+  this->PoseArray = Utils::IsometryToXYZRPY(posePrior);
 }
 
 //-----------------------------------------------------------------------------
@@ -112,7 +104,7 @@ KeypointsRegistration::RegistrationError KeypointsRegistration::EstimateRegistra
   // Computation of the variance-covariance matrix
   ceres::Covariance covarianceSolver(covOptions);
   std::vector<std::pair<const double*, const double*>> covarianceBlocks;
-  const double* paramBlock = this->Undistortion ? this->SecondPoseArray.data() : this->FirstPoseArray.data();
+  const double* paramBlock = this->PoseArray.data();
   covarianceBlocks.emplace_back(paramBlock, paramBlock);
   covarianceSolver.Compute(covarianceBlocks, &this->Problem);
   covarianceSolver.GetCovarianceBlock(paramBlock, paramBlock, err.Covariance.data());
@@ -129,63 +121,17 @@ KeypointsRegistration::RegistrationError KeypointsRegistration::EstimateRegistra
 }
 
 //----------------------------------------------------------------------------
-void KeypointsRegistration::AddIcpResidual(const Eigen::Matrix3d& A, const Eigen::Vector3d& P, const Eigen::Vector3d& X, double time, double weight)
+void KeypointsRegistration::AddIcpResidual(const Eigen::Matrix3d& A, const Eigen::Vector3d& P, const Eigen::Vector3d& X, double weight)
 {
-  // The Ceres residuals to use depending on undistortion mode
-  using RigidResidual = CeresCostFunctions::MahalanobisDistanceAffineIsometryResidual;
-  using UndistortionResidual = CeresCostFunctions::MahalanobisDistanceInterpolatedMotionResidual;
-
-  // If OPTIMIZED mode, we need to optimize both first and second poses
-  if (this->Undistortion == UndistortionMode::OPTIMIZED)
-  {
-    double normTime = (time - this->WithinFrameMotionPrior.GetTime0()) / this->WithinFrameMotionPrior.GetTimeRange();
-    ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<UndistortionResidual, 1, 6, 6>(new UndistortionResidual(A, P, X, normTime, weight));
-    #pragma omp critical(addIcpResidual)
-    this->Problem.AddResidualBlock(cost_function,
-                                   new ceres::ScaledLoss(new ceres::ArctanLoss(this->Params.LossScale), weight, ceres::TAKE_OWNERSHIP),
-                                   this->FirstPoseArray.data(), this->SecondPoseArray.data());
-  }
-  // If APPROXIMATED mode, we only need to optimize second pose
-  else if (this->Undistortion == UndistortionMode::APPROXIMATED)
-  {
-    ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<RigidResidual, 1, 6>(new RigidResidual(A, P, X, weight));
-    #pragma omp critical(addIcpResidual)
-    this->Problem.AddResidualBlock(cost_function,
-                                   new ceres::ScaledLoss(new ceres::ArctanLoss(this->Params.LossScale), weight, ceres::TAKE_OWNERSHIP),
-                                   this->SecondPoseArray.data());
-  }
-  // If NONE mode, we only need to optimize first pose
-  else
-  {
-    ceres::CostFunction* cost_function = new ceres::AutoDiffCostFunction<RigidResidual, 1, 6>(new RigidResidual(A, P, X, weight));
-    #pragma omp critical(addIcpResidual)
-    this->Problem.AddResidualBlock(cost_function,
-                                   new ceres::ScaledLoss(new ceres::ArctanLoss(this->Params.LossScale), weight, ceres::TAKE_OWNERSHIP),
-                                   this->FirstPoseArray.data());
-  }
-}
-
-//-----------------------------------------------------------------------------
-void KeypointsRegistration::ComputePointInitAndFinalPose(const Point& p, Eigen::Vector3d& pInit, Eigen::Vector3d& pFinal) const
-{
-  const Eigen::Vector3d pos = p.getVector3fMap().cast<double>();
-  switch (this->Undistortion)
-  {
-    case UndistortionMode::OPTIMIZED:
-      pInit = pos;
-      pFinal = this->WithinFrameMotionPrior(p.time) * pos;
-      break;
-
-    case UndistortionMode::APPROXIMATED:
-      pFinal = this->WithinFrameMotionPrior(p.time) * pos;
-      pInit = this->SecondPosePrior.inverse() * pFinal;
-      break;
-    
-    case UndistortionMode::NONE:
-      pInit = pos;
-      pFinal = this->FirstPosePrior * pos;
-      break;
-  }
+  // Create the point-to-line/plane/blob cost function
+  using Residual = CeresCostFunctions::MahalanobisDistanceAffineIsometryResidual;
+  ceres::CostFunction* costFunction = new ceres::AutoDiffCostFunction<Residual, 1, 6>(new Residual(A, P, X, weight));
+
+  // Add constraint to problem
+  #pragma omp critical(addIcpResidual)
+  this->Problem.AddResidualBlock(costFunction,
+                                 new ceres::ScaledLoss(new ceres::ArctanLoss(this->Params.LossScale), weight, ceres::TAKE_OWNERSHIP),
+                                 this->PoseArray.data());
 }
 
 //-----------------------------------------------------------------------------
@@ -194,10 +140,10 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   // =====================================================
   // Transform the point using the current pose estimation
 
-  // Rigid transform or time continuous motion model to take into account the
-  // rolling shutter distortion.
-  Eigen::Vector3d pInit, pFinal;
-  this->ComputePointInitAndFinalPose(p, pInit, pFinal);
+  // localPoint is the raw local position, on which we need to apply the transform to optimize.
+  // worldPoint is the estimated position in world coordinates.
+  Eigen::Vector3d localPoint = p.getVector3fMap().cast<double>();
+  Eigen::Vector3d worldPoint = this->PosePrior * localPoint;
 
   // ===================================================
   // Get neighboring points in previous set of keypoints
@@ -205,9 +151,9 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   std::vector<int> knnIndices;
   std::vector<float> knnSqDist;
   if (this->Params.SingleEdgePerRing)
-    this->GetPerRingLineNeighbors(kdtreePreviousEdges, pFinal.data(), this->Params.LineDistanceNbrNeighbors, knnIndices, knnSqDist);
+    this->GetPerRingLineNeighbors(kdtreePreviousEdges, worldPoint.data(), this->Params.LineDistanceNbrNeighbors, knnIndices, knnSqDist);
   else
-    this->GetRansacLineNeighbors(kdtreePreviousEdges, pFinal.data(), this->Params.LineDistanceNbrNeighbors, this->Params.MaxLineDistance, knnIndices, knnSqDist);
+    this->GetRansacLineNeighbors(kdtreePreviousEdges, worldPoint.data(), this->Params.LineDistanceNbrNeighbors, this->Params.MaxLineDistance, knnIndices, knnSqDist);
 
   // If not enough neighbors, abort
   unsigned int neighborhoodSize = knnIndices.size();
@@ -291,7 +237,7 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   double fitQualityCoeff = 1.0 - std::sqrt(meanSquaredDist / squaredMaxDist);
 
   // Store the distance parameters values
-  this->AddIcpResidual(A, mean, pInit, p.time, fitQualityCoeff);
+  this->AddIcpResidual(A, mean, localPoint, fitQualityCoeff);
 
   return MatchingResults::MatchStatus::SUCCESS;
 }
@@ -302,17 +248,17 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   // =====================================================
   // Transform the point using the current pose estimation
 
-  // Rigid transform or time continuous motion model to take into account the
-  // rolling shutter distortion.
-  Eigen::Vector3d pInit, pFinal;
-  this->ComputePointInitAndFinalPose(p, pInit, pFinal);
+  // localPoint is the raw local position, on which we need to apply the transform to optimize.
+  // worldPoint is the estimated position in world coordinates.
+  Eigen::Vector3d localPoint = p.getVector3fMap().cast<double>();
+  Eigen::Vector3d worldPoint = this->PosePrior * localPoint;
 
   // ===================================================
   // Get neighboring points in previous set of keypoints
 
   std::vector<int> knnIndices;
   std::vector<float> knnSqDist;
-  unsigned int neighborhoodSize = kdtreePreviousPlanes.KnnSearch(pFinal.data(), this->Params.PlaneDistanceNbrNeighbors, knnIndices, knnSqDist);
+  unsigned int neighborhoodSize = kdtreePreviousPlanes.KnnSearch(worldPoint.data(), this->Params.PlaneDistanceNbrNeighbors, knnIndices, knnSqDist);
 
   // It means that there is not enough keypoints in the neighborhood
   if (neighborhoodSize < this->Params.PlaneDistanceNbrNeighbors)
@@ -388,7 +334,7 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   double fitQualityCoeff = 1.0 - std::sqrt(meanSquaredDist / squaredMaxDist);
 
   // Store the distance parameters values
-  this->AddIcpResidual(A, mean, pInit, p.time, fitQualityCoeff);
+  this->AddIcpResidual(A, mean, localPoint, fitQualityCoeff);
 
   return MatchingResults::MatchStatus::SUCCESS;
 }
@@ -399,10 +345,10 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   // =====================================================
   // Transform the point using the current pose estimation
 
-  // Rigid transform or time continuous motion model to take into account the
-  // rolling shutter distortion.
-  Eigen::Vector3d pInit, pFinal;
-  this->ComputePointInitAndFinalPose(p, pInit, pFinal);
+  // localPoint is the raw local position, on which we need to apply the transform to optimize.
+  // worldPoint is the estimated position in world coordinates.
+  Eigen::Vector3d localPoint = p.getVector3fMap().cast<double>();
+  Eigen::Vector3d worldPoint = this->PosePrior * localPoint;
 
   // ===================================================
   // Get neighboring points in previous set of keypoints
@@ -412,7 +358,7 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
 
   std::vector<int> knnIndices;
   std::vector<float> knnSqDist;
-  unsigned int neighborhoodSize = kdtreePreviousBlobs.KnnSearch(pFinal.data(), this->Params.BlobDistanceNbrNeighbors, knnIndices, knnSqDist);
+  unsigned int neighborhoodSize = kdtreePreviousBlobs.KnnSearch(worldPoint.data(), this->Params.BlobDistanceNbrNeighbors, knnIndices, knnSqDist);
 
   // It means that there is not enough keypoints in the neighborhood
   if (neighborhoodSize < this->Params.BlobDistanceNbrNeighbors)
@@ -490,7 +436,7 @@ KeypointsRegistration::MatchingResults::MatchStatus KeypointsRegistration::Build
   double fitQualityCoeff = 1.0;//1.0 - knnSqDist.back() / maxDist;
 
   // store the distance parameters values
-  this->AddIcpResidual(A, mean, pInit, p.time, fitQualityCoeff);
+  this->AddIcpResidual(A, mean, localPoint, fitQualityCoeff);
 
   return MatchingResults::MatchStatus::SUCCESS;
 }
diff --git a/slam_lib/src/Slam.cxx b/slam_lib/src/Slam.cxx
index 7e12a074955b111f407cb711b7e78f6381654bfb..2289c52ce6bbeac337c8d337829e3e7dd7ad3c6b 100644
--- a/slam_lib/src/Slam.cxx
+++ b/slam_lib/src/Slam.cxx
@@ -170,6 +170,9 @@ void Slam::Reset(bool resetLog)
   // Reset point clouds
   this->CurrentFrames.clear();
   this->CurrentFrames.emplace_back(new PointCloud);
+  this->CurrentRawEdgesPoints.reset(new PointCloud);
+  this->CurrentRawPlanarsPoints.reset(new PointCloud);
+  this->CurrentRawBlobsPoints.reset(new PointCloud);
   this->CurrentEdgesPoints.reset(new PointCloud);
   this->CurrentPlanarsPoints.reset(new PointCloud);
   this->CurrentBlobsPoints.reset(new PointCloud);
@@ -187,15 +190,16 @@ void Slam::Reset(bool resetLog)
   // Reset log history
   if (resetLog)
   {
+    // Reset frame counter and logged data
     this->NbrFrameProcessed = 0;
     this->LogTrajectory.clear();
     this->LogEdgesPoints.clear();
     this->LogPlanarsPoints.clear();
     this->LogBlobsPoints.clear();
-  }
 
-  // Reset processing duration timers
-  Utils::Timer::Reset();
+    // Reset processing duration timers
+    Utils::Timer::Reset();
+  }
 }
 
 //-----------------------------------------------------------------------------
@@ -217,21 +221,17 @@ void Slam::AddFrames(const std::vector<PointCloud::Ptr>& frames)
   this->ExtractKeypoints();
   IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Keypoints extraction"));
 
-  // If the new frame is the first one we just add the extracted keypoints into
-  // the map without running odometry and localization steps
-  if (this->NbrFrameProcessed > 0)
-  {
-    // Estimate Trelative by extrapolating new pose with a constant velocity model
-    // and/or registering current frame on previous one
-    IF_VERBOSE(3, Utils::Timer::Init("Ego-Motion"));
-    this->ComputeEgoMotion();
-    IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Ego-Motion"));
-
-    // Perform Localization : update Tworld from map and current frame keypoints
-    IF_VERBOSE(3, Utils::Timer::Init("Localization"));
-    this->Localization();
-    IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Localization"));
-  }
+  // Estimate Trelative by extrapolating new pose with a constant velocity model
+  // and/or registering current frame on previous one
+  IF_VERBOSE(3, Utils::Timer::Init("Ego-Motion"));
+  this->ComputeEgoMotion();
+  IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Ego-Motion"));
+
+  // Perform Localization : update Tworld from map and current frame keypoints
+  // and optionally undistort keypoints clouds based on ego-motion
+  IF_VERBOSE(3, Utils::Timer::Init("Localization"));
+  this->Localization();
+  IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Localization"));
 
   // Update keypoints maps : add current keypoints to map using Tworld
   if (this->UpdateMap)
@@ -250,10 +250,7 @@ void Slam::AddFrames(const std::vector<PointCloud::Ptr>& frames)
   if (this->Verbosity >= 2)
   {
     SET_COUT_FIXED_PRECISION(3);
-    std::cout << "========== SLAM results ==========\n"
-                 "Ego-Motion:\n"
-                 " translation = [" << this->Trelative.translation().transpose()                                        << "] m\n"
-                 " rotation    = [" << Utils::Rad2Deg(Utils::RotationMatrixToRPY(this->Trelative.linear())).transpose() << "] °\n";
+    std::cout << "========== SLAM results ==========\n";
     if (this->Undistortion)
     {
       Eigen::Isometry3d motion = this->WithinFrameMotion.GetTransformRange();
@@ -261,7 +258,10 @@ void Slam::AddFrames(const std::vector<PointCloud::Ptr>& frames)
                    " translation = [" << motion.translation().transpose()                                        << "] m\n"
                    " rotation    = [" << Utils::Rad2Deg(Utils::RotationMatrixToRPY(motion.linear())).transpose() << "] °\n";
     }
-    std::cout << "Localization:\n"
+    std::cout << "Ego-Motion:\n"
+                 " translation = [" << this->Trelative.translation().transpose()                                        << "] m\n"
+                 " rotation    = [" << Utils::Rad2Deg(Utils::RotationMatrixToRPY(this->Trelative.linear())).transpose() << "] °\n"
+                 "Localization:\n"
                  " position    = [" << this->Tworld.translation().transpose()                                        << "] m\n"
                  " orientation = [" << Utils::Rad2Deg(Utils::RotationMatrixToRPY(this->Tworld.linear())).transpose() << "] °" << std::endl;
     RESET_COUT_FIXED_PRECISION;
@@ -363,20 +363,40 @@ void Slam::RunPoseGraphOptimization(const std::vector<Transform>& gpsPositions,
     this->LogTrajectory[i].SetIsometry(gpsToSensorOffset.inverse() * optimizedSlamPoses[i].GetIsometry());
 
     // Transform frame keypoints to world coordinates
-    Eigen::Matrix4d currentTransform = this->LogTrajectory[i].GetMatrix();
-
-    pcl::transformPointCloud(*this->LogEdgesPoints[i].GetCloud(), edgesKeypoints, currentTransform);
-    pcl::transformPointCloud(*this->LogPlanarsPoints[i].GetCloud(), planarsKeypoints, currentTransform);
-    if (!this->FastSlam)
-      pcl::transformPointCloud(*this->LogBlobsPoints[i].GetCloud(), blobsKeypoints, currentTransform);
+    const auto& logEdges = this->LogEdgesPoints[i].GetCloud();
+    const auto& logPlanes = this->LogPlanarsPoints[i].GetCloud();
+    const auto& logBlobs = !this->FastSlam ? this->LogBlobsPoints[i].GetCloud() : PointCloud::Ptr(new PointCloud);
+    if (this->Undistortion && i >= 1)
+    {
+      // Init the undistortion interpolator
+      LinearTransformInterpolator<double> interpolator;
+      interpolator.SetTransforms(this->LogTrajectory[i - 1].GetIsometry(), this->LogTrajectory[i].GetIsometry());
+      interpolator.SetTimes(this->LogTrajectory[i].time - this->LogTrajectory[i - 1].time, 0.);
 
-    // TODO: Deal with undistortion case (properly transform pointclouds before aggreagtion)
+      // Perform undistortion of keypoints clouds
+      auto undistortAndTransform = [&](const PointCloud& in, PointCloud& out)
+      {
+        out.clear();
+        out.reserve(in.size());
+        for (const Point& p : in)
+          out.push_back(Utils::TransformPoint(p, interpolator(p.time)));
+      };
+      undistortAndTransform(*logEdges, edgesKeypoints);
+      undistortAndTransform(*logPlanes, planarsKeypoints);
+      undistortAndTransform(*logBlobs, blobsKeypoints);
+    }
+    else
+    {
+      Eigen::Matrix4d currentTransform = this->LogTrajectory[i].GetMatrix();
+      pcl::transformPointCloud(*logEdges, edgesKeypoints, currentTransform);
+      pcl::transformPointCloud(*logPlanes, planarsKeypoints, currentTransform);
+      pcl::transformPointCloud(*logBlobs, blobsKeypoints, currentTransform);
+    }
 
     // Aggregate new keypoints to maps
     *aggregatedEdgesMap += edgesKeypoints;
     *aggregatedPlanarsMap += planarsKeypoints;
-    if (!this->FastSlam)
-      *aggregatedBlobsMap += blobsKeypoints;
+    *aggregatedBlobsMap += blobsKeypoints;
   }
 
   IF_VERBOSE(3, Utils::Timer::StopAndDisplay("PGO : frames keypoints aggregation"));
@@ -386,8 +406,6 @@ void Slam::RunPoseGraphOptimization(const std::vector<Transform>& gpsPositions,
   this->Tworld         = this->LogTrajectory[nbSlamPoses - 1].GetIsometry();
   this->PreviousTworld = this->LogTrajectory[nbSlamPoses - 2].GetIsometry();
 
-  // TODO : Deal with undistortion case (update motionParameters)
-
   // Update SLAM maps
   auto updateMap = [&](RollingGrid& map, const PointCloud& lastPoints, const PointCloud::Ptr& aggregatedPoints)
   {
@@ -422,14 +440,14 @@ void Slam::SetWorldTransformFromGuess(const Transform& poseGuess)
 {
   // Set current pose
   this->Tworld = poseGuess.GetIsometry();
-  // TODO update motionParameters
 
   // Ego-Motion estimation is not valid anymore since we imposed a discontinuity.
   // We reset previous pose so that previous ego-motion extrapolation results in Identity matrix.
   // We reset current frame keypoints so that ego-motion registration will be skipped for next frame.
   this->PreviousTworld = this->Tworld;
-  this->CurrentEdgesPoints.reset(new PointCloud);
-  this->CurrentPlanarsPoints.reset(new PointCloud);
+  this->CurrentRawEdgesPoints.reset(new PointCloud);
+  this->CurrentRawPlanarsPoints.reset(new PointCloud);
+  this->CurrentRawBlobsPoints.reset(new PointCloud);
 }
 
 //-----------------------------------------------------------------------------
@@ -589,10 +607,9 @@ Slam::PointCloud::Ptr Slam::GetOutputFrame()
     Eigen::Isometry3d baseToLidar = this->GetBaseToLidarOffset(this->CurrentFrames[i]->front().device_id);
     if (this->Undistortion)
     {
-      LinearTransformInterpolator<double> transformInterpolator;
-      transformInterpolator.SetH0(this->WithinFrameMotion.GetH0() * baseToLidar, this->WithinFrameMotion.GetTime0());
-      transformInterpolator.SetH1(this->WithinFrameMotion.GetH1() * baseToLidar, this->WithinFrameMotion.GetTime1());
-
+      auto transformInterpolator = this->WithinFrameMotion;
+      transformInterpolator.SetTransforms(this->Tworld * this->WithinFrameMotion.GetH0() * baseToLidar,
+                                          this->Tworld * this->WithinFrameMotion.GetH1() * baseToLidar);
       output.reserve(this->CurrentFrames[i]->size());
       for (const Slam::Point& p : *this->CurrentFrames[i])
         output.push_back(Utils::TransformPoint(p, transformInterpolator(p.time)));
@@ -710,17 +727,17 @@ void Slam::ExtractKeypoints()
   PRINT_VERBOSE(2, "========== Keypoints extraction ==========");
 
   // Current keypoints become previous ones
-  this->PreviousEdgesPoints = this->CurrentEdgesPoints;
-  this->PreviousPlanarsPoints = this->CurrentPlanarsPoints;
+  this->PreviousRawEdgesPoints = this->CurrentRawEdgesPoints;
+  this->PreviousRawPlanarsPoints = this->CurrentRawPlanarsPoints;
 
   // Reset current keypoints
-  this->CurrentEdgesPoints.reset(new PointCloud);
-  this->CurrentPlanarsPoints.reset(new PointCloud);
-  this->CurrentBlobsPoints.reset(new PointCloud);
+  this->CurrentRawEdgesPoints.reset(new PointCloud);
+  this->CurrentRawPlanarsPoints.reset(new PointCloud);
+  this->CurrentRawBlobsPoints.reset(new PointCloud);
   pcl::PCLHeader header = Utils::BuildPclHeader(this->CurrentFrames[0]->header.stamp, this->BaseFrameId, this->NbrFrameProcessed);
-  this->CurrentEdgesPoints->header = header;
-  this->CurrentPlanarsPoints->header = header;
-  this->CurrentBlobsPoints->header = header;
+  this->CurrentRawEdgesPoints->header = header;
+  this->CurrentRawPlanarsPoints->header = header;
+  this->CurrentRawBlobsPoints->header = header;
 
   // Transform pointcloud to BASE coordinates system and correct time offset
   auto AddBaseKeypoints = [this](Slam::PointCloud::Ptr& allKp, const Slam::PointCloud::Ptr& kp, const Eigen::Isometry3d& baseToLidar)
@@ -777,15 +794,15 @@ void Slam::ExtractKeypoints()
 
     // Transform them from LIDAR to BASE coordinates, and aggregate them
     Eigen::Isometry3d baseToLidar = this->GetBaseToLidarOffset(lidarDevice);
-    AddBaseKeypoints(this->CurrentEdgesPoints,   ke->GetEdgePoints(),   baseToLidar);
-    AddBaseKeypoints(this->CurrentPlanarsPoints, ke->GetPlanarPoints(), baseToLidar);
+    AddBaseKeypoints(this->CurrentRawEdgesPoints,   ke->GetEdgePoints(),   baseToLidar);
+    AddBaseKeypoints(this->CurrentRawPlanarsPoints, ke->GetPlanarPoints(), baseToLidar);
     if (!this->FastSlam)
-      AddBaseKeypoints(this->CurrentBlobsPoints, ke->GetBlobPoints(),   baseToLidar);
+      AddBaseKeypoints(this->CurrentRawBlobsPoints, ke->GetBlobPoints(),   baseToLidar);
   }
 
-  PRINT_VERBOSE(2, "Extracted features : " << this->CurrentEdgesPoints->size()   << " edges, "
-                                           << this->CurrentPlanarsPoints->size() << " planes, "
-                                           << this->CurrentBlobsPoints->size()   << " blobs.");
+  PRINT_VERBOSE(2, "Extracted features : " << this->CurrentRawEdgesPoints->size()   << " edges, "
+                                           << this->CurrentRawPlanarsPoints->size() << " planes, "
+                                           << this->CurrentRawBlobsPoints->size()   << " blobs.");
 }
 
 //-----------------------------------------------------------------------------
@@ -827,14 +844,14 @@ void Slam::ComputeEgoMotion()
     #pragma omp parallel sections num_threads(std::min(this->NbThreads, 2))
     {
       #pragma omp section
-      kdtreePreviousEdges.Reset(this->PreviousEdgesPoints);
+      kdtreePreviousEdges.Reset(this->PreviousRawEdgesPoints);
       #pragma omp section
-      kdtreePreviousPlanes.Reset(this->PreviousPlanarsPoints);
+      kdtreePreviousPlanes.Reset(this->PreviousRawPlanarsPoints);
     }
 
     PRINT_VERBOSE(2, "Keypoints extracted from previous frame : "
-                     << this->PreviousEdgesPoints->size() << " edges, "
-                     << this->PreviousPlanarsPoints->size() << " planes");
+                     << this->PreviousRawEdgesPoints->size() << " edges, "
+                     << this->PreviousRawPlanarsPoints->size() << " planes");
 
     IF_VERBOSE(3, Utils::Timer::StopAndDisplay("EgoMotion : build KD tree"));
     IF_VERBOSE(3, Utils::Timer::Init("Ego-Motion : whole ICP-LM loop"));
@@ -872,13 +889,13 @@ void Slam::ComputeEgoMotion()
       optimParams.LMMaxIter = this->EgoMotionLMMaxIter;
       optimParams.LossScale = this->EgoMotionInitLossScale + icpIter * (this->EgoMotionFinalLossScale - this->EgoMotionInitLossScale) / this->EgoMotionICPMaxIter;
 
-      KeypointsRegistration optim(optimParams, UndistortionMode::NONE, this->Trelative);
+      KeypointsRegistration optim(optimParams, this->Trelative);
 
       // Loop over edges to build the point to line residuals
-      this->EgoMotionMatchingResults[EDGE] = optim.BuildAndMatchResiduals(this->CurrentEdgesPoints, kdtreePreviousEdges, Keypoint::EDGE);
+      this->EgoMotionMatchingResults[EDGE] = optim.BuildAndMatchResiduals(this->CurrentRawEdgesPoints, kdtreePreviousEdges, Keypoint::EDGE);
 
       // Loop over surfaces to build the point to plane residuals
-      this->EgoMotionMatchingResults[PLANE] = optim.BuildAndMatchResiduals(this->CurrentPlanarsPoints, kdtreePreviousPlanes, Keypoint::PLANE);
+      this->EgoMotionMatchingResults[PLANE] = optim.BuildAndMatchResiduals(this->CurrentRawPlanarsPoints, kdtreePreviousPlanes, Keypoint::PLANE);
 
       // Skip this frame if there are too few geometric keypoints matched
       totalMatchedKeypoints = this->EgoMotionMatchingResults[EDGE].NbMatches() + this->EgoMotionMatchingResults[PLANE].NbMatches();
@@ -896,7 +913,7 @@ void Slam::ComputeEgoMotion()
       PRINT_VERBOSE(4, summary.BriefReport());
 
       // Get back optimized Trelative
-      this->Trelative = optim.GetOptimizedFirstPose();
+      this->Trelative = optim.GetOptimizedPose();
 
       IF_VERBOSE(3, Utils::Timer::StopAndDisplay("  Ego-Motion : LM optim"));
 
@@ -931,10 +948,21 @@ void Slam::Localization()
   // Integrate the relative motion to the world transformation
   this->PreviousTworld = this->Tworld;
   this->Tworld = this->PreviousTworld * this->Trelative;
-  // If undistortion is enabled, init the within frame motion interpolator
-  // for rolling shutter distortion correction.
+
+  // Init undistorted keypoints clouds from raw points
+  this->CurrentEdgesPoints = this->CurrentRawEdgesPoints;
+  this->CurrentPlanarsPoints = this->CurrentRawPlanarsPoints;
+  this->CurrentBlobsPoints = this->CurrentRawBlobsPoints;
+  // Init and run undistortion if required
   if (this->Undistortion)
+  {
+    IF_VERBOSE(3, Utils::Timer::Init("Localization : initial undistortion"));
+    // Init the within frame motion interpolator time bounds
     this->InitUndistortion();
+    // Undistort keypoints clouds
+    this->RefineUndistortion();
+    IF_VERBOSE(3, Utils::Timer::StopAndDisplay("Localization : initial undistortion"));
+  }
 
   // Get keypoints from maps and build kd-trees for fast nearest neighbors search
   IF_VERBOSE(3, Utils::Timer::Init("Localization : keypoints extraction"));
@@ -1014,9 +1042,7 @@ void Slam::Localization()
     optimParams.LMMaxIter = this->LocalizationLMMaxIter;
     optimParams.LossScale = this->LocalizationInitLossScale + icpIter * (this->LocalizationFinalLossScale - this->LocalizationInitLossScale) / this->LocalizationICPMaxIter;
 
-    KeypointsRegistration optim(optimParams, this->Undistortion,
-                                this->Undistortion ? this->WithinFrameMotion.GetH0() : this->Tworld,
-                                this->WithinFrameMotion.GetH1(), this->WithinFrameMotion.GetTime0(), this->WithinFrameMotion.GetTime1());
+    KeypointsRegistration optim(optimParams, this->Tworld);
 
     // Loop over edges to build the point to line residuals
     this->LocalizationMatchingResults[EDGE] = optim.BuildAndMatchResiduals(this->CurrentEdgesPoints, kdtreeEdges, Keypoint::EDGE);
@@ -1037,7 +1063,7 @@ void Slam::Localization()
       this->Trelative = Eigen::Isometry3d::Identity();
       this->Tworld = this->PreviousTworld;
       if (this->Undistortion)
-        this->WithinFrameMotion.SetTransforms(this->Tworld, this->Tworld);
+        this->WithinFrameMotion.SetTransforms(Eigen::Isometry3d::Identity(), Eigen::Isometry3d::Identity());
       PRINT_ERROR("Not enough keypoints, Localization skipped for this frame.");
       break;
     }
@@ -1049,30 +1075,14 @@ void Slam::Localization()
     ceres::Solver::Summary summary = optim.Solve();
     PRINT_VERBOSE(4, summary.BriefReport());
 
-    // Update Tworld, WithinFrameMotion and Trelative from optimization results
-    if (this->Undistortion == UndistortionMode::NONE)
-    {
-      this->Tworld = optim.GetOptimizedFirstPose();
-    }
-    else if (this->Undistortion == UndistortionMode::APPROXIMATED)
-    {
-      // Get optimized end pose, interpolate Tworld from previous and end poses,
-      // and interpolate begin pose between previous and Tworld
-      Eigen::Isometry3d endPose = optim.GetOptimizedSecondPose();
-      double prevPoseTime = this->LogTrajectory.back().time;
-      double currPoseTime = Utils::PclStampToSec(this->CurrentFrames[0]->header.stamp);
-      double endPoseTime = currPoseTime + this->WithinFrameMotion.GetTime1();
-      this->Tworld = LinearInterpolation(this->PreviousTworld, endPose, currPoseTime, prevPoseTime, endPoseTime);
-      this->WithinFrameMotion.SetTransforms(this->InterpolateScanPose(this->WithinFrameMotion.GetTime0()), endPose);
-    }
-    else if (this->Undistortion == UndistortionMode::OPTIMIZED)
-    {
-      // Get fully optimized start and end poses, and interpolate Tworld between them
-      this->WithinFrameMotion.SetTransforms(optim.GetOptimizedFirstPose(), optim.GetOptimizedSecondPose());
-      this->Tworld = this->WithinFrameMotion(0.);
-    }
+    // Update Tworld and Trelative from optimization results
+    this->Tworld = optim.GetOptimizedPose();
     this->Trelative = this->PreviousTworld.inverse() * this->Tworld;
 
+    // Optionally refine undistortion
+    if (this->Undistortion == UndistortionMode::REFINED)
+      this->RefineUndistortion();
+
     IF_VERBOSE(3, Utils::Timer::StopAndDisplay("  Localization : LM optim"));
 
     // If no L-M iteration has been made since the last ICP matching, it means
@@ -1104,7 +1114,7 @@ void Slam::Localization()
 //-----------------------------------------------------------------------------
 void Slam::UpdateMapsUsingTworld()
 {
-  // it would be nice to add the point from the frame directly to the map
+  // Helper to transform points to WORLD coordinates and add them to map
   auto updateMap = [this](std::shared_ptr<RollingGrid> map, PointCloud::Ptr baseFrame, PointCloud::Ptr worldFrame)
   {
     // Transform keypoints to WORLD coordinates
@@ -1112,12 +1122,8 @@ void Slam::UpdateMapsUsingTworld()
     worldFrame->points.reserve(baseFrame->size());
     worldFrame->header = baseFrame->header;
     worldFrame->header.frame_id = this->WorldFrameId;
-    if (this->Undistortion)
-      for (const Point& p : *baseFrame)
-        worldFrame->push_back(Utils::TransformPoint(p, this->WithinFrameMotion(p.time)));
-    else
-      for (const Point& p : *baseFrame)
-        worldFrame->push_back(Utils::TransformPoint(p, this->Tworld));
+    for (const Point& p : *baseFrame)
+      worldFrame->push_back(Utils::TransformPoint(p, this->Tworld));
     // Add new keypoints to rolling grid
     map->Add(worldFrame);
   };
@@ -1144,10 +1150,10 @@ void Slam::LogCurrentFrameState(double time, const std::string& frameId)
     // Save current frame data to buffer
     this->LogTrajectory.emplace_back(this->Tworld, time, frameId);
     this->LogCovariances.emplace_back(Utils::Matrix6dToStdArray36(this->LocalizationUncertainty.Covariance));
-    this->LogEdgesPoints.emplace_back(this->CurrentEdgesPoints, this->LoggingStorage);
-    this->LogPlanarsPoints.emplace_back(this->CurrentPlanarsPoints, this->LoggingStorage);
+    this->LogEdgesPoints.emplace_back(this->CurrentRawEdgesPoints, this->LoggingStorage);
+    this->LogPlanarsPoints.emplace_back(this->CurrentRawPlanarsPoints, this->LoggingStorage);
     if (!this->FastSlam)
-      this->LogBlobsPoints.emplace_back(this->CurrentBlobsPoints, this->LoggingStorage);
+      this->LogBlobsPoints.emplace_back(this->CurrentRawBlobsPoints, this->LoggingStorage);
 
     // If a timeout is defined, forget too old data
     if (this->LoggingTimeout > 0)
@@ -1202,32 +1208,31 @@ Eigen::Isometry3d Slam::InterpolateScanPose(double time)
 void Slam::InitUndistortion()
 {
   // Get 'time' field range
-  double firstFrameTime = std::numeric_limits<double>::max();
-  double lastFrameTime  = std::numeric_limits<double>::min();
+  double frameFirstTime = std::numeric_limits<double>::max();
+  double frameLastTime  = std::numeric_limits<double>::lowest();
   auto GetMinMaxTime = [&](const PointCloud::ConstPtr& cloud)
   {
     for (const Point& point: *cloud)
     {
-      firstFrameTime = std::min(firstFrameTime, point.time);
-      lastFrameTime  = std::max(lastFrameTime, point.time);
+      frameFirstTime = std::min(frameFirstTime, point.time);
+      frameLastTime  = std::max(frameLastTime, point.time);
     }
   };
   GetMinMaxTime(this->CurrentEdgesPoints);
   GetMinMaxTime(this->CurrentPlanarsPoints);
-  if (!this->FastSlam)
-    GetMinMaxTime(this->CurrentBlobsPoints);
+  GetMinMaxTime(this->CurrentBlobsPoints);
 
-  // Extrapolate first and last poses to initialize within frame motion interpolator
-  this->WithinFrameMotion.SetH0(this->InterpolateScanPose(firstFrameTime), firstFrameTime);
-  this->WithinFrameMotion.SetH1(this->InterpolateScanPose(lastFrameTime), lastFrameTime);
+  // Update interpolator timestamps and reset transforms
+  this->WithinFrameMotion.SetTimes(frameFirstTime, frameLastTime);
+  this->WithinFrameMotion.SetTransforms(Eigen::Isometry3d::Identity(), Eigen::Isometry3d::Identity());
 
   // Check time values
   if (this->WithinFrameMotion.GetTimeRange() < 1e-6)
   {
-    // If frame duration is 0, it means that the time field is constant
+    // If frame duration is 0, it means that the time field is constant and cannot be used.
+    // We reset timestamps to 0, to ensure no time offset will be used.
     PRINT_WARNING("'time' field is not properly set (constant value) and cannot be used for undistortion.");
-    this->WithinFrameMotion.SetTransforms(this->Tworld, this->Tworld);
-    this->WithinFrameMotion.SetTimes(0., lastFrameTime);
+    this->WithinFrameMotion.SetTimes(0., 0.);
   }
   else if (this->WithinFrameMotion.GetTimeRange() > 10.)
   {
@@ -1236,6 +1241,41 @@ void Slam::InitUndistortion()
   }
 }
 
+//-----------------------------------------------------------------------------
+void Slam::RefineUndistortion()
+{
+  // Get previously applied undistortion
+  Eigen::Isometry3d previousBaseBegin = this->WithinFrameMotion.GetH0();
+  Eigen::Isometry3d previousBaseEnd = this->WithinFrameMotion.GetH1();
+
+  // Extrapolate first and last poses to update within frame motion interpolator
+  Eigen::Isometry3d worldToBaseBegin = this->InterpolateScanPose(this->WithinFrameMotion.GetTime0());
+  Eigen::Isometry3d worldToBaseEnd = this->InterpolateScanPose(this->WithinFrameMotion.GetTime1());
+  Eigen::Isometry3d baseToWorld = this->Tworld.inverse();
+  Eigen::Isometry3d newBaseBegin = baseToWorld * worldToBaseBegin;
+  Eigen::Isometry3d newBaseEnd = baseToWorld * worldToBaseEnd;
+  this->WithinFrameMotion.SetTransforms(newBaseBegin, newBaseEnd);
+
+  // Init the interpolator to use to remove previous undistortion and apply updated one
+  auto transformInterpolator = this->WithinFrameMotion;
+  transformInterpolator.SetTransforms(newBaseBegin * previousBaseBegin.inverse(),
+                                      newBaseEnd * previousBaseEnd.inverse());
+
+  // Refine undistortion of keypoints clouds
+  #pragma omp parallel sections num_threads(std::min(this->NbThreads, 3))
+  {
+    #pragma omp section
+    for (Point& p : *this->CurrentEdgesPoints)
+      Utils::TransformPoint(p, transformInterpolator(p.time));
+    #pragma omp section
+    for (Point& p : *this->CurrentPlanarsPoints)
+      Utils::TransformPoint(p, transformInterpolator(p.time));
+    #pragma omp section
+    for (Point& p : *this->CurrentBlobsPoints)
+      Utils::TransformPoint(p, transformInterpolator(p.time));
+  }
+}
+
 //==============================================================================
 //   Keypoints extraction parameters setting
 //==============================================================================