Commit fabf5df0 authored by czb5793's avatar czb5793
Browse files

Apply "landmark matcher" to EKF SLAM and FastSLAM

parent fa471084
......@@ -34,9 +34,9 @@ robot:
motor:
noise:
# Standard deviation of the left wheel's velocity in meters/second while executing a motion command.
left_velocity: 0.001
left_velocity: 0.0001
# Standard deviation of the right wheel's velocity in meters/second while executing a motion command.
right_velocity: 0.001
right_velocity: 0.0001
# Configures the sensors used by the robot
sensor:
......@@ -157,15 +157,19 @@ slam:
# These are x position and y position
# Currently only supports 2
landmark_state_size: 2
# Determines whether landmark-matcher is used
# It determines whether correspondences of landmarks are given, i.e. whether the identities of landmarks are known
landmark_matcher: true
# Configures the sensor noise. The values are currently empirically chosen.
sensor_noise:
# Standard deviation of the detected distance in meters
detected_distance: 0.2
# Standard deviation of the detected distance in meters
detected_distance: 0.1
# Standard deviation of the detected angle in degrees
detected_angle: 30
ekf_slam:
# Determines whether the EKF SLAM algorithm shall be executed
enabled: false
enabled: true
# The mahalanobis distance threshold used in data association
distance_threshold: 1
# Configures the motion noise. The values are currently empirically chosen.
......
graph_based_slam_evaluation.png

35 KB | W: | H:

graph_based_slam_evaluation.png

32.1 KB | W: | H:

graph_based_slam_evaluation.png
graph_based_slam_evaluation.png
graph_based_slam_evaluation.png
graph_based_slam_evaluation.png
  • 2-up
  • Swipe
  • Onion skin
......@@ -230,9 +230,9 @@ class Supervisor:
landmark_ids = self.robot.read_landmark_matcher()
# update LAM estimations
if self.ekfslam is not None:
self.ekfslam.update(motion_command, zip(measured_distances, sensor_angles))
self.ekfslam.update(motion_command, zip(measured_distances, sensor_angles, landmark_ids))
if self.fastslam is not None:
self.fastslam.update(motion_command, zip(measured_distances, sensor_angles))
self.fastslam.update(motion_command, zip(measured_distances, sensor_angles, landmark_ids))
if self.graphbasedslam is not None:
self.graphbasedslam.update(motion_command, zip(measured_distances, sensor_angles, landmark_ids))
# update mappings
......
......@@ -33,10 +33,13 @@ class EKFSlam(Slam):
self.motion_noise = np.diag([slam_cfg["ekf_slam"]["motion_noise"]["x"],
slam_cfg["ekf_slam"]["motion_noise"]["y"],
np.deg2rad(slam_cfg["ekf_slam"]["motion_noise"]["theta"])]) ** 2
self.landmark_correspondence_given = slam_cfg["landmark_matcher"]
# The estimated combined state vector, initially containing the robot pose at the origin and no landmarks
self.mu = np.zeros((self.robot_state_size, 1))
# The state covariance, initially set to absolute certainty of the initial robot pose
self.Sigma = np.zeros((self.robot_state_size, self.robot_state_size))
# The list of landmark IDs.
self.landmark_id = list()
def get_estimated_pose(self):
"""
......@@ -86,19 +89,22 @@ class EKFSlam(Slam):
def correction_step(self, z):
"""
Update the predicted state and uncertainty using the sensor measurements.
:param z: List of sensor measurements. A single measurement is a tuple of measured distance and measured angle.
:param z: List of sensor measurements. A single measurement is a tuple of measured distance, measured angle and landmark identity.
"""
# Iterate through all sensor readings
for i, measurement in enumerate(z):
# Only execute if sensor observed landmark
if not self.supervisor.proximity_sensor_positive_detections()[i]:
continue
lm_id = self.data_association(self.mu, self.Sigma, measurement)
if not self.landmark_correspondence_given:
lm_id = self.data_association(self.mu, self.Sigma, measurement[0:2])
else:
lm_id = self.data_association_v2(self.mu, measurement[2])
nLM = self.get_n_lm(self.mu)
if lm_id == nLM: # If the landmark is new
self.add_new_landmark(measurement)
self.add_new_landmark(measurement[0:2], measurement[2])
lm = self.get_landmark_position(self.mu, lm_id)
innovation, Psi, H = self.calc_innovation(lm, self.mu, self.Sigma, measurement, lm_id)
innovation, Psi, H = self.calc_innovation(lm, self.mu, self.Sigma, measurement[0:2], lm_id)
K = (self.Sigma @ H.T) @ np.linalg.inv(Psi)
self.mu += K @ innovation
......@@ -135,7 +141,21 @@ class EKFSlam(Slam):
minid = mdist.index(min(mdist))
return minid
def add_new_landmark(self, measurement):
def data_association_v2(self, mu, id):
    """
    Associates the identity of an observed landmark with a landmark in the state vector.

    :param mu: Combined state vector containing the robot pose and all landmark positions.
    :param id: Identity of the observed landmark.
    :return: Index of the matching landmark in ``self.landmark_id``, or the current
             number of landmarks (``nLM``) if the identity is new, which callers
             interpret as "add a new landmark".
    """
    nLM = self.get_n_lm(mu)
    # Each identity is appended exactly once (only when it was not found before),
    # so at most one entry can match and returning on the first hit is safe.
    for i in range(nLM):
        if self.landmark_id[i] == id:
            return i
    # Unknown identity: signal a new landmark by returning the landmark count.
    return nLM
def add_new_landmark(self, measurement, id):
"""
Adds a new landmark.
State vector is extended using the measured position.
......@@ -149,6 +169,7 @@ class EKFSlam(Slam):
self.Sigma = np.vstack((np.hstack((self.Sigma, np.zeros((len(self.mu), L)))),
np.hstack((np.zeros((L, len(self.mu))), np.identity(L)))))
self.mu = xEstTemp
self.landmark_id.append(id)
@staticmethod
def motion_model(x, u, dt):
......
......@@ -37,6 +37,8 @@ class Particle:
self.lm = np.zeros((0, lm_state_size))
# List of landmark position covariances
self.lmP = np.zeros((0, lm_state_size))
# List of landmark ids
self.id = list()
class FastSlam(Slam):
......@@ -60,7 +62,7 @@ class FastSlam(Slam):
np.deg2rad(slam_cfg["sensor_noise"]["detected_angle"])]) ** 2
self.motion_noise = np.diag([slam_cfg["fast_slam"]["motion_noise"]["translational_velocity"],
slam_cfg["fast_slam"]["motion_noise"]["rotational_velocity"]]) ** 2
self.landmark_correspondence_given = slam_cfg["landmark_matcher"]
# Create initial list of particles
self.particles = [Particle(self.landmark_state_size) for _ in range(self.n_particles)]
......@@ -136,13 +138,16 @@ class FastSlam(Slam):
"""
# Removing the importance factors of the previous cycle
particles = self.clear_importance_factors(particles)
for i, (distance, theta) in enumerate(z):
measurement = np.asarray([distance, theta])
for i, (distance, theta, id) in enumerate(z):
measurement = np.asarray([distance, theta, id])
# Skip the measuremnt if no landmark was detected
if not self.supervisor.proximity_sensor_positive_detections()[i]:
continue
for particle in particles:
lm_id = self.data_association(particle, measurement)
if not self.landmark_correspondence_given:
lm_id = self.data_association(particle, measurement)
else:
lm_id = self.data_association_v2(particle, id)
nLM = self.get_n_lms(particle.lm)
if lm_id == nLM: # If the landmark is new
self.add_new_lm(particle, measurement)
......@@ -175,6 +180,20 @@ class FastSlam(Slam):
min_id = distances.index(min(distances))
return min_id
def data_association_v2(self, particle, id):
    """
    Associates the identity of an observed landmark with one of the particle's landmarks.

    :param particle: Particle whose landmark identity list is searched.
    :param id: Identity of the observed landmark.
    :return: Index of the matching landmark in ``particle.id``, or the current
             number of landmarks (``nLM``) if the identity is new, which callers
             interpret as "add a new landmark".
    """
    nLM = self.get_n_lms(particle.lm)
    # Each identity is appended at most once (only when it was not found before),
    # so at most one entry can match and returning on the first hit is safe.
    for i in range(nLM):
        if particle.id[i] == id:
            return i
    # Unknown identity: signal a new landmark by returning the landmark count.
    return nLM
def normalize_weight(self, particles):
"""
Normalizes the importance factors of the particles so that their sum is 1
......@@ -229,6 +248,7 @@ class FastSlam(Slam):
Gz = np.array([[measured_x, -r * measured_y],
[measured_y, r * measured_x]])
particle.lmP = np.vstack((particle.lmP, Gz @ self.sensor_noise @ Gz.T))
particle.id.append(z[2])
return particle
......@@ -258,7 +278,7 @@ class FastSlam(Slam):
# Computing the covariance of the measurement
Psi = H @ landmark_cov @ H.T + self.sensor_noise
# Computing the innovation, the difference between actual measurement and expected measurement
innovation = z.reshape(2, 1) - expected_measurement
innovation = z[0:2].reshape(2, 1) - expected_measurement
innovation[1, 0] = normalize_angle(innovation[1, 0])
landmark, landmark_cov = self.ekf_update(landmark, landmark_cov, innovation, H, Psi)
......
......@@ -113,13 +113,13 @@ class GraphBasedSLAM(Slam):
self.mu = self.motion_model(self.mu, u, self.dt) # Calculate next step
J = self.jaco_motion_model(self.mu, u, self.dt) # Calculate jacobian matrix
self.Sigma = J @ self.Sigma @ J.T + self.motion_noise # Update covariance matrix, propagate it while robot is moving
self.odom_pose = self.__update_odometry_estimation(self.odom_pose) # Update accumulative odometry estimation
#self.odom_pose = self.__update_odometry_estimation(self.odom_pose) # Update accumulative odometry estimation
self.step_counter += 1
""" Update the Graph """
if self.step_counter % self.frontend_interval == 0: # create a vertex each n steps
self.__front_end(z)
self.odom_pose = self.__reset_odometry_measurement() # reset odom_pose
#self.odom_pose = self.__reset_odometry_measurement() # reset odom_pose
num_poses, _ = self.graph.count_vertices()
if num_poses % self.optimization_interval == 0 \
......@@ -137,7 +137,7 @@ class GraphBasedSLAM(Slam):
vertex2 = PoseVertex(self.mu, self.Sigma)
self.graph.add_vertex(vertex2)
fixed_counter = 3 # count the total dimensions of the vertices added
old_landmark_id = -1
#old_landmark_id = -1
""" calculate landmark vertices """
for i, zi in enumerate(z):
# Only execute if sensor has observed landmark
......@@ -162,9 +162,6 @@ class GraphBasedSLAM(Slam):
vertex3 = LandmarkVertex(pos_lm, self.sensor_noise, zi[2])
self.graph.add_vertex(vertex3)
fixed_counter += 2
else:
# old landmark.
old_landmark_id = vertex3.id
# calculate actual measurement and information matrix
meas, info = self.__convert_pose_landmark_raw_measurement(zi)
......@@ -174,7 +171,7 @@ class GraphBasedSLAM(Slam):
meas, info = self.__convert_pose_pose_raw_measurement(vertex1.pose, vertex2.pose)
self.graph.add_edge(vertex1, vertex2, meas, info)
if self.step_counter < 50: # vertices created at the beginning are fixed while optimization
if self.step_counter < 1: # vertices created at the beginning are fixed while optimization
self.fix_hessian += fixed_counter
# if old_landmark_id != -1: # old_landmark was found
......@@ -196,7 +193,7 @@ class GraphBasedSLAM(Slam):
last_vertex = self.graph.get_last_pose_vertex()
self.mu = np.copy(last_vertex.pose) # update current state
self.Sigma = np.copy(last_vertex.sigma)
self.odom_pose = self.__reset_odometry_measurement() # Reset odometry estimation
#self.odom_pose = self.__reset_odometry_measurement() # Reset odometry estimation
def __convert_pose_landmark_raw_measurement(self, zi):
"""
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment