Commit fa471084 authored by czb5793's avatar czb5793
Browse files

Add a new class "Landmark Matcher", which provides a correct data-association...

Add a new class "Landmark Matcher", which provides correct data association through landmark ids. In other words, we now assume that the data associations are known. Wrong data associations strongly affect the SLAM update and can lead to wrong state estimations, as presented in the previous work.
parent f1431125
# Discrete time that elapses during one simulation cycle
period: 0.05
period: 0.1
# Configures the robot
# These values are inspired by the Khepera III robot
......@@ -49,7 +49,7 @@ robot:
# Value that is read at minimum range
max_read_value: 3960
# Standard deviation of the distance from the sensors to the detected landmark.
noise: 0.001
noise: 0.0001
# Specifies the poses and number of sensors
# Each pose is composed of x position, y position and angle in degrees
poses: [[-0.038, 0.048, 128],
......@@ -142,10 +142,10 @@ control:
# If a sensor measures a distance smaller than the danger distance, the robot immediately starts moving into the opposite direction
danger_distance: 0.06
# If a sensor measures a distance smaller than the caution distance, the robot will follow the wall of the obstacle
# Set to danger_distance to disable wall following, since the map contains small, circle-like objects, where wall following can lead to looping around an object
caution_distance: 0.06
# Set to danger_distance to disable wall following, since the map contains small, circle-like objects, where wall following can lead to looping around an object
caution_distance: 0.8 # 0.06
# Criterion for stopping the following of a wall
progress_epsilon: 0.05
progress_epsilon: 0.05 # 0.05
# Configures the SLAM system
slam:
......@@ -182,7 +182,7 @@ slam:
# The mahalanobis distance threshold used in data association
distance_threshold: 0.125
# The number of used particles
n_particles: 100
n_particles: 80
# Configures the motion noise. The values are currently empirically chosen.
motion_noise:
# Standard deviation of the motion command's translational velocity in m/s.
......@@ -194,7 +194,7 @@ slam:
# Determines whether the Graph-based SLAM algorithm shall be executed
enabled: true
# The euclidean distance threshold used in data association
distance_threshold: 0.2
distance_threshold: 0.125
# The number interval of pose-vertices added that the graph optimization is executed.
optimization_interval: 100
# The timestep interval of executing the frontend part.
......@@ -209,8 +209,8 @@ slam:
theta: 1.0 #0.09
# Configures the sensor noise. The values are currently empirically chosen.
sensor_noise:
x: 0.2
y: 0.2
x: 0.02
y: 0.02
# Configures the evaluation of the SLAM algorithms
evaluation:
......@@ -222,13 +222,14 @@ slam:
# Configures the 2D grid map of the occupancy grid mapping algorithm
mapping:
# Determines whether the mapping algorithm shall be executed
enabled: true
# Width of the map in meters
gridmap:
# Width of the map in meters
width: 5
# Height of the map in meters
height: 5
height: 6
# Number of pixels per meter. The GUI has problem while using grid map of high resolution.
resolution: 10
# offset the origin in the grid map
......@@ -236,10 +237,12 @@ slam:
# offset in meters in horizontal direction
x: 2.5
# offset in meters in vertical direction
y: 2.5
y: 3
path_planning:
# Determines whether the path planning algorithm shall be executed
enabled: true
# Determines how important the heuristic term is.
heuristic_weight: 1.0
......
File mode changed from 100755 to 100644
......@@ -21,7 +21,6 @@ import utils.geometrics_util as geometrics
from simulation.exceptions import CollisionException
class Physics:
def __init__(self, world):
......@@ -71,9 +70,10 @@ class Physics:
for sensor in sensors:
dmin = float('inf')
solid_id = -1
detector_line = sensor.detector_line
for solid in solids:
for i, solid in enumerate(solids):
if solid is not robot: # assume that the sensor does not detect its own robot
solid_polygon = solid.global_geometry
......@@ -85,9 +85,12 @@ class Physics:
if intersection_exists and d < dmin:
dmin = d
solid_id = i
# if there is an intersection, update the sensor with the new delta value
if dmin != float('inf'):
sensor.detect(dmin)
robot.landmark_matcher.detect(sensor.id, solid_id)
else:
sensor.detect(None)
robot.landmark_matcher.detect(sensor.id, -1)
......@@ -22,7 +22,7 @@ from models.Polygon import *
from robot.sensor.ProximitySensor import *
from supervisor.Supervisor import *
from robot.sensor.WheelEncoder import *
from robot.sensor.LandmarkMatcher import *
class Robot: # Khepera III robot
......@@ -50,10 +50,12 @@ class Robot: # Khepera III robot
# IR sensors
self.ir_sensors = []
for _pose in robot_cfg["sensor"]["poses"]:
for id, _pose in enumerate(robot_cfg["sensor"]["poses"]):
ir_pose = Pose(_pose[0], _pose[1], radians(_pose[2]))
self.ir_sensors.append(
ProximitySensor(self, ir_pose, robot_cfg["sensor"]))
ProximitySensor(self, ir_pose, robot_cfg["sensor"], id))
# Landmark Matcher
self.landmark_matcher = LandmarkMatcher()
# dynamics
self.dynamics = DifferentialDriveDynamics(self.wheel_radius, self.wheel_base_length)
......
......@@ -44,6 +44,13 @@ class RobotSupervisorInterface:
return [e.read() for e in self.robot.wheel_encoders]
def read_landmark_matcher(self):
    """
    Read the landmark id matched to each IR sensor.

    :return: List of landmark ids, ordered like ``self.robot.ir_sensors``.
             A sensor with no recorded detection yields -1 (the same
             sentinel Physics writes for "no landmark hit").
    """
    matcher = self.robot.landmark_matcher.read()
    # Use .get with a -1 default so reading before the first physics
    # update (empty matcher dict) does not raise KeyError.
    return [matcher.get(sensor.id, -1) for sensor in self.robot.ir_sensors]
def set_wheel_drive_rates(self, v_l, v_r):
"""
Specify the wheel velocities
......
"""
This sensor provides a set of correspondences of landmarks.
To apply this class, we assume that the data associations of landmarks are known.
A "landmark matcher" could be a camera that extracts features from the environment;
scan-matching algorithms may also be applied to match those features.
"""
from robot.sensor.Sensor import *
class LandmarkMatcher(Sensor):
    """Sensor providing known data associations: sensor id -> landmark id."""

    def __init__(self):
        # Maps each sensor id to the id of the landmark it last detected
        # (-1 when that sensor saw nothing).
        self.matcher = {}

    def detect(self, sensor_id, landmark_id):
        """Record that the given sensor currently observes the given landmark."""
        self.matcher[sensor_id] = landmark_id

    def read(self):
        """
        :return: The current mapping of sensor ids to landmark ids
        """
        return self.matcher
......@@ -29,16 +29,17 @@ class ProximitySensor(Sensor):
def __init__(self, robot, # robot this sensor is attached to
placement_pose,
# pose of this sensor relative to the robot (NOTE: normalized on robot located at origin and with theta 0, i.e. facing east )
sensor_config):
sensor_config, id):
"""
Initializes a ProximitySensor object
:param robot: THe underlying robot
:param placement_pose: The pose at which the sensor is placed relative to the robot
:param sensor_config: The sensor configurations
:param id: The sensor ID
"""
# bind the robot
self.robot = robot
self.id = id # sensor ID
# pose attributes
self.placement_pose = placement_pose # pose of this sensor relative to the robot
self.pose = Pose(0.0, 0.0, 0.0) # global pose of this sensor
......
......@@ -87,4 +87,4 @@ class World:
"""
:return: All solids in the world
"""
return self.robots + self.obstacles
return self.obstacles + self.robots
......@@ -227,13 +227,14 @@ class Supervisor:
motion_command = np.array([[v], [yaw]])
measured_distances = self.proximity_sensor_distances_from_robot_center
sensor_angles = [pose.theta for pose in self.proximity_sensor_placements]
landmark_ids = self.robot.read_landmark_matcher()
# update LAM estimations
if self.ekfslam is not None:
self.ekfslam.update(motion_command, zip(measured_distances, sensor_angles))
if self.fastslam is not None:
self.fastslam.update(motion_command, zip(measured_distances, sensor_angles))
if self.graphbasedslam is not None:
self.graphbasedslam.update(motion_command, zip(measured_distances, sensor_angles))
self.graphbasedslam.update(motion_command, zip(measured_distances, sensor_angles, landmark_ids))
# update mappings
if self.ekfslam_mapping is not None:
self.ekfslam_mapping.update(zip(measured_distances, sensor_angles))
......
......@@ -108,7 +108,7 @@ class GraphBasedSLAM(Slam):
"""
Executes an update cycle of the SLAM algorithm
:param u: Motion command, A single item is a vector of [[translational_velocity], [angular_velocity]]
:param z: List of measurements. A single item is a tuple of (range, angle)
:param z: List of measurements. A single item is a tuple of (range, angle, landmark_id)
"""
self.mu = self.motion_model(self.mu, u, self.dt) # Calculate next step
J = self.jaco_motion_model(self.mu, u, self.dt) # Calculate jacobian matrix
......@@ -143,7 +143,9 @@ class GraphBasedSLAM(Slam):
# Only execute if sensor has observed landmark
if not self.supervisor.proximity_sensor_positive_detections()[i]:
continue
pos_lm = self.calc_landmark_position(self.mu, zi) # calculate x-y position in world coordinate
pos_lm = self.calc_landmark_position(self.mu, zi) # calculate x-y position from range-bearing in world coordinate
""" Data Association 1.0 """
"""
min_index, vertices_lm = self.__data_association(pos_lm)
N = len(vertices_lm)
if min_index == N: # new landmark was found
......@@ -153,22 +155,33 @@ class GraphBasedSLAM(Slam):
else:
vertex3 = vertices_lm[min_index] # old landmark.
old_landmark_id = vertex3.id
"""
""" Data Association 2.0 """
vertex3 = self.__data_association(zi)
if vertex3 == None: # this landmark has not been detected in the past
vertex3 = LandmarkVertex(pos_lm, self.sensor_noise, zi[2])
self.graph.add_vertex(vertex3)
fixed_counter += 2
else:
# old landmark.
old_landmark_id = vertex3.id
# calculate actual measurement and information matrix
meas, info = self.__convert_pose_landmark_raw_measurement(zi)
self.graph.add_edge(vertex2, vertex3, meas, info)
""" calculate pose-pose edge """
meas, info = self.__convert_pose_pose_raw_measurement(vertex1.pose, self.odom_pose)
meas, info = self.__convert_pose_pose_raw_measurement(vertex1.pose, vertex2.pose)
self.graph.add_edge(vertex1, vertex2, meas, info)
if self.step_counter < 50: # vertices created at the beginning are fixed while optimization
self.fix_hessian += fixed_counter
if old_landmark_id != -1: # old_landmark was found
self.backend_counter -= 1
if self.backend_counter <= 0:
self.backend_counter = 10
self.__back_end()
# if old_landmark_id != -1: # old_landmark was found
# self.backend_counter -= 1
# if self.backend_counter <= 0:
# self.backend_counter = 10
# self.__back_end()
def __back_end(self):
"""
......@@ -227,27 +240,36 @@ class GraphBasedSLAM(Slam):
return err
# def __data_association(self, zi):
# """
# Data association based on euclidean distance.
# explanation of the return:
# - zi is a measurement of a new landmark, if min_index == N
# - zi is a measurement of an old landmark, if min_index < N
# :param zi: a measurement of landmark in world coordinate. [x, y].T
# :return:
# min_index: index of the nearest landmark in list vertices_lm
# vertices_lm: index of the vertex of this landmark
# """
# lms, vertices_lm = self.get_estimated_landmark_position() # find all landmark vertices from the list
# N = len(lms) # number of the known landmarks
# if N == 0: # there were no landmarks being found
# return N, []
# else:
# lms = np.array(lms).T # xy position of landmarks in world coordinate
# distances = np.linalg.norm(lms - zi, axis=0) # euclidean distances
# distances = np.append(distances, self.min_distance_threshold)
# min_index = np.argmin(distances)
# return min_index, vertices_lm
def __data_association(self, zi):
    """
    Data association via known landmark ids.

    Each measurement carries the ground-truth id of the observed landmark
    (zi[2]), so association is an exact id lookup instead of the previous
    nearest-neighbour search on euclidean distance.

    :param zi: A measurement tuple of (range, angle, landmark_id)
    :return: The landmark vertex whose landmark_id equals zi[2], or
             None if this landmark has not been observed before
    """
    lm_id = zi[2]
    # Landmark ids are unique, so the first match is the only match.
    for vertex in self.graph.get_estimated_landmark_vertices():
        if vertex.landmark_id == lm_id:
            return vertex
    return None
def get_estimated_landmark_position(self):
lm_pos = []
......
......@@ -37,7 +37,7 @@ class PoseVertex(Vertex):
class LandmarkVertex(Vertex):
def __init__(self, pose, sigma, landmark_id):
    """
    :param pose: landmark position. [x, y].T
    :param sigma: covariance matrix
    :param landmark_id: unique id of the landmark, used for known
                        data association in the SLAM frontend
    """
    Vertex.__init__(self, pose, sigma, None)
    assert pose.shape[0] == 2
    assert sigma.shape == (2, 2)
    # Ground-truth landmark id enabling exact data association.
    self.landmark_id = landmark_id
def __str__(self):
......
  • Add a new class "Landmark Matcher", which provides a correct data-association through landmark-ids. In other words, we assume now that the data-association are known. Because wrong data association affect strongly the slam update, and it can lead to wrong state estimations, as presented in the previous work. For this reason, we assume that the corresponding landmark ids are available for the slam.

    Why did I make this assumption? Because there are only 12 IR sensors available on the sobot-rimulator, which is not enough, so it is not always easy to implement a correct data association (maybe there are some smart ways to do that, but I can't find any yet). Graph-based SLAM is offline, so it is always affected by accumulating error; one way to reduce the influence of error is the data association. However, the tricky thing is that accumulating error also makes data association harder. Hopefully more IR sensors can be equipped, but that may not be feasible in reality; alternatively, we could equip the robot with a camera or a LiDAR, which would enable a better data association. But I don't want to focus on the data-association problem currently, so I simply assume that data association is known.

Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment