Commit ee257c39 authored by Jae Young Lee

Further improve Follow and make KeepLane default (available any time).

parent 1f913dc3
@@ -52,6 +52,16 @@ class KeepLane(ManeuverBase):
     def _features_dim_reduction(features_tuple):
         return extract_ego_features(features_tuple, 'v', 'v_ref', 'e_y', 'psi', 'v tan(psi/L)', 'theta', 'lane', 'acc', 'psi_dot')
+
+    @property
+    def initiation_condition(self):
+        """A virtual function (property) from ManeuverBase.
+        As KeepLane is the default maneuver, it has to be available to be chosen at any time, in any state and condition
+        (refer to initiation_condition of ManeuverBase for the usual case).
+        :returns: True.
+        """
+        return True
+
     @property
     def extra_termination_condition(self):
         if self._enable_low_level_training_properties:  # activated only for the low-level training.
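
For context, here is a minimal sketch (not part of this commit) of why an always-true initiation_condition makes KeepLane the default maneuver: a selector can fall back to it whenever no other maneuver can be initiated. Only ManeuverBase, KeepLane, and initiation_condition appear in the code above; the choose_maneuver helper and the maneuver list are hypothetical.

# Hypothetical selection loop, assuming each maneuver exposes the
# initiation_condition property shown above.
def choose_maneuver(maneuvers, default):
    """Return the first maneuver whose initiation condition holds, else the default."""
    for m in maneuvers:
        if m.initiation_condition:
            return m
    # KeepLane's initiation_condition is always True, so falling back to it
    # never violates its own initiation condition.
    return default

# Usage sketch: active = choose_maneuver([follow, change_lane], default=keep_lane)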
@@ -383,6 +393,7 @@ class ChangeLane(ManeuverBase):
                                     'psi_dot')
 
 
+# TODO: In the low-level training of Follow, sometimes the initial state does not satisfy its initiation condition.
 class Follow(ManeuverBase):
 
     _reward_in_goal = None
@@ -422,7 +433,6 @@ class Follow(ManeuverBase):
         self.env._terminate_in_goal = False
         self._penalty_for_out_of_range = 200
         self._penalty_for_change_lane = 200
-        self._reward_in_goal = 200
         self._enable_low_level_training_properties = True
         self._extra_action_weights_flag = True
@@ -436,7 +446,6 @@ class Follow(ManeuverBase):
         self._set_v_ref()
 
     def _set_v_ref(self):
-        #if self._enable_low_level_training_properties:
         self._target_veh_i, _ = self.env.get_V2V_distance()
 
         if self._target_veh_i is not None:
@@ -460,23 +469,18 @@ class Follow(ManeuverBase):
 
     @property
     def extra_termination_condition(self):
-        # APs = self.env.ego.APs
         if self._target_veh_i is None:
             return False
 
         if self._enable_low_level_training_properties:  # activated only for the low-level training.
-            if self.env.ego.APs['has_stopped_in_stop_region'] and \
-                    self.env.ego.APs['in_stop_region']:
-                self._extra_r_terminal = None
-                return True
-
-            elif (rd.speed_limit / 5 < self._v_ref) and \
+            APs = self.env.ego.APs
+            if (rd.speed_limit / 5 < self._v_ref) and \
                     (self.env.ego.v < self._v_ref / 2) and \
                     self.env.ego.acc < 0 and \
-                    not self.env.ego.APs['veh_ahead_stopped_now']:
+                    not APs['veh_ahead_stopped_now'] and \
+                    not APs['in_stop_region']:
                 self._extra_r_terminal = -100
                 return True
 
             else:
                 self._extra_r_terminal = None
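
As a reading aid (not in the commit), the new low-level termination check can be written as a standalone predicate. The attribute names (ego.v, ego.acc, ego.APs, rd.speed_limit) are taken from the diff above; the function name and signature are made up for illustration.

# Hypothetical rewrite of the added condition, for readability only.
def should_penalize_slow_follow(ego, v_ref, speed_limit):
    """True when the ego is needlessly slow while following: the reference speed
    is non-trivial (> speed_limit / 5), the ego drives below half of it while
    decelerating, and neither a stopped vehicle ahead nor a stop region
    explains the slowdown."""
    APs = ego.APs
    return (speed_limit / 5 < v_ref
            and ego.v < v_ref / 2
            and ego.acc < 0
            and not APs['veh_ahead_stopped_now']
            and not APs['in_stop_region'])

# When this holds, extra_termination_condition sets self._extra_r_terminal = -100
# and terminates the maneuver; otherwise no extra terminal reward is applied.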
@@ -493,4 +497,4 @@ class Follow(ManeuverBase):
             return ego_features + extract_other_veh_features(
                 features_tuple, self._target_veh_i, 'rel_x', 'rel_y', 'v', 'acc')
         else:
-            return ego_features + (0.0, 0.0, 0.0, 0.0)
\ No newline at end of file
+            return ego_features + (0.0, 0.0, 0.0, 0.0)