Merge pull request #55 from BerkeleyAutomation/osc_fix
Osc fix
mdlaskey committed Apr 26, 2018
2 parents 3fcf9bb + 4177669 commit 91571b4
Showing 6 changed files with 16 additions and 14 deletions.
1 change: 1 addition & 0 deletions .coverage

Large diffs are not rendered by default.

@@ -59,7 +59,7 @@ def eval_policy(self, action,state,simplified = False):
         if not (isinstance(action,VelocityAction) or action == None):
             raise Exception('Action is not of type VelocityAction')
 
-        if self.not_initiliazed:
+        if not state.dynamic_objects["controlled_cars"][str(self.agent_num)].trajectory:
             geoplanner = GeometricPlanner(state, inter_point_d=40.0, planning_time=0.1)
 
             geoplanner.plan_for_agents(state,type_of_agent='controlled_cars',agent_num=self.agent_num)
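To make the intent of the changed guard easier to see, here is a minimal, self-contained sketch of the pattern it switches to: plan lazily whenever the controlled car has no trajectory yet, instead of tracking a separate not_initiliazed flag. Car, plan_geometric_path, and ensure_planned are illustrative stand-ins, not the repository's API.

# Illustrative sketch only: Car and plan_geometric_path are hypothetical
# stand-ins for the controlled-car object and the GeometricPlanner call above.
class Car:
    def __init__(self):
        self.trajectory = None            # empty/falsy until a plan exists

def plan_geometric_path(car):
    car.trajectory = [(0.0, 0.0), (1.0, 0.0)]   # pretend geometric plan

def ensure_planned(car):
    # Same falsy check as the new guard: plan only if no trajectory yet.
    if not car.trajectory:
        plan_geometric_path(car)

car = Car()
ensure_planned(car)     # first call plans
ensure_planned(car)     # later calls are no-ops
print(car.trajectory)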
10 changes: 3 additions & 7 deletions gym_urbandriving/agents/supervisor/velocity_supervisor.py
@@ -60,13 +60,9 @@ def eval_policy(self, state,simplified = False):
             self.not_initiliazed = False
 
 
-        if not simplified:
-            target_vel = VelocityMPCPlanner().plan(state, self.agent_num, type_of_agent = "controlled_cars")
-        else:
-            target_vel = state.dynamic_objects['controlled_cars'][str(self.agent_num)].trajectory.get_vel()
-            ###IF END OF THE TRAJECTORY THE CAR SHOULD KEEP GOING
-            if target_vel == None:
-                return VelocityAction(4.0)
+
+        target_vel = VelocityMPCPlanner().plan(state, self.agent_num, type_of_agent = "controlled_cars")
+
 
 
 
12 changes: 7 additions & 5 deletions gym_urbandriving/planning/vel_mpc_planner.py
@@ -1,6 +1,7 @@
 from copy import deepcopy
 import gym_urbandriving as uds
 from gym_urbandriving.actions import VelocityAction
+import IPython
 
 class VelocityMPCPlanner:
     def __init__(self, lookahead=10):
@@ -13,22 +14,23 @@ def plan(self, state, agent_num,type_of_agent = "background_cars"):
         testing_env = uds.UrbanDrivingEnv(init_state=state_copy,
                                           randomize=False)
 
-        num_controlled_cars = state_copy.agent_config['controlled_cars']
+        num_controlled_cars = state_copy.agent_config["controlled_cars"]
         empty_actions = [None]*num_controlled_cars
 
         state_copy = testing_env.current_state
         if state_copy.dynamic_objects[type_of_agent][str(agent_num)].trajectory.stopped:
 
             state_copy.dynamic_objects[type_of_agent][str(agent_num)].trajectory.set_vel(VelocityAction(4.0))
             for t in range(self.lookahead):
 
                 state_copy, reward, done, info_dict = testing_env._step(empty_actions,background_simplified = True)
                 state_copy = state_copy[0]
                 done = state_copy.collides_any_dynamic(agent_num,type_of_agent = type_of_agent)
                 if done:
                     break
-            if not done:
-                return VelocityAction(4.0)
-            return VelocityAction(0.0)
+            if done:
+                return VelocityAction(0.0)
+            return VelocityAction(4.0)
 
 
         elif not state_copy.dynamic_objects[type_of_agent][str(agent_num)].trajectory.stopped:
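A minimal, runnable sketch of the corrected decision this hunk makes: roll the copied state forward for lookahead steps and command a stop only if a collision is predicted, otherwise keep the nominal speed. rollout_one_step and collides are hypothetical stand-ins for the testing_env._step and collides_any_dynamic calls above, and the stripped-down VelocityAction only mimics the real class's get_value.

# Sketch under stated assumptions; not the repository's implementation.
class VelocityAction:
    def __init__(self, value):
        self._value = value
    def get_value(self):
        return self._value

def plan_velocity(state, rollout_one_step, collides, lookahead=10):
    # Simulate the next `lookahead` steps on a copy of the state.
    done = False
    for _ in range(lookahead):
        state = rollout_one_step(state)   # stand-in for testing_env._step(...)
        done = collides(state)            # stand-in for collides_any_dynamic(...)
        if done:
            break
    if done:
        return VelocityAction(0.0)        # predicted collision: stop
    return VelocityAction(4.0)            # window is clear: keep going

# Toy usage: the "state" is a position that collides once it passes 3.
action = plan_velocity(0, rollout_one_step=lambda s: s + 1,
                       collides=lambda s: s > 3)
print(action.get_value())                 # 0.0 -- a collision was predicted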
2 changes: 2 additions & 0 deletions gym_urbandriving/utils/trajectory.py
@@ -41,6 +41,7 @@ def __init__(self, target='car', mode='xyvacst', fsm=0):
         else:
             raise ValueError()
         self.fsm = fsm
+        print("MADE NEW TRAJECTORY")
         self.stopped = True
 
     def get_matrix(self):
@@ -91,6 +92,7 @@ def set_vel(self, target_vel):
         v_index = self.mode.index('v')
         for i in range(self.npoints()):
             self._trajectory[i][v_index] = target_vel.get_value()
+
         if target_vel.get_value() == 0.0:
             self.stopped = True
         else:
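For context on the stopped flag that VelocityMPCPlanner branches on, a stripped-down sketch of the bookkeeping in Trajectory.set_vel; this toy Trajectory keeps only a velocity list rather than the full waypoint matrix, so it is illustrative rather than the repository's class.

# Stripped-down sketch; the real Trajectory stores full waypoint rows.
class Trajectory:
    def __init__(self, velocities):
        self._vels = list(velocities)
        self.stopped = True               # new trajectories start out stopped

    def set_vel(self, target_vel):
        for i in range(len(self._vels)):
            self._vels[i] = target_vel

        # Zero target velocity means stopped; anything else clears the flag.
        if target_vel == 0.0:
            self.stopped = True
        else:
            self.stopped = False

traj = Trajectory([0.0, 0.0, 0.0])
traj.set_vel(4.0)
print(traj.stopped)                       # False -- the car is moving again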
3 changes: 2 additions & 1 deletion setup.py
@@ -10,7 +10,8 @@
                       "pygame",
                       "scikit-image",
                       "opencv-python",
-                      "codecov"],
+                      "codecov",
+                      "ipython"],
     include_package_data=True,
     packages = find_packages()

0 comments on commit 91571b4
