Skip to content

Commit

Permalink
Merge pull request #901 from tue-robotics/rwc2019_challenge_hand_me_that
Browse files Browse the repository at this point in the history
Rwc2019 challenge hand me that
  • Loading branch information
MatthijsBurgh committed Dec 14, 2019
2 parents 06b057f + 84600d3 commit acdca08
Show file tree
Hide file tree
Showing 13 changed files with 718 additions and 0 deletions.
12 changes: 12 additions & 0 deletions challenge_hand_me_that/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Catkin build configuration for the "hand me that" challenge package.
cmake_minimum_required(VERSION 2.8.3)
project(challenge_hand_me_that)

find_package(catkin REQUIRED)

# Install the Python package declared in setup.py (src/challenge_hand_me_that).
catkin_python_setup()

catkin_package()

# Run the nose tests found in the test/ directory when testing is enabled.
if (CATKIN_ENABLE_TESTING)
catkin_add_nosetests(test)
endif()
28 changes: 28 additions & 0 deletions challenge_hand_me_that/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Challenge hand me that

_Responsible: Rein_

## Scenario (RWC2019)

1. Start:

```
Robot starts at the entrance door and drives to the operator waypoint
```

2. Main loop

```
Robot starts a routine for detecting the pointing pose of the operator. Based on the pointing pose, we raytrace ED and
determine the target furniture. This furniture will be inspected and we will point at the object that we have detected.
Robot will return to the operator waypoint [loop]
```

## Testing / Running

```
robot-start
robot-challenge-hand-me-that
rosrun challenge_hand_me_that challenge_hand_me_that
```
23 changes: 23 additions & 0 deletions challenge_hand_me_that/package.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
<?xml version="1.0"?>
<package format="2">
<name>challenge_hand_me_that</name>
<version>0.0.0</version>
<description>
A guest at the party speaks English, but with only a limited vocabulary. The robot will assist them in obtaining things that they gesture for.
</description>

<maintainer email="reinzor@gmail.com">Rein Appeldoorn</maintainer>

<license>TODO</license>

<buildtool_depend>catkin</buildtool_depend>

<exec_depend>geometry_msgs</exec_depend>
<exec_depend>robocup_knowledge</exec_depend>
<exec_depend>robot_skills</exec_depend>
<exec_depend>robot_smach_states</exec_depend>
<exec_depend>rospy</exec_depend>
<exec_depend>smach</exec_depend>
<exec_depend>tf</exec_depend>
<exec_depend>tf_conversions</exec_depend>
</package>
21 changes: 21 additions & 0 deletions challenge_hand_me_that/scripts/challenge_hand_me_that
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
#!/usr/bin/env python
#
# Copyright (c) 2019, TU/e Robotics, Netherlands
# All rights reserved.
#
# \author Rein Appeldoorn

from __future__ import print_function

import os

import rospy

from robot_smach_states import startup
from challenge_hand_me_that import setup_statemachine


if __name__ == '__main__':
    # The node (and challenge) name is derived from this executable's filename.
    node_name = os.path.basename(__file__)
    rospy.init_node(node_name)
    startup(setup_statemachine, challenge_name=node_name)
16 changes: 16 additions & 0 deletions challenge_hand_me_that/setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
#!/usr/bin/env python
#
# Copyright (c) 2019, TU/e Robotics, Netherlands
# All rights reserved.
#
# \author Rein Appeldoorn

from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

# Derive the distutils setup arguments from package.xml and install the
# Python package located under src/.
setup_args = generate_distutils_setup(
    packages=['challenge_hand_me_that'],
    package_dir={'': 'src'},
)

setup(**setup_args)
4 changes: 4 additions & 0 deletions challenge_hand_me_that/src/challenge_hand_me_that/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from .get_furniture_from_operator_pose import GetFurnitureFromOperatorPose
from .hand_me_that import setup_statemachine
from .identify_object import IdentifyObject
from .inspect_furniture_entity import InspectFurniture
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
#
# Copyright (c) 2019, TU/e Robotics, Netherlands
# All rights reserved.
#
# \author Rein Appeldoorn

import os

import rospy
from geometry_msgs.msg import PoseStamped
from robot_skills import Hero
from robot_smach_states import Entity, VariableDesignator
from robot_smach_states.util.designators import is_writeable
from rospy import ServiceException
from smach import StateMachine, cb_interface, CBState
from std_msgs.msg import Header
from ed_sensor_integration.srv import RayTraceResponse


# Person detection of the operator; shared between the CBStates of
# GetFurnitureFromOperatorPose and reset at the start of every attempt.
OPERATOR = None

# Entity ids a ray-trace hit may resolve to; anything else is rejected as
# "not furniture". Tuned for the RWC2019 arena.
all_possible_furniture = ['kitchen_cabinet',
                          'kitchen_table',
                          'island',
                          'sink',
                          'dishwasher',
                          'desk',
                          'coffee_table',
                          # 'couch',
                          # 'armchair',
                          'display_cabinet',
                          'sideboard']


class GetFurnitureFromOperatorPose(StateMachine):
    """Detect the operator's pointing pose and resolve the furniture pointed at.

    Flow: PREPARE_OPERATOR (reset ED and head, instruct the operator while
    mirroring the camera image on the display) -> GET_OPERATOR (wait for a
    person 1.5-2.5 m in front of the camera who is pointing) -> GET_FURNITURE
    (ray-trace the pointing pose in ED; if the hit is not a known piece of
    furniture, go back to GET_OPERATOR).

    On outcome 'done', ``furniture_designator`` has been written with the ED
    entity of the furniture and the 'laser_dot' user data key holds the
    ray-trace intersection point (PointStamped).
    """

    def __init__(self, robot, furniture_designator):
        # type: (Hero, VariableDesignator) -> None
        """
        :param robot: robot API object
        :param furniture_designator: writeable designator that will be filled
            with the furniture entity the operator pointed at
        """
        StateMachine.__init__(self, outcomes=['done'], output_keys=["laser_dot"])

        is_writeable(furniture_designator)

        def _show_view(timeout=5):
            # Show the current camera image on the robot's display so the
            # operator can see what the robot sees; returns the sensor data
            # needed by detect_person_3d.
            rgb, depth, depth_info = robot.perception.get_rgb_depth_caminfo()
            robot.hmi.show_image_from_msg(rgb, timeout)
            return rgb, depth, depth_info

        @cb_interface(outcomes=['done'])
        def _prepare_operator(_):
            global OPERATOR
            OPERATOR = None

            robot.ed.reset()
            robot.head.reset()
            robot.speech.speak("Let's point, please stand in front of me!", block=False)

            # Keep refreshing the display while the operator gets into position.
            for _i in range(9):
                _show_view(timeout=2)
                rospy.sleep(0.4)
            _show_view(timeout=2)

            robot.speech.speak("Please point at the object you want me to hand you", block=False)
            for _i in range(2):
                rospy.sleep(0.4)
                _show_view(timeout=2)
            rospy.sleep(0.4)

            # Count down so the operator knows when to hold the pointing pose.
            for word in ["Three", "Two", "One"]:
                _show_view(timeout=1)
                robot.speech.speak(word)

            return 'done'

        @cb_interface(outcomes=['done'])
        def _get_operator(_):
            global OPERATOR

            def _is_operator(person):
                # The operator's full body must be in view: between 1.5 m and
                # 2.5 m in front of the camera, and the person must be pointing.
                if person.position.z > 2.5:
                    robot.speech.speak("Please stand in my view with your full body")
                    return False

                if person.position.z < 1.5:
                    robot.speech.speak("Please stand in my view with your full body")
                    return False

                if "is_pointing" not in person.tags:
                    robot.speech.speak("Please point with your arm stretched")
                    return False

                return True

            while not rospy.is_shutdown() and OPERATOR is None:
                persons = robot.perception.detect_person_3d(*_show_view())
                if persons:
                    # The closest person (smallest camera-frame z) is the candidate.
                    persons = sorted(persons, key=lambda x: x.position.z)
                    person = persons[0]
                    if _is_operator(person):
                        OPERATOR = person

            if OPERATOR is None:
                # rospy was shut down before an operator was found; bail out
                # instead of dereferencing None below.
                return 'done'

            # robot.speech.speak("I see an operator at %.2f meter in front of me" % OPERATOR.position.z)
            rospy.loginfo("I see an operator at %.2f meter in front of me" % OPERATOR.position.z)

            return 'done'

        @cb_interface(outcomes=['done', 'failed'], output_keys=['laser_dot'])
        def _get_furniture(user_data):
            global OPERATOR

            final_result = None
            while not rospy.is_shutdown() and final_result is None:
                result = None
                while not rospy.is_shutdown() and result is None:
                    try:
                        # Transform the pointing pose to the map frame; the
                        # stamp is set slightly in the past so TF already has
                        # the transform available.
                        map_pose = robot.tf_listener.transformPose("map", PoseStamped(
                            header=Header(
                                frame_id=OPERATOR.header.frame_id,
                                stamp=rospy.Time.now() - rospy.Duration.from_sec(0.5)
                            ),
                            pose=OPERATOR.pointing_pose
                        ))
                        result = robot.ed.ray_trace(map_pose)  # type: RayTraceResponse
                    except Exception as e:
                        rospy.logerr("Could not get ray trace from closest person: {}".format(e))
                        rospy.sleep(1.)

                if result is None:
                    # rospy was shut down before a ray trace succeeded.
                    return 'failed'

                # result.intersection_point type: PointStamped
                # result.entity_id: string
                rospy.loginfo("There is a ray intersection with {i} at ({p.x:.4}, {p.y:.4}, {p.z:.4})".format(i=result.entity_id, p=result.intersection_point.point))

                if result.entity_id in all_possible_furniture:
                    final_result = result
                else:
                    rospy.loginfo("{} is not furniture".format(result.entity_id))
                    robot.speech.speak("That's not furniture, you dummy.")
                    rospy.sleep(3)
                    OPERATOR = None
                    robot.get_arm().send_joint_goal("reset")
                    return 'failed'

            if final_result is None:
                # rospy was shut down before a furniture hit was found.
                return 'failed'

            robot.speech.speak("You pointed at %s" % final_result.entity_id)
            robot.get_arm().send_joint_goal("reset")
            # Fill the designator and the user data for the furniture inspection
            furniture_designator.write(robot.ed.get_entity(final_result.entity_id))
            user_data['laser_dot'] = final_result.intersection_point
            return 'done'

        with self:
            self.add('PREPARE_OPERATOR', CBState(_prepare_operator), transitions={'done': 'GET_OPERATOR'})
            self.add('GET_OPERATOR', CBState(_get_operator), transitions={'done': 'GET_FURNITURE'})
            self.add('GET_FURNITURE', CBState(_get_furniture), transitions={'done': 'done', 'failed': 'GET_OPERATOR'})


if __name__ == '__main__':
    # Stand-alone test: repeatedly run the pointing-detection state machine
    # on a Hero robot until rospy is shut down.
    node_name, _ = os.path.splitext("test_" + os.path.basename(__file__))
    rospy.init_node(node_name)

    designator = VariableDesignator(resolve_type=Entity)
    robot = Hero()
    robot.reset()

    while not rospy.is_shutdown():
        GetFurnitureFromOperatorPose(robot, designator.writeable).execute()
67 changes: 67 additions & 0 deletions challenge_hand_me_that/src/challenge_hand_me_that/hand_me_that.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
#
# Copyright (c) 2019, TU/e Robotics, Netherlands
# All rights reserved.
#
# \author Rein Appeldoorn

from smach import StateMachine

import robot_smach_states.util.designators as ds
from robot_skills.util.entity import Entity
from robot_smach_states import NavigateToWaypoint, StartChallengeRobust, Say, VariableDesignator,\
UnoccupiedArmDesignator
from robocup_knowledge import load_knowledge

from get_furniture_from_operator_pose import GetFurnitureFromOperatorPose
from identify_object import IdentifyObject
from inspect_furniture_entity import InspectFurniture

# Challenge-specific knowledge (waypoints) loaded from robocup_knowledge.
challenge_knowledge = load_knowledge('challenge_hand_me_that')

STARTING_POINT = challenge_knowledge.starting_point  # Location where the challenge starts
HOME_LOCATION = challenge_knowledge.home_location  # Location where the robot will go and look at the operator


def setup_statemachine(robot):
    """Build the 'hand me that' challenge state machine.

    The robot starts at STARTING_POINT, drives to HOME_LOCATION, detects the
    operator's pointing pose, inspects the furniture pointed at and identifies
    the object there, then loops back to HOME_LOCATION for the next round.

    :param robot: robot API object
    :return: the assembled smach StateMachine (outcome: 'done')
    """
    state_machine = StateMachine(outcomes=['done'])

    # Written by GetFurnitureFromOperatorPose, read by InspectFurniture
    furniture_designator = VariableDesignator(resolve_type=Entity)
    # Written by InspectFurniture, read by IdentifyObject
    entity_designator = VariableDesignator(resolve_type=Entity)
    arm_designator = UnoccupiedArmDesignator(robot, {})

    with state_machine:
        # Intro
        StateMachine.add('START_CHALLENGE_ROBUST', StartChallengeRobust(robot, STARTING_POINT),
                         transitions={'Done': 'SAY_START',
                                      'Aborted': 'done',
                                      'Failed': 'SAY_START'})

        # Say we're gonna start
        StateMachine.add('SAY_START', Say(robot, "Hand me that it is!", block=False),
                         transitions={'spoken': 'NAVIGATE_TO_START'})

        # Drive to the start location
        StateMachine.add('NAVIGATE_TO_START',
                         NavigateToWaypoint(robot, ds.EdEntityDesignator(robot, id=HOME_LOCATION)),
                         transitions={'arrived': 'GET_FURNITURE_FROM_OPERATOR_POSE',
                                      'unreachable': 'NAVIGATE_TO_START',  # ToDo: other fallback
                                      'goal_not_defined': 'done'})  # I'm not even going to fill this in

        # The pre-work
        StateMachine.add('GET_FURNITURE_FROM_OPERATOR_POSE',
                         GetFurnitureFromOperatorPose(robot, furniture_designator.writeable),
                         transitions={'done': 'INSPECT_FURNITURE'})

        # Go to the furniture object that was pointing to see what's there
        StateMachine.add('INSPECT_FURNITURE',
                         InspectFurniture(robot, furniture_designator, entity_designator.writeable),
                         transitions={"succeeded": "IDENTIFY_OBJECT",
                                      "failed": "NAVIGATE_TO_START"})  # If no entities, try again

        # Point at the object
        StateMachine.add('IDENTIFY_OBJECT',
                         IdentifyObject(robot, entity_designator, arm_designator),
                         transitions={'done': 'NAVIGATE_TO_START',  # Just keep on going
                                      'failed': 'NAVIGATE_TO_START'})  # Just keep on going

    return state_machine

0 comments on commit acdca08

Please sign in to comment.