Added PPMA/RA demo code #2

Merged: 10 commits, Dec 4, 2019

Minor changes and fixed typos

added config folder for use-case parameter setup
AndreaAntonello committed Nov 28, 2019
commit 6e1fa398e9a3dc9cc75c4792e2d75809a86b66a2
@@ -0,0 +1,17 @@
# Automata Technologies - Dr. Andrea Antonello, 2019
This conversation was marked as resolved by Charlesworth

@Charlesworth (Member), Dec 4, 2019:
Get rid of this on all files

import os
import yaml
from pathlib import Path


def load_use_case_config():
    return load_config("config/use_case_config.yaml")


def load_config(config_file):
    # Resolve the path against the current working directory,
    # so the demo must be launched from the repository root
    with open(os.path.join(os.getcwd(), config_file), 'r') as f:
        config = yaml.safe_load(f)

    assert config  # fail fast on an empty or unreadable config

    return config
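
For context, a minimal usage sketch of this loader (hypothetical, not part of the commit); note that load_config() joins the config path onto os.getcwd(), so the interpreter must be started from the repository root:

# Hypothetical usage sketch - not part of this PR.
# Assumes the script is launched from the repository root,
# since load_config() resolves paths against os.getcwd().
from config.config_manager import load_use_case_config

config = load_use_case_config()
print(config['EVA']['comms']['host'])      # e.g. '0.0.0.0'
print(config['waypoints']['joints_home'])  # six joint angles [rad]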
@@ -0,0 +1,64 @@
# Automata Technologies - Dr. Andrea Antonello, 2019
# SETUP YAML file for Machine Vision Example - December 2019

EVA:
  comms:
    host: '0.0.0.0'
    token: 'abcd1234'
  end_effector:
    payload: 0 # payload [kg]
    length: 0 # length of tool [m]
    offset: # ee offset in ee-frame [m]
      - 0
      - 0
      - 0
  hover_height: 0 # elevation of idle z axis wrt the object [m]
  surface_height: 0 # elevation of the pickup surface wrt the robot [m, signed]

TCP:
  server: '0.0.0.0'
  port: 0

objects:
  names: # object names
    - 'C'
    - 'M'
    - 'R'
  heights: # object thickness [m]
    C: 0
    M: 0
    R: 0
  weights: # object weight [kg]
    C: 0
    M: 0
    R: 0

waypoints:
  joints_cal_zero: # joints @ (0,0) of calibration board [rad]
    - 0
    - 0
    - 0
    - 0
    - 0
    - 0
  joints_guess: # joints guess for pickup/hover position [rad]
    - 0
    - 0
    - 0
    - 0
    - 0
    - 0
  joints_home: # joints for home position [rad]
    - 0
    - 0
    - 0
    - 0
    - 0
    - 0
  joints_drop: # joints for drop position [rad]
    - 0
    - 0
    - 0
    - 0
    - 0
    - 0
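
All values above are zeroed placeholders to be filled per installation; a hypothetical filled-in fragment (invented numbers, for illustration only) might look like:

# Illustrative values only - measure these for your own cell
EVA:
  comms:
    host: '192.168.1.245' # example robot IP on the local network
    token: 'abcd1234'
  end_effector:
    payload: 0.2 # gripper mass [kg]
    length: 0.12 # tool length [m]
  hover_height: 0.05 # idle 5 cm above the object
  surface_height: -0.01 # pickup surface 1 cm below the robot base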
@@ -64,7 +64,7 @@ def solve_ik_head_down(eva, guess, theta, xyz_absolute):
     return success_ik, joints_ik


-def _read_tcp_ip(sock, objects):
+def read_tcp_ip(sock, objects):
     """ This method reads and decodes the string sent from the camera """
     result = sock.recv(4000)
     string_read = result.decode('utf-8')
@@ -84,12 +84,13 @@ def _read_tcp_ip(sock, objects):
     passed = True
     # Extract the best matching object from the string
     camera_string = _extract_camera_serial(objects, select_obj, camera_string_raw)
+    # String format = ['start', 'object_name', float x_mm, float y_mm, float angle]
     return passed, camera_string


 def _extract_camera_serial(objects, index, camera_string_raw):
     """ This method extracts only the best matching object data from the entire string """
-    camera_string = ['', 0, 0, 0, 0, 0]
+    camera_string = ['', 0, 0, 0, 0]
     if index not in objects:
         print('Wrong object in the list')
     elif index == 'C':
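
For reference, a hypothetical example of the parsed result consumed by the main loop; the shape comes from the format comment above, while the values are invented:

# Hypothetical parsed camera string - format from read_tcp_ip's comment,
# numeric values invented for illustration.
cam_string = ['start', 'C', 12.5, 103.0, 45.0]  # [marker, name, x_mm, y_mm, angle]

# The main script accepts a reading only when all of these hold:
passed = True
assert passed is True and len(cam_string) == 5 and cam_string[0] == 'start'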
@@ -1,89 +1,92 @@
-# Automata Technologies - Andrea Antonello, 2019
+# Automata Technologies - Dr. Andrea Antonello, 2019
 import socket
 from automata import Eva
 from evaUtilities import *
+from config.config_manager import load_use_case_config

-# Connection to robot
-host = <user_input>
-token = <user_input>
-eva = Eva(host, token)
-eva.lock_status()
-
-# Connection to camera
-sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-server = <user_input>
-port = <user_input>
-sock.connect((server, port))
+if __name__ == "__main__":
+    # Load config parameters
+    config = load_use_case_config()
+
+    # Connection to robot
+    host = config['EVA']['comms']['host']
+    token = config['EVA']['comms']['token']
+    eva = Eva(host, token)

-# Use-case parameters
-objects = ['C', 'M', 'R']
-object_heights = {'C': <user_input>, 'M': <user_input>, 'R': <user_input>}  # object thicknesses [m]
-end_effector_length = <user_input>  # length of tool [m]
-hover_height = <user_input>  # elevation of idle z axis wrt the object [m]
-surface_height = <user_input>  # elevation of the pickup surface wrt the robot [m, signed]
-joint_cal_zero = <user_input>  # joints @ (0,0) of calibration board
-joint_guess = <user_input>  # joints guess for pickup/hover position
+    # Connection to camera
+    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    server = config['TCP']['server']
+    port = config['TCP']['port']
+    sock.connect((server, port))

-with eva.lock():
-    while True:
-        passed, cam_string = readTcpIp(sock)  # String format = ['start', 'pattern', 'x_mm', 'y_mm', 'angle', 'pass']
+    # Use-case parameters to be modified in YAML file ~/config/use_case_config.yaml
+    objects = config['objects']['names']  # object names
+    obj_heights = config['objects']['heights']  # object thicknesses [m]
+    ee_length = config['EVA']['end_effector']['length']  # length of tool [m]
+    hover_height = config['EVA']['hover_height']  # elevation of idle z axis wrt the object [m]
+    surf_height = config['EVA']['surface_height']  # elevation of the pickup surface wrt the robot [m, signed]
+    joints_cal_zero = config['waypoints']['joints_cal_zero']  # joints @ (0,0) of calibration board
+    joints_guess = config['waypoints']['joints_guess']  # joints guess for pickup/hover position
+    joints_home = config['waypoints']['joints_home']  # joints for home position
+    joints_drop = config['waypoints']['joints_drop']  # joints for drop position

-        if passed is True and len(cam_string) == 5 and cam_string[0] is 'start':  # Successful string format
-            evaVision = EvaVision(eva, cam_string, joint_cal_zero, object_heights[cam_string[1]],
-                                  surface_height, end_effector_length)
-            xyz = evaVision.locate_object()
-            xyz_hover = xyz
-            xyz_hover[2] = xyz[2] + abs(hover_height)  # add hover height to Z
+    with eva.lock():
+        while True:
+            passed, cam_string = read_tcp_ip(sock, objects)

-            # Compute IK for pickup and hover - special case with head down solution
-            success_IK_pickup, joints_IK_pickup = solve_ik_head_down(eva, joint_guess, cam_string[4], xyz)
-            success_IK_hover, joints_IK_hover = solve_ik_head_down(eva, joint_guess, cam_string[4], xyz_hover)
+            if passed is True and len(cam_string) == 5 and cam_string[0] == 'start':
+                obj_name = cam_string[1]
+                obj_angle = cam_string[4]
+                evaVision = EvaVision(eva, cam_string, joints_cal_zero, obj_heights[obj_name], surf_height, ee_length)
+                xyz = evaVision.locate_object()  # object position in Eva's frame [m]
+                xyz_hover = list(xyz)  # copy, so raising Z for hover does not also modify xyz
+                xyz_hover[2] = xyz[2] + abs(hover_height)  # add hover height to object position's Z [m]

-            # Verify IK success
-            if 'success' in success_IK_hover and 'success' in success_IK_pickup:
-                perform_move = True
-                message = 'Successful IK'
-                print(message)
-            else:
-                perform_move = False
-                message = 'Failed IK. Position not reachable'
-                print(message)
+                # Compute IK for pickup and hover - special case with head down solution
+                success_IK_pickup, joints_pickup = solve_ik_head_down(eva, joints_guess, obj_angle, xyz)
+                success_IK_hover, joints_hover = solve_ik_head_down(eva, joints_guess, obj_angle, xyz_hover)
+
+                # Verify IK success
+                if 'success' in success_IK_hover and 'success' in success_IK_pickup:
+                    perform_move = True
+                    message = 'Successful IK'
+                    print(message)
+                else:
+                    perform_move = False
+                    message = 'Failed IK. Position not reachable'
+                    print(message)

-            if perform_move is True:
-                # Create the way-points, generate the tool-path, save it and execute it
-                waypoint_home = <user_input>  # [rad] home position
-                waypoint_drop = <user_input>  # [rad] drop-off position
-                waypoint_hover = joints_IK_hover  # hover position right on top of object: offset in height (z_pickup)
-                waypoint_pickup = joints_IK_pickup  # pickup position on surface of the object
+                if perform_move is True:

-                toolpath_machine_vision = {
-                    "metadata": {
-                        "default_velocity": 1,
-                        "next_label_id": 5,
-                        "analog_modes": {"i0": "voltage", "i1": "voltage", "o0": "voltage", "o1": "voltage"}
-                    },
-                    "waypoints": [
-                        {"label_id": 1, "joints": waypoint_home},  # index 0
-                        {"label_id": 2, "joints": waypoint_hover},  # index 1
-                        {"label_id": 3, "joints": waypoint_pickup},  # index 2
-                        {"label_id": 4, "joints": waypoint_drop},  # index 3
-                    ],
-                    "timeline": [
-                        {"type": "home", "waypoint_id": 0},
-                        {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 1},
-                        {"type": "output-set", "io": {"location": "base", "type": "digital", "index": 0},
-                         "value": True},
-                        {"type": "trajectory", "trajectory": "linear", "waypoint_id": 2},
-                        {"type": "wait", "condition": {"type": "time", "duration": 500}},
-                        {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 1},
-                        {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 3},
-                        {"type": "output-set", "io": {"location": "base", "type": "digital", "index": 0},
-                         "value": False},
-                        {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 0},
-                    ]
-                }
-                eva.control_wait_for_ready()
-                eva.toolpaths_use(toolpath_machine_vision)
-                eva.control_run()
-        else:
-            print('No object correctly recognized')
+                    toolpath_machine_vision = {
+                        "metadata": {
+                            "default_velocity": 1,
+                            "next_label_id": 5,
+                            "analog_modes": {"i0": "voltage", "i1": "voltage", "o0": "voltage", "o1": "voltage"}
+                        },
+                        "waypoints": [
+                            {"label_id": 1, "joints": joints_home},
+                            {"label_id": 2, "joints": joints_hover},
+                            {"label_id": 3, "joints": joints_pickup},
+                            {"label_id": 4, "joints": joints_drop},
+                        ],
+                        "timeline": [
+                            {"type": "home", "waypoint_id": 0},
+                            {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 1},
+                            {"type": "output-set", "io": {"location": "base", "type": "digital", "index": 0},
+                             "value": True},
+                            {"type": "trajectory", "trajectory": "linear", "waypoint_id": 2},
+                            {"type": "wait", "condition": {"type": "time", "duration": 500}},
+                            {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 1},
+                            {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 3},
+                            {"type": "output-set", "io": {"location": "base", "type": "digital", "index": 0},
+                             "value": False},
+                            {"type": "trajectory", "trajectory": "joint_space", "waypoint_id": 0},
+                        ]
+                    }
+                    eva.control_wait_for_ready()
+                    eva.toolpaths_use(toolpath_machine_vision)
+                    eva.control_run()
+            else:
+                print('No object correctly recognized')
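
One detail worth flagging in the toolpath above: the timeline's waypoint_id fields are zero-based indices into the waypoints list, not label_id values (the previous revision's "# index 0" comments made this explicit). A quick check, assuming the dictionaries defined above:

# waypoint_id is a list index (0-3), while label_id runs 1-4:
home_step = toolpath_machine_vision['timeline'][0]  # {"type": "home", "waypoint_id": 0}
home_joints = toolpath_machine_vision['waypoints'][home_step['waypoint_id']]['joints']
assert home_joints == joints_home  # waypoint_id 0 -> the entry with label_id 1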