-
Notifications
You must be signed in to change notification settings - Fork 0
/
pid_control.py
140 lines (117 loc) · 4.34 KB
/
pid_control.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
#!/usr/bin/env python
'''
Author : Yi Herng Ong
Description : inherit template_match_class to obtain current pose of object and track its features for PID control
Date: Feb 6 2018
'''
import os
import pdb
import sys

import numpy as np
import rospy
from openravepy import *
from sensor_msgs.msg import JointState  # bugfix: JointState is used by robot.__init__ but was never imported
from sensor_msgs.srv import JointMove
from wam_msgs.srv import BHandFingerPos, BHandSpreadPos

from template_match_class import *
class PID(object):
    '''
    Simple PID controller.

    Tracks a reference (setpoint) and turns the error between the reference
    and a measured feedback value into a control output P + I + D.
    '''
    def __init__(self, kp = 1.0, kd = 0.0, ki = 0.0, reference = None):
        self.kp = kp                    # proportional gain
        self.kd = kd                    # derivative gain
        self.ki = ki                    # integral gain
        self.reference = reference      # setpoint; may also be supplied per-call
        self.prev_error = 0.0           # previous error, for the derivative term
        # Bugfix: the integral accumulator was an undefined local in control()
        # ('error_accum' / 'accumulated_error'); it must persist across calls.
        self.error_accum = 0.0
    def control(self, feedback, dt = 1, reference = None):
        '''
        Compute one PID step.

        feedback  : current measured value
        dt        : time step since the previous call (must be nonzero)
        reference : optional new setpoint; None keeps the current one
        Returns the control output P + I + D.
        '''
        if reference is not None:
            self.reference = reference
        error = self.reference - feedback              # error between reference and feedback
        error_diff = (error - self.prev_error) / dt    # differentiation of error
        self.error_accum += error * dt                 # integration of error
        # Compute control output (bugfix: I/D comments were swapped and the
        # I term referenced an undefined name).
        P = self.kp * error             # P term
        I = self.ki * self.error_accum  # I term
        D = self.kd * error_diff        # D term
        self.prev_error = error
        return P + I + D
class robot():
    '''
    Barrett-hand control wrapper: tracks the object pose via template
    matching, closes the fingers until contact, then servos the object
    toward a reference pose with a PID controller.
    '''
    def __init__(self):
        self.d_jA = [0.0, 0.0, 0.0]             # default joint-angle placeholder
        # Bugfix: initialize before the subscriber callback first fires, so
        # methods reading curr_finger_pos cannot hit an AttributeError.
        self.curr_finger_pos = [0.0, 0.0, 0.0]
        self.bhand_sub = rospy.Subscriber("/bhand/joint_states", JointState, self.get_bhand_state)
        self.bhand_figpos_srv = rospy.ServiceProxy("/bhand/finger_pos", BHandFingerPos)
        self.bhand_spdpos_srv = rospy.ServiceProxy("/bhand/spread_pos", BHandSpreadPos)
        self.bhand_msg = []

    def get_bhand_state(self, msg):
        # Subscriber callback: cache the three finger joint positions.
        # NOTE(review): indexing the message directly assumes it is
        # sequence-like; a sensor_msgs/JointState normally exposes
        # msg.position -- confirm against the publisher.
        self.curr_finger_pos = [msg[0], msg[1], msg[2]]

    def get_obj_pose(self, image_=None):
        '''
        Run template matching on image_ and return the current object pose.
        Bugfix: the original definition was missing 'self', so every
        self.get_obj_pose() call would have raised a TypeError.
        '''
        self.template_matcher = image_template_match()  # initialize template matching class
        return self.template_matcher.template_match(image_)

    def _nudge_fingers(self, delta):
        # Command an element-wise finger increment. Bugfix: the original
        # literals '[0.08 0 0]' were SyntaxErrors, and list '+' would have
        # concatenated (6 values) rather than added the increment.
        target = (np.array(self.curr_finger_pos) + np.array(delta)).tolist()
        self.bhand_figpos_srv(target)

    def move_to_touch(self, obj_pose, strategy):
        '''
        Close fingers in ~5-degree (0.08 rad) steps until the object pose
        starts changing, i.e. until contact is detected.
        "preshape" moves finger 1; "nudge" moves fingers 2 and 3.
        '''
        prev = obj_pose
        curr_obj_pose = self.get_obj_pose()
        pose_diff = max(np.absolute(curr_obj_pose - prev))
        if strategy == "preshape":
            while pose_diff < 0.0001:
                self._nudge_fingers([0.08, 0.0, 0.0])   # 5 degree change at a time
                curr_obj_pose = self.get_obj_pose()     # bugfix: refresh pose or the loop never exits
                pose_diff = max(np.absolute(curr_obj_pose - prev))
        if strategy == "nudge":
            while pose_diff < 0.0001:
                self._nudge_fingers([0.0, 0.08, 0.08])  # 5 degree change at a time
                curr_obj_pose = self.get_obj_pose()     # bugfix: refresh pose or the loop never exits
                pose_diff = max(np.absolute(curr_obj_pose - prev))

    def ctrl_strat(self, obj_pose):
        '''
        Decide the approach strategy from the object pose: "preshape"
        (two-finger side) or "nudge" (one-finger side).
        '''
        # TODO(review): these reference points are empty placeholders; the
        # comparison below raises on empty arrays until they are filled in.
        left = np.array([])
        right = np.array([])
        if max(np.array(obj_pose) - left) > max(np.array(obj_pose) - right):
            return "preshape"
        else:
            return "nudge"

    def execute_control_strat(self, reference):
        '''
        Work horse of the control strategy: pick a side, move to touch,
        then PID-servo the object toward 'reference' via visual feedback.
        '''
        jA_speed = np.array([0.17, 0.17, 0.17])  # finger speed scaling for PID output
        # Check object location: closer to the two-finger or one-finger side?
        obj_pose = self.get_obj_pose()
        if self.ctrl_strat(obj_pose) == "preshape":   # bugfix: bare ctrl_strat() was a NameError
            self.move_to_touch(obj_pose, "preshape")  # bugfix: bare move_to_touch() was a NameError
            # After first contact, close the other two fingers until either
            # the object moves again or the fingers stop moving.
            prev = obj_pose
            obj_pose = self.get_obj_pose()
            pose_diff = max(np.absolute(obj_pose - prev))
            curr_jA = self.curr_finger_pos
            prev_jA = [0, 0, 0]
            jA_diff = max(np.absolute(np.array(prev_jA) - np.array(curr_jA)))
            while pose_diff < 0.0001 or jA_diff < 0.0001:
                obj_pose = self.get_obj_pose()          # bugfix: refresh pose inside the loop
                pose_diff = max(np.absolute(obj_pose - prev))
                self._nudge_fingers([0.0, 0.08, 0.08])  # 5 deg
                prev_jA = curr_jA
                curr_jA = self.curr_finger_pos
                jA_diff = max(np.absolute(np.array(prev_jA) - np.array(curr_jA)))
        if self.ctrl_strat(obj_pose) == "nudge":
            self.move_to_touch(obj_pose, "nudge")
            obj_pose = self.get_obj_pose()
            pid = PID(1, 0, 0, reference)
            obj_diff = max(np.absolute(np.array(obj_pose) - np.array(reference)))
            while obj_diff > 0.02:  # look at x dir. only
                # Bugfix: originally read undefined self.obj_pose / self.reference.
                pose = pid.control(obj_pose, 1, reference)  # new pose from PID controller
                next_pose = self.curr_finger_pos + (jA_speed * pose * 0.1)
                # Bugfix: self.bhand_pos_srv did not exist; the proxy is bhand_figpos_srv.
                self.bhand_figpos_srv(next_pose)  # move to desired pose generated by PID controller
                obj_pose = self.get_obj_pose()
                obj_diff = max(np.absolute(np.array(obj_pose) - np.array(reference)))
if __name__ == '__main__':
    # PID protocol: whenever the object pose does not change, keep closing
    # the fingers; once contact moves the object, servo it to 'reference'
    # using vision feedback of the object pose.
    # Bugfix: rospy must be initialized before robot() creates subscribers
    # and service proxies.
    rospy.init_node("pid_control")
    # Bugfix: 'reference' was undefined here (NameError at runtime).
    # TODO(review): replace this placeholder with the real desired object pose.
    reference = np.array([0.0, 0.0, 0.0])
    r = robot()
    r.execute_control_strat(reference)