@@ -52,19 +52,23 @@ def __init__(self):

         self.tf_listener_ = tf.TransformListener()

-        self.CAMERA_RES = (640, 360)
-        self.CENTER_PT = (self.CAMERA_RES[0] // 2, self.CAMERA_RES[1] // 2)
+        self.CAMERA_RES = np.array((640, 360))
+        self.CENTER_PT = self.CAMERA_RES // 2

         self.aCLast = None
         self.aDLast = None

         self.pixelTolerance = 10
-        self.distTolerance = 0.3
+        self.distTolerance = 0.1
+        self.deadReck = 0.2

-        # self.pixelStep
+        self.scale = np.array((0.001, 0.001))

-    def toCenterCoord(q):
-        return (q.x + q.w // 2, q.y + q.h // 2)
+        self.A = None
+        self.B = None
+
+    def toCenterCoord(self, q):
+        return np.array((q.x + q.w // 2, q.y + q.h // 2))

     def aCCallback(self, res):
         if self.aCLast == None:
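For reference, the new center-point math in isolation: CENTER_PT becomes the integer midpoint of the 640x360 frame, and toCenterCoord returns the pixel-space center of a detection box. A minimal standalone sketch, where the Box dataclass is a hypothetical stand-in for the detection message (only the x, y, w, h fields matter):

import numpy as np
from dataclasses import dataclass

@dataclass
class Box:
    # hypothetical stand-in for the vision message used by the node
    x: int
    y: int
    w: int
    h: int

CAMERA_RES = np.array((640, 360))
CENTER_PT = CAMERA_RES // 2        # array([320, 180])

def to_center_coord(q):
    # pixel-space center of the bounding box, mirroring the new toCenterCoord
    return np.array((q.x + q.w // 2, q.y + q.h // 2))

print(to_center_coord(Box(300, 150, 50, 40)))   # [325 170]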
@@ -84,76 +88,95 @@ def tryPlan(self):
             return

         # Pull values for inspection
-
         aC = self.aCLast
         aD = self.aDLast

         # Sanitize workspace
-
         self.aCLast = None
         self.aDLast = None

         # Check for target in FOV
         if (aC.w or aC.h):
             # target found

-            # Transform pixel coordinates and generate pixel space vector to center
-            CURCAM_PT = (aC.x + aC.w // 2, aC.y + aC.h // 2)
-            CURCAM_VEC = (self.CENTER_PT[0] - CURCAM_PT[0], self.CENTER_PT[1] - CURCAM_PT[1])
-
-            # Check if Camera is Centered
-            if np.abs(CURCAM_VEC[0]) > self.pixelTolerance or np.abs(CURCAM_VEC[1]) > self.pixelTolerance:
-                rospy.logwarn("Centering")
-                self.centerCamera(aC, CURCAM_VEC)
-            else:  # Advance closer to apple, this always happens after a potential trajectory correction step
-                if aD.range > self.distTolerance:
+            cP = self.toCenterCoord(aC)
+
+            if aD.range > self.distTolerance and aD.range > self.deadReck:
+                # Advance closer to apple, this always happens after a potential trajectory correction step
+                # Check if Camera is Centered
+                if np.linalg.norm(self.CENTER_PT - cP) > self.pixelTolerance:
+                    rospy.logwarn("Centering")
+                    self.centerCamera(aC, aD)
+                else:
                     msg = "Approaching, t-" + str(aD.range - self.distTolerance)
                     rospy.logwarn(msg)
+                    rospy.logwarn(aD.range)
                     self.forwardStep(aD)
                     pass
-            else:
-                # Terminate Process
-                rospy.logwarn("Done!")
-                # self.wrapUp()
-                pass
+            elif aD.range > self.distTolerance and aD.range < self.deadReck:
+                # Stop checking the camera, start the full approach
+                msg = "Approaching (dead), t-" + str(aD.range - self.distTolerance)
+                rospy.logwarn(msg)
+                self.forwardStep(aD)
+            else:
+                # Terminate Process
+                rospy.logwarn("Done!")
+                # self.wrapUp()
+                pass

     def wrapUp(self):
         rospy.logwarn("Terminating Motion Sequence")
         rospy.signal_shutdown("End Position Reached")

     # Assumed Camera is in view when executed
-    def centerCamera(self, c, CURCAM_VEC):
+    def centerCamera(self, c, d):
         # Check for target in FOV
         if (c.w or c.h):
             # target found

-            CURCAM_VECn = (CURCAM_VEC[0] / np.linalg.norm(CURCAM_VEC), CURCAM_VEC[1] / np.linalg.norm(CURCAM_VEC))
+            # Transform pixel coordinates and generate pixel space vector to center
+
+            CURCAM_PT = np.array((c.x + c.w // 2, c.y + c.h // 2))

-            if np.abs(CURCAM_VEC[0]) > self.pixelTolerance:
+            if self.A is None or self.B is None:
+                # 1st Move

-                # End Effector Position [world]
-                eefP = self.move_group.get_current_pose(self.move_group.get_end_effector_link())
+                # Point (B) -> (A.t)
+                self.A = CURCAM_PT
+                self.B = self.CENTER_PT

-                # We can get the joint values from the group and adjust some of the values:
+                # X Component, Hip
+                # End Effector Position [world]
                 joint_goal = self.move_group.get_current_joint_values()
-                joint_goal[0] += (CURCAM_VECn[0] * 0.1)
+
+                # Normalization Hack: https://stackoverflow.com/questions/21030391/how-to-normalize-a-numpy-array-to-a-unit-vector
+                # v / (np.linalg.norm(v) + 1e-16)
+
+                v = (self.B - self.A)
+
+                joint_goal[0] += (v[0] * self.scale[0])
+                joint_goal[1] -= (v[1] * self.scale[1])

                 self.move_group.go(joint_goal, wait=True)
                 self.move_group.stop()

-            if np.abs(CURCAM_VEC[1]) > self.pixelTolerance:
+            # else:
+            #     # 2nd Move

-                # End Effector Position [world]
-                eefP = self.move_group.get_current_pose(self.move_group.get_end_effector_link())
+            #     # Points from previous run
+            #     A = self.A
+            #     B = self.B

-                # We can get the joint values from the group and adjust some of the values:
-                joint_goal = self.move_group.get_current_joint_values()
-                joint_goal[1] += (CURCAM_VECn[0] * 0.01)
+            #     C = CURCAM_PT

-                self.move_group.go(joint_goal, wait=True)
-                self.move_group.stop()
+            #     %rospy.logwarn([((np.linalg.norm(C - A))/(np.linalg.norm((B - A)*self.scale))), 1/((C - A)/((B - A)*self.scale))])
+
+
+
+            # Wrap Up
+            self.A = None
+            self.B = None

-

     def forwardStep(self, d):

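The rewritten tryPlan splits the approach into three bands of the ranged distance aD.range: above deadReck (0.2) it keeps re-centering the target before each forward step, between distTolerance (0.1) and deadReck it dead-reckons forward without further camera correction, and at or below distTolerance it stops. A hedged sketch of just that branching, with the thresholds copied from the diff (the decide function itself is illustrative and not part of the node):

def decide(rng, pixel_err, dist_tol=0.1, dead_reck=0.2, pixel_tol=10):
    # Mirrors the tryPlan branching: far away -> center or step forward,
    # inside the dead-reckoning band -> step forward without correction, else stop.
    if rng > dist_tol and rng > dead_reck:
        return "center" if pixel_err > pixel_tol else "approach"
    elif rng > dist_tol and rng < dead_reck:
        return "approach (dead reckoning)"
    else:
        return "done"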
@@ -166,12 +189,8 @@ def forwardStep(self, d):

         plan = self.move_group.set_pose_target(pose_goal)

-        rospy.logwarn("Premotion...")
-
         self.move_group.go(wait=True)

-        rospy.logwarn("Postmotion...")
-
         self.move_group.stop()


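Returning to the centerCamera change in the hunk above: read as a crude proportional visual servo, the first-move branch stores the box center (A) and the image center (B), then nudges the first two joints by the pixel error B - A scaled by the fixed 0.001 gains in self.scale. A minimal sketch of that mapping, assuming the same joint ordering and sign convention as the diff (neither is verified against the robot):

import numpy as np

SCALE = np.array((0.001, 0.001))   # gain from self.scale in the diff: joint motion per pixel of error

def joint_offsets(box_center, image_center, scale=SCALE):
    # pixel error v = B - A mapped onto joint 0 (+=) and joint 1 (-=)
    v = np.asarray(image_center) - np.asarray(box_center)
    return np.array((v[0] * scale[0], -v[1] * scale[1]))

print(joint_offsets((325, 170), (320, 180)))   # approximately [-0.005 -0.01]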