#!/usr/bin/env python
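"""Approach planner node for the applevision project.

Listens to the apple camera bounding box (applevision/apple_camera) and the
apple distance sensor (applevision/apple_dist), and uses MoveIt and tf to work
out how the end effector should move to keep the detected apple centered in
the camera image. The actual translation command is still stubbed out below.
"""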
import math
import sys
import copy
import rospy
import time
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
from random import random
import os
import subprocess, shlex, psutil

from math import pi

import std_msgs.msg
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
from tf.transformations import quaternion_from_euler

from sensor_msgs.msg import Range
import csv

import numpy as np

from applevision_rospkg.msg import PointWithCovarianceStamped, RegionOfInterestWithCovarianceStamped

import tf


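# Collects the latest camera and distance readings and, once both are present,
# plans a small end-effector adjustment that recenters the target in the image.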
class approachPlanner(object):
    def __init__(self):
        super(approachPlanner, self).__init__()

        # MoveIt setup

        moveit_commander.roscpp_initialize(sys.argv)

        self.robot = moveit_commander.RobotCommander()
        self.scene = moveit_commander.PlanningSceneInterface()

        group_name = "manipulator"

        self.move_group = moveit_commander.MoveGroupCommander(group_name)
        self.display_trajectory_publisher = rospy.Publisher('/move_group/display_planned_path',
                                                            moveit_msgs.msg.DisplayTrajectory,
                                                            queue_size=20)

        self.success_or_failure_publisher = rospy.Publisher('/success_or_failure', std_msgs.msg.String, queue_size=20)

        # We can get the name of the reference frame for this robot:
        planning_frame = self.move_group.get_planning_frame()
        print("============ Planning frame: %s" % planning_frame)

        # We can also print the name of the end-effector link for this group:
        eef_link = self.move_group.get_end_effector_link()
        print("============ End effector link: %s" % eef_link)

        # We can get a list of all the groups in the robot:
        group_names = self.robot.get_group_names()
        print("============ Available Planning Groups: %s" % group_names)

        # Sometimes for debugging it is useful to print the entire state of the robot:
        print("============ Printing robot state")
        print(self.robot.get_current_state())
        print("")

        # applevision camera

        self.tf_listener_ = tf.TransformListener()

        self.CAMERA_RES = (640, 360)
        self.CENTER_PT = (self.CAMERA_RES[0] // 2, self.CAMERA_RES[1] // 2)

        # Most recent camera (aC) and distance (aD) messages, cleared after each plan attempt
        self.aCLast = None
        self.aDLast = None

    def aCCallback(self, res):
        # Cache the latest camera message and attempt a planning step
        if self.aCLast is None:
            self.aCLast = res
            self.tryPlan()

    def aDCallback(self, res):
        # Cache the latest distance message and attempt a planning step
        if self.aDLast is None:
            self.aDLast = res
            self.tryPlan()

    def tryPlan(self):
        # Exit early if either subscriber has not delivered a message yet
        if self.aCLast is None or self.aDLast is None:
            return

        # Pull values for inspection
        aC = self.aCLast
        aD = self.aDLast

        # Sanitize workspace
        self.aCLast = None
        self.aDLast = None

        # Check for target in FOV
        if aC.w or aC.h:
            # Target found

            # Transform pixel coordinates and generate a pixel-space vector to the image center
            CURCAM_PT = (aC.x + aC.w // 2, aC.y + aC.h // 2)
            CURCAM_VEC = (self.CENTER_PT[0] - CURCAM_PT[0], self.CENTER_PT[1] - CURCAM_PT[1])

            # Allowed distance (in pixels) between the target center and the image center
            tolerance = 50

            # Recenter the camera if the target has drifted outside the tolerance
            if np.linalg.norm(np.array(CURCAM_VEC)) > tolerance:
                self.centerCamera(aC)

            # rospy.logwarn(CURCAM_VEC)
            # rospy.logwarn(aD)

    # Assumes the target is in view when executed
    def centerCamera(self, c):
        # Check for target in FOV
        if c.w or c.h:
            # Target found

            # Transform pixel coordinates and generate a pixel-space vector to the image center
            CURCAM_PT = (c.x + c.w // 2, c.y + c.h // 2)
            CURCAM_VEC = (self.CENTER_PT[0] - CURCAM_PT[0], self.CENTER_PT[1] - CURCAM_PT[1])

            mag = np.linalg.norm(np.array(CURCAM_VEC))

            # Normalize to a unit direction in pixel space
            CURCAM_VEC = (CURCAM_VEC[0] / mag, CURCAM_VEC[1] / mag)

            # Direction to move in has been generated; build the MoveIt motion

            # End effector position [world]
            eefP = self.move_group.get_current_pose(self.move_group.get_end_effector_link())

            # Convert to the palm frame
            t = self.tf_listener_.getLatestCommonTime("/world", "/palm")
            p1 = geometry_msgs.msg.PoseStamped()
            # The current pose is expressed in the world frame, so stamp it accordingly
            # before asking tf to re-express it in the palm frame
            p1.header.frame_id = "/world"
            p1.header.stamp = t
            p1.pose = eefP.pose

            # End effector position [palm]
            eefP_palm = self.tf_listener_.transformPose("/palm", p1)

            stepSize = 1

            # eefP_palm.pose.position.x += CURCAM_VEC[0] * stepSize
            # eefP_palm.pose.position.y += CURCAM_VEC[1] * stepSize

            # Convert [palm] back to [world]
            t = self.tf_listener_.getLatestCommonTime("/palm", "/world")
            p2 = geometry_msgs.msg.PoseStamped()
            # This pose is now expressed in the palm frame
            p2.header.frame_id = "/palm"
            p2.header.stamp = t
            p2.pose = eefP_palm.pose

            # End effector position [world] (updated)
            eefP_world = self.tf_listener_.transformPose("/world", p2)

            rospy.logwarn_once([eefP, eefP_palm, eefP_world])


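# Node entry point: construct the planner, wire it to the camera and distance
# topics, then spin until shutdown.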
def main():
    try:
        rospy.init_node('applevision_motion')
        rospy.wait_for_service('Tf2Transform')

        aP = approachPlanner()

        aCPub = rospy.Subscriber('applevision/apple_camera', RegionOfInterestWithCovarianceStamped, aP.aCCallback, queue_size=10)
        aDPub = rospy.Subscriber('applevision/apple_dist', Range, aP.aDCallback, queue_size=10)

        rospy.spin()

    except rospy.ROSInterruptException:
        pass


if __name__ == '__main__':
    main()