Commit 392671f22cf2aeaca21e71a8523df5c7a071d756

Authored by Renan
1 parent c21b81c5
Exists in master

ENH: robot documentation

invesalius/data/coordinates.py
... ... @@ -236,7 +236,6 @@ def PolhemusCoord(trck, trck_id, ref_mode):
236 236  
237 237  
238 238 def PolhemusWrapperCoord(trck, trck_id, ref_mode):
239   -
240 239 trck.Run()
241 240 scale = 10.0 * np.array([1., 1., 1.])
242 241  
... ...
invesalius/data/elfin.py
... ... @@ -6,6 +6,10 @@ from socket import socket, AF_INET, SOCK_STREAM
6 6 import invesalius.constants as const
7 7  
8 8 class Elfin_Server():
  9 + """
  10 + This class is similar to the tracker device wrappers.
  11 + It implements the same methods as the others (Initialize, Run and Close)
  12 + """
9 13 def __init__(self, server_ip, port_number):
10 14 self.server_ip = server_ip
11 15 self.port_number = port_number
... ... @@ -21,6 +25,10 @@ class Elfin_Server():
21 25 return self.cobot.ReadPcsActualPos()
22 26  
23 27 def SendCoordinates(self, target, type=const.ROBOT_MOTIONS["normal"]):
  28 + """
  29 + It's not possible to send a move command to elfin while the robot is in motion.
  30 + Status 1009 means the robot is in motion.
  31 + """
24 32 status = self.cobot.ReadMoveState()
25 33 if status != 1009:
26 34 if type == const.ROBOT_MOTIONS["normal"] or type == const.ROBOT_MOTIONS["linear out"]:
... ... @@ -29,6 +37,8 @@ class Elfin_Server():
29 37 self.cobot.MoveC(target)
30 38  
31 39 def StopRobot(self):
  40 + # It takes some microseconds for the robot to actually stop after the command.
  41 + # The sleep time is required to guarantee the stop.
32 42 self.cobot.GrpStop()
33 43 sleep(0.1)
34 44  
... ... @@ -37,6 +47,9 @@ class Elfin_Server():
37 47  
38 48 class Elfin:
39 49 def __init__(self):
  50 + """
  51 + Class to communicate with elfin robot. This class follows "HansRobot Communication Protocol Interface".
  52 + """
40 53 self.end_msg = ",;"
41 54  
42 55 def connect(self, server_ip, port_number, message_size, robot_id):
... ...
invesalius/data/elfin_processing.py
... ... @@ -140,6 +140,9 @@ class TrackerProcessing:
140 140 return init_move_out_point, target_arc
141 141  
142 142 def compute_head_move_threshold(self, current_ref):
  143 + """
  144 + Checks if the head velocity is below the threshold
  145 + """
143 146 self.coord_vel.append(current_ref)
144 147 self.timestamp.append(time())
145 148 if len(self.coord_vel) >= 10:
... ... @@ -161,18 +164,24 @@ class TrackerProcessing:
161 164  
162 165 return False
163 166  
164   - def compute_head_move_compensation(self, current_head, M_change_robot_to_head):
  167 + def compute_head_move_compensation(self, current_head, m_change_robot_to_head):
  168 + """
  169 + Estimates the new robot position to reach the target
  170 + """
165 171 M_current_head = dco.coordinates_to_transformation_matrix(
166 172 position=current_head[:3],
167 173 orientation=current_head[3:6],
168 174 axes='rzyx',
169 175 )
170   - M_robot_new = M_current_head @ M_change_robot_to_head
171   - angles_as_deg, translate = dco.transformation_matrix_to_coordinates(M_robot_new, axes='rzyx')
  176 + m_robot_new = M_current_head @ m_change_robot_to_head
  177 + angles_as_deg, translate = dco.transformation_matrix_to_coordinates(m_robot_new, axes='rzyx')
172 178 #TODO: check this with robot
173 179 return list(translate) + list(angles_as_deg)
174 180  
175 181 def estimate_head_center(self, tracker, current_head):
  182 + """
  183 + Estimates the actual head center position using fiducials
  184 + """
176 185 m_probe_head_left, m_probe_head_right, m_probe_head_nasion = tracker.GetMatrixTrackerFiducials()
177 186 m_current_head = dcr.compute_marker_transformation(np.array([current_head]), 0)
178 187  
... ... @@ -182,10 +191,13 @@ class TrackerProcessing:
182 191 return (m_ear_left_new[:3, -1] + m_ear_right_new[:3, -1])/2
183 192  
184 193 def correction_distance_calculation_target(self, coord_inv, actual_point):
185   - sum = (coord_inv[0]-actual_point[0]) ** 2\
186   - + (coord_inv[1]-actual_point[1]) ** 2\
187   - + (coord_inv[2]-actual_point[2]) ** 2
188   - correction_distance_compensation = pow(sum, 0.5)
  194 + """
  195 + Estimates the Euclidean distance between the actual position and the target
  196 + """
  197 + square_sum = (coord_inv[0]-actual_point[0]) ** 2 +\
  198 + (coord_inv[1]-actual_point[1]) ** 2 +\
  199 + (coord_inv[2]-actual_point[2]) ** 2
  200 + correction_distance_compensation = pow(square_sum, 0.5)
189 201  
190 202 return correction_distance_compensation
191 203  
... ...
invesalius/navigation/robot.py
... ... @@ -35,6 +35,10 @@ except ImportError:
35 35  
36 36 class Robot():
37 37 def __init__(self, tracker):
  38 + """
  39 + Class to establish the connection between the robot and InVesalius
  40 + :param tracker: tracker.py instance
  41 + """
38 42 self.tracker = tracker
39 43 self.trk_init = None
40 44 self.robot_target_queue = None
... ... @@ -109,6 +113,10 @@ class Robot():
109 113  
110 114  
111 115 class RobotCoordinates():
  116 + """
  117 + Class to set/get robot coordinates. Robot coordinates are acquired in ControlRobot thread.
  118 + The class is required to avoid acquisition conflicts between different threads (coordinates and navigation)
  119 + """
112 120 def __init__(self):
113 121 self.coord = None
114 122  
... ... @@ -121,6 +129,19 @@ class RobotCoordinates():
121 129  
122 130 class ControlRobot(threading.Thread):
123 131 def __init__(self, trck_init, tracker, robotcoordinates, queues, process_tracker, event_robot):
  132 + """Class (threading) to perform the robot control.
  133 + A dedicated thread is required to improve performance and to separate the robot control from the navigation thread
  134 + (safety layer for robot positioning).
  135 +
  136 + There is no GUI involved, thus no sleep is required.
  137 +
  138 + :param trck_init: Initialization variable of tracking device and connection type. See tracker.py.
  139 + :param tracker: tracker.py instance
  140 + :param robotcoordinates: RobotCoordinates() instance
  141 + :param queues: Queue instances that manage the coordinates and the robot target
  142 + :param process_tracker: TrackerProcessing() instance from elfin_process
  143 + :param event_robot: Threading event to signal ControlRobot when the task is done
  144 + """
124 145 threading.Thread.__init__(self, name='ControlRobot')
125 146  
126 147 self.trck_init_robot = trck_init[1][0]
... ... @@ -155,6 +176,24 @@ class ControlRobot(threading.Thread):
155 176 return coord_raw, coord_robot_raw, markers_flag
156 177  
157 178 def robot_move_decision(self, distance_target, new_robot_coordinates, current_robot_coordinates, current_head_filtered):
  179 + """
  180 + There are two types of robot movements.
  181 + We can imagine two concentric spheres of different sizes. The inner sphere is used to compensate for small head movements.
  182 + These were named "normal" moves.
  183 + The outside sphere is for the arc motion. The arc motion is a safety feature for long robot movements.
  184 + Even for a new target or a sudden huge head movement.
  185 + 1) normal:
  186 + A linear move from the actual position until the target position.
  187 + This movement only happens when the move distance is below a threshold (const.ROBOT_ARC_THRESHOLD_DISTANCE)
  188 + 2) arc motion:
  189 + It can be divided into three parts.
  190 + The first one represents the movement from the inner sphere to the outer sphere.
  191 + The robot moves back using a radial move (it uses the center of the head as a reference).
  192 + The second step is the actual arc motion (along the outer sphere).
  193 + A middle point, between the actual position and the target, is required.
  194 + The last step is to make a linear move until the target (goes to the inner sphere)
  195 +
  196 + """
158 197 if distance_target < const.ROBOT_ARC_THRESHOLD_DISTANCE and not self.arc_motion_flag:
159 198 self.trck_init_robot.SendCoordinates(new_robot_coordinates, const.ROBOT_MOTIONS["normal"])
160 199  
... ... @@ -214,9 +253,10 @@ class ControlRobot(threading.Thread):
214 253 self.coord_inv_old = new_robot_coordinates
215 254  
216 255 if np.allclose(np.array(new_robot_coordinates), np.array(current_robot_coordinates), 0, 0.01):
217   - # print("At target within range 1")
  256 + # avoid small movements (0.01 mm)
218 257 pass
219 258 elif not np.allclose(np.array(new_robot_coordinates), np.array(self.coord_inv_old), 0, 5):
  259 + # if the head moves (>5 mm) before the robot reaches the target
220 260 self.trck_init_robot.StopRobot()
221 261 self.coord_inv_old = new_robot_coordinates
222 262 else:
... ... @@ -230,15 +270,11 @@ class ControlRobot(threading.Thread):
230 270 while not self.event_robot.is_set():
231 271 current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag = self.get_coordinates_from_tracker_devices()
232 272  
233   - if self.robot_target_queue.empty():
234   - None
235   - else:
  273 + if not self.robot_target_queue.empty():
236 274 self.robot_tracker_flag, self.m_change_robot_to_head = self.robot_target_queue.get_nowait()
237 275 self.robot_target_queue.task_done()
238 276  
239   - if self.object_at_target_queue.empty():
240   - None
241   - else:
  277 + if not self.object_at_target_queue.empty():
242 278 self.target_flag = self.object_at_target_queue.get_nowait()
243 279 self.object_at_target_queue.task_done()
244 280  
... ...