diff --git a/invesalius/data/bases.py b/invesalius/data/bases.py index f212328..dad0aa1 100644 --- a/invesalius/data/bases.py +++ b/invesalius/data/bases.py @@ -279,7 +279,7 @@ class transform_tracker_to_robot(object): M_tracker = dco.coordinates_to_transformation_matrix( position=tracker_coord[:3], - orientation= tracker_coord[3:6], + orientation=tracker_coord[3:6], axes='rzyx', ) M_tracker_in_robot = transform_tracker_to_robot.M_tracker_to_robot @ M_tracker diff --git a/invesalius/data/elfin_processing.py b/invesalius/data/elfin_processing.py index 555894b..5443514 100644 --- a/invesalius/data/elfin_processing.py +++ b/invesalius/data/elfin_processing.py @@ -20,16 +20,21 @@ import numpy as np import cv2 from time import time -import invesalius.data.transformations as tr import invesalius.data.coregistration as dcr +import invesalius.data.coordinates as dco import invesalius.constants as const class KalmanTracker: + """ + Kalman filter to avoid sudden fluctuation from tracker device. + The filter strength can be set by the covariance_process and covariance_measure parameters. + It is required to create one instance for each variable (x, y, z, a, b, g) + """ def __init__(self, state_num=2, - cov_process=0.001, - cov_measure=0.1): + covariance_process=0.001, + covariance_measure=0.1): self.state_num = state_num measure_num = 1 @@ -46,8 +51,8 @@ class KalmanTracker: [0, 1]], np.float32) self.filter.measurementMatrix = np.array([[1, 1]], np.float32) self.filter.processNoiseCov = np.array([[1, 0], - [0, 1]], np.float32) * cov_process - self.filter.measurementNoiseCov = np.array( [[1]], np.float32) * cov_measure + [0, 1]], np.float32) * covariance_process + self.filter.measurementNoiseCov = np.array([[1]], np.float32) * covariance_measure def update_kalman(self, measurement): self.prediction = self.filter.predict() @@ -67,8 +72,8 @@ class TrackerProcessing: self.tracker_stabilizers = [KalmanTracker( state_num=2, - cov_process=0.001, - cov_measure=0.1) for _ in range(6)] + 
covariance_process=0.001, + covariance_measure=0.1) for _ in range(6)] def kalman_filter(self, coord_tracker): kalman_array = [] @@ -100,41 +105,39 @@ class TrackerProcessing: init_point = np.array(init_point) final_point = np.array(final_point) norm = (sum((final_point - init_point) ** 2)) ** 0.5 - versorfactor = (((final_point-init_point) / norm) * const.ROBOT_VERSOR_SCALE_FACTOR).tolist() + versor_factor = (((final_point-init_point) / norm) * const.ROBOT_VERSOR_SCALE_FACTOR).tolist() - return versorfactor + return versor_factor - def compute_arc_motion(self, actual_point, coord_head, coord_inv): - p1 = coord_inv - pc = coord_head[0], coord_head[1], coord_head[2], coord_inv[3], coord_inv[4], coord_inv[5] + def compute_arc_motion(self, current_robot_coordinates, head_center_coordinates, new_robot_coordinates): + head_center = head_center_coordinates[0], head_center_coordinates[1], head_center_coordinates[2], \ + new_robot_coordinates[3], new_robot_coordinates[4], new_robot_coordinates[5] - versorfactor1 = self.compute_versors(pc, actual_point) - init_ext_point = actual_point[0] + versorfactor1[0], \ - actual_point[1] + versorfactor1[1], \ - actual_point[2] + versorfactor1[2], \ - actual_point[3], actual_point[4], actual_point[5] + versor_factor_move_out = self.compute_versors(head_center, current_robot_coordinates) + init_move_out_point = current_robot_coordinates[0] + versor_factor_move_out[0], \ + current_robot_coordinates[1] + versor_factor_move_out[1], \ + current_robot_coordinates[2] + versor_factor_move_out[2], \ + current_robot_coordinates[3], current_robot_coordinates[4], current_robot_coordinates[5] - middle_point = ((p1[0] + actual_point[0]) / 2, - (p1[1] + actual_point[1]) / 2, - (p1[2] + actual_point[2]) / 2, + middle_point = ((new_robot_coordinates[0] + current_robot_coordinates[0]) / 2, + (new_robot_coordinates[1] + current_robot_coordinates[1]) / 2, + (new_robot_coordinates[2] + current_robot_coordinates[2]) / 2, 0, 0, 0) + versor_factor_middle_arc 
= (np.array(self.compute_versors(head_center, middle_point))) * 2 + middle_arc_point = middle_point[0] + versor_factor_middle_arc[0], \ + middle_point[1] + versor_factor_middle_arc[1], \ + middle_point[2] + versor_factor_middle_arc[2] - newarr = (np.array(self.compute_versors(pc, middle_point))) * 2 - middle_arc_point = middle_point[0] + newarr[0], \ - middle_point[1] + newarr[1], \ - middle_point[2] + newarr[2] - - versorfactor3 = self.compute_versors(pc, p1) - - final_ext_arc_point = p1[0] + versorfactor3[0], \ - p1[1] + versorfactor3[1], \ - p1[2] + versorfactor3[2], \ - coord_inv[3], coord_inv[4], coord_inv[5], 0 + versor_factor_arc = self.compute_versors(head_center, new_robot_coordinates) + final_ext_arc_point = new_robot_coordinates[0] + versor_factor_arc[0], \ + new_robot_coordinates[1] + versor_factor_arc[1], \ + new_robot_coordinates[2] + versor_factor_arc[2], \ + new_robot_coordinates[3], new_robot_coordinates[4], new_robot_coordinates[5], 0 target_arc = middle_arc_point + final_ext_arc_point - return init_ext_point, target_arc + return init_move_out_point, target_arc def compute_head_move_threshold(self, current_ref): self.coord_vel.append(current_ref) @@ -158,24 +161,23 @@ class TrackerProcessing: return False - def head_move_compensation(self, current_ref, m_change_robot_to_head): - trans = tr.translation_matrix(current_ref[:3]) - a, b, g = np.radians(current_ref[3:6]) - rot = tr.euler_matrix(a, b, g, 'rzyx') - M_current_ref = tr.concatenate_matrices(trans, rot) - - m_robot_new = M_current_ref @ m_change_robot_to_head - _, _, angles, translate, _ = tr.decompose_matrix(m_robot_new) - angles = np.degrees(angles) - - return m_robot_new[0, -1], m_robot_new[1, -1], m_robot_new[2, -1], angles[0], angles[1], angles[2] - - def estimate_head_center(self, tracker, current_ref): - m_probe_ref_left, m_probe_ref_right, m_probe_ref_nasion = tracker.GetMatrixTrackerFiducials() - m_current_ref = dcr.compute_marker_transformation(np.array([current_ref]), 0) - - 
m_ear_left_new = m_current_ref @ m_probe_ref_left - m_ear_right_new = m_current_ref @ m_probe_ref_right + def compute_head_move_compensation(self, current_head, M_change_robot_to_head): + M_current_head = dco.coordinates_to_transformation_matrix( + position=current_head[:3], + orientation=current_head[3:6], + axes='rzyx', + ) + M_robot_new = M_current_head @ M_change_robot_to_head + angles_as_deg, translate = dco.transformation_matrix_to_coordinates(M_robot_new, axes='rzyx') + #TODO: check this with robot + return list(translate) + list(angles_as_deg) + + def estimate_head_center(self, tracker, current_head): + m_probe_head_left, m_probe_head_right, m_probe_head_nasion = tracker.GetMatrixTrackerFiducials() + m_current_head = dcr.compute_marker_transformation(np.array([current_head]), 0) + + m_ear_left_new = m_current_head @ m_probe_head_left + m_ear_right_new = m_current_head @ m_probe_head_right return (m_ear_left_new[:3, -1] + m_ear_right_new[:3, -1])/2 diff --git a/invesalius/gui/dialogs.py b/invesalius/gui/dialogs.py index 2e373f0..588d824 100644 --- a/invesalius/gui/dialogs.py +++ b/invesalius/gui/dialogs.py @@ -4324,6 +4324,10 @@ class SetOptitrackconfigs(wx.Dialog): return fn_cal, fn_userprofile class SetTrackerDeviceToRobot(wx.Dialog): + """ + Robot navigation requires a tracker device to track the head position and the object (coil) position. 
+ A dialog pops up showing a combobox with all trackers but debugs and the robot itself (const.TRACKERS[:-3]) + """ def __init__(self, title=_("Setting tracker device:")): wx.Dialog.__init__(self, wx.GetApp().GetTopWindow(), -1, title, size=wx.Size(1000, 200), style=wx.DEFAULT_DIALOG_STYLE|wx.FRAME_FLOAT_ON_PARENT|wx.STAY_ON_TOP|wx.RESIZE_BORDER) @@ -4339,7 +4343,6 @@ class SetTrackerDeviceToRobot(wx.Dialog): choice_trck.SetToolTip(tooltip) choice_trck.SetSelection(const.DEFAULT_TRACKER) choice_trck.Bind(wx.EVT_COMBOBOX, partial(self.OnChoiceTracker, ctrl=choice_trck)) - #self.choice_trck = choice_trck btn_ok = wx.Button(self, wx.ID_OK) btn_ok.SetHelpText("") diff --git a/invesalius/navigation/robot.py b/invesalius/navigation/robot.py index 6cacc72..ba0c1bb 100644 --- a/invesalius/navigation/robot.py +++ b/invesalius/navigation/robot.py @@ -154,18 +154,18 @@ class ControlRobot(threading.Thread): return coord_raw, coord_robot_raw, markers_flag - def robot_move_decision(self, distance_target, coord_inv, coord_robot_raw, current_ref_filtered): + def robot_move_decision(self, distance_target, new_robot_coordinates, current_robot_coordinates, current_head_filtered): if distance_target < const.ROBOT_ARC_THRESHOLD_DISTANCE and not self.arc_motion_flag: - self.trck_init_robot.SendCoordinates(coord_inv, const.ROBOT_MOTIONS["normal"]) + self.trck_init_robot.SendCoordinates(new_robot_coordinates, const.ROBOT_MOTIONS["normal"]) elif distance_target >= const.ROBOT_ARC_THRESHOLD_DISTANCE or self.arc_motion_flag: - actual_point = coord_robot_raw + actual_point = current_robot_coordinates if not self.arc_motion_flag: - coord_head = self.process_tracker.estimate_head_center(self.tracker, - current_ref_filtered).tolist() + head_center_coordinates = self.process_tracker.estimate_head_center(self.tracker, + current_head_filtered).tolist() - self.target_linear_out, self.target_arc = self.process_tracker.compute_arc_motion(coord_robot_raw, coord_head, - coord_inv) + 
self.target_linear_out, self.target_arc = self.process_tracker.compute_arc_motion(current_robot_coordinates, head_center_coordinates, + new_robot_coordinates) self.arc_motion_flag = True self.arc_motion_step_flag = const.ROBOT_MOTIONS["linear out"] @@ -176,15 +176,15 @@ class ControlRobot(threading.Thread): coord = self.target_arc elif self.arc_motion_flag and self.arc_motion_step_flag == const.ROBOT_MOTIONS["arc"]: - coord_head = self.process_tracker.estimate_head_center(self.tracker, - current_ref_filtered).tolist() + head_center_coordinates = self.process_tracker.estimate_head_center(self.tracker, + current_head_filtered).tolist() - _, new_target_arc = self.process_tracker.compute_arc_motion(coord_robot_raw, coord_head, - coord_inv) + _, new_target_arc = self.process_tracker.compute_arc_motion(current_robot_coordinates, head_center_coordinates, + new_robot_coordinates) if np.allclose(np.array(new_target_arc[3:-1]), np.array(self.target_arc[3:-1]), 0, 1): None else: - if self.process_tracker.correction_distance_calculation_target(coord_inv, coord_robot_raw) >= \ + if self.process_tracker.correction_distance_calculation_target(new_robot_coordinates, current_robot_coordinates) >= \ const.ROBOT_ARC_THRESHOLD_DISTANCE*0.8: self.target_arc = new_target_arc @@ -193,40 +193,42 @@ class ControlRobot(threading.Thread): if np.allclose(np.array(actual_point), np.array(self.target_arc[3:-1]), 0, 10): self.arc_motion_flag = False self.arc_motion_step_flag = const.ROBOT_MOTIONS["normal"] - coord = coord_inv + coord = new_robot_coordinates self.trck_init_robot.SendCoordinates(coord, self.arc_motion_step_flag) - def robot_control(self, coords_tracker_in_robot, coord_robot_raw, markers_flag): - coord_ref_tracker_in_robot = coords_tracker_in_robot[1] - coord_obj_tracker_in_robot = coords_tracker_in_robot[2] + def robot_control(self, current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag): + coord_head_tracker_in_robot = 
current_tracker_coordinates_in_robot[1] + marker_head_flag = markers_flag[1] + coord_obj_tracker_in_robot = current_tracker_coordinates_in_robot[2] + marker_obj_flag = markers_flag[2] if self.robot_tracker_flag: - current_ref = coord_ref_tracker_in_robot - if current_ref is not None and markers_flag[1]: - current_ref_filtered = self.process_tracker.kalman_filter(current_ref) - if self.process_tracker.compute_head_move_threshold(current_ref_filtered): - coord_inv = self.process_tracker.head_move_compensation(current_ref_filtered, - self.m_change_robot_to_head) + current_head = coord_head_tracker_in_robot + if current_head is not None and marker_head_flag: + current_head_filtered = self.process_tracker.kalman_filter(current_head) + if self.process_tracker.compute_head_move_threshold(current_head_filtered): + new_robot_coordinates = self.process_tracker.compute_head_move_compensation(current_head_filtered, + self.m_change_robot_to_head) if self.coord_inv_old is None: - self.coord_inv_old = coord_inv + self.coord_inv_old = new_robot_coordinates - if np.allclose(np.array(coord_inv), np.array(coord_robot_raw), 0, 0.01): + if np.allclose(np.array(new_robot_coordinates), np.array(current_robot_coordinates), 0, 0.01): # print("At target within range 1") pass - elif not np.allclose(np.array(coord_inv), np.array(self.coord_inv_old), 0, 5): + elif not np.allclose(np.array(new_robot_coordinates), np.array(self.coord_inv_old), 0, 5): self.trck_init_robot.StopRobot() - self.coord_inv_old = coord_inv + self.coord_inv_old = new_robot_coordinates else: - distance_target = self.process_tracker.correction_distance_calculation_target(coord_inv, coord_robot_raw) - self.robot_move_decision(distance_target, coord_inv, coord_robot_raw, current_ref_filtered) - self.coord_inv_old = coord_inv + distance_target = self.process_tracker.correction_distance_calculation_target(new_robot_coordinates, current_robot_coordinates) + self.robot_move_decision(distance_target, new_robot_coordinates, 
current_robot_coordinates, current_head_filtered) + self.coord_inv_old = new_robot_coordinates else: self.trck_init_robot.StopRobot() def run(self): while not self.event_robot.is_set(): - coords_tracker_in_robot, coord_robot_raw, markers_flag = self.get_coordinates_from_tracker_devices() + current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag = self.get_coordinates_from_tracker_devices() if self.robot_target_queue.empty(): None @@ -240,4 +242,4 @@ class ControlRobot(threading.Thread): self.target_flag = self.object_at_target_queue.get_nowait() self.object_at_target_queue.task_done() - self.robot_control(coords_tracker_in_robot, coord_robot_raw, markers_flag) + self.robot_control(current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag) diff --git a/optional-requirements.txt b/optional-requirements.txt index 129496b..cfb161a 100644 --- a/optional-requirements.txt +++ b/optional-requirements.txt @@ -5,3 +5,4 @@ python-rtmidi==1.4.9 python-socketio[client]==5.3.0 requests==2.26.0 uvicorn[standard]==0.15.0 +opencv-python==4.5.3 -- libgit2 0.21.2