Commit c21b81c51dc00c7bbb874dfe0ae0641d27b4f7e4
1 parent 06dd9de7
Exists in master
ENH: more variable name refactoring
- added opencv to optional-requirements
Showing 5 changed files with 91 additions and 83 deletions
invesalius/data/bases.py
@@ -279,7 +279,7 @@ class transform_tracker_to_robot(object):
 
         M_tracker = dco.coordinates_to_transformation_matrix(
             position=tracker_coord[:3],
-            orientation= tracker_coord[3:6],
+            orientation=tracker_coord[3:6],
             axes='rzyx',
         )
         M_tracker_in_robot = transform_tracker_to_robot.M_tracker_to_robot @ M_tracker
invesalius/data/elfin_processing.py
@@ -20,16 +20,21 @@ import numpy as np
 import cv2
 from time import time
 
-import invesalius.data.transformations as tr
 import invesalius.data.coregistration as dcr
+import invesalius.data.coordinates as dco
 import invesalius.constants as const
 
 
 class KalmanTracker:
+    """
+    Kalman filter to avoid sudden fluctuation from tracker device.
+    The filter strength can be set by the cov_process, and cov_measure parameter
+    It is required to create one instance for each variable (x, y, z, a, b, g)
+    """
     def __init__(self,
                  state_num=2,
-                 cov_process=0.001,
-                 cov_measure=0.1):
+                 covariance_process=0.001,
+                 covariance_measure=0.1):
 
         self.state_num = state_num
         measure_num = 1
@@ -46,8 +51,8 @@ class KalmanTracker:
                                                  [0, 1]], np.float32)
         self.filter.measurementMatrix = np.array([[1, 1]], np.float32)
         self.filter.processNoiseCov = np.array([[1, 0],
-                                                [0, 1]], np.float32) * cov_process
-        self.filter.measurementNoiseCov = np.array( [[1]], np.float32) * cov_measure
+                                                [0, 1]], np.float32) * covariance_process
+        self.filter.measurementNoiseCov = np.array([[1]], np.float32) * covariance_measure
 
     def update_kalman(self, measurement):
         self.prediction = self.filter.predict()
@@ -67,8 +72,8 @@ class TrackerProcessing:
 
         self.tracker_stabilizers = [KalmanTracker(
             state_num=2,
-            cov_process=0.001,
-            cov_measure=0.1) for _ in range(6)]
+            covariance_process=0.001,
+            covariance_measure=0.1) for _ in range(6)]
 
     def kalman_filter(self, coord_tracker):
         kalman_array = []
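For context, a minimal standalone sketch (not part of this commit) of the same OpenCV Kalman filter configuration, illustrating what the renamed covariance_process and covariance_measure parameters control; all names and values below are illustrative only:

import cv2
import numpy as np

def make_filter(covariance_process=0.001, covariance_measure=0.1):
    # two state variables (value, velocity), one scalar measurement, as in KalmanTracker
    kf = cv2.KalmanFilter(2, 1, 0)
    kf.transitionMatrix = np.array([[1, 1],
                                    [0, 1]], np.float32)
    kf.measurementMatrix = np.array([[1, 1]], np.float32)
    # larger covariance_measure -> measurements are trusted less, giving stronger smoothing
    kf.processNoiseCov = np.eye(2, dtype=np.float32) * covariance_process
    kf.measurementNoiseCov = np.array([[1]], np.float32) * covariance_measure
    return kf

# one filter per tracked variable (x, y, z, a, b, g), each fed one scalar at a time
filters = [make_filter() for _ in range(6)]
noisy_pose = [10.2, 20.1, 30.3, 5.0, 0.1, -2.2]
for kf, value in zip(filters, noisy_pose):
    kf.predict()
    kf.correct(np.array([[value]], np.float32))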
@@ -100,41 +105,39 @@ class TrackerProcessing:
         init_point = np.array(init_point)
         final_point = np.array(final_point)
         norm = (sum((final_point - init_point) ** 2)) ** 0.5
-        versorfactor = (((final_point-init_point) / norm) * const.ROBOT_VERSOR_SCALE_FACTOR).tolist()
+        versor_factor = (((final_point-init_point) / norm) * const.ROBOT_VERSOR_SCALE_FACTOR).tolist()
 
-        return versorfactor
+        return versor_factor
 
-    def compute_arc_motion(self, actual_point, coord_head, coord_inv):
-        p1 = coord_inv
 
-        pc = coord_head[0], coord_head[1], coord_head[2], coord_inv[3], coord_inv[4], coord_inv[5]
+    def compute_arc_motion(self, current_robot_coordinates, head_center_coordinates, new_robot_coordinates):
+        head_center = head_center_coordinates[0], head_center_coordinates[1], head_center_coordinates[2], \
+                      new_robot_coordinates[3], new_robot_coordinates[4], new_robot_coordinates[5]
 
-        versorfactor1 = self.compute_versors(pc, actual_point)
-        init_ext_point = actual_point[0] + versorfactor1[0], \
-                         actual_point[1] + versorfactor1[1], \
-                         actual_point[2] + versorfactor1[2], \
-                         actual_point[3], actual_point[4], actual_point[5]
+        versor_factor_move_out = self.compute_versors(head_center, current_robot_coordinates)
+        init_move_out_point = current_robot_coordinates[0] + versor_factor_move_out[0], \
+                              current_robot_coordinates[1] + versor_factor_move_out[1], \
+                              current_robot_coordinates[2] + versor_factor_move_out[2], \
+                              current_robot_coordinates[3], current_robot_coordinates[4], current_robot_coordinates[5]
 
-        middle_point = ((p1[0] + actual_point[0]) / 2,
-                        (p1[1] + actual_point[1]) / 2,
-                        (p1[2] + actual_point[2]) / 2,
+        middle_point = ((new_robot_coordinates[0] + current_robot_coordinates[0]) / 2,
+                        (new_robot_coordinates[1] + current_robot_coordinates[1]) / 2,
+                        (new_robot_coordinates[2] + current_robot_coordinates[2]) / 2,
                         0, 0, 0)
+        versor_factor_middle_arc = (np.array(self.compute_versors(head_center, middle_point))) * 2
+        middle_arc_point = middle_point[0] + versor_factor_middle_arc[0], \
+                           middle_point[1] + versor_factor_middle_arc[1], \
+                           middle_point[2] + versor_factor_middle_arc[2]
 
-        newarr = (np.array(self.compute_versors(pc, middle_point))) * 2
-        middle_arc_point = middle_point[0] + newarr[0], \
-                           middle_point[1] + newarr[1], \
-                           middle_point[2] + newarr[2]
-
-        versorfactor3 = self.compute_versors(pc, p1)
-
-        final_ext_arc_point = p1[0] + versorfactor3[0], \
-                              p1[1] + versorfactor3[1], \
-                              p1[2] + versorfactor3[2], \
-                              coord_inv[3], coord_inv[4], coord_inv[5], 0
+        versor_factor_arc = self.compute_versors(head_center, new_robot_coordinates)
+        final_ext_arc_point = new_robot_coordinates[0] + versor_factor_arc[0], \
+                              new_robot_coordinates[1] + versor_factor_arc[1], \
+                              new_robot_coordinates[2] + versor_factor_arc[2], \
+                              new_robot_coordinates[3], new_robot_coordinates[4], new_robot_coordinates[5], 0
 
         target_arc = middle_arc_point + final_ext_arc_point
 
-        return init_ext_point, target_arc
+        return init_move_out_point, target_arc
 
     def compute_head_move_threshold(self, current_ref):
         self.coord_vel.append(current_ref)
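To make the versor computation above concrete, a small numeric sketch; the scale factor value is assumed here purely for illustration, the real constant lives in invesalius.constants:

import numpy as np

ROBOT_VERSOR_SCALE_FACTOR = 50  # hypothetical value for illustration

init_point = np.array([0.0, 0.0, 0.0])
final_point = np.array([30.0, 40.0, 0.0])
norm = (sum((final_point - init_point) ** 2)) ** 0.5                      # 50.0
versor_factor = ((final_point - init_point) / norm) * ROBOT_VERSOR_SCALE_FACTOR
# [30., 40., 0.]: the unit direction from init_point to final_point, scaled to a fixed offset length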
@@ -158,24 +161,23 @@ class TrackerProcessing:
 
         return False
 
-    def head_move_compensation(self, current_ref, m_change_robot_to_head):
-        trans = tr.translation_matrix(current_ref[:3])
-        a, b, g = np.radians(current_ref[3:6])
-        rot = tr.euler_matrix(a, b, g, 'rzyx')
-        M_current_ref = tr.concatenate_matrices(trans, rot)
-
-        m_robot_new = M_current_ref @ m_change_robot_to_head
-        _, _, angles, translate, _ = tr.decompose_matrix(m_robot_new)
-        angles = np.degrees(angles)
-
-        return m_robot_new[0, -1], m_robot_new[1, -1], m_robot_new[2, -1], angles[0], angles[1], angles[2]
-
-    def estimate_head_center(self, tracker, current_ref):
-        m_probe_ref_left, m_probe_ref_right, m_probe_ref_nasion = tracker.GetMatrixTrackerFiducials()
-        m_current_ref = dcr.compute_marker_transformation(np.array([current_ref]), 0)
-
-        m_ear_left_new = m_current_ref @ m_probe_ref_left
-        m_ear_right_new = m_current_ref @ m_probe_ref_right
+    def compute_head_move_compensation(self, current_head, M_change_robot_to_head):
+        M_current_head = dco.coordinates_to_transformation_matrix(
+            position=current_head[:3],
+            orientation=current_head[3:6],
+            axes='rzyx',
+        )
+        M_robot_new = M_current_head @ M_change_robot_to_head
+        angles_as_deg, translate = dco.transformation_matrix_to_coordinates(M_robot_new, axes='rzyx')
+        #TODO: check this with robot
+        return list(translate) + list(angles_as_deg)
+
+    def estimate_head_center(self, tracker, current_head):
+        m_probe_head_left, m_probe_head_right, m_probe_head_nasion = tracker.GetMatrixTrackerFiducials()
+        m_current_head = dcr.compute_marker_transformation(np.array([current_head]), 0)
+
+        m_ear_left_new = m_current_head @ m_probe_head_left
+        m_ear_right_new = m_current_head @ m_probe_head_right
 
         return (m_ear_left_new[:3, -1] + m_ear_right_new[:3, -1])/2
 
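The new compute_head_move_compensation composes two rigid transforms and converts the result back to a pose. A self-contained sketch of that idea, using SciPy in place of the project's dco helpers and made-up poses, purely for illustration:

import numpy as np
from scipy.spatial.transform import Rotation

def pose_to_matrix(position, orientation_deg):
    # homogeneous 4x4 from a translation and intrinsic z-y'-x'' Euler angles (the 'rzyx' convention)
    M = np.eye(4)
    M[:3, :3] = Rotation.from_euler('ZYX', orientation_deg, degrees=True).as_matrix()
    M[:3, 3] = position
    return M

def matrix_to_pose(M):
    angles_deg = Rotation.from_matrix(M[:3, :3]).as_euler('ZYX', degrees=True)
    return M[:3, 3], angles_deg

M_current_head = pose_to_matrix([10.0, 20.0, 30.0], [5.0, 0.0, -2.0])      # head pose in robot space (example)
M_change_robot_to_head = pose_to_matrix([1.0, 0.0, 0.0], [0.0, 0.0, 0.0])  # fixed offset captured at registration (example)

M_robot_new = M_current_head @ M_change_robot_to_head
translate, angles_as_deg = matrix_to_pose(M_robot_new)
new_robot_coordinates = list(translate) + list(angles_as_deg)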
invesalius/gui/dialogs.py
@@ -4324,6 +4324,10 @@ class SetOptitrackconfigs(wx.Dialog):
         return fn_cal, fn_userprofile
 
 class SetTrackerDeviceToRobot(wx.Dialog):
+    """
+    Robot navigation requires a tracker device to tracker the head position and the object (coil) position.
+    A dialog pops up showing a combobox with all trackers but debugs and the robot itself (const.TRACKERS[:-3])
+    """
     def __init__(self, title=_("Setting tracker device:")):
         wx.Dialog.__init__(self, wx.GetApp().GetTopWindow(), -1, title, size=wx.Size(1000, 200),
                            style=wx.DEFAULT_DIALOG_STYLE|wx.FRAME_FLOAT_ON_PARENT|wx.STAY_ON_TOP|wx.RESIZE_BORDER)
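For clarity, the const.TRACKERS[:-3] slice mentioned in the docstring simply drops the last three entries of the tracker list; the contents below are hypothetical, since the real list lives in invesalius.constants:

TRACKERS = ["MicronTracker", "Polaris", "Optitrack",
            "Debug tracker (random)", "Debug tracker (approach)", "Robot tracker"]
selectable_trackers = TRACKERS[:-3]   # everything except the debug trackers and the robot itself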
@@ -4339,7 +4343,6 @@ class SetTrackerDeviceToRobot(wx.Dialog):
         choice_trck.SetToolTip(tooltip)
         choice_trck.SetSelection(const.DEFAULT_TRACKER)
         choice_trck.Bind(wx.EVT_COMBOBOX, partial(self.OnChoiceTracker, ctrl=choice_trck))
-        #self.choice_trck = choice_trck
 
         btn_ok = wx.Button(self, wx.ID_OK)
         btn_ok.SetHelpText("")
invesalius/navigation/robot.py
@@ -154,18 +154,18 @@ class ControlRobot(threading.Thread):
 
         return coord_raw, coord_robot_raw, markers_flag
 
-    def robot_move_decision(self, distance_target, coord_inv, coord_robot_raw, current_ref_filtered):
+    def robot_move_decision(self, distance_target, new_robot_coordinates, current_robot_coordinates, current_head_filtered):
         if distance_target < const.ROBOT_ARC_THRESHOLD_DISTANCE and not self.arc_motion_flag:
-            self.trck_init_robot.SendCoordinates(coord_inv, const.ROBOT_MOTIONS["normal"])
+            self.trck_init_robot.SendCoordinates(new_robot_coordinates, const.ROBOT_MOTIONS["normal"])
 
         elif distance_target >= const.ROBOT_ARC_THRESHOLD_DISTANCE or self.arc_motion_flag:
-            actual_point = coord_robot_raw
+            actual_point = current_robot_coordinates
             if not self.arc_motion_flag:
-                coord_head = self.process_tracker.estimate_head_center(self.tracker,
-                                                                       current_ref_filtered).tolist()
+                head_center_coordinates = self.process_tracker.estimate_head_center(self.tracker,
+                                                                                    current_head_filtered).tolist()
 
-                self.target_linear_out, self.target_arc = self.process_tracker.compute_arc_motion(coord_robot_raw, coord_head,
-                                                                                                   coord_inv)
+                self.target_linear_out, self.target_arc = self.process_tracker.compute_arc_motion(current_robot_coordinates, head_center_coordinates,
+                                                                                                  new_robot_coordinates)
                 self.arc_motion_flag = True
                 self.arc_motion_step_flag = const.ROBOT_MOTIONS["linear out"]
 
@@ -176,15 +176,15 @@ class ControlRobot(threading.Thread):
                 coord = self.target_arc
 
             elif self.arc_motion_flag and self.arc_motion_step_flag == const.ROBOT_MOTIONS["arc"]:
-                coord_head = self.process_tracker.estimate_head_center(self.tracker,
-                                                                       current_ref_filtered).tolist()
+                head_center_coordinates = self.process_tracker.estimate_head_center(self.tracker,
+                                                                                    current_head_filtered).tolist()
 
-                _, new_target_arc = self.process_tracker.compute_arc_motion(coord_robot_raw, coord_head,
-                                                                            coord_inv)
+                _, new_target_arc = self.process_tracker.compute_arc_motion(current_robot_coordinates, head_center_coordinates,
+                                                                            new_robot_coordinates)
                 if np.allclose(np.array(new_target_arc[3:-1]), np.array(self.target_arc[3:-1]), 0, 1):
                     None
                 else:
-                    if self.process_tracker.correction_distance_calculation_target(coord_inv, coord_robot_raw) >= \
+                    if self.process_tracker.correction_distance_calculation_target(new_robot_coordinates, current_robot_coordinates) >= \
                        const.ROBOT_ARC_THRESHOLD_DISTANCE*0.8:
                         self.target_arc = new_target_arc
 
@@ -193,40 +193,42 @@ class ControlRobot(threading.Thread):
             if np.allclose(np.array(actual_point), np.array(self.target_arc[3:-1]), 0, 10):
                 self.arc_motion_flag = False
                 self.arc_motion_step_flag = const.ROBOT_MOTIONS["normal"]
-                coord = coord_inv
+                coord = new_robot_coordinates
 
             self.trck_init_robot.SendCoordinates(coord, self.arc_motion_step_flag)
 
-    def robot_control(self, coords_tracker_in_robot, coord_robot_raw, markers_flag):
-        coord_ref_tracker_in_robot = coords_tracker_in_robot[1]
-        coord_obj_tracker_in_robot = coords_tracker_in_robot[2]
+    def robot_control(self, current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag):
+        coord_head_tracker_in_robot = current_tracker_coordinates_in_robot[1]
+        marker_head_flag = markers_flag[1]
+        coord_obj_tracker_in_robot = current_tracker_coordinates_in_robot[2]
+        marker_obj_flag = markers_flag[2]
 
         if self.robot_tracker_flag:
-            current_ref = coord_ref_tracker_in_robot
-            if current_ref is not None and markers_flag[1]:
-                current_ref_filtered = self.process_tracker.kalman_filter(current_ref)
-                if self.process_tracker.compute_head_move_threshold(current_ref_filtered):
-                    coord_inv = self.process_tracker.head_move_compensation(current_ref_filtered,
-                                                                            self.m_change_robot_to_head)
+            current_head = coord_head_tracker_in_robot
+            if current_head is not None and marker_head_flag:
+                current_head_filtered = self.process_tracker.kalman_filter(current_head)
+                if self.process_tracker.compute_head_move_threshold(current_head_filtered):
+                    new_robot_coordinates = self.process_tracker.compute_head_move_compensation(current_head_filtered,
+                                                                                                self.m_change_robot_to_head)
                     if self.coord_inv_old is None:
-                        self.coord_inv_old = coord_inv
+                        self.coord_inv_old = new_robot_coordinates
 
-                    if np.allclose(np.array(coord_inv), np.array(coord_robot_raw), 0, 0.01):
+                    if np.allclose(np.array(new_robot_coordinates), np.array(current_robot_coordinates), 0, 0.01):
                         # print("At target within range 1")
                         pass
-                    elif not np.allclose(np.array(coord_inv), np.array(self.coord_inv_old), 0, 5):
+                    elif not np.allclose(np.array(new_robot_coordinates), np.array(self.coord_inv_old), 0, 5):
                         self.trck_init_robot.StopRobot()
-                        self.coord_inv_old = coord_inv
+                        self.coord_inv_old = new_robot_coordinates
                     else:
-                        distance_target = self.process_tracker.correction_distance_calculation_target(coord_inv, coord_robot_raw)
-                        self.robot_move_decision(distance_target, coord_inv, coord_robot_raw, current_ref_filtered)
-                        self.coord_inv_old = coord_inv
+                        distance_target = self.process_tracker.correction_distance_calculation_target(new_robot_coordinates, current_robot_coordinates)
+                        self.robot_move_decision(distance_target, new_robot_coordinates, current_robot_coordinates, current_head_filtered)
+                        self.coord_inv_old = new_robot_coordinates
             else:
                 self.trck_init_robot.StopRobot()
 
     def run(self):
         while not self.event_robot.is_set():
-            coords_tracker_in_robot, coord_robot_raw, markers_flag = self.get_coordinates_from_tracker_devices()
+            current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag = self.get_coordinates_from_tracker_devices()
 
             if self.robot_target_queue.empty():
                 None
@@ -240,4 +242,4 @@ class ControlRobot(threading.Thread):
                     self.target_flag = self.object_at_target_queue.get_nowait()
                     self.object_at_target_queue.task_done()
 
-            self.robot_control(coords_tracker_in_robot, coord_robot_raw, markers_flag)
+            self.robot_control(current_tracker_coordinates_in_robot, current_robot_coordinates, markers_flag)
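A note on the np.allclose(a, b, 0, tol) pattern used throughout robot_control and robot_move_decision: with rtol passed as 0 it reduces to a per-component absolute-tolerance check. A small sketch with made-up values:

import numpy as np

target = np.array([100.0, 50.0, 20.0, 10.0, 0.0, 5.0])
current = np.array([100.004, 50.002, 19.998, 10.0, 0.0, 5.0])

np.allclose(target, current, 0, 0.01)    # True: every component is within 0.01
np.allclose(target, current, 0, 0.001)   # False: the first component differs by 0.004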
optional-requirements.txt