#!/usr/bin/env python3
#####################################################
## librealsense T265 to MAVLink ##
#####################################################
# This script assumes the pyrealsense2.[].so file is found in the same directory as this script
# Install required packages:
# pip install pyrealsense2
# pip install transformations
# pip3 install dronekit
# pip3 install apscheduler
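# Example invocation (a sketch; the serial device and rates below are illustrative
# and should be adjusted to your own setup):
#   python3 t265_to_mavlink.py --connect /dev/ttyUSB0 --baudrate 921600 \
#       --vision_position_estimate_msg_hz 30 --camera_orientation 0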
# Set the path for IDLE
import sys
sys.path.append("/usr/local/lib/")
# Set MAVLink protocol to 2.
import os
os.environ["MAVLINK20"] = "1"
# Import the libraries
import pyrealsense2 as rs
import numpy as np
import transformations as tf
import math as m
import time
import argparse
import threading
from time import sleep
from apscheduler.schedulers.background import BackgroundScheduler
from dronekit import connect, VehicleMode
from pymavlink import mavutil
#######################################
# Parameters
#######################################
# Default configurations for connection to the FCU
connection_string_default = '/dev/ttyUSB0'
connection_baudrate_default = 921600
connection_timeout_sec_default = 5
# Transformation to convert different camera orientations to the NED convention. Set camera_orientation_default below to match your configuration.
# 0: Forward, USB port to the right
# 1: Downfacing, USB port to the right
# 2: Forward, 45 degree tilted down
# Important note for a downfacing camera: tilt the vehicle's nose up a little - not flat - before you run the script, otherwise the initial yaw will be randomized; read here for more details: https://github.com/IntelRealSense/librealsense/issues/4080. Tilting the vehicle to any other side may make the yaw less stable.
camera_orientation_default = 0
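# The orientation chosen here selects the H_aeroRef_T265Ref and H_T265body_aeroBody matrices below,
# which are applied in the main loop as:
#   H_aeroRef_aeroBody = H_aeroRef_T265Ref . H_T265Ref_T265body . H_T265body_aeroBody
# i.e. the raw T265 pose is re-expressed in the NED-style aeronautic reference and body frames.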
# https://mavlink.io/en/messages/common.html#VISION_POSITION_ESTIMATE
enable_msg_vision_position_estimate = True
vision_position_estimate_msg_hz_default = 30
# https://mavlink.io/en/messages/ardupilotmega.html#VISION_POSITION_DELTA
enable_msg_vision_position_delta = False
vision_position_delta_msg_hz_default = 30
# https://mavlink.io/en/messages/common.html#VISION_SPEED_ESTIMATE
enable_msg_vision_speed_estimate = True
vision_speed_estimate_msg_hz_default = 30
# https://mavlink.io/en/messages/common.html#STATUSTEXT
enable_update_tracking_confidence_to_gcs = True
update_tracking_confidence_to_gcs_hz_default = 1
# Default global position for EKF home / origin
enable_auto_set_ekf_home = False
home_lat = 151269321 # Somewhere random
home_lon = 16624301 # Somewhere random
home_alt = 163000 # Somewhere random
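# Note: the SET_GPS_GLOBAL_ORIGIN and SET_HOME_POSITION messages expect latitude/longitude in degE7 and altitude in mm,
# so the values above correspond to roughly 15.127 deg latitude, 1.662 deg longitude and 163 m altitude.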
# TODO: Taken care of by ArduPilot, so can be removed (once the handling on AP side is confirmed stable)
# In NED frame, offset from the IMU or the center of gravity to the camera's origin point
body_offset_enabled = 0
body_offset_x = 0 # In meters (m)
body_offset_y = 0 # In meters (m)
body_offset_z = 0 # In meters (m)
# Global scale factor, position x y z will be scaled up/down by this factor
scale_factor = 1.0
# Enable using yaw from compass to align north (zero degree is facing north)
compass_enabled = 0
# pose data confidence: 0x0 - Failed / 0x1 - Low / 0x2 - Medium / 0x3 - High
pose_data_confidence_level = ('FAILED', 'Low', 'Medium', 'High')
# lock for thread synchronization
lock = threading.Lock()
#######################################
# Global variables
#######################################
# FCU connection variables
vehicle = None
is_vehicle_connected = False
# Camera-related variables
pipe = None
pose_sensor = None
linear_accel_cov = 0.01
angular_vel_cov = 0.01
# Data variables
data = None
prev_data = None
H_aeroRef_aeroBody = None
V_aeroRef_aeroBody = None
heading_north_yaw = None
current_confidence_level = None
current_time_us = 0
# Incremented every time pose jumping or relocalization happens
# See here: https://github.com/IntelRealSense/librealsense/blob/master/doc/t265.md#are-there-any-t265-specific-options
# For ArduPilot, a non-zero "reset_counter" also confirms that the setup is using MAVLink2
reset_counter = 1
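# reset_counter is incremented by realsense_notification_callback() on relocalization events and by the
# pose-jump check in the main loop, and is sent with the VISION_POSITION_ESTIMATE and VISION_SPEED_ESTIMATE messages.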
#######################################
# Parsing user inputs
#######################################
parser = argparse.ArgumentParser(description='Sends pose data from an Intel RealSense T265 to the flight controller via MAVLink')
parser.add_argument('--connect',
                    help="Vehicle connection target string. If not specified, a default string will be used.")
parser.add_argument('--baudrate', type=float,
                    help="Vehicle connection baudrate. If not specified, a default value will be used.")
parser.add_argument('--vision_position_estimate_msg_hz', type=float,
                    help="Update frequency for VISION_POSITION_ESTIMATE message. If not specified, a default value will be used.")
parser.add_argument('--vision_position_delta_msg_hz', type=float,
                    help="Update frequency for VISION_POSITION_DELTA message. If not specified, a default value will be used.")
parser.add_argument('--vision_speed_estimate_msg_hz', type=float,
                    help="Update frequency for VISION_SPEED_ESTIMATE message. If not specified, a default value will be used.")
parser.add_argument('--scale_calib_enable', default=False, action='store_true',
                    help="Scale calibration. Only run while NOT in flight")
parser.add_argument('--camera_orientation', type=int,
                    help="Configuration for camera orientation. Currently supported: 0 - forward, USB port to the right; 1 - downfacing, USB port to the right; 2 - forward, tilted down 45 degrees")
parser.add_argument('--debug_enable', type=int,
                    help="Enable debug messages on terminal")

args = parser.parse_args()
args = parser.parse_args()
connection_string = args.connect
connection_baudrate = args.baudrate
vision_position_estimate_msg_hz = args.vision_position_estimate_msg_hz
vision_position_delta_msg_hz = args.vision_position_delta_msg_hz
vision_speed_estimate_msg_hz = args.vision_speed_estimate_msg_hz
scale_calib_enable = args.scale_calib_enable
camera_orientation = args.camera_orientation
debug_enable = args.debug_enable
# Using default values if no specified inputs
if not connection_string:
    connection_string = connection_string_default
    print("INFO: Using default connection_string", connection_string)
else:
    print("INFO: Using connection_string", connection_string)

if not connection_baudrate:
    connection_baudrate = connection_baudrate_default
    print("INFO: Using default connection_baudrate", connection_baudrate)
else:
    print("INFO: Using connection_baudrate", connection_baudrate)

if not vision_position_estimate_msg_hz:
    vision_position_estimate_msg_hz = vision_position_estimate_msg_hz_default
    print("INFO: Using default vision_position_estimate_msg_hz", vision_position_estimate_msg_hz)
else:
    print("INFO: Using vision_position_estimate_msg_hz", vision_position_estimate_msg_hz)

if not vision_position_delta_msg_hz:
    vision_position_delta_msg_hz = vision_position_delta_msg_hz_default
    print("INFO: Using default vision_position_delta_msg_hz", vision_position_delta_msg_hz)
else:
    print("INFO: Using vision_position_delta_msg_hz", vision_position_delta_msg_hz)

if not vision_speed_estimate_msg_hz:
    vision_speed_estimate_msg_hz = vision_speed_estimate_msg_hz_default
    print("INFO: Using default vision_speed_estimate_msg_hz", vision_speed_estimate_msg_hz)
else:
    print("INFO: Using vision_speed_estimate_msg_hz", vision_speed_estimate_msg_hz)

if body_offset_enabled == 1:
    print("INFO: Using camera position offset: Enabled, x y z is", body_offset_x, body_offset_y, body_offset_z)
else:
    print("INFO: Using camera position offset: Disabled")

if compass_enabled == 1:
    print("INFO: Using compass: Enabled. Heading will be aligned to north.")
else:
    print("INFO: Using compass: Disabled")

if scale_calib_enable == True:
    print("\nINFO: SCALE CALIBRATION PROCESS. DO NOT RUN DURING FLIGHT.\nINFO: TYPE IN NEW SCALE IN FLOATING POINT FORMAT\n")
else:
    if scale_factor == 1.0:
        print("INFO: Using default scale factor", scale_factor)
    else:
        print("INFO: Using scale factor", scale_factor)

if not camera_orientation:
    camera_orientation = camera_orientation_default
    print("INFO: Using default camera orientation", camera_orientation)
else:
    print("INFO: Using camera orientation", camera_orientation)

if camera_orientation == 0:     # Forward, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)
elif camera_orientation == 1:   # Downfacing, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.array([[0,1,0,0],[1,0,0,0],[0,0,-1,0],[0,0,0,1]])
elif camera_orientation == 2:   # 45degree forward
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = (tf.euler_matrix(m.pi/4, 0, 0)).dot(np.linalg.inv(H_aeroRef_T265Ref))
else:                           # Default is facing forward, USB port to the right
    H_aeroRef_T265Ref = np.array([[0,0,-1,0],[1,0,0,0],[0,-1,0,0],[0,0,0,1]])
    H_T265body_aeroBody = np.linalg.inv(H_aeroRef_T265Ref)

if not debug_enable:
    debug_enable = 0
else:
    debug_enable = 1
    np.set_printoptions(precision=4, suppress=True) # Format output on terminal
    print("INFO: Debug messages enabled.")
#######################################
# Functions
#######################################
# https://mavlink.io/en/messages/common.html#VISION_POSITION_ESTIMATE
def send_vision_position_estimate_message():
    global is_vehicle_connected, current_time_us, H_aeroRef_aeroBody, reset_counter
    with lock:
        if is_vehicle_connected == True and H_aeroRef_aeroBody is not None:
            # Setup angle data
            rpy_rad = np.array( tf.euler_from_matrix(H_aeroRef_aeroBody, 'sxyz'))

            # Setup covariance data, which is the upper right triangle of the covariance matrix, see here: https://files.gitter.im/ArduPilot/VisionProjects/1DpU/image.png
            # Attempt #01: following this formula https://github.com/IntelRealSense/realsense-ros/blob/development/realsense2_camera/src/base_realsense_node.cpp#L1406-L1411
            # With linear_accel_cov = 0.01, tracker confidence 3/2/1/0 maps to a position variance of 0.01/0.1/1/10
            cov_pose = linear_accel_cov * pow(10, 3 - int(data.tracker_confidence))
            cov_twist = angular_vel_cov * pow(10, 1 - int(data.tracker_confidence))
            covariance = np.array([cov_pose, 0, 0, 0, 0, 0,
                                      cov_pose, 0, 0, 0, 0,
                                         cov_pose, 0, 0, 0,
                                            cov_twist, 0, 0,
                                               cov_twist, 0,
                                                  cov_twist])

            # Setup the message to be sent
            msg = vehicle.message_factory.vision_position_estimate_encode(
                current_time_us,            # us Timestamp (UNIX time or time since system boot)
                H_aeroRef_aeroBody[0][3],   # Global X position
                H_aeroRef_aeroBody[1][3],   # Global Y position
                H_aeroRef_aeroBody[2][3],   # Global Z position
                rpy_rad[0],                 # Roll angle
                rpy_rad[1],                 # Pitch angle
                rpy_rad[2],                 # Yaw angle
                covariance,                 # Row-major representation of pose 6x6 cross-covariance matrix
                reset_counter               # Estimate reset counter. Increment every time pose estimate jumps.
            )
            vehicle.send_mavlink(msg)
            vehicle.flush()
# https://mavlink.io/en/messages/ardupilotmega.html#VISION_POSITION_DELTA
def send_vision_position_delta_message():
    global is_vehicle_connected, current_time_us, current_confidence_level, H_aeroRef_aeroBody
    with lock:
        if is_vehicle_connected == True and H_aeroRef_aeroBody is not None:
            # Calculate the deltas in position, attitude and time from the previous to current orientation
            H_aeroRef_PrevAeroBody = send_vision_position_delta_message.H_aeroRef_PrevAeroBody
            H_PrevAeroBody_CurrAeroBody = (np.linalg.inv(H_aeroRef_PrevAeroBody)).dot(H_aeroRef_aeroBody)

            delta_time_us = current_time_us - send_vision_position_delta_message.prev_time_us
            delta_position_m = [H_PrevAeroBody_CurrAeroBody[0][3], H_PrevAeroBody_CurrAeroBody[1][3], H_PrevAeroBody_CurrAeroBody[2][3]]
            delta_angle_rad = np.array( tf.euler_from_matrix(H_PrevAeroBody_CurrAeroBody, 'sxyz'))

            # Send the message
            msg = vehicle.message_factory.vision_position_delta_encode(
                current_time_us,            # us: Timestamp (UNIX time or time since system boot)
                delta_time_us,              # us: Time since last reported camera frame
                delta_angle_rad,            # float[3] in radian: Defines a rotation vector in body frame that rotates the vehicle from the previous to the current orientation
                delta_position_m,           # float[3] in m: Change in position from previous to current frame rotated into body frame (0=forward, 1=right, 2=down)
                current_confidence_level    # Normalized confidence value from 0 to 100.
            )
            vehicle.send_mavlink(msg)
            vehicle.flush()

            # Save static variables
            send_vision_position_delta_message.H_aeroRef_PrevAeroBody = H_aeroRef_aeroBody
            send_vision_position_delta_message.prev_time_us = current_time_us
# https://mavlink.io/en/messages/common.html#VISION_SPEED_ESTIMATE
def send_vision_speed_estimate_message():
    global is_vehicle_connected, current_time_us, V_aeroRef_aeroBody, reset_counter
    with lock:
        if is_vehicle_connected == True and V_aeroRef_aeroBody is not None:
            # Attempt #01: following this formula https://github.com/IntelRealSense/realsense-ros/blob/development/realsense2_camera/src/base_realsense_node.cpp#L1406-L1411
            cov_pose = linear_accel_cov * pow(10, 3 - int(data.tracker_confidence))
            covariance = np.array([cov_pose, 0,        0,
                                   0,        cov_pose, 0,
                                   0,        0,        cov_pose])

            # Setup the message to be sent
            msg = vehicle.message_factory.vision_speed_estimate_encode(
                current_time_us,            # us Timestamp (UNIX time or time since system boot)
                V_aeroRef_aeroBody[0][3],   # Global X speed
                V_aeroRef_aeroBody[1][3],   # Global Y speed
                V_aeroRef_aeroBody[2][3],   # Global Z speed
                covariance,                 # covariance
                reset_counter               # Estimate reset counter. Increment every time pose estimate jumps.
            )
            vehicle.send_mavlink(msg)
            vehicle.flush()
# Update the changes of confidence level on GCS and terminal
def update_tracking_confidence_to_gcs():
    # Guard against being called by the scheduler before the first pose frame has arrived
    if data is not None and update_tracking_confidence_to_gcs.prev_confidence_level != data.tracker_confidence:
        confidence_status_string = 'Tracking confidence: ' + pose_data_confidence_level[data.tracker_confidence]
        send_msg_to_gcs(confidence_status_string)
        update_tracking_confidence_to_gcs.prev_confidence_level = data.tracker_confidence
# https://mavlink.io/en/messages/common.html#STATUSTEXT
def send_msg_to_gcs(text_to_be_sent):
    # MAV_SEVERITY: 0=EMERGENCY, 1=ALERT, 2=CRITICAL, 3=ERROR, 4=WARNING, 5=NOTICE, 6=INFO, 7=DEBUG, 8=ENUM_END
    # Defined here: https://mavlink.io/en/messages/common.html#MAV_SEVERITY
    # MAV_SEVERITY = 3 will let the message be displayed on Mission Planner HUD, but 6 is ok for QGroundControl
    if is_vehicle_connected == True:
        text_msg = 'T265: ' + text_to_be_sent
        status_msg = vehicle.message_factory.statustext_encode(
            6,                  # MAV_SEVERITY
            text_msg.encode()   # max size is char[50]
        )
        vehicle.send_mavlink(status_msg)
        vehicle.flush()
        print("INFO: " + text_to_be_sent)
    else:
        print("INFO: Vehicle not connected. Cannot send text message to Ground Control Station (GCS)")
# Send a mavlink SET_GPS_GLOBAL_ORIGIN message (http://mavlink.org/messages/common#SET_GPS_GLOBAL_ORIGIN), which allows us to use local position information without a GPS.
def set_default_global_origin():
    if is_vehicle_connected == True:
        msg = vehicle.message_factory.set_gps_global_origin_encode(
            int(vehicle._master.source_system),
            home_lat,
            home_lon,
            home_alt
        )
        vehicle.send_mavlink(msg)
        vehicle.flush()
# Send a mavlink SET_HOME_POSITION message (http://mavlink.org/messages/common#SET_HOME_POSITION), which allows us to use local position information without a GPS.
def set_default_home_position():
    if is_vehicle_connected == True:
        x = 0
        y = 0
        z = 0
        q = [1, 0, 0, 0]   # w x y z
        approach_x = 0
        approach_y = 0
        approach_z = 1
        msg = vehicle.message_factory.set_home_position_encode(
            int(vehicle._master.source_system),
            home_lat,
            home_lon,
            home_alt,
            x,
            y,
            z,
            q,
            approach_x,
            approach_y,
            approach_z
        )
        vehicle.send_mavlink(msg)
        vehicle.flush()
# Request a timesync update from the flight controller, for future work.
# TODO: Inspect the usage of timesync_update
def update_timesync(ts=0, tc=0):
    if ts == 0:
        ts = int(round(time.time() * 1000))
    msg = vehicle.message_factory.timesync_encode(
        tc,     # tc1
        ts      # ts1
    )
    vehicle.send_mavlink(msg)
    vehicle.flush()
# Listen to attitude data to acquire heading when compass data is enabled
def att_msg_callback(self, attr_name, value):
    global heading_north_yaw
    if heading_north_yaw is None:
        heading_north_yaw = value.yaw
        print("INFO: Received first ATTITUDE message with heading yaw", heading_north_yaw * 180 / m.pi, "degrees")
    else:
        heading_north_yaw = value.yaw
        print("INFO: Received ATTITUDE message with heading yaw", heading_north_yaw * 180 / m.pi, "degrees")
def vehicle_connect():
    global vehicle, is_vehicle_connected

    try:
        vehicle = connect(connection_string, wait_ready = True, baud = connection_baudrate, source_system = 1)
    except:
        print('Connection error! Retrying...')
        sleep(1)

    if vehicle is None:
        is_vehicle_connected = False
        return False
    else:
        is_vehicle_connected = True
        return True
# List of notification events: https://github.com/IntelRealSense/librealsense/blob/development/include/librealsense2/h/rs_types.h
# List of notification API: https://github.com/IntelRealSense/librealsense/blob/development/common/notifications.cpp
def realsense_notification_callback(notif):
    global reset_counter
    print("INFO: T265 event: " + str(notif))
    if notif.get_category() is rs.notification_category.pose_relocalization:
        reset_counter += 1
        send_msg_to_gcs('Relocalization detected')
def realsense_connect():
    global pipe, pose_sensor

    # Declare RealSense pipeline, encapsulating the actual device and sensors
    pipe = rs.pipeline()

    # Build config object before requesting data
    cfg = rs.config()

    # Enable the stream we are interested in
    cfg.enable_stream(rs.stream.pose) # Positional data

    # Configure callback for relocalization event
    device = cfg.resolve(pipe).get_device()
    pose_sensor = device.first_pose_sensor()
    pose_sensor.set_notifications_callback(realsense_notification_callback)

    # Start streaming with requested config
    pipe.start(cfg)
# Monitor user input from the terminal and perform action accordingly
def user_input_monitor():
    global scale_factor
    while True:
        # Special case: updating scale
        if scale_calib_enable == True:
            scale_factor = float(input("INFO: Type in new scale as float number\n"))
            print("INFO: New scale is ", scale_factor)

        if enable_auto_set_ekf_home:
            send_msg_to_gcs('Set EKF home with default GPS location')
            set_default_global_origin()
            set_default_home_position()
            time.sleep(1) # Wait a short while for FCU to start working

        # Add new action here according to the key pressed.
        # Enter: Set EKF home when user presses Enter
        try:
            c = input()
            if c == "":
                send_msg_to_gcs('Set EKF home with default GPS location')
                set_default_global_origin()
                set_default_home_position()
            else:
                print("Got keyboard input", c)
        except IOError: pass
#######################################
# Main code starts here
#######################################
print("INFO: Connecting to vehicle.")
while (not vehicle_connect()):
    pass
print("INFO: Vehicle connected.")
send_msg_to_gcs('Connecting to camera...')
realsense_connect()
send_msg_to_gcs('Camera connected.')
if compass_enabled == 1:
    # Listen to the attitude data in aeronautical frame
    vehicle.add_message_listener('ATTITUDE', att_msg_callback)
# Send MAVLink messages in the background at pre-determined frequencies
sched = BackgroundScheduler()

if enable_msg_vision_position_estimate:
    sched.add_job(send_vision_position_estimate_message, 'interval', seconds = 1/vision_position_estimate_msg_hz)

if enable_msg_vision_position_delta:
    sched.add_job(send_vision_position_delta_message, 'interval', seconds = 1/vision_position_delta_msg_hz)
    send_vision_position_delta_message.H_aeroRef_PrevAeroBody = tf.quaternion_matrix([1,0,0,0])
    send_vision_position_delta_message.prev_time_us = int(round(time.time() * 1000000))

if enable_msg_vision_speed_estimate:
    sched.add_job(send_vision_speed_estimate_message, 'interval', seconds = 1/vision_speed_estimate_msg_hz)

if enable_update_tracking_confidence_to_gcs:
    sched.add_job(update_tracking_confidence_to_gcs, 'interval', seconds = 1/update_tracking_confidence_to_gcs_hz_default)
    update_tracking_confidence_to_gcs.prev_confidence_level = -1
# A separate thread to monitor user input
user_keyboard_input_thread = threading.Thread(target=user_input_monitor)
user_keyboard_input_thread.daemon = True
user_keyboard_input_thread.start()
sched.start()
if compass_enabled == 1:
    time.sleep(1) # Wait a short while for yaw to be correctly initiated
send_msg_to_gcs('Sending vision messages to FCU')
print("INFO: Press Enter to set EKF home at default location")
try:
    while True:
        # Monitor last_heartbeat to reconnect in case of lost connection
        if vehicle.last_heartbeat > connection_timeout_sec_default:
            is_vehicle_connected = False
            print("WARNING: CONNECTION LOST. Last heartbeat was %f sec ago." % vehicle.last_heartbeat)
            print("WARNING: Attempting to reconnect ...")
            vehicle_connect()
            continue

        # Wait for the next set of frames from the camera
        frames = pipe.wait_for_frames()

        # Fetch pose frame
        pose = frames.get_pose_frame()

        # Process data
        if pose:
            with lock:
                # Store the timestamp for MAVLink messages
                current_time_us = int(round(time.time() * 1000000))

                # Pose data consists of translation and rotation
                data = pose.get_pose_data()

                # Confidence level value from T265: 0-3, remapped to 0 - 100: 0% - Failed / 33.3% - Low / 66.6% - Medium / 100% - High
                current_confidence_level = float(data.tracker_confidence * 100 / 3)

                # In transformations, Quaternions w+ix+jy+kz are represented as [w, x, y, z]!
                H_T265Ref_T265body = tf.quaternion_matrix([data.rotation.w, data.rotation.x, data.rotation.y, data.rotation.z])
                H_T265Ref_T265body[0][3] = data.translation.x * scale_factor
                H_T265Ref_T265body[1][3] = data.translation.y * scale_factor
                H_T265Ref_T265body[2][3] = data.translation.z * scale_factor

                # Transform to aeronautic coordinates (body AND reference frame!)
                H_aeroRef_aeroBody = H_aeroRef_T265Ref.dot( H_T265Ref_T265body.dot( H_T265body_aeroBody))

                # Calculate GLOBAL XYZ speed (speed from T265 is already GLOBAL)
                V_aeroRef_aeroBody = tf.quaternion_matrix([1,0,0,0])
                V_aeroRef_aeroBody[0][3] = data.velocity.x
                V_aeroRef_aeroBody[1][3] = data.velocity.y
                V_aeroRef_aeroBody[2][3] = data.velocity.z
                V_aeroRef_aeroBody = H_aeroRef_T265Ref.dot(V_aeroRef_aeroBody)

                # Check for pose jump and increment reset_counter
                if prev_data is not None:
                    delta_translation = [data.translation.x - prev_data.translation.x, data.translation.y - prev_data.translation.y, data.translation.z - prev_data.translation.z]
                    position_displacement = np.linalg.norm(delta_translation)

                    # A pose jump is indicated when the position changes abruptly. The behavior is not well documented yet (as of librealsense 2.34.0)
                    jump_threshold = 0.1 # in meters, found by trial and error; should be scaled with how frequently the position data is obtained (200 Hz for the T265)
                    if (position_displacement > jump_threshold):
                        send_msg_to_gcs('Pose jump detected')
                        print("Position jumped by: ", position_displacement)
                        reset_counter += 1

                prev_data = data

                # Take offsets from body's center of gravity (or IMU) to camera's origin into account
                if body_offset_enabled == 1:
                    H_body_camera = tf.euler_matrix(0, 0, 0, 'sxyz')
                    H_body_camera[0][3] = body_offset_x
                    H_body_camera[1][3] = body_offset_y
                    H_body_camera[2][3] = body_offset_z
                    H_camera_body = np.linalg.inv(H_body_camera)
                    H_aeroRef_aeroBody = H_body_camera.dot(H_aeroRef_aeroBody.dot(H_camera_body))

                # Realign heading to face north using initial compass data
                if compass_enabled == 1:
                    H_aeroRef_aeroBody = H_aeroRef_aeroBody.dot( tf.euler_matrix(0, 0, heading_north_yaw, 'sxyz'))

                # Show debug messages here
                if debug_enable == 1:
                    os.system('clear') # This helps in displaying the messages to be more readable
                    print("DEBUG: Raw RPY[deg]: {}".format( np.array( tf.euler_from_matrix( H_T265Ref_T265body, 'sxyz')) * 180 / m.pi))
                    print("DEBUG: NED RPY[deg]: {}".format( np.array( tf.euler_from_matrix( H_aeroRef_aeroBody, 'sxyz')) * 180 / m.pi))
                    print("DEBUG: Raw pos xyz : {}".format( np.array( [data.translation.x, data.translation.y, data.translation.z])))
                    print("DEBUG: NED pos xyz : {}".format( np.array( tf.translation_from_matrix( H_aeroRef_aeroBody))))

except KeyboardInterrupt:
    send_msg_to_gcs('Closing the script...')

except:
    send_msg_to_gcs('ERROR IN SCRIPT')
    print("Unexpected error:", sys.exc_info()[0])

finally:
    pipe.stop()
    vehicle.close()
    print("INFO: Realsense pipeline and vehicle object closed.")
    sys.exit()