Merge pull request #795 from luxonis/develop
Release v2.21.0.0
SzabolcsGergely authored Apr 1, 2023
2 parents 23aa63f + 1f162f1 commit b6b20c9
Showing 44 changed files with 407 additions and 120 deletions.
2 changes: 1 addition & 1 deletion depthai-core
Submodule depthai-core updated 59 files
+4 −4 .github/workflows/main.workflow.yml
+1 −1 CMakeLists.txt
+23 −0 README.md
+1 −1 cmake/Depthai/DepthaiDeviceSideConfig.cmake
+4 −4 cmake/Hunter/config.cmake
+7 −0 examples/CMakeLists.txt
+41 −0 examples/CrashReport/crash_report.cpp
+1 −3 examples/FeatureTracker/feature_tracker.cpp
+24 −58 examples/IMU/imu_firmware_update.cpp
+28 −0 examples/SpatialDetection/spatial_location_calculator.cpp
+0 −1 examples/StereoDepth/stereo_depth_video.cpp
+9 −1 include/depthai/device/DataQueue.hpp
+58 −9 include/depthai/device/DeviceBase.hpp
+12 −1 include/depthai/device/DeviceBootloader.hpp
+24 −0 include/depthai/pipeline/Pipeline.hpp
+7 −0 include/depthai/pipeline/datatype/StereoDepthConfig.hpp
+9 −0 include/depthai/pipeline/node/Camera.hpp
+10 −0 include/depthai/pipeline/node/ColorCamera.hpp
+10 −0 include/depthai/pipeline/node/MonoCamera.hpp
+5 −0 include/depthai/pipeline/node/ObjectTracker.hpp
+2 −2 include/depthai/pipeline/node/SPIOut.hpp
+8 −1 include/depthai/pipeline/node/SpatialDetectionNetwork.hpp
+28 −0 include/depthai/pipeline/node/StereoDepth.hpp
+1 −1 include/depthai/pipeline/node/Warp.hpp
+1 −4 include/depthai/utility/LockingQueue.hpp
+4 −1 include/depthai/xlink/XLinkConnection.hpp
+1 −0 include/depthai/xlink/XLinkStream.hpp
+1 −1 shared/depthai-shared
+2 −2 src/device/CalibrationHandler.cpp
+11 −2 src/device/DataQueue.cpp
+0 −14 src/device/Device.cpp
+62 −89 src/device/DeviceBase.cpp
+31 −13 src/device/DeviceBootloader.cpp
+7 −7 src/pipeline/Node.cpp
+12 −4 src/pipeline/Pipeline.cpp
+5 −0 src/pipeline/datatype/StereoDepthConfig.cpp
+4 −0 src/pipeline/node/Camera.cpp
+13 −9 src/pipeline/node/ColorCamera.cpp
+7 −3 src/pipeline/node/MonoCamera.cpp
+1 −1 src/pipeline/node/NeuralNetwork.cpp
+3 −0 src/pipeline/node/ObjectTracker.cpp
+2 −2 src/pipeline/node/SPIIn.cpp
+4 −0 src/pipeline/node/SpatialDetectionNetwork.cpp
+16 −0 src/pipeline/node/StereoDepth.cpp
+0 −1 src/utility/Initialization.cpp
+5 −5 src/utility/PimplImpl.hpp
+16 −26 src/xlink/XLinkConnection.cpp
+9 −4 tests/CMakeLists.txt
+1 −2 tests/src/bootloader_config_test.cpp
+1 −2 tests/src/bootloader_version_test.cpp
+1 −2 tests/src/device_usbspeed_test.cpp
+27 −26 tests/src/filesystem_test.cpp
+1 −2 tests/src/logging_test.cpp
+1 −2 tests/src/neural_network_test.cpp
+1 −2 tests/src/openvino_blob_test.cpp
+1 −2 tests/src/pipeline_test.cpp
+1 −2 tests/src/serialization_test.cpp
+1 −2 tests/src/unlimited_io_connection_test.cpp
+1 −2 tests/src/xlink_roundtrip_test.cpp
17 changes: 17 additions & 0 deletions docs/source/components/nodes/imu.rst
@@ -50,6 +50,23 @@ Limitations
- For BNO086, gyroscope frequencies above 400 Hz can occasionally produce jitter due to a sensor HW limitation.
- **Maximum frequencies**: 500 Hz raw accelerometer and 1000 Hz raw gyroscope values individually, and 500 Hz combined (synced) output. You can obtain the combined synced 500 Hz output with :code:`imu.enableIMUSensor([dai.IMUSensor.ACCELEROMETER_RAW, dai.IMUSensor.GYROSCOPE_RAW], 500)`, as sketched below.
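
A minimal sketch of requesting the combined synced 500 Hz output (the stream and queue names are illustrative; the node setup mirrors the DepthAI IMU examples):

.. code-block:: python

    import depthai as dai

    pipeline = dai.Pipeline()

    imu = pipeline.create(dai.node.IMU)
    xlinkOut = pipeline.create(dai.node.XLinkOut)
    xlinkOut.setStreamName("imu")

    # Both raw sensors at 500 Hz -> reports arrive as one combined, synced stream
    imu.enableIMUSensor([dai.IMUSensor.ACCELEROMETER_RAW, dai.IMUSensor.GYROSCOPE_RAW], 500)
    imu.setBatchReportThreshold(1)
    imu.setMaxBatchReports(10)
    imu.out.link(xlinkOut.input)

    with dai.Device(pipeline) as device:
        imuQueue = device.getOutputQueue(name="imu", maxSize=50, blocking=False)
        while True:
            imuData = imuQueue.get()  # blocking call, waits for new packets
            for packet in imuData.packets:
                accel, gyro = packet.acceleroMeter, packet.gyroscope
                print(f"accel [m/s^2]: {accel.x:.3f} {accel.y:.3f} {accel.z:.3f}  gyro [rad/s]: {gyro.x:.3f} {gyro.y:.3f} {gyro.z:.3f}")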

IMU sensor frequencies
######################

Below are the discrete **stable frequencies** available for each (raw) IMU sensor. Some maximum IMU frequencies are higher; e.g.
for BNO086 the gyroscope maximum is 1000 Hz, but only rates up to 400 Hz are stable (due to a driver limitation). The sketch after the lists shows how to request one of these stable rates.

**BNO086:**

- Accelerometer: 100Hz, 200Hz, 400Hz
- Gyroscope: 125Hz, 250Hz, 400Hz
- Magnetometer: 100Hz

**BMI270:**

- Accelerometer: 25Hz, 50Hz, 100Hz, 200Hz, 400Hz
- Gyroscope: 25Hz, 50Hz, 100Hz, 200Hz, 400Hz
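
For example, to stay within the stable range on BMI270 you would request at most 400 Hz per raw sensor (a short sketch; :code:`imu` is an IMU node created as in the usage example below):

.. code-block:: python

    # 400 Hz is the highest stable rate for the BMI270 accelerometer and gyroscope
    imu.enableIMUSensor(dai.IMUSensor.ACCELEROMETER_RAW, 400)
    imu.enableIMUSensor(dai.IMUSensor.GYROSCOPE_RAW, 400)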

Usage
#####

34 changes: 34 additions & 0 deletions examples/CrashReport/crash_report.py
@@ -0,0 +1,34 @@
#!/usr/bin/env python3

import cv2
import depthai as dai
from json import dump
from os.path import exists

# Connect to device and start pipeline
with dai.Device() as device:

    if device.hasCrashDump():
        crashDump = device.getCrashDump()
        commitHash = crashDump.depthaiCommitHash
        deviceId = crashDump.deviceId

        json = crashDump.serializeToJson()

        i = -1
        while True:
            i += 1
            destPath = "crashDump_" + str(i) + "_" + deviceId + "_" + commitHash + ".json"
            if exists(destPath):
                continue

            with open(destPath, 'w', encoding='utf-8') as f:
                dump(json, f, ensure_ascii=False, indent=4)

            print("Crash dump found on your device!")
            print(f"Saved to {destPath}")
            print("Please report to developers!")
            break
    else:
        print("There was no crash dump found on your device!")

75 changes: 20 additions & 55 deletions examples/IMU/imu_firmware_update.py
@@ -5,70 +5,35 @@
import time
import math

device = dai.Device()

imuVersion = device.getConnectedIMU()
imuFirmwareVersion = device.getIMUFirmwareVersion()
embeddedIMUFirmwareVersion = device.getEmbeddedIMUFirmwareVersion()
print(f"IMU type: {imuVersion}, firmware version: {imuFirmwareVersion}, embedded firmware version: {embeddedIMUFirmwareVersion}")

print("Warning! Flashing IMU firmware can potentially soft brick your device and should be done with caution.")
print("Do not unplug your device while the IMU firmware is flashing.")
print("Type 'y' and press enter to proceed, otherwise exits: ")
if input() != 'y':
print("Prompt declined, exiting...")
exit(-1)

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
imu = pipeline.create(dai.node.IMU)
xlinkOut = pipeline.create(dai.node.XLinkOut)

xlinkOut.setStreamName("imu")

# enable ACCELEROMETER_RAW at 500 hz rate
imu.enableIMUSensor(dai.IMUSensor.ACCELEROMETER_RAW, 500)
# enable GYROSCOPE_RAW at 400 hz rate
imu.enableIMUSensor(dai.IMUSensor.GYROSCOPE_RAW, 400)
# it's recommended to set both setBatchReportThreshold and setMaxBatchReports to 20 when integrating in a pipeline with a lot of input/output connections
# above this threshold packets will be sent in batch of X, if the host is not blocked and USB bandwidth is available
imu.setBatchReportThreshold(1)
# maximum number of IMU packets in a batch, if it's reached device will block sending until host can receive it
# if lower or equal to batchReportThreshold then the sending is always blocking on device
# useful to reduce device's CPU load and number of lost packets, if CPU load is high on device side due to multiple nodes
imu.setMaxBatchReports(10)

# Link plugins IMU -> XLINK
imu.out.link(xlinkOut.input)

imu.enableFirmwareUpdate(True)

# Pipeline is defined, now we can connect to the device
with dai.Device(pipeline) as device:
with device:

def timeDeltaToMilliS(delta) -> float:
return delta.total_seconds()*1000
started = device.startIMUFirmwareUpdate()
if not started:
print("Couldn't start IMU firmware update")
exit(1)

# Output queue for imu bulk packets
imuQueue = device.getOutputQueue(name="imu", maxSize=50, blocking=False)
baseTs = None
while True:
imuData = imuQueue.get() # blocking call, will wait until a new data has arrived

imuPackets = imuData.packets
for imuPacket in imuPackets:
acceleroValues = imuPacket.acceleroMeter
gyroValues = imuPacket.gyroscope

acceleroTs = acceleroValues.getTimestampDevice()
gyroTs = gyroValues.getTimestampDevice()
if baseTs is None:
baseTs = acceleroTs if acceleroTs < gyroTs else gyroTs
acceleroTs = timeDeltaToMilliS(acceleroTs - baseTs)
gyroTs = timeDeltaToMilliS(gyroTs - baseTs)

imuF = "{:.06f}"
tsF = "{:.03f}"

print(f"Accelerometer timestamp: {tsF.format(acceleroTs)} ms")
print(f"Accelerometer [m/s^2]: x: {imuF.format(acceleroValues.x)} y: {imuF.format(acceleroValues.y)} z: {imuF.format(acceleroValues.z)}")
print(f"Gyroscope timestamp: {tsF.format(gyroTs)} ms")
print(f"Gyroscope [rad/s]: x: {imuF.format(gyroValues.x)} y: {imuF.format(gyroValues.y)} z: {imuF.format(gyroValues.z)} ")

if cv2.waitKey(1) == ord('q'):
fwUpdateFinished, percentage = device.getIMUFirmwareUpdateStatus()
print(f"IMU FW update status: {percentage:.1f}%")
if fwUpdateFinished:
if percentage == 100:
print("Firmware update successful!")
else:
print("Firmware update failed!")
break
time.sleep(1)
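
Because the hunk above interleaves removed and added lines, the updated example is easier to read as a consolidated sketch (an approximation based only on the calls visible in this diff: getConnectedIMU, getIMUFirmwareVersion, getEmbeddedIMUFirmwareVersion, startIMUFirmwareUpdate and getIMUFirmwareUpdateStatus):

import depthai as dai
import time

device = dai.Device()

print(f"IMU type: {device.getConnectedIMU()}, firmware version: {device.getIMUFirmwareVersion()}, "
      f"embedded firmware version: {device.getEmbeddedIMUFirmwareVersion()}")

print("Warning! Flashing IMU firmware can potentially soft brick your device and should be done with caution.")
print("Do not unplug your device while the IMU firmware is flashing.")
print("Type 'y' and press enter to proceed, otherwise exits: ")
if input() != 'y':
    print("Prompt declined, exiting...")
    exit(-1)

with device:
    # Kick off the on-device IMU firmware flash
    if not device.startIMUFirmwareUpdate():
        print("Couldn't start IMU firmware update")
        exit(1)

    # Poll progress until the device reports that the update finished
    while True:
        fwUpdateFinished, percentage = device.getIMUFirmwareUpdateStatus()
        print(f"IMU FW update status: {percentage:.1f}%")
        if fwUpdateFinished:
            print("Firmware update successful!" if percentage == 100 else "Firmware update failed!")
            break
        time.sleep(1)
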
13 changes: 12 additions & 1 deletion examples/IMU/imu_rotation_vector.py
@@ -5,6 +5,16 @@
import time
import math

device = dai.Device()

imuType = device.getConnectedIMU()
imuFirmwareVersion = device.getIMUFirmwareVersion()
print(f"IMU type: {imuType}, firmware version: {imuFirmwareVersion}")

if imuType != "BNO086":
print("Rotation vector output is supported only by BNO086!")
exit(1)

# Create pipeline
pipeline = dai.Pipeline()

@@ -28,7 +38,8 @@
imu.out.link(xlinkOut.input)

# Pipeline is defined, now we can connect to the device
with dai.Device(pipeline) as device:
with device:
device.startPipeline(pipeline)

def timeDeltaToMilliS(delta) -> float:
return delta.total_seconds()*1000
9 changes: 9 additions & 0 deletions examples/IMU/imu_version.py
@@ -0,0 +1,9 @@
#!/usr/bin/env python3

import depthai as dai

device = dai.Device()

imuType = device.getConnectedIMU()
imuFirmwareVersion = device.getIMUFirmwareVersion()
print(f"IMU type: {imuType}, firmware version: {imuFirmwareVersion}")
Empty file modified examples/ImageManip/image_manip_warp_mesh.py
100644 → 100755
Empty file.
Empty file modified examples/MonoCamera/mono_preview_alternate_pro.py
100644 → 100755
Empty file.
Empty file modified examples/NeuralNetwork/concat_multi_input.py
100644 → 100755
Empty file.
10 changes: 5 additions & 5 deletions examples/NeuralNetwork/detection_parser.py
@@ -42,11 +42,11 @@
nn.setNumInferenceThreads(2)
nn.input.setBlocking(False)

blob = dai.OpenVINO.Blob(args.nnPath);
nn.setBlob(blob);
det.setBlob(blob);
det.setNNFamily(dai.DetectionNetworkType.MOBILENET);
det.setConfidenceThreshold(0.5);
blob = dai.OpenVINO.Blob(args.nnPath)
nn.setBlob(blob)
det.setBlob(blob)
det.setNNFamily(dai.DetectionNetworkType.MOBILENET)
det.setConfidenceThreshold(0.5)

# Linking
if args.sync:
Empty file modified examples/Script/script_change_pipeline_flow.py
100644 → 100755
Empty file.
Empty file modified examples/Script/script_forward_frames.py
100644 → 100755
Empty file.
23 changes: 22 additions & 1 deletion examples/SpatialDetection/spatial_location_calculator.py
@@ -44,6 +44,7 @@
config = dai.SpatialLocationCalculatorConfigData()
config.depthThresholds.lowerThreshold = 100
config.depthThresholds.upperThreshold = 10000
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MEDIAN
config.roi = dai.Rect(topLeft, bottomRight)

spatialLocationCalculator.inputConfig.setWaitForMessage(False)
@@ -123,10 +124,30 @@
topLeft.x += stepSize
bottomRight.x += stepSize
newConfig = True
elif key == ord('1'):
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MEAN
print('Switching calculation algorithm to MEAN!')
newConfig = True
elif key == ord('2'):
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MIN
print('Switching calculation algorithm to MIN!')
newConfig = True
elif key == ord('3'):
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MAX
print('Switching calculation algorithm to MAX!')
newConfig = True
elif key == ord('4'):
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MODE
print('Switching calculation algorithm to MODE!')
newConfig = True
elif key == ord('5'):
calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.MEDIAN
print('Switching calculation algorithm to MEDIAN!')
newConfig = True

if newConfig:
config.roi = dai.Rect(topLeft, bottomRight)
config.calculationAlgorithm = dai.SpatialLocationCalculatorAlgorithm.AVERAGE
config.calculationAlgorithm = calculationAlgorithm
cfg = dai.SpatialLocationCalculatorConfig()
cfg.addROI(config)
spatialCalcConfigInQueue.send(cfg)
98 changes: 75 additions & 23 deletions examples/StereoDepth/depth_preview_lr.py
@@ -11,46 +11,99 @@
# Better handling for occlusions:
lr_check = True

enableRectified = False

# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs
left = pipeline.create(dai.node.ColorCamera)
center = pipeline.create(dai.node.ColorCamera)
right = pipeline.create(dai.node.ColorCamera)
depth = pipeline.create(dai.node.StereoDepth)
xout = pipeline.create(dai.node.XLinkOut)
xoutl = pipeline.create(dai.node.XLinkOut)
xoutr = pipeline.create(dai.node.XLinkOut)
LC_depth = pipeline.create(dai.node.StereoDepth)
LR_depth = pipeline.create(dai.node.StereoDepth)
CR_depth = pipeline.create(dai.node.StereoDepth)

xout_LC = pipeline.create(dai.node.XLinkOut)
xout_LR = pipeline.create(dai.node.XLinkOut)
xout_CR = pipeline.create(dai.node.XLinkOut)

xout_LC.setStreamName("disparity_LC")
if enableRectified:
xoutl_LC = pipeline.create(dai.node.XLinkOut)
xoutr_LC = pipeline.create(dai.node.XLinkOut)
xoutl_LC.setStreamName("rectifiedLeft_LC")
xoutr_LC.setStreamName("rectifiedRight_LC")

xout.setStreamName("disparity")
xoutl.setStreamName("rectifiedLeft")
xoutr.setStreamName("rectifiedRight")
xout_LR.setStreamName("disparity_LR")
if enableRectified:
xoutl_LR = pipeline.create(dai.node.XLinkOut)
xoutr_LR = pipeline.create(dai.node.XLinkOut)
xoutl_LR.setStreamName("rectifiedLeft_LR")
xoutr_LR.setStreamName("rectifiedRight_LR")

xout_CR.setStreamName("disparity_CR")
if enableRectified:
xoutl_CR = pipeline.create(dai.node.XLinkOut)
xoutr_CR = pipeline.create(dai.node.XLinkOut)
xoutl_CR.setStreamName("rectifiedLeft_CR")
xoutr_CR.setStreamName("rectifiedRight_CR")

# Properties
left.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
left.setBoardSocket(dai.CameraBoardSocket.LEFT)
left.setIspScale(2, 3)

center.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
center.setBoardSocket(dai.CameraBoardSocket.CENTER)
center.setIspScale(2, 3)

right.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
right.setBoardSocket(dai.CameraBoardSocket.RIGHT)
right.setIspScale(2, 3)
left.setIspScale(2, 3)

LC_depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
LC_depth.initialConfig.setMedianFilter(dai.MedianFilter.MEDIAN_OFF)
LC_depth.setLeftRightCheck(lr_check)
LC_depth.setExtendedDisparity(extended_disparity)
LC_depth.setSubpixel(subpixel)

# Create a node that will produce the depth map (using disparity output as it's easier to visualize depth this way)
depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
# Options: MEDIAN_OFF, KERNEL_3x3, KERNEL_5x5, KERNEL_7x7 (default)
depth.initialConfig.setMedianFilter(dai.MedianFilter.KERNEL_7x7)
depth.setInputResolution(1280, 800)
depth.setLeftRightCheck(lr_check)
depth.setExtendedDisparity(extended_disparity)
depth.setSubpixel(subpixel)
depth.setInputResolution(1280, 800)
LR_depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
LR_depth.initialConfig.setMedianFilter(dai.MedianFilter.MEDIAN_OFF)
LR_depth.setLeftRightCheck(lr_check)
LR_depth.setExtendedDisparity(extended_disparity)
LR_depth.setSubpixel(subpixel)

CR_depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
CR_depth.initialConfig.setMedianFilter(dai.MedianFilter.MEDIAN_OFF)
CR_depth.setLeftRightCheck(lr_check)
CR_depth.setExtendedDisparity(extended_disparity)
CR_depth.setSubpixel(subpixel)

# Linking
left.isp.link(depth.left)
right.isp.link(depth.right)
depth.disparity.link(xout.input)
depth.rectifiedLeft.link(xoutl.input)
depth.rectifiedRight.link(xoutr.input)
# LC
left.isp.link(LC_depth.left)
center.isp.link(LC_depth.right)
LC_depth.disparity.link(xout_LC.input)
if enableRectified:
LC_depth.rectifiedLeft.link(xoutl_LC.input)
LC_depth.rectifiedRight.link(xoutr_LC.input)
# LR
left.isp.link(LR_depth.left)
right.isp.link(LR_depth.right)
LR_depth.disparity.link(xout_LR.input)
if enableRectified:
LR_depth.rectifiedLeft.link(xoutl_LR.input)
LR_depth.rectifiedRight.link(xoutr_LR.input)
# CR
center.isp.link(CR_depth.left)
right.isp.link(CR_depth.right)
CR_depth.disparity.link(xout_CR.input)
if enableRectified:
CR_depth.rectifiedLeft.link(xoutl_CR.input)
CR_depth.rectifiedRight.link(xoutr_CR.input)

maxDisp = LC_depth.initialConfig.getMaxDisparity()

# Connect to device and start pipeline
with dai.Device(pipeline) as device:
@@ -62,7 +115,6 @@
if type(message) == dai.ImgFrame:
frame = message.getCvFrame()
if 'disparity' in q:
maxDisp = depth.initialConfig.getMaxDisparity()
disp = (frame * (255.0 / maxDisp)).astype(np.uint8)
disp = cv2.applyColorMap(disp, cv2.COLORMAP_JET)
cv2.imshow(q, disp)
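
Since this hunk also interleaves the old single-pair code with the new three-pair code, here is a consolidated sketch of how one pair (left-center) is wired in the new version; the LR and CR pairs follow the same pattern with their own StereoDepth node and "disparity_LR" / "disparity_CR" streams (the extended_disparity and subpixel values are assumptions):

import depthai as dai

extended_disparity = False  # assumed, as in the example header
subpixel = False            # assumed, as in the example header
lr_check = True

pipeline = dai.Pipeline()

left = pipeline.create(dai.node.ColorCamera)
center = pipeline.create(dai.node.ColorCamera)
LC_depth = pipeline.create(dai.node.StereoDepth)
xout_LC = pipeline.create(dai.node.XLinkOut)
xout_LC.setStreamName("disparity_LC")

left.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
left.setBoardSocket(dai.CameraBoardSocket.LEFT)
left.setIspScale(2, 3)
center.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
center.setBoardSocket(dai.CameraBoardSocket.CENTER)
center.setIspScale(2, 3)

LC_depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
LC_depth.initialConfig.setMedianFilter(dai.MedianFilter.MEDIAN_OFF)
LC_depth.setLeftRightCheck(lr_check)
LC_depth.setExtendedDisparity(extended_disparity)
LC_depth.setSubpixel(subpixel)

left.isp.link(LC_depth.left)
center.isp.link(LC_depth.right)
LC_depth.disparity.link(xout_LC.input)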
