IMU and Video Sync

This example demonstrates how to use the DepthAI Sync node to synchronize IMU (Inertial Measurement Unit) data with video frames from a color camera. The latest rotation vector reported by the IMU is processed and displayed alongside the video stream in real time.
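
At its core, the example wires two streams into a single Sync node, which groups together messages whose device timestamps fall within a configurable threshold. A minimal sketch of that wiring, using the same node and input names as the full examples below:

import depthai as dai
from datetime import timedelta

pipeline = dai.Pipeline()

color = pipeline.create(dai.node.ColorCamera)
imu = pipeline.create(dai.node.IMU)
sync = pipeline.create(dai.node.Sync)

# Messages whose device timestamps differ by at most 10 ms are grouped together
sync.setSyncThreshold(timedelta(milliseconds=10))

color.video.link(sync.inputs["video"])
imu.out.link(sync.inputs["imu"])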

Demo

~/depthai-python/examples/Sync $ python3 imu_video_synced.py
IMU type: BNO086, firmware version: 3.9.7

Device timestamp imu: 0:00:05.379914
Device timestamp video:0:00:05.385096
Quaternion: i: -0.0549 j: -0.0335 k: 0.0018 real: 0.9979


Device timestamp imu: 0:00:05.410274
Device timestamp video:0:00:05.418425
Quaternion: i: -0.0549 j: -0.0334 k: 0.0018 real: 0.9979


Device timestamp imu: 0:00:05.445439
Device timestamp video:0:00:05.451753
Quaternion: i: -0.0548 j: -0.0334 k: 0.0018 real: 0.9979


Device timestamp imu: 0:00:05.475084
Device timestamp video:0:00:05.485082
Quaternion: i: -0.0547 j: -0.0334 k: 0.0018 real: 0.9979


Device timestamp imu: 0:00:05.510046
Device timestamp video:0:00:05.518411
Quaternion: i: -0.0546 j: -0.0334 k: 0.0018 real: 0.9979

Setup

Please run the install script to download all required dependencies. Note that the script must be run from inside the cloned repository, so first clone the depthai-python repository and then run the script:

git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py

For additional information, please follow the installation guide.

Source code

Also available on GitHub

import depthai as dai
import numpy as np
import cv2
from datetime import timedelta

device = dai.Device()

imuType = device.getConnectedIMU()
imuFirmwareVersion = device.getIMUFirmwareVersion()
print(f"IMU type: {imuType}, firmware version: {imuFirmwareVersion}")

if imuType != "BNO086":
    print("Rotation vector output is supported only by BNO086!")
    exit(0)

pipeline = dai.Pipeline()

color = pipeline.create(dai.node.ColorCamera)
imu = pipeline.create(dai.node.IMU)
sync = pipeline.create(dai.node.Sync)
xoutImu = pipeline.create(dai.node.XLinkOut)
xoutImu.setStreamName("imu")

xoutGrp = pipeline.create(dai.node.XLinkOut)
xoutGrp.setStreamName("xout")

color.setCamera("color")

# Enable the rotation vector output at 120 Hz
imu.enableIMUSensor(dai.IMUSensor.ROTATION_VECTOR, 120)
imu.setBatchReportThreshold(1)
imu.setMaxBatchReports(10)

# Group messages whose device timestamps are within 10 ms of each other
sync.setSyncThreshold(timedelta(milliseconds=10))
sync.setSyncAttempts(-1)  # Infinite attempts

color.video.link(sync.inputs["video"])
imu.out.link(sync.inputs["imu"])

sync.out.link(xoutGrp.input)


with device:
    device.startPipeline(pipeline)
    groupQueue = device.getOutputQueue("xout", 3, True)
    while True:
        groupMessage = groupQueue.get()
        imuMessage = groupMessage["imu"]
        colorMessage = groupMessage["video"]
        print()
        print("Device timestamp imu: " + str(imuMessage.getTimestampDevice()))
        print("Device timestamp video:" + str(colorMessage.getTimestampDevice()))
        latestRotationVector = imuMessage.packets[-1].rotationVector
        imuF = "{:.4f}"
        print(f"Quaternion: i: {imuF.format(latestRotationVector.i)} j: {imuF.format(latestRotationVector.j)} "
        f"k: {imuF.format(latestRotationVector.k)} real: {imuF.format(latestRotationVector.real)}")
        print()
        cv2.imshow("video", colorMessage.getCvFrame())
        if cv2.waitKey(1) == ord("q"):
            break

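The C++ variant below also prints the device-timestamp difference between the two synchronized messages. The same check can be added to the Python loop above; a short sketch (imuMessage and colorMessage are the variables already retrieved in the loop):

# Inside the while loop, after imuMessage and colorMessage are retrieved:
diff = abs(imuMessage.getTimestampDevice() - colorMessage.getTimestampDevice())
print(f"Time difference between messages: {diff.total_seconds() * 1000:.3f} ms")
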
Also available on GitHub

#include <cmath>
#include <cstdio>
#include <iostream>
#include <opencv2/opencv.hpp>

#include "depthai/depthai.hpp"

int main() {
    dai::Device device;

    auto imuType = device.getConnectedIMU();
    auto imuFirmwareVersion = device.getIMUFirmwareVersion();
    std::cout << "IMU type: " << imuType << ", firmware version: " << imuFirmwareVersion << std::endl;

    if(imuType != "BNO086") {
        std::cout << "Rotation vector output is supported only by BNO086!" << std::endl;
        return 1;
    }

    dai::Pipeline pipeline;

    auto colorCamera = pipeline.create<dai::node::ColorCamera>();
    auto imu = pipeline.create<dai::node::IMU>();
    auto sync = pipeline.create<dai::node::Sync>();
    auto xoutGroup = pipeline.create<dai::node::XLinkOut>();

    xoutGroup->setStreamName("xout");

    colorCamera->setCamera("color");

    imu->enableIMUSensor(dai::IMUSensor::ROTATION_VECTOR, 120);
    imu->setBatchReportThreshold(1);
    imu->setMaxBatchReports(10);

    sync->setSyncThreshold(std::chrono::milliseconds(10));
    sync->setSyncAttempts(-1);  // Infinite attempts

    colorCamera->video.link(sync->inputs["video"]);
    imu->out.link(sync->inputs["imu"]);

    sync->out.link(xoutGroup->input);

    device.startPipeline(pipeline);

    auto groupQueue = device.getOutputQueue("xout", 3, false);

    while(true) {
        auto groupMessage = groupQueue->get<dai::MessageGroup>();
        auto imuData = groupMessage->get<dai::IMUData>("imu");
        auto colorData = groupMessage->get<dai::ImgFrame>("video");
        auto timeDifference = imuData->getTimestampDevice() - colorData->getTimestampDevice();
        auto timeDifferenceUs = std::chrono::duration_cast<std::chrono::microseconds>(timeDifference).count();

        std::cout << "Time difference between messages is: " << std::abs(timeDifferenceUs / 1000.0) << " ms" << std::endl;

        for(auto& packet : imuData->packets) {
            auto& rv = packet.rotationVector;

            printf(
                "Quaternion: i: %.3f j: %.3f k: %.3f real: %.3f\n"
                "Accuracy (rad): %.3f \n",
                rv.i,
                rv.j,
                rv.k,
                rv.real,
                rv.rotationVectorAccuracy);
        }

        cv::imshow("Color", colorData->getCvFrame());
        if(cv::waitKey(1) == 'q') {
            break;
        }
    }

    return 0;
}

How it Works

  1. Initialize the DepthAI device.

  2. Check the connected IMU type and firmware version.

  3. Create a pipeline and add a ColorCamera and IMU node.

  4. Set up the Sync node to synchronize the IMU data with the video frames.

  5. Link the output of the ColorCamera and IMU nodes to the Sync node.

  6. Start the pipeline and continuously receive synchronized data.

  7. Display the video frames and print the IMU rotation vector data, including quaternion values (a host-side conversion to Euler angles is sketched after this list).

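Step 7 prints the rotation vector as a raw quaternion. If Euler angles are more convenient for your application, a standard host-side conversion can be applied; a minimal sketch in plain math, assuming a unit quaternion with x = i, y = j, z = k, w = real:

import math

def quaternion_to_euler(i, j, k, real):
    # Roll (x), pitch (y), yaw (z) in radians, ZYX convention
    roll = math.atan2(2 * (real * i + j * k), 1 - 2 * (i * i + j * j))
    pitch = math.asin(max(-1.0, min(1.0, 2 * (real * j - k * i))))
    yaw = math.atan2(2 * (real * k + i * j), 1 - 2 * (j * j + k * k))
    return roll, pitch, yaw

# First quaternion from the demo output above
print(quaternion_to_euler(-0.0549, -0.0335, 0.0018, 0.9979))
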
Got questions?

Head over to the Discussion Forum for technical support or any other questions you might have.