Depth Crop Control
This example demonstrates using the depth stream in crop mode, with the ability to move the crop region at runtime. You can move the cropped frame with the following keys (a minimal sketch of this control logic follows the list):
w will move the crop up
a will move the crop left
s will move the crop down
d will move the crop right
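Internally, each key press simply shifts a normalized crop rectangle and sends an updated ImageManipConfig to the device over the 'config' input stream. The sketch below factors that logic into a small helper; the helper name move_crop and its early-return guard are illustrative only (not part of the DepthAI API), and it assumes the same normalized Point2f corners and config queue used in the full example further down.

import depthai as dai

STEP = 0.02  # normalized step per key press, same as stepSize in the full example

def move_crop(key, topLeft, bottomRight, step=STEP):
    # Hypothetical helper: shift the normalized crop rectangle for a WASD key press
    # and return an ImageManipConfig to send, or None if the move would leave [0, 1].
    dx = {'a': -step, 'd': step}.get(key, 0.0)
    dy = {'w': -step, 's': step}.get(key, 0.0)
    if not (0 <= topLeft.x + dx and bottomRight.x + dx <= 1 and
            0 <= topLeft.y + dy and bottomRight.y + dy <= 1):
        return None  # move would push the crop outside the frame
    topLeft.x += dx
    bottomRight.x += dx
    topLeft.y += dy
    bottomRight.y += dy
    cfg = dai.ImageManipConfig()
    cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
    return cfg

# Usage inside the main loop, assuming configQueue is the 'config' XLinkIn queue:
# cfg = move_crop(chr(cv2.waitKey(10) & 0xFF), topLeft, bottomRight)
# if cfg is not None:
#     configQueue.send(cfg)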
Demo
Setup
Please run the install script to download all required dependencies. Note that the script must be run from within the cloned repository, so clone the depthai-python repository first and then run the script:
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow the installation guide.
Source code
Also available on GitHub
#!/usr/bin/env python3
"""
This example shows usage of depth camera in crop mode with the possibility to move the crop.
Use 'WASD' in order to do it.
"""
import cv2
import depthai as dai
import numpy as np
# Step size ('W','A','S','D' controls)
stepSize = 0.02
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
monoRight = pipeline.create(dai.node.MonoCamera)
monoLeft = pipeline.create(dai.node.MonoCamera)
manip = pipeline.create(dai.node.ImageManip)
stereo = pipeline.create(dai.node.StereoDepth)
configIn = pipeline.create(dai.node.XLinkIn)
xout = pipeline.create(dai.node.XLinkOut)
configIn.setStreamName('config')
xout.setStreamName("depth")
# Crop range
topLeft = dai.Point2f(0.2, 0.2)
bottomRight = dai.Point2f(0.8, 0.8)
# Properties
monoRight.setCamera("right")
monoLeft.setCamera("left")
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
manip.initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
manip.setMaxOutputFrameSize(monoRight.getResolutionHeight()*monoRight.getResolutionWidth()*3)
stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
stereo.setSubpixel(True)
# Linking
configIn.out.link(manip.inputConfig)
stereo.depth.link(manip.inputImage)
manip.out.link(xout.input)
monoRight.out.link(stereo.right)
monoLeft.out.link(stereo.left)
# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Queues
    q = device.getOutputQueue(xout.getStreamName(), maxSize=4, blocking=False)
    configQueue = device.getInputQueue(configIn.getStreamName())

    sendCamConfig = False

    while True:
        inDepth = q.get()
        depthFrame = inDepth.getFrame() # depthFrame values are in millimeters

        # Frame is transformed, the color map will be applied to highlight the depth info
        depth_downscaled = depthFrame[::4]
        if np.all(depth_downscaled == 0):
            min_depth = 0  # Set a default minimum depth value when all elements are zero
        else:
            min_depth = np.percentile(depth_downscaled[depth_downscaled != 0], 1)
        max_depth = np.percentile(depth_downscaled, 99)
        depthFrameColor = np.interp(depthFrame, (min_depth, max_depth), (0, 255)).astype(np.uint8)
        depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_HOT)

        # Frame is ready to be shown
        cv2.imshow("depth", depthFrameColor)

        # Update screen
        key = cv2.waitKey(10)
        if key == ord('q'):
            break
        elif key == ord('w'):
            if topLeft.y - stepSize >= 0:
                topLeft.y -= stepSize
                bottomRight.y -= stepSize
                sendCamConfig = True
        elif key == ord('a'):
            if topLeft.x - stepSize >= 0:
                topLeft.x -= stepSize
                bottomRight.x -= stepSize
                sendCamConfig = True
        elif key == ord('s'):
            if bottomRight.y + stepSize <= 1:
                topLeft.y += stepSize
                bottomRight.y += stepSize
                sendCamConfig = True
        elif key == ord('d'):
            if bottomRight.x + stepSize <= 1:
                topLeft.x += stepSize
                bottomRight.x += stepSize
                sendCamConfig = True

        # Send new config to camera
        if sendCamConfig:
            cfg = dai.ImageManipConfig()
            cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
            configQueue.send(cfg)
            sendCamConfig = False
Also available on GitHub
/**
 * This example shows usage of depth camera in crop mode with the possibility to move the crop.
 * Use 'WASD' in order to do it.
 */
#include <iostream>
// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
// Step size ('W','A','S','D' controls)
static constexpr float stepSize = 0.02f;
int main() {
    // Create pipeline
    dai::Pipeline pipeline;

    // Define sources and outputs
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto manip = pipeline.create<dai::node::ImageManip>();
    auto stereo = pipeline.create<dai::node::StereoDepth>();

    auto configIn = pipeline.create<dai::node::XLinkIn>();
    auto xout = pipeline.create<dai::node::XLinkOut>();

    configIn->setStreamName("config");
    xout->setStreamName("depth");

    // Crop range
    dai::Point2f topLeft(0.2f, 0.2f);
    dai::Point2f bottomRight(0.8f, 0.8f);

    // Properties
    monoRight->setCamera("right");
    monoLeft->setCamera("left");
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);

    manip->initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
    manip->setMaxOutputFrameSize(monoRight->getResolutionHeight() * monoRight->getResolutionWidth() * 3);
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);

    // Linking
    configIn->out.link(manip->inputConfig);
    stereo->depth.link(manip->inputImage);
    manip->out.link(xout->input);
    monoRight->out.link(stereo->right);
    monoLeft->out.link(stereo->left);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Queues
    auto q = device.getOutputQueue(xout->getStreamName(), 4, false);
    auto configQueue = device.getInputQueue(configIn->getStreamName());

    bool sendCamConfig = false;

    while(true) {
        auto inDepth = q->get<dai::ImgFrame>();
        cv::Mat depthFrame = inDepth->getFrame();  // depthFrame values are in millimeters

        // Frame is transformed, the color map will be applied to highlight the depth info
        cv::Mat depthFrameColor;
        cv::normalize(depthFrame, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1);
        cv::equalizeHist(depthFrameColor, depthFrameColor);
        cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_HOT);

        // Frame is ready to be shown
        cv::imshow("depth", depthFrameColor);
        // Update screen (10ms polling rate)
        int key = cv::waitKey(9);
        cv::waitKey(1);  // glitch workaround
        if(key == 'q') {
            break;
        } else if(key == 'w') {
            if(topLeft.y - stepSize >= 0) {
                topLeft.y -= stepSize;
                bottomRight.y -= stepSize;
                sendCamConfig = true;
            }
        } else if(key == 'a') {
            if(topLeft.x - stepSize >= 0) {
                topLeft.x -= stepSize;
                bottomRight.x -= stepSize;
                sendCamConfig = true;
            }
        } else if(key == 's') {
            if(bottomRight.y + stepSize <= 1) {
                topLeft.y += stepSize;
                bottomRight.y += stepSize;
                sendCamConfig = true;
            }
        } else if(key == 'd') {
            if(bottomRight.x + stepSize <= 1) {
                topLeft.x += stepSize;
                bottomRight.x += stepSize;
                sendCamConfig = true;
            }
        }

        // Send new config to camera
        if(sendCamConfig) {
            dai::ImageManipConfig cfg;
            cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
            configQueue->send(cfg);
            sendCamConfig = false;
        }
    }
    return 0;
}