Mono Camera Control¶
This example shows how to control the device-side crop and camera triggers. The output is a displayed mono cropped frame that can be manipulated using the following keys:
w will move the crop up
a will move the crop left
s will move the crop down
d will move the crop right
e will trigger autoexposure
i and o will decrease/increase the exposure time
k and l will decrease/increase the sensitivity iso
Similar samples:
Demo¶
Setup¶
Please run the install script to download all required dependencies. Please note that this script must be run from git context, so you have to download the depthai-python repository first and then run the script
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow installation guide
Source code¶
Also available on GitHub
#!/usr/bin/env python3
"""
This example shows usage of mono camera in crop mode with the possibility to move the crop.
Uses 'WASD' controls to move the crop window, 'IOKL' for manual exposure/sensitivity:
Control: key[dec/inc] min..max
exposure time: I O 1..33000 [us]
sensitivity iso: K L 100..1600
To go back to auto controls:
'E' - autoexposure
'Q' - quit
"""
import cv2
import depthai as dai
# Step size ('W','A','S','D' controls)
stepSize = 0.02

# Manual exposure/sensitivity adjustment step
expStep = 500  # us
isoStep = 50


def clamp(num, v0, v1):
    """Return num limited to the inclusive range [v0, v1]."""
    return min(max(num, v0), v1)


# Set to True whenever the crop window moves and a new config must be sent.
sendCamConfig = False
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
monoRight = pipeline.create(dai.node.MonoCamera)
monoLeft = pipeline.create(dai.node.MonoCamera)
manipRight = pipeline.create(dai.node.ImageManip)
manipLeft = pipeline.create(dai.node.ImageManip)
# XLinkIn nodes let the host send runtime messages to the device:
# 'control' carries CameraControl, 'config' carries ImageManipConfig (see loop below).
controlIn = pipeline.create(dai.node.XLinkIn)
configIn = pipeline.create(dai.node.XLinkIn)
# XLinkOut nodes stream the cropped frames back to the host.
manipOutRight = pipeline.create(dai.node.XLinkOut)
manipOutLeft = pipeline.create(dai.node.XLinkOut)
controlIn.setStreamName('control')
configIn.setStreamName('config')
manipOutRight.setStreamName("right")
manipOutLeft.setStreamName("left")
# Crop range; coordinates are normalized 0..1 (the move logic below clamps to that range)
topLeft = dai.Point2f(0.2, 0.2)
bottomRight = dai.Point2f(0.8, 0.8)
# Properties
monoRight.setCamera("right")
monoLeft.setCamera("left")
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
manipRight.initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
manipLeft.initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
# NOTE(review): only the right manip gets an explicit max output frame size —
# presumably the default is sufficient for the left; confirm against upstream example.
manipRight.setMaxOutputFrameSize(monoRight.getResolutionHeight()*monoRight.getResolutionWidth()*3)
# Linking
monoRight.out.link(manipRight.inputImage)
monoLeft.out.link(manipLeft.inputImage)
controlIn.out.link(monoRight.inputControl)
controlIn.out.link(monoLeft.inputControl)
configIn.out.link(manipRight.inputConfig)
configIn.out.link(manipLeft.inputConfig)
manipRight.out.link(manipOutRight.input)
manipLeft.out.link(manipOutLeft.input)
# Connect to device and start pipeline.
# (The indentation of this block was flattened in the source, making it invalid
# Python; structure restored here. Behavior is unchanged.)
with dai.Device(pipeline) as device:
    # Output queues will be used to get the grayscale frames
    qRight = device.getOutputQueue(manipOutRight.getStreamName(), maxSize=4, blocking=False)
    qLeft = device.getOutputQueue(manipOutLeft.getStreamName(), maxSize=4, blocking=False)
    configQueue = device.getInputQueue(configIn.getStreamName())
    controlQueue = device.getInputQueue(controlIn.getStreamName())

    # Defaults and limits for manual exposure controls
    expTime = 20000
    expMin = 1
    expMax = 33000

    sensIso = 800
    sensMin = 100
    sensMax = 1600

    while True:
        # Block until the next cropped frame arrives from each camera.
        inRight = qRight.get()
        inLeft = qLeft.get()
        cv2.imshow("right", inRight.getCvFrame())
        cv2.imshow("left", inLeft.getCvFrame())

        # Update screen (1ms polling rate)
        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('e'):
            # Return exposure control to the device.
            print("Autoexposure enable")
            ctrl = dai.CameraControl()
            ctrl.setAutoExposureEnable()
            controlQueue.send(ctrl)
        elif key in [ord('i'), ord('o'), ord('k'), ord('l')]:
            # Manual exposure: i/o adjust exposure time, k/l adjust ISO,
            # both clamped to their valid ranges before sending.
            if key == ord('i'): expTime -= expStep
            if key == ord('o'): expTime += expStep
            if key == ord('k'): sensIso -= isoStep
            if key == ord('l'): sensIso += isoStep
            expTime = clamp(expTime, expMin, expMax)
            sensIso = clamp(sensIso, sensMin, sensMax)
            print("Setting manual exposure, time:", expTime, "iso:", sensIso)
            ctrl = dai.CameraControl()
            ctrl.setManualExposure(expTime, sensIso)
            controlQueue.send(ctrl)
        # 'WASD' moves the crop window; each move is guarded so the
        # rectangle stays inside the normalized 0..1 frame.
        elif key == ord('w'):
            if topLeft.y - stepSize >= 0:
                topLeft.y -= stepSize
                bottomRight.y -= stepSize
                sendCamConfig = True
        elif key == ord('a'):
            if topLeft.x - stepSize >= 0:
                topLeft.x -= stepSize
                bottomRight.x -= stepSize
                sendCamConfig = True
        elif key == ord('s'):
            if bottomRight.y + stepSize <= 1:
                topLeft.y += stepSize
                bottomRight.y += stepSize
                sendCamConfig = True
        elif key == ord('d'):
            if bottomRight.x + stepSize <= 1:
                topLeft.x += stepSize
                bottomRight.x += stepSize
                sendCamConfig = True

        # Send new config to camera
        if sendCamConfig:
            cfg = dai.ImageManipConfig()
            cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y)
            configQueue.send(cfg)
            sendCamConfig = False
Also available on GitHub
/**
* This example shows usage of mono camera in crop mode with the possibility to move the crop.
* Uses 'WASD' controls to move the crop window, 'IOKL' for manual exposure/sensitivity:
* Control: key[dec/inc] min..max
* exposure time: I O 1..33000 [us]
* sensitivity iso: K L 100..1600
* To go back to auto controls:
* 'E' - autoexposure
*/
#include <iostream>
// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
// Step size ('W','A','S','D' controls)
static constexpr float stepSize = 0.02f;

// Manual exposure/sensitivity adjustment step
static constexpr int EXP_STEP = 500;  // us
static constexpr int ISO_STEP = 50;

// Returns num limited to the inclusive range [v0, v1].
static int clamp(int num, int v0, int v1) {
    return std::min(v1, std::max(num, v0));
}

// Set whenever the crop window moves and a new config must be sent to the device.
static std::atomic<bool> sendCamConfig{false};
int main() {
// Create pipeline
dai::Pipeline pipeline;
// Define sources and outputs
auto monoRight = pipeline.create<dai::node::MonoCamera>();
auto monoLeft = pipeline.create<dai::node::MonoCamera>();
auto manipRight = pipeline.create<dai::node::ImageManip>();
auto manipLeft = pipeline.create<dai::node::ImageManip>();
auto controlIn = pipeline.create<dai::node::XLinkIn>();
auto configIn = pipeline.create<dai::node::XLinkIn>();
auto manipOutRight = pipeline.create<dai::node::XLinkOut>();
auto manipOutLeft = pipeline.create<dai::node::XLinkOut>();
controlIn->setStreamName("control");
configIn->setStreamName("config");
manipOutRight->setStreamName("right");
manipOutLeft->setStreamName("left");
// Crop range
dai::Point2f topLeft(0.2f, 0.2f);
dai::Point2f bottomRight(0.8f, 0.8f);
// Properties
monoRight->setCamera("right");
monoLeft->setCamera("left");
monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P);
manipRight->initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
manipLeft->initialConfig.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
// Linking
monoRight->out.link(manipRight->inputImage);
monoLeft->out.link(manipLeft->inputImage);
controlIn->out.link(monoRight->inputControl);
controlIn->out.link(monoLeft->inputControl);
configIn->out.link(manipRight->inputConfig);
configIn->out.link(manipLeft->inputConfig);
manipRight->out.link(manipOutRight->input);
manipLeft->out.link(manipOutLeft->input);
// Connect to device and start pipeline
dai::Device device(pipeline);
// Output queues will be used to get the grayscale frames
auto qRight = device.getOutputQueue(manipOutRight->getStreamName(), 4, false);
auto qLeft = device.getOutputQueue(manipOutLeft->getStreamName(), 4, false);
auto controlQueue = device.getInputQueue(controlIn->getStreamName());
auto configQueue = device.getInputQueue(configIn->getStreamName());
// Defaults and limits for manual focus/exposure controls
int exp_time = 20000;
int exp_min = 1;
int exp_max = 33000;
int sens_iso = 800;
int sens_min = 100;
int sens_max = 1600;
while(true) {
auto inRight = qRight->get<dai::ImgFrame>();
auto inLeft = qLeft->get<dai::ImgFrame>();
cv::imshow("right", inRight->getCvFrame());
cv::imshow("left", inLeft->getCvFrame());
// Update screen (1ms pooling rate)
int key = cv::waitKey(1);
if(key == 'q') {
break;
} else if(key == 'e') {
printf("Autoexposure enable\n");
dai::CameraControl ctrl;
ctrl.setAutoExposureEnable();
controlQueue->send(ctrl);
} else if(key == 'i' || key == 'o' || key == 'k' || key == 'l') {
if(key == 'i') exp_time -= EXP_STEP;
if(key == 'o') exp_time += EXP_STEP;
if(key == 'k') sens_iso -= ISO_STEP;
if(key == 'l') sens_iso += ISO_STEP;
exp_time = clamp(exp_time, exp_min, exp_max);
sens_iso = clamp(sens_iso, sens_min, sens_max);
printf("Setting manual exposure, time: %d, iso: %d\n", exp_time, sens_iso);
dai::CameraControl ctrl;
ctrl.setManualExposure(exp_time, sens_iso);
controlQueue->send(ctrl);
} else if(key == 'w') {
if(topLeft.y - stepSize >= 0) {
topLeft.y -= stepSize;
bottomRight.y -= stepSize;
sendCamConfig = true;
}
} else if(key == 'a') {
if(topLeft.x - stepSize >= 0) {
topLeft.x -= stepSize;
bottomRight.x -= stepSize;
sendCamConfig = true;
}
} else if(key == 's') {
if(bottomRight.y + stepSize <= 1) {
topLeft.y += stepSize;
bottomRight.y += stepSize;
sendCamConfig = true;
}
} else if(key == 'd') {
if(bottomRight.x + stepSize <= 1) {
topLeft.x += stepSize;
bottomRight.x += stepSize;
sendCamConfig = true;
}
}
// Send new config to camera
if(sendCamConfig) {
dai::ImageManipConfig cfg;
cfg.setCropRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
configQueue->send(cfg);
sendCamConfig = false;
}
}
return 0;
}
|