RGB Full Resolution Saver¶
This example saves full-resolution 3840x2160 .jpeg images when the 'c' key is pressed.
It serves as an example of recording high resolution frames to disk for the purposes of
high-resolution ground-truth data.
Note that each frame takes over 2 MB of storage, so repeatedly pressing the capture key could quickly fill up your storage.
Similar samples:
Encoded bitstream (either MJPEG, H264, or H265) from the device can also be saved directly into .mp4 container with no computational overhead on the host computer. See demo here for more information.
Matroska
Besides ffmpeg
and .mp4
video container (which is patent encumbered), you could also use the mkvmerge
(see MKVToolNix for GUI usage) and .mkv
video container
to mux the encoded stream into a video file that is supported by all major video players (e.g. VLC):
mkvmerge -o vid.mkv video.h265
Demo¶
Setup¶
Please run the install script to download all required dependencies. Please note that this script must be run from a git context, so you have to download the depthai-python repository first and then run the script:
git clone https://github.com/luxonis/depthai-python.git
cd depthai-python/examples
python3 install_requirements.py
For additional information, please follow installation guide
Source code¶
Also available on GitHub
#!/usr/bin/env python3
"""Save full-resolution (4K) stills as .jpeg when the 'c' key is pressed.

Shows a quarter-resolution live RGB preview; pressing 'c' sends a
CameraControl to the device, which captures a still, MJPEG-encodes it
on-device, and streams the bitstream back to be written under ./rgb_data.
Press 'q' to quit.
"""
import time
from pathlib import Path

import cv2
import depthai as dai

# Create pipeline
pipeline = dai.Pipeline()
camRgb = pipeline.create(dai.node.ColorCamera)
camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)

# Preview stream: full video output sent to the host for display
xoutRgb = pipeline.create(dai.node.XLinkOut)
xoutRgb.setStreamName("rgb")
camRgb.video.link(xoutRgb.input)

# Control stream: lets the host trigger still captures
xin = pipeline.create(dai.node.XLinkIn)
xin.setStreamName("control")
xin.out.link(camRgb.inputControl)

# Properties: stills are MJPEG-encoded on-device (1 fps preset suffices
# for single-shot captures)
videoEnc = pipeline.create(dai.node.VideoEncoder)
videoEnc.setDefaultProfilePreset(1, dai.VideoEncoderProperties.Profile.MJPEG)
camRgb.still.link(videoEnc.input)

# Linking: encoded still bitstream back to the host
xoutStill = pipeline.create(dai.node.XLinkOut)
xoutStill.setStreamName("still")
videoEnc.bitstream.link(xoutStill.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Output queue will be used to get the rgb frames from the output defined above
    qRgb = device.getOutputQueue(name="rgb", maxSize=30, blocking=False)
    qStill = device.getOutputQueue(name="still", maxSize=30, blocking=True)
    qControl = device.getInputQueue(name="control")

    # Make sure the destination path is present before starting to store the examples
    dirName = "rgb_data"
    Path(dirName).mkdir(parents=True, exist_ok=True)

    while True:
        inRgb = qRgb.tryGet()  # Non-blocking call: returns new data or None
        if inRgb is not None:
            frame = inRgb.getCvFrame()
            # Downscale 4K to quarter size for display (two pyrDown halvings)
            frame = cv2.pyrDown(frame)
            frame = cv2.pyrDown(frame)
            cv2.imshow("rgb", frame)

        if qStill.has():
            # Name files by capture time in milliseconds to avoid collisions
            fName = f"{dirName}/{int(time.time() * 1000)}.jpeg"
            with open(fName, "wb") as f:
                f.write(qStill.get().getData())
                print('Image saved to', fName)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('c'):
            # Ask the camera to capture a single still frame
            ctrl = dai.CameraControl()
            ctrl.setCaptureStill(True)
            qControl.send(ctrl)
            print("Sent 'still' event to the camera!")
Also available on GitHub
// Save full-resolution (4K) frames as timestamped .jpeg files while showing
// a live RGB preview. Press 'q' (or 'Q') in the preview window to quit.
#include <chrono>
#include <fstream>
#include <iostream>
#include <sstream>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
#include "utility.hpp"

int main() {
    using namespace std::chrono;

    // Create pipeline
    dai::Pipeline pipeline;

    // Define source and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto videoEnc = pipeline.create<dai::node::VideoEncoder>();
    auto xoutJpeg = pipeline.create<dai::node::XLinkOut>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();

    xoutJpeg->setStreamName("jpeg");
    xoutRgb->setStreamName("rgb");

    // Properties
    camRgb->setBoardSocket(dai::CameraBoardSocket::CAM_A);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_4_K);
    // MJPEG-encode every video frame on-device at the camera's frame rate
    videoEnc->setDefaultProfilePreset(camRgb->getFps(), dai::VideoEncoderProperties::Profile::MJPEG);

    // Linking: video goes both to the host preview and to the encoder
    camRgb->video.link(xoutRgb->input);
    camRgb->video.link(videoEnc->input);
    videoEnc->bitstream.link(xoutJpeg->input);

    // Connect to device and start pipeline
    dai::Device device(pipeline);

    // Queues: preview is non-blocking (drop when full), jpeg is blocking so
    // no encoded frame is lost
    auto qRgb = device.getOutputQueue("rgb", 30, false);
    auto qJpeg = device.getOutputQueue("jpeg", 30, true);

    // Make sure the destination path is present before storing files
    std::string dirName = "rgb_data";
    createDirectory(dirName);

    while(true) {
        auto inRgb = qRgb->tryGet<dai::ImgFrame>();
        if(inRgb != nullptr) {
            cv::imshow("rgb", inRgb->getCvFrame());
        }

        // Drain all pending encoded frames; name each by the current epoch
        // time in milliseconds to avoid filename collisions
        auto encFrames = qJpeg->tryGetAll<dai::ImgFrame>();
        for(const auto& encFrame : encFrames) {
            uint64_t time = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
            std::stringstream videoStr;
            videoStr << dirName << "/" << time << ".jpeg";
            auto videoFile = std::ofstream(videoStr.str(), std::ios::binary);
            videoFile.write(reinterpret_cast<const char*>(encFrame->getData().data()), encFrame->getData().size());
        }

        int key = cv::waitKey(1);
        if(key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}