This commit is contained in:
2026-01-09 13:59:10 +08:00
commit 336a19762a
378 changed files with 99177 additions and 0 deletions

View File

@@ -0,0 +1,42 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Top-level project for the Orbbec SDK examples.
# Fix: project name was misspelled "orbbec_sdk_exampes".
project(orbbec_sdk_examples)

set(CMAKE_CXX_STANDARD 11)

option(OB_BUILD_PCL_EXAMPLES "Build Point Cloud Library examples" OFF)
option(OB_BUILD_OPEN3D_EXAMPLES "Build Open3D examples" OFF)

# Collect all build artifacts in a common layout: lib/ for libraries, bin/ for executables.
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)

# Multi-config generators (Visual Studio, Xcode) place outputs in per-config
# subdirectories by default; force every configuration into the same directories.
if(MSVC OR CMAKE_GENERATOR STREQUAL "Xcode")
    message(STATUS "Using multi-config generator: ${CMAKE_GENERATOR}")
    foreach(OUTPUTCONFIG DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)
        string(TOUPPER ${OUTPUTCONFIG} OUTPUTCONFIG_UPPER)
        set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
        set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}")
        set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}")
    endforeach()
endif()

# Locate the Orbbec SDK package shipped next to the examples directory.
set(OrbbecSDK_DIR ${CMAKE_CURRENT_LIST_DIR}/../lib)
find_package(OrbbecSDK REQUIRED)

# Runtime search path so installed example binaries find the SDK shared library
# relative to their own location.
if(APPLE)
    set(CMAKE_MACOSX_RPATH ON)
    set(CMAKE_INSTALL_RPATH "@loader_path/../lib")
    set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
    set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
elseif(UNIX)
    set(CMAKE_SKIP_BUILD_RPATH FALSE)
    set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib")
    set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
    set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
endif()

add_subdirectory(src)

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Basic example: enumerate connected devices, their sensors and stream profiles.
project(ob_enumerate)

add_executable(${PROJECT_NAME} enumerate.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,127 @@
# C++ Sample: 0.basic.enumerate
## Overview
Use the SDK interface to obtain camera-related information, including model, various sensors, and sensor-related configurations.
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
## Code overview
1. Create a context
```cpp
// Create a context.
ob::Context context;
```
2. Check if there is a camera connected
```cpp
// Query the list of connected devices.
auto deviceList = context.queryDeviceList();
if(deviceList->getCount() < 1) {
std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
return -1;
}
```
3. Obtain and output relevant information of the access device
```cpp
std::cout << "enumerated devices: " << std::endl;
std::shared_ptr<ob::Device> device = nullptr;
std::shared_ptr<ob::DeviceInfo> deviceInfo = nullptr;
for(uint32_t index = 0; index < deviceList->getCount(); index++) {
// Get device from deviceList.
device = deviceList->getDevice(index);
// Get device information from device
deviceInfo = device->getDeviceInfo();
std::cout << " - " << index << ". name: " << deviceInfo->getName() << " pid: " << deviceInfo->getPid() << " SN: " << deviceInfo->getSerialNumber()
<< std::endl;
}
```
4. Wait for keyboard input to select device
```cpp
// select a device.
int deviceSelected = ob_smpl::getInputOption();
if(deviceSelected == -1) {
break;
}
```
5. Output device sensors and wait for keyboard input
```cpp
// Enumerate sensors.
void enumerateSensors(std::shared_ptr<ob::Device> device) {
while(true) {
std::cout << "Sensor list: " << std::endl;
// Get the list of sensors.
auto sensorList = device->getSensorList();
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
// Get the sensor type.
auto sensorType = sensorList->getSensorType(index);
std::cout << " - " << index << "."
<< "sensor type: " << ob::TypeHelper::convertOBSensorTypeToString(sensorType) << std::endl;
}
std::cout << "Select a sensor to enumerate its streams(input sensor index or \'ESC\' to enumerate device): " << std::endl;
// Select a sensor.
int sensorSelected = ob_smpl::getInputOption();
if(sensorSelected == -1) {
break;
}
// Get sensor from sensorList.
auto sensor = sensorList->getSensor(sensorSelected);
enumerateStreamProfiles(sensor);
}
}
```
6. Output information about the selected sensor
```cpp
// Enumerate stream profiles.
void enumerateStreamProfiles(std::shared_ptr<ob::Sensor> sensor) {
// Get the list of stream profiles.
auto streamProfileList = sensor->getStreamProfileList();
// Get the sensor type.
auto sensorType = sensor->getType();
for(uint32_t index = 0; index < streamProfileList->getCount(); index++) {
// Get the stream profile.
auto profile = streamProfileList->getProfile(index);
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_COLOR || sensorType == OB_SENSOR_DEPTH || sensorType == OB_SENSOR_IR_LEFT
|| sensorType == OB_SENSOR_IR_RIGHT) {
printStreamProfile(profile, index);
}
else if(sensorType == OB_SENSOR_ACCEL) {
printAccelProfile(profile, index);
}
else if(sensorType == OB_SENSOR_GYRO) {
printGyroProfile(profile, index);
}
else {
break;
}
}
}
```
## Run Sample
In the window, enter the relevant information of the device sensor you want to view according to the prompts.
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/enumerate.jpg)

View File

@@ -0,0 +1,170 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
#include <iomanip>
// get input option
// Read one key press and convert it to a menu option.
// Returns -1 when ESC is pressed; otherwise maps the digit key to its
// numeric value (e.g. '3' -> 3). Non-digit keys yield out-of-range values
// that callers reject via their range checks.
int getInputOption() {
    const char key = ob_smpl::waitForKeyPressed();
    return (key == ESC_KEY) ? -1 : (key - '0');
}
// Print stream profile information.
void printStreamProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
// Get the video profile.
auto videoProfile = profile->as<ob::VideoStreamProfile>();
// Get the format.
auto formatName = profile->getFormat();
// Get the width.
auto width = videoProfile->getWidth();
// Get the height.
auto height = videoProfile->getHeight();
// Get the fps.
auto fps = videoProfile->getFps();
std::cout << index << "."
<< "format: " << ob::TypeHelper::convertOBFormatTypeToString(formatName) << ", "
<< "res: " << width << "*" << height << ", "
<< "fps: " << fps << std::endl;
}
// Print accel profile information.
void printAccelProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
// Get the profile of accel.
auto accProfile = profile->as<ob::AccelStreamProfile>();
// Get the rate of accel.
auto accRate = accProfile->getSampleRate();
std::cout << index << "."
<< "acc rate: " << ob::TypeHelper::convertOBIMUSampleRateTypeToString(accRate) << std::endl;
}
// Print gyro profile information.
void printGyroProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
// Get the profile of gyro.
auto gyroProfile = profile->as<ob::GyroStreamProfile>();
// Get the rate of gyro.
auto gyroRate = gyroProfile->getSampleRate();
std::cout << index << "."
<< "gyro rate: " << ob::TypeHelper::convertOBIMUSampleRateTypeToString(gyroRate) << std::endl;
}
// Enumerate stream profiles.
void enumerateStreamProfiles(std::shared_ptr<ob::Sensor> sensor) {
// Get the list of stream profiles.
auto streamProfileList = sensor->getStreamProfileList();
// Get the sensor type.
auto sensorType = sensor->getType();
for(uint32_t index = 0; index < streamProfileList->getCount(); index++) {
// Get the stream profile.
auto profile = streamProfileList->getProfile(index);
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_COLOR || sensorType == OB_SENSOR_DEPTH || sensorType == OB_SENSOR_IR_LEFT
|| sensorType == OB_SENSOR_IR_RIGHT || sensorType == OB_SENSOR_CONFIDENCE) {
printStreamProfile(profile, index);
}
else if(sensorType == OB_SENSOR_ACCEL) {
printAccelProfile(profile, index);
}
else if(sensorType == OB_SENSOR_GYRO) {
printGyroProfile(profile, index);
}
else {
break;
}
}
}
// Enumerate sensors.
void enumerateSensors(std::shared_ptr<ob::Device> device) {
while(true) {
std::cout << "Sensor list: " << std::endl;
// Get the list of sensors.
auto sensorList = device->getSensorList();
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
// Get the sensor type.
auto sensorType = sensorList->getSensorType(index);
std::cout << " - " << index << "."
<< "sensor type: " << ob::TypeHelper::convertOBSensorTypeToString(sensorType) << std::endl;
}
std::cout << "Select a sensor to enumerate its streams(input sensor index or \'ESC\' to enumerate device): " << std::endl;
// Select a sensor.
int sensorSelected = ob_smpl::getInputOption();
if(sensorSelected >= static_cast<int>(sensorList->getCount()) || sensorSelected < 0) {
if(sensorSelected == -1) {
break;
}
else {
std::cout << "\nInvalid input, please reselect the sensor!\n";
continue;
}
}
// Get sensor from sensorList.
auto sensor = sensorList->getSensor(sensorSelected);
enumerateStreamProfiles(sensor);
}
}
// Entry point: repeatedly list connected devices and let the user drill down
// into a selected device's sensors. ESC at the device menu exits the program.
int main(void) try {
    // Create a context.
    ob::Context context;
    while(true) {
        // Query the list of connected devices.
        auto deviceList = context.queryDeviceList();
        if(deviceList->getCount() < 1) {
            // No device attached: tell the user and exit with a failure code.
            std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
            std::cout << "\nPress any key to exit.";
            ob_smpl::waitForKeyPressed();
            return -1;
        }
        std::cout << "enumerated devices: " << std::endl;
        std::shared_ptr<ob::Device> device = nullptr;
        std::shared_ptr<ob::DeviceInfo> deviceInfo = nullptr;
        for(uint32_t index = 0; index < deviceList->getCount(); index++) {
            // Get device from deviceList.
            device = deviceList->getDevice(index);
            // Get device information from device
            deviceInfo = device->getDeviceInfo();
            // PID is printed as 4-digit zero-padded hex; std::dec restores the
            // stream to decimal afterwards.
            std::cout << " " << index << "- device name: " << deviceInfo->getName() << ", device pid: 0x" << std::hex << std::setw(4) << std::setfill('0')
                << deviceInfo->getPid() << std::dec << " ,device SN: " << deviceInfo->getSerialNumber() << ", connection type:" << deviceInfo->getConnectionType() << std::endl;
        }
        std::cout << "Select a device to enumerate its sensors (Input device index or \'ESC\' to exit program):" << std::endl;
        // select a device.
        int deviceSelected = ob_smpl::getInputOption();
        // Reject out-of-range input; -1 (ESC) exits, anything else re-prompts.
        if(deviceSelected >= static_cast<int>(deviceList->getCount()) || deviceSelected < 0) {
            if(deviceSelected == -1) {
                break;
            }
            else {
                std::cout << "\nInvalid input, please reselect the device!\n";
                continue;
            }
        }
        // Get the device.
        auto selectedDevice = deviceList->getDevice(deviceSelected);
        enumerateSensors(selectedDevice);
    }
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Basic example: start the pipeline with default config and render frames.
project(ob_quick_start)

add_executable(${PROJECT_NAME} quick_start.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,60 @@
# C++ Sample: 0.basic.quick_start
## Overview
Use the SDK interface to quickly obtain the camera video stream and display it in the window.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
Frameset is a combination of different types of Frames
win is used to display the frame data.
## Code overview
1. Instantiate the pipeline using the default configuration file and quickly open the video stream
```cpp
// Create a pipeline.
ob::Pipeline pipe;
// Start the pipeline with default config.
// Modify the default configuration by the configuration file: "OrbbecSDKConfig.xml"
pipe.start();
```
2. Create a window for showing the frames, and set the size of the window
```cpp
// Create a window for showing the frames, and set the size of the window.
ob_smpl::CVWindow win("QuickStart", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
```
3. Open the window and display the video stream. The video stream waits for a frame of data in a blocking manner. The frame is a composite frame containing the frame data of all streams enabled in the configuration, and the waiting timeout of the frame is set
```cpp
while(win.run()) {
// Wait for frameSet from the pipeline, the default timeout is 1000ms.
auto frameSet = pipe.waitForFrameset();
// Push the frames to the window for showing.
win.pushFramesToView(frameSet);
}
```
4. Use pipeline to close the video stream
```cpp
// Stop the Pipeline, no frame data will be generated
pipe.stop();
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/quick_start.jpg)

View File

@@ -0,0 +1,39 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// Minimal example: start the pipeline with its default configuration and
// render every arriving frameset until the user closes the window (ESC).
int main(void) try {
    // Create a pipeline.
    ob::Pipeline pipe;
    // Start the pipeline with default config.
    // Modify the default configuration by the configuration file: "OrbbecSDKConfig.xml"
    pipe.start();
    // Create a window for showing the frames, and set the size of the window.
    ob_smpl::CVWindow win("QuickStart", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
    while(win.run()) {
        // Wait for frameSet from the pipeline, the default timeout is 1000ms.
        auto frameSet = pipe.waitForFrameset();
        // Push the frames to the window for showing.
        win.pushFramesToView(frameSet);
    }
    // Stop the Pipeline, no frame data will be generated
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Stream example: receive framesets through a pipeline callback.
project(ob_callback)

add_executable(${PROJECT_NAME} callback.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,96 @@
# C++ Sample: 1.stream.callback
## Overview
In this sample, users can get the depth, RGB and IR images. This sample also supports user-defined operations such as data acquisition, data processing, and data modification within the callback function.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
## code overview
1. Create the pipeline instance using the default configuration and create a config instance to enable or disable the streams. Get the device instance from the pipeline, and then get the sensor instance from the device.
```c++
// Create a pipeline.
ob::Pipeline pipe;
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// Get device from pipeline.
auto device = pipe.getDevice();
// Get sensorList from device.
auto sensorList = device->getSensorList();
```
2. Get only the sensors that produce video streams, and enable the streams from these sensors.
```c++
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
// Query all supported infrared sensor type and enable the infrared stream.
// For dual infrared device, enable the left and right infrared streams.
// For single infrared device, enable the infrared stream.
OBSensorType sensorType = sensorList->getSensorType(index);
// exclude non-video sensor type
if(!ob::TypeHelper::isVideoSensorType(sensorType)) {
continue;
}
// Enable the stream for the sensor type.
config->enableStream(sensorType);
}
```
3. In this callback function, you can add what you want to do with the data. Avoid performing complex computational operations within callback functions; prolonged operations can lead to data frame drops. It is recommended to use a queue for processing.
```c++
// Start the pipeline with callback.
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> output) {
std::lock_guard<std::mutex> lock(framesetMutex);
frameset = output;
});
```
4. Render window
```c++
while(win.run()) {
std::lock_guard<std::mutex> lock(framesetMutex);
if(frameset == nullptr) {
continue;
}
// Rendering display
win.pushFramesToView(frameset);
}
```
5. stop pipeline
```c++
// Stop the Pipeline, no frame data will be generated
pipe.stop();
```
## Run Sample
If you are on Windows, you can switch to the directory `OrbbecSDK-dev/build/win_XX/bin` to find the `ob_callback.exe`.
If you are on linux, you can switch to the directory `OrbbecSDK-dev/build/linux_XX/bin` to find the `ob_callback`.
### Key introduction
Press the Esc key in the window to exit the program.
### Result
![result](../../docs/resource/callback.jpg)

View File

@@ -0,0 +1,80 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// PIDs of Astra Mini family devices whose color stream is skipped in this sample.
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)

// Stream frames via a pipeline callback and render the most recent frameset.
// Fix: the render loop previously held framesetMutex for the whole loop body,
// including rendering; that blocks the SDK callback thread while a frame is
// drawn, which can cause frame drops. The shared pointer is now copied under
// the lock and rendering happens outside it.
int main(void) try {
    // Create a pipeline.
    ob::Pipeline pipe;
    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // Get device from pipeline.
    auto device = pipe.getDevice();
    // Get sensorList from device.
    auto sensorList = device->getSensorList();
    for(uint32_t index = 0; index < sensorList->getCount(); index++) {
        // Query all supported infrared sensor type and enable the infrared stream.
        // For dual infrared device, enable the left and right infrared streams.
        // For single infrared device, enable the infrared stream.
        OBSensorType sensorType = sensorList->getSensorType(index);
        // exclude non-video sensor type
        if(!ob::TypeHelper::isVideoSensorType(sensorType)) {
            continue;
        }
        // Astra Mini devices: skip the color sensor, keep the other video streams.
        if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid()) && sensorType == OB_SENSOR_COLOR) {
            continue;
        }
        // Enable the stream for the sensor type.
        config->enableStream(sensorType);
    }
    // Shared state: the SDK callback thread writes `frameset`, the render loop
    // reads it. All access is guarded by framesetMutex.
    std::mutex                    framesetMutex;
    std::shared_ptr<ob::FrameSet> frameset = nullptr;
    // Start the pipeline with callback. Keep the callback cheap: just publish
    // the latest frameset; heavy processing here would drop frames.
    pipe.start(config, [&](std::shared_ptr<ob::FrameSet> output) {
        std::lock_guard<std::mutex> lock(framesetMutex);
        frameset = output;
    });
    // Create a window for rendering, and set the size of the window.
    ob_smpl::CVWindow win("Callback", 1280, 720, ob_smpl::ARRANGE_GRID);
    while(win.run()) {
        // Copy the shared pointer under the lock, then render without holding
        // it, so the callback thread is never blocked by rendering.
        std::shared_ptr<ob::FrameSet> latest;
        {
            std::lock_guard<std::mutex> lock(framesetMutex);
            latest = frameset;
        }
        if(latest == nullptr) {
            continue;
        }
        // Rendering display
        win.pushFramesToView(latest);
    }
    // Stop the Pipeline, no frame data will be generated
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Stream example: display the color stream.
project(ob_color)

add_executable(${PROJECT_NAME} color.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,47 @@
# C++ Sample: 1.stream.color
## Overview
Use the SDK interface to obtain the camera's color stream and display it in the window.
### Knowledge
config is the configuration of the camera
Frameset is a combination of different types of Frames
## Code overview
1. Configure the output color stream and open the video stream. You must configure this before calling pipe.start().
```cpp
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// Enable color video stream.
config->enableVideoStream(OB_STREAM_COLOR);
```
2. After waiting for a while, get the color stream in the frameset and display it in the window
```cpp
while(win.run()) {
    // Wait for a frameset from the pipeline in blocking mode (no explicit timeout, so the SDK default applies).
auto frameSet = pipe.waitForFrameset();
if(frameSet == nullptr) {
continue;
}
// get color frame from frameset.
auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
// Render colorFrame.
win.pushFramesToView(colorFrame);
}
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![result](../../docs/resource/color.jpg)

View File

@@ -0,0 +1,50 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// Enable only the color stream and render it until the window is closed (ESC).
int main(void) try {
    // Create a pipeline with default device.
    ob::Pipeline pipe;
    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // Enable color video stream.
    config->enableVideoStream(OB_STREAM_COLOR);
    // Start the pipeline with config.
    pipe.start(config);
    // Create a window for rendering and set the resolution of the window.
    ob_smpl::CVWindow win("Color");
    while(win.run()) {
        // Wait for a frameset from the pipeline in blocking mode (no explicit
        // timeout argument here, so the SDK default timeout applies).
        auto frameSet = pipe.waitForFrameset();
        if(frameSet == nullptr) {
            continue;
        }
        // get color frame from frameset.
        auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
        // Render colorFrame.
        win.pushFramesToView(colorFrame);
    }
    // Stop the Pipeline, no frame data will be generated
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Stream example: display depth and confidence streams (Gemini 435Le only).
project(ob_confidence)

add_executable(${PROJECT_NAME} confidence.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,58 @@
# C++ Sample: 1.stream.confidence
## Overview
Use the SDK interface to obtain the depth and confidence stream of the camera and display them in the window
### Knowledge
Enabling the confidence stream requires the depth stream to be active, and its resolution and frame rate must match the depth stream's.
## Code overview
1. Configure the depth and confidence streams, then start the pipeline with this configuration. All stream configurations must be completed before calling pipe.start().
```cpp
// By creating config to configure which streams to enable or disable for the pipeline, here the depth stream will be enabled.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// Enable depth stream.
config->enableVideoStream(OB_STREAM_DEPTH);
// Enable confidence stream. The resolution and fps of confidence must match depth stream.
auto enabledProfiles = config->getEnabledStreamProfileList();
if(enabledProfiles) {
for(uint32_t i = 0; i < enabledProfiles->getCount(); i++) {
auto profile = enabledProfiles->getProfile(i);
if(profile && profile->getType() == OB_STREAM_DEPTH) {
auto depthProfile = profile->as<ob::VideoStreamProfile>();
if(depthProfile) {
config->enableVideoStream(OB_STREAM_CONFIDENCE, depthProfile->getWidth(), depthProfile->getHeight(), depthProfile->getFps());
}
break;
}
}
}
```
2. After waiting for a while, get the depth and confidence stream in the frameset and display them in the window
```cpp
while(win.run()) {
// Wait for up to 100ms for a frameset in blocking mode.
auto frameSet = pipe.waitForFrameset(100);
if(frameSet == nullptr) {
continue;
}
    // Render frame in the window.
win.pushFramesToView(frameSet);
}
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/confidence.jpg)

View File

@@ -0,0 +1,73 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <thread>
// PID of the Gemini 435Le, the only device this sample supports.
#define IS_GEMINI_435LE(pid) (pid == 0x0815)

// Enable the depth stream plus a confidence stream that matches the depth
// stream's resolution and frame rate, then render both.
int main(void) try {
    // Create a pipeline with default device.
    ob::Pipeline pipe;
    // This example only supports Gemini 435Le device
    auto device = pipe.getDevice();
    if(!IS_GEMINI_435LE(device->getDeviceInfo()->getPid())) {
        std::cout << "This example only supports Gemini 435Le device." << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        return 0;
    }
    // By creating config to configure which streams to enable or disable for the pipeline, here the depth stream will be enabled.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // Enable depth stream.
    config->enableVideoStream(OB_STREAM_DEPTH);
    // Enable confidence stream. The resolution and fps of confidence must match depth stream.
    // Look up the depth profile that was just enabled and copy its parameters.
    auto enabledProfiles = config->getEnabledStreamProfileList();
    if(enabledProfiles) {
        for(uint32_t i = 0; i < enabledProfiles->getCount(); i++) {
            auto profile = enabledProfiles->getProfile(i);
            if(profile && profile->getType() == OB_STREAM_DEPTH) {
                auto depthProfile = profile->as<ob::VideoStreamProfile>();
                if(depthProfile) {
                    config->enableVideoStream(OB_STREAM_CONFIDENCE, depthProfile->getWidth(), depthProfile->getHeight(), depthProfile->getFps());
                }
                break;
            }
        }
    }
    // Start the pipeline with config.
    pipe.start(config);
    // Create a window for rendering, and set the resolution of the window.
    ob_smpl::CVWindow win("Confidence", 1280, 720, ob_smpl::ARRANGE_GRID);
    while(win.run()) {
        // Wait for up to 100ms for a frameset in blocking mode.
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }
        // Render frame in the window.
        win.pushFramesToView(frameSet);
    }
    // Stop the pipeline
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,22 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

# Stream example: display the depth stream and log center-pixel distance.
project(ob_depth)

add_executable(${PROJECT_NAME} depth.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the common output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,51 @@
# C++ Sample: 1.stream.depth
## Overview
Use the SDK interface to obtain the depth stream of the camera and display it in the window
### Knowledge
DepthFrame can obtain relevant information about the depth
## code overview
1. Configure the output depth stream and open the video stream. You must configure this before calling pipe.start().
```cpp
// By creating config to configure which streams to enable or disable for the pipeline, here the depth stream will be enabled.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
//This is the default depth streamprofile that is enabled. If you want to modify it, you can do so in the configuration file.
config->enableVideoStream(OB_STREAM_DEPTH);
```
2. Calculate the distance of the center pixel from the acquired Y16-format depth stream and display it in the window; the distance is refreshed every 30 frames. The default depth unit for the SDK is millimeters.
```cpp
// Get the depth Frame form depthFrameRaw.
auto depthFrame = depthFrameRaw->as<ob::DepthFrame>();
// for Y16 format depth frame, print the distance of the center pixel every 30 frames.
if(depthFrame->getIndex() % 30 == 0 && depthFrame->getFormat() == OB_FORMAT_Y16) {
uint32_t width = depthFrame->getWidth();
uint32_t height = depthFrame->getHeight();
float scale = depthFrame->getValueScale();
const uint16_t *data = reinterpret_cast<const uint16_t *>(depthFrame->getData());
// pixel value multiplied by scale is the actual distance value in millimeters.
float centerDistance = data[width * height / 2 + width / 2] * scale;
// // attention: if the distance is 0, it means that the depth camera cannot detect the object (may be out of detection range).
win.addLog("Facing an object at a distance of " + ob_smpl::toString(centerDistance, 3) + " mm. ");
}
```
## Run Sample
Moving the camera can obtain the change in the distance across the center pixel
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/depth.jpg)

View File

@@ -0,0 +1,71 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <thread>
// Render the depth stream and, for Y16 frames, periodically log the distance
// of the image-center pixel in millimeters.
int main(void) try {
    // Create a pipeline with default device.
    ob::Pipeline pipe;
    // By creating config to configure which streams to enable or disable for the pipeline, here the depth stream will be enabled.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // This is the default depth streamprofile that is enabled. If you want to modify it, you can do so in the configuration file.
    config->enableVideoStream(OB_STREAM_DEPTH);
    // Start the pipeline with config.
    pipe.start(config);
    // Create a window for rendering, and set the resolution of the window.
    ob_smpl::CVWindow win("Depth");
    while(win.run()) {
        // Wait for up to 100ms for a frameset in blocking mode.
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }
        // Get the depth frame raw from frameset.
        auto depthFrameRaw = frameSet->getFrame(OB_FRAME_DEPTH);
        if(!depthFrameRaw) {
            continue;
        }
        // Get the depth Frame from depthFrameRaw.
        auto depthFrame = depthFrameRaw->as<ob::DepthFrame>();
        // for Y16 format depth frame, print the distance of the center pixel every 30 frames.
        if(depthFrame->getIndex() % 30 == 0 && depthFrame->getFormat() == OB_FORMAT_Y16) {
            uint32_t width = depthFrame->getWidth();
            uint32_t height = depthFrame->getHeight();
            float scale = depthFrame->getValueScale();
            const uint16_t *data = reinterpret_cast<const uint16_t *>(depthFrame->getData());
            // pixel value multiplied by scale is the actual distance value in millimeters.
            // Index targets row height/2, column width/2 (the image center when
            // height is even — NOTE(review): for odd heights the integer math
            // lands mid-row; confirm intent).
            float centerDistance = data[width * height / 2 + width / 2] * scale;
            // attention: if the distance is 0, it means that the depth camera cannot detect the object (may be out of detection range).
            win.addLog("Facing an object at a distance of " + ob_smpl::toString(centerDistance, 3) + " mm. ");
        }
        // Render frame in the window.
        win.pushFramesToView(depthFrame);
    }
    // Stop the pipeline
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print SDK error details, then wait for a key so the message stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_imu)
# Build the IMU sample from a single source file.
add_executable(${PROJECT_NAME} imu.cpp)
# The samples only require C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,66 @@
# C++ Sample: 1.stream.imu
## Overview
Use the SDK interface to obtain the camera's internal imu data and output it
### Knowledge
AccelFrame measures the acceleration of x, y, and z in m/s^2
GyroFrame measures the angular velocity of x, y, and z in rad/s
Frameset is a combination of different types of Frames. The IMU data stream can be obtained through the frameset.
## code overview
1. Configure the IMU-related streams and enable them. You must configure this before calling pipe.start().
```cpp
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// Enable Accel stream.
config->enableAccelStream();
// Enable Gyro stream.
config->enableGyroStream();
// Only FrameSet that contains all types of data frames will be output.
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
```
2. Get the accel and gyro frames from the frameset and print their data every 50 frames
```cpp
auto accelFrameRaw = frameSet->getFrame(OB_FRAME_ACCEL);
auto accelFrame = accelFrameRaw->as<ob::AccelFrame>();
auto accelIndex = accelFrame->getIndex();
auto accelTimeStampUs = accelFrame->getTimeStampUs();
auto accelTemperature = accelFrame->getTemperature();
auto accelType = accelFrame->getType();
if(accelIndex % 50 == 0) { // print information every 50 frames.
auto accelValue = accelFrame->getValue();
printImuValue(accelValue, accelIndex, accelTimeStampUs, accelTemperature, accelType, "m/s^2");
}
auto gyroFrameRaw = frameSet->getFrame(OB_FRAME_GYRO);
auto gyroFrame = gyroFrameRaw->as<ob::GyroFrame>();
auto gyroIndex = gyroFrame->getIndex();
auto gyroTimeStampUs = gyroFrame->getTimeStampUs();
auto gyroTemperature = gyroFrame->getTemperature();
auto gyroType = gyroFrame->getType();
if(gyroIndex % 50 == 0) { // print information every 50 frames.
auto gyroValue = gyroFrame->getValue();
printImuValue(gyroValue, gyroIndex, gyroTimeStampUs, gyroTemperature, gyroType, "rad/s");
}
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/imu.jpg)

View File

@@ -0,0 +1,90 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_types.h"
#include <mutex>
#include <iostream>
// Print one IMU sample (accel or gyro) in a human-readable multi-line format.
// obFloat3d holds the 3-axis measurement; unitStr is the unit suffix, e.g. "m/s^2" or "rad/s".
void printImuValue(OBFloat3D obFloat3d, uint64_t index, uint64_t timeStampUs, float temperature, OBFrameType type, const std::string &unitStr) {
    std::cout << "frame index: " << index << std::endl;
    const std::string frameLabel = ob::TypeHelper::convertOBFrameTypeToString(type);
    std::cout << frameLabel << " Frame: \n\r{\n\r";
    std::cout << "  tsp = " << timeStampUs << "\n\r";
    std::cout << "  temperature = " << temperature << "\n\r";
    std::cout << "  " << frameLabel << ".x = " << obFloat3d.x << unitStr << "\n\r";
    std::cout << "  " << frameLabel << ".y = " << obFloat3d.y << unitStr << "\n\r";
    std::cout << "  " << frameLabel << ".z = " << obFloat3d.z << unitStr << "\n\r";
    std::cout << "}\n\r" << std::endl;
}
// Stream accel + gyro data from the default device and print every 50th sample
// of each until the user presses Esc.
int main() try {
    // Create a pipeline with the default device.
    ob::Pipeline pipe;

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enable Accel stream.
    config->enableAccelStream();

    // Enable Gyro stream.
    config->enableGyroStream();

    // Only FrameSets that contain all types of data frames will be output.
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);

    // Start the pipeline with config.
    pipe.start(config);

    uint64_t accelCount = 0;  // accel frames received so far
    uint64_t gyroCount  = 0;  // gyro frames received so far
    while(true) {
        auto key = ob_smpl::waitForKeyPressed(1);
        if(key == ESC_KEY) {  // Esc key to exit.
            break;
        }

        auto frameSet = pipe.waitForFrameset();
        if(frameSet == nullptr) {
            continue;
        }

        // Fix: guard against a missing frame before dereferencing. The original code
        // dereferenced the getFrame() results unconditionally, which would crash if a
        // frameset ever lacked one of the IMU frames (the other samples in this set
        // null-check getFrame() results).
        auto accelFrameRaw = frameSet->getFrame(OB_FRAME_ACCEL);
        if(accelFrameRaw) {
            auto accelFrame       = accelFrameRaw->as<ob::AccelFrame>();
            auto accelIndex       = accelFrame->getIndex();
            auto accelTimeStampUs = accelFrame->getTimeStampUs();
            auto accelTemperature = accelFrame->getTemperature();
            auto accelType        = accelFrame->getType();
            if(accelCount % 50 == 0) {  // print information every 50 frames.
                auto accelValue = accelFrame->getValue();
                printImuValue(accelValue, accelIndex, accelTimeStampUs, accelTemperature, accelType, "m/s^2");
            }
            ++accelCount;
        }

        auto gyroFrameRaw = frameSet->getFrame(OB_FRAME_GYRO);
        if(gyroFrameRaw) {
            auto gyroFrame       = gyroFrameRaw->as<ob::GyroFrame>();
            auto gyroIndex       = gyroFrame->getIndex();
            auto gyroTimeStampUs = gyroFrame->getTimeStampUs();
            auto gyroTemperature = gyroFrame->getTemperature();
            auto gyroType        = gyroFrame->getType();
            if(gyroCount % 50 == 0) {  // print information every 50 frames.
                auto gyroValue = gyroFrame->getValue();
                printImuValue(gyroValue, gyroIndex, gyroTimeStampUs, gyroTemperature, gyroType, "rad/s");
            }
            ++gyroCount;
        }
    }

    // Stop the pipeline; no more frame data will be generated.
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_infrared)
# Build the infrared sample from a single source file.
add_executable(${PROJECT_NAME} infrared.cpp)
# The samples only require C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,59 @@
# C++ Sample: 1.stream.infrared
## Overview
Use the SDK interface to obtain the camera IR stream and display it in the window
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
## code overview
1. Configure IR sensor related information and enable the IR stream.You must configure this before calling pipe.start().
```cpp
// Get the device from pipeline.
std::shared_ptr<ob::Device> device = pipe.getDevice();
// Get the sensor list from device.
std::shared_ptr<ob::SensorList> sensorList = device->getSensorList();
// Create a config for pipeline.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
// Query all supported infrared sensor type and enable the infrared stream.
// For dual infrared device, enable the left and right infrared streams.
// For single infrared device, enable the infrared stream.
OBSensorType sensorType = sensorList->getSensorType(index);
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT) {
// Enable the stream with specified profile;
        config->enableVideoStream(sensorType, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
}
}
```
2. Open the window and output the IR stream
```cpp
ob_smpl::CVWindow win("Infrared", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
while(win.run()) {
// Wait for up to 100ms for a frameset in blocking mode.
auto frameSet = pipe.waitForFrameset(100);
if(frameSet == nullptr) {
continue;
}
// Render a set of frame in the window.
win.pushFramesToView(frameSet);
}
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/infrared.jpg)

View File

@@ -0,0 +1,65 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// Mapping from IR sensor types to their corresponding stream types.
// NOTE(review): this table is not referenced by main() below — presumably kept
// for reference or future use; confirm before removing.
std::map<OBSensorType, ob_stream_type> sensorStreamMap = {
    {OB_SENSOR_IR, OB_STREAM_IR},
    {OB_SENSOR_IR_LEFT, OB_STREAM_IR_LEFT},
    {OB_SENSOR_IR_RIGHT, OB_STREAM_IR_RIGHT}
};
int main() try {
// Create a pipeline with default device.
ob::Pipeline pipe;
// Get the device from pipeline.
std::shared_ptr<ob::Device> device = pipe.getDevice();
// Get the sensor list from device.
std::shared_ptr<ob::SensorList> sensorList = device->getSensorList();
// Create a config for pipeline.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
// Query all supported infrared sensor type and enable the infrared stream.
// For dual infrared device, enable the left and right infrared streams.
// For single infrared device, enable the infrared stream.
OBSensorType sensorType = sensorList->getSensorType(index);
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT) {
// Enable the stream with specified profile;
config->enableVideoStream(sensorType, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
}
}
pipe.start(config);
// Create a window for rendering and set the resolution of the window
ob_smpl::CVWindow win("Infrared", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
while(win.run()) {
// Wait for up to 100ms for a frameset in blocking mode.
auto frameSet = pipe.waitForFrameset(100);
if(frameSet == nullptr) {
continue;
}
// Render a set of frame in the window.
win.pushFramesToView(frameSet);
}
// Stop the pipeline, no frame data will be generated
pipe.stop();
return 0;
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_multi_streams)
# Build the multi-stream sample from a single source file.
add_executable(${PROJECT_NAME} multi_streams.cpp)
# The samples only require C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,74 @@
# C++ Sample: 1.stream.multi_streams
## Overview
Use SDK to obtain multiple camera data streams and output them
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
Frameset is a combination of different types of Frames.
## code overview
1. Configure the output video stream in addition to imu data, such as depth, color, etc.
```cpp
// Get sensor list from device.
auto sensorList = device->getSensorList();
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
// Get sensor type.
auto sensorType = sensorList->getSensorType(i);
// exclude gyro and accel sensors.
if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
continue;
}
// enable the stream.
config->enableStream(sensorType);
}
// Start the pipeline with config
std::mutex frameMutex;
std::shared_ptr<const ob::FrameSet> renderFrameSet;
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
std::lock_guard<std::mutex> lock(frameMutex);
renderFrameSet = frameSet;
});
```
2. Instantiate the pipeline, configure IMU related information and start streaming
```cpp
// The IMU frame rate is much faster than the video, so it is advisable to use a separate pipeline to obtain IMU data.
auto dev = pipe.getDevice();
auto imuPipeline = std::make_shared<ob::Pipeline>(dev);
std::mutex imuFrameMutex;
std::shared_ptr<const ob::FrameSet> renderImuFrameSet;
std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
// enable gyro stream.
imuConfig->enableGyroStream();
// enable accel stream.
imuConfig->enableAccelStream();
// start the imu pipeline.
imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameSet) {
std::lock_guard<std::mutex> lockImu(imuFrameMutex);
renderImuFrameSet = frameSet;
});
```
## Run Sample
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/multistream.jpg)

View File

@@ -0,0 +1,121 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.h>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <mutex>
#include <thread>
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
// Stream every video sensor on the device (plus IMU via a dedicated pipeline,
// when present) and render the latest frames in one grid window.
int main(void) try {
    // Create a pipeline with default device.
    ob::Pipeline pipe;

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enumerate all sensors on the device and enable their streams.
    auto device     = pipe.getDevice();
    auto sensorList = device->getSensorList();

    bool supportIMU = false;
    for(uint32_t i = 0; i < sensorList->getCount(); i++) {
        auto sensorType = sensorList->getSensorType(i);

        // IMU sensors are served by a dedicated pipeline below (their frame rate is
        // much higher than video); just record that the device has an IMU.
        if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
            supportIMU = true;
            continue;
        }

        // Astra Mini family: skip the color sensor.
        if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
            if(sensorType == OB_SENSOR_COLOR) {
                continue;
            }
        }

        // Enable the stream for this sensor.
        config->enableStream(sensorType);
    }

    // Start the pipeline; the callback keeps only the most recent frameset for rendering.
    std::mutex                          frameMutex;
    std::shared_ptr<const ob::FrameSet> renderFrameSet;
    pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
        std::lock_guard<std::mutex> lock(frameMutex);
        renderFrameSet = frameSet;
    });

    if(supportIMU) {
        // The IMU frame rate is much faster than the video, so use a separate pipeline
        // (on the same device) to obtain IMU data.
        auto dev         = pipe.getDevice();
        auto imuPipeline = std::make_shared<ob::Pipeline>(dev);

        std::mutex                          imuFrameMutex;
        std::shared_ptr<const ob::FrameSet> renderImuFrameSet;

        std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
        // enable gyro stream.
        imuConfig->enableGyroStream();
        // enable accel stream.
        imuConfig->enableAccelStream();
        // start the imu pipeline.
        imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameSet) {
            std::lock_guard<std::mutex> lockImu(imuFrameMutex);
            renderImuFrameSet = frameSet;
        });

        // Create a window for rendering and set the resolution of the window.
        ob_smpl::CVWindow win("MultiStream", 1280, 720, ob_smpl::ARRANGE_GRID);
        while(win.run()) {
            // Lock order: IMU mutex first, then video mutex — kept consistent with the
            // single acquisition sites in the callbacks, so no deadlock is possible.
            std::lock_guard<std::mutex> lockImu(imuFrameMutex);
            std::lock_guard<std::mutex> lock(frameMutex);
            if(renderFrameSet == nullptr || renderImuFrameSet == nullptr) {
                continue;
            }
            // Render camera and imu framesets.
            win.pushFramesToView({ renderFrameSet, renderImuFrameSet });
        }

        // Stop both pipelines; no more frame data will be generated.
        pipe.stop();
        // Fix: the original re-tested `if(supportIMU)` here, which is always true
        // inside this branch; the redundant check has been removed.
        imuPipeline->stop();
    }
    else {
        // No IMU on this device: render only the camera streams.
        ob_smpl::CVWindow win("MultiStream", 1280, 720, ob_smpl::ARRANGE_GRID);
        while(win.run()) {
            std::lock_guard<std::mutex> lock(frameMutex);
            if(renderFrameSet == nullptr) {
                continue;
            }
            // Render the camera frameset.
            win.pushFramesToView(renderFrameSet);
        }
        // Stop the Pipeline, no frame data will be generated.
        pipe.stop();
    }
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_device_control)
# Build the device-control sample from a single source file.
add_executable(${PROJECT_NAME} device_control.cpp)
# The samples only require C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,176 @@
# C++ Sample: 2.device.control
## Overview
The SDK can be used to modify camera-related parameters, including laser switch, laser level intensity, white balance switch, etc.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
## code overview
1. Get camera related information and output
```cpp
// select a device to operate
std::shared_ptr<ob::Device> device = nullptr;
if(deviceList->getCount() > 0) {
if(deviceList->getCount() <= 1) {
// If a single device is plugged in, the first one is selected by default
device = deviceList->getDevice(0);
}
else {
device = selectDevice(deviceList);
}
auto deviceInfo = device->getDeviceInfo();
std::cout << "\n------------------------------------------------------------------------\n";
std::cout << "Current Device: "
<< " name: " << deviceInfo->getName() << ", vid: 0x" << std::hex << deviceInfo->getVid() << ", pid: 0x" << std::setw(4)
<< std::setfill('0') << deviceInfo->getPid() << ", uid: 0x" << deviceInfo->getUid() << std::dec << std::endl;
}
else {
std::cout << "Device Not Found" << std::endl;
isSelectDevice = false;
break;
}
```
2. Get the relevant parameters stored in the container and reorder them by id
```cpp
// Get property list
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device) {
std::vector<OBPropertyItem> propertyVec;
propertyVec.clear();
uint32_t size = device->getSupportedPropertyCount();
for(uint32_t i = 0; i < size; i++) {
OBPropertyItem property_item = device->getSupportedProperty(i);
if(isPrimaryTypeProperty(property_item) && property_item.permission != OB_PERMISSION_DENY) {
propertyVec.push_back(property_item);
}
}
return propertyVec;
}
```
```cpp
std::vector<OBPropertyItem> propertyList = getPropertyList(device);
std::sort(propertyList.begin(), propertyList.end(), [](const OBPropertyItem &a, const OBPropertyItem &b) { return a.id < b.id; });
```
3. Use the get command to obtain camera-related property values
```cpp
// get property value
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem) {
try {
bool bool_ret = false;
int int_ret = 0;
float float_ret = 0.0f;
switch(propertyItem.type) {
case OB_BOOL_PROPERTY:
try {
bool_ret = device->getBoolProperty(propertyItem.id);
}
catch(...) {
std::cout << "get bool property failed." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",get bool value:" << bool_ret << std::endl;
break;
case OB_INT_PROPERTY:
try {
int_ret = device->getIntProperty(propertyItem.id);
}
catch(...) {
std::cout << "get int property failed." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",get int value:" << int_ret << std::endl;
break;
case OB_FLOAT_PROPERTY:
try {
float_ret = device->getFloatProperty(propertyItem.id);
}
catch(...) {
std::cout << "get float property failed." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",get float value:" << float_ret << std::endl;
break;
default:
break;
}
}
catch(...) {
std::cout << "get property failed: " << propertyItem.name << std::endl;
}
}
```
4. Use the set command to set camera-related property values
```cpp
// set properties
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem, std::string strValue) {
try {
int int_value = 0;
float float_value = 0.0f;
int bool_value = 0;
switch(propertyItem.type) {
case OB_BOOL_PROPERTY:
bool_value = std::atoi(strValue.c_str());
try {
device->setBoolProperty(propertyItem.id, bool_value);
}
catch(...) {
std::cout << "set bool property fail." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",set bool value:" << bool_value << std::endl;
break;
case OB_INT_PROPERTY:
int_value = std::atoi(strValue.c_str());
try {
device->setIntProperty(propertyItem.id, int_value);
}
catch(...) {
std::cout << "set int property fail." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",set int value:" << int_value << std::endl;
break;
case OB_FLOAT_PROPERTY:
float_value = static_cast<float>(std::atof(strValue.c_str())) ;
try {
device->setFloatProperty(propertyItem.id, float_value);
}
catch(...) {
std::cout << "set float property fail." << std::endl;
}
std::cout << "property name:" << propertyItem.name << ",set float value:" << float_value << std::endl;
break;
default:
break;
}
}
catch(...) {
std::cout << "set property failed: " << propertyItem.name << std::endl;
}
}
```
## Run Sample
Select the camera you want to operate. If it is a single device, skip the selection.
You can enter the command ? to get all the properties of the camera, including setting the maximum and minimum values, etc.
You can enter the set command to set a parameter, for example `6 set 0` (note the spaces)
You can enter the get command to get a parameter, for example `6 get` (note the space)
Press the Esc key in the window to exit the program.
### Result
![image](../../docs/resource/control1.jpg)
![image](../../docs/resource/control2.jpg)

View File

@@ -0,0 +1,320 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"

#include <algorithm>
#include <cstring>
#include <iomanip>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
std::shared_ptr<ob::Device> selectDevice(std::shared_ptr<ob::DeviceList> deviceList);
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device);
bool isPrimaryTypeProperty(OBPropertyItem propertyItem);
void printfPropertyList(std::shared_ptr<ob::Device> device, const std::vector<OBPropertyItem> &propertyList);
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem item, std::string strValue);
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem item);
std::string permissionTypeToString(OBPermissionType permission);
// Interactive console for reading and writing device properties.
// Outer loop: choose a connected device; inner loop: accept "?" to list properties,
// "<index> get" / "<index> set <value>" to access one, or "exit" to quit.
int main(void) try {
    // Create a Context.
    ob::Context context;
    // Query the list of connected devices
    auto deviceList = context.queryDeviceList();
    bool isSelectDevice = true;
    while(isSelectDevice) {
        // select a device to operate
        std::shared_ptr<ob::Device> device = nullptr;
        if(deviceList->getCount() > 0) {
            if(deviceList->getCount() <= 1) {
                // If a single device is plugged in, the first one is selected by default
                device = deviceList->getDevice(0);
            }
            else {
                // Multiple devices: ask the user to pick one by index.
                device = selectDevice(deviceList);
            }
            // Print the identity of the chosen device (vid/pid in hex, restored to dec after).
            auto deviceInfo = device->getDeviceInfo();
            std::cout << "\n------------------------------------------------------------------------\n";
            std::cout << "Current Device: "
                      << " name: " << deviceInfo->getName() << ", vid: 0x" << std::hex << deviceInfo->getVid() << ", pid: 0x" << std::setw(4)
                      << std::setfill('0') << deviceInfo->getPid() << ", uid: 0x" << deviceInfo->getUid() << std::dec << std::endl;
        }
        else {
            std::cout << "Device Not Found" << std::endl;
            isSelectDevice = false;
            break;
        }
        std::cout << "Input \"?\" to get all properties." << std::endl;
        // Collect the controllable (bool/int/float, non-denied) properties,
        // sorted by property id for a stable on-screen numbering.
        std::vector<OBPropertyItem> propertyList = getPropertyList(device);
        std::sort(propertyList.begin(), propertyList.end(), [](const OBPropertyItem &a, const OBPropertyItem &b) { return a.id < b.id; });
        bool isSelectProperty = true;
        while(isSelectProperty) {
            std::string choice;
            std::getline(std::cin, choice);
            if(choice != "?") {
                // Tokenize the command line on whitespace.
                std::istringstream ss(choice);
                std::string tmp;
                std::vector<std::string> controlVec;
                while(ss >> tmp) {
                    controlVec.push_back(tmp);
                }
                if(controlVec.size() <= 0)
                    continue;
                // exit the program
                if(controlVec.at(0) == "exit") {
                    isSelectProperty = false;
                    isSelectDevice = false;
                    break;
                }
                // Check if it matches the input format.
                // Valid forms: "<index> get" or "<index> set <value>".
                if(controlVec.size() <= 1 || (controlVec.at(1) != "get" && controlVec.at(1) != "set") || controlVec.size() > 3
                   || (controlVec.at(1) == "set" && controlVec.size() < 3)) {
                    std::cout << "Property control usage: [property index] [set] [property value] or [property index] [get]" << std::endl;
                    continue;
                }
                size_t size = propertyList.size();
                // Note: atoi on a negative index wraps to a huge size_t, so the bound
                // check below also rejects negative input.
                size_t selectId = std::atoi(controlVec.at(0).c_str());
                if(selectId >= size) {
                    std::cout << "Your selection is out of range, please reselect: " << std::endl;
                    continue;
                }
                bool isGetValue = controlVec.at(1) == "get" ? true : false;
                auto propertyItem = propertyList.at(selectId);
                if(isGetValue) {
                    // get property value
                    getPropertyValue(device, propertyItem);
                }
                else {
                    // set property value
                    setPropertyValue(device, propertyItem, controlVec.at(2));
                }
            }
            else {
                // "?": print the property table and a usage hint.
                printfPropertyList(device, propertyList);
                std::cout << "Please select property.(Property control usage: [property number] [set/get] [property value])" << std::endl;
            }
        }
    }
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Select a device: print each connected device's name, vid, pid, uid and serial
// number, then read an index from stdin (re-prompting until it is valid) and
// return the corresponding ob::Device object.
std::shared_ptr<ob::Device> selectDevice(std::shared_ptr<ob::DeviceList> deviceList) {
    int devCount = deviceList->getCount();
    std::cout << "Device list: " << std::endl;
    for(int i = 0; i < devCount; i++) {
        std::cout << i << ". name: " << deviceList->getName(i) << ", vid: 0x" << std::hex << deviceList->getVid(i) << ", pid: 0x" << std::setw(4)
                  << std::setfill('0') << deviceList->getPid(i) << ", uid: 0x" << deviceList->getUid(i) << ", sn: " << deviceList->getSerialNumber(i)
                  << std::dec << std::endl;
    }
    std::cout << "Select a device: ";
    int devIndex;
    std::cin >> devIndex;
    while(devIndex < 0 || devIndex >= devCount || std::cin.fail()) {
        std::cin.clear();
        // Fix: discard the whole rest of the line. The original `ignore()` skipped
        // only one character, so multi-character invalid input (e.g. "ab") produced
        // one error message per character instead of one per line.
        std::cin.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
        std::cout << "Your select is out of range, please reselect: " << std::endl;
        std::cin >> devIndex;
    }
    return deviceList->getDevice(devIndex);
}
// Print the supported-property table: one line per property with its on-screen
// index (zero-padded to two digits), id, permission flags and value range.
void printfPropertyList(std::shared_ptr<ob::Device> device, const std::vector<OBPropertyItem> &propertyList) {
    std::cout << "size: " << propertyList.size() << std::endl;
    if(propertyList.empty()) {
        std::cout << "No supported property!" << std::endl;
    }
    std::cout << "\n------------------------------------------------------------------------\n";
    for(size_t i = 0; i < propertyList.size(); i++) {
        auto        item     = propertyList[i];
        std::string strRange = "";
        // Describe the valid range for this property's value type; on SDK errors the
        // range column is left empty (matching the original behavior).
        if(item.type == OB_BOOL_PROPERTY) {
            strRange = "Bool value(min:0, max:1, step:1)";
        }
        else if(item.type == OB_INT_PROPERTY) {
            try {
                OBIntPropertyRange range = device->getIntPropertyRange(item.id);
                strRange = "Int value(min:" + std::to_string(range.min) + ", max:" + std::to_string(range.max)
                           + ", step:" + std::to_string(range.step) + ")";
            }
            catch(...) {
                std::cout << "get int property range failed." << std::endl;
            }
        }
        else if(item.type == OB_FLOAT_PROPERTY) {
            try {
                OBFloatPropertyRange range = device->getFloatPropertyRange(item.id);
                strRange = "Float value(min:" + std::to_string(range.min) + ", max:" + std::to_string(range.max)
                           + ", step:" + std::to_string(range.step) + ")";
            }
            catch(...) {
                std::cout << "get float property range failed." << std::endl;
            }
        }
        // Zero-pad the index to two digits, right-aligned (width applies only to `i`).
        std::cout.setf(std::ios::right);
        std::cout.fill('0');
        std::cout.width(2);
        std::cout << i << ". ";
        std::cout << item.name << "(" << (int)item.id << ")";
        std::cout << ", permission=" << permissionTypeToString(item.permission) << ", range=" << strRange << std::endl;
    }
    std::cout << "------------------------------------------------------------------------\n";
}
// A property is "primary" when its value is a plain bool/int/float — the only
// kinds this sample knows how to get and set from the console.
bool isPrimaryTypeProperty(OBPropertyItem propertyItem) {
    switch(propertyItem.type) {
    case OB_INT_PROPERTY:
    case OB_FLOAT_PROPERTY:
    case OB_BOOL_PROPERTY:
        return true;
    default:
        return false;
    }
}
// Collect the device's supported properties, keeping only the bool/int/float
// properties that are not permission-denied (the kinds this sample can control).
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device) {
    std::vector<OBPropertyItem> propertyVec;
    uint32_t size = device->getSupportedPropertyCount();
    // Reserve up front to avoid reallocations (the original called clear() on a
    // freshly-constructed vector, which was a no-op).
    propertyVec.reserve(size);
    for(uint32_t i = 0; i < size; i++) {
        OBPropertyItem property_item = device->getSupportedProperty(i);
        if(isPrimaryTypeProperty(property_item) && property_item.permission != OB_PERMISSION_DENY) {
            propertyVec.push_back(property_item);
        }
    }
    return propertyVec;
}
// Set a bool/int/float property from its console string representation.
// strValue is parsed with atoi/atof according to the property's type; SDK errors
// from the setter are caught and reported without aborting the sample.
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem, std::string strValue) {
    try {
        int   int_value   = 0;
        float float_value = 0.0f;
        int   bool_value  = 0;
        switch(propertyItem.type) {
        case OB_BOOL_PROPERTY:
            // Any non-zero integer input is treated as true.
            bool_value = std::atoi(strValue.c_str());
            try {
                device->setBoolProperty(propertyItem.id, bool_value);
            }
            catch(ob::Error &e) {
                std::cout << "set bool property fail: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",set bool value:" << bool_value << std::endl;
            break;
        case OB_INT_PROPERTY:
            int_value = std::atoi(strValue.c_str());
            try {
                device->setIntProperty(propertyItem.id, int_value);
            }
            catch(ob::Error &e) {
                std::cout << "set int property fail: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",set int value:" << int_value << std::endl;
            break;
        case OB_FLOAT_PROPERTY:
            float_value = static_cast<float>(std::atof(strValue.c_str()));
            try {
                device->setFloatProperty(propertyItem.id, float_value);
            }
            catch(ob::Error &e) {
                // Fix: the original error message read "set floar property fail" (typo).
                std::cout << "set float property fail: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",set float value:" << float_value << std::endl;
            break;
        default:
            break;
        }
    }
    catch(...) {
        std::cout << "set property failed: " << propertyItem.name << std::endl;
    }
}
// Read one device property and print its current value to stdout.
// Only the primary scalar types (bool/int/float) are handled; a failed read
// is reported and the default-initialized value is printed instead.
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem) {
    try {
        bool  valueAsBool  = false;
        int   valueAsInt   = 0;
        float valueAsFloat = 0.0f;
        switch(propertyItem.type) {
        case OB_BOOL_PROPERTY:
            try {
                valueAsBool = device->getBoolProperty(propertyItem.id);
            }
            catch(ob::Error &e) {
                std::cout << "get bool property failed: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",get bool value:" << valueAsBool << std::endl;
            break;
        case OB_INT_PROPERTY:
            try {
                valueAsInt = device->getIntProperty(propertyItem.id);
            }
            catch(ob::Error &e) {
                std::cout << "get int property failed: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",get int value:" << valueAsInt << std::endl;
            break;
        case OB_FLOAT_PROPERTY:
            try {
                valueAsFloat = device->getFloatProperty(propertyItem.id);
            }
            catch(ob::Error &e) {
                std::cout << "get float property failed: " << e.what() << std::endl;
            }
            std::cout << "property name:" << propertyItem.name << ",get float value:" << valueAsFloat << std::endl;
            break;
        default:
            // Other property types are not handled by this sample.
            break;
        }
    }
    catch(...) {
        std::cout << "get property failed: " << propertyItem.name << std::endl;
    }
}
// Render a permission flag as a compact "R/W"-style tag for table output.
std::string permissionTypeToString(OBPermissionType permission) {
    if(permission == OB_PERMISSION_READ) {
        return "R/_";
    }
    if(permission == OB_PERMISSION_WRITE) {
        return "_/W";
    }
    if(permission == OB_PERMISSION_READ_WRITE) {
        return "R/W";
    }
    // Anything else (e.g. DENY) shows as neither readable nor writable.
    return "_/_";
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_device_firmware_update)
# Build the sample executable from its single source file.
add_executable(${PROJECT_NAME} device_firmware_update.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution views.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Start the VS debugger in the runtime output dir so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,108 @@
# C++ Sample: 2.device.firmware_update
## Overview
This sample demonstrates how to use the SDK to update the firmware of a connected device. It includes functions to list connected devices, select a device, and update its firmware.
> Note: This sample is not suitable for Femto Mega, Femto Mega i, and Femto Bolt devices.
> For these devices, please refer to this repo: [https://github.com/orbbec/OrbbecFirmware](https://github.com/orbbec/OrbbecFirmware)
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
## Code Overview
1. Initialize the SDK Context: This is necessary to access the connected devices.
```c++
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
```
2. List Connected Devices.
```c++
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
devices.push_back(deviceList->getDevice(i));
}
```
3. Define a Callback Function for Firmware Update Progress.
You can define a callback function to get the progress of the firmware update. The callback function will be called every time the device updates its progress.
```c++
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
if(firstCall) {
firstCall = !firstCall;
}
else {
std::cout << "\033[3F"; // Move cursor up 3 lines
}
std::cout << "\033[K"; // Clear the current line
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
std::cout << "\033[K";
std::cout << "Status : ";
switch(state) {
case STAT_VERIFY_SUCCESS:
std::cout << "Image file verification success" << std::endl;
break;
case STAT_FILE_TRANSFER:
std::cout << "File transfer in progress" << std::endl;
break;
case STAT_DONE:
std::cout << "Update completed" << std::endl;
break;
case STAT_IN_PROGRESS:
std::cout << "Upgrade in progress" << std::endl;
break;
case STAT_START:
std::cout << "Starting the upgrade" << std::endl;
break;
case STAT_VERIFY_IMAGE:
std::cout << "Verifying image file" << std::endl;
break;
default:
std::cout << "Unknown status or error" << std::endl;
break;
}
std::cout << "\033[K";
std::cout << "Message : " << message << std::endl << std::flush;
}
```
4. Update the Device Firmware.
After selecting a device, update its firmware by calling the updateFirmware function with the specified callback.
```c++
devices[deviceIndex]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
```
### Attention
1. After the firmware update completes, you need to restart the device manually to apply the new firmware. Alternatively, you can use the `reboot()` function to restart the device programmatically.
```c++
device->reboot();
```
2. Don't plug out the device during the firmware update process.
3. For linux users, it is recommended to use the `LibUVC` as the backend as the `V4L2` backend may cause some issues on some systems. Switch backend before create device like this:
```c++
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
```
## Run Sample
Select the device for firmware update and input the path of the firmware file. The SDK will start updating the firmware, and the progress will be displayed on the console.
### Result
![image](../../docs/resource/device_firmware_update.jpg)

View File

@@ -0,0 +1,202 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
#include <fstream>
#include <string>
#include <memory>
#include <algorithm>
#include <cctype>
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent);
bool getFirmwarePath(std::string &firmwarePath);
bool selectDevice(int &deviceIndex);
void printDeviceList();
bool firstCall = true;
std::vector<std::shared_ptr<ob::Device>> devices{};
// Interactive firmware-update sample entry point.
// Flow: create a Context, enumerate connected devices, then loop letting the
// user pick a device and a firmware file; the update runs synchronously with
// progress reported via firmwareUpdateCallback.
int main() try {
    // Create a context to access the connected devices
    std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
#if defined(__linux__)
    // On Linux, it is recommended to use the libuvc backend for device access as v4l2 is not always reliable on some systems for firmware update.
    context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
#endif
    // Get connected devices from the context
    std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
    if(deviceList->getCount() == 0) {
        std::cout << "No device found. Please connect a device first!" << std::endl;
        std::cout << "Press any key to exit..." << std::endl;
        ob_smpl::waitForKeyPressed();
        return 0;
    }
    // Cache shared_ptr handles in the global `devices` vector so the devices
    // stay alive across the selection/update loop below.
    for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
        devices.push_back(deviceList->getDevice(i));
    }
    std::cout << "Devices found:" << std::endl;
    printDeviceList();
    while(true) {
        // Reset the callback's first-call flag so the ANSI cursor repositioning
        // in firmwareUpdateCallback starts fresh for each update run.
        firstCall = true;
        int deviceIndex = -1;
        if(!selectDevice(deviceIndex)) {
            break;
        }
        std::string firmwarePath;
        if(!getFirmwarePath(firmwarePath)) {
            break;
        }
        std::cout << "Upgrading device firmware, please wait...\n\n";
        try {
            // Set async to false to synchronously block and wait for the device firmware upgrade to complete.
            devices[deviceIndex]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
        }
        catch(ob::Error &e) {
            // If the update fails, will throw an exception.
            std::cerr << "\nThe upgrade was interrupted! An error occurred! " << std::endl;
            std::cerr << "Error message: " << e.what() << std::endl;
            std::cout << "Press any key to exit." << std::endl;
            ob_smpl::waitForKeyPressed();
            break;
        }
        std::string input;
        std::cout << "Enter 'Q' or 'q' to quit, or any other key to continue: ";
        std::getline(std::cin, input);
        if(input == "Q" || input == "q") {
            break;
        }
    }
}
// Function-try-block handler: reports any SDK error that escapes main.
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Progress callback for Device::updateFirmware.
// Redraws a fixed three-line status block (progress / status / message) in
// place using ANSI escape codes: "\033[3F" moves the cursor up three lines,
// "\033[K" clears to end of line. The global `firstCall` flag skips the
// cursor move on the first invocation, when there is nothing to overwrite.
// NOTE(review): assumes the terminal understands ANSI escapes — confirm for
// plain Windows consoles without virtual-terminal processing.
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent)
{
    if(firstCall) {
        firstCall = !firstCall;
    }
    else {
        std::cout << "\033[3F"; // Move cursor up 3 lines
    }
    std::cout << "\033[K"; // Clear the current line
    std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
    std::cout << "\033[K";
    std::cout << "Status : ";
    // Map the SDK state code to a human-readable status line.
    switch(state) {
    case STAT_VERIFY_SUCCESS:
        std::cout << "Image file verification success" << std::endl;
        break;
    case STAT_FILE_TRANSFER:
        std::cout << "File transfer in progress" << std::endl;
        break;
    case STAT_DONE:
        std::cout << "Update completed" << std::endl;
        break;
    case STAT_IN_PROGRESS:
        std::cout << "Upgrade in progress" << std::endl;
        break;
    case STAT_START:
        std::cout << "Starting the upgrade" << std::endl;
        break;
    case STAT_VERIFY_IMAGE:
        std::cout << "Verifying image file" << std::endl;
        break;
    default:
        std::cout << "Unknown status or error" << std::endl;
        break;
    }
    std::cout << "\033[K";
    // Flush so progress is visible immediately even without a newline-flush.
    std::cout << "Message : " << message << std::endl << std::flush;
}
// Prompt the user, repeatedly, for a firmware file path until a valid
// ".bin" or ".img" path is entered. On success stores the path in
// firmwarePath and returns true; entering 'q'/'Q' terminates the process.
// Only the extension is validated here — the SDK verifies the image content
// during the actual update.
// Fixes vs. original: retries use a loop instead of unbounded recursion,
// leading whitespace is now actually stripped (the old code only stripped
// trailing despite its comment), and the error message mentions ".img".
bool getFirmwarePath(std::string &firmwarePath) {
    while(true) {
        std::cout << "Please input the path of the firmware file (.bin or .img) to be updated:" << std::endl;
        std::cout << "(Enter 'Q' or 'q' to quit): " << std::endl;
        std::cout << "Path: ";
        std::string input;
        std::getline(std::cin, input);
        if(input == "Q" || input == "q") {
            exit(EXIT_SUCCESS);
        }
        // Remove leading and trailing whitespace.
        const auto notSpace = [](unsigned char ch) { return !std::isspace(ch); };
        input.erase(input.begin(), std::find_if(input.begin(), input.end(), notSpace));
        input.erase(std::find_if(input.rbegin(), input.rend(), notSpace).base(), input.end());
        // Remove surrounding single quotes (e.g. produced by shell drag-and-drop).
        if(input.size() >= 2 && input.front() == '\'' && input.back() == '\'') {
            input = input.substr(1, input.size() - 2);
        }
        if(input.size() > 4 && (input.substr(input.size() - 4) == ".bin" || input.substr(input.size() - 4) == ".img")) {
            firmwarePath = input;
            std::cout << "Firmware file confirmed: " << firmwarePath << std::endl << std::endl;
            return true;
        }
        std::cout << "Invalid file format. Please provide a .bin or .img file." << std::endl << std::endl;
    }
}
// Print an indexed summary (name / serial number / firmware version) for
// every cached device so the user can pick one by index.
// Fix: the original used rules of different lengths above and below the
// table; both are now generated from the same 80-dash string.
void printDeviceList() {
    const std::string separator(80, '-');
    std::cout << separator << "\n";
    for(uint32_t i = 0; i < devices.size(); ++i) {
        std::cout << "[" << i << "] " << "Device: " << devices[i]->getDeviceInfo()->getName();
        std::cout << " | SN: " << devices[i]->getDeviceInfo()->getSerialNumber();
        std::cout << " | Firmware version: " << devices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
    }
    std::cout << separator << "\n";
}
// Ask the user on stdin which device to update.
// 'q'/'Q' aborts (returns false); 'l'/'L' reprints the device list.
// On success, deviceIndex holds a valid index into the global `devices`
// vector and the function returns true.
bool selectDevice(int &deviceIndex) {
    for(std::string line;;) {
        std::cout << "Please select a device to update the firmware, enter 'l' to list devices, or enter 'q' to quit: " << std::endl;
        std::cout << "Device index: ";
        std::getline(std::cin, line);
        if(line == "Q" || line == "q") {
            return false;
        }
        if(line == "l" || line == "L") {
            printDeviceList();
            continue;
        }
        bool valid = false;
        try {
            // std::stoi throws on non-numeric input; treat that as invalid.
            deviceIndex = std::stoi(line);
            valid       = deviceIndex >= 0 && deviceIndex < static_cast<int>(devices.size());
        }
        catch(...) {
            valid = false;
        }
        if(!valid) {
            std::cout << "Invalid input, please enter a valid index number." << std::endl;
            continue;
        }
        std::cout << std::endl;
        return true;
    }
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_device_forceip)
# Build the ForceIP sample executable from its single source file.
add_executable(${PROJECT_NAME} forceip.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution views.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Start the VS debugger in the runtime output dir so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,53 @@
# C++ Sample: 2.device.forceip
## Overview
This sample demonstrates how to use the SDK context class to query connected devices, configure the network IP of a selected device using the ForceIP command (as defined by the GigE Vision standard)
### Knowledge
The Context class serves as the entry point to the SDK. It provides functionality to:
1. Query connected device lists
2. Modify network configurations for the selected device
## Code Overview
1. Query device list and select a device
```cpp
// Create a Context object to interact with Orbbec devices
ob::Context context;
// Query the list of connected devices
auto deviceList = context.queryDeviceList();
// Select a device to operate
uint32_t selectedIndex;
auto res = selectDevice(deviceList, selectedIndex);
```
2. Get new IP configuration from user input
```cpp
OBNetIpConfig config = getIPConfig();
```
3. Change the selected device IP configuration and print the result of the operation.
```cpp
res = context.forceIp(deviceList->getUid(deviceNumber), config);
if(res) {
std::cout << "The new IP configuration has been successfully applied to the device." << std::endl;
}
else {
std::cout << "Failed to apply the new IP configuration." << std::endl;
}
```
## Run Sample
Device list:
Enter your choice:
Please enter the network configuration information:
Enter IP address:
Enter Subnet Mask:
Enter Gateway address:
The new IP configuration has been successfully applied to the device.
### Result
![result](/docs/resource/forceip.jpg)

View File

@@ -0,0 +1,175 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <thread>
#include <string>
#include <vector>
#include <cstring>
#include <iomanip>
#include <sstream>
#include <iostream>
static bool parseIpString(const std::string &Str, uint8_t *out) {
if(Str.empty()) {
return false;
}
try {
std::istringstream ss(Str);
std::string token;
int count = 0;
while(std::getline(ss, token, '.')) {
if(count > 4) {
return false;
}
for(char c: token) {
if(!isdigit(c)) {
return false;
}
}
int val = std::stoi(token);
if(val < 0 || val > 255) {
return false;
}
out[count++] = static_cast<uint8_t>(val);
}
return count == 4;
}
catch(const std::exception &e) {
// error
(void)e;
}
return false;
}
// Let the user pick an Ethernet-connected device from the list.
// Non-Ethernet (e.g. USB) devices are filtered out; indexList maps each
// displayed (filtered) index back to the real index within deviceList.
// Returns false when no Ethernet device exists; otherwise loops until a
// valid choice is entered and stores the real index in selectedIndex.
static bool selectDevice(std::shared_ptr<ob::DeviceList> deviceList, uint32_t &selectedIndex) {
    selectedIndex = static_cast<uint32_t>(-1);  // sentinel: nothing selected
    auto devCount = deviceList->getCount();
    if(devCount == 0) {
        std::cout << "No devices found." << std::endl;
        return false;
    }
    std::vector<uint32_t> indexList;
    uint32_t count = 0;
    std::cout << "Ethernet device list:" << std::endl;
    for(uint32_t i = 0; i < devCount; i++) {
        std::string DeviceConnectType = deviceList->getConnectionType(i);
        if(DeviceConnectType != "Ethernet") {
            continue;
        }
        // std::dec guards against a sticky hex flag left on std::cout elsewhere.
        std::cout << count << ". Name: " << deviceList->getName(i) << ", Serial Number: " << deviceList->getSerialNumber(i)
                  << ", MAC: " << deviceList->getUid(i) << std::dec << ", IP: " << deviceList->getIpAddress(i)
                  << ", Subnet Mask: " << deviceList->getSubnetMask(i) << ", Gateway: " << deviceList->getGateway(i) << std::endl;
        indexList.push_back(i);
        count++;
    }
    if(indexList.empty()) {
        std::cout << "No network devices found." << std::endl;
        return false;
    }
    uint32_t index;
    do {
        std::cout << "Enter your choice: ";
        std::cin >> index;
        if(std::cin.fail()) {
            std::cin.clear();
            // NOTE(review): ignore() discards a single character, so multi-
            // character junk input yields one error message per character —
            // confirm whether ignore(max, '\n') was intended.
            std::cin.ignore();
            std::cout << "Invalid input, please enter a number." << std::endl;
            continue;
        }
        if(index >= indexList.size()) {
            std::cout << "Invalid input, please enter a valid index number." << std::endl;
            continue;
        }
        // Translate the displayed index back to the real device index.
        selectedIndex = indexList[index];
        return true;
    } while(true);
    return false;  // unreachable: the loop only exits via return
}
// Interactively read a static IPv4 configuration (address / subnet mask /
// gateway) from stdin. Each field is re-prompted until it parses as a
// dotted quad. DHCP is disabled in the returned config (the sample applies
// a static address).
// Fixes vs. original: the three identical prompt loops are factored into a
// helper, and the octet buffers plus the returned struct are zero-
// initialized (the original could return indeterminate bytes on cin EOF).
static OBNetIpConfig getIPConfig() {
    // Prompt until parseIpString accepts the input; the result goes into out[4].
    const auto readField = [](const char *prompt, uint8_t *out) {
        std::string val;
        std::cout << prompt << std::endl;
        while(std::cin >> val) {
            if(parseIpString(val, out)) {
                return;
            }
            std::cout << "Invalid format." << std::endl;
            std::cout << prompt << std::endl;
        }
    };
    uint8_t address[4] = { 0 };
    uint8_t mask[4]    = { 0 };
    uint8_t gateway[4] = { 0 };
    std::cout << "Please enter the network configuration information:" << std::endl;
    readField("Enter IP address:", address);
    readField("Enter Subnet Mask:", mask);
    readField("Enter Gateway address:", gateway);
    OBNetIpConfig cfg{};
    cfg.dhcp = 0;  // static configuration, DHCP off
    for(int i = 0; i < 4; ++i) {
        cfg.address[i] = address[i];
        cfg.gateway[i] = gateway[i];
        cfg.mask[i]    = mask[i];
    }
    return cfg;
}
// Entry point: enumerate connected devices, let the user pick an Ethernet-
// connected one, read a static IP configuration from stdin, and apply it
// with the ForceIP command (GigE Vision style, per the README).
int main(void) try {
    // Create a Context object to interact with Orbbec devices
    ob::Context context;
    // Query the list of connected devices
    auto deviceList = context.queryDeviceList();
    // Select a device to operate
    uint32_t selectedIndex;
    auto res = selectDevice(deviceList, selectedIndex);
    if(res) {
        // Get the new IP configuration from user input
        OBNetIpConfig config = getIPConfig();
        // Change device IP configuration; the device is addressed by its UID.
        res = context.forceIp(deviceList->getUid(selectedIndex), config);
        if(res) {
            std::cout << "The new IP configuration has been successfully applied to the device." << std::endl;
        }
        else {
            std::cout << "Failed to apply the new IP configuration." << std::endl;
        }
    }
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    return 0;
}
// Function-try-block handler: reports any SDK error that escapes main.
catch(ob::Error &e) {
    std::cerr << "Function: " << e.getFunction() << "\nArguments: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
              << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_hot_plugin)
# Build the hot-plug sample executable from its single source file.
add_executable(${PROJECT_NAME} hot_plugin.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution views.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Start the VS debugger in the runtime output dir so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,74 @@
# C++ Sample: 2.device.hot_plugin
## Overview
Use the SDK to register a device hot-plug callback and handle device connection and disconnection events after a device is unplugged or plugged back in.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
### Attention
*The GMSL devices (such as Gemini335Lg) do not support hot plugging.*
## code overview
1. Register device callback and execute relevant functions during device unplugging and unplugging
```cpp
ctx.setDeviceChangedCallback( []( std::shared_ptr< ob::DeviceList > removedList, std::shared_ptr< ob::DeviceList > addedList ) {
DeviceDisconnectCallback( removedList );
DeviceConnectCallback( addedList );
} );
```
2. Trigger the callback function to print relevant information
```cpp
void printDeviceList(const std::string &prompt, std::shared_ptr<ob::DeviceList> deviceList) {
auto count = deviceList->getCount();
if(count == 0) {
return;
}
std::cout << count << " device(s) " << prompt << ": " << std::endl;
for(uint32_t i = 0; i < count; i++) {
auto uid = deviceList->getUid(i);
auto vid = deviceList->getVid(i);
auto pid = deviceList->getPid(i);
auto serialNumber = deviceList->getSerialNumber(i);
auto connection = deviceList->getConnectionType(i);
        std::cout << "  - uid: " << uid << ", vid: 0x" << std::hex << std::setfill('0') << std::setw(4) << vid << ", pid: 0x" << std::setw(4) << pid
<< ", serial number: " << serialNumber << ", connection: " << connection << std::endl;
}
std::cout << std::endl;
}
```
3. Restart your device
```cpp
void rebootDevices(std::shared_ptr<ob::DeviceList> deviceList) {
for(uint32_t i = 0; i < deviceList->getCount(); i++) {
// get device from device list
auto device = deviceList->getDevice(i);
// reboot device
device->reboot();
}
}
```
## Run Sample
Press R to reboot the device
You can try to manually unplug and plug the device
Press the Esc key in the window to exit the program
### Result
![image](../../docs/resource/hotplugin.jpg)

View File

@@ -0,0 +1,80 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iomanip>
#include <iostream>
// Print a one-line summary (uid / vid / pid / serial / connection) for every
// device in the list, prefixed with the given prompt ("added", "removed"...).
// Fix: the original set std::hex and setfill('0') on std::cout and never
// restored them, leaving the stream printing integers in hex afterwards;
// the format flags and fill character are now saved and restored.
void printDeviceList(const std::string &prompt, std::shared_ptr<ob::DeviceList> deviceList) {
    auto count = deviceList->getCount();
    if(count == 0) {
        return;
    }
    std::cout << count << " device(s) " << prompt << ": " << std::endl;
    const std::ios::fmtflags savedFlags = std::cout.flags();
    const char               savedFill  = std::cout.fill();
    for(uint32_t i = 0; i < count; i++) {
        auto uid          = deviceList->getUid(i);
        auto vid          = deviceList->getVid(i);
        auto pid          = deviceList->getPid(i);
        auto serialNumber = deviceList->getSerialNumber(i);
        auto connection   = deviceList->getConnectionType(i);
        std::cout << "  - uid: " << uid << ", vid: 0x" << std::hex << std::setfill('0') << std::setw(4) << vid << ", pid: 0x" << std::setw(4) << pid
                  << ", serial number: " << serialNumber << ", connection: " << connection << std::endl;
    }
    // Restore decimal output and the default fill character.
    std::cout.flags(savedFlags);
    std::cout.fill(savedFill);
    std::cout << std::endl;
}
// Ask every device in the list to reboot; each reboot triggers the
// disconnect/reconnect events observed by the device-changed callback.
void rebootDevices(std::shared_ptr<ob::DeviceList> deviceList) {
    const uint32_t total = deviceList->getCount();
    for(uint32_t idx = 0; idx < total; idx++) {
        deviceList->getDevice(idx)->reboot();
    }
}
// Hot-plug demo entry point: registers a device-changed callback that prints
// added/removed devices, then polls the keyboard until Esc is pressed.
// Pressing 'r' reboots all currently connected devices to deliberately
// trigger disconnect/reconnect events.
int main(void) try {
    // create context
    ob::Context ctx;
    // register device callback
    ctx.setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> deviceList) {
        printDeviceList("added", deviceList);
        printDeviceList("removed", removedList);
    });
    // query current device list
    auto currentList = ctx.queryDeviceList();
    printDeviceList("connected", currentList);
    std::cout << "Press 'r' to reboot the connected devices to trigger the device disconnect and reconnect event, or manually unplug and plugin the device."
              << std::endl;
    std::cout << "Press 'Esc' to exit." << std::endl << std::endl;
    // main loop, wait for key press
    while(true) {
        // Poll with a timeout of 100 (presumably milliseconds — see utils.hpp)
        // so the loop stays responsive while callbacks fire in the background.
        auto key = ob_smpl::waitForKeyPressed(100);
        // Press the esc key to exit
        if(key == 27) {
            break;
        }
        else if(key == 'r' || key == 'R') {
            // update device list so newly connected devices are included
            currentList = ctx.queryDeviceList();
            std::cout << "Rebooting devices..." << std::endl;
            rebootDevices(currentList);
        }
    }
    return 0;
}
// Function-try-block handler: reports any SDK error that escapes main.
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
project(ob_multi_devices_firmware_update)
# Build the multi-device firmware update sample from its single source file.
add_executable(${PROJECT_NAME} multi_devices_firmware_update.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution views.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Start the VS debugger in the runtime output dir so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,133 @@
# C++ Sample: 2.multi_devices_firmware_update
## Overview
If you want to upgrade multiple Orbbec cameras connected to your system, this sample might be helpful for you. For detailed information about firmware upgrades, please refer to the [2.device.firmware_update](../2.device.firmware_update/README.md).
> Note: This sample is not suitable for Femto Mega, Femto Mega i, and Femto Bolt devices.
> For these devices, please refer to this repo: [https://github.com/orbbec/OrbbecFirmware](https://github.com/orbbec/OrbbecFirmware)
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
## Code Overview
1. Initialize the SDK Context: This is necessary to access the connected devices.
```c++
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
```
2. List Connected Devices.
```c++
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
devices.push_back(deviceList->getDevice(i));
}
```
3. Update each device.
You don't need to worry about issues caused by using incorrect firmware during the upgrade process. The SDK performs internal verification of the firmware to ensure its compatibility and validity.
```c++
for(uint32_t i = 0; i < totalDevices.size(); ++i) {
try {
std::cout << "\nUpgrading device: " << i + 1 << "/" << totalDevices.size()
<< " - " << totalDevices[i]->getDeviceInfo()->getName() << std::endl;
totalDevices[i]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
<< std::endl;
}
}
```
4. Retrieve Status from the Callback
```c++
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
if(firstCall) {
firstCall = !firstCall;
}
else {
std::cout << "\033[3F"; // Move cursor up 3 lines
}
std::cout << "\033[K"; // Clear the current line
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
std::cout << "\033[K";
std::cout << "Status : ";
switch(state) {
case STAT_VERIFY_SUCCESS:
std::cout << "Image file verification success" << std::endl;
break;
case STAT_FILE_TRANSFER:
std::cout << "File transfer in progress" << std::endl;
break;
case STAT_DONE:
std::cout << "Update completed" << std::endl;
break;
case STAT_IN_PROGRESS:
std::cout << "Upgrade in progress" << std::endl;
break;
case STAT_START:
std::cout << "Starting the upgrade" << std::endl;
break;
case STAT_VERIFY_IMAGE:
std::cout << "Verifying image file" << std::endl;
break;
case ERR_MISMATCH:
std::cout << "Mismatch between device and image file" << std::endl;
break;
default:
std::cout << "Unknown status or error" << std::endl;
break;
}
std::cout << "\033[K";
std::cout << "Message : " << message << std::endl << std::flush;
if(state == STAT_DONE) {
finalSuccess = true;
finalFailure = false;
}
else if(state == ERR_MISMATCH) {
// If the device's firmware version does not match the image file, the callback status will be ERR_MISMATCH.
finalMismatch = true;
}
else if(state < 0) {
// While state < 0, it means an error occurred.
finalFailure = true;
}
}
```
### Attention
1. After the firmware update completes, you need to restart the device manually to apply the new firmware. Alternatively, you can use the `reboot()` function to restart the device programmatically.
```c++
device->reboot();
```
2. Don't plug out the device during the firmware update process.
3. For linux users, it is recommended to use the `LibUVC` as the backend as the `V4L2` backend may cause some issues on some systems. Switch backend before create device like this:
```c++
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
```
## Run Sample
By providing the firmware file path via the command line, the program will automatically upgrade the devices that match the firmware.
### Result
![image](../../docs/resource/multi_devices_firmware_update.jpg)

View File

@@ -0,0 +1,224 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
#include <fstream>
#include <string>
#include <memory>
#include <algorithm>
#include <cctype>
bool getFirmwarePathFromCommandLine(int argc, char **argv, std::string &firmwarePath);
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent);
void printDeviceList();
bool firstCall = true;
bool finalSuccess = false;
bool finalMismatch = false;
bool finalFailure = false;
std::vector<std::shared_ptr<ob::Device>> totalDevices{};
std::vector<std::shared_ptr<ob::Device>> successDevices{};
std::vector<std::shared_ptr<ob::Device>> misMatchDevices{};
std::vector<std::shared_ptr<ob::Device>> failedDevices{};
// Batch firmware update: takes the firmware path from the command line,
// enumerates all connected devices, upgrades each one synchronously, then
// prints a success/mismatch/failure summary and reboots the successfully
// upgraded devices.
// Fixes vs. original: the bad-arguments path now actually waits for a key
// press (it printed "Press any key to exit..." but exited immediately), and
// the garbled mismatch message has been reworded.
int main(int argc, char *argv[]) try {
    std::string firmwarePath;
    if(!getFirmwarePathFromCommandLine(argc, argv, firmwarePath)) {
        std::cout << "Press any key to exit..." << std::endl;
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
    }
    // Create a context to access the devices
    std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
#if defined(__linux__)
    // On Linux, it is recommended to use the libuvc backend for device access as v4l2 is not always reliable on some systems for firmware update.
    context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
#endif
    // Query the device list
    std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
    if(deviceList->getCount() == 0) {
        std::cout << "No device found. Please connect a device first!" << std::endl;
        std::cout << "Press any key to exit..." << std::endl;
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
    }
    // Get all devices
    for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
        totalDevices.push_back(deviceList->getDevice(i));
    }
    printDeviceList();
    for(uint32_t i = 0; i < totalDevices.size(); ++i) {
        // Reset the per-device flags that firmwareUpdateCallback sets.
        firstCall     = true;
        finalSuccess  = false;
        finalMismatch = false;
        finalFailure  = false;
        try {
            std::cout << "\nUpgrading device: " << i + 1 << "/" << totalDevices.size()
                      << " - " << totalDevices[i]->getDeviceInfo()->getName() << std::endl;
            // Upgrade each device with async set to false for synchronous calls.
            // You can set a callback function to retrieve the device's upgrade progress and related information in real time.
            totalDevices[i]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
        }
        catch(ob::Error &e) {
            // Unexpected situations, such as device disconnection, will typically throw an exception.
            // Note that common issues like verification failures are usually reported through the callback status.
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            finalFailure = true;
        }
        // Classify the outcome for the summary below.
        if(finalSuccess) {
            successDevices.push_back(totalDevices[i]);
        }
        else if(finalMismatch) {
            misMatchDevices.push_back(totalDevices[i]);
        }
        else if(finalFailure) {
            failedDevices.push_back(totalDevices[i]);
        }
    }
    std::cout << "\nUpgrade Summary:\n";
    std::cout << "==================================================\n";
    std::cout << "Success (" << successDevices.size() << "):\n";
    for(const auto &device: successDevices) {
        std::cout << "  - Name: " << device->getDeviceInfo()->getName() << std::endl;
    }
    std::cout << "\nMismatch (" << misMatchDevices.size() << "):\n";
    for(const auto &device: misMatchDevices) {
        std::cout << "  - Name: " << device->getDeviceInfo()->getName() << std::endl;
    }
    if (misMatchDevices.size() > 0) {
        // Reworded (was: "Please check use the correct firmware version...").
        std::cout << "Please check that you are using the correct firmware version and retry the upgrade." << std::endl;
    }
    std::cout << "\nFailure (" << failedDevices.size() << "):\n";
    for(const auto &device: failedDevices) {
        std::cout << "  - Name: " << device->getDeviceInfo()->getName() << std::endl;
    }
    std::cout << "\nUpgrade process completed. Try to reboot all successfully upgraded devices." << std::endl;
    for (auto &device : successDevices) {
        device->reboot();
    }
    std::cout << "Press any key to exit..." << std::endl;
    ob_smpl::waitForKeyPressed();
    return 0;
}
// Function-try-block handler: reports any SDK error that escapes main.
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Progress callback for Device::updateFirmware().
// Redraws a three-line status block (progress / status / message) in place and
// records the final outcome in the file-scope flags consumed by main().
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
    // Rewind the cursor over the previous three lines on every call after the
    // first, so the display updates in place instead of scrolling.
    if(firstCall) {
        firstCall = false;
    }
    else {
        std::cout << "\033[3F";  // Move cursor up 3 lines
    }

    std::cout << "\033[K";  // Clear the current line
    std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;

    // Map the SDK state to a human-readable status string.
    const char *statusText = "Unknown status or error";
    switch(state) {
    case STAT_START:
        statusText = "Starting the upgrade";
        break;
    case STAT_VERIFY_IMAGE:
        statusText = "Verifying image file";
        break;
    case STAT_VERIFY_SUCCESS:
        statusText = "Image file verification success";
        break;
    case STAT_FILE_TRANSFER:
        statusText = "File transfer in progress";
        break;
    case STAT_IN_PROGRESS:
        statusText = "Upgrade in progress";
        break;
    case STAT_DONE:
        statusText = "Update completed";
        break;
    case ERR_MISMATCH:
        statusText = "Mismatch between device and image file";
        break;
    default:
        break;
    }
    std::cout << "\033[K";
    std::cout << "Status : " << statusText << std::endl;

    std::cout << "\033[K";
    std::cout << "Message : " << message << std::endl << std::flush;

    // Record the final outcome for main() to sort devices into result lists.
    if(state == STAT_DONE) {
        finalSuccess = true;
        finalFailure = false;  // a transient error earlier in the run no longer counts
    }
    else if(state == ERR_MISMATCH) {
        // Firmware image does not match the device model; reported via callback,
        // not as an exception.
        finalMismatch = true;
    }
    else if(state < 0) {
        // Negative state values indicate an error occurred.
        finalFailure = true;
    }
}
// Parse the firmware file path from the command line.
// Expects exactly one argument whose name ends in ".bin" or ".img".
// On success stores the path in firmwarePath and returns true; otherwise prints
// usage/diagnostics and returns false.
bool getFirmwarePathFromCommandLine(int argc, char **argv, std::string &firmwarePath) {
    if(argc != 2) {
        std::cerr << "Usage: " << argv[0] << " <firmware_file_path>" << std::endl;
        std::cerr << "Example: " << argv[0] << " /path/to/firmware.bin" << std::endl;
        return false;
    }

    static const std::vector<std::string> validExtensions = { ".bin", ".img" };
    firmwarePath = argv[1];

    // Both accepted extensions are exactly 4 characters long, so comparing the
    // path's 4-character tail against the whitelist is sufficient.
    // (std::find replaces the original find_if with a string-copying lambda.)
    if(firmwarePath.size() > 4) {
        const std::string extension = firmwarePath.substr(firmwarePath.size() - 4);
        if(std::find(validExtensions.begin(), validExtensions.end(), extension) != validExtensions.end()) {
            std::cout << "Firmware file confirmed: " << firmwarePath << std::endl << std::endl;
            return true;
        }
    }

    std::cout << "Invalid input file: Please provide a valid firmware file, supported formats: ";
    for(const auto &ext: validExtensions) {
        std::cout << ext << " ";
    }
    std::cout << std::endl;
    return false;
}
// Print an indexed summary (name, serial number, firmware version) of every
// device in the file-scope totalDevices list, so the user can pick one by index.
void printDeviceList() {
    std::cout << "Devices found:" << std::endl;
    std::cout << "--------------------------------------------------------------------------------\n";
    for(uint32_t i = 0; i < totalDevices.size(); ++i) {
        std::cout << "[" << i << "] " << "Device: " << totalDevices[i]->getDeviceInfo()->getName();
        std::cout << " | SN: " << totalDevices[i]->getDeviceInfo()->getSerialNumber();
        std::cout << " | Firmware version: " << totalDevices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
    }
    std::cout << "---------------------------------------------------------------------------------\n";
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
# Example: update the optional depth presets of a connected device.
project(ob_device_optional_depth_presets_update)
add_executable(${PROJECT_NAME} device.optional_depth_presets_update.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the executable finds its DLLs.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,63 @@
# C++ Sample2.device.optional_depth_presets_update
## Overview
This sample demonstrates how to use the SDK to update the optional depth presets of a connected device. It includes functions to list connected devices, select a device, and update its depth presets.
> Note: This sample is only applicable to devices that support presets, such as G330 series devices
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
## code overview
1. Initialize the SDK Context: This is necessary to access the connected devices.
```c++
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
```
2. List Connected Devices.
```c++
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
devices.push_back(deviceList->getDevice(i));
}
```
3. Define a Callback Function for Firmware Update Progress.
You can define a callback function to get the progress of the firmware update. The callback function will be called every time the device updates its progress.
```c++
void presetUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
// show update state and message here
}
```
4. Update the optional depth presets.
After selecting a device, update its presets by calling the updateOptionalDepthPresets function with the specified callback.
```c++
device->updateOptionalDepthPresets(filePaths, count, presetUpdateCallback);
```
> Note: The API supports upgrading multiple presets at once. For G300 series devices, a maximum of 3 presets can be written at a time. The first preset written will be set as the default preset.
### Attention
1. After the optional depth presets update completes, you don't need to restart the device.
2. Don't plug out the device during the presets update process.
## Run Sample
Select the device for presets update and input the path of the presets file. The SDK will start updating the presets, and the progress will be displayed on the console.
### Result
![image](../../docs/resource/device_optional_depth_presets_update.jpg)

View File

@@ -0,0 +1,295 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
#include <fstream>
#include <string>
#include <memory>
#include <algorithm>
#include <cctype>
static bool shouldContinue();
static void presetUpdateCallback(bool firstCall, OBFwUpdateState state, const char *message, uint8_t percent);
static bool getPresetPath(std::vector<std::string> &pathList);
static bool selectDevice(std::shared_ptr<ob::Device> &device);
static void printDeviceList();
static bool isPresetSupported(std::shared_ptr<ob::Device> device);
static void printPreset(std::shared_ptr<ob::Device> device);
std::vector<std::shared_ptr<ob::Device>> devices{};
int main() try {
// Create a context to access the connected devices
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
// Get connected devices from the context
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
if(deviceList->getCount() == 0) {
std::cout << "No device found. Please connect a device first!" << std::endl;
std::cout << "Press any key to exit..." << std::endl;
ob_smpl::waitForKeyPressed();
return 0;
}
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
devices.push_back(deviceList->getDevice(i));
}
std::cout << "Devices found:" << std::endl;
printDeviceList();
while(true) {
bool firstCall = true;
OBFwUpdateState updateState = STAT_START;
std::shared_ptr<ob::Device> device = nullptr;
if(!selectDevice(device)) {
break;
}
printPreset(device);
std::vector<std::string> pathList;
if(!getPresetPath(pathList)) {
break;
}
uint8_t index = 0;
uint8_t count = static_cast<uint8_t>(pathList.size());
char(*filePaths)[OB_PATH_MAX] = new char[count][OB_PATH_MAX];
// copy paths
std::cout << "\nPreset file paths you input: " << std::endl;
for(const auto &path: pathList) {
strcpy(filePaths[index++], path.c_str());
std::cout << "Path " << (uint32_t)index << ": " << path << std::endl;
}
std::cout << std::endl;
std::cout << "Start to update optional depth preset, please wait a moment...\n\n";
try {
device->updateOptionalDepthPresets(filePaths, count, [&updateState, &firstCall](OBFwUpdateState state, const char *message, uint8_t percent) {
updateState = state;
presetUpdateCallback(firstCall, state, message, percent);
firstCall = false;
});
delete[] filePaths;
filePaths = nullptr;
}
catch(ob::Error &e) {
// If the update fails, will throw an exception.
std::cerr << "\nThe update was interrupted! An error occurred! " << std::endl;
std::cerr << "Error message: " << e.what() << "\n" << std::endl;
std::cout << "Press any key to exit." << std::endl;
ob_smpl::waitForKeyPressed();
delete[] filePaths;
filePaths = nullptr;
break;
}
std::cout << std::endl;
if(updateState == STAT_DONE || updateState == STAT_DONE_WITH_DUPLICATES) {
// success
std::cout << "After updating the preset: " << std::endl;
printPreset(device);
}
if(!shouldContinue()) {
break;
}
}
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}
// Ask the user whether to run another round.
// Returns false when the user enters 'Q' or 'q', true for any other input.
static bool shouldContinue() {
    std::cout << "Enter 'Q' or 'q' to quit, or any other key to continue: ";
    std::string answer;
    std::getline(std::cin, answer);
    return answer != "Q" && answer != "q";
}
// Render a three-line, in-place progress display (progress / status / message)
// for the preset update. firstCall suppresses the cursor rewind on the very
// first invocation so earlier console output is not overwritten.
static void presetUpdateCallback(bool firstCall, OBFwUpdateState state, const char *message, uint8_t percent) {
    if(!firstCall) {
        std::cout << "\033[3F";  // Move cursor up 3 lines to redraw in place
    }

    std::cout << "\033[K";  // Clear the current line
    std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;

    // Map the SDK state to a human-readable status string.
    const char *statusText = "Unknown status or error";
    switch(state) {
    case STAT_START:
        statusText = "Starting the update";
        break;
    case STAT_VERIFY_IMAGE:
        statusText = "Verifying image file";
        break;
    case STAT_VERIFY_SUCCESS:
        statusText = "Image file verification success";
        break;
    case STAT_FILE_TRANSFER:
        statusText = "File transfer in progress";
        break;
    case STAT_IN_PROGRESS:
        statusText = "Update in progress";
        break;
    case STAT_DONE:
        statusText = "Update completed";
        break;
    case STAT_DONE_WITH_DUPLICATES:
        statusText = "Update completed, duplicated presets have been ignored";
        break;
    default:
        break;
    }
    std::cout << "\033[K";
    std::cout << "Status : " << statusText << std::endl;

    std::cout << "\033[K";
    std::cout << "Message : " << message << std::endl << std::flush;
}
// Interactively collect up to 10 optional depth preset file paths (.bin).
//  - An empty line finishes input (at least one path is required).
//  - 'Q'/'q' aborts and returns false.
// Entered paths are trimmed of surrounding whitespace and one pair of quotes.
static bool getPresetPath(std::vector<std::string> &pathList) {
    std::cout << "Please input the file paths of the optional depth preset file (.bin):" << std::endl;
    std::cout << "  - Press 'Enter' to finish this input" << std::endl;
    std::cout << "  - Press 'Q' or 'q' to exit the program" << std::endl;

    pathList.clear();
    while(pathList.size() < 10) {
        std::cout << "Enter Path: ";
        std::string input;
        std::getline(std::cin, input);

        if(input == "Q" || input == "q") {
            return false;
        }

        if(input.empty()) {
            if(pathList.empty()) {
                // Require at least one path before accepting the empty line.
                std::cout << "You didn't input any file paths" << std::endl;
                if(!shouldContinue()) {
                    return false;
                }
                continue;
            }
            break;
        }

        // Trim leading and trailing whitespace (the original trimmed only the
        // trailing side despite its comment).
        input.erase(input.begin(), std::find_if(input.begin(), input.end(), [](unsigned char ch) { return !std::isspace(ch); }));
        input.erase(std::find_if(input.rbegin(), input.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), input.end());

        // Strip one pair of surrounding quotes (single or double), matching the
        // path-input helper used by the other examples.
        if(input.size() >= 2 && ((input.front() == '\'' && input.back() == '\'') || (input.front() == '"' && input.back() == '"'))) {
            input = input.substr(1, input.size() - 2);
        }

        if(input.size() > 4 && input.substr(input.size() - 4) == ".bin") {
            pathList.push_back(input);
        }
        else {
            std::cout << "Invalid file format. Please provide a .bin file." << std::endl << std::endl;
        }
    }
    return true;
}
// Prompt until the user picks a preset-capable device by index.
// 'l'/'L' reprints the device list; 'q'/'Q' aborts (returns false).
// On success the chosen device is stored in `device` and true is returned.
static bool selectDevice(std::shared_ptr<ob::Device> &device) {
    device = nullptr;
    std::string input;
    for(;;) {
        std::cout << "Please select a device to update the optional depth preset, enter 'l' to list devices, or enter 'q' to quit: " << std::endl;
        std::cout << "Device index: ";
        std::getline(std::cin, input);

        if(input == "Q" || input == "q") {
            return false;
        }
        if(input == "l" || input == "L") {
            printDeviceList();
            continue;
        }

        // Parse the index; any non-numeric input lands in the catch below.
        uint32_t index = 0;
        try {
            index = static_cast<uint32_t>(std::stoi(input));
        }
        catch(...) {
            std::cout << "Invalid input, please enter a valid index number." << std::endl;
            continue;
        }
        if(index >= static_cast<uint32_t>(devices.size())) {
            std::cout << "Invalid input, please enter a valid index number." << std::endl;
            continue;
        }

        device = devices[index];
        if(!isPresetSupported(device)) {
            std::cerr << "The device you selected does not support preset. Please select another one" << std::endl;
            continue;
        }
        std::cout << std::endl;
        return true;
    }
}
// Print an indexed summary (name, serial number, firmware version) of every
// device in the file-scope devices list, so the user can select one by index.
static void printDeviceList() {
    std::cout << "--------------------------------------------------------------------------------\n";
    for(uint32_t i = 0; i < devices.size(); ++i) {
        std::cout << "[" << i << "] " << "Device: " << devices[i]->getDeviceInfo()->getName();
        std::cout << " | SN: " << devices[i]->getDeviceInfo()->getSerialNumber();
        std::cout << " | Firmware version: " << devices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
    }
    std::cout << "---------------------------------------------------------------------------------\n";
}
// A device supports presets iff it reports a non-empty available-preset list.
static bool isPresetSupported(std::shared_ptr<ob::Device> device) {
    auto presetList = device->getAvailablePresetList();
    return presetList && presetList->getCount() > 0;
}
// Print the device's available preset names, the active preset, and (when the
// device exposes it) the "PresetVer" extension info. Prints an error notice
// and returns early if the device has no preset support.
static void printPreset(std::shared_ptr<ob::Device> device) {
    try {
        auto presets         = device->getAvailablePresetList();
        const uint32_t total = presets->getCount();
        std::cout << "Preset count: " << total << std::endl;
        for(uint32_t idx = 0; idx < total; ++idx) {
            std::cout << " - " << presets->getName(idx) << std::endl;
        }
        std::cout << "Current preset: " << device->getCurrentPresetName() << "\n" << std::endl;
    }
    catch(ob::Error &e) {
        // getAvailablePresetList/getCurrentPresetName throw on unsupported devices.
        std::cerr << "\nThe device doesn't support preset! " << std::endl;
        std::cerr << "error: " << e.what() << "\n" << std::endl;
        return;
    }

    const std::string key = "PresetVer";
    if(device->isExtensionInfoExist(key)) {
        std::cout << "Preset version: " << device->getExtensionInfo(key) << "\n" << std::endl;
    }
    else {
        std::cout << "PresetVer: n/a\n" << std::endl;
    }
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
# Example: play back a recorded ROS bag file.
project(ob_device_playback)
add_executable(${PROJECT_NAME} device_playback.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the executable finds its DLLs.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,63 @@
# C++ Sample2.device.playback
## Overview
This example demonstrates how to use the SDK to read and visualize data from a ROS bag file (.bag) with Orbbec camera streams.
### Knowledge
**Pipeline**: Manages data streams with multi-channel configuration, frame synchronization, and aggregation capabilities.
**PlaybackDevice**: Reads sensor data from a ROS bag file and feeds it into the processing pipeline.
## code overview
1. Initialize Playback Device and Pipeline
Create a playback device from a ROS bag file and configure the processing pipeline:
```cpp
// Create a playback device with a Rosbag file
std::shared_ptr<ob::PlaybackDevice> playback = std::make_shared<ob::PlaybackDevice>(filePath);
// Create a pipeline with the playback device
std::shared_ptr<ob::Pipeline> pipe = std::make_shared<ob::Pipeline>(playback);
```
2. Enable Recorded Streams
Activate all sensor streams available in the bag file:
```cpp
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
auto sensorList = playback->getSensorList();
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
auto sensorType = sensorList->getSensorType(i);
config->enableStream(sensorType);
}
```
3. Start the Pipeline with the Config
```cpp
pipe->start(config);
```
4. Automatically restart playback when reaching file end:
```cpp
playback->setPlaybackStatusChangeCallback([&](OBPlaybackStatus status) {
if(status == OB_PLAYBACK_STOPPED && !exited) {
pipe->stop();
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
pipe->start(config);
}
});
```
## Run Sample
Press the 'Esc' key in the window to exit the program.
### Result
![image](../../docs/resource/device_playbcak.jpg)

View File

@@ -0,0 +1,105 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.h>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <mutex>
#include <thread>
#include <atomic>
bool getRosbagPath(std::string &rosbagPath);
// Play back a recorded ROS bag file: enable every stream recorded in the bag,
// render frames in an OpenCV window, and automatically restart playback each
// time the file reaches its end.
int main(void) try {
    // Set once at shutdown; read by the status-change callback to suppress restarts.
    std::atomic<bool> exited(false);
    std::string filePath;
    // Get valid .bag file path from user input
    getRosbagPath(filePath);
    // Create a playback device with a Rosbag file
    std::shared_ptr<ob::PlaybackDevice> playback = std::make_shared<ob::PlaybackDevice>(filePath);
    // Create a pipeline with the playback device
    std::shared_ptr<ob::Pipeline> pipe = std::make_shared<ob::Pipeline>(playback);
    // Stream config; populated from the bag's sensor list further below.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    std::cout << "duration: " << playback->getDuration() << std::endl;
    // Mutex guards renderFrameSet, which is written by the pipeline callback
    // thread and read by the render loop.
    std::mutex frameMutex;
    std::shared_ptr<const ob::FrameSet> renderFrameSet;
    auto frameCallback = [&](std::shared_ptr<ob::FrameSet> frameSet) {
        std::lock_guard<std::mutex> lock(frameMutex);
        renderFrameSet = frameSet;
    };
    // Set playback status change callback, when the playback stops, start the pipeline again with the same config
    // (runs on an SDK thread; the 1s sleep gives the pipeline time to settle before restart).
    playback->setPlaybackStatusChangeCallback([&](OBPlaybackStatus status) {
        if(status == OB_PLAYBACK_STOPPED && !exited) {
            pipe->stop();
            std::this_thread::sleep_for(std::chrono::milliseconds(1000));
            std::cout << "Replay again" << std::endl;
            pipe->start(config, frameCallback);
        }
    });
    // Enable every stream type that was recorded into the bag.
    auto sensorList = playback->getSensorList();
    for(uint32_t i = 0; i < sensorList->getCount(); i++) {
        auto sensorType = sensorList->getSensorType(i);
        config->enableStream(sensorType);
    }
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ANY_SITUATION);
    // Start the pipeline with the config
    pipe->start(config, frameCallback);
    ob_smpl::CVWindow win("Playback", 1280, 720, ob_smpl::ARRANGE_GRID);
    // Render loop: re-draws the most recent frame set until the window closes.
    // NOTE(review): this loop spins without sleeping when no frame has arrived
    // yet — acceptable for a sample, but worth confirming for production use.
    while(win.run() && !exited) {
        std::lock_guard<std::mutex> lock(frameMutex);
        if(renderFrameSet == nullptr) {
            continue;
        }
        win.pushFramesToView(renderFrameSet);
    }
    // Signal the status callback not to restart, then stop streaming.
    exited = true;
    pipe->stop();
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Prompt repeatedly for a ROS bag (.bag) path until a syntactically valid one
// is entered. Surrounding whitespace and one pair of quotes (single or double)
// are stripped. Always returns true, with the path stored in rosbagPath.
bool getRosbagPath(std::string &rosbagPath) {
    while(true) {
        std::cout << "Please input the path of the Rosbag file (.bag) to playback: \n";
        std::cout << "Path: ";
        std::string input;
        std::getline(std::cin, input);
        // Remove leading whitespace (the original comment promised this, but
        // the code trimmed only the trailing side).
        input.erase(input.begin(), std::find_if(input.begin(), input.end(), [](unsigned char ch) { return !std::isspace(ch); }));
        // Remove trailing whitespace
        input.erase(std::find_if(input.rbegin(), input.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), input.end());
        // Remove one pair of surrounding single or double quotes
        if(input.size() >= 2 && ((input.front() == '\'' && input.back() == '\'') || (input.front() == '"' && input.back() == '"'))) {
            input = input.substr(1, input.size() - 2);
        }
        // Validate .bag extension
        if(input.size() > 4 && input.substr(input.size() - 4) == ".bag") {
            rosbagPath = input;
            std::cout << "Playback file confirmed: " << rosbagPath << "\n\n";
            return true;
        }
        std::cout << "Invalid file format. Please provide a .bag file.\n\n";
    }
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
# Example: record device streams to a ROS bag file (command-line, no GUI).
project(ob_device_record_nogui)
add_executable(${PROJECT_NAME} device_record_nogui.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the executable finds its DLLs.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,50 @@
# C++ Sample2.device.record.nogui
## Overview
This example demonstrates how to use the SDK to record video/sensor stream data from an Orbbec camera and output a ROS bag file (.bag).
It is a command-line (CLI) tool that records streams directly without rendering video frames.
### Knowledge
- **Pipeline**: Manages data streams with capabilities for multi-channel configuration, stream switching, frame aggregation, and synchronization.
- **RecordDevice**: Handles data recording to a ROS bag file, supporting simultaneous capture from multiple sensors and streams.
## code overview
1. Create a Context object and get the specified device.
```cpp
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
auto device = deviceList->getDevice(0);
```
2. Instantiate a RecordDevice to capture all streams from the connected device into a ROS bag file:
```cpp
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
```
3. Configure and start the pipeline with a frame callback for real-time preview:
```cpp
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
std::lock_guard<std::mutex> lock(frameMutex);
// Do something for frameset
});
```
4. Destroy the RecordDevice to flush and save the ROS bag file:
```cpp
recordDevice = nullptr;
```
## Run Sample
Press the 'Esc' key in the window to exit the program.
### Result
![image](../../docs/resource/device_record_nogui.jpg)

View File

@@ -0,0 +1,150 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
#include <iomanip>
#include <mutex>
#include <thread>
#include <atomic>
#include <map>
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
// Record all available streams from the first connected device into a ROS bag
// file, printing per-stream frame rates periodically until the user stops with
// ESC / 'q' / 'Q'. No GUI window is opened.
int main(void) try {
    std::cout << "Please enter the output filename (with .bag extension) and press Enter to start recording: ";
    std::string filePath;
    std::getline(std::cin, filePath);
    // Create a context, for getting devices and sensors
    std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
    // Query device list
    auto deviceList = context->queryDeviceList();
    if(deviceList->getCount() < 1) {
        std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
    }
    // Acquire first available device
    auto device = deviceList->getDevice(0);
    // Create a pipeline with the specified device
    auto pipe = std::make_shared<ob::Pipeline>(device);
    // Sync the device clock with the host so recorded timestamps are aligned.
    // Failure here is non-fatal: log it and keep recording.
    try {
        device->timerSyncWithHost();
    }
    catch(ob::Error &e) {
        std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
                  << std::endl;
    }
    // Create a config and enable all streams
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    auto sensorList = device->getSensorList();
    auto count = sensorList->getCount();
    for(uint32_t i = 0; i < count; i++) {
        auto sensor = sensorList->getSensor(i);
        auto sensorType = sensor->getType();
        auto profileList = sensor->getStreamProfileList(); // Get profileList to create Sensor object in advance
        // Astra Mini devices: skip the IR stream (see IS_ASTRA_MINI_DEVICE PID list).
        if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
            if(sensorType == OB_SENSOR_IR) {
                continue;
            }
        }
        config->enableStream(sensorType);
    }
    // frameMutex guards frameCountMap: incremented on the pipeline callback
    // thread, read/reset on the main loop below.
    std::mutex frameMutex;
    std::map<OBFrameType, uint64_t> frameCountMap;
    pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
        if(frameSet == nullptr) {
            return;
        }
        std::lock_guard<std::mutex> lock(frameMutex);
        auto count = frameSet->getCount();
        for(uint32_t i = 0; i < count; i++) {
            auto frame = frameSet->getFrameByIndex(i);
            if(frame) {
                auto type = frame->getType();
                frameCountMap[type]++;
            }
        }
    });
    // Initialize recording device with output file
    auto startTime = ob_smpl::getNowTimesMs();
    uint32_t waitTime = 1000;  // first FPS report after 1s, then every 2s
    auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
    // operation prompt
    std::cout << "Streams and recorder have started!" << std::endl;
    std::cout << "Press ESC, 'q', or 'Q' to stop recording and exit safely." << std::endl;
    std::cout << "IMPORTANT: Always use ESC/q/Q to stop! Otherwise, the bag file will be corrupted and unplayable." << std::endl << std::endl;
    // Main loop: waitForKeyPressed doubles as the interval sleep; each pass
    // snapshots and resets the frame counters, then prints per-stream FPS.
    do {
        auto key = ob_smpl::waitForKeyPressed(waitTime);
        if(key == ESC_KEY || key == 'q' || key == 'Q') {
            break;
        }
        auto currentTime = ob_smpl::getNowTimesMs();
        if(currentTime > startTime + waitTime) {
            std::map<OBFrameType, uint64_t> tempCountMap;
            uint64_t duration;
            {
                // Copy data
                std::lock_guard<std::mutex> lock(frameMutex);
                // get time again
                currentTime = ob_smpl::getNowTimesMs();
                duration = currentTime - startTime;
                // Only restart the interval once frames have arrived, so the
                // first report covers the warm-up period correctly.
                if(!frameCountMap.empty()) {
                    startTime = currentTime;
                    waitTime = 2000; // Change to 2s for next time
                    tempCountMap = frameCountMap;
                    for(auto &item: frameCountMap) {
                        item.second = 0; // reset count
                    }
                }
            }
            std::string seperate = "";
            if(tempCountMap.empty()) {
                std::cout << "Recording... Current FPS: 0" << std::endl;
            }
            else {
                // FPS = frames counted in the interval / interval length in seconds.
                std::cout << "Recording... Current FPS: ";
                for(const auto &item: tempCountMap) {
                    auto name = ob::TypeHelper::convertOBFrameTypeToString(item.first);
                    float rate = item.second / (duration / 1000.0f);
                    std::cout << std::fixed << std::setprecision(2) << std::showpoint;
                    std::cout << seperate << name << "=" << rate;
                    seperate = ", ";
                }
                std::cout << std::endl;
            }
        }
    } while(true);
    // stop the pipeline
    pipe->stop();
    // Flush and save recording file (RecordDevice finalizes the bag in its destructor)
    recordDevice = nullptr;
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
              << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)
# Example: record device streams to a ROS bag file with an OpenCV preview window.
project(ob_device_record)
add_executable(${PROJECT_NAME} device_record.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    # Debug from the runtime output directory so the executable finds its DLLs.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,49 @@
# C++ Sample2.device.record
## Overview
This example demonstrates how to use the SDK to record video/sensor stream data from an Orbbec camera and output a ROS bag file (.bag).
### Knowledge
- **Pipeline**: Manages data streams with capabilities for multi-channel configuration, stream switching, frame aggregation, and synchronization.
- **RecordDevice**: Handles data recording to a ROS bag file, supporting simultaneous capture from multiple sensors and streams.
## code overview
1. Create a Context object and get the specified device.
```cpp
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
auto device = deviceList->getDevice(0);
```
2. Instantiate a RecordDevice to capture all streams from the connected device into a ROS bag file:
```cpp
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
```
3. Configure and start the pipeline with a frame callback for real-time preview:
```cpp
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
std::lock_guard<std::mutex> lock(frameMutex);
renderFrameSet = frameSet;
});
```
4. Destroy the RecordDevice to flush and save the ROS bag file:
```cpp
recordDevice = nullptr;
```
## Run Sample
Press the 'Esc' key in the window to exit the program.
### Result
![image](../../docs/resource/device_record.jpg)

View File

@@ -0,0 +1,119 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.h>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <mutex>
#include <thread>
#include <atomic>
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
std::atomic<bool> isPaused{false};
void handleKeyPress(ob_smpl::CVWindow &win, std::shared_ptr<ob::RecordDevice> recorder, int key);
// Record all available streams from the first connected device into a ROS bag
// file while previewing frames in an OpenCV window. 'S' toggles pause/resume
// (see handleKeyPress); closing the window (ESC) stops and finalizes the bag.
int main(void) try {
    std::cout << "Please enter the output filename (with .bag extension) and press Enter to start recording: ";
    std::string filePath;
    std::getline(std::cin, filePath);
    // Create a context, for getting devices and sensors
    std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
    // Query device list
    auto deviceList = context->queryDeviceList();
    if(deviceList->getCount() < 1) {
        std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
    }
    // Acquire first available device
    auto device = deviceList->getDevice(0);
    // Create a pipeline with the specified device
    auto pipe = std::make_shared<ob::Pipeline>(device);
    // Sync the device clock with the host so recorded timestamps are aligned.
    // Failure here is non-fatal: log it and keep recording.
    try {
        device->timerSyncWithHost();
    }
    catch(ob::Error &e) {
        std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
                  << std::endl;
    }
    // Create a config and enable all streams
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    auto sensorList = device->getSensorList();
    for(uint32_t i = 0; i < sensorList->getCount(); i++) {
        auto sensorType = sensorList->getSensorType(i);
        // Astra Mini devices: skip the IR stream (see IS_ASTRA_MINI_DEVICE PID list).
        if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
            if(sensorType == OB_SENSOR_IR) {
                continue;
            }
        }
        config->enableStream(sensorType);
    }
    // frameMutex guards renderFrameSet: written on the pipeline callback
    // thread, read by the render loop below.
    std::mutex frameMutex;
    std::shared_ptr<const ob::FrameSet> renderFrameSet;
    pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
        std::lock_guard<std::mutex> lock(frameMutex);
        renderFrameSet = frameSet;
    });
    // Initialize recording device with output file
    auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
    std::cout << "Streams and recorder have started!" << std::endl;
    std::cout << "Press ESC to stop recording and exit safely." << std::endl;
    std::cout << "IMPORTANT: Always use ESC to stop! Otherwise, the bag file will be corrupted and unplayable." << std::endl << std::endl;
    ob_smpl::CVWindow win("Record", 1280, 720, ob_smpl::ARRANGE_GRID);
    win.setKeyPrompt("Press 'S' to pause/resume recording.");
    // set the callback function for the window to handle key press events
    // (recordDevice is captured by value, keeping the recorder alive for the callback)
    win.setKeyPressedCallback([&win, recordDevice](int key) { handleKeyPress(win, recordDevice, key); });
    // Render loop: re-draws the most recent frame set until the window closes.
    while(win.run()) {
        std::lock_guard<std::mutex> lock(frameMutex);
        if(renderFrameSet == nullptr) {
            continue;
        }
        win.pushFramesToView(renderFrameSet);
    }
    pipe->stop();
    // Flush and save recording file (RecordDevice finalizes the bag in its destructor)
    recordDevice = nullptr;
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
              << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Key handler for the record window: 'S'/'s' toggles the recorder between
// paused and running, logging the new state to the window overlay.
void handleKeyPress(ob_smpl::CVWindow& win, std::shared_ptr<ob::RecordDevice> recorder, int key) {
    if(key != 'S' && key != 's') {
        return;  // only the pause/resume key is handled here
    }
    if(isPaused.load()) {
        recorder->resume();
        isPaused.store(false);
        win.addLog("[RESUMED] Recording resumed");
    }
    else {
        recorder->pause();
        isPaused.store(true);
        win.addLog("[PAUSED] Recording paused");
    }
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)

# Example target: common device usages (device info, properties, streaming).
project(ob_common_usages)

add_executable(${PROJECT_NAME} common_usages.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group this target under "examples" in IDE solution views.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the runtime output dir so the SDK DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,77 @@
# C++ Sample3.advanced.common_usages
## Overview
Use the SDK interface to view camera related information, set related parameters, and display the video stream
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
## Code overview
1. Register device callback
```cpp
// Create ob::Context.
ctx = std::make_shared<ob::Context>();
ctx->setDeviceChangedCallback( []( std::shared_ptr< ob::DeviceList > removedList, std::shared_ptr< ob::DeviceList > addedList ) {
DeviceDisconnectCallback( removedList );
DeviceConnectCallback( addedList );
} );
```
2. Get the device list and print out the information, then use pipeline to start the video stream.
```cpp
// Query the list of connected devices.
std::shared_ptr<ob::DeviceList> devices = ctx->queryDeviceList();
// Handle connected devices and open one device
handleDeviceConnected(devices);
```
3. Block thread waiting for device connection
```cpp
while(!device) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
```
4. Execute corresponding settings according to the commands entered by the user. The following is an introduction to some setting functions
```cpp
//Get the basic parameters of the camera, including connection type, device model, etc.
void getDeviceInformation()
//Get camera sensor intrinsics, distortion and pixels
void getCameraParams()
//Laser switch function
void switchLaser()
//Laser safety protection and ranging function switch
void switchLDP()
//Get the laser safety protection and ranging function status
void getLDPStatus()
//Color auto-exposure switch
void switchColorAE()
//Color exposure value adjustment
void setColorExposureValue(bool increase)
//Color gain value adjustment
void setColorGainValue(bool increase)
//Depth exposure value adjustment
void setDepthExposureValue(bool increase)
//Depth gain value adjustment
void setDepthGainValue(bool increase)
```
## Run Sample
Press the button according to the interface prompts
### Result
![image](../../docs/resource/common1.jpg)
![image](../../docs/resource/common2.jpg)

View File

@@ -0,0 +1,921 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"

#include <algorithm>
#include <chrono>
#include <iomanip>
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
// Product-ID lookup table for the Gemini 330 series (and compatible models);
// consulted by isGemini330Series() to choose device-specific property IDs
// (e.g. laser control, see switchLaser()).
const std::map<std::string, int> gemini_330_list = { { "Gemini 335", 0x0800 }, { "Gemini 335L", 0x0804 }, { "Gemini 336", 0x0803 }, { "Gemini 336L", 0x0807 },
                                                     { "Gemini 330", 0x0801 }, { "Gemini 330L", 0x0805 }, { "DabaiA", 0x0A12 }, { "DabaiAL", 0x0A13 },
                                                     { "Gemini 345", 0x0812 }, { "Gemini 345Lg", 0x0813 }, { "CAM-5330", 0x0816 }, { "CAM-5530", 0x0817 },{"Gemini 338",0x0818} };
// Human-readable labels used when printing the stream profile chosen for
// each sensor type (see startStream()).
const std::map<OBSensorType, std::string> sensorTypeToStringMap = { { OB_SENSOR_COLOR, "Color profile: " },
                                                                    { OB_SENSOR_DEPTH, "Depth profile: " },
                                                                    { OB_SENSOR_IR, "IR profile: " },
                                                                    { OB_SENSOR_IR_LEFT, "Left IR profile: " },
                                                                    { OB_SENSOR_IR_RIGHT, "Right IR profile: " } };
// Return true when the given USB product id belongs to one of the devices
// listed in gemini_330_list (Gemini 330 series and compatible models).
// @param pid USB product id as reported by ob::DeviceInfo::getPid().
bool isGemini330Series(int pid) {
    // std::any_of replaces the original flag-variable loop: same linear scan
    // over the map values, expressed as a standard algorithm.
    return std::any_of(gemini_330_list.begin(), gemini_330_list.end(),
                       [pid](const std::pair<const std::string, int> &entry) { return entry.second == pid; });
}
// Global application state shared between the render loop, the console-input
// thread and SDK callbacks. device/pipeline are created and reset while
// holding deviceMutex (see handleDeviceConnected/handleDeviceDisconnected).
std::shared_ptr<ob_smpl::CVWindow> win      = nullptr;  // render window, created first in main()
std::shared_ptr<ob::Context>       ctx      = nullptr;  // SDK context, owns device discovery
std::shared_ptr<ob::Device>        device   = nullptr;  // currently opened device (may be null)
std::shared_ptr<ob::Pipeline>      pipeline = nullptr;  // pipeline bound to `device`

std::recursive_mutex deviceMutex;  // guards device/pipeline mutation across threads

bool irRightMirrorSupport = false;  // set once in main() after a device is opened

// Default video profile per sensor type, filled by startStream(); read by
// getCameraParams().
std::map<OBSensorType, std::shared_ptr<ob::VideoStreamProfile>> profilesMap;

std::shared_ptr<ob::VideoStreamProfile> depthProfile = nullptr;
std::shared_ptr<ob::VideoStreamProfile> irProfile    = nullptr;

std::shared_ptr<ob::Filter> align = nullptr;  // "Align" filter created in main(), used by fileterAlign()
// Forward declarations (definitions below).
void handleDeviceConnected(std::shared_ptr<ob::DeviceList> connectList);        // hot-plug: open the first device
void handleDeviceDisconnected(std::shared_ptr<ob::DeviceList> disconnectList);  // hot-plug: release the current device
void switchDepthWorkMode();
void turnOffHwD2d();
void setDepthUnit();
void setDepthSoftFilter();
void printUsage();                                            // print the interactive command menu
void commandProcess(std::string cmd);                         // dispatch one console command
void handleFrameset(std::shared_ptr<ob::FrameSet> frameset);  // pipeline frame callback
void startStream();
// Entry point: creates the render window and SDK context, registers hot-plug
// callbacks, waits for a device, then services a console-command thread while
// the main thread drives the render loop until the window is closed.
int main(void) try {
    // create window for render
    win = std::make_shared<ob_smpl::CVWindow>("CommonUsages", 1280, 720, ob_smpl::ARRANGE_GRID);

    // Set log severity. disable log, please set OB_LOG_SEVERITY_OFF.
    ob::Context::setLoggerSeverity(OB_LOG_SEVERITY_ERROR);

    // Create ob:Context.
    ctx = std::make_shared<ob::Context>();

    // create align filter
    align = ob::FilterFactory::createFilter("Align");

    // Register device callback
    ctx->setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> addedList) {
        handleDeviceDisconnected(removedList);
        handleDeviceConnected(addedList);
    });

    // Query the list of connected devices.
    std::shared_ptr<ob::DeviceList> devices = ctx->queryDeviceList();

    // Handle connected devices(and open one device)
    handleDeviceConnected(devices);

    if(!device) {
        std::cout << "Waiting for connect device...";
        // Poll until the hot-plug callback above opens a device.
        while(!device) {
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
    }

    irRightMirrorSupport = device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_READ_WRITE);

    printUsage();

    // Console reader thread: translates typed commands into SDK calls;
    // "quit"/"q" closes the render window, which ends the main loop below.
    auto inputWatchThread = std::thread([]{
        while(true) {
            std::string cmd;
            std::cout << "\nInput command: ";
            std::getline(std::cin, cmd);
            if(cmd == "quit" || cmd == "q") {
                win->close();
                break;
            }
            else {
                commandProcess(cmd);
            }
        }
    });
    inputWatchThread.detach();

    // Render loop: frames are pushed from the pipeline callback
    // (handleFrameset); this loop only keeps the window alive.
    while(win->run()) {
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }

    if(pipeline) {
        pipeline->stop();
    }

    // destruct all global variables here before exiting main
    irProfile.reset();
    depthProfile.reset();
    profilesMap.clear();
    pipeline.reset();
    device.reset();
    devices.reset();
    align.reset();
    ctx.reset();
    win.reset();

    return 0;
}
catch(ob::Error &e) {
    // Function-try-block handler: report SDK errors and exit with failure.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Device connection callback: prints the serial number of every newly seen
// device and, when no device is open yet, opens the first one and starts
// streaming. Thread-safe via deviceMutex.
void handleDeviceConnected(std::shared_ptr<ob::DeviceList> devices) {
    const auto connectedCount = devices->getCount();
    if(connectedCount == 0) {
        return;  // nothing connected, nothing to do
    }

    for(uint32_t idx = 0; idx < connectedCount; idx++) {
        std::cout << "Found device connected, SN: " << devices->getSerialNumber(idx) << std::endl;
    }

    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        return;  // a device is already open; keep using it
    }

    // open default device (device index=0)
    device   = devices->getDevice(0);
    pipeline = std::make_shared<ob::Pipeline>(device);
    std::cout << "Open device success, SN: " << devices->getSerialNumber(0) << std::endl;
    startStream();
}
// Device disconnect callback: if the device that went away is the one we
// have open, release it (and its pipeline) so a reconnect can reopen it.
void handleDeviceDisconnected(std::shared_ptr<ob::DeviceList> disconnectList) {
    // Snapshot the serial number of the currently opened device, if any.
    std::string currentDevSn = "";
    {
        std::unique_lock<std::recursive_mutex> lk(deviceMutex);
        if(device) {
            currentDevSn = device->getDeviceInfo()->getSerialNumber();
        }
    }

    const auto count = disconnectList->getCount();
    for(uint32_t idx = 0; idx < count; idx++) {
        const std::string sn = disconnectList->getSerialNumber(idx);
        std::cout << "Device disconnected, SN: " << sn << std::endl;
        if(currentDevSn == sn) {
            device.reset();    // release device
            pipeline.reset();  // release pipeline
            std::cout << "Current device disconnected" << std::endl;
        }
    }
}
// List the camera's depth work modes, switch to the default (index 0) mode,
// then re-open the device and pipeline — the SDK requires re-opening after a
// work-mode switch, so the global device/pipeline objects are recreated here.
void switchDepthWorkMode() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    // Check whether the camera depth working mode is supported
    if(!device->isPropertySupported(OB_STRUCT_CURRENT_DEPTH_ALG_MODE, OB_PERMISSION_READ_WRITE)) {
        return;
    }
    // Query the current camera depth mode
    auto curDepthMode = device->getCurrentDepthWorkMode();
    std::cout << "current depth work mode: " << curDepthMode.name << std::endl;
    // Get the list of camera depth modes
    auto depthModeList = device->getDepthWorkModeList();
    std::cout << "depthModeList size: " << depthModeList->getCount() << std::endl;
    for(uint32_t i = 0; i < depthModeList->getCount(); i++) {
        std::cout << "depthModeList[" << i << "]: " << (*depthModeList)[i].name << std::endl;
    }
    // switch depth work mode to default (index=0) mode, user can switch to other mode like this.
    device->switchDepthWorkMode((*depthModeList)[0].name);
    std::cout << "switch depth work mode to:" << (*depthModeList)[0].name << std::endl;

    // It is required to reopen the device and pipeline after switching depth work mode.
    auto deviceInfo = device->getDeviceInfo();
    device.reset();
    pipeline.reset();
    auto deviceList = ctx->queryDeviceList();
    device   = deviceList->getDeviceBySN(deviceInfo->getSerialNumber());  // using serial number to create device
    pipeline = std::make_shared<ob::Pipeline>(device);
}
// Disable the hardware disparity-to-depth converter so the software D2D
// path is used instead. Silently skipped on models without the property.
void turnOffHwD2d() {
    try {
        // Some models dose not support this feature
        if(!device->isPropertySupported(OB_PROP_DISPARITY_TO_DEPTH_BOOL, OB_PERMISSION_WRITE)) {
            return;
        }
        device->setBoolProperty(OB_PROP_DISPARITY_TO_DEPTH_BOOL, false);
        std::cout << "turn off hardware disparity to depth converter (Turn on Software D2D)" << std::endl;
    }
    catch(ob::Error &e) {
        std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
        exit(EXIT_FAILURE);
    }
}
// Set the depth measurement precision level to 1mm, when the device
// supports switching precision levels; otherwise report the limitation.
void setDepthUnit() {
    try {
        const bool writable = device->isPropertySupported(OB_PROP_DEPTH_PRECISION_LEVEL_INT, OB_PERMISSION_WRITE);
        if(!writable) {
            std::cerr << "Depth precision level switch is not supported." << std::endl;
            return;
        }
        device->setIntProperty(OB_PROP_DEPTH_PRECISION_LEVEL_INT, OB_PRECISION_1MM);
        std::cout << "set depth unit to 1mm" << std::endl;
    }
    catch(ob::Error &e) {
        std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
        exit(EXIT_FAILURE);
    }
}
// Enable the depth noise-removal ("soft") filter on devices that expose it;
// devices without the property are skipped silently.
void setDepthSoftFilter() {
    try {
        if(!device->isPropertySupported(OB_PROP_DEPTH_NOISE_REMOVAL_FILTER_BOOL, OB_PERMISSION_WRITE)) {
            return;
        }
        device->setBoolProperty(OB_PROP_DEPTH_NOISE_REMOVAL_FILTER_BOOL, true);
        std::cout << "turn on depth soft filter" << std::endl;
    }
    catch(ob::Error &e) {
        std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
        exit(EXIT_FAILURE);
    }
}
// Enumerate the device's sensors, enable the default video profile of every
// color/depth/IR sensor in a fresh Config, record the chosen profiles in
// profilesMap (read later by getCameraParams), then start the pipeline with
// handleFrameset as the frame callback.
void startStream() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    device          = pipeline->getDevice();
    auto sensorList = device->getSensorList();

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    for(uint32_t index = 0; index < sensorList->getCount(); index++) {
        // Query all supported infrared sensor type and enable the infrared stream.
        // For dual infrared device, enable the left and right infrared streams.
        // For single infrared device, enable the infrared stream.
        OBSensorType sensorType = sensorList->getSensorType(index);
        if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT || sensorType == OB_SENSOR_COLOR
           || sensorType == OB_SENSOR_DEPTH) {
            try {
                auto sensor      = sensorList->getSensor(sensorType);
                auto profileList = sensor->getStreamProfileList();
                if(profileList->getCount() > 0) {
                    // get default (index=0) stream profile
                    auto defProfile   = profileList->getProfile(OB_PROFILE_DEFAULT);
                    auto defVsProfile = defProfile->as<ob::VideoStreamProfile>();
                    profilesMap.insert(std::make_pair(sensorType, defVsProfile));
                    // Print the chosen profile with a friendly label when one exists.
                    auto it = sensorTypeToStringMap.find(sensorType);
                    if(it != sensorTypeToStringMap.end()) {
                        std::cout << it->second << defVsProfile->getWidth() << "x" << defVsProfile->getHeight() << " @ " << defVsProfile->getFps() << "fps"
                                  << std::endl;
                    }
                    else {
                        std::cout << "unknown profile: " << defVsProfile->getWidth() << "x" << defVsProfile->getHeight() << " @ " << defVsProfile->getFps()
                                  << "fps" << std::endl;
                    }
                    // enable color stream.
                    config->enableStream(defVsProfile);
                }
            }
            catch(ob::Error &e) {
                std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                          << std::endl;
                exit(EXIT_FAILURE);
            }
        }
    }

    // start pipeline
    pipeline->start(config, handleFrameset);
    std::cout << "Stream started!" << std::endl;
}
// Run the frameset through the global align filter; returns nullptr when
// the filter produces no output frame.
std::shared_ptr<ob::FrameSet> fileterAlign(std::shared_ptr<ob::FrameSet> frameset) {
    auto processed = align->process(frameset);
    if(processed == nullptr) {
        return nullptr;
    }
    return processed->as<ob::FrameSet>();
}
// Pipeline frame callback: discard incomplete framesets, align the rest and
// hand them to the render window.
void handleFrameset(std::shared_ptr<ob::FrameSet> frameset) {
    // If no depthframe is present, it is discarded. Checking before running
    // the align filter also avoids processing framesets we would drop anyway.
    if(frameset->getCount() < 3) {
        return;
    }
    auto alignFrameSet = fileterAlign(frameset);
    // fileterAlign explicitly returns nullptr when the filter yields nothing;
    // guard so we never push a null frameset to the window.
    if(!alignFrameSet) {
        return;
    }
    win->pushFramesToView(alignFrameSet);
}
// Print basic information about the opened device: name, PID/VID/UID,
// firmware version, serial number and connection type. No-op when no device
// is open.
void getDeviceInformation() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        auto info = device->getDeviceInfo();
        // Get the name of the device
        std::cout << "-Device name: " << info->getName() << std::endl;
        // Get the pid, vid, uid of the device (printed as zero-padded hex)
        std::cout << "-Device pid: 0x" << std::hex << std::setw(4) << std::setfill('0') << info->getPid() << " vid: 0x" << std::hex << std::setw(4)
                  << std::setfill('0') << info->getVid() << " uid: " << info->getUid() << std::dec << std::endl;
        // By getting the firmware version number of the device
        auto fwVer = info->getFirmwareVersion();
        std::cout << "-Firmware version: " << fwVer << std::endl;
        // By getting the serial number of the device
        auto sn = info->getSerialNumber();
        std::cout << "-Serial number: " << sn << std::endl;
        // By getting the connection type of the device
        auto connectType = info->getConnectionType();
        std::cout << "-ConnectionType: " << connectType << std::endl;
    }
}
// Print the intrinsics and distortion coefficients of every stream profile
// collected in profilesMap (populated by startStream). No-op until the
// pipeline exists.
void getCameraParams() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(pipeline) {
        try {
            for(const auto &item: profilesMap) {
                auto profile    = item.second;
                auto type       = item.first;
                auto intrinsics = profile->getIntrinsic();
                auto distortion = profile->getDistortion();
                auto typeString = ob::TypeHelper::convertOBSensorTypeToString(type);
                std::cout << typeString << " intrinsics: "
                          << "fx:" << intrinsics.fx << ", fy: " << intrinsics.fy << ", cx: " << intrinsics.cx << ", cy: " << intrinsics.cy
                          << " ,width: " << intrinsics.width << ", height: " << intrinsics.height << std::endl;
                std::cout << typeString << " distortion: "
                          << "k1:" << distortion.k1 << ", k2:" << distortion.k2 << ", k3:" << distortion.k3 << ", k4:" << distortion.k4
                          << ", k5:" << distortion.k5 << ", k6:" << distortion.k6 << ", p1:" << distortion.p1 << ", p2:" << distortion.p2 << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle the laser emitter on/off. Gemini 330 series devices expose laser
// control through OB_PROP_LASER_CONTROL_INT instead of OB_PROP_LASER_BOOL.
// NOTE(review): OB_PROP_LASER_CONTROL_INT is an int property but is read and
// written here via get/setBoolProperty — confirm the SDK maps the bool
// accessors onto int-typed properties for this control.
void switchLaser() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            auto         pid        = device->getDeviceInfo()->getPid();
            OBPropertyID propertyId = OB_PROP_LASER_BOOL;
            if(isGemini330Series(pid)) {
                propertyId = OB_PROP_LASER_CONTROL_INT;
            }
            if(device->isPropertySupported(propertyId, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(propertyId);
                if(device->isPropertySupported(propertyId, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(propertyId, !value);
                    if(!value) {
                        std::cout << "laser turn on!" << std::endl;
                    }
                    else {
                        std::cout << "laser turn off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "Laser switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle the LDP (laser protection / ranging) switch on/off. On some models
// the depth stream must be restarted afterwards for the change to take
// effect, which the printed hint tells the user.
void switchLDP() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_LDP_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_LDP_BOOL);
                if(device->isPropertySupported(OB_PROP_LDP_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_LDP_BOOL, !value);
                    if(!value) {
                        std::cout << "LDP turn on!" << std::endl;
                    }
                    else {
                        std::cout << "LDP turn off!" << std::endl;
                    }
                    // Fixed typo in the user-facing hint: "on/of" -> "on/off".
                    std::cout << "Attention: For some models, it is require to restart depth stream after turn on/off LDP. Input \"stream\" command "
                                 "to restart stream!"
                              << std::endl;
                }
            }
            else {
                std::cerr << "LDP switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Read and print the current LDP (laser protection) status. No-op when no
// device is open; reports when the device lacks the property.
void getLDPStatus() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(!device) {
        return;
    }
    try {
        if(!device->isPropertySupported(OB_PROP_LDP_STATUS_BOOL, OB_PERMISSION_READ)) {
            std::cerr << "LDP status property is not supported." << std::endl;
            return;
        }
        const bool status = device->getBoolProperty(OB_PROP_LDP_STATUS_BOOL);
        std::cout << "LDP status:" << status << std::endl;
    }
    catch(ob::Error &e) {
        std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
        exit(EXIT_FAILURE);
    }
}
// Toggle depth (and IR) auto-exposure on/off and report the new state.
void switchDepthAE() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL);
                if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, !value);
                    if(!value) {
                        std::cout << "Depth Auto-Exposure on!" << std::endl;
                    }
                    else {
                        std::cout << "Depth Auto-Exposure off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "Depth Auto-Exposure switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle color auto-exposure on/off and report the new state.
void switchColorAE() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL);
                if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, !value);
                    if(!value) {
                        std::cout << "Color Auto-Exposure on!" << std::endl;
                    }
                    else {
                        std::cout << "Color Auto-Exposure off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "Color Auto-Exposure switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle depth-image mirroring on/off and report the new state.
void switchDepthMirror() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_DEPTH_MIRROR_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_DEPTH_MIRROR_BOOL);
                if(device->isPropertySupported(OB_PROP_DEPTH_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_DEPTH_MIRROR_BOOL, !value);
                    if(!value) {
                        std::cout << "Note: Currently with the D2C(SW) turned on, Depth Mirror will not work!" << std::endl;
                        std::cout << "Depth mirror on!" << std::endl;
                    }
                    else {
                        std::cout << "Depth mirror off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "Depth mirror switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle IR-image mirroring on/off and report the new state.
void switchIRMirror() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_IR_MIRROR_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_IR_MIRROR_BOOL);
                if(device->isPropertySupported(OB_PROP_IR_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_IR_MIRROR_BOOL, !value);
                    if(!value) {
                        std::cout << "IR mirror on!" << std::endl;
                    }
                    else {
                        std::cout << "IR mirror off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "IR mirror switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle right-IR-image mirroring on/off and report the new state.
void switchIRRightMirror() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_IR_RIGHT_MIRROR_BOOL);
                if(device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_IR_RIGHT_MIRROR_BOOL, !value);
                    if(!value) {
                        std::cout << "IR Right mirror on!" << std::endl;
                    }
                    else {
                        std::cout << "IR Right mirror off!" << std::endl;
                    }
                }
            }
            else {
                // Fixed copy-paste error: this message previously said
                // "IR mirror" although this function handles the right IR.
                std::cerr << "IR right mirror switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Toggle color-image mirroring on/off and report the new state.
void switchColorMirror() {
    std::unique_lock<std::recursive_mutex> lk(deviceMutex);
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_COLOR_MIRROR_BOOL, OB_PERMISSION_READ)) {
                // value holds the state before toggling
                bool value = device->getBoolProperty(OB_PROP_COLOR_MIRROR_BOOL);
                if(device->isPropertySupported(OB_PROP_COLOR_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
                    device->setBoolProperty(OB_PROP_COLOR_MIRROR_BOOL, !value);
                    if(!value) {
                        std::cout << "Color mirror on!" << std::endl;
                    }
                    else {
                        std::cout << "Color mirror off!" << std::endl;
                    }
                }
            }
            else {
                std::cerr << "Color mirror switch property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Step the depth (and IR) manual exposure up or down by 1/10 of the property
// range, clamped to [min, max] and snapped to the property's step size.
// Depth auto-exposure is turned off first so the manual value takes effect.
// @param increase true to raise exposure, false to lower it.
void setDepthExposureValue(bool increase) {
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ_WRITE)) {
                bool value = device->getBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL);
                if(value) {
                    device->setBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, false);
                    std::cout << "Depth AE close." << std::endl;
                }
            }
            if(device->isPropertySupported(OB_PROP_DEPTH_EXPOSURE_INT, OB_PERMISSION_READ)) {
                // get the value range
                OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_DEPTH_EXPOSURE_INT);
                std::cout << "Depth current exposure max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
                int value = device->getIntProperty(OB_PROP_DEPTH_EXPOSURE_INT);
                std::cout << "Depth current exposure:" << value << std::endl;
                if(device->isPropertySupported(OB_PROP_DEPTH_EXPOSURE_INT, OB_PERMISSION_WRITE)) {
                    // Step by 10% of the range, clamping at the bounds.
                    if(increase) {
                        value += (valueRange.max - valueRange.min) / 10;
                        if(value > valueRange.max) {
                            value = valueRange.max;
                        }
                    }
                    else {
                        value -= (valueRange.max - valueRange.min) / 10;
                        if(value < valueRange.min) {
                            value = valueRange.min;
                        }
                    }
                    // Ensure that the value meet the step value requirements
                    value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
                    std::cout << "Set depth exposure:" << value << std::endl;
                    device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, value);
                }
                else {
                    std::cerr << "Depth exposure set property is not supported." << std::endl;
                }
            }
            else {
                std::cerr << "Depth exposure get property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Step the color manual exposure up or down by 1/10 of the property range,
// clamped to [min, max] and snapped to the property's step size. Color
// auto-exposure is turned off first so the manual value takes effect.
// @param increase true to raise exposure, false to lower it.
void setColorExposureValue(bool increase) {
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ_WRITE)) {
                bool value = device->getBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL);
                if(value) {
                    device->setBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, false);
                    std::cout << "Color AE close." << std::endl;
                }
            }
            if(device->isPropertySupported(OB_PROP_COLOR_EXPOSURE_INT, OB_PERMISSION_READ)) {
                // get the value range
                OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_COLOR_EXPOSURE_INT);
                std::cout << "Color current exposure max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
                int value = device->getIntProperty(OB_PROP_COLOR_EXPOSURE_INT);
                std::cout << "Color current exposure:" << value << std::endl;
                if(device->isPropertySupported(OB_PROP_COLOR_EXPOSURE_INT, OB_PERMISSION_WRITE)) {
                    // Step by 10% of the range, clamping at the bounds.
                    if(increase) {
                        value += (valueRange.max - valueRange.min) / 10;
                        if(value > valueRange.max) {
                            value = valueRange.max;
                        }
                    }
                    else {
                        value -= (valueRange.max - valueRange.min) / 10;
                        if(value < valueRange.min) {
                            value = valueRange.min;
                        }
                    }
                    // Ensure that the value meet the step value requirements
                    value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
                    std::cout << "Set color exposure:" << value << std::endl;
                    device->setIntProperty(OB_PROP_COLOR_EXPOSURE_INT, value);
                }
                else {
                    std::cerr << "Color exposure set property is not supported." << std::endl;
                }
            }
            else {
                std::cerr << "Color exposure get property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Step the depth (and IR) gain up or down by 1/10 of the property range,
// clamped to [min, max] and snapped to the property's step size.
// @param increase true to raise gain, false to lower it.
void setDepthGainValue(bool increase) {
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_DEPTH_GAIN_INT, OB_PERMISSION_READ)) {
                // get the value range
                OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_DEPTH_GAIN_INT);
                std::cout << "Depth current gain max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
                int value = device->getIntProperty(OB_PROP_DEPTH_GAIN_INT);
                std::cout << "Depth current gain:" << value << std::endl;
                if(device->isPropertySupported(OB_PROP_DEPTH_GAIN_INT, OB_PERMISSION_WRITE)) {
                    // Step by 10% of the range, clamping at the bounds.
                    if(increase) {
                        value += (valueRange.max - valueRange.min) / 10;
                        if(value > valueRange.max) {
                            value = valueRange.max;
                        }
                    }
                    else {
                        value -= (valueRange.max - valueRange.min) / 10;
                        if(value < valueRange.min) {
                            value = valueRange.min;
                        }
                    }
                    // Ensure that the value meet the step value requirements
                    value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
                    std::cout << "Set depth gain:" << value << std::endl;
                    device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, value);
                }
                else {
                    std::cerr << "Depth gain set property is not supported." << std::endl;
                }
            }
            else {
                std::cerr << "Depth gain get property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Step the color gain up or down by 1/10 of the property range, clamped to
// [min, max] and snapped to the property's step size.
// @param increase true to raise gain, false to lower it.
void setColorGainValue(bool increase) {
    if(device) {
        try {
            if(device->isPropertySupported(OB_PROP_COLOR_GAIN_INT, OB_PERMISSION_READ)) {
                // get the value range
                OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_COLOR_GAIN_INT);
                std::cout << "Color current gain max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
                int value = device->getIntProperty(OB_PROP_COLOR_GAIN_INT);
                std::cout << "Color current gain:" << value << std::endl;
                if(device->isPropertySupported(OB_PROP_COLOR_GAIN_INT, OB_PERMISSION_WRITE)) {
                    // Step by 10% of the range, clamping at the bounds.
                    if(increase) {
                        value += (valueRange.max - valueRange.min) / 10;
                        if(value > valueRange.max) {
                            value = valueRange.max;
                        }
                    }
                    else {
                        value -= (valueRange.max - valueRange.min) / 10;
                        if(value < valueRange.min) {
                            value = valueRange.min;
                        }
                    }
                    // Ensure that the value meet the step value requirements
                    value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
                    std::cout << "Set color gain:" << value << std::endl;
                    device->setIntProperty(OB_PROP_COLOR_GAIN_INT, value);
                }
                else {
                    std::cerr << "Color gain set property is not supported." << std::endl;
                }
            }
            else {
                std::cerr << "Color gain get property is not supported." << std::endl;
            }
        }
        catch(ob::Error &e) {
            std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
                      << std::endl;
            exit(EXIT_FAILURE);
        }
    }
}
// Print the interactive command menu to stdout. The IR-right-mirror entry is
// only shown when the opened device supports the property (see main()).
void printUsage() {
    std::cout << "Support commands:" << std::endl;
    std::cout << "    info / i - get device information" << std::endl;
    std::cout << "    param / p - get camera parameter" << std::endl;
    std::cout << "    laser / l - on/off laser" << std::endl;
    std::cout << "    ldp / d - on/off LDP" << std::endl;
    std::cout << "    ldp status / ds - get LDP status" << std::endl;
    std::cout << "    color ae / ca - on/off Color auto exposure" << std::endl;
    std::cout << "    inc color value / cei - increase Color exposure value" << std::endl;
    std::cout << "    dec color value / ced - decrease Color exposure value" << std::endl;
    std::cout << "    inc color gain / cgi - increase Color gain value" << std::endl;
    std::cout << "    dec color gain / cgd - decrease Color gain value" << std::endl;
    std::cout << "    color mirror / cm - on/off color mirror" << std::endl;
    std::cout << "    depth ae / da - on/off Depth/IR auto exposure" << std::endl;
    std::cout << "    depth mirror / dm - on/off Depth mirror" << std::endl;
    std::cout << "    inc depth value / dei - increase Depth/IR exposure value" << std::endl;
    std::cout << "    dec depth value / ded - decrease Depth/IR exposure value" << std::endl;
    std::cout << "    inc depth gain / dgi - increase Depth/IR gain value" << std::endl;
    std::cout << "    dec depth gain / dgd - decrease Depth/IR gain value" << std::endl;
    std::cout << "    ir mirror / im - on/off Ir mirror" << std::endl;
    if(irRightMirrorSupport) {
        std::cout << "    ir right mirror / irm - on/off Ir right mirror" << std::endl;
    }
    std::cout << "--------------------------------" << std::endl;
    std::cout << "    help / ? - print usage" << std::endl;
    // Fixed missing space in the user-facing string: "q- quit" -> "q - quit".
    std::cout << "    quit / q - quit application" << std::endl;
}
// Dispatch a console command (long form or short alias) to the matching
// device-control action. Unknown input prints a hint to run "help".
void commandProcess(std::string cmd) {
    if(cmd == "info" || cmd == "i") {
        getDeviceInformation();
    }
    else if(cmd == "param" || cmd == "p") {
        getCameraParams();
    }
    else if(cmd == "laser" || cmd == "l") {
        switchLaser();
    }
    else if(cmd == "ldp" || cmd == "d") {
        switchLDP();
    }
    else if(cmd == "ldp status" || cmd == "ds") {
        getLDPStatus();
    }
    else if(cmd == "color ae" || cmd == "ca") {
        switchColorAE();
    }
    else if(cmd == "inc color value" || cmd == "cei") {
        setColorExposureValue(true);
    }
    else if(cmd == "dec color value" || cmd == "ced") {
        setColorExposureValue(false);
    }
    else if(cmd == "inc color gain" || cmd == "cgi") {
        setColorGainValue(true);
    }
    else if(cmd == "dec color gain" || cmd == "cgd") {
        setColorGainValue(false);
    }
    else if(cmd == "inc depth value" || cmd == "dei") {
        setDepthExposureValue(true);
    }
    else if(cmd == "dec depth value" || cmd == "ded") {
        setDepthExposureValue(false);
    }
    else if(cmd == "inc depth gain" || cmd == "dgi") {
        setDepthGainValue(true);
    }
    else if(cmd == "dec depth gain" || cmd == "dgd") {
        setDepthGainValue(false);
    }
    else if(cmd == "depth ae" || cmd == "da") {
        switchDepthAE();
    }
    else if(cmd == "color mirror" || cmd == "cm") {
        switchColorMirror();
    }
    else if(cmd == "depth mirror" || cmd == "dm") {
        switchDepthMirror();
    }
    else if(cmd == "ir mirror" || cmd == "im") {
        switchIRMirror();
    }
    // fixed: the long-form command had a stray leading space (" ir right mirror"),
    // so the command advertised by printUsage() could never match
    else if(cmd == "ir right mirror" || cmd == "irm") {
        switchIRRightMirror();
    }
    else if(cmd == "help" || cmd == "?") {
        printUsage();
    }
    else {
        std::cerr << "Unsupported command received! Input \"help\" to get usage" << std::endl;
    }
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the coordinate-transform example executable.
cmake_minimum_required(VERSION 3.5)

project(ob_coordinate_transform)

# Single-source example; the target name matches the project name.
add_executable(${PROJECT_NAME} coordinate_transform.cpp)

# The examples are written against C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utility library.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
# Make "Start Debugging" in Visual Studio run from the runtime output directory.
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,98 @@
# C++ Sample: 3.advanced.coordinate_transform
## Overview
Use the SDK interface to transform coordinates between different coordinate systems.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
Frameset is a combination of different types of Frames
## Code overview
1. Enable color stream
```cpp
auto colorProfiles = pipe.getStreamProfileList(OB_SENSOR_COLOR);
if(colorProfiles) {
colorProfile = colorProfiles->getVideoStreamProfile(1280, OB_HEIGHT_ANY, OB_FORMAT_RGB, 30);
}
config->enableStream(colorProfile);
```
2. Enable depth stream
```cpp
auto depthProfiles = pipe.getStreamProfileList(OB_SENSOR_DEPTH);
std::shared_ptr<ob::VideoStreamProfile> depthProfile = nullptr;
if(depthProfiles) {
depthProfile = depthProfiles->getVideoStreamProfile(640, OB_HEIGHT_ANY, OB_FORMAT_Y16, 30);
}
config->enableStream(depthProfile);
```
3. Get frame data
```cpp
auto colorFrame = frameSet->colorFrame();
auto depthFrame = frameSet->depthFrame();
```
4. Get the stream profiles
```cpp
auto colorProfile = colorFrame->getStreamProfile();
auto depthProfile = depthFrame->getStreamProfile();
```
5. Get the extrinsic parameters
```cpp
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
auto extrinsicC2D = colorProfile->getExtrinsicTo(depthProfile);
```
6. Get the color intrinsic and distortion parameters
```cpp
auto colorIntrinsic = colorProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
auto colorDistortion = colorProfile->as<ob::VideoStreamProfile>()->getDistortion();
```
7. Get the depth intrinsic and distortion parameters
```cpp
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();
```
8. Processing
```cpp
if(testType == "1") {
transformation2dto2d(colorFrame, depthFrame);
} else if (testType == "2") {
transformation2dto3d(colorFrame, depthFrame);
} else if (testType == "3") {
transformation3dto3d(colorFrame, depthFrame);
} else if (testType == "4") {
transformation3dto2d(colorFrame, depthFrame);
} else {
std::cout << "Invalid command" << std::endl;
}
```
## Run Sample
Press the Esc key to exit the program.
Press the 1 key - transformation 2d to 2d
Press the 2 key - transformation 2d to 3d
Press the 3 key - transformation 3d to 3d
Press the 4 key - transformation 3d to 2d
### Result
![image](../../docs/resource/coordinate_transform.jpg)

View File

@@ -0,0 +1,299 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "libobsensor/hpp/Utils.hpp"
#include "libobsensor/hpp/Frame.hpp"
void printUsage();
void transformation2dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
void transformation2dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
void transformation3dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
void transformation3dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
// Entry point: stream depth + color frames from the default device and run the
// coordinate-transformation demo selected interactively from the console.
int main(void) try {
    // Build the stream configuration: depth and color, any resolution/fps/format.
    auto streamConfig = std::make_shared<ob::Config>();
    streamConfig->enableVideoStream(OB_STREAM_DEPTH, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
    streamConfig->enableVideoStream(OB_STREAM_COLOR, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);

    // Require every enabled frame type to be present in each output frameset.
    streamConfig->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);

    // Open the pipeline on the default device and begin streaming.
    auto pipeline = std::make_shared<ob::Pipeline>();
    pipeline->start(streamConfig);

    for(;;) {
        printUsage();
        std::cout << "\nInput command: ";

        std::string command = "1";
        std::getline(std::cin, command);
        if(command == "quit" || command == "q") {
            break;
        }

        // Fetch the next frameset; on timeout just prompt again.
        auto frames = pipeline->waitForFrameset(100);
        if(frames == nullptr) {
            continue;
        }

        auto colorFrame = frames->getFrame(OB_FRAME_COLOR);
        auto depthFrame = frames->getFrame(OB_FRAME_DEPTH);

        // Dispatch to the selected transformation demo.
        if(command == "1") {
            transformation2dto2d(colorFrame, depthFrame);
        }
        else if(command == "2") {
            transformation2dto3d(colorFrame, depthFrame);
        }
        else if(command == "3") {
            transformation3dto3d(colorFrame, depthFrame);
        }
        else if(command == "4") {
            transformation3dto2d(colorFrame, depthFrame);
        }
        else {
            std::cout << "Invalid command" << std::endl;
        }
    }

    pipeline->stop();
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    exit(EXIT_FAILURE);
}
// Print the list of console commands supported by this sample.
void printUsage() {
    std::cout << "Support commands:" << std::endl;
    std::cout << "    1 - transformation 2d to 2d" << std::endl;
    std::cout << "    2 - transformation 2d to 3d" << std::endl;
    std::cout << "    3 - transformation 3d to 3d" << std::endl;
    std::cout << "    4 - transformation 3d to 2d" << std::endl;
    std::cout << "--------------------------------" << std::endl;
    // fixed: missing space between "q" and "-" in the printed help text
    std::cout << "    quit / q - quit application" << std::endl;
}
void printRuslt(std::string msg, OBPoint2f sourcePixel, OBPoint2f targetPixel) {
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ") -> (" << targetPixel.x << ", " << targetPixel.y << ")" << std::endl;
}
void printRuslt(std::string msg, OBPoint2f sourcePixel, OBPoint3f targetPixel, float depthValue) {
std::cout << msg << ":" << "depth " << depthValue << " (" << sourcePixel.x << ", " << sourcePixel.y << ") -> (" << targetPixel.x << ", " << targetPixel.y << ", " << targetPixel.z << ")" << std::endl;
}
void printRuslt(std::string msg, OBPoint3f sourcePixel, OBPoint2f targetPixel) {
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ", " << sourcePixel.z << ") -> (" << targetPixel.x << ", " << targetPixel.y << ")" << std::endl;
}
void printRuslt(std::string msg, OBPoint3f sourcePixel, OBPoint3f targetPixel) {
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ", " << sourcePixel.z << ") -> (" << targetPixel.x << ", " << targetPixel.y << ", " << targetPixel.z << ")" << std::endl;
}
// Demonstrate 2D -> 2D transformation: map a 3x3 patch of depth-image pixels
// around the image center into color-image pixel coordinates.
void transformation2dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
    // Frame dimensions: used to pick source pixels and to validate that
    // transformed pixels land inside the color image.
    auto colorFrameWidth  = colorFrame->as<ob::VideoFrame>()->getWidth();
    auto depthFrameWidth  = depthFrame->as<ob::VideoFrame>()->getWidth();
    auto colorFrameHeight = colorFrame->as<ob::VideoFrame>()->getHeight();
    auto depthFrameHeight = depthFrame->as<ob::VideoFrame>()->getHeight();

    // Stream profiles carry the calibration (intrinsic/distortion/extrinsic) data.
    auto colorProfile = colorFrame->getStreamProfile();
    auto depthProfile = depthFrame->getStreamProfile();
    auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);

    auto colorIntrinsic  = colorProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
    auto colorDistortion = colorProfile->as<ob::VideoStreamProfile>()->getDistortion();
    auto depthIntrinsic  = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
    auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();

    // Raw depth values, one uint16 per pixel.
    uint16_t *pDepthData = (uint16_t *)depthFrame->getData();

    uint16_t convertAreaWidth  = 3;
    uint16_t convertAreaHeight = 3;

    // Transform depth pixels from the patch into the color frame's coordinates.
    for(uint32_t i = depthFrameHeight / 2; i < (depthFrameHeight / 2 + convertAreaHeight); i++) {
        for(uint32_t j = depthFrameWidth / 2; j < (depthFrameWidth / 2 + convertAreaWidth); j++) {
            OBPoint2f sourcePixel = { static_cast<float>(j), static_cast<float>(i) };
            OBPoint2f targetPixel = {};

            float depthValue = (float)pDepthData[i * depthFrameWidth + j];
            if(depthValue == 0) {
                std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
                continue;
            }

            // Depth 2D pixel + depth value -> color 2D pixel.
            bool result = ob::CoordinateTransformHelper::transformation2dto2d(sourcePixel, depthValue, depthIntrinsic, depthDistortion, colorIntrinsic,
                                                                              colorDistortion, extrinsicD2C, &targetPixel);
            // Skip failed transformations and pixels outside the color image.
            // Fixed: x is bounded by the width and y by the height (the original
            // compared both coordinates against the width).
            if(!result || targetPixel.x < 0 || targetPixel.y < 0 || targetPixel.x >= colorFrameWidth || targetPixel.y >= colorFrameHeight) {
                continue;
            }

            // Row-major index of the target pixel in a color-frame-sized buffer.
            auto index = (((uint32_t)targetPixel.y * colorFrameWidth) + (uint32_t)targetPixel.x);
            // Fixed: the last valid index is size - 1, so reject index >= size.
            if(index >= colorFrameWidth * colorFrameHeight) {
                continue;
            }
            printRuslt("depth to color: depth image coordinate transform to color image coordinate", sourcePixel, targetPixel);
        }
    }
}
// Demonstrate 2D -> 3D transformation: back-project a 3x3 patch of depth
// pixels around the depth-image center into 3D points using the depth intrinsics.
void transformation2dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
    // Depth image dimensions.
    auto depthVideo       = depthFrame->as<ob::VideoFrame>();
    auto depthFrameWidth  = depthVideo->getWidth();
    auto depthFrameHeight = depthVideo->getHeight();

    // Calibration data: depth intrinsics plus the depth-to-color extrinsic.
    auto colorProfile   = colorFrame->getStreamProfile();
    auto depthProfile   = depthFrame->getStreamProfile();
    auto extrinsicD2C   = depthProfile->getExtrinsicTo(colorProfile);
    auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();

    // Raw depth values, one uint16 per pixel.
    uint16_t *pDepthData = (uint16_t *)depthFrame->getData();

    // Convert a 3x3 patch centered on the depth image.
    const uint32_t patchWidth  = 3;
    const uint32_t patchHeight = 3;
    const uint32_t startRow    = depthFrameHeight / 2;
    const uint32_t startCol    = depthFrameWidth / 2;

    for(uint32_t row = startRow; row < startRow + patchHeight; row++) {
        for(uint32_t col = startCol; col < startCol + patchWidth; col++) {
            OBPoint2f pixel = { static_cast<float>(col), static_cast<float>(row) };
            OBPoint3f point = {};

            float depthValue = (float)pDepthData[row * depthFrameWidth + col];
            if(depthValue == 0) {
                std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
                continue;
            }

            // Back-project the pixel + depth into a 3D point.
            if(!ob::CoordinateTransformHelper::transformation2dto3d(pixel, depthValue, depthIntrinsic, extrinsicD2C, &point)) {
                continue;
            }
            printRuslt("2d to 3D: pixel coordinates and depth transform to point in 3D space", pixel, point, depthValue);
        }
    }
}
// Demonstrate 3D -> 3D transformation: back-project depth pixels into 3D
// points, then re-express those points in another sensor's coordinate system.
void transformation3dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
    // Depth image dimensions.
    auto depthVideo       = depthFrame->as<ob::VideoFrame>();
    auto depthFrameWidth  = depthVideo->getWidth();
    auto depthFrameHeight = depthVideo->getHeight();

    // Calibration data: extrinsics in both directions plus the depth intrinsics.
    auto colorProfile   = colorFrame->getStreamProfile();
    auto depthProfile   = depthFrame->getStreamProfile();
    auto extrinsicD2C   = depthProfile->getExtrinsicTo(colorProfile);
    auto extrinsicC2D   = colorProfile->getExtrinsicTo(depthProfile);
    auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();

    // Raw depth values, one uint16 per pixel.
    uint16_t *pDepthData = (uint16_t *)depthFrame->getData();

    // Convert a 3x3 patch centered on the depth image.
    const uint32_t patchWidth  = 3;
    const uint32_t patchHeight = 3;
    const uint32_t startRow    = depthFrameHeight / 2;
    const uint32_t startCol    = depthFrameWidth / 2;

    for(uint32_t row = startRow; row < startRow + patchHeight; row++) {
        for(uint32_t col = startCol; col < startCol + patchWidth; col++) {
            OBPoint2f pixel      = { static_cast<float>(col), static_cast<float>(row) };
            OBPoint3f depthPoint = {};
            OBPoint3f otherPoint = {};

            float depthValue = (float)pDepthData[row * depthFrameWidth + col];
            if(depthValue == 0) {
                std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
                continue;
            }

            // Step 1: pixel + depth -> 3D point.
            if(!ob::CoordinateTransformHelper::transformation2dto3d(pixel, depthValue, depthIntrinsic, extrinsicD2C, &depthPoint)) {
                continue;
            }
            printRuslt("2d to 3D: pixel coordinates and depth transform to point in 3D space", pixel, depthPoint, depthValue);

            // Step 2: re-express the 3D point relative to the other sensor.
            if(!ob::CoordinateTransformHelper::transformation3dto3d(depthPoint, extrinsicC2D, &otherPoint)) {
                continue;
            }
            printRuslt("3d to 3D: transform 3D coordinates relative to one sensor to 3D coordinates relative to another viewpoint", depthPoint, otherPoint);
        }
    }
}
// Demonstrate 3D -> 2D transformation: back-project depth pixels into 3D
// points, then project those points back onto the depth image plane.
void transformation3dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
    // Depth image dimensions.
    auto depthVideo       = depthFrame->as<ob::VideoFrame>();
    auto depthFrameWidth  = depthVideo->getWidth();
    auto depthFrameHeight = depthVideo->getHeight();

    // Calibration data: extrinsics in both directions plus depth intrinsics/distortion.
    auto colorProfile    = colorFrame->getStreamProfile();
    auto depthProfile    = depthFrame->getStreamProfile();
    auto extrinsicD2C    = depthProfile->getExtrinsicTo(colorProfile);
    auto extrinsicC2D    = colorProfile->getExtrinsicTo(depthProfile);
    auto depthIntrinsic  = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
    auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();

    // Raw depth values, one uint16 per pixel.
    uint16_t *pDepthData = (uint16_t *)depthFrame->getData();

    // Convert a 3x3 patch centered on the depth image.
    const uint32_t patchWidth  = 3;
    const uint32_t patchHeight = 3;
    const uint32_t startRow    = depthFrameHeight / 2;
    const uint32_t startCol    = depthFrameWidth / 2;

    for(uint32_t row = startRow; row < startRow + patchHeight; row++) {
        for(uint32_t col = startCol; col < startCol + patchWidth; col++) {
            OBPoint2f pixel      = { static_cast<float>(col), static_cast<float>(row) };
            OBPoint3f spacePoint = {};
            OBPoint2f projected  = {};

            float depthValue = (float)pDepthData[row * depthFrameWidth + col];
            if(depthValue == 0) {
                std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
                continue;
            }

            // Step 1: pixel + depth -> 3D point.
            if(!ob::CoordinateTransformHelper::transformation2dto3d(pixel, depthValue, depthIntrinsic, extrinsicD2C, &spacePoint)) {
                continue;
            }
            printRuslt("depth 2d to 3D: pixel coordinates and depth transform to point in 3D space", pixel, spacePoint, depthValue);

            // Step 2: project the 3D point back to image pixel coordinates.
            if(!ob::CoordinateTransformHelper::transformation3dto2d(spacePoint, depthIntrinsic, depthDistortion, extrinsicC2D, &projected)) {
                continue;
            }
            printRuslt("3d to depth 2d : point in 3D space transform to the corresponding pixel coordinates in an image", spacePoint, projected);
        }
    }
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the HDR merge example executable.
cmake_minimum_required(VERSION 3.5)

project(ob_hdr)

# Single-source example; the target name matches the project name.
add_executable(${PROJECT_NAME} hdr.cpp)

# The examples are written against C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utility library.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
# Make "Start Debugging" in Visual Studio run from the runtime output directory.
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,83 @@
# C++ Sample: 3.advanced.hdr
## Overview
In this sample, the user can get the HDR merged image. It also allows the user to toggle HDR synthesis on or off, and to choose whether the original image is displayed, through the keyboard.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
Frameset is a combination of different types of Frames
### Attentions
> This Sample only supports Gemini330 series devices.
## Code overview
### 1. Check if the device supports HDR merge
```c++
if(!device->isPropertySupported(OB_STRUCT_DEPTH_HDR_CONFIG, OB_PERMISSION_READ_WRITE)) {
std::cerr << "Current default device does not support HDR merge" << std::endl;
std::cout << "Press any key to exit...";
ob_smpl::waitForKeyPressed();
return -1;
}
```
### 2. Get depth stream profile
Get all stream profiles of the depth camera, including stream resolution, frame rate, and frame format
```c++
auto depthProfiles = pipe.getStreamProfileList(OB_SENSOR_DEPTH);
auto depthProfile = depthProfiles->getProfile(OB_PROFILE_DEFAULT);
config->enableStream(depthProfile);
```
### 3. Create HDRMerge
Create an HDRMerge post processor to merge depth frames between different HDR sequence ids.
The HDRMerge also supports processing of infrared frames.
```c++
auto hdrMerge = ob::FilterFactory::createFilter("HDRMerge");
```
### 5. Configure and enable Hdr stream
```c++
OBHdrConfig obHdrConfig;
obHdrConfig.enable = true; // enable HDR merge
obHdrConfig.exposure_1 = 7500;
obHdrConfig.gain_1 = 24;
obHdrConfig.exposure_2 = 100;
obHdrConfig.gain_2 = 16;
device->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, reinterpret_cast<uint8_t *>(&obHdrConfig), sizeof(OBHdrConfig));
```
### 7. Stop the pipeline and close hdr merge
```c++
// Stop the Pipeline, no frame data will be generated
pipe.stop();
// close hdr merge
obHdrConfig.enable = false;
device->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, reinterpret_cast<uint8_t *>(&obHdrConfig), sizeof(OBHdrConfig));
```
## Run Sample
### Key introduction
Press the 'Esc' key in the window to exit the program.
Press the '?' key in the window to show key map.
Press the 'M' key in the window to Toggle HDR merge.
Press the 'N' key in the window to Toggle alternate show origin frame.
### Result
![hdr](../../docs/resource/hdr.jpg)

View File

@@ -0,0 +1,129 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// HDR merge sample: stream depth + left/right IR, merge alternating HDR
// exposure frames with the HDRMerge filter, and render both the originals
// and the merged result.
int main(void) try {
    // Create a pipeline with default device
    ob::Pipeline pipe;

    // Get the device from the pipeline
    auto device = pipe.getDevice();

    // Check if the device supports HDR merge
    if(!device->isPropertySupported(OB_STRUCT_DEPTH_HDR_CONFIG, OB_PERMISSION_READ_WRITE)) {
        std::cerr << "Current default device does not support HDR merge" << std::endl;
        std::cout << "Press any key to exit...";
        ob_smpl::waitForKeyPressed();
        return -1;
    }

    // Configure which streams to enable or disable for the Pipeline by creating a Config
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enable depth and both IR streams with default profiles; require all of
    // them in every output frameset.
    config->enableVideoStream(OB_STREAM_DEPTH);
    config->enableVideoStream(OB_STREAM_IR_LEFT);
    config->enableVideoStream(OB_STREAM_IR_RIGHT);
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);

    // Create HDRMerge post processor to merge depth frames between different hdr sequence ids.
    // The HDRMerge also supports processing of infrared frames.
    auto hdrMerge = ob::FilterFactory::createFilter("HDRMerge");

    if(device->isFrameInterleaveSupported()) {
        // load frame interleave mode as 'Depth from HDR'
        device->loadFrameInterleave("Depth from HDR");
        // enable frame interleave mode
        device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, true);

        // The default parameters were loaded when loadFrameInterleave is called
        // You can also modify these parameters yourself
        //
        // 1. frame interleave parameters for index 0(index starts from 0):
        //    device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 0);
        //    device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 1);  // laser control must be 1
        //    device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 7500);
        //    device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
        //    device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 60);
        //    device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 10000);

        // 2. frame interleave parameters for index 1(index starts from 0):
        //    device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 1);
        //    device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 1);  // laser control must be 1
        //    device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 1);
        //    device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
        //    device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 20);
        //    device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 2000);
    }
    else {
        // configure and enable Hdr stream
        // Fixed: zero-initialize the struct — only a subset of fields is set
        // below, but the whole struct is written to the device, so any field
        // left unset must not contain indeterminate stack garbage.
        OBHdrConfig obHdrConfig = {};
        obHdrConfig.enable      = true;  // enable HDR merge
        obHdrConfig.exposure_1  = 7500;
        obHdrConfig.gain_1      = 24;
        obHdrConfig.exposure_2  = 100;
        obHdrConfig.gain_2      = 16;
        device->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, reinterpret_cast<uint8_t *>(&obHdrConfig), sizeof(OBHdrConfig));
    }

    // Start the pipeline with config
    pipe.start(config);

    // Create a window for rendering and set the resolution of the window
    ob_smpl::CVWindow win("HDR-Merge", 1280, 720, ob_smpl::ARRANGE_GRID);
    win.addLog("The HDR-Merged depth frames are displayed in the last row of the window.");

    while(win.run()) {
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }

        // Get the depth and infrared frames from the frameset
        auto depthFrame   = frameSet->getFrame(OB_FRAME_DEPTH)->as<ob::DepthFrame>();
        auto leftIRFrame  = frameSet->getFrame(OB_FRAME_IR_LEFT)->as<ob::IRFrame>();
        auto rightIRFrame = frameSet->getFrame(OB_FRAME_IR_RIGHT)->as<ob::IRFrame>();

        // Get the HDR sequence id from the depth frame metadata
        int groupId = static_cast<int>(depthFrame->getMetadataValue(OB_FRAME_METADATA_TYPE_HDR_SEQUENCE_INDEX));

        win.pushFramesToView({ depthFrame, leftIRFrame, rightIRFrame }, groupId);

        try {
            // Using HDRMerge filter to merge hdr frames
            auto result = hdrMerge->process(frameSet);
            if(result == nullptr) {
                continue;
            }
            auto resultFrameSet   = result->as<ob::FrameSet>();
            auto resultDepthFrame = resultFrameSet->getFrame(OB_FRAME_DEPTH)->as<ob::DepthFrame>();

            // add merged depth frame to render queue
            win.pushFramesToView(resultDepthFrame, 10);  // set the group id to 10 to avoid same group id with original depth frame
        }
        catch(ob::Error &e) {
            std::cerr << "HDRMerge error: " << e.what() << std::endl;
        }
    }

    // Stop the Pipeline, no frame data will be generated
    pipe.stop();

    // close hdr merge
    if(device->isFrameInterleaveSupported()) {
        device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, false);
    }
    else {
        OBHdrConfig obHdrConfig = { 0 };
        obHdrConfig.enable      = false;
        device->setStructuredData(OB_STRUCT_DEPTH_HDR_CONFIG, reinterpret_cast<uint8_t *>(&obHdrConfig), sizeof(OBHdrConfig));
    }
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit...";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the hardware depth-to-color alignment example executable.
cmake_minimum_required(VERSION 3.5)

project(ob_hw_d2c_align)

# Single-source example; the target name matches the project name.
add_executable(${PROJECT_NAME} hw_d2c_align.cpp)

# The examples are written against C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utility library.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDEs that support solution folders.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
# Make "Start Debugging" in Visual Studio run from the runtime output directory.
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,14 @@
# C++ Sample: 3.advanced.hw_d2c_align
## Overview
This sample demonstrates how to use the SDK to enable depth-to-color alignment on hardware devices (also known as hardware D2C or HwD2C).
### Knowledge
- The HwD2C feature allows you to align the depth image to the color image captured by the device. This is useful for applications that require the depth frame to be aligned with the color frame without increasing the resource usage of the host.
### Attention
- The HwD2C feature is only available on devices that support it. Please check the documentation of your device to see if it supports HwD2C.
- Not all depth stream profiles support the HwD2C feature. Please call the `getD2CDepthProfileList` function of the `ob::Pipeline` class to check the supported depth profiles.

View File

@@ -0,0 +1,139 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <mutex>
#include <thread>
// Tracks whether hardware depth-to-color alignment is currently enabled.
// Fixed: initialize a bool with `true` rather than the integer literal 1.
bool enable_align_mode = true;

// key press event processing: toggles hardware D2C alignment on 'T'/'t' and
// restarts the pipeline so the new align mode takes effect.
void handleKeyPress(ob_smpl::CVWindow &win, std::shared_ptr<ob::Pipeline> pipe, int key, std::shared_ptr<ob::Config> config) {
    if(key == 't' || key == 'T') {
        // Press the T key to switch align mode
        enable_align_mode = !enable_align_mode;

        // update the align mode in the config
        // (fixed typo in the on-screen log: "Haeware" -> "Hardware")
        if(enable_align_mode) {
            config->setAlignMode(ALIGN_D2C_HW_MODE);
            win.addLog("Hardware Depth to Color Align: Enabled");
        }
        else {
            config->setAlignMode(ALIGN_DISABLE);
            win.addLog("Hardware Depth to Color Align: Disabled");
        }

        // restart the pipeline with the new config
        pipe->stop();
        pipe->start(config);
    }
}
// Check whether the given color/depth stream profile pair supports hardware
// depth-to-color alignment: the depth profile must appear in the list of
// D2C-capable depth profiles for the given color profile.
bool checkIfSupportHWD2CAlign(std::shared_ptr<ob::Pipeline> pipe, std::shared_ptr<ob::StreamProfile> colorStreamProfile,
                              std::shared_ptr<ob::StreamProfile> depthStreamProfile) {
    auto hwD2CSupportedDepthStreamProfiles = pipe->getD2CDepthProfileList(colorStreamProfile, ALIGN_D2C_HW_MODE);
    // Consistency fix: use getCount() here as in the loop below (the original
    // mixed count() and getCount() on the same list in one function).
    if(hwD2CSupportedDepthStreamProfiles->getCount() == 0) {
        return false;
    }

    // Iterate through the supported depth stream profiles and check if there is a match with the given depth stream profile
    auto depthVsp = depthStreamProfile->as<ob::VideoStreamProfile>();
    auto count    = hwD2CSupportedDepthStreamProfiles->getCount();
    for(uint32_t i = 0; i < count; i++) {
        auto sp  = hwD2CSupportedDepthStreamProfiles->getProfile(i);
        auto vsp = sp->as<ob::VideoStreamProfile>();
        if(vsp->getWidth() == depthVsp->getWidth() && vsp->getHeight() == depthVsp->getHeight() && vsp->getFormat() == depthVsp->getFormat()
           && vsp->getFps() == depthVsp->getFps()) {
            // Found a matching depth stream profile, which means the given stream profiles support hardware depth-to-color alignment
            return true;
        }
    }
    return false;
}
// Build a pipeline config for hardware depth-to-color alignment, or return
// nullptr when no color/depth profile pair on the device supports it.
std::shared_ptr<ob::Config> createHwD2CAlignConfig(std::shared_ptr<ob::Pipeline> pipe) {
    auto colorStreamProfiles = pipe->getStreamProfileList(OB_SENSOR_COLOR);
    auto depthStreamProfiles = pipe->getStreamProfileList(OB_SENSOR_DEPTH);

    // Scan every color/depth profile combination for one that supports HwD2C.
    auto numColorProfiles = colorStreamProfiles->getCount();
    auto numDepthProfiles = depthStreamProfiles->getCount();
    for(uint32_t ci = 0; ci < numColorProfiles; ci++) {
        auto colorProfile = colorStreamProfiles->getProfile(ci);
        auto colorVsp     = colorProfile->as<ob::VideoStreamProfile>();
        for(uint32_t di = 0; di < numDepthProfiles; di++) {
            auto depthProfile = depthStreamProfiles->getProfile(di);
            auto depthVsp     = depthProfile->as<ob::VideoStreamProfile>();

            // Require matching frame rates: some models cannot mix fps values.
            if(colorVsp->getFps() != depthVsp->getFps()) {
                continue;
            }

            // Skip pairs the device cannot align in hardware.
            if(!checkIfSupportHWD2CAlign(pipe, colorProfile, depthProfile)) {
                continue;
            }

            // First supported pair wins: build and return the config for it.
            auto hwD2CAlignConfig = std::make_shared<ob::Config>();
            hwD2CAlignConfig->enableStream(colorProfile);       // enable color stream
            hwD2CAlignConfig->enableStream(depthProfile);       // enable depth stream
            hwD2CAlignConfig->setAlignMode(ALIGN_D2C_HW_MODE);  // enable hardware depth-to-color alignment
            hwD2CAlignConfig->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);  // output frameset with all types of frames
            return hwD2CAlignConfig;
        }
    }
    return nullptr;
}
// Entry point: find a HwD2C-capable stream pair, start streaming with
// hardware alignment enabled, and render the aligned frames.
int main(void) try {
    // Pipeline on the default device; timestamp-based frame sync enabled.
    auto pipeline = std::make_shared<ob::Pipeline>();
    pipeline->enableFrameSync();

    // Build a config with hardware depth-to-color alignment, if supported.
    auto alignConfig = createHwD2CAlignConfig(pipeline);
    if(alignConfig == nullptr) {
        std::cerr << "Current device does not support hardware depth-to-color alignment." << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
    }

    // Begin streaming with the alignment config.
    pipeline->start(alignConfig);

    // Rendering window; depth is overlaid on color.
    ob_smpl::CVWindow win("Hardware Depth to Color Align", 1280, 720, ob_smpl::ARRANGE_OVERLAY);
    win.setKeyPrompt("'T': Enable/Disable HwD2C, '+/-': Adjust Transparency");

    // Route key presses to the toggle handler defined above.
    win.setKeyPressedCallback([&](int key) { handleKeyPress(win, pipeline, key, alignConfig); });

    while(win.run()) {
        // Render the next frameset; retry on timeout.
        auto frames = pipeline->waitForFrameset(100);
        if(frames == nullptr) {
            continue;
        }
        win.pushFramesToView(frames);
    }

    // Stop the Pipeline, no frame data will be generated
    pipeline->stop();
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the laser-interleave example executable.
cmake_minimum_required(VERSION 3.5)

project(ob_laser_interleave)

add_executable(${PROJECT_NAME} laser_interleave.cpp)

# SDK examples target C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the runtime output dir so runtime DLLs/config files are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,108 @@
# C++ Sample: 3.advanced.interleave
## Overview
In this sample, user can enable or disable the function of laser frame interleave and SequenceId filter.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
Frameset is a combination of different types of Frames
### Attentions
> This Sample only supports Gemini330 series devices.
## Code overview
### 1. Check if the device supports frame interleave
```c++
if(!device->isFrameInterleaveSupported()) {
std::cerr << "Current default device does not support frame interleave" << std::endl;
std::cout << "Press any key to exit...";
ob_smpl::waitForKeyPressed();
return -1;
}
```
### 2. Enable depth and IR stream
Enable depth camera, left IR camera and right IR camera with default profiles
```c++
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
// enable depth stream with default profile
config->enableVideoStream(OB_STREAM_DEPTH);
config->enableVideoStream(OB_STREAM_IR_LEFT);
config->enableVideoStream(OB_STREAM_IR_RIGHT);
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
```
### 3. Create SequenceId filter
Create SequenceId post processor to filter frames.
```c++
auto postDepthFilter = ob::FilterFactory::createFilter("SequenceIdFilter");
auto postLeftInfraredFilter = ob::FilterFactory::createFilter("SequenceIdFilter");
auto postRightInfraredFilter = ob::FilterFactory::createFilter("SequenceIdFilter");
```
### 5. Enable laser frame interleave
```c++
// load frame interleave mode as 'Laser On-Off'
device->loadFrameInterleave("Laser On-Off");
// enable frame interleave
device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, true);
// The default parameters were loaded when loadFrameInterleave is called
// You can also modify these parameters yourself
//
// 1. frame interleave parameters for index 1(index starts from 0):
// device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 1);
// device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 0); // first: set laser control to 0 to turn off laser
// device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 3000);
// device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
// device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 60);
// device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 17000);
// 2. frame interleave parameters for index 0(index starts from 0):
// device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 0);
// device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 1); // second: set laser control to 1 to turn on laser
// device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 3000);
// device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
// device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 60);
// device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 30000);
```
### 7. Stop the pipeline and close frame interleave
```c++
// Stop the Pipeline, no frame data will be generated
pipe.stop();
// close frame interleave
device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, false);
```
## Run Sample
### Key introduction
Press the 'Esc' key in the window to exit the program.
Press the '?' key in the window to show key map.
### Result
After enabling laser frame interleave and SequenceId filter,
the user can set the sequence id of depth to 0 and the sequence id of IR to 1,
which will enhance the depth image quality and get the pure IR image.
1. laser interleave when sequence id is 0
![laser interleave](../../docs/resource/laser_interleave0.jpg)
2. laser interleave when sequence id is 1
![laser interleave](../../docs/resource/laser_interleave1.jpg)

View File

@@ -0,0 +1,226 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
// Forward declaration: console thread that parses user commands at runtime.
void inputWatcher();

// SequenceId post-processing filters, one per stream; shared with the
// inputWatcher() thread so the user can reconfigure them from the console.
std::shared_ptr<ob::Filter> postDepthFilter         = nullptr;
std::shared_ptr<ob::Filter> postLeftInfraredFilter  = nullptr;
std::shared_ptr<ob::Filter> postRightInfraredFilter = nullptr;

// Render window; global so inputWatcher() can close it on "quit".
std::shared_ptr<ob_smpl::CVWindow> win;
int main(void) try {
    // Create a pipeline with default device
    ob::Pipeline pipe;

    // Get the device from the pipeline
    auto device = pipe.getDevice();

    // Frame interleave ("Laser On-Off") is only available on supported models;
    // bail out early otherwise.
    if(!device->isFrameInterleaveSupported()) {
        std::cerr << "Current default device does not support frame interleave" << std::endl;
        std::cout << "Press any key to exit...";
        ob_smpl::waitForKeyPressed();
        return -1;
    }

    // Configure which streams to enable or disable for the Pipeline by creating a Config
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // Enable depth and both IR streams with their default profiles; only emit
    // framesets that contain all requested frame types.
    config->enableVideoStream(OB_STREAM_DEPTH);
    config->enableVideoStream(OB_STREAM_IR_LEFT);
    config->enableVideoStream(OB_STREAM_IR_RIGHT);
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);

    // Create SequenceIdFilter post processors (one per stream) so each stream can
    // independently keep frames of a single sequence id (laser-on vs laser-off).
    // The SequenceIdFilter also supports processing of infrared frames.
    postDepthFilter         = ob::FilterFactory::createFilter("SequenceIdFilter");
    postLeftInfraredFilter  = ob::FilterFactory::createFilter("SequenceIdFilter");
    postRightInfraredFilter = ob::FilterFactory::createFilter("SequenceIdFilter");

    // load frame interleave mode as 'Laser On-Off'
    device->loadFrameInterleave("Laser On-Off");
    // enable frame interleave
    device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, true);

    // The default parameters were loaded when loadFrameInterleave is called
    // You can also modify these parameters yourself
    //
    // 1. frame interleave parameters for index 0(index starts from 0):
    // device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 0);
    // device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 1);  // first: set laser control to 1 to turn on laser
    // device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 3000);
    // device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
    // device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 60);
    // device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 30000);

    // 2. frame interleave parameters for index 1(index starts from 0):
    // device->setIntProperty(OB_PROP_FRAME_INTERLEAVE_CONFIG_INDEX_INT, 1);
    // device->setIntProperty(OB_PROP_LASER_CONTROL_INT, 0);  // second: set laser control to 0 to turn off laser
    // device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, 3000);
    // device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, 16);
    // device->setIntProperty(OB_PROP_IR_BRIGHTNESS_INT, 60);
    // device->setIntProperty(OB_PROP_IR_AE_MAX_EXPOSURE_INT, 17000);

    // Start the pipeline with config
    pipe.start(config);

    // -1 passes all frames through (no filtering); valid values are -1, 0, 1.
    postDepthFilter->setConfigValue("sequenceid", -1);
    postLeftInfraredFilter->setConfigValue("sequenceid", -1);
    postRightInfraredFilter->setConfigValue("sequenceid", -1);

    // Detached console thread; it exits via win->close() on "quit".
    auto inputWatchThread = std::thread(inputWatcher);
    inputWatchThread.detach();

    // Create a window for rendering and set the resolution of the window
    win = std::make_shared<ob_smpl::CVWindow>("Laser On-Off", 1280, 720, ob_smpl::ARRANGE_GRID);

    while(win->run()) {
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }
        // Helper: pull one frame of the given type from the frameset and run it
        // through the matching SequenceIdFilter (nullptr if absent or filtered out).
        auto postFilter = [](std::shared_ptr<ob::FrameSet> frameSet, std::shared_ptr<ob::Filter> &filter, OBFrameType frameType) -> std::shared_ptr<ob::Frame> {
            auto tempFrame = frameSet->getFrame(frameType);
            if(!tempFrame) {
                return nullptr;
            }
            return filter->process(tempFrame);
        };
        try {
            // Using SequenceId filter to filter frames
            // 1: depth
            auto depthFrame = postFilter(frameSet, postDepthFilter, OB_FRAME_DEPTH);
            if(depthFrame) {
                // add frame to render queue
                win->pushFramesToView(depthFrame, 0);
            }
            // 2: left infrared
            auto leftIrFrame = postFilter(frameSet, postLeftInfraredFilter, OB_FRAME_IR_LEFT);
            if(leftIrFrame) {
                // add frame to render queue
                win->pushFramesToView(leftIrFrame, 1);
            }
            // 3: right infrared
            auto rightIrFrame = postFilter(frameSet, postRightInfraredFilter, OB_FRAME_IR_RIGHT);
            if(rightIrFrame) {
                // add frame to render queue
                win->pushFramesToView(rightIrFrame, 2);
            }
        }
        catch(ob::Error &e) {
            std::cerr << "SequenceIdFilter error: " << e.what() << std::endl;
        }
    }

    // Release the filters before stopping the pipeline.
    postDepthFilter.reset();
    postLeftInfraredFilter.reset();
    postRightInfraredFilter.reset();

    // Stop the Pipeline, no frame data will be generated
    pipe.stop();
    // close frame interleave
    device->setBoolProperty(OB_PROP_FRAME_INTERLEAVE_ENABLE_BOOL, false);
    return 0;
}
catch(ob::Error &e) {
    // Print detailed SDK error information before exiting.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit...";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Print the interactive console usage to stdout.
// The accepted command shape is "<filter> <param>": <filter> selects the stream
// (depth / left_ir / right_ir) and <param> selects the sequence id
// (all = disable filtering, 0 or 1 = keep only that sequence id).
void printCommandTips() {
    static const char *kTipLines[] = {
        "\n-------------------------------",
        "\nCommand usage: <filter> <param>",
        "\n <filter>: stream filter name, must be one of the following values:",
        "\n depth",
        "\n left_ir",
        "\n right_ir",
        "\n <param>: stream filter param, must be one of the following values:",
        "\n all: disable sequenceid filter",
        "\n 0: set sequenceid to 0",
        "\n 1: set sequenceid to 1",
    };
    for(const char *line: kTipLines) {
        std::cout << line;
    }
    std::cout << "\nPress 'q' or 'quit' to exit the program." << std::endl;
}
// Console watcher thread: reads "<filter> <param>" commands from stdin and
// applies the requested sequence id to the matching SequenceIdFilter.
// Typing "q"/"quit" — or closing stdin — closes the render window and returns.
void inputWatcher() {
    while(true) {
        std::string cmd;
        printCommandTips();
        // BUGFIX: treat EOF/stream failure like "quit". The original ignored the
        // getline() result, so a closed stdin made this loop spin forever,
        // re-printing the prompt at full speed.
        if(!std::getline(std::cin, cmd) || cmd == "quit" || cmd == "q") {
            win->close();
            break;
        }
        // Tokenize the command on whitespace.
        std::istringstream ss(cmd);
        std::string token;
        std::vector<std::string> controlVec;
        while(ss >> token) {
            controlVec.push_back(token);
        }
        if(controlVec.size() != 2) {
            std::cerr << "Error: invalid param." << std::endl;
            continue;
        }
        // Resolve the target filter from the first token.
        std::shared_ptr<ob::Filter> filter = nullptr;
        if(controlVec.at(0) == "depth") {
            filter = postDepthFilter;
        }
        else if(controlVec.at(0) == "left_ir") {
            filter = postLeftInfraredFilter;
        }
        else if(controlVec.at(0) == "right_ir") {
            filter = postRightInfraredFilter;
        }
        else {
            std::cerr << "Error: invalid param." << std::endl;
            continue;
        }
        // Resolve the sequence id from the second token (-1 disables filtering).
        int32_t sequenceid = 0;
        if(controlVec.at(1) == "all") {
            sequenceid = -1;
        }
        else if(controlVec.at(1) == "0") {
            sequenceid = 0;
        }
        else if(controlVec.at(1) == "1") {
            sequenceid = 1;
        }
        else {
            std::cerr << "Error: invalid param." << std::endl;
            continue;
        }
        // Apply the new sequence id to the selected filter.
        try {
            filter->setConfigValue("sequenceid", sequenceid);
            std::cout << "Set sequenceid successfully" << std::endl;
        }
        catch(ob::Error &e) {
            std::cerr << "Set sequenceid error: " << e.what() << std::endl;
        }
    }
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the multi-device example executable.
cmake_minimum_required(VERSION 3.5)

project(ob_multi_device)

add_executable(${PROJECT_NAME} multi_device.cpp)

# SDK examples target C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the runtime output dir so runtime DLLs/config files are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,92 @@
# C++ Sample: 3.advanced.multi_devices
## Overview
In this sample, users can connect multiple camera devices and get color and depth images of different cameras.
### Knowledge
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
FrameSet is a combination of different types of Frames.
## Code overview
1. In `startStream`, `deviceIndex` is used to identify different devices, and a map is used to store the color and depth frames of different devices.
```cpp
void startStream(std::map<int, std::shared_ptr<ob::Pipeline>> &pipes) {
for(auto &item: pipes) {
int deviceIndex = item.first;
auto &pipe = item.second;
// config to enable depth and color streams
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
config->enableVideoStream(OB_STREAM_COLOR);
config->enableVideoStream(OB_STREAM_DEPTH);
// start pipeline and pass the callback function to receive the frames
pipe->start(config, [deviceIndex](std::shared_ptr<ob::FrameSet> frameSet) {
std::lock_guard<std::mutex> lock(framesetMutex);
framesets[deviceIndex] = frameSet;
});
}
}
```
2. In `stopStream`, obtain the pipeline corresponding to each device and stop it.
```cpp
void stopStream(std::map<int, std::shared_ptr<ob::Pipeline>> &pipes) {
for(auto &item: pipes) {
auto &pipe = item.second;
// stop the pipeline
pipe->stop();
}
std::lock_guard<std::mutex> lock(framesetMutex);
framesets.clear();
}
```
3. Obtain the currently connected devices through context, and obtain the list and number of devices.
```cpp
// Create a Context
ob::Context ctx;
// Query the list of connected devices
auto devList = ctx.queryDeviceList();
// Get the number of connected devices
int devCount = devList->getCount();
```
4. Use map to bind deviceIndex to the device's pipeline to distinguish video streams obtained by different devices.
```cpp
// Create a pipeline for each device
std::map<int, std::shared_ptr<ob::Pipeline>> pipes;
for(int i = 0; i < devCount; i++) {
// Get the device from device list
auto dev = devList->getDevice(i);
// Create a pipeline for the device
auto pipe = std::make_shared<ob::Pipeline>(dev);
// Add the pipeline to the map of pipelines
pipes.insert({ i, pipe });
}
```
## Run Sample
### Key introduction
Press the 'Esc' key in the window to exit the program.
Press the '?' key in the window to show key map.
### Result
![multi_devices](../../docs/resource/multi_devices.jpg)

View File

@@ -0,0 +1,104 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include "utils_opencv.hpp"
#include <mutex>
// Latest frameset received from each device, keyed by device index.
// Written from per-device pipeline callbacks and read from the render loop,
// so every access must hold framesetMutex.
std::map<int, std::shared_ptr<const ob::Frame>> framesets;
std::mutex framesetMutex;
// Start depth (and, when present, color) streaming on every pipeline.
// Each pipeline's callback stores the newest frameset into the global map
// under framesetMutex, keyed by the device index it was registered with.
void startStream(std::map<int, std::shared_ptr<ob::Pipeline>> &pipes) {
    for(auto &entry: pipes) {
        const int devIndex = entry.first;
        auto     &pipeline = entry.second;

        // Always request depth; request color only if the device has a color sensor.
        auto config = std::make_shared<ob::Config>();
        config->enableVideoStream(OB_STREAM_DEPTH);
        auto sensors = pipeline->getDevice()->getSensorList();
        for(uint32_t i = 0; i < sensors->getCount(); i++) {
            if(sensors->getSensorType(i) == OB_SENSOR_COLOR) {
                config->enableVideoStream(OB_STREAM_COLOR);
            }
        }

        // Publish each received frameset for the render loop.
        pipeline->start(config, [devIndex](std::shared_ptr<ob::FrameSet> frameSet) {
            std::lock_guard<std::mutex> lock(framesetMutex);
            framesets[devIndex] = frameSet;
        });
    }
}
// Stop every pipeline, then discard all cached framesets under the lock.
void stopStream(std::map<int, std::shared_ptr<ob::Pipeline>> &pipes) {
    for(auto &entry: pipes) {
        entry.second->stop();
    }
    std::lock_guard<std::mutex> lock(framesetMutex);
    framesets.clear();
}
int main() try {
// Create a Context
ob::Context ctx;
// Query the list of connected devices
auto devList = ctx.queryDeviceList();
// Get the number of connected devices
int devCount = devList->getCount();
// Create a pipeline for each device
std::map<int, std::shared_ptr<ob::Pipeline>> pipes;
for(int i = 0; i < devCount; i++) {
// Get the device from device list
auto dev = devList->getDevice(i);
// Create a pipeline for the device
auto pipe = std::make_shared<ob::Pipeline>(dev);
// Add the pipeline to the map of pipelines
pipes.insert({ i, pipe });
}
// Start the depth and color streams for all devices
startStream(pipes);
// Create a window for rendering and set the resolution of the window
ob_smpl::CVWindow win("MultiDevice", 1280, 720, ob_smpl::ARRANGE_GRID);
// Main loop to show the frames, press `ESC` to exit
while(win.run()) {
// Get the latest frames from all devices
for(auto &item: framesets) {
std::lock_guard<std::mutex> lock(framesetMutex);
auto deviceIndex = item.first;
auto &frameset = item.second;
// push the frames to the window for show
win.pushFramesToView(frameset, deviceIndex);
}
}
// Stop all streams and clear the framesets
stopStream(pipes);
return 0;
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,25 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

# Build script for the multi-device synchronization example.
cmake_minimum_required(VERSION 3.5)

project(ob_multi_devices_sync)

# Collect every source/header in this directory; cJSON is bundled to parse the
# MultiDeviceSyncConfig.json configuration file.
file(GLOB_RECURSE SOURCE_FILES *.cpp)
file(GLOB_RECURSE HEADER_FILES *.hpp)
add_executable(${PROJECT_NAME} ${SOURCE_FILES} ${HEADER_FILES} utils/cJSON.c)
#add_executable(${PROJECT_NAME} ob_multi_devices_sync.cpp PipelineHolder.cpp utils/cJSON.c)

# SDK examples target C++11.
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)

# This example uses std::thread, so link the platform threading library.
find_package(Threads REQUIRED)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils Threads::Threads)

# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")

if(MSVC)
    # Debug from the runtime output dir so the JSON config and DLLs are found.
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

# Install the binary together with its sync configuration file.
install(FILES ${CMAKE_CURRENT_LIST_DIR}/MultiDeviceSyncConfig.json DESTINATION bin)
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,151 @@
#include "FramePairingManager.hpp"
#include <map>
#include <algorithm>
// Convert a frame's device timestamp from microseconds to milliseconds
// (integer division truncates toward zero).
uint64_t getFrameTimestampMsec(const std::shared_ptr<const ob::Frame> frame) {
    return frame->getTimeStampUs() / 1000;
}
// Construct in the "running" state; destroy_ is flipped by release() to abort
// any in-progress pairing wait.
FramePairingManager::FramePairingManager()
    : destroy_(false) {
}

// Signal shutdown so getFramePairs() stops waiting before members are torn down.
FramePairingManager::~FramePairingManager() {
    release();
}
bool FramePairingManager::pipelineHoldersFrameNotEmpty() {
if(pipelineHolderList_.size() == 0) {
return false;
}
for(const auto &holder: pipelineHolderList_) {
if(!holder->isFrameReady()) {
return false;
}
}
return true;
}
void FramePairingManager::setPipelineHolderList(std::vector<std::shared_ptr<PipelineHolder>> pipelineHolderList) {
this->pipelineHolderList_ = pipelineHolderList;
for(auto &&pipelineHolder: pipelineHolderList) {
int deviceIndex = pipelineHolder->getDeviceIndex();
if(pipelineHolder->getSensorType() == OB_SENSOR_DEPTH) {
depthPipelineHolderList_[deviceIndex] = pipelineHolder;
}
if(pipelineHolder->getSensorType() == OB_SENSOR_COLOR) {
colorPipelineHolderList_[deviceIndex] = pipelineHolder;
}
}
}
// Build one (depth, color) pair per device from the oldest queued frames.
// Returns an empty vector when: frames do not all arrive within ~200 ms,
// release() was called, or the candidate frames are too far apart in time
// (the whole candidate set is then discarded so the streams can re-align).
std::vector<std::pair<std::shared_ptr<ob::Frame>, std::shared_ptr<ob::Frame>>> FramePairingManager::getFramePairs() {
    std::vector<std::pair<std::shared_ptr<ob::Frame>, std::shared_ptr<ob::Frame>>> framePairs;
    if(pipelineHolderList_.size() > 0) {
        int depthPipelineHolderSize = static_cast<int>(depthPipelineHolderList_.size());
        auto start = std::chrono::steady_clock::now();
        // Timestamp Matching Mode: poll until every holder has a frame queued,
        // giving up after 200 ms or when shutdown is requested.
        while(!pipelineHoldersFrameNotEmpty() && !destroy_) {
            // Wait for frames if not yet available.
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
            auto now = std::chrono::steady_clock::now();
            auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(now - start).count();
            if(elapsed > 200) {
                return framePairs;
            }
        }
        if(destroy_) {
            return framePairs;
        }
        bool discardFrame = false;
        std::map<int, std::shared_ptr<ob::Frame>> depthFramesMap;
        std::map<int, std::shared_ptr<ob::Frame>> colorFramesMap;
        std::vector<std::shared_ptr<PipelineHolder>> pipelineHolderVector;
        // Order holders by their oldest frame's timestamp; the first (oldest)
        // entry becomes the reference the others are compared against.
        sortFrameMap(pipelineHolderList_, pipelineHolderVector);
        auto refIter = pipelineHolderVector.begin();
        const auto &refHolder = *refIter;
        auto refTsp = getFrameTimestampMsec(refHolder->frontFrame());
        auto refHalfTspGap = refHolder->halfTspGap;
        for(const auto &item: pipelineHolderVector) {
            auto tarFrame = item->frontFrame();
            auto tarHalfTspGap = item->halfTspGap;
            int index = item->getDeviceIndex();
            auto frameType = item->getFrameType();
            // Tolerance: the larger of the two streams' half frame periods (ms).
            uint32_t tspHalfGap = tarHalfTspGap > refHalfTspGap ? tarHalfTspGap : refHalfTspGap;
            // std::cout << "tspHalfGap : " << tspHalfGap << std::endl;
            auto tarTsp = getFrameTimestampMsec(tarFrame);
            // Non-negative because the holders are sorted by timestamp ascending.
            auto diffTsp = tarTsp - refTsp;
            if(diffTsp > tspHalfGap) {
                // Candidate set spans too much time: abandon this round.
                // NOTE(review): frames already consumed via getFrame() in earlier
                // iterations stay consumed, which advances the lagging streams —
                // confirm this is the intended re-alignment strategy.
                discardFrame = true;
                // std::cout << "index = " << index << " frame type = " << frameType << " diff tsp = " << diffTsp << std::endl;
                break;
            }
            // NOTE(review): the reference gap is replaced by the current target's
            // gap each iteration (a rolling comparison), while refTsp stays the
            // oldest frame's timestamp — confirm this asymmetry is intentional.
            refHalfTspGap = tarHalfTspGap;
            if(frameType == OB_FRAME_DEPTH) {
                depthFramesMap[index] = item->getFrame();
            }
            if(frameType == OB_FRAME_COLOR) {
                colorFramesMap[index] = item->getFrame();
            }
        }
        if(discardFrame) {
            depthFramesMap.clear();
            colorFramesMap.clear();
            return framePairs;
        }
        // Log the matched timestamps and emit one (depth, color) pair per device.
        std::cout << "=================================================" << std::endl;
        for(int i = 0; i < depthPipelineHolderSize; i++) {
            auto depthFrame = depthFramesMap[i];
            auto colorFrame = colorFramesMap[i];
            std::cout << "Device#" << i << ", "
                      << " depth(us) "
                      << ", frame timestamp=" << depthFrame->timeStampUs() << ","
                      << "global timestamp = " << depthFrame->globalTimeStampUs() << ","
                      << "system timestamp = " << depthFrame->systemTimeStampUs() << std::endl;
            std::cout << "Device#" << i << ", "
                      << " color(us) "
                      << ", frame timestamp=" << colorFrame->timeStampUs() << ","
                      << "global timestamp = " << colorFrame->globalTimeStampUs() << ","
                      << "system timestamp = " << colorFrame->systemTimeStampUs() << std::endl;
            framePairs.emplace_back(depthFrame, colorFrame);
        }
        return framePairs;
    }
    return framePairs;
}
void FramePairingManager::sortFrameMap(std::vector<std::shared_ptr<PipelineHolder>> &pipelineHolders,
std::vector<std::shared_ptr<PipelineHolder>> &pipelineHolderVector) {
for(const auto &holder: pipelineHolders) {
pipelineHolderVector.push_back(holder);
}
std::sort(pipelineHolderVector.begin(), pipelineHolderVector.end(), [](const std::shared_ptr<PipelineHolder> &x, const std::shared_ptr<PipelineHolder> &y) {
auto xTsp = getFrameTimestampMsec(x->frontFrame());
auto yTsp = getFrameTimestampMsec(y->frontFrame());
return xTsp < yTsp;
});
}
// Request shutdown: makes getFramePairs() bail out of its polling loop.
// NOTE(review): destroy_ is a plain bool; if release() may run on a different
// thread than getFramePairs(), it should be std::atomic<bool> — confirm the
// threading model with the caller.
void FramePairingManager::release() {
    destroy_ = true;
}

View File

@@ -0,0 +1,34 @@
#pragma once
#include "PipelineHolder.hpp"

#include <libobsensor/ObSensor.hpp>

#include <condition_variable>
#include <cstdint>
#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <utility>
#include <vector>
// Pairs depth/color frames captured by multiple PipelineHolder instances by
// timestamp, producing one (depth, color) frame pair per device.
class FramePairingManager {
public:
    FramePairingManager();
    ~FramePairingManager();

private:
    // True only when every registered holder has at least one queued frame.
    bool pipelineHoldersFrameNotEmpty();
    // Copy holders into pipelineHolderVector sorted by front-frame timestamp.
    void sortFrameMap(std::vector<std::shared_ptr<PipelineHolder>> &pipelineHolders, std::vector<std::shared_ptr<PipelineHolder>> &pipelineHolderVector);

public:
    // Register the holders to pair; also indexes depth/color holders by device index.
    void setPipelineHolderList(std::vector<std::shared_ptr<PipelineHolder>> pipelineHolderList);
    // Return one (depth, color) pair per device, or an empty vector on timeout/mismatch.
    std::vector<std::pair<std::shared_ptr<ob::Frame>, std::shared_ptr<ob::Frame>>> getFramePairs();
    // Abort any in-progress pairing wait; called from the destructor.
    void release();

private:
    bool destroy_;  // set by release() to abort getFramePairs()
    // NOTE(review): the two members below are never referenced in the visible
    // implementation and are left uninitialized — confirm whether they are dead
    // and can be removed.
    bool timestampPairingEnable_;
    uint64_t timestampPairingRange_;
    std::vector<std::shared_ptr<PipelineHolder>> pipelineHolderList_;
    // Depth/color holders indexed by device index.
    std::map<int, std::shared_ptr<PipelineHolder>> depthPipelineHolderList_;
    std::map<int, std::shared_ptr<PipelineHolder>> colorPipelineHolderList_;
};

View File

@@ -0,0 +1,30 @@
{
"version": "1.0.0",
"configTime": "2023/01/01",
"devices": [
{
"sn": "CP2194200060",
"syncConfig": {
"syncMode": "OB_MULTI_DEVICE_SYNC_MODE_PRIMARY",
"depthDelayUs": 0,
"colorDelayUs": 0,
"trigger2ImageDelayUs": 0,
"triggerOutEnable": true,
"triggerOutDelayUs": 0,
"framesPerTrigger": 1
}
},
{
"sn": "CP0Y8420004K",
"syncConfig": {
"syncMode": "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY",
"depthDelayUs": 0,
"colorDelayUs": 0,
"trigger2ImageDelayUs": 0,
"triggerOutEnable": true,
"triggerOutDelayUs": 0,
"framesPerTrigger": 1
}
}
]
}

View File

@@ -0,0 +1,150 @@
#include "PipelineHolder.hpp"
// Bind a holder to one (pipeline, sensor) pair; streaming does not begin until
// startStream() is called.
PipelineHolder::PipelineHolder(std::shared_ptr<ob::Pipeline> pipeline, OBSensorType sensorType, std::string deviceSN, int deviceIndex)
    : startStream_(false), pipeline_(pipeline), sensorType_(sensorType), deviceSN_(deviceSN), deviceIndex_(deviceIndex) {
}

// Wake any thread blocked on the frame queue before members are destroyed.
PipelineHolder::~PipelineHolder() {
    release();
}
// Start streaming the configured sensor with its default profile and queue the
// received frames. Also derives halfTspGap — half the frame period in
// milliseconds — which FramePairingManager uses as its pairing tolerance.
// Errors are logged via handleStreamError() and leave startStream_ false.
void PipelineHolder::startStream() {
    std::cout << "startStream: " << deviceSN_ << " sensorType:" << sensorType_ << std::endl;
    try {
        if(pipeline_) {
            auto profileList = pipeline_->getStreamProfileList(sensorType_);
            auto streamProfile = profileList->getProfile(OB_PROFILE_DEFAULT)->as<ob::VideoStreamProfile>();
            frameType_ = mapFrameType(sensorType_);
            auto fps = streamProfile->getFps();
            // Half frame period in ms, rounded to the nearest integer (500/fps + 0.5).
            halfTspGap = static_cast<uint32_t>(500.0f / fps + 0.5);
            std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
            config->enableStream(streamProfile);
            // Each arriving frameset is routed into this holder's frame queue.
            pipeline_->start(config, [this](std::shared_ptr<ob::FrameSet> frameSet) {
                processFrame(frameSet);
            });
            startStream_ = true;
        }
    }
    catch(ob::Error &e) {
        std::cerr << "starting stream failed: " << deviceSN_ << std::endl;
        handleStreamError(e);
    }
}
// Pipeline callback: extract this holder's frame type from the frameset and
// enqueue it, dropping the oldest queued frame when the queue is full.
// Frames arriving before startStream_ is set (or after stopStream()) are ignored.
void PipelineHolder::processFrame(std::shared_ptr<ob::FrameSet> frameSet) {
    if(!frameSet) {
        std::cerr << "Invalid frameSet received." << std::endl;
        return;
    }
    if(!startStream_) {
        return;
    }
    {
        std::lock_guard<std::mutex> lock(queueMutex_);
        auto frame = frameSet->getFrame(frameType_);
        if(frame) {
            // Bounded FIFO: make room by discarding the oldest frame.
            if(obFrames.size() >= static_cast<size_t>(maxFrameSize_)) {
                obFrames.pop();
            }
            obFrames.push(frame);
        }
    }
    // Notify outside the lock so woken waiters can acquire it immediately.
    condVar_.notify_all();
}
// Non-destructively check whether at least one frame is queued.
// NOTE(review): the wait predicate `!obFrames.empty() || startStream_` is true
// whenever streaming is active, so while streaming this never blocks and an
// empty queue simply returns false; after release() clears startStream_ with
// an empty queue the predicate stays false and the wait would block until the
// next notify — confirm the predicate was not meant to be `|| !startStream_`.
bool PipelineHolder::isFrameReady() {
    {
        std::unique_lock<std::mutex> lock(queueMutex_);
        condVar_.wait(lock, [this]() { return !obFrames.empty() || startStream_; });
        if(startStream_ && obFrames.empty()) {
            return false;
        }
    }
    return true;
}
// Return (without removing) the oldest queued frame, or nullptr when streaming
// with an empty queue. Shares the wait-predicate caveat noted in isFrameReady():
// while streaming, this call never blocks.
std::shared_ptr<ob::Frame> PipelineHolder::frontFrame() {
    {
        std::unique_lock<std::mutex> lock(queueMutex_);
        condVar_.wait(lock, [this]() { return !obFrames.empty() || startStream_; });
        if(startStream_ && obFrames.empty()) {
            return nullptr;
        }
        auto frame = obFrames.front();
        return frame;
    }
}
// Drop the oldest queued frame; no-op when streaming with an empty queue.
// Shares the wait-predicate caveat noted in isFrameReady().
void PipelineHolder::popFrame() {
    {
        std::unique_lock<std::mutex> lock(queueMutex_);
        condVar_.wait(lock, [this]() { return !obFrames.empty() || startStream_; });
        if(startStream_ && obFrames.empty()) {
            return;
        }
        obFrames.pop();
    }
}
// Remove and return the oldest queued frame, or nullptr when streaming with an
// empty queue. Shares the wait-predicate caveat noted in isFrameReady().
std::shared_ptr<ob::Frame> PipelineHolder::getFrame() {
    {
        std::unique_lock<std::mutex> lock(queueMutex_);
        condVar_.wait(lock, [this]() { return !obFrames.empty() || startStream_; });
        if(startStream_ && obFrames.empty()) {
            return nullptr;
        }
        auto frame = obFrames.front();
        obFrames.pop();
        return frame;
    }
}
// Stop the pipeline. startStream_ is cleared first so in-flight callbacks
// drop their frames instead of queueing them during shutdown.
void PipelineHolder::stopStream() {
    try {
        if(pipeline_) {
            std::cout << "stopStream: " << deviceSN_ << " sensorType:" << sensorType_ << std::endl;
            startStream_ = false;
            pipeline_->stop();
        }
    }
    catch(ob::Error &e) {
        std::cerr << "stopping stream failed: " << deviceSN_ << std::endl;
        std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
    }
}
// Stop queueing (startStream_ = false) and wake every waiter so blocked
// isFrameReady()/frontFrame()/popFrame()/getFrame() calls can re-evaluate
// their predicates and return.
void PipelineHolder::release() {
    {
        std::lock_guard<std::mutex> lock(queueMutex_);
        startStream_ = false;
    }
    condVar_.notify_all();
}
// Print full details (function, args, message, type) of an SDK error raised
// while starting a stream.
void PipelineHolder::handleStreamError(const ob::Error &e) {
    std::cerr << "Function: " << e.getName() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.getMessage() << "\nType: " << e.getExceptionType() << std::endl;
}
// Translate a sensor type to the frame type it produces; any unrecognized
// sensor type maps to OB_FRAME_UNKNOWN.
OBFrameType PipelineHolder::mapFrameType(OBSensorType sensorType) {
    if(sensorType == OB_SENSOR_COLOR) {
        return OB_FRAME_COLOR;
    }
    if(sensorType == OB_SENSOR_IR) {
        return OB_FRAME_IR;
    }
    if(sensorType == OB_SENSOR_IR_LEFT) {
        return OB_FRAME_IR_LEFT;
    }
    if(sensorType == OB_SENSOR_IR_RIGHT) {
        return OB_FRAME_IR_RIGHT;
    }
    if(sensorType == OB_SENSOR_DEPTH) {
        return OB_FRAME_DEPTH;
    }
    return OBFrameType::OB_FRAME_UNKNOWN;
}

View File

@@ -0,0 +1,75 @@
#pragma once
#include <libobsensor/ObSensor.hpp>
#include <iostream>
#include <vector>
#include <mutex>
#include <queue>
#include <condition_variable>
// Owns one pipeline streaming a single sensor and buffers its frames in a
// bounded FIFO queue (oldest dropped first) for timestamp-based pairing.
class PipelineHolder {
public:
    PipelineHolder(std::shared_ptr<ob::Pipeline> pipeline, OBSensorType sensorType, std::string deviceSN, int deviceIndex);
    ~PipelineHolder();

public:
    // Start streaming the sensor's default profile and begin queueing frames.
    void startStream();
    // Pipeline callback: extract this holder's frame type and enqueue it.
    void processFrame(std::shared_ptr<ob::FrameSet> frameSet);
    // True if at least one frame is queued (non-blocking while streaming).
    bool isFrameReady();
    // Peek the oldest queued frame without removing it (nullptr if none).
    std::shared_ptr<ob::Frame> frontFrame();
    // Drop the oldest queued frame, if any.
    void popFrame();
    // Remove and return the oldest queued frame (nullptr if none).
    std::shared_ptr<ob::Frame> getFrame();
    // Stop the pipeline and stop queueing new frames.
    void stopStream();
    // Wake all waiters and stop queueing; called from the destructor.
    void release();
    // Print full details of a stream-related SDK error.
    void handleStreamError(const ob::Error &e);
    // Map a sensor type to the frame type it produces.
    OBFrameType mapFrameType(OBSensorType sensorType);
    std::string getSerialNumber() {
        return deviceSN_;
    }
    OBSensorType getSensorType() {
        return sensorType_;
    }
    OBFrameType getFrameType() {
        return frameType_;
    }
    int getDeviceIndex(){
        return deviceIndex_;
    }
    int getFrameQueueSize() {
        std::lock_guard<std::mutex> lock(queueMutex_);
        return static_cast<int>(obFrames.size());
    }

private:
    bool startStream_;                        // true while frames should be queued
    std::shared_ptr<ob::Pipeline> pipeline_;
    OBSensorType sensorType_;
    OBFrameType frameType_;                   // derived from sensorType_ in startStream()
    std::string deviceSN_;
    int deviceIndex_;
    std::condition_variable condVar_;         // signaled on enqueue and on release()
    std::mutex queueMutex_;                   // guards obFrames
    uint32_t maxFrameSize_ = 16;              // queue capacity; oldest frame dropped when full
    std::queue<std::shared_ptr<ob::Frame>> obFrames;

public:
    // Half of the frame period in milliseconds; used as the pairing tolerance.
    uint32_t halfTspGap;
};

View File

@@ -0,0 +1,266 @@
# C++ Sample: 3.advanced.multi_devices_sync
## Overview
Function description: Demonstrates multi-device synchronization. This sample supports network devices, USB devices, and GMSL devices (such as the Gemini 335lg).
- Network devices and USB devices must be connected to a sync hub(via the 8-pin port),please refer to the [Multi-device Sync documentation](https://www.orbbec.com/docs-general/set-up-cameras-for-external-synchronization_v1-2/).
- GMSL devices can connect via the 8-pin port or through multi-device sync via GMSL2 /FAKRA, Gemini 335lg multi device sync please refer [this document](https://www.orbbec.com/docs/gemini-335lg-hardware-synchronization/).
## Code overview
### 1.Configure multi device synchronization
```cpp
configMultiDeviceSync();
```
### 2.Conduct multi device testing
```cpp
testMultiDeviceSync();
```
#### 2.1 Distinguishing secondary devices
```cpp
streamDevList.clear();
// Query the list of connected devices
auto devList = context.queryDeviceList();
int devCount = devList->deviceCount();
for(int i = 0; i < devCount; i++) {
streamDevList.push_back(devList->getDevice(i));
}
if(streamDevList.empty()) {
std::cerr << "Device list is empty. please check device connection state" << std::endl;
return -1;
}
// traverse the device list and create the device
std::vector<std::shared_ptr<ob::Device>> primary_devices;
std::vector<std::shared_ptr<ob::Device>> secondary_devices;
for(auto dev: streamDevList) {
auto config = dev->getMultiDeviceSyncConfig();
if(config.syncMode == OB_MULTI_DEVICE_SYNC_MODE_PRIMARY) {
primary_devices.push_back(dev);
}
else {
secondary_devices.push_back(dev);
}
}
```
#### 2.2 Enable secondary devices
```cpp
std::cout << "Secondary devices start..." << std::endl;
startDeviceStreams(secondary_devices, 0);
```
#### 2.3 Enable Primary device
```cpp
std::cout << "Primary device start..." << std::endl;
startDeviceStreams(primary_devices, static_cast<int>(secondary_devices.size()));
```
#### 2.4 Set software synchronization interval time
```cpp
// Start the multi-device time synchronization function
context.enableDeviceClockSync(60000);
```
#### 2.5 Create a FramePairingManager object for multi-device timestamp pairing
``` cpp
auto framePairingManager = std::make_shared<FramePairingManager>();
framePairingManager->setPipelineHolderList(pipelineHolderList);
```
#### 2.6 Pair multiple devices based on timestamps
```cpp
std::vector<std::pair<std::shared_ptr<ob::Frame>, std::shared_ptr<ob::Frame>>> framePairs = framePairingManager->getFramePairs();
if(framePairs.size() == 0) {
continue;
}
```
#### 2.7 Close data stream
```cpp
// Stop streams and clear resources
for(auto &holder: pipelineHolderList) {
holder->stopStream();
}
pipelineHolderList.clear();
```
#### 2.8 Software Triggering Mode
Set the device synchronization mode to `OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING` after opening the stream, and the device will wait for the trigger signal (command) sent by the upper layer after opening the stream. The number of frames to be triggered for triggering mode can be configured through `framesPerTrigger`. The method for triggering images:
```c++
auto multiDeviceSyncConfig = dev->getMultiDeviceSyncConfig();
if(multiDeviceSyncConfig.syncMode == OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING)
{
dev->triggerCapture();
}
```
*Press `t` in the render window to trigger a capture once.*
## Configuration file parameter description
**Notes: The configuration parameters for multi-device sync may vary between different devices. Please refer to the [Multi-device Sync documentation](https://www.orbbec.com/docs-general/set-up-cameras-for-external-synchronization_v1-2/)**
config file : MultiDeviceSyncConfig.json
```
{
"version": "1.0.0",
"configTime": "2023/01/01",
"devices": [
{
"sn": "CP2194200060", //device serial number
"syncConfig": {
"syncMode": "OB_MULTI_DEVICE_SYNC_MODE_PRIMARY", // sync mode
"depthDelayUs": 0, //Configure depth trigger delay, unit: microseconds
"colorDelayUs": 0, //Configure color trigger delay, unit: microseconds
"trigger2ImageDelayUs": 0, //Configure trigger image delay, unit: microseconds
"triggerOutEnable": true, //Configure trigger signal output enable.
"triggerOutDelayUs": 0, //Configure trigger signal output delay, unit: microsecond
"framesPerTrigger": 1 //Configure the number of frames captured by each trigger in the trigger mode
}
},
{
"sn": "CP0Y8420004K",
"syncConfig": {
"syncMode": "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY",
"depthDelayUs": 0,
"colorDelayUs": 0,
"trigger2ImageDelayUs": 0,
"triggerOutEnable": true,
"triggerOutDelayUs": 0,
"framesPerTrigger": 1
}
}
]
}
```
**There are three synchronization configuration methods for network devices and USB devices.**
- The first method is to set one device as OB_MULTI_DEVICE_SYNC_MODE_PRIMARY, and configure the other devices as OB_MULTI_DEVICE_SYNC_MODE_SECONDARY.
- The second method is to set one device as OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING and configure the other devices as OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING. Capture images by sending a software trigger command (`dev->triggerCapture()`).
- The third method is to set all devices as OB_MULTI_DEVICE_SYNC_MODE_SECONDARY, in this mode, an external trigger signal is required.
**For GMSL devices, please refer to the following document.**
## GMSL Multi devices Sync
### Method 1:Multi-device sync via 8-pin port
When using 8-pin port for multi-device synchronization, in order to ensure the quality of the synchronization signal, it is necessary to use it together with a multi-device sync hub,please refer [Multi-device Sync documentation](https://www.orbbec.com/docs-general/set-up-cameras-for-external-synchronization_v1-2/).
Via the 8-pin port, GMSL multi-device sync works the same as for USB devices, and the supported synchronization modes are also the same.
### Method 2: Multi-device sync via GMSL2/FAKRA
For GMSL multi-device sync, please refer to [this document](https://www.orbbec.com/docs/gemini-335lg-hardware-synchronization/). There are two usage methods:
The first is to set all devices as OB_MULTI_DEVICE_SYNC_MODE_SECONDARY mode and synchronize them through PWM triggering.
The second is to set all devices as OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING mode and synchronize them through PWM triggering.
PWM triggering please refer ob_multi_devices_sync_gmsltrigger sample.
* Notes: To make the multi devices sync sample simple and versatile, the PWM trigger has been separated into its own sample. GMSL2/FAKRA requires running two samples for testing. If you are developing your own application, you can combine these two functionalities into a single application.
## Run Sample
### windows
The following demonstrates how to use the multi-device synchronization sample on Windows with the Gemini 335L.
- Double-click ob_multi_devices_sync.exe, and the following dialog will appear. Then select 0.
![windows_sync](image/windows_sync.png)
0: Configure sync mode and start stream
1: Start stream: If the parameters for multi device sync mode have been configured, you can start the stream directly.
- Multi-device synchronization test results are as follows
![windows sync result](image/windows_sync_result.png)
Observe the timestamps. As shown in the figure above, the device timestamps of the two devices are identical, indicating that the two devices are successfully synchronized.
### Linux/ARM64
- For USB device or Ethernet device multi-device synchronization, simply execute ob_multi_devices_sync.
```
$ ./ob_multi_devices_sync
```
**Notes:**
**Multi-device sync via 8-pin port, GMSL multi devices sync is the same as that for USB devices, and the supported synchronization modes are also the same.**
- For GMSL device multi-device sync via GMSL2/FAKRA, run the sample according to the following steps
**1. Open the first terminal and run the multi-devices sync sample**
```
$ ./ob_multi_devices_sync
--------------------------------------------------
Please select options:
0 --> config devices sync mode.
1 --> start stream
--------------------------------------------------
Please select input: 0
```
**2. Open the second terminal and run the sample that sends PWM trigger signals with administrator privileges**
```
orbbec@agx:~/SensorSDK/build/install/Example/bin$ sudo ./ob_multi_devices_sync_gmsltrigger
Please select options:
------------------------------------------------------------
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
------------------------------------------------------------
input select item: 0
Enter FPS (frames per second) (for example: 3000): 3000
Setting FPS to 3000...
Please select options:
------------------------------------------------------------
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
------------------------------------------------------------
input select item: 1
```
**Notes:**
- Enter FPS (frames per second) (for example: 3000): 3000 (3000 indicates 30 fps).
The differences between the two sync modes are as follows:
- OB_MULTI_DEVICE_SYNC_MODE_SECONDARY Mode: Sets the device to secondary mode. In this mode, the PWM trigger frame rate must match the actual streaming frame rate. For example, if the streaming frame rate is 30 fps, the PWM frame rate must also be set to 30
- OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING mode: Sets the device to hardware triggering mode. In this mode, the PWM trigger signal must not exceed half of the streaming frame rate. For example, if the streaming frame rate is set to 30 fps, and the PWM trigger signal exceeds 15, the camera will still only capture images at 15 fps. In other words, when the streaming frame rate is 30 fps, the valid range for the PWM trigger signal is 1 to 15 fps.
#### Test Results
The multi-device synchronization results of six Gemini 335Lg on AGX Orin are as follows:
![AGX Orin sync result](image/AGX_ORIN_result.png)

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.6 MiB

View File

@@ -0,0 +1,449 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>

#include "PipelineHolder.hpp"
#include "FramePairingManager.hpp"
#include "utils.hpp"
#include "utils_opencv.hpp"
#include "utils/cJSON.h"

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <cstring>
#include <fstream>
#include <functional>
#include <iostream>
#include <map>
#include <mutex>
#include <sstream>
#include <stdexcept>
#include <string>
#include <thread>
#include <unordered_map>
#include <vector>
#define MAX_DEVICE_COUNT 9
#define CONFIG_FILE "./MultiDeviceSyncConfig.json"
#define KEY_ESC 27
static bool quitStreamPreview = false;
// Pairs a device serial number with the multi-device sync configuration that
// should be written to that device (one entry per "devices" element parsed
// from MultiDeviceSyncConfig.json).
typedef struct DeviceConfigInfo_t {
    std::string deviceSN;               // "sn" field: device serial number used to match a connected device
    OBMultiDeviceSyncConfig syncConfig; // "syncConfig" field: settings applied via setMultiDeviceSyncConfig()
} DeviceConfigInfo;
std::vector<std::shared_ptr<ob::Device>> streamDevList;
std::vector<std::shared_ptr<ob::Device>> configDevList;
std::vector<std::shared_ptr<DeviceConfigInfo>> deviceConfigList;
std::condition_variable waitRebootCompleteCondition;
std::mutex rebootingDevInfoListMutex;
std::vector<std::shared_ptr<ob::DeviceInfo>> rebootingDevInfoList;
std::vector<std::shared_ptr<PipelineHolder>> pipelineHolderList;
bool loadConfigFile();
int configMultiDeviceSync();
int testMultiDeviceSync();
std::string OBSyncModeToString(const OBMultiDeviceSyncMode syncMode);
OBMultiDeviceSyncMode stringToOBSyncMode(const std::string &modeString);
std::string readFileContent(const char *filePath);
int strcmp_nocase(const char *str0, const char *str1);
bool checkDevicesWithDeviceConfigs(const std::vector<std::shared_ptr<ob::Device>> &deviceList);
std::shared_ptr<PipelineHolder> createPipelineHolder(std::shared_ptr<ob::Device> device, OBSensorType sensorType, int deviceIndex);
ob::Context context;
// Interactive entry point: repeatedly shows a two-item menu.
//   0 -> write the sync configuration from the JSON file to the devices, then stream.
//   1 -> start streaming with the configuration already stored on the devices.
// Exits once an operation completes successfully (or a fatal SDK error occurs).
int main(void) try {
    int choice;
    int exitValue = 0;

    // Upper bound used to discard the remainder of an invalid input line.
    constexpr std::streamsize maxInputIgnore = 10000;

    while(true) {
        std::cout << "\n--------------------------------------------------\n";
        std::cout << "Please select options: \n";
        std::cout << " 0 --> config devices sync mode. \n";
        std::cout << " 1 --> start stream \n";
        std::cout << "--------------------------------------------------\n";
        std::cout << "Please select input: ";

        // Reject non-numeric input and re-prompt instead of spinning on a failed stream.
        if(!(std::cin >> choice)) {
            std::cin.clear();
            std::cin.ignore(maxInputIgnore, '\n');
            std::cout << "Invalid input. Please enter a number [0~1]" << std::endl;
            continue;
        }
        std::cout << std::endl;

        switch(choice) {
        case 0:
            exitValue = configMultiDeviceSync();
            if(exitValue == 0) {
                std::cout << "Config MultiDeviceSync Success. \n" << std::endl;
                exitValue = testMultiDeviceSync();
            }
            break;
        case 1:
            std::cout << "\nStart Devices video stream." << std::endl;
            exitValue = testMultiDeviceSync();
            break;
        default:
            // Bug fix: an out-of-range number previously fell through with
            // exitValue == 0 and silently exited with success. Re-prompt instead.
            std::cout << "Invalid input. Please enter a number [0~1]" << std::endl;
            continue;
        }

        // A successful operation (exitValue == 0) ends the program.
        if(exitValue == 0) {
            break;
        }
    }
    return exitValue;
}
catch(ob::Error &e) {
    // Fatal SDK error: report details and wait for a key press so a console
    // window launched by double-click stays visible.
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
// Load MultiDeviceSyncConfig.json and write each configured sync setting to the
// matching connected device (matched by serial number, case-insensitively).
// Only fields present in the config file are updated; other device settings are kept.
// Returns 0 on success, -1 on failure (bad config file, no devices, SDK error).
int configMultiDeviceSync() {
    try {
        if(!loadConfigFile()) {
            std::cout << "load config failed" << std::endl;
            return -1;
        }
        if(deviceConfigList.empty()) {
            std::cout << "DeviceConfigList is empty. please check config file: " << CONFIG_FILE << std::endl;
            return -1;
        }

        // Query the list of connected devices
        auto devList  = context.queryDeviceList();
        int  devCount = devList->deviceCount();
        for(int i = 0; i < devCount; i++) {
            // Fix: the original called getDevice(i) twice per iteration and
            // discarded the first (unused) device instance.
            configDevList.push_back(devList->getDevice(i));
        }
        if(configDevList.empty()) {
            std::cerr << "Device list is empty. please check device connection state" << std::endl;
            return -1;
        }

        // write configuration to device
        for(auto config: deviceConfigList) {
            // Locate the connected device whose serial number matches this config entry.
            auto findItr = std::find_if(configDevList.begin(), configDevList.end(), [config](std::shared_ptr<ob::Device> device) {
                auto serialNumber = device->getDeviceInfo()->serialNumber();
                return strcmp_nocase(serialNumber, config->deviceSN.c_str()) == 0;
            });
            if(findItr != configDevList.end()) {
                auto device = (*findItr);

                auto curConfig = device->getMultiDeviceSyncConfig();
                // Update the configuration items of the configuration file, and keep the original configuration for other items
                curConfig.syncMode             = config->syncConfig.syncMode;
                curConfig.depthDelayUs         = config->syncConfig.depthDelayUs;
                curConfig.colorDelayUs         = config->syncConfig.colorDelayUs;
                curConfig.trigger2ImageDelayUs = config->syncConfig.trigger2ImageDelayUs;
                curConfig.triggerOutEnable     = config->syncConfig.triggerOutEnable;
                curConfig.triggerOutDelayUs    = config->syncConfig.triggerOutDelayUs;
                curConfig.framesPerTrigger     = config->syncConfig.framesPerTrigger;
                std::cout << "-Config Device syncMode:" << curConfig.syncMode << ", syncModeStr:" << OBSyncModeToString(curConfig.syncMode) << std::endl;
                device->setMultiDeviceSyncConfig(curConfig);
            }
            // Short pause between devices before writing the next configuration.
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
        return 0;
    }
    catch(ob::Error &e) {
        std::cerr << "configMultiDeviceSync failed! \n";
        std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
        return -1;
    }
}
// Create and start a depth + color pipeline for every device in `devices`.
// `startIndex` is the display index assigned to the first device; each
// subsequent device receives the next index.
void startDeviceStreams(const std::vector<std::shared_ptr<ob::Device>> &devices, int startIndex) {
    const std::vector<OBSensorType> streamTypes = { OB_SENSOR_DEPTH, OB_SENSOR_COLOR };

    int deviceIndex = startIndex;
    for(const auto &device: devices) {
        for(const auto &streamType: streamTypes) {
            auto pipelineHolder = createPipelineHolder(device, streamType, deviceIndex);
            pipelineHolderList.push_back(pipelineHolder);
            pipelineHolder->startStream();
        }
        ++deviceIndex;
    }

    // Streams have been (re)started, so allow the preview loop to run again.
    quitStreamPreview = false;
}
// Handle a key press coming from the preview window:
//   ESC     : stop the preview and close/destroy the window.
//   'S'/'s' : manually re-sync the device clocks.
//   'T'/'t' : send a software trigger to devices in software-triggering mode.
void handleKeyPress(ob_smpl::CVWindow &win, int key) {
    if(key == KEY_ESC) {
        if(quitStreamPreview) {
            return;  // already shutting down; ignore repeated ESC
        }
        win.setShowInfo(false);
        win.setShowSyncTimeInfo(false);
        quitStreamPreview = true;
        win.close();
        win.destroyWindow();
        std::cout << "press ESC quitStreamPreview" << std::endl;
        return;
    }

    if(key == 'S' || key == 's') {
        std::cout << "syncDevicesTime..." << std::endl;
        context.enableDeviceClockSync(60000); // Manual update synchronization
        return;
    }

    if(key == 'T' || key == 't') {
        // Only devices configured for software triggering react to the command.
        std::cout << "check software trigger mode" << std::endl;
        for(auto &dev: streamDevList) {
            if(dev->getMultiDeviceSyncConfig().syncMode == OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING) {
                std::cout << "software trigger..." << std::endl;
                dev->triggerCapture();
            }
        }
    }
}
// Start streams on all connected devices (secondary devices first, then the
// primary), enable periodic device clock sync, and preview timestamp-paired
// frames in a grid window until the user presses ESC.
// Returns 0 on success, -1 when no device is connected.
int testMultiDeviceSync() {
    try {
        streamDevList.clear();
        // Query the list of connected devices
        auto devList  = context.queryDeviceList();
        int  devCount = devList->deviceCount();
        for(int i = 0; i < devCount; i++) {
            streamDevList.push_back(devList->getDevice(i));
        }
        if(streamDevList.empty()) {
            std::cerr << "Device list is empty. please check device connection state" << std::endl;
            return -1;
        }

        // Split the devices into primary and secondary according to the sync
        // configuration already stored on each device.
        std::vector<std::shared_ptr<ob::Device>> primary_devices;
        std::vector<std::shared_ptr<ob::Device>> secondary_devices;
        for(auto dev: streamDevList) {
            auto config = dev->getMultiDeviceSyncConfig();
            if(config.syncMode == OB_MULTI_DEVICE_SYNC_MODE_PRIMARY) {
                primary_devices.push_back(dev);
            }
            else {
                secondary_devices.push_back(dev);
            }
        }

        // Secondary devices must be streaming before the primary starts
        // emitting trigger signals.
        std::cout << "Secondary devices start..." << std::endl;
        startDeviceStreams(secondary_devices, 0);

        if(primary_devices.empty()) {
            std::cerr << "WARNING primary_devices is empty!!!" << std::endl;
        }
        else {
            std::cout << "Primary device start..." << std::endl;
            startDeviceStreams(primary_devices, static_cast<int>(secondary_devices.size()));
        }

        // Start the multi-device time synchronization function (re-syncs every 60 s).
        context.enableDeviceClockSync(60000);

        // Pairs frames across devices by timestamp.
        auto framePairingManager = std::make_shared<FramePairingManager>();
        framePairingManager->setPipelineHolderList(pipelineHolderList);

        // Create a window for rendering and set the resolution of the window
        ob_smpl::CVWindow win("MultiDeviceSyncViewer", 1600, 900, ob_smpl::ARRANGE_GRID);
        win.setKeyPrompt("'S': syncDevicesTime, 'T': software trigger");
        win.setKeyPressedCallback([&](int key) { handleKeyPress(win, key); });
        win.setShowInfo(true);
        win.setShowSyncTimeInfo(true);

        // quitStreamPreview is set by the ESC handler; checking it in the loop
        // condition is sufficient (the old extra in-loop check was redundant).
        while(win.run() && !quitStreamPreview) {
            std::vector<std::pair<std::shared_ptr<ob::Frame>, std::shared_ptr<ob::Frame>>> framePairs = framePairingManager->getFramePairs();
            if(framePairs.empty()) {
                continue;
            }
            // Render each matched pair in its own grid cell (group ids start at 1).
            int groupID = 0;
            for(const auto &pair: framePairs) {
                groupID++;
                win.pushFramesToView({ pair.first, pair.second }, groupID);
            }
        }

        framePairingManager->release();

        // Stop streams and clear resources
        for(auto &holder: pipelineHolderList) {
            holder->stopStream();
        }
        pipelineHolderList.clear();

        // Release resource
        streamDevList.clear();
        configDevList.clear();
        deviceConfigList.clear();
        return 0;
    }
    catch(ob::Error &e) {
        std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        exit(EXIT_FAILURE);
        return -1;  // unreachable (exit above is [[noreturn]]); kept for compilers
    }
}
// Build a PipelineHolder wrapping a pipeline on `device` for the given sensor
// stream, tagged with the device's serial number and its display index.
std::shared_ptr<PipelineHolder> createPipelineHolder(std::shared_ptr<ob::Device> device, OBSensorType sensorType, int deviceIndex) {
    auto serialNumber = device->getDeviceInfo()->serialNumber();
    return std::make_shared<PipelineHolder>(std::make_shared<ob::Pipeline>(device), sensorType, serialNumber, deviceIndex);
}
// Read the entire file at `filePath` into a string.
// Logs to stderr and returns an empty string when the file cannot be opened.
std::string readFileContent(const char *filePath) {
    std::ifstream input(filePath);
    if(!input.is_open()) {
        std::cerr << "Failed to open file: " << filePath << std::endl;
        return "";
    }
    // Stream the whole file buffer into the string; the ifstream closes itself
    // when it goes out of scope (RAII).
    std::ostringstream buffer;
    buffer << input.rdbuf();
    return buffer.str();
}
// Load MultiDeviceSyncConfig.json and fill deviceConfigList with one entry per
// device whose sync mode is valid (anything other than FREE_RUN).
// Returns false when the file cannot be read or is not valid JSON.
bool loadConfigFile() {
    int deviceCount = 0;

    auto content = readFileContent(CONFIG_FILE);
    if(content.empty()) {
        std::cerr << "load config file failed." << std::endl;
        return false;
    }

    cJSON *rootElem = cJSON_Parse(content.c_str());
    if(rootElem == nullptr) {
        // Bug fix: a parse failure previously returned true, letting the caller
        // proceed with an empty configuration. Report the error and fail.
        // cJSON_GetErrorPtr() may be null, so guard before printing.
        const char *errMsg = cJSON_GetErrorPtr();
        std::cerr << "config file parse failed: " << (errMsg != nullptr ? errMsg : "unknown error") << std::endl;
        return false;
    }

    cJSON *deviceElem  = nullptr;
    cJSON *devicesElem = cJSON_GetObjectItem(rootElem, "devices");
    cJSON_ArrayForEach(deviceElem, devicesElem) {
        auto devConfigInfo = std::make_shared<DeviceConfigInfo>();
        memset(&devConfigInfo->syncConfig, 0, sizeof(devConfigInfo->syncConfig));
        // Default to FREE_RUN; entries that keep this value are rejected below.
        devConfigInfo->syncConfig.syncMode = OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN;

        // "sn": serial number used to match this entry to a connected device.
        cJSON *snElem = cJSON_GetObjectItem(deviceElem, "sn");
        if(cJSON_IsString(snElem) && snElem->valuestring != nullptr) {
            devConfigInfo->deviceSN = std::string(snElem->valuestring);
        }

        cJSON *deviceConfigElem = cJSON_GetObjectItem(deviceElem, "syncConfig");
        if(cJSON_IsObject(deviceConfigElem)) {
            cJSON *strElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "syncMode");
            if(cJSON_IsString(strElem) && strElem->valuestring != nullptr) {
                // Bug fix: an unrecognized mode string previously threw
                // std::invalid_argument past the ob::Error handlers and
                // terminated the app. Reject the entry instead.
                try {
                    devConfigInfo->syncConfig.syncMode = stringToOBSyncMode(strElem->valuestring);
                    std::cout << "config[" << (deviceCount++) << "]: SN=" << std::string(devConfigInfo->deviceSN) << ", mode=" << strElem->valuestring
                              << std::endl;
                }
                catch(const std::invalid_argument &ex) {
                    std::cerr << ex.what() << std::endl;  // mode stays FREE_RUN -> entry rejected below
                }
            }

            cJSON *numberElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "depthDelayUs");
            if(cJSON_IsNumber(numberElem)) {
                devConfigInfo->syncConfig.depthDelayUs = numberElem->valueint;
            }
            numberElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "colorDelayUs");
            if(cJSON_IsNumber(numberElem)) {
                devConfigInfo->syncConfig.colorDelayUs = numberElem->valueint;
            }
            numberElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "trigger2ImageDelayUs");
            if(cJSON_IsNumber(numberElem)) {
                devConfigInfo->syncConfig.trigger2ImageDelayUs = numberElem->valueint;
            }
            numberElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "triggerOutDelayUs");
            if(cJSON_IsNumber(numberElem)) {
                devConfigInfo->syncConfig.triggerOutDelayUs = numberElem->valueint;
            }
            // framesPerTrigger is numeric; read it through a number pointer
            // (the original reused the bool-named pointer here).
            numberElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "framesPerTrigger");
            if(cJSON_IsNumber(numberElem)) {
                devConfigInfo->syncConfig.framesPerTrigger = numberElem->valueint;
            }

            cJSON *bElem = cJSON_GetObjectItemCaseSensitive(deviceConfigElem, "triggerOutEnable");
            if(cJSON_IsBool(bElem)) {
                devConfigInfo->syncConfig.triggerOutEnable = (bool)bElem->valueint;
            }
        }

        // Only keep entries with an explicitly configured (non-free-run) mode.
        if(OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN != devConfigInfo->syncConfig.syncMode) {
            deviceConfigList.push_back(devConfigInfo);
        }
        else {
            std::cerr << "Invalid sync mode of deviceSN: " << devConfigInfo->deviceSN << std::endl;
        }
    }

    cJSON_Delete(rootElem);
    return true;
}
// Convert a sync-mode name from the config file into the SDK enum value.
// Throws std::invalid_argument for names not present in the table.
OBMultiDeviceSyncMode stringToOBSyncMode(const std::string &modeString) {
    static const std::unordered_map<std::string, OBMultiDeviceSyncMode> kNameToMode = {
        { "OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN", OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN },
        { "OB_MULTI_DEVICE_SYNC_MODE_STANDALONE", OB_MULTI_DEVICE_SYNC_MODE_STANDALONE },
        { "OB_MULTI_DEVICE_SYNC_MODE_PRIMARY", OB_MULTI_DEVICE_SYNC_MODE_PRIMARY },
        { "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY", OB_MULTI_DEVICE_SYNC_MODE_SECONDARY },
        { "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY_SYNCED", OB_MULTI_DEVICE_SYNC_MODE_SECONDARY_SYNCED },
        { "OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING", OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING },
        { "OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING", OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING }
    };

    const auto entry = kNameToMode.find(modeString);
    if(entry == kNameToMode.end()) {
        throw std::invalid_argument("Unrecognized sync mode: " + modeString);
    }
    return entry->second;
}
std::string OBSyncModeToString(const OBMultiDeviceSyncMode syncMode) {
static const std::unordered_map<OBMultiDeviceSyncMode, std::string> modeToStringMap = {
{ OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN, "OB_MULTI_DEVICE_SYNC_MODE_FREE_RUN" },
{ OB_MULTI_DEVICE_SYNC_MODE_STANDALONE, "OB_MULTI_DEVICE_SYNC_MODE_STANDALONE" },
{ OB_MULTI_DEVICE_SYNC_MODE_PRIMARY, "OB_MULTI_DEVICE_SYNC_MODE_PRIMARY" },
{ OB_MULTI_DEVICE_SYNC_MODE_SECONDARY, "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY" },
{ OB_MULTI_DEVICE_SYNC_MODE_SECONDARY_SYNCED, "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY_SYNCED" },
{ OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING, "OB_MULTI_DEVICE_SYNC_MODE_SOFTWARE_TRIGGERING" },
{ OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING, "OB_MULTI_DEVICE_SYNC_MODE_HARDWARE_TRIGGERING" }
};
auto it = modeToStringMap.find(syncMode);
if(it != modeToStringMap.end()) {
return it->second;
}
std::stringstream ss;
ss << "Unmapped sync mode value: " << static_cast<int>(syncMode) << ". Please check the sync mode value.";
throw std::invalid_argument(ss.str());
}
// Case-insensitive comparison of two NUL-terminated C strings.
// Returns 0 when equal (ignoring case), non-zero otherwise.
int strcmp_nocase(const char *str0, const char *str1) {
#if !(defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(__NT__))
    return strcasecmp(str0, str1);  // POSIX
#else
    return _strcmpi(str0, str1);  // MSVC runtime
#endif
}

View File

@@ -0,0 +1,298 @@
/*
Copyright (c) 2009-2017 Dave Gamble and cJSON contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#ifndef cJSON__h
#define cJSON__h
#ifdef __cplusplus
extern "C" {
#endif
#if !defined(__WINDOWS__) && (defined(WIN32) || defined(WIN64) || defined(_MSC_VER) || defined(_WIN32))
#define __WINDOWS__
#endif
#ifdef __WINDOWS__
/* When compiling for windows, we specify a specific calling convention to avoid issues where we are being called from a project with a different default
calling convention. For windows you have 3 define options:
CJSON_HIDE_SYMBOLS - Define this in the case where you don't want to ever dllexport symbols
CJSON_EXPORT_SYMBOLS - Define this on library build when you want to dllexport symbols (default)
CJSON_IMPORT_SYMBOLS - Define this if you want to dllimport symbol
For *nix builds that support visibility attribute, you can define similar behavior by
setting default visibility to hidden by adding
-fvisibility=hidden (for gcc)
or
-xldscope=hidden (for sun cc)
to CFLAGS
then using the CJSON_API_VISIBILITY flag to "export" the same symbols the way CJSON_EXPORT_SYMBOLS does
*/
#define CJSON_CDECL __cdecl
#define CJSON_STDCALL __stdcall
/* export symbols by default, this is necessary for copy pasting the C and header file */
#if !defined(CJSON_HIDE_SYMBOLS) && !defined(CJSON_IMPORT_SYMBOLS) && !defined(CJSON_EXPORT_SYMBOLS)
#define CJSON_EXPORT_SYMBOLS
#endif
#if defined(CJSON_HIDE_SYMBOLS)
#define CJSON_PUBLIC(type) type CJSON_STDCALL
#elif defined(CJSON_EXPORT_SYMBOLS)
#define CJSON_PUBLIC(type) __declspec(dllexport) type CJSON_STDCALL
#elif defined(CJSON_IMPORT_SYMBOLS)
#define CJSON_PUBLIC(type) __declspec(dllimport) type CJSON_STDCALL
#endif
#else /* !__WINDOWS__ */
#define CJSON_CDECL
#define CJSON_STDCALL
#if(defined(__GNUC__) || defined(__SUNPRO_CC) || defined(__SUNPRO_C)) && defined(CJSON_API_VISIBILITY)
#define CJSON_PUBLIC(type) __attribute__((visibility("default"))) type
#else
#define CJSON_PUBLIC(type) type
#endif
#endif
/* project version */
#define CJSON_VERSION_MAJOR 1
#define CJSON_VERSION_MINOR 7
#define CJSON_VERSION_PATCH 15
#include <stddef.h>
/* cJSON Types: */
#define cJSON_Invalid (0)
#define cJSON_False (1 << 0)
#define cJSON_True (1 << 1)
#define cJSON_NULL (1 << 2)
#define cJSON_Number (1 << 3)
#define cJSON_String (1 << 4)
#define cJSON_Array (1 << 5)
#define cJSON_Object (1 << 6)
#define cJSON_Raw (1 << 7) /* raw json */
#define cJSON_IsReference 256
#define cJSON_StringIsConst 512
/* The cJSON structure: */
typedef struct cJSON {
    /* next/prev allow you to walk array/object chains. Alternatively, use GetArraySize/GetArrayItem/GetObjectItem */
    struct cJSON *next;
    struct cJSON *prev;
    /* An array or object item will have a child pointer pointing to a chain of the items in the array/object. */
    struct cJSON *child;
    /* The type of the item, as above. */
    int type;
    /* The item's string, if type==cJSON_String or type==cJSON_Raw */
    char *valuestring;
    /* writing to valueint is DEPRECATED, use cJSON_SetNumberValue instead */
    int valueint;
    /* The item's number, if type==cJSON_Number */
    double valuedouble;
    /* The item's name string, if this item is the child of, or is in the list of subitems of an object. */
    char *string;
} cJSON;
typedef struct cJSON_Hooks {
/* malloc/free are CDECL on Windows regardless of the default calling convention of the compiler, so ensure the hooks allow passing those functions
* directly. */
void *(CJSON_CDECL *malloc_fn)(size_t sz);
void(CJSON_CDECL *free_fn)(void *ptr);
} cJSON_Hooks;
typedef int cJSON_bool;
/* Limits how deeply nested arrays/objects can be before cJSON rejects to parse them.
* This is to prevent stack overflows. */
#ifndef CJSON_NESTING_LIMIT
#define CJSON_NESTING_LIMIT 1000
#endif
/* returns the version of cJSON as a string */
CJSON_PUBLIC(const char *) cJSON_Version(void);
/* Supply malloc, realloc and free functions to cJSON */
CJSON_PUBLIC(void) cJSON_InitHooks(cJSON_Hooks *hooks);
/* Memory Management: the caller is always responsible to free the results from all variants of cJSON_Parse (with cJSON_Delete) and cJSON_Print (with stdlib
* free, cJSON_Hooks.free_fn, or cJSON_free as appropriate). The exception is cJSON_PrintPreallocated, where the caller has full responsibility of the buffer.
*/
/* Supply a block of JSON, and this returns a cJSON object you can interrogate. */
CJSON_PUBLIC(cJSON *) cJSON_Parse(const char *value);
CJSON_PUBLIC(cJSON *) cJSON_ParseWithLength(const char *value, size_t buffer_length);
/* ParseWithOpts allows you to require (and check) that the JSON is null terminated, and to retrieve the pointer to the final byte parsed. */
/* If you supply a ptr in return_parse_end and parsing fails, then return_parse_end will contain a pointer to the error so will match cJSON_GetErrorPtr(). */
CJSON_PUBLIC(cJSON *) cJSON_ParseWithOpts(const char *value, const char **return_parse_end, cJSON_bool require_null_terminated);
CJSON_PUBLIC(cJSON *) cJSON_ParseWithLengthOpts(const char *value, size_t buffer_length, const char **return_parse_end, cJSON_bool require_null_terminated);
/* Render a cJSON entity to text for transfer/storage. */
CJSON_PUBLIC(char *) cJSON_Print(const cJSON *item);
/* Render a cJSON entity to text for transfer/storage without any formatting. */
CJSON_PUBLIC(char *) cJSON_PrintUnformatted(const cJSON *item);
/* Render a cJSON entity to text using a buffered strategy. prebuffer is a guess at the final size. guessing well reduces reallocation. fmt=0 gives unformatted,
* =1 gives formatted */
CJSON_PUBLIC(char *) cJSON_PrintBuffered(const cJSON *item, int prebuffer, cJSON_bool fmt);
/* Render a cJSON entity to text using a buffer already allocated in memory with given length. Returns 1 on success and 0 on failure. */
/* NOTE: cJSON is not always 100% accurate in estimating how much memory it will use, so to be safe allocate 5 bytes more than you actually need */
CJSON_PUBLIC(cJSON_bool) cJSON_PrintPreallocated(cJSON *item, char *buffer, const int length, const cJSON_bool format);
/* Delete a cJSON entity and all subentities. */
CJSON_PUBLIC(void) cJSON_Delete(cJSON *item);
/* Returns the number of items in an array (or object). */
CJSON_PUBLIC(int) cJSON_GetArraySize(const cJSON *array);
/* Retrieve item number "index" from array "array". Returns NULL if unsuccessful. */
CJSON_PUBLIC(cJSON *) cJSON_GetArrayItem(const cJSON *array, int index);
/* Get item "string" from object. Case insensitive. */
CJSON_PUBLIC(cJSON *) cJSON_GetObjectItem(const cJSON *const object, const char *const string);
CJSON_PUBLIC(cJSON *) cJSON_GetObjectItemCaseSensitive(const cJSON *const object, const char *const string);
CJSON_PUBLIC(cJSON_bool) cJSON_HasObjectItem(const cJSON *object, const char *string);
/* For analysing failed parses. This returns a pointer to the parse error. You'll probably need to look a few chars back to make sense of it. Defined when
* cJSON_Parse() returns 0. 0 when cJSON_Parse() succeeds. */
CJSON_PUBLIC(const char *) cJSON_GetErrorPtr(void);
/* Check item type and return its value */
CJSON_PUBLIC(char *) cJSON_GetStringValue(const cJSON *const item);
CJSON_PUBLIC(double) cJSON_GetNumberValue(const cJSON *const item);
/* These functions check the type of an item */
CJSON_PUBLIC(cJSON_bool) cJSON_IsInvalid(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsFalse(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsTrue(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsBool(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsNull(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsNumber(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsString(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsArray(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsObject(const cJSON *const item);
CJSON_PUBLIC(cJSON_bool) cJSON_IsRaw(const cJSON *const item);
/* These calls create a cJSON item of the appropriate type. */
CJSON_PUBLIC(cJSON *) cJSON_CreateNull(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateTrue(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateFalse(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateBool(cJSON_bool boolean);
CJSON_PUBLIC(cJSON *) cJSON_CreateNumber(double num);
CJSON_PUBLIC(cJSON *) cJSON_CreateString(const char *string);
/* raw json */
CJSON_PUBLIC(cJSON *) cJSON_CreateRaw(const char *raw);
CJSON_PUBLIC(cJSON *) cJSON_CreateArray(void);
CJSON_PUBLIC(cJSON *) cJSON_CreateObject(void);
/* Create a string where valuestring references a string so
* it will not be freed by cJSON_Delete */
CJSON_PUBLIC(cJSON *) cJSON_CreateStringReference(const char *string);
/* Create an object/array that only references its elements so
* they will not be freed by cJSON_Delete */
CJSON_PUBLIC(cJSON *) cJSON_CreateObjectReference(const cJSON *child);
CJSON_PUBLIC(cJSON *) cJSON_CreateArrayReference(const cJSON *child);
/* These utilities create an Array of count items.
* The parameter count cannot be greater than the number of elements in the number array, otherwise array access will be out of bounds.*/
CJSON_PUBLIC(cJSON *) cJSON_CreateIntArray(const int *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateFloatArray(const float *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateDoubleArray(const double *numbers, int count);
CJSON_PUBLIC(cJSON *) cJSON_CreateStringArray(const char *const *strings, int count);
/* Append item to the specified array/object. */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToArray(cJSON *array, cJSON *item);
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToObject(cJSON *object, const char *string, cJSON *item);
/* Use this when string is definitely const (i.e. a literal, or as good as), and will definitely survive the cJSON object.
* WARNING: When this function was used, make sure to always check that (item->type & cJSON_StringIsConst) is zero before
* writing to `item->string` */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemToObjectCS(cJSON *object, const char *string, cJSON *item);
/* Append reference to item to the specified array/object. Use this when you want to add an existing cJSON to a new cJSON, but don't want to corrupt your
* existing cJSON. */
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemReferenceToArray(cJSON *array, cJSON *item);
CJSON_PUBLIC(cJSON_bool) cJSON_AddItemReferenceToObject(cJSON *object, const char *string, cJSON *item);
/* Remove/Detach items from Arrays/Objects. */
CJSON_PUBLIC(cJSON *) cJSON_DetachItemViaPointer(cJSON *parent, cJSON *const item);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromArray(cJSON *array, int which);
CJSON_PUBLIC(void) cJSON_DeleteItemFromArray(cJSON *array, int which);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObject(cJSON *object, const char *string);
CJSON_PUBLIC(cJSON *) cJSON_DetachItemFromObjectCaseSensitive(cJSON *object, const char *string);
CJSON_PUBLIC(void) cJSON_DeleteItemFromObject(cJSON *object, const char *string);
CJSON_PUBLIC(void) cJSON_DeleteItemFromObjectCaseSensitive(cJSON *object, const char *string);
/* Update array items. */
CJSON_PUBLIC(cJSON_bool) cJSON_InsertItemInArray(cJSON *array, int which, cJSON *newitem); /* Shifts pre-existing items to the right. */
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemViaPointer(cJSON *const parent, cJSON *const item, cJSON *replacement);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInArray(cJSON *array, int which, cJSON *newitem);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInObject(cJSON *object, const char *string, cJSON *newitem);
CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemInObjectCaseSensitive(cJSON *object, const char *string, cJSON *newitem);
/* Duplicate a cJSON item */
CJSON_PUBLIC(cJSON *) cJSON_Duplicate(const cJSON *item, cJSON_bool recurse);
/* Duplicate will create a new, identical cJSON item to the one you pass, in new memory that will
* need to be released. With recurse!=0, it will duplicate any children connected to the item.
* The item->next and ->prev pointers are always zero on return from Duplicate. */
/* Recursively compare two cJSON items for equality. If either a or b is NULL or invalid, they will be considered unequal.
* case_sensitive determines if object keys are treated case sensitive (1) or case insensitive (0) */
CJSON_PUBLIC(cJSON_bool) cJSON_Compare(const cJSON *const a, const cJSON *const b, const cJSON_bool case_sensitive);
/* Minify a string: remove blank characters (such as ' ', '\t', '\r', '\n') from it.
* The input pointer json cannot point to a read-only address area, such as a string constant,
* but should point to a readable and writable address area. */
CJSON_PUBLIC(void) cJSON_Minify(char *json);
/* Helper functions for creating and adding items to an object at the same time.
* They return the added item or NULL on failure. */
CJSON_PUBLIC(cJSON *) cJSON_AddNullToObject(cJSON *const object, const char *const name);
CJSON_PUBLIC(cJSON *) cJSON_AddTrueToObject(cJSON *const object, const char *const name);
CJSON_PUBLIC(cJSON *) cJSON_AddFalseToObject(cJSON *const object, const char *const name);
CJSON_PUBLIC(cJSON *) cJSON_AddBoolToObject(cJSON *const object, const char *const name, const cJSON_bool boolean);
CJSON_PUBLIC(cJSON *) cJSON_AddNumberToObject(cJSON *const object, const char *const name, const double number);
CJSON_PUBLIC(cJSON *) cJSON_AddStringToObject(cJSON *const object, const char *const name, const char *const string);
CJSON_PUBLIC(cJSON *) cJSON_AddRawToObject(cJSON *const object, const char *const name, const char *const raw);
CJSON_PUBLIC(cJSON *) cJSON_AddObjectToObject(cJSON *const object, const char *const name);
CJSON_PUBLIC(cJSON *) cJSON_AddArrayToObject(cJSON *const object, const char *const name);
/* When assigning an integer value, it needs to be propagated to valuedouble too. */
#define cJSON_SetIntValue(object, number) ((object) ? (object)->valueint = (object)->valuedouble = (number) : (number))
/* helper for the cJSON_SetNumberValue macro */
CJSON_PUBLIC(double) cJSON_SetNumberHelper(cJSON *object, double number);
#define cJSON_SetNumberValue(object, number) ((object != NULL) ? cJSON_SetNumberHelper(object, (double)number) : (number))
/* Change the valuestring of a cJSON_String object, only takes effect when type of object is cJSON_String */
CJSON_PUBLIC(char *) cJSON_SetValuestring(cJSON *object, const char *valuestring);
/* Macro for iterating over an array or object */
#define cJSON_ArrayForEach(element, array) for(element = (array != NULL) ? (array)->child : NULL; element != NULL; element = element->next)
/* malloc/free objects using the malloc/free functions that have been set with cJSON_InitHooks */
CJSON_PUBLIC(void *) cJSON_malloc(size_t size);
CJSON_PUBLIC(void) cJSON_free(void *object);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

project(ob_multi_devices_sync_gmsltrigger)

add_executable(${PROJECT_NAME} ob_multi_devices_sync_gmsltrigger.cpp)

# Require C++11 and group the example under the "examples" IDE folder in one call.
set_target_properties(${PROJECT_NAME} PROPERTIES
    CXX_STANDARD 11
    FOLDER "examples"
)

target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Start the Visual Studio debugger from the runtime output directory.
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,114 @@
# C++ Sample: 3.advanced.multi_devices_sync_gmsltrigger
## Overview
GMSL Multi-device Hardware Trigger Configuration
Supports connecting GMSL devices (Nvidia Xavier/Orin platforms).
### Knowledge
Send a hardware synchronization signal with a set frequency to all connected GMSL devices for multi-machine hardware synchronization through the /dev/camsync device node.
## Code overview
1. Open the /dev/camsync device.
2. Set the trigger frequency.
```cpp
int startTrigger(uint16_t triggerFps) {
if(!isDeviceOpen) {
if(openDevice() < 0) {
error("open device Failed!");
return -1;
}
}
cs_param_t wt_param = { WRITE_MODE, triggerFps };
int ret = writeTriggerParam(wt_param);
if(ret < 0) {
error("write trigger parameter Failed!");
return ret;
}
info("write param: ", wt_param);
cs_param_t rd_param = { READ_MODE, 0 };
ret = readTriggerParam(rd_param);
if(ret < 0) {
error("read trigger parameter Failed!");
}
info("read param: ", rd_param);
return ret;
}
```
## Run Sample
### 1.**Setup of Trigger Source and Device Node Permissions**
```
sudo chmod 777 /dev/camsync
```
### 2.**Running Configuration and Triggering Program**
The program is located in the Example/bin directory of the orbbecsdk.
Sample Configuration: 30FPS and Trigger Source Frequency of 3000
```
orbbec@agx:~/SensorSDK/build/install/Example/bin$ ./MultiDeviceSyncGmslTrigger
Please select options:
------------------------------------------------------------
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
------------------------------------------------------------
input select item: 0
Enter FPS (frames per second) (for example: 3000): 3000
Setting FPS to 3000...
Please select options:
------------------------------------------------------------
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
------------------------------------------------------------
input select item: 1
write param: mode=1, fps=3000
read param: mode=1, fps=3000
start pwm source TriggerSync...
Please select options:
------------------------------------------------------------
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
------------------------------------------------------------
input select item:
```
**Brief description of configuring the trigger program**
Please select options:
0 --> config GMSL SOC hardware trigger Source. Set trigger fps:
1 --> start Trigger
2 --> stop Trigger
3 --> exit
0: Configure the frequency of the SOC hardware trigger source.
(Configure the trigger frequency, e.g. 3000. Note: it is recommended to set the trigger frequency equal to or less than the configured video frame rate — for example, with a 30 FPS video stream the trigger frequency should be 3000 or less.)
1: Start hardware trigger. (starts sending the hardware trigger signal at the configured frequency)
2: Stop triggering
3: Exit the program

View File

@@ -0,0 +1,167 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
/*
Notes: MultiDeviceSyncGmslSocTrigger for GMSL device
on the nvidia arm64 xavier/orin platform ,this sample use nvidia platform soc pwm trigger to sync multi device.
*/
#ifdef __linux__
#include <libobsensor/ObSensor.hpp>
#include <unistd.h>
#include <fcntl.h>
#include <iostream>
#include <string>
static const char *  DEVICE_PATH = "/dev/camsync";  // SOC sync-trigger device node
static const uint8_t WRITE_MODE  = 1;               // cs_param_t.mode: program trigger params
static const uint8_t READ_MODE   = 0;               // cs_param_t.mode: read back trigger params

/* Parameter block exchanged with the /dev/camsync driver. */
typedef struct {
    uint8_t  mode;  // WRITE_MODE or READ_MODE
    uint16_t fps;   // trigger frequency (e.g. 3000 when streaming at 30 FPS)
} cs_param_t;

/**
 * Thin wrapper around the /dev/camsync character device that drives the GMSL
 * hardware trigger signal on Nvidia Xavier/Orin platforms.
 *
 * The device node is opened lazily on the first startTrigger() call and is
 * closed by stopTrigger() or by the destructor, so the descriptor never leaks.
 */
class PwmTrigger {
public:
    PwmTrigger() : fd(-1), isDeviceOpen(false) {}

    ~PwmTrigger() {
        closeDevice();
    }

    // The object owns a raw file descriptor: copying would cause a double close().
    PwmTrigger(const PwmTrigger &) = delete;
    PwmTrigger &operator=(const PwmTrigger &) = delete;

    /**
     * Program the trigger frequency into the driver and read it back for verification.
     * @param triggerFps trigger frequency to program (e.g. 3000)
     * @return non-negative on success, negative on failure
     */
    int startTrigger(uint16_t triggerFps) {
        if(!isDeviceOpen) {
            if(openDevice() < 0) {
                error("open device Failed!");
                return -1;
            }
        }

        cs_param_t wt_param = { WRITE_MODE, triggerFps };
        int        ret      = writeTriggerParam(wt_param);
        if(ret < 0) {
            error("write trigger parameter Failed!");
            return ret;
        }
        info("write param: ", wt_param);

        // Read the parameters back so the user can confirm what the driver applied.
        cs_param_t rd_param = { READ_MODE, 0 };
        ret                 = readTriggerParam(rd_param);
        if(ret < 0) {
            error("read trigger parameter Failed!");
        }
        info("read param: ", rd_param);
        return ret;
    }

    /** Stop triggering by closing the device node. @return 0 on success or if already closed. */
    int stopTrigger() {
        int ret = 0;
        if(isDeviceOpen) {
            ret = closeDevice();
        }
        return ret;
    }

    /** Print a parameter block to stdout, prefixed with msg. */
    void info(const std::string &msg, const cs_param_t &param) {
        std::cout << msg << " mode=" << static_cast<int>(param.mode) << ", fps=" << param.fps << std::endl;
    }

    /** Print an error message to stderr. */
    void error(const std::string &msg) {
        std::cerr << "Error: " << msg << std::endl;
    }

private:
    // Open DEVICE_PATH read-write. Returns the fd (>= 0) or the negative open() result.
    int openDevice() {
        fd = open(DEVICE_PATH, O_RDWR);
        if(fd < 0) {
            error("open /dev/camsync failed");
            return fd;
        }
        isDeviceOpen = true;
        return fd;
    }

    // Close the device if open. Returns 0 on success or if nothing was open.
    int closeDevice() {
        if(isDeviceOpen) {
            isDeviceOpen = false;
            int ret = close(fd);
            fd      = -1;
            if(ret < 0) {
                error("close /dev/camsync failed: " + std::to_string(errno));
                return ret;
            }
        }
        return 0;
    }

    // Write a full cs_param_t to the driver. A short write means the driver did not
    // accept the whole struct, so it is treated as a failure (the original silently
    // accepted it). Uses ssize_t to hold the write() result without narrowing.
    int writeTriggerParam(const cs_param_t &param) {
        ssize_t ret = write(fd, &param, sizeof(param));
        if(ret < 0) {
            error("write /dev/camsync failed: " + std::to_string(errno));
            return -1;
        }
        if(static_cast<size_t>(ret) != sizeof(param)) {
            error("write /dev/camsync incomplete");
            return -1;
        }
        return static_cast<int>(ret);
    }

    // Read a full cs_param_t back from the driver; a short read is treated as a failure.
    int readTriggerParam(cs_param_t &param) {
        ssize_t ret = read(fd, &param, sizeof(param));
        if(ret < 0) {
            error("read /dev/camsync failed: " + std::to_string(errno));
            return -1;
        }
        if(static_cast<size_t>(ret) != sizeof(param)) {
            error("read /dev/camsync incomplete");
            return -1;
        }
        return static_cast<int>(ret);
    }

private:
    int  fd;            // descriptor for /dev/camsync, -1 when closed
    bool isDeviceOpen;  // guards against double open/close
};
/**
 * Interactive console loop for the GMSL SOC PWM trigger example:
 * lets the user configure a trigger frequency, start/stop triggering,
 * and exit. Any ob::Error escaping the loop is reported and the process
 * exits with EXIT_FAILURE.
 */
int main(void) try {
    PwmTrigger pwmTrigger;
    uint16_t   fps = 0;  // last configured trigger frequency; 0 until the user sets it

    constexpr std::streamsize maxInputIgnore = 10000;
    while(true) {
        std::cout << "Please select options: \n"
                  << "------------------------------------------------------------\n"
                  << " 0 --> config GMSL SOC PWM trigger Source. Set trigger fps: \n"
                  << " 1 --> start Trigger \n"
                  << " 2 --> stop Trigger \n"
                  << " 3 --> exit \n"
                  << "------------------------------------------------------------\n"
                  << "input select item: ";

        int index = -1;
        if(!(std::cin >> index)) {
            // Non-numeric input: clear the fail state and discard the bad line.
            std::cin.clear();
            std::cin.ignore(maxInputIgnore, '\n');
            std::cout << "Invalid input. Please enter a number." << std::endl;
            continue;
        }
        std::cout << std::endl;

        switch(index) {
        case 0:
            std::cout << "Enter FPS (frames per second) (for example: 3000): ";
            // Validate the FPS input the same way as the menu index; otherwise a
            // non-numeric entry would leave std::cin failed and spin the menu loop.
            if(!(std::cin >> fps)) {
                std::cin.clear();
                std::cin.ignore(maxInputIgnore, '\n');
                std::cout << "Invalid FPS input. Please enter a number." << std::endl;
                break;
            }
            std::cout << "Setting FPS to " << fps << "..." << std::endl;
            break;
        case 1:
            if(pwmTrigger.startTrigger(fps) < 0) {
                std::cerr << "Failed to start trigger." << std::endl;
            }
            else {
                // Only announce success when the trigger actually started.
                std::cout << "start pwm source TriggerSync... \n" << std::endl;
            }
            break;
        case 2:
            pwmTrigger.stopTrigger();
            std::cout << "stop pwm source TriggerSync... \n" << std::endl;
            break;
        case 3:
            pwmTrigger.stopTrigger();
            std::cout << "Program exit & close device! \n" << std::endl;  // fixed typo: "clse" -> "close"
            return 0;
        default:
            std::cout << "-input Invalid index. \n"
                      << "-Please re-select and input valid param. \n";
        }
    }
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    exit(EXIT_FAILURE);
}
#endif

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

project(ob_point_cloud)

add_executable(${PROJECT_NAME} point_cloud.cpp)

# Require C++11 and group the example under the "examples" IDE folder in one call.
set_target_properties(${PROJECT_NAME} PROPERTIES
    CXX_STANDARD 11
    FOLDER "examples"
)

target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Start the Visual Studio debugger from the runtime output directory.
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,56 @@
# C++ Sample: 3.advanced.point_cloud
## Overview
Connect the device to open the stream, generate a depth point cloud or RGBD point cloud and save it as a ply format file, and exit the program through the ESC\_KEY key
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
## Code overview
1. Create Align Filter and point cloud Filter.
```cpp
// Create a point cloud Filter, which will be used to generate pointcloud frame from depth and color frames.
auto pointCloud = std::make_shared<ob::PointCloudFilter>();
auto align = std::make_shared<ob::Align>(OB_STREAM_COLOR); // align depth frame to color frame
```
2. Create RGBD Point Cloud.Note that the AlignFilter processing is needed before the PointCloud processing.
```cpp
// align depth frame to color frame
auto alignedFrameset = align->process(frameset);
// set to create RGBD point cloud format (will be effective only if color frame and depth frame are contained in the frameset)
pointCloud->setCreatePointFormat(OB_FORMAT_RGB_POINT);
// process the frameset to generate point cloud frame
std::shared_ptr<ob::Frame> frame = pointCloud->process(alignedFrameset);
```
3. create Depth PointCloud
```cpp
// set to create depth point cloud format
auto alignedFrameset = align->process(frameset);
// set to create point cloud format
pointCloud->setCreatePointFormat(OB_FORMAT_POINT);
// process the frameset to generate point cloud frame (pass into a single depth frame to process is also valid)
std::shared_ptr<ob::Frame> frame = pointCloud->process(alignedFrameset);
```
## Run Sample
Press R or r to create RGBD PointCloud and save to ply file!
Press D or d to create Depth PointCloud and save to ply file!
Press ESC to exit!
### Result
![image](../../docs/resource/point_cloud.jpg)

View File

@@ -0,0 +1,139 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "libobsensor/hpp/Utils.hpp"
#include "utils.hpp"
#include <fstream>
#include <iostream>
#define KEY_ESC 27
#define KEY_R 82
#define KEY_r 114
int main(void) try {
// create config to configure the pipeline streams
auto config = std::make_shared<ob::Config>();
// enable depth and color streams with specified format
config->enableVideoStream(OB_STREAM_DEPTH, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
config->enableVideoStream(OB_STREAM_COLOR, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_RGB);
// set frame aggregate output mode to all type frame require. therefor, the output frameset will contain all type of frames
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
// create pipeline to manage the streams
auto pipeline = std::make_shared<ob::Pipeline>();
// Enable frame synchronization to ensure depth frame and color frame on output frameset are synchronized.
pipeline->enableFrameSync();
// Start pipeline with config
pipeline->start(config);
// Create a point cloud Filter, which will be used to generate pointcloud frame from depth and color frames.
auto pointCloud = std::make_shared<ob::PointCloudFilter>();
// Create a Align Filter, which will be used to align depth frame and color frame.
auto align = std::make_shared<ob::Align>(OB_STREAM_COLOR); // align depth frame to color frame
// operation prompt
std::cout << "Depth and Color stream are started!" << std::endl;
std::cout << "Press R or r to create RGBD PointCloud and save to ply file! " << std::endl;
std::cout << "Press D or d to create Depth PointCloud and save to ply file! " << std::endl;
std::cout << "Press M or m to create RGBD PointCloud and save to Mesh ply file! " << std::endl;
std::cout << "Press ESC to exit! " << std::endl;
while(true) {
auto key = ob_smpl::waitForKeyPressed();
if(key == 27) {
break;
}
if(key == 'r' || key == 'R') {
std::cout << "Save RGBD PointCloud to ply file, this will take some time..." << std::endl;
std::shared_ptr<ob::FrameSet> frameset = nullptr;
while(true) {
frameset = pipeline->waitForFrameset(1000);
if(frameset) {
break;
}
}
// align depth frame to color frame
auto alignedFrameset = align->process(frameset);
// set to create RGBD point cloud format (will be effective only if color frame and depth frame are contained in the frameset)
pointCloud->setCreatePointFormat(OB_FORMAT_RGB_POINT);
// process the frameset to generate point cloud frame
std::shared_ptr<ob::Frame> frame = pointCloud->process(alignedFrameset);
// save point cloud data to ply file
ob::PointCloudHelper::savePointcloudToPly("RGBPoints.ply", frame, false, false, 50);
std::cout << "RGBPoints.ply Saved" << std::endl;
}
else if(key == 'd' || key == 'D') {
std::cout << "Save Depth PointCloud to ply file, this will take some time..." << std::endl;
std::shared_ptr<ob::FrameSet> frameset = nullptr;
while(true) {
frameset = pipeline->waitForFrameset(1000);
if(frameset) {
break;
}
}
// set to create depth point cloud format
auto alignedFrameset = align->process(frameset);
// set to create point cloud format
pointCloud->setCreatePointFormat(OB_FORMAT_POINT);
// process the frameset to generate point cloud frame (pass into a single depth frame to process is also valid)
std::shared_ptr<ob::Frame> frame = pointCloud->process(alignedFrameset);
// save point cloud data to ply file
ob::PointCloudHelper::savePointcloudToPly("DepthPoints.ply", frame, false, false, 50);
std::cout << "DepthPoints.ply Saved" << std::endl;
}
else if(key == 'm' || key == 'M') {
std::cout << "Save RGBD PointCloud(mesh) to ply file, this will take some time..." << std::endl;
std::shared_ptr<ob::FrameSet> frameset = nullptr;
while(true) {
frameset = pipeline->waitForFrameset(1000);
if(frameset) {
break;
}
}
// align depth frame to color frame
auto alignedFrameset = align->process(frameset);
// set to create RGBD point cloud format (will be effective only if color frame and depth frame are contained in the frameset)
pointCloud->setCreatePointFormat(OB_FORMAT_RGB_POINT);
// process the frameset to generate point cloud frame
std::shared_ptr<ob::Frame> frame = pointCloud->process(alignedFrameset);
ob::PointCloudHelper::savePointcloudToPly("ColorMeshPoints.ply", frame, false, true, 50);
std::cout << "ColorMeshPoints.ply Saved" << std::endl;
}
}
// stop the pipeline
pipeline->stop();
return 0;
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
cmake_minimum_required(VERSION 3.5)

project(ob_post_processing)

add_executable(${PROJECT_NAME} post_processing.cpp)

# Require C++11 and group the example under the "examples" IDE folder in one call.
set_target_properties(${PROJECT_NAME} PROPERTIES
    CXX_STANDARD 11
    FOLDER "examples"
)

target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

# Start the Visual Studio debugger from the runtime output directory.
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,81 @@
# C++ Sample: 3.advanced.post_processing
## Overview
Use the SDK interface to demonstrate post-processing operations, display the post-processed images, and exit the program using the ESC_KEY key.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions
Frameset is a combination of different types of Frames
win is used to display the frame data.
## Code overview
1. Get the device and sensor, and create the list of recommended filters for the sensor
```cpp
auto device = pipe.getDevice();
auto sensor = device->getSensor(OB_SENSOR_DEPTH);
auto filterList = sensor->createRecommendedFilters();
```
2. The filter operation.
- Get the type of filter
```cpp
filter->getName()
```
- Get the Config Schema Vec object
```cpp
filter->getConfigSchemaVec()
```
- Enable the filter
```cpp
filter->enable(tokens[1] == "on");
```
- Get the Config Value object by name.
```cpp
filter->getConfigValue(configSchema.name)
```
- Get the Enable State of the filter.
```cpp
filter->isEnabled()
```
- Set the filter config value by name.
```cpp
filter->setConfigValue(tokens[1], value);
```
3. Apply the recommended filters to the depth frame
```cpp
auto processedFrame = depthFrame;
// Apply the recommended filters to the depth frame
for(auto &filter: filterList) {
if(filter->isEnabled()) { // Only apply enabled filters
processedFrame = filter->process(processedFrame);
}
}
```
## Run Sample
Press the button according to the interface prompts
### Result
![image](../../docs/resource/post_processing.jpg)

View File

@@ -0,0 +1,195 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.h>

#include "utils.hpp"
#include "utils_opencv.hpp"

#include <atomic>
#include <iostream>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
// Flag to signal the program to quit. Written by the detached console thread
// (filterControl) and read by the render loop in main; std::atomic prevents a
// data race between the two threads.
std::atomic<bool> quit_program{ false };
// List every recommended post-processing filter together with its enable state
// and config schema. NOTE: as a side effect, each filter is switched off so the
// user can opt in one by one from the interactive control loop.
void printFiltersInfo(const std::vector<std::shared_ptr<ob::Filter>> &filterList) {
    std::cout << filterList.size() << " post processing filters recommended:" << std::endl;
    for(const auto &flt: filterList) {
        const char *state = flt->isEnabled() ? "enabled" : "disabled";
        std::cout << " - " << flt->getName() << ": " << state << std::endl;
        // Dump each config entry as {name, type, min, max, step, default, description}.
        for(const auto &schema: flt->getConfigSchemaVec()) {
            std::cout << " - {" << schema.name << ", " << schema.type << ", " << schema.min << ", " << schema.max << ", "
                      << schema.step << ", " << schema.def << ", " << schema.desc << "}" << std::endl;
        }
        flt->enable(false);  // Disable the filter
    }
}
// Interactive console loop for inspecting and tuning the recommended
// post-processing filters. Intended to run on a worker thread while the main
// thread renders frames; sets the global quit_program flag when the user quits.
// NOTE(review): this thread calls enable()/setConfigValue() while the render
// thread calls filter->process() on the same filters; assumes the SDK makes
// those calls safe to interleave — confirm against the ObSensor documentation.
void filterControl(const std::vector<std::shared_ptr<ob::Filter>> &filterList) {
    // Print the list of supported console commands.
    auto printHelp = [&]() {
        std::cout << "Available commands:" << std::endl;
        std::cout << "- Enter `[Filter]` to list the config values for the filter" << std::endl;
        std::cout << "- Enter `[Filter] on` or `[Filter] off` to enable/disable the filter" << std::endl;
        std::cout << "- Enter `[Filter] list` to list the config schema for the filter" << std::endl;
        std::cout << "- Enter `[Filter] [Config]` to show the config values for the filter" << std::endl;
        std::cout << "- Enter `[Filter] [Config] [Value]` to set a config value" << std::endl;
        std::cout << "- Enter `L`or `l` to list all available filters" << std::endl;
        std::cout << "- Enter `H` or `h` to print this help message" << std::endl;
        std::cout << "- Enter `Q` or `q` to quit" << std::endl;
    };
    printHelp();
    while(!quit_program) {
        std::cout << "---------------------------" << std::endl;
        std::cout << "Enter your input (h for help): ";
        std::string input;
        std::getline(std::cin, input);
        // Single-letter commands are handled before filter-name parsing.
        if(input == "q" || input == "Q") {
            quit_program = true;  // signal the render loop in main to exit
            break;
        }
        else if(input == "l" || input == "L") {
            printFiltersInfo(filterList);
            continue;
        }
        else if(input == "h" || input == "H") {
            printHelp();
            continue;
        }
        // Parse the input into whitespace-separated tokens:
        // tokens[0] = filter name, tokens[1] = subcommand/config, tokens[2] = value.
        std::vector<std::string> tokens;
        std::istringstream iss(input);
        for(std::string token; iss >> token;) {
            tokens.push_back(token);
        }
        if(tokens.empty()) {
            continue;
        }
        bool foundFilter = false;
        for(auto &filter: filterList) {
            if(filter->getName() == tokens[0]) {
                foundFilter = true;
                // Branch order matters: the literal "on"/"off"/"list" subcommands are
                // matched before the generic two-token config lookup, so a config
                // entry that happened to be named "on", "off" or "list" could not be
                // queried by name here.
                if(tokens.size() == 1) { // print list of configs for the filter
                    auto configSchemaVec = filter->getConfigSchemaVec();
                    std::cout << "Config values for " << filter->getName() << ":" << std::endl;
                    for(auto &configSchema: configSchemaVec) {
                        std::cout << " - " << configSchema.name << ": " << filter->getConfigValue(configSchema.name) << std::endl;
                    }
                }
                else if(tokens.size() == 2 && (tokens[1] == "on" || tokens[1] == "off")) { // Enable/disable the filter
                    filter->enable(tokens[1] == "on");
                    std::cout << "Success: Filter " << filter->getName() << " is now " << (filter->isEnabled() ? "enabled" : "disabled") << std::endl;
                }
                else if(tokens.size() == 2 && tokens[1] == "list") { // List the config values for the filter
                    auto configSchemaVec = filter->getConfigSchemaVec();
                    std::cout << "Config schema for " << filter->getName() << ":" << std::endl;
                    // Each entry is {name, type, min, max, step, default, description}.
                    for(auto &configSchema: configSchemaVec) {
                        std::cout << " - {" << configSchema.name << ", " << configSchema.type << ", " << configSchema.min << ", " << configSchema.max << ", "
                                  << configSchema.step << ", " << configSchema.def << ", " << configSchema.desc << "}" << std::endl;
                    }
                }
                else if(tokens.size() == 2) { // Print the config schema for the filter
                    auto configSchemaVec = filter->getConfigSchemaVec();
                    bool foundConfig = false;
                    for(auto &configSchema: configSchemaVec) {
                        if(configSchema.name == tokens[1]) {
                            foundConfig = true;
                            std::cout << "Config values for " << filter->getName() << "@" << configSchema.name << ":"
                                      << filter->getConfigValue(configSchema.name) << std::endl;
                            break;
                        }
                    }
                    if(!foundConfig) {
                        std::cerr << "Error: Config " << tokens[1] << " not found for filter " << filter->getName() << std::endl;
                    }
                }
                else if(tokens.size() == 3) { // Set a config value
                    // std::stod throws on non-numeric input; report and re-prompt.
                    try {
                        double value = std::stod(tokens[2]);
                        filter->setConfigValue(tokens[1], value);
                    }
                    catch(const std::exception &e) {
                        std::cerr << "Error: " << e.what() << std::endl;
                        continue;
                    }
                    std::cout << "Success: Config value of " << tokens[1] << " for filter " << filter->getName() << " is set to " << tokens[2] << std::endl;
                }
                break;
            }
        }
        if(!foundFilter) {
            std::cerr << "Error: Filter " << tokens[0] << " not found" << std::endl;
        }
        // Brief pause so interleaved render-thread output settles before re-prompting.
        std::this_thread::sleep_for(std::chrono::milliseconds(500));
    }
}
int main() try {
// Create a pipeline with default device
ob::Pipeline pipe;
// Get the device and sensor, and get the list of recommended filters for the sensor
auto device = pipe.getDevice();
auto sensor = device->getSensor(OB_SENSOR_DEPTH);
auto filterList = sensor->createRecommendedFilters();
// Print the recommended filters
printFiltersInfo(filterList);
// Create a config with depth stream enabled
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
config->enableStream(OB_STREAM_DEPTH);
// Start the pipeline with config
pipe.start(config);
// Start the filter control loop on sub thread
std::thread filterControlThread(filterControl, filterList);
filterControlThread.detach();
// Create a window for rendering, and set the resolution of the window
ob_smpl::CVWindow win("PostProcessing", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
while(win.run() && !quit_program) {
// Wait for up to 1000ms for a frameset in blocking mode.
auto frameSet = pipe.waitForFrameset(1000);
if(frameSet == nullptr) {
continue;
}
// Get the depth frame from the frameset
auto depthFrame = frameSet->getFrame(OB_FRAME_DEPTH);
if(!depthFrame) {
continue;
}
auto processedFrame = depthFrame;
// Apply the recommended filters to the depth frame
for(auto &filter: filterList) {
if(filter->isEnabled()) { // Only apply enabled filters
processedFrame = filter->process(processedFrame);
}
}
// Push the frames to the window for showing
// Due to processedFrame type is same as the depthFrame, we should push it with different group id.
win.pushFramesToView(depthFrame, 0);
win.pushFramesToView(processedFrame, 1);
}
// Stop the pipeline
pipe.stop();
quit_program = true;
return 0;
}
catch(ob::Error &e) {
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
std::cout << "\nPress any key to exit.";
ob_smpl::waitForKeyPressed();
exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
# Build configuration for the "preset" example executable.
cmake_minimum_required(VERSION 3.5)
project(ob_preset)
# Single-source example program.
add_executable(${PROJECT_NAME} preset.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
# Start Visual Studio debugging from the runtime output directory so the
# example finds its runtime dependencies next to the binary.
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,39 @@
# C++ Sample: 3.advanced.preset
## Overview
Use the SDK interface to set and get the preset value.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
## Code overview
1. Get preset list from device.
```cpp
std::shared_ptr<ob::DevicePresetList> presetLists = device->getAvailablePresetList();
```
2. Get preset value from device.
```cpp
// Print current preset name.
std::cout << "Current PresetName: " << device->getCurrentPresetName() << std::endl;
```
3. Set preset value to device.
```cpp
// Load preset.
device->loadPreset(presetName);
```
## Run Sample
Press the button according to the interface prompts
### Result
![image](../../docs/resource/preset.jpg)

View File

@@ -0,0 +1,64 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.
#include <libobsensor/ObSensor.hpp>
#include "utils.hpp"
#include <iostream>
int main() try {
    // Create a pipeline with the default device.
    ob::Pipeline pipe;
    // Get the device from the pipeline.
    std::shared_ptr<ob::Device> device = pipe.getDevice();
    while(true) {
        // Get the preset list from the device.
        std::shared_ptr<ob::DevicePresetList> presetLists = device->getAvailablePresetList();
        // Guard against a null list as well as an empty one: the original check
        // (presetLists && count == 0) let a null list fall through and be
        // dereferenced in the loop below.
        if(!presetLists || presetLists->getCount() == 0) {
            std::cout << "The current device does not support preset mode" << std::endl;
            std::cout << "\nPress any key to exit.";
            ob_smpl::waitForKeyPressed();
            return 0;
        }
        std::cout << "Available Presets:" << std::endl;
        for(uint32_t index = 0; index < presetLists->getCount(); index++) {
            // Print each available preset name with its selectable index.
            std::cout << " - " << index << "." << presetLists->getName(index) << std::endl;
        }
        // Print the currently active preset name.
        std::cout << "Current PresetName: " << device->getCurrentPresetName() << std::endl;
        std::cout << "Enter index of preset to load: ";
        // Select the preset to load.
        int inputOption = ob_smpl::getInputOption();
        // Validate the index before using it so an out-of-range entry does not
        // throw and terminate the program through the outer catch handler.
        if(inputOption < 0 || static_cast<uint32_t>(inputOption) >= presetLists->getCount()) {
            std::cerr << "Invalid index, please try again." << std::endl;
            continue;
        }
        auto presetName = presetLists->getName(static_cast<uint32_t>(inputOption));
        // Load the selected preset.
        device->loadPreset(presetName);
        // Print the now-active preset name to confirm the change.
        std::cout << "Current PresetName: " << device->getCurrentPresetName() << std::endl;
    }
    // NOTE(review): unreachable — the loop above never exits; kept for parity
    // with the original flow in case an exit path is added later.
    pipe.stop();
    printf("\nProgram ended successfully. Press any key to exit.");
    ob_smpl::getInputOption();
    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}

View File

@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.
# Build configuration for the "sync_align" example executable.
cmake_minimum_required(VERSION 3.5)
project(ob_sync_align)
# Single-source example program.
add_executable(${PROJECT_NAME} sync_align.cpp)
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
# Link against the Orbbec SDK and the shared example utilities.
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
# Group the target under "examples" in IDE solution explorers.
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
# Start Visual Studio debugging from the runtime output directory so the
# example finds its runtime dependencies next to the binary.
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)

View File

@@ -0,0 +1,66 @@
# C++ Sample: 3.advanced.sync_align
## Overview
Use the SDK interface to demonstrate the synchronization and alignment of sensor data streams, display the aligned image, and exit the program using the ESC key.
### Knowledge
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
Frameset is a combination of different types of Frames
win is used to display the frame data.
C2D (Color to Depth) is the transformation from the color image coordinate system to the depth image coordinate system. It maps the pixel positions in the color image to the corresponding positions in the depth image. This is commonly used to align color and depth images so that both types of information can be used in the same coordinate system.
D2C (Depth to Color) is the transformation from the depth image coordinate system to the color image coordinate system. It maps the pixel positions in the depth image to the corresponding positions in the color image. This transformation allows depth data to be applied to the color image, facilitating the annotation or analysis of depth information within the color image.
## Code overview
1. Set alignment mode
```cpp
// Create a filter to align depth frame to color frame
auto depth2colorAlign = std::make_shared<ob::Align>(OB_STREAM_COLOR);
// create a filter to align color frame to depth frame
auto color2depthAlign = std::make_shared<ob::Align>(OB_STREAM_DEPTH);
```
2. Set the callback function for the Align Filter to display the aligned frames in the window
```cpp
depth2colorAlign->setCallBack([&win](std::shared_ptr<ob::Frame> frame) { win.pushFramesToView(frame); });
color2depthAlign->setCallBack([&win](std::shared_ptr<ob::Frame> frame) { win.pushFramesToView(frame); });
```
3. Perform alignment processing
```cpp
// Get filter according to the align mode
std::shared_ptr<ob::Filter> alignFilter = depth2colorAlign;
if(align_mode % 2 == 1) {
alignFilter = color2depthAlign;
}
// push the frameset to the Align Filter to align the frames.
// The frameset will be processed in an internal thread, and the resulting frames will be asynchronously output via the callback function.
alignFilter->pushFrame(frameSet);
```
## Run Sample
Press the Esc key in the window to exit the program.
'T': Switch Align Mode.
'F': Toggle Synchronization.
'+/-': Adjust Transparency
### Result
Sync
![image](../../docs/resource/sync.jpg)
D2C
![image](../../docs/resource/d2c.jpg)
C2D
![image](../../docs/resource/c2d.jpg)

Some files were not shown because too many files have changed in this diff Show More