init
@@ -0,0 +1,30 @@
{
    "version": "1.0.0",
    "configTime": "2023/01/01",
    "devices": [
        {
            "sn": "CP2194200060",
            "syncConfig": {
                "syncMode": "OB_MULTI_DEVICE_SYNC_MODE_PRIMARY",
                "depthDelayUs": 0,
                "colorDelayUs": 0,
                "trigger2ImageDelayUs": 0,
                "triggerOutEnable": true,
                "triggerOutDelayUs": 0,
                "framesPerTrigger": 1
            }
        },
        {
            "sn": "CP0Y8420004K",
            "syncConfig": {
                "syncMode": "OB_MULTI_DEVICE_SYNC_MODE_SECONDARY",
                "depthDelayUs": 0,
                "colorDelayUs": 0,
                "trigger2ImageDelayUs": 0,
                "triggerOutEnable": true,
                "triggerOutDelayUs": 0,
                "framesPerTrigger": 1
            }
        }
    ]
}
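The per-device entries above mirror the SDK's multi-device synchronization settings. As a rough, hedged sketch (assuming the OrbbecSDK `OBMultiDeviceSyncConfig` struct and `ob::Device::setMultiDeviceSyncConfig()` interface; the field names here simply follow the JSON keys above, and JSON parsing is omitted), one parsed entry could be applied to its device like this:

```cpp
// Hedged sketch: apply the primary device's sync entry from the JSON above.
// Assumes OrbbecSDK exposes OBMultiDeviceSyncConfig and Device::setMultiDeviceSyncConfig();
// values are hard-coded here from the primary entry instead of being parsed from the file.
#include <libobsensor/ObSensor.hpp>

void applyPrimarySyncConfig(std::shared_ptr<ob::Device> device) {
    OBMultiDeviceSyncConfig syncConfig{};
    syncConfig.syncMode             = OB_MULTI_DEVICE_SYNC_MODE_PRIMARY;  // "syncMode"
    syncConfig.depthDelayUs         = 0;                                  // "depthDelayUs"
    syncConfig.colorDelayUs         = 0;                                  // "colorDelayUs"
    syncConfig.trigger2ImageDelayUs = 0;                                  // "trigger2ImageDelayUs"
    syncConfig.triggerOutEnable     = true;                               // "triggerOutEnable"
    syncConfig.triggerOutDelayUs    = 0;                                  // "triggerOutDelayUs"
    syncConfig.framesPerTrigger     = 1;                                  // "framesPerTrigger"
    device->setMultiDeviceSyncConfig(syncConfig);
}
```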
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_callback  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_color  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_common_usages  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_confidence  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_coordinate_transform  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_depth  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_depth_c  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_control  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_firmware_update  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_forceip  Normal file (binary file not shown)
Binary file not shown.
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_playback  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_record  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_device_record_nogui  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_enumerate  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_enumerate_c  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_hdr  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_hot_plugin  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_hw_d2c_align  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_imshow  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_imu  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_infrared  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_laser_interleave  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_logger  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_metadata  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_multi_device  Normal file (binary file not shown)
Binary file not shown.
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_multi_devices_sync  Normal file (binary file not shown)
Binary file not shown.
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_multi_streams  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_point_cloud  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_post_processing  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_preset  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_quick_start  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_quick_start_c  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_save_to_disk  Normal file (binary file not shown)
BIN  VideoProsessing/OrbbecSDK_v2.5.5/bin/ob_sync_align  Normal file (binary file not shown)
103  VideoProsessing/OrbbecSDK_v2.5.5/build_examples.sh  Normal file
@@ -0,0 +1,103 @@
#!/bin/bash

# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

echo "Checking if apt-get is workable ..."
apt_workable=1
# Check if apt-get is installed
if ! command -v apt-get &> /dev/null
then
    echo "apt-get could not be found."
    apt_workable=0
fi

# Check if apt-get is working
if ! sudo apt-get update &> /dev/null
then
    echo "apt-get update failed. apt-get may not be working properly."
    apt_workable=0
fi

if [ $apt_workable -eq 1 ]
then
    # Install compiler and tools
    if ! g++ --version &> /dev/null || ! make --version &> /dev/null
    then
        echo "C++ compiler and tools could not be found. They are required to build the examples."
        echo "Do you want to install them via apt-get install build-essential? (y/n)"
        read answer
        if [ "$answer" == "y" ]
        then
            sudo apt-get install -y build-essential
        fi
    else
        echo "C++ compiler and tools are installed."
    fi

    # Install cmake
    if ! cmake --version &> /dev/null
    then
        echo "CMake could not be found. It is required to build the examples."
        echo "Do you want to install cmake? (y/n)"
        read answer
        if [ "$answer" == "y" ]
        then
            sudo apt-get install -y cmake
        fi
    else
        echo "cmake is installed."
    fi

    # Install libopencv-dev
    if ! dpkg -l | grep libopencv-dev &> /dev/null || ! dpkg -l | grep libopencv &> /dev/null
    then
        echo "libopencv-dev or libopencv could not be found. Without OpenCV, some of the examples may not build successfully."
        echo "Do you want to install libopencv-dev and libopencv? (y/n)"
        read answer
        if [ "$answer" == "y" ]
        then
            sudo apt-get install -y libopencv
            sudo apt-get install -y libopencv-dev
        fi
    else
        echo "libopencv-dev is installed."
    fi
else
    echo "apt-get is not workable; the network connection may be down or the system may not have internet access. Building the examples may not succeed."
fi

# Save the current directory so it can be restored at the end
current_dir=$(pwd)

# cd to the directory where this script is located
cd "$(dirname "$0")"
project_dir=$(pwd)
examples_dir=$project_dir/examples

# Detect CPU core count
cpu_count=$(grep -c ^processor /proc/cpuinfo)
half_cpu_count=$((cpu_count / 2))
if [ $half_cpu_count -eq 0 ]
then
    half_cpu_count=1
fi

# cmake
echo "Building examples..."
mkdir -p build
cd build
cmake -DCMAKE_BUILD_TYPE=Release -DOB_BUILD_LINUX=ON -DCMAKE_INSTALL_PREFIX="$project_dir" "$examples_dir"
echo "Building examples with $half_cpu_count threads..."
cmake --build . -- -j$half_cpu_count # build with a thread count equal to half of the CPU count
# Install the executable files to the project directory
make install

# Clean up
cd "$project_dir"
rm -rf build

echo "OrbbecSDK examples built successfully!"
echo "The executable files are located in: $project_dir/bin"

cd "$current_dir"
42  VideoProsessing/OrbbecSDK_v2.5.5/examples/CMakeLists.txt  Normal file
@@ -0,0 +1,42 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)

project(orbbec_sdk_exampes)

set(CMAKE_CXX_STANDARD 11)

option(OB_BUILD_PCL_EXAMPLES "Build Point Cloud Library examples" OFF)
option(OB_BUILD_OPEN3D_EXAMPLES "Build Open3D examples" OFF)

set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)
if(MSVC OR CMAKE_GENERATOR STREQUAL "Xcode")
    message(STATUS "Using multi-config generator: ${CMAKE_GENERATOR}")
    foreach(OUTPUTCONFIG DEBUG RELEASE RELWITHDEBINFO MINSIZEREL)
        string(TOUPPER ${OUTPUTCONFIG} OUTPUTCONFIG_UPPER)
        set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
        set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}")
        set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY_${OUTPUTCONFIG_UPPER} "${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}")
    endforeach()
endif()

set(OrbbecSDK_DIR ${CMAKE_CURRENT_LIST_DIR}/../lib)
find_package(OrbbecSDK REQUIRED)

if(APPLE)
    set(CMAKE_MACOSX_RPATH ON)
    set(CMAKE_INSTALL_RPATH "@loader_path/../lib")
    set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
    set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
elseif(UNIX)
    set(CMAKE_SKIP_BUILD_RPATH FALSE)
    set(CMAKE_INSTALL_RPATH "$ORIGIN/../lib")
    set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
    set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
endif()

add_subdirectory(src)
@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_enumerate)

add_executable(${PROJECT_NAME} enumerate.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,127 @@
# C++ Sample: 0.basic.enumerate

## Overview

Use the SDK interface to obtain camera-related information, including the model, the available sensors, and each sensor's configurations.

### Knowledge

Context is the environment context and the first object created during initialization. It can be used to perform global settings, including but not limited to device status change callbacks and log level settings. A Context can access multiple Devices.

Device is the device object, which can be used to obtain device information such as the model, serial number, and the available sensors. One actual hardware device corresponds to one Device object.
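Where hot-plug handling is needed, the Context can also report device arrival and removal. A minimal hedged sketch (assuming this SDK version exposes `ob::Context::setDeviceChangedCallback()`, which is not used in this sample):

```cpp
// Hedged sketch: listen for device hot-plug events through the Context.
// Assumes ob::Context::setDeviceChangedCallback() is available in this SDK version.
#include <libobsensor/ObSensor.hpp>
#include <iostream>

int main() {
    ob::Context context;
    context.setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> addedList) {
        // Report how many devices were removed and added; query addedList for details if needed.
        std::cout << "devices removed: " << removedList->getCount() << ", devices added: " << addedList->getCount() << std::endl;
    });
    // Keep the program alive so callbacks can arrive (sketch only).
    std::cin.get();
    return 0;
}
```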
## Code overview

1. Create a context

```cpp
// Create a context.
ob::Context context;
```

2. Check if there is a camera connected

```cpp
// Query the list of connected devices.
auto deviceList = context.queryDeviceList();
if(deviceList->getCount() < 1) {
    std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
    return -1;
}
```

3. Obtain and print information about the connected devices

```cpp
std::cout << "enumerated devices: " << std::endl;

std::shared_ptr<ob::Device>     device     = nullptr;
std::shared_ptr<ob::DeviceInfo> deviceInfo = nullptr;
for(uint32_t index = 0; index < deviceList->getCount(); index++) {
    // Get device from deviceList.
    device = deviceList->getDevice(index);
    // Get device information from device.
    deviceInfo = device->getDeviceInfo();
    std::cout << " - " << index << ". name: " << deviceInfo->getName() << " pid: " << deviceInfo->getPid() << " SN: " << deviceInfo->getSerialNumber()
              << std::endl;
}
```

4. Wait for keyboard input to select a device

```cpp
// Select a device.
int deviceSelected = ob_smpl::getInputOption();
if(deviceSelected == -1) {
    break;
}
```

5. Print the sensors of the selected device and wait for keyboard input

```cpp
// Enumerate sensors.
void enumerateSensors(std::shared_ptr<ob::Device> device) {
    while(true) {
        std::cout << "Sensor list: " << std::endl;
        // Get the list of sensors.
        auto sensorList = device->getSensorList();
        for(uint32_t index = 0; index < sensorList->getCount(); index++) {
            // Get the sensor type.
            auto sensorType = sensorList->getSensorType(index);
            std::cout << " - " << index << "."
                      << "sensor type: " << ob::TypeHelper::convertOBSensorTypeToString(sensorType) << std::endl;
        }

        std::cout << "Select a sensor to enumerate its streams(input sensor index or \'ESC\' to enumerate device): " << std::endl;

        // Select a sensor.
        int sensorSelected = ob_smpl::getInputOption();
        if(sensorSelected == -1) {
            break;
        }

        // Get sensor from sensorList.
        auto sensor = sensorList->getSensor(sensorSelected);
        enumerateStreamProfiles(sensor);
    }
}
```

6. Print the stream profiles of the selected sensor

```cpp
// Enumerate stream profiles.
void enumerateStreamProfiles(std::shared_ptr<ob::Sensor> sensor) {
    // Get the list of stream profiles.
    auto streamProfileList = sensor->getStreamProfileList();
    // Get the sensor type.
    auto sensorType = sensor->getType();
    for(uint32_t index = 0; index < streamProfileList->getCount(); index++) {
        // Get the stream profile.
        auto profile = streamProfileList->getProfile(index);
        if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_COLOR || sensorType == OB_SENSOR_DEPTH || sensorType == OB_SENSOR_IR_LEFT
           || sensorType == OB_SENSOR_IR_RIGHT) {
            printStreamProfile(profile, index);
        }
        else if(sensorType == OB_SENSOR_ACCEL) {
            printAccelProfile(profile, index);
        }
        else if(sensorType == OB_SENSOR_GYRO) {
            printGyroProfile(profile, index);
        }
        else {
            break;
        }
    }
}
```

## Run Sample

Follow the prompts and enter the index of the device and sensor whose information you want to view.
Press the Esc key in the window to exit the program.

### Result

![enumerate]()
@@ -0,0 +1,170 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"

#include <iostream>
#include <iomanip>

// Get input option.
int getInputOption() {
    char inputOption = ob_smpl::waitForKeyPressed();
    if(inputOption == ESC_KEY) {
        return -1;
    }
    return inputOption - '0';
}

// Print stream profile information.
void printStreamProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
    // Get the video profile.
    auto videoProfile = profile->as<ob::VideoStreamProfile>();
    // Get the format.
    auto formatName = profile->getFormat();
    // Get the width.
    auto width = videoProfile->getWidth();
    // Get the height.
    auto height = videoProfile->getHeight();
    // Get the fps.
    auto fps = videoProfile->getFps();
    std::cout << index << "."
              << "format: " << ob::TypeHelper::convertOBFormatTypeToString(formatName) << ", "
              << "res: " << width << "*" << height << ", "
              << "fps: " << fps << std::endl;
}

// Print accel profile information.
void printAccelProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
    // Get the profile of accel.
    auto accProfile = profile->as<ob::AccelStreamProfile>();
    // Get the rate of accel.
    auto accRate = accProfile->getSampleRate();
    std::cout << index << "."
              << "acc rate: " << ob::TypeHelper::convertOBIMUSampleRateTypeToString(accRate) << std::endl;
}

// Print gyro profile information.
void printGyroProfile(std::shared_ptr<ob::StreamProfile> profile, uint32_t index) {
    // Get the profile of gyro.
    auto gyroProfile = profile->as<ob::GyroStreamProfile>();
    // Get the rate of gyro.
    auto gyroRate = gyroProfile->getSampleRate();
    std::cout << index << "."
              << "gyro rate: " << ob::TypeHelper::convertOBIMUSampleRateTypeToString(gyroRate) << std::endl;
}

// Enumerate stream profiles.
void enumerateStreamProfiles(std::shared_ptr<ob::Sensor> sensor) {
    // Get the list of stream profiles.
    auto streamProfileList = sensor->getStreamProfileList();
    // Get the sensor type.
    auto sensorType = sensor->getType();
    for(uint32_t index = 0; index < streamProfileList->getCount(); index++) {
        // Get the stream profile.
        auto profile = streamProfileList->getProfile(index);
        if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_COLOR || sensorType == OB_SENSOR_DEPTH || sensorType == OB_SENSOR_IR_LEFT
           || sensorType == OB_SENSOR_IR_RIGHT || sensorType == OB_SENSOR_CONFIDENCE) {
            printStreamProfile(profile, index);
        }
        else if(sensorType == OB_SENSOR_ACCEL) {
            printAccelProfile(profile, index);
        }
        else if(sensorType == OB_SENSOR_GYRO) {
            printGyroProfile(profile, index);
        }
        else {
            break;
        }
    }
}

// Enumerate sensors.
void enumerateSensors(std::shared_ptr<ob::Device> device) {
    while(true) {
        std::cout << "Sensor list: " << std::endl;
        // Get the list of sensors.
        auto sensorList = device->getSensorList();
        for(uint32_t index = 0; index < sensorList->getCount(); index++) {
            // Get the sensor type.
            auto sensorType = sensorList->getSensorType(index);
            std::cout << " - " << index << "."
                      << "sensor type: " << ob::TypeHelper::convertOBSensorTypeToString(sensorType) << std::endl;
        }

        std::cout << "Select a sensor to enumerate its streams(input sensor index or \'ESC\' to enumerate device): " << std::endl;

        // Select a sensor.
        int sensorSelected = ob_smpl::getInputOption();
        if(sensorSelected >= static_cast<int>(sensorList->getCount()) || sensorSelected < 0) {
            if(sensorSelected == -1) {
                break;
            }
            else {
                std::cout << "\nInvalid input, please reselect the sensor!\n";
                continue;
            }
        }

        // Get sensor from sensorList.
        auto sensor = sensorList->getSensor(sensorSelected);
        enumerateStreamProfiles(sensor);
    }
}

int main(void) try {

    // Create a context.
    ob::Context context;

    while(true) {
        // Query the list of connected devices.
        auto deviceList = context.queryDeviceList();
        if(deviceList->getCount() < 1) {
            std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
            std::cout << "\nPress any key to exit.";
            ob_smpl::waitForKeyPressed();
            return -1;
        }

        std::cout << "enumerated devices: " << std::endl;

        std::shared_ptr<ob::Device>     device     = nullptr;
        std::shared_ptr<ob::DeviceInfo> deviceInfo = nullptr;
        for(uint32_t index = 0; index < deviceList->getCount(); index++) {
            // Get device from deviceList.
            device = deviceList->getDevice(index);
            // Get device information from device.
            deviceInfo = device->getDeviceInfo();
            std::cout << " " << index << "- device name: " << deviceInfo->getName() << ", device pid: 0x" << std::hex << std::setw(4) << std::setfill('0')
                      << deviceInfo->getPid() << std::dec << " ,device SN: " << deviceInfo->getSerialNumber()
                      << ", connection type:" << deviceInfo->getConnectionType() << std::endl;
        }

        std::cout << "Select a device to enumerate its sensors (Input device index or \'ESC\' to exit program):" << std::endl;

        // Select a device.
        int deviceSelected = ob_smpl::getInputOption();
        if(deviceSelected >= static_cast<int>(deviceList->getCount()) || deviceSelected < 0) {
            if(deviceSelected == -1) {
                break;
            }
            else {
                std::cout << "\nInvalid input, please reselect the device!\n";
                continue;
            }
        }

        // Get the device.
        auto selectedDevice = deviceList->getDevice(deviceSelected);
        enumerateSensors(selectedDevice);
    }

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_quick_start)

add_executable(${PROJECT_NAME} quick_start.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,60 @@
# C++ Sample: 0.basic.quick_start

## Overview

Use the SDK interface to quickly obtain the camera video stream and display it in the window.

### Knowledge

Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.

Frameset is a combination of different types of Frames.

win is used to display the frame data.

## Code overview

1. Instantiate the pipeline using the default configuration file and quickly open the video stream

```cpp
// Create a pipeline.
ob::Pipeline pipe;

// Start the pipeline with the default config.
// Modify the default configuration through the configuration file: "OrbbecSDKConfig.xml"
pipe.start();
```

2. Create a window for showing the frames, and set the size of the window

```cpp
// Create a window for showing the frames, and set the size of the window.
ob_smpl::CVWindow win("QuickStart", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
```

3. Open the window and display the video stream. The stream waits for a frame of data in a blocking manner; the frame is a composite frame containing the frame data of all streams enabled in the configuration, and a waiting timeout can be set (see the sketch after the code block).

```cpp
while(win.run()) {
    // Wait for frameSet from the pipeline, the default timeout is 1000 ms.
    auto frameSet = pipe.waitForFrameset();

    // Push the frames to the window for showing.
    win.pushFramesToView(frameSet);
}
```
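A minimal sketch of the same loop with an explicit timeout and a null check (the 100 ms value and the check are additions for illustration; `waitForFrameset(100)` is used the same way in the other samples in this directory):

```cpp
// Hedged sketch: explicit wait timeout plus a null check before rendering.
while(win.run()) {
    // Wait up to 100 ms for a frameset; nullptr is returned if nothing arrived in time.
    auto frameSet = pipe.waitForFrameset(100);
    if(frameSet == nullptr) {
        continue;  // Nothing to draw this iteration.
    }
    win.pushFramesToView(frameSet);
}
```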
4. Use the pipeline to close the video stream

```cpp
// Stop the pipeline; no frame data will be generated.
pipe.stop();
```

## Run Sample

Press the Esc key in the window to exit the program.

### Result

![quick_start]()
@@ -0,0 +1,39 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_opencv.hpp"

int main(void) try {
    // Create a pipeline.
    ob::Pipeline pipe;

    // Start the pipeline with the default config.
    // Modify the default configuration through the configuration file: "OrbbecSDKConfig.xml"
    pipe.start();

    // Create a window for showing the frames, and set the size of the window.
    ob_smpl::CVWindow win("QuickStart", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);

    while(win.run()) {
        // Wait for frameSet from the pipeline, the default timeout is 1000 ms.
        auto frameSet = pipe.waitForFrameset();

        // Push the frames to the window for showing.
        win.pushFramesToView(frameSet);
    }

    // Stop the pipeline; no frame data will be generated.
    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_callback)

add_executable(${PROJECT_NAME} callback.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,96 @@
# C++ Sample: 1.stream.callback

## Overview

In this sample, users can get the depth, RGB, and IR images. The sample also shows how to perform user-defined operations such as data acquisition, data processing, and data modification within the callback function.

### Knowledge

Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.

Device is a class that can be used to get device information, parameters, and a list of contained sensors.

Sensor can be used to obtain the different components of the camera and their streams; for example, the RGB, IR, and Depth streams are obtained through the RGB, IR, and Depth sensors.

## Code overview

1. Create the pipeline instance using the default configuration and create a config instance to enable or disable the streams. Get the device instance from the pipeline, and then get the sensor list from the device.

```c++
// Create a pipeline.
ob::Pipeline pipe;

// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

// Get device from pipeline.
auto device = pipe.getDevice();

// Get sensorList from device.
auto sensorList = device->getSensorList();
```

2. Keep only the video-stream sensors and enable a stream for each of them.

```c++
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
    // Query all supported infrared sensor types and enable the infrared stream.
    // For a dual-infrared device, enable the left and right infrared streams.
    // For a single-infrared device, enable the infrared stream.
    OBSensorType sensorType = sensorList->getSensorType(index);

    // Exclude non-video sensor types.
    if(!ob::TypeHelper::isVideoSensorType(sensorType)) {
        continue;
    }

    // Enable the stream for the sensor type.
    config->enableStream(sensorType);
}
```

3. In the callback function, you can add whatever processing you want to do with the data. Avoid performing complex computational operations within callback functions; prolonged operations can lead to dropped frames. It is recommended to hand the frames off to a queue for processing (a hedged sketch follows the code block below).

```c++
// Start the pipeline with callback.
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> output) {
    std::lock_guard<std::mutex> lock(framesetMutex);
    frameset = output;
});
```
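A minimal sketch of that queue-based hand-off, using only standard C++ around the callback shown above (the queue, mutex, condition variable, and worker thread are illustrative additions, not part of this sample):

```c++
// Hedged sketch: hand framesets from the SDK callback to a worker thread via a bounded queue.
#include <libobsensor/ObSensor.hpp>

#include <atomic>
#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>

std::deque<std::shared_ptr<ob::FrameSet>> framesetQueue;
std::mutex                                queueMutex;
std::condition_variable                   queueCv;

// Inside the callback: keep it short, just enqueue and notify.
// pipe.start(config, [&](std::shared_ptr<ob::FrameSet> output) {
//     {
//         std::lock_guard<std::mutex> lock(queueMutex);
//         if(framesetQueue.size() >= 5) {   // drop the oldest frameset instead of blocking the callback
//             framesetQueue.pop_front();
//         }
//         framesetQueue.push_back(output);
//     }
//     queueCv.notify_one();
// });

// Worker thread: do the heavy processing outside the callback.
void processLoop(std::atomic<bool> &running) {
    while(running) {
        std::unique_lock<std::mutex> lock(queueMutex);
        queueCv.wait(lock, [&] { return !framesetQueue.empty() || !running; });
        if(framesetQueue.empty()) {
            continue;  // woken up for shutdown
        }
        auto frameset = framesetQueue.front();
        framesetQueue.pop_front();
        lock.unlock();
        // ... expensive processing of `frameset` goes here ...
    }
}
```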
4. Render the window

```c++
while(win.run()) {
    std::lock_guard<std::mutex> lock(framesetMutex);

    if(frameset == nullptr) {
        continue;
    }

    // Rendering display
    win.pushFramesToView(frameset);
}
```

5. Stop the pipeline

```c++
// Stop the pipeline; no frame data will be generated.
pipe.stop();
```

## Run Sample

If you are on Windows, you can switch to the directory `OrbbecSDK-dev/build/win_XX/bin` to find the `ob_callback.exe`.

If you are on Linux, you can switch to the directory `OrbbecSDK-dev/build/linux_XX/bin` to find the `ob_callback`.

### Key introduction

Press the Esc key in the window to exit the program.

### Result

![callback]()
@@ -0,0 +1,80 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_opencv.hpp"

#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)

int main(void) try {

    // Create a pipeline.
    ob::Pipeline pipe;

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Get device from pipeline.
    auto device = pipe.getDevice();

    // Get sensorList from device.
    auto sensorList = device->getSensorList();

    for(uint32_t index = 0; index < sensorList->getCount(); index++) {
        // Query all supported infrared sensor types and enable the infrared stream.
        // For a dual-infrared device, enable the left and right infrared streams.
        // For a single-infrared device, enable the infrared stream.
        OBSensorType sensorType = sensorList->getSensorType(index);

        // Exclude non-video sensor types.
        if(!ob::TypeHelper::isVideoSensorType(sensorType)) {
            continue;
        }

        if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
            if(sensorType == OB_SENSOR_COLOR) {
                continue;
            }
        }

        // Enable the stream for the sensor type.
        config->enableStream(sensorType);
    }

    std::mutex                    framesetMutex;
    std::shared_ptr<ob::FrameSet> frameset = nullptr;

    // Start the pipeline with callback.
    pipe.start(config, [&](std::shared_ptr<ob::FrameSet> output) {
        std::lock_guard<std::mutex> lock(framesetMutex);
        frameset = output;
    });

    // Create a window for rendering, and set the size of the window.
    ob_smpl::CVWindow win("Callback", 1280, 720, ob_smpl::ARRANGE_GRID);

    while(win.run()) {
        std::lock_guard<std::mutex> lock(framesetMutex);

        if(frameset == nullptr) {
            continue;
        }

        // Rendering display
        win.pushFramesToView(frameset);
    }

    // Stop the pipeline; no frame data will be generated.
    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_color)

add_executable(${PROJECT_NAME} color.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,47 @@
# C++ Sample: 1.stream.color

## Overview

Use the SDK interface to obtain the camera's color stream and display it in the window.

### Knowledge

Config is used to configure which streams the camera outputs.
Frameset is a combination of different types of Frames.

## Code overview

1. Configure the output color stream and open the video stream. You must configure this before calling pipe.start().

```cpp
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

// Enable color video stream.
config->enableVideoStream(OB_STREAM_COLOR);
```

2. After waiting for a frameset, get the color frame from it and display it in the window. A sketch that inspects the frame's basic properties follows the code block.

```cpp
while(win.run()) {
    // Wait for a frameset in blocking mode; the default timeout is 1000 ms.
    auto frameSet = pipe.waitForFrameset();
    if(frameSet == nullptr) {
        continue;
    }

    // Get color frame from frameset.
    auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
    // Render colorFrame.
    win.pushFramesToView(colorFrame);
}
```
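If the pixel dimensions or format are needed (for example to allocate a conversion buffer), the returned frame can be viewed as a video frame. A small hedged sketch, assuming an `ob::VideoFrame` view with accessors analogous to the depth sample's `getWidth()`/`getHeight()`/`getFormat()`:

```cpp
// Hedged sketch: inspect basic properties of the color frame before using its data.
auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
if(colorFrame) {
    auto videoFrame = colorFrame->as<ob::VideoFrame>();
    std::cout << "color frame: " << videoFrame->getWidth() << "x" << videoFrame->getHeight()
              << ", format: " << ob::TypeHelper::convertOBFormatTypeToString(videoFrame->getFormat()) << std::endl;
}
```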
## Run Sample

Press the Esc key in the window to exit the program.

### Result

![color]()
@@ -0,0 +1,50 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_opencv.hpp"

int main(void) try {

    // Create a pipeline with default device.
    ob::Pipeline pipe;

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enable color video stream.
    config->enableVideoStream(OB_STREAM_COLOR);

    // Start the pipeline with config.
    pipe.start(config);

    // Create a window for rendering and set the resolution of the window.
    ob_smpl::CVWindow win("Color");

    while(win.run()) {
        // Wait for a frameset in blocking mode; the default timeout is 1000 ms.
        auto frameSet = pipe.waitForFrameset();
        if(frameSet == nullptr) {
            continue;
        }

        // Get color frame from frameset.
        auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
        // Render colorFrame.
        win.pushFramesToView(colorFrame);
    }

    // Stop the pipeline; no frame data will be generated.
    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,17 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_confidence)

add_executable(${PROJECT_NAME} confidence.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,58 @@
# C++ Sample: 1.stream.confidence

## Overview

Use the SDK interface to obtain the depth and confidence streams of the camera and display them in the window.

### Knowledge

Enabling the confidence stream requires the depth stream to be active, and its resolution and frame rate must match the depth stream's.

## Code overview

1. Configure the depth and confidence streams, then start the pipeline with this configuration. All stream configurations must be completed before calling pipe.start().

```cpp
// By creating a config, configure which streams to enable or disable for the pipeline; here the depth stream will be enabled.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

// Enable depth stream.
config->enableVideoStream(OB_STREAM_DEPTH);

// Enable confidence stream. The resolution and fps of confidence must match the depth stream.
auto enabledProfiles = config->getEnabledStreamProfileList();
if(enabledProfiles) {
    for(uint32_t i = 0; i < enabledProfiles->getCount(); i++) {
        auto profile = enabledProfiles->getProfile(i);
        if(profile && profile->getType() == OB_STREAM_DEPTH) {
            auto depthProfile = profile->as<ob::VideoStreamProfile>();
            if(depthProfile) {
                config->enableVideoStream(OB_STREAM_CONFIDENCE, depthProfile->getWidth(), depthProfile->getHeight(), depthProfile->getFps());
            }
            break;
        }
    }
}
```

2. After waiting for a frameset, get the depth and confidence frames from it and display them in the window.

```cpp
while(win.run()) {
    // Wait for up to 100 ms for a frameset in blocking mode.
    auto frameSet = pipe.waitForFrameset(100);
    if(frameSet == nullptr) {
        continue;
    }
    // Render frame in the window.
    win.pushFramesToView(frameSet);
}
```

## Run Sample

Press the Esc key in the window to exit the program.

### Result

![confidence]()
@@ -0,0 +1,73 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_opencv.hpp"

#include <thread>

#define IS_GEMINI_435LE(pid) (pid == 0x0815)

int main(void) try {

    // Create a pipeline with default device.
    ob::Pipeline pipe;

    // This example only supports the Gemini 435Le device.
    auto device = pipe.getDevice();
    if(!IS_GEMINI_435LE(device->getDeviceInfo()->getPid())) {
        std::cout << "This example only supports Gemini 435Le device." << std::endl;
        std::cout << "\nPress any key to exit.";
        ob_smpl::waitForKeyPressed();
        return 0;
    }

    // By creating a config, configure which streams to enable or disable for the pipeline; here the depth stream will be enabled.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enable depth stream.
    config->enableVideoStream(OB_STREAM_DEPTH);

    // Enable confidence stream. The resolution and fps of confidence must match the depth stream.
    auto enabledProfiles = config->getEnabledStreamProfileList();
    if(enabledProfiles) {
        for(uint32_t i = 0; i < enabledProfiles->getCount(); i++) {
            auto profile = enabledProfiles->getProfile(i);
            if(profile && profile->getType() == OB_STREAM_DEPTH) {
                auto depthProfile = profile->as<ob::VideoStreamProfile>();
                if(depthProfile) {
                    config->enableVideoStream(OB_STREAM_CONFIDENCE, depthProfile->getWidth(), depthProfile->getHeight(), depthProfile->getFps());
                }
                break;
            }
        }
    }
    // Start the pipeline with config.
    pipe.start(config);

    // Create a window for rendering, and set the resolution of the window.
    ob_smpl::CVWindow win("Confidence", 1280, 720, ob_smpl::ARRANGE_GRID);

    while(win.run()) {
        // Wait for up to 100 ms for a frameset in blocking mode.
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }
        // Render frame in the window.
        win.pushFramesToView(frameSet);
    }

    // Stop the pipeline.
    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,22 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_depth)

add_executable(${PROJECT_NAME} depth.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,51 @@
# C++ Sample: 1.stream.depth

## Overview

Use the SDK interface to obtain the depth stream of the camera and display it in the window.

### Knowledge

DepthFrame provides access to the depth data and its related information.

## Code overview

1. Configure the output depth stream and open the video stream. You must configure this before calling pipe.start().

```cpp
// By creating a config, configure which streams to enable or disable for the pipeline; here the depth stream will be enabled.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

// This is the default depth stream profile that is enabled. If you want to modify it, you can do so in the configuration file.
config->enableVideoStream(OB_STREAM_DEPTH);
```

2. From the acquired Y16-format depth frame, calculate the distance from the camera to the object at the center pixel and display it in the window; the value is refreshed every 30 frames. The SDK's default depth unit is millimeters. A short sketch extending this to the nearest valid pixel follows the code block.

```cpp
// Get the depth frame from depthFrameRaw.
auto depthFrame = depthFrameRaw->as<ob::DepthFrame>();
// For a Y16-format depth frame, print the distance of the center pixel every 30 frames.
if(depthFrame->getIndex() % 30 == 0 && depthFrame->getFormat() == OB_FORMAT_Y16) {
    uint32_t        width  = depthFrame->getWidth();
    uint32_t        height = depthFrame->getHeight();
    float           scale  = depthFrame->getValueScale();
    const uint16_t *data   = reinterpret_cast<const uint16_t *>(depthFrame->getData());

    // The pixel value multiplied by the scale is the actual distance value in millimeters.
    float centerDistance = data[width * height / 2 + width / 2] * scale;

    // Attention: if the distance is 0, it means that the depth camera cannot detect the object (it may be out of the detection range).
    win.addLog("Facing an object at a distance of " + ob_smpl::toString(centerDistance, 3) + " mm. ");
}
```
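Building on the same buffer and scale, a hedged sketch that scans the whole frame for the nearest non-zero measurement (plain C++ over the `data`, `width`, `height`, and `scale` variables above; the loop itself is an illustrative addition, not part of this sample):

```cpp
// Hedged sketch: find the nearest valid (non-zero) depth value in the frame, in millimeters.
float nearestMm = 0.0f;
for(uint32_t i = 0; i < width * height; i++) {
    if(data[i] == 0) {
        continue;  // 0 means no measurement for this pixel.
    }
    float distanceMm = data[i] * scale;
    if(nearestMm == 0.0f || distanceMm < nearestMm) {
        nearestMm = distanceMm;
    }
}
win.addLog("Nearest valid pixel: " + ob_smpl::toString(nearestMm, 3) + " mm. ");
```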
## Run Sample

Moving the camera changes the distance reported at the center pixel.
Press the Esc key in the window to exit the program.

### Result

![depth]()
@@ -0,0 +1,71 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_opencv.hpp"

#include <thread>

int main(void) try {

    // Create a pipeline with default device.
    ob::Pipeline pipe;

    // By creating a config, configure which streams to enable or disable for the pipeline; here the depth stream will be enabled.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // This is the default depth stream profile that is enabled. If you want to modify it, you can do so in the configuration file.
    config->enableVideoStream(OB_STREAM_DEPTH);

    // Start the pipeline with config.
    pipe.start(config);

    // Create a window for rendering, and set the resolution of the window.
    ob_smpl::CVWindow win("Depth");

    while(win.run()) {
        // Wait for up to 100 ms for a frameset in blocking mode.
        auto frameSet = pipe.waitForFrameset(100);
        if(frameSet == nullptr) {
            continue;
        }

        // Get the raw depth frame from the frameset.
        auto depthFrameRaw = frameSet->getFrame(OB_FRAME_DEPTH);
        if(!depthFrameRaw) {
            continue;
        }

        // Get the depth frame from depthFrameRaw.
        auto depthFrame = depthFrameRaw->as<ob::DepthFrame>();
        // For a Y16-format depth frame, print the distance of the center pixel every 30 frames.
        if(depthFrame->getIndex() % 30 == 0 && depthFrame->getFormat() == OB_FORMAT_Y16) {
            uint32_t        width  = depthFrame->getWidth();
            uint32_t        height = depthFrame->getHeight();
            float           scale  = depthFrame->getValueScale();
            const uint16_t *data   = reinterpret_cast<const uint16_t *>(depthFrame->getData());

            // The pixel value multiplied by the scale is the actual distance value in millimeters.
            float centerDistance = data[width * height / 2 + width / 2] * scale;

            // Attention: if the distance is 0, it means that the depth camera cannot detect the object (it may be out of the detection range).
            win.addLog("Facing an object at a distance of " + ob_smpl::toString(centerDistance, 3) + " mm. ");
        }

        // Render frame in the window.
        win.pushFramesToView(depthFrame);
    }

    // Stop the pipeline.
    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,18 @@
# Copyright (c) Orbbec Inc. All Rights Reserved.
# Licensed under the MIT License.

cmake_minimum_required(VERSION 3.5)
project(ob_imu)

add_executable(${PROJECT_NAME} imu.cpp)

set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)

set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
if(MSVC)
    set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
endif()

install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
@@ -0,0 +1,66 @@
# C++ Sample: 1.stream.imu

## Overview

Use the SDK interface to obtain the camera's internal IMU data and print it.

### Knowledge

AccelFrame reports the acceleration along x, y, and z in m/s^2.
GyroFrame reports the angular velocity around x, y, and z in rad/s.

Frameset is a combination of different types of Frames; the IMU data streams can be obtained through the frameset.

## Code overview

1. Configure the IMU-related output and open the streams. You must configure this before calling pipe.start().

```cpp
// Configure which streams to enable or disable for the Pipeline by creating a Config.
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

// Enable Accel stream.
config->enableAccelStream();

// Enable Gyro stream.
config->enableGyroStream();

// Only FrameSets that contain all types of data frames will be output.
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
```

2. Get the accelerometer and gyroscope frames from the frameset and print their values every 50 frames. A hedged sketch that derives the acceleration magnitude from these values follows the code block.

```cpp
auto accelFrameRaw    = frameSet->getFrame(OB_FRAME_ACCEL);
auto accelFrame       = accelFrameRaw->as<ob::AccelFrame>();
auto accelIndex       = accelFrame->getIndex();
auto accelTimeStampUs = accelFrame->getTimeStampUs();
auto accelTemperature = accelFrame->getTemperature();
auto accelType        = accelFrame->getType();
if(accelIndex % 50 == 0) {  // print information every 50 frames.
    auto accelValue = accelFrame->getValue();
    printImuValue(accelValue, accelIndex, accelTimeStampUs, accelTemperature, accelType, "m/s^2");
}

auto gyroFrameRaw    = frameSet->getFrame(OB_FRAME_GYRO);
auto gyroFrame       = gyroFrameRaw->as<ob::GyroFrame>();
auto gyroIndex       = gyroFrame->getIndex();
auto gyroTimeStampUs = gyroFrame->getTimeStampUs();
auto gyroTemperature = gyroFrame->getTemperature();
auto gyroType        = gyroFrame->getType();
if(gyroIndex % 50 == 0) {  // print information every 50 frames.
    auto gyroValue = gyroFrame->getValue();
    printImuValue(gyroValue, gyroIndex, gyroTimeStampUs, gyroTemperature, gyroType, "rad/s");
}
```
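Since the accelerometer values are in m/s^2, a stationary device should report a magnitude close to gravity, about 9.8 m/s^2. A small hedged sketch over the `OBFloat3D` value returned above (the magnitude computation is an illustrative addition, not part of this sample):

```cpp
// Hedged sketch: compute the acceleration magnitude; near 9.8 m/s^2 when the device is at rest.
#include <cmath>

float accelMagnitude(const OBFloat3D &v) {
    return std::sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
}

// Usage inside the loop above:
// auto accelValue = accelFrame->getValue();
// std::cout << "accel magnitude: " << accelMagnitude(accelValue) << " m/s^2" << std::endl;
```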
## Run Sample

Press the Esc key in the window to exit the program.

### Result

![imu]()
@@ -0,0 +1,90 @@
// Copyright (c) Orbbec Inc. All Rights Reserved.
// Licensed under the MIT License.

#include <libobsensor/ObSensor.hpp>

#include "utils.hpp"
#include "utils_types.h"

#include <mutex>
#include <iostream>

void printImuValue(OBFloat3D obFloat3d, uint64_t index, uint64_t timeStampUs, float temperature, OBFrameType type, const std::string &unitStr) {
    std::cout << "frame index: " << index << std::endl;
    auto typeStr = ob::TypeHelper::convertOBFrameTypeToString(type);
    std::cout << typeStr << " Frame: \n\r{\n\r"
              << "  tsp = " << timeStampUs << "\n\r"
              << "  temperature = " << temperature << "\n\r"
              << "  " << typeStr << ".x = " << obFloat3d.x << unitStr << "\n\r"
              << "  " << typeStr << ".y = " << obFloat3d.y << unitStr << "\n\r"
              << "  " << typeStr << ".z = " << obFloat3d.z << unitStr << "\n\r"
              << "}\n\r" << std::endl;
}

int main() try {

    // Create a pipeline with default device.
    ob::Pipeline pipe;

    // Configure which streams to enable or disable for the Pipeline by creating a Config.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();

    // Enable Accel stream.
    config->enableAccelStream();

    // Enable Gyro stream.
    config->enableGyroStream();

    // Only FrameSets that contain all types of data frames will be output.
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);

    // Start the pipeline with config.
    pipe.start(config);

    uint64_t accelCount = 0;
    uint64_t gyroCount  = 0;
    while(true) {
        auto key = ob_smpl::waitForKeyPressed(1);
        if(key == ESC_KEY) {  // Esc key to exit.
            break;
        }
        auto frameSet = pipe.waitForFrameset();
        if(frameSet == nullptr) {
            continue;
        }

        auto accelFrameRaw    = frameSet->getFrame(OB_FRAME_ACCEL);
        auto accelFrame       = accelFrameRaw->as<ob::AccelFrame>();
        auto accelIndex       = accelFrame->getIndex();
        auto accelTimeStampUs = accelFrame->getTimeStampUs();
        auto accelTemperature = accelFrame->getTemperature();
        auto accelType        = accelFrame->getType();
        if(accelCount % 50 == 0) {  // print information every 50 frames.
            auto accelValue = accelFrame->getValue();
            printImuValue(accelValue, accelIndex, accelTimeStampUs, accelTemperature, accelType, "m/s^2");
        }
        ++accelCount;

        auto gyroFrameRaw    = frameSet->getFrame(OB_FRAME_GYRO);
        auto gyroFrame       = gyroFrameRaw->as<ob::GyroFrame>();
        auto gyroIndex       = gyroFrame->getIndex();
        auto gyroTimeStampUs = gyroFrame->getTimeStampUs();
        auto gyroTemperature = gyroFrame->getTemperature();
        auto gyroType        = gyroFrame->getType();
        if(gyroCount % 50 == 0) {  // print information every 50 frames.
            auto gyroValue = gyroFrame->getValue();
            printImuValue(gyroValue, gyroIndex, gyroTimeStampUs, gyroTemperature, gyroType, "rad/s");
        }
        ++gyroCount;
    }

    pipe.stop();

    return 0;
}
catch(ob::Error &e) {
    std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
    std::cout << "\nPress any key to exit.";
    ob_smpl::waitForKeyPressed();
    exit(EXIT_FAILURE);
}
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_infrared)
|
||||
|
||||
add_executable(${PROJECT_NAME} infrared.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
# C++ Sample: 1.stream.infrared
|
||||
|
||||
## Overview
|
||||
|
||||
Use the SDK interface to obtain the camera IR stream and display it in a window.
|
||||
|
||||
### Knowledge
|
||||
|
||||
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
|
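For orientation, the typical Pipeline lifecycle used throughout these samples is sketched below (a minimal outline with error handling and stream configuration omitted; the full version of this sample follows in the code overview):

```cpp
#include <libobsensor/ObSensor.hpp>
#include <memory>

int main() {
    ob::Pipeline pipe;                                        // opens the default device
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    // ... enable the desired streams on config here ...
    pipe.start(config);                                       // start streaming
    auto frameSet = pipe.waitForFrameset(100);                // wait up to 100 ms for a frameset
    pipe.stop();                                              // stop streaming before exit
    return 0;
}
```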
||||
|
||||
## Code Overview
|
||||
|
||||
1. Configure the IR sensor-related information and enable the IR stream. You must configure this before calling pipe.start().
|
||||
|
||||
```cpp
|
||||
// Get the device from pipeline.
|
||||
std::shared_ptr<ob::Device> device = pipe.getDevice();
|
||||
|
||||
// Get the sensor list from device.
|
||||
std::shared_ptr<ob::SensorList> sensorList = device->getSensorList();
|
||||
|
||||
// Create a config for pipeline.
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
|
||||
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
|
||||
// Query all supported infrared sensor type and enable the infrared stream.
|
||||
// For dual infrared device, enable the left and right infrared streams.
|
||||
// For single infrared device, enable the infrared stream.
|
||||
OBSensorType sensorType = sensorList->getSensorType(index);
|
||||
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT) {
|
||||
// Enable the stream with specified profile;
|
||||
config->enableVideoStream(sensorType, OB_WIDTH_ANY, OB_HEIGHT_ANY, 30, OB_FORMAT_ANY);
|
||||
}
|
||||
}
|
||||
```
|
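Once the infrared streams have been enabled on the config, the pipeline is started with it (this is the call the note in step 1 refers to):

```cpp
pipe.start(config);
```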
||||
|
||||
2. Open the window and output the IR stream
|
||||
|
||||
```cpp
|
||||
ob_smpl::CVWindow win("Infrared", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
|
||||
while(win.run()) {
|
||||
// Wait for up to 100ms for a frameset in blocking mode.
|
||||
auto frameSet = pipe.waitForFrameset(100);
|
||||
if(frameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Render a set of frames in the window.
|
||||
win.pushFramesToView(frameSet);
|
||||
}
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Press the Esc key in the window to exit the program.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,65 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include "utils_opencv.hpp"
|
||||
|
||||
std::map<OBSensorType, ob_stream_type> sensorStreamMap = {
|
||||
{OB_SENSOR_IR, OB_STREAM_IR},
|
||||
{OB_SENSOR_IR_LEFT, OB_STREAM_IR_LEFT},
|
||||
{OB_SENSOR_IR_RIGHT, OB_STREAM_IR_RIGHT}
|
||||
};
|
||||
|
||||
int main() try {
|
||||
// Create a pipeline with default device.
|
||||
ob::Pipeline pipe;
|
||||
|
||||
// Get the device from pipeline.
|
||||
std::shared_ptr<ob::Device> device = pipe.getDevice();
|
||||
|
||||
// Get the sensor list from device.
|
||||
std::shared_ptr<ob::SensorList> sensorList = device->getSensorList();
|
||||
|
||||
// Create a config for pipeline.
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
|
||||
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
|
||||
// Query all supported infrared sensor type and enable the infrared stream.
|
||||
// For dual infrared device, enable the left and right infrared streams.
|
||||
// For single infrared device, enable the infrared stream.
|
||||
OBSensorType sensorType = sensorList->getSensorType(index);
|
||||
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT) {
|
||||
// Enable the stream with specified profile;
|
||||
config->enableVideoStream(sensorType, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
|
||||
}
|
||||
}
|
||||
|
||||
pipe.start(config);
|
||||
|
||||
// Create a window for rendering and set the resolution of the window
|
||||
ob_smpl::CVWindow win("Infrared", 1280, 720, ob_smpl::ARRANGE_ONE_ROW);
|
||||
while(win.run()) {
|
||||
// Wait for up to 100ms for a frameset in blocking mode.
|
||||
auto frameSet = pipe.waitForFrameset(100);
|
||||
if(frameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Render a set of frames in the window.
|
||||
win.pushFramesToView(frameSet);
|
||||
}
|
||||
|
||||
// Stop the pipeline, no frame data will be generated
|
||||
pipe.stop();
|
||||
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_multi_streams)
|
||||
|
||||
add_executable(${PROJECT_NAME} multi_streams.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,74 @@
|
||||
# C++ Sample: 1.stream.multi_streams
|
||||
|
||||
## Overview
|
||||
|
||||
Use the SDK to obtain multiple camera data streams and output them.
|
||||
|
||||
### Knowledge
|
||||
|
||||
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
|
||||
|
||||
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
|
||||
|
||||
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
|
||||
|
||||
Frameset is a combination of different types of Frames.
|
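As a small illustration of the Frameset concept, individual frames can be pulled out of a FrameSet by type (a sketch; the frame-type constants and the getFrame call follow the pattern used by the IMU sample in this SDK):

```cpp
// Assuming `frameSet` is a std::shared_ptr<ob::FrameSet> obtained from a pipeline:
auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);  // nullptr if the color stream is not enabled
auto depthFrame = frameSet->getFrame(OB_FRAME_DEPTH);  // nullptr if the depth stream is not enabled
if(depthFrame) {
    std::cout << "depth frame index: " << depthFrame->getIndex() << std::endl;
}
```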
||||
|
||||
## Code Overview
|
||||
|
||||
1. Configure the video streams to output (such as depth and color), excluding the IMU (gyro and accel) streams.
|
||||
|
||||
```cpp
|
||||
// Get sensor list from device.
|
||||
auto sensorList = device->getSensorList();
|
||||
|
||||
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
|
||||
// Get sensor type.
|
||||
auto sensorType = sensorList->getSensorType(i);
|
||||
|
||||
// exclude gyro and accel sensors.
|
||||
if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// enable the stream.
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
|
||||
// Start the pipeline with config
|
||||
std::mutex frameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderFrameSet;
|
||||
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
renderFrameSet = frameSet;
|
||||
});
|
||||
```
|
||||
|
||||
2. Create a separate pipeline for the IMU, enable the gyro and accel streams, and start streaming.
|
||||
|
||||
```cpp
|
||||
// The IMU frame rate is much faster than the video, so it is advisable to use a separate pipeline to obtain IMU data.
|
||||
auto dev = pipe.getDevice();
|
||||
auto imuPipeline = std::make_shared<ob::Pipeline>(dev);
|
||||
std::mutex imuFrameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderImuFrameSet;
|
||||
|
||||
std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
|
||||
// enable gyro stream.
|
||||
imuConfig->enableGyroStream();
|
||||
// enable accel stream.
|
||||
imuConfig->enableAccelStream();
|
||||
// start the imu pipeline.
|
||||
imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lockImu(imuFrameMutex);
|
||||
renderImuFrameSet = frameSet;
|
||||
});
|
||||
```
|
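The two framesets received in the callbacks are then rendered together in the main loop; the snippet below mirrors the full source of this sample (both mutexes are locked before the shared pointers are read):

```cpp
// Create a window for rendering and set the resolution of the window.
ob_smpl::CVWindow win("MultiStream", 1280, 720, ob_smpl::ARRANGE_GRID);
while(win.run()) {
    std::lock_guard<std::mutex> lockImu(imuFrameMutex);
    std::lock_guard<std::mutex> lock(frameMutex);
    if(renderFrameSet == nullptr || renderImuFrameSet == nullptr) {
        continue;
    }
    // Render the camera and IMU framesets together.
    win.pushFramesToView({ renderFrameSet, renderImuFrameSet });
}
```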
||||
|
||||
## Run Sample
|
||||
|
||||
Press the Esc key in the window to exit the program.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,121 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.h>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include "utils_opencv.hpp"
|
||||
|
||||
#include <mutex>
|
||||
#include <thread>
|
||||
|
||||
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
|
||||
|
||||
int main(void) try {
|
||||
|
||||
// Create a pipeline with default device.
|
||||
ob::Pipeline pipe;
|
||||
|
||||
// Configure which streams to enable or disable for the Pipeline by creating a Config.
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
|
||||
// Enumerate and config all sensors.
|
||||
auto device = pipe.getDevice();
|
||||
|
||||
// Get sensor list from device.
|
||||
auto sensorList = device->getSensorList();
|
||||
|
||||
bool supportIMU = false;
|
||||
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
|
||||
// Get sensor type.
|
||||
auto sensorType = sensorList->getSensorType(i);
|
||||
|
||||
// exclude gyro and accel sensors.
|
||||
if(sensorType == OB_SENSOR_GYRO || sensorType == OB_SENSOR_ACCEL) {
|
||||
supportIMU = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
|
||||
if(sensorType == OB_SENSOR_COLOR) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// enable the stream.
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
|
||||
// Start the pipeline with config
|
||||
std::mutex frameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderFrameSet;
|
||||
pipe.start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
renderFrameSet = frameSet;
|
||||
});
|
||||
|
||||
if(supportIMU) {
|
||||
// The IMU frame rate is much faster than the video, so it is advisable to use a separate pipeline to obtain IMU data.
|
||||
auto dev = pipe.getDevice();
|
||||
auto imuPipeline = std::make_shared<ob::Pipeline>(dev);
|
||||
std::mutex imuFrameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderImuFrameSet;
|
||||
|
||||
std::shared_ptr<ob::Config> imuConfig = std::make_shared<ob::Config>();
|
||||
// enable gyro stream.
|
||||
imuConfig->enableGyroStream();
|
||||
// enable accel stream.
|
||||
imuConfig->enableAccelStream();
|
||||
// start the imu pipeline.
|
||||
imuPipeline->start(imuConfig, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lockImu(imuFrameMutex);
|
||||
renderImuFrameSet = frameSet;
|
||||
});
|
||||
|
||||
// Create a window for rendering and set the resolution of the window
|
||||
ob_smpl::CVWindow win("MultiStream", 1280, 720, ob_smpl::ARRANGE_GRID);
|
||||
while(win.run()) {
|
||||
std::lock_guard<std::mutex> lockImu(imuFrameMutex);
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
|
||||
if(renderFrameSet == nullptr || renderImuFrameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
// Render camera and imu frameset.
|
||||
win.pushFramesToView({ renderFrameSet, renderImuFrameSet });
|
||||
}
|
||||
|
||||
// Stop the Pipeline, no frame data will be generated.
|
||||
pipe.stop();
|
||||
|
||||
if(supportIMU) {
|
||||
// Stop the IMU Pipeline, no frame data will be generated.
|
||||
imuPipeline->stop();
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Create a window for rendering and set the resolution of the window
|
||||
ob_smpl::CVWindow win("MultiStream", 1280, 720, ob_smpl::ARRANGE_GRID);
|
||||
while(win.run()) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
|
||||
if(renderFrameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
// Render the camera frameset (this device has no IMU).
|
||||
win.pushFramesToView(renderFrameSet);
|
||||
}
|
||||
|
||||
// Stop the Pipeline, no frame data will be generated.
|
||||
pipe.stop();
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_control)
|
||||
|
||||
add_executable(${PROJECT_NAME} device_control.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
@@ -0,0 +1,176 @@
|
||||
# C++ Sample: 2.device.control
|
||||
|
||||
## Overview
|
||||
|
||||
The SDK can be used to modify camera-related parameters, including laser switch, laser level intensity, white balance switch, etc.
|
||||
|
||||
### Knowledge
|
||||
|
||||
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
|
||||
|
||||
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
|
||||
|
||||
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
|
||||
|
||||
## Code Overview
|
||||
|
||||
1. Get camera-related information and print it
|
||||
|
||||
```cpp
|
||||
// select a device to operate
|
||||
std::shared_ptr<ob::Device> device = nullptr;
|
||||
if(deviceList->getCount() > 0) {
|
||||
if(deviceList->getCount() <= 1) {
|
||||
// If a single device is plugged in, the first one is selected by default
|
||||
device = deviceList->getDevice(0);
|
||||
}
|
||||
else {
|
||||
device = selectDevice(deviceList);
|
||||
}
|
||||
auto deviceInfo = device->getDeviceInfo();
|
||||
std::cout << "\n------------------------------------------------------------------------\n";
|
||||
std::cout << "Current Device: "
|
||||
<< " name: " << deviceInfo->getName() << ", vid: 0x" << std::hex << deviceInfo->getVid() << ", pid: 0x" << std::setw(4)
|
||||
<< std::setfill('0') << deviceInfo->getPid() << ", uid: 0x" << deviceInfo->getUid() << std::dec << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Device Not Found" << std::endl;
|
||||
isSelectDevice = false;
|
||||
break;
|
||||
}
|
||||
```
|
||||
|
||||
2. Collect the supported properties into a container and sort them by id
|
||||
|
||||
```cpp
|
||||
// Get property list
|
||||
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device) {
|
||||
std::vector<OBPropertyItem> propertyVec;
|
||||
propertyVec.clear();
|
||||
uint32_t size = device->getSupportedPropertyCount();
|
||||
for(uint32_t i = 0; i < size; i++) {
|
||||
OBPropertyItem property_item = device->getSupportedProperty(i);
|
||||
if(isPrimaryTypeProperty(property_item) && property_item.permission != OB_PERMISSION_DENY) {
|
||||
propertyVec.push_back(property_item);
|
||||
}
|
||||
}
|
||||
return propertyVec;
|
||||
}
|
||||
```
|
||||
|
||||
```cpp
|
||||
std::vector<OBPropertyItem> propertyList = getPropertyList(device);
|
||||
std::sort(propertyList.begin(), propertyList.end(), [](const OBPropertyItem &a, const OBPropertyItem &b) { return a.id < b.id; });
|
||||
```
|
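When the user types `?` at the prompt, the sample prints this sorted list through a helper (shown in full in the source file):

```cpp
printfPropertyList(device, propertyList);
std::cout << "Please select property.(Property control usage: [property number] [set/get] [property value])" << std::endl;
```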
||||
|
||||
3. Use the get command to obtain camera-related property values
|
||||
|
||||
```cpp
|
||||
// get property value
|
||||
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem) {
|
||||
try {
|
||||
bool bool_ret = false;
|
||||
int int_ret = 0;
|
||||
float float_ret = 0.0f;
|
||||
|
||||
switch(propertyItem.type) {
|
||||
case OB_BOOL_PROPERTY:
|
||||
try {
|
||||
bool_ret = device->getBoolProperty(propertyItem.id);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get bool property failed." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get bool value:" << bool_ret << std::endl;
|
||||
break;
|
||||
case OB_INT_PROPERTY:
|
||||
try {
|
||||
int_ret = device->getIntProperty(propertyItem.id);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get int property failed." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get int value:" << int_ret << std::endl;
|
||||
break;
|
||||
case OB_FLOAT_PROPERTY:
|
||||
try {
|
||||
float_ret = device->getFloatProperty(propertyItem.id);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get float property failed." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get float value:" << float_ret << std::endl;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get property failed: " << propertyItem.name << std::endl;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Use the set command to set camera-related property values
|
||||
|
||||
```cpp
|
||||
// set properties
|
||||
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem, std::string strValue) {
|
||||
try {
|
||||
int int_value = 0;
|
||||
float float_value = 0.0f;
|
||||
int bool_value = 0;
|
||||
switch(propertyItem.type) {
|
||||
case OB_BOOL_PROPERTY:
|
||||
bool_value = std::atoi(strValue.c_str());
|
||||
try {
|
||||
device->setBoolProperty(propertyItem.id, bool_value);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "set bool property fail." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set bool value:" << bool_value << std::endl;
|
||||
break;
|
||||
case OB_INT_PROPERTY:
|
||||
int_value = std::atoi(strValue.c_str());
|
||||
try {
|
||||
device->setIntProperty(propertyItem.id, int_value);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "set int property fail." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set int value:" << int_value << std::endl;
|
||||
break;
|
||||
case OB_FLOAT_PROPERTY:
|
||||
float_value = static_cast<float>(std::atof(strValue.c_str())) ;
|
||||
try {
|
||||
device->setFloatProperty(propertyItem.id, float_value);
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "set float property fail." << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set float value:" << float_value << std::endl;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "set property failed: " << propertyItem.name << std::endl;
|
||||
}
|
||||
}
|
||||
```
|
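Putting the helpers together, the `6 get` / `6 set 0` commands from the Run Sample section boil down to calls like these (the index 6 is only an illustration; valid indices depend on the sorted property list of the connected device):

```cpp
// Assuming `device` and the sorted `propertyList` from the snippets above:
size_t selectId = 6;  // hypothetical index typed by the user
if(selectId < propertyList.size()) {
    getPropertyValue(device, propertyList.at(selectId));         // "6 get"
    setPropertyValue(device, propertyList.at(selectId), "0");    // "6 set 0"
}
```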
||||
|
||||
## Run Sample
|
||||
|
||||
Select the camera you want to operate. If only a single device is connected, the selection is skipped.
|
||||
You can enter the ? command to list all the properties of the camera, including their ranges (minimum, maximum, step), permissions, etc.
|
||||
You can enter the set command to set a parameter, for example 6 set 0 (note the spaces).
|
||||
You can enter the get command to get a parameter, for example 6 get (note the space).
|
||||
Enter exit to quit the program.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
|
||||

|
||||
@@ -0,0 +1,320 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <thread>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <cstring>
|
||||
#include <iomanip>
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
||||
std::shared_ptr<ob::Device> selectDevice(std::shared_ptr<ob::DeviceList> deviceList);
|
||||
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device);
|
||||
bool isPrimaryTypeProperty(OBPropertyItem propertyItem);
|
||||
void printfPropertyList(std::shared_ptr<ob::Device> device, const std::vector<OBPropertyItem> &propertyList);
|
||||
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem item, std::string strValue);
|
||||
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem item);
|
||||
std::string permissionTypeToString(OBPermissionType permission);
|
||||
|
||||
int main(void) try {
|
||||
// Create a Context.
|
||||
ob::Context context;
|
||||
|
||||
// Query the list of connected devices
|
||||
auto deviceList = context.queryDeviceList();
|
||||
|
||||
bool isSelectDevice = true;
|
||||
while(isSelectDevice) {
|
||||
// select a device to operate
|
||||
std::shared_ptr<ob::Device> device = nullptr;
|
||||
if(deviceList->getCount() > 0) {
|
||||
if(deviceList->getCount() <= 1) {
|
||||
// If a single device is plugged in, the first one is selected by default
|
||||
device = deviceList->getDevice(0);
|
||||
}
|
||||
else {
|
||||
device = selectDevice(deviceList);
|
||||
}
|
||||
auto deviceInfo = device->getDeviceInfo();
|
||||
std::cout << "\n------------------------------------------------------------------------\n";
|
||||
std::cout << "Current Device: "
|
||||
<< " name: " << deviceInfo->getName() << ", vid: 0x" << std::hex << deviceInfo->getVid() << ", pid: 0x" << std::setw(4)
|
||||
<< std::setfill('0') << deviceInfo->getPid() << ", uid: 0x" << deviceInfo->getUid() << std::dec << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Device Not Found" << std::endl;
|
||||
isSelectDevice = false;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "Input \"?\" to get all properties." << std::endl;
|
||||
|
||||
std::vector<OBPropertyItem> propertyList = getPropertyList(device);
|
||||
std::sort(propertyList.begin(), propertyList.end(), [](const OBPropertyItem &a, const OBPropertyItem &b) { return a.id < b.id; });
|
||||
|
||||
bool isSelectProperty = true;
|
||||
while(isSelectProperty) {
|
||||
std::string choice;
|
||||
std::getline(std::cin, choice);
|
||||
|
||||
if(choice != "?") {
|
||||
std::istringstream ss(choice);
|
||||
std::string tmp;
|
||||
std::vector<std::string> controlVec;
|
||||
while(ss >> tmp) {
|
||||
controlVec.push_back(tmp);
|
||||
}
|
||||
|
||||
if(controlVec.size() <= 0)
|
||||
continue;
|
||||
|
||||
// exit the program
|
||||
if(controlVec.at(0) == "exit") {
|
||||
isSelectProperty = false;
|
||||
isSelectDevice = false;
|
||||
break;
|
||||
}
|
||||
|
||||
// Check if it matches the input format
|
||||
if(controlVec.size() <= 1 || (controlVec.at(1) != "get" && controlVec.at(1) != "set") || controlVec.size() > 3
|
||||
|| (controlVec.at(1) == "set" && controlVec.size() < 3)) {
|
||||
std::cout << "Property control usage: [property index] [set] [property value] or [property index] [get]" << std::endl;
|
||||
continue;
|
||||
}
|
||||
size_t size = propertyList.size();
|
||||
size_t selectId = std::atoi(controlVec.at(0).c_str());
|
||||
if(selectId >= size) {
|
||||
std::cout << "Your selection is out of range, please reselect: " << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
bool isGetValue = controlVec.at(1) == "get" ? true : false;
|
||||
auto propertyItem = propertyList.at(selectId);
|
||||
|
||||
if(isGetValue) {
|
||||
// get property value
|
||||
getPropertyValue(device, propertyItem);
|
||||
}
|
||||
else {
|
||||
// set property value
|
||||
setPropertyValue(device, propertyItem, controlVec.at(2));
|
||||
}
|
||||
}
|
||||
else {
|
||||
printfPropertyList(device, propertyList);
|
||||
std::cout << "Please select property.(Property control usage: [property number] [set/get] [property value])" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Select a device, the name, pid, vid, uid of the device will be printed here, and the corresponding device object will be created after selection
|
||||
std::shared_ptr<ob::Device> selectDevice(std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
int devCount = deviceList->getCount();
|
||||
std::cout << "Device list: " << std::endl;
|
||||
for(int i = 0; i < devCount; i++) {
|
||||
std::cout << i << ". name: " << deviceList->getName(i) << ", vid: 0x" << std::hex << deviceList->getVid(i) << ", pid: 0x" << std::setw(4)
|
||||
<< std::setfill('0') << deviceList->getPid(i) << ", uid: 0x" << deviceList->getUid(i) << ", sn: " << deviceList->getSerialNumber(i)
|
||||
<< std::dec << std::endl;
|
||||
}
|
||||
std::cout << "Select a device: ";
|
||||
|
||||
int devIndex;
|
||||
std::cin >> devIndex;
|
||||
while(devIndex < 0 || devIndex >= devCount || std::cin.fail()) {
|
||||
std::cin.clear();
|
||||
std::cin.ignore();
|
||||
std::cout << "Your select is out of range, please reselect: " << std::endl;
|
||||
std::cin >> devIndex;
|
||||
}
|
||||
|
||||
return deviceList->getDevice(devIndex);
|
||||
}
|
||||
|
||||
// Print a list of supported properties
|
||||
void printfPropertyList(std::shared_ptr<ob::Device> device, const std::vector<OBPropertyItem> &propertyList) {
|
||||
std::cout << "size: " << propertyList.size() << std::endl;
|
||||
if(propertyList.empty()) {
|
||||
std::cout << "No supported property!" << std::endl;
|
||||
}
|
||||
std::cout << "\n------------------------------------------------------------------------\n";
|
||||
for(size_t i = 0; i < propertyList.size(); i++) {
|
||||
auto property_item = propertyList[i];
|
||||
std::string strRange = "";
|
||||
|
||||
OBIntPropertyRange int_range;
|
||||
OBFloatPropertyRange float_range;
|
||||
switch(property_item.type) {
|
||||
case OB_BOOL_PROPERTY:
|
||||
strRange = "Bool value(min:0, max:1, step:1)";
|
||||
break;
|
||||
case OB_INT_PROPERTY: {
|
||||
try {
|
||||
int_range = device->getIntPropertyRange(property_item.id);
|
||||
strRange = "Int value(min:" + std::to_string(int_range.min) + ", max:" + std::to_string(int_range.max)
|
||||
+ ", step:" + std::to_string(int_range.step) + ")";
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get int property range failed." << std::endl;
|
||||
}
|
||||
} break;
|
||||
case OB_FLOAT_PROPERTY:
|
||||
try {
|
||||
float_range = device->getFloatPropertyRange(property_item.id);
|
||||
strRange = "Float value(min:" + std::to_string(float_range.min) + ", max:" + std::to_string(float_range.max)
|
||||
+ ", step:" + std::to_string(float_range.step) + ")";
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get float property range failed." << std::endl;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout.setf(std::ios::right);
|
||||
std::cout.fill('0');
|
||||
std::cout.width(2);
|
||||
std::cout << i << ". ";
|
||||
std::cout << property_item.name << "(" << (int)property_item.id << ")";
|
||||
std::cout << ", permission=" << permissionTypeToString(property_item.permission) << ", range=" << strRange << std::endl;
|
||||
}
|
||||
std::cout << "------------------------------------------------------------------------\n";
|
||||
}
|
||||
|
||||
bool isPrimaryTypeProperty(OBPropertyItem propertyItem) {
|
||||
return propertyItem.type == OB_INT_PROPERTY || propertyItem.type == OB_FLOAT_PROPERTY || propertyItem.type == OB_BOOL_PROPERTY;
|
||||
}
|
||||
|
||||
// Get property list
|
||||
std::vector<OBPropertyItem> getPropertyList(std::shared_ptr<ob::Device> device) {
|
||||
std::vector<OBPropertyItem> propertyVec;
|
||||
propertyVec.clear();
|
||||
uint32_t size = device->getSupportedPropertyCount();
|
||||
for(uint32_t i = 0; i < size; i++) {
|
||||
OBPropertyItem property_item = device->getSupportedProperty(i);
|
||||
if(isPrimaryTypeProperty(property_item) && property_item.permission != OB_PERMISSION_DENY) {
|
||||
propertyVec.push_back(property_item);
|
||||
}
|
||||
}
|
||||
return propertyVec;
|
||||
}
|
||||
|
||||
// set properties
|
||||
void setPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem, std::string strValue) {
|
||||
try {
|
||||
int int_value = 0;
|
||||
float float_value = 0.0f;
|
||||
int bool_value = 0;
|
||||
switch(propertyItem.type) {
|
||||
case OB_BOOL_PROPERTY:
|
||||
bool_value = std::atoi(strValue.c_str());
|
||||
try {
|
||||
device->setBoolProperty(propertyItem.id, bool_value);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "set bool property fail: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set bool value:" << bool_value << std::endl;
|
||||
break;
|
||||
case OB_INT_PROPERTY:
|
||||
int_value = std::atoi(strValue.c_str());
|
||||
try {
|
||||
device->setIntProperty(propertyItem.id, int_value);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "set int property fail: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set int value:" << int_value << std::endl;
|
||||
break;
|
||||
case OB_FLOAT_PROPERTY:
|
||||
float_value = static_cast<float>(std::atof(strValue.c_str()));
|
||||
try {
|
||||
device->setFloatProperty(propertyItem.id, float_value);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "set floar property fail: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",set float value:" << float_value << std::endl;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "set property failed: " << propertyItem.name << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
// get property value
|
||||
void getPropertyValue(std::shared_ptr<ob::Device> device, OBPropertyItem propertyItem) {
|
||||
try {
|
||||
bool bool_ret = false;
|
||||
int int_ret = 0;
|
||||
float float_ret = 0.0f;
|
||||
|
||||
switch(propertyItem.type) {
|
||||
case OB_BOOL_PROPERTY:
|
||||
try {
|
||||
bool_ret = device->getBoolProperty(propertyItem.id);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "get bool property failed: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get bool value:" << bool_ret << std::endl;
|
||||
break;
|
||||
case OB_INT_PROPERTY:
|
||||
try {
|
||||
int_ret = device->getIntProperty(propertyItem.id);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "get int property failed: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get int value:" << int_ret << std::endl;
|
||||
break;
|
||||
case OB_FLOAT_PROPERTY:
|
||||
try {
|
||||
float_ret = device->getFloatProperty(propertyItem.id);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cout << "get float property failed: " << e.what() << std::endl;
|
||||
}
|
||||
std::cout << "property name:" << propertyItem.name << ",get float value:" << float_ret << std::endl;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "get property failed: " << propertyItem.name << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
std::string permissionTypeToString(OBPermissionType permission) {
|
||||
switch(permission) {
|
||||
case OB_PERMISSION_READ:
|
||||
return "R/_";
|
||||
case OB_PERMISSION_WRITE:
|
||||
return "_/W";
|
||||
case OB_PERMISSION_READ_WRITE:
|
||||
return "R/W";
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return "_/_";
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_firmware_update)
|
||||
|
||||
add_executable(${PROJECT_NAME} device_firmware_update.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
@@ -0,0 +1,108 @@
|
||||
# C++ Sample: 2.device.firmware_update
|
||||
|
||||
## Overview
|
||||
|
||||
This sample demonstrates how to use the SDK to update the firmware of a connected device. It includes functions to list connected devices, select a device, and update its firmware.
|
||||
|
||||
> Note: This sample is not suitable for Femto Mega, Femto Mega i, and Femto Bolt devices.
|
||||
> For these devices, please refer to this repo: [https://github.com/orbbec/OrbbecFirmware](https://github.com/orbbec/OrbbecFirmware)
|
||||
|
||||
### Knowledge
|
||||
|
||||
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
|
||||
|
||||
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
|
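As a quick illustration of the two classes working together, listing devices with their firmware versions looks roughly like this (a sketch based on the printDeviceList helper in this sample's source):

```c++
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
    auto device = deviceList->getDevice(i);
    auto info   = device->getDeviceInfo();
    std::cout << "[" << i << "] " << info->getName()
              << " | SN: " << info->getSerialNumber()
              << " | Firmware version: " << info->getFirmwareVersion() << std::endl;
}
```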
||||
|
||||
## Code Overview
|
||||
|
||||
1. Initialize the SDK Context: This is necessary to access the connected devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
```
|
||||
2. List Connected Devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
devices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
```
|
||||
3. Define a Callback Function for Firmware Update Progress.
|
||||
|
||||
You can define a callback function to get the progress of the firmware update. The callback function will be called every time the device updates its progress.
|
||||
|
||||
```c++
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
if(firstCall) {
|
||||
firstCall = !firstCall;
|
||||
}
|
||||
else {
|
||||
std::cout << "\033[3F"; // Move cursor up 3 lines
|
||||
}
|
||||
|
||||
std::cout << "\033[K"; // Clear the current line
|
||||
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Status : ";
|
||||
switch(state) {
|
||||
case STAT_VERIFY_SUCCESS:
|
||||
std::cout << "Image file verification success" << std::endl;
|
||||
break;
|
||||
case STAT_FILE_TRANSFER:
|
||||
std::cout << "File transfer in progress" << std::endl;
|
||||
break;
|
||||
case STAT_DONE:
|
||||
std::cout << "Update completed" << std::endl;
|
||||
break;
|
||||
case STAT_IN_PROGRESS:
|
||||
std::cout << "Upgrade in progress" << std::endl;
|
||||
break;
|
||||
case STAT_START:
|
||||
std::cout << "Starting the upgrade" << std::endl;
|
||||
break;
|
||||
case STAT_VERIFY_IMAGE:
|
||||
std::cout << "Verifying image file" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "Unknown status or error" << std::endl;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Message : " << message << std::endl << std::flush;
|
||||
}
|
||||
```
|
||||
|
||||
4. Update the Device Firmware.
|
||||
|
||||
After selecting a device, update its firmware by calling the updateFirmware function with the specified callback.
|
||||
|
||||
```c++
|
||||
devices[deviceIndex]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
|
||||
```
|
||||
|
||||
### Attention
|
||||
|
||||
1. After the firmware update completes, you need to restart the device manually to apply the new firmware. Alternatively, you can use the `reboot()` function to restart the device programmatically.
|
||||
|
||||
```c++
|
||||
device->reboot();
|
||||
```
|
||||
|
||||
2. Don't plug out the device during the firmware update process.
|
||||
|
||||
3. For Linux users, it is recommended to use `LibUVC` as the backend, as the `V4L2` backend may cause issues on some systems. Switch the backend before creating the device, like this:
|
||||
|
||||
```c++
|
||||
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Select the device for firmware update and input the path of the firmware file. The SDK will start updating the firmware, and the progress will be displayed on the console.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,202 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
#include <memory>
|
||||
#include <algorithm>
|
||||
#include <cctype>
|
||||
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent);
|
||||
bool getFirmwarePath(std::string &firmwarePath);
|
||||
bool selectDevice(int &deviceIndex);
|
||||
void printDeviceList();
|
||||
|
||||
bool firstCall = true;
|
||||
std::vector<std::shared_ptr<ob::Device>> devices{};
|
||||
|
||||
int main() try {
|
||||
// Create a context to access the connected devices
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
#if defined(__linux__)
|
||||
// On Linux, it is recommended to use the libuvc backend for device access as v4l2 is not always reliable on some systems for firmware update.
|
||||
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
|
||||
#endif
|
||||
|
||||
// Get connected devices from the context
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
if(deviceList->getCount() == 0) {
|
||||
std::cout << "No device found. Please connect a device first!" << std::endl;
|
||||
std::cout << "Press any key to exit..." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
return 0;
|
||||
}
|
||||
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
devices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
std::cout << "Devices found:" << std::endl;
|
||||
printDeviceList();
|
||||
|
||||
while(true) {
|
||||
firstCall = true;
|
||||
int deviceIndex = -1;
|
||||
|
||||
if(!selectDevice(deviceIndex)) {
|
||||
break;
|
||||
}
|
||||
|
||||
std::string firmwarePath;
|
||||
if(!getFirmwarePath(firmwarePath)) {
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "Upgrading device firmware, please wait...\n\n";
|
||||
try {
|
||||
// Set async to false to synchronously block and wait for the device firmware upgrade to complete.
|
||||
devices[deviceIndex]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
// If the update fails, an exception will be thrown.
|
||||
std::cerr << "\nThe upgrade was interrupted! An error occurred! " << std::endl;
|
||||
std::cerr << "Error message: " << e.what() << std::endl;
|
||||
std::cout << "Press any key to exit." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
break;
|
||||
}
|
||||
|
||||
std::string input;
|
||||
std::cout << "Enter 'Q' or 'q' to quit, or any other key to continue: ";
|
||||
std::getline(std::cin, input);
|
||||
if(input == "Q" || input == "q") {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent)
|
||||
|
||||
{
|
||||
if(firstCall) {
|
||||
firstCall = !firstCall;
|
||||
}
|
||||
else {
|
||||
std::cout << "\033[3F"; // Move cursor up 3 lines
|
||||
}
|
||||
|
||||
std::cout << "\033[K"; // Clear the current line
|
||||
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Status : ";
|
||||
switch(state) {
|
||||
case STAT_VERIFY_SUCCESS:
|
||||
std::cout << "Image file verification success" << std::endl;
|
||||
break;
|
||||
case STAT_FILE_TRANSFER:
|
||||
std::cout << "File transfer in progress" << std::endl;
|
||||
break;
|
||||
case STAT_DONE:
|
||||
std::cout << "Update completed" << std::endl;
|
||||
break;
|
||||
case STAT_IN_PROGRESS:
|
||||
std::cout << "Upgrade in progress" << std::endl;
|
||||
break;
|
||||
case STAT_START:
|
||||
std::cout << "Starting the upgrade" << std::endl;
|
||||
break;
|
||||
case STAT_VERIFY_IMAGE:
|
||||
std::cout << "Verifying image file" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "Unknown status or error" << std::endl;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Message : " << message << std::endl << std::flush;
|
||||
}
|
||||
|
||||
bool getFirmwarePath(std::string &firmwarePath) {
|
||||
std::cout << "Please input the path of the firmware file (.bin) to be updated:" << std::endl;
|
||||
std::cout << "(Enter 'Q' or 'q' to quit): " << std::endl;
|
||||
std::cout << "Path: ";
|
||||
std::string input;
|
||||
std::getline(std::cin, input);
|
||||
|
||||
if(input == "Q" || input == "q") {
|
||||
exit(EXIT_SUCCESS);
|
||||
}
|
||||
|
||||
// Remove leading and trailing whitespaces
|
||||
input.erase(std::find_if(input.rbegin(), input.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), input.end());
|
||||
|
||||
// Remove leading and trailing quotes
|
||||
if(!input.empty() && input.front() == '\'' && input.back() == '\'') {
|
||||
input = input.substr(1, input.size() - 2);
|
||||
}
|
||||
|
||||
if(input.size() > 4 && (input.substr(input.size() - 4) == ".bin" || input.substr(input.size() - 4) == ".img")) {
|
||||
firmwarePath = input;
|
||||
std::cout << "Firmware file confirmed: " << firmwarePath << std::endl << std::endl;
|
||||
return true;
|
||||
}
|
||||
|
||||
std::cout << "Invalid file format. Please provide a .bin file." << std::endl << std::endl;
|
||||
return getFirmwarePath(firmwarePath);
|
||||
}
|
||||
|
||||
void printDeviceList() {
|
||||
std::cout << "--------------------------------------------------------------------------------\n";
|
||||
for(uint32_t i = 0; i < devices.size(); ++i) {
|
||||
std::cout << "[" << i << "] " << "Device: " << devices[i]->getDeviceInfo()->getName();
|
||||
std::cout << " | SN: " << devices[i]->getDeviceInfo()->getSerialNumber();
|
||||
std::cout << " | Firmware version: " << devices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
|
||||
}
|
||||
std::cout << "---------------------------------------------------------------------------------\n";
|
||||
}
|
||||
|
||||
bool selectDevice(int &deviceIndex) {
|
||||
std::string input;
|
||||
while(true) {
|
||||
std::cout << "Please select a device to update the firmware, enter 'l' to list devices, or enter 'q' to quit: " << std::endl;
|
||||
std::cout << "Device index: ";
|
||||
std::getline(std::cin, input);
|
||||
|
||||
if(input == "Q" || input == "q") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(input == "l" || input == "L") {
|
||||
printDeviceList();
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
deviceIndex = std::stoi(input);
|
||||
if(deviceIndex < 0 || deviceIndex >= static_cast<int>(devices.size())) {
|
||||
std::cout << "Invalid input, please enter a valid index number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
std::cout << std::endl;
|
||||
break;
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "Invalid input, please enter a valid index number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_forceip)
|
||||
|
||||
add_executable(${PROJECT_NAME} forceip.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
@@ -0,0 +1,53 @@
|
||||
# C++ Sample: 2.device.forceip
|
||||
|
||||
## Overview
|
||||
This sample demonstrates how to use the SDK Context class to query connected devices and configure the network IP of a selected device using the ForceIP command (as defined by the GigE Vision standard).
|
||||
|
||||
### Knowledge
|
||||
The Context class serves as the entry point to the SDK. It provides functionality to:
|
||||
1. Query connected device lists
|
||||
2. Modify network configurations for the selected device
|
||||
|
||||
## Code Overview
|
||||
|
||||
1. Query device list and select a device
|
||||
|
||||
```cpp
|
||||
// Create a Context object to interact with Orbbec devices
|
||||
ob::Context context;
|
||||
// Query the list of connected devices
|
||||
auto deviceList = context.queryDeviceList();
|
||||
// Select a device to operate
|
||||
uint32_t selectedIndex;
|
||||
auto res = selectDevice(deviceList, selectedIndex);
|
||||
```
|
||||
|
||||
2. Get new IP configuration from user input
|
||||
|
||||
```cpp
|
||||
OBNetIpConfig config = getIPConfig();
|
||||
```
|
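getIPConfig() is a helper defined in this sample: it prompts for the IP address, subnet mask, and gateway, then fills an OBNetIpConfig roughly as follows (DHCP is disabled so the static configuration takes effect):

```cpp
OBNetIpConfig cfg;
cfg.dhcp = 0;                        // use a static configuration
for(int i = 0; i < 4; ++i) {         // each field holds four octets
    cfg.address[i] = address[i];     // parsed from the "Enter IP address" prompt
    cfg.mask[i]    = mask[i];        // parsed from the "Enter Subnet Mask" prompt
    cfg.gateway[i] = gateway[i];     // parsed from the "Enter Gateway address" prompt
}
return cfg;
```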
||||
|
||||
3. Change the selected device IP configuration and print the result of the operation.
|
||||
|
||||
```cpp
|
||||
res = context.forceIp(deviceList->getUid(selectedIndex), config);
|
||||
if(res) {
|
||||
std::cout << "The new IP configuration has been successfully applied to the device." << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Failed to apply the new IP configuration." << std::endl;
|
||||
}
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
Device list:
|
||||
Enter your choice:
|
||||
Please enter the network configuration information:
|
||||
Enter IP address:
|
||||
Enter Subnet Mask:
|
||||
Enter Gateway address:
|
||||
The new IP configuration has been successfully applied to the device.
|
||||
|
||||
### Result
|
||||

|
||||
@@ -0,0 +1,175 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <thread>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <cstring>
|
||||
#include <iomanip>
|
||||
#include <sstream>
|
||||
#include <iostream>
|
||||
|
||||
static bool parseIpString(const std::string &Str, uint8_t *out) {
|
||||
if(Str.empty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
std::istringstream ss(Str);
|
||||
std::string token;
|
||||
int count = 0;
|
||||
while(std::getline(ss, token, '.')) {
|
||||
if(count >= 4) {  // reject more than four octets (also keeps out[] in bounds)
|
||||
return false;
|
||||
}
|
||||
for(char c: token) {
|
||||
if(!isdigit(c)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
int val = std::stoi(token);
|
||||
if(val < 0 || val > 255) {
|
||||
return false;
|
||||
}
|
||||
out[count++] = static_cast<uint8_t>(val);
|
||||
}
|
||||
return count == 4;
|
||||
}
|
||||
catch(const std::exception &e) {
|
||||
// error
|
||||
(void)e;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool selectDevice(std::shared_ptr<ob::DeviceList> deviceList, uint32_t &selectedIndex) {
|
||||
selectedIndex = static_cast<uint32_t>(-1);
|
||||
|
||||
auto devCount = deviceList->getCount();
|
||||
if(devCount == 0) {
|
||||
std::cout << "No devices found." << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
std::vector<uint32_t> indexList;
|
||||
uint32_t count = 0;
|
||||
|
||||
std::cout << "Ethernet device list:" << std::endl;
|
||||
for(uint32_t i = 0; i < devCount; i++) {
|
||||
std::string DeviceConnectType = deviceList->getConnectionType(i);
|
||||
if(DeviceConnectType != "Ethernet") {
|
||||
continue;
|
||||
}
|
||||
std::cout << count << ". Name: " << deviceList->getName(i) << ", Serial Number: " << deviceList->getSerialNumber(i)
|
||||
<< ", MAC: " << deviceList->getUid(i) << std::dec << ", IP: " << deviceList->getIpAddress(i)
|
||||
<< ", Subnet Mask: " << deviceList->getSubnetMask(i) << ", Gateway: " << deviceList->getGateway(i) << std::endl;
|
||||
indexList.push_back(i);
|
||||
count++;
|
||||
}
|
||||
if(indexList.empty()) {
|
||||
std::cout << "No network devices found." << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t index;
|
||||
do {
|
||||
std::cout << "Enter your choice: ";
|
||||
std::cin >> index;
|
||||
if(std::cin.fail()) {
|
||||
std::cin.clear();
|
||||
std::cin.ignore();
|
||||
std::cout << "Invalid input, please enter a number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
if(index >= indexList.size()) {
|
||||
std::cout << "Invalid input, please enter a valid index number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
selectedIndex = indexList[index];
|
||||
return true;
|
||||
} while(true);
|
||||
return false;
|
||||
}
|
||||
|
||||
static OBNetIpConfig getIPConfig() {
|
||||
OBNetIpConfig cfg;
|
||||
std::string val;
|
||||
uint8_t address[4];
|
||||
uint8_t mask[4];
|
||||
uint8_t gateway[4];
|
||||
|
||||
std::cout << "Please enter the network configuration information:" << std::endl;
|
||||
std::cout << "Enter IP address:" << std::endl;
|
||||
while(std::cin >> val) {
|
||||
if(parseIpString(val, address)) {
|
||||
break;
|
||||
}
|
||||
std::cout << "Invalid format." << std::endl;
|
||||
std::cout << "Enter IP address:" << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "Enter Subnet Mask:" << std::endl;
|
||||
while(std::cin >> val) {
|
||||
if(parseIpString(val, mask)) {
|
||||
break;
|
||||
}
|
||||
std::cout << "Invalid format." << std::endl;
|
||||
std::cout << "Enter Subnet Mask:" << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "Enter Gateway address:" << std::endl;
|
||||
while(std::cin >> val) {
|
||||
if(parseIpString(val, gateway)) {
|
||||
break;
|
||||
}
|
||||
std::cout << "Invalid format." << std::endl;
|
||||
std::cout << "Enter Gateway address:" << std::endl;
|
||||
}
|
||||
|
||||
cfg.dhcp = 0;
|
||||
for(int i = 0; i < 4; ++i) {
|
||||
cfg.address[i] = address[i];
|
||||
cfg.gateway[i] = gateway[i];
|
||||
cfg.mask[i] = mask[i];
|
||||
}
|
||||
return cfg;
|
||||
}
|
||||
|
||||
int main(void) try {
|
||||
// Create a Context object to interact with Orbbec devices
|
||||
ob::Context context;
|
||||
// Query the list of connected devices
|
||||
auto deviceList = context.queryDeviceList();
|
||||
// Select a device to operate
|
||||
uint32_t selectedIndex;
|
||||
auto res = selectDevice(deviceList, selectedIndex);
|
||||
if(res) {
|
||||
// Get the new IP configuration from user input
|
||||
OBNetIpConfig config = getIPConfig();
|
||||
|
||||
// Change device IP configuration
|
||||
res = context.forceIp(deviceList->getUid(selectedIndex), config);
|
||||
if(res) {
|
||||
std::cout << "The new IP configuration has been successfully applied to the device." << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Failed to apply the new IP configuration." << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "Function: " << e.getFunction() << "\nArguments: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
|
||||
<< std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_hot_plugin)
|
||||
|
||||
add_executable(${PROJECT_NAME} hot_plugin.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,74 @@
|
||||
# C++ Sample: 2.device.hot_plugin
|
||||
|
||||
## Overview
|
||||
|
||||
Use the SDK to set up a device hot-plug (connect/disconnect) callback and handle the acquired data streams after the device is unplugged or re-plugged.
|
||||
|
||||
### Knowledge
|
||||
|
||||
Pipeline is a pipeline for processing data streams, providing multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
|
||||
|
||||
Device is a class that can be used to get device information, parameters, and a list of contained sensors.
|
||||
|
||||
Sensor can be used to obtain different components of the camera and the stream of the component, for example, RGB, IR, Depth stream can be obtained through the RGB, IR, Depth sensor.
|
||||
|
||||
### Attention
|
||||
|
||||
*The GMSL devices (such as Gemini335Lg) do not support hot plugging.*
|
||||
|
||||
## Code Overview
|
||||
|
||||
1. Register a device changed callback and execute the relevant functions when a device is plugged in or unplugged
|
||||
|
||||
```cpp
|
||||
ctx.setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> addedList) {
|
||||
    printDeviceList("added", addedList);
|
||||
    printDeviceList("removed", removedList);
|
||||
});
|
||||
```
|
||||
|
||||
2. Trigger the callback function to print relevant information
|
||||
|
||||
```cpp
|
||||
void printDeviceList(const std::string &prompt, std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
auto count = deviceList->getCount();
|
||||
if(count == 0) {
|
||||
return;
|
||||
}
|
||||
std::cout << count << " device(s) " << prompt << ": " << std::endl;
|
||||
for(uint32_t i = 0; i < count; i++) {
|
||||
auto uid = deviceList->getUid(i);
|
||||
auto vid = deviceList->getVid(i);
|
||||
auto pid = deviceList->getPid(i);
|
||||
auto serialNumber = deviceList->getSerialNumber(i);
|
||||
auto connection = deviceList->getConnectionType(i);
|
||||
std::cout << " - uid: " << uid << ", vid: 0x" << std::hex << std::setfill('0') << std::setw(4) << vid << ", pid: 0x" << pid
|
||||
<< ", serial number: " << serialNumber << ", connection: " << connection << std::endl;
|
||||
}
|
||||
std::cout << std::endl;
|
||||
}
|
||||
```
|
||||
|
||||
3. Restart your device
|
||||
|
||||
```cpp
|
||||
void rebootDevices(std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); i++) {
|
||||
// get device from device list
|
||||
auto device = deviceList->getDevice(i);
|
||||
|
||||
// reboot device
|
||||
device->reboot();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Press R to reboot the connected devices.
|
||||
You can also manually unplug and re-plug the device.
|
||||
Press the Esc key to exit the program.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,80 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <iomanip>
|
||||
#include <iostream>
|
||||
|
||||
void printDeviceList(const std::string &prompt, std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
auto count = deviceList->getCount();
|
||||
if(count == 0) {
|
||||
return;
|
||||
}
|
||||
std::cout << count << " device(s) " << prompt << ": " << std::endl;
|
||||
for(uint32_t i = 0; i < count; i++) {
|
||||
auto uid = deviceList->getUid(i);
|
||||
auto vid = deviceList->getVid(i);
|
||||
auto pid = deviceList->getPid(i);
|
||||
auto serialNumber = deviceList->getSerialNumber(i);
|
||||
auto connection = deviceList->getConnectionType(i);
|
||||
std::cout << " - uid: " << uid << ", vid: 0x" << std::hex << std::setfill('0') << std::setw(4) << vid << ", pid: 0x" << std::setw(4) << pid
|
||||
<< ", serial number: " << serialNumber << ", connection: " << connection << std::endl;
|
||||
}
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
void rebootDevices(std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); i++) {
|
||||
// get device from device list
|
||||
auto device = deviceList->getDevice(i);
|
||||
|
||||
// reboot device
|
||||
device->reboot();
|
||||
}
|
||||
}
|
||||
|
||||
int main(void) try {
|
||||
// create context
|
||||
ob::Context ctx;
|
||||
|
||||
// register device callback
|
||||
ctx.setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> deviceList) {
|
||||
printDeviceList("added", deviceList);
|
||||
printDeviceList("removed", removedList);
|
||||
});
|
||||
|
||||
// query current device list
|
||||
auto currentList = ctx.queryDeviceList();
|
||||
printDeviceList("connected", currentList);
|
||||
|
||||
std::cout << "Press 'r' to reboot the connected devices to trigger the device disconnect and reconnect event, or manually unplug and plugin the device."
|
||||
<< std::endl;
|
||||
std::cout << "Press 'Esc' to exit." << std::endl << std::endl;
|
||||
|
||||
// main loop, wait for key press
|
||||
while(true) {
|
||||
auto key = ob_smpl::waitForKeyPressed(100);
|
||||
// Press the esc key to exit
|
||||
if(key == 27) {
|
||||
break;
|
||||
}
|
||||
else if(key == 'r' || key == 'R') {
|
||||
// update device list
|
||||
currentList = ctx.queryDeviceList();
|
||||
|
||||
std::cout << "Rebooting devices..." << std::endl;
|
||||
rebootDevices(currentList);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_multi_devices_firmware_update)
|
||||
|
||||
add_executable(${PROJECT_NAME} multi_devices_firmware_update.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,133 @@
|
||||
# C++ Sample:2.multi_devices_firmware_update
|
||||
|
||||
## Overview
|
||||
|
||||
If you want to upgrade multiple Orbbec cameras connected to your system, this sample might be helpful for you. For detailed information about firmware upgrades, please refer to the [2.device.firmware_update](../2.device.firmware_update/README.md) sample.
|
||||
|
||||
> Note: This sample is not suitable for Femto Mega, Femto Mega i, and Femto Bolt devices.
|
||||
> For these devices, please refer to this repository: [https://github.com/orbbec/OrbbecFirmware](https://github.com/orbbec/OrbbecFirmware)
|
||||
|
||||
### Knowledge
|
||||
|
||||
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
|
||||
|
||||
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
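As a minimal sketch of these two concepts (not the full sample), a Context can be created and used to enumerate the connected devices and read their basic information:

```c++
// Create a Context, query the connected devices and print basic information
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
    auto info = deviceList->getDevice(i)->getDeviceInfo();
    std::cout << "Name: " << info->getName() << " | SN: " << info->getSerialNumber()
              << " | Firmware version: " << info->getFirmwareVersion() << std::endl;
}
```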
|
||||
|
||||
## code overview
|
||||
|
||||
1. Initialize the SDK Context: This is necessary to access the connected devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
```
|
||||
2. List Connected Devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
devices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
```
|
||||
3. Update each device.
|
||||
|
||||
You don't need to worry about issues caused by using incorrect firmware during the upgrade process. The SDK performs internal verification of the firmware to ensure its compatibility and validity.
|
||||
|
||||
```c++
|
||||
for(uint32_t i = 0; i < totalDevices.size(); ++i) {
|
||||
try {
|
||||
std::cout << "\nUpgrading device: " << i + 1 << "/" << totalDevices.size()
|
||||
<< " - " << totalDevices[i]->getDeviceInfo()->getName() << std::endl;
|
||||
|
||||
totalDevices[i]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
4. Retrieve Status from the Callback
|
||||
|
||||
```c++
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
if(firstCall) {
|
||||
firstCall = !firstCall;
|
||||
}
|
||||
else {
|
||||
std::cout << "\033[3F"; // Move cursor up 3 lines
|
||||
}
|
||||
|
||||
std::cout << "\033[K"; // Clear the current line
|
||||
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Status : ";
|
||||
switch(state) {
|
||||
case STAT_VERIFY_SUCCESS:
|
||||
std::cout << "Image file verification success" << std::endl;
|
||||
break;
|
||||
case STAT_FILE_TRANSFER:
|
||||
std::cout << "File transfer in progress" << std::endl;
|
||||
break;
|
||||
case STAT_DONE:
|
||||
std::cout << "Update completed" << std::endl;
|
||||
break;
|
||||
case STAT_IN_PROGRESS:
|
||||
std::cout << "Upgrade in progress" << std::endl;
|
||||
break;
|
||||
case STAT_START:
|
||||
std::cout << "Starting the upgrade" << std::endl;
|
||||
break;
|
||||
case STAT_VERIFY_IMAGE:
|
||||
std::cout << "Verifying image file" << std::endl;
|
||||
break;
|
||||
case ERR_MISMATCH:
|
||||
std::cout << "Mismatch between device and image file" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "Unknown status or error" << std::endl;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Message : " << message << std::endl << std::flush;
|
||||
|
||||
if(state == STAT_DONE) {
|
||||
finalSuccess = true;
|
||||
finalFailure = false;
|
||||
}
|
||||
else if(state == ERR_MISMATCH) {
|
||||
// If the device's firmware version does not match the image file, the callback status will be ERR_MISMATCH.
|
||||
finalMismatch = true;
|
||||
}
|
||||
else if(state < 0) {
|
||||
// While state < 0, it means an error occurred.
|
||||
finalFailure = true;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Attention
|
||||
|
||||
1. After the firmware update completes, you need to restart the device manually to apply the new firmware. Alternatively, you can use the `reboot()` function to restart the device programmatically.
|
||||
|
||||
```c++
|
||||
device->reboot();
|
||||
```
|
||||
|
||||
2. Do not unplug the device during the firmware update process.
|
||||
|
||||
3. For Linux users, it is recommended to use `LibUVC` as the backend, because the `V4L2` backend may cause issues on some systems. Switch the backend before creating the device, like this:
|
||||
|
||||
```c++
|
||||
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Pass the firmware file path as a command-line argument (for example `ob_multi_devices_firmware_update /path/to/firmware.bin`); the program will then automatically upgrade the connected devices that match the firmware.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,224 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
#include <memory>
|
||||
#include <algorithm>
|
||||
#include <cctype>
|
||||
|
||||
bool getFirmwarePathFromCommandLine(int argc, char **argv, std::string &firmwarePath);
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent);
|
||||
void printDeviceList();
|
||||
|
||||
bool firstCall = true;
|
||||
bool finalSuccess = false;
|
||||
bool finalMismatch = false;
|
||||
bool finalFailure = false;
|
||||
|
||||
std::vector<std::shared_ptr<ob::Device>> totalDevices{};
|
||||
std::vector<std::shared_ptr<ob::Device>> successDevices{};
|
||||
std::vector<std::shared_ptr<ob::Device>> misMatchDevices{};
|
||||
std::vector<std::shared_ptr<ob::Device>> failedDevices{};
|
||||
|
||||
int main(int argc, char *argv[]) try {
|
||||
std::string firmwarePath;
|
||||
if(!getFirmwarePathFromCommandLine(argc, argv, firmwarePath)) {
|
||||
std::cout << "Press any key to exit..." << std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Create a context to access the devices
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
#if defined(__linux__)
|
||||
// On Linux, it is recommended to use the libuvc backend for device access as v4l2 is not always reliable on some systems for firmware update.
|
||||
context->setUvcBackendType(OB_UVC_BACKEND_TYPE_LIBUVC);
|
||||
#endif
|
||||
|
||||
// Query the device list
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
if(deviceList->getCount() == 0) {
|
||||
std::cout << "No device found. Please connect a device first!" << std::endl;
|
||||
std::cout << "Press any key to exit..." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Get all devices
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
totalDevices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
printDeviceList();
|
||||
|
||||
for(uint32_t i = 0; i < totalDevices.size(); ++i) {
|
||||
firstCall = true;
|
||||
finalSuccess = false;
|
||||
finalMismatch = false;
|
||||
finalFailure = false;
|
||||
|
||||
try {
|
||||
std::cout << "\nUpgrading device: " << i + 1 << "/" << totalDevices.size()
|
||||
<< " - " << totalDevices[i]->getDeviceInfo()->getName() << std::endl;
|
||||
|
||||
// Upgrade each device with async set to false for synchronous calls.
|
||||
// You can set a callback function to retrieve the device's upgrade progress and related information in real time.
|
||||
totalDevices[i]->updateFirmware(firmwarePath.c_str(), firmwareUpdateCallback, false);
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
// Unexpected situations, such as device disconnection, will typically throw an exception.
|
||||
// Note that common issues like verification failures are usually reported through the callback status.
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
finalFailure = true;
|
||||
}
|
||||
|
||||
if(finalSuccess) {
|
||||
successDevices.push_back(totalDevices[i]);
|
||||
}
|
||||
else if(finalMismatch) {
|
||||
misMatchDevices.push_back(totalDevices[i]);
|
||||
}
|
||||
else if(finalFailure) {
|
||||
failedDevices.push_back(totalDevices[i]);
|
||||
}
|
||||
}
|
||||
|
||||
std::cout << "\nUpgrade Summary:\n";
|
||||
std::cout << "==================================================\n";
|
||||
|
||||
std::cout << "Success (" << successDevices.size() << "):\n";
|
||||
for(const auto &device: successDevices) {
|
||||
std::cout << " - Name: " << device->getDeviceInfo()->getName() << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "\nMismatch (" << misMatchDevices.size() << "):\n";
|
||||
for(const auto &device: misMatchDevices) {
|
||||
std::cout << " - Name: " << device->getDeviceInfo()->getName() << std::endl;
|
||||
}
|
||||
if (misMatchDevices.size() > 0) {
|
||||
std::cout << "Please check use the correct firmware version and retry the upgrade." << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "\nFailure (" << failedDevices.size() << "):\n";
|
||||
for(const auto &device: failedDevices) {
|
||||
std::cout << " - Name: " << device->getDeviceInfo()->getName() << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "\nUpgrade process completed. Try to reboot all successfully upgraded devices." << std::endl;
|
||||
for (auto &device : successDevices) {
|
||||
device->reboot();
|
||||
}
|
||||
|
||||
std::cout << "Press any key to exit..." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
void firmwareUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
if(firstCall) {
|
||||
firstCall = !firstCall;
|
||||
}
|
||||
else {
|
||||
std::cout << "\033[3F"; // Move cursor up 3 lines
|
||||
}
|
||||
|
||||
std::cout << "\033[K"; // Clear the current line
|
||||
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Status : ";
|
||||
switch(state) {
|
||||
case STAT_VERIFY_SUCCESS:
|
||||
std::cout << "Image file verification success" << std::endl;
|
||||
break;
|
||||
case STAT_FILE_TRANSFER:
|
||||
std::cout << "File transfer in progress" << std::endl;
|
||||
break;
|
||||
case STAT_DONE:
|
||||
std::cout << "Update completed" << std::endl;
|
||||
break;
|
||||
case STAT_IN_PROGRESS:
|
||||
std::cout << "Upgrade in progress" << std::endl;
|
||||
break;
|
||||
case STAT_START:
|
||||
std::cout << "Starting the upgrade" << std::endl;
|
||||
break;
|
||||
case STAT_VERIFY_IMAGE:
|
||||
std::cout << "Verifying image file" << std::endl;
|
||||
break;
|
||||
case ERR_MISMATCH:
|
||||
std::cout << "Mismatch between device and image file" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "Unknown status or error" << std::endl;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Message : " << message << std::endl << std::flush;
|
||||
|
||||
if(state == STAT_DONE) {
|
||||
finalSuccess = true;
|
||||
finalFailure = false;
|
||||
}
|
||||
else if(state == ERR_MISMATCH) {
|
||||
// If the device's firmware version does not match the image file, the callback status will be ERR_MISMATCH.
|
||||
finalMismatch = true;
|
||||
}
|
||||
else if(state < 0) {
|
||||
// While state < 0, it means an error occurred.
|
||||
finalFailure = true;
|
||||
}
|
||||
}
|
||||
|
||||
bool getFirmwarePathFromCommandLine(int argc, char **argv, std::string &firmwarePath) {
|
||||
if(argc != 2) {
|
||||
std::cerr << "Usage: " << argv[0] << " <firmware_file_path>" << std::endl;
|
||||
std::cerr << "Example: " << argv[0] << " /path/to/firmware.bin" << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
std::vector<std::string> validExtensions = { ".bin", ".img" };
|
||||
firmwarePath = argv[1];
|
||||
|
||||
if(firmwarePath.size() > 4) {
|
||||
std::string extension = firmwarePath.substr(firmwarePath.size() - 4);
|
||||
|
||||
auto result = std::find_if(validExtensions.begin(), validExtensions.end(),
|
||||
[extension](const std::string &validExtension) { return extension == validExtension; });
|
||||
if(result != validExtensions.end()) {
|
||||
std::cout << "Firmware file confirmed: " << firmwarePath << std::endl << std::endl;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
std::cout << "Invalid input file: Please provide a valid firmware file, supported formats: ";
|
||||
for(const auto &ext: validExtensions) {
|
||||
std::cout << ext << " ";
|
||||
}
|
||||
std::cout << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
void printDeviceList() {
|
||||
std::cout << "Devices found:" << std::endl;
|
||||
std::cout << "--------------------------------------------------------------------------------\n";
|
||||
for(uint32_t i = 0; i < totalDevices.size(); ++i) {
|
||||
std::cout << "[" << i << "] " << "Device: " << totalDevices[i]->getDeviceInfo()->getName();
|
||||
std::cout << " | SN: " << totalDevices[i]->getDeviceInfo()->getSerialNumber();
|
||||
std::cout << " | Firmware version: " << totalDevices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
|
||||
}
|
||||
std::cout << "---------------------------------------------------------------------------------\n";
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_optional_depth_presets_update)
|
||||
|
||||
add_executable(${PROJECT_NAME} device.optional_depth_presets_update.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
@@ -0,0 +1,63 @@
|
||||
# C++ Sample:2.device.optional_depth_presets_update
|
||||
|
||||
## Overview
|
||||
|
||||
This sample demonstrates how to use the SDK to update the optional depth presets of a connected device. It includes functions to list connected devices, select a device, and update its depth presets.
|
||||
|
||||
> Note: This sample is only applicable to devices that support presets, such as the G330 series of devices
|
||||
|
||||
### Knowledge
|
||||
|
||||
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
|
||||
|
||||
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
|
||||
|
||||
## code overview
|
||||
|
||||
1. Initialize the SDK Context: This is necessary to access the connected devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
```
|
||||
2. List Connected Devices.
|
||||
|
||||
```c++
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
devices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
```
|
||||
3. Define a Callback Function for the Preset Update Progress.
|
||||
|
||||
You can define a callback function to get the progress of the preset update. The callback function will be called every time the device reports new progress.
|
||||
|
||||
```c++
|
||||
void presetUpdateCallback(OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
// show update state and message here
|
||||
}
|
||||
```
|
||||
|
||||
4. Update the optional depth presets.
|
||||
|
||||
After selecting a device, update its presets by calling the updateOptionalDepthPresets function with the specified callback.
|
||||
|
||||
```c++
|
||||
device->updateOptionalDepthPresets(filePaths, count, presetUpdateCallback);
|
||||
```
|
||||
> Note: The API supports writing multiple presets at once. For G300 series devices, a maximum of 3 presets can be written at a time; the first preset written will be set as the default preset.
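The paths are passed to the API as a fixed-size char array. A minimal sketch of preparing that array from a `std::vector<std::string>` named `pathList` (mirroring what the full sample below does; `device` and `presetUpdateCallback` are assumed to exist already):

```c++
// Convert a vector of preset file paths into the char array expected by the API
uint8_t count = static_cast<uint8_t>(pathList.size());
char(*filePaths)[OB_PATH_MAX] = new char[count][OB_PATH_MAX];
for(uint8_t i = 0; i < count; i++) {
    strcpy(filePaths[i], pathList[i].c_str());
}
device->updateOptionalDepthPresets(filePaths, count, presetUpdateCallback);
delete[] filePaths;
```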
|
||||
|
||||
### Attention
|
||||
|
||||
1. After the optional depth presets update completes, you don't need to restart the device.
|
||||
|
||||
2. Do not unplug the device during the preset update process.
|
||||
|
||||
|
||||
|
||||
## Run Sample
|
||||
|
||||
Select the device for presets update and input the path of the presets file. The SDK will start updating the presets, and the progress will be displayed on the console.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,295 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
#include <memory>
|
||||
#include <algorithm>
|
||||
#include <cctype>
|
||||
|
||||
static bool shouldContinue();
|
||||
static void presetUpdateCallback(bool firstCall, OBFwUpdateState state, const char *message, uint8_t percent);
|
||||
static bool getPresetPath(std::vector<std::string> &pathList);
|
||||
static bool selectDevice(std::shared_ptr<ob::Device> &device);
|
||||
static void printDeviceList();
|
||||
static bool isPresetSupported(std::shared_ptr<ob::Device> device);
|
||||
static void printPreset(std::shared_ptr<ob::Device> device);
|
||||
|
||||
std::vector<std::shared_ptr<ob::Device>> devices{};
|
||||
|
||||
int main() try {
|
||||
// Create a context to access the connected devices
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
// Get connected devices from the context
|
||||
std::shared_ptr<ob::DeviceList> deviceList = context->queryDeviceList();
|
||||
if(deviceList->getCount() == 0) {
|
||||
std::cout << "No device found. Please connect a device first!" << std::endl;
|
||||
std::cout << "Press any key to exit..." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
return 0;
|
||||
}
|
||||
|
||||
for(uint32_t i = 0; i < deviceList->getCount(); ++i) {
|
||||
devices.push_back(deviceList->getDevice(i));
|
||||
}
|
||||
std::cout << "Devices found:" << std::endl;
|
||||
printDeviceList();
|
||||
|
||||
while(true) {
|
||||
bool firstCall = true;
|
||||
OBFwUpdateState updateState = STAT_START;
|
||||
std::shared_ptr<ob::Device> device = nullptr;
|
||||
|
||||
if(!selectDevice(device)) {
|
||||
break;
|
||||
}
|
||||
|
||||
printPreset(device);
|
||||
|
||||
std::vector<std::string> pathList;
|
||||
if(!getPresetPath(pathList)) {
|
||||
break;
|
||||
}
|
||||
|
||||
uint8_t index = 0;
|
||||
uint8_t count = static_cast<uint8_t>(pathList.size());
|
||||
char(*filePaths)[OB_PATH_MAX] = new char[count][OB_PATH_MAX];
|
||||
|
||||
// copy paths
|
||||
std::cout << "\nPreset file paths you input: " << std::endl;
|
||||
for(const auto &path: pathList) {
|
||||
strcpy(filePaths[index++], path.c_str());
|
||||
std::cout << "Path " << (uint32_t)index << ": " << path << std::endl;
|
||||
}
|
||||
std::cout << std::endl;
|
||||
|
||||
std::cout << "Start to update optional depth preset, please wait a moment...\n\n";
|
||||
try {
|
||||
device->updateOptionalDepthPresets(filePaths, count, [&updateState, &firstCall](OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
updateState = state;
|
||||
presetUpdateCallback(firstCall, state, message, percent);
|
||||
firstCall = false;
|
||||
});
|
||||
delete[] filePaths;
|
||||
filePaths = nullptr;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
// If the update fails, an exception will be thrown.
|
||||
std::cerr << "\nThe update was interrupted! An error occurred! " << std::endl;
|
||||
std::cerr << "Error message: " << e.what() << "\n" << std::endl;
|
||||
std::cout << "Press any key to exit." << std::endl;
|
||||
ob_smpl::waitForKeyPressed();
|
||||
delete[] filePaths;
|
||||
filePaths = nullptr;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << std::endl;
|
||||
if(updateState == STAT_DONE || updateState == STAT_DONE_WITH_DUPLICATES) {
|
||||
// success
|
||||
std::cout << "After updating the preset: " << std::endl;
|
||||
printPreset(device);
|
||||
}
|
||||
|
||||
if(!shouldContinue()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
static bool shouldContinue() {
|
||||
std::string input;
|
||||
std::cout << "Enter 'Q' or 'q' to quit, or any other key to continue: ";
|
||||
std::getline(std::cin, input);
|
||||
if(input == "Q" || input == "q") {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static void presetUpdateCallback(bool firstCall, OBFwUpdateState state, const char *message, uint8_t percent) {
|
||||
if(!firstCall) {
|
||||
std::cout << "\033[3F"; // Move cursor up 3 lines
|
||||
}
|
||||
|
||||
std::cout << "\033[K"; // Clear the current line
|
||||
std::cout << "Progress: " << static_cast<uint32_t>(percent) << "%" << std::endl;
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Status : ";
|
||||
switch(state) {
|
||||
case STAT_VERIFY_SUCCESS:
|
||||
std::cout << "Image file verification success" << std::endl;
|
||||
break;
|
||||
case STAT_FILE_TRANSFER:
|
||||
std::cout << "File transfer in progress" << std::endl;
|
||||
break;
|
||||
case STAT_DONE:
|
||||
std::cout << "Update completed" << std::endl;
|
||||
break;
|
||||
case STAT_DONE_WITH_DUPLICATES:
|
||||
std::cout << "Update completed, duplicated presets have been ignored" << std::endl;
|
||||
break;
|
||||
case STAT_IN_PROGRESS:
|
||||
std::cout << "Update in progress" << std::endl;
|
||||
break;
|
||||
case STAT_START:
|
||||
std::cout << "Starting the update" << std::endl;
|
||||
break;
|
||||
case STAT_VERIFY_IMAGE:
|
||||
std::cout << "Verifying image file" << std::endl;
|
||||
break;
|
||||
default:
|
||||
std::cout << "Unknown status or error" << std::endl;
|
||||
break;
|
||||
}
|
||||
|
||||
std::cout << "\033[K";
|
||||
std::cout << "Message : " << message << std::endl << std::flush;
|
||||
}
|
||||
|
||||
static bool getPresetPath(std::vector<std::string> &pathList) {
|
||||
std::cout << "Please input the file paths of the optional depth preset file (.bin):" << std::endl;
|
||||
std::cout << " - Press 'Enter' to finish this input" << std::endl;
|
||||
std::cout << " - Press 'Q' or 'q' to exit the program" << std::endl;
|
||||
|
||||
uint8_t count = 0;
|
||||
|
||||
pathList.clear();
|
||||
do {
|
||||
std::cout << "Enter Path: ";
|
||||
std::string input;
|
||||
std::getline(std::cin, input);
|
||||
|
||||
if(input == "Q" || input == "q") {
|
||||
return false;
|
||||
}
|
||||
if(input.empty()) {
|
||||
if(pathList.size() == 0) {
|
||||
std::cout << "You didn't input any file paths" << std::endl;
|
||||
if(!shouldContinue()) {
|
||||
return false;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Remove leading and trailing whitespaces
|
||||
input.erase(std::find_if(input.rbegin(), input.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), input.end());
|
||||
|
||||
// Remove leading and trailing quotes
|
||||
if(!input.empty() && input.front() == '\'' && input.back() == '\'') {
|
||||
input = input.substr(1, input.size() - 2);
|
||||
}
|
||||
|
||||
if(input.size() > 4 && input.substr(input.size() - 4) == ".bin") {
|
||||
pathList.push_back(input);
|
||||
++count;
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
std::cout << "Invalid file format. Please provide a .bin file." << std::endl << std::endl;
|
||||
continue;
|
||||
}
|
||||
} while(count < 10);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
static bool selectDevice(std::shared_ptr<ob::Device> &device) {
|
||||
std::string input;
|
||||
|
||||
device = nullptr;
|
||||
while(true) {
|
||||
std::cout << "Please select a device to update the optional depth preset, enter 'l' to list devices, or enter 'q' to quit: " << std::endl;
|
||||
std::cout << "Device index: ";
|
||||
std::getline(std::cin, input);
|
||||
|
||||
if(input == "Q" || input == "q") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(input == "l" || input == "L") {
|
||||
printDeviceList();
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
uint32_t index = std::stoi(input);
|
||||
if(index >= static_cast<uint32_t>(devices.size())) {
|
||||
std::cout << "Invalid input, please enter a valid index number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
device = devices[index];
|
||||
if(!isPresetSupported(device)) {
|
||||
std::cerr << "The device you selected does not support preset. Please select another one" << std::endl;
|
||||
continue;
|
||||
}
|
||||
std::cout << std::endl;
|
||||
break;
|
||||
}
|
||||
catch(...) {
|
||||
std::cout << "Invalid input, please enter a valid index number." << std::endl;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
static void printDeviceList() {
|
||||
std::cout << "--------------------------------------------------------------------------------\n";
|
||||
for(uint32_t i = 0; i < devices.size(); ++i) {
|
||||
std::cout << "[" << i << "] " << "Device: " << devices[i]->getDeviceInfo()->getName();
|
||||
std::cout << " | SN: " << devices[i]->getDeviceInfo()->getSerialNumber();
|
||||
std::cout << " | Firmware version: " << devices[i]->getDeviceInfo()->getFirmwareVersion() << std::endl;
|
||||
}
|
||||
std::cout << "---------------------------------------------------------------------------------\n";
|
||||
}
|
||||
|
||||
static bool isPresetSupported(std::shared_ptr<ob::Device> device) {
|
||||
auto presetList = device->getAvailablePresetList();
|
||||
if(presetList && presetList->getCount() > 0) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
static void printPreset(std::shared_ptr<ob::Device> device) {
|
||||
try {
|
||||
auto presetList = device->getAvailablePresetList();
|
||||
std::cout << "Preset count: " << presetList->getCount() << std::endl;
|
||||
for(uint32_t i = 0; i < presetList->getCount(); ++i) {
|
||||
std::cout << " - " << presetList->getName(i) << std::endl;
|
||||
}
|
||||
std::cout << "Current preset: " << device->getCurrentPresetName() << "\n" << std::endl;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
// If the device does not support presets, an exception will be thrown.
|
||||
std::cerr << "\nThe device doesn't support preset! " << std::endl;
|
||||
std::cerr << "error: " << e.what() << "\n" << std::endl;
|
||||
return;
|
||||
}
|
||||
|
||||
std::string key = "PresetVer";
|
||||
if(device->isExtensionInfoExist(key)) {
|
||||
std::string value = device->getExtensionInfo(key);
|
||||
std::cout << "Preset version: " << value << "\n" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "PresetVer: n/a\n" << std::endl;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_playback)
|
||||
|
||||
add_executable(${PROJECT_NAME} device_playback.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,63 @@
|
||||
# C++ Sample:2.device.playback
|
||||
|
||||
## Overview
|
||||
|
||||
This example demonstrates how to use the SDK to read and visualize data from a ROS bag file (.bag) with Orbbec camera streams.
|
||||
|
||||
### Knowledge
|
||||
|
||||
**Pipeline**: Manages data streams with multi-channel configuration, frame synchronization, and aggregation capabilities.
|
||||
**PlaybackDevice**: Reads sensor data from a ROS bag file and feeds it into the processing pipeline.
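For example, once a `PlaybackDevice` has been opened from a .bag file, the total duration of the recording can be queried (the full sample prints it right after creating the device):

```cpp
// Print the duration of the loaded recording
std::cout << "duration: " << playback->getDuration() << std::endl;
```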
|
||||
|
||||
## code overview
|
||||
|
||||
1. Initialize Playback Device and Pipeline
|
||||
|
||||
Create a playback device from a ROS bag file and configure the processing pipeline:
|
||||
|
||||
```cpp
|
||||
// Create a playback device with a Rosbag file
|
||||
std::shared_ptr<ob::PlaybackDevice> playback = std::make_shared<ob::PlaybackDevice>(filePath);
|
||||
// Create a pipeline with the playback device
|
||||
std::shared_ptr<ob::Pipeline> pipe = std::make_shared<ob::Pipeline>(playback);
|
||||
```
|
||||
|
||||
2. Enable Recorded Streams
|
||||
Activate all sensor streams available in the bag file:
|
||||
|
||||
```cpp
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
auto sensorList = playback->getSensorList();
|
||||
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
|
||||
auto sensorType = sensorList->getSensorType(i);
|
||||
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
```
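The full sample also sets the frame aggregate output mode on the same config before starting the pipeline:

```cpp
// Deliver framesets in any situation, even when not every enabled stream
// contributed a frame for the current moment.
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ANY_SITUATION);
```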
|
||||
|
||||
3. Start the Pipeline with the Config
|
||||
|
||||
```cpp
|
||||
pipe->start(config);
|
||||
```
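In the full sample the pipeline is started with a frame callback so that the most recent frameset can be cached under a mutex and rendered from the main loop; a condensed sketch of that pattern:

```cpp
std::mutex frameMutex;
std::shared_ptr<const ob::FrameSet> renderFrameSet;
auto frameCallback = [&](std::shared_ptr<ob::FrameSet> frameSet) {
    std::lock_guard<std::mutex> lock(frameMutex);
    renderFrameSet = frameSet;  // keep only the latest frameset for rendering
};

// Start the pipeline with the config and the callback
pipe->start(config, frameCallback);
```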
|
||||
|
||||
4. Automatically restart playback when reaching file end:
|
||||
|
||||
```cpp
|
||||
playback->setPlaybackStatusChangeCallback([&](OBPlaybackStatus status) {
|
||||
if(status == OB_PLAYBACK_STOPPED && !exited) {
|
||||
pipe->stop();
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
|
||||
pipe->start(config);
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Press the 'Esc' key in the window to exit the program.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.h>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include "utils_opencv.hpp"
|
||||
|
||||
#include <mutex>
|
||||
#include <thread>
|
||||
#include <atomic>
|
||||
|
||||
bool getRosbagPath(std::string &rosbagPath);
|
||||
|
||||
int main(void) try {
|
||||
std::atomic<bool> exited(false);
|
||||
std::string filePath;
|
||||
// Get valid .bag file path from user input
|
||||
getRosbagPath(filePath);
|
||||
|
||||
// Create a playback device with a Rosbag file
|
||||
std::shared_ptr<ob::PlaybackDevice> playback = std::make_shared<ob::PlaybackDevice>(filePath);
|
||||
// Create a pipeline with the playback device
|
||||
std::shared_ptr<ob::Pipeline> pipe = std::make_shared<ob::Pipeline>(playback);
|
||||
// Enable all recording streams from the playback device
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
std::cout << "duration: " << playback->getDuration() << std::endl;
|
||||
|
||||
std::mutex frameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderFrameSet;
|
||||
auto frameCallback = [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
renderFrameSet = frameSet;
|
||||
};
|
||||
|
||||
// Set playback status change callback, when the playback stops, start the pipeline again with the same config
|
||||
playback->setPlaybackStatusChangeCallback([&](OBPlaybackStatus status) {
|
||||
if(status == OB_PLAYBACK_STOPPED && !exited) {
|
||||
pipe->stop();
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
|
||||
std::cout << "Replay again" << std::endl;
|
||||
pipe->start(config, frameCallback);
|
||||
}
|
||||
});
|
||||
|
||||
auto sensorList = playback->getSensorList();
|
||||
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
|
||||
auto sensorType = sensorList->getSensorType(i);
|
||||
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ANY_SITUATION);
|
||||
|
||||
// Start the pipeline with the config
|
||||
pipe->start(config, frameCallback);
|
||||
|
||||
ob_smpl::CVWindow win("Playback", 1280, 720, ob_smpl::ARRANGE_GRID);
|
||||
while(win.run() && !exited) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
if(renderFrameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
win.pushFramesToView(renderFrameSet);
|
||||
}
|
||||
exited = true;
|
||||
|
||||
pipe->stop();
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
bool getRosbagPath(std::string &rosbagPath) {
|
||||
while(true) {
|
||||
std::cout << "Please input the path of the Rosbag file (.bag) to playback: \n";
|
||||
std::cout << "Path: ";
|
||||
std::string input;
|
||||
std::getline(std::cin, input);
|
||||
|
||||
// Remove leading and trailing whitespaces
|
||||
input.erase(std::find_if(input.rbegin(), input.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), input.end());
|
||||
|
||||
// Remove leading and trailing quotes
|
||||
if(!input.empty() && input.front() == '\'' && input.back() == '\'') {
|
||||
input = input.substr(1, input.size() - 2);
|
||||
}
|
||||
|
||||
if(!input.empty() && input.front() == '\"' && input.back() == '\"') {
|
||||
input = input.substr(1, input.size() - 2);
|
||||
}
|
||||
|
||||
// Validate .bag extension
|
||||
if(input.size() > 4 && input.substr(input.size() - 4) == ".bag") {
|
||||
rosbagPath = input;
|
||||
std::cout << "Playback file confirmed: " << rosbagPath << "\n\n";
|
||||
return true;
|
||||
}
|
||||
|
||||
std::cout << "Invalid file format. Please provide a .bag file.\n\n";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_record_nogui)
|
||||
|
||||
add_executable(${PROJECT_NAME} device_record_nogui.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
# C++ Sample:2.device.record.nogui
|
||||
|
||||
## Overview
|
||||
|
||||
This example demonstrates how to use the SDK to record video/sensor stream data from an Orbbec camera and output a ROS bag file (.bag).
|
||||
It is a command-line (CLI) tool that records streams directly without rendering video frames.
|
||||
|
||||
### Knowledge
|
||||
|
||||
- **Pipeline**: Manages data streams with capabilities for multi-channel configuration, stream switching, frame aggregation, and synchronization.
|
||||
- **RecordDevice**: Handles data recording to a ROS bag file, supporting simultaneous capture from multiple sensors and streams.
|
||||
|
||||
## code overview
|
||||
|
||||
1. Create a Context object and get the specified device.
|
||||
|
||||
```cpp
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
auto deviceList = context->queryDeviceList();
auto device = deviceList->getDevice(0);
|
||||
```
|
||||
|
||||
2. Instantiate a RecordDevice to capture all streams from the connected device into a ROS bag file:
|
||||
|
||||
```cpp
|
||||
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
|
||||
```
|
||||
|
||||
3. Configure and start the pipeline with a frame callback. This sample has no GUI, so instead of rendering frames the callback only processes the incoming framesets (see the frame-counting sketch after the block):
|
||||
|
||||
```cpp
|
||||
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
// Do something for frameset
|
||||
});
|
||||
```
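A condensed sketch of the frame counting this sample performs inside the callback (the main loop later uses these counts to report an approximate FPS):

```cpp
std::mutex frameMutex;
std::map<OBFrameType, uint64_t> frameCountMap;
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
    if(frameSet == nullptr) {
        return;
    }
    std::lock_guard<std::mutex> lock(frameMutex);
    for(uint32_t i = 0; i < frameSet->getCount(); i++) {
        auto frame = frameSet->getFrameByIndex(i);
        if(frame) {
            frameCountMap[frame->getType()]++;  // count frames per frame type
        }
    }
});
```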
|
||||
4. Destroy the RecordDevice to flush and save the ROS bag file:
|
||||
|
||||
```cpp
|
||||
recordDevice = nullptr;
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Press ESC, 'q', or 'Q' in the console to stop recording and exit safely.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include <iostream>
|
||||
#include <iomanip>
|
||||
#include <mutex>
|
||||
#include <thread>
|
||||
#include <atomic>
|
||||
#include <map>
|
||||
|
||||
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
|
||||
|
||||
int main(void) try {
|
||||
std::cout << "Please enter the output filename (with .bag extension) and press Enter to start recording: ";
|
||||
std::string filePath;
|
||||
std::getline(std::cin, filePath);
|
||||
|
||||
// Create a context, for getting devices and sensors
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
// Query device list
|
||||
auto deviceList = context->queryDeviceList();
|
||||
if(deviceList->getCount() < 1) {
|
||||
std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Acquire first available device
|
||||
auto device = deviceList->getDevice(0);
|
||||
|
||||
// Create a pipeline with the specified device
|
||||
auto pipe = std::make_shared<ob::Pipeline>(device);
|
||||
|
||||
// Activate device clock synchronization
|
||||
try {
|
||||
device->timerSyncWithHost();
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
// Create a config and enable all streams
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
auto sensorList = device->getSensorList();
|
||||
auto count = sensorList->getCount();
|
||||
for(uint32_t i = 0; i < count; i++) {
|
||||
auto sensor = sensorList->getSensor(i);
|
||||
auto sensorType = sensor->getType();
|
||||
auto profileList = sensor->getStreamProfileList(); // Get profileList to create Sensor object in advance
|
||||
if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
|
||||
if(sensorType == OB_SENSOR_IR) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
|
||||
std::mutex frameMutex;
|
||||
std::map<OBFrameType, uint64_t> frameCountMap;
|
||||
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
if(frameSet == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
auto count = frameSet->getCount();
|
||||
for(uint32_t i = 0; i < count; i++) {
|
||||
auto frame = frameSet->getFrameByIndex(i);
|
||||
if(frame) {
|
||||
auto type = frame->getType();
|
||||
frameCountMap[type]++;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Initialize recording device with output file
|
||||
auto startTime = ob_smpl::getNowTimesMs();
|
||||
uint32_t waitTime = 1000;
|
||||
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
|
||||
|
||||
// operation prompt
|
||||
std::cout << "Streams and recorder have started!" << std::endl;
|
||||
std::cout << "Press ESC, 'q', or 'Q' to stop recording and exit safely." << std::endl;
|
||||
std::cout << "IMPORTANT: Always use ESC/q/Q to stop! Otherwise, the bag file will be corrupted and unplayable." << std::endl << std::endl;
|
||||
|
||||
do {
|
||||
auto key = ob_smpl::waitForKeyPressed(waitTime);
|
||||
if(key == ESC_KEY || key == 'q' || key == 'Q') {
|
||||
break;
|
||||
}
|
||||
auto currentTime = ob_smpl::getNowTimesMs();
|
||||
if(currentTime > startTime + waitTime) {
|
||||
std::map<OBFrameType, uint64_t> tempCountMap;
|
||||
uint64_t duration;
|
||||
{
|
||||
// Copy data
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
|
||||
// get time again
|
||||
currentTime = ob_smpl::getNowTimesMs();
|
||||
duration = currentTime - startTime;
|
||||
if(!frameCountMap.empty()) {
|
||||
startTime = currentTime;
|
||||
waitTime = 2000; // Change to 2s for next time
|
||||
tempCountMap = frameCountMap;
|
||||
for(auto &item: frameCountMap) {
|
||||
item.second = 0; // reset count
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::string separate = "";
|
||||
if(tempCountMap.empty()) {
|
||||
std::cout << "Recording... Current FPS: 0" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Recording... Current FPS: ";
|
||||
for(const auto &item: tempCountMap) {
|
||||
auto name = ob::TypeHelper::convertOBFrameTypeToString(item.first);
|
||||
float rate = item.second / (duration / 1000.0f);
|
||||
|
||||
std::cout << std::fixed << std::setprecision(2) << std::showpoint;
|
||||
std::cout << separate << name << "=" << rate;
|
||||
separate = ", ";
|
||||
}
|
||||
std::cout << std::endl;
|
||||
}
|
||||
}
|
||||
} while(true);
|
||||
|
||||
// stop the pipeline
|
||||
pipe->stop();
|
||||
|
||||
// Flush and save recording file
|
||||
recordDevice = nullptr;
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
|
||||
<< std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_device_record)
|
||||
|
||||
add_executable(${PROJECT_NAME} device_record.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
# C++ Sample:2.device.record
|
||||
|
||||
## Overview
|
||||
|
||||
This example demonstrates how to use the SDK to record video/sensor stream data from an Orbbec camera and output a ROS bag file (.bag).
|
||||
|
||||
### Knowledge
|
||||
|
||||
- **Pipeline**: Manages data streams with capabilities for multi-channel configuration, stream switching, frame aggregation, and synchronization.
|
||||
- **RecordDevice**: Handles data recording to a ROS bag file, supporting simultaneous capture from multiple sensors and streams; recording can also be paused and resumed, as sketched below.
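A minimal sketch of pausing and resuming an active recording (this sample binds it to the 'S' key; `recorder` is assumed to be a valid `std::shared_ptr<ob::RecordDevice>`):

```cpp
// Temporarily stop writing frames to the bag file, then continue later
recorder->pause();
// ...
recorder->resume();
```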
|
||||
|
||||
## code overview
|
||||
|
||||
1. Create a Context object and get the specified device.
|
||||
|
||||
```cpp
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
auto deviceList = context->queryDeviceList();
auto device = deviceList->getDevice(0);
|
||||
```
|
||||
|
||||
2. Instantiate a RecordDevice to capture all streams from the connected device into a ROS bag file:
|
||||
|
||||
```cpp
|
||||
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
|
||||
```
|
||||
|
||||
3. Configure and start the pipeline with a frame callback for real-time preview:
|
||||
|
||||
```cpp
|
||||
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
renderFrameSet = frameSet;
|
||||
});
|
||||
```
|
||||
4. Destroy the RecordDevice to flush and save the ROS bag file:
|
||||
|
||||
```cpp
|
||||
recordDevice = nullptr;
|
||||
```
|
||||
|
||||
## Run Sample
|
||||
|
||||
Press the 'Esc' key in the window to stop recording and exit the program. Press 'S' to pause or resume recording.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.h>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include "utils_opencv.hpp"
|
||||
|
||||
#include <mutex>
|
||||
#include <thread>
|
||||
#include <atomic>
|
||||
|
||||
#define IS_ASTRA_MINI_DEVICE(pid) (pid == 0x069d || pid == 0x065b || pid == 0x065e)
|
||||
|
||||
std::atomic<bool> isPaused{false};
|
||||
|
||||
void handleKeyPress(ob_smpl::CVWindow &win, std::shared_ptr<ob::RecordDevice> recorder, int key);
|
||||
|
||||
int main(void) try {
|
||||
std::cout << "Please enter the output filename (with .bag extension) and press Enter to start recording: ";
|
||||
std::string filePath;
|
||||
std::getline(std::cin, filePath);
|
||||
|
||||
// Create a context, for getting devices and sensors
|
||||
std::shared_ptr<ob::Context> context = std::make_shared<ob::Context>();
|
||||
|
||||
// Query device list
|
||||
auto deviceList = context->queryDeviceList();
|
||||
if(deviceList->getCount() < 1) {
|
||||
std::cout << "No device found! Please connect a supported device and retry this program." << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Acquire first available device
|
||||
auto device = deviceList->getDevice(0);
|
||||
|
||||
// Create a pipeline with the specified device
|
||||
auto pipe = std::make_shared<ob::Pipeline>(device);
|
||||
|
||||
// Activate device clock synchronization
|
||||
try {
|
||||
device->timerSyncWithHost();
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
// Create a config and enable all streams
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
auto sensorList = device->getSensorList();
|
||||
for(uint32_t i = 0; i < sensorList->getCount(); i++) {
|
||||
auto sensorType = sensorList->getSensorType(i);
|
||||
if(IS_ASTRA_MINI_DEVICE(device->getDeviceInfo()->getPid())) {
|
||||
if(sensorType == OB_SENSOR_IR) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
config->enableStream(sensorType);
|
||||
}
|
||||
|
||||
std::mutex frameMutex;
|
||||
std::shared_ptr<const ob::FrameSet> renderFrameSet;
|
||||
pipe->start(config, [&](std::shared_ptr<ob::FrameSet> frameSet) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
renderFrameSet = frameSet;
|
||||
});
|
||||
|
||||
// Initialize recording device with output file
|
||||
auto recordDevice = std::make_shared<ob::RecordDevice>(device, filePath);
|
||||
std::cout << "Streams and recorder have started!" << std::endl;
|
||||
std::cout << "Press ESC to stop recording and exit safely." << std::endl;
|
||||
std::cout << "IMPORTANT: Always use ESC to stop! Otherwise, the bag file will be corrupted and unplayable." << std::endl << std::endl;
|
||||
|
||||
ob_smpl::CVWindow win("Record", 1280, 720, ob_smpl::ARRANGE_GRID);
|
||||
|
||||
win.setKeyPrompt("Press 'S' to pause/resume recording.");
|
||||
// set the callback function for the window to handle key press events
|
||||
win.setKeyPressedCallback([&win, recordDevice](int key) { handleKeyPress(win, recordDevice, key); });
|
||||
|
||||
while(win.run()) {
|
||||
std::lock_guard<std::mutex> lock(frameMutex);
|
||||
if(renderFrameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
win.pushFramesToView(renderFrameSet);
|
||||
}
|
||||
|
||||
pipe->stop();
|
||||
|
||||
// Flush and save recording file
|
||||
recordDevice = nullptr;
|
||||
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "Function: " << e.getFunction() << "\nArgs: " << e.getArgs() << "\nMessage: " << e.what() << "\nException Type: " << e.getExceptionType()
|
||||
<< std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
void handleKeyPress(ob_smpl::CVWindow& win, std::shared_ptr<ob::RecordDevice> recorder, int key) {
|
||||
if(key == 'S' || key == 's') {
|
||||
if(!isPaused) {
|
||||
recorder->pause();
|
||||
isPaused.store(true);
|
||||
win.addLog("[PAUSED] Recording paused");
|
||||
}
|
||||
else {
|
||||
recorder->resume();
|
||||
isPaused.store(false);
|
||||
win.addLog("[RESUMED] Recording resumed");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_common_usages)
|
||||
|
||||
add_executable(${PROJECT_NAME} common_usages.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
# C++ Sample:3.advanced.common_usages
|
||||
|
||||
## Overview
|
||||
|
||||
Use the SDK interfaces to view camera-related information, set related parameters, and display the video stream.
|
||||
|
||||
### Knowledge
|
||||
|
||||
Context is the environment context, the first object created during initialization, which can be used to perform some settings, including but not limited to device status change callbacks, log level settings, etc. Context can access multiple Devices.
|
||||
|
||||
Device is the device object, which can be used to obtain the device information, such as the model, serial number, and various sensors. One actual hardware device corresponds to one Device object.
|
||||
|
||||
## code overview
|
||||
|
||||
1. Register device callback
|
||||
|
||||
```cpp
|
||||
// Create ob:Context.
|
||||
ctx = std::make_shared<ob::Context>();
|
||||
ctx->setDeviceChangedCallback( []( std::shared_ptr< ob::DeviceList > removedList, std::shared_ptr< ob::DeviceList > addedList ) {
|
||||
DeviceDisconnectCallback( removedList );
|
||||
DeviceConnectCallback( addedList );
|
||||
} );
|
||||
```
|
||||
|
||||
2. Get the device list and print out the information, then use pipeline to start the video stream.
|
||||
|
||||
```cpp
|
||||
// Query the list of connected devices.
|
||||
std::shared_ptr<ob::DeviceList> devices = ctx->queryDeviceList();
|
||||
|
||||
// Handle connected devices(and open one device)
|
||||
handleDeviceConnected(devices);
|
||||
```
|
||||
|
||||
3. Block thread waiting for device connection
|
||||
|
||||
```cpp
|
||||
while(!device) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(100));
|
||||
}
|
||||
```
|
||||
|
||||
4. Execute the corresponding settings according to the commands entered by the user. The following is an introduction to some of the setting functions:
|
||||
|
||||
```cpp
|
||||
//Get the basic parameters of the camera, including connection type, device model, etc.
|
||||
void getDeviceInformation()
|
||||
//Get camera sensor intrinsics, distortion and pixels
|
||||
void getCameraParams()
|
||||
//Laser switch function
|
||||
void switchLaser()
|
||||
//Laser safety protection and ranging function switch
|
||||
void switchLDP()
|
||||
//Get the laser safety protection and ranging function status
|
||||
void getLDPStatus()
|
||||
//Color auto-exposure switch
|
||||
void switchColorAE()
|
||||
//Color exposure value adjustment
|
||||
void setColorExposureValue(bool increase)
|
||||
//Color gain value adjustment
|
||||
void setColorGainValue(bool increase)
|
||||
//Depth exposure value adjustment
|
||||
void setDepthExposureValue(bool increase)
|
||||
//Depth gain value adjustment
|
||||
void setDepthGainValue(bool increase)
|
||||
```
|
||||
|
||||
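Most of these setting functions share the same shape: check whether the property is supported, read the current value, then write the new one. The sketch below condenses that pattern into a hypothetical helper (`toggleBoolProperty` is not part of the sample; the property IDs and Device calls are the ones the sample uses):

```cpp
// Toggle a bool property if the device supports reading and writing it.
void toggleBoolProperty(std::shared_ptr<ob::Device> device, OBPropertyID propertyId) {
    if(!device->isPropertySupported(propertyId, OB_PERMISSION_READ_WRITE)) {
        std::cerr << "Property is not supported." << std::endl;
        return;
    }
    bool value = device->getBoolProperty(propertyId);  // read the current state
    device->setBoolProperty(propertyId, !value);       // write the opposite state
}

// Example: toggle the laser, as switchLaser() does for non Gemini 330 devices.
// toggleBoolProperty(device, OB_PROP_LASER_BOOL);
```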
## Run Sample
|
||||
|
||||
Enter commands in the terminal according to the prompts.
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
|
||||

|
||||
@@ -0,0 +1,921 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
|
||||
#include "utils.hpp"
|
||||
#include "utils_opencv.hpp"
|
||||
|
||||
#include <mutex>
|
||||
#include <string>
|
||||
#include <iomanip>
|
||||
|
||||
const std::map<std::string, int> gemini_330_list = { { "Gemini 335", 0x0800 }, { "Gemini 335L", 0x0804 }, { "Gemini 336", 0x0803 }, { "Gemini 336L", 0x0807 },
|
||||
{ "Gemini 330", 0x0801 }, { "Gemini 330L", 0x0805 }, { "DabaiA", 0x0A12 }, { "DabaiAL", 0x0A13 },
|
||||
{ "Gemini 345", 0x0812 }, { "Gemini 345Lg", 0x0813 }, { "CAM-5330", 0x0816 }, { "CAM-5530", 0x0817 },{"Gemini 338",0x0818} };
|
||||
|
||||
const std::map<OBSensorType, std::string> sensorTypeToStringMap = { { OB_SENSOR_COLOR, "Color profile: " },
|
||||
{ OB_SENSOR_DEPTH, "Depth profile: " },
|
||||
{ OB_SENSOR_IR, "IR profile: " },
|
||||
{ OB_SENSOR_IR_LEFT, "Left IR profile: " },
|
||||
{ OB_SENSOR_IR_RIGHT, "Right IR profile: " } };
|
||||
|
||||
bool isGemini330Series(int pid) {
|
||||
bool find = false;
|
||||
for(auto it = gemini_330_list.begin(); it != gemini_330_list.end(); ++it) {
|
||||
if(it->second == pid) {
|
||||
find = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return find;
|
||||
}
|
||||
|
||||
std::shared_ptr<ob_smpl::CVWindow> win = nullptr;
|
||||
std::shared_ptr<ob::Context> ctx = nullptr;
|
||||
|
||||
std::shared_ptr<ob::Device> device = nullptr;
|
||||
std::shared_ptr<ob::Pipeline> pipeline = nullptr;
|
||||
std::recursive_mutex deviceMutex;
|
||||
|
||||
bool irRightMirrorSupport = false;
|
||||
|
||||
std::map<OBSensorType, std::shared_ptr<ob::VideoStreamProfile>> profilesMap;
|
||||
std::shared_ptr<ob::VideoStreamProfile> depthProfile = nullptr;
|
||||
std::shared_ptr<ob::VideoStreamProfile> irProfile = nullptr;
|
||||
|
||||
std::shared_ptr<ob::Filter> align = nullptr;
|
||||
|
||||
void handleDeviceConnected(std::shared_ptr<ob::DeviceList> connectList);
|
||||
void handleDeviceDisconnected(std::shared_ptr<ob::DeviceList> disconnectList);
|
||||
void switchDepthWorkMode();
|
||||
void turnOffHwD2d();
|
||||
void setDepthUnit();
|
||||
void setDepthSoftFilter();
|
||||
|
||||
void printUsage();
|
||||
void commandProcess(std::string cmd);
|
||||
|
||||
void handleFrameset(std::shared_ptr<ob::FrameSet> frameset);
|
||||
void startStream();
|
||||
|
||||
int main(void) try {
|
||||
|
||||
// create window for render
|
||||
win = std::make_shared<ob_smpl::CVWindow>("CommonUsages", 1280, 720, ob_smpl::ARRANGE_GRID);
|
||||
|
||||
// Set log severity. To disable logging, set OB_LOG_SEVERITY_OFF.
|
||||
ob::Context::setLoggerSeverity(OB_LOG_SEVERITY_ERROR);
|
||||
|
||||
// Create ob:Context.
|
||||
ctx = std::make_shared<ob::Context>();
|
||||
|
||||
// create align filter
|
||||
align = ob::FilterFactory::createFilter("Align");
|
||||
|
||||
// Register device callback
|
||||
ctx->setDeviceChangedCallback([](std::shared_ptr<ob::DeviceList> removedList, std::shared_ptr<ob::DeviceList> addedList) {
|
||||
handleDeviceDisconnected(removedList);
|
||||
handleDeviceConnected(addedList);
|
||||
});
|
||||
|
||||
// Query the list of connected devices.
|
||||
std::shared_ptr<ob::DeviceList> devices = ctx->queryDeviceList();
|
||||
|
||||
// Handle connected devices(and open one device)
|
||||
handleDeviceConnected(devices);
|
||||
|
||||
if(!device) {
|
||||
std::cout << "Waiting for connect device...";
|
||||
while(!device) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(100));
|
||||
}
|
||||
}
|
||||
|
||||
irRightMirrorSupport = device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_READ_WRITE);
|
||||
printUsage();
|
||||
|
||||
auto inputWatchThread = std::thread([]{
|
||||
while(true) {
|
||||
std::string cmd;
|
||||
std::cout << "\nInput command: ";
|
||||
std::getline(std::cin, cmd);
|
||||
if(cmd == "quit" || cmd == "q") {
|
||||
win->close();
|
||||
break;
|
||||
}
|
||||
else {
|
||||
commandProcess(cmd);
|
||||
}
|
||||
}
|
||||
});
|
||||
inputWatchThread.detach();
|
||||
|
||||
while(win->run()) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
|
||||
if(pipeline) {
|
||||
pipeline->stop();
|
||||
}
|
||||
|
||||
// destruct all global variables here before exiting main
|
||||
irProfile.reset();
|
||||
depthProfile.reset();
|
||||
profilesMap.clear();
|
||||
pipeline.reset();
|
||||
device.reset();
|
||||
devices.reset();
|
||||
align.reset();
|
||||
ctx.reset();
|
||||
win.reset();
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
std::cout << "\nPress any key to exit.";
|
||||
ob_smpl::waitForKeyPressed();
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
// Device connection callback
|
||||
void handleDeviceConnected(std::shared_ptr<ob::DeviceList> devices) {
|
||||
// Get the number of connected devices
|
||||
if(devices->getCount() == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const auto deviceCount = devices->getCount();
|
||||
for(uint32_t i = 0; i < deviceCount; i++) {
|
||||
std::string deviceSN = devices->getSerialNumber(i);
|
||||
std::cout << "Found device connected, SN: " << deviceSN << std::endl;
|
||||
}
|
||||
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(!device) {
|
||||
// open default device (device index=0)
|
||||
device = devices->getDevice(0);
|
||||
pipeline = std::make_shared<ob::Pipeline>(device);
|
||||
std::cout << "Open device success, SN: " << devices->getSerialNumber(0) << std::endl;
|
||||
|
||||
startStream();
|
||||
}
|
||||
}
|
||||
|
||||
// Device disconnect callback
|
||||
void handleDeviceDisconnected(std::shared_ptr<ob::DeviceList> disconnectList) {
|
||||
std::string currentDevSn = "";
|
||||
{
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
std::shared_ptr<ob::DeviceInfo> devInfo = device->getDeviceInfo();
|
||||
currentDevSn = devInfo->getSerialNumber();
|
||||
}
|
||||
}
|
||||
const auto deviceCount = disconnectList->getCount();
|
||||
for(uint32_t i = 0; i < deviceCount; i++) {
|
||||
std::string deviceSN = disconnectList->getSerialNumber(i);
|
||||
std::cout << "Device disconnected, SN: " << deviceSN << std::endl;
|
||||
if(currentDevSn == deviceSN) {
|
||||
device.reset(); // release device
|
||||
pipeline.reset(); // release pipeline
|
||||
std::cout << "Current device disconnected" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchDepthWorkMode() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
// Check whether the camera depth working mode is supported
|
||||
if(!device->isPropertySupported(OB_STRUCT_CURRENT_DEPTH_ALG_MODE, OB_PERMISSION_READ_WRITE)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Query the current camera depth mode
|
||||
auto curDepthMode = device->getCurrentDepthWorkMode();
|
||||
std::cout << "current depth work mode: " << curDepthMode.name << std::endl;
|
||||
|
||||
// Get the list of camera depth modes
|
||||
auto depthModeList = device->getDepthWorkModeList();
|
||||
std::cout << "depthModeList size: " << depthModeList->getCount() << std::endl;
|
||||
for(uint32_t i = 0; i < depthModeList->getCount(); i++) {
|
||||
std::cout << "depthModeList[" << i << "]: " << (*depthModeList)[i].name << std::endl;
|
||||
}
|
||||
|
||||
// switch depth work mode to the default (index=0) mode; the user can switch to other modes in the same way.
|
||||
device->switchDepthWorkMode((*depthModeList)[0].name);
|
||||
std::cout << "switch depth work mode to:" << (*depthModeList)[0].name << std::endl;
|
||||
|
||||
// It is required to reopen the device and pipeline after switching the depth work mode
|
||||
auto deviceInfo = device->getDeviceInfo();
|
||||
device.reset();
|
||||
pipeline.reset();
|
||||
auto deviceList = ctx->queryDeviceList();
|
||||
device = deviceList->getDeviceBySN(deviceInfo->getSerialNumber()); // using serial number to create device
|
||||
pipeline = std::make_shared<ob::Pipeline>(device);
|
||||
}
|
||||
|
||||
void turnOffHwD2d() {
|
||||
try {
|
||||
// Some models do not support this feature
|
||||
if(device->isPropertySupported(OB_PROP_DISPARITY_TO_DEPTH_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_DISPARITY_TO_DEPTH_BOOL, false);
|
||||
std::cout << "turn off hardware disparity to depth converter (Turn on Software D2D)" << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void setDepthUnit() {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_PRECISION_LEVEL_INT, OB_PERMISSION_WRITE)) {
|
||||
device->setIntProperty(OB_PROP_DEPTH_PRECISION_LEVEL_INT, OB_PRECISION_1MM);
|
||||
std::cout << "set depth unit to 1mm" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth precision level switch is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void setDepthSoftFilter() {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_NOISE_REMOVAL_FILTER_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_DEPTH_NOISE_REMOVAL_FILTER_BOOL, true);
|
||||
std::cout << "turn on depth soft filter" << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void startStream() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
|
||||
device = pipeline->getDevice();
|
||||
auto sensorList = device->getSensorList();
|
||||
|
||||
// Configure which streams to enable or disable for the Pipeline by creating a Config.
|
||||
std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
|
||||
|
||||
for(uint32_t index = 0; index < sensorList->getCount(); index++) {
|
||||
// Query all supported infrared sensor type and enable the infrared stream.
|
||||
// For dual infrared device, enable the left and right infrared streams.
|
||||
// For single infrared device, enable the infrared stream.
|
||||
OBSensorType sensorType = sensorList->getSensorType(index);
|
||||
if(sensorType == OB_SENSOR_IR || sensorType == OB_SENSOR_IR_LEFT || sensorType == OB_SENSOR_IR_RIGHT || sensorType == OB_SENSOR_COLOR
|
||||
|| sensorType == OB_SENSOR_DEPTH) {
|
||||
try {
|
||||
auto sensor = sensorList->getSensor(sensorType);
|
||||
auto profileList = sensor->getStreamProfileList();
|
||||
if(profileList->getCount() > 0) {
|
||||
// get default (index=0) stream profile
|
||||
auto defProfile = profileList->getProfile(OB_PROFILE_DEFAULT);
|
||||
|
||||
auto defVsProfile = defProfile->as<ob::VideoStreamProfile>();
|
||||
profilesMap.insert(std::make_pair(sensorType, defVsProfile));
|
||||
auto it = sensorTypeToStringMap.find(sensorType);
|
||||
if(it != sensorTypeToStringMap.end()) {
|
||||
std::cout << it->second << defVsProfile->getWidth() << "x" << defVsProfile->getHeight() << " @ " << defVsProfile->getFps() << "fps"
|
||||
<< std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "unknown profile: " << defVsProfile->getWidth() << "x" << defVsProfile->getHeight() << " @ " << defVsProfile->getFps()
|
||||
<< "fps" << std::endl;
|
||||
}
|
||||
|
||||
// enable the stream with its default profile.
|
||||
config->enableStream(defVsProfile);
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// start pipeline
|
||||
pipeline->start(config, handleFrameset);
|
||||
std::cout << "Stream started!" << std::endl;
|
||||
}
|
||||
std::shared_ptr<ob::FrameSet> filterAlign(std::shared_ptr<ob::FrameSet> frameset) {
|
||||
auto newFrame = align->process(frameset);
|
||||
if(!newFrame) {
|
||||
return nullptr;
|
||||
}
|
||||
auto newFrameSet = newFrame->as<ob::FrameSet>();
|
||||
return newFrameSet;
|
||||
}
|
||||
void handleFrameset(std::shared_ptr<ob::FrameSet> frameset) {
|
||||
auto alignFrameSet = filterAlign(frameset);
|
||||
// Discard framesets that do not contain all expected frames
|
||||
if(frameset->getCount() < 3) {
|
||||
return;
|
||||
}
|
||||
win->pushFramesToView(alignFrameSet);
|
||||
}
|
||||
|
||||
void getDeviceInformation() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
auto info = device->getDeviceInfo();
|
||||
// Get the name of the device
|
||||
std::cout << "-Device name: " << info->getName() << std::endl;
|
||||
// Get the pid, vid, uid of the device
|
||||
std::cout << "-Device pid: 0x" << std::hex << std::setw(4) << std::setfill('0') << info->getPid() << " vid: 0x" << std::hex << std::setw(4)
|
||||
<< std::setfill('0') << info->getVid() << " uid: " << info->getUid() << std::dec << std::endl;
|
||||
// By getting the firmware version number of the device
|
||||
auto fwVer = info->getFirmwareVersion();
|
||||
std::cout << "-Firmware version: " << fwVer << std::endl;
|
||||
// By getting the serial number of the device
|
||||
auto sn = info->getSerialNumber();
|
||||
std::cout << "-Serial number: " << sn << std::endl;
|
||||
// By getting the connection type of the device
|
||||
auto connectType = info->getConnectionType();
|
||||
std::cout << "-ConnectionType: " << connectType << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
void getCameraParams() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(pipeline) {
|
||||
try {
|
||||
for(const auto &item: profilesMap) {
|
||||
auto profile = item.second;
|
||||
auto type = item.first;
|
||||
auto intrinsics = profile->getIntrinsic();
|
||||
auto distortion = profile->getDistortion();
|
||||
auto typeString = ob::TypeHelper::convertOBSensorTypeToString(type);
|
||||
std::cout << typeString << " intrinsics: "
|
||||
<< "fx:" << intrinsics.fx << ", fy: " << intrinsics.fy << ", cx: " << intrinsics.cx << ", cy: " << intrinsics.cy
|
||||
<< " ,width: " << intrinsics.width << ", height: " << intrinsics.height << std::endl;
|
||||
|
||||
std::cout << typeString << " distortion: "
|
||||
<< "k1:" << distortion.k1 << ", k2:" << distortion.k2 << ", k3:" << distortion.k3 << ", k4:" << distortion.k4
|
||||
<< ", k5:" << distortion.k5 << ", k6:" << distortion.k6 << ", p1:" << distortion.p1 << ", p2:" << distortion.p2 << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchLaser() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
auto pid = device->getDeviceInfo()->getPid();
|
||||
OBPropertyID propertyId = OB_PROP_LASER_BOOL;
|
||||
if(isGemini330Series(pid)) {
|
||||
propertyId = OB_PROP_LASER_CONTROL_INT;
|
||||
}
|
||||
|
||||
if(device->isPropertySupported(propertyId, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(propertyId);
|
||||
if(device->isPropertySupported(propertyId, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(propertyId, !value);
|
||||
if(!value) {
|
||||
std::cout << "laser turn on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "laser turn off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Laser switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchLDP() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_LDP_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_LDP_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_LDP_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_LDP_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "LDP turn on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "LDP turn off!" << std::endl;
|
||||
}
|
||||
std::cout << "Attention: For some models, it is require to restart depth stream after turn on/of LDP. Input \"stream\" command "
|
||||
"to restart stream!"
|
||||
<< std::endl;
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "LDP switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void getLDPStatus() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_LDP_STATUS_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_LDP_STATUS_BOOL);
|
||||
std::cout << "LDP status:" << value << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cerr << "LDP status property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchDepthAE() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "Depth Auto-Exposure on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Depth Auto-Exposure off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth Auto-Exposure switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
void switchColorAE() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "Color Auto-Exposure on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Color Auto-Exposure off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color Auto-Exposure switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchDepthMirror() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_MIRROR_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_DEPTH_MIRROR_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_DEPTH_MIRROR_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "Note: Currently with the D2C(SW) turned on, Depth Mirror will not work!" << std::endl;
|
||||
std::cout << "Depth mirror on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Depth mirror off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth mirror switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchIRMirror() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_IR_MIRROR_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_IR_MIRROR_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_IR_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_IR_MIRROR_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "IR mirror on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "IR mirror off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "IR mirror switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchIRRightMirror() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_IR_RIGHT_MIRROR_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_IR_RIGHT_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_IR_RIGHT_MIRROR_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "IR Right mirror on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "IR Right mirror off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "IR mirror switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void switchColorMirror() {
|
||||
std::unique_lock<std::recursive_mutex> lk(deviceMutex);
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_MIRROR_BOOL, OB_PERMISSION_READ)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_COLOR_MIRROR_BOOL);
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_MIRROR_BOOL, OB_PERMISSION_WRITE)) {
|
||||
device->setBoolProperty(OB_PROP_COLOR_MIRROR_BOOL, !value);
|
||||
if(!value) {
|
||||
std::cout << "Color mirror on!" << std::endl;
|
||||
}
|
||||
else {
|
||||
std::cout << "Color mirror off!" << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color mirror switch property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setDepthExposureValue(bool increase) {
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ_WRITE)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL);
|
||||
if(value) {
|
||||
device->setBoolProperty(OB_PROP_DEPTH_AUTO_EXPOSURE_BOOL, false);
|
||||
std::cout << "Depth AE close." << std::endl;
|
||||
}
|
||||
}
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_EXPOSURE_INT, OB_PERMISSION_READ)) {
|
||||
// get the value range
|
||||
OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_DEPTH_EXPOSURE_INT);
|
||||
std::cout << "Depth current exposure max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
|
||||
|
||||
int value = device->getIntProperty(OB_PROP_DEPTH_EXPOSURE_INT);
|
||||
std::cout << "Depth current exposure:" << value << std::endl;
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_EXPOSURE_INT, OB_PERMISSION_WRITE)) {
|
||||
if(increase) {
|
||||
value += (valueRange.max - valueRange.min) / 10;
|
||||
if(value > valueRange.max) {
|
||||
value = valueRange.max;
|
||||
}
|
||||
}
|
||||
else {
|
||||
value -= (valueRange.max - valueRange.min) / 10;
|
||||
if(value < valueRange.min) {
|
||||
value = valueRange.min;
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that the value meet the step value requirements
|
||||
value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
|
||||
|
||||
std::cout << "Set depth exposure:" << value << std::endl;
|
||||
device->setIntProperty(OB_PROP_DEPTH_EXPOSURE_INT, value);
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth exposure set property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth exposure get property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setColorExposureValue(bool increase) {
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, OB_PERMISSION_READ_WRITE)) {
|
||||
bool value = device->getBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL);
|
||||
if(value) {
|
||||
device->setBoolProperty(OB_PROP_COLOR_AUTO_EXPOSURE_BOOL, false);
|
||||
std::cout << "Color AE close." << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_EXPOSURE_INT, OB_PERMISSION_READ)) {
|
||||
// get the value range
|
||||
OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_COLOR_EXPOSURE_INT);
|
||||
std::cout << "Color current exposure max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
|
||||
|
||||
int value = device->getIntProperty(OB_PROP_COLOR_EXPOSURE_INT);
|
||||
std::cout << "Color current exposure:" << value << std::endl;
|
||||
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_EXPOSURE_INT, OB_PERMISSION_WRITE)) {
|
||||
if(increase) {
|
||||
value += (valueRange.max - valueRange.min) / 10;
|
||||
if(value > valueRange.max) {
|
||||
value = valueRange.max;
|
||||
}
|
||||
}
|
||||
else {
|
||||
value -= (valueRange.max - valueRange.min) / 10;
|
||||
if(value < valueRange.min) {
|
||||
value = valueRange.min;
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that the value meet the step value requirements
|
||||
value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
|
||||
|
||||
std::cout << "Set color exposure:" << value << std::endl;
|
||||
device->setIntProperty(OB_PROP_COLOR_EXPOSURE_INT, value);
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color exposure set property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color exposure get property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setDepthGainValue(bool increase) {
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_GAIN_INT, OB_PERMISSION_READ)) {
|
||||
OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_DEPTH_GAIN_INT);
|
||||
std::cout << "Depth current gain max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
|
||||
int value = device->getIntProperty(OB_PROP_DEPTH_GAIN_INT);
|
||||
std::cout << "Depth current gain:" << value << std::endl;
|
||||
if(device->isPropertySupported(OB_PROP_DEPTH_GAIN_INT, OB_PERMISSION_WRITE)) {
|
||||
if(increase) {
|
||||
value += (valueRange.max - valueRange.min) / 10;
|
||||
if(value > valueRange.max) {
|
||||
value = valueRange.max;
|
||||
}
|
||||
}
|
||||
else {
|
||||
value -= (valueRange.max - valueRange.min) / 10;
|
||||
if(value < valueRange.min) {
|
||||
value = valueRange.min;
|
||||
}
|
||||
}
|
||||
// Ensure that the value meet the step value requirements
|
||||
value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
|
||||
|
||||
std::cout << "Set depth gain:" << value << std::endl;
|
||||
device->setIntProperty(OB_PROP_DEPTH_GAIN_INT, value);
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth gain set property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Depth gain get property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setColorGainValue(bool increase) {
|
||||
if(device) {
|
||||
try {
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_GAIN_INT, OB_PERMISSION_READ)) {
|
||||
OBIntPropertyRange valueRange = device->getIntPropertyRange(OB_PROP_COLOR_GAIN_INT);
|
||||
std::cout << "Color current gain max:" << valueRange.max << ", min:" << valueRange.min << std::endl;
|
||||
int value = device->getIntProperty(OB_PROP_COLOR_GAIN_INT);
|
||||
std::cout << "Color current gain:" << value << std::endl;
|
||||
if(device->isPropertySupported(OB_PROP_COLOR_GAIN_INT, OB_PERMISSION_WRITE)) {
|
||||
if(increase) {
|
||||
value += (valueRange.max - valueRange.min) / 10;
|
||||
if(value > valueRange.max) {
|
||||
value = valueRange.max;
|
||||
}
|
||||
}
|
||||
else {
|
||||
value -= (valueRange.max - valueRange.min) / 10;
|
||||
if(value < valueRange.min) {
|
||||
value = valueRange.min;
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that the value meet the step value requirements
|
||||
value = valueRange.min + (value - valueRange.min) / valueRange.step * valueRange.step;
|
||||
|
||||
std::cout << "Set color gain:" << value << std::endl;
|
||||
device->setIntProperty(OB_PROP_COLOR_GAIN_INT, value);
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color gain set property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
else {
|
||||
std::cerr << "Color gain get property is not supported." << std::endl;
|
||||
}
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType()
|
||||
<< std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void printUsage() {
|
||||
std::cout << "Support commands:" << std::endl;
|
||||
std::cout << " info / i - get device information" << std::endl;
|
||||
std::cout << " param / p - get camera parameter" << std::endl;
|
||||
std::cout << " laser / l - on/off laser" << std::endl;
|
||||
std::cout << " ldp / d - on/off LDP" << std::endl;
|
||||
std::cout << " ldp status / ds - get LDP status" << std::endl;
|
||||
std::cout << " color ae / ca - on/off Color auto exposure" << std::endl;
|
||||
std::cout << " inc color value / cei - increase Color exposure value" << std::endl;
|
||||
std::cout << " dec color value / ced - decrease Color exposure value" << std::endl;
|
||||
std::cout << " inc color gain / cgi - increase Color gain value" << std::endl;
|
||||
std::cout << " dec color gain / cgd - decrease Color gain value" << std::endl;
|
||||
std::cout << " color mirror / cm - on/off color mirror" << std::endl;
|
||||
std::cout << " depth ae / da - on/off Depth/IR auto exposure" << std::endl;
|
||||
std::cout << " depth mirror / dm - on/off Depth mirror" << std::endl;
|
||||
std::cout << " inc depth value / dei - increase Depth/IR exposure value" << std::endl;
|
||||
std::cout << " dec depth value / ded - decrease Depth/IR exposure value" << std::endl;
|
||||
std::cout << " inc depth gain / dgi - increase Depth/IR gain value" << std::endl;
|
||||
std::cout << " dec depth gain / dgd - decrease Depth/IR gain value" << std::endl;
|
||||
std::cout << " ir mirror / im - on/off Ir mirror" << std::endl;
|
||||
if(irRightMirrorSupport) {
|
||||
std::cout << " ir right mirror / irm - on/off Ir right mirror" << std::endl;
|
||||
}
|
||||
|
||||
std::cout << "--------------------------------" << std::endl;
|
||||
std::cout << " help / ? - print usage" << std::endl;
|
||||
std::cout << " quit / q- quit application" << std::endl;
|
||||
}
|
||||
|
||||
void commandProcess(std::string cmd) {
|
||||
if(cmd == "info" || cmd == "i") {
|
||||
getDeviceInformation();
|
||||
}
|
||||
else if(cmd == "param" || cmd == "p") {
|
||||
getCameraParams();
|
||||
}
|
||||
else if(cmd == "laser" || cmd == "l") {
|
||||
switchLaser();
|
||||
}
|
||||
else if(cmd == "ldp" || cmd == "d") {
|
||||
switchLDP();
|
||||
}
|
||||
else if(cmd == "ldp status" || cmd == "ds") {
|
||||
getLDPStatus();
|
||||
}
|
||||
else if(cmd == "color ae" || cmd == "ca") {
|
||||
switchColorAE();
|
||||
}
|
||||
else if(cmd == "inc color value" || cmd == "cei") {
|
||||
setColorExposureValue(true);
|
||||
}
|
||||
else if(cmd == "dec color value" || cmd == "ced") {
|
||||
setColorExposureValue(false);
|
||||
}
|
||||
else if(cmd == "inc color gain" || cmd == "cgi") {
|
||||
setColorGainValue(true);
|
||||
}
|
||||
else if(cmd == "dec color gain" || cmd == "cgd") {
|
||||
setColorGainValue(false);
|
||||
}
|
||||
else if(cmd == "inc depth value" || cmd == "dei") {
|
||||
setDepthExposureValue(true);
|
||||
}
|
||||
else if(cmd == "dec depth value" || cmd == "ded") {
|
||||
setDepthExposureValue(false);
|
||||
}
|
||||
else if(cmd == "inc depth gain" || cmd == "dgi") {
|
||||
setDepthGainValue(true);
|
||||
}
|
||||
else if(cmd == "dec depth gain" || cmd == "dgd") {
|
||||
setDepthGainValue(false);
|
||||
}
|
||||
else if(cmd == "depth ae" || cmd == "da") {
|
||||
switchDepthAE();
|
||||
}
|
||||
else if(cmd == "color mirror" || cmd == "cm") {
|
||||
switchColorMirror();
|
||||
}
|
||||
else if(cmd == "depth mirror" || cmd == "dm") {
|
||||
|
||||
switchDepthMirror();
|
||||
}
|
||||
else if(cmd == "ir mirror" || cmd == "im") {
|
||||
switchIRMirror();
|
||||
}
|
||||
else if(cmd == " ir right mirror" || cmd == "irm") {
|
||||
switchIRRightMirror();
|
||||
}
|
||||
else if(cmd == "help" || cmd == "?") {
|
||||
printUsage();
|
||||
}
|
||||
else {
|
||||
std::cerr << "Unsupported command received! Input \"help\" to get usage" << std::endl;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
# Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
# Licensed under the MIT License.
|
||||
|
||||
cmake_minimum_required(VERSION 3.5)
|
||||
project(ob_coordinate_transform)
|
||||
|
||||
add_executable(${PROJECT_NAME} coordinate_transform.cpp)
|
||||
|
||||
set_property(TARGET ${PROJECT_NAME} PROPERTY CXX_STANDARD 11)
|
||||
target_link_libraries(${PROJECT_NAME} ob::OrbbecSDK ob::examples::utils)
|
||||
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES FOLDER "examples")
|
||||
if(MSVC)
|
||||
set_target_properties(${PROJECT_NAME} PROPERTIES VS_DEBUGGER_WORKING_DIRECTORY "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
|
||||
endif()
|
||||
|
||||
install(TARGETS ${PROJECT_NAME} RUNTIME DESTINATION bin)
|
||||
|
||||
@@ -0,0 +1,98 @@
|
||||
# C++ Sample: 3.advanced.coordinate_transform
|
||||
|
||||
## Overview
|
||||
|
||||
Use the SDK interfaces to transform points between different coordinate systems.
|
||||
|
||||
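Conceptually, the 2D-to-3D conversion is a pinhole-camera deprojection: given a pixel (u, v), its depth z, and the intrinsics fx, fy, cx, cy, the 3D point is ((u - cx) * z / fx, (v - cy) * z / fy, z). The SDK's CoordinateTransformHelper additionally handles lens distortion and sensor extrinsics; the hand-rolled sketch below is only for intuition and is not part of the sample.

```cpp
// Hand-rolled pinhole deprojection, ignoring lens distortion. The SDK's
// CoordinateTransformHelper performs the same conversion plus distortion
// and extrinsic handling, so this is purely illustrative.
struct Point3 { float x, y, z; };

Point3 deproject(float u, float v, float z, float fx, float fy, float cx, float cy) {
    return { (u - cx) * z / fx, (v - cy) * z / fy, z };
}
```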
### Knowledge
|
||||
|
||||
Pipeline is a pipeline for processing data streams; it provides multi-channel stream configuration, switching, frame aggregation, and frame synchronization functions.
|
||||
|
||||
Frameset is a combination of different types of Frames.
|
||||
|
||||
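To see how these two pieces fit together, here is a minimal sketch (assuming the default device and the same 100 ms blocking wait used in the sample) that starts a Pipeline and extracts the color and depth frames from one Frameset:

```cpp
#include <libobsensor/ObSensor.hpp>
#include <iostream>

int main() {
    // Create a pipeline with the default device and enable depth + color streams.
    auto pipe   = std::make_shared<ob::Pipeline>();
    auto config = std::make_shared<ob::Config>();
    config->enableVideoStream(OB_STREAM_DEPTH, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
    config->enableVideoStream(OB_STREAM_COLOR, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
    config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
    pipe->start(config);

    // Wait up to 100 ms for one aggregated frameset, then pick out the frames.
    auto frameSet = pipe->waitForFrameset(100);
    if(frameSet) {
        auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
        auto depthFrame = frameSet->getFrame(OB_FRAME_DEPTH);
        if(depthFrame) {
            std::cout << "Depth: " << depthFrame->as<ob::VideoFrame>()->getWidth() << "x"
                      << depthFrame->as<ob::VideoFrame>()->getHeight() << std::endl;
        }
    }

    pipe->stop();
    return 0;
}
```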
## Code overview
|
||||
|
||||
1. Enable color stream
|
||||
|
||||
```cpp
|
||||
auto colorProfiles = pipe.getStreamProfileList(OB_SENSOR_COLOR);
|
||||
if(colorProfiles) {
|
||||
colorProfile = colorProfiles->getVideoStreamProfile(1280, OB_HEIGHT_ANY, OB_FORMAT_RGB, 30);
|
||||
}
|
||||
config->enableStream(colorProfile);
|
||||
```
|
||||
|
||||
2. Enable depth stream
|
||||
|
||||
```cpp
|
||||
auto depthProfiles = pipe.getStreamProfileList(OB_SENSOR_DEPTH);
|
||||
std::shared_ptr<ob::VideoStreamProfile> depthProfile = nullptr;
|
||||
if(depthProfiles) {
|
||||
depthProfile = depthProfiles->getVideoStreamProfile(640, OB_HEIGHT_ANY, OB_FORMAT_Y16, 30);
|
||||
|
||||
}
|
||||
config->enableStream(depthProfile);
|
||||
```
|
||||
|
||||
3. Get frame data
|
||||
|
||||
```cpp
|
||||
auto colorFrame = frameSet->colorFrame();
|
||||
auto depthFrame = frameSet->depthFrame();
|
||||
```
|
||||
|
||||
4. Get the stream profiles
|
||||
|
||||
```cpp
|
||||
auto colorProfile = colorFrame->getStreamProfile();
|
||||
auto depthProfile = depthFrame->getStreamProfile();
|
||||
```
|
||||
|
||||
5. Get the extrinsic parameters
|
||||
|
||||
```cpp
|
||||
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
|
||||
auto extrinsicC2D = colorProfile->getExtrinsicTo(depthProfile);
|
||||
```
|
||||
|
||||
6. Get the color intrinsic and distortion parameters
|
||||
|
||||
```cpp
|
||||
auto colorIntrinsic = colorProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
auto colorDistortion = colorProfile->as<ob::VideoStreamProfile>()->getDistortion();
|
||||
```
|
||||
|
||||
7. Get the depth intrinsic and distortion parameters
|
||||
|
||||
```cpp
|
||||
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();
|
||||
```
|
||||
|
||||
8. Processing: dispatch to the selected transformation (a usage sketch of one transformation call follows this list)
|
||||
|
||||
```cpp
|
||||
if(testType == "1") {
|
||||
transformation2dto2d(colorFrame, depthFrame);
|
||||
} else if (testType == "2") {
|
||||
transformation2dto3d(colorFrame, depthFrame);
|
||||
} else if (testType == "3") {
|
||||
transformation3dto3d(colorFrame, depthFrame);
|
||||
} else if (testType == "4") {
|
||||
transformation3dto2d(colorFrame, depthFrame);
|
||||
} else {
|
||||
std::cout << "Invalid command" << std::endl;
|
||||
}
|
||||
```
|
||||
|
||||
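For reference, the sketch below strings two of these calls together: it deprojects a single depth pixel to a 3D point and then projects that point back into the depth image, mirroring what the sample's transformation3dto2d() does. The pixel coordinates and depth value are made-up illustrative inputs, and the profiles are assumed to have been obtained as in steps 4-7 above.

```cpp
// Profiles, intrinsics and extrinsics obtained as shown in steps 4-7 above.
auto depthIntrinsic  = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();
auto extrinsicD2C    = depthProfile->getExtrinsicTo(colorProfile);
auto extrinsicC2D    = colorProfile->getExtrinsicTo(depthProfile);

OBPoint2f depthPixel = { 320.0f, 240.0f };  // illustrative pixel in the depth image
float     depthValue = 1000.0f;             // illustrative depth value for that pixel

// Depth pixel + depth value -> 3D point (2d to 3d).
OBPoint3f point3d = {};
if(ob::CoordinateTransformHelper::transformation2dto3d(depthPixel, depthValue, depthIntrinsic, extrinsicD2C, &point3d)) {
    // 3D point -> pixel coordinates in the depth image again (3d to 2d).
    OBPoint2f projectedPixel = {};
    ob::CoordinateTransformHelper::transformation3dto2d(point3d, depthIntrinsic, depthDistortion, extrinsicC2D, &projectedPixel);
}
```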
## Run Sample
|
||||
|
||||
Enter `quit` or `q` to exit the program.
Enter `1` - transformation 2d to 2d
Enter `2` - transformation 2d to 3d
Enter `3` - transformation 3d to 3d
Enter `4` - transformation 3d to 2d
|
||||
|
||||
### Result
|
||||
|
||||

|
||||
@@ -0,0 +1,299 @@
|
||||
// Copyright (c) Orbbec Inc. All Rights Reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
#include <libobsensor/ObSensor.hpp>
|
||||
#include "libobsensor/hpp/Utils.hpp"
|
||||
#include "libobsensor/hpp/Frame.hpp"
|
||||
|
||||
void printUsage();
|
||||
void transformation2dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
|
||||
void transformation2dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
|
||||
void transformation3dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
|
||||
void transformation3dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame);
|
||||
|
||||
int main(void) try {
|
||||
// Configure which streams to enable or disable for the Pipeline by creating a Config
|
||||
auto config = std::make_shared<ob::Config>();
|
||||
|
||||
// enable depth and color streams with specified format
|
||||
config->enableVideoStream(OB_STREAM_DEPTH, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
|
||||
config->enableVideoStream(OB_STREAM_COLOR, OB_WIDTH_ANY, OB_HEIGHT_ANY, OB_FPS_ANY, OB_FORMAT_ANY);
|
||||
|
||||
// set the frame aggregate output mode to ensure all types of frames are included in the output frameset
|
||||
config->setFrameAggregateOutputMode(OB_FRAME_AGGREGATE_OUTPUT_ALL_TYPE_FRAME_REQUIRE);
|
||||
|
||||
// Create a pipeline with default device to manage stream
|
||||
auto pipe = std::make_shared<ob::Pipeline>();
|
||||
|
||||
// Start the pipeline with config
|
||||
pipe->start(config);
|
||||
while(1) {
|
||||
printUsage();
|
||||
|
||||
std::cout << "\nInput command: ";
|
||||
std::string cmd = "1";
|
||||
std::getline(std::cin, cmd);
|
||||
if(cmd == "quit" || cmd == "q") {
|
||||
break;
|
||||
}
|
||||
|
||||
// Wait for a frameset from the pipeline
|
||||
auto frameSet = pipe->waitForFrameset(100);
|
||||
if(frameSet == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get the color frame and check its validity
|
||||
auto colorFrame = frameSet->getFrame(OB_FRAME_COLOR);
|
||||
|
||||
// Get the depth frame and check its validity
|
||||
auto depthFrame = frameSet->getFrame(OB_FRAME_DEPTH);
|
||||
|
||||
if(cmd == "1") {
|
||||
transformation2dto2d(colorFrame, depthFrame);
|
||||
}
|
||||
else if(cmd == "2") {
|
||||
transformation2dto3d(colorFrame, depthFrame);
|
||||
}
|
||||
else if(cmd == "3") {
|
||||
transformation3dto3d(colorFrame, depthFrame);
|
||||
}
|
||||
else if(cmd == "4") {
|
||||
transformation3dto2d(colorFrame, depthFrame);
|
||||
}
|
||||
else {
|
||||
std::cout << "Invalid command" << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
pipe->stop();
|
||||
return 0;
|
||||
}
|
||||
catch(ob::Error &e) {
|
||||
std::cerr << "function:" << e.getFunction() << "\nargs:" << e.getArgs() << "\nmessage:" << e.what() << "\ntype:" << e.getExceptionType() << std::endl;
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
void printUsage() {
|
||||
std::cout << "Support commands:" << std::endl;
|
||||
std::cout << " 1 - transformation 2d to 2d" << std::endl;
|
||||
std::cout << " 2 - transformation 2d to 3d" << std::endl;
|
||||
std::cout << " 3 - transformation 3d to 3d" << std::endl;
|
||||
std::cout << " 4 - transformation 3d to 2d" << std::endl;
|
||||
|
||||
std::cout << "--------------------------------" << std::endl;
|
||||
std::cout << " quit / q- quit application" << std::endl;
|
||||
}
|
||||
|
||||
void printRuslt(std::string msg, OBPoint2f sourcePixel, OBPoint2f targetPixel) {
|
||||
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ") -> (" << targetPixel.x << ", " << targetPixel.y << ")" << std::endl;
|
||||
}
|
||||
|
||||
void printRuslt(std::string msg, OBPoint2f sourcePixel, OBPoint3f targetPixel, float depthValue) {
|
||||
std::cout << msg << ":" << "depth " << depthValue << " (" << sourcePixel.x << ", " << sourcePixel.y << ") -> (" << targetPixel.x << ", " << targetPixel.y << ", " << targetPixel.z << ")" << std::endl;
|
||||
}
|
||||
|
||||
void printRuslt(std::string msg, OBPoint3f sourcePixel, OBPoint2f targetPixel) {
|
||||
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ", " << sourcePixel.z << ") -> (" << targetPixel.x << ", " << targetPixel.y << ")" << std::endl;
|
||||
}
|
||||
|
||||
void printRuslt(std::string msg, OBPoint3f sourcePixel, OBPoint3f targetPixel) {
|
||||
std::cout << msg << ":" << "(" << sourcePixel.x << ", " << sourcePixel.y << ", " << sourcePixel.z << ") -> (" << targetPixel.x << ", " << targetPixel.y << ", " << targetPixel.z << ")" << std::endl;
|
||||
}
|
||||
|
||||
// test the transformation from one 2D coordinate system to another
|
||||
void transformation2dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
|
||||
// Get the width and height of the color and depth frames
|
||||
auto colorFrameWidth = colorFrame->as<ob::VideoFrame>()->getWidth();
|
||||
auto depthFrameWidth = depthFrame->as<ob::VideoFrame>()->getWidth();
|
||||
auto colorFrameHeight = colorFrame->as<ob::VideoFrame>()->getHeight();
|
||||
auto depthFrameHeight = depthFrame->as<ob::VideoFrame>()->getHeight();
|
||||
|
||||
// Get the stream profiles for the color and depth frames
|
||||
auto colorProfile = colorFrame->getStreamProfile();
|
||||
auto depthProfile = depthFrame->getStreamProfile();
|
||||
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
|
||||
|
||||
// Get the intrinsic and distortion parameters for the color and depth streams
|
||||
auto colorIntrinsic = colorProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
auto colorDistortion = colorProfile->as<ob::VideoStreamProfile>()->getDistortion();
|
||||
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();
|
||||
// Access the depth data from the frame
|
||||
uint16_t *pDepthData = (uint16_t *)depthFrame->getData();
|
||||
uint16_t convertAreaWidth = 3;
|
||||
uint16_t convertAreaHeight = 3;
|
||||
|
||||
// Transform depth values to the color frame's coordinate system
|
||||
for(uint32_t i = depthFrameHeight / 2; i < (depthFrameHeight / 2 + convertAreaHeight); i++) {
|
||||
for(uint32_t j = depthFrameWidth / 2; j < (depthFrameWidth / 2 + convertAreaWidth); j++) {
|
||||
OBPoint2f sourcePixel = { static_cast<float>(j), static_cast<float>(i) };
|
||||
OBPoint2f targetPixel = {};
|
||||
float depthValue = (float)pDepthData[i * depthFrameWidth + j];
|
||||
if(depthValue == 0) {
|
||||
std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Demonstrate Depth 2D converted to Color 2D
|
||||
bool result = ob::CoordinateTransformHelper::transformation2dto2d(sourcePixel, depthValue, depthIntrinsic, depthDistortion, colorIntrinsic,
|
||||
colorDistortion, extrinsicD2C, &targetPixel);
|
||||
|
||||
// Check transformation result and whether the target pixel is within the color frame
|
||||
if(!result || targetPixel.y < 0 || targetPixel.x < 0 || targetPixel.y > colorFrameHeight || targetPixel.x > colorFrameWidth) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Calculate the index position of the target pixel in the transformation data buffer
|
||||
auto index = (((uint32_t)targetPixel.y * colorFrameWidth) + (uint32_t)targetPixel.x);
|
||||
if(index > colorFrameWidth * colorFrameHeight) {
|
||||
continue;
|
||||
}
|
||||
|
||||
printRuslt("depth to color: depth image coordinate transform to color image coordinate", sourcePixel, targetPixel);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test the transformation from 2D to 3D coordinates
|
||||
void transformation2dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
|
||||
// Get the width and height of the color and depth frames
|
||||
auto depthFrameWidth = depthFrame->as<ob::VideoFrame>()->getWidth();
|
||||
auto depthFrameHeight = depthFrame->as<ob::VideoFrame>()->getHeight();
|
||||
|
||||
// Get the stream profiles for the color and depth frames
|
||||
auto colorProfile = colorFrame->getStreamProfile();
|
||||
auto depthProfile = depthFrame->getStreamProfile();
|
||||
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
|
||||
|
||||
// Get the intrinsic and distortion parameters for the color and depth streams
|
||||
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
// Access the depth data from the frame
|
||||
uint16_t *pDepthData = (uint16_t *)depthFrame->getData();
|
||||
uint16_t convertAreaWidth = 3;
|
||||
uint16_t convertAreaHeight = 3;
|
||||
|
||||
// Transform depth pixels in the center region to 3D points
|
||||
for(uint32_t i = depthFrameHeight / 2; i < (depthFrameHeight / 2 + convertAreaHeight); i++) {
|
||||
for(uint32_t j = depthFrameWidth / 2; j < (depthFrameWidth / 2 + convertAreaWidth); j++) {
|
||||
// Get the coordinates of the current pixel
|
||||
OBPoint2f sourcePixel = { static_cast<float>(j), static_cast<float>(i) };
|
||||
OBPoint3f targetPixel = {};
|
||||
// Get the depth value of the current pixel
|
||||
float depthValue = (float)pDepthData[i * depthFrameWidth + j];
|
||||
if(depthValue == 0) {
|
||||
std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Perform the 2D to 3D transformation
|
||||
bool result = ob::CoordinateTransformHelper::transformation2dto3d(sourcePixel, depthValue, depthIntrinsic, extrinsicD2C, &targetPixel);
|
||||
if(!result ) {
|
||||
continue;
|
||||
}
|
||||
|
||||
printRuslt("2d to 3D: pixel coordinates and depth transform to point in 3D space", sourcePixel, targetPixel, depthValue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test the transformation from 3D coordinates to 3D coordinates
|
||||
void transformation3dto3d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
|
||||
// Get the width and height of the color and depth frames
|
||||
auto depthFrameWidth = depthFrame->as<ob::VideoFrame>()->getWidth();
|
||||
auto depthFrameHeight = depthFrame->as<ob::VideoFrame>()->getHeight();
|
||||
|
||||
// Get the stream profiles for the color and depth frames
|
||||
auto colorProfile = colorFrame->getStreamProfile();
|
||||
auto depthProfile = depthFrame->getStreamProfile();
|
||||
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
|
||||
auto extrinsicC2D = colorProfile->getExtrinsicTo(depthProfile);
|
||||
|
||||
// Get the intrinsic and distortion parameters for the color and depth streams
|
||||
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
// Access the depth data from the frame
|
||||
uint16_t *pDepthData = (uint16_t *)depthFrame->getData();
|
||||
uint16_t convertAreaWidth = 3;
|
||||
uint16_t convertAreaHeight = 3;
|
||||
|
||||
// Transform depth pixels to 3D points, then into another sensor's 3D coordinate system
|
||||
for(uint32_t i = depthFrameHeight / 2; i < (depthFrameHeight / 2 + convertAreaHeight); i++) {
|
||||
for(uint32_t j = depthFrameWidth / 2; j < (depthFrameWidth / 2 + convertAreaWidth); j++) {
|
||||
// Get the coordinates of the current pixel
|
||||
OBPoint2f sourcePixel = { static_cast<float>(j), static_cast<float>(i) };
|
||||
OBPoint3f tmpTargetPixel = {};
|
||||
OBPoint3f targetPixel = {};
|
||||
// Get the depth value of the current pixel
|
||||
float depthValue = (float)pDepthData[i * depthFrameWidth + j];
|
||||
if(depthValue == 0) {
|
||||
std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Perform the 2D to 3D transformation
|
||||
bool result = ob::CoordinateTransformHelper::transformation2dto3d(sourcePixel, depthValue, depthIntrinsic, extrinsicD2C, &tmpTargetPixel);
|
||||
if(!result ) {
|
||||
continue;
|
||||
}
|
||||
printRuslt("2d to 3D: pixel coordinates and depth transform to point in 3D space", sourcePixel, tmpTargetPixel, depthValue);
|
||||
|
||||
// Perform the 3D to 3D transformation
|
||||
result = ob::CoordinateTransformHelper::transformation3dto3d(tmpTargetPixel, extrinsicC2D, &targetPixel);
|
||||
if(!result ) {
|
||||
continue;
|
||||
}
|
||||
printRuslt("3d to 3D: transform 3D coordinates relative to one sensor to 3D coordinates relative to another viewpoint", tmpTargetPixel, targetPixel);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// test the transformation from 3D coordinates back to 2D coordinates
|
||||
void transformation3dto2d(std::shared_ptr<ob::Frame> colorFrame, std::shared_ptr<ob::Frame> depthFrame) {
|
||||
// Get the width and height of the color and depth frames
|
||||
auto depthFrameWidth = depthFrame->as<ob::VideoFrame>()->getWidth();
|
||||
auto depthFrameHeight = depthFrame->as<ob::VideoFrame>()->getHeight();
|
||||
|
||||
// Get the stream profiles for the color and depth frames
|
||||
auto colorProfile = colorFrame->getStreamProfile();
|
||||
auto depthProfile = depthFrame->getStreamProfile();
|
||||
auto extrinsicD2C = depthProfile->getExtrinsicTo(colorProfile);
|
||||
auto extrinsicC2D = colorProfile->getExtrinsicTo(depthProfile);
|
||||
|
||||
// Get the intrinsic and distortion parameters for the color and depth streams
|
||||
auto depthIntrinsic = depthProfile->as<ob::VideoStreamProfile>()->getIntrinsic();
|
||||
auto depthDistortion = depthProfile->as<ob::VideoStreamProfile>()->getDistortion();
|
||||
// Access the depth data from the frame
|
||||
uint16_t *pDepthData = (uint16_t *)depthFrame->getData();
|
||||
uint16_t convertAreaWidth = 3;
|
||||
uint16_t convertAreaHeight = 3;
|
||||
|
||||
// Transform depth pixels to 3D points, then project them back to 2D pixel coordinates
|
||||
for(uint32_t i = depthFrameHeight / 2; i < (depthFrameHeight / 2 + convertAreaHeight); i++) {
|
||||
for(uint32_t j = depthFrameWidth / 2; j < (depthFrameWidth / 2 + convertAreaWidth); j++) {
|
||||
// Get the coordinates of the current pixel
|
||||
OBPoint2f sourcePixel = { static_cast<float>(j), static_cast<float>(i) };
|
||||
OBPoint3f tmpTargetPixel = {};
|
||||
OBPoint2f targetPixel = {};
|
||||
// Get the depth value of the current pixel
|
||||
float depthValue = (float)pDepthData[i * depthFrameWidth + j];
|
||||
if(depthValue == 0) {
|
||||
std::cout << "The depth value is 0, so it's recommended to point the camera at a flat surface" << std::endl;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Perform the 2D to 3D transformation
|
||||
bool result = ob::CoordinateTransformHelper::transformation2dto3d(sourcePixel, depthValue, depthIntrinsic,
|
||||
extrinsicD2C, &tmpTargetPixel);
|
||||
if(!result ) {
|
||||
continue;
|
||||
}
|
||||
printRuslt("depth 2d to 3D: pixel coordinates and depth transform to point in 3D space", sourcePixel, tmpTargetPixel, depthValue);
|
||||
|
||||
// Perform the 3D to 2D transformation
|
||||
result = ob::CoordinateTransformHelper::transformation3dto2d(tmpTargetPixel, depthIntrinsic, depthDistortion, extrinsicC2D, &targetPixel);
|
||||
if(!result ) {
|
||||
continue;
|
||||
}
|
||||
printRuslt("3d to depth 2d : point in 3D space transform to the corresponding pixel coordinates in an image", tmpTargetPixel, targetPixel);
|
||||
}
|
||||
}
|
||||
}
|
||||