From 17d96c61c4c1b6365e5ad0959507c33fa7eae581 Mon Sep 17 00:00:00 2001 From: qupengwei <123456789> Date: Tue, 6 Jan 2026 10:49:45 +0800 Subject: [PATCH] =?UTF-8?q?=E6=8B=B7=E8=B4=9D=E6=89=98=E7=9B=98=E5=BA=93?= =?UTF-8?q?=E7=B3=BB=E7=BB=9F=E6=9E=B6=E6=9E=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 74 + docs/cmake_configuration_summary.md | 221 ++ docs/project_architecture.md | 513 +++ docs/project_class_interaction.md | 133 + image_capture/CMakeLists.txt | 251 ++ image_capture/cmake/CompilerOptions.cmake | 32 + image_capture/cmake/Dependencies.cmake | 129 + image_capture/cmake/PercipioSDK.cmake | 55 + image_capture/config.json | 130 + image_capture/note.md | 6 + image_capture/run_log.txt | 0 .../src/algorithm/core/detection_base.cpp | 172 + .../src/algorithm/core/detection_base.h | 116 + .../src/algorithm/core/detection_result.cpp | 184 + .../src/algorithm/core/detection_result.h | 96 + .../beam_rack_deflection_detection.cpp | 932 +++++ .../beam_rack_deflection_detection.h | 134 + .../pallet_offset/pallet_offset_detection.cpp | 1396 +++++++ .../pallet_offset/pallet_offset_detection.h | 104 + .../slot_occupancy_detection.cpp | 261 ++ .../slot_occupancy/slot_occupancy_detection.h | 37 + .../visual_inventory_detection.cpp | 129 + .../visual_inventory_detection.h | 40 + .../src/algorithm/utils/image_processor.cpp | 150 + .../src/algorithm/utils/image_processor.h | 63 + .../src/camera/mvs_multi_camera_capture.cpp | 306 ++ .../src/camera/mvs_multi_camera_capture.h | 122 + .../src/camera/ty_multi_camera_capture.cpp | 950 +++++ .../src/camera/ty_multi_camera_capture.h | 159 + image_capture/src/common/config_manager.cpp | 734 ++++ image_capture/src/common/config_manager.h | 124 + image_capture/src/common/log_manager.cpp | 104 + image_capture/src/common/log_manager.h | 80 + image_capture/src/common/log_streambuf.h | 39 + image_capture/src/common_types.h | 27 + image_capture/src/device/device_manager.cpp | 
300 ++ image_capture/src/device/device_manager.h | 154 + image_capture/src/gui/mainwindow.cpp | 1014 ++++++ image_capture/src/gui/mainwindow.h | 106 + image_capture/src/gui/mainwindow.ui | 1205 +++++++ image_capture/src/gui/settings_widget.cpp | 305 ++ image_capture/src/gui/settings_widget.h | 75 + image_capture/src/main.cpp | 18 + .../src/redis/redis_communicator.cpp | 346 ++ image_capture/src/redis/redis_communicator.h | 145 + image_capture/src/redis/task_data.h | 16 + image_capture/src/task/task_manager.cpp | 855 +++++ image_capture/src/task/task_manager.h | 148 + .../src/tools/calibration_tool/README.md | 36 + .../calibration_tool/calibration_widget.cpp | 731 ++++ .../calibration_tool/calibration_widget.h | 85 + .../calibration_widget_BACKUP.cpp | 1 + .../src/tools/calibration_tool/main.cpp | 13 + .../src/tools/generate_reference/main.cpp | 280 ++ .../src/tools/intrinsic_dumper/README.md | 31 + .../src/tools/intrinsic_dumper/main.cpp | 116 + .../src/tools/slot_algo_tuner/README.md | 29 + .../src/tools/slot_algo_tuner/main.cpp | 13 + .../tools/slot_algo_tuner/tuner_widget.cpp | 246 ++ .../src/tools/slot_algo_tuner/tuner_widget.h | 53 + .../src/vision/vision_controller.cpp | 197 + image_capture/src/vision/vision_controller.h | 81 + .../third_party/mvs/Includes/CameraParams.h | 1441 ++++++++ .../mvs/Includes/MvCameraControl.h | 3195 +++++++++++++++++ .../third_party/mvs/Includes/MvErrorDefine.h | 122 + .../mvs/Includes/MvISPErrorDefine.h | 98 + .../mvs/Includes/MvObsoleteInterfaces.h | 2148 +++++++++++ .../mvs/Includes/ObsoleteCamParams.h | 655 ++++ .../third_party/mvs/Includes/PixelType.h | 201 ++ .../third_party/percipio/common/BayerISP.hpp | 218 ++ .../common/CommandLineFeatureHelper.hpp | 126 + .../percipio/common/CommandLineParser.hpp | 173 + .../percipio/common/DepthInpainter.cpp | 647 ++++ .../percipio/common/DepthInpainter.hpp | 36 + .../percipio/common/DepthRender.hpp | 249 ++ .../percipio/common/ImageSpeckleFilter.cpp | 120 + 
.../percipio/common/ImageSpeckleFilter.hpp | 22 + .../third_party/percipio/common/MatViewer.cpp | 95 + .../third_party/percipio/common/MatViewer.hpp | 144 + .../percipio/common/ParametersParse.cpp | 198 + .../percipio/common/ParametersParse.h | 6 + .../third_party/percipio/common/TYThread.cpp | 83 + .../third_party/percipio/common/TYThread.hpp | 25 + .../third_party/percipio/common/Utils.hpp | 496 +++ .../third_party/percipio/common/common.hpp | 539 +++ .../third_party/percipio/common/crc32.cpp | 1245 +++++++ .../third_party/percipio/common/crc32.h | 69 + .../third_party/percipio/common/huffman.cpp | 464 +++ .../third_party/percipio/common/huffman.h | 5 + .../third_party/percipio/common/json11.cpp | 790 ++++ .../third_party/percipio/common/json11.hpp | 232 ++ .../third_party/percipio/include/TYApi.h | 2951 +++++++++++++++ .../percipio/include/TYCoordinateMapper.h | 560 +++ .../third_party/percipio/include/TYDefs.h | 1224 +++++++ .../percipio/include/TYImageProc.h | 82 + .../third_party/percipio/include/TYVer.h | 3 + .../third_party/percipio/include/TyIsp.h | 109 + .../percipio/sample_v2/cpp/Device.cpp | 635 ++++ .../percipio/sample_v2/cpp/Frame.cpp | 472 +++ .../percipio/sample_v2/hpp/Device.hpp | 239 ++ .../percipio/sample_v2/hpp/Frame.hpp | 126 + .../sample_v2/如何设置分辨率和帧率.md | 221 ++ scripts/clear_redis_task_keys_DB0_DB1.bat | 54 + scripts/simulate_wms_task - flag=3.bat | 17 + scripts/simulate_wms_task - flag=5.bat | 14 + scripts/simulate_wms_task-flag=4.bat | 14 + scripts/读写分离脚本DB0-4个值_DB1-26个值.bat | 109 + 107 files changed, 35274 insertions(+) create mode 100644 .gitignore create mode 100644 docs/cmake_configuration_summary.md create mode 100644 docs/project_architecture.md create mode 100644 docs/project_class_interaction.md create mode 100644 image_capture/CMakeLists.txt create mode 100644 image_capture/cmake/CompilerOptions.cmake create mode 100644 image_capture/cmake/Dependencies.cmake create mode 100644 image_capture/cmake/PercipioSDK.cmake create mode 
100644 image_capture/config.json create mode 100644 image_capture/note.md create mode 100644 image_capture/run_log.txt create mode 100644 image_capture/src/algorithm/core/detection_base.cpp create mode 100644 image_capture/src/algorithm/core/detection_base.h create mode 100644 image_capture/src/algorithm/core/detection_result.cpp create mode 100644 image_capture/src/algorithm/core/detection_result.h create mode 100644 image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.cpp create mode 100644 image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.h create mode 100644 image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.cpp create mode 100644 image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.h create mode 100644 image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.cpp create mode 100644 image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.h create mode 100644 image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.cpp create mode 100644 image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.h create mode 100644 image_capture/src/algorithm/utils/image_processor.cpp create mode 100644 image_capture/src/algorithm/utils/image_processor.h create mode 100644 image_capture/src/camera/mvs_multi_camera_capture.cpp create mode 100644 image_capture/src/camera/mvs_multi_camera_capture.h create mode 100644 image_capture/src/camera/ty_multi_camera_capture.cpp create mode 100644 image_capture/src/camera/ty_multi_camera_capture.h create mode 100644 image_capture/src/common/config_manager.cpp create mode 100644 image_capture/src/common/config_manager.h create mode 100644 image_capture/src/common/log_manager.cpp create mode 100644 image_capture/src/common/log_manager.h create mode 100644 image_capture/src/common/log_streambuf.h create mode 100644 
image_capture/src/common_types.h create mode 100644 image_capture/src/device/device_manager.cpp create mode 100644 image_capture/src/device/device_manager.h create mode 100644 image_capture/src/gui/mainwindow.cpp create mode 100644 image_capture/src/gui/mainwindow.h create mode 100644 image_capture/src/gui/mainwindow.ui create mode 100644 image_capture/src/gui/settings_widget.cpp create mode 100644 image_capture/src/gui/settings_widget.h create mode 100644 image_capture/src/main.cpp create mode 100644 image_capture/src/redis/redis_communicator.cpp create mode 100644 image_capture/src/redis/redis_communicator.h create mode 100644 image_capture/src/redis/task_data.h create mode 100644 image_capture/src/task/task_manager.cpp create mode 100644 image_capture/src/task/task_manager.h create mode 100644 image_capture/src/tools/calibration_tool/README.md create mode 100644 image_capture/src/tools/calibration_tool/calibration_widget.cpp create mode 100644 image_capture/src/tools/calibration_tool/calibration_widget.h create mode 100644 image_capture/src/tools/calibration_tool/calibration_widget_BACKUP.cpp create mode 100644 image_capture/src/tools/calibration_tool/main.cpp create mode 100644 image_capture/src/tools/generate_reference/main.cpp create mode 100644 image_capture/src/tools/intrinsic_dumper/README.md create mode 100644 image_capture/src/tools/intrinsic_dumper/main.cpp create mode 100644 image_capture/src/tools/slot_algo_tuner/README.md create mode 100644 image_capture/src/tools/slot_algo_tuner/main.cpp create mode 100644 image_capture/src/tools/slot_algo_tuner/tuner_widget.cpp create mode 100644 image_capture/src/tools/slot_algo_tuner/tuner_widget.h create mode 100644 image_capture/src/vision/vision_controller.cpp create mode 100644 image_capture/src/vision/vision_controller.h create mode 100644 image_capture/third_party/mvs/Includes/CameraParams.h create mode 100644 image_capture/third_party/mvs/Includes/MvCameraControl.h create mode 100644 
image_capture/third_party/mvs/Includes/MvErrorDefine.h create mode 100644 image_capture/third_party/mvs/Includes/MvISPErrorDefine.h create mode 100644 image_capture/third_party/mvs/Includes/MvObsoleteInterfaces.h create mode 100644 image_capture/third_party/mvs/Includes/ObsoleteCamParams.h create mode 100644 image_capture/third_party/mvs/Includes/PixelType.h create mode 100644 image_capture/third_party/percipio/common/BayerISP.hpp create mode 100644 image_capture/third_party/percipio/common/CommandLineFeatureHelper.hpp create mode 100644 image_capture/third_party/percipio/common/CommandLineParser.hpp create mode 100644 image_capture/third_party/percipio/common/DepthInpainter.cpp create mode 100644 image_capture/third_party/percipio/common/DepthInpainter.hpp create mode 100644 image_capture/third_party/percipio/common/DepthRender.hpp create mode 100644 image_capture/third_party/percipio/common/ImageSpeckleFilter.cpp create mode 100644 image_capture/third_party/percipio/common/ImageSpeckleFilter.hpp create mode 100644 image_capture/third_party/percipio/common/MatViewer.cpp create mode 100644 image_capture/third_party/percipio/common/MatViewer.hpp create mode 100644 image_capture/third_party/percipio/common/ParametersParse.cpp create mode 100644 image_capture/third_party/percipio/common/ParametersParse.h create mode 100644 image_capture/third_party/percipio/common/TYThread.cpp create mode 100644 image_capture/third_party/percipio/common/TYThread.hpp create mode 100644 image_capture/third_party/percipio/common/Utils.hpp create mode 100644 image_capture/third_party/percipio/common/common.hpp create mode 100644 image_capture/third_party/percipio/common/crc32.cpp create mode 100644 image_capture/third_party/percipio/common/crc32.h create mode 100644 image_capture/third_party/percipio/common/huffman.cpp create mode 100644 image_capture/third_party/percipio/common/huffman.h create mode 100644 image_capture/third_party/percipio/common/json11.cpp create mode 100644 
image_capture/third_party/percipio/common/json11.hpp create mode 100644 image_capture/third_party/percipio/include/TYApi.h create mode 100644 image_capture/third_party/percipio/include/TYCoordinateMapper.h create mode 100644 image_capture/third_party/percipio/include/TYDefs.h create mode 100644 image_capture/third_party/percipio/include/TYImageProc.h create mode 100644 image_capture/third_party/percipio/include/TYVer.h create mode 100644 image_capture/third_party/percipio/include/TyIsp.h create mode 100644 image_capture/third_party/percipio/sample_v2/cpp/Device.cpp create mode 100644 image_capture/third_party/percipio/sample_v2/cpp/Frame.cpp create mode 100644 image_capture/third_party/percipio/sample_v2/hpp/Device.hpp create mode 100644 image_capture/third_party/percipio/sample_v2/hpp/Frame.hpp create mode 100644 image_capture/third_party/percipio/sample_v2/如何设置分辨率和帧率.md create mode 100644 scripts/clear_redis_task_keys_DB0_DB1.bat create mode 100644 scripts/simulate_wms_task - flag=3.bat create mode 100644 scripts/simulate_wms_task - flag=5.bat create mode 100644 scripts/simulate_wms_task-flag=4.bat create mode 100644 scripts/读写分离脚本DB0-4个值_DB1-26个值.bat diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ae5ab1a --- /dev/null +++ b/.gitignore @@ -0,0 +1,74 @@ +# Build directories +# 忽略所有build目录(包括根目录和子目录) +build/ +# 注意:项目只在 image_capture/build/ 目录下构建 +# 根目录的 build/ 文件夹应该被忽略,如果存在可以安全删除 +bin/ +lib/ +!camport3/lib/ +image_capture/src/images_template/ +image_capture/build_debug +# CMake generated files +CMakeCache.txt +CMakeFiles/ +cmake_install.cmake +Makefile +*.cmake + +# Visual Studio files +.vs/ +*.vcxproj +*.vcxproj.filters +*.vcxproj.user +*.sln +*.suo +*.user +*.sdf +*.opensdf + +# Qt autogen files +*_autogen/ +.qt/ +ui_*.h +moc_*.cpp +qrc_*.cpp + +# Compiled files +*.o +*.obj +*.exe +*.a +*.lib +!image_capture/camera_sdk/lib/**/*.lib + +# Saved images +*.png +*.jpg +*.jpeg +*.ply + +# IDE files +.vscode/ +.idea/ +*.swp +*.swo +*~ + +# 
Temporary files +*.tmp +*.temp +*.log +compile_commands.json.tmp* + +# OS files +.DS_Store +Thumbs.db + +*.gif + +.cache/ +!image_capture/camera_sdk/ +!image_capture/camera_sdk/lib/ + + +!image_capture/cmake/*.cmake diff --git a/docs/cmake_configuration_summary.md b/docs/cmake_configuration_summary.md new file mode 100644 index 0000000..8e3c348 --- /dev/null +++ b/docs/cmake_configuration_summary.md @@ -0,0 +1,221 @@ +# CMake 配置文档 + +本文档总结了 `image_capture` 项目的 CMake 构建系统配置。 + +--- + +## 目录结构 + +``` +image_capture/ +├── CMakeLists.txt # 主构建配置文件 +└── cmake/ # CMake 模块目录 + ├── CompilerOptions.cmake # 编译器选项配置 + ├── Dependencies.cmake # 外部依赖管理 + └── PercipioSDK.cmake # 相机 SDK 配置 +``` + +--- + +## 主配置文件:[CMakeLists.txt](file:///d:/Git/stereo_warehouse_inspection/image_capture/CMakeLists.txt) + +### 基本信息 +- **CMake 最低版本**: 3.10 +- **项目名称**: `image_capture` +- **编程语言**: C++ +- **构建生成器**: Visual Studio 17 2022 (MSVC) + +### 输出目录 +```cmake +CMAKE_RUNTIME_OUTPUT_DIRECTORY = ${CMAKE_BINARY_DIR}/bin/Release # 可执行文件 +CMAKE_LIBRARY_OUTPUT_DIRECTORY = ${CMAKE_BINARY_DIR}/lib/Release # 动态库 +CMAKE_ARCHIVE_OUTPUT_DIRECTORY = ${CMAKE_BINARY_DIR}/lib/Release # 静态库 +``` + +### 模块化设计 +项目采用模块化的 CMake 配置,通过 `cmake/` 目录下的三个模块文件组织: + +1. **CompilerOptions.cmake** - 编译器和全局设置 +2. **Dependencies.cmake** - Qt6、OpenCV、Open3D 依赖 +3. **PercipioSDK.cmake** - 图漾相机 SDK 配置 + +### 库和可执行文件 + +#### 1. Algorithm Library (`algorithm_lib`) +**类型**: 静态库 + +**源文件**: +- `src/algorithm/core/detection_base.cpp` +- `src/algorithm/core/detection_result.cpp` +- `src/algorithm/utils/image_processor.cpp` +- `src/algorithm/detections/slot_occupancy_detection.cpp` +- `src/algorithm/detections/pallet_offset_detection.cpp` +- `src/algorithm/detections/beam_rack_deflection_detection.cpp` +- `src/algorithm/detections/visual_inventory_detection.cpp` +- `src/algorithm/detections/visual_inventory_end_detection.cpp` + +**包含路径**: +- `src` +- `third_party/percipio/common` (修复 json11.hpp 引用) + +**依赖**: OpenCV, Open3D + +#### 2. 
Main Executable (`image_capture`) +**类型**: 可执行文件 + +**主要源文件**: +- `src/main.cpp` +- `src/camera/ty_multi_camera_capture.cpp` +- `src/camera/mvs_multi_camera_capture.cpp` +- `src/device/device_manager.cpp` +- `src/redis/redis_communicator.cpp` +- `src/task/task_manager.cpp` +- `src/vision/vision_controller.cpp` +- `src/common/log_manager.cpp` +- `src/common/config_manager.cpp` +- `src/gui/mainwindow.cpp` / `.h` / `.ui` + +**链接的库**: +- `algorithm_lib` (项目内部算法库) +- `cpp_api_lib` (相机 SDK C++ API 封装) +- `tycam` (相机 SDK 动态库) +- `${OpenCV_LIBS}` (OpenCV 库) +- `Open3D::Open3D` (Open3D 库) +- `Qt6::Core` 和 `Qt6::Widgets` (Qt 框架) +- `MvCameraControl.lib` (海康 MVS SDK) + +### 测试配置 +- **选项**: `BUILD_TESTS` (默认 ON) +- **测试目录**: `tests/` (通过 `add_subdirectory` 添加) + +--- + +## CMake 模块详解 + +### 1. [CompilerOptions.cmake](file:///d:/Git/stereo_warehouse_inspection/image_capture/cmake/CompilerOptions.cmake) + +#### C++ 标准 +- **标准**: C++17 +- **要求**: 必须支持 + +#### Qt 自动化工具 +```cmake +CMAKE_AUTOMOC ON # 自动 Meta-Object Compiler +CMAKE_AUTORCC ON # 自动 Resource Compiler +CMAKE_AUTOUIC ON # 自动 UI Compiler +``` + +#### 编译器优化选项 (MSVC) + +**Release 模式** (默认): +```cmake +/O2 # 优化速度 +/Ob2 # 内联任何合适的函数 +/Oi # 启用内建函数 +/Ot # 代码速度优先 +/Oy # 省略帧指针 +/GL # 全局程序优化 +``` + +**Debug 模式**: +```cmake +/Od # 禁用优化 +/Zi # 生成完整调试信息 +``` + +#### 其他设置 +- **定义**: `OPENCV_DEPENDENCIES` +- **compile_commands.json**: 自动生成(用于 IDE 智能提示) + +--- + +### 2. [Dependencies.cmake](file:///d:/Git/stereo_warehouse_inspection/image_capture/cmake/Dependencies.cmake) + +#### Qt6 配置 +```cmake +find_package(Qt6 REQUIRED COMPONENTS Widgets) +``` + +#### OpenCV 配置 +```cmake +find_package(OpenCV REQUIRED) +``` + +#### Open3D 配置 +```cmake +find_package(Open3D REQUIRED) +``` +用于点云处理和算法运算。 + +--- + +### 3. 
[PercipioSDK.cmake](file:///d:/Git/stereo_warehouse_inspection/image_capture/cmake/PercipioSDK.cmake) + +#### 相机 SDK 路径配置 +```cmake +CAMPORT3_ROOT = ${CMAKE_CURRENT_SOURCE_DIR}/camera_sdk +CAMPORT3_LIB_DIR = ${CAMPORT3_ROOT}/lib/win/x64 +``` + +#### 导入 tycam 动态库 +```cmake +add_library(tycam SHARED IMPORTED) +``` + +#### C++ API 封装库 (`cpp_api_lib`) +**类型**: 静态库 + +**源文件**: +`camera_sdk/sample_v2/cpp/*`, `camera_sdk/common/*` + +**依赖**: OpenCV + +--- + +## 构建流程 + +### 配置项目 +```bash +cd image_capture/build +cmake .. +``` + +可选参数: +```bash +-DOpenCV_DIR= # 指定 OpenCV 路径 +-DQt6_DIR= # 指定 Qt6 路径 +-DOpen3D_DIR= # 指定 Open3D 路径 +``` + +### 编译项目 +```bash +cmake --build . --config Release +# 或 +cmake --build . --config Debug +``` + +--- + +## 依赖项总结 + +| 依赖项 | 版本要求 | 用途 | +|--------|---------|------| +| CMake | ≥ 3.10 | 构建系统 | +| C++ | C++17 | 编程语言标准 | +| Qt6 | Widgets 组件 | GUI 框架 | +| OpenCV | 4.x | 图像处理 | +| Open3D | 0.17+ | 3D点云处理 | +| Percipio SDK | tycam.dll | 相机驱动 | +| MSVC | VS2022 (v143) | 编译器 | + +--- + +## 维护建议 + +1. **环境一致性**: 确保所有依赖项(Qt, OpenCV, Open3D)都是使用 MSVC 编译的 x64 版本。 +2. **DLL 管理**: 运行时确保所有必要的 DLL 都在可执行文件目录下。 +3. **版本检测**: 保持 Open3D 和 OpenCV 版本的一致性,避免 ABI 冲突。 + +--- + +*文档更新时间: 2025-12-19* diff --git a/docs/project_architecture.md b/docs/project_architecture.md new file mode 100644 index 0000000..059ac4a --- /dev/null +++ b/docs/project_architecture.md @@ -0,0 +1,513 @@ +# 项目架构及调用关系文档 + +## 1. 
系统概述 + +本系统是一个基于立体视觉的仓库巡检图像采集与处理系统。它集成了图漾(Percipio)工业深度相机SDK和海康(MVS)工业2D相机SDK进行多相机图像采集,使用OpenCV进行图像处理,Qt6作为用户界面框架,并通过Redis与外部系统(如WMS仓库管理系统、机器人控制系统)进行通信和任务调度。 + +系统主要功能包括: +- **多相机统一管理**:同时支持深度相机(Percipio)和2D相机(MVS)的数据采集 +- **实时图像预览与状态监控**:GUI界面实时显示相机图像,支持深度图伪彩色显示 +- **基于Redis的任务触发与结果上报**:支持跨数据库的任务监听和结果写入 +- **多种检测算法**: + - 货位占用检测(Flag 1):基于2D图像的目标检测 + - 托盘位置偏移检测(Flag 2):基于深度数据的3D位置计算 + - 横梁/立柱变形检测(Flag 3):基于深度数据的结构变形测量 + - 视觉盘点检测(Flag 4):基于Halcon的QR码识别,支持连续扫描和去重 + - 盘点停止信号(Flag 5):停止Flag 4的连续扫描循环 +- **智能相机分配**:根据任务类型自动选择合适的相机设备 +- **系统配置管理与日志记录**:支持参数持久化、实时日志显示和错误处理 + +## 2. 目录结构说明 + +```text +scripts/ # 批处理脚本 (Redis数据库配置、模拟WMS任务等) +docs/ # 项目文档 +├── project_architecture.md # 项目架构文档 (本文档) +├── project_class_interaction.md # 类交互关系文档 +└── cmake_configuration_summary.md # CMake构建配置文档 + +image_capture/ +├── CMakeLists.txt # 主构建配置文件 +├── cmake/ # CMake模块配置 +│ ├── CompilerOptions.cmake # 编译器选项配置 +│ ├── Dependencies.cmake # 依赖项管理 (Qt6, OpenCV, Open3D) +│ └── PercipioSDK.cmake # Percipio相机SDK配置 +├── config.json # 系统配置文件 (相机参数、算法阈值、Redis配置等) +└── src/ + ├── algorithm/ # 核心算法库 + │ ├── core/ # 算法基类与结果定义 + │ │ ├── detection_base.h/cpp # 检测算法基类 + │ │ └── detection_result.h/cpp # 检测结果数据结构 + │ ├── detections/ # 具体检测算法实现 + │ │ ├── slot_occupancy/ # 货位占用检测 + │ │ ├── pallet_offset/ # 托盘偏移检测 + │ │ ├── beam_rack_deflection/ # 横梁立柱变形检测 + │ │ └── visual_inventory/ # 视觉盘点检测 + │ └── utils/ # 图像处理工具 + ├── camera/ # 相机驱动层 + │ ├── ty_multi_camera_capture.h/cpp # 图漾(Percipio)深度相机封装 + │ └── mvs_multi_camera_capture.h/cpp # 海康(MVS)2D相机封装 + ├── common/ # 通用设施 + │ ├── config_manager.h/cpp # 配置管理单例 (JSON配置加载/保存) + │ ├── log_manager.h/cpp # 日志管理 (spdlog封装) + │ └── log_streambuf.h # std::cout重定向到GUI + ├── device/ # 硬件设备管理 + │ └── device_manager.h/cpp # 相机设备单例管理 (统一设备接口) + ├── gui/ # 用户界面 (Qt6) + │ └── mainwindow.h/cpp/ui # 主窗口实现 (实时预览+设置界面) + ├── redis/ # 通信模块 + │ └── redis_communicator.h/cpp # Redis客户端封装 (跨数据库支持) + ├── task/ # 任务调度 + │ └── task_manager.h/cpp # 任务管理器 (队列+线程+算法调度) + ├── vision/ # 
系统控制 + │ └── vision_controller.h/cpp # 顶层控制器 (Redis+Task协调) + ├── common_types.h # 通用数据类型 (Point3D, CameraIntrinsics等) + ├── tools/ # 工具程序目录 + │ ├── calibration_tool/ # 相机标定工具 + │ ├── slot_algo_tuner/ # 货位算法调参工具 + │ └── intrinsic_dumper/ # 相机内参导出工具 + └── main.cpp # 程序入口 +``` + +## 3. 核心架构设计 + +系统采用分层架构设计,各模块职责明确: + +- **展示层 (Presentation)**: `MainWindow` 负责Qt6界面显示、实时相机预览、手动控制、参数配置及日志展示。 +- **控制层 (Control)**: `VisionController` 作为系统级控制器,负责服务的启动/停止,协调Redis通信和任务管理,使用回调机制解耦模块间依赖。 +- **业务逻辑层 (Business Logic)**: `TaskManager` 负责任务队列管理、算法调度和结果处理;`DeviceManager` 作为硬件资源的统一访问点(单例模式)。 +- **算法层 (Algorithm)**: 提供具体的视觉检测功能,所有算法继承自 `DetectionBase`,支持统一的 `execute()` 接口。 +- **基础设施层 (Infrastructure)**: `CameraCapture` 封装底层相机SDK调用,`RedisCommunicator` 处理跨数据库通信,`ConfigManager` 管理系统配置。 + +### 系统分层架构图 + +```mermaid +graph TB + subgraph Presentation ["展示层 (Presentation)"] + direction TB + GUI[MainWindow] + end + + subgraph Control ["控制层 (Control)"] + VC[VisionController] + end + + subgraph Business ["业务逻辑层 (Business Logic)"] + direction TB + TM[TaskManager] + DM[DeviceManager] + end + + subgraph Algorithm ["算法层 (Algorithm)"] + direction TB + DB[DetectionBase] + Det[Concrete Detections
(Slot, Beam, etc.)] + end + + subgraph Infrastructure ["基础设施层 (Infrastructure)"] + direction TB + Cam[CameraCapture] + Redis[RedisCommunicator] + Conf[ConfigManager] + end + + %% 层级调用关系 + GUI --> VC + VC --> TM + VC --> Redis + + TM --> DM + TM --> DB + DB --> Det + + DM --> Cam + DM --> MVS[MvsMultiCameraCapture] + + %% 跨层辅助调用 + GUI -.-> Conf + TM -.-> Conf + + style Presentation fill:#e1f5fe,stroke:#01579b + style Control fill:#e8f5e9,stroke:#2e7d32 + style Business fill:#fff3e0,stroke:#ef6c00 + style Algorithm fill:#f3e5f5,stroke:#7b1fa2 + style Infrastructure fill:#eceff1,stroke:#455a64 +``` + +### 系统类图 +```mermaid +classDiagram + class MainWindow { + +VisionController visionController_ + +QTimer imageTimer_ + +updateImage() + +onSaveSettings() + +showLogMessage() + } + + class VisionController { + +shared_ptr redis_comm_ + +shared_ptr redis_result_comm_ + +shared_ptr task_manager_ + +initialize() + +start() + +stop() + -onTaskReceived() + } + + class DeviceManager { + <<Singleton>> + +shared_ptr capture_ + +unique_ptr mvs_cameras_ + +initialize() + +startAll() + +getLatestImages() + +computePointCloud() + +get2DCameraImage() + } + + class TaskManager { + +queue task_queue_ + +map detectors_ + +thread execution_thread_ + +handleTask() + +executeDetectionTask() + -executeVisualInventoryLoop() + -processResult() + -addWarningAlarmSignals() + } + + class CameraCapture { + +vector cameras_ + +getLatestImages() + +computePointCloud() + +start() + -captureThreadFunc() + } + + class MvsMultiCameraCapture { + +vector cameras_ + +getLatestImage() + +start() + } + + class RedisCommunicator { + +connect() + +startListening() + +writeString() + +setTaskCallback() + } + + class ConfigManager { + <<Singleton>> + +json config_data_ + +loadConfig() + +saveConfig() + +getValue() + } + + class DetectionBase { + <<abstract>> + +execute(depth, color, side, result, point_cloud, beam_length) + } + + class SlotOccupancyDetection { + +execute() + } + + class PalletOffsetDetection { + +execute() + } + + class 
BeamRackDeflectionDetection { + +execute() + } + + class VisualInventoryDetection { + +execute() + } + + MainWindow --> VisionController : 拥有并管理 + VisionController --> RedisCommunicator : 管理 (任务监听) + VisionController --> TaskManager : 分发任务 + RedisCommunicator --> VisionController : 回调通知 (onTaskReceived) + + VisionController ..> DeviceManager : 依赖(全局单例) + TaskManager ..> DeviceManager : 获取图像数据 (Dependency) + DeviceManager --> CameraCapture : 拥有 (深度相机) + DeviceManager --> MvsMultiCameraCapture : 拥有 (2D相机) + + TaskManager --> DetectionBase : 调用算法 + DetectionBase <|-- SlotOccupancyDetection : 继承 + DetectionBase <|-- PalletOffsetDetection : 继承 + DetectionBase <|-- BeamRackDeflectionDetection : 继承 + DetectionBase <|-- VisualInventoryDetection : 继承 + + MainWindow ..> ConfigManager : 读写配置 (Dependency) + TaskManager ..> ConfigManager : 读取参数 (Dependency) + MainWindow ..> DeviceManager : 图像显示 (Dependency) +``` + +## 4. 关键模块详解 + +### 4.1 GUI与主入口 (MainWindow) +- **职责**: Qt6应用程序主窗口,负责UI渲染、用户交互、参数配置、实时预览及日志展示。 +- **调用关系**: + - 程序启动时创建 `VisionController` 并初始化系统。 + - 通过 `QTimer` (30FPS) 定期从 `DeviceManager` 获取最新图像更新界面显示。 + - **实时预览**: 支持深度图伪彩色显示和彩色图显示,带自适应缩放。 + - **设置界面**: Settings Tab提供完整的算法参数配置,包括: + - Beam/Rack Deflection: 横梁/立柱变形检测阈值和ROI配置 + - Pallet Offset: 托盘位置偏移检测参数 + - 系统配置: Redis连接参数、相机设置等 + - **日志显示**: 通过 `LogStreamBuf` 将 `std::cout/cerr` 重定向到GUI日志窗口。 + - 通过 `ConfigManager` 加载和保存 `config.json` 配置,支持热重载。 + +### 4.2 视觉控制器 (VisionController) +- **职责**: 系统的核心控制器,协调Redis通信和任务管理,支持无头模式运行。 +- **架构特点**: + - 使用智能指针管理 `RedisCommunicator` 和 `TaskManager` 生命周期。 + - 支持跨数据库Redis操作:任务监听DB(输入)和结果写入DB(输出)。 + - 通过回调机制实现模块解耦,避免循环依赖。 +- **工作流程**: + 1. `initialize()`: 创建并初始化两个Redis连接器(任务DB和结果DB)。 + 2. 初始化 `TaskManager`,传入Redis连接器用于结果写入和任务状态清空。 + 3. `start()`: 启动Redis任务监听线程,设置任务接收回调。 + 4. 
`onTaskReceived()`: 收到Redis任务时,通过回调转发给 `TaskManager::handleTask()`。 + +### 4.3 任务管理 (TaskManager) +- **职责**: 任务队列管理、算法调度、结果处理和跨线程执行的核心业务逻辑处理器。 +- **架构特点**: + - **异步处理**: 使用任务队列 + 独立执行线程,避免阻塞Redis监听和GUI。 + - **相机智能分配**: 根据任务Flag自动选择合适的相机设备和数据类型。 + - **去重机制**: Flag 4视觉盘点支持连续扫描和QR码去重。 + - **状态管理**: 提供任务执行状态查询接口,支持外部监控。 +- **工作流**: + 1. `handleTask()`: 接收Redis任务,加入线程安全的任务队列。 + 2. `taskExecutionThreadFunc()`: 后台线程持续处理队列任务。 + 3. **相机选择**: 根据Flag选择相机: + - Flag 1: MVS 2D相机 (SN: DA8743029左/DA8742900右) + - Flag 2/3: Percipio深度相机 (SN: 207000146458左/207000146703右) + - Flag 4: MVS 2D相机 (SN: DA8789631) + 连续扫描循环 + 4. **数据获取**: 调用 `DeviceManager` 获取图像,Flag 2/3时生成点云。 + 5. **算法执行**: 调用对应的 `DetectionBase::execute()` 方法。 + 6. **结果处理**: `processResult()` 格式化JSON、计算警告/报警、写入Redis结果DB。 + +### 4.4 设备管理 (DeviceManager) +- **职责**: 多类型相机的统一管理接口,全系统硬件资源的单例访问点。 +- **架构特点**: + - **双SDK支持**: 同时管理Percipio深度相机和MVS 2D相机。 + - **统一接口**: 提供一致的设备枚举、启动/停止和数据获取接口。 + - **线程安全**: 所有接口都是线程安全的,支持并发访问。 + - **资源管理**: 使用智能指针和RAII确保相机资源正确释放。 +- **功能**: + - `initialize()`: 扫描并初始化所有类型的相机设备。 + - `getLatestImages()`: 统一的图像获取接口,支持深度图+彩色图。 + - `get2DCameraImage()`: 专门的2D相机图像获取接口。 + - `computePointCloud()`: 基于深度图和相机内参计算3D点云。 + - **相机索引映射**: 内部管理深度相机和2D相机的索引映射。 + +### 4.5 相机驱动层 +- **Percipio深度相机** (`ty_multi_camera_capture.cpp`): + - 基于图漾工业相机SDK,支持TY系列深度相机。 + - 为每个相机维护独立采集线程和帧缓冲区。 + - 支持深度图和彩色图同步采集,内部处理时间戳对齐。 + - **点云计算**: 集成 `TYMapDepthImageToPoint3d`,利用相机标定参数生成精确3D点云。 + - 自动畸变校正和深度数据滤波。 + +- **MVS 2D相机** (`mvs_multi_camera_capture.cpp`): + - 基于海康工业相机SDK,支持MV系列2D相机。 + - 支持连续采集模式,内部缓冲区管理。 + - 提供高帧率彩色图像采集,适用于快速检测场景。 + - 支持相机序列号匹配,便于多相机场景下的设备识别。 + +### 4.6 配置管理 (ConfigManager) +- **职责**: 管理 `config.json` 文件,集中管理系统配置。 +- **管理内容**: + - Redis 连接信息。 + - 算法阈值 (Beam/Rack, Pallet Offset 等)。 + - ROI (Region of Interest) 坐标点。 + - 系统通用参数 (最小/最大深度等)。 +- **特性**: 单例模式,支持热加载(部分参数)和持久化保存。程序启动时由 `MainWindow` 加载,确保算法使用持久化的用户设置。GUI中的Settings Tab直接操作此模块。 + +## 5. 系统执行与数据流 + +### 5.1 初始化流程 +1. **程序启动**: `main.cpp` 创建Qt6应用程序,设置Fusion样式。 +2. 
**MainWindow构造**: + - 初始化Qt6 UI界面(主窗口 + Settings选项卡)。 + - **配置加载**: 调用 `ConfigManager::loadConfig()` 从 `config.json` 加载系统配置。 + - **设备初始化**: 调用 `DeviceManager::initialize()` 扫描Percipio和MVS相机。 + - **控制器创建**: 实例化 `VisionController`,传入Redis配置参数。 + - **Redis初始化**: `VisionController::initialize()` 创建任务监听和结果写入的Redis连接器。 + - **定时器启动**: 启动30FPS的 `QTimer` 用于实时图像预览。 +3. **设备启动**: 调用 `DeviceManager::startAll()` 启动所有相机采集线程。 +4. **服务启动**: 调用 `VisionController::start()` 开启Redis监听,确保设备就绪后再接收任务。 + +### 5.2 自动任务执行流 (Redis触发) + +```mermaid +sequenceDiagram + participant WMS as WMS/外部系统 + participant Redis_Task as Redis_Task_DB + participant RC_Task as RedisCommunicator_Task + participant VC as VisionController + participant TM as TaskManager + participant DM as DeviceManager + participant Algo as DetectionAlgorithm + participant RC_Result as RedisCommunicator_Result + participant Redis_Result as Redis_Result_DB + + WMS->>Redis_Task: SET vision_task_flag=1,side=left,time=xxx + Redis_Task->>RC_Task: Key change notification + RC_Task->>VC: onTaskReceived(task_data) + VC->>TM: handleTask(task_data) + + activate TM + TM->>TM: Queue task (async) + TM->>DM: getLatestImages() or get2DCameraImage() + DM-->>TM: images (depth+color or 2D only) + + alt Flag 2/3 (需要点云) + TM->>DM: computePointCloud(depth) + DM-->>TM: point_cloud (vector) + end + + TM->>Algo: execute(images, point_cloud, ...) + activate Algo + Algo-->>TM: DetectionResult + deactivate Algo + + TM->>TM: processResult() + addWarningAlarmSignals() + TM->>RC_Result: writeDetectionResult(json_map) + RC_Result->>Redis_Result: MSET key1=value1 key2=value2 ... + + TM->>RC_Task: writeString(vision_task_flag, "0") + TM->>RC_Task: writeString(vision_task_side, "") + TM->>RC_Task: writeString(vision_task_time, "") + RC_Task->>Redis_Task: Clear task flags + deactivate TM +``` + +1. **外部触发**: WMS系统通过Redis Task DB发布任务(设置 `vision_task_flag`、`side`、`time`)。 +2. **异步接收**: `RedisCommunicator_Task` 监听Task DB,触发回调给 `VisionController`。 +3. 
**任务队列**: `VisionController` 将任务加入 `TaskManager` 的线程安全队列。 +4. **后台执行**: `TaskManager` 执行线程处理任务,根据Flag选择相机和算法: + - **Flag 1**: MVS 2D相机 → `SlotOccupancyDetection` + - **Flag 2**: Percipio深度相机 → `PalletOffsetDetection` (带点云) + - **Flag 3**: Percipio深度相机 → `BeamRackDeflectionDetection` (带点云) + - **Flag 4**: MVS 2D相机 → `VisualInventoryDetection` (连续循环+QR识别) +5. **智能数据获取**: 根据任务类型调用相应的 `DeviceManager` 接口。 +6. **结果处理**: 计算警告/报警信号,格式化JSON结果。 +7. **跨DB写入**: 结果写入Redis Result DB,任务状态清理写入Task DB。 + +### 5.3 实时监控执行流 (GUI) + +```mermaid +sequenceDiagram + participant Timer as QTimer (30FPS) + participant MainWin as MainWindow + participant DM as DeviceManager + + loop 每33ms + Timer->>MainWin: timeout() + activate MainWin + MainWin->>DM: getLatestImages(0) + get2DCameraImage(0) + DM-->>MainWin: depth_img, color_img, mvs_img + + alt 深度相机活跃 + MainWin->>MainWin: applyColorMap(depth_img) → 伪彩色显示 + else 2D相机活跃 + MainWin->>MainWin: 显示彩色图像 + end + + MainWin->>MainWin: MatToQImage() + scaleToFit() + MainWin->>MainWin: update QLabel displays + deactivate MainWin + end +``` + +1. **高频刷新**: `QTimer` 以30FPS触发 `updateImage()`,确保流畅的实时预览。 +2. **多相机预览**: 同时获取深度相机和2D相机的最新图像,支持混合显示。 +3. **图像处理**: 深度图应用伪彩色映射,便于观察深度信息;彩色图直接显示。 +4. **自适应渲染**: OpenCV Mat转换为QImage,支持窗口大小自适应缩放。 +5. **状态同步**: 图像显示与任务执行异步进行,不影响检测性能。 + +## 6. 异常处理与日志 +- **日志**: 使用 `LogManager` 和 `spdlog` (如果集成) 或标准输出。 +- **重定向**: `LogStreamBuf` 将 `std::cout/cerr` 重定向到GUI的日志窗口,方便现场调试。 +- **错误恢复**: 相机掉线重连机制(在驱动层实现或计划中)。 + +## 7. 
检测算法详解 + +### 7.1 算法框架 (DetectionBase) +所有检测算法继承自 `DetectionBase` 抽象基类,提供统一的接口: +```cpp +virtual bool execute(const cv::Mat& depth_img, + const cv::Mat& color_img, + const std::string& side, + DetectionResult& result, + const std::vector<Point3D>* point_cloud = nullptr, + double beam_length = 0.0) = 0; +``` + +### 7.2 具体算法实现 + +#### Flag 1: 货位占用检测 (SlotOccupancyDetection) +- **输入**: 2D彩色图像 (MVS相机) +- **算法**: 基于图像处理的目标检测和位置判断 +- **输出**: 货位占用状态 (occupied/free) +- **相机**: DA8743029 (左侧), DA8742900 (右侧) + +#### Flag 2: 托盘位置偏移检测 (PalletOffsetDetection) +- **输入**: 深度图 + 彩色图 + 3D点云 +- **算法**: 基于点云的3D位置计算,检测托盘相对于基准位置的偏移 +- **输出**: 左右偏移(mm)、前后偏移(mm)、插孔变形(mm)、旋转角度(°) +- **相机**: 207000146458 (左侧), 207000146703 (右侧) +- **警告/报警**: 基于阈值的四级判断 (正常/警告/报警) + +#### Flag 3: 横梁/立柱变形检测 (BeamRackDeflectionDetection) +- **输入**: 深度图 + 彩色图 + 3D点云 +- **算法**: 基于点云的结构变形测量 +- **输出**: 横梁弯曲量(mm)、立柱弯曲量(mm) +- **相机**: 207000146458 (左侧), 207000146703 (右侧) +- **警告/报警**: 基于阈值的四级判断 + +#### Flag 4: 视觉盘点检测 (VisualInventoryDetection) +- **输入**: 2D彩色图像 (MVS相机) +- **算法**: 基于Halcon的QR码识别,支持连续扫描和去重 +- **特殊机制**: 循环执行直到收到Flag 5停止信号,支持实时去重 +- **输出**: JSON格式的条码列表 `{"side": ["BOX001", "BOX002", ...]}` +- **相机**: DA8789631 (专用盘点相机) + +#### Flag 5: 盘点停止信号 +- **功能**: 停止Flag 4的连续扫描循环 +- **无算法执行**: 仅作为控制信号 + +### 7.3 相机分配策略 +系统根据任务Flag智能选择相机: + +| Flag | 相机类型 | 序列号 | 位置 | 数据类型 | +|------|---------|--------|------|----------| +| 1 | MVS 2D | DA8743029 / DA8742900 | 左/右 | 彩色图 | +| 2 | Percipio深度 | 207000146458 / 207000146703 | 左/右 | 深度+彩色+点云 | +| 3 | Percipio深度 | 207000146458 / 207000146703 | 左/右 | 深度+彩色+点云 | +| 4 | MVS 2D | DA8789631 | 盘点专用 | 彩色图 | + +## 8. 
编译与构建 +- **构建系统**: CMake 3.10+ +- **编程语言**: C++17 +- **目标平台**: Windows 10/11 (MSVC 2022 v143) +- **主要依赖**: + - **Qt6**: Widgets组件 (GUI框架) + - **OpenCV 4.x**: 图像处理和计算机视觉 + - **Open3D 0.17+**: 3D点云处理 + - **Percipio SDK**: 图漾工业相机驱动 + - **MVS SDK**: 海康工业相机驱动 + - **Redis C++ Client**: hiredis + redis-plus-plus (Redis通信) +- **可选依赖**: Halcon (用于QR码识别,在Flag 4中使用) +- **构建流程**: 标准CMake流程,支持Release/Debug配置 + +--- + +*文档更新时间: 2025-01-06* diff --git a/docs/project_class_interaction.md b/docs/project_class_interaction.md new file mode 100644 index 0000000..5e57cb8 --- /dev/null +++ b/docs/project_class_interaction.md @@ -0,0 +1,133 @@ +# 项目功能类调用关系说明 (Project Class Interaction Documentation) + +本主要介绍 `image_capture` 项目核心功能类之间的调用关系、数据流向以及模块划分。 + +## 1. 核心模块概览 (Core Modules Overview) + +系统主要由以下几个核心模块组成: + +* **GUI 模块 (`MainWindow`)**: 程序的入口与界面显示,负责系统初始化。 +* **Vision 控制器 (`VisionController`)**: 系统的核心中枢,协调通信与任务管理。 +* **任务管理 (`TaskManager`)**: 负责具体的业务逻辑执行、算法调度和结果处理。 +* **设备管理 (`DeviceManager`)**: 负责相机等硬件设备的统一管理(单例模式)。 +* **通信模块 (`RedisCommunicator`)**: 负责与外部系统(如 WMS)通过 Redis 交互。 +* **算法模块 (`DetectionBase` 及其子类)**: 具体的图像处理算法。 + +## 2. 类调用关系图 (Class Interaction Diagram) + +```mermaid +classDiagram + class MainWindow { + +VisionController vision_controller + +init() + } + + class VisionController { + -shared_ptr redis_comm + -shared_ptr task_manager + +start() + +stop() + -onTaskReceived() + } + + class RedisCommunicator { + +startListening() + +writeDetectionResult() + +setTaskCallback() + } + + class TaskManager { + -queue task_queue + -map detectors + +handleTask() + -executeDetectionTask() + -getDetector(flag) + } + + class DeviceManager { + <> + +getInstance() + +getLatestImages() + +startAll() + } + + class DetectionBase { + <> + +execute(depth, color, ...) 
+ } + + class ConcreteDetection { + +execute() + } + + MainWindow --> VisionController : 拥有并管理 + VisionController --> RedisCommunicator : 管理 (监听/发送) + VisionController --> TaskManager : 分发任务 + RedisCommunicator --> VisionController : 回调通知 (Callback) + TaskManager ..> DeviceManager : 获取图像数据 (Dependency) + TaskManager --> DetectionBase : 调用算法 + DetectionBase <|-- ConcreteDetection : 继承 +``` + +## 3. 详细调用流程 (Detailed Call Flow) + +### 3.1 系统初始化与启动 (Initialization & Startup) +1. **Entry Point**: `main.cpp` 创建 `QApplication` 并实例化 `MainWindow`。 +2. **MainWindow**: + * 构造函数中初始化界面。 + * 调用 `DeviceManager::getInstance().initialize()` 扫描并初始化相机设备。 + * 实例化 `VisionController` 成员变量。 + * 调用 `VisionController::initialize()`,配置 Redis 连接参数。 + * 调用 `VisionController::start()` 启动后台服务。 +3. **VisionController**: + * 在 `start()` 中调用 `RedisCommunicator::startListening()` 开启监听线程。 + +### 3.2 任务触发与执行 (Task Trigger & Execution) +当 Redis 中 `vision_task_flag` 发生变化时,流程如下: + +1. **RedisCommunicator**: + * 监听线程检测到 Flag 变化。 + * 通过回调函数 `VisionController::onTaskReceived` 通知控制器。 +2. **VisionController**: + * `onTaskReceived` 将接收到的 `RedisTaskData` 传递给 `TaskManager::handleTask`。 +3. **TaskManager**: + * `handleTask` 将任务推入内部的任务队列 `task_queue_`。 + * 工作线程 `taskExecutionThreadFunc` 从队列中取出任务。 + * **获取图像**: 调用 `DeviceManager::getInstance().getLatestImages(...)` 获取当前最新的深度图和彩色图。 + * **选择算法**: 根据任务 Flag 调用 `getDetector(flag)` 获取对应的算法实例(如 `PalletOffsetDetection`)。 + * **执行算法**: 调用 `detector->execute(depth_img, color_img, ...)` 进行计算。 + * **结果封装**: 将算法返回的数据填充到 `DetectionResult` 结构体中。 + +### 3.3 结果处理 (Result Handling) +算法执行完成后: + +1. **TaskManager**: + * 调用 `processResult(result)`。 + * 该函数会格式化结果为 JSON 字符串,并计算报警/警告状态。 + * 调用 `redis_result_comm_->writeDetectionResult(json)` 将结果写入 Redis。 +2. **RedisCommunicator**: + * 执行 Redis SET 命令,将 JSON 数据写入指定的 Key。 + +## 4. 
关键类说明 (Key Class Descriptions) + +### VisionController (`src/vision/vision_controller.h`) +* **职责**: 作为系统的外观(Facade),对外提供统一的 start/stop 接口,对内协调 Redis 和 TaskManager。 +* **特点**: 它是 MainWindow 唯一直接交互的非 GUI 业务类。 + +### DeviceManager (`src/device/device_manager.h`) +* **职责**: 屏蔽底层相机 SDK(Percipio / MVS)的差异,提供统一的图像获取接口。 +* **模式**: 单例模式 (Singleton)。确保系统中只有一份硬件控制实例。 + +### TaskManager (`src/task/task_manager.h`) +* **职责**: 真正的“大脑”。负责任务的缓冲(队列)、图像获取、算法调度和结果回传。 +* **并发**: 拥有独立的执行线程,避免阻塞 Redis 监听线程或 GUI 线程。 + +### RedisCommunicator (`src/redis/redis_communicator.h`) +* **职责**: 封装 Redis 的底层 socket 操作,提供易用的读写接口和异步监听机制。 + +### DetectionBase (`src/algorithm/core/detection_base.h`) +* **职责**: 定义所有检测算法的统一接口 `execute`。 +* **扩展**: 新增算法只需继承此类并在 `TaskManager` 中注册即可。 + +--- +*文档生成时间: 2025-12-29* diff --git a/image_capture/CMakeLists.txt b/image_capture/CMakeLists.txt new file mode 100644 index 0000000..627e553 --- /dev/null +++ b/image_capture/CMakeLists.txt @@ -0,0 +1,251 @@ +cmake_minimum_required(VERSION 3.10) + +# 支持 MSVC +# 注意:配置 CMake 时请选择合适的生成器(例如 "Visual Studio 17 2022" 或 "Ninja") + +project(image_capture LANGUAGES CXX) + +# 检查是否使用 MSVC 风格的编译器 +if(NOT (MSVC OR CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")) + message(FATAL_ERROR "This project requires MSVC (Visual Studio) compiler. 
Please use Ninja with MSVC or Visual Studio generator.") +endif() + +# ============================================================================ +# 输出目录 +# ============================================================================ +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) +set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib) + +# 生成 compile_commands.json 文件,供 IntelliSense 使用 +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +# ============================================================================ +# CMake 模块路径 +# ============================================================================ +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake") +include(CompilerOptions) + +# ============================================================================ +# 依赖项 (Qt6, OpenCV) +# ============================================================================ +include(Dependencies) + +# ============================================================================ +# 相机 SDK 配置 +# ============================================================================ +include(PercipioSDK) + +# ============================================================================ +# 算法库 +# ============================================================================ +add_library(algorithm_lib STATIC + src/algorithm/core/detection_base.cpp + src/algorithm/core/detection_result.cpp + src/algorithm/utils/image_processor.cpp + + src/algorithm/detections/slot_occupancy/slot_occupancy_detection.cpp + src/algorithm/detections/pallet_offset/pallet_offset_detection.cpp + src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.cpp + src/algorithm/detections/visual_inventory/visual_inventory_detection.cpp + +) + +target_link_libraries(algorithm_lib PUBLIC + ${OpenCV_LIBS} + Open3D::Open3D + Qt6::Core + ${HALCON_LIBRARIES} +) + +target_include_directories(algorithm_lib PUBLIC + 
${HALCON_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} + src + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio/common +) + +target_link_directories(algorithm_lib PUBLIC ${OpenCV_LIB_DIRS}) + +# ============================================================================ +# 主可执行文件 +# ============================================================================ +set(SOURCES + src/main.cpp + src/camera/ty_multi_camera_capture.cpp + src/camera/mvs_multi_camera_capture.cpp + src/device/device_manager.cpp + src/redis/redis_communicator.cpp + src/task/task_manager.cpp + src/vision/vision_controller.cpp + src/common/log_manager.cpp + src/common/config_manager.cpp + src/gui/mainwindow.cpp + src/gui/mainwindow.h + src/gui/mainwindow.ui + src/gui/settings_widget.cpp + src/gui/settings_widget.h +) + +add_executable(${PROJECT_NAME} WIN32 ${SOURCES}) + +target_include_directories(${PROJECT_NAME} PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/mvs/Includes + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} # Qt AUTOUIC 生成的头文件 +) + +target_link_libraries(${PROJECT_NAME} PRIVATE + algorithm_lib + cpp_api_lib + tycam + ${OpenCV_LIBS} + Qt6::Core + Qt6::Widgets + ws2_32 + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/mvs/Libraries/win64/MvCameraControl.lib +) + +target_link_directories(${PROJECT_NAME} PRIVATE + ${OpenCV_LIB_DIRS} + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio/lib/win/x64 +) + +if(Open3D_RUNTIME_DLLS) + foreach(DLL_FILE ${Open3D_RUNTIME_DLLS}) + get_filename_component(DLL_NAME "${DLL_FILE}" NAME) + add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + "${DLL_FILE}" + "$" + COMMENT "Copying runtime dependency: ${DLL_NAME}" + ) + endforeach() +endif() + +# Copy tycam.dll to executable directory +add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + "${CAMPORT3_LIB_DIR}/tycam.dll" + "$" + COMMENT "Copying tycam.dll to executable directory" +) + 
+# Copy Halcon DLLs +if(HALCON_ROOT) + set(HALCON_BIN_DIR "${HALCON_ROOT}/bin/x64-win64") + # Verify directory exists + if(EXISTS "${HALCON_BIN_DIR}") + set(HALCON_DLLS "halcon.dll" "halconcpp.dll") + foreach(DLL_NAME ${HALCON_DLLS}) + add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + "${HALCON_BIN_DIR}/${DLL_NAME}" + "$" + COMMENT "Copying Halcon DLL: ${DLL_NAME}" + ) + endforeach() + else() + message(WARNING "Halcon bin directory not found at: ${HALCON_BIN_DIR}. DLLs will not be copied.") + endif() +endif() + + + + +# ============================================================================ +# 工具链 +# ============================================================================ +add_executable(slot_algo_tuner WIN32 + src/tools/slot_algo_tuner/main.cpp + src/tools/slot_algo_tuner/tuner_widget.cpp + src/tools/slot_algo_tuner/tuner_widget.h +) + +target_include_directories(slot_algo_tuner PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} +) + +target_link_libraries(slot_algo_tuner PRIVATE + ${OpenCV_LIBS} + Qt6::Core + Qt6::Widgets +) + +target_link_directories(slot_algo_tuner PRIVATE ${OpenCV_LIB_DIRS}) + +add_executable(calibration_tool WIN32 + src/tools/calibration_tool/main.cpp + src/tools/calibration_tool/calibration_widget.cpp + src/tools/calibration_tool/calibration_widget.h +) + +target_include_directories(calibration_tool PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio/include + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/mvs/Includes +) + +target_link_libraries(calibration_tool PRIVATE + ${OpenCV_LIBS} + Qt6::Core + Qt6::Widgets + Open3D::Open3D + tycam +) + +target_compile_definitions(calibration_tool PRIVATE NOMINMAX) + +target_link_directories(calibration_tool PRIVATE ${OpenCV_LIB_DIRS}) + +# Intrinsic Dumper Tool +add_executable(intrinsic_dumper + 
src/tools/intrinsic_dumper/main.cpp +) + +target_include_directories(intrinsic_dumper PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio/include +) + +target_link_libraries(intrinsic_dumper PRIVATE + Qt6::Core + tycam +) + +# Reference Generator (Teach Tool) +add_executable(generate_reference + src/tools/generate_reference/main.cpp + src/device/device_manager.cpp + src/camera/ty_multi_camera_capture.cpp + src/camera/mvs_multi_camera_capture.cpp + src/common/log_manager.cpp + src/common/config_manager.cpp +) + +target_include_directories(generate_reference PRIVATE + ${CMAKE_CURRENT_SOURCE_DIR}/src + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_BINARY_DIR} + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio/include + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/mvs/Includes +) + +target_link_libraries(generate_reference PRIVATE + algorithm_lib + cpp_api_lib + ${OpenCV_LIBS} + Qt6::Core + Qt6::Widgets + tycam + ${CMAKE_CURRENT_SOURCE_DIR}/third_party/mvs/Libraries/win64/MvCameraControl.lib +) + +target_link_directories(generate_reference PRIVATE ${OpenCV_LIB_DIRS}) diff --git a/image_capture/cmake/CompilerOptions.cmake b/image_capture/cmake/CompilerOptions.cmake new file mode 100644 index 0000000..a65871d --- /dev/null +++ b/image_capture/cmake/CompilerOptions.cmake @@ -0,0 +1,32 @@ +# C++ Standard +set(CMAKE_CXX_STANDARD 17) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +# Output Directories +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + +# Generate compile_commands.json +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +# Definitions +add_definitions(-DOPENCV_DEPENDENCIES) + +# Qt6 Setup (Global) +set(CMAKE_AUTOMOC ON) +set(CMAKE_AUTORCC ON) +set(CMAKE_AUTOUIC ON) + +# Compiler Specific Options +if(MSVC) + # MSVC specific options + add_compile_options(/utf-8) # Fix C4819 encoding warning + add_compile_options(/W3) # Warning level 3 + add_compile_options(/MP) # Multi-processor 
compilation + add_definitions(-D_CRT_SECURE_NO_WARNINGS) # Suppress C4996 deprecated warnings + + add_compile_options($<$:/O2>) # Maximize speed + add_compile_options($<$:/Ob2>) # Inline function expansion + + + +endif() diff --git a/image_capture/cmake/Dependencies.cmake b/image_capture/cmake/Dependencies.cmake new file mode 100644 index 0000000..72a919c --- /dev/null +++ b/image_capture/cmake/Dependencies.cmake @@ -0,0 +1,129 @@ +# Qt6 +if(NOT Qt6_DIR AND NOT ENV{Qt6_DIR} AND NOT CMAKE_PREFIX_PATH) + message(WARNING "Qt6 not found in environment. Please set CMAKE_PREFIX_PATH or Qt6_DIR.") +endif() + +find_package(Qt6 REQUIRED COMPONENTS Widgets) + +# OpenCV +if(DEFINED ENV{OpenCV_DIR}) + set(OpenCV_DIR $ENV{OpenCV_DIR}) + message(STATUS "Using OpenCV_DIR from environment: ${OpenCV_DIR}") +elseif(NOT OpenCV_DIR) + message(STATUS "OpenCV_DIR not set, trying to find OpenCV in standard locations...") + set(LEGACY_OPENCV_PATH "D:/enviroments/OPencv4.55/OPencv4.55_MSVC/opencv/build/x64/vc15/lib") + if(EXISTS ${LEGACY_OPENCV_PATH}) + set(OpenCV_DIR ${LEGACY_OPENCV_PATH}) + message(STATUS "Found legacy OpenCV path: ${OpenCV_DIR}") + endif() +endif() + +find_package(OpenCV REQUIRED) + +message(STATUS "OpenCV found: ${OpenCV_VERSION}") +message(STATUS "OpenCV libraries: ${OpenCV_LIBS}") +message(STATUS "OpenCV include dirs: ${OpenCV_INCLUDE_DIRS}") + +# Open3D +# Open3D +if(DEFINED ENV{Open3D_DIR}) + set(Open3D_DIR $ENV{Open3D_DIR}) + message(STATUS "Using Open3D_DIR from environment: ${Open3D_DIR}") +elseif(NOT Open3D_DIR) + # Default to 0.18 Release + set(DEFAULT_OPEN3D_PATH "D:/enviroments/Open3d/open3d-devel-windows-amd64-0.18.0-release/CMake") + # Debug path: D:/enviroments/Open3d/open3d-devel-windows-amd64-0.18.0-debug/CMake + + if(EXISTS ${DEFAULT_OPEN3D_PATH}) + set(Open3D_DIR ${DEFAULT_OPEN3D_PATH}) + message(STATUS "Using default Open3D path: ${Open3D_DIR}") + endif() +endif() + +find_package(Open3D REQUIRED) +message(STATUS "Open3D found: ${Open3D_VERSION}") 
+message(STATUS "Open3D DIR: ${Open3D_DIR}") + +# Find Open3D DLL and dependencies (TBB) +# Adjust ROOT calculation based on where Config is found. +get_filename_component(DIR_NAME "${Open3D_DIR}" NAME) +if("${DIR_NAME}" STREQUAL "CMake") + # Structure: root/CMake/Open3DConfig.cmake -> root is up one level + get_filename_component(Open3D_ROOT "${Open3D_DIR}/.." ABSOLUTE) +else() + # Assume standard install: root/lib/cmake/Open3D/Open3DConfig.cmake -> root is up 3 levels + get_filename_component(Open3D_ROOT "${Open3D_DIR}/../../.." ABSOLUTE) +endif() + +set(Open3D_BIN_DIR "${Open3D_ROOT}/bin") +set(Open3D_RUNTIME_DLLS "") + +find_file(Open3D_DLL NAMES Open3D.dll PATHS ${Open3D_BIN_DIR} NO_DEFAULT_PATH) +if(Open3D_DLL) + list(APPEND Open3D_RUNTIME_DLLS ${Open3D_DLL}) + message(STATUS "Found Open3D DLL: ${Open3D_DLL}") +else() + message(WARNING "Open3D DLL not found in ${Open3D_BIN_DIR}. You might need to add it to your PATH manually.") +endif() + +# Find TBB DLLs (tbb.dll or tbb12_debug.dll etc) +# We glob for tbb*.dll but filter based on build type to avoid mixing runtimes +file(GLOB TBB_ALL_DLLS "${Open3D_BIN_DIR}/tbb*.dll") +set(TBB_DLLS ${TBB_ALL_DLLS}) + +# Filter out debug DLLs (ending in _debug.dll or d.dll) +list(FILTER TBB_DLLS EXCLUDE REGEX ".*(_debug|d)\\.dll$") + +if(NOT TBB_DLLS) + # If no release DLLs found, check if we only have debug ones + if(TBB_ALL_DLLS) + message(WARNING "Only Debug TBB DLLs found in ${Open3D_BIN_DIR}. Release build might crash due to ABI mismatch!") + # Fallback: copy everything (dangerous but better than nothing?) 
+ set(TBB_DLLS ${TBB_ALL_DLLS}) + else() + message(WARNING "No TBB DLLs found in ${Open3D_BIN_DIR}.") + endif() +endif() + + +if(TBB_DLLS) + list(APPEND Open3D_RUNTIME_DLLS ${TBB_DLLS}) + message(STATUS "Found TBB DLLs: ${TBB_DLLS}") +endif() + +# Halcon +# Force usage of the user known path if possible, or fallback to environment +set(USER_PROVIDED_HALCON_ROOT "C:/Users/cve/AppData/Local/Programs/MVTec/HALCON-23.11-Progress") + +if(EXISTS "${USER_PROVIDED_HALCON_ROOT}") + set(HALCON_ROOT "${USER_PROVIDED_HALCON_ROOT}") + message(STATUS "Using user provided HALCON_ROOT: ${HALCON_ROOT}") +elseif(DEFINED ENV{HALCONROOT}) + set(HALCON_ROOT $ENV{HALCONROOT}) + file(TO_CMAKE_PATH "${HALCON_ROOT}" HALCON_ROOT) + message(STATUS "Using HALCON_ROOT from environment: ${HALCON_ROOT}") +else() + message(WARNING "HALCONROOT not found.") +endif() + +if(HALCON_ROOT) + set(HALCON_INCLUDE_DIRS + "${HALCON_ROOT}/include" + "${HALCON_ROOT}/include/halconcpp" + ) + + if(WIN32) + set(HALCON_LIB_DIR "${HALCON_ROOT}/lib/x64-win64") + if(NOT EXISTS "${HALCON_LIB_DIR}") + set(HALCON_LIB_DIR "${HALCON_ROOT}/lib") + endif() + + set(HALCON_LIBRARIES + "${HALCON_LIB_DIR}/halcon.lib" + "${HALCON_LIB_DIR}/halconcpp.lib" + ) + endif() + + message(STATUS "Halcon include: ${HALCON_INCLUDE_DIRS}") + message(STATUS "Halcon libs: ${HALCON_LIBRARIES}") +endif() diff --git a/image_capture/cmake/PercipioSDK.cmake b/image_capture/cmake/PercipioSDK.cmake new file mode 100644 index 0000000..380df57 --- /dev/null +++ b/image_capture/cmake/PercipioSDK.cmake @@ -0,0 +1,55 @@ +# Camera SDK Paths +set(CAMPORT3_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/third_party/percipio) +set(CAMPORT3_LIB_DIR ${CAMPORT3_ROOT}/lib/win/x64) + +# Import tycam library (MinGW) +add_library(tycam SHARED IMPORTED) + +if(EXISTS ${CAMPORT3_LIB_DIR}/libtycam.dll.a) + set_target_properties(tycam PROPERTIES + IMPORTED_LOCATION ${CAMPORT3_LIB_DIR}/tycam.dll + IMPORTED_IMPLIB ${CAMPORT3_LIB_DIR}/libtycam.dll.a + ) + message(STATUS "Using 
libtycam.dll.a (MinGW compatible)") +elseif(EXISTS ${CAMPORT3_LIB_DIR}/tycam.lib) + set_target_properties(tycam PROPERTIES + IMPORTED_LOCATION ${CAMPORT3_LIB_DIR}/tycam.dll + IMPORTED_IMPLIB ${CAMPORT3_LIB_DIR}/tycam.lib + ) + message(STATUS "Using tycam.lib (may require conversion to .dll.a if linking fails)") +else() + message(FATAL_ERROR "Neither libtycam.dll.a nor tycam.lib found in ${CAMPORT3_LIB_DIR}") +endif() + +# Static API Library Sources +set(CPP_API_SOURCES + ${CAMPORT3_ROOT}/sample_v2/cpp/Device.cpp + ${CAMPORT3_ROOT}/sample_v2/cpp/Frame.cpp + ${CAMPORT3_ROOT}/common/MatViewer.cpp + ${CAMPORT3_ROOT}/common/TYThread.cpp + ${CAMPORT3_ROOT}/common/crc32.cpp + ${CAMPORT3_ROOT}/common/json11.cpp + ${CAMPORT3_ROOT}/common/ParametersParse.cpp + ${CAMPORT3_ROOT}/common/huffman.cpp + ${CAMPORT3_ROOT}/common/ImageSpeckleFilter.cpp + ${CAMPORT3_ROOT}/common/DepthInpainter.cpp +) + +add_library(cpp_api_lib STATIC ${CPP_API_SOURCES}) + +target_include_directories(cpp_api_lib PUBLIC + ${CAMPORT3_ROOT}/include + ${CAMPORT3_ROOT}/sample_v2/hpp + ${CAMPORT3_ROOT}/common + ${OpenCV_INCLUDE_DIRS} +) + +# Fix for MinGW: Ensure standard C++ headers are found +if(MINGW) + target_include_directories(cpp_api_lib SYSTEM PUBLIC + ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} + ) +endif() + +target_link_libraries(cpp_api_lib PUBLIC ${OpenCV_LIBS}) +target_link_directories(cpp_api_lib PUBLIC ${OpenCV_LIB_DIRS}) diff --git a/image_capture/config.json b/image_capture/config.json new file mode 100644 index 0000000..9a58cbf --- /dev/null +++ b/image_capture/config.json @@ -0,0 +1,130 @@ +{ + "redis": { + "host": "127.0.0.1", + "port": 6379, + "db": 0 + }, + "cameras": { + "depth_enabled": true, + "color_enabled": true, + "mapping": [ + { + "id": "camera_0", + "index": 0 + }, + { + "id": "camera_1", + "index": 1 + }, + { + "id": "camera_2", + "index": 2 + }, + { + "id": "camera_3", + "index": 3 + } + ] + }, + "vision": { + "save_path": "./images", + "log_level": 1 + }, + "algorithms": { + 
"beam_rack_deflection": { + "beam_roi_points": [ + { + "x": 100, + "y": 50 + }, + { + "x": 540, + "y": 80 + }, + { + "x": 540, + "y": 280 + }, + { + "x": 100, + "y": 280 + } + ], + "rack_roi_points": [ + { + "x": 50, + "y": 50 + }, + { + "x": 150, + "y": 50 + }, + { + "x": 150, + "y": 430 + }, + { + "x": 50, + "y": 430 + } + ], + "beam_thresholds": { + "A": -10.0, + "B": -5.0, + "C": 5.0, + "D": 10.0 + }, + "rack_thresholds": { + "A": -6.0, + "B": -3.0, + "C": 3.0, + "D": 6.0 + } + }, + "pallet_offset": { + "offset_lat_mm_thresholds": { + "A": -20.0, + "B": -10.0, + "C": 10.0, + "D": 20.0 + }, + "offset_lon_mm_thresholds": { + "A": -20.0, + "B": -10.0, + "C": 10.0, + "D": 20.0 + }, + "rotation_angle_thresholds": { + "A": -5.0, + "B": -2.5, + "C": 2.5, + "D": 5.0 + }, + "hole_def_mm_left_thresholds": { + "A": -8.0, + "B": -4.0, + "C": 4.0, + "D": 8.0 + }, + "hole_def_mm_right_thresholds": { + "A": -8.0, + "B": -4.0, + "C": 4.0, + "D": 8.0 + } + }, + "slot_occupancy": { + "depth_threshold_mm": 100.0, + "confidence_threshold": 0.8 + }, + "visual_inventory": { + "barcode_confidence_threshold": 0.7, + "roi_enabled": true + }, + "general": { + "min_depth_mm": 800.0, + "max_depth_mm": 3000.0, + "sample_points": 50 + } + } +} \ No newline at end of file diff --git a/image_capture/note.md b/image_capture/note.md new file mode 100644 index 0000000..23251b4 --- /dev/null +++ b/image_capture/note.md @@ -0,0 +1,6 @@ +# 确保在 image_capture 目录下 +cd d:\Git\stereo_warehouse_inspection\image_capture +Remove-Item -Recurse -Force build +# 使用 Visual Studio 生成器重新配置项目: 指定 -G "Visual Studio 17 2022" (根据你的VS版本调整,通常是 16 2019 或 17 2022)。 +cmake -G "Visual Studio 17 2022" -A x64 -B build +cmake --build build --config Release \ No newline at end of file diff --git a/image_capture/run_log.txt b/image_capture/run_log.txt new file mode 100644 index 0000000..e69de29 diff --git a/image_capture/src/algorithm/core/detection_base.cpp b/image_capture/src/algorithm/core/detection_base.cpp new file mode 
100644 index 0000000..e7e8fb0 --- /dev/null +++ b/image_capture/src/algorithm/core/detection_base.cpp @@ -0,0 +1,172 @@ +#include "detection_base.h" +#include "../detections/beam_rack_deflection/beam_rack_deflection_detection.h" +#include "../detections/pallet_offset/pallet_offset_detection.h" +#include "../detections/slot_occupancy/slot_occupancy_detection.h" +#include "../detections/visual_inventory/visual_inventory_detection.h" + +#include "detection_result.h" +#include +#include +#include +#include +#include + +/** + * @brief 获取当前时间戳字符串 + */ +static std::string getCurrentTimeString() { + auto now = std::chrono::system_clock::now(); + auto time_t = std::chrono::system_clock::to_time_t(now); + std::tm *tm = std::localtime(&time_t); + std::stringstream ss; + ss << std::put_time(tm, "%Y-%m-%d %H:%M:%S"); + return ss.str(); +} + +// ========== SlotOccupancyDetection ========== +bool SlotOccupancyDetection::execute(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + DetectionResult &result, + const std::vector *point_cloud, + int beam_length) { + result.result_type = 1; + result.result_status = "fail"; + + // 调用算法进行检测 + SlotOccupancyResult algo_result; + if (!SlotOccupancyAlgorithm::detect(depth_img, color_img, side, + algo_result)) { + std::cout + << "[Detection] SlotOccupancy: Detection failed (Algorithm error)." + << std::endl; + result.result_status = "fail"; + result.last_update_time = getCurrentTimeString(); + return false; + } + + // 将算法结果填充到 DetectionResult + result.slot_occupied = algo_result.slot_occupied; + result.result_status = algo_result.success ? "success" : "fail"; + result.last_update_time = getCurrentTimeString(); + + // 日志输出到界面 (UI Log) + std::cout << "[Detection] SlotOccupancy Result: " + << (result.slot_occupied ? 
"Occupied (有货)" : "Empty (无货)") + << std::endl; + + return algo_result.success; +} + +// ========== PalletOffsetDetection ========== +bool PalletOffsetDetection::execute(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + DetectionResult &result, + const std::vector *point_cloud, + int beam_length) { + result.result_type = 2; + result.result_status = "fail"; + + // 调用算法进行检测 + PalletOffsetResult algo_result; + if (!PalletOffsetAlgorithm::detect(depth_img, color_img, side, algo_result, + point_cloud)) { + result.result_status = "fail"; + result.last_update_time = getCurrentTimeString(); + return false; + } + + // 将算法结果填充到 DetectionResult + result.offset_lat_mm_value = algo_result.offset_lat_mm_value; + result.offset_lon_mm_value = algo_result.offset_lon_mm_value; + result.rotation_angle_value = algo_result.rotation_angle_value; + result.hole_def_mm_left_value = algo_result.hole_def_mm_left_value; + result.hole_def_mm_right_value = algo_result.hole_def_mm_right_value; + + result.offset_lat_mm_threshold = algo_result.offset_lat_mm_threshold; + result.offset_lon_mm_threshold = algo_result.offset_lon_mm_threshold; + result.rotation_angle_threshold = algo_result.rotation_angle_threshold; + result.hole_def_mm_left_threshold = algo_result.hole_def_mm_left_threshold; + result.hole_def_mm_right_threshold = algo_result.hole_def_mm_right_threshold; + + result.offset_lat_mm_warning_alarm = algo_result.offset_lat_mm_warning_alarm; + result.offset_lon_mm_warning_alarm = algo_result.offset_lon_mm_warning_alarm; + result.rotation_angle_warning_alarm = + algo_result.rotation_angle_warning_alarm; + result.hole_def_mm_left_warning_alarm = + algo_result.hole_def_mm_left_warning_alarm; + result.hole_def_mm_right_warning_alarm = + algo_result.hole_def_mm_right_warning_alarm; + + result.result_status = algo_result.success ? 
"success" : "fail"; + result.last_update_time = getCurrentTimeString(); + + return algo_result.success; +} + +// ========== BeamRackDeflectionDetection ========== +bool BeamRackDeflectionDetection::execute( + const cv::Mat &depth_img, const cv::Mat &color_img, const std::string &side, + DetectionResult &result, const std::vector *point_cloud, + int beam_length) { + result.result_type = 3; + result.result_status = "fail"; + + // Select ROI based on beam_length + std::vector beam_roi; + if (beam_length == 2180) { + beam_roi = BeamRackDeflectionAlgorithm::BEAM_ROI_2180; + } else if (beam_length == 1380) { + beam_roi = BeamRackDeflectionAlgorithm::BEAM_ROI_1380; + } + + // 调用算法进行检测 + BeamRackDeflectionResult algo_result; + if (!BeamRackDeflectionAlgorithm::detect( + depth_img, color_img, side, algo_result, point_cloud, beam_roi)) { + result.result_status = "fail"; + result.last_update_time = getCurrentTimeString(); + return false; + } + + // 将算法结果填充到 DetectionResult + result.beam_def_mm_value = algo_result.beam_def_mm_value; + result.rack_def_mm_value = algo_result.rack_def_mm_value; + result.beam_def_mm_threshold = algo_result.beam_def_mm_threshold; + result.rack_def_mm_threshold = algo_result.rack_def_mm_threshold; + result.beam_def_mm_warning_alarm = algo_result.beam_def_mm_warning_alarm; + result.rack_def_mm_warning_alarm = algo_result.rack_def_mm_warning_alarm; + + result.result_status = algo_result.success ? 
"success" : "fail"; + result.last_update_time = getCurrentTimeString(); + + return algo_result.success; +} + +// ========== VisualInventoryDetection ========== +bool VisualInventoryDetection::execute(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + DetectionResult &result, + const std::vector *point_cloud, + int beam_length) { + result.result_type = 4; + result.result_status = "fail"; + + // 调用算法进行检测 + VisualInventoryResult algo_result; + if (!VisualInventoryAlgorithm::detect(depth_img, color_img, side, + algo_result)) { + result.result_status = "fail"; + result.last_update_time = getCurrentTimeString(); + return false; + } + + // 将算法结果填充到 DetectionResult + result.result_barcodes = algo_result.result_barcodes; + result.result_status = algo_result.success ? "success" : "fail"; + result.last_update_time = getCurrentTimeString(); + + return algo_result.success; +} diff --git a/image_capture/src/algorithm/core/detection_base.h b/image_capture/src/algorithm/core/detection_base.h new file mode 100644 index 0000000..59c0b3d --- /dev/null +++ b/image_capture/src/algorithm/core/detection_base.h @@ -0,0 +1,116 @@ +#pragma once + +#include + +struct DetectionResult; +namespace cv { +class Mat; +} +#include "../../common_types.h" + +/** + * @brief 检测任务基类 + * + * 所有检测任务都继承自此类,实现统一的接口 + */ +class DetectionBase { +public: + DetectionBase() {} + virtual ~DetectionBase() {} + + /** + * 执行检测任务 + * @param depth_img 深度图像(可选) + * @param color_img 彩色图像(可选) + * @param side 货架侧("left"或"right") + * @param result [输出] 检测结果 + * @param point_cloud [可选] 点云数据 + * @return 是否检测成功 + */ + virtual bool execute(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, DetectionResult &result, + const std::vector *point_cloud = nullptr, + int beam_length = 0) = 0; + + /** + * 获取任务类型(对应flag值) + */ + virtual int getTaskType() const = 0; + + /** + * 获取任务名称 + */ + virtual std::string getTaskName() const = 0; +}; + +/** + * @brief Task 1: 货位有无检测 + */ 
+class SlotOccupancyDetection : public DetectionBase { +public: + SlotOccupancyDetection() {} + virtual ~SlotOccupancyDetection() {} + + bool execute(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, DetectionResult &result, + const std::vector *point_cloud = nullptr, + int beam_length = 0) override; + + int getTaskType() const override { return 1; } + std::string getTaskName() const override { return "SlotOccupancyDetection"; } +}; + +/** + * @brief Task 2: 托盘位置偏移检测 - 插孔变形检测(取货时) + */ +class PalletOffsetDetection : public DetectionBase { +public: + PalletOffsetDetection() {} + virtual ~PalletOffsetDetection() {} + + bool execute(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, DetectionResult &result, + const std::vector *point_cloud = nullptr, + int beam_length = 0) override; + + int getTaskType() const override { return 2; } + std::string getTaskName() const override { return "PalletOffsetDetection"; } +}; + +/** + * @brief Task 3: 横梁变形检测 - 货架立柱变形检测(放货时) + */ +class BeamRackDeflectionDetection : public DetectionBase { +public: + BeamRackDeflectionDetection() {} + virtual ~BeamRackDeflectionDetection() {} + + bool execute(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, DetectionResult &result, + const std::vector *point_cloud = nullptr, + int beam_length = 0) override; + + int getTaskType() const override { return 3; } + std::string getTaskName() const override { + return "BeamRackDeflectionDetection"; + } +}; + +/** + * @brief Task 4: 视觉盘点(扫码) + */ +class VisualInventoryDetection : public DetectionBase { +public: + VisualInventoryDetection() {} + virtual ~VisualInventoryDetection() {} + + bool execute(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, DetectionResult &result, + const std::vector *point_cloud = nullptr, + int beam_length = 0) override; + + int getTaskType() const override { return 4; } + std::string getTaskName() const override { 
+ return "VisualInventoryDetection"; + } +}; diff --git a/image_capture/src/algorithm/core/detection_result.cpp b/image_capture/src/algorithm/core/detection_result.cpp new file mode 100644 index 0000000..fbdf1e4 --- /dev/null +++ b/image_capture/src/algorithm/core/detection_result.cpp @@ -0,0 +1,184 @@ +#include "detection_result.h" +#include +#include + +std::string DetectionResult::toJson() const { + // TODO: 使用JSON库(如nlohmann/json)生成JSON字符串 + // 当前使用简单的字符串拼接方式 + std::ostringstream oss; + oss << "{"; + + // 基础字段 + oss << "\"result_status\":\"" << result_status << "\","; + oss << "\"result_type\":" << result_type << ","; + oss << "\"last_update_time\":\"" << last_update_time << "\""; + + // Flag 1 + if (result_type == 1) { + oss << ",\"slot_occupied\":" << (slot_occupied ? "true" : "false"); + } + + // Flag 2 + if (result_type == 2) { + oss << ",\"offset_lat_mm_value\":" << offset_lat_mm_value; + if (!offset_lat_mm_threshold.empty()) { + oss << ",\"offset_lat_mm_threshold\":" << offset_lat_mm_threshold; + } + if (!offset_lat_mm_warning_alarm.empty()) { + oss << ",\"offset_lat_mm_warning_alarm\":" << offset_lat_mm_warning_alarm; + } + + oss << ",\"offset_lon_mm_value\":" << offset_lon_mm_value; + if (!offset_lon_mm_threshold.empty()) { + oss << ",\"offset_lon_mm_threshold\":" << offset_lon_mm_threshold; + } + if (!offset_lon_mm_warning_alarm.empty()) { + oss << ",\"offset_lon_mm_warning_alarm\":" << offset_lon_mm_warning_alarm; + } + + oss << ",\"hole_def_mm_left_value\":" << hole_def_mm_left_value; + if (!hole_def_mm_left_threshold.empty()) { + oss << ",\"hole_def_mm_left_threshold\":" << hole_def_mm_left_threshold; + } + if (!hole_def_mm_left_warning_alarm.empty()) { + oss << ",\"hole_def_mm_left_warning_alarm\":" + << hole_def_mm_left_warning_alarm; + } + + oss << ",\"hole_def_mm_right_value\":" << hole_def_mm_right_value; + if (!hole_def_mm_right_threshold.empty()) { + oss << ",\"hole_def_mm_right_threshold\":" << hole_def_mm_right_threshold; + } + if 
(!hole_def_mm_right_warning_alarm.empty()) { + oss << ",\"hole_def_mm_right_warning_alarm\":" + << hole_def_mm_right_warning_alarm; + } + + oss << ",\"rotation_angle_value\":" << rotation_angle_value; + if (!rotation_angle_threshold.empty()) { + oss << ",\"rotation_angle_threshold\":" << rotation_angle_threshold; + } + if (!rotation_angle_warning_alarm.empty()) { + oss << ",\"rotation_angle_warning_alarm\":" + << rotation_angle_warning_alarm; + } + } + + // Flag 3 + if (result_type == 3) { + oss << ",\"beam_def_mm_value\":" << beam_def_mm_value; + if (!beam_def_mm_threshold.empty()) { + oss << ",\"beam_def_mm_threshold\":" << beam_def_mm_threshold; + } + if (!beam_def_mm_warning_alarm.empty()) { + oss << ",\"beam_def_mm_warning_alarm\":" << beam_def_mm_warning_alarm; + } + + oss << ",\"rack_def_mm_value\":" << rack_def_mm_value; + if (!rack_def_mm_threshold.empty()) { + oss << ",\"rack_def_mm_threshold\":" << rack_def_mm_threshold; + } + if (!rack_def_mm_warning_alarm.empty()) { + oss << ",\"rack_def_mm_warning_alarm\":" << rack_def_mm_warning_alarm; + } + } + + // Flag 4 & 5 + if (result_type == 4 || result_type == 5) { + if (!result_barcodes.empty()) { + oss << ",\"result_barcodes\":" << result_barcodes; + } + } + + oss << "}"; + return oss.str(); +} + +bool DetectionResult::fromJson(const std::string &json_str) { + // TODO: 使用JSON库解析JSON字符串 + // 当前实现为占位符 + std::cerr << "[DetectionResult] TODO: Implement JSON parsing" << std::endl; + return false; +} + +std::map DetectionResult::toMap() const { + std::map m; + + // Helper to convert float to string + auto floatToStr = [](float val) { return std::to_string(val); }; + + // Helper to convert bool to string "true"/"false" + auto boolToStr = [](bool val) { return val ? 
"true" : "false"; }; + + // 基础字段 (总是写入) + m["result_status"] = result_status; + m["result_type"] = std::to_string(result_type); + m["last_update_time"] = last_update_time; + + // Flag 1: 货位有无 + if (result_type == 1) { + m["slot_occupied"] = boolToStr(slot_occupied); + } + + // Flag 2: 托盘检测 + if (result_type == 2) { + m["offset_lat_mm_value"] = floatToStr(offset_lat_mm_value); + m["offset_lat_mm_threshold"] = + offset_lat_mm_threshold.empty() ? "{}" : offset_lat_mm_threshold; + m["offset_lat_mm_warning_alarm"] = offset_lat_mm_warning_alarm.empty() + ? "{}" + : offset_lat_mm_warning_alarm; + + m["offset_lon_mm_value"] = floatToStr(offset_lon_mm_value); + m["offset_lon_mm_threshold"] = + offset_lon_mm_threshold.empty() ? "{}" : offset_lon_mm_threshold; + m["offset_lon_mm_warning_alarm"] = offset_lon_mm_warning_alarm.empty() + ? "{}" + : offset_lon_mm_warning_alarm; + + m["hole_def_mm_left_value"] = floatToStr(hole_def_mm_left_value); + m["hole_def_mm_left_threshold"] = + hole_def_mm_left_threshold.empty() ? "{}" : hole_def_mm_left_threshold; + m["hole_def_mm_left_warning_alarm"] = hole_def_mm_left_warning_alarm.empty() + ? "{}" + : hole_def_mm_left_warning_alarm; + + m["hole_def_mm_right_value"] = floatToStr(hole_def_mm_right_value); + m["hole_def_mm_right_threshold"] = hole_def_mm_right_threshold.empty() + ? "{}" + : hole_def_mm_right_threshold; + m["hole_def_mm_right_warning_alarm"] = + hole_def_mm_right_warning_alarm.empty() + ? "{}" + : hole_def_mm_right_warning_alarm; + + m["rotation_angle_value"] = floatToStr(rotation_angle_value); + m["rotation_angle_threshold"] = + rotation_angle_threshold.empty() ? "{}" : rotation_angle_threshold; + m["rotation_angle_warning_alarm"] = rotation_angle_warning_alarm.empty() + ? "{}" + : rotation_angle_warning_alarm; + } + + // Flag 3: 横梁/立柱检测 + if (result_type == 3) { + m["beam_def_mm_value"] = floatToStr(beam_def_mm_value); + m["beam_def_mm_threshold"] = + beam_def_mm_threshold.empty() ? 
"{}" : beam_def_mm_threshold; + m["beam_def_mm_warning_alarm"] = + beam_def_mm_warning_alarm.empty() ? "{}" : beam_def_mm_warning_alarm; + + m["rack_def_mm_value"] = floatToStr(rack_def_mm_value); + m["rack_def_mm_threshold"] = + rack_def_mm_threshold.empty() ? "{}" : rack_def_mm_threshold; + m["rack_def_mm_warning_alarm"] = + rack_def_mm_warning_alarm.empty() ? "{}" : rack_def_mm_warning_alarm; + } + + // Flag 4 & 5: 视觉盘点 & 结束 + if (result_type == 4 || result_type == 5) { + m["result_barcodes"] = result_barcodes.empty() ? "{}" : result_barcodes; + } + + return m; +} diff --git a/image_capture/src/algorithm/core/detection_result.h b/image_capture/src/algorithm/core/detection_result.h new file mode 100644 index 0000000..e334bac --- /dev/null +++ b/image_capture/src/algorithm/core/detection_result.h @@ -0,0 +1,96 @@ +#pragma once + +#include +#include + +// TODO: 添加nlohmann/json库依赖 +// 临时使用简单的JSON字符串表示,后续替换为nlohmann::json +// 为了简化,这里使用std::string存储JSON字符串 +// 实际实现时应该使用nlohmann::json或类似的JSON库 +using JsonValue = std::string; // 临时定义,实际应使用nlohmann::json + +/** + * @brief 检测结果数据结构 + * + * 包含所有检测任务的结果数据,根据任务类型(flag)填充相应字段 + */ +struct DetectionResult { + // 基础字段 + std::string result_status; // "success" 或 "fail" + int result_type; // 对应 vision_task_flag(1~5) + std::string last_update_time; // "YYYY-MM-DD HH:MM:SS" + + // Flag 1: 货位有无检测 + bool slot_occupied; // 货位是否有托盘/货物 + + // Flag 2: 托盘位置偏移检测 - 插孔变形检测(取货时) + // 左右偏移量 + float offset_lat_mm_value; + JsonValue offset_lat_mm_threshold; // {"A": -5.0, "B": -3.0, "C": 3.0, "D": 5.0} + JsonValue offset_lat_mm_warning_alarm; // {"warning": false, "alarm": false} + + // 前后偏移量 + float offset_lon_mm_value; + JsonValue offset_lon_mm_threshold; + JsonValue offset_lon_mm_warning_alarm; + + // 左侧插孔变形 + float hole_def_mm_left_value; + JsonValue hole_def_mm_left_threshold; + JsonValue hole_def_mm_left_warning_alarm; + + // 右侧插孔变形 + float hole_def_mm_right_value; + JsonValue hole_def_mm_right_threshold; + JsonValue 
hole_def_mm_right_warning_alarm; + + // 托盘整体旋转角度 + float rotation_angle_value; + JsonValue rotation_angle_threshold; + JsonValue rotation_angle_warning_alarm; + + // Flag 3: 横梁变形检测 - 货架立柱变形检测(放货时) + // 横梁弯曲量 + float beam_def_mm_value; + JsonValue beam_def_mm_threshold; + JsonValue beam_def_mm_warning_alarm; + + // 立柱弯曲量 + float rack_def_mm_value; + JsonValue rack_def_mm_threshold; + JsonValue rack_def_mm_warning_alarm; + + // Flag 4: 视觉盘点(扫码) + JsonValue result_barcodes; // {"A01":["BOX111","BOX112"], "A02":["BOX210"]} + + DetectionResult() + : result_status("fail") + , result_type(0) + , slot_occupied(false) + , offset_lat_mm_value(0.0f) + , offset_lon_mm_value(0.0f) + , hole_def_mm_left_value(0.0f) + , hole_def_mm_right_value(0.0f) + , rotation_angle_value(0.0f) + , beam_def_mm_value(0.0f) + , rack_def_mm_value(0.0f) + { + } + + /** + * 转换为JSON字符串 + */ + std::string toJson() const; + + /** + * 从JSON字符串解析 + */ + bool fromJson(const std::string& json_str); + + /** + * 转换为Key-Value Map + * 用于分别写入Redis各个Key + */ + std::map toMap() const; +}; + diff --git a/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.cpp b/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.cpp new file mode 100644 index 0000000..7a8f773 --- /dev/null +++ b/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.cpp @@ -0,0 +1,932 @@ +#include "beam_rack_deflection_detection.h" +#include "../../../common/config_manager.h" +#define DEBUG_ROI_SELECTION // 启用交互式ROI选择(调试模式) +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include + +//==================== +// 步骤1:默认ROI点定义 +//==================== +// 定义默认ROI点(四个点:左上、右上、右下、左下) +// 横梁ROI默认点(示例值,可根据实际场景调整) +const std::vector + BeamRackDeflectionAlgorithm::DEFAULT_BEAM_ROI_POINTS = { + cv::Point2i(100, 50), // 左上 + cv::Point2i(540, 80), // 右上 + cv::Point2i(540, 
280), // 右下 + cv::Point2i(100, 280) // 左下 +}; + +// 2180mm 横梁 ROI (Placeholder - Same as Default for now) +const std::vector BeamRackDeflectionAlgorithm::BEAM_ROI_2180 = { + cv::Point2i(100, 50), cv::Point2i(540, 80), cv::Point2i(540, 280), + cv::Point2i(100, 280)}; + +// 1380mm 横梁 ROI (Placeholder - Same as Default for now) +const std::vector BeamRackDeflectionAlgorithm::BEAM_ROI_1380 = { + cv::Point2i(100, 50), cv::Point2i(540, 80), cv::Point2i(540, 280), + cv::Point2i(100, 280)}; + +//==================== +// 步骤2:立柱ROI默认点定义 +//==================== +// 立柱ROI默认点(示例值,可根据实际场景调整) +const std::vector + BeamRackDeflectionAlgorithm::DEFAULT_RACK_ROI_POINTS = { + cv::Point2i(50, 50), // 左上 + cv::Point2i(150, 50), // 右上 + cv::Point2i(150, 430), // 右下 + cv::Point2i(50, 430) // 左下 +}; + +//==================== +// 步骤3:横梁阈值默认值定义 +//==================== +// 定义默认阈值(四个值:A负方向报警, B负方向警告, C正方向警告, D正方向报警) +// 横梁阈值默认值(示例值,可根据实际需求调整) +const std::vector BeamRackDeflectionAlgorithm::DEFAULT_BEAM_THRESHOLDS = + { + -50.0f, // A: 负方向报警阈值 (横梁Y+方向忽略) + -30.0f, // B: 负方向警告阈值 (横梁Y+方向忽略) + 30.0f, // C: 正方向警告阈值 (>30mm) + 50.0f // D: 正方向报警阈值 (>50mm) +}; + +//==================== +// 步骤4:立柱阈值默认值定义 +//==================== +// 立柱阈值默认值(示例值,可根据实际需求调整) +const std::vector BeamRackDeflectionAlgorithm::DEFAULT_RACK_THRESHOLDS = + { + -50.0f, // A: 负方向报警阈值 (对称参考) + -30.0f, // B: 负方向警告阈值 (对称参考) + 30.0f, // C: 正方向警告阈值 (绝对值 > 30mm) + 50.0f // D: 正方向报警阈值 (绝对值 > 50mm) +}; + +//==================== +// 步骤5:加载标定参数 +//==================== +bool BeamRackDeflectionAlgorithm::loadCalibration(Eigen::Matrix4d &transform) { + // 在当前目录查找 calibration_result_*.json 文件 + QDir dir = QDir::current(); + QStringList filters; + filters << "calibration_result_*.json"; + dir.setNameFilters(filters); + QFileInfoList list = dir.entryInfoList(QDir::Files, QDir::Time); // 按时间排序 + + if (list.empty()) { + std::cerr << "[BeamRackDeflectionAlgorithm] Warning: No calibration file " + "found. Using Identity." 
+ << std::endl; + transform = Eigen::Matrix4d::Identity(); + return false; + } + + // 使用最新的文件 + QString filePath = list.first().absoluteFilePath(); + std::cout << "[BeamRackDeflectionAlgorithm] Loading calibration from: " + << filePath.toStdString() << std::endl; + + QFile file(filePath); + if (!file.open(QIODevice::ReadOnly)) { + std::cerr << "[BeamRackDeflectionAlgorithm] Error: Could not open file." + << std::endl; + transform = Eigen::Matrix4d::Identity(); + return false; + } + + QByteArray data = file.readAll(); + QJsonDocument doc = QJsonDocument::fromJson(data); + if (doc.isNull()) { + std::cerr << "[BeamRackDeflectionAlgorithm] Error: Invalid JSON." + << std::endl; + transform = Eigen::Matrix4d::Identity(); + return false; + } + + QJsonObject root = doc.object(); + if (root.contains("transformation_matrix")) { + QJsonArray arr = root["transformation_matrix"].toArray(); + if (arr.size() == 16) { + for (int i = 0; i < 4; ++i) { + for (int j = 0; j < 4; ++j) { + transform(i, j) = arr[i * 4 + j].toDouble(); + } + } + return true; + } + } + + std::cerr << "[BeamRackDeflectionAlgorithm] Error: transformation_matrix " + "missing or invalid." + << std::endl; + transform = Eigen::Matrix4d::Identity(); + return false; +} + +//==================== +// 步骤6:横梁和立柱变形检测主函数 +//==================== +bool BeamRackDeflectionAlgorithm::detect( + const cv::Mat &depth_img, const cv::Mat &color_img, const std::string &side, + BeamRackDeflectionResult &result, const std::vector *point_cloud, + const std::vector &beam_roi_points, + const std::vector &rack_roi_points, + const std::vector &beam_thresholds, + const std::vector &rack_thresholds) { + // 算法启用开关 + const bool USE_ALGORITHM = true; + + if (USE_ALGORITHM) { + // --- 真实算法逻辑 --- + // 6.1 初始化结果 + result.success = false; + result.beam_def_mm_value = 0.0f; + result.rack_def_mm_value = 0.0f; + + // 6.2 验证深度图 + if (depth_img.empty()) { + std::cerr << "[BeamRackDeflectionAlgorithm] ERROR: Depth image empty!" 
+ << std::endl; + return false; + } + + // 6.3 检查点云 + if (!point_cloud || point_cloud->empty()) { + std::cerr + << "[BeamRackDeflectionAlgorithm] ERROR: Point cloud empty or null!" + << std::endl; + return false; + } + + // 6.4 加载标定参数 + Eigen::Matrix4d transform; + loadCalibration(transform); + + // 6.5 转换点云并按ROI组织 + // 注意:假设点云与深度图分辨率匹配(行优先) + // 如果点云只是有效点的列表而没有结构,我们无法轻松映射2D ROI + // 但通常标准会保持 size = width * height + if (point_cloud->size() != depth_img.cols * depth_img.rows) { + std::cerr << "[BeamRackDeflectionAlgorithm] Warning: Point cloud size " + "mismatch. Assuming organized." + << std::endl; + } + + int width = depth_img.cols; + int height = depth_img.rows; + + std::vector beam_points_3d; + std::vector rack_points_3d; + + // 6.6 辅助函数:检查点是否在ROI内 + auto isInRoi = [](const std::vector &roi, int x, int y) { + if (roi.size() < 3) + return false; + return cv::pointPolygonTest(roi, cv::Point2f((float)x, (float)y), + false) >= 0; + }; + + // 6.7 确定实际使用的ROI(使用默认值或自定义值) + std::vector actual_beam_roi = + beam_roi_points.empty() ? DEFAULT_BEAM_ROI_POINTS : beam_roi_points; + std::vector actual_rack_roi = + rack_roi_points.empty() ? 
DEFAULT_RACK_ROI_POINTS : rack_roi_points; + +// 6.8 交互式ROI选择(调试模式) +#ifdef DEBUG_ROI_SELECTION + // 辅助lambda函数:用于4点ROI选择 + auto selectPolygonROI = + [&](const std::string &winName, + const cv::Mat &bg_img) -> std::vector { + std::vector clicks; + std::string fullWinName = winName + " (Click 4 points)"; + cv::namedWindow(fullWinName, cv::WINDOW_AUTOSIZE); + + cv::setMouseCallback( + fullWinName, + [](int event, int x, int y, int flags, void *userdata) { + auto *points = static_cast *>(userdata); + if (event == cv::EVENT_LBUTTONDOWN) { + if (points->size() < 4) { + points->push_back(cv::Point(x, y)); + std::cout << "Clicked: (" << x << ", " << y << ")" << std::endl; + } + } + }, + &clicks); + + while (clicks.size() < 4) { + cv::Mat display = bg_img.clone(); + for (size_t i = 0; i < clicks.size(); ++i) { + cv::circle(display, clicks[i], 4, cv::Scalar(0, 0, 255), -1); + if (i > 0) + cv::line(display, clicks[i - 1], clicks[i], cv::Scalar(0, 255, 0), + 2); + } + cv::imshow(fullWinName, display); + int key = cv::waitKey(10); + if (key == 27) + return {}; // ESC键取消 + } + // 闭合多边形可视化 + cv::Mat final_display = bg_img.clone(); + for (size_t i = 0; i < clicks.size(); ++i) { + cv::circle(final_display, clicks[i], 4, cv::Scalar(0, 0, 255), -1); + if (i > 0) + cv::line(final_display, clicks[i - 1], clicks[i], + cv::Scalar(0, 255, 0), 2); + } + cv::line(final_display, clicks.back(), clicks.front(), + cv::Scalar(0, 255, 0), 2); + cv::imshow(fullWinName, final_display); + cv::waitKey(500); // Show for a bit + + cv::destroyWindow(fullWinName); + + // Convert to Point2i + std::vector result; + for (const auto &p : clicks) + result.push_back(p); + return result; + }; + + static bool showed_debug_warning = false; + if (!showed_debug_warning) { + std::cout << "[BeamRackDeflectionAlgorithm] DEBUG INFO: Interactive " + "Rectified ROI Selection Enabled." 
+ << std::endl; + showed_debug_warning = true; + } + + if (!depth_img.empty()) { + // --- 矫正逻辑 --- + cv::Mat display_img; + cv::normalize(depth_img, display_img, 0, 255, cv::NORM_MINMAX, CV_8U); + cv::cvtColor(display_img, display_img, cv::COLOR_GRAY2BGR); + + // 尝试加载内参以进行矫正 + cv::Mat H = cv::Mat::eye(3, 3, CV_64F); + bool can_rectify = false; + + QDir dir_curr = QDir::current(); + QStringList filters; + filters << "intrinsics_*.json"; + dir_curr.setNameFilters(filters); + QFileInfoList list = dir_curr.entryInfoList(QDir::Files, QDir::Time); + + if (!list.empty()) { + QFile i_file(list.first().absoluteFilePath()); + if (i_file.open(QIODevice::ReadOnly)) { + QJsonDocument i_doc = QJsonDocument::fromJson(i_file.readAll()); + if (!i_doc.isNull() && i_doc.object().contains("depth")) { + QJsonObject d_obj = i_doc.object()["depth"].toObject(); + if (d_obj.contains("intrinsic")) { + QJsonArray i_arr = d_obj["intrinsic"].toArray(); + if (i_arr.size() >= 9) { + double fx = i_arr[0].toDouble(); + double fy = i_arr[4].toDouble(); + double cx = i_arr[2].toDouble(); + double cy = i_arr[5].toDouble(); + + Eigen::Matrix3d K; + K << fx, 0, cx, 0, fy, cy, 0, 0, 1; + + Eigen::Matrix3d R = transform.block<3, 3>(0, 0); + // 单应性矩阵 H = K * R * K_inv + // 这将图像变换为仿佛相机已按 R 旋转 + Eigen::Matrix3d H_eig = K * R * K.inverse(); + + for (int r = 0; r < 3; ++r) + for (int c = 0; c < 3; ++c) + H.at(r, c) = H_eig(r, c); + + can_rectify = true; + std::cout << "[BeamRackDeflectionAlgorithm] Intrinsics loaded. " + "Rectification enabled." + << std::endl; + } + } + } + } + } + + cv::Mat warp_img; + cv::Mat H_final = H.clone(); // 复制原始 H 以开始 + + if (can_rectify) { + // 1. 计算变换后的角点以找到新的边界框 + std::vector corners = { + cv::Point2f(0, 0), cv::Point2f((float)width, 0), + cv::Point2f((float)width, (float)height), + cv::Point2f(0, (float)height)}; + std::vector warped_corners; + cv::perspectiveTransform(corners, warped_corners, H); + + cv::Rect bbox = cv::boundingRect(warped_corners); + + // 2. 
创建平移矩阵以将图像移入视野 + cv::Mat T = cv::Mat::eye(3, 3, CV_64F); + T.at(0, 2) = -bbox.x; + T.at(1, 2) = -bbox.y; + + // 3. 更新单应性矩阵 + H_final = T * H; + + // 4. 使用新尺寸和 H 进行变换 + cv::warpPerspective(display_img, warp_img, H_final, bbox.size()); + + std::cout << "[BeamRackDeflectionAlgorithm] Rectified Image Size: " + << bbox.width << "x" << bbox.height << std::endl; + } else { + std::cout << "[BeamRackDeflectionAlgorithm] Warning: Intrinsics not " + "found. Showing unrectified image." + << std::endl; + warp_img = display_img.clone(); + } + + // --- 选择横梁 ROI --- + std::cout << "[BeamRackDeflectionAlgorithm] Please click 4 points for " + "BEAM ROI..." + << std::endl; + auto beam_poly_visual = selectPolygonROI("Select BEAM", warp_img); + + // 如果已矫正,则映射回原始坐标 + if (beam_poly_visual.size() == 4) { + if (can_rectify) { + std::vector src, dst; + for (auto p : beam_poly_visual) + src.push_back(cv::Point2f(p.x, p.y)); + cv::perspectiveTransform(src, dst, + H_final.inv()); // Use H_final.inv() + actual_beam_roi.clear(); + for (auto p : dst) + actual_beam_roi.push_back( + cv::Point2i(std::round(p.x), std::round(p.y))); + } else { + actual_beam_roi = beam_poly_visual; + } + std::cout << "[BeamRackDeflectionAlgorithm] Beam ROI Updated." + << std::endl; + } + + // --- 选择立柱 ROI --- + std::cout << "[BeamRackDeflectionAlgorithm] Please click 4 points for " + "RACK ROI..." + << std::endl; + auto rack_poly_visual = selectPolygonROI("Select RACK", warp_img); + + if (rack_poly_visual.size() == 4) { + if (can_rectify) { + std::vector src, dst; + for (auto p : rack_poly_visual) + src.push_back(cv::Point2f(p.x, p.y)); + cv::perspectiveTransform(src, dst, + H_final.inv()); // Use H_final.inv() + actual_rack_roi.clear(); + for (auto p : dst) + actual_rack_roi.push_back( + cv::Point2i(std::round(p.x), std::round(p.y))); + } else { + actual_rack_roi = rack_poly_visual; + } + std::cout << "[BeamRackDeflectionAlgorithm] Rack ROI Updated." 
+ << std::endl; + } + } +#endif + // ============================================ + + cv::Rect beam_bbox = cv::boundingRect(actual_beam_roi); + cv::Rect rack_bbox = cv::boundingRect(actual_rack_roi); + + // 处理横梁 ROI 区域 + float max_beam_deflection = 0.0f; + float max_rack_deflection = 0.0f; + + auto process_roi = [&](const cv::Rect &bbox, + const std::vector &poly, + std::vector &out_pts) { + int start_x = std::max(0, bbox.x); + int end_x = std::min(width, bbox.x + bbox.width); + int start_y = std::max(0, bbox.y); + int end_y = std::min(height, bbox.y + bbox.height); + + for (int y = start_y; y < end_y; ++y) { + for (int x = start_x; x < end_x; ++x) { + if (!isInRoi(poly, x, y)) + continue; + + int idx = y * width + x; + if (idx >= point_cloud->size()) + continue; + + const Point3D &pt = (*point_cloud)[idx]; + if (pt.z <= 0.0f || std::isnan(pt.x)) + continue; + + // Transform + Eigen::Vector4d p(pt.x, pt.y, pt.z, 1.0); + Eigen::Vector4d p_trans = transform * p; + + out_pts.emplace_back(p_trans.head<3>()); + } + } + }; + + process_roi(beam_bbox, actual_beam_roi, beam_points_3d); + process_roi(rack_bbox, actual_rack_roi, rack_points_3d); + + // =========================================== + // FIX: 自动旋转矫正 (PCA) + // 解决 "基准线不水平" 的问题,确保横梁水平,立柱垂直 + // 通过将数据旋转到水平/垂直,基准线(连接端点)将变为水平/垂直。 + // 从而使变形量(点到线的距离)等于 Y 轴(横梁)或 X 轴(立柱)的偏差。 + // =========================================== + auto correctRotation = [](std::vector &points, + bool is_beam) { + if (points.size() < 10) + return; + + // 1. Convert to cv::Mat for PCA (Only use X, Y) + int n = points.size(); + cv::Mat data(n, 2, CV_64F); + for (int i = 0; i < n; ++i) { + data.at(i, 0) = points[i].x(); + data.at(i, 1) = points[i].y(); + } + + // 2. Perform PCA + cv::PCA pca(data, cv::Mat(), cv::PCA::DATA_AS_ROW); + + // 3. Get primary eigenvector (direction of max variance) + // Eigenvectors are stored in rows. Row 0 is the primary vector. + cv::Point2d eigen_vec(pca.eigenvectors.at(0, 0), + pca.eigenvectors.at(0, 1)); + + // 4. 
Calculate angle relative to desired axis + // Beam (is_beam=true): Should align with X-axis (1, 0) + // Rack (is_beam=false): Should align with Y-axis (0, 1) + + double angle = std::atan2(eigen_vec.y, eigen_vec.x); // Angle of the data + + double rotation_angle = 0.0; + + if (is_beam) { + // Target: Horizontal (0 degrees) + rotation_angle = -angle; + } else { + // Target: Vertical (90 degrees or PI/2) + rotation_angle = (CV_PI / 2.0) - angle; + } + + // Normalize to -PI ~ PI + while (rotation_angle > CV_PI) + rotation_angle -= 2 * CV_PI; + while (rotation_angle < -CV_PI) + rotation_angle += 2 * CV_PI; + + // Safety check: Don't rotate if angle is suspicious huge (> 45 deg) + // unless confident For now, we trust PCA for standard slight tilts (< 30 + // deg). + + std::cout << "[BeamRackDeflectionAlgorithm] Correcting " + << (is_beam ? "Beam" : "Rack") + << " Rotation: " << rotation_angle * 180.0 / CV_PI << " deg." + << std::endl; + + // 5. Apply Rotation + double c = std::cos(rotation_angle); + double s = std::sin(rotation_angle); + + // Center of rotation: PCA mean + double cx = pca.mean.at(0); + double cy = pca.mean.at(1); + + for (int i = 0; i < n; ++i) { + double x = points[i].x() - cx; + double y = points[i].y() - cy; + + double x_new = x * c - y * s; + double y_new = x * s + y * c; + + points[i].x() = x_new + cx; + points[i].y() = y_new + cy; + // Z unchanged + } + }; + + // Apply corrections + correctRotation(beam_points_3d, true); + correctRotation(rack_points_3d, false); + // =========================================== + + // 6.9 计算变形量 + + // 分箱(切片)方法辅助函数 + auto calculate_deflection_binned = [&](std::vector &points, + bool is_beam_y_check, + const std::string &label) -> float { + if (points.empty()) + return 0.0f; + + // 1. 沿主轴排序点 + std::sort(points.begin(), points.end(), + [is_beam_y_check](const Eigen::Vector3d &a, + const Eigen::Vector3d &b) { + return is_beam_y_check ? (a.x() < b.x()) : (a.y() < b.y()); + }); + + // 2. 
分箱 + int num_bins = 50; + if (points.size() < 100) + num_bins = 10; // Reduce bins for small sets + + double min_u = is_beam_y_check ? points.front().x() : points.front().y(); + double max_u = is_beam_y_check ? points.back().x() : points.back().y(); + +// 可视化辅助 +#ifdef DEBUG_ROI_SELECTION + int viz_w = 800; + int viz_h = 400; + cv::Mat viz_img = cv::Mat::zeros(viz_h, viz_w, CV_8UC3); + double disp_min_u = min_u; + double disp_max_u = max_u; + double min_v = 1e9, max_v = -1e9; + + auto map_u = [&](double u) -> int { + return (int)((u - disp_min_u) / (disp_max_u - disp_min_u) * + (viz_w - 40) + + 20); + }; +// Will define map_v later after range finding +#endif + + std::vector raw_centroids; + std::vector counts; + + double range_min = min_u; + double range_max = max_u; + double bin_step = (range_max - range_min) / num_bins; + + if (bin_step < 1.0) + return 0.0f; + + auto it = points.begin(); + double avg_pts_per_bin = 0; + int filled_bins = 0; + + for (int i = 0; i < num_bins; ++i) { + double bin_start = range_min + i * bin_step; + double bin_end = bin_start + bin_step; + + std::vector bin_pts; + while (it != points.end()) { + double val = is_beam_y_check ? it->x() : it->y(); + // double val_v = is_beam_y_check ? it->y() : it->x(); + if (val > bin_end) + break; + bin_pts.push_back(*it); + ++it; + } + + if (!bin_pts.empty()) { + // Robust Centroid (Trimmed Mean) + std::sort(bin_pts.begin(), bin_pts.end(), + [is_beam_y_check](const Eigen::Vector3d &a, + const Eigen::Vector3d &b) { + double val_a = is_beam_y_check ? a.y() : a.x(); // V axis + double val_b = is_beam_y_check ? 
b.y() : b.x(); + return val_a < val_b; + }); + + size_t n = bin_pts.size(); + size_t start = (size_t)(n * 0.25); + size_t end = (size_t)(n * 0.75); + if (end <= start) { + start = 0; + end = n; + } + + Eigen::Vector3d sum(0, 0, 0); + int count = 0; + for (size_t k = start; k < end; ++k) { + sum += bin_pts[k]; + count++; + } + + if (count > 0) { + raw_centroids.push_back(sum / count); + counts.push_back(bin_pts.size()); + avg_pts_per_bin += bin_pts.size(); + filled_bins++; + } + } + } + + if (filled_bins < 2) + return 0.0f; + avg_pts_per_bin /= filled_bins; + + // --- 2.1 Bin Filtering (Remove Noise) --- + // Filter out bins with significantly low density (e.g. < 20% of average) + std::vector bin_centroids; + for (size_t i = 0; i < raw_centroids.size(); ++i) { + if (counts[i] > avg_pts_per_bin * 0.2) { + bin_centroids.push_back(raw_centroids[i]); + + // Track V range for filtered points + double v = + is_beam_y_check ? raw_centroids[i].y() : raw_centroids[i].x(); +#ifdef DEBUG_ROI_SELECTION + if (v < min_v) + min_v = v; + if (v > max_v) + max_v = v; +#endif + } + } + + if (bin_centroids.size() < 2) { + std::cerr << "[BeamRack] Filtered bins too few." << std::endl; + return 0.0f; + } + +#ifdef DEBUG_ROI_SELECTION + // Adjust V range + double v_range = max_v - min_v; + if (v_range < 1.0) + v_range = 10.0; + min_v -= v_range * 0.5; // More margin + max_v += v_range * 0.5; + + auto map_v = [&](double v) -> int { + return (int)((v - min_v) / (max_v - min_v) * (viz_h - 40) + 20); + }; + + // Draw Points + for (size_t i = 0; i < bin_centroids.size(); ++i) { + double u = + is_beam_y_check ? bin_centroids[i].x() : bin_centroids[i].y(); + double v = + is_beam_y_check ? bin_centroids[i].y() : bin_centroids[i].x(); + cv::circle(viz_img, cv::Point(map_u(u), map_v(v)), 3, + cv::Scalar(255, 255, 0), -1); // Cyan Centroids + } +#endif + + // --- 3. 
Robust Baseline Fitting (Support Line) --- + // Instead of simple endpoints, fit a line to "valid support regions" + // Support Regions: First 15% and Last 15% of VALID centroids. + + std::vector support_points; + int support_count = (int)(bin_centroids.size() * 0.15); + if (support_count < 2) + support_count = 2; // At least 2 points at each end + if (support_count * 2 > bin_centroids.size()) + support_count = bin_centroids.size() / 2; + + for (int i = 0; i < support_count; ++i) + support_points.push_back(bin_centroids[i]); + for (int i = 0; i < support_count; ++i) + support_points.push_back(bin_centroids[bin_centroids.size() - 1 - i]); + + // Fit Line to Support Points (Least Squares) + // Model: v = m * u + c (since rotated, m should be close to 0) + double sum_u = 0, sum_v = 0, sum_uv = 0, sum_uu = 0; + int N = support_points.size(); + for (const auto &p : support_points) { + double u = is_beam_y_check ? p.x() : p.y(); + double v = is_beam_y_check ? p.y() : p.x(); + sum_u += u; + sum_v += v; + sum_uv += u * v; + sum_uu += u * u; + } + + double slope = 0, intercept = 0; + double denom = N * sum_uu - sum_u * sum_u; + if (std::abs(denom) > 1e-6) { + slope = (N * sum_uv - sum_u * sum_v) / denom; + intercept = (sum_v - slope * sum_u) / N; + } else { + // Vertical line? Should not happen after rotation. Fallback average. + slope = 0; + intercept = sum_v / N; + } + + std::cout << "[BeamRack] Baseline Fit: slope=" << slope + << ", intercept=" << intercept << " (Support Pts: " << N << ")" + << std::endl; + + // --- 4. Calculate Max Deflection --- + double max_def = 0.0; + Eigen::Vector3d max_pt; + double max_theoretical_v = 0; + + for (const auto &p : bin_centroids) { + double u = is_beam_y_check ? p.x() : p.y(); + double v = is_beam_y_check ? p.y() : p.x(); + + double theoretical_v = slope * u + intercept; + double def = 0; + + if (is_beam_y_check) { + // Beam: Y+ is down. 
Deflection = ActualY - TheoreticalY + // We rotated data, so Y+ might still be relevant if rotation was just + // alignment. Assuming standard coords: Sag (Down) is Y decreasing? Or + // increasing? In Camera Coords: Y is DOWN. So Sag is INCREASING Y. + // Deflection = v - theoretical_v. Positive = Down. + def = v - theoretical_v; + } else { + // Rack: Deflection is absolute distance + def = std::abs(v - theoretical_v); + } + + if (def > max_def) { + max_def = def; + max_pt = p; + max_theoretical_v = theoretical_v; + } + } + + // Robust Average of Max Region (Top 3) + // ... (Simplified: use raw max for now, or implement top-k avg if + // preferred) Sticking to Max for simplicity as requested, but previous + // code used Average. Let's reimplement Top 3 Average roughly around max + // peak? Actually, just returning max_def is cleaner for "maximum sag". + +#ifdef DEBUG_ROI_SELECTION + // Draw Baseline + double u_start = disp_min_u; + double v_start = slope * u_start + intercept; + double u_end = disp_max_u; + double v_end = slope * u_end + intercept; + cv::line(viz_img, cv::Point(map_u(u_start), map_v(v_start)), + cv::Point(map_u(u_end), map_v(v_end)), cv::Scalar(0, 255, 0), 2); + + // Draw Max Deflection + if (max_def != 0.0) { + double u_d = is_beam_y_check ? max_pt.x() : max_pt.y(); + double v_d = is_beam_y_check ? 
max_pt.y() : max_pt.x(); + cv::line(viz_img, cv::Point(map_u(u_d), map_v(v_d)), + cv::Point(map_u(u_d), map_v(max_theoretical_v)), + cv::Scalar(0, 0, 255), 2); + cv::putText(viz_img, "Max: " + std::to_string(max_def), + cv::Point(viz_w / 2, 50), cv::FONT_HERSHEY_SIMPLEX, 0.8, + cv::Scalar(0, 0, 255), 2); + } + + cv::imshow("Robust Deflection: " + label, viz_img); + cv::waitKey(100); +#endif + + return (float)max_def; + }; + + // 6.10 Run Calculation logic + + // --- 横梁变形(Y+ 方向)--- + + max_beam_deflection = + calculate_deflection_binned(beam_points_3d, true, "Beam"); + + // --- 立柱变形(X 方向)--- + max_rack_deflection = + calculate_deflection_binned(rack_points_3d, false, "Rack"); + + // 存储结果 + result.beam_def_mm_value = max_beam_deflection; + result.rack_def_mm_value = max_rack_deflection; + + std::cout << "[BeamRackDeflectionAlgorithm] Results: Beam=" + << max_beam_deflection << "mm, Rack=" << max_rack_deflection + << "mm" + << " (Beam Points: " << beam_points_3d.size() + << ", Rack Points: " << rack_points_3d.size() << ")" << std::endl; + + // 使用默认或提供的阈值 + // std::vector actual_beam_thresh = beam_thresholds.empty() ? + // DEFAULT_BEAM_THRESHOLDS : beam_thresholds; // OLD std::vector + // actual_rack_thresh = rack_thresholds.empty() ? 
DEFAULT_RACK_THRESHOLDS : + // rack_thresholds; // OLD + + // NEW: Use ConfigManager + std::vector actual_beam_thresh = + ConfigManager::getInstance().getBeamThresholds(); + std::vector actual_rack_thresh = + ConfigManager::getInstance().getRackThresholds(); + + // Fallback if empty (should not happen with getBeamThresholds defaults) + if (actual_beam_thresh.size() < 4) + actual_beam_thresh = DEFAULT_BEAM_THRESHOLDS; + if (actual_rack_thresh.size() < 4) + actual_rack_thresh = DEFAULT_RACK_THRESHOLDS; + + // 制作 json 阈值字符串的辅助函数 + auto make_json_thresh = [](const std::vector &t) { + return "{\"A\":" + std::to_string(t[0]) + + ",\"B\":" + std::to_string(t[1]) + + ",\"C\":" + std::to_string(t[2]) + + ",\"D\":" + std::to_string(t[3]) + "}"; + }; + + if (actual_beam_thresh.size() >= 4) { + result.beam_def_mm_threshold = make_json_thresh(actual_beam_thresh); + } + if (actual_rack_thresh.size() >= 4) { + result.rack_def_mm_threshold = make_json_thresh(actual_rack_thresh); + } + + // 检查状态 + // 横梁:正值为向下(Y+)。检查 C 和 D。 + // 负值为向上(Y-)。检查 A 和 B? + // 用户要求:“横梁由于货物仅向下弯曲...(Y 正方向)” + // 所以我们主要使用 max_beam_deflection检查 C(警告)和 D(报警)(这是 + // >0)。 遗留阈值具有负值,可能用于范围检查。 我们将假设标准 [A(neg), + // B(neg), C(pos), D(pos)] 格式。 + + bool beam_warn = (max_beam_deflection >= actual_beam_thresh[2]); // > C + bool beam_alrm = (max_beam_deflection >= actual_beam_thresh[3]); // > D + + // Rack: Bends Left or Right. We took Abs() -> always positive. + // So we check against C and D. + bool rack_warn = (max_rack_deflection >= actual_rack_thresh[2]); + bool rack_alrm = (max_rack_deflection >= actual_rack_thresh[3]); + + auto make_json_status = [](bool w, bool a) { + return "{\"warning\":" + std::string(w ? "true" : "false") + + ",\"alarm\":" + std::string(a ? 
"true" : "false") + "}"; + }; + + result.beam_def_mm_warning_alarm = make_json_status(beam_warn, beam_alrm); + result.rack_def_mm_warning_alarm = make_json_status(rack_warn, rack_alrm); + + // 标记为成功 + result.success = true; + +#ifdef DEBUG_ROI_SELECTION + std::cout << "[BeamRackDeflectionAlgorithm] Press ANY KEY to close graphs " + "and continue..." + << std::endl; + cv::waitKey(0); + cv::destroyAllWindows(); +#endif + + return result.success; + } else { + // --- 模拟数据逻辑 --- + std::cout << "[BeamRackDeflectionAlgorithm] Using FAKE DATA implementation " + "(Switch OFF)." + << std::endl; + + result.beam_def_mm_value = 5.5f; // 模拟横梁弯曲 + result.rack_def_mm_value = 2.2f; // 模拟立柱弯曲 + result.success = true; + + // 设置模拟阈值 + // std::vector actual_beam_thresh = beam_thresholds.empty() ? + // DEFAULT_BEAM_THRESHOLDS : beam_thresholds; std::vector + // actual_rack_thresh = rack_thresholds.empty() ? DEFAULT_RACK_THRESHOLDS : + // rack_thresholds; + std::vector actual_beam_thresh = + ConfigManager::getInstance().getBeamThresholds(); + std::vector actual_rack_thresh = + ConfigManager::getInstance().getRackThresholds(); + if (actual_beam_thresh.size() < 4) + actual_beam_thresh = DEFAULT_BEAM_THRESHOLDS; + if (actual_rack_thresh.size() < 4) + actual_rack_thresh = DEFAULT_RACK_THRESHOLDS; + + auto make_json_thresh = [](const std::vector &t) { + return "{\"A\":" + std::to_string(t[0]) + + ",\"B\":" + std::to_string(t[1]) + + ",\"C\":" + std::to_string(t[2]) + + ",\"D\":" + std::to_string(t[3]) + "}"; + }; + if (actual_beam_thresh.size() >= 4) + result.beam_def_mm_threshold = make_json_thresh(actual_beam_thresh); + if (actual_rack_thresh.size() >= 4) + result.rack_def_mm_threshold = make_json_thresh(actual_rack_thresh); + + // 设置模拟警告/报警状态 + result.beam_def_mm_warning_alarm = "{\"warning\":false,\"alarm\":false}"; + result.rack_def_mm_warning_alarm = "{\"warning\":false,\"alarm\":false}"; + + return result.success; + } +} diff --git 
a/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.h b/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.h new file mode 100644 index 0000000..33c9cc1 --- /dev/null +++ b/image_capture/src/algorithm/detections/beam_rack_deflection/beam_rack_deflection_detection.h @@ -0,0 +1,134 @@ +#pragma once + +#include "../../../common_types.h" +#include +#include +#include + + +/** + * @brief 四边形ROI结构(四个点定义) + */ +struct QuadrilateralROI { + cv::Point2i points[4]; // 四个点:左上、右上、右下、左下(按顺序) + + QuadrilateralROI() { + for (int i = 0; i < 4; ++i) { + points[i] = cv::Point2i(0, 0); + } + } + + QuadrilateralROI(const cv::Point2i &pt0, const cv::Point2i &pt1, + const cv::Point2i &pt2, const cv::Point2i &pt3) { + points[0] = pt0; + points[1] = pt1; + points[2] = pt2; + points[3] = pt3; + } + + bool isValid() const { + // 检查是否有有效的点(不全为0) + for (int i = 0; i < 4; ++i) { + if (points[i].x > 0 || points[i].y > 0) { + return true; + } + } + return false; + } + + cv::Rect getBoundingRect() const { + if (!isValid()) { + return cv::Rect(); + } + + int min_x = points[0].x, max_x = points[0].x; + int min_y = points[0].y, max_y = points[0].y; + + for (int i = 1; i < 4; ++i) { + min_x = std::min(min_x, points[i].x); + max_x = std::max(max_x, points[i].x); + min_y = std::min(min_y, points[i].y); + max_y = std::max(max_y, points[i].y); + } + + return cv::Rect(min_x, min_y, max_x - min_x, max_y - min_y); + } +}; + +/** + * @brief 横梁变形检测算法结果 + */ +struct BeamRackDeflectionResult { + // 变形量 + float beam_def_mm_value; // 横梁弯曲量(mm) + float rack_def_mm_value; // 立柱弯曲量(mm) + + // 阈值(JSON字符串) + std::string beam_def_mm_threshold; + std::string rack_def_mm_threshold; + + // 警告和报警信号(JSON字符串) + std::string beam_def_mm_warning_alarm; + std::string rack_def_mm_warning_alarm; + + bool success; // 算法是否执行成功 + + BeamRackDeflectionResult() + : beam_def_mm_value(0.0f), rack_def_mm_value(0.0f), success(false) {} +}; + +/** + * @brief 
横梁变形检测算法 + * + * 检测横梁和货架立柱的变形 + */ +class BeamRackDeflectionAlgorithm { +public: + // 默认ROI点定义(四个点:左上、右上、右下、左下) + // 横梁ROI默认点 + static const std::vector DEFAULT_BEAM_ROI_POINTS; + // 立柱ROI默认点 + static const std::vector DEFAULT_RACK_ROI_POINTS; + + // 默认阈值定义(四个值:A负方向报警, B负方向警告, C正方向警告, D正方向报警) + // 横梁阈值默认值 + static const std::vector DEFAULT_BEAM_THRESHOLDS; + // 立柱阈值默认值 + static const std::vector DEFAULT_RACK_THRESHOLDS; + + // 2180mm 横梁 ROI (Placeholder) + static const std::vector BEAM_ROI_2180; + // 1380mm 横梁 ROI (Placeholder) + static const std::vector BEAM_ROI_1380; + + // ... (keep class definition) + + /** + * 执行横梁变形检测(使用深度图方案) + * @param depth_img 深度图像 + * @param color_img 彩色图像 + * @param side 货架侧("left"或"right") + * @param result [输出] 检测结果 + * @param point_cloud [可选] 点云数据 + * @param beam_roi_points + * 横梁ROI的四个点(左上、右上、右下、左下),为空时使用默认值 + * @param rack_roi_points + * 立柱ROI的四个点(左上、右上、右下、左下),为空时使用默认值 + * @param beam_thresholds 横梁阈值四个值[A,B,C,D],为空时使用默认值 + * @param rack_thresholds 立柱阈值四个值[A,B,C,D],为空时使用默认值 + * @return 是否检测成功 + */ + static bool + detect(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, BeamRackDeflectionResult &result, + const std::vector *point_cloud = nullptr, + const std::vector &beam_roi_points = + std::vector(), + const std::vector &rack_roi_points = + std::vector(), + const std::vector &beam_thresholds = std::vector(), + const std::vector &rack_thresholds = std::vector()); + +private: + static bool loadCalibration(Eigen::Matrix4d &transform); +}; diff --git a/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.cpp b/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.cpp new file mode 100644 index 0000000..93bbb66 --- /dev/null +++ b/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.cpp @@ -0,0 +1,1396 @@ +#include "pallet_offset_detection.h" +#include "../../../common/config_manager.h" +#include "../../../device/device_manager.h" 
+#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +// 辅助:生成 JSON 字符串 +namespace { +std::string format_threshold(float a, float b, float c, float d) { + QJsonObject obj; + obj["A"] = a; + obj["B"] = b; + obj["C"] = c; + obj["D"] = d; + return QJsonDocument(obj).toJson(QJsonDocument::Compact).toStdString(); +} + +std::string format_alarm(bool warn, bool alarm) { + QJsonObject obj; + obj["warning"] = warn; + obj["alarm"] = alarm; + return QJsonDocument(obj).toJson(QJsonDocument::Compact).toStdString(); +} +} // namespace + +// --------------------------------------------------------- +// 静态辅助:交互式 ROI 选择 +// --------------------------------------------------------- +std::vector +PalletOffsetAlgorithm::selectPolygonROI(const cv::Mat &visual_img) { + std::vector clicks; + std::string winName = "Select Pallet ROI (Click 4 points)"; + + cv::namedWindow(winName, cv::WINDOW_NORMAL); + cv::resizeWindow(winName, 800, 600); // 确保窗口大小合适 + + // 鼠标回调 + cv::setMouseCallback( + winName, + [](int event, int x, int y, int flags, void *userdata) { + auto *points = static_cast *>(userdata); + if (event == cv::EVENT_LBUTTONDOWN) { + if (points->size() < 4) { + points->push_back(cv::Point(x, y)); + std::cout << "Clicked: (" << x << ", " << y << ")" << std::endl; + } + } + }, + &clicks); + + while (clicks.size() < 4) { + cv::Mat display = visual_img.clone(); + // 绘制已选点 + for (size_t i = 0; i < clicks.size(); ++i) { + cv::circle(display, clicks[i], 5, cv::Scalar(0, 0, 255), -1); // 红点 + if (i > 0) + cv::line(display, clicks[i - 1], clicks[i], cv::Scalar(0, 255, 0), + 2); // 绿线连接 + } + + // 显示文本提示 + cv::putText(display, "Click 4 points to define ROI", cv::Point(20, 30), + cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 255, 255), 2); + + cv::imshow(winName, display); + int key = cv::waitKey(30); + if (key == 27) { // ESC 退出 + cv::destroyWindow(winName); + return {}; + } + } + + // 闭合多边形显示 + cv::Mat final_display 
= visual_img.clone(); + for (size_t i = 0; i < clicks.size(); ++i) { + cv::circle(final_display, clicks[i], 5, cv::Scalar(0, 0, 255), -1); + if (i > 0) + cv::line(final_display, clicks[i - 1], clicks[i], cv::Scalar(0, 255, 0), + 2); + } + cv::line(final_display, clicks.back(), clicks.front(), cv::Scalar(0, 255, 0), + 2); // 闭合 + + cv::putText(final_display, "ROI Selected. Press ANY KEY to continue.", + cv::Point(20, 30), cv::FONT_HERSHEY_SIMPLEX, 0.8, + cv::Scalar(0, 255, 0), 2); + + cv::imshow(winName, final_display); + cv::waitKey(0); + cv::destroyWindow(winName); + + std::vector result; + for (const auto &p : clicks) + result.push_back(p); + return result; +} + +// --------------------------------------------------------- +// 静态辅助:加载标定参数 +// --------------------------------------------------------- +bool PalletOffsetAlgorithm::loadCalibration(Eigen::Matrix4d &transform) { + // 默认单位矩阵 + transform = Eigen::Matrix4d::Identity(); + + // DEBUG LOGGING TO FILE + QFile debugFile("calib_debug.log"); + if (debugFile.open(QIODevice::WriteOnly | QIODevice::Append | + QIODevice::Text)) { + QTextStream out(&debugFile); + out << "------------------------------------------------\n"; + out << "loadCalibration called at " + << QDateTime::currentDateTime().toString() << "\n"; + } + + QStringList search_dirs; + search_dirs << QCoreApplication::applicationDirPath(); + search_dirs << QDir::currentPath(); + search_dirs + << "D:/Git/stereo_warehouse_inspection/image_capture/build/bin/Debug"; + + search_dirs.removeDuplicates(); + + for (const QString &dirPath : search_dirs) { + if (debugFile.isOpen()) { + QTextStream(&debugFile) << "Searching in: " << dirPath << "\n"; + } + + QDir dir(dirPath); + QStringList filters; + filters << "calibration_result_*.json"; + dir.setNameFilters(filters); + QFileInfoList list = dir.entryInfoList(QDir::Files, QDir::Time); + + if (!list.isEmpty()) { + QString filePath = list.first().absoluteFilePath(); + if (debugFile.isOpen()) { + 
QTextStream(&debugFile) << "Found file: " << filePath << "\n"; + } + std::cout << "[PalletOffset] Loading calibration from: " + << filePath.toStdString() << std::endl; + + QFile file(filePath); + if (file.open(QIODevice::ReadOnly)) { + QJsonDocument doc = QJsonDocument::fromJson(file.readAll()); + if (!doc.isNull()) { + QJsonObject root = doc.object(); + if (root.contains("transformation_matrix")) { + QJsonArray arr = root["transformation_matrix"].toArray(); + if (arr.size() == 16) { + if (debugFile.isOpen()) { + QTextStream(&debugFile) << "Matrix Loaded. Diagonal: "; + } + std::cout << "[PalletOffset] Matrix Diagonal: "; + for (int i = 0; i < 4; ++i) { + for (int j = 0; j < 4; ++j) { + transform(i, j) = arr[i * 4 + j].toDouble(); + if (i == j) { + std::cout << transform(i, j) << " "; + if (debugFile.isOpen()) { + QTextStream(&debugFile) << transform(i, j) << " "; + } + } + } + } + std::cout << std::endl; + if (debugFile.isOpen()) { + QTextStream(&debugFile) << "\nSUCCESS.\n"; + debugFile.close(); + } + return true; + } + } + } + } + } + } + + if (debugFile.isOpen()) { + QTextStream(&debugFile) << "FAILED to find calibration file.\n"; + debugFile.close(); + } + std::cerr << "[PalletOffset] Warning: No calibration file found in search " + "paths. Using Identity." + << std::endl; + return false; +} + +// --------------------------------------------------------- +// 主检测函数 +// --------------------------------------------------------- +bool PalletOffsetAlgorithm::detect(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + PalletOffsetResult &result, + const std::vector *point_cloud, + const std::vector &roi_points, + const CameraIntrinsics &input_intrinsics, + const cv::Mat *calib_mat_override) { + + // 1. 验证输入 + if (depth_img.empty()) { + std::cerr << "[PalletOffset] Error: Depth image is empty." 
<< std::endl; + return false; + } + + if (depth_img.type() != CV_16UC1 && depth_img.type() != CV_32FC1) { + std::cerr << "[PalletOffset] Error: Invalid depth image type. Expected " + "CV_16UC1 or CV_32FC1. Got: " + << depth_img.type() << std::endl; + return false; + } + + // 2. 准备内参 + CameraIntrinsics intr = input_intrinsics; + if (intr.fx == 0 && intr.fy == 0) { + // 尝试自动获取 + float fx, fy, cx, cy; + // 假设使用设备0 + if (DeviceManager::getInstance().getDepthCameraIntrinsics(0, fx, fy, cx, + cy)) { + intr = CameraIntrinsics(fx, fy, cx, cy); + std::cout << "[PalletOffset] Auto-loaded intrinsics: fx=" << fx + << " fy=" << fy << std::endl; + } else { + std::cerr << "[PalletOffset] Warning: No intrinsics provided and " + "auto-load failed." + << std::endl; + } + } + + // 3. 加载外参 (标定矩阵) + // 3. 加载外参 (标定矩阵) + Eigen::Matrix4d calib_mat_eigen = Eigen::Matrix4d::Identity(); + bool calib_loaded = false; + + if (calib_mat_override && !calib_mat_override->empty()) { + // Use provided calibration + for (int i = 0; i < 4; ++i) { + for (int j = 0; j < 4; ++j) { + calib_mat_eigen(i, j) = calib_mat_override->at(i, j); + } + } + std::cout << "[PalletOffset] Using provided calibration override." + << std::endl; + calib_loaded = true; + } else { + // Try to load from file + if (loadCalibration(calib_mat_eigen)) { + std::cout << "[PalletOffset] Calibration loaded from file." << std::endl; + calib_loaded = true; + } + } + + // Rename for usage in lambdas + const Eigen::Matrix4d &calib_mat = calib_mat_eigen; + + // --------------------------------------------------------- + // 4. ROI 处理 + // --------------------------------------------------------- + std::vector actual_roi = roi_points; + if (actual_roi.empty()) { + std::cout + << "[PalletOffset] No ROI provided. Triggering interactive selection..." 
+ << std::endl; + + cv::Mat vis_img; + // 将深度图转为 8位进行可视化 + cv::normalize(depth_img, vis_img, 0, 255, cv::NORM_MINMAX, CV_8U); + cv::cvtColor(vis_img, vis_img, cv::COLOR_GRAY2BGR); + + actual_roi = selectPolygonROI(vis_img); + if (actual_roi.size() < 4) { + std::cerr << "[PalletOffset] ROI selection cancelled." << std::endl; + return false; + } + } + + cv::Rect roi_bbox = cv::boundingRect(actual_roi); + // 边界检查 + roi_bbox &= cv::Rect(0, 0, depth_img.cols, depth_img.rows); + if (roi_bbox.empty()) + return false; + + // --- NEW: Generate Polygon Mask for Precise ROI --- + // Make mask relative to the roi_bbox to match roi_depth coordinates + cv::Mat poly_mask = cv::Mat::zeros(roi_bbox.size(), CV_8U); + std::vector local_roi_pts; + for (const auto &p : actual_roi) { + local_roi_pts.push_back(p - roi_bbox.tl()); + } + const cv::Point *pts_ptr = local_roi_pts.data(); + int npts = (int)local_roi_pts.size(); + cv::fillPoly(poly_mask, &pts_ptr, &npts, 1, cv::Scalar(255)); + // ------------------------------------------------ + + // --------------------------------------------------------- + // 5. 2D 特征检测 (Depth Segmentation) + // --------------------------------------------------------- + cv::Mat roi_depth = depth_img(roi_bbox).clone(); + cv::Mat mask; + + // 5.1 RANSAC 平面拟合分割 (Enhanced) + // 策略:先用直方图找大概的 Z 范围,排除前景噪声,再 RANSAC + + // A. 
直方图预筛选 (Histogram Pre-filtering) + double min_val, max_val; + cv::minMaxLoc(roi_depth, &min_val, &max_val); + int bins = 100; + float hist_range[] = {(float)min_val, (float)max_val + 1}; + const float *histRangePtr = {hist_range}; + cv::Mat hist; + cv::calcHist(&roi_depth, 1, 0, roi_depth > 0, hist, 1, &bins, &histRangePtr, + true, false); + + int max_bin_idx = 0; + float max_bin_val = 0; + for (int i = 0; i < bins; ++i) { + if (hist.at(i) > max_bin_val) { + max_bin_val = hist.at(i); + max_bin_idx = i; + } + } + float peak_depth = + hist_range[0] + max_bin_idx * (hist_range[1] - hist_range[0]) / bins; + std::cout << "[PalletOffset] Histogram Peak Depth: " << peak_depth << "mm" + << std::endl; + + // B. 收集有效的 3D 点 (仅限 Peak 附近) + struct Point3f { + float x, y, z; + }; + std::vector cloud; + cloud.reserve(roi_depth.rows * roi_depth.cols / 4); + + float depth_min_limit = peak_depth - 150.0f; + float depth_max_limit = peak_depth + 150.0f; + + for (int r = 0; r < roi_depth.rows; r += 2) { + for (int c = 0; c < roi_depth.cols; c += 2) { + // Check Polygon Mask first + if (poly_mask.at(r, c) == 0) + continue; + + float z = 0; + if (roi_depth.type() == CV_16U) + z = roi_depth.at(r, c); + else if (roi_depth.type() == CV_32F) + z = roi_depth.at(r, c); + + // Filter out noise and distant background + if (z > depth_min_limit && z < depth_max_limit) { + float x = (c + roi_bbox.x - intr.cx) * z / intr.fx; + float y = (r + roi_bbox.y - intr.cy) * z / intr.fy; + cloud.push_back({x, y, z}); + } + } + } + + // C. 
RANSAC Parameters (Relaxed) + int max_iters = 200; + float dist_thresh = 15.0f; // Relaxed inlier distance + + float best_a = 0, best_b = 0, best_c = 0, best_d = 0; + int best_inliers = 0; + + if (cloud.size() > 50) { + std::srand(cv::getTickCount()); + for (int i = 0; i < max_iters; ++i) { + int idx1 = std::rand() % cloud.size(); + int idx2 = std::rand() % cloud.size(); + int idx3 = std::rand() % cloud.size(); + + Point3f p1 = cloud[idx1]; + Point3f p2 = cloud[idx2]; + Point3f p3 = cloud[idx3]; + + float v1x = p2.x - p1.x, v1y = p2.y - p1.y, v1z = p2.z - p1.z; + float v2x = p3.x - p1.x, v2y = p3.y - p1.y, v2z = p3.z - p1.z; + + float a = v1y * v2z - v1z * v2y; + float b = v1z * v2x - v1x * v2z; + float c = v1x * v2y - v1y * v2x; + + float norm = std::sqrt(a * a + b * b + c * c); + if (norm < 1e-4) + continue; + + a /= norm; + b /= norm; + c /= norm; + float d = -(a * p1.x + b * p1.y + c * p1.z); + + int inliers = 0; + for (int k = 0; k < cloud.size(); k += 5) { + float dist = + std::abs(a * cloud[k].x + b * cloud[k].y + c * cloud[k].z + d); + if (dist < dist_thresh) + inliers++; + } + + if (inliers > best_inliers) { + best_inliers = inliers; + best_a = a; + best_b = b; + best_c = c; + best_d = d; + } + } + // ... (RANSAC Loop End) ... + } else { + std::cerr << "[PalletOffset] Not enough points near peak depth." + << std::endl; + best_c = -1.0; + best_d = peak_depth; + } + + // --------------------------------------------------------- + // Optimization: Least-Squares Refinement (PCA) + // RANSAC only gives the best plane from 3 points. + // We refine it using ALL inliers for maximum accuracy. 
+ // --------------------------------------------------------- + if (best_inliers > 50 && std::abs(best_c) > 0.001) { + std::vector inliers; + inliers.reserve(cloud.size()); + + // Collect all RANSAC inliers + for (const auto &p : cloud) { + float dist = + std::abs(best_a * p.x + best_b * p.y + best_c * p.z + best_d); + if (dist < dist_thresh) { + inliers.push_back(cv::Point3f(p.x, p.y, p.z)); + } + } + + if (inliers.size() > 10) { + // Use PCA to fit best plane to all inliers + cv::Mat data(inliers.size(), 3, CV_32F, inliers.data()); // Zero-copy + cv::PCA pca(data, cv::Mat(), cv::PCA::DATA_AS_ROW); + + // The normal is the eigenvector corresponding to the smallest eigenvalue + // (last row) + cv::Mat normal = pca.eigenvectors.row(2); // CV_32F + cv::Mat mean = pca.mean; // CV_32F + + float new_a = normal.at(0); + float new_b = normal.at(1); + float new_c = normal.at(2); + + // Ensure normal points towards camera (negative Z) + // Camera is at (0,0,0), looking down +Z? No, typically Z is forward. + // Existing logic: best_c should be negative? + // Let's stick to the convention: normal points against Z+ (towards camera + // origin if object is at Z+) or we align it with the previous best_c to + // avoid flipping. + if (new_c * best_c < 0) { + new_a = -new_a; + new_b = -new_b; + new_c = -new_c; + } + + float new_d = -(new_a * mean.at(0) + new_b * mean.at(1) + + new_c * mean.at(2)); + + // Update + best_a = new_a; + best_b = new_b; + best_c = new_c; + best_d = new_d; + std::cout << "[PalletOffset] Refined Plane via PCA (" << inliers.size() + << " points)." << std::endl; + } + } + + std::cout << "[PalletOffset] Final Plane: " << best_a << "x + " << best_b + << "y + " << best_c << "z + " << best_d << " = 0" << std::endl; + + // D. 
Generate Mask + if (best_c > 0) { + best_a = -best_a; + best_b = -best_b; + best_c = -best_c; + best_d = -best_d; + } + + cv::threshold(roi_depth, mask, 0, 255, cv::THRESH_BINARY); // Init + mask = cv::Mat::zeros(roi_depth.size(), CV_8U); + + for (int r = 0; r < roi_depth.rows; ++r) { + for (int c = 0; c < roi_depth.cols; ++c) { + // Check Polygon Mask + if (poly_mask.at(r, c) == 0) + continue; + + float z = 0; + if (roi_depth.type() == CV_16U) + z = roi_depth.at(r, c); + else + z = roi_depth.at(r, c); + + // Case 1: Void/Black Hole (Invalid Depth) inside ROI + // Note: Often boundaries are also invalid. + // We assume ROI is mostly pallet, so internal zeros are holes. + if (z <= 10) { + mask.at(r, c) = 255; + continue; + } + + float x = (c + roi_bbox.x - intr.cx) * z / intr.fx; + float y = (r + roi_bbox.y - intr.cy) * z / intr.fy; + + // Case 2: Deep Hole (Valid Depth but behind plane) + if (std::abs(best_c) > 1e-4) { + float plane_z = -(best_a * x + best_b * y + best_d) / best_c; + if (z > plane_z + 20.0f) { // 20mm deeper + mask.at(r, c) = 255; + } + } + } + } + + // 5.2 形态学处理 (Stronger close to fill voids) + cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5)); + cv::morphologyEx(mask, mask, cv::MORPH_OPEN, kernel); + cv::morphologyEx(mask, mask, cv::MORPH_CLOSE, kernel); + + // --- Visualization Step 1: Segmentation --- + // --- Visualization Step 1: Segmentation (Full Image) --- + { + // 1. Create full-size visualization background + cv::Mat full_vis; + cv::normalize(depth_img, full_vis, 0, 255, cv::NORM_MINMAX, CV_8U); + if (full_vis.channels() == 1) + cv::cvtColor(full_vis, full_vis, cv::COLOR_GRAY2BGR); + + // 2. Overlay mask in ROI (Green channel) + cv::Mat roi_vis = full_vis(roi_bbox); + std::vector channels; + cv::split(roi_vis, channels); + // Add mask to Green channel + cv::add(channels[1], mask, channels[1]); + cv::merge(channels, roi_vis); + + // 3. 
Draw ROI Box + cv::rectangle(full_vis, roi_bbox, cv::Scalar(0, 0, 255), 2); + + // 4. Show + std::string win = "Debug: Segmentation (RANSAC Plane Fit)"; + cv::namedWindow(win, cv::WINDOW_NORMAL); + cv::putText(full_vis, "Green = Holes", cv::Point(20, 50), + cv::FONT_HERSHEY_SIMPLEX, 1.0, cv::Scalar(0, 255, 0), 2); + cv::imshow(win, full_vis); + std::cout << "[PalletOffset] Debug: Segmentation shown. Press ANY KEY to " + "continue..." + << std::endl; + cv::waitKey(0); + cv::destroyWindow(win); + } + + // 5.3 轮廓提取 + std::vector> contours; + cv::findContours(mask, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE); + + std::vector candidate_holes; + cv::Mat cont_vis = cv::Mat::zeros(mask.size(), CV_8UC3); // For visualization + + std::cout << "[PalletOffset] Found " << contours.size() << " contours." + << std::endl; + + int idx = 0; + for (const auto &cnt : contours) { + cv::Rect r = cv::boundingRect(cnt); + double area = cv::contourArea(cnt); + float aspect = (float)r.width / r.height; + + // 筛选逻辑 (根据实际插孔大小调整) + // 托盘插孔在 1m 处通常很大 (>10000 px). 提高阈值以排除小干扰 + bool area_pass = (area > 2000 && area < 100000); // Min area 2000, Max 100k + bool aspect_pass = true; + + if (area_pass) { // 宽孔 + candidate_holes.push_back(r); + cv::rectangle(cont_vis, r, cv::Scalar(0, 255, 255), 2); // 黄色:符合形状 + std::cout << " Contour #" << idx << ": Area=" << area + << " Aspect=" << aspect << " [PASS]" << std::endl; + } else { + cv::rectangle(cont_vis, r, cv::Scalar(100, 0, 100), 1); // 紫色:被拒绝 + std::cout << " Contour #" << idx << ": Area=" << area + << " Aspect=" << aspect << " [REJECT - " + << (!area_pass ? "Area" : "Other") << "]" << std::endl; + } + idx++; + } + + // --- Visualization Step 2: Contours (Full Image) --- + { + std::string win = "Debug: Contours"; + cv::namedWindow(win, cv::WINDOW_NORMAL); + + // 1. 
Full size background + cv::Mat full_cont_vis; + cv::normalize(depth_img, full_cont_vis, 0, 255, cv::NORM_MINMAX, CV_8U); + if (full_cont_vis.channels() == 1) + cv::cvtColor(full_cont_vis, full_cont_vis, cv::COLOR_GRAY2BGR); + + // 2. Draw ROI + cv::rectangle(full_cont_vis, roi_bbox, cv::Scalar(0, 0, 255), 2); + + // 3. Draw filtered contours (Shifted by ROI offset) + int idx = 0; + for (const auto &cnt : contours) { + std::vector global_cnt = cnt; + for (auto &p : global_cnt) + p += roi_bbox.tl(); + + cv::Rect r = cv::boundingRect(global_cnt); + double area = cv::contourArea(cnt); // Area is same + + // Re-check filter to determine color + bool area_pass = (area > 2000 && area < 100000); + + if (area_pass) { + cv::rectangle(full_cont_vis, r, cv::Scalar(0, 255, 255), 2); // Yellow + cv::putText(full_cont_vis, + "#" + std::to_string(idx) + + " A:" + std::to_string((int)area), + r.tl() - cv::Point(0, 5), cv::FONT_HERSHEY_SIMPLEX, 0.5, + cv::Scalar(0, 255, 255)); + } else { + cv::rectangle(full_cont_vis, r, cv::Scalar(100, 0, 100), 1); // Purple + } + idx++; + } + + cv::imshow(win, full_cont_vis); + std::cout + << "[PalletOffset] Debug: Contours shown. Press ANY KEY to continue..." + << std::endl; + cv::waitKey(0); + cv::destroyWindow(win); + } + + // 5.4 匹配左右孔 (寻找最合理的两个) + std::cout << "[PalletOffset] Matching hole pair from " + << candidate_holes.size() << " candidates..." 
<< std::endl; + + // 策略:按面积排序,取最大的两个,且 Y 坐标接近 + std::sort( + candidate_holes.begin(), candidate_holes.end(), + [](const cv::Rect &a, const cv::Rect &b) { return a.area() > b.area(); }); + + cv::Rect left_rect, right_rect; + bool found_pair = false; + + // cv::Rect left_rect, right_rect; // Already declared above + // bool found_pair = false; // Already declared above + double max_pair_area = 0; + + // Iterate through all pairs to find the best match + for (size_t i = 0; i < candidate_holes.size(); ++i) { + for (size_t j = i + 1; j < candidate_holes.size(); ++j) { + const cv::Rect &r1 = candidate_holes[i]; + const cv::Rect &r2 = candidate_holes[j]; + + // 1. Check Y-alignment (Vertical alignment) + if (abs(r1.y - r2.y) > 50) + continue; // Y difference too large + + // 2. Check Size Consistency (Area should be somewhat similar, e.g., + // within 50% difference) + double area1 = r1.area(); + double area2 = r2.area(); + double area_diff = abs(area1 - area2); + double max_a = std::max(area1, area2); + if (area_diff > 0.6 * max_a) + continue; // Area difference too large (>60%) + + // 3. Check X-separation (Holes shouldn't be too close or overlapping + // horizontally? Pallet holes have spacing) Optional: min distance check + + // This is a valid pair candidate. Check if it's the "largest" pair so + // far. 
+ double total_area = area1 + area2; + if (total_area > max_pair_area) { + max_pair_area = total_area; + found_pair = true; + if (r1.x < r2.x) { + left_rect = r1; + right_rect = r2; + } else { + left_rect = r2; + right_rect = r1; + } + } + } + } + + if (found_pair) { + std::cout << "[PalletOffset] Selected pair: Left Area=" << left_rect.area() + << " Right Area=" << right_rect.area() << std::endl; + + // Draw selected pair on debug (if we were showing it) + // cv::rectangle(cont_vis, left_rect, cv::Scalar(0, 255, 0), 2); + // cv::rectangle(cont_vis, right_rect, cv::Scalar(0, 0, 255), 2); + } + + if (!found_pair) { + std::cerr << "[PalletOffset] Failed to find hole pair." << std::endl; + return false; + } + + // --------------------------------------------------------- + // 6. 稀疏 3D 重建与矫正 + // --------------------------------------------------------- + + // --------------------------------------------------------- + // 6. 稀疏 3D 重建与矫正 + // --------------------------------------------------------- + + // 辅助 lambda: 2D(Local ROI) -> World 3D + // Modify: Capture RANSAC plane params for 0-depth handling + auto toWorld = [&](int u_roi, int v_roi) -> Eigen::Vector3d { + // 还原全局坐标 + int u = u_roi + roi_bbox.x; + int v = v_roi + roi_bbox.y; + + // 获取深度 (mm) + float z_val = 0; + if (depth_img.type() == CV_16U) { + z_val = (float)depth_img.at(v, u); + } else if (depth_img.type() == CV_32F) { + z_val = depth_img.at(v, u); + } + + // Strategy for "Black Holes" (z <= 0 or invalid): + // Intersect ray with RANSAC plane. 
+ + float Xc = 0, Yc = 0, Zc = 0; + + if (z_val > 0) { + // Normal case: Valid depth + Xc = (u - intr.cx) * z_val / intr.fx; + Yc = (v - intr.cy) * z_val / intr.fy; + Zc = z_val; + } else { + // Invalid depth: Use Plane Intersection + // Ray: P = t * D, where D = ((u-cx)/fx, (v-cy)/fy, 1) + // Plane: ax + by + cz + d = 0 + // t * (a*Dx + b*Dy + c*Dz) + d = 0 + // t = -d / (a*Dx + b*Dy + c*Dz) + + if (std::abs(best_c) > 1e-4) { // Plane valid + float dx = (u - intr.cx) / intr.fx; + float dy = (v - intr.cy) / intr.fy; + float dz = 1.0f; + + float denom = best_a * dx + best_b * dy + best_c * dz; + if (std::abs(denom) > 1e-6) { + float t = -best_d / denom; + if (t > 0) { + Xc = t * dx; + Yc = t * dy; + Zc = t * dz; + } + } + } + } + + if (Zc <= 0) + return Eigen::Vector3d(0, 0, 0); // Still invalid + + Eigen::Vector4d P_cam(Xc, Yc, Zc, 1.0); + + // World Space + Eigen::Vector4d P_w = calib_mat * P_cam; + + return P_w.head<3>(); + }; + + // 计算中心点世界坐标 + cv::Point cL(left_rect.x + left_rect.width / 2, + left_rect.y + left_rect.height / 2); + cv::Point cR(right_rect.x + right_rect.width / 2, + right_rect.y + right_rect.height / 2); + + // Use Plane Projection for Hole Centers (Inline) + Eigen::Vector3d P_L_cam; + { + float dx = (cL.x + roi_bbox.x - intr.cx) / intr.fx; + float dy = (cL.y + roi_bbox.y - intr.cy) / intr.fy; + float dz = 1.0f; + float val = best_a * dx + best_b * dy + best_c * dz; + if (std::abs(val) > 1e-6) { + float t = -best_d / val; + P_L_cam = Eigen::Vector3d(t * dx, t * dy, t * dz); + } else { + P_L_cam = Eigen::Vector3d(0, 0, 0); + } + } + + Eigen::Vector3d P_R_cam; + { + float dx = (cR.x + roi_bbox.x - intr.cx) / intr.fx; + float dy = (cR.y + roi_bbox.y - intr.cy) / intr.fy; + float dz = 1.0f; + float val = best_a * dx + best_b * dy + best_c * dz; + if (std::abs(val) > 1e-6) { + float t = -best_d / val; + P_R_cam = Eigen::Vector3d(t * dx, t * dy, t * dz); + } else { + P_R_cam = Eigen::Vector3d(0, 0, 0); + } + } + + // Convert to World + Eigen::Vector4d 
P_L_4d(P_L_cam.x(), P_L_cam.y(), P_L_cam.z(), 1.0); + Eigen::Vector4d P_R_4d(P_R_cam.x(), P_R_cam.y(), P_R_cam.z(), 1.0); + + Eigen::Vector3d P_L = (calib_mat * P_L_4d).head<3>(); + Eigen::Vector3d P_R = (calib_mat * P_R_4d).head<3>(); + + // Debug info for hole center depth recovery + std::cout << "[PalletOffset] Left Hole 3D: " << P_L.transpose() << std::endl; + std::cout << "[PalletOffset] Right Hole 3D: " << P_R.transpose() << std::endl; + + if (P_L.norm() == 0 || P_R.norm() == 0) { + std::cerr << "[PalletOffset] Invalid depth at hole centers (Plane fallback " + "failed)." + << std::endl; + return false; + } + + // --------------------------------------------------------- + // 7. 偏移量计算 + // --------------------------------------------------------- + // 7. 偏移量计算 + // --------------------------------------------------------- + + // [Improved Z-Calculation] + // Instead of just averaging P_L and P_R (which are hole centers), + // we calculate the depth of the "solid surface" between the holes. + + // NOTE: left_rect and right_rect are relative to 'roi_depth' (the ROI crop). + // We must convert them to GLOBAL coordinates for: + // 1. Correct 3D projection (using global intrinsics). + // 2. Correct sampling from full 'depth_img'. + + cv::Point2f center_uv_local = + (cv::Point2f(left_rect.tl() + left_rect.br()) * 0.5f + + cv::Point2f(right_rect.tl() + right_rect.br()) * 0.5f) * + 0.5f; + + cv::Point2f center_uv_global = + center_uv_local + cv::Point2f((float)roi_bbox.x, (float)roi_bbox.y); + + // ROI size for center sample (e.g. 
40x40) + int roi_w = 40; + int roi_h = 40; + cv::Rect center_roi_rect((int)center_uv_global.x - roi_w / 2, + (int)center_uv_global.y - roi_h / 2, roi_w, roi_h); + + // Boundary check against global image + center_roi_rect &= cv::Rect(0, 0, depth_img.cols, depth_img.rows); + + double avg_depth_val = 0; + if (!center_roi_rect.empty()) { + cv::Mat center_region = depth_img(center_roi_rect); + // Calculate mean of valid non-zero pixels + int valid_count = 0; + double sum_depth = 0; + + for (int r = 0; r < center_region.rows; ++r) { + for (int c = 0; c < center_region.cols; ++c) { + float d = 0; + if (depth_img.type() == CV_16UC1) + d = center_region.at(r, c); + else + d = center_region.at(r, c); + + if (d > 100) { // Valid depth threshold + sum_depth += d; + valid_count++; + } + } + } + + if (valid_count > 0) { + avg_depth_val = sum_depth / valid_count; + } else { + // Fallback to hole average + std::cout << "[PalletOffset] Center ROI invalid depth, fallback to holes." + << std::endl; + avg_depth_val = (P_L.z() + P_R.z()) / 2.0; + } + } else { + std::cout << "[PalletOffset] Center ROI out of bounds, fallback to holes." 
+ << std::endl; + avg_depth_val = (P_L.z() + P_R.z()) / 2.0; + } + + // Re-calculate P_center using robust Z and GLOBAL UV + double p_center_x = (center_uv_global.x - intr.cx) * avg_depth_val / intr.fx; + double p_center_y = (center_uv_global.y - intr.cy) * avg_depth_val / intr.fy; + Eigen::Vector3d P_center(p_center_x, p_center_y, avg_depth_val); + + // Save absolute coordinates + result.abs_x = (float)P_center.x(); + result.abs_y = (float)P_center.y(); + result.abs_z = (float)P_center.z(); + + result.left_hole_pos = + Point3D((float)P_L.x(), (float)P_L.y(), (float)P_L.z()); + result.right_hole_pos = + Point3D((float)P_R.x(), (float)P_R.y(), (float)P_R.z()); + + // 旋转计算 (Yaw) - Current Angle + // Note: Negate atan2 so that Clockwise is POSITIVE (User Request) + double angle_cur = -std::atan2(P_R.z() - P_L.z(), P_R.x() - P_L.x()); + + // DEBUG LOGGING + { + QFile debugFile("calib_debug.log"); + if (debugFile.open(QIODevice::WriteOnly | QIODevice::Append | + QIODevice::Text)) { + QTextStream out(&debugFile); + out << "-------------------------------------------\n"; + out << "Detect Result:\n"; + out << "P_L (World): " << P_L.x() << ", " << P_L.y() << ", " << P_L.z() + << "\n"; + out << "P_R (World): " << P_R.x() << ", " << P_R.y() << ", " << P_R.z() + << "\n"; + out << "Angle (rad): " << angle_cur << " (" << angle_cur * 180.0 / CV_PI + << " deg)\n"; + out << "Calib Matrix (0,0): " << calib_mat(0, 0) + << "\n"; // Check if Identity (1.0) or Rotated (~0.98) + out << "-------------------------------------------\n"; + } + } + + Eigen::Vector3d P_ref(0, 0, 0); + // 尝试读取 Reference + // Use C-string array to avoid std::vector/std::string heap issues across + // module boundaries + const char *search_paths[] = { + "reference_pallet.json", + "../../../src/images_template/reference_pallet.json", + "../src/images_template/reference_pallet.json", + "../../src/images_template/reference_pallet.json"}; + + bool ref_loaded = false; + for (const char *path_cstr : search_paths) { + 
QString qpath = QString::fromUtf8(path_cstr); + if (QFile::exists(qpath)) { + QFile refLink(qpath); + if (refLink.open(QIODevice::ReadOnly)) { + QJsonDocument d = QJsonDocument::fromJson(refLink.readAll()); + QJsonObject o = d.object(); + if (o.contains("x") && o.contains("y") && o.contains("z")) { + P_ref.x() = o["x"].toDouble(); + P_ref.y() = o["y"].toDouble(); + P_ref.z() = o["z"].toDouble(); + std::cout << "[PalletOffset] Loaded reference from " << path_cstr + << std::endl; + + // Try to load reference hole positions for angle calculation + // Priority 1: Use explicitly saved 'reference_angle' + if (o.contains("reference_angle")) { + double angle_ref_deg = o["reference_angle"].toDouble(); + double angle_ref_rad = angle_ref_deg * CV_PI / 180.0; + result.rotation_angle_value = + (float)((angle_cur - angle_ref_rad) * 180.0 / CV_PI); + std::cout << " Ref Angle: " << angle_ref_deg + << " deg (From JSON)" << std::endl; + } + // Priority 2: Calculate from hole positions (Legacy compatibility) + else if (o.contains("left_hole") && o.contains("right_hole")) { + QJsonObject l = o["left_hole"].toObject(); + QJsonObject r = o["right_hole"].toObject(); + Eigen::Vector3d ref_L(l["x"].toDouble(), l["y"].toDouble(), + l["z"].toDouble()); + Eigen::Vector3d ref_R(r["x"].toDouble(), r["y"].toDouble(), + r["z"].toDouble()); + + // Calculate Reference Angle + double angle_ref_rad = + std::atan2(ref_R.z() - ref_L.z(), ref_R.x() - ref_L.x()); + result.rotation_angle_value = + (float)((angle_cur - angle_ref_rad) * 180.0 / CV_PI); + std::cout << " Ref Angle: " + << (angle_ref_rad * 180.0 / CV_PI) << " deg (Calculated)" + << std::endl; + } else { + // Fallback for very old reference files + double angle_ref = 0.0; + result.rotation_angle_value = + (float)((angle_cur - angle_ref) * 180.0 / CV_PI); + std::cout << " Ref Angle: 0.0 deg (Default - holes " + "not in ref)" + << std::endl; + } + + ref_loaded = true; + break; + } + } + } + } + + if (!ref_loaded) { + // Auto-save current as 
reference + P_ref = P_center; + result.rotation_angle_value = + 0.0f; // No reference, so relative rotation is 0 + std::cout << "[PalletOffset] No valid reference found. Using CURRENT as " + "Reference." + << std::endl; + } + + result.offset_lat_mm_value = (float)(P_center.x() - P_ref.x()); // X轴差异 + result.offset_lon_mm_value = (float)(P_center.z() - P_ref.z()); // Z轴差异 + + std::cout + << "[PalletOffset] Note: Offsets are relative to the LOADED reference." + << std::endl; + + // --------------------------------------------------------- + // 8. 变形检测 (Top Edge Sampling) + // --------------------------------------------------------- + // 策略:取插孔矩形上边缘向下一点点 (e.g. 5px) 的一行点 + // 转换为世界坐标,检查 Y 轴偏差 + + // --------------------------------------------------------- + // 8. 变形检测 (Top Edge Sampling - Robust Binning) + // --------------------------------------------------------- + // 策略:扫描插孔 ROI 的 mask 上边缘,获取 3D 点 + // 然后参考 BeamRack 算法,使用分箱 + 基线拟合计算变形 + + // --- Visualization Init --- + cv::Mat debug_canvas; + { + // Normalize Depth ROI for visualization (0-255) + cv::Mat temp_norm; + cv::normalize(roi_depth, temp_norm, 0, 255, cv::NORM_MINMAX, CV_8U); + // Use Color Map for better visibility + cv::applyColorMap(temp_norm, debug_canvas, cv::COLORMAP_JET); + + // Draw ROI Rects + cv::rectangle(debug_canvas, left_rect, cv::Scalar(255, 255, 255), 2); + cv::rectangle(debug_canvas, right_rect, cv::Scalar(255, 255, 255), 2); + } + + // Store graph images + cv::Mat graph_left, graph_right; + + auto checkDeformation = [&](const cv::Rect &r, const std::string &label, + cv::Mat &canvas, cv::Mat &out_graph) -> float { + // 1. 
提取边缘 3D 点 + std::vector edge_points; + + int start_x = r.x + 5; + int end_x = r.x + r.width - 5; + + // 遍历每一列 (x) + for (int c = start_x; c < end_x; ++c) { + // 从上向下扫描找 mask 边缘 + int search_start = std::max(0, r.y - 10); + int search_end = std::min(roi_depth.rows, r.y + r.height / 2); + + for (int r_row = search_start; r_row < search_end; ++r_row) { + if (mask.at(r_row, c) == 0) + continue; + + Eigen::Vector3d p = toWorld(c, r_row); + if (p.norm() > 0.001) { // 有效点 + edge_points.push_back(p); + } + break; // 每列只取一个上边缘点 + } + } + + if (edge_points.size() < 10) + return 0.0f; + + // 2. 分箱 + 鲁棒质心 (Binning) + // Sort by World X + std::sort(edge_points.begin(), edge_points.end(), + [](const Eigen::Vector3d &a, const Eigen::Vector3d &b) { + return a.x() < b.x(); + }); + + int num_bins = 20; + double min_x = edge_points.front().x(); + double max_x = edge_points.back().x(); + double step = (max_x - min_x) / num_bins; + + if (step < 0.1) + return 0.0f; + + struct Bin { + Eigen::Vector3d centroid_3d; + double deflection = 0.0; + }; + std::vector bins; + + auto it = edge_points.begin(); + for (int i = 0; i < num_bins; ++i) { + double bin_end = min_x + (i + 1) * step; + std::vector bin_items; + + while (it != edge_points.end() && it->x() <= bin_end) { + bin_items.push_back(*it); + ++it; + } + + if (bin_items.empty()) + continue; + + // Trimmed Mean (discard top/bottom 20% by Y) + std::sort(bin_items.begin(), bin_items.end(), + [](const Eigen::Vector3d &a, const Eigen::Vector3d &b) { + return a.y() < b.y(); + }); + + size_t n = bin_items.size(); + size_t start = (size_t)(n * 0.2); + size_t end = (size_t)(n * 0.8); + if (end <= start) { + start = 0; + end = n; + } + + Eigen::Vector3d sum(0, 0, 0); + + for (size_t k = start; k < end; ++k) { + sum += bin_items[k]; + } + + if ((end - start) > 0) { + bins.push_back({sum / (end - start), 0.0}); + } + } + + if (bins.size() < 4) + return 0.0f; // Need enough bins for anchors + + // 3. 
基线拟合 (Horizontal Baseline at Highest Point) + // User requested to use the "box top" (horizontal) as reference to handle + // slanted camera. We find the Minimum Y (highest point) among all centroids + // and use that as the flat ceiling. + double min_y_ref = 1e9; + + // Also track min/max X for drawing the line across the full width + double min_x_ref = 1e9; + double max_x_ref = -1e9; + + for (const auto &b : bins) { + if (b.centroid_3d.y() < min_y_ref) + min_y_ref = b.centroid_3d.y(); + if (b.centroid_3d.x() < min_x_ref) + min_x_ref = b.centroid_3d.x(); + if (b.centroid_3d.x() > max_x_ref) + max_x_ref = b.centroid_3d.x(); + } + + Eigen::Vector3d p1(min_x_ref, min_y_ref, 0); + Eigen::Vector3d p2(max_x_ref, min_y_ref, 0); + + // 4. 计算偏差 (Use Middle 30%) + // Indices range from 35% to 65% of total bins + int idx_start = (int)(bins.size() * 0.35); + int idx_end = (int)(bins.size() * 0.65); + if (idx_end <= idx_start) { + idx_start = 0; + idx_end = bins.size(); + } + + std::vector deflections; + Bin *max_bin = nullptr; // For viz (just show largest to draw the line) + double max_def_val = -1e9; + + for (int i = 0; i < bins.size(); ++i) { + Bin &b = bins[i]; + // Theoretical Y is now constant (Horizontal Baseline) + double theoretical_y = min_y_ref; + b.deflection = + b.centroid_3d.y() - theoretical_y; // Positive = Sagging down + + // Collect only in middle zone + if (i >= idx_start && i < idx_end) { + deflections.push_back(b.deflection); + // Track max for drawing the line? 
Typically we draw line at max point + if (b.deflection > max_def_val) { + max_def_val = b.deflection; + max_bin = &b; + } + } + } + + // Result: Average of Top 3 in Middle Zone + float final_def = 0.0f; + if (!deflections.empty()) { + std::sort(deflections.begin(), deflections.end(), std::greater()); + int k_top = std::min((int)deflections.size(), 3); + double sum_top = 0; + for (int i = 0; i < k_top; ++i) + sum_top += deflections[i]; + final_def = (float)(sum_top / k_top); + } + + // Filter small noise or negative (upward) deflection + if (final_def < 0) + final_def = 0.0f; + + // --- Highlight on Global Map (Simplified) --- + cv::rectangle(canvas, r, cv::Scalar(0, 255, 0), 2); // Just box locally + + // --- Generate Graph Image --- + { + int w = 400, h = 300; + out_graph = cv::Mat::zeros(h, w, CV_8UC3); + + // Auto Scale + double min_u = bins.front().centroid_3d.x(); + double max_u = bins.back().centroid_3d.x(); + double min_v = 1e9, max_v = -1e9; + for (auto &b : bins) { + if (b.centroid_3d.y() < min_v) + min_v = b.centroid_3d.y(); + if (b.centroid_3d.y() > max_v) + max_v = b.centroid_3d.y(); + } + // Add padding + double v_range = max_v - min_v; + if (v_range < 1.0) + v_range = 1.0; + min_v -= v_range * 0.5; + max_v += v_range * 0.5; + + auto map_u = [&](double val) { + return (int)((val - min_u) / (max_u - min_u) * (w - 40) + 20); + }; + // Invert V for image coords if Y is height? Actually here Y is World Y + // (~height?). In image coords, larger Y is down. In curve, larger Y from + // algo usually means "down" (sagging). Let's map min_v to top (0), max_v + // to bottom (h). 
+ auto map_v = [&](double val) { + return (int)((val - min_v) / (max_v - min_v) * (h - 40) + 20); + }; + + // Draw Axis + cv::rectangle(out_graph, cv::Rect(0, 0, w, h), cv::Scalar(50, 50, 50), + -1); // Grey BG + + // Draw Points + for (auto &b : bins) { + cv::circle( + out_graph, + cv::Point(map_u(b.centroid_3d.x()), map_v(b.centroid_3d.y())), 4, + cv::Scalar(0, 255, 255), -1); + } + // Draw Baseline + cv::line(out_graph, cv::Point(map_u(p1.x()), map_v(p1.y())), + cv::Point(map_u(p2.x()), map_v(p2.y())), cv::Scalar(0, 255, 0), + 2); + + // Draw Max Deflection + if (max_bin) { + double mx = max_bin->centroid_3d.x(); + double my = max_bin->centroid_3d.y(); + // Projection Y on baseline + double proj_y = + p1.y() + (mx - p1.x()) * (p2.y() - p1.y()) / (p2.x() - p1.x()); + + cv::arrowedLine(out_graph, cv::Point(map_u(mx), map_v(my)), + cv::Point(map_u(mx), map_v(proj_y)), + cv::Scalar(0, 0, 255), 2); + } + + // Title + std::string status = + label + ": " + std::to_string(final_def).substr(0, 4) + " mm"; + cv::putText(out_graph, status, cv::Point(10, 30), + cv::FONT_HERSHEY_SIMPLEX, 0.7, cv::Scalar(255, 255, 255), 2); + } + + return final_def; + }; + + result.hole_def_mm_left_value = + checkDeformation(left_rect, "Left", debug_canvas, graph_left); + result.hole_def_mm_right_value = + checkDeformation(right_rect, "Right", debug_canvas, graph_right); + + // --------------------------------------------------------- + // 9. 
设置结果状态 + // --------------------------------------------------------- + result.success = true; + + // 填充示例阈值 (实际应从 Config 读取) + // result.offset_lat_mm_threshold = format_threshold(-20, -10, 10, 20); // Old + // hardcoded + auto lat_t = ConfigManager::getInstance().getPalletOffsetLatThresholds(); + result.offset_lat_mm_threshold = + format_threshold(lat_t[0], lat_t[1], lat_t[2], lat_t[3]); + + // result.offset_lon_mm_threshold = format_threshold(-20, -10, 10, 20); // Old + // hardcoded + auto lon_t = ConfigManager::getInstance().getPalletOffsetLonThresholds(); + result.offset_lon_mm_threshold = + format_threshold(lon_t[0], lon_t[1], lon_t[2], lon_t[3]); + + // result.rotation_angle_threshold = format_threshold(-5, -2, 2, 5); // Old + // hardcoded + auto rot_t = ConfigManager::getInstance().getPalletRotationAngleThresholds(); + result.rotation_angle_threshold = + format_threshold(rot_t[0], rot_t[1], rot_t[2], rot_t[3]); + + // --------------------------------------------------------- + // 10. 全局显示 (Composite) + // --------------------------------------------------------- + + // Resize Global Map to match (2x graph width) = 800 + cv::resize(debug_canvas, debug_canvas, cv::Size(800, 400)); // Resize for fit + + // Ensure graphs exist + if (graph_left.empty()) + graph_left = cv::Mat::zeros(300, 400, CV_8UC3); + if (graph_right.empty()) + graph_right = cv::Mat::zeros(300, 400, CV_8UC3); + + // Concatenate Graphs (Left | Right) + cv::Mat bottom_panel; + cv::hconcat(graph_left, graph_right, bottom_panel); + + // Concatenate Top and Bottom + cv::Mat final_view; + cv::vconcat(debug_canvas, bottom_panel, final_view); + + const std::string winName = "Pallet Detection (Global Context + Analysis)"; + cv::namedWindow(winName, cv::WINDOW_NORMAL); + cv::resizeWindow(winName, 800, 700); + cv::imshow(winName, final_view); + + std::cout << "[PalletOffset] Press ANY KEY to close windows and continue..." 
+ << std::endl; + cv::waitKey(0); + cv::destroyAllWindows(); + + // result.result_status = "success"; // Removed: Not in struct + // result.last_update_time = "2024-12-23 ..."; // Removed: Not in struct + + result.success = true; + return true; + bool lat_alarm = (result.offset_lat_mm_value <= lat_t[1] || + result.offset_lat_mm_value >= + lat_t[2]); // Warning/Alarm logic adjustment? + // Usually: A=AlarmLow, B=WarnLow, C=WarnHigh, D=AlarmHigh + // Alarm if < A or > D + // Warn if (< B and >= A) or (> C and <= D) + // Simplified logic as requested: just "Alarm or not". Often just check outer + // bounds. + lat_alarm = (result.offset_lat_mm_value <= lat_t[0] || + result.offset_lat_mm_value >= lat_t[3]); + + // bool lon_alarm = abs(result.offset_lon_mm_value) > 20; // Old + bool lon_alarm = (result.offset_lon_mm_value <= lon_t[0] || + result.offset_lon_mm_value >= lon_t[3]); + + result.offset_lat_mm_warning_alarm = format_alarm(false, lat_alarm); + result.offset_lon_mm_warning_alarm = format_alarm(false, lon_alarm); + + std::cout << "[PalletOffset] Result: Lat=" << result.offset_lat_mm_value + << ", Lon=" << result.offset_lon_mm_value + << ", Rot=" << result.rotation_angle_value << std::endl; + + // --------------------------------------------------------- + // BLOCKING WAIT FOR DEBUG VIEW + // --------------------------------------------------------- + std::cout << "[PalletOffset] Press ANY KEY to close windows and continue..." 
+ << std::endl; + cv::waitKey(0); + cv::destroyAllWindows(); + + return true; +} diff --git a/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.h b/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.h new file mode 100644 index 0000000..08e8195 --- /dev/null +++ b/image_capture/src/algorithm/detections/pallet_offset/pallet_offset_detection.h @@ -0,0 +1,104 @@ +#pragma once + +#include +#include +#include +#include + +#include "../../../common_types.h" + +namespace cv { class Mat; } + +/** + * @brief 托盘位置偏移检测算法结果 + */ +struct PalletOffsetResult { + // 位置偏移量 (相对于参考得出的世界坐标系下的差异) + float offset_lat_mm_value; // 左右偏移量(mm)- X轴 World + float offset_lon_mm_value; // 前后偏移量(mm)- Z轴 World + float rotation_angle_value; // 旋转角度(度)- 绕 Y轴 World + + // 插孔变形 + float hole_def_mm_left_value; // 左侧插孔变形(mm) + float hole_def_mm_right_value; // 右侧插孔变形(mm) + + // 绝对坐标 (用于生成参考模板) + float abs_x; + float abs_y; + float abs_z; + + // 个体插孔坐标 (可选,用于调试或Reference生成) + Point3D left_hole_pos; + Point3D right_hole_pos; + + // 阈值(JSON字符串) + std::string offset_lat_mm_threshold; + std::string offset_lon_mm_threshold; + std::string rotation_angle_threshold; + std::string hole_def_mm_left_threshold; + std::string hole_def_mm_right_threshold; + + // 警告和报警信号(JSON字符串) + std::string offset_lat_mm_warning_alarm; + std::string offset_lon_mm_warning_alarm; + std::string rotation_angle_warning_alarm; + std::string hole_def_mm_left_warning_alarm; + std::string hole_def_mm_right_warning_alarm; + + bool success; // 算法是否执行成功 + + PalletOffsetResult() + : offset_lat_mm_value(0.0f) + , offset_lon_mm_value(0.0f) + , rotation_angle_value(0.0f) + , hole_def_mm_left_value(0.0f) + , hole_def_mm_right_value(0.0f) + , abs_x(0.0f), abs_y(0.0f), abs_z(0.0f) + , success(false) + {} +}; + +/** + * @brief 托盘位置偏移检测算法 + * + * 检测托盘位置偏移和插孔变形 + * 核心逻辑: + * 1. 纯深度图输入,不依赖点云。 + * 2. 交互式 ROI 选择(当未提供 ROI 时)。 + * 3. 2D 特征检测 + 稀疏 3D 转换(利用内参和标定矩阵)。 + * 4. 
世界坐标系下的偏移与变形计算。 + */ +class PalletOffsetAlgorithm { +public: + /** + * @brief 执行托盘位置偏移检测 + * + * @param depth_img 深度图像 (CV_16U or CV_32F) + * @param color_img 彩色图像 (仅用于显示,可选) + * @param side 货架侧("left"或"right") + * @param result [输出] 检测结果 + * @param point_cloud [可选] 点云数据 (若为空,则使用 depth + intrinsics 计算) + * @param roi_points [可选] ROI区域,若为空则触发交互式选择 + * @param intrinsics [可选] 相机内参,用于 2D->3D 转换。若为0则尝试自动获取。 + * @return 是否检测成功 + */ + static bool detect(const cv::Mat& depth_img, + const cv::Mat& color_img, + const std::string& side, + PalletOffsetResult& result, + const std::vector* point_cloud = nullptr, + const std::vector& roi_points = {}, + const CameraIntrinsics& intrinsics = CameraIntrinsics(), + const cv::Mat* calib_mat_override = nullptr); + +private: + /** + * @brief 从 JSON 文件加载标定矩阵 + */ + static bool loadCalibration(Eigen::Matrix4d& transform); + + /** + * @brief 交互式 ROI 选择 + */ + static std::vector selectPolygonROI(const cv::Mat& visual_img); +}; diff --git a/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.cpp b/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.cpp new file mode 100644 index 0000000..9938800 --- /dev/null +++ b/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.cpp @@ -0,0 +1,261 @@ +#include "slot_occupancy_detection.h" +#include +#include +#include +#include + + +//==================== +// 步骤1:配置参数 +//==================== +namespace Config { +// 基准图文件相对路径列表 (按顺序尝试) +const std::vector TEMPLATE_PATHS = { + "src\\images_template\\temp.bmp", + "..\\src\\images_template\\temp.bmp", + "..\\..\\src\\images_template\\temp.bmp", + "..\\..\\..\\src\\images_template\\temp.bmp", + "..\\..\\..\\..\\src\\images_template\\temp.bmp", + "d:\\Git\\stereo_warehouse_inspection\\image_capture\\src\\images_" + "template\\temp.bmp"}; + +// 差异阈值:当前像素与背景像素相差多少算“有变化” (0-255) +// 建议:如果环境光稳定,设为 20-30;如果光照波动大,设为 40-50 +const int DIFF_THRESHOLD = 28; + +// 面积阈值:差异像素总数超过多少算“有货” +// 
建议:根据 ROI 大小调整,通常设为 ROI 面积的 5% - 10% +const int AREA_THRESHOLD = 1000000; + +// 高斯模糊核大小 (必须是奇数) +const int BLUR_SIZE = 7; + +// 目标工作分辨率 (相机分辨率) +const cv::Size TARGET_SIZE(4024, 3036); + +// ROI (感兴趣区域) - 默认值 +const cv::Rect ROI_DEFAULT(1400, 600, 1200, 1800); +} // namespace Config + +//==================== +// 步骤2:算法上下文(用于管理静态资源) +//==================== + +class SlotAlgoContext { +public: + SlotAlgoContext() : initialized_(false) {} + + // 初始化:加载并预处理基准图 + // 返回值:是否初始化成功 + bool ensureInitialized() { + std::lock_guard lock(mutex_); + if (initialized_) + return true; + + cv::Mat raw_ref; + // 1. 尝试加载基准图 + for (const auto &path : Config::TEMPLATE_PATHS) { + raw_ref = cv::imread(path, cv::IMREAD_GRAYSCALE); + if (!raw_ref.empty()) { + std::cout << "[SlotAlgo] Loaded template from: " << path << std::endl; + break; + } + } + + if (raw_ref.empty()) { + std::cerr << "[SlotAlgo] CRITICAL: Failed to load template image." + << std::endl; + return false; + } + + // 2. 尺寸对齐 (Resize) + // 只有当尺寸不匹配时才执行 Resize,确保 ref_img_processed_ 始终是 + // TARGET_SIZE + if (raw_ref.size() != Config::TARGET_SIZE) { + std::cout << "[SlotAlgo] Resizing template from " << raw_ref.cols << "x" + << raw_ref.rows << " to " << Config::TARGET_SIZE.width << "x" + << Config::TARGET_SIZE.height << std::endl; + cv::resize(raw_ref, ref_img_processed_, Config::TARGET_SIZE); + } else { + ref_img_processed_ = raw_ref; + } + + // 3. 预处理:高斯模糊 + // 提前对整张基准图进行模糊,避免每帧对 ROI 进行模糊,减少计算量 + // (注:如果内存紧张,可以只存原图,但为了速度建议存模糊后的图) + cv::GaussianBlur(ref_img_processed_, ref_img_processed_, + cv::Size(Config::BLUR_SIZE, Config::BLUR_SIZE), 0); + + // 4. 初始化形态学核 + morph_kernel_ = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5)); + + std::cout << "[SlotAlgo] Initialization complete. 
Reference size: " + << ref_img_processed_.cols << "x" << ref_img_processed_.rows + << std::endl; + + initialized_ = true; + return true; + } + + // 获取处理后的基准图 (只读引用) + const cv::Mat &getRefImage() const { return ref_img_processed_; } + + // 获取形态学核 + const cv::Mat &getMorphKernel() const { return morph_kernel_; } + + bool isInitialized() const { return initialized_; } + +private: + std::mutex mutex_; + bool initialized_; + cv::Mat ref_img_processed_; // 存储 Resize 并 Blur 后的基准图 + cv::Mat morph_kernel_; +}; + +// 全局静态上下文实例 +static SlotAlgoContext g_algo_context; + +//==================== +// 步骤3:辅助函数 +//==================== + +static cv::Rect getSafeROI(const cv::Rect &request_roi, int img_width, + int img_height) { + cv::Rect roi = request_roi; + roi.x = std::max(0, roi.x); + roi.y = std::max(0, roi.y); + roi.width = std::min(roi.width, img_width - roi.x); + roi.height = std::min(roi.height, img_height - roi.y); + return roi; +} + +//==================== +// 步骤4:核心算法实现 +//==================== + +bool SlotOccupancyAlgorithm::detect(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + SlotOccupancyResult &result) { + // 算法启用开关 + const bool USE_ALGORITHM = false; + + if (USE_ALGORITHM) { + // --- 真实算法逻辑 --- + // 初始化结果 + result.success = false; + result.slot_occupied = false; + + // 1. 确保算法所需的资源已加载 (懒加载模式) + if (!g_algo_context.ensureInitialized()) { + std::cerr << "[SlotAlgo] Algorithm not initialized, skipping detection." + << std::endl; + return false; + } + + // 2. 输入检查 + if (color_img.empty()) { + std::cerr << "[SlotAlgo] Input image is empty." << std::endl; + return false; + } + + try { + // 3. 
准备当前帧灰度图 (高效转换) + cv::Mat curr_gray; + if (color_img.channels() == 3) { + cv::cvtColor(color_img, curr_gray, cv::COLOR_BGR2GRAY); + } else { + // 如果已经是灰度图,直接引用,避免拷贝 + curr_gray = color_img; + } + + // 验证输入尺寸 (假设输入应该匹配目标分辨率) + if (curr_gray.size() != Config::TARGET_SIZE) { + // 如果输入尺寸不对,这里选择报错或者 Resize + // 鉴于这是一个工业场景,分辨率突变通常是异常,这里建议打印警告如果必须处理则 + // Resize 但为了效率,我们尽量避免每帧 Resize。 + // 如果确实不一样,这里做一个临时 Resize 以保证程序不崩,但会影响性能 + // std::cout << "[SlotAlgo] Warning: Input size mismatch. Resizing..." + // << std::endl; 暂时不处理 resize,依靠 getSafeROI 防止崩坏,或者在 ROI + // 截取时会出错 + } + + // 4. 确定 ROI + // 实际应用中根据 side 选择 ROI; 目前使用默认 + cv::Rect roi = + getSafeROI(Config::ROI_DEFAULT, curr_gray.cols, curr_gray.rows); + + // 5. 截取 ROI + // 直接从 input 和 cached reference 中截取,无需 clone + cv::Mat img_roi = curr_gray(roi); + const cv::Mat &ref_full = g_algo_context.getRefImage(); + + // 确保 ref_full 够大覆盖 ROI (理论上 init 中已经 resize 到了 TARGET_SIZE) + // 双重保险 + cv::Rect ref_roi_rect = getSafeROI(roi, ref_full.cols, ref_full.rows); + if (ref_roi_rect != roi) { + std::cerr << "[SlotAlgo] Error: Reference image size mismatch with ROI." + << std::endl; + return false; + } + cv::Mat ref_roi = ref_full(ref_roi_rect); + + // 6. 图像处理 pipeline + // 只对当前帧 ROI 做高斯模糊 (基准图已经预处理过了) + cv::Mat img_roi_blurred; + cv::GaussianBlur(img_roi, img_roi_blurred, + cv::Size(Config::BLUR_SIZE, Config::BLUR_SIZE), 0); + + // 绝对差分 + cv::Mat diff; + cv::absdiff(img_roi_blurred, ref_roi, diff); + + // 二值化 + cv::Mat mask; + cv::threshold(diff, mask, Config::DIFF_THRESHOLD, 255, cv::THRESH_BINARY); + + // 形态学滤波 (去除噪点) + cv::morphologyEx(mask, mask, cv::MORPH_OPEN, + g_algo_context.getMorphKernel()); + + // 7. 
统计判定 + int non_zero_pixels = cv::countNonZero(mask); + + // 可选:仅在状态变化时打印,避免刷屏 + // std::cout << "[SlotAlgo] Diff pixels: " << non_zero_pixels << + // std::endl; + + if (non_zero_pixels > Config::AREA_THRESHOLD) { + result.slot_occupied = true; + } else { + result.slot_occupied = false; + } + + result.success = true; + // std::cout << "[SlotAlgo] Result: " << (result.slot_occupied ? + // "Occupied" : "Empty") << std::endl; + + } catch (const cv::Exception &e) { + std::cerr << "[SlotAlgo] OpenCV Exception: " << e.what() << std::endl; + result.success = false; + return false; + } catch (...) { + std::cerr << "[SlotAlgo] Unknown Exception during detection." + << std::endl; + result.success = false; + return false; + } + + return result.success; + } else { + // --- 模拟数据逻辑 --- + std::cout << "[SlotOccupancyAlgorithm] Using FAKE DATA implementation " + "(Switch OFF)." + << std::endl; + + result.slot_occupied = false; // 模拟无货 + result.success = true; + + return result.success; + } +} diff --git a/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.h b/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.h new file mode 100644 index 0000000..2d8aa17 --- /dev/null +++ b/image_capture/src/algorithm/detections/slot_occupancy/slot_occupancy_detection.h @@ -0,0 +1,37 @@ +#pragma once + +#include + +namespace cv { class Mat; } + +/** + * @brief 货位有无检测算法结果 + */ +struct SlotOccupancyResult { + bool slot_occupied; // 货位是否有托盘/货物 + bool success; // 算法是否执行成功 + + SlotOccupancyResult() : slot_occupied(false), success(false) {} +}; + +/** + * @brief 货位有无检测算法 + * + * 分析深度图或彩色图,判断货位是否有托盘/货物 + */ +class SlotOccupancyAlgorithm { +public: + /** + * 执行货位有无检测 + * @param depth_img 深度图像 + * @param color_img 彩色图像 + * @param side 货架侧("left"或"right") + * @param result [输出] 检测结果 + * @return 是否检测成功 + */ + static bool detect(const cv::Mat& depth_img, + const cv::Mat& color_img, + const std::string& side, + SlotOccupancyResult& result); +}; + diff 
--git a/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.cpp b/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.cpp new file mode 100644 index 0000000..9ea9b8a --- /dev/null +++ b/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.cpp @@ -0,0 +1,129 @@ +#include "visual_inventory_detection.h" +#include "HalconCpp.h" +#include +#include + +using namespace HalconCpp; + +// Helper to convert cv::Mat to Halcon HImage +HImage MatToHImage(const cv::Mat &image) { + HImage hImage; + if (image.empty()) + return hImage; + + cv::Mat gray; + if (image.channels() == 3) { + cv::cvtColor(image, gray, cv::COLOR_BGR2GRAY); + } else { + gray = image.clone(); + } + + // Fix: Create a copy of the data to ensure it persists beyond function scope + // GenImage1 with "byte" type expects the data to remain valid + void *data_copy = new unsigned char[gray.total()]; + memcpy(data_copy, gray.data, gray.total()); + + try { + hImage.GenImage1("byte", gray.cols, gray.rows, data_copy); + } catch (...) { + delete[] static_cast(data_copy); + throw; + } + + // Note: The data_copy will be managed by Halcon's HImage + // We don't delete it here as Halcon takes ownership + return hImage; +} + +bool VisualInventoryAlgorithm::detect(const cv::Mat &depth_img, + const cv::Mat &color_img, + const std::string &side, + VisualInventoryResult &result) { + result.success = false; + + try { + if (color_img.empty()) { + std::cerr << "[VisualInventoryAlgorithm] Error: Empty image input." + << std::endl; + return false; + } + + // 1. Convert to HImage + HImage hImage = MatToHImage(color_img); + + // 2. Setup Halcon QR Code Model + HDataCode2D dataCode2d; + dataCode2d.CreateDataCode2dModel("QR Code", HTuple(), HTuple()); + dataCode2d.SetDataCode2dParam("default_parameters", "enhanced_recognition"); + + HTuple resultHandles, decodedDataStrings; + + // 3. 
Detect + // stop_after_result_num: 100 ensures we get up to 100 codes + HXLDCont symbolXLDs = + dataCode2d.FindDataCode2d(hImage, "stop_after_result_num", 100, + &resultHandles, &decodedDataStrings); + + // 4. Transform Results to JSON + // Format: {"A01":["BOX111","BOX112"], "A02":["BOX210"]} + // Since we don't have position information, group all codes under a generic + // key + std::string json_barcodes = "\"" + side + "\":["; + Hlong count = decodedDataStrings.Length(); + + for (Hlong i = 0; i < count; i++) { + if (i > 0) + json_barcodes += ","; + // Access string from HTuple using S() which returns const char* + HTuple s = decodedDataStrings[i]; + std::string code = std::string(s.S()); + + // Save raw code for deduplication + result.codes.push_back(code); + + // Escape special characters in JSON strings + // Replace backslashes first, then quotes + size_t pos = 0; + while ((pos = code.find('\\', pos)) != std::string::npos) { + code.replace(pos, 1, "\\\\"); + pos += 2; + } + pos = 0; + while ((pos = code.find('"', pos)) != std::string::npos) { + code.replace(pos, 1, "\\\""); + pos += 2; + } + json_barcodes += "\"" + code + "\""; + } + json_barcodes += "]"; + + result.result_barcodes = "{" + json_barcodes + "}"; + result.success = true; + + std::cout << "[VisualInventoryAlgorithm] Side: " << side + << " | Detected: " << count << " codes." << std::endl; + + } catch (HException &except) { + std::cerr << "[VisualInventoryAlgorithm] Halcon Exception: " + << except.ErrorMessage().Text() << std::endl; + result.result_barcodes = "{\"" + side + + "\":[], \"error\":\"Halcon Exception: " + + std::string(except.ErrorMessage().Text()) + "\"}"; + result.success = false; + } catch (std::exception &e) { + std::cerr << "[VisualInventoryAlgorithm] Exception: " << e.what() + << std::endl; + result.result_barcodes = + "{\"" + side + "\":[], \"error\":\"" + std::string(e.what()) + "\"}"; + result.success = false; + } catch (...) 
{ + std::cerr + << "[VisualInventoryAlgorithm] Unknown Exception during detection." + << std::endl; + result.result_barcodes = + "{\"" + side + "\":[], \"error\":\"Unknown exception\"}"; + result.success = false; + } + + return result.success; +} diff --git a/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.h b/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.h new file mode 100644 index 0000000..4252cbe --- /dev/null +++ b/image_capture/src/algorithm/detections/visual_inventory/visual_inventory_detection.h @@ -0,0 +1,40 @@ +#pragma once + +#include +#include + +namespace cv { +class Mat; +} + +/** + * @brief 视觉盘点检测算法结果 + */ +struct VisualInventoryResult { + std::string + result_barcodes; // 条码扫描结果JSON: {"left":["BOX111","BOX112"]} 或 + // 错误时: {"left":[], "error":"error message"} + std::vector codes; // 原始条码列表,便于去重 + bool success; // 算法是否执行成功 + + VisualInventoryResult() : success(false) {} +}; + +/** + * @brief 视觉盘点检测算法 + * + * 识别货位位置并扫描条码 + */ +class VisualInventoryAlgorithm { +public: + /** + * 执行视觉盘点检测 + * @param depth_img 深度图像 + * @param color_img 彩色图像 + * @param side 货架侧("left"或"right") + * @param result [输出] 检测结果 + * @return 是否检测成功 + */ + static bool detect(const cv::Mat &depth_img, const cv::Mat &color_img, + const std::string &side, VisualInventoryResult &result); +}; diff --git a/image_capture/src/algorithm/utils/image_processor.cpp b/image_capture/src/algorithm/utils/image_processor.cpp new file mode 100644 index 0000000..a8f5b62 --- /dev/null +++ b/image_capture/src/algorithm/utils/image_processor.cpp @@ -0,0 +1,150 @@ +#include "image_processor.h" +#include +#include +#include +#include +#include +#include +#include + +// ========== ImageProcessor 类实现 ========== + +/** + * 构造函数 + */ +ImageProcessor::ImageProcessor() {} + +/** + * 处理深度图像 + * + * @param depth_img 输入的深度图像(16位无符号整数,CV_16U类型) + * @return cv::Mat 处理后的深度图(已应用伪彩色映射) + */ +cv::Mat ImageProcessor::processDepthImage(const cv::Mat& 
depth_img) { + if (depth_img.empty()) + return cv::Mat(); + + // 确保输入是16位深度图 + cv::Mat depthMap; + if (depth_img.type() == CV_16U) { + depthMap = depth_img; + } else { + // 如果不是16位,尝试转换或返回空 + return cv::Mat(); + } + + // 创建掩码,标记无效深度值(0值表示无效/无数据) + cv::Mat invalid_mask = (depthMap == 0); + + // 性能优化:避免不必要的clone,直接使用depthMap的视图 + // 只有在需要修改时才创建副本 + cv::Mat depthProcessed; + + // 检查是否有无效值需要处理 + int invalid_count = cv::countNonZero(invalid_mask); + if (invalid_count > 0) { + // 有无效值,需要创建副本并修改 + depthProcessed = depthMap.clone(); + // 将无效值设置为一个很大的值,这样在归一化时会被排除 + depthProcessed.setTo(65535, invalid_mask); + } else { + // 没有无效值,直接使用原图(避免不必要的复制) + depthProcessed = depthMap; + } + + // 计算有效深度值的范围(排除无效值) + double minVal, maxVal; + cv::minMaxLoc(depthProcessed, &minVal, &maxVal, nullptr, nullptr, + ~invalid_mask); + + // 如果所有值都无效,返回黑色图像 + if (maxVal == 0 || minVal == 65535) { + cv::Mat blackImg = cv::Mat::zeros(depthMap.size(), CV_8UC3); + return blackImg; + } + + // 如果所有有效深度值都相同(maxVal == minVal),避免除零错误 + // 返回一个统一颜色的深度图(中等灰色,对应JET色图的中间值) + if (maxVal == minVal) { + // 创建单通道灰度图,有效区域设置为中等灰度值(128对应JET色图的中间颜色) + cv::Mat grayMat = cv::Mat::zeros(depthMap.size(), CV_8UC1); + grayMat.setTo(128, ~invalid_mask); + // 应用伪彩色映射 + cv::Mat uniformImg; + cv::applyColorMap(grayMat, uniformImg, cv::COLORMAP_JET); + // 确保无效区域保持黑色 + uniformImg.setTo(cv::Scalar(0, 0, 0), invalid_mask); + return uniformImg; + } + + // 归一化有效深度值到0-255范围 + cv::Mat depthVis; + depthProcessed.convertTo(depthVis, CV_8U, 255.0 / (maxVal - minVal), + -minVal * 255.0 / (maxVal - minVal)); + + // 将无效区域设置为0(黑色) + depthVis.setTo(0, invalid_mask); + + // 应用伪彩色映射以提高可视性(JET色图:蓝色=近,红色=远) + cv::applyColorMap(depthVis, depthVis, cv::COLORMAP_JET); + + // 确保无效区域保持黑色(伪彩色映射后可能改变,需要再次设置) + depthVis.setTo(cv::Scalar(0, 0, 0), invalid_mask); + + return depthVis; +} + +/** + * 保存图像到文件 + * + * @param depth_img 深度图像(可选) + * @param color_img 彩色图像(可选) + * @param frame_num 帧编号,用于文件命名 + * @param save_dir 保存目录(可选,默认为当前目录) + */ +void 
ImageProcessor::saveImages(const cv::Mat& depth_img, const cv::Mat& color_img, + int frame_num, const std::string& save_dir) { + // 创建保存目录(如果指定了且不存在) + std::string actual_dir = save_dir.empty() ? "." : save_dir; + if (!save_dir.empty()) { + try { + std::filesystem::create_directories(actual_dir); + } catch (const std::exception& e) { + std::cerr << "[Save] Failed to create directory: " << e.what() << std::endl; + actual_dir = "."; // 回退到当前目录 + } + } + + // 获取当前系统时间用于文件命名 + auto now = std::chrono::system_clock::now(); + auto time_t = std::chrono::system_clock::to_time_t(now); + std::tm* tm = std::localtime(&time_t); + + char time_str[64]; + std::strftime(time_str, sizeof(time_str), "%Y%m%d_%H%M%S", tm); + + // 保存深度图 + if (!depth_img.empty()) { + std::stringstream depth_filename; + depth_filename << actual_dir << "/depth_" << time_str << "_frame" + << std::setfill('0') << std::setw(6) << frame_num << ".png"; + if (cv::imwrite(depth_filename.str(), depth_img)) { + std::cout << "[Save] Depth image saved: " << depth_filename.str() << std::endl; + } else { + std::cerr << "[Save] Failed to save depth image: " << depth_filename.str() << std::endl; + } + } + + // 保存彩色图 + if (!color_img.empty()) { + std::stringstream color_filename; + color_filename << actual_dir << "/color_" << time_str << "_frame" + << std::setfill('0') << std::setw(6) << frame_num << ".png"; + if (cv::imwrite(color_filename.str(), color_img)) { + std::cout << "[Save] Color image saved: " << color_filename.str() << std::endl; + } else { + std::cerr << "[Save] Failed to save color image: " << color_filename.str() << std::endl; + } + } +} + diff --git a/image_capture/src/algorithm/utils/image_processor.h b/image_capture/src/algorithm/utils/image_processor.h new file mode 100644 index 0000000..098caa0 --- /dev/null +++ b/image_capture/src/algorithm/utils/image_processor.h @@ -0,0 +1,63 @@ +#pragma once + +// OpenCV图像处理模块头文件 +// 负责深度图的处理(伪彩色映射)、显示和保存 +// 注意:此模块不依赖SDK,只使用OpenCV标准类型 +// 
注意:彩色图的颜色空间转换已在图像采集层完成,此处不再处理 + +#include + +namespace cv { class Mat; } + +/** + * 图像处理器类 + * + * 功能说明: + * - 处理深度图(归一化、伪彩色映射) + * - 保存图像到文件 + * + * 注意:彩色图的颜色空间转换已在图像采集层(camera层)完成, + * 统一输出BGR格式,此处不再需要处理 + * + * 设计原则: + * - 此模块完全独立于SDK,只使用OpenCV的cv::Mat类型 + * - SDK到OpenCV的转换应在图像采集层(camera层)完成 + * - 图像显示由GUI层(MainWindow)负责,使用Qt的QLabel分别显示 + */ +class ImageProcessor +{ +public: + /** + * 构造函数 + */ + ImageProcessor(); + + /** + * 处理深度图像 + * + * @param depth_img 输入的深度图像(16位无符号整数,CV_16U类型) + * @return cv::Mat 处理后的深度图(已应用伪彩色映射,BGR格式) + * + * 功能说明: + * - 将深度值归一化到0-255范围 + * - 应用JET伪彩色映射(蓝色=近,红色=远) + * - 处理无效深度值(0值) + */ + cv::Mat processDepthImage(const cv::Mat& depth_img); + + /** + * 保存图像到文件 + * + * @param depth_img 深度图像(可选) + * @param color_img 彩色图像(可选) + * @param frame_num 帧编号,用于文件命名 + * @param save_dir 保存目录(可选,默认为当前目录) + * + * 功能说明: + * - 获取当前时间戳用于文件命名 + * - 保存深度图和彩色图到文件 + */ + void saveImages(const cv::Mat& depth_img, const cv::Mat& color_img, + int frame_num, const std::string& save_dir = ""); +}; + diff --git a/image_capture/src/camera/mvs_multi_camera_capture.cpp b/image_capture/src/camera/mvs_multi_camera_capture.cpp new file mode 100644 index 0000000..9120491 --- /dev/null +++ b/image_capture/src/camera/mvs_multi_camera_capture.cpp @@ -0,0 +1,306 @@ +/** + * @file mvs_multi_camera_capture.cpp + * @brief 海康 MVS 相机采集实现文件 + * + * 此文件包含了 MvsMultiCameraCapture 类的完整实现 + * - 封装 MVS SDK (MvCameraControl.h),管理多相机采集 + * - 将 SDK 的原始帧数据转换为 OpenCV 的 cv::Mat 格式 + * - 管理采集线程和缓冲区 + * + * 设计说明: + * - 每个相机使用独立的采集线程,避免阻塞 + * - 使用线程安全的缓冲区存储最新图像 + * - 统一输出 BGR 格式的彩色图 + */ + +#include "mvs_multi_camera_capture.h" +#include "MvCameraControl.h" +#include +#include +#include + +/** + * @brief 构造函数 + * 初始化运行标志和状态 + */ +MvsMultiCameraCapture::MvsMultiCameraCapture() : running_(false), initialized_(false) {} + +/** + * @brief 析构函数 + * 确保在对象销毁时正确停止所有采集线程和相机,并清理资源 + */ +MvsMultiCameraCapture::~MvsMultiCameraCapture() { + stop(); + // 清理句柄 + for (auto& cam : cameras_) { + if (cam.handle) { 
+ MV_CC_DestroyHandle(cam.handle); + cam.handle = nullptr; + } + } +} + +/** + * @brief 初始化相机 + * + * 此函数完成以下工作: + * 1. 枚举所有连接的 GenTL GigE 和 USB 设备 + * 2. 创建并打开相机句柄 + * 3. 配置相机参数(如触发模式、包大小等) + * 4. 初始化图像缓冲区 + * + * @return true 初始化成功且至少找到一个设备, false 失败 + */ +bool MvsMultiCameraCapture::initialize() { + if (initialized_) return true; + + MV_CC_DEVICE_INFO_LIST stDeviceList; + memset(&stDeviceList, 0, sizeof(MV_CC_DEVICE_INFO_LIST)); + + // 枚举 GenTL GigE 和 USB 设备 + int nRet = MV_CC_EnumDevices(MV_GIGE_DEVICE | MV_USB_DEVICE, &stDeviceList); + if (MV_OK != nRet) { + std::cerr << "[MVS] EnumDevices failed: " << std::hex << nRet << std::endl; + return false; + } + + if (stDeviceList.nDeviceNum == 0) { + std::cout << "[MVS] No devices found." << std::endl; + return false; + } + + std::cout << "[MVS] Found " << stDeviceList.nDeviceNum << " devices." << std::endl; + + for (unsigned int i = 0; i < stDeviceList.nDeviceNum; i++) { + MV_CC_DEVICE_INFO* pDeviceInfo = stDeviceList.pDeviceInfo[i]; + if (NULL == pDeviceInfo) continue; + + CameraInfo camInfo; + camInfo.index = static_cast(cameras_.size()); + + // 获取序列号 + if (pDeviceInfo->nTLayerType == MV_GIGE_DEVICE) { + camInfo.serial_number = std::string((char*)pDeviceInfo->SpecialInfo.stGigEInfo.chSerialNumber); + } else if (pDeviceInfo->nTLayerType == MV_USB_DEVICE) { + camInfo.serial_number = std::string((char*)pDeviceInfo->SpecialInfo.stUsb3VInfo.chSerialNumber); + } + + // 创建句柄 + nRet = MV_CC_CreateHandle(&camInfo.handle, pDeviceInfo); + if (MV_OK != nRet) { + std::cerr << "[MVS] CreateHandle failed for device " << i << std::endl; + continue; + } + + // 打开设备 + nRet = MV_CC_OpenDevice(camInfo.handle); + if (MV_OK != nRet) { + std::cerr << "[MVS] OpenDevice failed for device " << i << std::endl; + MV_CC_DestroyHandle(camInfo.handle); + continue; + } + + // 确保触发模式为 OFF 以进行连续采集 + nRet = MV_CC_SetEnumValue(camInfo.handle, "TriggerMode", MV_TRIGGER_MODE_OFF); + if (MV_OK != nRet) { + std::cerr << "[MVS] Warning: Failed to set 
TriggerMode to Off. Ret = " << std::hex << nRet << std::endl; + } + + // 检查 GigE 的最佳包大小并设置 + if (pDeviceInfo->nTLayerType == MV_GIGE_DEVICE) { + int nPacketSize = MV_CC_GetOptimalPacketSize(camInfo.handle); + if (nPacketSize > 0) { + MV_CC_SetIntValue(camInfo.handle, "GevSCPSPacketSize", nPacketSize); + } + } + + cameras_.push_back(camInfo); + buffers_.push_back(std::make_shared()); + + // 记录相机分辨率 + MVCC_INTVALUE stWidth = {0}; + MVCC_INTVALUE stHeight = {0}; + int nRetW = MV_CC_GetIntValue(camInfo.handle, "Width", &stWidth); + int nRetH = MV_CC_GetIntValue(camInfo.handle, "Height", &stHeight); + + std::cout << "[MVS] Initialized camera " << camInfo.index << ": " << camInfo.serial_number; + if (MV_OK == nRetW && MV_OK == nRetH) { + std::cout << " (Resolution: " << stWidth.nCurValue << "x" << stHeight.nCurValue << ")"; + } + std::cout << std::endl; + } + + initialized_ = true; + return !cameras_.empty(); +} + +/** + * @brief 开始采集 + * 启动所有相机的抓图,并为每个相机创建一个采集线程 + * @return true 启动成功, false 失败 + */ +bool MvsMultiCameraCapture::start() { + if (!initialized_ || running_) return false; + + running_ = true; + for (const auto& cam : cameras_) { + // 开始抓取 + int nRet = MV_CC_StartGrabbing(cam.handle); + if (MV_OK != nRet) { + std::cerr << "[MVS] StartGrabbing failed for camera " << cam.index << std::endl; + } + + threads_.emplace_back(&MvsMultiCameraCapture::captureThreadFunc, this, cam.index); + } + return true; +} + +/** + * @brief 停止采集 + * 停止所有采集线程和相机抓图 + */ +void MvsMultiCameraCapture::stop() { + running_ = false; + for (auto& t : threads_) { + if (t.joinable()) t.join(); + } + threads_.clear(); + + for (const auto& cam : cameras_) { + MV_CC_StopGrabbing(cam.handle); + } +} + +/** + * @brief 获取相机 ID (序列号) + * @param camera_index 相机索引 + * @return 相机序列号 + */ +std::string MvsMultiCameraCapture::getCameraId(int camera_index) const { + if (camera_index >= 0 && camera_index < cameras_.size()) { + return cameras_[camera_index].serial_number; + } + return ""; +} + +/** + * @brief 
获取指定相机的最新图像 + * 从线程安全的缓冲区中读取最新图像数据 + * + * @param camera_index 相机索引 + * @param[out] image 输出图像 + * @param[out] fps 当前帧率 + * @return true 成功获取, false 索引无效或无新图像 + */ +bool MvsMultiCameraCapture::getLatestImage(int camera_index, cv::Mat& image, double& fps) { + if (camera_index < 0 || camera_index >= buffers_.size()) return false; + + auto& buffer = buffers_[camera_index]; + std::lock_guard lock(buffer->mtx); + + if (buffer->image.empty()) return false; + + image = buffer->image.clone(); + fps = buffer->fps; + buffer->updated = false; + return true; +} + +/** + * @brief 转换为 OpenCV Mat 格式 + * + * 将 SDK 返回的帧数据转换为 OpenCV 的 BGR8 Mat。 + * + * @param handle 相机句柄 + * @param pFrame MVS 帧信息结构体指针 (MV_FRAME_OUT*) + * @param pUser 用户数据 (未使用) + * @return cv::Mat 转换后的 OpenCV 图像 + */ +cv::Mat MvsMultiCameraCapture::convertToMat(void* handle, void* pFrame, void* pUser) { + // pFrame 在 captureThreadFunc 中传入的是 MV_FRAME_OUT* 指针 + MV_FRAME_OUT* stFrameOut = (MV_FRAME_OUT*)pFrame; + MV_FRAME_OUT_INFO_EX* stUserInfo = stFrameOut->stFrameInfo.enPixelType == 0 ? 
nullptr : &stFrameOut->stFrameInfo; + + if (!handle || !stUserInfo) return cv::Mat(); + + cv::Mat image; + + MV_CC_PIXEL_CONVERT_PARAM stConvertParam = {0}; + stConvertParam.nWidth = stUserInfo->nWidth; + stConvertParam.nHeight = stUserInfo->nHeight; + stConvertParam.pSrcData = stFrameOut->pBufAddr; + stConvertParam.nSrcDataLen = stUserInfo->nFrameLen; + stConvertParam.enSrcPixelType = stUserInfo->enPixelType; + stConvertParam.enDstPixelType = PixelType_Gvsp_BGR8_Packed; // 转换为 OpenCV 的 BGR8 + stConvertParam.nDstBufferSize = stUserInfo->nWidth * stUserInfo->nHeight * 3; + + // 分配目标缓冲区 + image.create(stUserInfo->nHeight, stUserInfo->nWidth, CV_8UC3); + stConvertParam.pDstBuffer = image.data; + + int nRet = MV_CC_ConvertPixelType(handle, &stConvertParam); + if (MV_OK != nRet) { + std::cerr << "[MVS] ConvertPixelType failed: " << std::hex << nRet << std::endl; + return cv::Mat(); + } + + return image; +} + +/** + * @brief 采集线程函数 + * + * 每个相机的独立工作线程: + * 1. 循环调用 MV_CC_GetImageBuffer 获取图像 + * 2. 调用 convertToMat 转换为 cv::Mat + * 3. 更新线程安全缓冲区 + * 4. 
计算并更新 FPS + * + * @param camera_index 相机索引 + */ +void MvsMultiCameraCapture::captureThreadFunc(int camera_index) { + auto& cam = cameras_[camera_index]; + auto& buffer = buffers_[camera_index]; + + MV_FRAME_OUT stFrameOut; + memset(&stFrameOut, 0, sizeof(MV_FRAME_OUT)); + + auto start_time = std::chrono::steady_clock::now(); + int frame_count = 0; + + while (running_) { + // 获取图像缓冲区,超时 1000ms + int nRet = MV_CC_GetImageBuffer(cam.handle, &stFrameOut, 1000); + if (MV_OK == nRet) { + + try { + // 传递 stFrameOut 指针进行转换 + cv::Mat image = convertToMat(cam.handle, &stFrameOut, nullptr); + + if (!image.empty()) { + std::lock_guard lock(buffer->mtx); + buffer->image = image; + buffer->updated = true; + + frame_count++; + auto now = std::chrono::steady_clock::now(); + double elapsed = std::chrono::duration_cast(now - start_time).count(); + if (elapsed >= 1.0) { + buffer->fps = frame_count / elapsed; + frame_count = 0; + start_time = now; + // std::cout << "[MVS] Cam " << camera_index << " FPS: " << buffer->fps << std::endl; + } + } + } catch (const std::exception& e) { + std::cerr << "[MVS] Exception in conversion: " << e.what() << std::endl; + } + + // 释放图像缓冲区 + MV_CC_FreeImageBuffer(cam.handle, &stFrameOut); + } else { + // 如果触发器正在等待,超时是预期的,但我们设置的是连续采集。 + std::cerr << "[MVS] GetImageBuffer failed: " << std::hex << nRet << std::endl; + } + } +} diff --git a/image_capture/src/camera/mvs_multi_camera_capture.h b/image_capture/src/camera/mvs_multi_camera_capture.h new file mode 100644 index 0000000..cc5c391 --- /dev/null +++ b/image_capture/src/camera/mvs_multi_camera_capture.h @@ -0,0 +1,122 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +/** + * @file mvs_multi_camera_capture.h + * @brief 海康 MVS 相机采集类定义 + */ + +/** + * @brief 图像缓冲区结构体 + * 存储从相机采集到的最新图像及相关元数据 + */ +struct ImageBuffer { + cv::Mat image; ///< 存储图像数据 (BGR格式) + std::mutex mtx; ///< 互斥锁,保证多线程访问安全 + bool updated = false; ///< 标记图像是否已更新 + double fps = 0.0; ///< 当前帧率 +}; + 
+/** + * @brief 相机信息结构体 + * 存储相机的句柄和序列号等信息 + */ +struct CameraInfo { + void* handle = nullptr; ///< MVS SDK 相机句柄 + std::string serial_number; ///< 相机序列号 + int index = -1; ///< 相机索引 +}; + +/** + * @brief MVS 多相机采集类 + * + * 功能说明: + * - 封装海康 MVS SDK,管理多相机采集 + * - 将 SDK 原始图像转换为 OpenCV Mat 格式 + * - 管理采集线程和缓冲区 + * - 提供最新的图像数据供上层调用 + */ +class MvsMultiCameraCapture { +public: + MvsMultiCameraCapture(); + ~MvsMultiCameraCapture(); + + /** + * @brief 初始化相机 + * 枚举并打开所有连接的 GenTL GigE 和 USB 设备 + * @return true 初始化成功且至少找到一个设备, false 失败 + */ + bool initialize(); + + /** + * @brief 开始采集 + * 启动所有相机的抓图,并开启采集线程 + * @return true 启动成功, false 失败 + */ + bool start(); + + /** + * @brief 停止采集 + * 停止抓图并关闭所有线程 + */ + void stop(); + + /** + * @brief 获取相机数量 + * @return 已初始化的相机数量 + */ + int getCameraCount() const { return static_cast(cameras_.size()); } + + /** + * @brief 获取指定相机的最新图像 + * @param camera_index 相机索引 + * @param[out] image 输出图像 (BGR格式) + * @param[out] fps 当前帧率 + * @return true 成功获取, false 索引无效或无新图像 + */ + bool getLatestImage(int camera_index, cv::Mat& image, double& fps); + + /** + * @brief 获取相机 ID (序列号) + * @param camera_index 相机索引 + * @return 相机序列号字符串 + */ + std::string getCameraId(int camera_index) const; + + /** + * @brief 检查是否正在运行 + * @return true 运行中, false 已停止 + */ + bool isRunning() const { return running_; } + +private: + /** + * @brief 采集线程函数 + * 每个相机运行在独立的线程中,持续从 SDK 获取图像 + * @param camera_index 相机索引 + */ + void captureThreadFunc(int camera_index); + + /** + * @brief 转换为 OpenCV Mat 格式 + * 使用 SDK 的 MV_CC_ConvertPixelType 函数将原始数据转换为 BGR8 格式 + * @param handle 相机句柄 + * @param pFrame 帧数据指针 (MV_FRAME_OUT*) + * @param pUser 用户数据 (保留,未使用) + * @return cv::Mat 转换后的图像 + */ + static cv::Mat convertToMat(void* handle, void* pFrame, void* pUser); + + std::vector cameras_; ///< 相机列表 + std::vector> buffers_; ///< 缓冲区列表 + std::vector threads_; ///< 采集线程列表 + std::atomic running_; ///< 运行状态标志 + bool initialized_ = false; ///< 初始化状态标志 +}; diff --git 
a/image_capture/src/camera/ty_multi_camera_capture.cpp b/image_capture/src/camera/ty_multi_camera_capture.cpp new file mode 100644 index 0000000..4e787b1 --- /dev/null +++ b/image_capture/src/camera/ty_multi_camera_capture.cpp @@ -0,0 +1,950 @@ +/** + * @file ty_multi_camera_capture.cpp + * @brief TY相机采集实现文件 + * + * 此文件包含了 CameraCapture 类的完整实现 + * - 封装TY相机SDK,管理多相机采集 + * - 将SDK的TYImage转换为OpenCV的cv::Mat格式 + * - 管理采集线程和缓冲区 + * - 输出原始cv::Mat格式的图像,供上层使用 + * + * 设计说明: + * - 每个相机使用独立的采集线程,避免阻塞 + * - 使用线程安全的缓冲区存储最新图像 + * - 使用clone()确保数据安全,避免悬空指针 + * - 统一输出BGR格式的彩色图,便于上层处理 + */ + +#include "ty_multi_camera_capture.h" +#include "TYCoordinateMapper.h" +#include +#include + +#ifdef _WIN32 +#include +#endif + +/** + * @brief 构造函数 + * + * 初始化所有成员变量为默认值:初始化列表的方式初始化成员变量,避免在构造函数体中初始化,提高代码可读性。 + * - streams_configured_: 流未配置 + * - depth_enabled_: 深度流未启用 + * - color_enabled_: 彩色流未启用 + * - running_: 未运行状态 + */ +CameraCapture::CameraCapture() + : streams_configured_(false), depth_enabled_(false), color_enabled_(false), + running_(false) {} + +/** + * @brief 析构函数 + * + * 确保在对象销毁时正确停止所有采集线程和相机 + * 调用stop()来清理资源,避免资源泄漏 + */ +CameraCapture::~CameraCapture() { stop(); } + +/** + * @brief 初始化并配置相机 + * + * 此函数完成以下工作: + * 1. 清理现有资源(如果之前已初始化) + * 2. 查询并打开所有可用的相机设备 + * 3. 为每个相机配置深度流和彩色流 + * 4. 
创建图像处理器和缓冲区 + * + * @param enable_depth 是否启用深度流,true表示启用深度图采集 + * @param enable_color 是否启用彩色流,true表示启用彩色图采集 + * @return true 初始化成功,false 初始化失败(无设备或打开失败) + * + * @note 如果部分相机打开失败,只要至少有一个相机成功打开,函数仍返回true + */ +bool CameraCapture::initialize(bool enable_depth, bool enable_color) { + // 设置控制台代码页为UTF-8,确保中文正确显示 +#ifdef _WIN32 + SetConsoleOutputCP(65001); // UTF-8代码页 + SetConsoleCP(65001); // 设置输入代码页也为UTF-8 +#endif + + // 清理现有资源,确保重新初始化时状态干净 + stop(); + cameras_.clear(); + depth_processers_.clear(); + color_processers_.clear(); + camera_running_.clear(); + buffers_.clear(); + calib_infos_.clear(); + has_calib_info_.clear(); + + // 通过TY SDK的上下文查询设备列表 + // TYContext是单例模式,获取全局唯一的上下文实例 + auto &context = TYContext::getInstance(); + auto device_list = context.queryDeviceList(); + + // 检查是否找到设备-检查device_list是否为空或者设备数量为0 + if (!device_list || device_list->empty()) { + std::cerr << "[CameraCapture] No devices found!" << std::endl; + return false; + } + + // 获取所有可用设备数量,使用所有找到的设备 + int device_count = device_list->devCount(); + + std::cout << "[CameraCapture] Found " << device_count + << " device(s), will use all available devices" << std::endl; + + // 遍历设备列表,逐个打开所有可用相机 + for (int i = 0; i < device_count; i++) { + // 获取设备信息(包含设备ID等) + auto device_info = device_list->getDeviceInfo(i); + if (!device_info) { + std::cerr << "[CameraCapture] Failed to get camera device info! " << i << std::endl; + continue; // 跳过此设备,继续处理下一个 + } + + std::cout << "[CameraCapture] Preparing to open camera! " << i << ": " + << device_info->id() << std::endl; + + // 创建FastCamera对象(SDK提供的相机封装类) + auto camera = std::make_shared(); + // 使用设备ID打开相机 + TY_STATUS status = camera->open(device_info->id()); + + // 检查打开是否成功 + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Failed to open camera! " << i << ": " + << device_info->id() << std::endl; + continue; // 打开失败,跳过此设备 + } else { + std::cout << "[CameraCapture] Successfully opened camera! 
" << i << ": " + << device_info->id() << std::endl; + } + + // 成功打开,添加到相机列表 + cameras_.push_back(camera); + camera_running_.push_back(false); // 初始状态为未运行 + + // 图像处理器稍后在配置流时创建,这里先占位 + depth_processers_.push_back(nullptr); + color_processers_.push_back(nullptr); + + // 获取并保存标定信息 + TY_CAMERA_CALIB_INFO calib_info; + TY_STATUS calib_status = TYGetStruct(camera->handle(), TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA, + &calib_info, sizeof(calib_info)); + if (calib_status == TY_STATUS_OK) { + calib_infos_.push_back(calib_info); + has_calib_info_.push_back(true); + std::cout << "[CameraCapture] Camera " << i << " calibration info fetched." << std::endl; + } else { + calib_infos_.push_back(TY_CAMERA_CALIB_INFO()); + has_calib_info_.push_back(false); + std::cerr << "[CameraCapture] Camera " << i << " failed to fetch calibration info: " << calib_status << std::endl; + } + } + + // 检查是否至少成功打开一个相机 + if (cameras_.empty()) { + std::cerr << "[CameraCapture] No cameras opened successfully!" << std::endl; + return false; + } + + // ========== 配置流 ========== + // 保存流配置标志,供后续使用 + depth_enabled_ = enable_depth; + color_enabled_ = enable_color; + + // 为每个已打开的相机配置流 + for (size_t i = 0; i < cameras_.size(); i++) { + auto &camera = cameras_[i]; + + // 启用深度流 + if (enable_depth) { + // 调用SDK接口启用深度流 + TY_STATUS status = camera->stream_enable(FastCamera::stream_depth); + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " failed to enable depth stream" + << std::endl; + } else { + // 创建深度图像处理器 + // ImageProcesser用于处理SDK返回的原始图像数据 + std::string depth_win_name = "depth_" + std::to_string(i); + depth_processers_[i] = + std::make_shared(depth_win_name.c_str()); + std::cout << "[CameraCapture] Camera " << i << " depth stream enabled" + << std::endl; + } + } + + // 启用彩色流 + if (enable_color) { + // 调用SDK接口启用彩色流 + TY_STATUS status = camera->stream_enable(FastCamera::stream_color); + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " 
failed to enable color stream" + << std::endl; + } else { + // 创建彩色图像处理器 + std::string color_win_name = "color_" + std::to_string(i); + color_processers_[i] = + std::make_shared(color_win_name.c_str()); + std::cout << "[CameraCapture] Camera " << i << " color stream enabled" + << std::endl; + } + } + } + + // ========== 设置分辨率 ========== + // 为每个相机设置深度图和彩色图分辨率为640x480 + for (size_t i = 0; i < cameras_.size(); i++) { + auto &camera = cameras_[i]; + TY_DEV_HANDLE hDevice = camera->handle(); + + if (hDevice == 0) { + std::cerr << "[CameraCapture] Camera " << i << " handle is invalid, skip resolution setting" + << std::endl; + continue; + } + + // 设置深度图分辨率为1280x960(使用图像模式) + if (enable_depth) { + // 方法1:尝试使用图像模式(推荐,同时设置分辨率和格式) + TY_IMAGE_MODE depth_mode = TY_IMAGE_MODE_DEPTH16_1280x960; + TY_STATUS status = TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, depth_mode); + if (status != TY_STATUS_OK) { + // 方法2:如果图像模式不支持,回退到单独设置宽高 + std::cerr << "[CameraCapture] Camera " << i << " failed to set depth image mode 1280x960, trying width/height: " + << status << "(" << TYErrorString(status) << ")" << std::endl; + status = TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_WIDTH, 1280); + if (status == TY_STATUS_OK) { + status = TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_HEIGHT, 960); + } + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " failed to set depth resolution: " + << status << "(" << TYErrorString(status) << ")" << std::endl; + } else { + std::cout << "[CameraCapture] Camera " << i << " depth resolution set to 1280x960 (via width/height)" + << std::endl; + } + } else { + std::cout << "[CameraCapture] Camera " << i << " depth resolution set to 1280x960" + << std::endl; + } + } + + // 设置彩色图分辨率为1280x960,使用YUYV格式 + if (enable_color) { + // 方法1:尝试使用YUYV格式的1280x960图像模式(推荐) + TY_IMAGE_MODE color_mode = TY_IMAGE_MODE_YUYV_1280x960; + TY_STATUS status = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, color_mode); + 
if (status != TY_STATUS_OK) { + // 方法2:如果YUYV模式不支持,尝试其他格式 + std::cerr << "[CameraCapture] Camera " << i << " failed to set YUYV_1280x960 mode, trying alternatives: " + << status << "(" << TYErrorString(status) << ")" << std::endl; + + // 尝试RGB格式 + color_mode = TY_IMAGE_MODE_RGB_1280x960; + status = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, color_mode); + if (status != TY_STATUS_OK) { + // 方法3:如果图像模式都不支持,回退到单独设置宽高 + std::cerr << "[CameraCapture] Camera " << i << " failed to set RGB_1280x960 mode, trying width/height: " + << status << "(" << TYErrorString(status) << ")" << std::endl; + status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_WIDTH, 1280); + if (status == TY_STATUS_OK) { + status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_HEIGHT, 960); + } + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " failed to set color resolution: " + << status << "(" << TYErrorString(status) << ")" << std::endl; + } else { + std::cout << "[CameraCapture] Camera " << i << " color resolution set to 1280x960 (via width/height)" + << std::endl; + } + } else { + std::cout << "[CameraCapture] Camera " << i << " color resolution set to 1280x960 (RGB format)" + << std::endl; + } + } else { + std::cout << "[CameraCapture] Camera " << i << " color resolution set to 1280x960 YUYV" + << std::endl; + } + } + } + + // ========== 设置帧率 ========== + // 根据相机技术参数,640x480分辨率下: + // - 深度图:19 fps + // - RGB图(YUYV格式):25 fps + // 使用连续模式以获得最高帧率 + for (size_t i = 0; i < cameras_.size(); i++) { + auto &camera = cameras_[i]; + TY_DEV_HANDLE hDevice = camera->handle(); + + if (hDevice == 0) { + std::cerr << "[CameraCapture] Camera " << i << " handle is invalid, skip frame rate setting" + << std::endl; + continue; + } + + // 方法1:使用连续模式(TY_TRIGGER_MODE_OFF),让相机以最大帧率连续采集 + // 640x480分辨率下,连续模式应该能达到:深度19fps,RGB(YUYV)25fps + TY_TRIGGER_PARAM trigger_param; + trigger_param.mode = TY_TRIGGER_MODE_OFF; // 连续模式,不使用触发 + trigger_param.fps = 0; // 连续模式下fps参数无效 + 
trigger_param.rsvd = 0; + + TY_STATUS status = TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, + &trigger_param, sizeof(trigger_param)); + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " failed to set trigger mode (continuous): " + << status << "(" << TYErrorString(status) << ")" << std::endl; + + // 方法2:如果连续模式不支持,尝试使用周期性触发模式 + // 根据技术参数,640x480下RGB可达25fps,深度可达19fps + // 设置为25fps以匹配RGB的最高帧率 + trigger_param.mode = TY_TRIGGER_MODE_M_PER; // 主模式,周期性触发 + trigger_param.fps = 25; // 设置帧率为25fps(匹配RGB YUYV格式的最高帧率) + trigger_param.rsvd = 0; + + status = TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, + &trigger_param, sizeof(trigger_param)); + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Camera " << i << " failed to set trigger mode (25fps): " + << status << "(" << TYErrorString(status) << ")" << std::endl; + } else { + std::cout << "[CameraCapture] Camera " << i << " frame rate set to 25fps (trigger mode)" + << std::endl; + } + } else { + std::cout << "[CameraCapture] Camera " << i << " set to continuous mode" + << std::endl; + } + } + + // 标记流已配置 + streams_configured_ = true; + + // ========== 创建图像缓冲区 ========== + // 为每个相机创建一个独立的图像缓冲区 + // 缓冲区用于存储采集线程获取的最新图像,供上层读取 + for (size_t i = 0; i < cameras_.size(); i++) { + buffers_.push_back(std::make_shared()); + } + + std::cout << "[CameraCapture] Initialization complete! Total " << cameras_.size() << " camera(s)" + << std::endl; + return true; +} + +/** + * @brief 启动采集 + * + * 此函数完成以下工作: + * 1. 检查相机和流配置状态 + * 2. 启动所有相机的数据流 + * 3. 为每个相机创建独立的采集线程 + * + * @return true 启动成功,false 启动失败(无相机或流未配置或启动失败) + * + * @note 如果部分相机启动失败,函数返回false,但已启动的相机需要手动停止 + * @note 每个相机使用独立的线程,避免相互阻塞 + */ +bool CameraCapture::start() { + // 检查是否已经在运行 + if (running_) { + std::cout << "[CameraCapture] System already running" << std::endl; + return true; + } + + // 检查是否有相机 + if (cameras_.empty()) { + std::cerr << "[CameraCapture] No cameras to start!" 
<< std::endl; + return false; + } + + // 检查流是否已配置(必须先调用initialize) + if (!streams_configured_) { + std::cerr << "[CameraCapture] Streams not configured!" << std::endl; + return false; + } + + // ========== 启动所有相机 ========== + bool all_started = true; + for (size_t i = 0; i < cameras_.size(); i++) { + auto &camera = cameras_[i]; + // 调用SDK接口启动相机数据流 + TY_STATUS status = camera->start(); + + if (status == TY_STATUS_OK) { + camera_running_[i] = true; // 标记相机为运行状态 + std::cout << "[CameraCapture] Camera " << i << " started" << std::endl; + } else { + camera_running_[i] = false; + std::cerr << "[CameraCapture] Camera " << i << " failed to start" << std::endl; + all_started = false; // 记录有相机启动失败 + } + } + + // 如果有相机启动失败,返回false + if (!all_started) { + return false; + } + + // ========== 启动采集线程 ========== + // 设置运行标志,采集线程会检查此标志来决定是否开启 + running_ = true; + + // 为每个相机创建独立的采集线程 + // 线程函数:captureThreadFunc + // 参数:this指针指向调用 start() 的 CameraCapture + // 对象和相机索引、static_cast:类型转换操作符 + for (size_t i = 0; i < cameras_.size(); i++) { + capture_threads_.emplace_back(&CameraCapture::captureThreadFunc, this, + static_cast(i)); + } + + return true; +} + +/** + * @brief 停止采集 + * + * 此函数完成以下工作: + * 1. 设置运行标志为false,通知采集线程退出 + * 2. 等待所有采集线程结束(join) + * 3. 
停止所有相机的数据流 + * + * @note 此函数是线程安全的,可以在任何线程中调用 + * @note 析构函数会自动调用此函数,确保资源正确释放 + */ +void CameraCapture::stop() { + // 设置运行标志为false,通知所有采集线程退出循环 + running_ = false; + + // 等待所有采集线程结束 + // join()会阻塞直到线程执行完毕,确保线程安全退出 + for (auto &t : capture_threads_) { + if (t.joinable()) { + t.join(); + } + } + capture_threads_.clear(); // 清空线程列表 + + // 停止所有相机的数据流 + for (size_t i = 0; i < cameras_.size(); i++) { + if (camera_running_[i] && cameras_[i]) { + cameras_[i]->stop(); // 调用SDK接口停止相机 + camera_running_[i] = false; // 标记相机为停止状态 + } + } +} + +/** + * @brief 获取相机数量 + * + * @return 当前已初始化的相机数量 + */ +int CameraCapture::getCameraCount() const { + return static_cast(cameras_.size()); +} + +/** + * @brief 获取指定相机的设备ID + * + * @param index 相机索引,从0开始 + * @return 相机设备ID字符串,如果索引无效则返回空字符串 + * + * @note 此函数会重新查询设备列表,确保返回最新的设备ID + */ +std::string CameraCapture::getCameraId(int index) const { + // 检查索引有效性 + if (index < 0 || index >= static_cast(cameras_.size())) { + return ""; + } + + // 重新查询设备列表获取ID + // 注意:这里使用SDK的设备列表而不是内部存储,确保ID是最新的 + auto &context = TYContext::getInstance(); + auto device_list = context.queryDeviceList(); + + if (!device_list || index >= device_list->devCount()) { + return ""; + } + + auto device_info = device_list->getDeviceInfo(index); + if (!device_info) { + return ""; + } + + return std::string(device_info->id()); +} + +/** + * @brief 获取指定相机的最新图像 + * + * 从线程安全的缓冲区中读取最新采集的图像数据 + * + * @param camera_index 相机索引,从0开始 + * @param depth [输出] 深度图,CV_16U格式,包含原始深度值(单位:毫米) + * @param color [输出] 彩色图,BGR格式,CV_8UC3类型 + * @param fps [输出] 当前帧率(帧/秒) + * @return true 成功获取图像,false 索引无效或缓冲区为空 + * + * @note 此函数是线程安全的,使用互斥锁保护缓冲区访问 + * @note 如果某个图像流未启用或尚未采集到数据,对应的Mat将为空 + * @note 使用copyTo()复制数据,确保返回的图像数据独立于缓冲区 + */ +bool CameraCapture::getLatestImages(int camera_index, cv::Mat &depth, + cv::Mat &color, double &fps) { + // 检查索引有效性 + if (camera_index < 0 || camera_index >= static_cast(buffers_.size())) { + return false; + } + + auto buffer = buffers_[camera_index]; + + // 使用互斥锁保护缓冲区访问,确保线程安全 
+ // lock_guard自动管理锁的获取和释放 + std::lock_guard lock(buffer->mtx); + + // 复制深度图数据 + if (!buffer->depth.empty()) { + buffer->depth.copyTo(depth); // 深拷贝,确保数据独立 + } else { + depth = cv::Mat(); // 如果为空,返回空Mat + } + + // 复制彩色图数据 + if (!buffer->color.empty()) { + buffer->color.copyTo(color); // 深拷贝,确保数据独立 + } else { + color = cv::Mat(); // 如果为空,返回空Mat + } + + // 复制FPS值 + fps = buffer->fps; + return true; +} + +/** + * @brief 检查是否正在运行 + * + * @return true 正在运行,false 已停止 + */ +bool CameraCapture::isRunning() const { return running_; } + +/** + * @brief 获取指定相机的深度相机内参 + * + * 从图漾相机SDK获取深度相机的内参(fx, fy, cx, cy) + * 内参存储在相机的标定数据中,通过TYGetStruct API获取 + * + * @param camera_index 相机索引,从0开始 + * @param fx [输出] 焦距x(像素单位) + * @param fy [输出] 焦距y(像素单位) + * @param cx [输出] 主点x坐标(像素单位) + * @param cy [输出] 主点y坐标(像素单位) + * @return true 成功获取内参,false 索引无效或获取失败 + * + * @note 内参矩阵格式为3x3: + * | fx 0 cx | + * | 0 fy cy | + * | 0 0 1 | + * @note 此函数需要在相机初始化后调用(initialize之后) + */ +bool CameraCapture::getDepthCameraIntrinsics(int camera_index, float& fx, float& fy, float& cx, float& cy) { + // 检查索引有效性 + if (camera_index < 0 || camera_index >= static_cast(cameras_.size())) { + std::cerr << "[CameraCapture] Invalid camera index: " << camera_index << std::endl; + return false; + } + + // 检查相机是否已打开 + auto camera = cameras_[camera_index]; + if (!camera) { + std::cerr << "[CameraCapture] Camera " << camera_index << " is not opened" << std::endl; + return false; + } + + // 获取相机设备句柄 + TY_DEV_HANDLE hDevice = camera->handle(); + if (hDevice == 0) { + std::cerr << "[CameraCapture] Camera " << camera_index << " handle is invalid" << std::endl; + return false; + } + + // 获取深度相机的内参 + TY_CAMERA_INTRINSIC intrinsic; + TY_STATUS status = TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_INTRINSIC, + &intrinsic, sizeof(intrinsic)); + + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] Failed to get depth camera intrinsics for camera " + << camera_index << ", error: " << status << "(" << 
TYErrorString(status) << ")" << std::endl; + return false; + } + + // 内参矩阵是3x3,按行主序存储: + // data[0] = fx, data[1] = 0, data[2] = cx + // data[3] = 0, data[4] = fy, data[5] = cy + // data[6] = 0, data[7] = 0, data[8] = 1 + fx = intrinsic.data[0]; // fx + fy = intrinsic.data[4]; // fy + cx = intrinsic.data[2]; // cx + cy = intrinsic.data[5]; // cy + + std::cout << "[CameraCapture] Camera " << camera_index + << " depth intrinsics: fx=" << fx << ", fy=" << fy + << ", cx=" << cx << ", cy=" << cy << std::endl; + + return true; +} + +/** + * @brief 采集线程函数 + * + * 这是每个相机独立运行的采集线程的主函数 + * 主要工作: + * 1. 从SDK获取原始帧数据(TYFrame) + * 2. 提取深度图和彩色图(TYImage) + * 3. 使用图像处理器处理原始数据(如果需要) + * 4. 转换为OpenCV格式(cv::Mat) + * 5. 进行颜色空间转换(统一为BGR) + * 6. 计算帧率 + * 7. 更新线程安全的缓冲区 + * + * @param camera_index 相机索引,标识此线程负责哪个相机 + * + * @note 此函数运行在独立的线程中,每个相机一个线程 + * @note 使用异常处理确保线程异常不会导致程序崩溃 + * @note 使用超时机制避免长时间阻塞 + * @note 缓冲区更新使用互斥锁保护,确保线程安全 + */ +void CameraCapture::captureThreadFunc(int camera_index) { + // 检查相机索引有效性 + if (camera_index < 0 || camera_index >= static_cast(cameras_.size())) { + return; + } + + // 获取此相机对应的缓冲区 + auto buffer = buffers_[camera_index]; + if (!buffer) { + std::cerr << "[CameraCapture] Camera " << camera_index << " buffer invalid" + << std::endl; + return; + } + + // 初始化帧计数和FPS计算相关变量 + int frame_count = 0; + auto start_time = + std::chrono::steady_clock::now(); // 记录开始时间,用于计算FPS + int consecutive_timeouts = 0; // 连续超时计数,用于检测相机是否异常 + + // 使用try-catch捕获异常,确保线程异常不会导致程序崩溃 + try { + // 主循环:持续采集图像直到停止标志被设置或相机停止运行 + while (running_ && camera_running_[camera_index]) { + // 再次检查相机索引有效性(防止在运行过程中相机被移除) + if (camera_index >= static_cast(cameras_.size()) || + !cameras_[camera_index]) { + break; + } + + // 从相机获取帧数据,超时时间500ms + // tryGetFrames是非阻塞的,如果500ms内没有新帧,返回nullptr + // 注意:根据实际测试,单帧处理时间约155ms,加上相机采集时间,500ms是合理的超时值 + // 如果相机帧率很低(<2fps),可以适当增加到1000ms + auto frame = cameras_[camera_index]->tryGetFrames(500); + if (!frame) { + // 获取帧失败(超时或错误) + consecutive_timeouts++; + // 
如果连续超时超过10次,输出警告并短暂休眠 + // 这样可以减少错误日志的噪音,同时避免CPU占用过高 + if (consecutive_timeouts == 10) { + std::cerr << "[CameraCapture] Camera " << camera_index + << " consecutive timeout 10 times, may be low frame rate or connection issue" << std::endl; + } + if (consecutive_timeouts > 10) { + // 连续超时超过10次后,每次超时都休眠100ms,避免CPU空转 + std::this_thread::sleep_for(std::chrono::milliseconds(100)); + } + continue; // 继续下一次循环 + } + + // 成功获取帧,重置超时计数 + consecutive_timeouts = 0; + frame_count++; // 帧计数加1 + + // 记录帧获取时间,用于性能分析 + auto frame_start_time = std::chrono::steady_clock::now(); + + // ========== 处理深度图 ========== + cv::Mat depthMat; + auto depth_img = frame->depthImage(); // 从帧中提取深度图 + if (depth_img) { + // 如果深度流已启用,使用图像处理器处理原始数据 + auto depth_processer = depth_processers_[camera_index]; + if (depth_processer) { + // parse()处理原始TYImage数据,可能进行格式转换或校正 + depth_processer->parse(depth_img); + // image()返回处理后的TYImage + depth_img = depth_processer->image(); + } + // 将TYImage转换为OpenCV的Mat格式 + // TYImageToMat内部使用clone(),确保数据安全 + depthMat = TYImageToMat(depth_img); + } + + // ========== 处理彩色图 ========== + cv::Mat colorMat; + auto color_img = frame->colorImage(); // 从帧中提取彩色图 + if (color_img) { + // 如果彩色流已启用,使用图像处理器处理原始数据 + auto color_processer = color_processers_[camera_index]; + if (color_processer) { + // parse()处理原始TYImage数据 + color_processer->parse(color_img); + // image()返回处理后的TYImage + color_img = color_processer->image(); + } + + // 将TYImage转换为OpenCV的Mat格式 + cv::Mat rawColorMat = TYImageToMat(color_img); + if (!rawColorMat.empty()) { + // 获取像素格式标识,用于确定颜色空间转换方式 + int pixel_format = getPixelFormatId(color_img->pixelFormat()); + + // 性能优化:根据像素格式进行颜色空间转换,统一输出BGR格式 + // 优化:直接转换到目标Mat,避免中间变量 + if (pixel_format == 1) { + // RGB格式,转换为BGR + cv::cvtColor(rawColorMat, colorMat, cv::COLOR_RGB2BGR); + } else if (pixel_format == 2 || pixel_format == 3) { + // YUYV或YVYU格式(YUV422),转换为BGR + // 注意:YVYU和YUYV使用相同的转换代码 + cv::cvtColor(rawColorMat, colorMat, cv::COLOR_YUV2BGR_YUYV); + } else { + // 
BGR格式或其他,直接使用(假设已经是BGR) + // 性能优化:使用move语义,避免不必要的复制 + colorMat = std::move(rawColorMat); + } + } + } + + // ========== 计算FPS ========== + // 使用已采集的帧数和经过的时间计算平均帧率 + auto current_time = std::chrono::steady_clock::now(); + auto elapsed = std::chrono::duration_cast( + current_time - start_time) + .count(); + double fps = 0.0; + if (elapsed > 0) { + // FPS = 帧数 * 1000 / 经过的毫秒数 + fps = (frame_count * 1000.0) / elapsed; + } + + // ========== 更新缓冲区 ========== + // 使用互斥锁保护缓冲区,确保线程安全 + // 注意:这里使用独立的lock_guard作用域,确保锁在更新完成后立即释放 + { + std::lock_guard lock(buffer->mtx); + + // 更新深度图缓冲区 + // 性能优化:使用swap代替copyTo,避免数据复制,直接交换指针 + if (!depthMat.empty()) { + // 使用swap交换数据,这是零拷贝操作,只交换内部指针 + std::swap(buffer->depth, depthMat); + } else { + // 如果深度图为空,清空缓冲区 + buffer->depth = cv::Mat(); + } + + // 更新彩色图缓冲区 + // 性能优化:使用swap代替copyTo,避免数据复制 + if (!colorMat.empty()) { + // 使用swap交换数据,这是零拷贝操作,只交换内部指针 + std::swap(buffer->color, colorMat); + } else { + // 如果彩色图为空,清空缓冲区 + buffer->color = cv::Mat(); + } + + // 更新FPS和帧计数 + buffer->fps = fps; + buffer->frame_count = frame_count; + buffer->updated = true; // 标记缓冲区已更新 + } + // lock_guard在这里自动释放锁 + + // 性能监控:每100帧输出一次处理时间(可选,用于调试) + if (frame_count % 100 == 0) { + auto frame_end_time = std::chrono::steady_clock::now(); + auto frame_process_time = + std::chrono::duration_cast( + frame_end_time - frame_start_time) + .count(); + // if (frame_process_time > 50) { // 如果单帧处理时间超过50ms,输出警告 + // std::cout << "[CameraCapture] 相机 " << camera_index + // << " 单帧处理时间: " << frame_process_time << "ms" + // << std::endl; + // } + } + } // while循环结束 + } catch (const std::exception &e) { + // 捕获标准异常,记录错误信息 + std::cerr << "[CameraCapture] Camera " << camera_index << " capture thread exception: " + << e.what() << std::endl; + } catch (...) 
{ + // 捕获所有其他异常(非标准异常) + std::cerr << "[CameraCapture] Camera " << camera_index + << " capture thread unknown exception" << std::endl; + } + // 线程函数结束,线程自动退出 +} + +/** + * @brief 将TYImage转换为OpenCV的Mat格式 + * + * 此函数将SDK的TYImage格式转换为OpenCV的cv::Mat格式 + * 关键点: + * 1. 根据像素格式确定OpenCV的Mat类型 + * 2. 创建临时Mat包装原始缓冲区(零拷贝视图) + * 3. 使用clone()创建数据副本,确保数据安全 + * + * @param img SDK的TYImage智能指针 + * @return cv::Mat OpenCV格式的图像矩阵,如果输入无效则返回空Mat + * + * @note 使用clone()创建数据副本是必要的,因为: + * - TYImage的数据可能在frame对象销毁后失效 + * - 如果不clone,返回的Mat会引用已释放的内存,导致悬空指针 + * - clone()虽然增加内存和CPU开销,但确保了数据安全 + * + * @note 支持的像素格式: + * - TY_PIXEL_FORMAT_DEPTH16: 16位深度图 -> CV_16U + * - TY_PIXEL_FORMAT_RGB: RGB彩色图 -> CV_8UC3 + * - TY_PIXEL_FORMAT_BGR: BGR彩色图 -> CV_8UC3 + * - TY_PIXEL_FORMAT_MONO: 单色图 -> CV_8U + * - TY_PIXEL_FORMAT_YUYV/YVYU: YUV422格式 -> CV_8UC2 + */ +cv::Mat CameraCapture::TYImageToMat(const std::shared_ptr &img) { + // 检查输入有效性 + if (!img || !img->buffer()) + return cv::Mat(); + + // 根据SDK的像素格式确定OpenCV的Mat数据类型 + int type = -1; + switch (img->pixelFormat()) { + case TY_PIXEL_FORMAT_DEPTH16: + type = CV_16U; // 16位无符号整数,用于深度值 + break; + case TY_PIXEL_FORMAT_RGB: + type = CV_8UC3; // 8位无符号整数,3通道(RGB) + break; + case TY_PIXEL_FORMAT_MONO: + type = CV_8U; // 8位无符号整数,单通道(灰度) + break; + case TY_PIXEL_FORMAT_YVYU: + case TY_PIXEL_FORMAT_YUYV: + type = CV_8UC2; // 8位无符号整数,2通道(YUV422) + break; + case TY_PIXEL_FORMAT_BGR: + type = CV_8UC3; // 8位无符号整数,3通道(BGR) + break; + default: + type = CV_8U; // 默认单通道 + break; + } + + // 创建临时Mat对象,直接包装原始缓冲区(零拷贝) + // 注意:这只是创建一个视图,不复制数据 + // 参数:高度、宽度、数据类型、原始数据指针 + cv::Mat tempMat(img->height(), img->width(), type, img->buffer()); + + // 使用clone()创建数据副本 + // 这是关键步骤:确保返回的Mat拥有独立的数据副本 + // 即使原始的TYImage被销毁,返回的Mat仍然有效 + // 虽然会增加内存和CPU开销,但这是确保数据安全的必要代价 + return tempMat.clone(); +} + +/** + * @brief 获取像素格式标识 + * + * 将SDK的像素格式枚举转换为简单的整数标识 + * 用于后续的颜色空间转换判断 + * + * @param pixel_format SDK的像素格式枚举值 + * @return 像素格式标识: + * - 0: BGR格式(或默认) + * - 1: RGB格式 + * - 2: YUYV格式(YUV422) + * - 
3: YVYU格式(YUV422) + * + * @note 此函数用于简化颜色空间转换的判断逻辑 + */ +int CameraCapture::getPixelFormatId(TY_PIXEL_FORMAT pixel_format) { + switch (pixel_format) { + case TY_PIXEL_FORMAT_RGB: + return 1; // RGB格式 + case TY_PIXEL_FORMAT_YUYV: + return 2; // YUYV格式(YUV422) + case TY_PIXEL_FORMAT_YVYU: + return 3; // YVYU格式(YUV422) + case TY_PIXEL_FORMAT_BGR: + default: + return 0; // BGR格式或默认 + } +} + +/** + * @brief 利用SDK生成点云 + * @param camera_index 相机索引 + * @param depth_img 深度图 + * @param out_points 输出点云 + * @return 是否成功 + */ +bool CameraCapture::computePointCloud(int camera_index, const cv::Mat& depth_img, std::vector& out_points) { + if (camera_index < 0 || camera_index >= static_cast(cameras_.size())) { + return false; + } + + if (!has_calib_info_[camera_index]) { + std::cerr << "[CameraCapture] No calibration info for camera " << camera_index << std::endl; + return false; + } + + if (depth_img.empty()) { + return false; + } + + // Check for valid intrinsics to prevent division by zero crash + float fx = calib_infos_[camera_index].intrinsic.data[0]; + float fy = calib_infos_[camera_index].intrinsic.data[4]; + + if (std::abs(fx) < 1e-6 || std::abs(fy) < 1e-6) { + std::cerr << "[CameraCapture] Invalid intrinsics for camera " << camera_index + << " (fx=" << fx << ", fy=" << fy << "). Cannot compute point cloud." 
<< std::endl; + return false; + } + + // 调整输出容器大小 + out_points.resize(depth_img.cols * depth_img.rows); + + // TY_VECT_3F {float x, y, z} 与 Point3D {float x, y, z} 内存布局兼容 + // 直接使用 SDK 函数生成点云 + TY_VECT_3F* p3d = reinterpret_cast(out_points.data()); + + TY_STATUS status = TYMapDepthImageToPoint3d(&calib_infos_[camera_index], + depth_img.cols, depth_img.rows, + (const uint16_t*)depth_img.data, + p3d); + + if (status != TY_STATUS_OK) { + std::cerr << "[CameraCapture] TYMapDepthImageToPoint3d failed: " << status << std::endl; + return false; + } + + return true; +} + \ No newline at end of file diff --git a/image_capture/src/camera/ty_multi_camera_capture.h b/image_capture/src/camera/ty_multi_camera_capture.h new file mode 100644 index 0000000..672fa31 --- /dev/null +++ b/image_capture/src/camera/ty_multi_camera_capture.h @@ -0,0 +1,159 @@ +#pragma once + +#include "Frame.hpp" +#include "Device.hpp" +#include "TYApi.h" +#include +#include +#include +#include +#include +#include +#include +#include "../common_types.h" + +using namespace percipio_layer; + +/** + * @brief CameraCapture + * 图像采集层,负责从SDK获取图像并转换为OpenCV格式 + * + * 功能说明: + * - 封装TY相机SDK,管理多相机采集 + * - 将SDK的TYImage转换为OpenCV的cv::Mat格式 + * - 管理采集线程和缓冲区 + * - 输出原始cv::Mat格式的图像,供上层使用 + * + * 设计原则: + * - 此模块属于图像采集层,可以依赖SDK + * - 输出标准OpenCV格式,实现SDK与算法层的隔离 + * - 不进行图像处理(如伪彩色映射等),只负责采集和格式转换 + */ +class CameraCapture +{ +public: + /** + * @brief 图像缓冲区结构 + * 存储原始采集的图像数据(cv::Mat格式) + */ + struct ImageBuffer { + cv::Mat depth; // 原始深度图(CV_16U格式) + cv::Mat color; // 原始彩色图(BGR格式) + double fps = 0.0; // 当前帧率 + int frame_count = 0; // 帧计数 + std::mutex mtx; // 互斥锁 + std::atomic updated{false}; // 更新标志、std::atomic 确保在多线程环境中对 updated 的操作是原子的,不会发生竞争条件。 + }; + + CameraCapture(); + ~CameraCapture(); + + /** + * 初始化并配置相机 + * @param enable_depth 是否启用深度流 + * @param enable_color 是否启用彩色流 + * @return 是否成功 + */ + bool initialize(bool enable_depth = true, bool enable_color = true); + + /** + * 启动采集 + * @return 是否成功 + */ + bool start(); + + /** 
+     * Stop capturing.
+     */
+    void stop();
+
+    /**
+     * Number of cameras.
+     * @return camera count
+     */
+    int getCameraCount() const;
+
+    /**
+     * Device ID of a camera.
+     * @param index camera index
+     * @return device ID string
+     */
+    std::string getCameraId(int index) const;
+
+    /**
+     * Latest images of one camera.
+     * @param camera_index camera index
+     * @param depth output depth image (CV_16U, raw depth values)
+     * @param color output color image (BGR)
+     * @param fps output frame rate
+     * @return whether images were obtained
+     */
+    bool getLatestImages(int camera_index, cv::Mat& depth, cv::Mat& color, double& fps);
+
+    /**
+     * Whether capture is running.
+     * @return running state
+     */
+    bool isRunning() const;
+
+    /**
+     * Depth-camera intrinsics of one camera.
+     * @param camera_index camera index
+     * @param fx [out] focal length x
+     * @param fy [out] focal length y
+     * @param cx [out] principal point x
+     * @param cy [out] principal point y
+     * @return whether the intrinsics were obtained
+     */
+    // Added method for depth camera intrinsics
+    bool getDepthCameraIntrinsics(int camera_index, float& fx, float& fy, float& cx, float& cy);
+
+    /**
+     * @brief Generate a point cloud via the SDK.
+     * @param camera_index camera index
+     * @param depth_img depth image
+     * @param out_points output point cloud
+     * @return success
+     */
+    bool computePointCloud(int camera_index, const cv::Mat& depth_img, std::vector& out_points);
+
+private:
+    /**
+     * Capture-thread entry point.
+     * @param camera_index camera index
+     */
+    void captureThreadFunc(int camera_index);
+
+    /**
+     * Convert a TYImage to an OpenCV Mat.
+     * @param img TYImage smart pointer
+     * @return cv::Mat image matrix
+     */
+    static cv::Mat TYImageToMat(const std::shared_ptr &img);
+
+    /**
+     * Pixel-format tag used for colour-space conversion.
+     * @param pixel_format SDK pixel-format enum
+     * @return tag (0: BGR, 1: RGB, 2: YUYV, 3: YVYU)
+     */
+    static int getPixelFormatId(TY_PIXEL_FORMAT pixel_format);
+
+    // SDK members (functionality of the former MultiCameraCapture).
+    // NOTE(review): the template arguments of the containers below were
+    // lost when this patch was archived — restore the element types from
+    // the original header.
+    std::vector> cameras_;          // camera objects
+    std::vector> depth_processers_; // depth image processors
+    std::vector> color_processers_; // color image processors
+
+    std::vector camera_running_;    // per-camera running state
+    bool streams_configured_;       // whether streams are configured
+    bool depth_enabled_;            // depth stream enabled
+    bool color_enabled_;            // color stream enabled
+
+    // Capture threads and buffers
+    std::vector> buffers_;          // image buffers
+    std::vector capture_threads_;   // capture threads
+    std::atomic running_;           // run flag
+
+    // Calibration info
+    std::vector calib_infos_;
+    std::vector has_calib_info_;
+};
diff --git a/image_capture/src/common/config_manager.cpp b/image_capture/src/common/config_manager.cpp
new file mode 100644
index 0000000..d34db58
--- /dev/null
+++ b/image_capture/src/common/config_manager.cpp
@@ -0,0 +1,734 @@
+#include "config_manager.h"
+// NOTE(review): three #include targets were lost in the patch — from the
+// usage below they are presumably <fstream>, <iostream> and <sstream>;
+// restore from the original file.
+#include
+#include
+#include
+
+
+// Meyers-singleton accessor: one ConfigManager per process.
+ConfigManager &ConfigManager::getInstance() {
+  static ConfigManager instance;
+  return instance;
+}
+
+ConfigManager::ConfigManager() {
+  // Default configuration: an empty JSON object.
+  config_json_ = json11::Json::object{};
+}
+
+// Loads and parses the JSON config file. Returns false (and logs to stderr)
+// when the file cannot be opened or fails to parse. Holds mutex_ throughout.
+bool ConfigManager::loadConfig(const std::string &config_path) {
+  std::lock_guard lock(mutex_);
+
+  std::ifstream file(config_path);
+  if (!file.is_open()) {
+    std::cerr << "ConfigManager: Failed to open config file: " << config_path
+              << std::endl;
+    return false;
+  }
+
+  std::stringstream buffer;
+  buffer << file.rdbuf();
+  std::string content = buffer.str();
+
+  std::string err;
+  config_json_ = json11::Json::parse(content, err);
+
+  if (!err.empty()) {
+    std::cerr << "ConfigManager: Failed to parse JSON: " << err << std::endl;
+    return false;
+  }
+
+  std::cout << "ConfigManager: Successfully loaded config from " << config_path
+            << std::endl;
+  return true;
+}
+
+// Serialises the current config to disk.
+// NOTE(review): the stream is not checked after writing — a full disk or
+// I/O error still returns true; consider checking file.good().
+bool ConfigManager::saveConfig(const std::string &config_path) {
+  std::lock_guard lock(mutex_);
+
+  std::string json_str = config_json_.dump();
+
+  std::ofstream file(config_path);
+  if (!file.is_open()) {
+    std::cerr << "ConfigManager: Failed to open config file for writing: " << config_path
+              << std::endl;
+    return false;
+  }
+
+  file << json_str;
+  file.close();
+
+  std::cout << "ConfigManager: Successfully saved config to " << config_path << std::endl;
+  return true;
+}
+
+// --- Accessors & Setters ---
+
+// Redis host; defaults to 127.0.0.1 when the "redis" section is absent.
+std::string ConfigManager::getRedisHost() const {
+  std::lock_guard lock(mutex_);
+  if (config_json_["redis"].is_object()) {
+    return config_json_["redis"]["host"].string_value();
+  }
+  return "127.0.0.1"; // Default
+}
+
+// Redis port; defaults to 6379 when unset or non-positive.
+int ConfigManager::getRedisPort() const {
+  
std::lock_guard lock(mutex_); + if (config_json_["redis"].is_object()) { + int port = config_json_["redis"]["port"].int_value(); + return port > 0 ? port : 6379; + } + return 6379; +} + +int ConfigManager::getRedisDb() const { + std::lock_guard lock(mutex_); + if (config_json_["redis"].is_object()) { + return config_json_["redis"]["db"].int_value(); + } + return 0; +} + +bool ConfigManager::isDepthEnabled() const { + std::lock_guard lock(mutex_); + if (config_json_["cameras"].is_object()) { + auto val = config_json_["cameras"]["depth_enabled"]; + if (val.is_bool()) + return val.bool_value(); + } + return true; // Default +} + +bool ConfigManager::isColorEnabled() const { + std::lock_guard lock(mutex_); + if (config_json_["cameras"].is_object()) { + auto val = config_json_["cameras"]["color_enabled"]; + if (val.is_bool()) + return val.bool_value(); + } + return true; // Default +} + +std::vector +ConfigManager::getCameraMappings() const { + std::lock_guard lock(mutex_); + std::vector mappings; + if (config_json_["cameras"].is_object() && + config_json_["cameras"]["mapping"].is_array()) { + for (const auto &item : config_json_["cameras"]["mapping"].array_items()) { + CameraMapping m; + m.id = item["id"].string_value(); + m.index = item["index"].int_value(); + mappings.push_back(m); + } + } + return mappings; +} + +std::string ConfigManager::getSavePath() const { + std::lock_guard lock(mutex_); + if (config_json_["vision"].is_object()) { + return config_json_["vision"]["save_path"].string_value(); + } + return "./"; +} + +int ConfigManager::getLogLevel() const { + std::lock_guard lock(mutex_); + if (config_json_["vision"].is_object()) { + return config_json_["vision"]["log_level"].int_value(); + } + return 0; +} + +// --- Algorithm Config Accessors --- + +// Beam/Rack Deflection - ROI Points +std::vector ConfigManager::getBeamROIPoints() const { + std::lock_guard lock(mutex_); + std::vector points; + + if (config_json_["algorithms"].is_object() && + 
config_json_["algorithms"]["beam_rack_deflection"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"]["beam_roi_points"].is_array()) { + + const auto& points_array = config_json_["algorithms"]["beam_rack_deflection"]["beam_roi_points"].array_items(); + for (const auto& point : points_array) { + if (point.is_object()) { + int x = point["x"].int_value(); + int y = point["y"].int_value(); + points.push_back(cv::Point2i(x, y)); + } + } + } + + // Default values if not configured + if (points.empty()) { + points = { + cv::Point2i(100, 50), + cv::Point2i(540, 80), + cv::Point2i(540, 280), + cv::Point2i(100, 280) + }; + } + + return points; +} + +std::vector ConfigManager::getRackROIPoints() const { + std::lock_guard lock(mutex_); + std::vector points; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"]["rack_roi_points"].is_array()) { + + const auto& points_array = config_json_["algorithms"]["beam_rack_deflection"]["rack_roi_points"].array_items(); + for (const auto& point : points_array) { + if (point.is_object()) { + int x = point["x"].int_value(); + int y = point["y"].int_value(); + points.push_back(cv::Point2i(x, y)); + } + } + } + + // Default values if not configured + if (points.empty()) { + points = { + cv::Point2i(50, 50), + cv::Point2i(150, 50), + cv::Point2i(150, 430), + cv::Point2i(50, 430) + }; + } + + return points; +} + +// Beam/Rack Deflection - Thresholds +std::vector ConfigManager::getBeamThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"]["beam_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["beam_rack_deflection"]["beam_thresholds"]; + 
thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + // Default values if not configured + if (thresholds.size() != 4) { + thresholds = {-10.0f, -5.0f, 5.0f, 10.0f}; + } + + return thresholds; +} + +std::vector ConfigManager::getRackThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"].is_object() && + config_json_["algorithms"]["beam_rack_deflection"]["rack_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["beam_rack_deflection"]["rack_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + // Default values if not configured + if (thresholds.size() != 4) { + thresholds = {-6.0f, -3.0f, 3.0f, 6.0f}; + } + + return thresholds; +} + +// Pallet Offset - Thresholds +std::vector ConfigManager::getPalletOffsetLatThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["pallet_offset"].is_object() && + config_json_["algorithms"]["pallet_offset"]["offset_lat_mm_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["pallet_offset"]["offset_lat_mm_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + if (thresholds.size() != 4) { + 
thresholds = {-20.0f, -10.0f, 10.0f, 20.0f}; + } + + return thresholds; +} + +std::vector ConfigManager::getPalletOffsetLonThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["pallet_offset"].is_object() && + config_json_["algorithms"]["pallet_offset"]["offset_lon_mm_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["pallet_offset"]["offset_lon_mm_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + if (thresholds.size() != 4) { + thresholds = {-20.0f, -10.0f, 10.0f, 20.0f}; + } + + return thresholds; +} + +std::vector ConfigManager::getPalletRotationAngleThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["pallet_offset"].is_object() && + config_json_["algorithms"]["pallet_offset"]["rotation_angle_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["pallet_offset"]["rotation_angle_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + if (thresholds.size() != 4) { + thresholds = {-5.0f, -2.5f, 2.5f, 5.0f}; + } + + return thresholds; +} + +std::vector ConfigManager::getPalletHoleDefLeftThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["pallet_offset"].is_object() && + 
config_json_["algorithms"]["pallet_offset"]["hole_def_mm_left_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["pallet_offset"]["hole_def_mm_left_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + if (thresholds.size() != 4) { + thresholds = {-8.0f, -4.0f, 4.0f, 8.0f}; + } + + return thresholds; +} + +std::vector ConfigManager::getPalletHoleDefRightThresholds() const { + std::lock_guard lock(mutex_); + std::vector thresholds; + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["pallet_offset"].is_object() && + config_json_["algorithms"]["pallet_offset"]["hole_def_mm_right_thresholds"].is_object()) { + + const auto& thresh = config_json_["algorithms"]["pallet_offset"]["hole_def_mm_right_thresholds"]; + thresholds.push_back(static_cast(thresh["A"].number_value())); + thresholds.push_back(static_cast(thresh["B"].number_value())); + thresholds.push_back(static_cast(thresh["C"].number_value())); + thresholds.push_back(static_cast(thresh["D"].number_value())); + } + + if (thresholds.size() != 4) { + thresholds = {-8.0f, -4.0f, 4.0f, 8.0f}; + } + + return thresholds; +} + +// Slot Occupancy +float ConfigManager::getSlotOccupancyDepthThreshold() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["slot_occupancy"].is_object()) { + return static_cast(config_json_["algorithms"]["slot_occupancy"]["depth_threshold_mm"].number_value()); + } + + return 100.0f; // Default +} + +float ConfigManager::getSlotOccupancyConfidenceThreshold() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["slot_occupancy"].is_object()) { + return 
static_cast(config_json_["algorithms"]["slot_occupancy"]["confidence_threshold"].number_value()); + } + + return 0.8f; // Default +} + +// Visual Inventory +float ConfigManager::getVisualInventoryBarcodeConfidence() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["visual_inventory"].is_object()) { + return static_cast(config_json_["algorithms"]["visual_inventory"]["barcode_confidence_threshold"].number_value()); + } + + return 0.7f; // Default +} + +bool ConfigManager::getVisualInventoryROIEnabled() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["visual_inventory"].is_object()) { + return config_json_["algorithms"]["visual_inventory"]["roi_enabled"].bool_value(); + } + + return true; // Default +} + +// General Algorithm Parameters +float ConfigManager::getAlgorithmMinDepth() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["general"].is_object()) { + return static_cast(config_json_["algorithms"]["general"]["min_depth_mm"].number_value()); + } + + return 800.0f; // Default +} + +float ConfigManager::getAlgorithmMaxDepth() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["general"].is_object()) { + return static_cast(config_json_["algorithms"]["general"]["max_depth_mm"].number_value()); + } + + return 3000.0f; // Default +} + +int ConfigManager::getAlgorithmSamplePoints() const { + std::lock_guard lock(mutex_); + + if (config_json_["algorithms"].is_object() && + config_json_["algorithms"]["general"].is_object()) { + return config_json_["algorithms"]["general"]["sample_points"].int_value(); + } + + return 50; // Default +} + +// Beam/Rack Deflection - Setters +void ConfigManager::setBeamROIPoints(const std::vector& points) { + std::lock_guard lock(mutex_); + // Note: json11 is immutable, 
we need to reconstruct the object or use a mutable json library. + // For this project using json11, we have to rebuild the part of the json tree. + // This is a bit expensive but config saving is rare. + // Actually, to make it easier with json11, we might need to parse, modify and dump if we want to keep comments? + // But json11 parser doesn't keep comments. + // Let's just modify the internal map if possible or rebuild. + // json11::Json is const. We need to cast it away or rebuild the whole structure? + // Rebuilding is safer. + + // Implementation Note: Since json11 is immutable, proper way is to create new Json objects. + // For simplicity in this context, we will use a "deep update" strategy helper if we had one. + // But here we need to do it manually. + + // Let's cheat a bit and use const_cast for the "value" if it was a simpler lib, but json11 uses shared_ptr... + // Okay, we will use a temporary mutable map approach for the 'algorithms' section. + + // Helper to get mutable map from Json object + auto get_mutable_map = [](const json11::Json& j) -> json11::Json::object { + return j.object_items(); + }; + + json11::Json::object root_map = get_mutable_map(config_json_); + json11::Json::object algo_map = get_mutable_map(root_map["algorithms"]); + json11::Json::object beam_rack_map = get_mutable_map(algo_map["beam_rack_deflection"]); + + std::vector points_json; + for(const auto& p : points) { + points_json.push_back(json11::Json::object{{"x", p.x}, {"y", p.y}}); + } + beam_rack_map["beam_roi_points"] = points_json; + + algo_map["beam_rack_deflection"] = beam_rack_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setRackROIPoints(const std::vector& points) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object beam_rack_map = algo_map["beam_rack_deflection"].object_items(); + + 
std::vector points_json; + for(const auto& p : points) { + points_json.push_back(json11::Json::object{{"x", p.x}, {"y", p.y}}); + } + beam_rack_map["rack_roi_points"] = points_json; + + algo_map["beam_rack_deflection"] = beam_rack_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setBeamThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object beam_rack_map = algo_map["beam_rack_deflection"].object_items(); + + beam_rack_map["beam_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["beam_rack_deflection"] = beam_rack_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setRackThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object beam_rack_map = algo_map["beam_rack_deflection"].object_items(); + + beam_rack_map["rack_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["beam_rack_deflection"] = beam_rack_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +// Pallet Offset Setters +void ConfigManager::setPalletOffsetLatThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object pallet_map = algo_map["pallet_offset"].object_items(); + + 
pallet_map["offset_lat_mm_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["pallet_offset"] = pallet_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setPalletOffsetLonThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object pallet_map = algo_map["pallet_offset"].object_items(); + + pallet_map["offset_lon_mm_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["pallet_offset"] = pallet_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setPalletRotationAngleThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object pallet_map = algo_map["pallet_offset"].object_items(); + + pallet_map["rotation_angle_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["pallet_offset"] = pallet_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setPalletHoleDefLeftThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object pallet_map = algo_map["pallet_offset"].object_items(); + + pallet_map["hole_def_mm_left_thresholds"] = json11::Json::object{ + {"A", 
thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["pallet_offset"] = pallet_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setPalletHoleDefRightThresholds(const std::vector& thresholds) { + if(thresholds.size() < 4) return; + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object pallet_map = algo_map["pallet_offset"].object_items(); + + pallet_map["hole_def_mm_right_thresholds"] = json11::Json::object{ + {"A", thresholds[0]}, {"B", thresholds[1]}, {"C", thresholds[2]}, {"D", thresholds[3]} + }; + + algo_map["pallet_offset"] = pallet_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +// Slot Occupancy Setters +void ConfigManager::setSlotOccupancyDepthThreshold(float value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object slot_map = algo_map["slot_occupancy"].object_items(); + + slot_map["depth_threshold_mm"] = value; + + algo_map["slot_occupancy"] = slot_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setSlotOccupancyConfidenceThreshold(float value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object slot_map = algo_map["slot_occupancy"].object_items(); + + slot_map["confidence_threshold"] = value; + + algo_map["slot_occupancy"] = slot_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +// Visual Inventory Setters +void ConfigManager::setVisualInventoryBarcodeConfidence(float value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = 
config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object vis_map = algo_map["visual_inventory"].object_items(); + + vis_map["barcode_confidence_threshold"] = value; + + algo_map["visual_inventory"] = vis_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setVisualInventoryROIEnabled(bool value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object vis_map = algo_map["visual_inventory"].object_items(); + + vis_map["roi_enabled"] = value; + + algo_map["visual_inventory"] = vis_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +// General Setters +void ConfigManager::setAlgorithmMinDepth(float value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object gen_map = algo_map["general"].object_items(); + + gen_map["min_depth_mm"] = value; + + algo_map["general"] = gen_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setAlgorithmMaxDepth(float value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object gen_map = algo_map["general"].object_items(); + + gen_map["max_depth_mm"] = value; + + algo_map["general"] = gen_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +void ConfigManager::setAlgorithmSamplePoints(int value) { + std::lock_guard lock(mutex_); + json11::Json::object root_map = config_json_.object_items(); + json11::Json::object algo_map = root_map["algorithms"].object_items(); + json11::Json::object gen_map = algo_map["general"].object_items(); + + 
gen_map["sample_points"] = value; + + algo_map["general"] = gen_map; + root_map["algorithms"] = algo_map; + config_json_ = root_map; +} + +// Generic access +std::string ConfigManager::getString(const std::string &key, + const std::string &default_value) const { + std::lock_guard lock(mutex_); + // Simple top-level access, or implement dot notation parsing if needed. + // For now assuming top level. + if (config_json_[key].is_string()) { + return config_json_[key].string_value(); + } + return default_value; +} + +int ConfigManager::getInt(const std::string &key, int default_value) const { + std::lock_guard lock(mutex_); + if (config_json_[key].is_number()) { + return config_json_[key].int_value(); + } + return default_value; +} + +bool ConfigManager::getBool(const std::string &key, bool default_value) const { + std::lock_guard lock(mutex_); + if (config_json_[key].is_bool()) { + return config_json_[key].bool_value(); + } + return default_value; +} diff --git a/image_capture/src/common/config_manager.h b/image_capture/src/common/config_manager.h new file mode 100644 index 0000000..0001389 --- /dev/null +++ b/image_capture/src/common/config_manager.h @@ -0,0 +1,124 @@ +#pragma once + +#include "json11.hpp" +#include +#include +#include +#include +#include + + +/** + * @brief ConfigManager + * 全局配置管理器,单例模式 + * 负责加载和提供系统配置参数 + */ +class ConfigManager { +public: + static ConfigManager &getInstance(); + + // 禁止拷贝 + ConfigManager(const ConfigManager &) = delete; + ConfigManager &operator=(const ConfigManager &) = delete; + + /** + * 加载配置文件 + * @param config_path 配置文件路径,默认在当前目录查找 config.json + * @return 是否成功加载 + */ + bool loadConfig(const std::string &config_path = "config.json"); + + /** + * 保存配置文件 + * @param config_path 配置文件路径,默认在当前目录查找 config.json + * @return 是否成功保存 + */ + bool saveConfig(const std::string &config_path = "config.json"); + + // --- Accessors & Setters --- + + // Redis Config + std::string getRedisHost() const; + int getRedisPort() const; + int 
getRedisDb() const; + + // Camera Config + bool isDepthEnabled() const; + bool isColorEnabled() const; + struct CameraMapping { + std::string id; + int index; + }; + std::vector getCameraMappings() const; + + // Vision/Global Config + std::string getSavePath() const; + int getLogLevel() const; + + // Algorithm Config - Beam/Rack Deflection + std::vector getBeamROIPoints() const; + void setBeamROIPoints(const std::vector& points); + + std::vector getRackROIPoints() const; + void setRackROIPoints(const std::vector& points); + + std::vector getBeamThresholds() const; // Returns [A, B, C, D] + void setBeamThresholds(const std::vector& thresholds); + + std::vector getRackThresholds() const; // Returns [A, B, C, D] + void setRackThresholds(const std::vector& thresholds); + + // Algorithm Config - Pallet Offset + std::vector getPalletOffsetLatThresholds() const; + void setPalletOffsetLatThresholds(const std::vector& thresholds); + + std::vector getPalletOffsetLonThresholds() const; + void setPalletOffsetLonThresholds(const std::vector& thresholds); + + std::vector getPalletRotationAngleThresholds() const; + void setPalletRotationAngleThresholds(const std::vector& thresholds); + + std::vector getPalletHoleDefLeftThresholds() const; + void setPalletHoleDefLeftThresholds(const std::vector& thresholds); + + std::vector getPalletHoleDefRightThresholds() const; + void setPalletHoleDefRightThresholds(const std::vector& thresholds); + + // Algorithm Config - Slot Occupancy + float getSlotOccupancyDepthThreshold() const; + void setSlotOccupancyDepthThreshold(float value); + + float getSlotOccupancyConfidenceThreshold() const; + void setSlotOccupancyConfidenceThreshold(float value); + + // Algorithm Config - Visual Inventory + float getVisualInventoryBarcodeConfidence() const; + void setVisualInventoryBarcodeConfidence(float value); + + bool getVisualInventoryROIEnabled() const; + void setVisualInventoryROIEnabled(bool value); + + // Algorithm Config - General + float 
getAlgorithmMinDepth() const; + void setAlgorithmMinDepth(float value); + + float getAlgorithmMaxDepth() const; + void setAlgorithmMaxDepth(float value); + + int getAlgorithmSamplePoints() const; + void setAlgorithmSamplePoints(int value); + + + // Generic access (for dynamic access) + std::string getString(const std::string &key, + const std::string &default_value = "") const; + int getInt(const std::string &key, int default_value = 0) const; + bool getBool(const std::string &key, bool default_value = false) const; + +private: + ConfigManager(); + ~ConfigManager() = default; + + json11::Json config_json_; + mutable std::mutex mutex_; +}; diff --git a/image_capture/src/common/log_manager.cpp b/image_capture/src/common/log_manager.cpp new file mode 100644 index 0000000..2637b4f --- /dev/null +++ b/image_capture/src/common/log_manager.cpp @@ -0,0 +1,104 @@ +#include "log_manager.h" +#include + +LogManager &LogManager::getInstance() { + static LogManager instance; + return instance; +} + +void LogManager::setCallback(LogCallback callback) { + std::lock_guard lock(mutex_); + callback_ = callback; +} + +#include "config_manager.h" +#include +#include +#include +#include +#include +#include + + +void LogManager::logFormat(LogLevel level, const char *fmt, ...) { + // 1. Level Filter + if (static_cast(level) < ConfigManager::getInstance().getLogLevel()) { + return; + } + + // 2. Format Message + va_list args; + va_start(args, fmt); + + // Determine required size + va_list args_copy; + va_copy(args_copy, args); + int size = std::vsnprintf(nullptr, 0, fmt, args_copy); + va_end(args_copy); + + if (size < 0) { + va_end(args); + return; // Encoding error + } + + std::vector buffer(size + 1); + std::vsnprintf(buffer.data(), buffer.size(), fmt, args); + va_end(args); + + std::string message(buffer.data(), size); + + // 3. Delegate + logInternal(level, message); +} + +void LogManager::logInternal(LogLevel level, const std::string &message) { + // 1. 
Add Timestamp and Level Prefix + const char *levelStr = "[INFO] "; + bool isError = false; + + switch (level) { + case LogLevel::DEBUG: + levelStr = "[DEBUG] "; + break; + case LogLevel::INFO: + levelStr = "[INFO] "; + break; + case LogLevel::WARNING: + levelStr = "[WARN] "; + break; + case LogLevel::ERROR: + levelStr = "[ERROR] "; + isError = true; + break; + } + + auto t = std::time(nullptr); + auto tm = *std::localtime(&t); + + std::ostringstream oss; + oss << std::put_time(&tm, "[%Y-%m-%d %H:%M:%S] ") << levelStr << message + << "\n"; + std::string formattedMsg = oss.str(); + + // 2. Output + std::lock_guard lock(mutex_); + if (callback_) { + callback_(formattedMsg, isError); + } else { + if (isError) { + std::cerr << formattedMsg; + } else { + std::cout << formattedMsg; + } + } +} + +// 兼容旧接口 +void LogManager::log(const std::string &message, bool isError) { + logInternal(isError ? LogLevel::ERROR : LogLevel::INFO, message); +} + +void LogManager::clearCallback() { + std::lock_guard lock(mutex_); + callback_ = nullptr; +} diff --git a/image_capture/src/common/log_manager.h b/image_capture/src/common/log_manager.h new file mode 100644 index 0000000..f38c400 --- /dev/null +++ b/image_capture/src/common/log_manager.h @@ -0,0 +1,80 @@ +#pragma once + +#include +#include +#include +#include + + +/** + * @brief 全局日志管理器(单例模式) + * + * 用于将标准输出(cout/cerr)重定向到Qt日志系统 + * 任何模块都可以通过LogManager输出日志,这些日志会被重定向到MainWindow的日志文本框 + */ +// 日志级别枚举 +enum class LogLevel { DEBUG = 0, INFO = 1, WARNING = 2, ERROR = 3 }; + +/** + * @brief 全局日志管理器(单例模式) + * + * 用于将标准输出(cout/cerr)重定向到Qt日志系统 + * 支持格式化输出和日志级别过滤 + */ +class LogManager { +public: + // 日志回调函数类型 + using LogCallback = + std::function; + + /** + * @brief 获取单例实例 + */ + static LogManager &getInstance(); + + /** + * @brief 设置日志回调函数 + * @param callback 回调函数,接收日志消息和错误标志 + */ + void setCallback(LogCallback callback); + + /** + * @brief 格式化并输出日志消息 + * @param level 日志级别 + * @param fmt 格式化字符串 (printf style) + * @param ... 
可变参数 + */ + void logFormat(LogLevel level, const char *fmt, ...); + + /** + * @brief 兼容旧接口的日志输出 + * @param message 日志消息 + * @param isError 是否为错误消息 + */ + void log(const std::string &message, bool isError = false); + + /** + * @brief 清除回调函数 + */ + void clearCallback(); + +private: + LogManager() = default; + ~LogManager() = default; + + // 内部实际执行日志输出的方法 + void logInternal(LogLevel level, const std::string &message); + + LogCallback callback_; + std::mutex mutex_; // 保护回调函数的线程安全 +}; + +// 宏定义以便于使用 +#define LOG_DEBUG(fmt, ...) \ + LogManager::getInstance().logFormat(LogLevel::DEBUG, fmt, ##__VA_ARGS__) +#define LOG_INFO(fmt, ...) \ + LogManager::getInstance().logFormat(LogLevel::INFO, fmt, ##__VA_ARGS__) +#define LOG_WARN(fmt, ...) \ + LogManager::getInstance().logFormat(LogLevel::WARNING, fmt, ##__VA_ARGS__) +#define LOG_ERROR(fmt, ...) \ + LogManager::getInstance().logFormat(LogLevel::ERROR, fmt, ##__VA_ARGS__) diff --git a/image_capture/src/common/log_streambuf.h b/image_capture/src/common/log_streambuf.h new file mode 100644 index 0000000..052e07b --- /dev/null +++ b/image_capture/src/common/log_streambuf.h @@ -0,0 +1,39 @@ +#pragma once + +#include +#include +#include "log_manager.h" + +/** + * @brief 自定义streambuf,用于重定向cout/cerr到LogManager + */ +class LogStreamBuf : public std::streambuf { +public: + LogStreamBuf(bool isError) : isError_(isError), buffer_() {} + +protected: + virtual int_type overflow(int_type c) override { + if (c != EOF) { + buffer_ += static_cast(c); + if (c == '\n') { + // 遇到换行符,输出完整行 + LogManager::getInstance().log(buffer_, isError_); + buffer_.clear(); + } + } + return c; + } + + virtual int sync() override { + if (!buffer_.empty()) { + LogManager::getInstance().log(buffer_, isError_); + buffer_.clear(); + } + return 0; + } + +private: + bool isError_; + std::string buffer_; +}; + diff --git a/image_capture/src/common_types.h b/image_capture/src/common_types.h new file mode 100644 index 0000000..4b1c1db --- /dev/null +++ 
b/image_capture/src/common_types.h @@ -0,0 +1,27 @@ +#pragma once + +#include + +/** + * @brief 点云数据结构 + * 表示一个三维点 + */ +struct Point3D { + float x, y, z; + Point3D() : x(0), y(0), z(0) {} + Point3D(float x, float y, float z) : x(x), y(y), z(z) {} +}; + +/** + * @brief 相机内参结构 + */ +struct CameraIntrinsics { + float fx; // 焦距x + float fy; // 焦距y + float cx; // 主点x + float cy; // 主点y + + CameraIntrinsics() : fx(0), fy(0), cx(0), cy(0) {} + CameraIntrinsics(float fx, float fy, float cx, float cy) + : fx(fx), fy(fy), cx(cx), cy(cy) {} +}; diff --git a/image_capture/src/device/device_manager.cpp b/image_capture/src/device/device_manager.cpp new file mode 100644 index 0000000..a160b78 --- /dev/null +++ b/image_capture/src/device/device_manager.cpp @@ -0,0 +1,300 @@ +/** + * @file device_manager.cpp + * @brief 设备管理器实现文件 + * + * 此文件实现了DeviceManager类的完整功能: + * - 设备初始化(扫描和配置相机) + * - 设备启动和停止 + * - 图像获取接口 + * - 设备信息查询 + * + * 设计说明: + * - DeviceManager是对CameraCapture的封装,提供统一的设备管理接口 + * - 不涉及业务逻辑,只负责设备层的管理 + * - 使用智能指针管理CameraCapture,自动释放资源 + */ + +#include "device_manager.h" +#include "../camera/ty_multi_camera_capture.h" +#include "../camera/mvs_multi_camera_capture.h" +#include + +/** + * @brief 获取单例实例 + * + * @return DeviceManager单例引用-DeviceManager&返回的是实例的引用 + */ +DeviceManager& DeviceManager::getInstance() { + static DeviceManager instance; // C++11保证线程安全的单例 + return instance; +} + +/** + * @brief 构造函数(私有) + * + * 初始化设备管理器,设置初始状态为未初始化 + */ +DeviceManager::DeviceManager() : initialized_(false) {} + +/** + * @brief 析构函数 + * + * 确保在对象销毁时正确停止所有设备 + * 调用stopAll()清理资源 + */ +DeviceManager::~DeviceManager() { + stopAll(); +} + +/** + * @brief 初始化并扫描设备 + * + * 初始化相机采集模块,扫描并配置所有可用的相机设备 + * + * @param enable_depth 是否启用深度流,true表示启用深度图采集 + * @param enable_color 是否启用彩色流,true表示启用彩色图采集 + * @return 发现的设备数量,0表示初始化失败或未找到设备 + * + * @note 如果已经初始化,直接返回当前设备数量(避免重复初始化) + * @note 初始化失败时,capture_会被重置为nullptr + */ +int DeviceManager::initialize(bool enable_depth, bool enable_color) { + // 
如果已经初始化,直接返回当前设备数量 + if (initialized_) { + return getDeviceCount(); + } + + int total_count = 0; + + // 创建深度相机采集对象 + capture_ = std::make_shared(); + + // 初始化深度相机采集(扫描设备、配置流) + if (capture_->initialize(enable_depth, enable_color)) { + total_count += capture_->getCameraCount(); + std::cout << "[DeviceManager] Initialized " << capture_->getCameraCount() << " depth camera(s)" << std::endl; + } else { + std::cerr << "[DeviceManager] Failed to initialize depth cameras" << std::endl; + capture_.reset(); // 重置智能指针,释放资源 + } + + // 初始化MVS 2D相机 + mvs_cameras_ = std::make_unique(); + if (mvs_cameras_->initialize()) { + total_count += mvs_cameras_->getCameraCount(); + std::cout << "[DeviceManager] Initialized " << mvs_cameras_->getCameraCount() << " 2D camera(s)" << std::endl; + } else { + std::cout << "[DeviceManager] No 2D cameras found or initialization failed" << std::endl; + mvs_cameras_.reset(); + } + + + + // 获取设备数量并标记为已初始化 + initialized_ = true; + std::cout << "[DeviceManager] Total devices initialized: " << total_count << std::endl; + return total_count; +} + +/** + * @brief 启动所有设备 + * + * 启动所有相机的数据采集 + * + * @return true 启动成功,false 启动失败(capture_为空或启动失败) + * + * @note 必须先调用initialize()初始化设备 + */ +bool DeviceManager::startAll() { + bool success = true; + + // 启动深度相机 + if (capture_) { + if (!capture_->start()) { + success = false; + std::cerr << "[DeviceManager] Failed to start depth cameras" << std::endl; + } + } + + + + // 启动2D相机 + if (mvs_cameras_) { + if (!mvs_cameras_->start()) { + success = false; + std::cerr << "[DeviceManager] Failed to start 2D cameras" << std::endl; + } + } + + return success; +} + +/** + * @brief 停止所有设备 + * + * 停止所有相机的数据采集 + * + * @note 此函数是幂等的,可以安全地多次调用 + */ +void DeviceManager::stopAll() { + if (capture_) { + capture_->stop(); + } + +} + +/** + * @brief 获取设备数量 + * + * @return 当前已初始化的设备数量,0表示未初始化或无设备 + */ +int DeviceManager::getDeviceCount() const { + int count = 0; + if (capture_) { + count += capture_->getCameraCount(); + } + + if 
(mvs_cameras_) { + count += mvs_cameras_->getCameraCount(); + } + return count; +} + +int DeviceManager::getDepthCameraCount() const { + return capture_ ? capture_->getCameraCount() : 0; +} + + + +/** + * @brief 获取设备ID + * + * @param index 设备索引,从0开始 + * @return 设备ID字符串,如果索引无效或未初始化则返回空字符串 + */ +std::string DeviceManager::getDeviceId(int index) const { + int percipio_count = capture_ ? capture_->getCameraCount() : 0; + + if (index < percipio_count) { + return capture_->getCameraId(index); + } + + int mvs_index = index - percipio_count; + if (mvs_cameras_ && mvs_index >= 0 && mvs_index < mvs_cameras_->getCameraCount()) { + return "2D-" + mvs_cameras_->getCameraId(mvs_index); + } + + return ""; +} + + + +/** + * @brief 获取指定设备的最新图像 + * + * 从设备缓冲区中获取最新采集的图像数据 + * + * @param device_index 设备索引,从0开始 + * @param depth [输出] 深度图,CV_16U格式,包含原始深度值(单位:毫米) + * @param color [输出] 彩色图,BGR格式,CV_8UC3类型 + * @param fps [输出] 当前帧率(帧/秒) + * @return true 成功获取图像,false 获取失败(设备未初始化、索引无效、缓冲区为空) + * + * @note 此函数是线程安全的,使用互斥锁保护缓冲区访问 + * @note 如果某个图像流未启用或尚未采集到数据,对应的Mat将为空 + */ +bool DeviceManager::getLatestImages(int device_index, cv::Mat& depth, cv::Mat& color, double& fps) { + int percipio_count = capture_ ? 
capture_->getCameraCount() : 0; + + // 深度相机 + if (device_index < percipio_count) { + if (!capture_) return false; + return capture_->getLatestImages(device_index, depth, color, fps); + } + + // 2D相机 + int mvs_index = device_index - percipio_count; + if (mvs_cameras_ && mvs_index >= 0 && mvs_index < mvs_cameras_->getCameraCount()) { + depth = cv::Mat(); // 2D相机没有深度图 + return mvs_cameras_->getLatestImage(mvs_index, color, fps); + } + + + + + return false; +} + + + +/** + * @brief 检查是否正在运行 + * + * @return true 正在运行,false 已停止或未初始化 + */ +bool DeviceManager::isRunning() const { + bool anyScaleRunning = capture_ && capture_->isRunning(); + bool anyMVSRunning = mvs_cameras_ && mvs_cameras_->isRunning(); + return anyScaleRunning || anyMVSRunning; +} + +/** + * @brief 获取指定设备的深度相机内参 + * + * 从相机SDK获取深度相机的内参(fx, fy, cx, cy) + * 内参存储在相机的标定数据中 + * + * @param device_index 设备索引,从0开始 + * @param cy [输出] 主点y坐标(像素单位) + * @return 是否成功获取内参 + */ +bool DeviceManager::getDepthCameraIntrinsics(int device_index, float& fx, float& fy, float& cx, float& cy) { + if (!capture_) return false; + + // 只有深度相机有内参 + int percipio_count = capture_->getCameraCount(); + if (device_index < percipio_count) { + return capture_->getDepthCameraIntrinsics(device_index, fx, fy, cx, cy); + } + + return false; +} + +/** + * @brief 利用SDK生成点云 + * @param device_index 设备索引 + * @param depth_img 深度图 + * @param out_points 输出点云 + * @return 是否成功 + */ +bool DeviceManager::computePointCloud(int device_index, const cv::Mat& depth_img, std::vector& out_points) { + if (!capture_) return false; + + // 只有深度相机可以生成点云 + int percipio_count = capture_->getCameraCount(); + if (device_index < percipio_count) { + return capture_->computePointCloud(device_index, depth_img, out_points); + } + + return false; +} + + +int DeviceManager::get2DCameraCount() const { + return mvs_cameras_ ? 
mvs_cameras_->getCameraCount() : 0; +} + +bool DeviceManager::get2DCameraImage(int camera_index, cv::Mat& image, double& fps) { + if (!mvs_cameras_) { + return false; + } + return mvs_cameras_->getLatestImage(camera_index, image, fps); +} + +std::string DeviceManager::get2DCameraId(int camera_index) const { + if (!mvs_cameras_) { + return ""; + } + return mvs_cameras_->getCameraId(camera_index); +} + diff --git a/image_capture/src/device/device_manager.h b/image_capture/src/device/device_manager.h new file mode 100644 index 0000000..dd7d98b --- /dev/null +++ b/image_capture/src/device/device_manager.h @@ -0,0 +1,154 @@ +#pragma once + +// #include "../camera/ty_multi_camera_capture.h" -> Moved to cpp +// #include "../camera/mvs_multi_camera_capture.h" -> Moved to cpp + +#include +#include + +class CameraCapture; +class MvsMultiCameraCapture; +#include +#include +#include "../common_types.h" + +/** + * @brief DeviceManager + * 设备管理器(Device Manager),负责管理硬件设备(相机、读码器等) + * + * 采用单例模式,确保全局只有一个设备管理器实例 + * 任何模块都可以通过getInstance()访问设备 + * + * 功能说明: + * - 管理相机采集设备的初始化、启动、停止 + * - 管理读码器设备的初始化、启动、停止 + * - 提供设备访问接口(获取图像、设备信息等) + * - 支持未来扩展其他设备类型 + * + * 职责范围: + * - 设备生命周期管理(初始化、启动、停止) + * - 设备数据获取(图像、设备信息) + * - 不涉及业务逻辑(任务管理、结果处理等) + */ +class DeviceManager { +public: + /** + * 获取单例实例 + * @return DeviceManager单例引用 + */ + static DeviceManager& getInstance(); + + // 禁止拷贝和赋值 + DeviceManager(const DeviceManager&) = delete; + DeviceManager& operator=(const DeviceManager&) = delete; + + ~DeviceManager(); + + /** + * 初始化并扫描设备 + * @param enable_depth 是否启用深度流 + * @param enable_color 是否启用彩色流 + * @return 发现的设备数量 + */ + int initialize(bool enable_depth = true, bool enable_color = true); + + /** + * 启动所有设备 + * @return 是否成功 + */ + bool startAll(); + + /** + * 停止所有设备 + */ + void stopAll(); + + /** + * 获取设备数量 + * @return 设备数量 + */ + int getDeviceCount() const; + + /** + * 获取设备ID + * @param index 设备索引 + * @return 设备ID字符串 + */ + std::string getDeviceId(int index) const; + + /** + * 获取指定设备的最新图像 + 
* @param device_index 设备索引 + * @param depth 输出的深度图 + * @param color 输出的彩色图 + * @param fps 输出的帧率 + * @return 是否成功获取到图像 + */ + bool getLatestImages(int device_index, cv::Mat& depth, cv::Mat& color, double& fps); + + /** + * 检查是否正在运行 + * @return 是否运行中 + */ + bool isRunning() const; + + /** + * 获取指定设备的深度相机内参 + * @param device_index 设备索引 + * @param fx [输出] 焦距x + * @param fy [输出] 焦距y + * @param cx [输出] 主点x + * @param cy [输出] 主点y + * @return 是否成功获取内参 + */ + bool getDepthCameraIntrinsics(int device_index, float& fx, float& fy, float& cx, float& cy); + + /** + * @brief 利用SDK生成点云 + * @param device_index 设备索引 + * @param depth_img 深度图 + * @param out_points 输出点云 + * @return 是否成功 + */ + bool computePointCloud(int device_index, const cv::Mat& depth_img, std::vector& out_points); + + /** + * 获取深度相机数量 + * @return 深度相机数量 + */ + int getDepthCameraCount() const; + + /** + * 获取2D (MVS)相机数量 + * @return 2D相机数量 + */ + int get2DCameraCount() const; + + /** + * 获取2D相机图像 + * @param camera_index 2D相机索引(从0开始) + * @param image 输出的彩色图 + * @param fps 输出的帧率 + * @return 是否成功 + */ + bool get2DCameraImage(int camera_index, cv::Mat& image, double& fps); + + /** + * 获取2D相机ID + * @param camera_index 2D相机索引 + * @return 相机ID字符串 + */ + std::string get2DCameraId(int camera_index) const; + + + + + +private: + DeviceManager(); // 私有构造函数,确保单例 + + std::shared_ptr capture_; // Percipio深度相机采集对象 + std::unique_ptr mvs_cameras_; // MVS 2D相机采集对象 + bool initialized_; // 是否已初始化 +}; + diff --git a/image_capture/src/gui/mainwindow.cpp b/image_capture/src/gui/mainwindow.cpp new file mode 100644 index 0000000..0678c19 --- /dev/null +++ b/image_capture/src/gui/mainwindow.cpp @@ -0,0 +1,1014 @@ +/** + * @file mainwindow.cpp + * @brief 主窗口实现文件 + * + * 此文件实现了MainWindow类的完整功能: + * - GUI界面初始化和控件管理 + * - 设备管理器初始化和设备扫描 + * - 图像采集控制(开始/停止) + * - 图像实时显示更新 + * - 图像保存功能 + * - 日志输出功能 + * + * 设计说明: + * - 使用Qt信号槽机制实现事件驱动 + * - 使用定时器控制图像更新频率(30fps) + * - 支持最多4台相机同时显示 + * - 图像采集在后台线程进行,UI更新在主线程 + */ + +#include "mainwindow.h" +#include 
"../algorithm/utils/image_processor.h" +#include "../common/config_manager.h" +#include "../common/log_manager.h" +#include "../common/log_streambuf.h" // 在cpp文件中包含完整定义 +#include "../common_types.h" +#include "../device/device_manager.h" +#include "../vision/vision_controller.h" +#include "settings_widget.h" +#include "ui_mainwindow.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +/** + * @brief 构造函数 + * + * 初始化主窗口,完成以下工作: + * 1. 加载UI界面布局 + * 2. 获取UI控件指针 + * 3. 设置UI样式 + * 4. 初始化设备管理器(扫描相机) + * 5. 创建图像处理器 + * 6. 设置定时器和信号槽连接 + * + * @param parent 父窗口指针,默认为nullptr(顶级窗口) + * + * @note 设备初始化在构造函数中完成,但图像采集需要用户点击"开始"按钮后才启动 + * @note 如果未找到设备,会显示警告对话框,但窗口仍会显示 + */ +MainWindow::MainWindow(QWidget *parent) + : QMainWindow(parent) // 调用基类构造函数 + , + ui(new Ui::MainWindow) // 创建UI对象 + , + isCapturing_(false) // 采集状态初始为false + , + currentDeviceIndex_(0) // 当前设备索引初始为0 +{ + // ========== 1. 加载UI界面布局 ========== + // 从mainwindow.ui文件加载界面设计,设置所有控件的布局和属性 + ui->setupUi(this); + + // ========== 1.0 加载配置文件 ========== + // 确保在初始化其他组件之前加载配置 + ConfigManager::getInstance().loadConfig("config.json"); + + // ========== 1.1 设置相机垂直布局的拉伸因子 ========== + // Info布局拉伸因子为0(不伸展),Image布局拉伸因子为1(伸展) + ui->camera0VerticalLayout->setStretch(0, 0); + ui->camera0VerticalLayout->setStretch(1, 1); + ui->camera1VerticalLayout->setStretch(0, 0); + ui->camera1VerticalLayout->setStretch(1, 1); + ui->camera2VerticalLayout->setStretch(0, 0); + ui->camera2VerticalLayout->setStretch(1, 1); + ui->camera3VerticalLayout->setStretch(0, 0); + ui->camera3VerticalLayout->setStretch(1, 1); + + // ========== 1.1 设置网格布局的拉伸因子 ========== + // 设置行和列的拉伸因子,使图像区域能够等比例扩展 + // 参数1:行/列索引,参数2:拉伸因子(1表示等比例) + ui->cameraGridLayout->setRowStretch(0, 1); // 第一行(Camera 0和1) + ui->cameraGridLayout->setRowStretch(1, 1); // 第二行(Camera 2和3) + ui->cameraGridLayout->setColumnStretch(0, 1); // 第一列(Camera 0和2) + ui->cameraGridLayout->setColumnStretch(1, 1); // 
第二列(Camera 1和3) + + // ========== 1.2 设置窗口自适应屏幕大小 ========== + // 获取主屏幕的可用几何区域(排除任务栏等) + QScreen *screen = QApplication::primaryScreen(); + if (screen) { + QRect availableGeometry = screen->availableGeometry(); + // 设置窗口大小为屏幕可用区域的80%(留出一些边距) + int windowWidth = static_cast(availableGeometry.width() * 0.8); + int windowHeight = static_cast(availableGeometry.height() * 0.8); + + // 设置窗口大小 + resize(windowWidth, windowHeight); + + // 居中显示窗口 + move((availableGeometry.width() - windowWidth) / 2, + (availableGeometry.height() - windowHeight) / 2); + } + + // ========== 2. 获取UI控件指针 ========== + // 从UI对象中获取各个控件的指针,方便后续操作 + // 彩色图标签(最多4个) + + // 深度图标签(最多4个) + depthImageLabels_[0] = ui->depthImageLabel0; + depthImageLabels_[1] = ui->depthImageLabel1; + depthImageLabels_[2] = ui->depthImageLabel2; + depthImageLabels_[3] = ui->depthImageLabel3; + + // 信息标签(显示相机编号、FPS等,最多4个) + + depthInfoLabels_[0] = ui->depthInfoLabel0; + depthInfoLabels_[1] = ui->depthInfoLabel1; + depthInfoLabels_[2] = ui->depthInfoLabel2; + depthInfoLabels_[3] = ui->depthInfoLabel3; + + // Clear the existing layout of the 2D tab and rebuild it programmatically for + // 5 cameras + QWidget *twoDTab = ui->twoDTab; + + // Delete all existing children to prevent overlapping "ghost" widgets from + // the .ui file + qDeleteAll( + twoDTab->findChildren(QString(), Qt::FindDirectChildrenOnly)); + + if (twoDTab->layout()) { + delete twoDTab->layout(); + } + + QGridLayout *twoDGridLayout = new QGridLayout(twoDTab); + twoDTab->setLayout(twoDGridLayout); + + // Initialize 2D camera display controls dynamically + for (int i = 0; i < MAX_2D_CAMERAS; ++i) { + // Create a group box or widget for each camera + QWidget *camWidget = new QWidget(twoDTab); + QVBoxLayout *camLayout = new QVBoxLayout(camWidget); + camLayout->setContentsMargins(0, 0, 0, 0); + + // Info label + twoDInfoLabels_[i] = new QLabel(camWidget); + twoDInfoLabels_[i]->setText(QString("2D - Cam%1: FPS: 0").arg(i)); + 
twoDInfoLabels_[i]->setAlignment(Qt::AlignLeft | Qt::AlignVCenter); + twoDInfoLabels_[i]->setFont(QFont("Arial", 11, QFont::Bold)); + twoDInfoLabels_[i]->setFixedHeight(30); + camLayout->addWidget(twoDInfoLabels_[i]); + + // Image label + twoDImageLabels_[i] = new QLabel(camWidget); + twoDImageLabels_[i]->setStyleSheet( + "QLabel { background-color : black; color : white; border: 1px solid " + "gray; }"); + twoDImageLabels_[i]->setMinimumSize(320, 240); + twoDImageLabels_[i]->setText("2D Image"); + twoDImageLabels_[i]->setAlignment(Qt::AlignCenter); + twoDImageLabels_[i]->setSizePolicy(QSizePolicy::Ignored, + QSizePolicy::Ignored); + camLayout->addWidget(twoDImageLabels_[i], 1); // Stretch factor 1 + + // Add to grid layout (3 columns) + int row = i / 3; + int col = i % 3; + twoDGridLayout->addWidget(camWidget, row, col); + + // Set stretch factors if needed + twoDGridLayout->setRowStretch(row, 1); + twoDGridLayout->setColumnStretch(col, 1); + } + + // 按钮控件 + startButton_ = ui->startButton; // 开始采集按钮 + stopButton_ = ui->stopButton; // 停止采集按钮 + saveButton_ = ui->saveButton; // 保存图像按钮 + savePointCloudButton_ = ui->savePointCloudButton; // 保存点云按钮 + + // 日志文本框 + logTextEdit_ = ui->logTextEdit; + + // ========== 3. 设置UI样式 ========== + // 设置图像标签样式:灰色边框 + QString imageLabelStyle = "border: 1px solid gray;"; + for (int i = 0; i < MAX_DEPTH_CAMERAS; ++i) { + // 设置彩色图标签样式和初始文本 + + // 设置深度图标签样式和初始文本 + depthImageLabels_[i]->setAlignment(Qt::AlignCenter); + depthImageLabels_[i]->setStyleSheet(imageLabelStyle); + depthImageLabels_[i]->setText("Depth Image"); + } + + // 设置日志栏样式:白色背景、黑色文字、灰色边框 + logTextEdit_->setStyleSheet( + "background-color: white; color: black; border: 1px solid gray;"); + + // ========== 4. 
初始化日志和重定向cout/cerr ========== + // 设置日志回调函数,将LogManager的日志重定向到Qt日志文本框 + LogManager::getInstance().setCallback( + [this](const std::string &message, bool isError) { + // 注意:此回调可能在任意工作线程中被调用(包括相机采集线程、Redis监听线程等), + // 不能直接操作Qt控件,必须通过事件队列切回GUI线程。 + QString qMessage = QString::fromStdString(message); + // 移除末尾的换行符(如果有) + if (qMessage.endsWith('\n')) { + qMessage.chop(1); + } + + QMetaObject::invokeMethod( + this, [this, qMessage]() { this->appendLog(qMessage); }, + Qt::QueuedConnection // 确保在GUI线程执行 + ); + }); + + // 重定向cout和cerr到LogManager + coutBuf_ = std::make_unique(false); + cerrBuf_ = std::make_unique(true); + originalCout_ = std::cout.rdbuf(coutBuf_.get()); + originalCerr_ = std::cerr.rdbuf(cerrBuf_.get()); + + // 在日志中输出系统启动信息 + appendLog(QString("=== Stereo Warehouse Inspection System ===")); + appendLog(QString("System initialized")); + + // ========== 5. 初始化设备管理器(单例) ========== + // 设备管理器采用单例模式,任何模块都可以通过getInstance()访问 + appendLog(QString("Initializing devices...")); + int deviceCount = DeviceManager::getInstance().initialize(true, true); + + // 检查是否找到设备 + if (deviceCount == 0) { + appendLog(QString("ERROR: No devices found!")); + // 显示警告对话框,但窗口仍会显示(用户可以稍后重试) + QMessageBox::warning(this, "Warning", "No devices found!"); + return; // 提前返回,不继续初始化后续功能 + } + + // 输出设备信息到日志 + appendLog(QString("Found %1 device(s)").arg(deviceCount)); + for (int i = 0; i < deviceCount; ++i) { + QString deviceId = + QString::fromStdString(DeviceManager::getInstance().getDeviceId(i)); + appendLog(QString("Device %1: %2").arg(i).arg(deviceId)); + } + + // ========== 7. 创建图像处理器 ========== + // 为每个相机创建一个图像处理器,用于处理深度图(伪彩色映射等) + for (int i = 0; i < deviceCount; ++i) { + processors_.push_back(std::make_shared()); + } + appendLog(QString("Image processors created")); + + // ========== 7.5. 
初始化并启动VisionController(Redis监控和算法触发) + // ========== + // 设备已由MainWindow初始化,VisionController直接使用DeviceManager单例 + appendLog(QString("Initializing Vision system (Redis monitoring and " + "algorithm triggering)...")); + visionController_ = std::make_shared(); + + // 仅初始化,不立即start,等设备启动后再启动监听,避免设备未就绪就被任务触发 + if (!visionController_->initialize("127.0.0.1", 6379, 0, 1)) { + appendLog(QString("WARNING: Vision system initialization failed")); + QMessageBox::warning(this, "Warning", + "Vision system failed to initialize!\nRedis " + "monitoring and algorithm triggering may not work."); + } else { + appendLog(QString( + "Vision system initialized (Redis ready, listening not started yet)")); + } + + // ========== 7.6. 自动启动设备采集 ========== + // 算法需要在后台自动触发,因此设备必须在启动时就开始采集 + appendLog("Starting all devices for background tasks..."); + try { + if (DeviceManager::getInstance().startAll()) { + appendLog("All devices started successfully."); + // 设备启动成功后再启动Redis监听,确保任务到来时设备已就绪 + if (visionController_) { + if (!visionController_->start()) { + appendLog("WARNING: Vision system start failed (Redis monitoring may " + "not work)."); + std::cerr << "[MainWindow] VisionController::start() failed" + << std::endl; + } else { + appendLog("Vision system started (Redis monitoring active)"); + } + } + } else { + appendLog("ERROR: Failed to start devices automatically (check " + "console/logs for details)."); + // 不要弹出模态对话框阻塞启动,只记录日志 + std::cerr << "[MainWindow] Failed to start devices automatically." + << std::endl; + } + } catch (const std::exception &e) { + appendLog(QString("CRITICAL EXCEPTION starting devices: %1").arg(e.what())); + std::cerr << "[MainWindow] CRITICAL EXCEPTION starting devices: " + << e.what() << std::endl; + } catch (...) { + appendLog("CRITICAL UNKNOWN EXCEPTION starting devices"); + std::cerr << "[MainWindow] CRITICAL UNKNOWN EXCEPTION starting devices" + << std::endl; + } + + // ========== 8. 
设置定时器用于更新图像 ========== + // 创建定时器对象,父对象为this(MainWindow),自动管理生命周期 + updateTimer_ = new QTimer(this); + // 连接定时器的timeout信号到updateImage槽函数 + // 当定时器触发时,会自动调用updateImage()更新图像显示 + connect(updateTimer_, &QTimer::timeout, this, &MainWindow::updateImage); + // 注意:此时定时器还未启动,需要用户点击"开始"按钮后才启动 + + // ========== 9. 连接按钮信号槽 ========== + // Connect signals and slots + connect(ui->startButton, &QPushButton::clicked, this, + &MainWindow::onStartCapture); + connect(ui->stopButton, &QPushButton::clicked, this, + &MainWindow::onStopCapture); + connect(ui->saveButton, &QPushButton::clicked, this, + &MainWindow::onSaveImage); + connect(ui->savePointCloudButton, &QPushButton::clicked, this, + &MainWindow::onSavePointCloud); + + // ========== 9. 创建设置标签页 ========== + settingsWidget_ = new SettingsWidget(this); + ui->mainTabWidget->addTab(settingsWidget_, "Settings"); + connect(settingsWidget_, &SettingsWidget::settingsSaved, + [this]() { appendLog("Settings saved successfully"); }); + + // ========== 10. 设置初始按钮状态 ========== + // 初始状态:只有"开始"按钮可用,"停止"、"保存"和"保存点云"按钮禁用 + stopButton_->setEnabled(false); // 禁用停止按钮 + saveButton_->setEnabled(false); // 禁用保存按钮 + savePointCloudButton_->setEnabled(false); // 禁用保存点云按钮 + // startButton_默认是启用的,不需要设置 +} + +/** + * @brief 析构函数 + * + * 确保在窗口关闭时正确清理所有资源: + * 1. 停止设备采集(如果正在运行) + * 2. 释放设备管理器 + * 3. 
释放UI对象 + * + * @note Qt会自动释放子对象(如QTimer),但需要手动释放new创建的对象 + * @note 如果设备正在采集,会先停止采集再释放资源 + */ +MainWindow::~MainWindow() { + // 先停止VisionController(会停止Redis监听和任务处理) + if (visionController_) { + visionController_->stop(); + visionController_.reset(); + } + + // 停止设备(使用单例) + if (DeviceManager::getInstance().isRunning()) { + DeviceManager::getInstance().stopAll(); + } + + // 恢复cout和cerr的原始缓冲区 + if (originalCout_) { + std::cout.rdbuf(originalCout_); + } + if (originalCerr_) { + std::cerr.rdbuf(originalCerr_); + } + + // 清除日志回调 + LogManager::getInstance().clearCallback(); + + // 释放UI对象 + delete ui; +} + +/** + * @brief 开始采集槽函数 + * + * 当用户点击"开始"按钮时触发此函数 + * 主要工作: + * 1. 检查设备管理器状态 + * 2. 启动所有设备(开始图像采集) + * 3. 启动定时器(开始图像更新) + * 4. 更新按钮状态 + * + * @note 此函数会启动采集线程,开始从相机获取图像 + * @note 定时器每33ms触发一次,约30fps的更新频率 + * @note 采集线程在后台运行,不会阻塞UI + */ +void MainWindow::onStartCapture() { + try { + appendLog("onStartCapture: Entering..."); + // 检查是否有可用设备 + if (DeviceManager::getInstance().getDeviceCount() == 0) { + QMessageBox::warning(this, "Error", "No devices available!"); + appendLog("onStartCapture: No devices found."); + return; + } + + // 如果设备已经在运行,只需要启动图像显示 + if (DeviceManager::getInstance().isRunning()) { + appendLog("onStartCapture: Devices already running, starting display."); + startImageDisplay(); + appendLog(QString("Image display started (device already running)")); + return; + } + + // 启动设备采集 + appendLog("onStartCapture: Starting all devices..."); + std::cout << "[MainWindow] Calling DeviceManager::startAll()..." + << std::endl; + if (!DeviceManager::getInstance().startAll()) { + QMessageBox::warning(this, "Error", "Failed to start devices!"); + appendLog("onStartCapture: Failed to start devices."); + std::cerr << "[MainWindow] DeviceManager::startAll() failed" << std::endl; + return; + } + std::cout << "[MainWindow] DeviceManager::startAll() returned success." 
+ << std::endl; + appendLog("onStartCapture: Devices started successfully."); + + startImageDisplay(); + appendLog(QString("Capture started")); + } catch (const std::exception &e) { + appendLog(QString("CRITICAL ERROR in onStartCapture: %1").arg(e.what())); + std::cerr << "CRITICAL ERROR in onStartCapture: " << e.what() << std::endl; + QMessageBox::critical(this, "Critical Error", + QString("Crash in start capture: %1").arg(e.what())); + } catch (...) { + appendLog("CRITICAL ERROR in onStartCapture: Unknown exception"); + std::cerr << "CRITICAL ERROR in onStartCapture: Unknown exception" + << std::endl; + QMessageBox::critical(this, "Critical Error", + "Crash in start capture: Unknown exception"); + } +} + +/** + * @brief 停止采集槽函数 + * + * 当用户点击"停止"按钮时触发此函数 + * 主要工作: + * 1. 停止所有设备(停止图像采集) + * 2. 停止定时器(停止图像更新) + * 3. 清空图像显示 + * 4. 更新按钮状态 + * + * @note 此函数会停止采集线程,但不会释放设备管理器 + * @note 停止后可以再次点击"开始"按钮重新开始采集 + */ +void MainWindow::onStopCapture() { + // Stop all devices + DeviceManager::getInstance().stopAll(); + + // 停止图像显示 + stopImageDisplay(); + appendLog(QString("Capture stopped")); +} + +/** + * @brief 保存点云槽函数 + * + * 当用户点击"保存点云"按钮时触发此函数 + * 主要工作: + * 1. 获取当前相机的最新深度图 + * 2. 从DeviceManager获取相机内参 + * 3. 使用DepthToPointCloud生成点云 + * 4. 
保存为PLY格式(ASCII格式,可用MeshLab、CloudCompare等软件打开) + * + * @note 保存的文件名包含时间戳和相机索引,避免文件名冲突 + * @note 保存目录:image_capture/pointclouds_save + * @note PLY格式是ASCII格式,可以用文本编辑器查看 + */ +void MainWindow::onSavePointCloud() { + // Prepare data + cv::Mat depth, color; + QString timestamp; + if (!prepareCapturedData(depth, color, timestamp)) { + return; + } + + // Check depth validity for point cloud + if (depth.empty() || depth.type() != CV_16U) { + QMessageBox::warning(this, "Warning", "Invalid depth image!"); + return; + } + + // ========== 生成点云 ========== + // ========== 生成和保存点云(异步线程) ========== + // 复制需要的数据到lambda,避免线程竞争 + int devIdx = currentDeviceIndex_; + + // Disable button to prevent re-entry + savePointCloudButton_->setEnabled(false); + appendLog("Starting background point cloud generation..."); + + std::thread([this, devIdx, depth, timestamp]() { + QString errMsg; + bool success = false; + QString finalFilename; + size_t pointsCount = 0; + + try { + std::vector point_cloud; + + // 耗时操作1:计算点云 + if (!DeviceManager::getInstance().computePointCloud(devIdx, depth, + point_cloud)) { + errMsg = "Failed to compute point cloud (SDK Error)!"; + } else if (point_cloud.empty()) { + errMsg = "Generated point cloud is empty!"; + } else { + pointsCount = point_cloud.size(); + + // 耗时操作2:转换和保存 (Open3D) + QString saveDir = "image_capture/pointclouds_save"; + QDir dir; + if (!dir.exists(saveDir)) { + if (!dir.mkpath(saveDir)) { + errMsg = "Failed to create directory: " + saveDir; + goto end; + } + } + + finalFilename = QString("%1/pointcloud_%2_%3.ply") + .arg(saveDir) + .arg(devIdx) + .arg(timestamp); + + // Convert to Open3D PointCloud + auto pcd = std::make_shared(); + pcd->points_.reserve(point_cloud.size()); + + int invalid_count = 0; + for (const auto &pt : point_cloud) { + // Filter invalid points (z <= 0 or NaN) + if (pt.z <= 0.0f || std::isnan(pt.x) || std::isnan(pt.y) || + std::isnan(pt.z)) { + invalid_count++; + continue; + } + // Keep units in mm + 
pcd->points_.emplace_back(Eigen::Vector3d(pt.x, pt.y, pt.z)); + } + + if (pcd->points_.empty()) { + errMsg = "All points filtered out (invalid depth)!"; + goto end; + } + + // Statistical Outlier Removal + // nb_neighbors=20, std_ratio=2.0 + std::shared_ptr pcd_filtered; + std::vector inliers; + std::tie(pcd_filtered, inliers) = + pcd->RemoveStatisticalOutliers(20, 2.0); + + if (pcd_filtered->points_.empty()) { + errMsg = "All points removed by outlier filter!"; + goto end; + } + + // Save + if (open3d::io::WritePointCloud(finalFilename.toStdString(), + *pcd_filtered)) { + success = true; + pointsCount = pcd_filtered->points_.size(); + } else { + errMsg = "Open3D failed to write file: " + finalFilename; + } + } + } catch (const std::exception &e) { + errMsg = QString("Exception: %1").arg(e.what()); + } catch (...) { + errMsg = "Unknown exception during save"; + } + + end: + // 回到主线程更新UI + QMetaObject::invokeMethod( + this, + [this, success, errMsg, finalFilename, pointsCount]() { + savePointCloudButton_->setEnabled(true); // Re-enable button + + if (success) { + appendLog(QString("Point cloud saved: %1 (%2 points)") + .arg(finalFilename) + .arg(pointsCount)); + QMessageBox::information( + this, "Success", + QString( + "Point cloud saved successfully!\n\nFile: %1\nPoints: %2") + .arg(finalFilename) + .arg(pointsCount)); + } else { + appendLog("Error saving point cloud: " + errMsg); + QMessageBox::warning(this, "Error", errMsg); + } + }, + Qt::QueuedConnection); + }).detach(); +} + +/** + * @brief 保存图像槽函数 + * + * 当用户点击"保存"按钮时触发此函数 + * 主要工作: + * 1. 获取当前相机的最新图像 + * 2. 创建保存目录(如果不存在) + * 3. 保存深度图(伪彩色版本和原始16位版本) + * 4. 
保存彩色图 + * + * @note 保存的文件名包含时间戳,避免文件名冲突 + * @note 深度图保存两个版本: + * - 伪彩色版本(可视化用,8位) + * - 原始版本(16位,用于后续处理) + * @note 保存目录:image_capture/images_save + */ +void MainWindow::onSaveImage() { + // Prepare data + cv::Mat depth, color; + QString timestamp; + if (!prepareCapturedData(depth, color, timestamp)) { + return; + } + + // ========== 设置保存目录 ========== + QString saveDir = "image_capture/images_save"; + QDir dir; + if (!dir.mkpath(saveDir)) { + QMessageBox::warning( + this, "Error", QString("Failed to create directory: %1").arg(saveDir)); + return; + } + + bool saved = false; // 标记是否成功保存了至少一张图像 + + // ========== 保存深度图 ========== + if (!depth.empty()) { + // 保存处理后的伪彩色深度图(可视化版本) + // 使用ImageProcessor处理深度图,应用伪彩色映射(JET色彩映射) + cv::Mat depthVis = + processors_[currentDeviceIndex_]->processDepthImage(depth); + QString depthVisFile = QString("%1/snap_depth_vis_%2_%3.png") + .arg(saveDir) // 目录 + .arg(currentDeviceIndex_) // 相机索引 + .arg(timestamp); // 时间戳 + if (cv::imwrite(depthVisFile.toStdString(), depthVis)) { + appendLog(QString("Saved depth visualization: %1").arg(depthVisFile)); + saved = true; + } + + // 同时保存原始16位深度图(用于后续处理) + // 使用16位PNG格式保存原始深度值,保留完整的深度信息 + QString depthRawFile = QString("%1/snap_depth_raw_%2_%3.png") + .arg(saveDir) + .arg(currentDeviceIndex_) + .arg(timestamp); + if (cv::imwrite(depthRawFile.toStdString(), depth)) { + appendLog( + QString("Saved raw depth image (16-bit): %1").arg(depthRawFile)); + } + } + + // ========== 保存彩色图 ========== + if (!color.empty()) { + QString colorFile = QString("%1/snap_color_%2_%3.png") + .arg(saveDir) + .arg(currentDeviceIndex_) + .arg(timestamp); + if (cv::imwrite(colorFile.toStdString(), color)) { + appendLog(QString("Saved color image: %1").arg(colorFile)); + saved = true; + } + } + + // ========== 显示保存结果 ========== + if (saved) { + QMessageBox::information(this, "Success", "Images saved successfully!"); + } else { + QMessageBox::warning(this, "Warning", "Failed to save images!"); + } +} + +/** + * @brief 更新图像槽函数 + 
* + * 定时器触发时调用此函数(每33ms一次,约30fps) + * 主要工作: + * 1. 检查采集状态 + * 2. 调用updateDepthDisplay()更新所有相机的图像显示 + * + * @note 此函数由定时器自动调用,不需要手动调用 + * @note 如果采集未启动或设备未运行,直接返回 + */ +void MainWindow::updateImage() { + try { + // 检查采集状态和设备状态 + // 如果未在采集或设备未运行,直接返回(不更新图像) + if (!isCapturing_ || !DeviceManager::getInstance().isRunning()) { + return; + } + + // 更新所有相机的图像显示 + updateDepthDisplay(); + + // 更新2D相机显示 + update2DDisplay(); + } catch (const std::exception &e) { + std::cerr << "Error in updateImage: " << e.what() << std::endl; + // Disable capturing to prevent spamming errors + stopImageDisplay(); + appendLog(QString("Error updating image: %1").arg(e.what())); + } catch (...) { + std::cerr << "Unknown error in updateImage" << std::endl; + stopImageDisplay(); + } +} + +/** + * @brief 更新深度图显示 + * + * 更新所有已连接相机的图像显示 + * 遍历所有相机,为每个相机调用updateCameraDisplay() + * + * @note 最多支持MAX_CAMERAS(4)个相机同时显示 + */ +void MainWindow::updateDepthDisplay() { + // 检查设备管理器状态 + int deviceCount = DeviceManager::getInstance().getDeviceCount(); + if (deviceCount == 0) { + return; + } + + // 更新所有已连接的相机(最多MAX_DEPTH_CAMERAS个) + for (int i = 0; i < deviceCount && i < MAX_DEPTH_CAMERAS; ++i) { + updateCameraDisplay(i); + } +} + +/** + * @brief 更新指定相机的图像显示 + * + * 更新单个相机的彩色图和深度图显示 + * 主要工作: + * 1. 从设备管理器获取最新图像 + * 2. 处理深度图(应用伪彩色映射) + * 3. 在图像上添加文本信息(相机编号、FPS等) + * 4. 
转换为QImage并显示在QLabel上 + * + * @param cameraIndex 相机索引,从0开始 + * + * @note 如果图像为空或获取失败,直接返回(不更新显示) + * @note 图像会按比例缩放以适应QLabel大小,保持宽高比 + */ +void MainWindow::updateCameraDisplay(int cameraIndex) { + // 检查相机索引有效性 + if (cameraIndex < 0 || cameraIndex >= MAX_DEPTH_CAMERAS) { + return; + } + + // 从设备管理器获取最新图像 + cv::Mat depth, color; // 深度图和彩色图 + double fps = 0.0; // 帧率 + + // 从ImageBuffer读取最新图像(线程安全) + if (!DeviceManager::getInstance().getLatestImages(cameraIndex, depth, color, + fps)) { + return; // 获取失败,直接返回 + } + + // ========== 更新信息标签 ========== + // 更新深度图的信息标签(显示相机编号和FPS) + QString depthInfo = QString("Depth - Cam%1: FPS: %2") + .arg(cameraIndex) + .arg(static_cast(fps)); + depthInfoLabels_[cameraIndex]->setText(depthInfo); + + // ========== 显示深度图 ========== + if (!depth.empty()) { + // 处理深度图(应用伪彩色映射) + // processDepthImage()会将16位深度图转换为8位伪彩色图(JET色彩映射) + cv::Mat depthVis = processors_[cameraIndex]->processDepthImage(depth); + + // 直接使用处理后的图像,不在图像上绘制文字 + + // 转换为QImage并显示 + QImage qimg = cvMatToQImage(depthVis); + if (!qimg.isNull()) { + // 转换为QPixmap + QPixmap pixmap = QPixmap::fromImage(qimg); + // 获取QLabel的大小 + QSize labelSize = depthImageLabels_[cameraIndex]->size(); + // 按比例缩放图像以适应QLabel(保持宽高比,使用平滑变换) + QPixmap scaledPixmap = pixmap.scaled(labelSize, Qt::KeepAspectRatio, + Qt::SmoothTransformation); + // 显示在QLabel上 + depthImageLabels_[cameraIndex]->setPixmap(scaledPixmap); + } + } +} + +/** + * @brief 将OpenCV的Mat转换为Qt的QImage + * + * 将OpenCV的cv::Mat格式转换为Qt的QImage格式,用于在QLabel上显示 + * 支持的数据类型: + * - CV_8UC1: 8位单通道(灰度图) + * - CV_8UC3: 8位3通道(BGR彩色图) + * - CV_8UC4: 8位4通道(BGRA彩色图) + * + * @param mat OpenCV的Mat图像矩阵 + * @return QImage Qt图像对象,如果转换失败或类型不支持则返回空QImage + * + * @note 使用copy()创建数据副本,确保返回的QImage独立于原始Mat + * @note CV_8UC3需要rgbSwapped(),因为OpenCV使用BGR,Qt使用RGB + * @note 如果Mat为空或类型不支持,返回空QImage + */ +QImage MainWindow::cvMatToQImage(const cv::Mat &mat) { + // 检查Mat是否为空 + if (mat.empty()) { + return QImage(); + } + + // 根据Mat的数据类型进行转换 + switch (mat.type()) { + case 
CV_8UC1: { + // 8位单通道(灰度图) + // 创建QImage,直接使用Mat的数据指针(零拷贝视图) + QImage image(mat.data, mat.cols, mat.rows, mat.step, + QImage::Format_Grayscale8); + // 使用copy()创建数据副本,确保QImage独立于Mat + return image.copy(); + } + case CV_8UC3: { + // 8位3通道(BGR彩色图) + // OpenCV使用BGR格式,Qt使用RGB格式,需要转换 + QImage image(mat.data, mat.cols, mat.rows, mat.step, QImage::Format_RGB888); + // rgbSwapped()交换R和B通道,将BGR转换为RGB + return image.rgbSwapped().copy(); + } + case CV_8UC4: { + // 8位4通道(BGRA彩色图) + QImage image(mat.data, mat.cols, mat.rows, mat.step, QImage::Format_ARGB32); + return image.copy(); + } + default: + // 不支持的数据类型,返回空QImage + return QImage(); + } +} + +/** + * @brief 添加日志消息 + * + * 在日志文本框中添加一条日志消息,包含时间戳 + * 主要工作: + * 1. 生成带时间戳的日志消息 + * 2. 追加到日志文本框 + * 3. 自动滚动到底部(显示最新日志) + * + * @param message 日志消息内容 + * + * @note 时间戳格式:hh:mm:ss.zzz(例如:14:30:52.123) + * @note 自动滚动到底部,确保用户能看到最新日志 + */ +void MainWindow::appendLog(const QString &message) { + // 检查日志文本框是否有效 + if (!logTextEdit_) { + return; + } + + // 生成时间戳(格式:hh:mm:ss.zzz) + QDateTime now = QDateTime::currentDateTime(); + QString timestamp = now.toString("hh:mm:ss.zzz"); + + // 格式化日志消息:[时间戳] 消息内容 + QString logMessage = QString("[%1] %2").arg(timestamp).arg(message); + + // 追加到日志文本框 + logTextEdit_->appendPlainText(logMessage); + + // 自动滚动到底部,确保用户能看到最新日志 + QScrollBar *scrollBar = logTextEdit_->verticalScrollBar(); + if (scrollBar) { + // 设置滚动条值为最大值(滚动到底部) + scrollBar->setValue(scrollBar->maximum()); + } +} + +/** + * @brief 启动图像显示 + * + * 更新按钮状态并启动定时器,开始图像显示更新 + */ +void MainWindow::startImageDisplay() { + isCapturing_ = true; + startButton_->setEnabled(false); + stopButton_->setEnabled(true); + saveButton_->setEnabled(true); + savePointCloudButton_->setEnabled(true); + updateTimer_->start(33); // 约30fps + appendLog("startImageDisplay: Timer started (33ms)"); +} + +/** + * @brief 停止图像显示 + * + * 停止定时器,更新按钮状态,并清空所有图像显示 + */ +void MainWindow::stopImageDisplay() { + isCapturing_ = false; + updateTimer_->stop(); + startButton_->setEnabled(true); 
+ stopButton_->setEnabled(false); + saveButton_->setEnabled(false); + savePointCloudButton_->setEnabled(false); + + // 清空所有相机显示 + for (int i = 0; i < MAX_DEPTH_CAMERAS; ++i) { + depthImageLabels_[i]->clear(); + depthImageLabels_[i]->setText("Depth Image"); + // 重置信息标签 + depthInfoLabels_[i]->setText(QString("Depth - Cam%1: FPS: 0").arg(i)); + } +} + +// ============================================================================ +// Settings Tab Implementation +// ============================================================================ + +#include "../common/config_manager.h" +#include +#include +#include + +// Settings tab implementation removed and moved to SettingsWidget class + +bool MainWindow::prepareCapturedData(cv::Mat &depth, cv::Mat &color, + QString ×tamp) { + // Check if device manager is running + // 检查是否有可用设备 + if (!DeviceManager::getInstance().isRunning()) { + QMessageBox::warning(this, "Warning", "No active capture!"); + return false; + } + + // Get latest images + double fps; + // 从设备管理器获取最新图像 + if (!DeviceManager::getInstance().getLatestImages(currentDeviceIndex_, depth, + color, fps)) { + QMessageBox::warning(this, "Warning", "Failed to get images!"); + return false; + } + + // Generate timestamp + timestamp = QDateTime::currentDateTime().toString("yyyyMMdd_hhmmss"); + return true; +} + +void MainWindow::update2DDisplay() { + auto &deviceManager = DeviceManager::getInstance(); + int count = deviceManager.get2DCameraCount(); + + for (int i = 0; i < count && i < MAX_2D_CAMERAS; ++i) { + cv::Mat image; + double fps; + if (deviceManager.get2DCameraImage(i, image, fps)) { + update2DCameraDisplay(i, image, fps); + } + } +} + +void MainWindow::update2DCameraDisplay(int camera_index, const cv::Mat &image, + double fps) { + if (camera_index < 0 || camera_index >= MAX_2D_CAMERAS || image.empty()) + return; + + // 更新FPS显示 + if (twoDInfoLabels_[camera_index]) { + QString info = + QString("2D - %1: FPS: %2") + .arg(QString::fromStdString( + 
DeviceManager::getInstance().get2DCameraId(camera_index))) + .arg(QString::number(fps, 'f', 1)); + twoDInfoLabels_[camera_index]->setText(info); + } + + // 显示图像 + if (twoDImageLabels_[camera_index]) { + QImage qImg = cvMatToQImage(image); + + // 保持纵横比缩放 + QSize labelSize = twoDImageLabels_[camera_index]->size(); + if (!qImg.isNull()) { + twoDImageLabels_[camera_index]->setPixmap(QPixmap::fromImage(qImg).scaled( + labelSize, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + } + } +} diff --git a/image_capture/src/gui/mainwindow.h b/image_capture/src/gui/mainwindow.h new file mode 100644 index 0000000..dcb50e0 --- /dev/null +++ b/image_capture/src/gui/mainwindow.h @@ -0,0 +1,106 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Forward declarations +class SettingsWidget; +class ImageProcessor; +class VisionController; +class LogStreamBuf; + +// 必须在QT_BEGIN_NAMESPACE之前包含,确保MOC能看到完整类型定义 +#include "../common/log_streambuf.h" + +QT_BEGIN_NAMESPACE +namespace Ui { class MainWindow; } +QT_END_NAMESPACE + +class MainWindow : public QMainWindow +{ + Q_OBJECT + +public: + MainWindow(QWidget *parent = nullptr); + ~MainWindow(); + +private slots: + void onStartCapture(); + void onStopCapture(); + void onSaveImage(); + void onSavePointCloud(); + void updateImage(); + +private: + // Helper to reduce redundancy in save functions + bool prepareCapturedData(cv::Mat& depth, cv::Mat& color, QString& timestamp); + + Ui::MainWindow *ui; + std::shared_ptr visionController_; // Vision系统控制器(Redis监控和算法触发) + std::vector> processors_; + + // Supports max 4 depth cameras + static const int MAX_DEPTH_CAMERAS = 4; + static const int MAX_2D_CAMERAS = 5; + QLabel* depthImageLabels_[MAX_DEPTH_CAMERAS]; + QLabel* depthInfoLabels_[MAX_DEPTH_CAMERAS]; + + // 2D camera display control array + QLabel* 
twoDImageLabels_[MAX_2D_CAMERAS]; + QLabel* twoDInfoLabels_[MAX_2D_CAMERAS]; + + void update2DDisplay(); + void update2DCameraDisplay(int camera_index, const cv::Mat& image, double fps); + // 深度图信息标签(显示相机编号、FPS等) + + + + QPushButton* startButton_; + QPushButton* stopButton_; + QPushButton* saveButton_; + QPushButton* savePointCloudButton_; + QPlainTextEdit* logTextEdit_; + + QTimer* updateTimer_; + bool isCapturing_; + int currentDeviceIndex_; + + // 日志重定向相关 + std::unique_ptr coutBuf_; + std::unique_ptr cerrBuf_; + std::streambuf* originalCout_; + std::streambuf* originalCerr_; + + QImage cvMatToQImage(const cv::Mat& mat); + void updateDepthDisplay(); + void updateCameraDisplay(int cameraIndex); + + void appendLog(const QString& message); + + // 辅助函数:简化代码 + void startImageDisplay(); // 启动图像显示(更新按钮状态和定时器) + void stopImageDisplay(); // 停止图像显示(更新按钮状态、停止定时器、清空显示) + + // Settings tab + // Settings Widget + SettingsWidget* settingsWidget_; +}; + diff --git a/image_capture/src/gui/mainwindow.ui b/image_capture/src/gui/mainwindow.ui new file mode 100644 index 0000000..b520722 --- /dev/null +++ b/image_capture/src/gui/mainwindow.ui @@ -0,0 +1,1205 @@ + + + MainWindow + + + + 0 + 0 + 1280 + 800 + + + + + 800 + 600 + + + + Stereo Warehouse Inspection - 4 Cameras + + + + + 10 + + + 10 + + + 10 + + + 10 + + + 10 + + + + + 0 + + + + Depth Cameras + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 2 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + Depth - Cam0: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + Depth Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 
30 + + + + + 0 + 25 + + + + + 11 + true + + + + Depth - Cam1: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + Depth Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + Depth - Cam2: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + Depth Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + Depth - Cam3: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + Depth Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + + + 2D Cameras + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 2 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + 2D - Cam0: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + 2D Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + 2D - Cam1: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + 
+ 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + 2D Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + 2D - Cam2: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + 2D Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 0 + 0 + + + + + 16777215 + 30 + + + + + 0 + 25 + + + + + 11 + true + + + + 2D - Cam3: FPS: 0 + + + Qt::AlignmentFlag::AlignLeft|Qt::AlignmentFlag::AlignVCenter + + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + 0 + + + + + + 1 + 1 + + + + false + + + + 12 + true + + + + 2D Image + + + Qt::AlignmentFlag::AlignCenter + + + + + + + + + + + + + + + + + + 10 + + + + + 10 + + + + + 10 + + + + + + 120 + 35 + + + + + 12 + + + + Start Capture + + + + + + + + 120 + 35 + + + + + 12 + + + + Stop Capture + + + + + + + + 120 + 35 + + + + + 12 + + + + Save Image + + + + + + + + 120 + 35 + + + + + 12 + + + + Save Point Cloud + + + + + + + Qt::Orientation::Horizontal + + + + 40 + 20 + + + + + + + + + + + + + 300 + 50 + + + + + 1 + 0 + + + + + Consolas + 11 + + + + true + + + Log messages will appear here... 
+ + + + + + + + + + + diff --git a/image_capture/src/gui/settings_widget.cpp b/image_capture/src/gui/settings_widget.cpp new file mode 100644 index 0000000..1f094cb --- /dev/null +++ b/image_capture/src/gui/settings_widget.cpp @@ -0,0 +1,305 @@ +#include "settings_widget.h" +#include "../common/config_manager.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +SettingsWidget::SettingsWidget(QWidget *parent) : QWidget(parent) { + setupUi(); + loadSettings(); +} + +void SettingsWidget::setupUi() { + auto mainLayout = new QVBoxLayout(this); + + auto tabWidget = new QTabWidget(this); + tabWidget->addTab(createBeamRackTab(), "Beam/Rack Deflection"); + tabWidget->addTab(createPalletOffsetTab(), "Pallet Offset"); + tabWidget->addTab(createOtherAlgorithmsTab(), "Other Algorithms"); + tabWidget->addTab(createGeneralTab(), "General"); + + mainLayout->addWidget(tabWidget); + + auto buttonLayout = new QHBoxLayout(); + buttonLayout->addStretch(); + + auto saveButton = new QPushButton("Save Settings", this); + saveButton->setMinimumSize(120, 35); + QFont font; + font.setPointSize(12); + saveButton->setFont(font); + connect(saveButton, &QPushButton::clicked, this, &SettingsWidget::saveSettings); + buttonLayout->addWidget(saveButton); + + mainLayout->addLayout(buttonLayout); +} + +QWidget* SettingsWidget::createBeamRackTab() { + auto widget = new QWidget(this); + auto layout = new QVBoxLayout(widget); + + auto roiGroup = new QGroupBox("感兴趣区域点", this); + auto roiLayout = new QGridLayout(roiGroup); + + roiLayout->addWidget(new QLabel("点", this), 0, 0); + roiLayout->addWidget(new QLabel("横梁 X", this), 0, 1); + roiLayout->addWidget(new QLabel("横梁 Y", this), 0, 2); + roiLayout->addWidget(new QLabel("立柱 X", this), 0, 3); + roiLayout->addWidget(new QLabel("立柱 Y", this), 0, 4); + + const QStringList pointNames = {"左上", "右上", "右下", "左下"}; + for (int i = 0; i < 4; ++i) { + beamRoiX_[i] = new QSpinBox(this); 
beamRoiX_[i]->setRange(0, 5000); + beamRoiY_[i] = new QSpinBox(this); beamRoiY_[i]->setRange(0, 5000); + rackRoiX_[i] = new QSpinBox(this); rackRoiX_[i]->setRange(0, 5000); + rackRoiY_[i] = new QSpinBox(this); rackRoiY_[i]->setRange(0, 5000); + + roiLayout->addWidget(new QLabel(pointNames[i], this), i+1, 0); + roiLayout->addWidget(beamRoiX_[i], i+1, 1); + roiLayout->addWidget(beamRoiY_[i], i+1, 2); + roiLayout->addWidget(rackRoiX_[i], i+1, 3); + roiLayout->addWidget(rackRoiY_[i], i+1, 4); + } + layout->addWidget(roiGroup); + + auto threshGroup = new QGroupBox("阈值 (mm)", this); + auto threshLayout = new QFormLayout(threshGroup); + + auto createDoubleSpin = [this](QDoubleSpinBox*& box) { + box = new QDoubleSpinBox(this); + box->setRange(-1000.0, 1000.0); + box->setSingleStep(0.1); + box->setDecimals(1); + }; + + createDoubleSpin(beamThresholdA_); + createDoubleSpin(beamThresholdB_); + createDoubleSpin(beamThresholdC_); + createDoubleSpin(beamThresholdD_); + createDoubleSpin(rackThresholdA_); + createDoubleSpin(rackThresholdB_); + createDoubleSpin(rackThresholdC_); + createDoubleSpin(rackThresholdD_); + + threshLayout->addRow("横梁负向报警 (A):", beamThresholdA_); + threshLayout->addRow("横梁负向预警 (B):", beamThresholdB_); + threshLayout->addRow("横梁正向预警 (C):", beamThresholdC_); + threshLayout->addRow("横梁正向报警 (D):", beamThresholdD_); + threshLayout->addRow("立柱负向报警 (A):", rackThresholdA_); + threshLayout->addRow("立柱负向预警 (B):", rackThresholdB_); + threshLayout->addRow("立柱正向预警 (C):", rackThresholdC_); + threshLayout->addRow("立柱正向报警 (D):", rackThresholdD_); + + layout->addWidget(threshGroup); + layout->addStretch(); + return widget; +} + +QWidget* SettingsWidget::createPalletOffsetTab() { + auto widget = new QWidget(this); + auto scroll = new QScrollArea(widget); + auto contentWidget = new QWidget(); + auto layout = new QVBoxLayout(contentWidget); + + auto createThreshGroup = [this](const QString& title, QDoubleSpinBox*& A, QDoubleSpinBox*& B, QDoubleSpinBox*& C, QDoubleSpinBox*& D) 
{ + auto group = new QGroupBox(title, this); + auto flo = new QFormLayout(group); + + auto createDS = [this](QDoubleSpinBox*& box) { + box = new QDoubleSpinBox(this); + box->setRange(-1000.0, 1000.0); + box->setSingleStep(0.1); + box->setDecimals(1); + }; + + createDS(A); createDS(B); createDS(C); createDS(D); + flo->addRow("低位报警 (A):", A); + flo->addRow("低位预警 (B):", B); + flo->addRow("高位预警 (C):", C); + flo->addRow("高位报警 (D):", D); + return group; + }; + + layout->addWidget(createThreshGroup("横向偏移 (mm)", palletLatA_, palletLatB_, palletLatC_, palletLatD_)); + layout->addWidget(createThreshGroup("纵向偏移 (mm)", palletLonA_, palletLonB_, palletLonC_, palletLonD_)); + layout->addWidget(createThreshGroup("旋转角度 (deg)", palletRotA_, palletRotB_, palletRotC_, palletRotD_)); + layout->addWidget(createThreshGroup("左孔变形 (mm)", palletHoleLeftA_, palletHoleLeftB_, palletHoleLeftC_, palletHoleLeftD_)); + layout->addWidget(createThreshGroup("右孔变形 (mm)", palletHoleRightA_, palletHoleRightB_, palletHoleRightC_, palletHoleRightD_)); + + layout->addStretch(); + + auto mainLayout = new QVBoxLayout(widget); + scroll->setWidget(contentWidget); + scroll->setWidgetResizable(true); + mainLayout->addWidget(scroll); + + return widget; +} + +QWidget* SettingsWidget::createOtherAlgorithmsTab() { + auto widget = new QWidget(this); + auto layout = new QVBoxLayout(widget); + + auto slotGroup = new QGroupBox("库位占用", this); + auto slotLayout = new QFormLayout(slotGroup); + + slotDepthThreshold_ = new QDoubleSpinBox(this); + slotDepthThreshold_->setRange(0.0, 10000.0); + + slotConfidenceThreshold_ = new QDoubleSpinBox(this); + slotConfidenceThreshold_->setRange(0.0, 1.0); + slotConfidenceThreshold_->setSingleStep(0.05); + + slotLayout->addRow("深度阈值 (mm):", slotDepthThreshold_); + slotLayout->addRow("置信度阈值:", slotConfidenceThreshold_); + layout->addWidget(slotGroup); + + auto visGroup = new QGroupBox("视觉盘点", this); + auto visLayout = new QFormLayout(visGroup); + + visualBarcodeConfidence_ = new 
QDoubleSpinBox(this); + visualBarcodeConfidence_->setRange(0.0, 1.0); + visualBarcodeConfidence_->setSingleStep(0.05); + + visLayout->addRow("条码置信度:", visualBarcodeConfidence_); + layout->addWidget(visGroup); + + layout->addStretch(); + return widget; +} + +QWidget* SettingsWidget::createGeneralTab() { + auto widget = new QWidget(this); + auto layout = new QFormLayout(widget); + + minDepth_ = new QDoubleSpinBox(this); + minDepth_->setRange(0.0, 10000.0); + + maxDepth_ = new QDoubleSpinBox(this); + maxDepth_->setRange(0.0, 10000.0); + + samplePoints_ = new QSpinBox(this); + samplePoints_->setRange(1, 1000); + + layout->addRow("最小深度 (mm):", minDepth_); + layout->addRow("最大深度 (mm):", maxDepth_); + layout->addRow("采样点数:", samplePoints_); + + return widget; +} + +void SettingsWidget::loadSettings() { + auto& config = ConfigManager::getInstance(); + + auto beamPoints = config.getBeamROIPoints(); + for(int i=0; i<4 && isetValue(beamPoints[i].x); + beamRoiY_[i]->setValue(beamPoints[i].y); + } + auto rackPoints = config.getRackROIPoints(); + for(int i=0; i<4 && isetValue(rackPoints[i].x); + rackRoiY_[i]->setValue(rackPoints[i].y); + } + + auto beamT = config.getBeamThresholds(); + if(beamT.size() >= 4) { + beamThresholdA_->setValue(beamT[0]); + beamThresholdB_->setValue(beamT[1]); + beamThresholdC_->setValue(beamT[2]); + beamThresholdD_->setValue(beamT[3]); + } + + auto rackT = config.getRackThresholds(); + if(rackT.size() >= 4) { + rackThresholdA_->setValue(rackT[0]); + rackThresholdB_->setValue(rackT[1]); + rackThresholdC_->setValue(rackT[2]); + rackThresholdD_->setValue(rackT[3]); + } + + auto setThresh = [](std::vector v, QDoubleSpinBox* a, QDoubleSpinBox* b, QDoubleSpinBox* c, QDoubleSpinBox* d) { + if(v.size() >= 4) { + a->setValue(v[0]); b->setValue(v[1]); c->setValue(v[2]); d->setValue(v[3]); + } + }; + + setThresh(config.getPalletOffsetLatThresholds(), palletLatA_, palletLatB_, palletLatC_, palletLatD_); + setThresh(config.getPalletOffsetLonThresholds(), 
palletLonA_, palletLonB_, palletLonC_, palletLonD_); + setThresh(config.getPalletRotationAngleThresholds(), palletRotA_, palletRotB_, palletRotC_, palletRotD_); + setThresh(config.getPalletHoleDefLeftThresholds(), palletHoleLeftA_, palletHoleLeftB_, palletHoleLeftC_, palletHoleLeftD_); + setThresh(config.getPalletHoleDefRightThresholds(), palletHoleRightA_, palletHoleRightB_, palletHoleRightC_, palletHoleRightD_); + + slotDepthThreshold_->setValue(config.getSlotOccupancyDepthThreshold()); + slotConfidenceThreshold_->setValue(config.getSlotOccupancyConfidenceThreshold()); + visualBarcodeConfidence_->setValue(config.getVisualInventoryBarcodeConfidence()); + + minDepth_->setValue(config.getAlgorithmMinDepth()); + maxDepth_->setValue(config.getAlgorithmMaxDepth()); + samplePoints_->setValue(config.getAlgorithmSamplePoints()); +} + +void SettingsWidget::saveSettings() { + auto& config = ConfigManager::getInstance(); + + std::vector beamPts, rackPts; + for(int i=0; i<4; ++i) { + beamPts.push_back(cv::Point2i(beamRoiX_[i]->value(), beamRoiY_[i]->value())); + rackPts.push_back(cv::Point2i(rackRoiX_[i]->value(), rackRoiY_[i]->value())); + } + config.setBeamROIPoints(beamPts); + config.setRackROIPoints(rackPts); + + config.setBeamThresholds({ + (float)beamThresholdA_->value(), (float)beamThresholdB_->value(), + (float)beamThresholdC_->value(), (float)beamThresholdD_->value() + }); + config.setRackThresholds({ + (float)rackThresholdA_->value(), (float)rackThresholdB_->value(), + (float)rackThresholdC_->value(), (float)rackThresholdD_->value() + }); + + config.setPalletOffsetLatThresholds({ + (float)palletLatA_->value(), (float)palletLatB_->value(), + (float)palletLatC_->value(), (float)palletLatD_->value() + }); + config.setPalletOffsetLonThresholds({ + (float)palletLonA_->value(), (float)palletLonB_->value(), + (float)palletLonC_->value(), (float)palletLonD_->value() + }); + config.setPalletRotationAngleThresholds({ + (float)palletRotA_->value(), (float)palletRotB_->value(), 
+ (float)palletRotC_->value(), (float)palletRotD_->value() + }); + config.setPalletHoleDefLeftThresholds({ + (float)palletHoleLeftA_->value(), (float)palletHoleLeftB_->value(), + (float)palletHoleLeftC_->value(), (float)palletHoleLeftD_->value() + }); + config.setPalletHoleDefRightThresholds({ + (float)palletHoleRightA_->value(), (float)palletHoleRightB_->value(), + (float)palletHoleRightC_->value(), (float)palletHoleRightD_->value() + }); + + config.setSlotOccupancyDepthThreshold((float)slotDepthThreshold_->value()); + config.setSlotOccupancyConfidenceThreshold((float)slotConfidenceThreshold_->value()); + config.setVisualInventoryBarcodeConfidence((float)visualBarcodeConfidence_->value()); + + config.setAlgorithmMinDepth((float)minDepth_->value()); + config.setAlgorithmMaxDepth((float)maxDepth_->value()); + config.setAlgorithmSamplePoints(samplePoints_->value()); + + if (config.saveConfig()) { + QMessageBox::information(this, "Success", "Configuration saved successfully."); + emit settingsSaved(); + } else { + QMessageBox::critical(this, "Error", "Failed to save configuration."); + } +} diff --git a/image_capture/src/gui/settings_widget.h b/image_capture/src/gui/settings_widget.h new file mode 100644 index 0000000..8982667 --- /dev/null +++ b/image_capture/src/gui/settings_widget.h @@ -0,0 +1,75 @@ +#pragma once + +#include +#include + +class QSpinBox; +class QDoubleSpinBox; + +class SettingsWidget : public QWidget { + Q_OBJECT + +public: + explicit SettingsWidget(QWidget *parent = nullptr); + ~SettingsWidget() = default; + +public slots: + void loadSettings(); + void saveSettings(); + +signals: + void settingsSaved(); + +private: + void setupUi(); + QWidget* createBeamRackTab(); + QWidget* createPalletOffsetTab(); + QWidget* createOtherAlgorithmsTab(); + QWidget* createGeneralTab(); + + // Beam/Rack Deflection + QSpinBox* beamRoiX_[4]; + QSpinBox* beamRoiY_[4]; + QSpinBox* rackRoiX_[4]; + QSpinBox* rackRoiY_[4]; + QDoubleSpinBox* beamThresholdA_; + 
QDoubleSpinBox* beamThresholdB_; + QDoubleSpinBox* beamThresholdC_; + QDoubleSpinBox* beamThresholdD_; + QDoubleSpinBox* rackThresholdA_; + QDoubleSpinBox* rackThresholdB_; + QDoubleSpinBox* rackThresholdC_; + QDoubleSpinBox* rackThresholdD_; + + // Pallet Offset + QDoubleSpinBox* palletLatA_; + QDoubleSpinBox* palletLatB_; + QDoubleSpinBox* palletLatC_; + QDoubleSpinBox* palletLatD_; + QDoubleSpinBox* palletLonA_; + QDoubleSpinBox* palletLonB_; + QDoubleSpinBox* palletLonC_; + QDoubleSpinBox* palletLonD_; + QDoubleSpinBox* palletRotA_; + QDoubleSpinBox* palletRotB_; + QDoubleSpinBox* palletRotC_; + QDoubleSpinBox* palletRotD_; + QDoubleSpinBox* palletHoleLeftA_; + QDoubleSpinBox* palletHoleLeftB_; + QDoubleSpinBox* palletHoleLeftC_; + QDoubleSpinBox* palletHoleLeftD_; + QDoubleSpinBox* palletHoleRightA_; + QDoubleSpinBox* palletHoleRightB_; + QDoubleSpinBox* palletHoleRightC_; + QDoubleSpinBox* palletHoleRightD_; + + // Other Algorithms + QDoubleSpinBox* slotDepthThreshold_; + QDoubleSpinBox* slotConfidenceThreshold_; + QDoubleSpinBox* visualBarcodeConfidence_; + + // General Parameters + QDoubleSpinBox* minDepth_; + QDoubleSpinBox* maxDepth_; + QSpinBox* samplePoints_; +}; diff --git a/image_capture/src/main.cpp b/image_capture/src/main.cpp new file mode 100644 index 0000000..6ca856e --- /dev/null +++ b/image_capture/src/main.cpp @@ -0,0 +1,18 @@ +#include "gui/mainwindow.h" +#include +#include +#include + +int main(int argc, char *argv[]) +{ + // 设置OpenCV日志级别 + cv::utils::logging::setLogLevel(cv::utils::logging::LOG_LEVEL_SILENT); + + QApplication app(argc, argv); + app.setStyle("Fusion"); + //创建MainWindow对象(此时会执行构造函数,初始化所有功能) + MainWindow window; + window.show(); + + return app.exec(); +} diff --git a/image_capture/src/redis/redis_communicator.cpp b/image_capture/src/redis/redis_communicator.cpp new file mode 100644 index 0000000..6249afc --- /dev/null +++ b/image_capture/src/redis/redis_communicator.cpp @@ -0,0 +1,346 @@ +/** + * @file redis_communicator.cpp + 
* @brief Redis通信模块实现文件 + */ +#include "redis_communicator.h" +#include +#include +#include +#include +#include + +#pragma comment(lib, "ws2_32.lib") + +RedisCommunicator::RedisCommunicator() + : redis_port_(6379), redis_db_(0), redis_context_(nullptr), + listening_(false), connected_(false), last_flag_(0), + socket_fd_(INVALID_SOCKET) { + // Initialize Winsock + WSADATA wsaData; + int iResult = WSAStartup(MAKEWORD(2, 2), &wsaData); + if (iResult != 0) { + std::cerr << "WSAStartup failed: " << iResult << std::endl; + } +} + +RedisCommunicator::~RedisCommunicator() { + stopListening(); + disconnectSocket(); + WSACleanup(); +} + +bool RedisCommunicator::initialize(const std::string &host, int port, int db, + const std::string &password) { + redis_host_ = host; + redis_port_ = port; + redis_db_ = db; + redis_password_ = password; + + // Disconnect if already connected + disconnectSocket(); + + if (connectSocket()) { + connected_ = true; + + // Authenticate if password provided + if (!redis_password_.empty()) { + std::string cmd = "AUTH " + redis_password_ + "\r\n"; + std::string response = sendCommand(cmd); + if (response.find("+OK") != 0) { + std::cerr << "Redis authentication failed: " << response << std::endl; + disconnectSocket(); + return false; + } + } + + // Select database if needed + if (db > 0) { + std::string cmd = "SELECT " + std::to_string(db) + "\r\n"; + sendCommand(cmd); + } + return true; + } + + return false; +} + +void RedisCommunicator::setTaskCallback(TaskCallback callback) { + std::lock_guard lock(callback_mutex_); + task_callback_ = callback; +} + +bool RedisCommunicator::startListening() { + if (listening_) + return true; + if (!connected_) + return false; + + listening_ = true; + listening_thread_ = + std::thread(&RedisCommunicator::listeningThreadFunc, this); + std::cout << "[RedisCommunicator] Started listening for task flag changes" + << std::endl; + return true; +} + +void RedisCommunicator::stopListening() { + if (!listening_) + return; + 
listening_ = false; + if (listening_thread_.joinable()) { + listening_thread_.join(); + } + std::cout << "[RedisCommunicator] Stopped listening" << std::endl; +} + +bool RedisCommunicator::connectSocket() { + struct addrinfo *result = NULL, *ptr = NULL, hints; + + ZeroMemory(&hints, sizeof(hints)); + hints.ai_family = AF_UNSPEC; + hints.ai_socktype = SOCK_STREAM; + hints.ai_protocol = IPPROTO_TCP; + + // Resolve the server address and port + std::string port_str = std::to_string(redis_port_); + int iResult = + getaddrinfo(redis_host_.c_str(), port_str.c_str(), &hints, &result); + if (iResult != 0) { + std::cerr << "getaddrinfo failed: " << iResult << std::endl; + return false; + } + + SOCKET connectSocket = INVALID_SOCKET; + + // Attempt to connect to an address until one succeeds + for (ptr = result; ptr != NULL; ptr = ptr->ai_next) { + connectSocket = socket(ptr->ai_family, ptr->ai_socktype, ptr->ai_protocol); + if (connectSocket == INVALID_SOCKET) { + std::cerr << "socket failed with error: " << WSAGetLastError() + << std::endl; + continue; + } + + iResult = connect(connectSocket, ptr->ai_addr, (int)ptr->ai_addrlen); + if (iResult == SOCKET_ERROR) { + closesocket(connectSocket); + connectSocket = INVALID_SOCKET; + continue; + } + break; + } + + freeaddrinfo(result); + + if (connectSocket == INVALID_SOCKET) { + std::cerr << "Unable to connect to server!" 
<< std::endl; + return false; + } + + // Set timeout + DWORD timeout = 2000; // 2 seconds + setsockopt(connectSocket, SOL_SOCKET, SO_RCVTIMEO, (const char *)&timeout, + sizeof(timeout)); + setsockopt(connectSocket, SOL_SOCKET, SO_SNDTIMEO, (const char *)&timeout, + sizeof(timeout)); + + socket_fd_ = (unsigned long long)connectSocket; + return true; +} + +void RedisCommunicator::disconnectSocket() { + if (socket_fd_ != (unsigned long long)INVALID_SOCKET) { + closesocket((SOCKET)socket_fd_); + socket_fd_ = (unsigned long long)INVALID_SOCKET; + connected_ = false; + } +} + +std::string RedisCommunicator::sendCommand(const std::string &cmd) { + if (socket_fd_ == (unsigned long long)INVALID_SOCKET) + return ""; + + // Send the command + int iResult = send((SOCKET)socket_fd_, cmd.c_str(), (int)cmd.length(), 0); + if (iResult == SOCKET_ERROR) { + std::cerr << "send failed: " << WSAGetLastError() << std::endl; + disconnectSocket(); + return ""; + } + + // Read response (simple blocking read for now) + char recvbuf[4096]; + iResult = recv((SOCKET)socket_fd_, recvbuf, 4096, 0); + if (iResult > 0) { + return std::string(recvbuf, iResult); + } else if (iResult == 0) { + std::cout << "Connection closed" << std::endl; + disconnectSocket(); + } else { + // std::cerr << "recv failed: " << WSAGetLastError() << std::endl; + } + return ""; +} + +bool RedisCommunicator::readString(const std::string &key, std::string &value) { + if (!connected_) + return false; + // Simple inline command: GET key + std::string cmd = "GET " + key + "\r\n"; + std::string response = sendCommand(cmd); + return parseRedisResponse(response, value); +} + +bool RedisCommunicator::writeString(const std::string &key, + const std::string &value) { + if (!connected_) + return false; + + // Using RESP for SET to be safe with spaces in JSON + // *3\r\n$3\r\nSET\r\n$\r\nkey\r\n$\r\nvalue\r\n + std::string cmd = "*3\r\n$3\r\nSET\r\n$" + std::to_string(key.length()) + + "\r\n" + key + "\r\n$" + 
std::to_string(value.length()) + + "\r\n" + value + "\r\n"; + + std::string response = sendCommand(cmd); + return (response.find("+OK") == 0); +} + +bool RedisCommunicator::readInt(const std::string &key, int &value) { + std::string str_value; + if (readString(key, str_value)) { + try { + value = std::stoi(str_value); + return true; + } catch (...) { + } + } + return false; +} + +bool RedisCommunicator::parseRedisResponse(const std::string &response, + std::string &value) { + if (response.empty()) + return false; + + // RESP Bulk String: $5\r\nvalue\r\n + // RESP Null: $-1\r\n + // RESP Simple String: +OK\r\n + + if (response[0] == '$') { + size_t rn1 = response.find("\r\n"); + if (rn1 == std::string::npos) + return false; + + std::string lenStr = response.substr(1, rn1 - 1); + int len = std::stoi(lenStr); + + if (len == -1) + return false; // Key not found + + size_t rn2 = response.find("\r\n", rn1 + 2); + if (rn2 == std::string::npos) { + // Maybe response is truncated, simplistic check + if (response.length() >= rn1 + 2 + len) { + value = response.substr(rn1 + 2, len); + return true; + } + return false; + } + + value = response.substr(rn1 + 2, len); + return true; + } else if (response[0] == '+') { + size_t rn = response.find("\r\n"); + if (rn != std::string::npos) { + value = response.substr(1, rn - 1); + return true; + } + } else if (response[0] == ':') { + size_t rn = response.find("\r\n"); + if (rn != std::string::npos) { + value = response.substr(1, rn - 1); + return true; + } + } + + return false; +} + +bool RedisCommunicator::readTaskData(RedisTaskData &task_data) { + if (!connected_) + return false; + + int flag = 0; + if (!readInt("vision_task_flag", flag)) + return false; + task_data.flag = flag; + + readString("vision_task_side", task_data.side); + readString("vision_task_time", task_data.task_time); + + int beam_length = 0; + readInt("vision_task_beam_length", beam_length); + task_data.beam_length = beam_length; + + return true; +} + +bool 
RedisCommunicator::writeDetectionResult(const std::string &result_json) { + // Write result to redis + // Assuming result key logic, or maybe just write to "vision_result" + // The previous code didn't specify the key name, I'll assume "vision_result" + // for now or use a dynamic one? Let's assume a fixed key or one derived from + // task? Let's just use "vision_result" as a generic key based on context. Or, + // maybe check if there's a requirement. The header said "write detection + // result to Redis". + + return writeString("vision_task_result", result_json); +} + +bool RedisCommunicator::isConnected() const { return connected_; } + +void RedisCommunicator::listeningThreadFunc() { + std::cout << "[RedisCommunicator] Listening thread started" << std::endl; + // Attempt to reconnect loop if needed? + // For now simple polling + while (listening_) { + try { + if (!connected_) { + // Try to reconnect? + // For now just wait + std::this_thread::sleep_for(std::chrono::seconds(1)); + continue; + } + + RedisTaskData task_data; + if (readTaskData(task_data)) { + int current_flag = task_data.flag; + // Only notify on new non-zero flag + if (current_flag != last_flag_ && current_flag > 0) { + last_flag_ = current_flag; + std::lock_guard lock(callback_mutex_); + if (task_callback_) { + task_callback_(task_data); + } + std::cout << "[RedisCommunicator] Detected new task flag: " + << current_flag << std::endl; + } else if (current_flag == 0 && last_flag_ != 0) { + // Reset last flag if it goes back to 0 + last_flag_ = 0; + } + } + } catch (const std::exception &e) { + std::cerr << "[RedisCommunicator] Exception in listeningThreadFunc: " + << e.what() << std::endl; + } catch (...) { + std::cerr + << "[RedisCommunicator] Unknown exception in listeningThreadFunc." 
+ << std::endl; + } + + std::this_thread::sleep_for( + std::chrono::milliseconds(100)); // Poll every 100ms + } + std::cout << "[RedisCommunicator] Listening thread ended" << std::endl; +} diff --git a/image_capture/src/redis/redis_communicator.h b/image_capture/src/redis/redis_communicator.h new file mode 100644 index 0000000..809475d --- /dev/null +++ b/image_capture/src/redis/redis_communicator.h @@ -0,0 +1,145 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +/** + * @brief Redis通信模块 + * + * 功能说明: + * - 监听Redis中vision_task_flag的变化 + * - 读取WMS写入的任务数据(flag, side, time) + * - 将Vision系统的检测结果写入Redis + * - 使用Redis Keyspace Notifications或发布/订阅机制监听变化 + * + * 设计原则: + * - 线程安全 + * - 异步监听,不阻塞主线程 + * - 提供回调接口,当任务标志位变化时通知上层 + */ +#include "task_data.h" + +class RedisCommunicator { +public: + /** + * 任务标志位变化回调函数类型 + * @param task_data 任务数据 + */ + using TaskCallback = std::function; + + RedisCommunicator(); + ~RedisCommunicator(); + + /** + * 初始化Redis连接 + * @param host Redis服务器地址(默认"127.0.0.1") + * @param port Redis服务器端口(默认6379) + * @param db Redis数据库编号(默认0) + * @param password Redis密码(可选) + * @return 是否成功连接 + */ + bool initialize(const std::string& host = "127.0.0.1", + int port = 6379, + int db = 0, + const std::string& password = ""); + + /** + * 设置任务标志位变化回调函数 + * @param callback 回调函数 + */ + void setTaskCallback(TaskCallback callback); + + /** + * 开始监听任务标志位变化 + * @return 是否成功启动监听 + */ + bool startListening(); + + /** + * 停止监听 + */ + void stopListening(); + + /** + * 读取任务数据 + * @param task_data [输出] 任务数据 + * @return 是否成功读取 + */ + bool readTaskData(RedisTaskData& task_data); + + /** + * 写入检测结果到Redis + * @param result_json 检测结果的JSON字符串 + * @return 是否成功写入 + */ + bool writeDetectionResult(const std::string& result_json); + + /** + * 向Redis写入字符串值 + * @param key Redis键名 + * @param value 值 + * @return 是否成功 + */ + bool writeString(const std::string& key, const std::string& value); + + /** + * 检查Redis连接状态 + * @return 是否已连接 + */ + bool isConnected() 
const; + +private: + /** + * 监听线程函数 + */ + void listeningThreadFunc(); + + /** + * 从Redis读取字符串值 + * @param key Redis键名 + * @param value [输出] 值 + * @return 是否成功 + */ + bool readString(const std::string& key, std::string& value); + + + + /** + * 从Redis读取整数值 + * @param key Redis键名 + * @param value [输出] 值 + * @return 是否成功 + */ + bool readInt(const std::string& key, int& value); + + // Redis连接参数 + std::string redis_host_; + int redis_port_; + int redis_db_; + std::string redis_password_; + + // Redis连接对象(使用void*避免暴露具体实现) + void* redis_context_; // 实际类型为redisContext*或类似 + + // 监听相关 + std::atomic listening_; + std::thread listening_thread_; + std::mutex callback_mutex_; + TaskCallback task_callback_; + + // 状态 + std::atomic connected_; + std::atomic last_flag_; // 上次读取的flag值,用于检测变化 + + // Socket handle (using uintptr_t to avoid including winsock headers here) + unsigned long long socket_fd_; + bool connectSocket(); + void disconnectSocket(); + std::string sendCommand(const std::string& cmd); + bool parseRedisResponse(const std::string& response, std::string& value); +}; + diff --git a/image_capture/src/redis/task_data.h b/image_capture/src/redis/task_data.h new file mode 100644 index 0000000..5a14288 --- /dev/null +++ b/image_capture/src/redis/task_data.h @@ -0,0 +1,16 @@ +#pragma once +#include + +/** + * @brief Redis任务数据结构 + * + * 独立定义的任务数据结构,包含flag、side和time + */ +struct RedisTaskData { + int flag; // 任务功能编号(1~5) + std::string side; // 货架侧(left/right) + std::string task_time; // 任务触发时间("YYYY-MM-DD HH:MM:SS") + int beam_length; // 横梁长度(mm),仅flag=3时有效,可选值:2180 / 1380 + + RedisTaskData() : flag(0), beam_length(0) {} +}; diff --git a/image_capture/src/task/task_manager.cpp b/image_capture/src/task/task_manager.cpp new file mode 100644 index 0000000..3d49f9b --- /dev/null +++ b/image_capture/src/task/task_manager.cpp @@ -0,0 +1,855 @@ +/** + * @file task_manager.cpp + * @brief 任务管理器实现文件(合并了结果处理功能) + * + * 此文件实现了TaskManager类的完整功能: + * - 任务接收和队列管理 + * - 
任务分发和执行(根据flag选择对应的检测算法) + * - 检测结果处理(格式化、计算警告/报警、写入Redis) + * - 线程安全的任务执行 + * + * 设计说明: + * - 使用任务队列 + 执行线程的模式,实现异步任务处理 + * - 直接使用DeviceManager单例获取图像,简化架构 + * - 合并了结果处理功能,简化架构 + * - 所有共享数据使用互斥锁保护,确保线程安全 + */ + +#include "task_manager.h" +#include "../algorithm/core/detection_base.h" +#include "../common_types.h" +#include "../device/device_manager.h" +#include +#include +#include +#include +#include +#include + +/** + * @brief 构造函数 + * + * 初始化任务管理器,创建所有检测器实例 + * 检测器映射关系: + * - Flag 1: SlotOccupancyDetection (货位有无检测) + * - Flag 2: PalletOffsetDetection (托盘位置偏移检测) + * - Flag 3: BeamRackDeflectionDetection (横梁变形检测) + * - Flag 4: VisualInventoryDetection (视觉盘点) + + * + * @note 所有检测器在构造函数中创建,避免运行时创建的开销 + * @note 使用智能指针管理检测器生命周期,自动释放资源 + */ +TaskManager::TaskManager() + : current_status_(TaskStatus::IDLE) // 初始状态为空闲 + , + running_(false) // 初始状态为未运行 +{ + // 创建所有检测器,建立flag到检测器的映射关系 + // 使用std::make_shared创建智能指针,自动管理内存 + detectors_[1] = std::make_shared(); + detectors_[2] = std::make_shared(); + detectors_[3] = std::make_shared(); + detectors_[4] = std::make_shared(); + + std::cout << "[TaskManager] Initialization complete, created " + << detectors_.size() << " detector(s)" << std::endl; +} + +/** + * @brief 析构函数 + * + * 确保在对象销毁时正确停止任务执行线程 + * 清理所有资源,避免资源泄漏 + * + * @note 必须先停止当前任务,再等待线程结束 + * @note 使用join()确保线程安全退出 + */ +TaskManager::~TaskManager() { + stopCurrentTask(); // 停止当前任务,清空任务队列 + if (execution_thread_.joinable()) { + running_ = false; // 设置运行标志为false,通知线程退出 + execution_thread_.join(); // 等待线程结束 + } +} + +/** + * @brief 初始化任务管理器 + * + * 初始化任务管理器,启动任务执行线程 + * + * @param redis_comm + * Redis通信对象(用于写入检测结果),可以为nullptr(如果不需要写入结果) + * @return true 初始化成功,false 初始化失败(Redis未连接) + * + * @note 如果redis_comm为nullptr,则不会写入结果到Redis,但任务仍可正常执行 + * @note 任务执行线程在后台运行,持续从队列中取出任务并执行 + */ +bool TaskManager::initialize( + std::shared_ptr redis_result_comm, + std::shared_ptr redis_task_comm) { + // 如果已经在运行,直接返回成功(避免重复初始化) + if (running_) { + return true; + } + + // 
保存Redis通信对象引用(用于后续写入结果和清空触发键) + redis_result_comm_ = redis_result_comm; + redis_task_comm_ = redis_task_comm; + + // 如果提供了Redis对象,检查连接状态 + if (redis_result_comm_ && !redis_result_comm_->isConnected()) { + std::cerr << "[TaskManager] Redis not connected" << std::endl; + return false; + } + + // 设置运行标志为true,启动任务执行线程 + running_ = true; + // 创建线程,执行taskExecutionThreadFunc函数 + // this指针指向当前TaskManager对象,用于在线程中访问成员函数 + execution_thread_ = std::thread(&TaskManager::taskExecutionThreadFunc, this); + + std::cout << "[TaskManager] Task manager initialized" << std::endl; + return true; +} + +/** + * @brief 处理新任务 + * + * 将新任务加入任务队列,等待执行线程处理 + * 此函数是线程安全的,可以在任何线程中调用 + * + * @param task_data + * 任务数据,包含flag(任务类型)、side(货架侧)、task_time(任务时间) + * + * @note flag必须在1-5范围内,对应5种不同的检测任务 + * @note 使用互斥锁保护任务队列,确保线程安全 + * @note 任务队列采用FIFO(先进先出)模式 + */ +void TaskManager::handleTask(const RedisTaskData &task_data) { + // 验证任务标志位有效性(1-5对应5种检测任务) + if (task_data.flag < 1 || task_data.flag > 5) { + std::cerr << "[TaskManager] Invalid task flag: " << task_data.flag + << std::endl; + return; + } + + // 使用互斥锁保护任务队列,确保线程安全 + // lock_guard自动管理锁的获取和释放 + { + std::lock_guard lock(task_queue_mutex_); + task_queue_.push(task_data); // 将任务加入队列 + } // 释放锁后再通知,避免执行线程被唤醒后立即阻塞 + + // 性能优化:使用条件变量通知等待的线程,避免轮询 + task_queue_cv_.notify_one(); + + std::cout << "[TaskManager] Received new task: flag=" << task_data.flag + << ", side=" << task_data.side << ", time=" << task_data.task_time + << std::endl; +} + +TaskManager::TaskStatus TaskManager::getCurrentTaskStatus() const { + return current_status_; +} + +bool TaskManager::getLatestResult(DetectionResult &result) { + std::lock_guard lock(result_mutex_); + if (current_status_ == TaskStatus::COMPLETED || + current_status_ == TaskStatus::FAILED) { + result = latest_result_; + return true; + } + return false; +} + +void TaskManager::stopCurrentTask() { + current_status_ = TaskStatus::IDLE; + { + std::lock_guard lock(task_queue_mutex_); + while (!task_queue_.empty()) { + 
task_queue_.pop(); + } + } + // 通知等待的线程,避免在停止时阻塞 + task_queue_cv_.notify_one(); +} + +/** + * @brief 任务执行线程函数 + * + * 这是任务执行线程的主函数,在后台持续运行 + * 主要工作流程: + * 1. 从任务队列中取出任务(如果队列为空则休眠等待) + * 2. 执行检测任务(获取图像、调用检测算法) + * 3. 处理检测结果(格式化、计算警告/报警、写入Redis) + * 4. 更新任务状态 + * + * @note 此函数运行在独立的线程中,不会阻塞主线程 + * @note 使用轮询方式检查任务队列,队列为空时休眠100ms避免CPU空转 + * @note 任务执行失败时,状态设置为FAILED,但不会影响后续任务执行 + */ +void TaskManager::taskExecutionThreadFunc() { + std::cout << "[TaskManager] Task execution thread started" << std::endl; + + // 主循环:持续处理任务直到running_为false + while (running_) { + // 从队列中取出任务 + RedisTaskData task_data; + { + // 性能优化:使用条件变量代替轮询,减少CPU占用 + // unique_lock支持条件变量,lock_guard不支持 + std::unique_lock lock(task_queue_mutex_); + + // 等待条件:队列非空或running_为false + // 如果队列为空,线程会阻塞在这里,直到有新任务到达或running_变为false + task_queue_cv_.wait(lock, + [this] { return !task_queue_.empty() || !running_; }); + + // 检查是否因为running_变为false而退出 + if (!running_ && task_queue_.empty()) { + break; + } + + // 队列非空,取出队列头部的任务(FIFO模式) + if (!task_queue_.empty()) { + task_data = task_queue_.front(); + task_queue_.pop(); // 从队列中移除 + } else { + continue; // 队列为空但running_仍为true,继续等待 + } + } // unique_lock在这里自动释放锁 + + // 执行任务 + current_status_ = TaskStatus::RUNNING; // 设置状态为运行中 + DetectionResult result; // 用于存储检测结果 + + std::cout << "[TaskManager] Starting task execution: flag=" + << task_data.flag << std::endl; + + // 执行检测任务(获取图像、调用检测算法) + if (executeDetectionTask(task_data, result)) { + // 任务执行成功 + current_status_ = TaskStatus::COMPLETED; + + // 保存结果到latest_result_(使用互斥锁保护) + { + std::lock_guard lock(result_mutex_); + latest_result_ = result; + } + + // 处理结果(格式化、计算警告/报警、写入Redis) + // 注意:processResult内部会处理所有结果相关的操作 + processResult(result); + + std::cout << "[TaskManager] Task execution completed: flag=" + << task_data.flag << std::endl; + } else { + // 任务执行失败 + current_status_ = TaskStatus::FAILED; + std::cerr << "[TaskManager] Task execution failed: flag=" + << task_data.flag << std::endl; + } + } // while循环结束 + + std::cout 
<< "[TaskManager] Task execution thread exited" << std::endl; +} + +/** + * @brief 执行检测任务 + * + * 执行具体的检测任务,包括: + * 1. 根据flag获取对应的检测器 + * 2. 通过DeviceManager单例获取相机图像(根据side选择相机) + * 3. 调用检测器的execute()方法执行检测算法 + * + * @param task_data 任务数据,包含flag、side、task_time + * @param result [输出] 检测结果,由检测算法填充 + * @return true 检测成功,false + * 检测失败(检测器不存在、图像获取失败、算法执行失败) + * + * @note 直接使用DeviceManager单例获取图像,简化架构 + * @note side参数用于选择相机:left对应索引0,right对应索引1 + * @note 如果图像获取失败,depth_img和color_img将为空,检测算法需要处理这种情况 + */ +bool TaskManager::executeDetectionTask(const RedisTaskData &task_data, + DetectionResult &result) { + // 根据flag获取对应的检测器 + auto detector = getDetector(task_data.flag); + if (!detector) { + std::cerr << "[TaskManager] Detector not found for flag=" << task_data.flag + << std::endl; + return false; + } + + // 针对 flag=4 (Visual Inventory) 使用专门的循环逻辑 + if (task_data.flag == 4) { + DetectionResult final_result; // Create a temporary result object if needed, + // or modify signature to expect one? + // Wait, executeDetectionTask takes `DetectionResult &result`. I can pass + // that. 
But I need to match the signature in header: bool + // executeVisualInventoryLoop(const RedisTaskData& task_data, + // std::shared_ptr detector, DetectionResult& final_result); + return executeVisualInventoryLoop(task_data, detector, result); + } + + cv::Mat depth_img, color_img; + double fps = 0.0; + bool image_acquired = false; // 标记图像是否成功获取 + std::vector point_cloud; + + // 根据任务标志选择不同的相机采集策略 + if (task_data.flag == 1) { + // Flag 1: Slot Occupancy Detection + // 需求:vision_task_side=left选择相机:sn:DA8743029,vision_task_side=right选择相机:sn:DA8742900 + // 使用 MVS 2D 相机 + + std::string target_sn; + if (task_data.side == "left") { + target_sn = "DA8743029"; + } else if (task_data.side == "right") { + target_sn = "DA8742900"; + } else { + std::cerr << "[TaskManager] Invalid side for flag 1: " << task_data.side + << std::endl; + return false; + } + + // 查找匹配 SN 的相机索引 + int mvs_count = DeviceManager::getInstance().get2DCameraCount(); + int found_index = -1; + + for (int i = 0; i < mvs_count; ++i) { + std::string sn = DeviceManager::getInstance().get2DCameraId(i); + // 简单的字符串匹配 (包含关系或相等) + if (sn.find(target_sn) != std::string::npos) { + found_index = i; + break; + } + } + + if (found_index >= 0) { + std::cout << "[TaskManager] Using MVS Camera Index " << found_index + << " (SN: " << target_sn << ") for Flag 1" << std::endl; + + // 重试逻辑:尝试多次获取图像,应对相机刚启动第一次抓拍可能为空的情况 + for (int retry = 0; retry < 15; ++retry) { + image_acquired = DeviceManager::getInstance().get2DCameraImage( + found_index, color_img, fps); + if (image_acquired && !color_img.empty()) { + break; + } + std::cout << "[TaskManager] Waiting for image from camera " + << found_index << " (Retry " << retry + 1 << "/15)..." + << std::endl; + std::this_thread::sleep_for(std::chrono::milliseconds(200)); + } + + // depth_img 为空,Flag 1 算法只需要 color_img (或处理空 depth) + } else { + std::cerr << "[TaskManager] Camera with SN " << target_sn + << " not found for Flag 1!" 
<< std::endl; + return false; + } + + } else if (task_data.flag == 2 || task_data.flag == 3) { + // Flag 2: PalletOffsetDetection (需要深度图) + // Flag 3: BeamRackDeflectionDetection (需要深度图) + + std::string target_sn; + if (task_data.side == "left") { + target_sn = "207000146458"; + } else if (task_data.side == "right") { + target_sn = "207000146703"; + } else { + std::cerr << "[TaskManager] Invalid side for flag " << task_data.flag + << ": " << task_data.side << std::endl; + return false; + } + + // 查找匹配 SN 的深度相机索引 + int depth_count = DeviceManager::getInstance().getDepthCameraCount(); + int found_index = -1; + + for (int i = 0; i < depth_count; ++i) { + // 注意:DeviceManager::getDeviceId 对于深度相机直接返回 SN + std::string sn = DeviceManager::getInstance().getDeviceId(i); + if (sn.find(target_sn) != std::string::npos) { + found_index = i; + break; + } + } + + if (found_index >= 0) { + std::cout << "[TaskManager] Using Depth Camera Index " << found_index + << " (SN: " << target_sn << ") for Flag " << task_data.flag + << std::endl; + + // 重试逻辑 + for (int retry = 0; retry < 15; ++retry) { + image_acquired = DeviceManager::getInstance().getLatestImages( + found_index, depth_img, color_img, fps); + if (image_acquired && + !depth_img.empty()) { // 深度图任务通常更关注深度图 + break; + } + std::cout << "[TaskManager] Waiting for depth/color images from camera " + << found_index << " (Retry " << retry + 1 << "/15)..." 
+ << std::endl; + std::this_thread::sleep_for(std::chrono::milliseconds(200)); + } + + // 获取点云 + if (image_acquired) { + if (DeviceManager::getInstance().computePointCloud( + found_index, depth_img, point_cloud)) { + std::cout << "[TaskManager] Computed Point Cloud for Camera " + << found_index << ", Points: " << point_cloud.size() + << std::endl; + } else { + std::cerr << "[TaskManager] Failed to compute point cloud for camera " + << found_index << std::endl; + } + } + } else { + std::cerr << "[TaskManager] Depth Camera with SN " << target_sn + << " not found for Flag " << task_data.flag << "!" << std::endl; + // 调试输出所有可用相机 + std::cout << "[TaskManager] Available Depth Cameras: "; + for (int i = 0; i < depth_count; ++i) { + std::cout << DeviceManager::getInstance().getDeviceId(i) << " "; + } + std::cout << std::endl; + return false; + } + + } else if (task_data.flag == 4) { + // Flag 4: VisualInventoryDetection + // 使用指定的2D相机进行视觉盘库检测 + std::string target_sn = "DA8789631"; + + // 查找匹配 SN 的2D相机索引 + int mvs_count = DeviceManager::getInstance().get2DCameraCount(); + int found_index = -1; + + for (int i = 0; i < mvs_count; ++i) { + std::string sn = DeviceManager::getInstance().get2DCameraId(i); + // 简单的字符串匹配 (包含关系或相等) + if (sn.find(target_sn) != std::string::npos) { + found_index = i; + break; + } + } + + if (found_index >= 0) { + std::cout << "[TaskManager] Using MVS Camera Index " << found_index + << " (SN: " << target_sn << ") for Flag " << task_data.flag + << std::endl; + + // 重试逻辑:尝试多次获取图像,应对相机刚启动第一次抓拍可能为空的情况 + for (int retry = 0; retry < 15; ++retry) { + image_acquired = DeviceManager::getInstance().get2DCameraImage( + found_index, color_img, fps); + if (image_acquired && !color_img.empty()) { + break; + } + std::cout << "[TaskManager] Waiting for image from camera " + << found_index << " (Retry " << retry + 1 << "/15)..." 
+ << std::endl; + std::this_thread::sleep_for(std::chrono::milliseconds(200)); + } + + // depth_img 为空,视觉盘库算法只需要 color_img + } else { + std::cerr << "[TaskManager] Camera with SN " << target_sn + << " not found for Flag " << task_data.flag << "!" << std::endl; + return false; + } + } else { + // 未知 Flag + std::cerr << "[TaskManager] Unknown task flag: " << task_data.flag + << std::endl; + return false; + } + + // 检查图像获取结果 + if (!image_acquired) { + std::cerr + << "[TaskManager] Failed to get images from DeviceManager for flag " + << task_data.flag << std::endl; + return false; + } + + // 执行检测算法 + bool success = false; + try { + std::cout << "[TaskManager] Invoking detector->execute..." << std::endl; + success = detector->execute(depth_img, color_img, task_data.side, result, + !point_cloud.empty() ? &point_cloud : nullptr, + task_data.beam_length); + std::cout << "[TaskManager] Detector returned: " + << (success ? "Success" : "Failure") << std::endl; + } catch (const std::exception &e) { + std::cerr << "[TaskManager] Exception during detection: " << e.what() + << std::endl; + success = false; + } catch (...) { + std::cerr << "[TaskManager] Unknown exception during detection." + << std::endl; + success = false; + } + + return success; +} + +std::shared_ptr TaskManager::getDetector(int flag) { + auto it = detectors_.find(flag); + if (it != detectors_.end()) { + return it->second; + } + return nullptr; +} + +/** + * @brief 处理检测结果 + * + * 处理检测结果,包括: + * 1. 添加警告和报警信号(根据阈值计算) + * 2. 格式化为JSON字符串 + * 3. 
写入Redis + * + * @param result 检测结果(原始结果,不会被修改) + * @return true 处理成功,false 处理失败(Redis未连接、写入失败) + * + * @note 此函数会创建结果的副本,在副本上添加警告/报警信号,不修改原始结果 + * @note 结果写入Redis后,WMS系统可以读取并处理 + */ +bool TaskManager::processResult(const DetectionResult &result) { + // 检查Redis连接状态 + if (!redis_result_comm_ || !redis_result_comm_->isConnected()) { + std::cerr << "[TaskManager] Redis not connected, cannot write result" + << std::endl; + return false; + } + + // 创建结果副本,用于添加警告和报警信号 + // 使用副本避免修改原始结果,保持数据完整性 + DetectionResult processed_result = result; + + // 添加警告和报警信号 + // 根据检测类型(flag)和阈值,计算每个测量值的警告/报警状态 + addWarningAlarmSignals(processed_result); + + // 写入Redis + // 将结果拆分为单独的Key写入Redis + std::map result_map = processed_result.toMap(); + bool success = true; + + for (const auto &pair : result_map) { + if (!redis_result_comm_->writeString(pair.first, pair.second)) { + std::cerr << "[TaskManager] Failed to write key: " << pair.first + << std::endl; + success = false; + } + } + + // 结果写入完成后,清空触发键,避免程序重启后被旧任务自动触发 + // 约定:flag 置 0,side/time 置空字符串 + bool clear_ok = true; + if (redis_task_comm_ && redis_task_comm_->isConnected()) { + clear_ok &= redis_task_comm_->writeString("vision_task_flag", "0"); + clear_ok &= redis_task_comm_->writeString("vision_task_side", ""); + clear_ok &= redis_task_comm_->writeString("vision_task_time", ""); + } else { + // 如果没有提供 task DB 的连接,则尝试用结果 DB 连接,但可能写不到正确DB + clear_ok &= redis_result_comm_->writeString("vision_task_flag", "0"); + clear_ok &= redis_result_comm_->writeString("vision_task_side", ""); + clear_ok &= redis_result_comm_->writeString("vision_task_time", ""); + } + + if (success) { + std::cout + << "[TaskManager] Detection result written to Redis (26 keys): type=" + << processed_result.result_type << std::endl; + } else { + std::cerr << "[TaskManager] Failed to write some detection results" + << std::endl; + } + + if (!clear_ok) { + std::cerr << "[TaskManager] Warning: failed to clear task trigger keys " + "(vision_task_flag/side/time)." 
+ << std::endl; + } + + return success; +} + +/** + * @brief 计算警告和报警信号(静态方法) + * + * 根据当前值和阈值,计算警告和报警状态 + * 阈值范围定义: + * - A < B < C < D + * - 正常范围:[B, C] + * - 警告范围:[A, B) 或 (C, D] + * - 报警范围:< A 或 > D + * + * @param value 当前测量值 + * @param threshold 阈值JSON字符串,格式:{"A":-5.0,"B":-3.0,"C":3.0,"D":5.0} + * @param warning_alarm [输出] + * 警告和报警信号JSON字符串,格式:{"warning":false,"alarm":false} + * + * @note 当前实现使用硬编码的阈值,后续应使用JSON库解析threshold参数 + * @note 报警时warning也设置为true(报警包含警告) + * @note 这是静态方法,可以在不创建TaskManager实例的情况下调用 + * + * @todo 使用JSON库(如nlohmann::json)解析threshold字符串 + */ +void TaskManager::calculateWarningAlarm(float value, + const std::string &threshold, + std::string &warning_alarm) { + // TODO: 使用JSON库解析threshold字符串 + // 当前使用简单的字符串解析方式 + // 假设threshold格式为: {"A":-5.0,"B":-3.0,"C":3.0,"D":5.0} + + // 临时实现:使用硬编码的阈值(实际应使用JSON库解析threshold参数) + // 后续实现示例: + // nlohmann::json j = nlohmann::json::parse(threshold); + // float A = j.value("A", -5.0f); + // float B = j.value("B", -3.0f); + // float C = j.value("C", 3.0f); + // float D = j.value("D", 5.0f); + float A = -5.0f, B = -3.0f, C = 3.0f, D = 5.0f; + + bool warning = false; // 警告标志 + bool alarm = false; // 报警标志 + + // 判断警告和报警 + // 阈值范围:A < B < C < D + // 正常范围:[B, C] - 无警告无报警 + // 警告范围:[A, B) 或 (C, D] - 有警告无报警 + // 报警范围:< A 或 > D - 有警告有报警 + + if (value < A || value > D) { + // 超出报警阈值范围 + alarm = true; // 设置报警标志 + warning = true; // 报警时也设置警告标志(报警包含警告) + } else if (value < B || value > C) { + // 超出正常范围但在报警阈值内(警告范围) + warning = true; // 只设置警告标志 + } + // else: 在正常范围内 [B, C],warning和alarm都保持false + + // 生成JSON字符串 + // 格式:{"warning":true/false,"alarm":true/false} + std::ostringstream oss; + oss << "{\"warning\":" << (warning ? "true" : "false") + << ",\"alarm\":" << (alarm ? 
"true" : "false") << "}"; + warning_alarm = oss.str(); +} + +/** + * @brief 为结果添加警告和报警信号 + * + * 根据检测类型(flag),为相应的测量值添加警告和报警信号 + * 支持的检测类型: + * - Flag 2: 托盘位置偏移检测(5个测量值) + * - Flag 3: 横梁变形检测(2个测量值) + * + * @param result [输入输出] 检测结果,会被修改(添加warning_alarm字段) + * + * @note 只有Flag 2和Flag 3需要计算警告/报警,其他flag的结果不处理 + * @note 每个测量值都有独立的阈值和警告/报警状态 + */ +void TaskManager::addWarningAlarmSignals(DetectionResult &result) { + // Flag 2: 托盘位置偏移检测 + // 包含5个测量值:左右偏移、前后偏移、左侧插孔变形、右侧插孔变形、旋转角度 + if (result.result_type == 2) { + // 左右偏移量 + if (!result.offset_lat_mm_threshold.empty()) { + calculateWarningAlarm(result.offset_lat_mm_value, + result.offset_lat_mm_threshold, + result.offset_lat_mm_warning_alarm); + } + + // 前后偏移量 + if (!result.offset_lon_mm_threshold.empty()) { + calculateWarningAlarm(result.offset_lon_mm_value, + result.offset_lon_mm_threshold, + result.offset_lon_mm_warning_alarm); + } + + // 左侧插孔变形 + if (!result.hole_def_mm_left_threshold.empty()) { + calculateWarningAlarm(result.hole_def_mm_left_value, + result.hole_def_mm_left_threshold, + result.hole_def_mm_left_warning_alarm); + } + + // 右侧插孔变形 + if (!result.hole_def_mm_right_threshold.empty()) { + calculateWarningAlarm(result.hole_def_mm_right_value, + result.hole_def_mm_right_threshold, + result.hole_def_mm_right_warning_alarm); + } + + // 旋转角度 + if (!result.rotation_angle_threshold.empty()) { + calculateWarningAlarm(result.rotation_angle_value, + result.rotation_angle_threshold, + result.rotation_angle_warning_alarm); + } + } + + // Flag 3: 横梁和立柱变形检测 + if (result.result_type == 3) { + // 横梁弯曲量 + if (!result.beam_def_mm_threshold.empty()) { + calculateWarningAlarm(result.beam_def_mm_value, + result.beam_def_mm_threshold, + result.beam_def_mm_warning_alarm); + } + + // 立柱弯曲量 + if (!result.rack_def_mm_threshold.empty()) { + calculateWarningAlarm(result.rack_def_mm_value, + result.rack_def_mm_threshold, + result.rack_def_mm_warning_alarm); + } + } +} + +/** + * @brief 视觉盘点循环执行函数 + * + * 专门处理 flag=4 的视觉盘点任务,实现连续抓拍和去重逻辑 + 
* 直到收到 flag=5 的任务(或系统停止)时退出循环 + */ +bool TaskManager::executeVisualInventoryLoop( + const RedisTaskData &task_data, std::shared_ptr detector, + DetectionResult &final_result) { + if (!detector) + return false; + + // 1. 相机准备 + std::string target_sn = "DA8789631"; + int mvs_count = DeviceManager::getInstance().get2DCameraCount(); + int found_index = -1; + + for (int i = 0; i < mvs_count; ++i) { + std::string sn = DeviceManager::getInstance().get2DCameraId(i); + if (sn.find(target_sn) != std::string::npos) { + found_index = i; + break; + } + } + + if (found_index < 0) { + std::cerr << "[TaskManager] Camera with SN " << target_sn + << " not found for Visual Inventory!" << std::endl; + return false; + } + + std::cout << "[TaskManager] Starting Visual Inventory Loop using Camera " + << found_index << std::endl; + + std::set seen_codes; + bool loop_running = true; + + while (loop_running && running_) { + // 2. 检查停止信号 (flag=5) + { + std::lock_guard lock(task_queue_mutex_); + if (!task_queue_.empty()) { + // 检查队列头部是否为停止信号 + if (task_queue_.front().flag == 5) { + task_queue_.pop(); // 消费停止信号 + std::cout << "[TaskManager] Visual Inventory Stopped by flag=5" + << std::endl; + loop_running = false; + break; + } + } + } + + DetectionResult result; + cv::Mat color_img, depth_img; // depth为空 + double fps = 0.0; + + // 3. 获取图像 + // 尝试获取图像,如果不成功则跳过本次检测 + bool image_acquired = DeviceManager::getInstance().get2DCameraImage( + found_index, color_img, fps); + + if (image_acquired && !color_img.empty()) { + // 4. 执行检测 + try { + if (detector->execute(depth_img, color_img, task_data.side, result)) { + // 5. 
结果处理与去重 + // 解析 result.result_barcodes (JSON格式) + std::string json = result.result_barcodes; + std::vector new_codes; + + // 简单的字符串查找解析 + size_t array_start = json.find('['); + size_t array_end = json.find(']'); + + if (array_start != std::string::npos && + array_end != std::string::npos && array_end > array_start) { + size_t pos = array_start + 1; + while (pos < array_end) { + size_t quote_start = json.find('"', pos); + if (quote_start == std::string::npos || quote_start >= array_end) + break; + + size_t quote_end = json.find('"', quote_start + 1); + if (quote_end == std::string::npos || quote_end >= array_end) + break; + + std::string code = + json.substr(quote_start + 1, quote_end - quote_start - 1); + + // 简单的反转义 (只是为了匹配) + if (seen_codes.find(code) == seen_codes.end()) { + seen_codes.insert(code); + new_codes.push_back(code); + } + + pos = quote_end + 1; + } + } + + // 如果有新识别的码,则上报 + if (!new_codes.empty()) { + std::cout << "[TaskManager] Detected " << new_codes.size() + << " NEW codes." << std::endl; + + // 重新构建JSON结果 + std::string new_json = "{\"" + task_data.side + "\":["; + int idx = 0; + for (const auto &code : seen_codes) { + if (idx > 0) + new_json += ","; + new_json += "\"" + code + "\""; + idx++; + } + new_json += "]}"; + + result.result_barcodes = new_json; + processResult(result); + } + } + } catch (const std::exception &e) { + std::cerr << "[TaskManager] Exception during inventory detection: " + << e.what() << std::endl; + } + } + + // 6. 
控制循环频率 + std::this_thread::sleep_for(std::chrono::milliseconds(200)); + } + + final_result.result_status = "success"; + return true; +} diff --git a/image_capture/src/task/task_manager.h b/image_capture/src/task/task_manager.h new file mode 100644 index 0000000..4f042fa --- /dev/null +++ b/image_capture/src/task/task_manager.h @@ -0,0 +1,148 @@ +#pragma once + +#include "../algorithm/core/detection_result.h" +#include "../redis/redis_communicator.h" +#include "../redis/task_data.h" +#include + +class DetectionBase; +#include +#include +#include +#include +#include +#include +#include +#include + + +/** + * @brief 任务管理器(合并了结果处理功能) + * + * 功能说明: + * - 接收Redis任务触发 + * - 根据flag分发到对应的检测任务 + * - 管理任务执行状态 + * - 协调检测算法执行 + * - 处理检测结果(格式化、计算警告/报警、写入Redis) + */ +class TaskManager { +public: + /** + * 任务执行状态 + */ + enum class TaskStatus { + IDLE, // 空闲 + RUNNING, // 执行中 + COMPLETED, // 已完成 + FAILED // 失败 + }; + + TaskManager(); + ~TaskManager(); + + /** + * 初始化任务管理器 + * @param redis_comm Redis通信对象(用于写入结果) + * @return 是否成功 + */ + bool + initialize(std::shared_ptr redis_result_comm = nullptr, + std::shared_ptr redis_task_comm = nullptr); + + /** + * 处理新任务 + * @param task_data 任务数据 + */ + void handleTask(const RedisTaskData &task_data); + + /** + * 获取当前任务状态 + */ + TaskStatus getCurrentTaskStatus() const; + + /** + * 获取最新检测结果 + * @param result [输出] 检测结果 + * @return 是否有结果 + */ + bool getLatestResult(DetectionResult &result); + + /** + * 停止当前任务 + */ + void stopCurrentTask(); + + /** + * 计算警告和报警信号(静态方法,供外部调用) + * @param value 当前值 + * @param threshold 阈值JSON字符串 {"A": -5.0, "B": -3.0, "C": 3.0, "D": 5.0} + * @param warning_alarm [输出] 警告和报警信号JSON字符串 {"warning": false, + * "alarm": false} + */ + static void calculateWarningAlarm(float value, const std::string &threshold, + std::string &warning_alarm); + +private: + /** + * 任务执行线程函数 + */ + void taskExecutionThreadFunc(); + + /** + * 执行检测任务 + * @param task_data 任务数据 + * @param result [输出] 检测结果 + * @return 是否成功 + */ + bool 
executeDetectionTask(const RedisTaskData &task_data, + DetectionResult &result); + + // 视觉盘点循环执行函数 + bool executeVisualInventoryLoop(const RedisTaskData &task_data, + std::shared_ptr detector, + DetectionResult &final_result); + + /** + * 获取指定类型的检测器 + * @param flag 任务类型(1~5) + * @return 检测器指针,如果不存在返回nullptr + */ + std::shared_ptr getDetector(int flag); + + /** + * 处理检测结果(格式化、计算警告/报警、写入Redis) + * @param result 检测结果 + * @return 是否成功处理 + */ + bool processResult(const DetectionResult &result); + + /** + * 为结果添加警告和报警信号 + * @param result 检测结果(会被修改) + */ + void addWarningAlarmSignals(DetectionResult &result); + + // 检测器映射表(flag -> detector) + std::map> detectors_; + + // 任务队列 + std::queue task_queue_; + std::mutex task_queue_mutex_; + std::condition_variable task_queue_cv_; // 条件变量,用于通知任务到达 + + // 当前任务状态 + std::atomic current_status_; + std::mutex result_mutex_; + DetectionResult latest_result_; + + // 任务执行线程 + std::atomic running_; + std::thread execution_thread_; + + // Redis通信对象(用于写入结果和清空触发键) + std::shared_ptr + redis_result_comm_; // 写结果(通常在输出DB) + std::shared_ptr + redis_task_comm_; // 清空触发键(通常在输入DB) +}; diff --git a/image_capture/src/tools/calibration_tool/README.md b/image_capture/src/tools/calibration_tool/README.md new file mode 100644 index 0000000..7db46e0 --- /dev/null +++ b/image_capture/src/tools/calibration_tool/README.md @@ -0,0 +1,36 @@ +# Calibration Tool (标定工具) + +## 简介 (Introduction) +本工具用于计算相机相对于特定平面(如地面或货架表面)的位姿(外参)。它通过读取深度图中的平面区域,拟合平面方程,计算出校正矩阵(Transformation Matrix)。 + +## 功能 (Features) +* **图像加载**:支持加载深度图(16-bit PNG/TIFF)和彩色图。 +* **ROI 选择**:在彩色图上交互式选择矩形区域(4个点)。 +* **自动映射**:将彩色图的 ROI 自动映射到深度图坐标系(支持手动缩放回退模式)。 +* **平面拟合**:使用 RANSAC 算法从点云中拟合最佳平面。 +* **结果保存**:将计算得到的变换矩阵保存为 JSON 文件,供主程序使用。 + +## 使用步骤 (Usage) +1. **加载参数**:点击 `Load Intrinsics`,选择由 `intrinsic_dumper` 生成的 `intrinsics_.json` 文件。 +2. **加载图像**:分别加载同一场景拍摄的 Color 图像和 Depth 图像。 +3. **选择区域**:在 Color 图像显示区域,依次点击 4 个点,围成一个矩形区域(目标平面)。 +4. 
**执行标定**:点击 `Execute Calibration`。 + * 工具会显示拟合的点数和平面方程。 + * 状态栏显示 `Calibration SUCCESS` 表示成功。 +5. **保存结果**:点击 `Save Result`。 + * 默认文件名为 `calibration_result_.json`。 + +## 输出格式 (Output) +JSON 文件包含: +* `camera_id`: 相机序列号 (SN)。 +* `transformation_matrix`: 4x4 变换矩阵(Row-major)。该矩阵表示从相机坐标系到世界坐标系(Reference Plane)的刚体变换 (Rotation + Translation)。 +* `roi_points_depth`: 深度图上的有效 ROI 区域顶点。 +* `calibration_time`: 标定时间。 + +### 术语解释 +* **Extrinsics (外参)**:在计算机视觉中,通常指相机相对于世界坐标系(或另一个相机)的旋转和平移关系。本工具生成的 `transformation_matrix` 即由于相机相对于地面/货架的位姿,因此在广义上属于“外参”。 + + +## 注意事项 (Notes) +* 如果相机缺少 RGB-Depth 外参,工具会自动使用 "Manual Scaling" 模式进行近似映射。 +* 请确保选取区域平整且深度数据有效(避免全黑区域)。 diff --git a/image_capture/src/tools/calibration_tool/calibration_widget.cpp b/image_capture/src/tools/calibration_tool/calibration_widget.cpp new file mode 100644 index 0000000..ef28684 --- /dev/null +++ b/image_capture/src/tools/calibration_tool/calibration_widget.cpp @@ -0,0 +1,731 @@ +#ifndef NOMINMAX +#define NOMINMAX +#endif + +#include "calibration_widget.h" +#include // Include Mapper +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // New + +// Placeholder for now +CalibrationWidget::CalibrationWidget(QWidget *parent) : QWidget(parent) { + setupUi(); + + // Init SDK for CoordinateMapper math functions + TY_STATUS status = TYInitLib(); + if (status != TY_STATUS_OK) { + QMessageBox::warning(this, "Error", "Failed to initialize TY SDK. 
Calibration might crash."); + } + + has_calibration_result_ = false; + is_selecting_roi_ = true; + has_calib_params_ = false; + + std::memset(calibration_matrix_, 0, sizeof(calibration_matrix_)); + + // Install event filter for mouse interaction + label_color_display_->installEventFilter(this); +} + +CalibrationWidget::~CalibrationWidget() { + TYDeinitLib(); +} + +bool CalibrationWidget::eventFilter(QObject *obj, QEvent *event) { + if (obj == label_color_display_ && event->type() == QEvent::MouseButtonPress) { + QMouseEvent *mouseEvent = static_cast(event); + if (mouseEvent->button() == Qt::LeftButton && is_selecting_roi_) { + // Coordinate mapping: Label -> Image + if (mat_color_raw_.empty()) return false; + + // Calculate scale + double scale_x = (double)label_color_display_->width() / mat_color_raw_.cols; + double scale_y = (double)label_color_display_->height() / mat_color_raw_.rows; + double scale = std::min(scale_x, scale_y); + + int offset_x = (label_color_display_->width() - mat_color_raw_.cols * scale) / 2; + int offset_y = (label_color_display_->height() - mat_color_raw_.rows * scale) / 2; + + int img_x = (mouseEvent->pos().x() - offset_x) / scale; + int img_y = (mouseEvent->pos().y() - offset_y) / scale; + + // Append point + if (img_x >= 0 && img_x < mat_color_raw_.cols && img_y >= 0 && img_y < mat_color_raw_.rows) { + if (roi_points_color_.size() >= 4) roi_points_color_.clear(); // Reset if full + roi_points_color_.push_back(cv::Point(img_x, img_y)); + updateDisplay(); + log("Added ROI point: (" + QString::number(img_x) + ", " + QString::number(img_y) + ")"); + } + } + return true; + } + return QWidget::eventFilter(obj, event); +} + + +void CalibrationWidget::setupUi() { + QVBoxLayout *main_layout = new QVBoxLayout(this); + + // Top Control Panel + QVBoxLayout *top_panel_layout = new QVBoxLayout(); // Container for 2 rows + + // Row 1: Source / Input + QHBoxLayout *input_layout = new QHBoxLayout(); + + // Removed Camera Controls + + btn_load_color_ = 
new QPushButton("加载彩色图", this); + btn_load_depth_ = new QPushButton("加载深度图", this); + btn_load_calib_ = new QPushButton("加载标定参数", this); + + input_layout->addWidget(btn_load_color_); + input_layout->addWidget(btn_load_depth_); + input_layout->addWidget(btn_load_calib_); + input_layout->addStretch(); + + // Row 2: Actions + QHBoxLayout *action_layout = new QHBoxLayout(); + btn_run_calibration_ = new QPushButton("执行标定", this); + btn_view_3d_ = new QPushButton("查看3D", this); + btn_save_result_ = new QPushButton("保存结果", this); + + action_layout->addWidget(btn_run_calibration_); + action_layout->addWidget(btn_view_3d_); + action_layout->addWidget(btn_save_result_); + action_layout->addStretch(); // Align Left + + top_panel_layout->addLayout(input_layout); + top_panel_layout->addLayout(action_layout); + + main_layout->addLayout(top_panel_layout); + + // Image Display Area + QHBoxLayout *display_layout = new QHBoxLayout(); + label_color_display_ = new QLabel("Color Image (Click to select ROI)", this); + label_color_display_->setMinimumSize(640, 480); + label_color_display_->setAlignment(Qt::AlignCenter); + label_color_display_->setStyleSheet("border: 1px solid gray;"); + + label_depth_display_ = new QLabel("Depth Image", this); + label_depth_display_->setMinimumSize(640, 480); + label_depth_display_->setAlignment(Qt::AlignCenter); + label_depth_display_->setStyleSheet("border: 1px solid gray;"); + + display_layout->addWidget(label_color_display_); + display_layout->addWidget(label_depth_display_); + + main_layout->addLayout(display_layout); + + // Log Area + text_log_ = new QTextEdit(this); + text_log_->setMaximumHeight(150); + text_log_->setReadOnly(true); + main_layout->addWidget(text_log_); + + // Connections + connect(btn_load_depth_, &QPushButton::clicked, this, &CalibrationWidget::loadDepthImage); + connect(btn_load_color_, &QPushButton::clicked, this, &CalibrationWidget::loadColorImage); + // connect(btn_capture_, &QPushButton::clicked, this, 
&CalibrationWidget::captureImage); // Removed + // connect(btn_refresh_cameras_, &QPushButton::clicked, this, &CalibrationWidget::refreshCameraList); // Removed + connect(btn_load_calib_, &QPushButton::clicked, this, &CalibrationWidget::loadCalibParams); + connect(btn_run_calibration_, &QPushButton::clicked, this, &CalibrationWidget::runCalibration); + connect(btn_save_result_, &QPushButton::clicked, this, &CalibrationWidget::saveCalibrationResult); + connect(btn_view_3d_, &QPushButton::clicked, this, &CalibrationWidget::view3DCloud); + + log("Ready. Please load images."); +} + +void CalibrationWidget::log(const QString& msg) { + text_log_->append(msg); +} + +// Helper for locking UI +void CalibrationWidget::setUiLocked(bool locked) { + bool enabled = !locked; + btn_load_depth_->setEnabled(enabled); + btn_load_color_->setEnabled(enabled); + // btn_capture_->setEnabled(enabled); + btn_load_calib_->setEnabled(enabled); + btn_run_calibration_->setEnabled(enabled); + btn_view_3d_->setEnabled(enabled); + btn_save_result_->setEnabled(enabled); + // btn_refresh_cameras_->setEnabled(enabled); + // combo_camera_list_->setEnabled(enabled); +} + +void CalibrationWidget::loadDepthImage() { + QString fileName = QFileDialog::getOpenFileName(this, "Open Depth Image", "", "Images (*.png *.tif *.tiff)"); + if (fileName.isEmpty()) return; + + setUiLocked(true); + log("Loading depth image..."); + + std::thread([this, fileName]() { + cv::Mat loaded = cv::imread(fileName.toStdString(), cv::IMREAD_UNCHANGED); + + QMetaObject::invokeMethod(this, [this, fileName, loaded]() { + if (loaded.empty()) { + log("Error: Failed to load depth image."); + } else { + mat_depth_raw_ = loaded; + log("Loaded depth image: " + fileName + " (" + QString::number(mat_depth_raw_.cols) + "x" + QString::number(mat_depth_raw_.rows) + ")"); + updateDisplay(); + } + setUiLocked(false); + }, Qt::QueuedConnection); + }).detach(); +} + +void CalibrationWidget::loadColorImage() { + QString fileName = 
QFileDialog::getOpenFileName(this, "Open Color Image", "", "Images (*.png *.jpg *.jpeg *.bmp)"); + if (fileName.isEmpty()) return; + + setUiLocked(true); + log("Loading color image..."); + + std::thread([this, fileName]() { + cv::Mat loaded = cv::imread(fileName.toStdString()); + + QMetaObject::invokeMethod(this, [this, fileName, loaded]() { + if (loaded.empty()) { + log("Error: Failed to load color image."); + } else { + mat_color_raw_ = loaded; + log("Loaded color image: " + fileName); + updateDisplay(); + } + setUiLocked(false); + }, Qt::QueuedConnection); + }).detach(); +} + +void CalibrationWidget::updateDisplay() { + if (!mat_color_raw_.empty()) { + // Draw ROI if exists + cv::Mat display = mat_color_raw_.clone(); + if (roi_points_color_.size() > 0) { + for (size_t i = 0; i < roi_points_color_.size(); ++i) { + cv::circle(display, roi_points_color_[i], 5, cv::Scalar(0, 0, 255), -1); + if (i > 0) { + cv::line(display, roi_points_color_[i-1], roi_points_color_[i], cv::Scalar(0, 255, 0), 2); + } + } + if (roi_points_color_.size() == 4) { + cv::line(display, roi_points_color_[3], roi_points_color_[0], cv::Scalar(0, 255, 0), 2); + } + } + + QImage img = cvMatToQImage(display); + label_color_display_->setPixmap(QPixmap::fromImage(img).scaled(label_color_display_->size(), Qt::KeepAspectRatio)); + } + + if (!mat_depth_raw_.empty()) { + // Normalize for display + cv::Mat display; + cv::normalize(mat_depth_raw_, display, 0, 255, cv::NORM_MINMAX, CV_8U); + cv::cvtColor(display, display, cv::COLOR_GRAY2BGR); // Fake color + + QImage img = cvMatToQImage(display); + label_depth_display_->setPixmap(QPixmap::fromImage(img).scaled(label_depth_display_->size(), Qt::KeepAspectRatio)); + } +} + +QImage CalibrationWidget::cvMatToQImage(const cv::Mat& mat) { + if (mat.type() == CV_8UC3) { + // BGR -> RGB + cv::Mat rgb; + cv::cvtColor(mat, rgb, cv::COLOR_BGR2RGB); + QImage img((const uchar*)rgb.data, rgb.cols, rgb.rows, rgb.step, QImage::Format_RGB888); + return img.copy(); + } else 
if (mat.type() == CV_8UC1) { + QImage img((const uchar*)mat.data, mat.cols, mat.rows, mat.step, QImage::Format_Grayscale8); + return img.copy(); + } + return QImage(); +} + + +// Helper struct for camera info - REMOVED +// refreshCameraList - REMOVED +// captureImage - REMOVED + +void CalibrationWidget::loadCalibParams() { + QString fileName = QFileDialog::getOpenFileName(this, "Load Intrinsics JSON", "", "JSON (*.json)"); + if (fileName.isEmpty()) return; + + QFile file(fileName); + if (!file.open(QIODevice::ReadOnly)) { + log("Error: Could not open file: " + fileName); + return; + } + + QByteArray data = file.readAll(); + QJsonDocument doc = QJsonDocument::fromJson(data); + if (doc.isNull()) { + log("Error: Invalid JSON format."); + return; + } + + QJsonObject root = doc.object(); + + auto parseCalib = [](const QJsonObject& obj, TY_CAMERA_CALIB_INFO& info) { + if (obj.contains("intrinsic")) { + QJsonArray arr = obj["intrinsic"].toArray(); + for(int i=0; i<9 && i= 2) { + sn = parts.last(); // Assume SN is the last part + } + } + current_camera_sn_ = sn; + + has_calib_params_ = true; + + // Log loaded values for verification + log(QString("Loaded Calibration Parameters for Camera SN: %1").arg(current_camera_sn_)); + auto logIntr = [&](const char* name, const TY_CAMERA_CALIB_INFO& info) { + log(QString("%1 Intrinsic: fx=%2 fy=%3 cx=%4 cy=%5").arg(name) + .arg(info.intrinsic.data[0]).arg(info.intrinsic.data[4]) + .arg(info.intrinsic.data[2]).arg(info.intrinsic.data[5])); + log(QString("%1 Distortion: k1=%2 k2=%3 p1=%4 p2=%5 k3=%6").arg(name) + .arg(info.distortion.data[0]).arg(info.distortion.data[1]) + .arg(info.distortion.data[2]).arg(info.distortion.data[3]) + .arg(info.distortion.data[4])); + }; + logIntr("Depth", depth_calib_); + logIntr("Color", color_calib_); + + log("Loaded Calibration Parameters from " + fileName); +} + + + +void CalibrationWidget::view3DCloud() { + if (!has_calibration_result_ || roi_points_depth_.empty()) { + log("Error: No calibration 
result or ROI points. Run calibration first."); + return; + } + + log("Generating 3D Visualization..."); + QApplication::processEvents(); + + // 1. Reconstruct Point Cloud from ROI + auto pcd_raw = std::make_shared(); + auto pcd_corrected = std::make_shared(); + + float fx = depth_calib_.intrinsic.data[0]; + float fy = depth_calib_.intrinsic.data[4]; + float cx = depth_calib_.intrinsic.data[2]; + float cy = depth_calib_.intrinsic.data[5]; + + // Rebuild cloud loop (same as runCalibration) + cv::Rect bounding_box = cv::boundingRect(roi_points_depth_); + int start_y = std::max(0, bounding_box.y); + int end_y = std::min(mat_depth_raw_.rows, bounding_box.y + bounding_box.height); + int start_x = std::max(0, bounding_box.x); + int end_x = std::min(mat_depth_raw_.cols, bounding_box.x + bounding_box.width); + + // Prepare Transform Matrix + Eigen::Matrix4d T_mat = Eigen::Matrix4d::Identity(); + for(int i=0; i<4; ++i) + for(int j=0; j<4; ++j) + T_mat(i,j) = (double)calibration_matrix_[i*4+j]; + + std::vector z_values; + + // 3D Visualization: Raw (RGB) vs Corrected (Heatmap) + double scale_x = (double)mat_depth_raw_.cols / (double)mat_color_raw_.cols; + double scale_y = (double)mat_depth_raw_.rows / (double)mat_color_raw_.rows; + + for (int y = start_y; y < end_y; ++y) { + for (int x = start_x; x < end_x; ++x) { + if (cv::pointPolygonTest(roi_points_depth_, cv::Point2f(x, y), false) < 0) continue; + + uint16_t d = mat_depth_raw_.at(y, x); + if (d == 0) continue; + + double z_mm = (double)d; + double x_mm = (x - cx) * z_mm / fx; + double y_mm = (y - cy) * z_mm / fy; + + if (std::isnan(x_mm) || std::isnan(y_mm) || std::isnan(z_mm)) continue; + + Eigen::Vector3d pt_raw(x_mm, y_mm, z_mm); + + // Add to Raw Cloud with RGB Colors + pcd_raw->points_.push_back(pt_raw); + + // Map depth pixel to color pixel + int col_x = std::min(std::max(0, (int)(x / scale_x)), mat_color_raw_.cols - 1); + int col_y = std::min(std::max(0, (int)(y / scale_y)), mat_color_raw_.rows - 1); + cv::Vec3b 
bgr = mat_color_raw_.at(col_y, col_x); + pcd_raw->colors_.push_back(Eigen::Vector3d(bgr[2]/255.0, bgr[1]/255.0, bgr[0]/255.0)); // BGR->RGB + + // Transform and Add to Corrected Cloud + Eigen::Vector4d pt_h(x_mm, y_mm, z_mm, 1.0); + Eigen::Vector4d pt_trans = T_mat * pt_h; + + // Shift corrected cloud to side for comparison (e.g., +1000mm in X) + pcd_corrected->points_.push_back(pt_trans.head<3>() + Eigen::Vector3d(1000.0, 0, 0)); + + z_values.push_back(pt_trans.z()); + } + } + + if (z_values.empty()) { + log("Error: No valid points in ROI."); + return; + } + + // 2. Compute Statistics & Heatmap Coloring + double sum_z = 0.0; + for(double z : z_values) sum_z += z; + double mean_z = sum_z / z_values.size(); + + double sq_sum = 0.0; + for(double z : z_values) sq_sum += (z - mean_z) * (z - mean_z); + double std_z = std::sqrt(sq_sum / z_values.size()); + + // Color Corrected cloud based on deviation from Mean Z + for (double z : z_values) { + double diff = std::abs(z - mean_z); + // Simple Heatmap: Green (0 error) -> Red (error > 2mm) + double ratio = std::min(1.0, diff / 2.0); + pcd_corrected->colors_.push_back(Eigen::Vector3d(ratio, 1.0 - ratio, 0.0)); + } + + log(QString("=== Validation Statistics ===")); + log(QString("Point Count: %1").arg(z_values.size())); + log(QString("Mean Z (Corrected): %1 mm (Target: ~0)").arg(mean_z, 0, 'f', 4)); + log(QString("StdDev Z (Flatness): %1 mm").arg(std_z, 0, 'f', 4)); + + // Quality Assessment (Focus on Flatness only, since Z is distance) + if (std_z < 2.0) { + log("Result: EXCELLENT. Plane is flat."); + } else if (std_z < 5.0) { + log("Result: GOOD. Minor noise."); + } else { + log("Result: WARNING. Plane may be curved or noisy."); + + } + + // 3. 
Visualize + log("Opening 3D Viewer..."); + open3d::visualization::DrawGeometries( + {pcd_raw, pcd_corrected}, + "Calibration Verification (Red: Raw, Green: Corrected)", + 1280, 720 + ); + log("Viewer closed."); +} + +// Synchronous implementation to avoid threading crashes +void CalibrationWidget::runCalibration() { + log("=== runCalibration() CALLED ==="); + + if (roi_points_color_.size() < 4) { + log("Error: Please select 4 points for ROI on Color Image."); + return; + } + if (!has_calib_params_) { + log("Error: Calibration parameters not loaded."); + return; + } + if (mat_depth_raw_.empty()) { + log("Error: Depth image not loaded."); + return; + } + + setUiLocked(true); + log("Starting Calibration (Synchronous)..."); + + // Force UI update + QApplication::processEvents(); + + try { + // Validation + if (mat_depth_raw_.type() != CV_16UC1) { + throw std::runtime_error("Depth image must be 16-bit (CV_16UC1)"); + } + if (depth_calib_.intrinsic.data[0] < 1e-6 || depth_calib_.intrinsic.data[4] < 1e-6) { + throw std::runtime_error("Invalid depth intrinsics (fx/fy is zero)"); + } + + // 1. Map Color ROI to Depth ROI (Manual Fallback) + log("Mapping ROI (Manual Scaling)..."); + QApplication::processEvents(); + + std::vector res_depth_roi; + + if (mat_color_raw_.empty() || mat_depth_raw_.empty()) { + throw std::runtime_error("Images empty during mapping"); + } + + double scale_x = (double)mat_depth_raw_.cols / (double)mat_color_raw_.cols; + double scale_y = (double)mat_depth_raw_.rows / (double)mat_color_raw_.rows; + + log(QString("Mapping Scale: X=%1, Y=%2").arg(scale_x).arg(scale_y)); + + for (const auto& p : roi_points_color_) { + int cx = (int)(p.x * scale_x); + int cy = (int)(p.y * scale_y); + // Clamp + cx = std::max(0, std::min(cx, mat_depth_raw_.cols - 1)); + cy = std::max(0, std::min(cy, mat_depth_raw_.rows - 1)); + res_depth_roi.push_back(cv::Point(cx, cy)); + } + + // 2. 
Build Point Cloud from ROI + log("Building Point Cloud..."); + QApplication::processEvents(); + + auto pcd = std::make_shared(); + cv::Rect bounding_box = cv::boundingRect(res_depth_roi); + + float fx = depth_calib_.intrinsic.data[0]; + float fy = depth_calib_.intrinsic.data[4]; + float cx = depth_calib_.intrinsic.data[2]; + float cy = depth_calib_.intrinsic.data[5]; + + int valid_points = 0; + int start_y = std::max(0, bounding_box.y); + int end_y = std::min(mat_depth_raw_.rows, bounding_box.y + bounding_box.height); + int start_x = std::max(0, bounding_box.x); + int end_x = std::min(mat_depth_raw_.cols, bounding_box.x + bounding_box.width); + + for (int y = start_y; y < end_y; ++y) { + for (int x = start_x; x < end_x; ++x) { + if (cv::pointPolygonTest(res_depth_roi, cv::Point2f(x, y), false) < 0) continue; + + uint16_t d = mat_depth_raw_.at(y, x); + if (d == 0) continue; + + double z_mm = (double)d; + double x_mm = (x - cx) * z_mm / fx; + double y_mm = (y - cy) * z_mm / fy; + + if (std::isnan(x_mm) || std::isnan(y_mm) || std::isnan(z_mm)) continue; + + pcd->points_.emplace_back(Eigen::Vector3d(x_mm, y_mm, z_mm)); + valid_points++; + } + } + + log(QString("Valid Points: %1").arg(valid_points)); + QApplication::processEvents(); + + if (valid_points < 100) { + throw std::runtime_error("Too few valid points (<100) in selected ROI"); + } + + // 3. RANSAC Plane Fitting + log("Fitting Plane (RANSAC)..."); + QApplication::processEvents(); + + std::vector inliers; + Eigen::Vector4d plane_model; + + // Restore RANSAC + std::tie(plane_model, inliers) = pcd->SegmentPlane(2.0, 3, 1000); + + log(QString("RANSAC Inliers: %1").arg(inliers.size())); + if (inliers.size() < 10) { + throw std::runtime_error("RANSAC failed to find a valid plane"); + } + + + // 4. 
Compute Rotation Matrix + double A = plane_model[0], B = plane_model[1], C = plane_model[2], D = plane_model[3]; + log(QString("Plane Equation: %1x + %2y + %3z + %4 = 0") + .arg(A).arg(B).arg(C).arg(D)); + + Eigen::Vector3d normal(A, B, C); + normal.normalize(); + Eigen::Vector3d target(0, 0, 1); // Z-axis + + Eigen::Matrix4d T_mat = Eigen::Matrix4d::Identity(); + + if (std::abs(normal.dot(target)) < 0.999) { + Eigen::Matrix3d R = Eigen::Quaterniond::FromTwoVectors(normal, target).toRotationMatrix(); + T_mat.block<3,3>(0,0) = R; + } + + // Z-offset removed as per user request. + // The transformation will align the plane normal to Z-axis but keep the original distance. + log("Skipping Z offset adjustment (User Requested). Plane remains at original distance."); + + // 5. Update Result + roi_points_depth_ = res_depth_roi; + for(int i=0; i<4; ++i) + for(int j=0; j<4; ++j) + calibration_matrix_[i*4+j] = (float)T_mat(i,j); + + has_calibration_result_ = true; + log("Calibration SUCCESS!"); + QMessageBox::information(this, "Success", "Calibration completed successfully."); + + } catch (const std::exception& e) { + log(QString("Calibration FAILED: %1").arg(e.what())); + QMessageBox::critical(this, "Calibration Failed", e.what()); + } catch (...) 
{ + log("Calibration FAILED: Unknown error"); + QMessageBox::critical(this, "Calibration Failed", "Unknown error occurred."); + } + + setUiLocked(false); +} + + +bool CalibrationWidget::mapColorRoiToDepth(const std::vector& color_roi, std::vector& depth_roi) { + if (color_roi.empty()) return false; + depth_roi.clear(); + + // Prepare input for SDK + std::vector src_pixels(color_roi.size()); + for (size_t i = 0; i < color_roi.size(); ++i) { + src_pixels[i].x = color_roi[i].x; + src_pixels[i].y = color_roi[i].y; + // BGR values are not strictly needed for coordinate mapping but struct requires them + src_pixels[i].bgr_ch1 = 0; + src_pixels[i].bgr_ch2 = 0; + src_pixels[i].bgr_ch3 = 0; + } + + std::vector dst_pixels(color_roi.size()); + + // Call SDK Mapping + // Note: We need the raw depth buffer. Since mat_depth_raw_ is 16UC1, pointer cast is safe. + TY_STATUS status = TYMapRGBPixelsToDepthCoordinate( + &depth_calib_, + mat_depth_raw_.cols, mat_depth_raw_.rows, (const uint16_t*)mat_depth_raw_.data, + &color_calib_, + mat_color_raw_.cols, mat_color_raw_.rows, + src_pixels.data(), (uint32_t)src_pixels.size(), + 100, 10000, // min, max dist (mm) + dst_pixels.data(), + 1.0f // scale + ); + + if (status != TY_STATUS_OK) { + log("TYMapRGBPixelsToDepthCoordinate failed: " + QString::number(status)); + return false; + } + + // Extract result + for (const auto& p : dst_pixels) { + if (p.x >= 0 && p.y >= 0) { + depth_roi.push_back(cv::Point(p.x, p.y)); + } else { + // If point is invalid (e.g. no depth), use fallback or interpolate? + // For corners, this is critical. + log("Warning: Invalid depth mapping for point (" + QString::number(p.x) + "," + QString::number(p.y) + ")"); + // Fallback to closest valid or original (scaled) logic + depth_roi.push_back(cv::Point(p.x, p.y)); + } + } + + return true; +} + +void CalibrationWidget::saveCalibrationResult() { + if (!has_calibration_result_) { + log("Error: No calibration result to save. 
Run calibration first."); + return; + } + + // Default filename with SN + QString defaultName = "calibration_result_"; + if (!current_camera_sn_.isEmpty()) { + defaultName += current_camera_sn_; + } else { + defaultName += "unknown"; + } + defaultName += ".json"; + + QString fileName = QFileDialog::getSaveFileName(this, "Save Calibration Result", defaultName, "JSON (*.json)"); + if (fileName.isEmpty()) return; + + QJsonObject root; + + // Save Camera ID (SN) + if (!current_camera_sn_.isEmpty()) { + root["camera_id"] = current_camera_sn_; + } else { + root["camera_id"] = "unknown"; + } + + // Save Matrix (Row-major 4x4) + QJsonArray matArr; + for(int i=0; i<16; ++i) { + matArr.append(calibration_matrix_[i]); + } + root["transformation_matrix"] = matArr; + + // Save ROI Points (Depth) + QJsonArray roiArr; + for(const auto& p : roi_points_depth_) { + QJsonObject pt; + pt["x"] = p.x; + pt["y"] = p.y; + roiArr.append(pt); + } + root["roi_points_depth"] = roiArr; + + // Save Timestamp + root["calibration_time"] = QDateTime::currentDateTime().toString(Qt::ISODate); + + QJsonDocument doc(root); + QFile file(fileName); + if (file.open(QIODevice::WriteOnly)) { + file.write(doc.toJson()); + log("Calibration saved to: " + fileName); + QMessageBox::information(this, "Saved", "Calibration result saved successfully."); + } else { + log("Error: Could not write to file: " + fileName); + QMessageBox::critical(this, "Error", "Could not save file."); + } +} diff --git a/image_capture/src/tools/calibration_tool/calibration_widget.h b/image_capture/src/tools/calibration_tool/calibration_widget.h new file mode 100644 index 0000000..a24c189 --- /dev/null +++ b/image_capture/src/tools/calibration_tool/calibration_widget.h @@ -0,0 +1,85 @@ +#ifndef NOMINMAX +#define NOMINMAX +#endif + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include // Ensure this path is in include dirs + +class CalibrationWidget : public 
QWidget { + Q_OBJECT + +public: + explicit CalibrationWidget(QWidget *parent = nullptr); + ~CalibrationWidget(); + +protected: + bool eventFilter(QObject *obj, QEvent *event) override; + +private slots: + void loadDepthImage(); + void loadColorImage(); + // Capture slots removed + void loadCalibParams(); + void runCalibration(); + void saveCalibrationResult(); + void view3DCloud(); + +private: + void setupUi(); + void updateDisplay(); + QImage cvMatToQImage(const cv::Mat& mat); + + // Internal helpers + void log(const QString& msg); + void setUiLocked(bool locked); // New + // Convert ROI and run logic + bool mapColorRoiToDepth(const std::vector& color_roi, std::vector& depth_roi); + + // UI Controls + QLabel *label_color_display_; + QLabel *label_depth_display_; + QTextEdit *text_log_; + + QPushButton *btn_load_depth_; + QPushButton *btn_load_color_; + + // Camera controls removed + QPushButton *btn_load_calib_; + QPushButton *btn_run_calibration_; + QPushButton *btn_save_result_; + QPushButton *btn_view_3d_; + + // Data + + cv::Mat mat_depth_raw_; // 16UC1 + cv::Mat mat_color_raw_; // BGR + + // Calibration parameters for the camera (Intrinsics + Extrinsics between RGB and Depth) + TY_CAMERA_CALIB_INFO depth_calib_; + TY_CAMERA_CALIB_INFO color_calib_; + bool has_calib_params_; + + // ROI in Color Image + std::vector roi_points_color_; + std::vector roi_points_depth_; // Mapped ROI + bool is_selecting_roi_; + + // Calibration Result + bool has_calibration_result_; + // 4x4 matrix stored in vector or struct + float calibration_matrix_[16]; + + QString current_camera_sn_; +}; diff --git a/image_capture/src/tools/calibration_tool/calibration_widget_BACKUP.cpp b/image_capture/src/tools/calibration_tool/calibration_widget_BACKUP.cpp new file mode 100644 index 0000000..f0a3606 --- /dev/null +++ b/image_capture/src/tools/calibration_tool/calibration_widget_BACKUP.cpp @@ -0,0 +1 @@ +// This is a backup before attempting major surgery diff --git
a/image_capture/src/tools/calibration_tool/main.cpp b/image_capture/src/tools/calibration_tool/main.cpp new file mode 100644 index 0000000..e2ca902 --- /dev/null +++ b/image_capture/src/tools/calibration_tool/main.cpp @@ -0,0 +1,13 @@ +#include +#include "calibration_widget.h" + +int main(int argc, char *argv[]) { + QApplication app(argc, argv); + + CalibrationWidget widget; + widget.setWindowTitle("Beam/Rack Deflection Calibration Tool"); + widget.resize(1200, 800); + widget.show(); + + return app.exec(); +} diff --git a/image_capture/src/tools/generate_reference/main.cpp b/image_capture/src/tools/generate_reference/main.cpp new file mode 100644 index 0000000..519279c --- /dev/null +++ b/image_capture/src/tools/generate_reference/main.cpp @@ -0,0 +1,280 @@ + +#include +#include +#include + + +#include +#include +#include +#include +#include +#include + + +#include +#include +#include +#include + + +// Algorithm +#include "../../algorithm/detections/pallet_offset/pallet_offset_detection.h" + +// State +std::vector g_roi_points; +cv::Mat g_depth, g_color, g_display_img; +bool g_trigger_detect = false; +std::string g_win_name = "Offline Reference Generator"; + +void onMouse(int event, int x, int y, int flags, void *userdata) { + if (event != cv::EVENT_LBUTTONDOWN) + return; + + // Add point + g_roi_points.push_back(cv::Point(x, y)); + std::cout << "[Tool] Point " << g_roi_points.size() << ": (" << x << "," << y + << ")" << std::endl; + + if (g_roi_points.size() == 4) { + std::cout << "[Tool] ROI Complete. Triggering Detection." << std::endl; + g_trigger_detect = true; + } +} + +int main(int argc, char *argv[]) { + QApplication app(argc, argv); + + std::cout << "Select a Depth Image (16-bit PNG/TIFF)..." << std::endl; + + QString fileName = QFileDialog::getOpenFileName( + nullptr, "Open Depth Image", "", "Images (*.png *.tif *.tiff *.bmp)"); + + if (fileName.isEmpty()) { + std::cerr << "No file selected." 
<< std::endl; + return -1; + } + + std::string depth_path = fileName.toStdString(); + g_depth = cv::imread(depth_path, cv::IMREAD_UNCHANGED); + + if (g_depth.empty()) { + std::cerr << "Failed to load image: " << depth_path << std::endl; + return -1; + } + + std::cout << "Loaded Image: " << g_depth.cols << "x" << g_depth.rows + << " Type=" << g_depth.type() << std::endl; + + if (g_depth.type() != CV_16UC1) { + std::cerr << "[Error] Input image is not a 16-bit single-channel depth map " + "(CV_16UC1)." + << std::endl; + std::cerr << " Current type=" << g_depth.type() + << " (likely 8-bit 3-channel if 16)." << std::endl; + std::cerr << " Please select a valid raw depth image (unit: mm)." + << std::endl; + // Proceeding might be dangerous for the algorithm, but we can at least show + // it. For the tool to work, we really need CV_16U. Let's create a dummy + // 16-bit image if possible or just exit? Better to let the user know and + // maybe show the image so they see what they loaded. But the algorithm call + // later will likely fail or give bad results. + } + + // Optional: Load matching color + // ... + + cv::normalize(g_depth, g_display_img, 0, 255, cv::NORM_MINMAX, CV_8U); + if (g_display_img.channels() == 1) { + cv::cvtColor(g_display_img, g_display_img, cv::COLOR_GRAY2BGR); + } else { + // If already 3 channels (e.g. user loaded a color image by mistake), ensure + // it is 8-bit BGR for display normalize already made it CV_8U. No + // conversion needed if it's already BGR. 
+ } + + cv::namedWindow(g_win_name, cv::WINDOW_AUTOSIZE); + cv::setMouseCallback(g_win_name, onMouse); + + std::cout << "\n=========================================" << std::endl; + std::cout << " Controls:" << std::endl; + std::cout << " [Click on Image] : Select ROI (4 pts)" << std::endl; + std::cout << " [R] : Reset ROI" << std::endl; + std::cout << " [ESC] : Exit" << std::endl; + std::cout << "=========================================\n" << std::endl; + + bool running = true; + while (running) { + cv::Mat show = g_display_img.clone(); + + // Draw ROI + for (size_t i = 0; i < g_roi_points.size(); ++i) { + cv::circle(show, g_roi_points[i], 4, cv::Scalar(0, 0, 255), -1); + if (i > 0) + cv::line(show, g_roi_points[i - 1], g_roi_points[i], + cv::Scalar(0, 255, 0), 2); + } + if (g_roi_points.size() == 4) { + cv::line(show, g_roi_points[3], g_roi_points[0], cv::Scalar(0, 255, 0), + 2); + } + + if (g_roi_points.empty()) { + cv::putText(show, "Click 4 points to select ROI", cv::Point(20, 30), + cv::FONT_HERSHEY_SIMPLEX, 0.7, cv::Scalar(0, 255, 0), 2); + } + + cv::imshow(g_win_name, show); + + if (g_trigger_detect) { + g_trigger_detect = false; + + // ------------------------------------------------- + // Load Calibration (Auto-search Multi-path) + // ------------------------------------------------- + cv::Mat calib_mat; + QStringList search_dirs; + search_dirs << QCoreApplication::applicationDirPath(); + search_dirs << QDir::currentPath(); + search_dirs << QDir::currentPath() + "/../"; // Parent + search_dirs << "D:/Git/stereo_warehouse_inspection/image_capture/build/" + "bin/Debug"; // Hard fallback + + bool found_calib = false; + + for (const QString &dirPath : search_dirs) { + QDir dir(dirPath); + QStringList filters; + filters << "calibration_result_*.json"; + dir.setNameFilters(filters); + QFileInfoList list = + dir.entryInfoList(QDir::Files, QDir::Time); // Newest first + + if (!list.isEmpty()) { + QString calibPath = list.first().absoluteFilePath(); + std::cout 
<< "[Tool] Found calibration file: " + << calibPath.toStdString() << " (Newest)" << std::endl; + + QFile f(calibPath); + if (f.open(QIODevice::ReadOnly)) { + QJsonDocument doc = QJsonDocument::fromJson(f.readAll()); + QJsonObject obj = doc.object(); + if (obj.contains("transformation_matrix")) { + QJsonArray arr = obj["transformation_matrix"].toArray(); + if (arr.size() == 16) { + calib_mat = cv::Mat::eye(4, 4, CV_64F); + std::cout << "[Tool] Matrix Diagonal: "; + for (int i = 0; i < 4; ++i) { + for (int j = 0; j < 4; ++j) { + calib_mat.at(i, j) = arr[i * 4 + j].toDouble(); + if (i == j) + std::cout << calib_mat.at(i, j) << " "; + } + } + std::cout << std::endl; + std::cout << "[Tool] Calibration Loaded Successfully." + << std::endl; + found_calib = true; + break; // Stop searching + } + } + } + } + } + + if (!found_calib) { + std::cout << "\n[Tool] !!! WARNING: NO CALIBRATION FILE FOUND !!!" + << std::endl; + std::cout << "[Tool] !!! Running in CAMERA FRAME (Uncalibrated) !!!" + << std::endl; + std::cout << "[Tool] !!! Angles will likely match Physical Camera Tilt " + "(approx 0 if flat, 18 if tilted) !!!\n" + << std::endl; + } + + // Hardcoded Intrinsics from valid logs (User specific) + // fx=1053.48, fy=1053.48, cx=640.301, cy=496.681 + CameraIntrinsics intr; + intr.fx = 1053.48f; + intr.fy = 1053.48f; + intr.cx = 640.301f; + intr.cy = 496.681f; + + std::cout << "[Tool] Detecting with Intrinsics: fx=" << intr.fx + << " cx=" << intr.cx << std::endl; + + PalletOffsetResult res; + bool success = PalletOffsetAlgorithm::detect(g_depth, cv::Mat(), "left", + res, nullptr, g_roi_points, + intr, &calib_mat); + + if (success) { + std::cout << "[Tool] Success!" 
<< std::endl; + std::cout << " Abs Pos: (" << res.abs_x << ", " << res.abs_y << ", " + << res.abs_z << ")" << std::endl; + + // Calculate angle for storage + double angle_rad = + std::atan2(res.right_hole_pos.z - res.left_hole_pos.z, + res.right_hole_pos.x - res.left_hole_pos.x); + double angle_deg = angle_rad * 180.0 / CV_PI; + + QJsonObject root; + root["x"] = res.abs_x; + root["y"] = res.abs_y; + root["z"] = res.abs_z; + root["reference_angle"] = angle_deg; // Explicitly save angle + + QJsonObject leftObj; + leftObj["x"] = res.left_hole_pos.x; + leftObj["y"] = res.left_hole_pos.y; + leftObj["z"] = res.left_hole_pos.z; + root["left_hole"] = leftObj; + + QJsonObject rightObj; + rightObj["x"] = res.right_hole_pos.x; + rightObj["y"] = res.right_hole_pos.y; + rightObj["z"] = res.right_hole_pos.z; + root["right_hole"] = rightObj; + + QFile file("reference_pallet.json"); + if (file.open(QIODevice::WriteOnly)) { + file.write(QJsonDocument(root).toJson()); + file.close(); + std::cout << "[GenerateReference] SUCCESS: Saved reference to " + "reference_pallet.json" + << std::endl; + std::cout << " Ref Pos (X,Y,Z): " << res.abs_x + << ", " << res.abs_y << ", " << res.abs_z << std::endl; + // Calculate angle for display + double angle_deg = + std::atan2(res.right_hole_pos.z - res.left_hole_pos.z, + res.right_hole_pos.x - res.left_hole_pos.x) * + 180.0 / CV_PI; + std::cout << " Ref Angle: " << angle_deg << " deg" + << std::endl; + + QMessageBox::information( + nullptr, "Success", + "Reference data saved to reference_pallet.json"); + } + } else { + std::cerr << "[Tool] Failed." << std::endl; + } + + // Clear to allow re-selection? Or keep? + // Keep points to show what was selected. + // Reset must be manual. + } + + int key = cv::waitKey(30); + if (key == 27) + running = false; + if (key == 'r' || key == 'R') { + g_roi_points.clear(); + std::cout << "[Tool] ROI Reset." 
<< std::endl; + } + } + + return 0; +} diff --git a/image_capture/src/tools/intrinsic_dumper/README.md b/image_capture/src/tools/intrinsic_dumper/README.md new file mode 100644 index 0000000..69ed5ab --- /dev/null +++ b/image_capture/src/tools/intrinsic_dumper/README.md @@ -0,0 +1,31 @@ +# Intrinsic Dumper (内参导出工具) + +## 简介 (Introduction) +本工具是一个命令行程序 (CLI),用于自动扫描连接的 Percipio 相机,并提取其出厂标定参数(内参、畸变系数、外参)。 + +## 功能 (Features) +* **自动扫描**:通过 Percipio SDK 枚举所有连接的设备。 +* **参数提取**:并发获取 Depth 相机和 Color 相机的标定数据。 +* **自动命名**:根据相机序列号 (SN) 生成唯一的文件名。 +* **格式规范**:输出符合项目标准的 JSON 格式。 + +## 使用步骤 (Usage) +1. 确保相机已正确连接并安装驱动。 +2. 双击运行 `intrinsic_dumper.exe` 或在命令行中执行。 +3. 程序将输出日志,提示发现的设备 SN。 +4. 执行完成后,程序会自动关闭(或按任意键退出,视环境而定)。 +5. 在当前目录下查找生成的 `intrinsics_.json` 文件。 + +## 输出格式 (Output) +文件名示例:`intrinsics_207000146458.json` +```json +{ + "depth": { + "intrinsic": [fx, 0, cx, 0, fy, cy, 0, 0, 1], + "distortion": [...], + "extrinsic": [...] + }, + "color": { ... }, + "camera_id": "207000146458" +} +``` diff --git a/image_capture/src/tools/intrinsic_dumper/main.cpp b/image_capture/src/tools/intrinsic_dumper/main.cpp new file mode 100644 index 0000000..8ea7ed1 --- /dev/null +++ b/image_capture/src/tools/intrinsic_dumper/main.cpp @@ -0,0 +1,116 @@ +#ifndef NOMINMAX +#define NOMINMAX +#endif + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +int main(int argc, char *argv[]) { + // QCoreApplication not needed for basic JSON operations + + std::cout << "Initializing Percipio SDK..." << std::endl; + TY_STATUS status = TYInitLib(); + if (status != TY_STATUS_OK) { + std::cerr << "Failed to init SDK: " << status << std::endl; + return -1; + } + + TYUpdateInterfaceList(); + uint32_t n = 0; + TYGetInterfaceNumber(&n); + if (n == 0) { + std::cerr << "No interfaces found." 
<< std::endl; + TYDeinitLib(); + return -1; + } + + std::vector ifaces(n); + TYGetInterfaceList(&ifaces[0], n, &n); + + int device_count = 0; + + for (const auto& ifaceInfo : ifaces) { + TY_INTERFACE_HANDLE hIface = nullptr; + if (TYOpenInterface(ifaceInfo.id, &hIface) == TY_STATUS_OK) { + TYUpdateDeviceList(hIface); + uint32_t devCount = 0; + TYGetDeviceNumber(hIface, &devCount); + if (devCount > 0) { + std::vector devs(devCount); + TYGetDeviceList(hIface, &devs[0], devCount, &devCount); + + for (uint32_t i = 0; i < devCount; ++i) { + std::string sn = devs[i].id; + std::cout << "Found Device SN: " << sn << std::endl; + + TY_DEV_HANDLE handle = nullptr; + if (TYOpenDevice(hIface, devs[i].id, &handle) == TY_STATUS_OK) { + + // Fetch Calib Info + TY_CAMERA_CALIB_INFO depth_info, color_info; + bool has_depth = false; + bool has_color = false; + + if (TYGetStruct(handle, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA, &depth_info, sizeof(depth_info)) == TY_STATUS_OK) { + has_depth = true; + std::cout << " - Got Depth Calibration." << std::endl; + } + + if (TYGetStruct(handle, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &color_info, sizeof(color_info)) == TY_STATUS_OK) { + has_color = true; + std::cout << " - Got Color Calibration." 
<< std::endl; + } + + TYCloseDevice(handle); + + if (has_depth || has_color) { + QJsonObject root; + + auto formatInfo = [](const TY_CAMERA_CALIB_INFO& info) -> QJsonObject { + QJsonObject obj; + QJsonArray intr, extr, dist; + for(int k=0; k<9; k++) intr.append((double)info.intrinsic.data[k]); + for(int k=0; k<12; k++) dist.append((double)info.distortion.data[k]); + for(int k=0; k<16; k++) extr.append((double)info.extrinsic.data[k]); + + obj["intrinsic"] = intr; + obj["distortion"] = dist; + obj["extrinsic"] = extr; + return obj; + }; + + if (has_depth) root["depth"] = formatInfo(depth_info); + if (has_color) root["color"] = formatInfo(color_info); + + QString filename = QString("intrinsics_%1.json").arg(QString::fromStdString(sn)); + QJsonDocument doc(root); + QFile file(filename); + if (file.open(QIODevice::WriteOnly)) { + file.write(doc.toJson()); + file.close(); + std::cout << " -> Saved to " << filename.toStdString() << std::endl; + } + } + device_count++; + } + } + } + TYCloseInterface(hIface); + } + } + + if (device_count == 0) { + std::cout << "No devices processed." << std::endl; + } + + TYDeinitLib(); + return 0; +} diff --git a/image_capture/src/tools/slot_algo_tuner/README.md b/image_capture/src/tools/slot_algo_tuner/README.md new file mode 100644 index 0000000..731f2d8 --- /dev/null +++ b/image_capture/src/tools/slot_algo_tuner/README.md @@ -0,0 +1,29 @@ +# Slot Algorithm Tuner (算法调优工具) + +## 简介 (Introduction) +本工具是一个可视化参数调试程序,用于在离线环境下对货架变形检测算法的关键参数进行微调。它通过对比标准图像与输入图像的差异,模拟算法的处理流程。 + +## 功能 (Features) +* **图像加载**:支持加载 Reference (基准) 图像和 Input (待测) 图像。 +* **参数实时调整**: + * **ROI (x, y, w, h)**:感兴趣区域设置。 + * **Threshold**:差分二值化阈值。 + * **Blur Size**:高斯模糊核大小。 + * **Area Threshold**:连通域面积过滤阈值。 +* **可视化反馈**:实时显示 Reference、Input、Difference (差分图) 和 Mask (掩膜) 结果。 + +## 使用步骤 (Usage) +1. **加载基准图**:点击 `Load Reference`,选择一张无变形的标准货架图像。 +2. **加载测试图**:点击 `Load Input`,选择需要检测的现场图像。 +3. **调整参数**: + * 调整右侧面板的 SpinBox 数值。 + * 点击 `Process` 按钮(或参数改变时自动刷新)查看效果。 +4. 
**观察结果**: + * **Diff Image**:显示两幅图像的像素差异。 + * **Mask Image**:显示经过阈值和滤波后的检测结果。 + * 下方文本框会显示检测到的区域信息(如有)。 + +## 适用场景 (Use Cases) +* 确定现场环境下的最佳二值化阈值。 +* 调整检测敏感度以过滤噪声。 +* 验证 ROI 区域是否覆盖目标货架。 diff --git a/image_capture/src/tools/slot_algo_tuner/main.cpp b/image_capture/src/tools/slot_algo_tuner/main.cpp new file mode 100644 index 0000000..acb24d3 --- /dev/null +++ b/image_capture/src/tools/slot_algo_tuner/main.cpp @@ -0,0 +1,13 @@ +#include +#include "tuner_widget.h" + +int main(int argc, char *argv[]) { + QApplication app(argc, argv); + + TunerWidget w; + w.setWindowTitle("Slot Algorithm Tuner"); + w.resize(1200, 800); + w.show(); + + return app.exec(); +} diff --git a/image_capture/src/tools/slot_algo_tuner/tuner_widget.cpp b/image_capture/src/tools/slot_algo_tuner/tuner_widget.cpp new file mode 100644 index 0000000..982f866 --- /dev/null +++ b/image_capture/src/tools/slot_algo_tuner/tuner_widget.cpp @@ -0,0 +1,246 @@ +#include "tuner_widget.h" +#include +#include +#include +#include +#include +#include + +TunerWidget::TunerWidget(QWidget *parent) : QWidget(parent) { + setupUi(); +} + +void TunerWidget::setupUi() { + QVBoxLayout *outerLayout = new QVBoxLayout(this); + + // Create Scroll Area + QScrollArea *scrollArea = new QScrollArea(this); + scrollArea->setWidgetResizable(true); + + // Create internal container widget + QWidget *contentWidget = new QWidget(scrollArea); + QVBoxLayout *mainLayout = new QVBoxLayout(contentWidget); + + // Set content of scroll area + scrollArea->setWidget(contentWidget); + outerLayout->addWidget(scrollArea); + + // 1. 
Controls Area + QGroupBox *grpControls = new QGroupBox("Controls", this); + QGridLayout *layoutControls = new QGridLayout(grpControls); + + btn_load_ref_ = new QPushButton("Load Reference (Empty Slot)", this); + btn_load_input_ = new QPushButton("Load Input (Current Test)", this); + + connect(btn_load_ref_, &QPushButton::clicked, this, &TunerWidget::loadReferenceImage); + connect(btn_load_input_, &QPushButton::clicked, this, &TunerWidget::loadInputImage); + + layoutControls->addWidget(btn_load_ref_, 0, 0); + layoutControls->addWidget(btn_load_input_, 0, 1); + + // ROI Controls + QGroupBox *grpROI = new QGroupBox("ROI Settings", this); + QHBoxLayout *layoutROI = new QHBoxLayout(grpROI); + + spin_roi_x_ = new QSpinBox(this); spin_roi_x_->setRange(0, 5000); spin_roi_x_->setPrefix("X: "); spin_roi_x_->setValue(100); + spin_roi_y_ = new QSpinBox(this); spin_roi_y_->setRange(0, 5000); spin_roi_y_->setPrefix("Y: "); spin_roi_y_->setValue(100); + spin_roi_w_ = new QSpinBox(this); spin_roi_w_->setRange(1, 5000); spin_roi_w_->setPrefix("W: "); spin_roi_w_->setValue(800); + spin_roi_h_ = new QSpinBox(this); spin_roi_h_->setRange(1, 5000); spin_roi_h_->setPrefix("H: "); spin_roi_h_->setValue(600); + + layoutROI->addWidget(spin_roi_x_); + layoutROI->addWidget(spin_roi_y_); + layoutROI->addWidget(spin_roi_w_); + layoutROI->addWidget(spin_roi_h_); + + layoutControls->addWidget(grpROI, 1, 0, 1, 2); + + // Param Controls + QGroupBox *grpParams = new QGroupBox("Algorithm Params", this); + QHBoxLayout *layoutParams = new QHBoxLayout(grpParams); + + spin_threshold_ = new QSpinBox(this); spin_threshold_->setRange(0, 255); spin_threshold_->setPrefix("Diff Thresh: "); spin_threshold_->setValue(30); + spin_blur_ = new QSpinBox(this); spin_blur_->setRange(1, 21); spin_blur_->setSingleStep(2); spin_blur_->setPrefix("Blur Size: "); spin_blur_->setValue(5); + spin_area_threshold_ = new QSpinBox(this); spin_area_threshold_->setRange(0, 1000000); spin_area_threshold_->setPrefix("Area Thresh: 
"); spin_area_threshold_->setValue(5000); + + connect(spin_roi_x_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_roi_y_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_roi_w_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_roi_h_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_threshold_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_blur_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + connect(spin_area_threshold_, QOverload::of(&QSpinBox::valueChanged), this, &TunerWidget::process); + + layoutParams->addWidget(spin_threshold_); + layoutParams->addWidget(spin_blur_); + layoutParams->addWidget(spin_area_threshold_); + + layoutControls->addWidget(grpParams, 2, 0, 1, 2); + + // Result Text + label_result_text_ = new QLabel("Ready", this); + label_result_text_->setStyleSheet("font-size: 16px; font-weight: bold; color: blue;"); + layoutControls->addWidget(label_result_text_, 3, 0, 1, 2); + + mainLayout->addWidget(grpControls); + + // 2. 
Images Area + QGridLayout *layoutImages = new QGridLayout(); + + label_ref_ = new QLabel("Reference", this); label_ref_->setScaledContents(false); label_ref_->setAlignment(Qt::AlignCenter); label_ref_->setStyleSheet("border: 1px solid gray; background: black;"); + label_input_ = new QLabel("Input + ROI", this); label_input_->setScaledContents(false); label_input_->setAlignment(Qt::AlignCenter); label_input_->setStyleSheet("border: 1px solid gray; background: black;"); + label_diff_ = new QLabel("AbsDiff", this); label_diff_->setScaledContents(false); label_diff_->setAlignment(Qt::AlignCenter); label_diff_->setStyleSheet("border: 1px solid gray; background: black;"); + label_mask_ = new QLabel("Result Mask", this); label_mask_->setScaledContents(false); label_mask_->setAlignment(Qt::AlignCenter); label_mask_->setStyleSheet("border: 1px solid gray; background: black;"); + + layoutImages->addWidget(new QLabel("Reference Image"), 0, 0); + layoutImages->addWidget(label_ref_, 1, 0); + + layoutImages->addWidget(new QLabel("Input Image (Red Box = ROI)"), 0, 1); + layoutImages->addWidget(label_input_, 1, 1); + + layoutImages->addWidget(new QLabel("Difference Image"), 2, 0); + layoutImages->addWidget(label_diff_, 3, 0); + + layoutImages->addWidget(new QLabel("Detection Mask"), 2, 1); + layoutImages->addWidget(label_mask_, 3, 1); + + mainLayout->addLayout(layoutImages); +} + +void TunerWidget::loadReferenceImage() { + QString path = QFileDialog::getOpenFileName(this, "Open Reference Image", "", "Images (*.png *.jpg *.bmp)"); + if (path.isEmpty()) return; + + mat_ref_raw_ = cv::imread(path.toStdString(), cv::IMREAD_GRAYSCALE); + // Resize to target resolution if needed (simulating the real system) + cv::Size target_size(4024, 3036); + if (!mat_ref_raw_.empty() && mat_ref_raw_.size() != target_size) { + cv::resize(mat_ref_raw_, mat_ref_raw_, target_size); + } + process(); +} + +void TunerWidget::loadInputImage() { + QString path = QFileDialog::getOpenFileName(this, "Open Input 
Image", "", "Images (*.png *.jpg *.bmp)"); + if (path.isEmpty()) return; + + mat_input_raw_ = cv::imread(path.toStdString(), cv::IMREAD_GRAYSCALE); + // Resize to target resolution if needed + cv::Size target_size(4024, 3036); + if (!mat_input_raw_.empty() && mat_input_raw_.size() != target_size) { + cv::resize(mat_input_raw_, mat_input_raw_, target_size); + } + process(); +} + +void TunerWidget::process() { + if (mat_ref_raw_.empty() || mat_input_raw_.empty()) { + if (!mat_ref_raw_.empty()) { + QImage qimg = cvMatToQImage(mat_ref_raw_); + if (!qimg.isNull()) { + QPixmap p = QPixmap::fromImage(qimg); + // Scale with aspect ratio + label_ref_->setPixmap(p.scaled(640, 480, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + } + } + if (!mat_input_raw_.empty()) { + QImage qimg = cvMatToQImage(mat_input_raw_); + if (!qimg.isNull()) { + QPixmap p = QPixmap::fromImage(qimg); + label_input_->setPixmap(p.scaled(640, 480, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + } + } + return; + } + + // 1. Get Params + int rx = spin_roi_x_->value(); + int ry = spin_roi_y_->value(); + int rw = spin_roi_w_->value(); + int rh = spin_roi_h_->value(); + int blur = spin_blur_->value(); + int diff_th = spin_threshold_->value(); + int area_th = spin_area_threshold_->value(); + + if (blur % 2 == 0) blur++; // Ensure odd + + // 2. Validate ROI + int img_w = mat_input_raw_.cols; + int img_h = mat_input_raw_.rows; + rx = std::max(0, rx); + ry = std::max(0, ry); + rw = std::min(rw, img_w - rx); + rh = std::min(rh, img_h - ry); + + cv::Rect roi(rx, ry, rw, rh); + + // 3. 
Process + cv::Mat input_roi = mat_input_raw_(roi); + cv::Mat ref_roi = mat_ref_raw_(roi); + + cv::Mat input_blurred, ref_blurred; + cv::GaussianBlur(input_roi, input_blurred, cv::Size(blur, blur), 0); + cv::GaussianBlur(ref_roi, ref_blurred, cv::Size(blur, blur), 0); + + cv::Mat diff; + cv::absdiff(input_blurred, ref_blurred, diff); + + cv::Mat mask; + cv::threshold(diff, mask, diff_th, 255, cv::THRESH_BINARY); + + cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5)); + cv::morphologyEx(mask, mask, cv::MORPH_OPEN, kernel); + + int non_zero = cv::countNonZero(mask); + + // 4. Update Result + bool occupied = (non_zero > area_th); + if (occupied) { + label_result_text_->setText(QString("Occupied (Pixels: %1 > %2)").arg(non_zero).arg(area_th)); + label_result_text_->setStyleSheet("font-size: 16px; font-weight: bold; color: red;"); + } else { + label_result_text_->setText(QString("Empty (Pixels: %1 <= %2)").arg(non_zero).arg(area_th)); + label_result_text_->setStyleSheet("font-size: 16px; font-weight: bold; color: green;"); + } + + // 5. 
Update Displays + // Fixed display width for consistent layout + const int DISP_W = 640; + const int DISP_H = 480; + + // Ref + QImage q_ref = cvMatToQImage(mat_ref_raw_); + label_ref_->setPixmap(QPixmap::fromImage(q_ref).scaled(DISP_W, DISP_H, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + + // Input with ROI Draw + cv::Mat input_vis; + // 确保是彩色图以画红框 + if (mat_input_raw_.channels() == 1) { + cv::cvtColor(mat_input_raw_, input_vis, cv::COLOR_GRAY2BGR); + } else { + input_vis = mat_input_raw_.clone(); + } + cv::rectangle(input_vis, roi, cv::Scalar(0, 0, 255), 10); // Red box + QImage q_input = cvMatToQImage(input_vis); + label_input_->setPixmap(QPixmap::fromImage(q_input).scaled(DISP_W, DISP_H, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + + // Diff + QImage q_diff = cvMatToQImage(diff); + label_diff_->setPixmap(QPixmap::fromImage(q_diff).scaled(DISP_W, DISP_H, Qt::KeepAspectRatio, Qt::SmoothTransformation)); + + // Mask + QImage q_mask = cvMatToQImage(mask); + label_mask_->setPixmap(QPixmap::fromImage(q_mask).scaled(DISP_W, DISP_H, Qt::KeepAspectRatio, Qt::SmoothTransformation)); +} + +QImage TunerWidget::cvMatToQImage(const cv::Mat& mat) { + if (mat.empty()) return QImage(); + + if (mat.type() == CV_8UC1) { + QImage image(mat.data, mat.cols, mat.rows, mat.step, QImage::Format_Grayscale8); + return image.copy(); + } else if (mat.type() == CV_8UC3) { + QImage image(mat.data, mat.cols, mat.rows, mat.step, QImage::Format_RGB888); + return image.rgbSwapped(); + } + return QImage(); +} diff --git a/image_capture/src/tools/slot_algo_tuner/tuner_widget.h b/image_capture/src/tools/slot_algo_tuner/tuner_widget.h new file mode 100644 index 0000000..eceb7ae --- /dev/null +++ b/image_capture/src/tools/slot_algo_tuner/tuner_widget.h @@ -0,0 +1,53 @@ +#pragma once + +#include +#include +#include +#include +#include +#include + +class TunerWidget : public QWidget { + Q_OBJECT + +public: + explicit TunerWidget(QWidget *parent = nullptr); + ~TunerWidget() = default; + 
+private slots: + void loadReferenceImage(); + void loadInputImage(); + void process(); + +private: + void setupUi(); + void updateDisplay(); + QImage cvMatToQImage(const cv::Mat& mat); + + // UI Controls + QLabel *label_ref_; + QLabel *label_input_; + QLabel *label_diff_; + QLabel *label_mask_; + + QPushButton *btn_load_ref_; + QPushButton *btn_load_input_; + + QSpinBox *spin_roi_x_; + QSpinBox *spin_roi_y_; + QSpinBox *spin_roi_w_; + QSpinBox *spin_roi_h_; + QSpinBox *spin_threshold_; + QSpinBox *spin_blur_; + QSpinBox *spin_area_threshold_; + + QLabel *label_result_text_; + + // Data + cv::Mat mat_ref_raw_; + cv::Mat mat_input_raw_; + cv::Mat mat_ref_display_; + cv::Mat mat_input_display_; + cv::Mat mat_diff_display_; + cv::Mat mat_mask_display_; +}; diff --git a/image_capture/src/vision/vision_controller.cpp b/image_capture/src/vision/vision_controller.cpp new file mode 100644 index 0000000..b80e59e --- /dev/null +++ b/image_capture/src/vision/vision_controller.cpp @@ -0,0 +1,197 @@ +/** + * @file vision_controller.cpp + * @brief Vision系统主控制器实现文件 + * + * 此文件实现了VisionController类的完整功能: + * - 系统初始化(Redis、任务管理器) + * - 系统启动和停止 + * - 任务接收和分发 + * - 模块间协调和数据流管理 + * + * 设计说明: + * - VisionController是系统唯一控制器,统一管理Redis和任务模块 + * - 设备由MainWindow初始化,VisionController直接使用DeviceManager单例 + * - 使用回调函数实现模块间解耦 + * - 所有模块使用智能指针管理,自动释放资源 + */ + +#include "vision_controller.h" +#include "../redis/redis_communicator.h" +#include "../task/task_manager.h" +#include "../device/device_manager.h" +#include +#include + +/** + * @brief 构造函数 + * + * 初始化所有成员变量为默认值 + * - running_: 系统未运行 + * - initialized_: 系统未初始化 + */ +VisionController::VisionController() + : running_(false) // 系统未运行 + , initialized_(false) // 系统未初始化 +{ +} + +/** + * @brief 析构函数 + * + * 确保在对象销毁时正确停止系统 + * 调用stop()清理所有资源 + */ +VisionController::~VisionController() { + stop(); +} + +/** + * @brief 初始化Vision系统 + * + * @param redis_host Redis服务器地址,默认"127.0.0.1" + * @param redis_port Redis服务器端口,默认6379 + * @param task_db 
任务监听Redis数据库编号 + * @param result_db 结果写入Redis数据库编号 + * @return true 初始化成功,false 初始化失败 + */ +bool VisionController::initialize(const std::string& redis_host, + int redis_port, + int task_db, + int result_db) { + if (initialized_) { + std::cout << "[VisionController] System already initialized" << std::endl; + return true; + } + + std::cout << "[VisionController] Starting Vision system initialization..." << std::endl; + std::cout << "[VisionController] Redis config: Input DB=" << task_db << ", Output DB=" << result_db << std::endl; + + // ========== 1. 初始化Redis任务监听模块 (Input DB) ========== + redis_comm_ = std::make_shared(); + // TODO: move password to config + if (!redis_comm_->initialize(redis_host, redis_port, task_db, "123456")) { + std::cerr << "[VisionController] Redis task communicator (DB " << task_db << ") initialization failed" << std::endl; + return false; + } + + // ========== 2. 初始化Redis结果写入模块 (Output DB) ========== + redis_result_comm_ = std::make_shared(); + if (!redis_result_comm_->initialize(redis_host, redis_port, result_db, "123456")) { + std::cerr << "[VisionController] Redis result communicator (DB " << result_db << ") initialization failed" << std::endl; + return false; + } + std::cout << "[VisionController] Redis communicators initialized successfully" << std::endl; + + // ========== 3. 初始化任务管理器 ========== + task_manager_ = std::make_shared(); + + // 初始化任务管理器,传入结果写入(输出DB)和触发清空(输入DB)两个Redis连接 + if (!task_manager_->initialize(redis_result_comm_, redis_comm_)) { + std::cerr << "[VisionController] Task manager initialization failed" << std::endl; + return false; + } + std::cout << "[VisionController] Task manager initialized successfully" << std::endl; + + // ========== 4. 
设置回调函数 ========== + redis_comm_->setTaskCallback( + [this](const RedisTaskData& task_data) { + this->onTaskReceived(task_data); + } + ); + + initialized_ = true; + std::cout << "[VisionController] Vision system initialization complete" << std::endl; + return true; +} + +bool VisionController::start() { + if (!initialized_) { + std::cerr << "[VisionController] System not initialized, cannot start" << std::endl; + return false; + } + + if (running_) { + return true; + } + + std::cout << "[VisionController] Starting Vision system..." << std::endl; + + // 启动Redis任务监听 (只有 Input DB 需要监听) + if (!redis_comm_->startListening()) { + std::cerr << "[VisionController] Redis listening start failed" << std::endl; + return false; + } + std::cout << "[VisionController] Redis listening started" << std::endl; + + running_ = true; + std::cout << "[VisionController] Vision system started successfully" << std::endl; + return true; +} + +void VisionController::stop() { + if (!running_) { + return; + } + + std::cout << "[VisionController] Stopping Vision system..." << std::endl; + + running_ = false; + + // 停止Redis监听 + if (redis_comm_) { + redis_comm_->stopListening(); + } + // redis_result_comm_ 不需要专门停止,因为它不跑监听线程,析构时会自动断开 + + if (task_manager_) { + task_manager_->stopCurrentTask(); + } + + std::cout << "[VisionController] Vision system stopped" << std::endl; +} + +/** + * @brief 检查系统是否正在运行 + * + * @return true 系统正在运行,false 系统已停止 + */ +bool VisionController::isRunning() const { + return running_; +} + +/** + * @brief 任务接收回调函数 + * + * 当Redis检测到新任务时,RedisCommunicator会调用此函数 + * 此函数将任务转发给TaskManager处理 + * + * @param task_data 任务数据,包含flag、side、task_time + * + * @note 此函数运行在Redis监听线程中,需要快速返回,避免阻塞监听 + * @note TaskManager使用任务队列异步处理任务,不会阻塞此函数 + */ +void VisionController::onTaskReceived(const RedisTaskData& task_data) { + // 如果系统未处于运行状态,忽略任务,防止在初始化/停止阶段拉起任务 + if (!running_) { + std::cout << "[VisionController] Received task while system not running, ignoring. 
flag=" + << task_data.flag << ", side=" << task_data.side << std::endl; + return; + } + + // 设备未就绪时也忽略任务,避免在相机尚未完全启动时触发算法导致异常 + if (!DeviceManager::getInstance().isRunning()) { + std::cerr << "[VisionController] DeviceManager not running, ignoring task: flag=" + << task_data.flag << ", side=" << task_data.side << std::endl; + return; + } + + std::cout << "[VisionController] Received new task: flag=" << task_data.flag + << ", side=" << task_data.side << std::endl; + + // 将任务转发给TaskManager处理 + // TaskManager会将任务加入队列,由执行线程异步处理 + if (task_manager_) { + task_manager_->handleTask(task_data); + } +} + diff --git a/image_capture/src/vision/vision_controller.h b/image_capture/src/vision/vision_controller.h new file mode 100644 index 0000000..d180fa4 --- /dev/null +++ b/image_capture/src/vision/vision_controller.h @@ -0,0 +1,81 @@ +#pragma once + +#include +#include +#include +#include +#include +#include "../redis/task_data.h" + +class RedisCommunicator; +class TaskManager; + +/** + * @brief Vision系统主控制器(唯一系统级控制器) + * + * 这是Vision系统的唯一系统级控制器,负责Redis监听和任务处理。 + * + * 功能说明: + * - 整合Redis通信和任务管理模块 + * - 协调各模块之间的交互和数据流 + * - 提供统一的系统启动和停止接口 + * - 管理Redis监听和任务处理的生命周期 + * + * 架构层次: + * - VisionController(系统控制器)-> 管理Redis和任务模块 + * - RedisCommunicator(通信模块) + * - TaskManager(任务管理模块,包含结果处理) + * + * 注意:设备由MainWindow初始化和管理,VisionController直接使用DeviceManager单例。 + * TaskManager也直接使用DeviceManager单例获取图像。 + */ +class VisionController { +public: + VisionController(); + ~VisionController(); + + /** + * 初始化Vision系统 + * @param redis_host Redis服务器地址 + * @param redis_port Redis服务器端口 + * @param task_db 任务监听Redis数据库编号 + * @param result_db 结果写入Redis数据库编号 + * @return 是否成功初始化 + */ + bool initialize(const std::string& redis_host = "127.0.0.1", + int redis_port = 6379, + int task_db = 0, + int result_db = 1); + + /** + * 启动Vision系统 + * @return 是否成功启动 + */ + bool start(); + + /** + * 停止Vision系统 + */ + void stop(); + + /** + * 检查系统是否正在运行 + */ + bool isRunning() const; + +private: + /** + * 
任务回调函数(当Redis检测到新任务时调用) + */ + void onTaskReceived(const RedisTaskData& task_data); + + // 各模块 + std::shared_ptr redis_comm_; // 任务监听 (Input) + std::shared_ptr redis_result_comm_; // 结果写入 (Output) + std::shared_ptr task_manager_; + + // 状态 + std::atomic running_; + bool initialized_; +}; + diff --git a/image_capture/third_party/mvs/Includes/CameraParams.h b/image_capture/third_party/mvs/Includes/CameraParams.h new file mode 100644 index 0000000..eda0eac --- /dev/null +++ b/image_capture/third_party/mvs/Includes/CameraParams.h @@ -0,0 +1,1441 @@ + +#ifndef _MV_CAMERA_PARAMS_H_ +#define _MV_CAMERA_PARAMS_H_ + +#include "PixelType.h" + +#ifndef __cplusplus +typedef char bool; +#define true 1 +#define false 0 +#endif + +/// \~chinese 排序方式 \~english The Method of Sorting +typedef enum _MV_SORT_METHOD_ +{ + SortMethod_SerialNumber = 0, ///< \~chinese 按序列号排序 \~english Sorting by SerialNumber + SortMethod_UserID = 1, ///< \~chinese 按用户自定义名字排序 \~english Sorting by UserID + SortMethod_CurrentIP_ASC = 2, ///< \~chinese 按当前IP地址排序(升序,只对GEV相机有效,其它类型相机按默认排序) \~english Sorting by current IP(Ascending, Available for GEV cameras only. Other types of cameras are sorted by default) + SortMethod_CurrentIP_DESC = 3, ///< \~chinese 按当前IP地址排序(降序,只对GEV相机有效,其它类型相机按默认排序) \~english Sorting by current IP(Descending, Available for GEV cameras only. 
Other types of cameras are sorted by default) + +}MV_SORT_METHOD; + + +/// \~chinese GigE设备信息 \~english GigE device info +typedef struct _MV_GIGE_DEVICE_INFO_ +{ + unsigned int nIpCfgOption; ///< [OUT] \~chinese IP配置选项 \~english IP Configuration Options + unsigned int nIpCfgCurrent; ///< [OUT] \~chinese 当前IP配置 \~english IP Configuration + unsigned int nCurrentIp; ///< [OUT] \~chinese 当前IP地址 \~english Current Ip + unsigned int nCurrentSubNetMask; ///< [OUT] \~chinese 当前子网掩码 \~english Curtent Subnet Mask + unsigned int nDefultGateWay; ///< [OUT] \~chinese 当前网关 \~english Current Gateway + unsigned char chManufacturerName[32]; ///< [OUT] \~chinese 制造商名称 \~english Manufacturer Name + unsigned char chModelName[32]; ///< [OUT] \~chinese 型号名称 \~english Model Name + unsigned char chDeviceVersion[32]; ///< [OUT] \~chinese 设备版本 \~english Device Version + unsigned char chManufacturerSpecificInfo[48]; ///< [OUT] \~chinese 制造商的具体信息 \~english Manufacturer Specific Information + unsigned char chSerialNumber[16]; ///< [OUT] \~chinese 序列号 \~english Serial Number + unsigned char chUserDefinedName[16]; ///< [OUT] \~chinese 用户自定义名称 \~english User Defined Name + unsigned int nNetExport; ///< [OUT] \~chinese 网口IP地址 \~english NetWork IP Address + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MV_GIGE_DEVICE_INFO; + +///< \~chinese 最大的数据信息大小 \~english Maximum data information size +#define INFO_MAX_BUFFER_SIZE 64 + +/// \~chinese USB设备信息 \~english USB device info +typedef struct _MV_USB3_DEVICE_INFO_ +{ + unsigned char CrtlInEndPoint; ///< [OUT] \~chinese 控制输入端点 \~english Control input endpoint + unsigned char CrtlOutEndPoint; ///< [OUT] \~chinese 控制输出端点 \~english Control output endpoint + unsigned char StreamEndPoint; ///< [OUT] \~chinese 流端点 \~english Flow endpoint + unsigned char EventEndPoint; ///< [OUT] \~chinese 事件端点 \~english Event endpoint + unsigned short idVendor; ///< [OUT] \~chinese 供应商ID号 \~english Vendor ID Number + unsigned short idProduct; ///< [OUT] 
\~chinese 产品ID号 \~english Device ID Number + unsigned int nDeviceNumber; ///< [OUT] \~chinese 设备索引号 \~english Device Number + unsigned char chDeviceGUID[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备GUID号 \~english Device GUID Number + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 供应商名字 \~english Vendor Name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 型号名字 \~english Model Name + unsigned char chFamilyName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 家族名字 \~english Family Name + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备版本 \~english Device Version + unsigned char chManufacturerName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 制造商名字 \~english Manufacturer Name + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 序列号 \~english Serial Number + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 用户自定义名字 \~english User Defined Name + unsigned int nbcdUSB; ///< [OUT] \~chinese 支持的USB协议 \~english Support USB Protocol + unsigned int nDeviceAddress; ///< [OUT] \~chinese 设备地址 \~english Device Address + unsigned int nReserved[2]; ///< \~chinese 预留 \~english Reserved + +}MV_USB3_DEVICE_INFO; + +/// \~chinese CameraLink设备信息 \~english CameraLink device info +typedef struct _MV_CamL_DEV_INFO_ +{ + unsigned char chPortID[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 串口号 \~english Port ID + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 型号名字 \~english Model Name + unsigned char chFamilyName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 名称 \~english Family Name + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备版本 \~english Device Version + unsigned char chManufacturerName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 制造商名字 \~english Manufacturer Name + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 序列号 \~english Serial Number + + unsigned int nReserved[38]; ///< \~chinese 预留 \~english 
Reserved + +}MV_CamL_DEV_INFO; + +///< \~chinese CoaXPress相机信息 \~english CoaXPress device information +typedef struct _MV_CXP_DEVICE_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; /// \~chinese 采集卡ID \~english Interface ID of Frame Grabber + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 供应商名字 \~english Vendor name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 型号名字 \~english Model name + unsigned char chManufacturerInfo[INFO_MAX_BUFFER_SIZE];///< \~chinese 厂商信息 \~english Manufacturer information + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机版本 \~english Device version + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 序列号 \~english Serial number + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 用户自定义名字 \~english User defined name + unsigned char chDeviceID[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机ID \~english Device ID + unsigned int nReserved[7]; ///< \~chinese 保留字段 \~english Reserved +}MV_CXP_DEVICE_INFO; + +///< \~chinese 采集卡Camera Link相机信息 \~english Camera Link device information on frame grabber +typedef struct _MV_CML_DEVICE_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; /// \~chinese 采集卡ID \~english Interface ID of Frame Grabber + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 供应商名字 \~english Vendor name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 型号名字 \~english Model name + unsigned char chManufacturerInfo[INFO_MAX_BUFFER_SIZE];///< \~chinese 厂商信息 \~english Manufacturer information + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机版本 \~english Device version + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 序列号 \~english Serial number + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 用户自定义名字 \~english User defined name + unsigned char chDeviceID[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机ID \~english Device ID + unsigned int 
nReserved[7]; ///< \~chinese 保留字段 \~english Reserved +}MV_CML_DEVICE_INFO; + +///< \~chinese XoFLink相机信息 \~english XoFLink device information +typedef struct _MV_XOF_DEVICE_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; /// \~chinese 采集卡ID \~english Interface ID of Frame Grabber + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 供应商名字 \~english Vendor name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 型号名字 \~english Model name + unsigned char chManufacturerInfo[INFO_MAX_BUFFER_SIZE];///< \~chinese 厂商信息 \~english Manufacturer information + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机版本 \~english Device version + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 序列号 \~english Serial number + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 用户自定义名字 \~english User defined name + unsigned char chDeviceID[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机ID \~english Device ID + unsigned int nReserved[7]; ///< \~chinese 保留字段 \~english Reserved +}MV_XOF_DEVICE_INFO; + +///< \~chinese 虚拟相机信息 \~english Virtual device information +typedef struct _MV_GENTL_VIR_DEVICE_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; /// \~chinese 采集卡ID \~english Interface ID of Frame Grabber + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 供应商名字 \~english Vendor name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 型号名字 \~english Model name + unsigned char chManufacturerInfo[INFO_MAX_BUFFER_SIZE];///< \~chinese 厂商信息 \~english Manufacturer information + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机版本 \~english Device version + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 序列号 \~english Serial number + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 用户自定义名字 \~english User defined name + unsigned char chDeviceID[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 相机ID \~english Device ID + 
unsigned char chTLType[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 传输层类型 \~english GenTL Type + unsigned int nReserved[7]; ///< \~chinese 保留字段 \~english Reserved +}MV_GENTL_VIR_DEVICE_INFO; + +///< \~chinese 设备传输层协议类型 \~english Device Transport Layer Protocol Type +#define MV_UNKNOW_DEVICE 0x00000000 ///< \~chinese 未知设备类型,保留意义 \~english Unknown Device Type, Reserved +#define MV_GIGE_DEVICE 0x00000001 ///< \~chinese GigE设备 \~english GigE Device +#define MV_1394_DEVICE 0x00000002 ///< \~chinese 1394-a/b 设备 \~english 1394-a/b Device +#define MV_USB_DEVICE 0x00000004 ///< \~chinese USB 设备 \~english USB Device +#define MV_CAMERALINK_DEVICE 0x00000008 ///< \~chinese CameraLink设备 \~english CameraLink Device +#define MV_VIR_GIGE_DEVICE 0x00000010 ///< \~chinese 虚拟GigE设备,包含虚拟GEV采集卡下的设备 \~english Virtual GigE Device,include GenTL virtual device +#define MV_VIR_USB_DEVICE 0x00000020 ///< \~chinese 虚拟USB设备,不支持虚拟采集卡下的设备 \~english Virtual USB Device,not supports GenTL virtual device +#define MV_GENTL_GIGE_DEVICE 0x00000040 ///< \~chinese 自研网卡下GigE设备,某些卡不支持此协议,如GE1104 \~english GenTL GigE Device +#define MV_GENTL_CAMERALINK_DEVICE 0x00000080 ///< \~chinese CameraLink相机设备 \~english GenTL CameraLink Camera Device +#define MV_GENTL_CXP_DEVICE 0x00000100 ///< \~chinese CoaXPress设备 \~english GenTL CoaXPress Device +#define MV_GENTL_XOF_DEVICE 0x00000200 ///< \~chinese XoF设备 \~english GenTL XoF Device +#define MV_GENTL_VIR_DEVICE 0x00000800 ///< \~chinese 虚拟采集卡下的设备,不支持虚拟GEV采集卡下的设备 \~english GenTL Virtual Device,not supports GenTL virtual GigE device + +/// \~chinese 设备信息 \~english Device info +typedef struct _MV_CC_DEVICE_INFO_ +{ + unsigned short nMajorVer; ///< [OUT] \~chinese 主要版本 \~english Major Version + unsigned short nMinorVer; ///< [OUT] \~chinese 次要版本 \~english Minor Version + unsigned int nMacAddrHigh; ///< [OUT] \~chinese 高MAC地址 \~english High MAC Address + unsigned int nMacAddrLow; ///< [OUT] \~chinese 低MAC地址 \~english Low MAC Address + unsigned int nTLayerType; ///< [OUT] 
\~chinese 设备传输层协议类型 \~english Device Transport Layer Protocol Type + + unsigned int nDevTypeInfo; ///< [OUT] \~chinese 设备类型信息 \~english Device Type Info + ///< \~chinese 设备类型规则 \~english Device Type Rules + ///< 7 - 0 bit: \~chinese 预留 \~english Reserved + ///< 15 - 8 bit: \~chinese 产品子类别 \~english Product subtype + ///< 23 - 16 bit: \~chinese 产品类型 \~english product type + ///< 31 - 24bit: \~chinese 产品线 \~english Product Line 产品线 //eg: 0x01 标准产品/2D Produces; 0x02 3D产品/3D Produces ; 0x03 智能ID产品/Intelligent ID products + unsigned int nReserved[3]; ///< \~chinese 预留 \~english Reserved + + union + { + MV_GIGE_DEVICE_INFO stGigEInfo; ///< [OUT] \~chinese GigE设备信息 \~english GigE Device Info + MV_USB3_DEVICE_INFO stUsb3VInfo; ///< [OUT] \~chinese USB设备信息 \~english USB Device Info + MV_CamL_DEV_INFO stCamLInfo; ///< [OUT] \~chinese CameraLink设备信息 \~english CameraLink Device Info + MV_CML_DEVICE_INFO stCMLInfo; ///< [OUT] \~chinese 采集卡CameraLink设备信息 \~english CameraLink Device Info On Frame Grabber + MV_CXP_DEVICE_INFO stCXPInfo; ///< [OUT] \~chinese 采集卡CoaXPress设备信息 \~english CoaXPress Device Info On Frame Grabber + MV_XOF_DEVICE_INFO stXoFInfo; ///< [OUT] \~chinese 采集卡XoF设备信息 \~english XoF Device Info On Frame Grabber + MV_GENTL_VIR_DEVICE_INFO stVirInfo; ///< [OUT] \~chinese 采集卡虚拟设备信息, 仅支持协议MV_GENTL_VIR_DEVICE \~english Virtual Device Info On Frame Grabber,device transport layer protocol type is MV_GENTL_VIR_DEVICE + }SpecialInfo; + +}MV_CC_DEVICE_INFO; + +///< \~chinese 最多支持的传输层实例个数 \~english The maximum number of supported transport layer instances +#define MV_MAX_TLS_NUM 8 +///< \~chinese 最大支持的设备个数 \~english The maximum number of supported devices +#define MV_MAX_DEVICE_NUM 256 + +/// \~chinese 设备信息列表 \~english Device Information List +typedef struct _MV_CC_DEVICE_INFO_LIST_ +{ + unsigned int nDeviceNum; ///< [OUT] \~chinese 在线设备数量 \~english Online Device Number + MV_CC_DEVICE_INFO* pDeviceInfo[MV_MAX_DEVICE_NUM]; ///< [OUT] \~chinese 支持最多256个设备 \~english Support up 
to 256 devices + +}MV_CC_DEVICE_INFO_LIST; + + +///< \~chinese 采集卡类型 \~english Interface type +#define MV_GIGE_INTERFACE 0x00000001 ///< \~chinese GigE Vision采集卡 \~english GigE Vision interface +#define MV_CAMERALINK_INTERFACE 0x00000004 ///< \~chinese Camera Link采集卡 \~english Camera Link interface +#define MV_CXP_INTERFACE 0x00000008 ///< \~chinese CoaXPress采集卡 \~english CoaXPress interface +#define MV_XOF_INTERFACE 0x00000010 ///< \~chinese XoFLink采集卡 \~english XoFLink interface +#define MV_VIR_INTERFACE 0x00000020 ///< \~chinese 虚拟采集卡 \~english Virtual interface +#define MV_LC_INTERFACE 0x00000040 ///< \~chinese 光源控制卡 \~english Light Controller interface + + +///< \~chinese 最大支持的采集卡数量 \~english The maximum number of Frame Grabber interface supported +#define MV_MAX_INTERFACE_NUM 64 + +///< \~chinese 采集卡信息 \~english Interface information +typedef struct _MV_INTERFACE_INFO_ +{ + unsigned int nTLayerType; ///< \~chinese 采集卡类型 \~english Interface type + // 低16位有效: bits(0~2)代表功能, bits(3~7)代表相机, bits(8-15)代表总线 + // The lower 16 bits are valid: bits (0~2) represents the function, bits (3~7) represents the device, and bits (8~15) represents the bus + // |15 14 13 12 11 10 9 8 | 7 6 5 4 3 | 2 1 0 | + // +-----------------------------+---------------+---------+ + // | bus | device | func | + unsigned int nPCIEInfo; ///< \~chinese 采集卡的PCIE插槽信息 \~english PCIe slot information of interface + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 采集卡ID \~english Interface ID + unsigned char chDisplayName[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 显示名称 \~english Display name + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< \~chinese 序列号 \~english Serial number + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 型号 \~english model name + unsigned char chManufacturer[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 厂商 \~english manufacturer name + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 版本号 \~english device version 
+ unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 自定义名称 \~english user defined name + unsigned int nReserved[64]; ///< \~chinese 保留字段 \~english Reserved +}MV_INTERFACE_INFO; + +///< \~chinese 采集卡信息列表 \~english Interface Information List +typedef struct _MV_INTERFACE_INFO_LIST_ +{ + unsigned int nInterfaceNum; ///< [OUT] \~chinese 采集卡数量 \~english Interface Number + MV_INTERFACE_INFO* pInterfaceInfos[MV_MAX_INTERFACE_NUM]; ///< [OUT] \~chinese 采集卡信息, 支持最多64个设备 \~english Information of interfaces, support up to 64 interfaces +}MV_INTERFACE_INFO_LIST; + + + +/// \~chinese 通过GenTL枚举到的接口信息 \~english Interface Information with GenTL +typedef struct _MV_GENTL_IF_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese GenTL接口ID \~english Interface ID + unsigned char chTLType[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 传输层类型 \~english GenTL Type + unsigned char chDisplayName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese Interface显示名称 \~english Display Name + unsigned int nCtiIndex; ///< [OUT] \~chinese GenTL的cti文件索引 \~english The Index of Cti Files + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_GENTL_IF_INFO; + +///< \~chinese 最大支持的GenTL接口数量 \~english The maximum number of GenTL interface supported +#define MV_MAX_GENTL_IF_NUM 256 + +/// \~chinese 通过GenTL枚举到的接口信息列表 \~english Inferface Information List with GenTL +typedef struct _MV_GENTL_IF_INFO_LIST_ +{ + unsigned int nInterfaceNum; ///< [OUT] \~chinese 在线接口数量 \~english Online Inferface Number + MV_GENTL_IF_INFO* pIFInfo[MV_MAX_GENTL_IF_NUM]; ///< [OUT] \~chinese 支持最多256个接口 \~english Support up to 256 inferfaces + +}MV_GENTL_IF_INFO_LIST; + +/// \~chinese 通过GenTL枚举到的设备信息 \~english Device Information with GenTL +typedef struct _MV_GENTL_DEV_INFO_ +{ + unsigned char chInterfaceID[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese GenTL接口ID \~english Interface ID + unsigned char chDeviceID[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备ID \~english Device 
ID + unsigned char chVendorName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 供应商名字 \~english Vendor Name + unsigned char chModelName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 型号名字 \~english Model Name + unsigned char chTLType[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 传输层类型 \~english GenTL Type + unsigned char chDisplayName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备显示名称 \~english Display Name + unsigned char chUserDefinedName[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 用户自定义名字 \~english User Defined Name + unsigned char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 序列号 \~english Serial Number + unsigned char chDeviceVersion[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备版本号 \~english Device Version + unsigned int nCtiIndex; ///< [OUT] \~chinese GenTL的cti文件索引 \~english The Index of Cti Files + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_GENTL_DEV_INFO; + +///< \~chinese 最大支持的GenTL设备数量 \~english The maximum number of GenTL devices supported +#define MV_MAX_GENTL_DEV_NUM 256 + +/// \~chinese 通过GenTL枚举到的设备信息列表 \~english Device Information List with GenTL +typedef struct _MV_GENTL_DEV_INFO_LIST_ +{ + unsigned int nDeviceNum; ///< [OUT] \~chinese 在线设备数量 \~english Online Device Number + MV_GENTL_DEV_INFO* pDeviceInfo[MV_MAX_GENTL_DEV_NUM]; ///< [OUT] \~chinese 支持最多256个设备 \~english Support up to 256 devices + +}MV_GENTL_DEV_INFO_LIST; + +/// \~chinese 设备的访问模式 \~english Device Access Mode +#define MV_ACCESS_Exclusive 1 /// \~chinese 独占权限,其他APP只允许读CCP寄存器 \~english Exclusive authority, other APP is only allowed to read the CCP register +#define MV_ACCESS_ExclusiveWithSwitch 2 /// \~chinese 可以从5模式下抢占权限,然后以独占权限打开 \~english You can seize the authority from the 5 mode, and then open with exclusive authority +#define MV_ACCESS_Control 3 /// \~chinese 控制权限,其他APP允许读所有寄存器 \~english Control authority, allows other APP reading all registers +#define MV_ACCESS_ControlWithSwitch 4 /// \~chinese 可以从5的模式下抢占权限,然后以控制权限打开 \~english You can seize 
the authority from the 5 mode, and then open with control authority +#define MV_ACCESS_ControlSwitchEnable 5 /// \~chinese 以可被抢占的控制权限打开 \~english Open with seized control authority +#define MV_ACCESS_ControlSwitchEnableWithKey 6 /// \~chinese 可以从5的模式下抢占权限,然后以可被抢占的控制权限打开 \~english You can seize the authority from the 5 mode, and then open with seized control authority +#define MV_ACCESS_Monitor 7 /// \~chinese 读模式打开设备,适用于控制权限下 \~english Open with read mode and is available under control authority + +/// \~chinese Chunk内容 \~english The content of ChunkData +typedef struct _MV_CHUNK_DATA_CONTENT_ +{ + unsigned char* pChunkData; ///< [OUT] \~chinese Chunk数据 \~english Chunk Data + unsigned int nChunkID; ///< [OUT] \~chinese Chunk ID \~english Chunk ID + unsigned int nChunkLen; ///< [OUT] \~chinese Chunk的长度 \~english Chunk Length + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CHUNK_DATA_CONTENT; + + +/// \~chinese 图像信息 \~english Image information +typedef struct _MV_CC_IMAGE_ +{ + unsigned int nWidth; ///< \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< \~chinese 图像高 \~english Height + enum MvGvspPixelType enPixelType; ///< ~chinese 像素格式 \~english Pixel type + + unsigned char* pImageBuf; ///< \~chinese 图像缓存 \~english Image buffer + uint64_t nImageBufSize; ///< \~chinese 图像缓存大小 \~english Image buffer size + uint64_t nImageLen; ///< \~chinese 图像长度 \~english Image length + + unsigned int nReserved[4]; ///< \~chinese 预留字段 \~english Reserved + +}MV_CC_IMAGE; + +typedef enum _MV_FRAME_EXTRA_INFO_TYPE_ +{ + MV_FRAME_EXTRA_NO_INFO = 0x0000, + MV_FRAME_EXTRA_SUBIMAGES = 0x0001, // 子图 + MV_FRAME_EXTRA_MULTIPARTS = 0x0002, // 多部分 +}MV_FRAME_EXTRA_INFO_TYPE; + +// ZONE方向(自上而下或者自下而上) +typedef enum _MV_GIGE_ZONE_DIRECTION_ +{ + MV_GIGE_PART_ZONE_TOP_DOWN = 0, + MV_GIGE_PART_ZONE_BOTTOM_UP = 1, +} MV_GIGE_ZONE_DIRECTION; + +typedef struct _MV_GIGE_ZONE_INFO_ +{ + MV_GIGE_ZONE_DIRECTION enDirection; // 解析方向 (0: 自上向下,1:自下向上) + union + { + unsigned 
char* pZoneAddr; // 起始地址 + uint64_t nAlign; // 对齐 + } stZone; + uint64_t nLength; // 数据长度 + + unsigned int nReserved[6]; // 保留 +} MV_GIGE_ZONE_INFO; + +typedef union _MV_GIGE_MULRI_PART_DATA_INFO_ +{ + // (data_type ≤ 0x0009) MV_MULTI_PART_DATA_TYPE + struct + { + unsigned int nSizeX; + unsigned int nSizeY; + unsigned int nOffsetX; + unsigned int nOffsetY; + unsigned short nPaddingX; + } stGeneral; + + // (data_type == 0x000B or data_type == 0x000C) MV_MULTI_PART_DATA_TYPE + struct + { + unsigned char nJpegFlag; + unsigned int nTimestampTickFrequencyHigh; + unsigned int nTimestampTickFrequencyLow; + unsigned int nJpegDataFormat; + } stJpeg; + + // 若是自定义类型则保留原始未解析数据,否则清空该字段 + unsigned char pDataTypeSpecific[24]; +} MV_GIGE_PART_DATA_INFO; + +// 枚举类型 +typedef enum _MV_GIGE_MULTI_PART_DATA_TYPE_ +{ + MV_GIGE_DT_2D_IMAGE_1_PLANAR = 0x0001, + MV_GIGE_DT_2D_IMAGE_2_PLANAR = 0x0002, + MV_GIGE_DT_2D_IMAGE_3_PLANAR = 0x0003, + MV_GIGE_DT_2D_IMAGE_4_PLANAR = 0x0004, + MV_GIGE_DT_3D_IMAGE_1_PLANAR = 0x0005, + MV_GIGE_DT_3D_IMAGE_2_PLANAR = 0x0006, + MV_GIGE_DT_3D_IMAGE_3_PLANAR = 0x0007, + MV_GIGE_DT_3D_IMAGE_4_PLANAR = 0x0008, + MV_GIGE_DT_CONFIDENCE_MAP = 0x0009, + MV_GIGE_DT_CHUNK_DATA = 0x000A, + MV_GIGE_DT_JPEG_IMAGE = 0x000B, + MV_GIGE_DT_JPEG2000_IMAGE = 0x000C, +}MV_GIGE_MULTI_PART_DATA_TYPE; + +// MULTI_PART传输方式的缓存图像节点信息 +typedef struct _MV_GIGE_MULTI_PART_INFO_ +{ + MV_GIGE_MULTI_PART_DATA_TYPE enDataType; // 数据类型 MV_MULTI_PART_DATA_TYPE + unsigned int nDataFormat; // 数据格式(例如像素格式) + unsigned int nSourceID; // 图像源ID + unsigned int nRegionID; // 区域ID + unsigned int nDataPurposeID; // 目的ID + unsigned int nZones; // 当前Part所包含的Zone区域数目 + MV_GIGE_ZONE_INFO* pZoneInfo; // Zone信息 + uint64_t nLength; // 数据长度 + unsigned char* pPartAddr; // 当前Part的起始数据地址 + MV_GIGE_PART_DATA_INFO stDataTypeSpecific; // 数据类型携带的特定数据 + + unsigned int nReserved[8]; // 保留 +}MV_GIGE_MULTI_PART_INFO; + +/// \~chinese 输出帧的信息 \~english Output Frame Information +typedef struct _MV_FRAME_OUT_INFO_EX_ +{ + 
unsigned short nWidth; ///< [OUT] \~chinese 图像宽(最大65535,超出请用nExtendWidth) \~english Image Width (over 65535, use nExtendWidth) + unsigned short nHeight; ///< [OUT] \~chinese 图像高(最大65535,超出请用nExtendHeight) \~english Image Height(over 65535, use nExtendHeight) + enum MvGvspPixelType enPixelType; ///< [OUT] \~chinese 像素格式 \~english Pixel Type + + unsigned int nFrameNum; ///< [OUT] \~chinese 帧号 \~english Frame Number + unsigned int nDevTimeStampHigh; ///< [OUT] \~chinese 时间戳高32位 \~english Timestamp high 32 bits + unsigned int nDevTimeStampLow; ///< [OUT] \~chinese 时间戳低32位 \~english Timestamp low 32 bits + unsigned int nReserved0; ///< [OUT] \~chinese 保留,8字节对齐 \~english Reserved, 8-byte aligned + int64_t nHostTimeStamp; ///< [OUT] \~chinese 主机生成的时间戳 \~english Host-generated timestamp + + unsigned int nFrameLen; ///< [OUT] \~chinese 帧的长度(4GB以上图像使用nFrameLenEx替代) \~english The Length of Frame + + /// \~chinese 设备水印时标 \~english Device frame-specific time scale + unsigned int nSecondCount; ///< [OUT] \~chinese 秒数 \~english The Seconds + unsigned int nCycleCount; ///< [OUT] \~chinese 周期数 \~english The Count of Cycle + unsigned int nCycleOffset; ///< [OUT] \~chinese 周期偏移量 \~english The Offset of Cycle + + float fGain; ///< [OUT] \~chinese 增益 \~english Gain + float fExposureTime; ///< [OUT] \~chinese 曝光时间 \~english Exposure Time + unsigned int nAverageBrightness; ///< [OUT] \~chinese 平均亮度 \~english Average brightness + + /// \~chinese 白平衡相关 \~english White balance + unsigned int nRed; ///< [OUT] \~chinese 红色 \~english Red + unsigned int nGreen; ///< [OUT] \~chinese 绿色 \~english Green + unsigned int nBlue; ///< [OUT] \~chinese 蓝色 \~english Blue + + unsigned int nFrameCounter; ///< [OUT] \~chinese 总帧数 \~english Frame Counter + unsigned int nTriggerIndex; ///< [OUT] \~chinese 触发计数 \~english Trigger Counting + + unsigned int nInput; ///< [OUT] \~chinese 输入 \~english Input + unsigned int nOutput; ///< [OUT] \~chinese 输出 \~english Output + + /// \~chinese ROI区域 \~english ROI Region + 
unsigned short nOffsetX; ///< [OUT] \~chinese 水平偏移量 \~english OffsetX + unsigned short nOffsetY; ///< [OUT] \~chinese 垂直偏移量 \~english OffsetY + unsigned short nChunkWidth; ///< [OUT] \~chinese Chunk宽 \~english The Width of Chunk + unsigned short nChunkHeight; ///< [OUT] \~chinese Chunk高 \~english The Height of Chunk + + unsigned int nLostPacket; ///< [OUT] \~chinese 本帧丢包数 \~english Lost Packet Number In This Frame + + unsigned int nUnparsedChunkNum; ///< [OUT] \~chinese 未解析的Chunkdata个数 \~english Unparsed Chunk Number + union + { + MV_CHUNK_DATA_CONTENT* pUnparsedChunkContent; ///< [OUT] \~chinese 未解析的Chunk \~english Unparsed Chunk Content + int64_t nAligning; ///< [OUT] \~chinese 校准 \~english Aligning + }UnparsedChunkList; + + unsigned int nExtendWidth; ///< [OUT] \~chinese 图像宽(扩展变量) \~english Image Width + unsigned int nExtendHeight; ///< [OUT] \~chinese 图像高(扩展变量) \~english Image Height + + uint64_t nFrameLenEx; ///< [OUT] \~chinese 帧的长度 \~english The Length of Frame + + unsigned int nExtraType; ///< [OUT] \~chinese 判断携带的额外信息的类型:子图(SubImageList)还是多图(MultiPartArray) MV_FRAME_EXTRA_INFO_TYPE类型 + + unsigned int nSubImageNum; ///< [OUT] \~chinese 图像缓存中的子图(多图)个数 \~english Sub Image(MulitiPart) Number + + union + { + MV_CC_IMAGE* pstSubImage; ///< [OUT] \~chinese 子图信息 \~english Sub image info + MV_GIGE_MULTI_PART_INFO* pstPartInfo; ///< [OUT] \~chinese 图像部分信息 \~english Image Parts Information + int64_t nAligning; ///< [OUT] \~chinese 校准 \~english Aligning + } SubImageList; + + union + { + void* pUser; ///< [OUT] \~chinese 自定义指针(外部注册缓存时,内存地址对应的用户自定义指针) \~english Custom pointer (user-defined pointer corresponding to memory address when registering external cache) + int64_t nAligning; ///< [OUT] \~chinese 校准 \~english Aligning + }UserPtr; + + unsigned int nFirstLineEncoderCount; ///< [OUT] \~chinese 首行编码器计数 \~english First line encoder count + unsigned int nLastLineEncoderCount; ///< [OUT] \~chinese 尾行编码器计数 \~english Last line encoder count + + unsigned int nReserved[24]; 
///< \~chinese 预留 \~english Reserved + +}MV_FRAME_OUT_INFO_EX; + +/// \~chinese 图像结构体,输出图像地址及图像信息 \~english Image Struct, output the pointer of Image and the information of the specific image +typedef struct _MV_FRAME_OUT_ +{ + unsigned char* pBufAddr; ///< [OUT] \~chinese 图像指针地址 \~english pointer of image + MV_FRAME_OUT_INFO_EX stFrameInfo; ///< [OUT] \~chinese 图像信息 \~english information of the specific image + + unsigned int nRes[16]; ///< \~chinese 预留 \~english Reserved + +}MV_FRAME_OUT; + +/// \~chinese 取流策略 \~english The strategy of Grabbing +typedef enum _MV_GRAB_STRATEGY_ +{ + MV_GrabStrategy_OneByOne = 0, ///< \~chinese 从旧到新一帧一帧的获取图像 \~english Grab One By One + MV_GrabStrategy_LatestImagesOnly = 1, ///< \~chinese 获取列表中最新的一帧图像 \~english Grab The Latest Image + MV_GrabStrategy_LatestImages = 2, ///< \~chinese 获取列表中最新的图像 \~english Grab The Latest Images + MV_GrabStrategy_UpcomingImage = 3, ///< \~chinese 等待下一帧图像 \~english Grab The Upcoming Image + +}MV_GRAB_STRATEGY; + +/// \~chinese 网络传输的相关信息 \~english Network transmission information +typedef struct _MV_NETTRANS_INFO_ +{ + int64_t nReceiveDataSize; ///< [OUT] \~chinese 已接收数据大小[Start和Stop之间] \~english Received Data Size + int nThrowFrameCount; ///< [OUT] \~chinese 丢帧数量 \~english Throw frame number + unsigned int nNetRecvFrameCount; ///< [OUT] \~chinese 已接收的帧数 \~english Received Frame Count + int64_t nRequestResendPacketCount; ///< [OUT] \~chinese 请求重发包数 \~english Request Resend Packet Count + int64_t nResendPacketCount; ///< [OUT] \~chinese 重发包数 \~english Resend Packet Count + +}MV_NETTRANS_INFO; + +/// \~chinese 信息类型 \~english Information Type +#define MV_MATCH_TYPE_NET_DETECT 0x00000001 ///< \~chinese 网络流量和丢包信息 \~english Network traffic and packet loss information +#define MV_MATCH_TYPE_USB_DETECT 0x00000002 ///< \~chinese host接收到来自U3V设备的字节总数 \~english The total number of bytes host received from U3V device + +/// \~chinese 全匹配的一种信息结构体 \~english A fully matched information structure +typedef struct 
_MV_ALL_MATCH_INFO_ +{ + unsigned int nType; ///< [IN] \~chinese 需要输出的信息类型,e.g. MV_MATCH_TYPE_NET_DETECT、MV_MATCH_TYPE_USB_DETECT \~english Information type need to output ,e.g. MV_MATCH_TYPE_NET_DETECT、MV_MATCH_TYPE_USB_DETECT + void* pInfo; ///< [OUT] \~chinese 输出的信息缓存,由调用者分配 \~english Output information cache, which is allocated by the caller + unsigned int nInfoSize; ///< [IN] \~chinese 信息缓存的大小 \~english Information cache size + +}MV_ALL_MATCH_INFO; + +/// \~chinese 网络流量和丢包信息反馈结构体,对应类型为 MV_MATCH_TYPE_NET_DETECT \~english Network traffic and packet loss feedback structure, the corresponding type is MV_MATCH_TYPE_NET_DETECT +typedef struct _MV_MATCH_INFO_NET_DETECT_ +{ + int64_t nReceiveDataSize; ///< [OUT] \~chinese 已接收数据大小[Start和Stop之间] \~english Received data size + int64_t nLostPacketCount; ///< [OUT] \~chinese 丢失的包数量 \~english Number of packets lost + unsigned int nLostFrameCount; ///< [OUT] \~chinese 丢帧数量 \~english Number of frames lost + unsigned int nNetRecvFrameCount; ///< [OUT] \~chinese 接收到的图像帧数 \~english Received Frame Count + int64_t nRequestResendPacketCount; ///< [OUT] \~chinese 请求重发包数 \~english Request Resend Packet Count + int64_t nResendPacketCount; ///< [OUT] \~chinese 重发包数 \~english Resend Packet Count + +}MV_MATCH_INFO_NET_DETECT; + +/// \~chinese host收到从u3v设备端的总字节数,对应类型为 MV_MATCH_TYPE_USB_DETECT \~english The total number of bytes host received from the u3v device side, the corresponding type is MV_MATCH_TYPE_USB_DETECT +typedef struct _MV_MATCH_INFO_USB_DETECT_ +{ + int64_t nReceiveDataSize; ///< [OUT] \~chinese 已接收数据大小 [Open和Close之间] \~english Received data size + unsigned int nReceivedFrameCount; ///< [OUT] \~chinese 已收到的帧数 \~english Number of frames received + unsigned int nErrorFrameCount; ///< [OUT] \~chinese 错误帧数 \~english Number of error frames + + unsigned int nReserved[2]; ///< \~chinese 保留 \~english Reserved + +}MV_MATCH_INFO_USB_DETECT; + +/// \~chinese 显示帧信息 \~english Display frame information +typedef struct 
_MV_DISPLAY_FRAME_INFO_EX_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< [IN] \~chinese 图像高 \~english Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel type + + unsigned char* pImageBuf; ///< [IN] \~chinese 输入图像缓存 \~english Input image buffer + unsigned int nImageBufLen; ///< [IN] \~chinese 输入图像长度 \~english Input image length + + unsigned int enRenderMode; /// [IN] \~chinese 图像渲染方式 Windows:0-GDI(默认), 1-D3D, 2-OPENGL Linux: 0-OPENGL(默认) \~english Windows:0-GDI(default), 1-D3D, 2-OPENGL Linux: 0-OPENGL(default) + unsigned int nRes[3]; ///< \~chinese 保留 \~english Reserved + +}MV_DISPLAY_FRAME_INFO_EX; + + +/// \~chinese 保存图片格式 \~english Save image type +enum MV_SAVE_IAMGE_TYPE +{ + MV_Image_Undefined = 0, ///< \~chinese 未定义的图像格式 \~english Undefined Image Type + MV_Image_Bmp = 1, ///< \~chinese BMP图像格式 \~english BMP Image Type + MV_Image_Jpeg = 2, ///< \~chinese JPEG图像格式 \~english Jpeg Image Type + MV_Image_Png = 3, ///< \~chinese PNG图像格式 \~english Png Image Type + MV_Image_Tif = 4, ///< \~chinese TIFF图像格式 \~english TIFF Image Type + +}; + +/// \~chinese 图片保存参数 \~english Save Image Parameters +typedef struct _MV_SAVE_IMAGE_PARAM_EX3_ +{ + unsigned char* pData; ///< [IN] \~chinese 输入数据缓存 \~english Input Data Buffer + unsigned int nDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input Data length + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 输入数据的像素格式 \~english Input Data Pixel Format + unsigned int nWidth; ///< [IN] \~chinese 图像宽 \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高 \~english Image Height + + unsigned char* pImageBuffer; ///< [OUT] \~chinese 输出图片缓存 \~english Output Image Buffer + unsigned int nImageLen; ///< [OUT] \~chinese 输出图片长度 \~english Output Image length + unsigned int nBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Output buffer size provided + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] \~chinese 输出图片格式 \~english Output Image 
Format + unsigned int nJpgQuality; ///< [IN] \~chinese JPG编码质量(50-99],其它格式无效 \~english Encoding quality(50-99],Other formats are invalid + + + unsigned int iMethodValue; ///< [IN] \~chinese 插值方法 0-快速 1-均衡(其它值默认为均衡) 2-最优 3-最优+ , RBGG/BRGG/GGRB/GGBR相关像素格式不支持0和3 \~english Bayer interpolation method 0-Fast 1-Equilibrium 2-Optimal 3-Optimal+, Pixels in RBGG/BRGG/GGRB/GGBR formats do not support 0 and 3. + + unsigned int nReserved[3]; ///< \~chinese 预留 \~english Reserved + +}MV_SAVE_IMAGE_PARAM_EX3; + + +// 保存图片到文件参数 +typedef struct _MV_SAVE_IMAGE_TO_FILE_PARAM_EX_ +{ + unsigned int nWidth; ///< [IN] 图像宽 + unsigned int nHeight; ///< [IN] 图像高 + enum MvGvspPixelType enPixelType; ///< [IN] 输入数据的像素格式 + unsigned char* pData; ///< [IN] 输入数据缓存 + unsigned int nDataLen; ///< [IN] 输入数据大小 + + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] 输入图片格式 + char* pcImagePath; ///< [IN] 输入文件路径, Windows平台路径长度不超过260字节,Linux平台不超过255字节 + + unsigned int nQuality; ///< [IN] JPG编码质量(50-99],其它格式无效 + int iMethodValue; ///< [IN] 插值方法 0-快速 1-均衡(其它值默认为均衡) 2-最优 3-最优+, RBGG/BRGG/GGRB/GGBR相关像素格式不支持0和3 + unsigned int nReserved[8]; + +}MV_SAVE_IMAGE_TO_FILE_PARAM_EX; + +// 保存图片所需参数 +typedef struct _MV_CC_SAVE_IMAGE_PARAM_ +{ + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] 输入图片格式 + unsigned int nQuality; ///< [IN] JPG编码质量(50-99],其它格式无效 + int iMethodValue; ///< [IN] 插值方法 0-快速 1-均衡(其它值默认为均衡) 2-最优 3-最优+, RBGG/BRGG/GGRB/GGBR相关像素格式不支持0和3 + + unsigned int nReserved[8]; + +}MV_CC_SAVE_IMAGE_PARAM; + +/// \~chinese 旋转角度 \~english Rotation angle +typedef enum _MV_IMG_ROTATION_ANGLE_ +{ + MV_IMAGE_ROTATE_90 = 1, + MV_IMAGE_ROTATE_180 = 2, + MV_IMAGE_ROTATE_270 = 3, + +}MV_IMG_ROTATION_ANGLE; + +/// \~chinese 图像旋转结构体 \~english Rotate image structure +typedef struct _MV_CC_ROTATE_IMAGE_PARAM_T_ +{ + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned int nWidth; ///< [IN][OUT] \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< [IN][OUT] \~chinese 图像高 \~english Height + + 
unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + + MV_IMG_ROTATION_ANGLE enRotationAngle; ///< [IN] \~chinese 旋转角度 \~english Rotation angle + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_ROTATE_IMAGE_PARAM; + +/// \~chinese 翻转类型 \~english Flip type +typedef enum _MV_IMG_FLIP_TYPE_ +{ + MV_FLIP_VERTICAL = 1, + MV_FLIP_HORIZONTAL = 2, + +}MV_IMG_FLIP_TYPE; + +/// \~chinese 图像翻转结构体 \~english Flip image structure +typedef struct _MV_CC_FLIP_IMAGE_PARAM_T_ +{ + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned int nWidth; ///< [IN] \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< [IN] \~chinese 图像高 \~english Height + + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + + MV_IMG_FLIP_TYPE enFlipType; ///< [IN] \~chinese 翻转类型 \~english Flip type + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_FLIP_IMAGE_PARAM; + + +/// \~chinese 像素转换结构体 \~english Pixel convert structure +typedef struct _MV_CC_PIXEL_CONVERT_PARAM_EX_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< [IN] \~chinese 图像高 \~english Height + + enum MvGvspPixelType enSrcPixelType; ///< [IN] \~chinese 源像素格式 \~english 
Source pixel format + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + enum MvGvspPixelType enDstPixelType; ///< [IN] \~chinese 目标像素格式 \~english Destination pixel format + unsigned char* pDstBuffer; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + unsigned int nDstBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + + unsigned int nRes[4]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_PIXEL_CONVERT_PARAM_EX; + + + + + +/// \~chinese Gamma类型 \~english Gamma type +typedef enum _MV_CC_GAMMA_TYPE_ +{ + MV_CC_GAMMA_TYPE_NONE = 0, ///< \~chinese 不启用 \~english Disable + MV_CC_GAMMA_TYPE_VALUE = 1, ///< \~chinese Gamma值 \~english Gamma value + MV_CC_GAMMA_TYPE_USER_CURVE = 2, ///< \~chinese Gamma曲线 \~english Gamma curve + ///< \~chinese 8位,长度:256*sizeof(unsigned char) \~english 8bit,length:256*sizeof(unsigned char) + ///< \~chinese 10位,长度:1024*sizeof(unsigned short) \~english 10bit,length:1024*sizeof(unsigned short) + ///< \~chinese 12位,长度:4096*sizeof(unsigned short) \~english 12bit,length:4096*sizeof(unsigned short) + ///< \~chinese 16位,长度:65536*sizeof(unsigned short) \~english 16bit,length:65536*sizeof(unsigned short) + MV_CC_GAMMA_TYPE_LRGB2SRGB = 3, ///< \~chinese linear RGB to sRGB \~english linear RGB to sRGB + MV_CC_GAMMA_TYPE_SRGB2LRGB = 4, ///< \~chinese sRGB to linear RGB(仅色彩插值时支持,色彩校正时无效) \~english sRGB to linear RGB + +}MV_CC_GAMMA_TYPE; + +// Gamma信息 +/// \~chinese Gamma信息结构体 \~english Gamma info structure +typedef struct _MV_CC_GAMMA_PARAM_T_ +{ + MV_CC_GAMMA_TYPE enGammaType; ///< [IN] \~chinese Gamma类型 \~english Gamma type + float fGammaValue; ///< [IN] \~chinese Gamma值:0.1 ~ 4.0 \~english Gamma value:0.1 ~ 4.0 + unsigned char* pGammaCurveBuf; ///< [IN] \~chinese Gamma曲线缓存 \~english Gamma curve buffer + unsigned int 
nGammaCurveBufLen; ///< [IN] \~chinese Gamma曲线缓存长度 \~english Gamma curve buffer size + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_GAMMA_PARAM; + +/// \~chinese CCM参数 \~english CCM param +typedef struct _MV_CC_CCM_PARAM_T_ +{ + bool bCCMEnable; ///< [IN] \~chinese 是否启用CCM \~english CCM enable + int nCCMat[9]; ///< [IN] \~chinese CCM矩阵[-8192~8192] \~english Color correction matrix[-8192~8192] + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_CCM_PARAM; + +/// \~chinese CCM参数 \~english CCM param +typedef struct _MV_CC_CCM_PARAM_EX_T_ +{ + bool bCCMEnable; ///< [IN] \~chinese 是否启用CCM \~english CCM enable + int nCCMat[9]; ///< [IN] \~chinese CCM矩阵[-65536~65536] \~english Color correction matrix[-65536~65536] + unsigned int nCCMScale; ///< [IN] \~chinese 量化系数(2的整数幂,最大65536) \~english Quantitative scale(Integer power of 2, <= 65536) + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_CCM_PARAM_EX; + +/// \~chinese 对比度调节结构体 \~english Contrast structure +typedef struct _MV_CC_CONTRAST_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度(最小8) \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度(最小8) \~english Image Height + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + + unsigned int nContrastFactor; ///< [IN] \~chinese 对比度值,[1,10000] \~english Contrast factor,[1,10000] + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_CONTRAST_PARAM; + +/// \~chinese 水印信息 \~english Frame-specific information 
+typedef struct _MV_CC_FRAME_SPEC_INFO_ +{ + /// \~chinese 设备水印时标 \~english Device frame-specific time scale + unsigned int nSecondCount; ///< [OUT] \~chinese 秒数 \~english The Seconds + unsigned int nCycleCount; ///< [OUT] \~chinese 周期数 \~english The Count of Cycle + unsigned int nCycleOffset; ///< [OUT] \~chinese 周期偏移量 \~english The Offset of Cycle + + float fGain; ///< [OUT] \~chinese 增益 \~english Gain + float fExposureTime; ///< [OUT] \~chinese 曝光时间 \~english Exposure Time + unsigned int nAverageBrightness; ///< [OUT] \~chinese 平均亮度 \~english Average brightness + + /// \~chinese 白平衡相关 \~english White balance + unsigned int nRed; ///< [OUT] \~chinese 红色 \~english Red + unsigned int nGreen; ///< [OUT] \~chinese 绿色 \~english Green + unsigned int nBlue; ///< [OUT] \~chinese 蓝色 \~english Blue + + unsigned int nFrameCounter; ///< [OUT] \~chinese 总帧数 \~english Frame Counter + unsigned int nTriggerIndex; ///< [OUT] \~chinese 触发计数 \~english Trigger Counting + + unsigned int nInput; ///< [OUT] \~chinese 输入 \~english Input + unsigned int nOutput; ///< [OUT] \~chinese 输出 \~english Output + + /// \~chinese ROI区域 \~english ROI Region + unsigned short nOffsetX; ///< [OUT] \~chinese 水平偏移量 \~english OffsetX + unsigned short nOffsetY; ///< [OUT] \~chinese 垂直偏移量 \~english OffsetY + unsigned short nFrameWidth; ///< [OUT] \~chinese 水印宽 \~english The Width of Chunk + unsigned short nFrameHeight; ///< [OUT] \~chinese 水印高 \~english The Height of Chunk + + unsigned int nReserved[16]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_FRAME_SPEC_INFO; + +/// \~chinese 去紫边结构体 \~english PurpleFringing structure +typedef struct _MV_CC_PURPLE_FRINGING_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度(最小4) \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度(最小4) \~english Image Height + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + enum MvGvspPixelType 
enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + + unsigned int nKernelSize; ///< [IN] \~chinese 滤波核尺寸,仅支持3,5,7,9 \~english Filter Kernel Size, only supports 3,5,7,9 + unsigned int nEdgeThreshold; ///< [IN] \~chinese 边缘阈值[0,2040] \~english EdgeThreshold + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_PURPLE_FRINGING_PARAM; + +/// \~chinese ISP配置结构体 \~english ISP configuration structure +typedef struct _MV_CC_ISP_CONFIG_PARAM_T_ +{ + char* pcConfigPath; ///< [IN] \~chinese 配置文件路径(路径修改后会重新创建算法句柄) \~english Config file path + + unsigned int nRes[16]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_ISP_CONFIG_PARAM; + +/// \~chinese 无损解码参数 \~english High Bandwidth decode structure +typedef struct _MV_CC_HB_DECODE_PARAM_T_ +{ + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcLen; ///< [IN] \~chinese 输入数据大小 \~english Input data size + + unsigned int nWidth; ///< [OUT] \~chinese 图像宽 \~english Width + unsigned int nHeight; ///< [OUT] \~chinese 图像高 \~english Height + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据大小 \~english Output data size + enum MvGvspPixelType enDstPixelType; ///< [OUT] \~chinese 输出的像素格式 \~english Output pixel format + + MV_CC_FRAME_SPEC_INFO stFrameSpecInfo; ///< [OUT] \~chinese 水印信息 \~english Frame Spec Info + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_HB_DECODE_PARAM; + +/// \~chinese 录像格式定义 \~english Record Format Type +typedef enum _MV_RECORD_FORMAT_TYPE_ +{ + MV_FormatType_Undefined = 0, ///< 
\~chinese 未定义的录像格式 \~english Undefined Recode Format Type + MV_FormatType_AVI = 1, ///< \~chinese AVI录像格式 \~english AVI Recode Format Type + +}MV_RECORD_FORMAT_TYPE; + +/// \~chinese 录像参数 \~english Record Parameters +typedef struct _MV_CC_RECORD_PARAM_T_ +{ + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 输入数据的像素格式 \~english Pixel Type + + unsigned short nWidth; ///< [IN] \~chinese 图像宽(2的倍数) \~english Width + unsigned short nHeight; ///< [IN] \~chinese 图像高(2的倍数) \~english Height + + float fFrameRate; ///< [IN] \~chinese 帧率fps [1/16 -1000] \~english The Rate of Frame [1/16 -1000] + unsigned int nBitRate; ///< [IN] \~chinese 码率kbps [128-16*1024] \~english The Rate of Bitrate [128-16*1024] + + MV_RECORD_FORMAT_TYPE enRecordFmtType; ///< [IN] \~chinese 录像格式 \~english Recode Format Type + + char* strFilePath; ///< [IN] \~chinese 录像文件存放路径(如果路径中存在中文,需转成utf-8) \~english File Path + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_RECORD_PARAM; + +/// \~chinese 传入的图像数据 \~english Input Data +typedef struct _MV_CC_INPUT_FRAME_INFO_T_ +{ + unsigned char* pData; ///< [IN] \~chinese 图像数据指针 \~english Record Data + unsigned int nDataLen; ///< [IN] \~chinese 图像大小 \~english The Length of Record Data + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_INPUT_FRAME_INFO; + +/// \~chinese 采集模式 \~english Acquisition mode +typedef enum _MV_CAM_ACQUISITION_MODE_ +{ + MV_ACQ_MODE_SINGLE = 0, ///< \~chinese 单帧模式 \~english Single Mode + MV_ACQ_MODE_MUTLI = 1, ///< \~chinese 多帧模式 \~english Multi Mode + MV_ACQ_MODE_CONTINUOUS = 2, ///< \~chinese 持续采集模式 \~english Continuous Mode + +}MV_CAM_ACQUISITION_MODE; + +/// \~chinese 增益模式 \~english Gain Mode +typedef enum _MV_CAM_GAIN_MODE_ +{ + MV_GAIN_MODE_OFF = 0, ///< \~chinese 关闭 \~english Single Mode + MV_GAIN_MODE_ONCE = 1, ///< \~chinese 一次 \~english Multi Mode + MV_GAIN_MODE_CONTINUOUS = 2, ///< \~chinese 连续 \~english Continuous Mode + +}MV_CAM_GAIN_MODE; + +/// \~chinese 曝光模式 \~english Exposure 
Mode +typedef enum _MV_CAM_EXPOSURE_MODE_ +{ + MV_EXPOSURE_MODE_TIMED = 0, ///< \~chinese 时间 \~english Timed + MV_EXPOSURE_MODE_TRIGGER_WIDTH = 1, ///< \~chinese 触发脉冲宽度 \~english TriggerWidth +}MV_CAM_EXPOSURE_MODE; + +/// \~chinese 自动曝光模式 \~english Auto Exposure Mode +typedef enum _MV_CAM_EXPOSURE_AUTO_MODE_ +{ + MV_EXPOSURE_AUTO_MODE_OFF = 0, ///< \~chinese 关闭 \~english Off + MV_EXPOSURE_AUTO_MODE_ONCE = 1, ///< \~chinese 一次 \~english Once + MV_EXPOSURE_AUTO_MODE_CONTINUOUS = 2, ///< \~chinese 连续 \~english Continuous + +}MV_CAM_EXPOSURE_AUTO_MODE; + +/// \~chinese 触发模式 \~english Trigger Mode +typedef enum _MV_CAM_TRIGGER_MODE_ +{ + MV_TRIGGER_MODE_OFF = 0, ///< \~chinese 关闭 \~english Off + MV_TRIGGER_MODE_ON = 1, ///< \~chinese 打开 \~english ON + +}MV_CAM_TRIGGER_MODE; + +/// \~chinese Gamma选择器 \~english Gamma Selector +typedef enum _MV_CAM_GAMMA_SELECTOR_ +{ + MV_GAMMA_SELECTOR_USER = 1, ///< \~chinese 用户 \~english Gamma Selector User + MV_GAMMA_SELECTOR_SRGB = 2, ///< \~chinese sRGB \~english Gamma Selector sRGB + +}MV_CAM_GAMMA_SELECTOR; + +/// \~chinese 白平衡 \~english White Balance +typedef enum _MV_CAM_BALANCEWHITE_AUTO_ +{ + MV_BALANCEWHITE_AUTO_OFF = 0, ///< \~chinese 关闭 \~english Off + MV_BALANCEWHITE_AUTO_ONCE = 2, ///< \~chinese 一次 \~english Once + MV_BALANCEWHITE_AUTO_CONTINUOUS = 1, ///< \~chinese 连续 \~english Continuous + +}MV_CAM_BALANCEWHITE_AUTO; + +/// \~chinese 触发源 \~english Trigger Source +typedef enum _MV_CAM_TRIGGER_SOURCE_ +{ + MV_TRIGGER_SOURCE_LINE0 = 0, ///< \~chinese Line0 \~english Line0 + MV_TRIGGER_SOURCE_LINE1 = 1, ///< \~chinese Line1 \~english Line1 + MV_TRIGGER_SOURCE_LINE2 = 2, ///< \~chinese Line2 \~english Line2 + MV_TRIGGER_SOURCE_LINE3 = 3, ///< \~chinese Line3 \~english Line3 + MV_TRIGGER_SOURCE_COUNTER0 = 4, ///< \~chinese Conuter0 \~english Conuter0 + + MV_TRIGGER_SOURCE_SOFTWARE = 7, ///< \~chinese 软触发 \~english Software + MV_TRIGGER_SOURCE_FrequencyConverter= 8, ///< \~chinese 变频器 \~english Frequency Converter + 
+}MV_CAM_TRIGGER_SOURCE; + +/// \~chinese GigEVision IP配置 \~english GigEVision IP Configuration +#define MV_IP_CFG_STATIC 0x05000000 ///< \~chinese 静态 \~english Static +#define MV_IP_CFG_DHCP 0x06000000 ///< \~chinese DHCP \~english DHCP +#define MV_IP_CFG_LLA 0x04000000 ///< \~chinese LLA \~english LLA + +/// \~chinese GigEVision网络传输模式 \~english GigEVision Net Transfer Mode +#define MV_NET_TRANS_DRIVER 0x00000001 ///< \~chinese 驱动 \~english Driver +#define MV_NET_TRANS_SOCKET 0x00000002 ///< \~chinese Socket \~english Socket + +/// \~chinese CameraLink波特率 \~english CameraLink Baud Rates (CLUINT32) +#define MV_CAML_BAUDRATE_9600 0x00000001 ///< \~chinese 9600 \~english 9600 +#define MV_CAML_BAUDRATE_19200 0x00000002 ///< \~chinese 19200 \~english 19200 +#define MV_CAML_BAUDRATE_38400 0x00000004 ///< \~chinese 38400 \~english 38400 +#define MV_CAML_BAUDRATE_57600 0x00000008 ///< \~chinese 57600 \~english 57600 +#define MV_CAML_BAUDRATE_115200 0x00000010 ///< \~chinese 115200 \~english 115200 +#define MV_CAML_BAUDRATE_230400 0x00000020 ///< \~chinese 230400 \~english 230400 +#define MV_CAML_BAUDRATE_460800 0x00000040 ///< \~chinese 460800 \~english 460800 +#define MV_CAML_BAUDRATE_921600 0x00000080 ///< \~chinese 921600 \~english 921600 +#define MV_CAML_BAUDRATE_AUTOMAX 0x40000000 ///< \~chinese 最大值 \~english Auto Max + +/// \~chinese 异常消息类型 \~english Exception message type +#define MV_EXCEPTION_DEV_DISCONNECT 0x00008001 ///< \~chinese 设备断开连接 \~english The device is disconnected +#define MV_EXCEPTION_VERSION_CHECK 0x00008002 ///< \~chinese SDK与驱动版本不匹配 \~english SDK does not match the driver version + +/// \~chinese 流异常类型 +typedef enum _MV_CC_STREAM_EXCEPTION_TYPE_ +{ + MV_CC_STREAM_EXCEPTION_ABNORMAL_IMAGE = 0x4001, ///< \~chinese 图像异常(图像长度不正确、数据包内容解析异常和校验失败等),丢弃该帧(可能原因:链路传输异常和设备发包异常等) + MV_CC_STREAM_EXCEPTION_LIST_OVERFLOW = 0x4002, ///< \~chinese 缓存列表已满(没有及时取走图像),采集卡下相机不支持, 外部注册缓存时, 单USB口相机不支持 + MV_CC_STREAM_EXCEPTION_LIST_EMPTY = 0x4003, ///< \~chinese 
缓存列表为空(取走图像后未及时将图像缓存归还) + MV_CC_STREAM_EXCEPTION_RECONNECTION = 0x4004, ///< \~chinese 触发一次断流恢复(仅U3V支持) + MV_CC_STREAM_EXCEPTION_DISCONNECTED = 0x4005, ///< \~chinese 断流恢复失败,取流被中止(仅U3V支持) + MV_CC_STREAM_EXCEPTION_DEVICE = 0x4006, ///< \~chinese 设备异常,取流被中止(仅U3V支持) + MV_CC_STREAM_EXCEPTION_PARTIAL_IMAGE = 0x4007, ///< \~chinese 行高不足,丢弃残帧(线阵相机或者采集卡配置了残帧丢弃模式,出图行高不足时被SDK丢弃) + MV_CC_STREAM_EXCEPTION_IMAGE_BUFFER_OVERFLOW = 0x4008, ///< \~chinese 设备发送的图像数据大小超过了图像缓冲区容量(该帧丢弃) +}MV_CC_STREAM_EXCEPTION_TYPE; + +/// \~chinese 流异常回调信息 \~english Stream exception callback infomation +typedef struct _MV_CC_STREAM_EXCEPTION_INFO_T_ +{ + char chSerialNumber[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 设备序列号 \~english Device serial number + unsigned int nStreamIndex; ///< [OUT] \~chinese 流通道序号 \~english Stream index + MV_CC_STREAM_EXCEPTION_TYPE enExceptionType; ///< [OUT] \~chinese 流异常类型 \~english Exception type + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_STREAM_EXCEPTION_INFO; + +///< \~chinese 设备Event事件名称最大长度 \~english Max length of event name +#define MAX_EVENT_NAME_SIZE 128 + +/// \~chinese Event事件回调信息\ \~english Event callback infomation +typedef struct _MV_EVENT_OUT_INFO_ +{ + char EventName[MAX_EVENT_NAME_SIZE]; ///< [OUT] \~chinese Event名称 \~english Event name + + unsigned short nEventID; ///< [OUT] \~chinese Event号 \~english Event ID + unsigned short nStreamChannel; ///< [OUT] \~chinese 流通道序号 \~english Circulation number + + unsigned int nBlockIdHigh; ///< [OUT] \~chinese 帧号高位 (暂无固件支持) \~english BlockId high, not support + unsigned int nBlockIdLow; ///< [OUT] \~chinese 帧号低位 (暂无固件支持) \~english BlockId low, not support + + unsigned int nTimestampHigh; ///< [OUT] \~chinese 时间戳高位 \~english Timestramp high + unsigned int nTimestampLow; ///< [OUT] \~chinese 时间戳低位 \~english Timestramp low + + void* pEventData; ///< [OUT] \~chinese Event数据 (暂无固件支持) \~english Event data, not support + unsigned int nEventDataSize; ///< [OUT] \~chinese Event数据长度 
(暂无固件支持) \~english Event data len, not support + + unsigned int nReserved[16]; ///< \~chinese 预留 \~english Reserved + +}MV_EVENT_OUT_INFO; + +/// \~chinese 文件存取 \~english File Access +typedef struct _MV_CC_FILE_ACCESS_T +{ + const char* pUserFileName; ///< [IN] \~chinese 用户文件名 \~english User file name + const char* pDevFileName; ///< [IN] \~chinese 设备文件名 \~english Device file name + + unsigned int nReserved[32]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_FILE_ACCESS; + +/// \~chinese 文件存取 \~english File Access +typedef struct _MV_CC_FILE_ACCESS_E +{ + char* pUserFileBuf; ///< [IN] \~chinese 用户数据缓存 \~english User data Buffer + unsigned int pFileBufSize; ///< [IN] \~chinese 用户数据缓存大小 \~english data buffer size + unsigned int pFileBufLen; ///< [OUT][IN] \~chinese 文件数据缓存总长度(读取时输出文件数据总长度,写入时输入文件数据总长度) \~english data buffer len + const char* pDevFileName; ///< [IN] \~chinese 设备文件名 \~english Device file name + + unsigned int nReserved[32]; ///< \~chinese 预留 \~english Reserved +}MV_CC_FILE_ACCESS_EX; + +/// \~chinese 文件存取进度 \~english File Access Progress +typedef struct _MV_CC_FILE_ACCESS_PROGRESS_T +{ + int64_t nCompleted; ///< [OUT] \~chinese 已完成的长度 \~english Completed Length + int64_t nTotal; ///< [OUT] \~chinese 总长度 \~english Total Length + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_FILE_ACCESS_PROGRESS; + +/// \~chinese Gige的传输类型 \~english The transmission type of Gige +typedef enum _MV_GIGE_TRANSMISSION_TYPE_ +{ + MV_GIGE_TRANSTYPE_UNICAST = 0x0, ///< \~chinese 表示单播(默认) \~english Unicast mode + MV_GIGE_TRANSTYPE_MULTICAST = 0x1, ///< \~chinese 表示组播(组播IP范围[224.*.*.*-239.*.*.*]) \~english Multicast mode + MV_GIGE_TRANSTYPE_LIMITEDBROADCAST = 0x2, ///< \~chinese 表示局域网内广播,暂不支持 \~english Limited broadcast mode,not support + MV_GIGE_TRANSTYPE_SUBNETBROADCAST = 0x3, ///< \~chinese 表示子网内广播,暂不支持 \~english Subnet broadcast mode,not support + MV_GIGE_TRANSTYPE_CAMERADEFINED = 0x4, ///< \~chinese 表示从设备获取,暂不支持 \~english Transtype from 
camera,not support + MV_GIGE_TRANSTYPE_UNICAST_DEFINED_PORT = 0x5, ///< \~chinese 表示用户自定义应用端接收图像数据Port号 \~english User Defined Receive Data Port + MV_GIGE_TRANSTYPE_UNICAST_WITHOUT_RECV = 0x00010000, ///< \~chinese 表示设置了单播,但本实例不接收图像数据 \~english Unicast without receive data + MV_GIGE_TRANSTYPE_MULTICAST_WITHOUT_RECV= 0x00010001, ///< \~chinese 表示组播模式,但本实例不接收图像数据 \~english Multicast without receive data + +}MV_GIGE_TRANSMISSION_TYPE; + +/// \~chinese 网络传输模式 \~english Transmission type +typedef struct _MV_TRANSMISSION_TYPE_T +{ + MV_GIGE_TRANSMISSION_TYPE enTransmissionType; ///< [IN] \~chinese 传输模式 \~english Transmission type + unsigned int nDestIp; ///< [IN] \~chinese 目标IP,组播模式下有意义 \~english Destination IP + unsigned short nDestPort; ///< [IN] \~chinese 目标Port,组播模式下有意义 \~english Destination port + + unsigned int nReserved[32]; ///< \~chinese 预留 \~english Reserved + +}MV_TRANSMISSION_TYPE; + +/// \~chinese 动作命令信息 \~english Action Command +typedef struct _MV_ACTION_CMD_INFO_T +{ + unsigned int nDeviceKey; ///< [IN] \~chinese 设备密钥 \~english Device Key; + unsigned int nGroupKey; ///< [IN] \~chinese 组键 \~english Group Key + unsigned int nGroupMask; ///< [IN] \~chinese 组掩码 \~english Group Mask + + unsigned int bActionTimeEnable; ///< [IN] \~chinese 只有设置成1时Action Time才有效,非1时无效 \~english Action Time Enable + int64_t nActionTime; ///< [IN] \~chinese 预定的时间,和主频有关 \~english Action Time + + const char* pBroadcastAddress; ///< [IN] \~chinese 广播包地址 \~english Broadcast Address + unsigned int nTimeOut; ///< [IN] \~chinese 等待ACK的超时时间,如果为0表示不需要ACK \~english TimeOut + + unsigned int bSpecialNetEnable; ///< [IN] \~chinese 只有设置成1时指定的网卡IP才有效,非1时无效 \~english Special IP Enable + unsigned int nSpecialNetIP; ///< [IN] \~chinese 指定的网卡IP \~english Special Net IP address + + unsigned int nReserved[14]; ///< \~chinese 预留 \~english Reserved + +}MV_ACTION_CMD_INFO; + +/// \~chinese 动作命令返回信息 \~english Action Command Result +typedef struct _MV_ACTION_CMD_RESULT_T +{ + unsigned char 
strDeviceAddress[12 + 3 + 1]; ///< [OUT] \~chinese 设备IP \~english IP address of the device + + int nStatus; ///< [OUT] \~chinese 状态码 \~english status code returned by the device + //1.0x0000:success. + //2.0x8001:Command is not supported by the device. + //3.0x8013:The device is not synchronized to a master clock to be used as time reference. + //4.0x8015:A device queue or packet data has overflowed. + //5.0x8016:The requested scheduled action command was requested at a time that is already past. + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MV_ACTION_CMD_RESULT; + +/// \~chinese 动作命令返回信息列表 \~english Action Command Result List +typedef struct _MV_ACTION_CMD_RESULT_LIST_T +{ + unsigned int nNumResults; ///< [OUT] \~chinese 返回值个数 \~english Number of returned values + MV_ACTION_CMD_RESULT* pResults; ///< [OUT] \~chinese 动作命令结果 \~english Reslut of action command + +}MV_ACTION_CMD_RESULT_LIST; + +/// \~chinese 每个节点对应的接口类型 \~english Interface type corresponds to each node +enum MV_XML_InterfaceType +{ + IFT_IValue, ///< \~chinese Value \~english IValue interface + IFT_IBase, ///< \~chinese Base \~english IBase interface + IFT_IInteger, ///< \~chinese Integer \~english IInteger interface + IFT_IBoolean, ///< \~chinese Boolean \~english IBoolean interface + IFT_ICommand, ///< \~chinese Command \~english ICommand interface + IFT_IFloat, ///< \~chinese Float \~english IFloat interface + IFT_IString, ///< \~chinese String \~english IString interface + IFT_IRegister, ///< \~chinese Register \~english IRegister interface + IFT_ICategory, ///< \~chinese Category \~english ICategory interface + IFT_IEnumeration, ///< \~chinese Enumeration \~english IEnumeration interface + IFT_IEnumEntry, ///< \~chinese EnumEntry \~english IEnumEntry interface + IFT_IPort, ///< \~chinese Port \~english IPort interface +}; + +/// \~chinese 节点的访问模式 \~english Node Access Mode +enum MV_XML_AccessMode +{ + AM_NI, ///< \~chinese 不可实现 \~english Not implemented + AM_NA, ///< 
\~chinese 不可用 \~english Not available + AM_WO, ///< \~chinese 只写 \~english Write Only + AM_RO, ///< \~chinese 只读 \~english Read Only + AM_RW, ///< \~chinese 读写 \~english Read and Write + AM_Undefined, ///< \~chinese 未定义 \~english Object is not yet initialized + AM_CycleDetect, ///< \~chinese 内部用于AccessMode循环检测 \~english used internally for AccessMode cycle detection +}; + +/// \~chinese 最大节点个数 \~english Max Number of Nodes +#define MV_MAX_NODE_NUM 1024 +/// \~chinese 节点名称的最大长度 \~english Max Length of a Node Name +#define MV_MAX_NODE_NAME_LEN 64 +/// \~chinese 节点名称 \~english Node Name +typedef struct _MVCC_NODE_NAME_T +{ + char strName[MV_MAX_NODE_NAME_LEN]; ///< \~chinese 节点名称 \~english Nodes Name + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_NODE_NAME; +/// \~chinese 节点列表 \~english Node List +typedef struct _MVCC_NODE_NAME_LIST_T +{ + unsigned int nNodeNum; ///< \~chinese 节点个数 \~english Number of Node + MVCC_NODE_NAME stNodeName[MV_MAX_NODE_NUM]; ///< \~chinese 节点名称 \~english Node Name + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_NODE_NAME_LIST; + +/// \~chinese 最大错误个数 \~english Max Number of Error +#define MV_MAX_NODE_ERROR_NUM 64 + +/// \~chinese 导入参数报错时的原因,错误码 \~english Reasons for importing parameter errors code +typedef enum _MVCC_NODE_ERR_TYPE_ +{ + MVCC_NODE_ERR_NODE_INVALID = 1, ///< \~chinese 节点不存在 \~english Usually, the operating node does not exist in the device + MVCC_NODE_ERR_ACCESS = 2, ///< \~chinese 访问条件错误,通常是节点不可读写 \~english Access condition error, usually due to nodes not being readable or writable + MVCC_NODE_ERR_OUT_RANGE = 3, ///< \~chinese 写入越界,超出该节点支持的范围 \~english Write out of bounds, beyond the supported range of this node + MVCC_NODE_ERR_VERIFY_FAILD = 4, ///< \~chinese 校验失败,通常是写入的值与文件中的值不匹配 \~english Verification failed, usually due to a mismatch between the written value and the value in the file + + MVCC_NODE_ERR_OTHER = 100, ///< \~chinese 其它错误,可查阅日志 \~english Other errors, 
can view logs + +}MVCC_NODE_ERR_TYPE; +/// \~chinese 错误信息 \~english Error Name +typedef struct _MVCC_NODE_ERROR_T +{ + char strName[MV_MAX_NODE_NAME_LEN]; ///< \~chinese 节点名称 \~english Nodes Name + MVCC_NODE_ERR_TYPE enErrType; ///< \~chinese 错误类型 \~english Error Type + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_NODE_ERROR; +/// \~chinese 错误信息列表 \~english Error List +typedef struct _MVCC_NODE_ERROR_LIST_T +{ + unsigned int nErrorNum; ///< \~chinese 错误个数 \~english Number of Error + MVCC_NODE_ERROR stNodeError[MV_MAX_NODE_ERROR_NUM]; ///< \~chinese 错误信息 \~english Error Name + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_NODE_ERROR_LIST; + +/// \~chinese 最大XML符号数 \~english Max XML Symbolic Number +#define MV_MAX_XML_SYMBOLIC_NUM 64 +/// \~chinese 枚举类型值 \~english Enumeration Value +typedef struct _MVCC_ENUMVALUE_T +{ + unsigned int nCurValue; ///< [OUT] \~chinese 当前值 \~english Current Value + unsigned int nSupportedNum; ///< [OUT] \~chinese 数据的有效数据个数 \~english Number of valid data + unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; ///< [OUT] \~chinese 支持的枚举值 \~english Support Value + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_ENUMVALUE; + +/// \~chinese 最大枚举条目对应的符号数量 \~english Max Enum Entry Symbolic Number +#define MV_MAX_ENUM_SYMBOLIC_NUM 256 + +/// \~chinese 枚举类型值 \~english Enumeration Value +typedef struct _MVCC_ENUMVALUE_EX_T +{ + unsigned int nCurValue; ///< [OUT] \~chinese 当前值 \~english Current Value + unsigned int nSupportedNum; ///< [OUT] \~chinese 数据的有效数据个数 \~english Number of valid data + unsigned int nSupportValue[MV_MAX_ENUM_SYMBOLIC_NUM]; ///< [OUT] \~chinese 支持的枚举值 \~english Support Value + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_ENUMVALUE_EX; + + +/// \~chinese 最大枚举条目对应的符号长度 \~english Max Enum Entry Symbolic Number +#define MV_MAX_SYMBOLIC_LEN 64 +/// \~chinese 枚举类型条目 \~english Enumeration Entry +typedef struct 
_MVCC_ENUMENTRY_T +{ + unsigned int nValue; ///< [IN] \~chinese 指定值 \~english Value + char chSymbolic[MV_MAX_SYMBOLIC_LEN]; ///< [OUT] \~chinese 指定值对应的符号 \~english Symbolic + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_ENUMENTRY; + +/// \~chinese Int类型值 \~english Int Value +typedef struct _MVCC_INTVALUE_T +{ + unsigned int nCurValue; ///< [OUT] \~chinese 当前值 \~english Current Value + unsigned int nMax; ///< [OUT] \~chinese 最大值 \~english Max + unsigned int nMin; ///< [OUT] \~chinese 最小值 \~english Min + unsigned int nInc; ///< [OUT] \~chinese \~english Inc + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_INTVALUE; + +/// \~chinese Int类型值Ex \~english Int Value Ex +typedef struct _MVCC_INTVALUE_EX_T +{ + int64_t nCurValue; ///< [OUT] \~chinese 当前值 \~english Current Value + int64_t nMax; ///< [OUT] \~chinese 最大值 \~english Max + int64_t nMin; ///< [OUT] \~chinese 最小值 \~english Min + int64_t nInc; ///< [OUT] \~chinese Inc \~english Inc + + unsigned int nReserved[16]; ///< \~chinese 预留 \~english Reserved + +}MVCC_INTVALUE_EX; + +/// \~chinese Float类型值 \~english Float Value +typedef struct _MVCC_FLOATVALUE_T +{ + float fCurValue; ///< [OUT] \~chinese 当前值 \~english Current Value + float fMax; ///< [OUT] \~chinese 最大值 \~english Max + float fMin; ///< [OUT] \~chinese 最小值 \~english Min + + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_FLOATVALUE; + +/// \~chinese String类型值 \~english String Value +typedef struct _MVCC_STRINGVALUE_T +{ + char chCurValue[256]; ///< [OUT] \~chinese 当前值 \~english Current Value + + int64_t nMaxLength; ///< [OUT] \~chinese 最大长度 \~english MaxLength + unsigned int nReserved[2]; ///< \~chinese 预留 \~english Reserved + +}MVCC_STRINGVALUE; + +/// \~chinese 辅助线颜色 \~english Color of Auxiliary Line +typedef struct _MVCC_COLORF +{ + float fR; ///< [IN] \~chinese 红色,根据像素颜色的相对深度,范围为[0.0 , 1.0],代表着[0, 255]的颜色深度 \~english Red,Range[0.0, 1.0] + float fG; ///< [IN] \~chinese 
绿色,根据像素颜色的相对深度,范围为[0.0 , 1.0],代表着[0, 255]的颜色深度 \~english Green,Range[0.0, 1.0] + float fB; ///< [IN] \~chinese 蓝色,根据像素颜色的相对深度,范围为[0.0 , 1.0],代表着[0, 255]的颜色深度 \~english Blue,Range[0.0, 1.0] + float fAlpha; ///< [IN] \~chinese 透明度,根据像素颜色的相对透明度,范围为[0.0 , 1.0],GDI渲染不支持 \~english Alpha,Range[0.0, 1.0], it is not supported under GDI rendering. + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_COLORF; + +/// \~chinese 自定义点 \~english Point defined +typedef struct _MVCC_POINTF +{ + float fX; ///< [IN] \~chinese 该点距离图像左边缘距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Left,Range[0.0, 1.0] + float fY; ///< [IN] \~chinese 该点距离图像上边缘距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Top,Range[0.0, 1.0] + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_POINTF; + +/// \~chinese 矩形框区域信息 \~english Rect Area Info +typedef struct _MVCC_RECT_INFO +{ + float fTop; ///< [IN] \~chinese 矩形上边缘距离图像上边缘的距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Top,Range[0, 1.0] + float fBottom; ///< [IN] \~chinese 矩形下边缘距离图像上边缘的距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Top,Range[0, 1.0] + float fLeft; ///< [IN] \~chinese 矩形左边缘距离图像左边缘的距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Left,Range[0, 1.0] + float fRight; ///< [IN] \~chinese 矩形右边缘距离图像左边缘的距离,根据图像的相对位置,范围为[0.0 , 1.0] \~english Distance From Left,Range[0, 1.0] + + MVCC_COLORF stColor; ///< [IN] \~chinese 辅助线颜色 \~english Color of Auxiliary Line + unsigned int nLineWidth; ///< [IN] \~chinese 辅助线宽度,宽度只能是1或2 \~english Width of Auxiliary Line, width is 1 or 2 + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_RECT_INFO; + +/// \~chinese 圆形框区域信息 \~english Circle Area Info +typedef struct _MVCC_CIRCLE_INFO +{ + MVCC_POINTF stCenterPoint; ///< [IN] \~chinese 圆心信息 \~english Circle Point Info + + float fR1; ///< [IN] \~chinese 宽向半径,根据图像的相对位置[0, 1.0],半径与圆心的位置有关,需保证画出的圆在显示框范围之内,否则报错 \~english Windth Radius, Range[0, 1.0] + float fR2; ///< [IN] \~chinese 
高向半径,根据图像的相对位置[0, 1.0],半径与圆心的位置有关,需保证画出的圆在显示框范围之内,否则报错 \~english Height Radius, Range[0, 1.0] + + MVCC_COLORF stColor; ///< [IN] \~chinese 辅助线颜色信息 \~english Color of Auxiliary Line + unsigned int nLineWidth; ///< [IN] \~chinese 辅助线宽度,宽度只能是1或2 \~english Width of Auxiliary Line, width is 1 or 2 + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_CIRCLE_INFO; + +/// \~chinese 线条辅助线信息 \~english Linear Auxiliary Line Info +typedef struct _MVCC_LINES_INFO +{ + MVCC_POINTF stStartPoint; ///< [IN] \~chinese 线条辅助线的起始点坐标 \~english The Start Point of Auxiliary Line + MVCC_POINTF stEndPoint; ///< [IN] \~chinese 线条辅助线的终点坐标 \~english The End Point of Auxiliary Line + MVCC_COLORF stColor; ///< [IN] \~chinese 辅助线颜色信息 \~english Color of Auxiliary Line + unsigned int nLineWidth; ///< [IN] \~chinese 辅助线宽度,宽度只能是1或2 \~english Width of Auxiliary Line, width is 1 or 2 + unsigned int nReserved[4]; ///< \~chinese 预留 \~english Reserved + +}MVCC_LINES_INFO; + +///< \~chinese 分时曝光时最多将源图像拆分的个数 \~english The maximum number of source image to be split in time-division exposure +#define MV_MAX_SPLIT_NUM 8 + +/// \~chinese 图像重构的方式 \~english Image reconstruction method +typedef enum _MV_IMAGE_RECONSTRUCTION_METHOD_ +{ + MV_SPLIT_BY_LINE = 1, ///< \~chinese 源图像单行拆分成多张图像 \~english Source image split into multiple images by line + +}MV_IMAGE_RECONSTRUCTION_METHOD; + +/// \~chinese 图像重构后的图像列表 \~english List of images after image reconstruction +typedef struct _MV_OUTPUT_IMAGE_INFO_ +{ + unsigned int nWidth; ///< [OUT] \~chinese 源图像宽 \~english Source Image Width + unsigned int nHeight; ///< [OUT] \~chinese 源图像高 \~english Source Image Height + enum MvGvspPixelType enPixelType; ///< [OUT] \~chinese 像素格式 \~english Pixel format + + unsigned char* pBuf; ///< [IN][OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + unsigned int nBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size 
+ + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved +}MV_OUTPUT_IMAGE_INFO; + +/// \~chinese 重构图像参数信息 \~english Restructure image parameters +typedef struct _MV_RECONSTRUCT_IMAGE_PARAM_ +{ + unsigned int nWidth; ///< [IN] \~chinese 源图像宽 \~english Source Image Width + unsigned int nHeight; ///< [IN] \~chinese 源图像高 \~english Source Image Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned int nExposureNum; ///< [IN] \~chinese 曝光个数(1-8] \~english Exposure number + MV_IMAGE_RECONSTRUCTION_METHOD enReconstructMethod; ///< [IN] \~chinese 图像重构方式 \~english Image restructuring method + + MV_OUTPUT_IMAGE_INFO stDstBufList[MV_MAX_SPLIT_NUM]; ///< [OUT] \~chinese 输出数据缓存信息 \~english Output data info + + unsigned int nRes[4]; +}MV_RECONSTRUCT_IMAGE_PARAM; + +/// \~chinese 串口信息 \~english Serial Port Info +typedef struct _MV_CAML_SERIAL_PORT_ +{ + unsigned char chSerialPort[INFO_MAX_BUFFER_SIZE]; ///< [OUT] \~chinese 串口号 \~english Serial Port + + unsigned int nRes[4]; ///<\~chinese 预留 \~english Reserved +}MV_CAML_SERIAL_PORT; + +///< \~chinese 最大支持的串口数量 \~english The maximum number of serial port supported +#define MV_MAX_SERIAL_PORT_NUM 64 + +typedef struct _MV_CAML_SERIAL_PORT_LIST_ +{ + unsigned int nSerialPortNum; ///< [OUT] \~chinese 串口数量 \~english Serial Port Num + MV_CAML_SERIAL_PORT stSerialPort[MV_MAX_SERIAL_PORT_NUM]; ///< [IN][OUT] \~chinese 串口信息 \~english Serial Port Information + + unsigned int nRes[4]; ///<\~chinese 预留 \~english Reserved +}MV_CAML_SERIAL_PORT_LIST; + +#endif /* _MV_CAMERA_PARAMS_H_ */ diff --git a/image_capture/third_party/mvs/Includes/MvCameraControl.h b/image_capture/third_party/mvs/Includes/MvCameraControl.h new file mode 100644 index 0000000..6c30ceb --- /dev/null +++ 
b/image_capture/third_party/mvs/Includes/MvCameraControl.h @@ -0,0 +1,3195 @@ + +#ifndef _MV_CAMERA_CTRL_H_ +#define _MV_CAMERA_CTRL_H_ + +#include "MvErrorDefine.h" +#include "CameraParams.h" +#include "MvObsoleteInterfaces.h" + + + +#ifndef MV_CAMCTRL_API + +#if (defined (_WIN32) || defined(WIN64)) +#if defined(MV_CAMCTRL_EXPORTS) +#define MV_CAMCTRL_API __declspec(dllexport) +#else +#define MV_CAMCTRL_API __declspec(dllimport) +#endif +#else +#ifndef __stdcall +#define __stdcall +#endif + +#ifndef MV_CAMCTRL_API +#define MV_CAMCTRL_API +#endif +#endif + +#endif + +#ifdef MV_CAMCTRL_API + +#if (defined (_WIN32) || defined(WIN64)) + #if defined(MV_CAMCTRL_EXPORTS) + #define MV_CAMCTRL_API __declspec(dllexport) + #else + #define MV_CAMCTRL_API __declspec(dllimport) + #endif + #else + #ifndef __stdcall + #define __stdcall + #endif + + #if defined(MV_CAMCTRL_EXPORTS) + #define MV_CAMCTRL_API __attribute__((visibility("default"))) + #else + #define MV_CAMCTRL_API + #endif + #endif + +#endif + +#ifndef IN + #define IN +#endif + +#ifndef OUT + #define OUT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + + +/****************************** ch: 摘要 | en: Instructions**********************************************/ + +/** + * @~chinese + * 该头文件主要包含13部分: + * 0.回调函数定义 + * 1.SDK初始化 + * 2.相机的配置(枚举/打开/关闭)和取流接口 + * 3.采集卡的配置(枚举/打开/关闭) + * 4.相机/采集卡属性万能配置接口&读写寄存器接口 + * 5.相机/采集卡 升级 + * 6.相机和采集卡 注册异常回调和事件接口 + * 7.仅GigE设备支持的接口 + * 8.仅CameraLink 设备支持的接口 + * 9.仅U3V设备支持的接口 + * 10.GenTL相关接口 + * 11.图像保存、格式转换等相关接口 + * 12.适用于支持串口通信的设备接口 + + * @~english + * This header file mainly includes 13 sections: + * 0.Callback function definition + * 1.SDK initialization + * 2.Camera configuration (enumeration/open/close) and streaming API + * 3.Frame grabber configuration (enumeration/open/close) + * 4.Universal property configuration API & register read/write API for cameras/frame grabbers + * 5.Firmware upgrade for cameras/frame grabbers + * 6.Exception callback registration and event API for 
cameras and frame grabbers + * 7.API exclusively for GigE devices + * 8.API exclusively for CameraLink devices + * 9.API exclusively for USB3 Vision (U3V) devices + * 10.GenTL-related API + * 11.Image saving and format conversion API + * 12.API for devices supporting serial communication +**/ + + +/*******************Part0 ch: 回调函数定义 | en: Callback function definition*******************/ + +/// \addtogroup 回调函数定义 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 取图回调函数 + * @param pData [OUT] 图像数据指针 + * @param pFrameInfo [OUT] 图像信息结构体 + * @param pUser [OUT] 用户自定义变量 + + * @~english + * @brief Image Callback function + * @param pData [OUT] It refers to the pointer to image data. + * @param pFrameInfo [OUT] It refers to the image information structure. + * @param pUser [OUT] It refers to the user-defined variable. +*****************************************************************************/ +typedef void(__stdcall *MvImageCallbackEx)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 取图回调函数 + * @param pstFrame [OUT] 图像数据和图像信息 + * @param pUser [OUT] 用户自定义变量 + * @param bAutoFree [OUT] true-回调函数退出后自动释放图像缓存,false-回调结束不释放图像缓存,需调用 MV_CC_FreeImageBuffer() + + + * @~english + * @brief Image callback function + * @param pstFrame [OUT] It refers to the image data and information. + * @param pUser [OUT] It refers to the user-defined variable. + * @param bAutoFree [OUT] Whether to release image buffer automatically after callback is completed. + true: the image buffer is released automatically after callback is completed, + false: the image buffer cannot be released automatically, and it is required to call MV_CC_FreeImageBuffer(). 
+ +*****************************************************************************/ +typedef void(__stdcall *MvImageCallbackEx2)(MV_FRAME_OUT* pstFrame, void *pUser, bool bAutoFree); + +/********************************************************************//** + * @~chinese + * @brief 事件回调函数 + * @param pEventInfo [OUT] 事件信息 + * @param pUser [OUT] 用户自定义变量 + + * @~english + * @brief Event callback function + * @param pEventInfo [OUT] It refers to the event information. + * @param pUser [OUT] It refers to the user-defined variable. +*****************************************************************************/ +typedef void(__stdcall *MvEventCallback)(MV_EVENT_OUT_INFO * pEventInfo, void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 流异常回调函数 + * @param pstStreamExceptionInfo [OUT] 流异常信息 + * @param pUser [OUT] 用户自定义变量 + + * @~english + * @brief Stream exception callback function. + * @param pstStreamExceptionInfo [OUT] It refers to the stream exception information. + * @param pUser [OUT] It refers to the user-defined variable. +*****************************************************************************/ +typedef void(__stdcall *MvStreamExceptionCallback)(MV_CC_STREAM_EXCEPTION_INFO* pstStreamExceptionInfo, void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 异常消息回调 + * @param nMsgType [OUT] 异常类型 + * @param pUser [OUT] 用户自定义变量 + + * @~english + * @brief Exception callback function. + * @param nMsgType [OUT] It refers to the exception type. + * @param pUser [OUT] It refers to the user-defined variable. 
+*****************************************************************************/ +typedef void(__stdcall *MvExceptionCallback)(unsigned int nMsgType, void *pUser); + +/// @} + +/**************************Part1 ch: SDK 初始化 | en: SDK Initialization ******************************************/ + +/// \addtogroup SDK初始化 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 初始化SDK + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Initializes SDK resources. + * @return Returns MV_OK for success, and returns corresponding error code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_Initialize(); + +/********************************************************************//** + * @~chinese + * @brief 反初始化SDK,释放资源 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks main函数退出前调用 + + * @~english + * @brief Releases SDK resources. + * @return Returns MV_OK for success, and returns corresponding error code for failure. + * @remarks Call this API before exiting the main function. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_Finalize(); + +/// @} + +/// \addtogroup SDK版本信息 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 获取SDK版本号 + * @return 返回4字节版本号 + | 主 | 次 | 修正 | 测试 | + | :---: | :---: | :---: | :---: | + | 8bits | 8bits | 8bits | 8bits | + * @remarks \li 比如返回值为0x01000001,即SDK版本号为V1.0.0.1。 + \li MV_CC_GetSDKVersion() 引入头文件、Lib文件后,SDK环境搭建完毕,可直接调用。 + + * @~english + * @brief Gets SDK Version + * @return Always return 4 Bytes of version number + |Main |Sub |Rev | Test| + 8bits 8bits 8bits 8bits + * @remarks For example, if the return value is 0x01000001, the SDK version is V1.0.0.1. 
+ ************************************************************************/ +MV_CAMCTRL_API unsigned int __stdcall MV_CC_GetSDKVersion(); + +/// @} + + + +/**************************Part2 ch: 相机的控制和取流 | en: Camera control and streaming******************************************/ + +/// \addtogroup 相机初始化 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 枚举设备 + * @param nTLayerType [IN] 枚举传输层, 参数定义参见CameraParams.h定义 + | 传输层协议类型定义 | 值 | 说明 | + | :--- | :---: | :--- | + | \ref MV_UNKNOW_DEVICE | 0x00000000 | 未知设备类型 | + | \ref MV_GIGE_DEVICE | 0x00000001 | GigE设备 | + | \ref MV_1394_DEVICE | 0x00000002 | 1394-a/b设备 | + | \ref MV_USB_DEVICE | 0x00000004 | USB设备 | + | \ref MV_CAMERALINK_DEVICE | 0x00000008 | 串口设备,包含Camera Link设备和串口视觉控制器 | + | \ref MV_VIR_GIGE_DEVICE | 0x00000010 | 虚拟GigE设备 | + | \ref MV_VIR_USB_DEVICE | 0x00000020 | 虚拟USB设备 | + | \ref MV_GENTL_GIGE_DEVICE | 0x00000040 | 自研网卡下GigE设备 | + | \ref MV_GENTL_CAMERALINK_DEVICE | 0x00000080 | CameraLink设备 | + | \ref MV_GENTL_CXP_DEVICE | 0x00000100 | CoaXPress设备 | + | \ref MV_GENTL_XOF_DEVICE | 0x00000200 | XoF设备 | + | \ref MV_GENTL_VIR_DEVICE | 0x00000800 | 虚拟设备 | + * @param pstDevList [IN][OUT] 设备列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请,建议尽量避免多线程枚举操作。\n + * \li 参数枚举传输层,适配传入MV_GIGE_DEVICE、MV_1394_DEVICE、MV_USB_DEVICE、MV_CAMERALINK_DEVICE;MV_GIGE_DEVICE该参数传出所有GiGE相关的设备信息(包含虚拟GiGE和GenTL下的GiGE设备),MV_USB_DEVICE该参数传出所有USB设备,包含虚拟USB设备。\n + + * @~english + * @brief Enumerates devices, including cameras connected to frame grabbers. + * @param nTLayerType [IN] It refers to the transport layer protocol type. For more details, refer to CameraParams.h. for example, #define MV_GIGE_DEVICE 0x00000001 + * @param pstDevList [IN][OUT] It refers to the device list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks The memory of device list is internally allocated. When this API is called in multiple threads, the SDK will release and apply for the device list memory. + It is recommended to avoid multithreaded enumeration operations. + * @remarks For the parameter nTLayerType, the following parameters are supported: MV_GIGE_DEVICE, MV_1394_DEVICE, MV_USB_DEVICE, and MV_CAMERALINK_DEVICE. + MV_GIGE_DEVICE sends out information of all GigE devices (including virtual GigE devices and GigE devices of GenTL), and MV_USB_DEVICE sends out information of USB devices (including virtual USB devices). + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevices(IN unsigned int nTLayerType, IN OUT MV_CC_DEVICE_INFO_LIST* pstDevList); + +/********************************************************************//** + * @~chinese + * @brief 根据厂商名字枚举设备 + * @param nTLayerType [IN] 枚举传输层,参数定义参见CameraParams.h + | 传输层协议类型定义 | 值 | 说明 | + | :--- | :---: | :--- | + | \ref MV_UNKNOW_DEVICE | 0x00000000 | 未知设备类型 | + | \ref MV_GIGE_DEVICE | 0x00000001 | GigE设备 | + | \ref MV_1394_DEVICE | 0x00000002 | 1394-a/b设备 | + | \ref MV_USB_DEVICE | 0x00000004 | USB设备 | + | \ref MV_CAMERALINK_DEVICE | 0x00000008 | 串口设备,包含Camera Link设备和串口视觉控制器 | + | \ref MV_VIR_GIGE_DEVICE | 0x00000010 | 虚拟GigE设备 | + | \ref MV_VIR_USB_DEVICE | 0x00000020 | 虚拟USB设备 | + | \ref MV_GENTL_GIGE_DEVICE | 0x00000040 | 自研网卡下GigE设备 | + | \ref MV_GENTL_CAMERALINK_DEVICE | 0x00000080 | CameraLink设备 | + | \ref MV_GENTL_CXP_DEVICE | 0x00000100 | CoaXPress设备 | + | \ref MV_GENTL_XOF_DEVICE | 0x00000200 | XoF设备 | + | \ref MV_GENTL_VIR_DEVICE | 0x00000800 | 虚拟设备 | + * @param pstDevList [IN][OUT] 设备列表 + * @param strManufacturerName [IN] 厂商名字 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 
参数枚举传输层,适配传入MV_GIGE_DEVICE、MV_1394_DEVICE、MV_USB_DEVICE、MV_CAMERALINK_DEVICE;MV_GIGE_DEVICE该参数传出所有GiGE相关的设备信息(包含虚拟GiGE和GenTL下的GiGE设备),MV_USB_DEVICE该参数传出所有USB设备,包含虚拟USB设备。\n + * \li 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请,建议尽量避免多线程枚举操作。\n + * \li MV_GENTL_GIGE_DEVICE、MV_GENTL_CAMERALINK_DEVICE、MV_GENTL_CXP_DEVICE、MV_GENTL_XOF_DEVICE传输层可以返回对采集卡下的相机信息。 + + * @~english + * @brief Enumerates devices according to manufacturers. + * @param nTLayerType [IN] It refers to the transport layer protocol type. For more details, refer to CameraParams.h. for example, #define MV_GIGE_DEVICE 0x00000001 + * @param pstDevList [IN][OUT] It refers to the device list. + * @param strManufacturerName [IN] It refers to the manufacturers. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks For the parameter nTLayerType, the following parameters are supported: MV_GIGE_DEVICE, MV_1394_DEVICE, MV_USB_DEVICE, and MV_CAMERALINK_DEVICE. + MV_GIGE_DEVICE sends out information of all GigE devices (including virtual GigE devices and GigE devices of GenTL), and MV_USB_DEVICE sends out information of USB devices (including virtual USB devices). + * @remarks The memory of device list is internally allocated. When this API is called in multiple threads, the SDK will release and apply for the device list memory. + It is recommended to avoid multithreaded enumeration operations. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevicesEx(IN unsigned int nTLayerType, IN OUT MV_CC_DEVICE_INFO_LIST* pstDevList, IN const char* strManufacturerName); + + +/********************************************************************//** + * @~chinese + * @brief 枚举设备扩展(可指定排序方式枚举、根据厂商名字过滤) + * @param nTLayerType [IN] 枚举传输层(区分每一种传输层类型,不耦合),参数定义参见CameraParams.h + | 传输层协议类型定义 | 值 | 说明 | + | :--- | :---: | :--- | + | \ref MV_UNKNOW_DEVICE | 0x00000000 | 未知设备类型 | + | \ref MV_GIGE_DEVICE | 0x00000001 | GigE设备 | + | \ref MV_1394_DEVICE | 0x00000002 | 1394-a/b设备 | + | \ref MV_USB_DEVICE | 0x00000004 | USB设备 | + | \ref MV_CAMERALINK_DEVICE | 0x00000008 | 串口设备,包含Camera Link设备和串口视觉控制器 | + | \ref MV_VIR_GIGE_DEVICE | 0x00000010 | 虚拟GigE设备 | + | \ref MV_VIR_USB_DEVICE | 0x00000020 | 虚拟USB设备 | + | \ref MV_GENTL_GIGE_DEVICE | 0x00000040 | 自研网卡下GigE设备 | + | \ref MV_GENTL_CAMERALINK_DEVICE | 0x00000080 | CameraLink设备 | + | \ref MV_GENTL_CXP_DEVICE | 0x00000100 | CoaXPress设备 | + | \ref MV_GENTL_XOF_DEVICE | 0x00000200 | XoF设备 | + | \ref MV_GENTL_VIR_DEVICE | 0x00000800 | 虚拟设备 | + * @param pstDevList [IN][OUT] 设备列表 + * @param strManufacturerName [IN] 厂商名字(可传NULL,即不过滤) + * @param enSortMethod [IN] 排序方式 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请,建议尽量避免多线程枚举操作。 + \li strManufacturerName可传入NULL,若传入NULL则返回排好序的所有设备列表,若不为NULL则只返回排好序的指定厂商设备列表。\n + \li MV_GENTL_GIGE_DEVICE、MV_GENTL_CAMERALINK_DEVICE、MV_GENTL_CXP_DEVICE、MV_GENTL_XOF_DEVICE传输层可以返回对采集卡下的相机信息。\n + * @note MV_CC_EnumDevicesEx2() 与 MV_CC_EnumDevicesEx() 接口的差异如下: + \li MV_CC_EnumDevicesEx() 传入MV_GIGE_DEVICE,除了枚举网段内的网口相机以外,还会枚举虚拟网口相机和自研采集卡下的网口相机;若传入MV_USB_DEVICE,则会枚举USB口相机和虚拟USB口相机。\n + \li MV_CC_EnumDevicesEx2() 传入MV_GIGE_DEVICE,仅枚举网段内的网口相机;若传入MV_USB_DEVICE,则仅枚举USB口相机。\n + \li MV_CC_EnumDevicesEx2() 多出排序的功能,由参数MV_SORT_METHOD enSortMethod决定排序方式。 + + + * @~english + * @brief Enumerates 
devices, supporting enumerating devices by specified sorting method and filtering by manufacturer name. + * @param nTLayerType [IN] It refers to the transport layer protocol type. For more details, refer to CameraParams.h. for example, #define MV_GIGE_DEVICE 0x00000001 + * @param pstDevList [IN][OUT] It refers to the device list. + * @param strManufacturerName [IN] It refers to the name of the manufacturer (NULL means not filtering). + * @param enSortMethod [IN] It refers to the sorting type. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The memory of device list is internally allocated. When this API is called in multiple threads, the SDK will release and apply for the device list memory. + It is recommended to avoid multithreaded enumeration operations. + strManufacturerName can be set to NULL, which indicates enumerating all devices according to the specified sorting type; + if not set to NULL, the sorted device list of specified manufacturers will be returned. + + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevicesEx2(IN unsigned int nTLayerType, IN OUT MV_CC_DEVICE_INFO_LIST* pstDevList, IN const char* strManufacturerName, IN MV_SORT_METHOD enSortMethod); + +/********************************************************************//** + * @~chinese + * @brief 通过采集卡句柄枚举设备 + * @param handle [IN] 采集卡句柄 + * @param pstDevList [OUT] 设备列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请,\n + \li 建议尽量避免多线程枚举操作。 + + * @~english + * @brief Enumerates devices by frame grabber handle. + * @param handle [IN] It refers to the frame grabber handle. + * @param pstDevList [OUT] It refers to the device list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The memory of device list is internally allocated. 
When this API is called in multiple threads, the SDK will release and apply for the device list memory. It is recommended to avoid multithreaded enumeration operations. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevicesByInterface(IN void* handle, OUT MV_CC_DEVICE_INFO_LIST* pstDevList); + +/********************************************************************//** + * @~chinese + * @brief 设备是否可连接 + * @param pstDevInfo [IN] 设备信息结构体 + * @param nAccessMode [IN] 访问权限,参数定义参见CameraParams.h定义 + | 宏定义 | 宏定义值 | 含义 | + | :--- | :---: | :--- | + | \ref MV_ACCESS_Exclusive | 1 | 独占权限,其他APP只允许读CCP寄存器 | + | \ref MV_ACCESS_ExclusiveWithSwitch | 2 | 可以从5模式下抢占权限,然后以独占权限打开 | + | \ref MV_ACCESS_Control | 3 | 控制权限,其他APP允许读所有寄存器 | + | \ref MV_ACCESS_ControlWithSwitch | 4 | 可以从5模式下抢占权限,然后以控制权限打开 | + | \ref MV_ACCESS_ControlSwitchEnable | 5 | 以可被抢占的控制权限打开 | + | \ref MV_ACCESS_ControlSwitchEnableWithKey | 6 | 可以从5模式下抢占权限,然后以可被抢占的控制权限打开 | + | \ref MV_ACCESS_Monitor | 7 | 读模式打开设备,适用于控制权限下 | + * @return 若设备可达,返回true;若设备不可达,返回false。 + * @remarks \li GigE相机: 读取设备CCP寄存器的值,判断当前状态是否具有某种访问权限。\n + 如果设备(MV_GENTL_GIGE_DEVICE/MV_GENTL_GIGE_DEVICE)不支持MV_ACCESS_ExclusiveWithSwitch、MV_ACCESS_ControlWithSwitch、MV_ACCESS_ControlSwitchEnable、MV_ACCESS_ControlSwitchEnableWithKey这四种模式,接口返回false。(目前设备不支持这4种抢占模式,国际上主流的厂商的设备也都暂不支持这4种模式。) + \li MV_GIGE_DEVICE/MV_GENTL_GIGE_DEVICE 类型设备:按照nAccessMode,返回当前是否可以被连接; + @note + \li 该接口支持虚拟相机,U3V相机,CXP, XoF, CameraLink采集卡相机, nAccessMode无效,如果相机没有被连接返回true, 如果设备被第三方连接,则返回false + \li 该接口不支持CameraLink设备(返回false) + * @~english + * @brief Checks if the specified device can be accessed. + * @param pstDevInfo [IN] It refers to device information. + * @param nAccessMode [IN] It refers to access mode. 
Refer to the 'CameraParams.h' for parameter definitions, for example, #define MV_ACCESS_Exclusive 1 (This parameter is only valid for devices of type MV_GIGE-DEVICE/MV_GENTL_GIGE-DEVICE) + * @return Returns true for accessible status, and false for inaccessible status. + * @remarks You can read the device CCP register value to check the current access permission. + Return false if the device(MV_GENTL_GIGE_DEVICE/MV_GENTL_GIGE_DEVICE) does not support the modes MV_ACCESS_ExclusiveWithSwitch, MV_ACCESS_ControlWithSwitch, MV_ACCESS_ControlSwitchEnable and MV_ACCESS_ControlSwitchEnableWithKey. Currently, the device does not support the 4 modes, neither do the devices from other mainstream manufacturers. + This API supports virtual cameras, U3V cameras, CoaXPress (CXP), XoF, and CameraLink frame grabber cameras. The nAccessMode parameter has no actual effect. It returns true if the camera is not connected, and false if the device is occupied by a third party. + This API does not support CameraLink devices (returns false). + **************************************************************************/ +MV_CAMCTRL_API bool __stdcall MV_CC_IsDeviceAccessible(IN MV_CC_DEVICE_INFO* pstDevInfo, IN unsigned int nAccessMode); + +/********************************************************************//** + * @~chinese + * @brief 创建设备句柄 + * @param handle [IN][OUT] 设备句柄 + * @param pstDevInfo [IN] 设备信息结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 根据输入的设备信息,创建库内部必须的资源和初始化内部模块。\n + \li 通过该接口创建句柄,调用SDK接口,会默认生成SDK日志文件,如果不需要生成日志文件,可以将日志配置文件中的日志等级改成off。 + + * @~english + * @brief Creates a device handle. + * @param handle [IN][OUT] It refers to the device handle. + * @param pstDevInfo [IN] It refers to device information. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Create required resources within library and initialize internal module according to input device information. 
+ SDK log file will be created by default when you call the API to create the device handle. If log file generation is not required, you can set the log level to off in the log configuration file. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateHandle(IN OUT void ** handle, IN const MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 打开设备 + * @param handle [IN] 设备句柄 + * @param nAccessMode [IN] 访问权限, 参数定义参见CameraParams.h定义 + | 宏定义 | 宏定义值 | 含义 | + | :--- | :---: | :--- | + | \ref MV_ACCESS_Exclusive | 1 | 独占权限,其他APP只允许读CCP寄存器 | + | \ref MV_ACCESS_ExclusiveWithSwitch | 2 | 可以从5模式下抢占权限,然后以独占权限打开 | + | \ref MV_ACCESS_Control | 3 | 控制权限,其他APP允许读所有寄存器 | + | \ref MV_ACCESS_ControlWithSwitch | 4 | 可以从5模式下抢占权限,然后以控制权限打开 | + | \ref MV_ACCESS_ControlSwitchEnable | 5 | 以可被抢占的控制权限打开 | + | \ref MV_ACCESS_ControlSwitchEnableWithKey | 6 | 可以从5模式下抢占权限,然后以可被抢占的控制权限打开 | + | \ref MV_ACCESS_Monitor | 7 | 读模式打开设备,适用于控制权限下 | + * @param nSwitchoverKey [IN] 切换访问权限时的密钥 (仅对 MV_GIGE_DEVICE 类型的设备有效) + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 根据设置的设备参数,找到对应的设备,连接设备, 调用接口时可不传入nAccessMode和nSwitchoverKey,此时默认设备访问模式为独占权限。\n + \li MV_GIGE_DEVICE 类型设备,目前相机固件暂不支持MV_ACCESS_ExclusiveWithSwitch、MV_ACCESS_ControlWithSwitch、MV_ACCESS_ControlSwitchEnable、MV_ACCESS_ControlSwitchEnableWithKey这四种抢占模式, 可通过SDK接口设置。\n + \li MV_GENTL_GIGE_DEVICE 设备只支持 nAccessMode 是 MV_ACCESS_Exclusive 、MV_ACCESS_Control 、MV_ACCESS_Monitor权限。\n + \li 对于U3V设备,CXP,Cameralink(MV_CAMERALINK_DEVICE、MV_GENTL_CAMERALINK_DEVICE),XoF设备,虚拟GEV,虚拟U3V设备:nAccessMode、nSwitchoverKey这两个参数无效; 默认以控制权限打开设备。\n + \li 该接口支持网口设备不枚举直接打开,不支持U口和GenTL设备不枚举打开设备。\n + + * @~english + * @brief Turns on the device. + * @param handle [IN] It refers to the device handle. + * @param nAccessMode [IN] It refers to access mode. Refer to the 'CameraParams.h'. 
+ * @param nSwitchoverKey [IN] It refers to the secret key for switching access permission. (only valid for devices of type MV_GIGE_DEVICE.)
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks You can find the device and connect it according to the configured device parameters.
+ The parameters nAccessMode and nSwitchoverKey are optional, and the device access mode is exclusive permission by default.
+ For GigE devices, the camera firmware does not support the following preemption modes: MV_ACCESS_ExclusiveWithSwitch, MV_ACCESS_ControlWithSwitch, MV_ACCESS_ControlSwitchEnable, and MV_ACCESS_ControlSwitchEnableWithKey.
+ For GenTL devices, the camera firmware only supports the following modes: MV_ACCESS_Exclusive, MV_ACCESS_Control, and MV_ACCESS_Monitor.
+ For U3V, CXP, camera link, XoF, virtual GEV, and virtual U3V devices, the parameters nAccessMode and nSwitchoverKey are invalid, and the device is opened with control permission via MV_ACCESS_Control by default.
+ This API allows turning on GigE devices without enumeration, but it does not support turning on USB or GenTL devices without enumeration.
+ ************************************************************************/
+#ifndef __cplusplus
+MV_CAMCTRL_API int __stdcall MV_CC_OpenDevice(IN void* handle, IN unsigned int nAccessMode, IN unsigned short nSwitchoverKey);
+#else
+MV_CAMCTRL_API int __stdcall MV_CC_OpenDevice(IN void* handle, IN unsigned int nAccessMode = MV_ACCESS_Exclusive, IN unsigned short nSwitchoverKey = 0);
+#endif
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 判断设备是否处于连接状态
+ * @param handle [IN] 设备句柄
+ * @return 若设备处于连接状态,返回true;没连接或失去连接,返回false
+
+ * @~english
+ * @brief Checks if the camera is connected.
+ * @param handle [IN] It refers to the device handle.
+ * @return Returns true if the device is connected, and returns false if the device is not connected or disconnected.
+ ***********************************************************************/ +MV_CAMCTRL_API bool __stdcall MV_CC_IsDeviceConnected(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取设备信息,取流之前调用 + * @param handle [IN] 设备句柄 + * @param pstDevInfo [IN][OUT] 返回给调用者有关设备信息结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 支持用户在打开设备后获取设备信息,不支持GenTL设备。 \n + \li 若该设备是GigE设备,则调用该接口存在阻塞风险,因此不建议在取流过程中调用该接口。 + + * @~english + * @brief Gets device information, used before image grabbing. + * @param handle [IN] It refers to the device handle. + * @param pstDevInfo [IN][OUT] It refers to the pointer to device information structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after MV_CC_OpenDevice() to get the device information. + For a GigE device, there is a blocking risk when calling the API, so it is not recommended to call the API during image grabbing. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceInfo(IN void * handle, IN OUT MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 获取各种类型的信息 + * @param handle [IN] 设备句柄 + * @param pstInfo [IN][OUT] 返回给调用者有关设备各种类型的信息结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 接口里面输入需要获取的信息类型(指定 MV_ALL_MATCH_INFO 结构体中的nType类型),获取对应的信息(在 MV_ALL_MATCH_INFO 结构体中pInfo里返回)。\n + \li 该接口的调用前置条件取决于所获取的信息类型,获取GigE设备的 \ref MV_MATCH_TYPE_NET_DETECT 信息需在开启抓图之后调用,获取U3V设备的 \ref MV_MATCH_TYPE_USB_DETECT 信息需在打开设备之后调用。\n + \li 信息类型 MV_MATCH_TYPE_NET_DETECT 对应结构体\ref MV_MATCH_INFO_NET_DETECT , 只支持MV_GIGE_DEVICE相机/MV_GENTL_GIGE_DEVICE相机。 \n + \li 信息类型 MV_MATCH_TYPE_USB_DETECT 对应结构体\ref MV_MATCH_INFO_USB_DETECT , 只支持MV_USB_DEVICE 类型相机。 \n + @note 该接口不支持MV_CAMERALINK_DEVICE设备。 + + * @~english + * @brief Gets information of all types. + * @param handle [IN] It refers to the device handle. + * @param pstInfo [IN][OUT] It refers to the pointer to information structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Input required information type (specify nType in structure MV_ALL_MATCH_INFO ), and get corresponding information (returned via pInfo in structure MV_ALL_MATCH_INFO ). + The calling precondition of this API is determined by the required information type. To obtain MV_MATCH_TYPE_NET_DETECT information of GigE devices, this API should be called after image grabbing starts. To obtain MV_MATCH_TYPE_USB_DETECT information of U3V devices, this API should be called after the device is turned on. 
+ The information type MV_MATCH_TYPE_NET_DETECT corresponds to the structure MV_MATCH_INFO_NET_DETECT, which only supports cameras of MV_GIGE_DEVICE and MV_GENTL_GIGE_DEVICE types + The information type MV_MATCH_TYPE_USB_DETECT corresponds to the structure MV_MATCH_INFO_USB_DETECT, which only supports cameras of MV_USB_DEVICE type + This API is not supported by MV_CAMERALINK_DEVICE device. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAllMatchInfo(IN void* handle, IN OUT MV_ALL_MATCH_INFO* pstInfo); + +/********************************************************************//** + * @~chinese + * @brief 关闭设备 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 通过 MV_CC_OpenDevice() 连接设备后,可以通过该接口断开设备连接,释放资源。 + + * @~english + * @brief Turns off the device. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is used to disconnect the device and release resources. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CloseDevice(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 销毁设备句柄 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 若向本接口传入采集卡句柄,其效果和 MV_CC_DestroyInterface() 相同; + + * @~english + * @brief Destroys the device handle. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks If a frame grabber handle is passed to MV_CC_DestroyHandle, its effect is identical to MV_CC_DestroyInterface. 
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_DestroyHandle(IN void * handle);
+
+/// @}
+
+/// \addtogroup 图像采集
+/// @{
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 注册图像数据回调
+ * @param handle [IN] 设备句柄
+ * @param cbOutput [IN] 回调函数指针
+ * @param pUser [IN] 用户自定义变量
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 通过该接口可以设置图像数据回调函数,在 MV_CC_CreateHandle() 之后即可调用。 \n
+ * @remarks 图像数据采集有两种方式,两种方式不能复用: \n
+ * \li 调用 MV_CC_RegisterImageCallBackEx() 设置图像数据回调函数,然后调用 MV_CC_StartGrabbing() 开始采集,采集的图像数据在设置的回调函数中返回。 \n
+ * \li 调用 MV_CC_StartGrabbing() 开始采集,然后在应用层循环调用 MV_CC_GetImageBuffer() 和 MV_CC_FreeImageBuffer() 获取指定像素格式的帧数据,获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。 \n
+ @note 该接口不支持 MV_CAMERALINK_DEVICE() 的设备。 \n
+
+ * @~english
+ * @brief Registers an image data callback (extended API 1).
+ * @param handle [IN] It refers to the device handle.
+ * @param cbOutput [IN] It refers to the pointer to the callback function.
+ * @param pUser [IN] It refers to user-defined variable.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks Before calling this API to register an image data callback, you should call the API MV_CC_CreateHandle().
+ There are two image acquisition methods, and the two methods cannot be used together:
+ Method 1: Call MV_CC_RegisterImageCallBackEx() to register an image data callback, and then call MV_CC_StartGrabbing() to start image acquisition. The collected image data will be returned in the configured callback function.
+ Method 2: Call MV_CC_StartGrabbing() to start image acquisition, and then call MV_CC_GetOneFrameTimeout() repeatedly in application layer to get frame data in specified pixel format. When getting frame data, the upper application program should control the frequency of calling this API according to frame rate.
+ This API is not supported by devices with MV_CAMERALINK_DEVICE + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackEx(IN void* handle, IN MvImageCallbackEx cbOutput, IN void* pUser); + +/********************************************************************//** +* @~chinese +* @brief 注册图像数据回调 +* @param handle [IN] 设备句柄 +* @param cbOutput [IN] 回调函数指针 +* @param bAutoFree [IN] 图像缓存自动回收标记 + \li true:回调结束后,图像缓存会被SDK回收 + \li false:回调结束后,需要调用 MV_CC_FreeImageBuffer() 接口才能回收图像缓存 +* @param pUser [IN] 用户自定义变量 +* @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 +* @remarks \li 通过该接口可以设置图像数据回调函数,在 MV_CC_CreateHandle() 之后即可调用。 + \li 该接口不支持 \ref MV_CAMERALINK_DEVICE 类型的设备。 + \li 该方式与 MV_CC_RegisterImageCallBackEx() 等注册图像回调函数的接口互斥; + \li 该接口与 MV_CC_GetImageBuffer() 、 MV_CC_GetOneFrameTimeout() 等主动取流接口互斥; + \li 该接口中 \ref MvImageCallbackEx2 的“MV_FRAME_OUT* pstFrame”为SDK内部变量。如需在回调函数外使用,您需浅拷贝pstFrame结构体(不需要拷贝图像数据)。 + + * @~english + * @brief Registers an image data callback (extended API 2). + * @param handle [IN] It refers to the device handle. + * @param cbOutput [IN] It refers to the pointer to the callback function. + * @param bAutoFree [IN] It refers to the mark for automatic releasing of image buffer. (true:The image buffer will be released and reused by SDK after callback. false:After callback, it is required to call MV_CC_FreeImageBuffer() to release and reuse the image buffer.) + * @param pUser [IN] It refers to the user-defined variable. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Before calling this API to register an image data callback, you should call the API MV_CC_CreateHandle(). + When getting frame data, the upper application program should control the frequency of calling this API according to frame rate. + This API is not supported by devices with MV_CAMERALINK_DEVICE. 
+ The pstFrame parameter in the callback function is an internal temporary variable of the SDK, and its content must be copied before it can be used outside the callback. +***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackEx2(IN void* handle, IN MvImageCallbackEx2 cbOutput, IN void* pUser, IN bool bAutoFree); + +/********************************************************************//** + * @~chinese + * @brief 开始取流 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口不支持 \ref MV_CAMERALINK_DEVICE 类型的设备。 + + * @~english + * @brief Starts image grabbing. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is not supported by MV_CAMERALINK_DEVICE. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StartGrabbing(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 停止取流 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口不支持 \ref MV_CAMERALINK_DEVICE 类型的设备。 + + * @~english + * @brief Stops image grabbing. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is not supported by MV_CAMERALINK_DEVICE. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StopGrabbing(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取一帧图片(与 MV_CC_Display() 不能同时使用) + * @param handle [IN] 设备句柄 + * @param pstFrame [IN][OUT] 图像数据和图像信息 + * @param nMsec [IN] 等待超时时间,输入INFINITE时表示无限等待,直到收到一帧数据或者停止取流 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 调用该接口获取图像数据帧之前需要先调用 MV_CC_StartGrabbing() 启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。该接口支持设置超时时间,SDK内部等待直到有数据时返回,可以增加取流平稳性,适合用于对平稳性要求较高的场合。 \n + \li 该接口与 MV_CC_FreeImageBuffer() 配套使用,当处理完取到的数据后,需要用 MV_CC_FreeImageBuffer() 接口将pstFrame内的数据指针权限进行释放。 \n + \li 该接口与 MV_CC_GetOneFrameTimeout() 相比,有着更高的效率。且其取流缓存的分配由sdk内部自动分配或者外部注册,而 MV_CC_GetOneFrameTimeout() 接口是需要客户自行分配。\n + \li 该接口在调用 MV_CC_Display() 后无法取流。 \n + \li 该接口对于U3V、GIGE设备均可支持。 \n + \li 该接口不支持CameraLink设备。\n + + * @~english + * @brief Gets one frame of image. (This API cannot be used with MV_CC_Display() at the same time.) + * @param handle [IN] It refers to the device handle. + * @param pstFrame [IN][OUT] It refers to image data and information. + * @param nMsec [IN] It refers to the timeout duration, unit: millisecond. You can input INFINITE to set unlimited timeout duration, and image grabbing will not stop until a frame of data is received or the image grabbing is manually stopped. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Before calling this API to get image data frame, call MV_CC_StartGrabbing to start image acquisition. + This API can get frame data actively, the upper layer program should control the frequency of calling this API according to the frame rate. This API supports setting timeout duration for receiving image data frames, which helps ensure stable image acquisition. It is applicable to scenes with high-stability requirement for getting images. 
+ This API and MV_CC_FreeImageBuffer should be called in pairs, after processing the acquired data, you should call MV_CC_FreeImageBuffer to release the data pointer permission of pstFrame. + This API's image buffer is allocated by the SDK internally or registered externally, it has higher image acquisition efficiency than MV_CC_GetOneFrameTimeout() , whose image buffer needs to be manually allocated by the user. + This API cannot be called to grab images after calling MV_CC_Display(). + This API is not supported by MV_CAMERALINK_DEVICE. + This API is supported by both USB3 vision camera and GigE camera. + *****************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageBuffer(IN void* handle, IN OUT MV_FRAME_OUT* pstFrame, IN unsigned int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 释放图像缓存(此接口用于释放不再使用的图像缓存,与 MV_CC_GetImageBuffer() 配套使用) + * @param handle [IN] 设备句柄 + * @param pstFrame [IN] 图像数据和图像数据 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 该接口与 MV_CC_GetImageBuffer() 配套使用,使用 MV_CC_GetImageBuffer() 接口取到的图像数据pstFrame,需要用 MV_CC_FreeImageBuffer() 接口进行权限释放。 \n + \li 该接口对于取流效率高于 MV_CC_GetOneFrameTimeout() 接口,且 MV_CC_GetImageBuffer() 在不进行Free的情况下,最大支持输出的节点数与 MV_CC_SetImageNodeNum() 接口所设置的节点数相等,默认节点数是1。\n + \li 该接口对于U3V、GIGE设备均可支持。 \n + \li 该接口不支持CameraLink设备。\n + + * @~english + * @brief Releases image buffer. (This API is used to release the image buffer that is no longer used, and it is used with MV_CC_GetImageBuffer() in pairs.) + * @param handle [IN] It refers to the device handle. + * @param pstFrame [IN] It refers to image data and information. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API and MV_CC_GetImageBuffer() should be called in pairs. 
After calling MV_CC_GetImageBuffer() to get image data pstFrame, call MV_CC_FreeImageBuffer() to release the permission. + This API has higher efficiency of image acquisition than the API MV_CC_GetOneFrameTimeout(). The max. number of nodes that can be outputted by MV_CC_GetImageBuffer()(without freeing the buffer) is the same as the "nNum" set by the API MV_CC_SetImageNodeNum(). + This API is not supported by MV_CAMERALINK_DEVICE. + The API is supported by both USB3 vision camera and GigE camera. + **********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FreeImageBuffer(IN void* handle, IN MV_FRAME_OUT* pstFrame); + +/********************************************************************//** + * @~chinese + * @brief 采用超时机制获取一帧图片,SDK内部等待直到有数据时返回 + * @param handle [IN] 设备句柄 + * @param pData [IN][OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pstFrameInfo [IN][OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 调用该接口获取图像数据帧之前需要先调用 MV_CC_StartGrabbing() 启动图像采集。该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。该接口支持设置超时时间,SDK内部等待直到有数据时返回,可以增加取流平稳性,适合用于对平稳性要求较高的场合。\n + * @note \li 该接口对于U3V、GigE设备均可支持。\n + \li 该接口不支持CameraLink设备。 \n + + * @~english + * @brief Gets one frame of image with timeout, and the SDK waits internally to return until data is available. + * @param handle [IN] It refers to the device handle. + * @param pData [IN][OUT] It refers to the pointer to receive image data. + * @param nDataSize [IN] It refers to received buffer size. + * @param pstFrameInfo [IN][OUT] It refers to the structure of image information. + * @param nMsec [IN] It refers to timeout duration. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Before calling this API to get image data frame, call MV_CC_StartGrabbing() to start image acquisition. 
+ This API can get frame data actively, the upper layer program should control the frequency of calling this API according to the frame rate. + This API supports setting timeout duration for receiving image data frames, which helps ensure stable image acquisition. It is applicable to scenes with high-stability requirement for getting images. + This API is supported by both USB3 vision devices and GigE devices. + This API is not supported by MV_CAMERALINK_DEVICE. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrameTimeout(IN void* handle, IN OUT unsigned char* pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pstFrameInfo, IN unsigned int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 清除取流数据缓存 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 该接口允许用户在不停止取流的时候,就能清除缓存中不需要的图像。 \n + \li 该接口在连续模式切触发模式后,可以清除历史数据。 \n + \li 该接口当前仅支持清除SDK内部的图像缓存,暂不支持清除采集卡内的缓存。 + + * @~english + * @brief Clears the image buffer. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to clear the images you do not need in the buffer when the streaming is in progress. + After switching to trigger mode from continuous mode, you can call this API to clear historical data buffer. + Call this API to clear internal image buffer of SDK. The clearing of the buffer inside the frame grabbers is not supported. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ClearImageBuffer(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取当前图像缓存区的有效图像个数 + * @param handle [IN] 设备句柄 + * @param pnValidImageNum [IN][OUT] 当前图像缓存区中有效图像个数的指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口只统计SDK内部的有效图像个数,不包括采集卡缓存内的有效图像个数 + + * @~english + * @brief Gets the number of valid images in the current image buffer. + * @param handle [IN] It refers to the device handle. + * @param pnValidImageNum [IN][OUT] It refers to the pointer to the number of valid images in the current image buffer. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API can only be called to get the number of valid images in the SDK, not the number of those in frame grabbers. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetValidImageNum(IN void* handle, IN OUT unsigned int *pnValidImageNum); + +/********************************************************************//** + * @~chinese + * @brief 设置SDK内部图像缓存节点个数,大于等于1,在抓图前调用 + * @param handle [IN] 设备句柄 + * @param nNum [IN] 缓存节点个数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 调用该接口可以设置SDK内部图像缓存节点个数,在调用 MV_CC_StartGrabbing() 开始抓图前调用。 \n + \li 由于不同相机的取流方式不同,在不调用 MV_CC_SetImageNodeNum() 情况下,不同相机的默认缓存节点个数不同。比如双U内部默认分配3个节点。 \n + \li SDK实际分配的节点个数 = SDK内部预分配的个数 + 通过调用该接口分配的节点;若系统内存资源不够,SDK内部将重新计算预分配的缓存节点个数,在该情况下,SDK实际分配的节点个数以重新计算的节点个数为准。 \n + \li 该接口不支持CameraLink设备。CameraLink设备可以通过GenTL方式连接并设置缓存节点个数。 \n + + * @~english + * @brief Sets the number of nodes for SDK internal image buffer. The value is no less than 1, and this API should be called before image grabbing. + * @param handle [IN] It refers to the device handle. 
+ * @param nNum [IN] It refers to the number of buffer nodes, which cannot be less than 1. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API before MV_CC_StartGrabbing() to set the number of nodes for SDK internal image buffer. + Image grabbing methods vary from different camera types. If this API is not called, the default number of buffer nodes will be different. + The number of SDK allocated nodes = the pre-allocated nodes within SDK + the nodes allocated via this API. If the memory allocated by the system is insufficient, the pre-allocated nodes for SDK will be calculated again, and the actual number of allocated nodes will be set to the number of latest pre-allocated nodes. + If the system memory resources are insufficient, the SDK will recalculate and use it as the actual number of nodes. + This API does not support devices of type MV_CAMERALINK_DEVICE + This API is only valid for the SDK's internal allocation cache mode, and the external allocation cache mode (i.e., calling MV_CC_RegisterBuffer) is invalid; + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetImageNodeNum(IN void* handle, IN unsigned int nNum); + +/********************************************************************//** + * @~chinese + * @brief 设置取流策略 + * @param handle [IN] 设备句柄 + * @param enGrabStrategy [IN] 策略枚举值 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口定义了四种取流策略,用户可以根据实际需求进行选择。具体描述如下: + \li OneByOne:从旧到新一帧一帧的从输出缓存列表中获取图像,打开设备后默认为该策略 + \li LatestImagesOnly:仅从输出缓存列表中获取最新的一帧图像,同时清空输出缓存列表 + \li LatestImages:从输出缓存列表中获取最新的OutputQueueSize帧图像,其中OutputQueueSize范围为1-ImageNodeNum,可用 MV_CC_SetOutputQueueSize() 接口设置,ImageNodeNum默认为1,可用 MV_CC_SetImageNodeNum() 接口设置 OutputQueueSize设置成1等同于LatestImagesOnly策略,OutputQueueSize设置成ImageNodeNum等同于OneByOne策略 + \li UpcomingImage:在调用取流接口时忽略输出缓存列表中所有图像,并等待设备即将生成的一帧图像。该策略仅支持GigE设备和USB设备 + @note + \li WINDOWS + 
该接口仅支持 \ref MV_GIGE_DEVICE 、 \ref MV_USB_DEVICE 设备。 + \endif + \li LINUX + 该接口仅支持 \ref MV_USB_DEVICE 设备。 + \endif + + * @~english + * @brief Sets image grabbing strategy. + * @param handle [IN] It refers to the device handle. + * @param enGrabStrategy [IN] It refers to strategy enumeration. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks There are four defined image grabbing strategies, from which you can choose the suitable one according to the actual requirement. Details are following. + OneByOne:Gets image frames one by one in chronological order. It is the default strategy. + LatestImagesOnly:Only gets the latest one frame from the output buffer list, and clears the rest images in the list. + LatestImages:Gets the latest image of OutputQueueSize frame from the output buffer list. The range of OutputQueueSize is between 1 and ImageNodeNum. + If the OutputQueueSize value is set to 1, the strategy is same to LatestImagesOnly, and if the OutputQueueSize value is set to ImageNodeNum, the strategy is same to OneByOne. + You can set the OutputQueueSize via API MV_CC_SetOutputQueueSize(), and set the ImageNodeNum via API MV_CC_SetImageNodeNum(). + UpcomingImage:Ignores all images in the output buffer list during calling this API, and waits for the upcoming image generated by the device. This strategy is supported by GigE devices and USB devices only. + This API only support MV_GIGE_DEVICE, MV_USB_DEVICE device on Windows, and only support MV_USB_DEVICE device on Linux. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGrabStrategy(IN void* handle, IN MV_GRAB_STRATEGY enGrabStrategy); + +/********************************************************************//** + * @~chinese + * @brief 设置输出缓存个数(只有在 \ref MV_GrabStrategy_LatestImages 策略下才有效,范围:1-ImageNodeNum) + * @param handle [IN] 设备句柄 + * @param nOutputQueueSize [IN] 输出缓存个数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口需与LatestImages取流策略配套调用,用于设置LatestImages策略下最多允许缓存图像的个数。可以在取流过程中动态调节输出缓存个数。 + @note + \li WINDOWS + 该接口仅支持 \ref MV_GIGE_DEVICE 、 \ref MV_USB_DEVICE 设备。 + \endif + \li LINUX + 该接口仅支持 \ref MV_USB_DEVICE 设备。 + \endif + + * @~english + * @brief Sets the number of output buffers, range: [1, ImageNodeNum]. + * @param handle [IN] It refers to the device handle. + * @param nOutputQueueSize [IN] It refers to the number of output buffers. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is valid only when MV_CC_SetGrabStrategy() is set to LatestImages. It is called to set the max. number of buffer images under the LatestImages strategy. You can adjust the number of output buffers during image grabbing. + The user may change the output queue size while grabbing images. + This API only support MV_GIGE_DEVICE, MV_USB_DEVICE device on Windows, and only support MV_USB_DEVICE device on Linux. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetOutputQueueSize(IN void* handle, IN unsigned int nOutputQueueSize); + +/********************************************************************//** + * @~chinese + * @brief 分配对齐内存 + * @param nBufSize [IN] 分配内存的长度 + * @param nAlignment [IN] 内存对齐字节数 (必须是大于0,并且是2的整数次幂) + * @return 成功,返回申请内存地址;失败,返回 NULL + * @remarks + + * @~english + * @brief Allocates aligned memory + * @param nBufSize [IN] It refers to allocation length of memory + * @param nAlignment [IN] It refers to memory alignment size (must be greater than 0 and a power of 2) + * @return Returns memory address for success, and returns NULL for failure. + * @remarks +************************************************************************/ +MV_CAMCTRL_API void * __stdcall MV_CC_AllocAlignedBuffer(IN uint64_t nBufSize, IN unsigned int nAlignment); + +/********************************************************************//** + * @~chinese + * @brief 对齐内存释放 + * @param pBuffer [IN] 内存地址 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 对齐内存的释放,搭配 MV_CC_AllocAlignedBuffer() 使用 + + * @~english + * @brief Releases aligned memory + * @param pBuffer [IN] It refers to memory address + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API and MV_CC_AllocAlignedBuffer should be called in pairs. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FreeAlignedBuffer(IN void* pBuffer); + +/********************************************************************//** + * @~chinese + * @brief 获取设备payload大小(payload包含图像数据和Chunk数据)和内存对其方式,用于SDK外部注册缓存时,应用层分配足够的缓存及正确的内存对齐方式 + * @param handle [IN] 设备句柄 + * @param pnPayloadSize [IN OUT] 负载长度 + * @param pnAlignment [IN OUT] 负载内存对齐的字节数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks + + * @~english + * @brief Gets the device payload size (including image data and Chunk data) and memory alignment method. + It is used by the application layer to allocate sufficient buffer and correct memory alignment when registering external memory for SDK. + * @param handle [IN] It refers to the device handle. + * @param pnPayloadSize [IN OUT] It refers to the payload size. + * @param pnAlignment [IN OUT] It refers to alignment bytes. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetPayloadSize(IN void* handle, IN OUT uint64_t* pnPayloadSize, IN OUT unsigned int* pnAlignment); + +/********************************************************************//** + * @~chinese + * @brief 应用程序分配缓存,并注册到SDK内部,供SDK使用 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN] 内存地址 + * @param nBufSize [IN] 内存长度 + * @param pUser [IN] 用户指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 可以使用 MV_CC_GetPayloadSize() 获取缓存大小,并使用 MV_CC_AllocAlignedBuffer() 分配空间,之后使用 MV_CC_RegisterBuffer() 注册 + \li 注册的缓存需要由应用层通知SDK取消注册( MV_CC_UnRegisterBuffer() )后,进行释放( MV_CC_FreeAlignedBuffer() )。 + \li 使用该接口后,仅支持 MV_CC_GetImageBuffer() / MV_CC_FreeImageBuffer() / MV_CC_RegisterImageCallBackEx() 获取图像,不支持其他接口获取图像。\n + @note \li 使用该接口后,如果之前配置了SDK内部节点( MV_CC_SetImageNodeNum() )无效。\n + \li 双USB接口相机要求至少注册3块空间到SDK内部。其他相机暂无限制,但是为了避免缓存不足,请配置足够的缓存到底层。\n + + * @~english + * @brief Application allocates cache and registers it internally to the SDK for SDK usage + * @param handle [IN] It refers to the device handle. + * @param pBuffer [IN] It refers to external memory address + * @param nBufSize [IN] It refers to external length of memory + * @param pUser [IN] It refers to user-defined variable. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks Registering memory can be done by using MV_CC_GetPayloadSize to obtain the memory size, and allocating the memory size using MV_CC_AllocAlignedBuffer + The registered memory needs to be notified by the application layer to the SDK to cancel the registration (MV_CC_UnregisterBuffer) and then released (MV_CC_FreeAlignedBuffer) + After using this API, only MV_CC_GetImageBuffer、MV_CC_FreeImageBuffer/MV_CC_RegisterImageCallBackEx is supported for image retrieval, and other API are not supported for image retrieval + After using this API, if the SDK internal node (MV_CC_SetImageNodeNum) was previously configured, it is invalid + The dual USB API camera requires at least 3 spaces to be registered inside the SDK;There is no limit for other cameras for the time being, but to avoid insufficient cache, please configure sufficient cache into the SDK +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterBuffer(IN void* handle, IN void *pBuffer, IN uint64_t nBufSize, IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 外部内存取消SDK内部注册 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN] 外部内存地址 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks + + * @~english + * @brief Revokes external memory + * @param handle [IN] It refers to the device handle + * @param pBuffer [IN] It refers to external memory address + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks
+************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_UnRegisterBuffer(IN void* handle, IN void* pBuffer);
+
+/// @}
+
+/// \addtogroup 图像渲染
+/// @{
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 显示一帧图像(扩展接口)
+ * @param handle [IN] 设备句柄
+ * @param hWnd [IN] 窗口句柄
+ * @param pstDisplayInfo [IN] 图像信息
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 该接口支持渲染的图像宽、高可达到Int类型。
+ * \if WINDOWS
+ * 可选择GDI或D3D渲染模式,默认选择GDI模式。关于渲染模式的说明如下:
 \li GDI模式:对电脑的显卡性能没有要求,适用于所有电脑。
 \li D3D模式:适用于已安装显卡驱动且显卡内存大于1GB的电脑。在该模式下,客户端预览的图像效果会优于GDI模式下的图像效果。该模式支持的最大图像分辨率为16384 * 163840。
 \li MV_CC_DisplayOneFrameEx() 支持PixelType_Gvsp_RGB8_Packed,PixelType_Gvsp_BGR8_Packed,PixelType_Gvsp_Mono8三种像素格式的渲染宽高大小至int类型,其余像素格式渲染仅支持宽高至short。
 \endif
+
+ * @~english
+ * @brief Displays one frame of image (extended API 1).
+ * @param handle [IN] It refers to the device handle.
+ * @param hWnd [IN] It refers to window handle.
+ * @param pstDisplayInfo [IN] It refers to image information.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks This API supports converting image width and image height to integer type data. The rendering mode can be set to GDI (default) or D3D. See details about rendering modes below.
 The GDI mode is applicable to all computers with no requirements on graphic card performance.
 The D3D mode is suitable for computers with a graphics card driver and the memory of the graphics card is greater than 1GB. In this mode, the image effect of the client preview is better than that of the GDI mode. The max. supported resolution is 16384 × 163840.
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DisplayOneFrameEx(IN void* handle, IN void* hWnd, IN MV_DISPLAY_FRAME_INFO_EX* pstDisplayInfo); + +/********************************************************************//** + * @~chinese + * @brief 显示一帧图像(扩展接口2) + * @param handle [IN] 设备句柄 + * @param hWnd [IN] 窗口句柄 + * @param pstImage [IN] 图像信息 + * \if WINDOWS + * @param enRenderMode [IN] 渲染方式:0-GDI 1-D3D 2-OpenGL + * \endif + * \if LINUX + * @param enRenderMode [IN] 渲染方式:0-OpenGL + * \endif + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * \if WINDOWS + * @remarks 此接口支持4GB以上超大图渲染,调用时需要输入 MV_CC_IMAGE 中 nImageLen 的值。 \n + D3D和OpenGL模式适用于安装显卡驱动且显卡内存大于1GB的电脑,该模式下客户端预览的图像效果会优于GDI模式下的图像效果。渲染模式为D3D时,支持的最大分辨率为16384 * 163840。 \n + 根据图像大小是否超过4GB,该接口可选的渲染模式不同,详情如下: + \li 若图像大小大于4GB,仅支持使用OpenGL模式渲染图像,并且支持渲染RGB8_Packed,BGR8_Packed和Mono8格式的图像。\n + \li 若图像大小小于4GB,可根据实际情况选择GDI、D3D或OpenGL模式。若选择渲染的图像格式为RGB8_Packed,BGR8_Packed或Mono8格式,该渲染图像的宽高可达到int类型。 \n + * \endif + * \if LINUX + * \li 支持PixelType_Gvsp_RGB8_Packed,PixelType_Gvsp_BGR8_Packed,PixelType_Gvsp_Mono8三种像素格式图像大小超过4GB的渲染。 \n + \li 若图像大小未超过4GB,支持宽高大小至int类型。 \n + \li 调用时需要输入 MV_CC_IMAGE 结构体中nImageLen的值; \n + * \endif + + * @~english + * @brief Displays one frame of image (extended API 2). + * @param handle [IN] It refers to the device handle. + * @param hWnd [IN] It refers to window handle. + * @param pstImage [IN] It refers to the image information. + * @param enRenderMode [IN] It refers to image rendering mode. 0: GDI; 1: D3D; 2: OpenGL. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API supports rendering of images over 4 GB by entering nImageBufLen value of structure MV_CC_IMAGE. + D3D mode and OpenGL mode are suitable for computers with a graphics card driver and the memory of the graphics card is greater than 1GB. 
In these modes, the image effect of the client preview is better than that of the GDI mode. For D3D mode, the max. supported resolution is 16384 × 163840. + Rendering mode and supported rendering width and rendering height vary from images over 4 GB and images no more than 4 GB. + For an image over 4 GB, only OpenGL rendering mode is supported for image in pixel formats including RGB8_Packed, BGR8_Packed, and Mono8. + For image size under 4 GB, you can choose GDI, D3D, or OpenGL according to actual demand. For an image with pixel formats including RGB8_Packed, BGR8_Packed, and Mono8, it supports converting image width and image height to integer type data. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DisplayOneFrameEx2(IN void* handle, IN void* hWnd, IN MV_CC_IMAGE* pstImage, unsigned int enRenderMode); + +/// \if WINDOWS + +/********************************************************************//** + * @~chinese + * @brief 在图像上绘制矩形框辅助线 + * @param handle [IN] 设备句柄 + * @param pRectInfo [IN] 矩形辅助线的信息 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口仅支持windows平台 + + * @~english + * @brief Draws auxiliary rectangle frames on the image. + * @param handle [IN] It refers to the device handle. + * @param pRectInfo [IN] It refers to the information of auxiliary rectangle frame. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only supports windows platform. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DrawRect(IN void* handle, IN MVCC_RECT_INFO* pRectInfo); + +/********************************************************************//** + * @~chinese + * @brief 在图像上绘制圆形辅助线 + * @param handle [IN] 设备句柄 + * @param pCircleInfo [IN] 圆形辅助线的信息 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口仅支持windows平台 + + * @~english + * @brief Draws auxiliary circle frames on the image. + * @param handle [IN] It refers to the device handle. + * @param pCircleInfo [IN] It refers to the information of auxiliary circle frame. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only supports windows platform. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DrawCircle(IN void* handle, IN MVCC_CIRCLE_INFO* pCircleInfo); + +/********************************************************************//** + * @~chinese + * @brief 在图像上绘制线条 + * @param handle [IN] 设备句柄 + * @param pLinesInfo [IN] 线条辅助线信息 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口仅支持windows平台 + + * @~english + * @brief Draws lines on the image. + * @param handle [IN] It refers to the device handle. + * @param pLinesInfo [IN] It refers to the information of line + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only supports windows platform. 
+ ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DrawLines(IN void* handle, IN MVCC_LINES_INFO* pLinesInfo); + +/// \endif +/// @} + +/**************************Part3 ch: 采集卡的配置 | en: Frame grabber control ******************************************/ + +/// \addtogroup 采集卡初始化 +/// @{ +/********************************************************************//** + * @~chinese + * @brief 枚举采集卡 + * @param nTLayerType [IN] 采集卡接口类型 + | 采集卡接口类型定义 | 值 | 说明 | + | :--- | :---: | :--- | + | \ref MV_GIGE_INTERFACE | 0x00000001 | GigE Vision采集卡 | + | \ref MV_CAMERALINK_INTERFACE | 0x00000004 | Camera Link采集卡 | + | \ref MV_CXP_INTERFACE | 0x00000008 | CoaXPress采集卡 | + | \ref MV_XOF_INTERFACE | 0x00000010 | XoFLink采集卡 | + | \ref MV_VIR_INTERFACE | 0x00000020 | 虚拟采集卡 | + | \ref MV_LC_INTERFACE | 0x00000040 | 光源控制卡 | + * @param pInterfaceInfoList [IN][OUT] 采集卡列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Enumerates frame grabbers. + * @param nTLayerType [IN] It refers to the frame grabber interface type. eg: (MV_GIGE_INTERFACE | MV_CAMERALINK_INTERFACE | MV_CXP_INTERFACE| MV_XOF_INTERFACE | MV_VIR_INTERFACE | MV_LC_INTERFACE) + * @param pInterfaceInfoList [IN][OUT] It refers to the frame grabber list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API do not support arm and Linux32 platform. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumInterfaces(IN unsigned int nTLayerType, IN OUT MV_INTERFACE_INFO_LIST* pInterfaceInfoList); + +/********************************************************************//** + * @~chinese + * @brief 创建采集卡句柄 + * @param handle [OUT] 采集卡句柄 + * @param pInterfaceInfo [IN] 采集卡信息 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Creates frame grabber handle. + * @param handle [OUT] It refers to the frame grabber handle. + * @param pInterfaceInfo [IN] It refers to the frame grabber information. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API do not support arm and Linux32 platform. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateInterface(IN OUT void ** handle, IN MV_INTERFACE_INFO* pInterfaceInfo); + +/********************************************************************//** + * @~chinese + * @brief 通过采集卡ID创建采集卡句柄 + * @param handle [IN][OUT] 采集卡句柄 + * @param pInterfaceID [IN] 采集卡ID + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Creates frame grabber handle by frame grabber ID. + * @param handle [IN][OUT] It refers to the frame grabber handle. + * @param pInterfaceID [IN] It refers to the frame grabber ID such as frame grabber serial No. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API do not support arm and Linux32 platform. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateInterfaceByID(IN OUT void ** handle, IN const char* pInterfaceID); + +/********************************************************************//** + * @~chinese + * @brief 打开采集卡 + * @param handle [IN] 采集卡句柄 + * @param pReserved [IN] 预留,直接填NULL + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Turns on the frame grabber. + * @param handle [IN] It refers to the frame grabber handle. + * @param pReserved [IN] Reserved (NULL). + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API do not support arm and Linux32 platform. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_OpenInterface(IN void* handle, IN char* pReserved); + +/********************************************************************//** + * @~chinese + * @brief 关闭采集卡 + * @param handle [IN] 采集卡句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Turns off the frame grabber. + * @param handle [IN] It refers to the frame grabber handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API do not support arm and Linux32 platform. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CloseInterface(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 销毁采集卡句柄 + * @param handle [IN]采集卡句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 若传入相机句柄,其效果和 MV_CC_DestroyHandle() 相同; + * @if LINUX + * @remarks 该接口不支持arm和Linux32平台 + * @endif + + * @~english + * @brief Destroys the frame grabber handle. 
+ * @param handle [IN] It refers to the frame grabber handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks If camera handle is passed, the effect is the same as the MV_CC_DestroyHandle. This API do not support arm and Linux32 platform. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DestroyInterface(IN void* handle); + +/// @} + + +/*******************Part4 ch: 相机/采集卡属性万能配置接口 | en: Universal configuration API for camera/frame grabber properties*******************/ + +/// \addtogroup 属性配置 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 获取Integer属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param pstIntValue [IN][OUT] 返回给调用者有关设备属性结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以获取int类型的指定节点的值。具体可查看客户端的属性描述。 + + * @~english + * @brief Gets the value of integer type node. + * @param handle [IN] It refers to the device handle. + * @param strKey [IN] It refers to key value (node name), for example, using "Width" to get the image width. + * @param pstIntValue [IN][OUT] It refers to the pointer to device feature structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of integer type. 
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetIntValueEx(IN void* handle,IN const char* strKey,IN OUT MVCC_INTVALUE_EX *pstIntValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置Integer型属性值
+ * @param handle [IN] 设备句柄/采集卡句柄
+ * @param strKey [IN] 属性键值,如获取宽度信息则为"Width"
+ * @param nValue [IN] 想要设置的设备的属性值
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 连接设备之后调用该接口可以设置int类型的指定节点的值,具体可以查看客户端属性描述。
+
+ * @~english
+ * @brief Sets the value of integer type node
+ * @param handle [IN] It refers to the device handle or frame grabber handle.
+ * @param strKey [IN] It refers to key value (node name), for example, using "Width" to set width.
+ * @param nValue [IN] It refers to the device node value to be set.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks Call this API after connecting to the device to set the value of specified node of integer type.
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetIntValueEx(IN void* handle,IN const char* strKey,IN int64_t nValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 获取Enum属性值
+ * @param handle [IN] 设备句柄/采集卡句柄
+ * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat"
+ * @param pstEnumValue [IN][OUT] 返回给调用者有关设备属性结构体指针
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 连接设备之后调用该接口可以获取Enum类型的指定节点的值。
+
+ * @~english
+ * @brief Gets the value of enumeration type node.
+ * @param handle [IN] It refers to the device handle or frame grabber handle.
+ * @param strKey [IN] It refers to key value (node name), for example, using "PixelFormat" to get pixel format.
+ * @param pstEnumValue [IN][OUT] It refers to the pointer to device feature structure.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Enum type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetEnumValue(IN void* handle,IN const char* strKey,IN OUT MVCC_ENUMVALUE *pstEnumValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Enum属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param pstEnumValue [IN][OUT] 返回给调用者有关设备属性结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 连接设备之后调用该接口可以获取Enum类型的指定节点的值。\n + * \li 区别与 MV_CC_GetEnumValue() ,此接口返回的枚举有效个数扩展到256个。 + + * @~english + * @brief Gets the value of enumeration type node (extended API). + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to key value (node name), for example, using "PixelFormat" to get pixel format. + * @param pstEnumValue [IN][OUT] It refers to the pointer to device feature structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Enum type + The valid enumerations this API returns extend to 256. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetEnumValueEx(IN void* handle, IN const char* strKey, IN OUT MVCC_ENUMVALUE_EX *pstEnumValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Enum型属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param nValue [IN] 想要设置的设备的属性值 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置Enum类型的指定节点的值。 + + * @~english + * @brief Sets the value of enumeration type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to key value (node name), for example, using "PixelFormat" to set pixel format. + * @param nValue [IN] It refers to the device node value to be set. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Enum type + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetEnumValue(IN void* handle,IN const char* strKey,IN unsigned int nValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Enum型节点指定值的符号 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param pstEnumEntry [IN][OUT] 想要获取的设备的属性符号 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以获取Enum类型的指定节点的值所对应的符号。 + + * @~english + * @brief Gets the enumerator name according to the node name and assigned value. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to key value (node name), for example, using "PixelFormat" to get pixel format. + * @param pstEnumEntry [IN][OUT] It refers to the enumerator name. 
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Enum type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetEnumEntrySymbolic(IN void* handle,IN const char* strKey,IN OUT MVCC_ENUMENTRY* pstEnumEntry); + +/********************************************************************//** + * @~chinese + * @brief 设置Enum型属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值,如获取像素格式信息则为"PixelFormat" + * @param strValue [IN] 想要设置的设备的属性字符串 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置Enum类型的指定节点的值。 + + * @~english + * @brief Sets the value of enumeration type node + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to key value (node name), for example, using "PixelFormat" to set pixel format. + * @param strValue [IN] It refers to device property string to be set. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Enum type + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetEnumValueByString(IN void* handle,IN const char* strKey,IN const char* strValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Float属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值 + * @param pstFloatValue [IN][OUT] 返回给调用者有关设备属性结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以获取float类型的指定节点的值。 + + * @~english + * @brief Gets the value of float type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to the key value. 
+ * @param pstFloatValue [IN][OUT] It refers to the structure pointer to the returned device features. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of float type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFloatValue(IN void* handle,IN const char* strKey,IN OUT MVCC_FLOATVALUE *pstFloatValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Float型属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值 + * @param fValue [IN] 想要设置的设备的属性值 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置float类型的指定节点的值。 + + * @~english + * @brief Sets the value of float type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to the key value. + * @param fValue [IN] It refers to device node value to be set. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of float type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFloatValue(IN void* handle,IN const char* strKey,IN float fValue); + +/********************************************************************//** + * @~chinese + * @brief 获取Boolean属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值 + * @param pbValue [IN][OUT] 返回给调用者有关设备属性值 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以获取bool类型的指定节点的值。 + + * @~english + * @brief Gets the value of boolean type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to the key value. 
+ * @param pbValue [IN][OUT] It refers to the structure pointer for returning device features. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of bool type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBoolValue(IN void* handle,IN const char* strKey,IN OUT bool *pbValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Boolean型属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值 + * @param bValue [IN] 想要设置的设备的属性值 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置bool类型的指定节点的值。 + + * @~english + * @brief Sets the value of boolean type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to the key value. + * @param bValue [IN] It refers to device node value to be set. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of bool type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBoolValue(IN void* handle,IN const char* strKey,IN bool bValue); + +/********************************************************************//** + * @~chinese + * @brief 获取String属性值 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strKey [IN] 属性键值 + * @param pstStringValue [IN][OUT] 返回给调用者有关设备属性结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以获取string类型的指定节点的值。 + + * @~english + * @brief Gets the value of string type node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strKey [IN] It refers to the key value. 
+ * @param pstStringValue [IN][OUT] It refers to the structure pointer to the returned device features.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks Call this API after connecting to the device to get the value of specified node of string type.
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_GetStringValue(IN void* handle,IN const char* strKey,IN OUT MVCC_STRINGVALUE *pstStringValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置String型属性值
+ * @param handle [IN] 设备句柄/采集卡句柄
+ * @param strKey [IN] 属性键值
+ * @param strValue [IN] 想要设置的设备的属性值
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 连接设备之后调用该接口可以设置string类型的指定节点的值。
+
+ * @~english
+ * @brief Sets the value of string type node.
+ * @param handle [IN] It refers to the device handle or frame grabber handle.
+ * @param strKey [IN] It refers to the key value.
+ * @param strValue [IN] It refers to device node value to be set.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure.
+ * @remarks Call this API after connecting to the device to set the value of specified node of string type.
+ ************************************************************************/
+MV_CAMCTRL_API int __stdcall MV_CC_SetStringValue(IN void* handle,IN const char* strKey,IN const char* strValue);
+
+/********************************************************************//**
+ * @~chinese
+ * @brief 设置Command型属性值
+ * @param handle [IN] 设备句柄/采集卡句柄
+ * @param strKey [IN] 属性键值
+ * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。
+ * @remarks 连接设备之后调用该接口可以设置指定的Command类型节点。
+
+ * @~english
+ * @brief Sets the value of command type node.
+ * @param handle [IN] It refers to the device handle or frame grabber handle.
+ * @param strKey [IN] It refers to the key value.
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to get the value of specified node of Command type. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetCommandValue(IN void* handle,IN const char* strKey); + +/********************************************************************//** + * @~chinese + * @brief 获得当前节点的访问模式 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strName [IN] 节点名称 + * @param penAccessMode [IN][OUT] 节点的访问模式 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Gets access mode of the current node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strName [IN] It refers to the node name. + * @param penAccessMode [IN][OUT] It refers to access mode for node. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetNodeAccessMode(IN void* handle, IN const char * strName, IN OUT enum MV_XML_AccessMode *penAccessMode); + +/********************************************************************//** + * @~chinese + * @brief 获得当前节点的类型 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strName [IN] 节点名称 + * @param penInterfaceType [IN][OUT] 节点的类型 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 在调用 MV_CC_GetIntValueEx() 、 MV_CC_SetIntValueEx() 等万能接口之前,您可调用该接口获取节点类型,方便选择合适的接口进行节点值的设置和获取。 + + * @~english + * @brief Gets the type of the current node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strName [IN] It refers to the node name. + * @param penInterfaceType [IN][OUT] It refers to node type. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks You can call this API to get the node type in advance before calling the universal API, so that you can choose the proper universal API to get and set node value. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetNodeInterfaceType(IN void* handle, IN const char * strName, IN OUT enum MV_XML_InterfaceType *penInterfaceType); + +/********************************************************************//** + * @~chinese + * @brief 导入设备属性 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strFileName [IN] 属性文件名 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Loads device features. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strFileName [IN] It refers to the feature file name. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FeatureLoad(IN void* handle, IN const char* strFileName); + +/********************************************************************//** + * @~chinese + * @brief 导入设备属性并保存错误信息列表 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strFileName [IN] 属性文件名 + * @param pstNodeErrorList [IN OUT] 错误信息列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 部分节点导入失败时,接口返回MV_OK,通过错误信息列表中stNodeError获取出错节点及失败原因 + * \li pstNodeErrorList该参数在外部申请并由内部填充数据。该参数可接受填入null,代表用户不关心导入时的错误信息。 + + * @~english + * @brief Loads device features, and saves error information list. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param strFileName [IN] It refers to the feature file name. + * @param pstNodeErrorList [IN OUT] It refers to the error message list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks If loading part of the nodes fails, the API will return MV_OK. 
You can get the error node and the reason for failure through stNodeError in the error message list. + * The parameter pstNodeErrorList is requested by the user externally and filled with data internally, and the value null indicates that the user is not concerned about error occurred when loading. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FeatureLoadEx(IN void* handle, IN const char* strFileName, IN OUT MVCC_NODE_ERROR_LIST* pstNodeErrorList); + +/********************************************************************//** + * @~chinese + * @brief 保存设备属性 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param strFileName [IN] 属性文件名 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Saves device features. + * @param handle [IN] It refers to device handle / frame grabber handle. + * @param strFileName [IN] It refers to the feature file name. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FeatureSave(IN void* handle, IN const char* strFileName); + +/// @} + +/// \addtogroup 读写寄存器 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 读内存 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pBuffer [IN][OUT] 作为返回值使用,保存读到的内存值(GEV设备内存值是按照大端模式存储的,采集卡设备和采集卡下相机按照大端存储,其它协议设备按照小端存储) + * @param nAddress [IN] 待读取的内存地址,该地址可以从设备的Camera.xml文件中获取,形如xxx_RegAddr的xml节点值 + * @param nLength [IN] 待读取的内存长度 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 读取设备某段寄存器的数据。 + + * @~english + * @brief Read data from device register. + * @param handle [IN] It refers to the device handle or frame grabber handle. 
+ * @param pBuffer [IN][OUT] It refers to data buffer for saving memory value that is read (GEV memory value is stored based on big-endian mode, frame grabber device is stored based on big-endian mode, and memory value of other devices is stored based on little-endian mode) + * @param nAddress [IN] It refers to memory address to be read. It can be acquired from Camera.xml file, in a form similar to XML node value of xxx_RegAddr. + * @param nLength [IN] It refers to length of memory to be read. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Access device and read the data from certain register. +*************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ReadMemory(IN void* handle , IN OUT void *pBuffer, IN int64_t nAddress, IN int64_t nLength); + +/********************************************************************//** + * @~chinese + * @brief 写内存 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pBuffer [IN] 待写入的内存值(注意GEV设备内存值要按照大端模式存储,采集卡设备和采集卡下相机按照大端存储,其它协议设备按照小端存储) + * @param nAddress [IN] 待写入的内存地址,该地址可以从设备的Camera.xml文件中获取,形如xxx_RegAddr的xml节点值 + * @param nLength [IN] 待写入的内存长度 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 访问设备,把一段数据写入某段寄存器。 + + * @~english + * @brief Writes data into device register. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pBuffer [IN] It refers to memory value to be written(GEV memory value is stored based on big-endian mode, frame grabber device is stored based on big-endian mode, and memory value of other devices is stored based on little-endian mode) + * @param nAddress [IN] It refers to memory address to be written to. It can be acquired from Camera.xml file, in a form similar to XML node value of xxx_RegAddr. + * @param nLength [IN] It refers to length of memory to be written. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks Access device for writing data to certain segment of register. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_WriteMemory(IN void* handle, IN const void *pBuffer, IN int64_t nAddress, IN int64_t nLength); + +/// @} + +/// \addtogroup 设备XML文件 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 清除GenICam节点缓存 + * @param handle [IN] 设备句柄/采集卡句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remark 在加载工业相机节点时需要读取GenICam配置文件,该接口可以起到清除GenICam缓存的功能。 + + * @~english + * @brief Clears the buffer of the GenICam node. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_InvalidateNodes(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 获取设备属性树XML + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pData [IN][OUT] XML数据接收缓存 + * @param nDataSize [IN] 接收缓存大小 + * @param pnDataLen [IN][OUT] 实际数据大小 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 当pData为NULL或nDataSize比实际的xml文件小时,不拷贝数据,由pnDataLen返回xml文件大小。 \n + \li 当pData为有效缓存地址,且缓存足够大时,拷贝完整数据保存在该缓存里面,并由pnDataLen返回xml文件实际大小。 \n + + * @~english + * @brief Gets the XML file of device feature tree. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pData [IN][OUT] It refers to received XML data buffer. + * @param nDataSize [IN] It refers to received buffer size. + * @param pnDataLen [IN][OUT] It refers to actual data size. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks When pData is NULL or when the value of nDataSize is less than the XML file size, no data will be copied, and the XML file size will be returned by pnDataLen. + When pData is valid and the buffer size is enough, the complete data will be copied and stored in the buffer, and the XML file size will be returned by pnDataLen. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetGenICamXML(IN void* handle, IN OUT unsigned char* pData, IN unsigned int nDataSize, IN OUT unsigned int* pnDataLen); + +/// @} + +/// \addtogroup 读写相机文件 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 从设备读取文件 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pstFileAccess [IN] 文件存取结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Reads the file from the device. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pstFileAccess [IN] It refers to the file access structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessRead(IN void* handle, IN MV_CC_FILE_ACCESS * pstFileAccess); + +/********************************************************************//** + * @~chinese + * @brief 从设备读取文件(扩展接口,文件是Data数据) + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pstFileAccessEx [IN] 文件存取结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口直接使用缓存数据,进行读写操作,避免直接操作文件出现无权限的问题。该接口是 MV_CC_FileAccessRead() 的扩展接口。 + + * @~english + * @brief Reads the Data file from the device (extended). + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pstFileAccessEx [IN] It refers to the file access structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessReadEx(IN void* handle, IN OUT MV_CC_FILE_ACCESS_EX * pstFileAccessEx); + +/********************************************************************//** + * @~chinese + * @brief 将文件写入设备 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pstFileAccess [IN] 文件存取结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Writes file to device. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pstFileAccess [IN] It refers to the file access structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessWrite(IN void* handle, IN MV_CC_FILE_ACCESS * pstFileAccess); + +/********************************************************************//** + * @~chinese + * @brief 将缓存(buffer)写入设备 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pstFileAccessEx [IN][OUT] 文件存取结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口直接使用缓存数据,进行写操作,避免直接将文件写入C盘,系统保护出现写失败。该接口是 MV_CC_FileAccessWrite() 的扩展接口。 + + * @~english + * @brief Writes buffer to device. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pstFileAccessEx [IN][OUT] It refers to the file access structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to write the file by using the buffer data in case of the error of system protection when operating files in C disk. This API is the extended API of MV_CC_FileAccessWrite(). 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FileAccessWriteEx(IN void* handle, IN OUT MV_CC_FILE_ACCESS_EX * pstFileAccessEx); + +/********************************************************************//** + * @~chinese + * @brief 获取文件存取的进度 + * @param handle [IN] 设备句柄/采集卡句柄 + * @param pstFileAccessProgress [IN][OUT] 进度内容 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 (当前文件存取的状态) + + * @~english + * @brief Gets file access progress. + * @param handle [IN] It refers to the device handle or frame grabber handle. + * @param pstFileAccessProgress [IN][OUT] It refers to the file access progress. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFileAccessProgress(IN void* handle, IN OUT MV_CC_FILE_ACCESS_PROGRESS * pstFileAccessProgress); + +/// @} + +/*******************Part5 ch: 相机和采集卡 升级 | en: Camera /Frame grabber upgrade *******************/ + +/// \addtogroup 设备升级 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 设备本地升级 + * @param handle [IN] 设备句柄 + * @param strFilePathName [IN] 文件名 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 通过该接口可以将升级固件文件发送给设备进行升级。该接口需要等待升级固件文件成功传给设备端之后再返回,响应时间可能较长。 + + * @~english + * @brief Upgrades device via local file. + * @param handle [IN] It refers to the device handle. + * @param strFilePathName [IN] It refers to the file name. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Provided this API to send upgrade firmware to device. + It may take a long response time since the API will only be recalled after the upgrade firmware is sent to device. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_LocalUpgrade(IN void* handle, IN const void* strFilePathName); + +/********************************************************************//** + * @~chinese + * @brief 获取升级进度 + * @param handle [IN] 设备句柄 + * @param pnProcess [IN][OUT] 进度接收地址 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Gets the upgrade progress. + * @param handle [IN] It refers to the device handle. + * @param pnProcess [IN][OUT] It refers to address for receiving upgrade progress. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetUpgradeProcess(IN void* handle, IN OUT unsigned int* pnProcess); + +/// @} + +/*******************Part6 ch: 相机和采集卡 注册异常回调和事件接口 | en: Exception callback registration and event API for cameras and frame grabbers*******************/ + +/// \addtogroup 事件及异常 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 注册异常消息回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param cbException [IN] 异常回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 该接口需要在 MV_CC_OpenDevice() 打开设备之后调用。\n + \li 设备异常断开连接后可以在回调里面获取到异常消息,GigE设备掉线之后需要先调用 MV_CC_CloseDevice() 接口关闭设备,再调用 MV_CC_OpenDevice() 接口重新打开设备。 + | 宏定义 | 宏定义值 | 含义 | + | :---: | :---: | :---: | + | \ref MV_EXCEPTION_DEV_DISCONNECT | 0x00008001 | 设备断开连接 | + + * @~english + * @brief Register callback function for getting exception information of cameras and dongles. + * @param handle [IN] It refers to the device handle. + * @param cbException [IN] It refers to the pointer to the exception callback function. + * @param pUser [IN] It refers to the user-defined variable. 
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after turning on the device by calling MV_CC_OpenDevice(). + When the device is exceptionally disconnected, you can get the exception message from callback function. + For disconnected GigE device, call MV_CC_CloseDevice() to turn off the device, and then call MV_CC_OpenDevice() to turn on the device again. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterExceptionCallBack(IN void* handle, IN MvExceptionCallback cbException, IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册全部事件回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 通过该接口设置事件回调,可以在回调函数里面获取采集、曝光等事件信息。 + \li 该接口不支持CameraLink设备。 + + * @~english + * @brief Registers a callback for all events. + * @param handle [IN] It refers to the device handle. + * @param cbEvent [IN] It refers to the pointer to the event callback function. + * @param pUser [IN] It refers to user-defined variable. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to set the event callback function to get event information including acquisition and exposure. + This API is not supported by CameraLink device. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterAllEventCallBack(IN void* handle, IN MvEventCallback cbEvent, IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册单个事件回调,在打开设备之后调用 + * @param handle [IN] 设备句柄 + * @param strEventName [IN] 事件名称 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 通过该接口设置事件回调,可以在回调函数里面获取采集、曝光等事件信息。 + \li 该接口不支持CameraLink设备。 + + * @~english + * @brief Registers a callback for single event. + * @param handle [IN] It refers to the device handle. + * @param strEventName [IN] It refers to the event name. + * @param cbEvent [IN] It refers to the pointer to the event callback function. + * @param pUser [IN] It refers to user-defined variable. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to set the event callback function to get event information including acquisition and exposure. + This API is not supported by CameraLink device. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterEventCallBackEx(IN void* handle, IN const char* strEventName, IN MvEventCallback cbEvent, IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册流异常消息回调 + * @param handle [IN] 设备句柄 + * @param cbStreamException [IN] 异常回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 +* @remarks \li 该接口注册的回调函数中不能调用 MV_CC_StopGrabbing() 、 MV_CC_CloseDevice() 和 MV_CC_DestroyHandle() ,只用于消息通知。 + \li 该接口不支持虚拟相机和导入三方cti的场景。 + + * @~english + * @brief Registers a stream exception callback. + * @param handle [IN] It refers to the device handle.
+ * @param cbStreamException [IN] It refers to the pointer to the exception callback function. + * @param pUser [IN] It refers to the user-defined variable. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks In the callback function registered via this API, MV_CC_StopGrabbing(), MV_CC_CloseDevice(), and MV_CC_DestroyHandle() cannot be called, and the callback function can only be used for message notification. + This API does not support virtual cameras and scenarios involving the import of third-party CTI files. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterStreamExceptionCallBack(IN void* handle, IN MvStreamExceptionCallback cbStreamException, IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 开启设备指定事件 + * @param handle [IN] 设备句柄 + * @param strEventName [IN] 事件名称 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Enables specified event of device. + * @param handle [IN] It refers to the device handle. + * @param strEventName [IN] It refers to the event name. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EventNotificationOn(IN void* handle, IN const char* strEventName); + +/********************************************************************//** + * @~chinese + * @brief 关闭设备指定事件 + * @param handle [IN] 设备句柄 + * @param strEventName [IN] 事件名称 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Disable specified event of device + * @param handle [IN] It refers to the device handle. + * @param strEventName [IN] It refers to the event name. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EventNotificationOff(IN void* handle, IN const char* strEventName); + +/// @} + +/*******************Part7 ch: 仅GigE设备支持的接口 | en: API exclusively for GigE devices*******************/ + +/// \addtogroup GigE相机 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 设置枚举超时时间,仅支持GigE协议,范围:[1, UINT_MAX) + * @param nMilTimeout [IN] 超时时间,应为无符号整数,默认100ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 在调用 MV_CC_EnumDevices() 等枚举接口前使用该接口,可设置枚举GIGE设备的网卡最大超时时间(默认100ms),可以减少最大超时时间,以加快枚举GIGE设备的速度。\n + \li 该接口仅支持输入无符号整数。 + * @note 该接口仅支持GigEVision设备。 + + * @~english + * @brief Sets enumeration timeout duration, range: [1, UINT_MAX). Only GigE protocol is supported. + * @param nMilTimeout [IN] It refers to the timeout duration, unit: millisecond. The value should be an integer (100 by default). + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API before calling enumeration APIs including MV_CC_EnumDevices() to set the timeout duration for enumerating GigE devices (100 ms by default). You can accelerate the enumeration by reducing the timeout duration. + * @remarks Only supports GigE vision devices. 
+ + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetEnumDevTimeout(IN unsigned int nMilTimeout); + +/********************************************************************//** + * @~chinese + * @brief 强制IP + * @param handle [IN] 设备句柄 + * @param nIP [IN] 设置的IP + * @param nSubNetMask [IN] 子网掩码 + * @param nDefaultGateWay [IN] 默认网关 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 强制设置设备网络参数(包括IP、子网掩码、默认网关),强制设置之后将需要重新创建设备句柄,支持GigEVision(MV_GIGE_DEVICE)设备和GenTL(MV_GENTL_GIGE_DEVICE)设备。 \n + \li 如果设备为DHCP的状态,调用该接口强制设置设备网络参数之后设备将会重启。 \n + + * @~english + * @brief Sets device network parameters forcefully, including IP address, subnet mask, and default gateway. + * @param handle [IN] It refers to the device handle. + * @param nIP [IN] It refers to the IP address. + * @param nSubNetMask [IN] It refers to the subnet mask. + * @param nDefaultGateWay [IN] It refers to the default gateway. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks After forcing the configuration of device network parameters (including IP address, subnet mask,and default gateway), create device handle again. + This API is supported GigEVision(MV_GIGE_DEVICE) and GenTL(MV_GENTL_GIGE_DEVICE) device. + The device will restart after calling this API to set network parameters forcefully when the device is in DHCP status. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_ForceIpEx(IN void* handle, IN unsigned int nIP, IN unsigned int nSubNetMask, IN unsigned int nDefaultGateWay); + +/********************************************************************//** + * @~chinese + * @brief 配置IP方式 + * @param handle [IN] 设备句柄 + * @param nType [IN] IP类型,见MV_IP_CFG_x + | 宏定义 | 宏定义值 | 含义 | + | :---: | :---: | :---: | + | \ref MV_IP_CFG_STATIC | 0x05000000 | 固定IP地址模式 | + | \ref MV_IP_CFG_DHCP | 0x06000000 | DHCP自动获取IP模式 | + | \ref MV_IP_CFG_LLA | 0x04000000 | LLA(Link-local address),链路本地地址 | + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 发送命令设置设备的IP方式,如DHCP、LLA等,仅支持GigEVision(MV_GIGE_DEVICE)和GenTl(MV_GENTL_GIGE_DEVICE)的设备。 + + * @~english + * @brief Configures IP mode. + * @param handle [IN] It refers to the device handle. + * @param nType [IN] It refers to IP type. Refer to MV_IP_CFG_x for more details. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is only supported by GigE vision devices and GenTL(MV_GENTL_GIGE_DEVICE) device. You can send command to set the MVC IP configuration mode, including DHCP and LLA. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetIpConfig(IN void* handle, IN unsigned int nType); + +/********************************************************************//** + * @~chinese + * @brief 设置仅使用某种模式,type: MV_NET_TRANS_x,不设置时,默认优先使用driver + * @param handle [IN] 设备句柄 + * @param nType [IN] 网络传输模式,见MV_NET_TRANS_x + | 宏定义 | 宏定义值 | 含义 | + | :---: | :---: | :---: | + | \ref MV_NET_TRANS_DRIVER | 0x00000001 | 驱动模式 | + | \ref MV_NET_TRANS_SOCKET | 0x00000002 | Socket模式 | + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 通过该接口可以设置SDK内部优先使用的网络模式,默认优先使用驱动模式,仅GigEVision设备支持。 + + * @~english + * @brief Sets SDK internal priority network mode. 
If it is not set, driver mode is used by default. + * @param handle [IN] It refers to the device handle. + * @param nType [IN] It refers to the network transmission mode. See MV_NET_TRANS_x for details. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks You can call this API to set the internal priority network mode for the SDK (driver mode by default). This API is supported by GigE vision devices only. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetNetTransMode(IN void* handle, IN unsigned int nType); + +/********************************************************************//** + * @~chinese + * @brief 获取网络传输信息 + * @param handle [IN] 设备句柄 + * @param pstInfo [IN][OUT] 信息结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 通过该接口可以获取网络传输相关信息,包括已接收数据大小、丢帧数量等,在 MV_CC_StartGrabbing() 开启采集之后调用。仅GigEVision相机支持。 + + * @~english + * @brief Gets network transmission information. + * @param handle [IN] It refers to the device handle. + * @param pstInfo [IN][OUT] It refers to network transmission information structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get information about network transmission after grabbing images via calling MV_CC_StartGrabbing(), including received data size and the number of lost frames. + This API is supported only by GigEVision devices. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetNetTransInfo(IN void* handle, IN OUT MV_NETTRANS_INFO* pstInfo); + +/********************************************************************//** + * @~chinese + * @brief 设置枚举命令的回复包类型 + * @param nMode [IN] 回复包类型(默认广播),0-单播,1-广播 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口只对GigE相机有效。 + + * @~english + * @brief Sets the ACK mode of enumeration command.
+ * @param nMode [IN] It refers to the ACK mode (default broadcast). 0: unicast; 1: broadcast. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is only supported by GigE devices. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetDiscoveryMode(IN unsigned int nMode); + +/********************************************************************//** + * @~chinese + * @brief 设置GVSP取流超时时间 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 超时时间,默认300ms,范围:>10ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后,取流动作发生前,调用该接口可以设置GVSP取流超时时间。GVSP取流超时设置过短可能造成图像异常,设置过长可能造成取流时间变长。 + + * @~english + * @brief Sets timeout duration for image grabbing via GVSP. + * @param handle [IN] It refers to the device handle. + * @param nMillisec [IN] It refers to timeout duration (unit:millisecond), range:>10ms. The default value is 300 ms. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks After connecting to the device and before starting image grabbing, call this API to set timeout duration for image grabbing via GVSP. + Image exception might occur if timeout duration is too short, and the streaming duration will become longer if timeout duration is too long. + * + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGvspTimeout(IN void* handle, IN unsigned int nMillisec); + +/********************************************************************//** + * @~chinese + * @brief 获取GVSP取流超时时间 + * @param handle [IN] 设备句柄 + * @param pnMillisec [IN][OUT] 超时时间指针,以毫秒为单位 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的GVSP取流超时时间 + + * @~english + * @brief Gets timeout for image grabbing via GVSP. + * @param handle [IN] It refers to the device handle. 
+ * @param pnMillisec [IN][OUT] It refers to pointer to the timeout duration, unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get the current timeout duration of image grabbing via GVSP. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGvspTimeout(IN void* handle, IN OUT unsigned int* pnMillisec); + +/********************************************************************//** + * @~chinese + * @brief 设置GVCP命令超时时间 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 超时时间(ms),默认500ms,范围:[0,10000] + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置GVCP命令超时时间。 + + * @~english + * @brief Sets timeout for GVCP command. + * @param handle [IN] It refers to the device handle. + * @param nMillisec [IN] It refers to the timeout duration, range: [0, 10000], unit: millisecond. It is 500 ms by default. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to set the timeout of GVCP command. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGvcpTimeout(IN void* handle, IN unsigned int nMillisec); + +/********************************************************************//** + * @~chinese + * @brief 获取GVCP命令超时时间 + * @param handle [IN] 设备句柄 + * @param pnMillisec [IN][OUT] 超时时间指针,以毫秒为单位 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的GVCP超时时间。 + + * @~english + * @brief Gets timeout duration for GVCP command. + * @param handle [IN] It refers to the device handle. + * @param pnMillisec [IN][OUT] It refers to pointer to the timeout duration, unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get the current GVCP timeout duration. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGvcpTimeout(IN void* handle, IN OUT unsigned int* pnMillisec); + +/********************************************************************//** + * @~chinese + * @brief 设置重传GVCP命令次数 + * @param handle [IN] 设备句柄 + * @param nRetryGvcpTimes [IN] 重传次数,范围:0-100 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于在GVCP包传输异常时,增加重传的次数,在一定程度上可以避免设备掉线,范围为0-100。 + + * @~english + * @brief Sets the number of times for resending GVCP command. + * @param handle [IN] It refers to the device handle. + * @param nRetryGvcpTimes [IN] It refers to the number of times for resending. It should be between 0 and 100. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to increase the resending times when exception occurred during GVCP packet transmission, range: [0, 100]. To some extent, it can prevent the device from getting offline. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetRetryGvcpTimes(IN void* handle, IN unsigned int nRetryGvcpTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取重传GVCP命令次数 + * @param handle [IN] 设备句柄 + * @param pnRetryGvcpTimes [IN][OUT] 重传次数指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的GVCP重传次数,默认3次。 + + * @~english + * @brief Gets the number of times for resending GVCP command. + * @param handle [IN] It refers to the device handle. + * @param pnRetryGvcpTimes [IN][OUT] It refers to the pointer to resending times. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get the current resending times of GVCP command (3 by default). 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetRetryGvcpTimes(IN void* handle, IN OUT unsigned int* pnRetryGvcpTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取最佳的packet size,该接口目前只支持GigE设备 + * @param handle [IN] 设备句柄 + * @return 最佳packetsize + * @remarks \li 获取最佳的packet size,对应GigEVision设备是SCPS,对应U3V设备是每次从驱动读取的包大小,该大小即网络上传输一个包的大小。\n + \li 该接口需要在 MV_CC_OpenDevice() 之后、 MV_CC_StartGrabbing() 之前调用。\n + \li 该接口不支持CameraLink设备、U3V设备。\n + \li 该接口不支持GenTL设备(协议不支持),如果是GenTL方式添加的网口相机,建议根据网络实际情况配置GevSCPSPacketSize,或者配置1500。 + + * @~english + * @brief Gets the optimal packet size. + * @param handle [IN] It refers to the device handle. + * @return Returns optimal packet size. + * @remarks The optimal packet size for GigEVision device is SCPS. + This API should be called after calling MV_CC_OpenDevice(), and before calling MV_CC_StartGrabbing(). + This API is not supported by CameraLink device and U3V device. + This API is not supported by GenTL device (unsupported protocols). For GigE Vision cameras added via GenTL, configure GevSCPSPacketSize or configure 1500 as needed. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOptimalPacketSize(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 设置是否打开重发包支持,及重发包设置 + * @param handle [IN] 设备句柄 + * @param bEnable [IN] 是否支持重发包 + * @param nMaxResendPercent [IN] 最大重发比 + * @param nResendTimeout [IN] 重发超时时间,范围:0-10000ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 连接设备之后调用该接口可以设置重发包属性,仅GigEVision设备支持。 + + * @~english + * @brief Sets whether to enable packet resending, and sets corresponding parameters. + * @param handle [IN] It refers to the device handle. + * @param bEnable [IN] Whether to enable packet resending. 
+ * @param nMaxResendPercent [IN] It refers to the max. resending percent. + * @param nResendTimeout [IN] It refers to resending timeout duration. rang:0-10000ms + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API after connecting to the device to set packet resending parameters. This API is only supported by GigE vision devices. + ************************************************************************/ +#ifndef __cplusplus +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResend(IN void* handle, IN unsigned int bEnable, IN unsigned int nMaxResendPercent, IN unsigned int nResendTimeout); +#else +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResend(IN void* handle, IN unsigned int bEnable, IN unsigned int nMaxResendPercent = 100, IN unsigned int nResendTimeout = 50); +#endif + +/********************************************************************//** + * @~chinese + * @brief 设置重传命令最大尝试次数 + * @param handle [IN] 设备句柄 + * @param nRetryTimes [IN] 重传命令最大尝试次数,默认20 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口必须在调用 MV_GIGE_SetResend() 开启重传包功能之后调用,否则失败且返回MV_E_CALLORDER + + * @~english + * @brief Sets the max. command resending times. + * @param handle [IN] It refers to the device handle. + * @param nRetryTimes [IN] It refers to the max. command resending times. It is 20 by default. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API should be called after resending packet is enabled via calling MV_GIGE_SetResend(). If APIs are not called in order, it will be failed and MV_E_CALLORDER will be returned. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResendMaxRetryTimes(IN void* handle, IN unsigned int nRetryTimes); + +/********************************************************************//** + * @~chinese + * @brief 获取重传命令最大尝试次数 + * @param handle [IN] 设备句柄 + * @param pnRetryTimes [IN][OUT] 重传命令最大尝试次数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口必须在调用 MV_GIGE_SetResend() 开启重传包功能之后调用,否则失败且返回MV_E_CALLORDER。 + + * @~english + * @brief Gets the max. command resending times. + * @param handle [IN] It refers to the device handle. + * @param pnRetryTimes [IN][OUT] It refers to the max. times to retry resending lost packets + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API should be called after resending packet is enabled via calling MV_GIGE_SetResend(). If APIs are not called in order, it will be failed and MV_E_CALLORDER will be returned. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetResendMaxRetryTimes(IN void* handle, IN OUT unsigned int* pnRetryTimes); + +/********************************************************************//** + * @~chinese + * @brief 设置同一重传包多次请求之间的时间间隔 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 同一重传包多次请求之间的时间间隔,默认10ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口必须在调用 MV_GIGE_SetResend() 开启重传包功能之后调用,否则失败且返回MV_E_CALLORDER。 + + * @~english + * @brief Sets the time interval of two resending requests for one packet. + * @param handle [IN] It refers to the device handle. + * @param nMillisec [IN] It refers to the time interval of two resending requests for one packet. It is 10 ms by default. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API should be called after resending packet is enabled via calling MV_GIGE_SetResend(). 
If APIs are not called in order, MV_GIGE_SetResendTimeInterval() calling will be failed and MV_E_CALLORDER will be returned. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetResendTimeInterval(IN void* handle, IN unsigned int nMillisec); + +/********************************************************************//** + * @~chinese + * @brief 获取同一重传包多次请求之间的时间间隔 + * @param handle [IN] 设备句柄 + * @param pnMillisec [IN][OUT] 同一重传包多次请求之间的时间间隔 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口必须在调用 MV_GIGE_SetResend() 开启重传包功能之后调用,否则失败且返回MV_E_CALLORDER。 + + * @~english + * @brief Gets the time interval of two resending requests for one packet. + * @param handle [IN] It refers to the device handle. + * @param pnMillisec [IN][OUT] It refers to the time interval of two resending requests for one packet. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API should be called after resending packet is enabled via calling MV_GIGE_SetResend(). If APIs are not called in order, MV_GIGE_GetResendTimeInterval() calling will be failed and MV_E_CALLORDER will be returned. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetResendTimeInterval(IN void* handle, IN OUT unsigned int* pnMillisec); + +/********************************************************************//** + * @~chinese + * @brief 设置传输模式,可以为单播模式、组播模式等 + * @param handle [IN] 设备句柄 + * @param pstTransmissionType [IN] 传输模式结构体 + | 宏定义 | 宏定义值 | 含义 | + | :---: | :---: | :---: | + | \ref MV_GIGE_TRANSTYPE_UNICAST | 0 | 单播 | + | \ref MV_GIGE_TRANSTYPE_MULTICAST | 1 |组播 | + | \ref MV_GIGE_TRANSTYPE_LIMITEDBROADCAST | 2 | 表示局域网内广播 | + | \ref MV_GIGE_TRANSTYPE_SUBNETBROADCAST | 3 | 表示子网内广播 | + | \ref MV_GIGE_TRANSTYPE_CAMERADEFINED | 4 | 表示从相机获取 | + | \ref MV_GIGE_TRANSTYPE_UNICAST_DEFINED_PORT | 5 | 表示用户自定义应用端接收图像数据Port号 | + | \ref MV_GIGE_TRANSTYPE_UNICAST_WITHOUT_RECV | 00010000 | 表示设置了单播,但本实例不接收图像数据 | + | \ref MV_GIGE_TRANSTYPE_MULTICAST_WITHOUT_RECV | 00010001 | 表示组播模式,但本实例不接收图像数据 | + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 通过该接口可以设置传输模式为单播、组播等模式,仅GigEVision设备支持。 + + * @~english + * @brief Sets the transmission mode, including unicast and multicast. + * @param handle [IN] It refers to the device handle. + * @param pstTransmissionType [IN] It refers to the transmission mode structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to set the transmission mode as unicast mode and multicast mode.This API is only supported by GigE vision devices. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetTransmissionType(IN void* handle, IN MV_TRANSMISSION_TYPE * pstTransmissionType); + +/********************************************************************//** + * @~chinese + * @brief 发出动作命令 + * @param pstActionCmdInfo [IN] 动作命令信息 + * @param pstActionCmdResults [IN][OUT] 动作命令返回信息列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 仅GigEVision设备支持。 + + * @~english + * @brief Sends action commands. + * @param pstActionCmdInfo [IN] It refers to information of action commands. + * @param pstActionCmdResults [IN][OUT] It refers to list of returned information about action commands. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is supported only by GigEVision devices. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_IssueActionCommand(IN MV_ACTION_CMD_INFO* pstActionCmdInfo, IN OUT MV_ACTION_CMD_RESULT_LIST* pstActionCmdResults); + +/********************************************************************//** + * @~chinese + * @brief 获取组播状态 + * @param pstDevInfo [IN] 设备信息结构体 + * @param pbStatus [IN][OUT] 组播状态,true:组播状态,false:非组播 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于判断设备当前是否处于组播状态,解决客户端枚举时需要打开设备判断组播的问题。 \n + 仅支持标准GigE Vision设备。 + + * @~english + * @brief Gets multicast status. + * @param pstDevInfo [IN] It refers to device information. + * @param pbStatus [IN][OUT] It refers to status (true: multicast status; false: not multicast status). + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks When enumerating the device, call this API to check if the device is in multicast status without turning on the device. + This API only support GigE Vision Device. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetMulticastStatus(IN MV_CC_DEVICE_INFO* pstDevInfo, IN OUT bool* pbStatus); + +/// @} + +/*******************Part8 ch: 仅CameraLink 设备支持的接口 | en: API exclusively for CameraLink devices*******************/ + +/// \addtogroup 串口设备 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 获取串口信息列表 + * @param pstSerialPortList [IN][OUT] 串口信息列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取本地的串口信息。 + + * @~english + * @brief Gets serial port information list. + * @param pstSerialPortList [IN][OUT] It refers to serial port information list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is used to get local serial port information. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetSerialPortList(IN OUT MV_CAML_SERIAL_PORT_LIST* pstSerialPortList); + +/********************************************************************//** + * @~chinese + * @brief 设置取指定枚举串口 + * @param pstSerialPortList [IN][OUT] 串口信息列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于设置枚举CameraLink 设备的指定串口。 + + * @~english + * @brief Specifies the serial ports for enumerations. + * @param pstSerialPortList [IN] It refers to serial port information list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to specify serial ports for camera link device enumeration. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetEnumSerialPorts(IN MV_CAML_SERIAL_PORT_LIST* pstSerialPortList); + +/***********************************************************************************************************//** + * @~chinese + * @brief 设置设备波特率 + * @param handle [IN] 设备句柄 + * @param nBaudrate [IN] 设置的波特率值,数值参考CameraParams.h中宏定义 + | CamercaLink波特率定义 | 值 | 对应的波特率值 | + | :--- | :---: | :--- | + | \ref MV_CAML_BAUDRATE_9600 | 0x00000001 | 9600 | + | \ref MV_CAML_BAUDRATE_19200 | 0x00000002 | 19200 | + | \ref MV_CAML_BAUDRATE_38400 | 0x00000004 | 38400 | + | \ref MV_CAML_BAUDRATE_57600 | 0x00000008 | 57600 | + | \ref MV_CAML_BAUDRATE_115200 | 0x00000010 | 115200 | + | \ref MV_CAML_BAUDRATE_230400 | 0x00000020 | 230400 | + | \ref MV_CAML_BAUDRATE_460800 | 0x00000040 | 460800 | + | \ref MV_CAML_BAUDRATE_921600 | 0x00000080 | 921600 | + | \ref MV_CAML_BAUDRATE_AUTOMAX | 0x40000000 | 最大值 | + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口支持在设备未连接时调用。通过GenTL协议访问设备时,需要先连接设备,才能调用该接口。\n + 因硬件/系统/外部干扰等因素,配置高波特率可能导致通信异常,建议配置波特率最大小于115200。 + + * @~english + * @brief Sets baud rate for the device. + * @param handle [IN] It refers to the device handle. + * @param nBaudrate [IN] It refers to baud rate to set. Refer to the 'CameraParams.h' for parameter definitions. for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks You can call this API when the device is not connected. If the device is accessed via GenTL protocol, call this API after the device is connected. + High baud rate may cause communication exception due to factors such as hardware specification, system configuration, and external interference. 
+ It is recommended to configure a baud rate of less than 115200 +************************************************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetDeviceBaudrate(IN void* handle, IN unsigned int nBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备波特率 + * @param handle [IN] 设备句柄 + * @param pnCurrentBaudrate [IN][OUT] 波特率信息指针,数值参考CameraParams.h中宏定义 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口支持在设备未连接时调用。 + + * @~english + * @brief Gets baud rate for devices. + * @param handle [IN] It refers to the device handle. + * @param pnCurrentBaudrate [IN][OUT] It refers to the pointer to baud rate information. See the 'CameraParams.h' for parameter definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks You can call this API when the device is not connected. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetDeviceBaudrate(IN void* handle,IN OUT unsigned int* pnCurrentBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备与主机间连接支持的波特率 + * @param handle [IN] 设备句柄 + * @param pnBaudrateAblity [IN][OUT] 支持的波特率信息的指针。 所有支持波特率的"或运算"结果,单个数值参考CameraParams.h中宏定义 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口支持在设备未连接时调用。 + + * @~english + * @brief Gets the supported baud rate of the connection between the device and host. + * @param handle [IN] It refers to the device handle. + * @param pnBaudrateAblity [IN][OUT] It refers to the pointer to the supported baud rate information. See 'CameraParams.h' for the definitions of single value of the OR operation results of all supported baud rate. 
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks You can call this API when the device is not connected. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetSupportBaudrates(IN void* handle,IN OUT unsigned int* pnBaudrateAblity); + +/********************************************************************//** + * @~chinese + * @brief 设置串口操作等待时长 + * @param handle [IN] 设备句柄 + * @param nMillisec [IN] 串口操作的等待时长, 单位为ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Sets the waiting duration for serial port operation. + * @param handle [IN] It refers to the device handle. + * @param nMillisec [IN] It refers to waiting time of serial port operation, unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetGenCPTimeOut(IN void* handle, IN unsigned int nMillisec); + +/// @} + +/*******************Part9 ch: 仅U3V设备支持的接口 | en: API exclusively for USB3 Vision (U3V) devices*******************/ + +/// \addtogroup U3V相机 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 设置U3V的传输包大小 + * @param handle [IN] 设备句柄 + * @param nTransferSize [IN] 传输的包大小, Byte,默认为1M,rang:>=0x400,建议最大值:[windows] rang <= 0x400000;[Linux] rang <= 0x200000 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 增加传输包大小可以适当降低取流时的CPU占用率。但不同的PC和不同USB扩展卡存在不同的兼容性,如果该参数设置过大可能会出现取不到图像的风险。 + + * @~english + * @brief Sets transmission packet size of USB3 vision cameras. + * @param handle [IN] It refers to the device handle. + * @param nTransferSize [IN] It refers to the size of the transmission packet (unit: byte), and the default value is 1 MB (1,048,576 bytes).rang: >=0x400. 
+ Recommended maximum values: [Windows] range ≤ 0x400000; [Linux] range ≤ 0x200000. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Increasing the packet size can reduce the CPU usage, but for different computers and USB expansion cards, the compatibility is different. If the packet size is too large, image acquisition might fail. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetTransferSize(IN void* handle, IN unsigned int nTransferSize); + +/********************************************************************//** + * @~chinese + * @brief 获取U3V的传输包大小 + * @param handle [IN] 设备句柄 + * @param pnTransferSize [IN][OUT] 传输的包大小指针, Byte + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的U3V传输包大小,默认1M。 + + * @~english + * @brief Gets transmission packet size of USB3 vision cameras. + * @param handle [IN] It refers to the device handle. + * @param pnTransferSize [IN][OUT] It refers to the pointer to the size of the transmission packet (unit: byte). + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get the packet size of the current USB3 vision device (1 MB by default). +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_GetTransferSize(IN void* handle, IN OUT unsigned int* pnTransferSize); + +/********************************************************************//** + * @~chinese + * @brief 设置U3V的传输通道个数 + * @param handle [IN] 设备句柄 + * @param nTransferWays [IN] 传输通道个数,范围:1-10 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 用户可以根据PC的性能、设备出图帧率、图像大小和内存使用率等因素对该参数进行调节。但不同的PC和不同的USB扩展卡存在不同的兼容性。 + + * @~english + * @brief Sets the number of transmission channels of USB3 vision cameras. + * @param handle [IN] It refers to the device handle. 
+ * @param nTransferWays [IN] It refers to the number of transmission channels. It should be between 1 to 10. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This parameter can be adjusted based on computer performance, device image frame rate, device image size, and device memory usage. But compatibility differs due to different PC and USB expansion cards. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetTransferWays(IN void* handle, IN unsigned int nTransferWays); + +/********************************************************************//** + * @~chinese + * @brief 获取U3V的传输通道个数 + * @param handle [IN] 设备句柄 + * @param pnTransferWays [IN][OUT] 传输通道个数指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的U3V异步取流节点个数,U口相机传输通道个数和像素格式对应的负载包大小相关,可通过(最大异步注册长度/像素格式对应的负载包大小)计算得出。 \n + * 2000W设备的MONO8默认为3个,YUV为默认2个,RGB为默认1个,其它情况默认8个节点。 + + * @~english + * @brief Gets the number of transmission channels of USB3 vision cameras. + * @param handle [IN] It refers to the device handle. + * @param pnTransferWays [IN][OUT] It refers to the pointer to the number of transmission channels. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is used to get the current number of U3V asynchronous image acquisition nodes. + For USB3 vision cameras, the number of transmission channels is closely related to the packet size corresponding to the pixel format, and it can be calculated based on the max. asynchronous registration length/packet size of pixel format. 
+************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_GetTransferWays(IN void* handle, IN OUT unsigned int* pnTransferWays); + +/********************************************************************//** + * @~chinese + * @brief 设置U3V的事件缓存节点个数 + * @param handle [IN] 设备句柄 + * @param nEventNodeNum [IN] 事件缓存节点个数,范围:1-64 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于设置当前的U3V事件缓存节点个数,默认情况下为5个。 + + * @~english + * @brief Sets the number of event buffer nodes of USB3 vision cameras. + * @param handle [IN] It refers to the device handle. + * @param nEventNodeNum [IN] It refers to the number of event buffer nodes, range: [1, 64]. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to set the number of the buffer nodes for the current USB3 vision event. The default value is 5. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetEventNodeNum(IN void* handle, IN unsigned int nEventNodeNum); + +/********************************************************************//** + * @~chinese + * @brief 设置U3V的同步读写超时时间,范围为:[1000, INT_MAX),默认1000 ms + * @param handle [IN] 设备句柄 + * @param nMills [IN] 设置同步读写超时时间,默认时间为1000ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 增加设置同步读取时间接口,兼容部分相机配置参数很慢,超过1000ms的情况 + + * @~english + * @brief Sets the timeout duration for sync reading and writing of USB3 vision devices (1000 ms by default), range: [1000, INT_MAX]. + * @param handle [IN] It refers to the device handle. + * @param nMills [IN] It refers to the timeout duration for sync reading and writing (1000 by default), unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks Increasing the timeout duration for sync reading and writing can help deal with the problem that some cameras' parameter configuration process is very slow (more than 1000 ms). +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_SetSyncTimeOut(IN void* handle, IN unsigned int nMills); + +/********************************************************************//** + * @~chinese + * @brief 获取U3V相机同步读写超时时间 + * @param handle [IN] 设备句柄 + * @param pnMills [IN][OUT] 获取的超时时间(ms) + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口用于获取当前的U3V同步读写超时时间大小,默认1000ms。 + + * @~english + * @brief Gets the timeout duration for sync reading and writing of USB3 vision devices. + * @param handle [IN] It refers to the device handle. + * @param pnMills [IN][OUT] It refers to the timeout duration, unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to get the timeout duration for sync reading and writing of USB3 vision cameras (1000 ms by default). +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_GetSyncTimeOut(IN void* handle, IN OUT unsigned int* pnMills); + +/// @} + + +/*******************Part10 ch: GenTL相关接口 | en: GenTL-related API*******************/ + +/// \addtogroup GenTL +/// @{ + +/******************************************************************************//** + * @~chinese + * @brief 通过GenTL枚举Interfaces + * @param pstIFList [IN][OUT] Interfaces列表 + * @param strGenTLPath [IN] GenTL的cti文件路径 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li Interfaces列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请,建议尽量避免多线程枚举操作。\n + \li 暂不支持直接调用MvProducerU3V.cti和MvProducerGEV.cti, 支持调用其他.cti + + * @~english + * @brief Enumerates interfaces via GenTL. + * @param pstIFList [IN][OUT] It refers to interface list. 
+ * @param strGenTLPath [IN] It refers to CTI file path of GenTL. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The memory of device list is internally allocated. When this API is called in multiple threads, the SDK will release and apply for the device list memory. + It is recommended to avoid multithreaded enumeration operations. + MvProducerU3V.cti and MvProducerGEV.cti calling are unsupported. + *******************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumInterfacesByGenTL(IN OUT MV_GENTL_IF_INFO_LIST* pstIFList, IN const char * strGenTLPath); + +/********************************************************************//** + * @~chinese + * @brief 卸载cti库 + * @param pGenTLPath [IN] 枚举卡时加载的cti文件路径 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 卸载前需要保证通过该cti枚举出的相机已全部关闭,否则报错前置条件错误。 + + * @~english + * @brief Unload the CTI library. + * @param pGenTLPath [IN] It refers to the CTI file path during the enumeration. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Make sure that all cameras enumerated by the CTI file are closed before calling this API. Otherwise, MV_E_PRECONDITION error will be returned. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_UnloadGenTLLibrary(IN const char * pGenTLPath); + +/*****************************************************************************************************//** + * @~chinese + * @brief 通过GenTL Interface枚举设备 + * @param pstIFInfo [IN] Interface信息 + * @param pstDevList [IN][OUT] 设备列表 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 设备列表的内存是在SDK内部分配的,多线程调用该接口时会进行设备列表内存的释放和申请。 + * @note 尽量避免多线程枚举操作。 + + * @~english + * @brief Enumerates devices via GenTL interface. + * @param pstIFInfo [IN] It refers to interface information. 
+ * @param pstDevList [IN][OUT] It refers to the device list. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The memory of device list is internally allocated. When this API is called in multiple threads, the SDK will release and apply for the device list memory. + It is recommended to avoid multithreaded enumeration operations. + *****************************************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumDevicesByGenTL(IN MV_GENTL_IF_INFO* pstIFInfo, IN OUT MV_GENTL_DEV_INFO_LIST* pstDevList); + +/********************************************************************//** + * @~chinese + * @brief 通过GenTL设备信息创建设备句柄 + * @param handle [IN][OUT] 设备句柄 + * @param pstDevInfo [IN] 设备信息结构体指针 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 根据输入的设备信息,创建库内部必须的资源和初始化内部模块。 + + * @~english + * @brief Creates the device handle by GenTL related device information. + * @param handle [IN][OUT] It refers to interface information. + * @param pstDevInfo [IN] It refers to the struct pointer to device Information. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Create required resources within library and initialize internal module according to input device information. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateHandleByGenTL(IN OUT void ** handle, IN const MV_GENTL_DEV_INFO* pstDevInfo); + +/// @} + +/*******************Part11 ch: 图像保存、格式转换等相关接口 | en: Image saving and format conversion API*******************/ + +/// \addtogroup 图像处理 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 保存图片,支持Bmp和Jpeg. 
+ * @param handle [IN] 设备句柄 + * @param pstSaveParam [IN][OUT] 保存图片参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 通过该接口可以将从设备采集到的原始图像数据转换成JPEG或者BMP等格式并存放在指定内存中,然后用户可以将转换之后的数据直接保存成图片文件。\n + \li 该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用 MV_CC_GetOneFrameTimeout() 或者 MV_CC_RegisterImageCallBackEx() 设置回调函数,获取一帧图像数据,然后再通过该接口转换格式。\n + \li 该接口支持长乘宽至UINT_MAX,其中 MV_CC_SaveImageEx2() 支持长乘宽最大至 USHRT_MAX,JPEG格式最大支持宽高为65500。 + + * @~english + * @brief Saves images, supporting BMP and JPEG. + * @param handle [IN] It refers to the device handle. + * @param pstSaveParam [IN][OUT] It refers to the structure of image saving parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to convert the collected original images to JPEG or BMP format and save them to specified memory. You can then save the converted data as image files. + This API requires no specific calling sequence. The conversion will be executed when there is any image data. You can call MV_CC_GetOneFrameTimeout() or MV_CC_RegisterImageCallBackEx() to set the callback function and get one image frame, then call this API to convert the format. + This API supports setting the nWidth/nHeight/Length parameter to UINT_MAX: MV_CC_SaveImageEx2() supports setting the max. parameter to USHRT_MAX, and JPEG format supports the max. width and height value 65500. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageEx3(IN void* handle, IN OUT MV_SAVE_IMAGE_PARAM_EX3* pstSaveParam); + +/********************************************************************//** + * @~chinese + * @brief 保存图像到文件 + * @param handle [IN] 设备句柄 + * @param pstSaveFileParam [IN][OUT] 保存图片文件参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 该接口支持BMP/JPEG/PNG/TIFF。\n + \li 该接口支持保存的图像长乘宽至UINT_MAX, MV_CC_SaveImageToFile() 支持长乘宽最大至USHRT_MAX。JPEG格式最大支持宽高为65500 px。 \n + \li 该接口是 MV_CC_SaveImageToFile() 接口的扩展接口。 + + * @~english + * @brief Saves image to file (extended API 1) + * @param handle [IN] It refers to the device handle. + * @param pstSaveFileParam [IN][OUT] It refers to the structure of image file saving parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks It supports saving images in BMP, JPEG, PNG, and TIFF formats. + this API support the parameter nWidth/nHeight/Length to UINT_MAX. + For images in JPEG format, the supported max. width and height values are 65500. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageToFileEx(IN void* handle, IN OUT MV_SAVE_IMAGE_TO_FILE_PARAM_EX* pstSaveFileParam); + +/********************************************************************//** + * @~chinese + * @brief 保存图像到文件 + * @param handle [IN] 设备句柄 + * @param pstImage [IN] 图像信息 + * @param pSaveImageParam [IN] 存图参数 + * @param pcImagePath [IN] 存图路径,Windows平台长度不超过260字节,Linux平台不超过255字节 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 该接口支持4G以上超大图的PNG/TIFF存图,非超大图像支持BMP/JPEG/TIFF/PNG。 \n + \li JPEG格式最大支持宽高为65500 px。 + + * @~english + * @brief Saves image to file (extended API 2) + * @param handle [IN] It refers to the device handle. + * @param pstImage [IN] It refers to the image information. 
+ * @param pSaveImageParam [IN] It refers to the image saving parameter. + * @param pcImagePath [IN] It refers to the image saving path. On Windows length does not exceed 260 bytes, and on Linux, it does not exceed 255 bytes. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks It supports saving images over 4 GB in PNG and TIFF formats, and images under 4 GB in BMP, JPEG, TIFF, and PNG formats. + For images in JPEG format, the supported max. width and height values are 65500. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageToFileEx2(IN void* handle, IN MV_CC_IMAGE* pstImage, IN MV_CC_SAVE_IMAGE_PARAM* pSaveImageParam, IN const char* pcImagePath); + +/********************************************************************//** + * @~chinese + * @brief 图像旋转 + * @param handle [IN] 设备句柄 + * @param pstRotateParam [IN][OUT] 图像旋转参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口只支持MONO8/RGB24/BGR24格式数据的90/180/270度旋转。 + + * @~english + * @brief Rotates images. + * @param handle [IN] It refers to the device handle. + * @param pstRotateParam [IN][OUT] It refers to image rotation parameters structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only supports 90°, 180°, and 270° rotation of images in Mono 8, RGB 24, and BGR 24 formats. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RotateImage(IN void* handle, IN OUT MV_CC_ROTATE_IMAGE_PARAM* pstRotateParam); + +/********************************************************************//** + * @~chinese + * @brief 图像翻转 + * @param handle [IN] 设备句柄 + * @param pstFlipParam [IN][OUT] 图像翻转参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 该接口只支持MONO8/RGB24/BGR24格式数据的垂直和水平翻转。 + + * @~english + * @brief Flips images + * @param handle [IN] It refers to the device handle. + * @param pstFlipParam [IN][OUT] It refers to the structure of image flipping parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only support vertical and horizontal flipping of images in Mono 8, RGB 24, and BGR 24 formats. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_FlipImage(IN void* handle, IN OUT MV_CC_FLIP_IMAGE_PARAM* pstFlipParam); + +/********************************************************************//** + * @~chinese + * @brief 像素格式转换 + * @param handle [IN] 设备句柄 + * @param pstCvtParam [IN][OUT] 像素格式转换参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 通过该接口可以将从设备采集到的原始图像数据转换成用户所需的像素格式并存放在指定内存中。 \n + \li 该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用 MV_CC_GetOneFrameTimeout() 或者 MV_CC_RegisterImageCallBackEx() 设置回调函数,获取一帧图像数据,然后再通过该接口转换格式。如果设备当前采集图像是JPEG压缩的格式,则不支持调用该接口进行转换。 \n + \li 该接口支持转换的图像长乘宽至UINT_MAX。 \n + \li 像素格式转换能力的详情,如下表所示。第一列为输入像素格式,第一行为转换后的像素格式。“ √ ”代表可以转换为目标像素格式。 + | 输入格式\输出格式 | Mono8 | RGB24 | BGR24 | YUV422 | YV12 | YUV422 YUYV | + | ----------- | :-----: | :-----: | :-----: | :------: | :----: | :-----------: | + | Mono8 | × | √ | √ | √ | √ | × | + | Mono10 | √ | √ | √ | √ | √ | × | + | Mono10P | √ | √ | √ | √ | √ | × | + | Mono12 | √ | √ | √ | √ | √ | × | + | Mono12P | √ | √ | √ | √ | √ | × | + | BayerGR8 | √ | √ | √ | √ | √ | × 
| + | BayerRG8 | √ | √ | √ | √ | √ | × | + | BayerGB8 | √ | √ | √ | √ | √ | × | + | BayerBG8 | √ | √ | √ | √ | √ | × | + | BayerRBGG8 | × | √ | √ | × | × | × | + | BayerGR10 | √ | √ | √ | √ | √ | × | + | BayerRG10 | √ | √ | √ | √ | √ | × | + | BayerGB10 | √ | √ | √ | √ | √ | × | + | BayerBG10 | √ | √ | √ | √ | √ | × | + | BayerGR12 | √ | √ | √ | √ | √ | × | + | BayerRG12 | √ | √ | √ | √ | √ | × | + | BayerGB12 | √ | √ | √ | √ | √ | × | + | BayerBG12 | √ | √ | √ | √ | √ | × | + | BayerGR10P | √ | √ | √ | √ | √ | × | + | BayerRG10P | √ | √ | √ | √ | √ | × | + | BayerGB10P | √ | √ | √ | √ | √ | × | + | BayerBG10P | √ | √ | √ | √ | √ | × | + | BayerGR12P | √ | √ | √ | √ | √ | × | + | BayerRG12P | √ | √ | √ | √ | √ | × | + | BayerGB12P | √ | √ | √ | √ | √ | × | + | BayerBG12P | √ | √ | √ | √ | √ | × | + | RGB8P | √ | × | √ | √ | √ | × | + | BGR8P | √ | √ | × | √ | √ | × | + | YUV422P | √ | √ | √ | × | √ | × | + | YUV422 YUYV | √ | √ | √ | √ | √ | × | + + * @~english + * @brief Converts pixel format. + * @param handle [IN] It refers to the device handle. + * @param pstCvtParam [IN][OUT] It refers to the structure of pixel format conversion parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to convert the collected original images to images in required pixel format and save them to specified memory. + This API requires no specific calling sequence. The conversion will be executed when there is any image data. + You can call MV_CC_GetOneFrameTimeout() or MV_CC_RegisterImageCallBackEx() to set the callback function and get one image frame, then call this API to convert the format. + If the collected image is in compressed JPEG format, it cannot be converted via this API. + this API support the parameter nWidth/nHeight/Length to UINT_MAX. + Comparing with the API MV_CC_ConvertPixelType, this API support the parameter nWidth/nHeight/Length to UINT_MAX. 
+ + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ConvertPixelTypeEx(IN void* handle, IN OUT MV_CC_PIXEL_CONVERT_PARAM_EX* pstCvtParam); + +/********************************************************************//** + * @~chinese + * @brief 设置插值算法类型 + * @param handle [IN] 设备句柄 + * @param nBayerCvtQuality [IN] Bayer的插值方法 0-快速 1-均衡(默认为均衡) 2-最优 3-最优+ + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 设置内部图像转换接口的Bayer插值算法类型参数, MV_CC_ConvertPixelTypeEx() 、 MV_CC_GetImageForRGB() 和 MV_CC_GetImageForBGR() 接口内部使用的插值算法是该接口所设定的。 + + * @~english + * @brief Sets the interpolation method of Bayer format. + * @param handle [IN] It refers to the device handle. + * @param nBayerCvtQuality [IN] It refers to interpolation method. 0: fast; 1: equilibrated; 2: optimal (default); 3: optimal plus. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Call this API to set the Bayer interpolation algorithm type parameter for the APIs: MV_CC_ConvertPixelTypeEx() , MV_CC_GetImageForRGB() , and MV_CC_GetImageForBGR(). + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerCvtQuality(IN void* handle, IN unsigned int nBayerCvtQuality); + +/********************************************************************//** + * @~chinese + * @brief 插值算法平滑使能设置 + * @param handle [IN] 设备句柄 + * @param bFilterEnable [IN] 平滑使能(默认关闭) + * @return 成功,返回#MV_OK;错误,返回错误码 + * @remarks 设置内部图像转换接口的贝尔插值平滑使能参数, MV_CC_ConvertPixelTypeEx() 、 MV_CC_SaveImageEx3() 和 MV_CC_SaveImageToFileEx() 内部使用的插值算法是该接口所设定的。 + + * @~english + * @brief Enables or disables the smoothing function of interpolation algorithm. + * @param handle [IN] It refers to the device handle. + * @param bFilterEnable [IN] Whether to enable the smoothing function of interpolation algorithm (disabled by default). 
+ * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is used to enable or disable the smoothing function of Bayer interpolation, and it determines the interpolation algorithm of the APIs: MV_CC_ConvertPixelTypeEx()、MV_CC_SaveImageToFileEx and MV_CC_SaveImageEx3(). + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerFilterEnable(IN void* handle, IN bool bFilterEnable); + +/********************************************************************//** + * @~chinese + * @brief 设置Bayer格式的Gamma值 + * @param handle [IN] 设备句柄 + * @param fBayerGammaValue [IN] Gamma值:0.1 ~ 4.0 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 设置该值后,在Bayer图像(Bayer8/10/12/16)转RGB/BGR图像(RGB24/48、RGBA32/64、BGR24/48、BGRA32/64)时起效。 \n 相关接口: MV_CC_ConvertPixelTypeEx() 、 MV_CC_SaveImageEx3() 、 MV_CC_SaveImageToFile() 。 + + * @~english + * @brief Sets the Gamma value in Bayer pattern. + * @param handle [IN] It refers to the device handle. + * @param fBayerGammaValue [IN] It refers to the Gamma value, range: [0.1, 4.0]. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks After setting this value, it takes effect when converting Bayer images (Bayer8/10/12/16) to RGB/BGR images (RGB24/48, RGBA32/64, BGR24/48, BGRA32/64). Related API: MV_CC_ConvertPixelTypeEx, MV_CC_SaveImageEx3, MV_CC_SaveImageToFileEx. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerGammaValue(IN void* handle, IN float fBayerGammaValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Mono8/Bayer8/10/12/16格式的Gamma值 + * @param handle [IN] 设备句柄 + * @param MvGvspPixelType enSrcPixelType [IN] 像素格式,支持Mono8, Bayer(Bayer8/10/12/16) + * @param fGammaValue [IN] Gamma值:0.1 ~ 4.0 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks \li 设置Mono8的gamma值后,在调用 MV_CC_ConvertPixelTypeEx() 将Mono8转成Mono8时gamma值起效。 \n + * \li 设置Bayer的gamma值后,在Bayer图像(Bayer8/10/12/16)转RGB/BGR图像(RGB24/48、RGBA32/64、BGR24/48、BGRA32/64)时起效。相关接口: MV_CC_ConvertPixelType() 、 MV_CC_SaveImageToFile() 、 MV_CC_SaveImageEx3() 。 + * \li 该接口兼容 MV_CC_SetBayerGammaValue() ,新增支持Mono8像素格式。 + + * @~english + * @brief Sets Gamma value of Mono 8 or Bayer 8/10/12/16 pattern. + * @param handle [IN] It refers to the device handle. + * @param MvGvspPixelType enSrcPixelType [IN] It refers to the pixel format. Supports PixelType_Gvsp_Mono8 and Bayer 8/10/12/16. + * @param fGammaValue [IN] It refers to the Gamma value, range: [0.1, 4.0]. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The Gamma value in Mono 8 pattern set via this API will be used when MV_CC_ConvertPixelType() is called to convert Mono 8 to Mono 8. + * @remarks The Gamma value in Bayer8/10/12/16 pattern set via this API will be used when calling MV_CC_ConvertPixelTypeEx() , MV_CC_SaveImageEx3() , or MV_CC_SaveImageToFileEx() to convert Bayer 8/10/12/16 format to RGB 24/48, RGBA 32/64, BGR 24/48 or BGRA 32/64. + * @remarks This API is compatible with MV_CC_SetBayerGammaValue() , and it supports Mono 8 pixel format. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGammaValue(IN void* handle, IN enum MvGvspPixelType enSrcPixelType, IN float fGammaValue); + +/********************************************************************//** + * @~chinese + * @brief 设置Bayer格式的Gamma信息 + * @param handle [IN] 设备句柄 + * @param pstGammaParam [IN] Gamma信息 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 设置该值后,在Bayer图像(Bayer8/10/12/16)转RGB/BGR图像(RGB24/48、RGBA32/64、BGR24/48、BGRA32/64)时起效。 相关接口: MV_CC_ConvertPixelTypeEx() 、 MV_CC_SaveImageEx3() 、 MV_CC_SaveImageToFileEx() 。 + * @note 以相机实际支持情况为准。 + + * @~english + * @brief Sets the Gamma value of Bayer pattern. + * @param handle [IN] It refers to the device handle. + * @param pstGammaParam [IN] It refers to the Gamma information. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The Gamma value set by this API will be used when calling MV_CC_ConvertPixelTypeEx() , MV_CC_SaveImageEx3(), MV_CC_SaveImageToFileEx(), to convert Bayer 8/10/12/16 format to RGB24/48, BGR24/48, RGBA32/64, or BGRA32/64. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerGammaParam(IN void* handle, IN MV_CC_GAMMA_PARAM* pstGammaParam); + +/********************************************************************//** + * @~chinese + * @brief 设置Bayer格式的CCM使能和矩阵,量化系数默认1024 + * @param handle [IN] 设备句柄 + * @param pstCCMParam [IN] CCM参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 开启CCM并设置CCM矩阵后,在Bayer图像(Bayer8/10/12/16)转RGB/BGR图像(RGB24/48、RGBA32/64、BGR24/48、BGRA32/64)时起效。 相关接口: MV_CC_ConvertPixelTypeEx() 、 MV_CC_SaveImageEx3() 、 MV_CC_SaveImageToFileEx() 。 + + * @~english + * @brief Enables/disables CCM and sets CCM parameters in Bayer pattern. The default quantitative scale is 1024. + * @param handle [IN] It refers to the device handle. 
+ * @param pstCCMParam [IN] It refers to the CCM parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks After the API is called to enable CCM and set the CCM, the CCM parameters will take effect when MV_CC_ConvertPixelTypeEx() or MV_CC_SaveImageEx3() or MV_CC_SaveImageToFileEx() is called to convert Bayer 8/10/12/16 format to RGB 24/48, RGBA 32/64, BGR 24/48, or BGRA 32/64. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerCCMParam(IN void* handle, IN MV_CC_CCM_PARAM* pstCCMParam); + +/********************************************************************//** + * @~chinese + * @brief 设置Bayer格式的CCM使能和矩阵 + * @param handle [IN] 设备句柄 + * @param pstCCMParam [IN] CCM参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 开启CCM并设置CCM矩阵后,在Bayer图像(Bayer8/10/12/16)转RGB/BGR图像(RGB24/48、RGBA32/64、BGR24/48、BGRA32/64)时起效。 相关接口: MV_CC_ConvertPixelTypeEx() 、 MV_CC_SaveImageEx3() 、 MV_CC_SaveImageToFileEx() 。 + + * @~english + * @brief Enables and disables CCM, and sets CCM parameters of Bayer pattern. + * @param handle [IN] It refers to the device handle. + * @param pstCCMParam [IN] It refers to the color correction parameter structure. + * @return Success, return MV_OK. Failure, return error code + * @remarks After the API is called to enable CCM and set the CCM, the CCM parameters will take effect when MV_CC_ConvertPixelTypeEx() or MV_CC_SaveImageEx3() or MV_CC_SaveImageToFileEx() is called to convert Bayer 8/10/12/16 format to RGB 24/48, RGBA 32/64, BGR 24/48, or BGRA 32/64. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerCCMParamEx(IN void* handle, IN MV_CC_CCM_PARAM_EX* pstCCMParam); + +/********************************************************************//** + * @~chinese + * @brief 图像对比度调节 + * @param handle [IN] 设备句柄 + * @param pstContrastParam [IN][OUT] 对比度调节参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks + + * @~english + * @brief Adjusts image contrast. + * @param handle [IN] It refers to the device handle. + * @param pstContrastParam [IN][OUT] It refers to the contrast parameter structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ImageContrast(IN void* handle, IN OUT MV_CC_CONTRAST_PARAM* pstContrastParam); + +/********************************************************************//** + * @~chinese + * @brief 图像去紫边 + * @param handle [IN] 设备句柄 + * @param pstPurpleFringingParam [IN][OUT] 去紫边参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 像素格式仅支持PixelType_Gvsp_RGB8_Packed和PixelType_Gvsp_BGR8_Packed + + * @~english + * @brief Corrects purple fringing of the image. + * @param handle [IN] It refers to the device handle. + * @param pstPurpleFringingParam [IN][OUT] It refers to purple fringing correction parameter. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API only supports processing images in PixelType_Gvsp_RGB8_Packed and PixelType_Gvsp_BGR8_Packed formats. 
+ * ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_PurpleFringing(IN void* handle, IN MV_CC_PURPLE_FRINGING_PARAM* pstPurpleFringingParam); + +/********************************************************************//** + * @~chinese + * @brief 设置ISP参数 + * @param handle [IN] 设备句柄 + * @param pstParam [IN] ISP配置参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Sets ISP parameters. + * @param handle [IN] It refers to the device handle. + * @param pstParam [IN][OUT] It refers to the ISP parameter structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetISPConfig(void* handle, IN MV_CC_ISP_CONFIG_PARAM* pstParam); + +/********************************************************************//** + * @~chinese + * @brief 对图像进行ISP算法处理 + * @param handle [IN] 设备句柄 + * @param pstInputImage [IN] 输入图像结构体 + * @param pstOutputImage [IN][OUT] 输出图像结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 需要先调用 MV_CC_SetISPConfig() 传入配置文件, 配置文件由ISP工具生成 + + * @~english + * @brief Processes the images with ISP algorithm. + * @param handle [IN] It refers to the device handle. + * @param pstInputImage [IN] It refers to the input image structure. + * @param pstOutputImage [IN][OUT] It refers to the output image structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Before calling this API, call MV_CC_SetISPConfig() to import configuration file generated by the ISP tool. 
+ * ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ISPProcess(void* handle, IN MV_CC_IMAGE* pstInputImage, MV_CC_IMAGE* pstOutputImage); + +/********************************************************************//** + * @~chinese + * @brief 无损解码 + * @param handle [IN] 设备句柄 + * @param pstDecodeParam [IN][OUT] 无损解码参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 将从相机中取到的无损压缩码流解码成裸数据,同时支持解析当前相机实时图像的水印信息(如果输入的无损码流不是当前相机或者不是实时取流的,则水印解析可能异常); + 若解码失败,请检查以下情况: + \li CPU是否支持 SSE AVX指令集。 + \li 当前帧是否异常(丢包等),若存在,可能导致解码异常。 + \li 相机出图是否异常,即使不丢包也会异常。 + + * @~english + * @brief Decodes lossless compression stream into raw data. + * @param handle [IN] It refers to the device handle. + * @param pstDecodeParam [IN][OUT] It refers to the structure of lossless decoding parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks Supports decoding the lossless compression stream of camera to raw data, and parsing the watermark of real-time images of the current camera. If the inputted lossless stream is not real-time or does not belong to the current camera, an exception may occur during watermark parsing. + If the decoding fails, check if it is one of the following circumstances: + (1) CPU does not support SSE AVX instruction set. + (2) IException occurs on the current frame (e.g., packet loss). + (3) Exception occurs on image generating from camera, even without packet loss. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_HB_Decode(IN void* handle, IN OUT MV_CC_HB_DECODE_PARAM* pstDecodeParam); + +/********************************************************************//** + * @~chinese + * @brief 开始录像 + * @param handle [IN] 设备句柄 + * @param pstRecordParam [IN] 录像参数结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @note 该接口最大支持Width×Height为8000×8000大小,最小支持96*96。若超出,会导致调用 MV_CC_InputOneFrame() 错误。 + + * @~english + * @brief Starts recording. + * @param handle [IN] It refers to the device handle. + * @param pstRecordParam [IN] It refers to the recording parameter structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The max. supported width × height is 8000*8000. The min. supported width × height is 96*96. If the value exceeds, an error will occur when calling MV_CC_InputOneFrame(). + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StartRecord(IN void* handle, IN MV_CC_RECORD_PARAM* pstRecordParam); + +/********************************************************************//** + * @~chinese + * @brief 输入录像数据 + * @param handle [IN] 设备句柄 + * @param pstInputFrameInfo [IN] 录像数据结构体 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Inputs raw data for recording. + * @param handle [IN] It refers to the device handle. + * @param pstInputFrameInfo [IN] It refers to the record data structure. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_InputOneFrame(IN void* handle, IN MV_CC_INPUT_FRAME_INFO * pstInputFrameInfo); + +/********************************************************************//** + * @~chinese + * @brief 停止录像 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + + * @~english + * @brief Stops recording. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_StopRecord(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 重构图像(用于分时曝光功能) + * @param handle [IN] 设备句柄 + * @param pstReconstructParam [IN][OUT] 重构图像参数 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码"。 + * @remarks 图像分割功能可将多个不同曝光值所对应的图像交叠合并为1张图像。 \n + 使用时,需与线阵相机的“MultiLightControl”节点搭配。假设设置该节点为2,则相机会将2个不同曝光值所对应的两张图像交叠合并为1张图像(实际高度为2张图像的高度)发送给上层应用程序。 \n + 此时,调用该接口并传入分时曝光值nExposureNum为2,可将相机发送的1张图像分割为2张图像,并且这2张图像分别对应1个曝光值。 \n + 若使用普通相机或未打开线阵相机的“MultiLightControl”节点,且nExposureNum设置为n,则图像分割无意义,只是将图像按行分割为n张图像,每张图像的高度变为原图像的1/n。 \n + + * @~english + * @brief Reconstructs the image for multi-light control. + * @param handle [IN] It refers to the device handle. + * @param pstReconstructParam [IN][OUT] It refers to the image reconstruction parameters. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API should be used with "MultiLightControl" node of line scan camera. If the value of MultiLightControl node is 2, the camera will reconstruct 2 images with different exposure values into one image (with its height the sum of two images) and send it to the upper layer application. 
+ If this API is called then and the value of nExposureNum is set to 2, the reconstructed image will be later divided to 2 images with two corresponding exposure values. + If line scan camera is not used or MultiLightControl node of line scan camera is disabled, and nExposureNum value is set to n, the image reconstructing function will not work. The image will be divided into n images by line, each of them with the height 1/n of the original image. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ReconstructImage(IN void* handle, IN OUT MV_RECONSTRUCT_IMAGE_PARAM* pstReconstructParam); + +/// @} + + + +/**************************Part12 ch: 支持串口通信的设备接口 | en: API for devices supporting serial communication ******************************************/ + +/// \addtogroup 串口控制相关 +/// @{ + +/********************************************************************//** + * @~chinese + * @brief 打开串口 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码" 。 + * @remarks 此接口适用于支持串口通信的相机 + + * @~english + * @brief Opens the serial port. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks This API is compatible with cameras supporting serial communication + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SerialPort_Open(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 向串口写数据,一次最大写512字节的数据 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN] 数据 + * @param nLength [IN] 数据长度 + * @param pnWriteLen [OUT] 实际写成功的数据长度 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码" 。 + * @remarks 接口为阻塞模式,数据全部发送完成或者发送失败时返回 + + * @~english + * @brief Writes data to serial port, allowing a maximum of 512 bytes written at a time. + * @param handle [IN] It refers to the device handle. 
+ * @param pBuffer [IN] It refers to the data buffer. + * @param nLength [IN] It refers to the data length. + * @param pnWriteLen [OUT] It refers to the actual written data length. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The API is in blocking mode, returning the result only when all data has been successfully transmitted or transmission fails. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SerialPort_Write(IN void* handle, IN const void *pBuffer, IN unsigned int nLength, OUT unsigned int* pnWriteLen); + +/********************************************************************//** + * @~chinese + * @brief 读串口数据 + * @param handle [IN] 设备句柄 + * @param pBuffer [IN] 数据 + * @param nLength [IN] 数据长度 + * @param pnReadLen [OUT] 实际读到的数据长度 + * @param nMsec [IN] 超时时间,单位:ms + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码" 。 + * @remarks 接口为阻塞模式,当有收到数据、到达超时时间、出现异常时,立即返回 + + * @~english + * @brief Reads the serial port data. + * @param handle [IN] It refers to the device handle. + * @param pBuffer [IN] It refers to the data buffer. + * @param nLength [IN] It refers to the data buffer length. + * @param pnReadLen [OUT] It refers to the reader data length. + * @param nMsec [IN] It refers to the timeout duration, unit: millisecond. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks The API is in blocking mode, returning the result only when data is received, timed out, or exception occurs. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SerialPort_Read(IN void* handle, IN void *pBuffer, IN unsigned int nLength, OUT unsigned int* pnReadLen, IN unsigned int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 清空已接收的串口数据 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码" 。 + * @remarks + + * @~english + * @brief Clears the received serial port data. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SerialPort_ClearBuffer(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 关闭串口 + * @param handle [IN] 设备句柄 + * @return 成功,返回\ref 状态码 "MV_OK";失败,返回\ref 状态码 "状态码" 。 + * @remarks + + * @~english + * @brief Closes the serial port. + * @param handle [IN] It refers to the device handle. + * @return Returns MV_OK for success, and returns corresponding Error Code for failure. 
+ * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SerialPort_Close(IN void* handle); + +/// @} + +#ifdef __cplusplus +} +#endif + +#endif //_MV_CAMERA_CTRL_H_ diff --git a/image_capture/third_party/mvs/Includes/MvErrorDefine.h b/image_capture/third_party/mvs/Includes/MvErrorDefine.h new file mode 100644 index 0000000..c63fd37 --- /dev/null +++ b/image_capture/third_party/mvs/Includes/MvErrorDefine.h @@ -0,0 +1,122 @@ + +#ifndef _MV_ERROR_DEFINE_H_ +#define _MV_ERROR_DEFINE_H_ + +#include "MvISPErrorDefine.h" + +/********************************************************************/ +/// \~chinese +/// \name 正确码定义 +/// @{ +/// \~english +/// \name Definition of correct code +/// @{ +#define MV_OK 0x00000000 ///< \~chinese 成功,无错误 \~english Successed, no error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name 通用错误码定义:范围0x80000000-0x800000FF +/// @{ +/// \~english +/// \name Definition of General error code +/// @{ +#define MV_E_HANDLE 0x80000000 ///< \~chinese 错误或无效的句柄 \~english Error or invalid handle +#define MV_E_SUPPORT 0x80000001 ///< \~chinese 不支持的功能 \~english Not supported function +#define MV_E_BUFOVER 0x80000002 ///< \~chinese 缓存已满 \~english Buffer overflow +#define MV_E_CALLORDER 0x80000003 ///< \~chinese 函数调用顺序错误 \~english Function calling order error +#define MV_E_PARAMETER 0x80000004 ///< \~chinese 错误的参数 \~english Incorrect parameter +#define MV_E_RESOURCE 0x80000006 ///< \~chinese 资源申请失败 \~english Applying resource failed +#define MV_E_NODATA 0x80000007 ///< \~chinese 无数据 \~english No data +#define MV_E_PRECONDITION 0x80000008 ///< \~chinese 前置条件有误,或运行环境已发生变化 \~english Precondition error, or running environment changed +#define MV_E_VERSION 0x80000009 ///< \~chinese 版本不匹配 \~english Version mismatches +#define MV_E_NOENOUGH_BUF 0x8000000A ///< \~chinese 传入的内存空间不足 \~english Insufficient memory +#define MV_E_ABNORMAL_IMAGE 
0x8000000B ///< \~chinese 异常图像,可能是丢包导致图像不完整 \~english Abnormal image, maybe incomplete image because of lost packet +#define MV_E_LOAD_LIBRARY 0x8000000C ///< \~chinese 动态导入DLL失败 \~english Load library failed +#define MV_E_NOOUTBUF 0x8000000D ///< \~chinese 没有可输出的缓存 \~english No Avaliable Buffer +#define MV_E_ENCRYPT 0x8000000E ///< \~chinese 加密错误 \~english Encryption error +#define MV_E_OPENFILE 0x8000000F ///< \~chinese 打开文件出现错误 \~english open file error +#define MV_E_BUF_IN_USE 0x80000010 ///< \~chinese 缓存地址已使用 \~english Buffer already in use +#define MV_E_BUF_INVALID 0x80000011 ///< \~chinese 无效的缓存地址 \~english Buffer address invalid +#define MV_E_NOALIGN_BUF 0x80000012 ///< \~chinese 缓存对齐异常 \~english Buffer alignmenterror error +#define MV_E_NOENOUGH_BUF_NUM 0x80000013 ///< \~chinese 缓存个数不足 \~english Insufficient cache count +#define MV_E_PORT_IN_USE 0x80000014 ///< \~chinese 串口被占用 \~english Port is in use +#define MV_E_IMAGE_DECODEC 0x80000015 ///< \~chinese 解码错误(SDK校验图像异常)\~english Decoding error (SDK verification image exception) +#define MV_E_UINT32_LIMIT 0x80000016 /// \~chinese 图像大小超过unsigned int返回,接口不支持 +#define MV_E_IMAGE_HEIGHT 0x80000017 /// \~chinese 图像高度异常(残帧丢弃) \~english image height anomaly (discard incomplete images) +#define MV_E_NOENOUGH_DDR 0x80000018 ///< \~chinese DDR缓存不足 \~english The DDR cache is Insufficient +#define MV_E_NOENOUGH_STREAM 0x80000019 ///< \~chinese 流通道不足 \~english The stream channel is Insufficient +#define MV_E_NORESPONSE 0x8000001A ///< \~chinese 设备无响应 \~english No response from device + +#define MV_E_UNKNOW 0x800000FF ///< \~chinese 未知的错误 \~english Unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name GenICam系列错误:范围0x80000100-0x800001FF +/// @{ +/// \~english +/// \name GenICam Series Error Codes: Range from 0x80000100 to 0x800001FF +/// @{ +#define MV_E_GC_GENERIC 0x80000100 ///< \~chinese 通用错误 \~english General error +#define MV_E_GC_ARGUMENT 
0x80000101 ///< \~chinese 参数非法 \~english Illegal parameters +#define MV_E_GC_RANGE 0x80000102 ///< \~chinese 值超出范围 \~english The value is out of range +#define MV_E_GC_PROPERTY 0x80000103 ///< \~chinese 属性 \~english Property +#define MV_E_GC_RUNTIME 0x80000104 ///< \~chinese 运行环境有问题 \~english Running environment error +#define MV_E_GC_LOGICAL 0x80000105 ///< \~chinese 逻辑错误 \~english Logical error +#define MV_E_GC_ACCESS 0x80000106 ///< \~chinese 节点访问条件有误 \~english Node accessing condition error +#define MV_E_GC_TIMEOUT 0x80000107 ///< \~chinese 超时 \~english Timeout +#define MV_E_GC_DYNAMICCAST 0x80000108 ///< \~chinese 转换异常 \~english Transformation exception +#define MV_E_GC_UNKNOW 0x800001FF ///< \~chinese GenICam未知错误 \~english GenICam unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name GigE_STATUS对应的错误码:范围0x80000200-0x800002FF +/// @{ +/// \~english +/// \name GigE_STATUS Error Codes: Range from 0x80000200 to 0x800002FF +/// @{ +#define MV_E_NOT_IMPLEMENTED 0x80000200 ///< \~chinese 命令不被设备支持 \~english The command is not supported by device +#define MV_E_INVALID_ADDRESS 0x80000201 ///< \~chinese 访问的目标地址不存在 \~english The target address being accessed does not exist +#define MV_E_WRITE_PROTECT 0x80000202 ///< \~chinese 目标地址不可写 \~english The target address is not writable +#define MV_E_ACCESS_DENIED 0x80000203 ///< \~chinese 设备无访问权限 \~english No permission +#define MV_E_BUSY 0x80000204 ///< \~chinese 设备忙,或网络断开 \~english Device is busy, or network disconnected +#define MV_E_PACKET 0x80000205 ///< \~chinese 网络包数据错误 \~english Network data packet error +#define MV_E_NETER 0x80000206 ///< \~chinese 网络相关错误 \~english Network error +#define MV_E_SUPPORT_MODIFY_DEVICE_IP 0x8000020E ///< 在固定IP模式下不支持修改设备IP模式 \~english Current Mode Not Support Modify Ip +#define MV_E_KEY_VERIFICATION 0x8000020F ///< \~chinese 秘钥校验错误 \~english SwitchKey error +#define MV_E_IP_CONFLICT 0x80000221 ///< \~chinese 设备IP冲突 \~english 
Device IP conflict +/// @} + +/********************************************************************/ +/// \~chinese +/// \name USB_STATUS对应的错误码:范围0x80000300-0x800003FF +/// @{ +/// \~english +/// \name USB_STATUS Error Codes: Range from 0x80000300 to 0x800003FF +/// @{ +#define MV_E_USB_READ 0x80000300 ///< \~chinese 读usb出错 \~english Reading USB error +#define MV_E_USB_WRITE 0x80000301 ///< \~chinese 写usb出错 \~english Writing USB error +#define MV_E_USB_DEVICE 0x80000302 ///< \~chinese 设备异常 \~english Device exception +#define MV_E_USB_GENICAM 0x80000303 ///< \~chinese GenICam相关错误 \~english GenICam error +#define MV_E_USB_BANDWIDTH 0x80000304 ///< \~chinese 带宽不足 \~english Insufficient bandwidth +#define MV_E_USB_DRIVER 0x80000305 ///< \~chinese 驱动不匹配或者未装驱动 \~english Driver mismatch or unmounted drive +#define MV_E_USB_UNKNOW 0x800003FF ///< \~chinese USB未知的错误 \~english USB unknown error +/// @} + +/********************************************************************/ +/// \~chinese +/// \name 升级时对应的错误码:范围0x80000400-0x800004FF +/// @{ +/// \~english +/// \name Upgrade Error Codes: Range from 0x80000400 to 0x800004FF +/// @{ +#define MV_E_UPG_FILE_MISMATCH 0x80000400 ///< \~chinese 升级固件不匹配 \~english Firmware mismatches +#define MV_E_UPG_LANGUSGE_MISMATCH 0x80000401 ///< \~chinese 升级固件语言不匹配 \~english Firmware language mismatches +#define MV_E_UPG_CONFLICT 0x80000402 ///< \~chinese 升级冲突(设备已经在升级了再次请求升级即返回此错误) \~english Upgrading conflicted (repeated upgrading requests during device upgrade) +#define MV_E_UPG_INNER_ERR 0x80000403 ///< \~chinese 升级时设备内部出现错误 \~english Camera internal error during upgrade +#define MV_E_UPG_UNKNOW 0x800004FF ///< \~chinese 升级时未知错误 \~english Unknown error during upgrade +/// @} + +#endif //_MV_ERROR_DEFINE_H_ diff --git a/image_capture/third_party/mvs/Includes/MvISPErrorDefine.h b/image_capture/third_party/mvs/Includes/MvISPErrorDefine.h new file mode 100644 index 0000000..0592f5d --- /dev/null +++ 
b/image_capture/third_party/mvs/Includes/MvISPErrorDefine.h @@ -0,0 +1,98 @@ + +#ifndef _MV_ISP_ERROR_DEFINE_H_ +#define _MV_ISP_ERROR_DEFINE_H_ + +/************************************************************************ +* 来自ISP算法库的错误码 +************************************************************************/ +// 通用类型 +#define MV_ALG_OK 0x00000000 //处理正确 +#define MV_ALG_ERR 0x10000000 //不确定类型错误 + +// 能力检查 +#define MV_ALG_E_ABILITY_ARG 0x10000001 //能力集中存在无效参数 + +// 内存检查 +#define MV_ALG_E_MEM_NULL 0x10000002 //内存地址为空 +#define MV_ALG_E_MEM_ALIGN 0x10000003 //内存对齐不满足要求 +#define MV_ALG_E_MEM_LACK 0x10000004 //内存空间大小不够 +#define MV_ALG_E_MEM_SIZE_ALIGN 0x10000005 //内存空间大小不满足对齐要求 +#define MV_ALG_E_MEM_ADDR_ALIGN 0x10000006 //内存地址不满足对齐要求 + +// 图像检查 +#define MV_ALG_E_IMG_FORMAT 0x10000007 //图像格式不正确或者不支持 +#define MV_ALG_E_IMG_SIZE 0x10000008 //图像宽高不正确或者超出范围 +#define MV_ALG_E_IMG_STEP 0x10000009 //图像宽高与step参数不匹配 +#define MV_ALG_E_IMG_DATA_NULL 0x1000000A //图像数据存储地址为空 + +// 输入输出参数检查 +#define MV_ALG_E_CFG_TYPE 0x1000000B //设置或者获取参数类型不正确 +#define MV_ALG_E_CFG_SIZE 0x1000000C //设置或者获取参数的输入、输出结构体大小不正确 +#define MV_ALG_E_PRC_TYPE 0x1000000D //处理类型不正确 +#define MV_ALG_E_PRC_SIZE 0x1000000E //处理时输入、输出参数大小不正确 +#define MV_ALG_E_FUNC_TYPE 0x1000000F //子处理类型不正确 +#define MV_ALG_E_FUNC_SIZE 0x10000010 //子处理时输入、输出参数大小不正确 + +// 运行参数检查 +#define MV_ALG_E_PARAM_INDEX 0x10000011 //index参数不正确 +#define MV_ALG_E_PARAM_VALUE 0x10000012 //value参数不正确或者超出范围 +#define MV_ALG_E_PARAM_NUM 0x10000013 //param_num参数不正确 + +// 接口调用检查 +#define MV_ALG_E_NULL_PTR 0x10000014 //函数参数指针为空 +#define MV_ALG_E_OVER_MAX_MEM 0x10000015 //超过限定的最大内存 +#define MV_ALG_E_CALL_BACK 0x10000016 //回调函数出错 + +// 算法库加密相关检查 +#define MV_ALG_E_ENCRYPT 0x10000017 //加密错误 +#define MV_ALG_E_EXPIRE 0x10000018 //算法库使用期限错误 + +// 内部模块返回的基本错误类型 +#define MV_ALG_E_BAD_ARG 0x10000019 //参数范围不正确 +#define MV_ALG_E_DATA_SIZE 0x1000001A //数据大小不正确 +#define MV_ALG_E_STEP 0x1000001B //数据step不正确 + +// cpu指令集支持错误码 +#define MV_ALG_E_CPUID 0x1000001C 
//cpu不支持优化代码中的指令集 + +#define MV_ALG_WARNING 0x1000001D //警告 + +#define MV_ALG_E_TIME_OUT 0x1000001E //算法库超时 +#define MV_ALG_E_LIB_VERSION 0x1000001F //算法版本号出错 +#define MV_ALG_E_MODEL_VERSION 0x10000020 //模型版本号出错 +#define MV_ALG_E_GPU_MEM_ALLOC 0x10000021 //GPU内存分配错误 +#define MV_ALG_E_FILE_NON_EXIST 0x10000022 //文件不存在 +#define MV_ALG_E_NONE_STRING 0x10000023 //字符串为空 +#define MV_ALG_E_IMAGE_CODEC 0x10000024 //图像解码器错误 +#define MV_ALG_E_FILE_OPEN 0x10000025 //打开文件错误 +#define MV_ALG_E_FILE_READ 0x10000026 //文件读取错误 +#define MV_ALG_E_FILE_WRITE 0x10000027 //文件写错误 +#define MV_ALG_E_FILE_READ_SIZE 0x10000028 //文件读取大小错误 +#define MV_ALG_E_FILE_TYPE 0x10000029 //文件类型错误 +#define MV_ALG_E_MODEL_TYPE 0x1000002A //模型类型错误 +#define MV_ALG_E_MALLOC_MEM 0x1000002B //分配内存错误 +#define MV_ALG_E_BIND_CORE_FAILED 0x1000002C //线程绑核失败 + +// 降噪特有错误码 +#define MV_ALG_E_DENOISE_NE_IMG_FORMAT 0x10402001 //噪声特性图像格式错误 +#define MV_ALG_E_DENOISE_NE_FEATURE_TYPE 0x10402002 //噪声特性类型错误 +#define MV_ALG_E_DENOISE_NE_PROFILE_NUM 0x10402003 //噪声特性个数错误 +#define MV_ALG_E_DENOISE_NE_GAIN_NUM 0x10402004 //噪声特性增益个数错误 +#define MV_ALG_E_DENOISE_NE_GAIN_VAL 0x10402005 //噪声曲线增益值输入错误 +#define MV_ALG_E_DENOISE_NE_BIN_NUM 0x10402006 //噪声曲线柱数错误 +#define MV_ALG_E_DENOISE_NE_INIT_GAIN 0x10402007 //噪声估计初始化增益设置错误 +#define MV_ALG_E_DENOISE_NE_NOT_INIT 0x10402008 //噪声估计未初始化 +#define MV_ALG_E_DENOISE_COLOR_MODE 0x10402009 //颜色空间模式错误 +#define MV_ALG_E_DENOISE_ROI_NUM 0x1040200a //图像ROI个数错误 +#define MV_ALG_E_DENOISE_ROI_ORI_PT 0x1040200b //图像ROI原点错误 +#define MV_ALG_E_DENOISE_ROI_SIZE 0x1040200c //图像ROI大小错误 +#define MV_ALG_E_DENOISE_GAIN_NOT_EXIST 0x1040200d //输入的相机增益不存在(增益个数已达上限) +#define MV_ALG_E_DENOISE_GAIN_BEYOND_RANGE 0x1040200e //输入的相机增益不在范围内 +#define MV_ALG_E_DENOISE_NP_BUF_SIZE 0x1040200f //输入的噪声特性内存大小错误 + +// 去紫边特有错误码 +#define MV_ALG_E_PFC_ROI_PT 0x10405000 //去紫边算法ROI原点错误 +#define MV_ALG_E_PFC_ROI_SIZE 0x10405001 //去紫边算法ROI大小错误 +#define MV_ALG_E_PFC_KERNEL_SIZE 0x10405002 //去紫边算法滤波核尺寸错误 + +#endif 
//_MV_ISP_ERROR_DEFINE_H_ diff --git a/image_capture/third_party/mvs/Includes/MvObsoleteInterfaces.h b/image_capture/third_party/mvs/Includes/MvObsoleteInterfaces.h new file mode 100644 index 0000000..96557b2 --- /dev/null +++ b/image_capture/third_party/mvs/Includes/MvObsoleteInterfaces.h @@ -0,0 +1,2148 @@ + +#ifndef _MV_OBSOLETE_INTERFACES_H_ +#define _MV_OBSOLETE_INTERFACES_H_ + +#include "MvErrorDefine.h" +#include "CameraParams.h" +#include "ObsoleteCamParams.h" + +/** +* @brief 动态库导入导出定义 +* @brief Import and export definition of the dynamic library +*/ +#ifndef MV_CAMCTRL_API + + #if (defined (_WIN32) || defined(WIN64)) + #if defined(MV_CAMCTRL_EXPORTS) + #define MV_CAMCTRL_API __declspec(dllexport) + #else + #define MV_CAMCTRL_API __declspec(dllimport) + #endif + #else + #ifndef __stdcall + #define __stdcall + #endif + + #ifndef MV_CAMCTRL_API + #define MV_CAMCTRL_API + #endif + #endif + +#endif + +#ifndef IN + #define IN +#endif + +#ifndef OUT + #define OUT +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/************************************************************************/ +/* 不建议使用的接口 */ +/* Interfaces not recommended */ +/************************************************************************/ +/************************************************************************ + * @fn MV_CC_GetImageInfo + * @brief 获取图像基本信息 + * @param handle [IN] 设备句柄 + * @param pstInfo [IN][OUT] 返回给调用者有关相机图像基本信息结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + + * @fn MV_CC_GetImageInfo + * @brief Get basic information of image + * @param handle [IN] Device handle + * @param pstInfo [IN][OUT] Structure pointer of image basic information + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageInfo(IN void* handle, IN OUT MV_IMAGE_BASIC_INFO* pstInfo); + +/************************************************************************ + * @fn MV_CC_GetTlProxy + * @brief 获取GenICam代理 + * @param handle [IN] 句柄地址 + * @return GenICam代理类指针 ,正常返回值非NULL;异常返回NULL + + * @fn MV_CC_GetTlProxy + * @brief Get GenICam proxy + * @param handle [IN] Handle address + * @return GenICam proxy pointer, normal, return non-NULL; exception, return NULL + ************************************************************************/ +MV_CAMCTRL_API void* __stdcall MV_CC_GetTlProxy(IN void* handle); + +/*********************************************************************** + * @fn MV_XML_GetRootNode + * @brief 获取根节点 + * @param handle [IN] 句柄 + * @param pstNode [OUT] 根节点信息结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_XML_GetRootNode + * @brief Get root node + * @param handle [IN] Handle + * @param pstNode [OUT] Root node information structure + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetRootNode(IN void* handle, IN OUT MV_XML_NODE_FEATURE* pstNode); + +/*********************************************************************** + * @fn MV_XML_GetChildren + * @brief 从xml中获取指定节点的所有子节点,根节点为Root + * @param handle [IN] 句柄 + * @param pstNode [IN] 根节点信息结构体 + * @param pstNodesList [OUT] 节点列表结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_XML_GetChildren + * @brief Get all children node of specific node from xml, root node is Root + * @param handle [IN] Handle + * @param pstNode [IN] Root node information structure + * @param pstNodesList [OUT] Node information structure + * @return Success, return MV_OK. 
Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetChildren(IN void* handle, IN MV_XML_NODE_FEATURE* pstNode, IN OUT MV_XML_NODES_LIST* pstNodesList); + +/*********************************************************************** + * @fn MV_XML_GetNodeFeature + * @brief 获得当前节点的属性 + * @param handle [IN] 句柄 + * @param pstNode [IN] 根节点信息结构体 + * @param pstFeature [OUT] 当前节点属性结构体, + pstFeature 具体结构体内容参考 MV_XML_FEATURE_x + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_XML_GetNodeFeature + * @brief Get current node feature + * @param handle [IN] Handle + * @param pstNode [IN] Root node information structure + * @param pstFeature [OUT] Current node feature structure + Details of pstFeature refer to MV_XML_FEATURE_x + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_GetNodeFeature(IN void* handle, IN MV_XML_NODE_FEATURE* pstNode, IN OUT void* pstFeature); + +/*********************************************************************** + * @fn MV_XML_UpdateNodeFeature + * @brief 更新节点 + * @param handle [IN] 句柄 + * @param enType [IN] 节点类型 + * @param pstFeature [OUT] 当前节点属性结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_XML_UpdateNodeFeature + * @brief Update node + * @param handle [IN] Handle + * @param enType [IN] Node type + * @param pstFeature [OUT] Current node feature structure + * @return Success, return MV_OK. 
Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_UpdateNodeFeature(IN void* handle, IN enum MV_XML_InterfaceType enType, IN void* pstFeature); + +// 有节点需要更新时的回调函数 +// 当调用MV_XML_UpdateNodeFeature接口更新节点属性时,注册的回调函数cbUpdate会在pstNodesList中返回与之相关联的节点 +/*********************************************************************** + * @fn MV_XML_RegisterUpdateCallBack + * @brief 注册更新回调 + * @param handle [IN] 句柄 + * @param cbUpdate [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_XML_RegisterUpdateCallBack + * @brief Register update callback + * @param handle [IN] Handle + * @param cbUpdate [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_XML_RegisterUpdateCallBack(IN void* handle, + IN void(__stdcall* cbUpdate)(enum MV_XML_InterfaceType enType, void* pstFeature, MV_XML_NODES_LIST* pstNodesList, void* pUser), + IN void* pUser); + +/************************************************************************/ +/* 弃用的接口(存在更优化的接口可替换) */ +/* Abandoned interface */ +/************************************************************************/ +/*********************************************************************** + * @fn MV_CC_GetOneFrame + * @brief 获取一帧图像,此函数为查询式获取,每次调用查询内部缓存有 + 无数据,有数据则范围数据,无数据返回错误码 + (该接口已弃用,建议改用 MV_CC_GetOneFrameTimeOut接口) + * @param handle [IN] 句柄 + * @param pData [OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [OUT] 图像信息结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_GetOneFrame + * @brief Get one frame data, this function is using query to get data, + query whether the internal cache has data, return data if there has, return error code if no data + (This interface is abandoned, it is recommended to use the 
MV_CC_GetOneFrameTimeOut) + * @param handle [IN] Handle + * @param pData [OUT] Recevied image data pointer + * @param nDataSize [IN] Recevied buffer size + * @param pFrameInfo [OUT] Image information structure + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrame(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO* pFrameInfo); + +/*********************************************************************** + * @fn MV_CC_GetOneFrameEx + * @brief 获取一帧trunck数据,此函数为查询式获取,每次调用查询内部 + 缓存有无数据,有数据则范围数据,无数据返回错误码 + (该接口已弃用,建议改用 MV_CC_GetOneFrameTimeOut接口) + * @param handle [IN] 句柄 + * @param pData [IN][OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pFrameInfo [IN][OUT] 图像信息结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_GetOneFrameEx + * @brief Get one frame of trunck data, this function is using query to get data, + query whether the internal cache has data, return data if there has, return error code if no data + (This interface is abandoned, it is recommended to use the MV_CC_GetOneFrameTimeOut) + * @param handle [IN] Handle + * @param pData [OUT] Recevied image data pointer + * @param nDataSize [IN] Recevied buffer size + * @param pFrameInfo [OUT] Image information structure + * @return Success, return MV_OK. 
Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetOneFrameEx(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pFrameInfo); + +/*********************************************************************** + * @fn MV_CC_RegisterImageCallBack + * @brief 注册图像数据回调(该接口已弃用,建议改用 MV_CC_RegisterImageCallBackEx接口) + * @param handle [IN] 句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_RegisterImageCallBack + * @brief Register image data callback (This interface is abandoned, it is recommended to use the MV_CC_RegisterImageCallBackEx) + * @param handle [IN] Handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBack(void* handle, + void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO* pFrameInfo, void* pUser), + void* pUser); + +/************************************************************************ + * @fn MV_CC_SaveImage + * @brief 保存图片(该接口仅支持Windows且已弃用,建议改用 MV_CC_SaveImageEx2接口) + * @param pSaveParam [IN][OUT] 保存图片参数结构体 + pData; // [IN] 输入数据缓存 + nDataLen; // [IN] 输入数据大小 + enPixelType; // [IN] 输入数据的像素格式 + nWidth; // [IN] 图像宽 + nHeight; // [IN] 图像高 + pImageBuffer; // [OUT] 输出图片缓存 + nImageLen; // [OUT] 输出图片大小 + nBufferSize; // [IN] 提供的输出缓冲区大小 + enImageType; // [IN] 输出图片格式 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_SaveImage + * @brief Save image (This interface only supports on Windows, and is abandoned, it is recommended to use the MV_CC_SaveImageEx2) + * @param pSaveParam [IN][OUT] Save image parameters structure + pData; // [IN] Input data buffer + nDataLen; // [IN] Input data size + enPixelType; // [IN] Input data 
pixel format + nWidth; // [IN] Width + nHeight; // [IN] Height + pImageBuffer; // [OUT] Output image buffer + nImageLen; // [OUT] Output image size + nBufferSize; // [IN] Provided output buffer size + enImageType; // [IN] Output image type + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImage(IN OUT MV_SAVE_IMAGE_PARAM* pSaveParam); + +/************************************************************************ + * @fn MV_CC_SaveImageEx + * @brief 保存图片,支持Bmp和Jpeg.编码质量在50-99之前 (该接口仅支持Windows且已弃用,建议改用 MV_CC_SaveImageEx2接口) + * @param pSaveParam [IN][OUT] 保存图片参数结构体 + pData; // [IN] 输入数据缓存 + nDataLen; // [IN] 输入数据大小 + enPixelType; // [IN] 输入数据的像素格式 + nWidth; // [IN] 图像宽 + nHeight; // [IN] 图像高 + pImageBuffer; // [OUT] 输出图片缓存 + nImageLen; // [OUT] 输出图片大小 + nBufferSize; // [IN] 提供的输出缓冲区大小 + enImageType; // [IN] 输出图片格式 + nJpgQuality; // [IN] 编码质量, (50-99] + nReserved[4]; + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_SaveImageEx + * @brief Save image, support Bmp and Jpeg. Encoding quality, (50-99] + This interface only supports on Windows, and is abandoned, it is recommended to use the MV_CC_SaveImageEx2 + * @param pSaveParam [IN][OUT] Save image parameters structure + pData; // [IN] Input data buffer + nDataLen; // [IN] Input data size + enPixelType; // [IN] Pixel format of input data + nWidth; // [IN] Image width + nHeight; // [IN] Image height + pImageBuffer; // [OUT] Output image buffer + nImageLen; // [OUT] Output image size + nBufferSize; // [IN] Output buffer size provided + enImageType; // [IN] Output image format + nJpgQuality; // [IN] Encoding quality, (50-99] + nReserved[4]; + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageEx(IN OUT MV_SAVE_IMAGE_PARAM_EX* pSaveParam); + +/********************************************************************//** + * @~chinese + * @brief Bayer噪声估计(该接口已弃用,建议改用ISP Tool方式进行标定) + * @param handle [IN] 设备句柄 + * @param pstNoiseEstimateParam [IN][OUT] Bayer噪声估计参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口只支持Bayer8/Bayer10/Bayer12格式,其它Bayer格式需先转成Bayer8/Bayer10/Bayer12格式。\n + 该接口只有在打开我司特定彩色相机后才可以正常使用,当相机被断开或者掉线后,继续使用该接口会报错。 + + * @~english + * @brief Noise estimate of Bayer format + * @param handle [IN] Device handle + * @param pstNoiseEstimateParam [IN][OUT] Noise estimate parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API only support Bayer8/Bayer10/Bayer12 format, other Bayer format must Convert to Bayer8/Bayer10/Bayer12 format.\n + This API is only available when the camera is turned on, and when the camera is disconnected or disconnected, continuing to use This API will return an error. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_BayerNoiseEstimate(IN void* handle, IN OUT MV_CC_BAYER_NOISE_ESTIMATE_PARAM* pstNoiseEstimateParam); + +/********************************************************************//** + * @~chinese + * @brief Bayer空域降噪(该接口已弃用,建议改用ISP Tool方式进行降噪) + * @param handle [IN] 设备句柄 + * @param pstSpatialDenoiseParam [IN][OUT] Bayer空域降噪参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口只支持Bayer8/Bayer10/Bayer12格式,其它Bayer格式需先转成Bayer8/Bayer10/Bayer12格式。\n + 该接口只有在打开我司特定彩色相机后才可以正常使用,当相机被断开或者掉线后,继续使用该接口会报错。 + + * @~english + * @brief Spatial Denoise of Bayer format + * @param handle [IN] Device handle + * @param pstSpatialDenoiseParam [IN][OUT] Spatial Denoise parameter structure + * @return Success, return MV_OK. 
Failure, return error code + * @remarks This API only support Bayer8/Bayer10/Bayer12 format, other Bayer format must Convert to Bayer8/Bayer10/Bayer12 format.\n + This API is only available when the camera is turned on, and when the camera is disconnected or disconnected, continuing to use This API will return an error. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_BayerSpatialDenoise(IN void* handle, IN OUT MV_CC_BAYER_SPATIAL_DENOISE_PARAM* pstSpatialDenoiseParam); + +/********************************************************************//** + * @~chinese + * @brief 设置Bayer格式的CLUT使能和信息(该接口已弃用,建议改用ISP Tool方式进行设置) + * @param handle [IN] 设备句柄 + * @param pstCLUTParam [IN] CLUT参数 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 开启CLUT并设置CLUT信息后,在调用MV_CC_ConvertPixelType、MV_CC_SaveImageEx2接口将Bayer8/10/12/16格式转成RGB24/48, RGBA32/64,BGR24/48,BGRA32/64时起效。 + + * @~english + * @brief Set CLUT param + * @param handle [IN] Device handle + * @param pstCLUTParam [IN] CLUT parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks After enable the CLUT and set CLUT, It work in the calling MV_CC_ConvertPixelType\MV_CC_SaveImageEx2 API convert Bayer8/10/12/16 to RGB24/48, RGBA32/64,BGR24/48,BGRA32/64. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBayerCLUTParam(IN void* handle, IN MV_CC_CLUT_PARAM* pstCLUTParam); + +/********************************************************************//** + * @~chinese + * @brief 图像锐化(该接口已弃用,建议改用ISP Tool方式进行锐化) + * @param handle [IN] 设备句柄 + * @param pstSharpenParam [IN] 锐化参数 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @~english + * @brief Image sharpen + * @param handle [IN] Device handle + * @param pstSharpenParam [IN] Sharpen parameter structure + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ImageSharpen(IN void* handle, IN OUT MV_CC_SHARPEN_PARAM* pstSharpenParam); + +/********************************************************************//** + * @~chinese + * @brief 色彩校正(包括CCM和CLUT)(该接口已弃用,建议改用ISP Tool方式进行校正) + * @param handle [IN] 设备句柄 + * @param pstColorCorrectParam [IN] 色彩校正参数 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口支持单独CCM或者CLUT,也支持同时进行CCM和CLUT,用户可以通过CCM和CLUT信息中的使能开关进行选择。 + + * @~english + * @brief Color Correct(include CCM and CLUT) + * @param handle [IN] Device handle + * @param pstColorCorrectParam [IN] Color Correct parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API supports CCM or CLUT alone, as well as CCM and CLUT at the same time. The user can select by means of the enable switch in CCM and CLUT information. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ColorCorrect(IN void* handle, IN OUT MV_CC_COLOR_CORRECT_PARAM* pstColorCorrectParam); + +/********************************************************************//** + * @~chinese + * @brief 噪声估计(该接口已弃用,建议改用ISP Tool方式进行标定) + * @param handle [IN] 设备句柄 + * @param pstNoiseEstimateParam [IN] 噪声估计参数 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 如果用户选择全图做噪声估计,nROINum可输入0,pstROIRect可置空。 + + * @~english + * @brief Noise Estimate + * @param handle [IN] Device handle + * @param pstNoiseEstimateParam [IN] Noise Estimate parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks If the user selects the full image, nROINum can be typed with 0 and pstROIRect empty. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_NoiseEstimate(IN void* handle, IN OUT MV_CC_NOISE_ESTIMATE_PARAM* pstNoiseEstimateParam); + +/********************************************************************//** + * @~chinese + * @brief 空域降噪(该接口已弃用,建议改用ISP Tool方式进行降噪) + * @param handle [IN] 设备句柄 + * @param pstSpatialDenoiseParam [IN] 空域降噪参数 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @~english + * @brief Spatial Denoise + * @param handle [IN] Device handle + * @param pstSpatialDenoiseParam [IN] Spatial Denoise parameter structure + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SpatialDenoise(IN void* handle, IN OUT MV_CC_SPATIAL_DENOISE_PARAM* pstSpatialDenoiseParam); + + +/********************************************************************//** + * @~chinese + * @brief LSC标定 + * @param handle [IN] 设备句柄 + * @param pstLSCCalibParam [IN] 标定参数 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks + + * @~english + * @brief LSC Calib + * @param handle [IN] Device handle + * @param pstLSCCalibParam [IN] LSC Calib parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_LSCCalib(IN void* handle, IN OUT MV_CC_LSC_CALIB_PARAM* pstLSCCalibParam); + +/********************************************************************//** + * @~chinese + * @brief LSC校正 + * @param handle [IN] 设备句柄 + * @param pstLSCCorrectParam [IN] 校正参数 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @~english + * @brief LSC Correct + * @param handle [IN] Device handle + * @param pstLSCCorrectParam [IN] LSC Correct parameter structure + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_LSCCorrect(IN void* handle, IN OUT MV_CC_LSC_CORRECT_PARAM* pstLSCCorrectParam); + +/************************************************************************ + * @fn MV_GIGE_ForceIp + * @brief 强制IP(该接口已弃用,建议改用 MV_GIGE_ForceIpEx接口) + * @param handle:设备句柄 + * @param nIP [IN] 设置的IP + * @return 见返回错误码 + + * @fn MV_GIGE_ForceIp + * @brief Force IP (This interface is abandoned, it is recommended to use the MV_GIGE_ForceIpEx) + * @param handle Handle + * @param nIP [IN] IP to set + * @return Refer to error code +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_ForceIp(IN void* handle, unsigned int nIP); + +/************************************************************************ + * @fn MV_CC_RegisterEventCallBack + * @brief 注册事件回调(该接口已弃用,建议改用 MV_CC_RegisterEventCallBackEx接口),该接口只支持网口设备,不支持U口和GenTL设备 + * @param handle:设备句柄 + * @param cbEvent [IN] 事件回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 见返回错误码 + + * @fn MV_CC_RegisterEventCallBack + * @brief Register event callback (this interface has been deprecated and is recommended to be converted to the MV_CC_RegisterEventCallBackEx interface),only support GEV devices,don‘t support USB and GenTL Device. 
+ * @param handle:设备句柄 + * @param cbEvent [IN] event callback pointer + * @param pUser [IN] User defined value + * @return 见返回错误码 +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterEventCallBack(void* handle, void(__stdcall* cbEvent)(unsigned int nExternalEventId, void* pUser), void* pUser); + +/*********************************************************************** + * @fn MV_CC_Display + * @brief 显示图像,注册显示窗口,内部自动显示(与MV_CC_GetImageBuffer不能同时使用,建议改用MV_CC_DisplayOneFrame接口) + * @param handle [IN] 句柄 + * @param hWnd [IN] 显示窗口句柄 + * @return 成功,返回MV_OK;错误,返回错误码 + + * @fn MV_CC_Display + * @brief Display one frame image, register display window, automatic display internally + * @param handle [IN] Handle + * @param hWnd [IN] Display Window Handle + * @return Success, return MV_OK. Failure, return error code + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_Display(IN void* handle, void* hWnd); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetIntValue(IN void* handle, + IN const char* strKey, + OUT MVCC_INTVALUE *pIntValue); + * @brief 获取Integer属性值(建议改用MV_CC_GetIntValueEx接口) + * @param void* handle [IN] 相机句柄 + * @param char* strKey [IN] 属性键值,如获取宽度信息则为"Width" + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机属性结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetIntValue(IN void* handle, + IN const char* strKey, + OUT MVCC_INTVALUE *pIntValue); + * @brief Get Integer value + * @param void* handle [IN] Handle + * @param char* strKey [IN] Key value, for example, using "Width" to get width + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of camera features + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetIntValue(IN void* handle,IN const char* strKey,OUT MVCC_INTVALUE *pIntValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetIntValue(IN void* handle, + IN const char* strKey, + IN unsigned int nValue); + * @brief 设置Integer型属性值(建议改用MV_CC_SetIntValueEx接口) + * @param void* handle [IN] 相机句柄 + * @param char* strKey [IN] 属性键值,如获取宽度信息则为"Width" + * const unsigned int nValue [IN] 想要设置的相机的属性值 + * @return 成功,返回MV_OK,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetIntValue(IN void* handle, + IN const char* strKey, + IN unsigned int nValue); + * @brief Set Integer value + * @param void* handle [IN] Handle + * @param char* strKey [IN] Key value, for example, using "Width" to set width + * const unsigned int nValue [IN] Feature value to set + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetIntValue(IN void* handle,IN const char* strKey,IN unsigned int nValue); + + +/************************************************************************/ +/* 相机参数获取和设置,此模块的所有接口已废弃,建议使用万能接口代替 */ +/* Get and set camara parameters, all interfaces of this module will be replaced by general interface*/ +/************************************************************************/ +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetWidth(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取图像宽度 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机宽度的信息结构体指针 + * 返回的pstValue结构体的意义 + * unsigned int nCurValue; // 代表相机当前的宽度值 + * unsigned int nMax; // 表示相机允许的最大可设置的宽度值 + * unsigned int nMin; // 表示相机允许的最小可设置的宽度值 + * unsigned int nInc; // 
表示相机设置的宽度增量必须是nInc的倍数,否则无效 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 其他整型结构体参数的接口可参照此接口 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetWidth(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get image width + * @param void* handle [IN] Camera Handle + * MVCC_INTVALUE* pstValue [IN][OUT] Returns the information structure pointer about the camera's width for the caller + * The meaning of returns pstValue structure + * unsigned int nCurValue; // Represents the current width value of the camera + * unsigned int nMax; // Indicates the maximum settable width value allowed by the camera + * unsigned int nMin; // Indicates the minimum settable width value allowed by the camera + * unsigned int nInc; // Indicates that the width increment set by the camera must be a multiple of nInc, otherwise it is invalid + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + * Other Integer structure parameters interface can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetWidth(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ +* @fn MV_CAMCTRL_API int __stdcall MV_CC_SetWidth(IN void* handle, IN const unsigned int nValue); +* @brief 设置图像宽度 +* @param void* handle [IN] 相机句柄 +* const unsigned int nValue [IN] 想要设置的相机宽度的值,注意此宽度值必须是MV_CC_GetWidth接口返回的pstValue中的nInc的倍数才能设置成功 +* @return 成功,返回MV_OK,并且相机宽度将会更改为相应值,失败,返回错误码 + +* @fn MV_CAMCTRL_API int __stdcall MV_CC_SetWidth(IN void* handle, IN const unsigned int nValue); + * @brief Set image width + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] To set the value of the camera width, note that the width value must be a multiple of nInc in the pstValue returned by the MV_CC_GetWidth interface + * @return Success, return MV_OK, and the camera width will change to 
the corresponding value. Failure, return error code +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetWidth(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHeight(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取图像高度 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机高度的信息结构体指针 + * @return 成功,返回MV_OK,并将高度信息返回到结构体中,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHeight(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get image height + * @param void* handle [IN] Camera handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera height to user + * @return Success, return MV_OK, and return height information to the structure. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetHeight(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHeight(IN void* handle, IN const unsigned int nValue); + * @brief 设置图像高度 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的相机宽度的值,注意此宽度值必须是MV_CC_GetWidth接口返回的pstValue中的nInc的倍数才能设置成功 + * @return 成功,返回MV_OK,并且相机高度将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHeight(IN void* handle, IN const unsigned int nValue); + * @brief Set image height + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Camera height value to set, note that this value must be times of nInc of pstValue returned by MV_CC_GetWidth + * @return Success, return MV_OK, and the camera height will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetHeight(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetX(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取图像X偏移 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机X偏移的信息结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetX(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get image X offset + * @param void* handle [IN] Camera Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera X offset to user + * @return Success, return MV_OK. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetX(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetX(IN void* handle, IN const unsigned int nValue); + * @brief 设置图像AOI偏移 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的相机AOI的值 + * @return 成功,返回MV_OK,并且相机AOI偏移将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetX(IN void* handle, IN const unsigned int nValue); + * @brief Set image X offset + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Camera X offset value to set + * @return Success, return MV_OK, and the camera X offset will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetX(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetY(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取图像Y偏移 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机Y偏移的信息结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetY(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get image Y offset + * @param void* handle [IN] Camera Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera Y offset to user + * @return Success, return MV_OK. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAOIoffsetY(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetY(IN void* handle, IN const unsigned int nValue); + * @brief 设置图像Y偏移 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的相机Y偏移的值 + * @return 成功,返回MV_OK,并且相机Y偏移将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetY(IN void* handle, IN const unsigned int nValue); + * @brief Set image Y offset + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Camera Y offset value to set + * @return Success, return MV_OK, and the camera Y offset will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAOIoffsetY(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeLower(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取曝光下限 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机曝光值下限结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeLower(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get exposure lower limit + * @param void* handle [IN] Camera Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera exposure lower to user + * @return Success, return MV_OK. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeLower(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeLower(IN void* handle, IN const unsigned int nValue); + * @brief 设置曝光值下限 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的曝光值下限 + * @return 成功,返回MV_OK,并且相机曝光下限将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeLower(IN void* handle, IN const unsigned int nValue); + * @brief Set exposure lower limit + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Exposure lower to set + * @return Success, return MV_OK, and the camera exposure time lower limit value will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeLower(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeUpper(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取曝光上限 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机曝光值上限结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeUpper(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get exposure upper limit + * @param void* handle [IN] Camera Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera exposure upper to user + * @return Success, return MV_OK. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAutoExposureTimeUpper(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeUpper(IN void* handle, IN const unsigned int nValue); + * @brief 设置曝光值上限 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的曝光值上限 + * @return 成功,返回MV_OK,并且相机曝光上限将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeUpper(IN void* handle, IN const unsigned int nValue); + * @brief Set exposure upper limit + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Exposure upper to set + * @return Success, return MV_OK, and the camera exposure time upper limit value will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAutoExposureTimeUpper(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBrightness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取亮度值 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机亮度结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBrightness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get brightness + * @param void* handle [IN] Camera Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera brightness to user + * @return Success, return MV_OK. Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBrightness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBrightness(IN void* handle, IN const unsigned int nValue); + * @brief 设置亮度值 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的亮度值 + * @return 成功,返回MV_OK,并且相机亮度将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBrightness(IN void* handle, IN const unsigned int nValue); + * @brief Set brightness + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] Brightness upper to set + * @return Success, return MV_OK, and the camera brightness value will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBrightness(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetFrameRate(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief 获取帧率 + * @param void* handle [IN] 相机句柄 + * MVCC_FLOATVALUE* pstValue [IN][OUT] 返回给调用者有关相机帧率的信息结构体指针 + * 返回的pstValue结构体的意义 + * float fCurValue; // 表示相机当前的帧率 + * float fMax; // 表示相机允许设置的最大帧率 + * float fMin; // 表示相机允许设置的最小帧率 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 其他浮点型结构体参数的接口可参照此接口 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetFrameRate(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief Get Frame Rate + * @param void* handle [IN] Camera Handle + * MVCC_FLOATVALUE* pstValue [IN][OUT] Return pointer of information structure related to camera frame rate to user + * The meaning of returns pstValue structure + * float fCurValue; // Indicates the current frame rate of the camera + * float fMax; // Indicates the maximum frame rate allowed by the camera + * float fMin; // Indicates the minimum frame rate allowed by the camera + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + * Other interface of Float structure parameters can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFrameRate(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetFrameRate(IN void* handle, IN const float fValue); + * @brief 设置帧率 + * @param void* handle [IN] 相机句柄 + * const float fValue [IN] 想要设置的相机帧率 + * @return 成功,返回MV_OK,并且相机帧率将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetFrameRate(IN void* handle, IN const float fValue); + * @brief Set frame rate + * @param void* handle [IN] Camera Handle + * const float fValue [IN] Camera frame rate to set + * @return Success, return MV_OK, and camera frame rate will be changed to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFrameRate(IN void* handle, IN const float fValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGain(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief 获取增益 + * @param void* handle [IN] 相机句柄 + * MVCC_FLOATVALUE* pstValue [IN][OUT] 返回给调用者有关相机增益的信息结构体指针 + * 返回的pstValue结构体的意义 + * float fCurValue; // 表示相机当前的增益 + * float fMax; // 表示相机允许设置的最大增益 + * float fMin; // 表示相机允许设置的最小增益 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 其他浮点型结构体参数的接口可参照此接口 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGain(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief Get Gain + * @param void* handle [IN] Camera Handle + * MVCC_FLOATVALUE* pstValue [IN][OUT] Return pointer of information structure related to gain to user + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * float fCurValue; // Camera current gain + * float fMax; // The maximum gain camera allowed + * float fMin; // The minimum gain camera allowed + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + * Other interface of Float structure parameters can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGain(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGain(IN void* handle, IN const float fValue); + * @brief 设置增益 + * @param void* handle [IN] 相机句柄 + * const float fValue [IN] 想要设置的相机增益 + * @return 成功,返回MV_OK,并且相机增益将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGain(IN void* handle, IN const float fValue); + * @brief Set Gain + * @param void* handle [IN] Camera Handle + * const float fValue [IN] Gain value to set + * @return Success, return MV_OK, and the camera gain value will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGain(IN void* handle, IN const float fValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetExposureTime(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief 获取曝光时间 + * @param void* handle [IN] 相机句柄 + * MVCC_FLOATVALUE* pstValue [IN][OUT] 返回给调用者有关相机曝光时间的信息结构体指针 + * 返回的pstValue结构体的意义 + * float fCurValue; // 表示相机当前的曝光时间 + * float fMax; // 表示相机允许设置的最大曝光时间 + * float fMin; // 表示相机允许设置的最小曝光时间 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 其他浮点型结构体参数的接口可参照此接口 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetExposureTime(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief Get exposure time + * @param void* handle [IN] Camera Handle + * MVCC_FLOATVALUE* pstValue [IN][OUT] Return pointer of information structure related to exposure time to user + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * float fCurValue; // Camera current exposure time + * float fMax; // The maximum exposure time camera allowed + * float fMin; // The minimum exposure time camera allowed + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + * Other interface of Float structure parameters can refer to this interface + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetExposureTime(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetExposureTime(IN void* handle, IN const float fValue); + * @brief 设置曝光时间 + * @param void* handle [IN] 相机句柄 + * const float fValue [IN] 想要设置的相机曝光时间 + * @return 成功,返回MV_OK,并且相机曝光时间将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetExposureTime(IN void* handle, IN const float fValue); + * @brief Set exposure time + * @param void* handle [IN] Camera Handle + * const float fValue [IN] Exposure time to set + * @return Success, return MV_OK, and the camera exposure time value will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetExposureTime(IN void* handle, IN const float fValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetPixelFormat(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取像素格式 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关像素格式的信息结构体指针 + * 返回的pstValue结构体的意义 + * unsigned int nCurValue; // 相机当前的像素格式,是枚举类型,比如说PixelType_Gvsp_Mono8, 这里获得的是其整型值,具体数值参照PixelType.h的MvGvspPixelType枚举类型 + * unsigned int nSupportedNum; // 相机支持的像素格式的个数 + * unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; // 相机所有支持的像素格式对应的整型值列表,后面要设置像素格式时,参数必须是这个数组中的一种,否则无效 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 其他枚举类型参数接口可参照此接口,有关相应参数的枚举类型对应的整型值请查找PixelType.h 和 CameraParams.h中相应的定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetPixelFormat(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get Pixel Format + 
* @param void* handle [IN] Camera Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Returns the information structure pointer about pixel format for the caller + * The meaning of returns pstValue structure + * unsigned int nCurValue; // The current pixel format of the camera, is the enumeration type, such as PixelType_Gvsp_Mono8, here is the integer value, the specific value please refer to MvGvspPixelType enumeration type in PixelType.h + * unsigned int nSupportedNum; // Number of pixel formats supported by the camera + * unsigned int nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]; // The integer values list correspond to all supported pixel formats of the camera, followed by when set the pixel format, the parameter must be one of this list, otherwise invalid + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + Other interface of Enumeration structure parameters can refer to this interface, look for the corresponding definition in PixelType.h and CameraParams.h for the integer values of the enum type parameter + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetPixelFormat(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetPixelFormat(IN void* handle, IN const unsigned int nValue); + * @brief 设置像素格式 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的像素格式对应的整型值,调用此接口时可以直接填写枚举值,如MV_CC_SetPixelFormat(m_handle, PixelType_Gvsp_RGB8_Packed); + * @return 成功,返回MV_OK,并且相机像素格式将会更改为相应值,失败,返回错误码 + * + * 要设置的枚举类型必须是Get接口返回的nSupportValue[MV_MAX_XML_SYMBOLIC_NUM]中的一种,否则会失败 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetPixelFormat(IN void* handle, IN const unsigned int nValue); + * @brief Set Pixel Format + * @param void* handle [IN] Camera Handle + * const unsigned int nValue [IN] The corresponding integer value for 
pixel format to be set, when calling this interface can be directly filled in enumeration values, such as MV_CC_SetPixelFormat(m_handle, PixelType_Gvsp_RGB8_Packed); + * @return Success, return MV_OK, and the camera pixel format will change to the corresponding value. Failure, return error code + * + * Other interface of Enumeration structure parameters can refer to this interface, the enumeration type to be set must be one of the nSupportValue [MV_MAX_XML_SYMBOLIC_NUM] returned by the Get interface, otherwise it will fail + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetPixelFormat(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取采集模式 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关采集模式的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_ACQUISITION_MODE 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get acquisition mode + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of acquisition mode + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_ACQUISITION_MODE in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionMode(IN void* handle, IN const unsigned int nValue); + * @brief 设置像素格式 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的采集模式对应的整型值 + * @return 成功,返回MV_OK,并且相机采集模式将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionMode(IN void* handle, IN const unsigned int nValue); + * @brief Set acquisition mode + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to acquisition mode + * @return Success, return MV_OK, and the camera acquisition mode will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionMode(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGainMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取增益模式 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关增益模式的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_GAIN_MODE 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGainMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get gain mode + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of gain mode + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_GAIN_MODE in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGainMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGainMode(IN void* handle, IN const unsigned int nValue); + * @brief 设置增益模式 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的增益模式对应的整型值 + * @return 成功,返回MV_OK,并且相机增益模式将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGainMode(IN void* handle, IN const unsigned int nValue); + * @brief Set gain mode + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to gain mode + * @return Success, return MV_OK, and the camera gain mode will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGainMode(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetExposureAutoMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取自动曝光模式 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关自动曝光模式的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_EXPOSURE_AUTO_MODE 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetExposureAutoMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get auto exposure mode + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of auto exposure mode + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_EXPOSURE_AUTO_MODE in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetExposureAutoMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetExposureAutoMode(IN void* handle, IN const unsigned int nValue); + * @brief 设置自动曝光模式 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的自动曝光模式对应的整型值 + * @return 成功,返回MV_OK,并且相机自动曝光模式将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetExposureAutoMode(IN void* handle, IN const unsigned int nValue); + * @brief Set auto exposure mode + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to auto exposure mode + * @return Success, return MV_OK, and the camera auto exposure mode will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetExposureAutoMode(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取触发模式 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关触发模式的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_TRIGGER_MODE 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get trigger mode + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of trigger mode + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_TRIGGER_MODE in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerMode(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerMode(IN void* handle, IN const unsigned int nValue); + * @brief 设置触发模式 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的触发模式对应的整型值 + * @return 成功,返回MV_OK,并且相机触发模式将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerMode(IN void* handle, IN const unsigned int nValue); + * @brief Set trigger mode + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to trigger mode + * @return Success, return MV_OK, and the camera trigger mode will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerMode(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerDelay(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief 获取触发延时 + * @param void* handle [IN] 相机句柄 + * MVCC_FLOATVALUE* pstValue [IN][OUT] 返回给调用者有关相机触发延时的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 可参照接口MV_CC_GetFrameRate + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerDelay(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief Get trigger delay + * @param void* handle [IN] Handle + * MVCC_FLOATVALUE* pstValue [IN][OUT] Structure pointer of trigger delay + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + * Refer to MV_CC_GetFrameRate + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerDelay(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerDelay(IN void* handle, IN const float fValue); + * @brief 设置触发延时 + * @param void* handle [IN] 相机句柄 + * const float fValue [IN] 想要设置的相机触发延时 + * @return 成功,返回MV_OK,并且相机触发延时将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerDelay(IN void* handle, IN const float fValue); + * @brief Set trigger delay + * @param void* handle [IN] Handle + * const float fValue [IN] Trigger delay to set + * @return Success, return MV_OK, and the camera trigger delay will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerDelay(IN void* handle, IN const float fValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerSource(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取触发源 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关触发源的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_TRIGGER_SOURCE 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerSource(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get trigger source + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of trigger source + * @return Success, return MV_OK, and get the structure of the corresponding parameters. Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_TRIGGER_SOURCE in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetTriggerSource(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerSource(IN void* handle, IN const unsigned int nValue); + * @brief 设置触发源 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的触发源对应的整型值 + * @return 成功,返回MV_OK,并且相机触发源将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerSource(IN void* handle, IN const unsigned int nValue); + * @brief Set trigger source + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to trigger source + * @return Success, return MV_OK, and the camera trigger source will change to the corresponding value. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetTriggerSource(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_TriggerSoftwareExecute(IN void* handle); + * @brief 软触发一次(接口仅在已选择的触发源为软件触发时有效) + * @param void* handle [IN] 相机句柄 + * @return 成功,返回MV_OK, 失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_TriggerSoftwareExecute(IN void* handle); + * @brief Execute software trigger once (this interface only valid when the trigger source is set to software) + * @param void* handle [IN] Handle + * @return Success, return MV_OK. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_TriggerSoftwareExecute(IN void* handle); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGammaSelector(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取Gamma类型 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关Gamma类型的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_GAMMA_SELECTOR 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGammaSelector(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get Gamma mode + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of gamma mode + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_GAMMA_SELECTOR in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGammaSelector(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGammaSelector(IN void* handle, IN const unsigned int nValue); + * @brief 设置Gamma类型 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的Gamma类型对应的整型值 + * @return 成功,返回MV_OK,并且相机Gamma类型将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGammaSelector(IN void* handle, IN const unsigned int nValue); + * @brief Set Gamma mode + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to gamma mode + * @return Success, return MV_OK, and the camera gamma mode will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGammaSelector(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGamma(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief 获取Gamma值 + * @param void* handle [IN] 相机句柄 + * MVCC_FLOATVALUE* pstValue [IN][OUT] 返回给调用者有关相机Gamma值的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + * 可参照接口MV_CC_GetFrameRate + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetGamma(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + * @brief Get Gamma value + * @param void* handle [IN] Handle + * MVCC_FLOATVALUE* pstValue [IN][OUT] Structure pointer of gamma value + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + * Refer to MV_CC_GetFrameRate + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetGamma(IN void* handle, IN OUT MVCC_FLOATVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGamma(IN void* handle, IN const float fValue); + * @brief 设置Gamma值 + * @param void* handle [IN] 相机句柄 + * const float fValue [IN] 想要设置的相机Gamma值 + * @return 成功,返回MV_OK,并且相机Gamma值将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetGamma(IN void* handle, IN const float fValue); + * @brief Set Gamma value + * @param void* handle [IN] Handle + * const float fValue [IN] Gamma value to set + * @return Success, return MV_OK, and the camera gamma value will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetGamma(IN void* handle, IN const float fValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetSharpness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取锐度 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机锐度结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetSharpness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get sharpness + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of sharpness + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetSharpness(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetSharpness(IN void* handle, IN const unsigned int nValue); + * @brief 设置锐度 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的锐度 + * @return 成功,返回MV_OK,并且相机锐度将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetSharpness(IN void* handle, IN const unsigned int nValue); + * @brief Set sharpness + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Sharpness to set + * @return Success, return MV_OK, and the camera sharpness will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSharpness(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取色调 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机色调结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get Hue + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of Hue + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetHue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHue(IN void* handle, IN const unsigned int nValue); + * @brief 设置色调 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的色调 + * @return 成功,返回MV_OK,并且相机色调将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHue(IN void* handle, IN const unsigned int nValue); + * @brief Set Hue + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Hue to set + * @return Success, return MV_OK, and the camera Hue will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetHue(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetSaturation(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取饱和度 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机饱和度结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetSaturation(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get Saturation + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of Saturation + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetSaturation(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetSaturation(IN void* handle, IN const unsigned int nValue); + * @brief 设置饱和度 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的饱和度 + * @return 成功,返回MV_OK,并且相机饱和度将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetSaturation(IN void* handle, IN const unsigned int nValue); + * @brief Set Saturation + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Saturation to set + * @return Success, return MV_OK, and the camera Saturation will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSaturation(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceWhiteAuto(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief 获取自动白平衡 + * @param void* handle [IN] 相机句柄 + * MVCC_ENUMVALUE* pstValue [IN][OUT] 返回给调用者的有关自动白平衡的信息结构体指针 + * @return 成功,返回MV_OK,并获得相应参数信息的结构体, 失败, 返回错误码 + * + 可参照接口MV_CC_GetPixelFormat,参考 CameraParam.h 中的 MV_CAM_BALANCEWHITE_AUTO 定义 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceWhiteAuto(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + * @brief Get Auto white balance + * @param void* handle [IN] Handle + * MVCC_ENUMVALUE* pstValue [IN][OUT] Structure pointer of auto white balance + * @return Success, return MV_OK, and get the structure of the corresponding parameters. 
Failure, return error code + * + Refer to MV_CC_GetPixelFormat and definition of MV_CAM_BALANCEWHITE_AUTO in CameraParam.h + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceWhiteAuto(IN void* handle, IN OUT MVCC_ENUMVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceWhiteAuto(IN void* handle, IN const unsigned int nValue); + * @brief 设置自动白平衡 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 要设置的自动白平衡对应的整型值 + * @return 成功,返回MV_OK,并且相机自动白平衡将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceWhiteAuto(IN void* handle, IN const unsigned int nValue); + * @brief Set Auto white balance + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Integer value to set corresponding to auto white balance + * @return Success, return MV_OK, and the camera auto white balance will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceWhiteAuto(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioRed(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取白平衡 红 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机白平衡 红结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioRed(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get white balance red + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of white balance red + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioRed(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioRed(IN void* handle, IN const unsigned int nValue); + * @brief 设置白平衡 红 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的白平衡 红 + * @return 成功,返回MV_OK,并且相机白平衡 红将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioRed(IN void* handle, IN const unsigned int nValue); + * @brief Set white balance red + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] White balance red to set + * @return Success, return MV_OK, and the camera white balance red will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioRed(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioGreen(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取白平衡 绿 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机白平衡 绿结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioGreen(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get white balance green + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of white balance green + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioGreen(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioGreen(IN void* handle, IN const unsigned int nValue); + * @brief 设置白平衡 绿 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的白平衡 绿 + * @return 成功,返回MV_OK,并且相机白平衡 绿将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioGreen(IN void* handle, IN const unsigned int nValue); + * @brief Set white balance green + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] White balance green to set + * @return Success, return MV_OK, and the camera white balance green will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioGreen(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioBlue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取白平衡 蓝 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机白平衡 蓝结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioBlue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get white balance blue + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of white balance blue + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBalanceRatioBlue(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioBlue(IN void* handle, IN const unsigned int nValue); + * @brief 设置白平衡 蓝 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的白平衡 蓝 + * @return 成功,返回MV_OK,并且相机白平衡 蓝将会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioBlue(IN void* handle, IN const unsigned int nValue); + * @brief Set white balance blue + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] White balance blue to set + * @return Success, return MV_OK, and the camera white balance blue will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBalanceRatioBlue(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetFrameSpecInfoAbility(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取水印信息内包含的信息类型 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机水印信息内包含的信息类型结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetFrameSpecInfoAbility(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get information type included by frame stamp + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of information type included by frame stamp + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetFrameSpecInfoAbility(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetFrameSpecInfoAbility(IN void* handle, IN const unsigned int nValue); + * @brief 设置水印信息内包含的信息类型 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的水印信息内包含的信息类型 + * @return 成功,返回MV_OK,并且相机水印信息内包含的信息类型会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetFrameSpecInfoAbility(IN void* handle, IN const unsigned int nValue); + * @brief Set information type included by frame stamp + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Information type included by frame stamp to set + * @return Success, return MV_OK, and the camera information type included by frame stamp will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetFrameSpecInfoAbility(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceUserID(IN void* handle, IN OUT MVCC_STRINGVALUE* pstValue); + * @brief 获取设备自定义名字 + * @param void* handle [IN] 相机句柄 + * MVCC_STRINGVALUE* pstValue [IN OUT] 返回给调用者有关相机名字结构体指针 + * @return 成功,返回MV_OK,并且获取到相机的自定义名字,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceUserID(IN void* handle, IN OUT MVCC_STRINGVALUE* pstValue); + * @brief Get device user defined name + * @param void* handle [IN] Handle + * MVCC_STRINGVALUE* pstValue [IN OUT] Structure pointer of device name + * @return Success, return MV_OK, and get the camera user defined name. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetDeviceUserID(IN void* handle, IN OUT MVCC_STRINGVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetDeviceUserID(IN void* handle, IN const char* chValue); + * @brief 设置设备自定义名字 + * @param void* handle [IN] 相机句柄 + * IN const char* chValue [IN] 设备名字 + * @return 成功,返回MV_OK,并且设置设备自定义名字,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetDeviceUserID(IN void* handle, IN const char* chValue); + * @brief Set device user defined name + * @param void* handle [IN] Handle + * IN const char* chValue [IN] Device name + * @return Success, return MV_OK, and set the camera user defined name. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetDeviceUserID(IN void* handle, IN const char* chValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBurstFrameCount(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取一次触发的帧数 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机一次触发的帧数结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetBurstFrameCount(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get frame number trigger by once + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of frame number trigger by once + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetBurstFrameCount(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBurstFrameCount(IN void* handle, IN const unsigned int nValue); + * @brief 设置一次触发的帧数 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的一次触发的帧数 + * @return 成功,返回MV_OK,并且相机一次触发的帧数会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetBurstFrameCount(IN void* handle, IN const unsigned int nValue); + * @brief Set frame number trigger by once + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Frame number trigger by once to set + * @return Success, return MV_OK, and the camera frame number trigger by once will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetBurstFrameCount(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionLineRate(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取行频 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机行频结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionLineRate(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get line rate + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of line rate + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetAcquisitionLineRate(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionLineRate(IN void* handle, IN const unsigned int nValue); + * @brief 设置行频 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的行频 + * @return 成功,返回MV_OK,并且相机行频会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionLineRate(IN void* handle, IN const unsigned int nValue); + * @brief Set line rate + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Line rate to set + * @return Success, return MV_OK, and the camera line rate will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetAcquisitionLineRate(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHeartBeatTimeout(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取心跳信息 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机心跳信息结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_GetHeartBeatTimeout(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get heartbeat information + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of heartbeat information + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetHeartBeatTimeout(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHeartBeatTimeout(IN void* handle, IN const unsigned int nValue); + * @brief 设置心跳信息 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的心跳信息 + * @return 成功,返回MV_OK,并且相机心跳信息会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_CC_SetHeartBeatTimeout(IN void* handle, IN const unsigned int nValue); + * @brief Set heartbeat information + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Heartbeat information to set + * @return Success, return MV_OK, and the camera heartbeat information will change to the corresponding value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetHeartBeatTimeout(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPSPacketSize(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取网络包大小 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机网络包大小结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPSPacketSize(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get network packet size + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of network packet size + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPSPacketSize(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPSPacketSize(IN void* handle, IN const unsigned int nValue); + * @brief 设置网络包大小 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的网络包大小 + * @return 成功,返回MV_OK,并且相机网络包大小会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPSPacketSize(IN void* handle, IN const unsigned int nValue); + * @brief Set network packet size + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Packet size to set + * @return Success, return MV_OK, and change packet size to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPSPacketSize(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPD(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief 获取网络包发送间隔 + * @param void* handle [IN] 相机句柄 + * @param MVCC_INTVALUE* pstValue [IN][OUT] 返回给调用者有关相机网络包发送间隔结构体指针 + * @return 成功,返回MV_OK,失败,返回错误码 + * + * 可参照接口MV_CC_GetWidth + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPD(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + * @brief Get network packet sending delay + * @param void* handle [IN] Handle + * @param MVCC_INTVALUE* pstValue [IN][OUT] Structure pointer of network packet sending delay + * @return Success, return MV_OK. 
Failure, return error code + * + * Refer to MV_CC_GetWidth + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCPD(IN void* handle, IN OUT MVCC_INTVALUE* pstValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPD(IN void* handle, IN const unsigned int nValue); + * @brief 设置网络包发送间隔 + * @param void* handle [IN] 相机句柄 + * const unsigned int nValue [IN] 想要设置的网络包发送间隔 + * @return 成功,返回MV_OK,并且相机网络包发送间隔会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPD(IN void* handle, IN const unsigned int nValue); + * @brief Set network packet sending delay + * @param void* handle [IN] Handle + * const unsigned int nValue [IN] Packet delay to set + * @return Success, return MV_OK, and change packet delay to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCPD(IN void* handle, IN const unsigned int nValue); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCDA(IN void* handle, unsigned int* pnIP); + * @brief 获取接收端IP地址,0xa9fe0102 表示 169.254.1.2 + * @param void* handle [IN] 相机句柄 + * @param unsigned int* pnIP [IN][OUT] 返回给调用者接收端IP地址 + * @return 成功,返回MV_OK,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCDA(IN void* handle, unsigned int* pnIP); + * @brief Get receiver IP address, 0xa9fe0102 indicates 169.254.1.2 + * @param void* handle [IN] Handle + * @param unsigned int* pnIP [IN][OUT] Receiver IP address + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCDA(IN void* handle, unsigned int* pnIP); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCDA(IN void* handle, unsigned int nIP); + * @brief 设置接收端IP地址 + * @param void* handle [IN] 相机句柄 + * unsigned int nIP [IN] 想要设置的接收端IP地址 + * @return 成功,返回MV_OK,并且相机接收端IP地址会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCDA(IN void* handle, unsigned int nIP); + * @brief Set receiver IP address + * @param void* handle [IN] Handle + * unsigned int nIP [IN] Receiver IP address to set + * @return Success, return MV_OK, and change receiver IP address to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCDA(IN void* handle, unsigned int nIP); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCSP(IN void* handle, unsigned int* pnPort); + * @brief 获取发送端的端口号 + * @param void* handle [IN] 相机句柄 + * @param unsigned int* pnPort [IN][OUT] 返回给调用者发送端的端口号 + * @return 成功,返回MV_OK,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCSP(IN void* handle, unsigned int* pnPort); + * @brief Get transmitter port number + * @param void* handle [IN] Handle + * @param unsigned int* pnPort [IN][OUT] Transmitter port number + * @return Success, return MV_OK. 
Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_GetGevSCSP(IN void* handle, unsigned int* pnPort); + +/************************************************************************ + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCSP(IN void* handle, unsigned int nPort); + * @brief 设置发送端的端口号 + * @param void* handle [IN] 相机句柄 + * unsigned int nPort [IN] 想要设置的发送端的端口号 + * @return 成功,返回MV_OK,并且相机发送端的端口号会更改为相应值,失败,返回错误码 + + * @fn MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCSP(IN void* handle, unsigned int nPort); + * @brief Set transmitter port number + * @param void* handle [IN] Handle + * unsigned int nPort [IN] Transmitter port number to set + * @return Success, return MV_OK, and change transmitter port number to setting value. Failure, return error code + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_GIGE_SetGevSCSP(IN void* handle, unsigned int nPort); + +/********************************************************************//** + * @~chinese + * @brief 设置设备波特率 + * @param handle [IN] 设备句柄 + * @param nBaudrate [IN] 设置的波特率值,数值参考CameraParams.h中宏定义,如#define MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks (该接口已弃用,建议改用 MV_CAML_SetDeviceBaudrate接口) + + * @~english + * @brief Set device baudrate using one of the CL_BAUDRATE_XXXX value + * @param handle [IN] Device handle + * @param nBaudrate [IN] baud rate to set. Refer to the 'CameraParams.h' for parameter definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return MV_OK. 
Failure, return error code + * @remarks (This interface is abandoned, it is recommended to use the MV_CAML_SetDeviceBaudrate) +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_SetDeviceBauderate(IN void* handle, unsigned int nBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备波特率 + * @param handle [IN] 设备句柄 + * @param pnCurrentBaudrate [OUT] 波特率信息指针,数值参考CameraParams.h中宏定义,如#define MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks (该接口已弃用,建议改用 MV_CAML_GetDeviceBaudrate接口) + + * @~english + * @brief Returns the current device baudrate, using one of the CL_BAUDRATE_XXXX value + * @param handle [IN] Device handle + * @param pnCurrentBaudrate [OUT] Return pointer of baud rate to user. Refer to the 'CameraParams.h' for parameter definitions, for example, #define MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return MV_OK. Failure, return error code + * @remarks (This interface is abandoned, it is recommended to use the MV_CAML_GetDeviceBaudrate) +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetDeviceBauderate(IN void* handle,unsigned int* pnCurrentBaudrate); + +/********************************************************************//** + * @~chinese + * @brief 获取设备与主机间连接支持的波特率 + * @param handle [IN] 设备句柄 + * @param pnBaudrateAblity [OUT] 支持的波特率信息的指针。所支持波特率的或运算结果,单个数值参考CameraParams.h中宏定义,如MV_CAML_BAUDRATE_9600 0x00000001 + * @return 成功,返回MV_OK,失败,返回错误码 + * @remarks (该接口已弃用,建议改用 MV_CAML_GetSupportBaudrates接口) + + * @~english + * @brief Returns supported baudrates of the combined device and host interface + * @param handle [IN] Device handle + * @param pnBaudrateAblity [OUT] Return pointer of the supported baudrates to user. 'OR' operation results of the supported baudrates. 
Refer to the 'CameraParams.h' for single value definitions, for example, MV_CAML_BAUDRATE_9600 0x00000001 + * @return Success, return MV_OK. Failure, return error code + * @remarks (This interface is abandoned, it is recommended to use the MV_CAML_GetSupportBaudrates) +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CAML_GetSupportBauderates(IN void* handle,unsigned int* pnBaudrateAblity); + +/********************************************************************//** +* @~chinese +* @brief 注册流异常消息回调,在打开设备之后调用(只支持U3V相机,不支持GenTL设备) +* @param handle [IN] 设备句柄 +* @param cbException [IN] 异常回调函数指针 +* @param pUser [IN] 用户自定义变量 +* @return 成功,返回MV_OK,失败,返回错误码 + +* @~english +* @brief Register exception stream callBack, call after open device (only support U3V Camera, don't support GenTL Device) +* @param handle [IN] Device handle +* @param cbException [IN] Exception callback function pointer +* @param pUser [IN] User defined variable +* @return Success, return MV_OK. Failure, return error code +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_USB_RegisterStreamExceptionCallBack(IN void* handle, IN void(__stdcall* cbException)(MV_CC_STREAM_EXCEPTION_TYPE enExceptionType, void* pUser), IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 保存图片,支持Bmp和Jpeg. + * @param handle [IN] 设备句柄 + * @param pstSaveParam [IN][OUT] 保存图片参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 通过将接口可以将从设备采集到的原始图像数据转换成JPEG或者BMP等格式并存放在指定内存中,然后用户可以将转换之后的数据直接保存成图片文件。 + 该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用MV_CC_GetOneFrameTimeout或者MV_CC_RegisterImageCallBackEx设置回调函数,获取一帧图像数据,然后再通过该接口转换格式。 + MV_CC_SaveImageEx2比MV_CC_SaveImageEx增加参数handle,为了保证与其他接口的统一。 + + * @~english + * @brief Save image, support Bmp and Jpeg. 
+ * @param handle [IN] Device handle + * @param pstSaveParam [IN][OUT] Save image parameters structure + * @return Success, return MV_OK. Failure, return error code + * @remarks Once there is image data, you can call this API to convert the data. + You can also call MV_CC_GetOneFrameTimeout or MV_CC_RegisterImageCallBackEx or MV_CC_GetImageBuffer to get one image frame and set the callback function, and then call this API to convert the format. + Comparing with the API MV_CC_SaveImageEx, this API added the parameter handle to ensure the unity with other API. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageEx2(IN void* handle, MV_SAVE_IMAGE_PARAM_EX* pstSaveParam); + + +/********************************************************************//** + * @~chinese + * @brief 保存图像到文件 + * @param handle [IN] 设备句柄 + * @param pstSaveFileParam [IN][OUT] 保存图片文件参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 该接口支持BMP/JPEG/PNG/TIFF。 + + * @~english + * @brief Save the image file. + * @param handle [IN] Device handle + * @param pstSaveFileParam [IN][OUT] Save the image file parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API support BMP/JPEG/PNG/TIFF. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SaveImageToFile(IN void* handle, MV_SAVE_IMG_TO_FILE_PARAM* pstSaveFileParam); + + +/********************************************************************//** + * @~chinese + * @brief 像素格式转换 + * @param handle [IN] 设备句柄 + * @param pstCvtParam [IN][OUT] 像素格式转换参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 通过将接口可以将从设备采集到的原始图像数据转换成用户所需的像素格式并存放在指定内存中。 + 该接口调用无接口顺序要求,有图像源数据就可以进行转换,可以先调用MV_CC_GetOneFrameTimeout或者MV_CC_RegisterImageCallBackEx设置回调函数, + 获取一帧图像数据,然后再通过该接口转换格式。如果设备当前采集图像是JPEG压缩的格式,则不支持调用该接口进行转换。 + + * @~english + * @brief Pixel format conversion + * @param handle [IN] Device handle + * @param pstCvtParam [IN][OUT] Convert Pixel Type parameter structure + * @return Success, return MV_OK. Failure, return error code + * @remarks This API is used to transform the collected original data to pixel format and save to specified memory. + There is no order requirement to call this API, the transformation will execute when there is image data. + First call MV_CC_GetOneFrameTimeout or MV_CC_RegisterImageCallBackEx to set callback function, and get a frame of image data, + then call this API to transform the format. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_ConvertPixelType(IN void* handle, IN OUT MV_CC_PIXEL_CONVERT_PARAM* pstCvtParam); + +/********************************************************************//** +* @~chinese +* @brief 设置SDK日志路径 +* @param strSDKLogPath [IN] SDK日志路径 +* @return 成功,返回MV_OK;错误,返回错误码 +* @remarks 设置路径之后,可以指定路径存放日志, V2.4.1版本新增日志服务,开启服务之后该接口无效,默认日志服务为开启状态。 + +* @~english +* @brief Set SDK log path +* @param strSDKLogPath [IN] SDK log path +* @return Access, return true. 
Not access, return false +* @remarks For version V2.4.1, added log service, this API is invalid when the service is enabled.And The logging service is enabled by default + This API is used to set the log file storing path. +************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SetSDKLogPath(IN const char * strSDKLogPath); + +/********************************************************************//** + * @~chinese + * @brief 显示一帧图像 + * @param handle [IN] 设备句柄 + * @param pstDisplayInfo [IN] 图像信息 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 与设备类型无关,渲染模式为D3D时,支持的最大分辨率为16384 * 163840 + + * @~english + * @brief Display one frame image + * @param handle [IN] Device handle + * @param pstDisplayInfo [IN] Frame Info + * @return Success, return MV_OK. Failure, return error code + * @remarks Not related to device type,When the render mode is D3D, the maximum resolution supported is 16384 * 163840 + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_DisplayOneFrame(IN void* handle, IN MV_DISPLAY_FRAME_INFO* pstDisplayInfo); + +/********************************************************************//** + * @~chinese + * @brief 获取支持的传输层 + * @return 支持的传输层编号 + * @remarks 返回是设备的传输层,比如( MV_GIGE_DEVICE | MV_USB_DEVICE |MV_GENTL_XOF_DEVICE 等),不包含采集卡的类型 + + * @~english + * @brief Get supported Transport Layer + * @return Supported Transport Layer number + * @remarks The return is the transport layer of the device, such as (MV_GIGE-DEVICE | MV_USBDEVICE | MV_GENTL-XOF-DEVICE, etc.), excluding the type of Frame grabber + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_EnumerateTls(); + +/********************************************************************//** + * @~chinese + * @brief 创建设备句柄,不生成日志 + * @param handle [IN][OUT] 设备句柄 + * @param pstDevInfo [IN] 设备信息结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 
根据输入的设备信息,创建库内部必须的资源和初始化内部模块 + 通过该接口创建句柄,调用SDK接口,不会默认生成SDK日志文件,如果需要生成日志文件可以通过MV_CC_CreateHandle创建句柄,日志文件自动生成 + + * @~english + * @brief Create Device Handle without log + * @param handle [IN][OUT] Device handle + * @param pstDevInfo [IN] Device Information Structure + * @return Success, return MV_OK. Failure, return error code + * @remarks Create required resources within library and initialize internal module according to input device information. + Create handle and call SDK interface through this interface, and SDK log file will not be created. To create logs, + create handle through MV_CC_CreateHandle, and log files will be automatically generated. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_CreateHandleWithoutLog(IN OUT void ** handle, IN const MV_CC_DEVICE_INFO* pstDevInfo); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调,RGB + * @param handle [IN] 设备句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 通过该接口可以设置图像数据回调函数,在MV_CC_CreateHandle之后即可调用。图像数据采集有两种方式,两种方式不能复用: + 方式一:调用MV_CC_RegisterImageCallBackForRGB设置RGB24格式图像数据回调函数,然后调用MV_CC_StartGrabbing开始采集,采集的图像数据在设置的回调函数中返回。 + 方式二:调用MV_CC_StartGrabbing开始采集,然后在应用层循环调用MV_CC_GetImageForRGB获取RGB24格式的帧数据, + 获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。 + 该接口不支持MV_CAMERALINK_DEVICE 类型的设备。 + + * @~english + * @brief register image data callback, RGB + * @param handle [IN] Device handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return MV_OK. 
Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle.There are two image acquisition modes, the two modes cannot be reused: + Mode 1: Call MV_CC_RegisterImageCallBackForRGB to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function. + Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForRGB repeatedly in application layer to get frame data with RGB24 format. + When getting frame data, the upper application program should control the frequency of calling this API according to frame rate. + This interface does not support devices of type MV_CAMERALINK_DEVICE + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackForRGB(IN void* handle, + IN void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 注册图像数据回调,BGR + * @param handle [IN] 设备句柄 + * @param cbOutput [IN] 回调函数指针 + * @param pUser [IN] 用户自定义变量 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 通过该接口可以设置图像数据回调函数,在MV_CC_CreateHandle之后即可调用。图像数据采集有两种方式,两种方式不能复用: + 方式一:调用MV_CC_RegisterImageCallBackForBGR设置BGR24图像数据回调函数,然后调用MV_CC_StartGrabbing开始采集,采集的图像数据在设置的回调函数中返回。 + 方式二:调用MV_CC_StartGrabbing开始采集,然后在应用层循环调用MV_CC_GetImageForBGR获取BGR24格式的帧数据, + 获取帧数据时上层应用程序需要根据帧率控制好调用该接口的频率。 + 该接口不支持MV_CAMERALINK_DEVICE 类型的设备。 + + * @~english + * @brief register image data callback, BGR + * @param handle [IN] Device handle + * @param cbOutput [IN] Callback function pointer + * @param pUser [IN] User defined variable + * @return Success, return MV_OK. 
Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle.There are two image acquisition modes, the two modes cannot be reused: + Mode 1: Call MV_CC_RegisterImageCallBackForBGR to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function. + Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForBGR repeatedly in application layer to get frame data with BGR24 format. + When getting frame data,the upper application program should control the frequency of calling this API according to frame rate. + This interface does not support devices of type MV_CAMERALINK_DEVICE + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_RegisterImageCallBackForBGR(IN void* handle, + IN void(__stdcall* cbOutput)(unsigned char * pData, MV_FRAME_OUT_INFO_EX* pFrameInfo, void* pUser), IN void* pUser); + +/********************************************************************//** + * @~chinese + * @brief 获取一帧RGB数据,此函数为查询式获取,每次调用查询内部 + 缓存有无数据,有数据则获取数据,无数据返回错误码 + * @param handle [IN] 设备句柄 + * @param pData [IN][OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pstFrameInfo [IN][OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 每次调用该接口,将查询内部缓存是否有数据,如果有数据则转换成RGB24格式返回,如果没有数据则返回错误码。 + 因为图像转换成RGB24格式有耗时,所以当数据帧率过高时该接口可能会导致丢帧。调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。 + 该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。 + 该接口不支持MV_CAMERALINK_DEVICE设备。 + + * @~english + * @brief Get one frame of RGB data, this function is using query to get data + query whether the internal cache has data, get data if there has, return error code if no data + * @param handle [IN] Device handle + * @param pData [IN][OUT] Image data receiving buffer + * @param nDataSize [IN] Buffer size + * 
@param pstFrameInfo [IN][OUT] Image information structure + * @param nMsec [IN] Waiting timeout + * @return Success, return MV_OK. Failure, return error code + * @remarks Each time the API is called, the internal cache is checked for data. If there is data, it will be transformed as RGB24 format for return, if there is no data, return error code. + As time-consuming exists when transform the image to RGB24 format,this API may cause frame loss when the data frame rate is too high. + Before calling this API to get image data frame, call MV_CC_StartGrabbing to start image acquisition. + This API can get frame data actively, the upper layer program should control the frequency of calling this API according to the frame rate. + This API is not supported by MV_CAMERALINK_DEVICE device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageForRGB(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pstFrameInfo, IN int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 获取一帧BGR数据,此函数为查询式获取,每次调用查询内部 + 缓存有无数据,有数据则获取数据,无数据返回错误码 + * @param handle [IN] 设备句柄 + * @param pData [IN][OUT] 图像数据接收指针 + * @param nDataSize [IN] 接收缓存大小 + * @param pstFrameInfo [IN][OUT] 图像信息结构体 + * @param nMsec [IN] 等待超时时间 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 每次调用该接口,将查询内部缓存是否有数据,如果有数据则转换成BGR24格式返回,如果没有数据则返回错误码。 + 因为图像转换成BGR24格式有耗时,所以当数据帧率过高时该接口可能会导致丢帧.调用该接口获取图像数据帧之前需要先调用MV_CC_StartGrabbing启动图像采集。 + 该接口为主动式获取帧数据,上层应用程序需要根据帧率,控制好调用该接口的频率。 + 该接口不支持CameraLink设备。 + + * @~english + * @brief Get one frame of BGR data, this function is using query to get data + query whether the internal cache has data, get data if there has, return error code if no data + * @param handle [IN] Device handle + * @param pData [IN][OUT] Image data receiving buffer + * @param nDataSize [IN] Buffer size + * @param pstFrameInfo [IN][OUT] Image information 
structure + * @param nMsec [IN] Waiting timeout + * @return Success, return MV_OK. Failure, return error code + * @remarks Before calling this API to set image data callback function, you should call this API MV_CC_CreateHandle. + There are two image acquisition modes, the two modes cannot be reused: + Mode 1: Call MV_CC_RegisterImageCallBackForBGR to set RGB24 format image data callback function, and then call MV_CC_StartGrabbing to start acquisition, the collected image data will be returned in the configured callback function. + Mode 2: Call MV_CC_StartGrabbing to start acquisition, and the call MV_CC_GetImageForBGR repeatedly in application layer to get frame data with BGR24 format. + When getting frame data, the upper application program should control the frequency of calling this API according to frame rate. + This API is not supported by CameraLink device. + ***********************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_GetImageForBGR(IN void* handle, IN OUT unsigned char * pData , IN unsigned int nDataSize, IN OUT MV_FRAME_OUT_INFO_EX* pstFrameInfo, IN int nMsec); + +/********************************************************************//** + * @~chinese + * @brief 打开获取或设置相机参数的GUI界面 + * @param handle [IN] 设备句柄 + * @return 成功,返回MV_OK,失败,返回错误码。 + * @remarks 通过MV_CC_OpenDevice连接设备后,可以通过该接口获取或设置设备参数。 + * @remarks 限制:在同一线程中多相机同时调用该接口,只能打开当前一个GUI界面,需要关闭当前相机GUI界面后,才可打开另一个相机的GUI界面(后续版本优化) + 该接口仅支持windows平台 + + * @~english + * @brief Open the GUI interface for getting or setting camera parameters + * @param handle [IN] Device handle + * @return Success, return MV_OK, Failure, return error code. + * @remarks After connecting to device through MV_CC_OpenDevice, use this interface to get or set device params. + * @remarks Limit: calling this interface multiple times in the same thread can only open one GUI interface. 
+ You need to wait until the previous GUI interface is closed before opening the next GUI interface.(Subsequent version optimization) + This interface only supports windows platform. + ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_OpenParamsGUI(IN void* handle); + +/********************************************************************//** + * @~chinese + * @brief 保存3D点云数据,支持PLY、CSV和OBJ三种格式 + * @param handle [IN] 设备句柄 + * @param pstPointDataParam [IN][OUT] 保存点云数据参数结构体 + * @return 成功,返回MV_OK;错误,返回错误码 + * @remarks 3D数据格式保存成3D文件格式,支持PLY/CSV/OBJ, + 目前支持PixelType_Gvsp_Coord3D_ABC32、PixelType_Gvsp_Coord3D_ABC32f、PixelType_Gvsp_Coord3D_AB32、PixelType_Gvsp_Coord3D_AB32f、PixelType_Gvsp_Coord3D_AC32、PixelType_Gvsp_Coord3D_AC32f, + 暂不支持其他3D格式。 + + * @~english + * @brief Save 3D point data, support PLY、CSV and OBJ + * @param handle [IN] Device handle + * @param pstPointDataParam [IN][OUT] Save 3D point data parameters structure + * @return Success, return MV_OK. Failure, return error code + * @remarks Save the 3D data format to 3D file format,support PLY、CSV and OBJ, + only support PixelType_Gvsp_Coord3D_ABC32、PixelType_Gvsp_Coord3D_ABC32f、PixelType_Gvsp_Coord3D_AB32、PixelType_Gvsp_Coord3D_AB32f、PixelType_Gvsp_Coord3D_AC32、PixelType_Gvsp_Coord3D_AC32f + Other 3D format is not supported now. 
+ ************************************************************************/ +MV_CAMCTRL_API int __stdcall MV_CC_SavePointCloudData(IN void* handle, IN OUT MV_SAVE_POINT_CLOUD_PARAM* pstPointDataParam); + + +#ifdef __cplusplus +} +#endif + +#endif //_MV_OBSOLETE_INTERFACES_H_ diff --git a/image_capture/third_party/mvs/Includes/ObsoleteCamParams.h b/image_capture/third_party/mvs/Includes/ObsoleteCamParams.h new file mode 100644 index 0000000..84b9b6d --- /dev/null +++ b/image_capture/third_party/mvs/Includes/ObsoleteCamParams.h @@ -0,0 +1,655 @@ + +#ifndef _MV_OBSOLETE_CAM_PARAMS_H_ +#define _MV_OBSOLETE_CAM_PARAMS_H_ + +#include "PixelType.h" + +/// \~chinese 输出帧的信息 \~english Output Frame Information +typedef struct _MV_FRAME_OUT_INFO_ +{ + unsigned short nWidth; ///< [OUT] \~chinese 图像宽 \~english Image Width + unsigned short nHeight; ///< [OUT] \~chinese 图像高 \~english Image Height + enum MvGvspPixelType enPixelType; ///< [OUT] \~chinese 像素格式 \~english Pixel Type + + unsigned int nFrameNum; ///< [OUT] \~chinese 帧号 \~english Frame Number + unsigned int nDevTimeStampHigh; ///< [OUT] \~chinese 时间戳高32位 \~english Timestamp high 32 bits + unsigned int nDevTimeStampLow; ///< [OUT] \~chinese 时间戳低32位 \~english Timestamp low 32 bits + unsigned int nReserved0; ///< [OUT] \~chinese 保留,8字节对齐 \~english Reserved, 8-byte aligned + int64_t nHostTimeStamp; ///< [OUT] \~chinese 主机生成的时间戳 \~english Host-generated timestamp + + unsigned int nFrameLen; + + unsigned int nLostPacket; // 本帧丢包数 + unsigned int nReserved[2]; +}MV_FRAME_OUT_INFO; + +/// \~chinese 保存图片参数 \~english Save image type +typedef struct _MV_SAVE_IMAGE_PARAM_T_ +{ + unsigned char* pData; ///< [IN] \~chinese 输入数据缓存 \~english Input Data Buffer + unsigned int nDataLen; ///< [IN] \~chinese 输入数据大小 \~english Input Data Size + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 输入像素格式 \~english Input Data Pixel Format + unsigned short nWidth; ///< [IN] \~chinese 图像宽 \~english Image Width + unsigned short nHeight; ///< [IN] 
\~chinese 图像高 \~english Image Height + + unsigned char* pImageBuffer; ///< [OUT] \~chinese 输出图片缓存 \~english Output Image Buffer + unsigned int nImageLen; ///< [OUT] \~chinese 输出图片大小 \~english Output Image Size + unsigned int nBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Output buffer size provided + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] \~chinese 输出图片格式 \~english Output Image Format + +}MV_SAVE_IMAGE_PARAM; + +typedef struct _MV_IMAGE_BASIC_INFO_ +{ + unsigned short nWidthValue; + unsigned short nWidthMin; + unsigned int nWidthMax; + unsigned int nWidthInc; + + unsigned int nHeightValue; + unsigned int nHeightMin; + unsigned int nHeightMax; + unsigned int nHeightInc; + + float fFrameRateValue; + float fFrameRateMin; + float fFrameRateMax; + + unsigned int enPixelType; ///< [OUT] \~chinese 当前的像素格式 \~english Current pixel format + unsigned int nSupportedPixelFmtNum; ///< [OUT] \~chinese 支持的像素格式种类 \~english Support pixel format + unsigned int enPixelList[MV_MAX_XML_SYMBOLIC_NUM]; + unsigned int nReserved[8]; + +}MV_IMAGE_BASIC_INFO; + + +/// \~chinese 噪声特性类型 \~english Noise feature type +typedef enum _MV_CC_BAYER_NOISE_FEATURE_TYPE +{ + MV_CC_BAYER_NOISE_FEATURE_TYPE_INVALID = 0, ///< \~chinese 无效值 \~english Invalid + MV_CC_BAYER_NOISE_FEATURE_TYPE_PROFILE = 1, ///< \~chinese 噪声曲线 \~english Noise curve + MV_CC_BAYER_NOISE_FEATURE_TYPE_LEVEL = 2, ///< \~chinese 噪声水平 \~english Noise level + MV_CC_BAYER_NOISE_FEATURE_TYPE_DEFAULT = 1, ///< \~chinese 默认值 \~english Default + +}MV_CC_BAYER_NOISE_FEATURE_TYPE; + +/// \~chinese Bayer格式降噪特性信息 \~english Denoise profile info +typedef struct _MV_CC_BAYER_NOISE_PROFILE_INFO_T_ +{ + unsigned int nVersion; ///< \~chinese 版本 \~english version + MV_CC_BAYER_NOISE_FEATURE_TYPE enNoiseFeatureType; ///< \~chinese 噪声特性类型 \~english noise feature type + enum MvGvspPixelType enPixelType; ///< \~chinese 图像格式 \~english image format + int nNoiseLevel; ///< \~chinese 平均噪声水平 \~english noise level + unsigned int nCurvePointNum; 
///< \~chinese 曲线点数 \~english curve point number + int* nNoiseCurve; ///< \~chinese 噪声曲线 \~english noise curve + int* nLumCurve; ///< \~chinese 亮度曲线 \~english luminance curve + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_BAYER_NOISE_PROFILE_INFO; + +/// \~chinese Bayer格式噪声估计参数 \~english Bayer noise estimate param +typedef struct _MV_CC_BAYER_NOISE_ESTIMATE_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽(大于等于8) \~english Width + unsigned int nHeight; ///< [IN] \~chinese 图像高(大于等于8) \~english Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据大小 \~english Input data size + + unsigned int nNoiseThreshold; ///< [IN] \~chinese 噪声阈值(0-4095) \~english Noise Threshold + + unsigned char* pCurveBuf; ///< [IN] \~chinese 用于存储噪声曲线和亮度曲线(需要外部分配,缓存大小:4096 * sizeof(int) * 2) \~english Buffer used to store noise and brightness curves, size:4096 * sizeof(int) * 2) + MV_CC_BAYER_NOISE_PROFILE_INFO stNoiseProfile; ///< [OUT] \~chinese 降噪特性信息 \~english Denoise profile + + unsigned int nThreadNum; ///< [IN] \~chinese 线程数量,0表示算法库根据硬件自适应;1表示单线程(默认);大于1表示线程数目 \~english Thread number, 0 means that the library is adaptive to the hardware, 1 means single thread(Default value), Greater than 1 indicates the number of threads + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_BAYER_NOISE_ESTIMATE_PARAM; + +/// \~chinese Bayer格式空域降噪参数 \~english Bayer spatial Denoise param +typedef struct _MV_CC_BAYER_SPATIAL_DENOISE_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽(大于等于8) \~english Width + unsigned int nHeight; ///< [IN] \~chinese 图像高(大于等于8) \~english Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< 
[IN] \~chinese 输入数据大小 \~english Input data size + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出降噪后的数据 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出降噪后的数据长度 \~english Output data length + + MV_CC_BAYER_NOISE_PROFILE_INFO stNoiseProfile; ///< [IN] \~chinese 降噪特性信息(来源于噪声估计) \~english Denoise profile + unsigned int nDenoiseStrength; ///< [IN] \~chinese 降噪强度(0-100) \~english nDenoise Strength + unsigned int nSharpenStrength; ///< [IN] \~chinese 锐化强度(0-32) \~english Sharpen Strength + unsigned int nNoiseCorrect; ///< [IN] \~chinese 噪声校正系数(0-1280) \~english Noise Correct + + unsigned int nThreadNum; ///< [IN] \~chinese 线程数量,0表示算法库根据硬件自适应;1表示单线程(默认);大于1表示线程数目 \~english Thread number, 0 means that the library is adaptive to the hardware, 1 means single thread(Default value), Greater than 1 indicates the number of threads + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_BAYER_SPATIAL_DENOISE_PARAM; + +/// \~chinese CLUT参数 \~english CLUT param +typedef struct _MV_CC_CLUT_PARAM_T_ +{ + bool bCLUTEnable; ///< [IN] \~chinese 是否启用CLUT \~english CLUT enable + unsigned int nCLUTScale; ///< [IN] \~chinese 量化系数(2的整数幂,最大65536) \~english Quantitative scale(Integer power of 2, <= 65536) + unsigned int nCLUTSize; ///< [IN] \~chinese CLUT大小,目前仅支持17 \~english CLUT size, currently only supports 17 + unsigned char* pCLUTBuf; ///< [IN] \~chinese 量化CLUT表 \~english CLUT buffer + unsigned int nCLUTBufLen; ///< [IN] \~chinese 量化CLUT缓存大小(nCLUTSize*nCLUTSize*nCLUTSize*sizeof(int)*3) \~english CLUT buffer length(nCLUTSize*nCLUTSize*nCLUTSize*sizeof(int)*3) + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_CLUT_PARAM; + +/// \~chinese 锐化结构体 \~english Sharpen structure +typedef struct _MV_CC_SHARPEN_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度(最小8) \~english Image Width + unsigned int nHeight; ///< [IN] 
\~chinese 图像高度(最小8) \~english Image Height + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + + unsigned int nSharpenAmount; ///< [IN] \~chinese 锐度调节强度,[0,500] \~english Sharpen amount,[0,500] // [nSharpenAmount 作废, 使用 nSharpenPosAmount & nSharpenNegAmount 替代 ] + unsigned int nSharpenRadius; ///< [IN] \~chinese 锐度调节半径(半径越大,耗时越长),[1,21] \~english Sharpen radius(The larger the radius, the longer it takes),[1,21] + unsigned int nSharpenThreshold; ///< [IN] \~chinese 锐度调节阈值,[0,255] \~english Sharpen threshold,[0,255] + + + unsigned int nSharpenPosAmount; // [IN] 锐度调节正向强度,范围:[0, 500] + unsigned int nSharpenNegAmount; // [IN] 锐度调节负向强度,范围:[0, 500] + + unsigned int nRes[6]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_SHARPEN_PARAM; + +/// \~chinese 色彩校正结构体 \~english Color correct structure +typedef struct _MV_CC_COLOR_CORRECT_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度 \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度 \~english Image Height + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + + unsigned int nImageBit; ///< 
[IN] \~chinese 有效图像位数(8,10,12,16) \~english Image bit(8 or 10 or 12 or 16) + MV_CC_GAMMA_PARAM stGammaParam; ///< [IN] \~chinese Gamma信息 \~english Gamma info + MV_CC_CCM_PARAM_EX stCCMParam; ///< [IN] \~chinese CCM信息 \~english CCM info + MV_CC_CLUT_PARAM stCLUTParam; ///< [IN] \~chinese CLUT信息 \~english CLUT info + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_COLOR_CORRECT_PARAM; + +/// \~chinese 矩形ROI结构体 \~english Rect ROI structure +typedef struct _MV_CC_RECT_I_ +{ + unsigned int nX; ///< \~chinese 矩形左上角X轴坐标 \~english X Position + unsigned int nY; ///< \~chinese 矩形左上角Y轴坐标 \~english Y Position + unsigned int nWidth; ///< \~chinese 矩形宽度 \~english Rect Width + unsigned int nHeight; ///< \~chinese 矩形高度 \~english Rect Height + +}MV_CC_RECT_I; + +/// \~chinese 噪声估计结构体 \~english Noise estimate structure +typedef struct _MV_CC_NOISE_ESTIMATE_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度(最小8) \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度(最小8) \~english Image Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + + MV_CC_RECT_I* pstROIRect; ///< [IN] \~chinese 图像ROI \~english Image ROI + unsigned int nROINum; ///< [IN] \~chinese ROI个数 \~english ROI number + + ///< \~chinese Bayer域噪声估计参数,Mono8/RGB域无效 \~english Bayer Noise estimate param,Mono8/RGB formats are invalid + unsigned int nNoiseThreshold; ///< [IN] \~chinese 噪声阈值[0,4095] \~english Noise threshold[0,4095] + ///< \~chinese 建议值:8bit,0xE0;10bit,0x380;12bit,0xE00 \~english Suggestive value:8bit,0xE0;10bit,0x380;12bit,0xE00 + + unsigned char* pNoiseProfile; ///< [OUT] \~chinese 输出噪声特性 \~english Output Noise Profile + unsigned int nNoiseProfileSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nNoiseProfileLen; ///< [OUT] 
\~chinese 输出噪声特性长度 \~english Output Noise Profile length + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_NOISE_ESTIMATE_PARAM; + +/// \~chinese 空域降噪结构体 \~english Spatial denoise structure +typedef struct _MV_CC_SPATIAL_DENOISE_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度(最小8) \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度(最小8) \~english Image Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据大小 \~english Input data length + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出降噪后的数据 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出降噪后的数据长度 \~english Output data length + + unsigned char* pNoiseProfile; ///< [IN] \~chinese 输入噪声特性 \~english Input Noise Profile + unsigned int nNoiseProfileLen; ///< [IN] \~chinese 输入噪声特性长度 \~english Input Noise Profile length + + ///< \~chinese Bayer域空域降噪参数,Mono8/RGB域无效 \~english Bayer Spatial denoise param,Mono8/RGB formats are invalid + unsigned int nBayerDenoiseStrength; ///< [IN] \~chinese 降噪强度[0,100] \~english Denoise Strength[0,100] + unsigned int nBayerSharpenStrength; ///< [IN] \~chinese 锐化强度[0,32] \~english Sharpen Strength[0,32] + unsigned int nBayerNoiseCorrect; ///< [IN] \~chinese 噪声校正系数[0,1280] \~english Noise Correct[0,1280] + + ///< \~chinese Mono8/RGB域空域降噪参数,Bayer域无效 \~english Mono8/RGB Spatial denoise param,Bayer formats are invalid + unsigned int nNoiseCorrectLum; ///< [IN] \~chinese 亮度校正系数[1,2000] \~english Noise Correct Lum[1,2000] + unsigned int nNoiseCorrectChrom; ///< [IN] \~chinese 色调校正系数[1,2000] \~english Noise Correct Chrom[1,2000] + unsigned int nStrengthLum; ///< [IN] \~chinese 亮度降噪强度[0,100] \~english Strength Lum[0,100] + unsigned int nStrengthChrom; ///< 
[IN] \~chinese 色调降噪强度[0,100] \~english Strength Chrom[0,100] + unsigned int nStrengthSharpen; ///< [IN] \~chinese 锐化强度[1,1000] \~english Strength Sharpen[1,1000] + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_SPATIAL_DENOISE_PARAM; + +/// \~chinese LSC标定结构体 \~english LSC calib structure +typedef struct _MV_CC_LSC_CALIB_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度[16,65535] \~english Image Width + unsigned int nHeight; ///< [IN] \~chinese 图像高度[16-65535] \~english Image Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned char* pCalibBuf; ///< [OUT] \~chinese 输出标定表缓存 \~english Output calib buffer + unsigned int nCalibBufSize; ///< [IN] \~chinese 提供的标定表缓冲大小(nWidth*nHeight*sizeof(unsigned short)) \~english Provided output buffer size + unsigned int nCalibBufLen; ///< [OUT] \~chinese 输出标定表缓存长度 \~english Output calib buffer length + + unsigned int nSecNumW; ///< [IN] \~chinese 宽度分块数 \~english Width Sec num + unsigned int nSecNumH; ///< [IN] \~chinese 高度分块数 \~english Height Sec num + unsigned int nPadCoef; ///< [IN] \~chinese 边缘填充系数[1,5] \~english Pad Coef[1,5] + unsigned int nCalibMethod; ///< [IN] \~chinese 标定方式(0-中心为基准;1-最亮区域为基准;2-目标亮度为基准) \~english Calib method + unsigned int nTargetGray; ///< [IN] \~chinese 目标亮度(标定方式为2时有效) \~english Target Gray + ///< \~chinese 8位,范围:[0,255] \~english 8bit,range:[0,255] + ///< \~chinese 10位,范围:[0,1023] \~english 10bit,range:[0,1023] + ///< \~chinese 12位,范围:[0,4095] \~english 12bit,range:[0,4095] + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_LSC_CALIB_PARAM; + +/// \~chinese LSC校正结构体 \~english LSC correct structure +typedef struct _MV_CC_LSC_CORRECT_PARAM_T_ +{ + unsigned int nWidth; ///< [IN] \~chinese 图像宽度[16,65535] \~english Image Width + unsigned int 
nHeight; ///< [IN] \~chinese 图像高度[16,65535] \~english Image Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + unsigned char* pSrcBuf; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcBufLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + + unsigned char* pCalibBuf; ///< [IN] \~chinese 输入标定表缓存 \~english Input calib buffer + unsigned int nCalibBufLen; ///< [IN] \~chinese 输入标定表缓存长度 \~english Input calib buffer length + + unsigned int nRes[8]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_LSC_CORRECT_PARAM; + +/// \~chinese 某个节点对应的子节点个数最大值 \~english The maximum number of child nodes corresponding to a node +#define MV_MAX_XML_NODE_NUM_C 128 + +/// \~chinese 节点名称字符串最大长度 \~english The maximum length of node name string +#define MV_MAX_XML_NODE_STRLEN_C 64 + +/// \~chinese 节点String值最大长度 \~english The maximum length of Node String +#define MV_MAX_XML_STRVALUE_STRLEN_C 64 + +/// \~chinese 节点描述字段最大长度 \~english The maximum length of the node description field +#define MV_MAX_XML_DISC_STRLEN_C 512 + +/// \~chinese 最多的单元数 \~english The maximum number of units +#define MV_MAX_XML_ENTRY_NUM 10 + +/// \~chinese 父节点个数上限 \~english The maximum number of parent nodes +#define MV_MAX_XML_PARENTS_NUM 8 + +/// \~chinese 每个已经实现单元的名称长度 \~english The length of the name of each unit that has been implemented +#define MV_MAX_XML_SYMBOLIC_STRLEN_C 64 + +enum MV_XML_Visibility +{ + V_Beginner = 0, ///< Always visible + V_Expert = 1, ///< Visible for experts or Gurus + V_Guru = 2, ///< Visible for Gurus + V_Invisible = 3, ///< Not Visible + V_Undefined = 99 ///< Object is not yet initialized +}; + +/// \~chinese 单个节点基本属性 | en:Single Node Basic Attributes 
+typedef struct _MV_XML_NODE_FEATURE_ +{ + enum MV_XML_InterfaceType enType; ///< \~chinese 节点类型 \~english Node Type + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Is visibility + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述,目前暂不支持 \~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称 \~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名 \~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示 \~english Notice + + unsigned int nReserved[4]; +}MV_XML_NODE_FEATURE; + +/// \~chinese 节点列表 | en:Node List +typedef struct _MV_XML_NODES_LIST_ +{ + unsigned int nNodeNum; ///< \~chinese 节点个数 \~english Node Number + MV_XML_NODE_FEATURE stNodes[MV_MAX_XML_NODE_NUM_C]; +}MV_XML_NODES_LIST; + +typedef struct _MV_XML_FEATURE_Value_ +{ + enum MV_XML_InterfaceType enType; ///< \~chinese 节点类型 \~english Node Type + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述,目前暂不支持 \~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称 \~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名 \~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示 \~english Notice + unsigned int nReserved[4]; +}MV_XML_FEATURE_Value; + +typedef struct _MV_XML_FEATURE_Base_ +{ + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode +}MV_XML_FEATURE_Base; + +typedef struct _MV_XML_FEATURE_Integer_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< 
\~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int64_t nValue; ///< \~chinese 当前值 \~english Current Value + int64_t nMinValue; ///< \~chinese 最小值 \~english Min Value + int64_t nMaxValue; ///< \~chinese 最大值 \~english Max Value + int64_t nIncrement; ///< \~chinese 增量 \~english Increment + + unsigned int nReserved[4]; + +}MV_XML_FEATURE_Integer; + +typedef struct _MV_XML_FEATURE_Boolean_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + bool bValue; ///< \~chinese 当前值 \~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Boolean; + +typedef struct _MV_XML_FEATURE_Command_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Command; + +typedef struct _MV_XML_FEATURE_Float_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + double dfValue; ///< \~chinese 当前值 \~english Current Value + double dfMinValue; ///< \~chinese 最小值 \~english Min Value + double dfMaxValue; ///< \~chinese 最大值 \~english Max Value + double dfIncrement; ///< \~chinese 增量 \~english Increment + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Float; + +typedef struct _MV_XML_FEATURE_String_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + char strValue[MV_MAX_XML_STRVALUE_STRLEN_C]; ///< \~chinese 当前值 \~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_String; + +typedef struct _MV_XML_FEATURE_Register_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int64_t nAddrValue; ///< \~chinese 当前值 \~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Register; + +typedef struct _MV_XML_FEATURE_Category_ +{ + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述 目前暂不支持 \~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称 \~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名 \~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示 \~english Notice + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Category; + +typedef struct _MV_XML_FEATURE_EnumEntry_ +{ + char strName[MV_MAX_XML_NODE_STRLEN_C]; + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 目前暂不支持 \~english NOT SUPPORT NOW + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; + int bIsImplemented; + int nParentsNum; + MV_XML_NODE_FEATURE stParentsList[MV_MAX_XML_PARENTS_NUM]; + + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + int64_t nValue; ///< \~chinese 当前值 \~english Current Value + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + 
int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int nReserved[8]; + +}MV_XML_FEATURE_EnumEntry; + +typedef struct _MV_XML_FEATURE_Enumeration_ +{ + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述 目前暂不支持 \~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称 \~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名 \~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示 \~english Notice + + int nSymbolicNum; ///< \~chinese ymbolic数 \~english Symbolic Number + char strCurrentSymbolic[MV_MAX_XML_SYMBOLIC_STRLEN_C];///< \~chinese 当前Symbolic索引 \~english Current Symbolic Index + char strSymbolic[MV_MAX_XML_SYMBOLIC_NUM][MV_MAX_XML_SYMBOLIC_STRLEN_C]; + enum MV_XML_AccessMode enAccessMode; ////< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 0-NO; 1-YES, NOT SUPPORT NOW + int64_t nValue; ///< \~chinese 当前值 \~english Current Value + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Enumeration; + +typedef struct _MV_XML_FEATURE_Port_ +{ + enum MV_XML_Visibility enVisivility; ///< \~chinese 是否可见 \~english Visible + char strDescription[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 节点描述,目前暂不支持 \~english Node Description, NOT SUPPORT NOW + char strDisplayName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 显示名称 \~english Display Name + char strName[MV_MAX_XML_NODE_STRLEN_C]; ///< \~chinese 节点名 \~english Node Name + char strToolTip[MV_MAX_XML_DISC_STRLEN_C]; ///< \~chinese 提示 \~english Notice + + enum MV_XML_AccessMode enAccessMode; ///< \~chinese 访问模式 \~english Access Mode + int bIsLocked; ///< \~chinese 是否锁定。0-否;1-是,目前暂不支持 \~english Locked. 
0-NO; 1-YES, NOT SUPPORT NOW + + unsigned int nReserved[4]; +}MV_XML_FEATURE_Port; + +typedef struct _MV_XML_CAMERA_FEATURE_ +{ + enum MV_XML_InterfaceType enType; + union + { + MV_XML_FEATURE_Integer stIntegerFeature; + MV_XML_FEATURE_Float stFloatFeature; + MV_XML_FEATURE_Enumeration stEnumerationFeature; + MV_XML_FEATURE_String stStringFeature; + }SpecialFeature; + +}MV_XML_CAMERA_FEATURE; + + + +/// \~chinese 图片保存参数 \~english Save Image Parameters +typedef struct _MV_SAVE_IMAGE_PARAM_T_EX_ +{ + unsigned char* pData; ///< [IN] \~chinese 输入数据缓存 \~english Input Data Buffer + unsigned int nDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input Data length + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 输入数据的像素格式 \~english Input Data Pixel Format + unsigned short nWidth; ///< [IN] \~chinese 图像宽 \~english Image Width + unsigned short nHeight; ///< [IN] \~chinese 图像高 \~english Image Height + + unsigned char* pImageBuffer; ///< [OUT] \~chinese 输出图片缓存 \~english Output Image Buffer + unsigned int nImageLen; ///< [OUT] \~chinese 输出图片长度 \~english Output Image length + unsigned int nBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Output buffer size provided + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] \~chinese 输出图片格式 \~english Output Image Format + unsigned int nJpgQuality; ///< [IN] \~chinese JPG编码质量(50-99],其它格式无效 \~english Encoding quality(50-99],Other formats are invalid + + unsigned int iMethodValue; ///< [IN] \~chinese 插值方法 0-快速 1-均衡(其它值默认为均衡) 2-最优 3-最优+ \~english Bayer interpolation method 0-Fast 1-Equilibrium 2-Optimal 3-Optimal+ + + unsigned int nReserved[3]; ///< \~chinese 预留 \~english Reserved + +}MV_SAVE_IMAGE_PARAM_EX; + + + +/// \~chinese 图片保存参数 \~english Save Image Parameters +typedef struct _MV_SAVE_IMG_TO_FILE_PARAM_ +{ + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese输入数据的像素格式 \~english The pixel format of the input data + unsigned char* pData; ///< [IN] \~chinese 输入数据缓存 \~english Input Data Buffer + unsigned int nDataLen; ///< [IN] 
\~chinese 输入数据长度 \~english Input Data length + unsigned short nWidth; ///< [IN] \~chinese 图像宽 \~english Image Width + unsigned short nHeight; ///< [IN] \~chinese 图像高 \~english Image Height + enum MV_SAVE_IAMGE_TYPE enImageType; ///< [IN] \~chinese 输入图片格式 \~english Input Image Format + unsigned int nQuality; ///< [IN] \~chinese JPG编码质量(50-99],其它格式无效 \~english JPG Encoding quality(50-99],Other formats are invalid + char pImagePath[256]; ///< [IN] \~chinese 输入文件路径 \~english Input file path + + int iMethodValue; ///< [IN] \~chinese 插值方法 0-快速 1-均衡(其它值默认为均衡) 2-最优 3-最优+ \~english Bayer interpolation method 0-Fast 1-Equilibrium 2-Optimal 3-Optimal+ + + unsigned int nReserved[8]; ///< \~chinese 预留 \~english Reserved + +}MV_SAVE_IMG_TO_FILE_PARAM; + + +// \~chinese 像素转换结构体 \~english Pixel convert structure +typedef struct _MV_CC_PIXEL_CONVERT_PARAM_ +{ + unsigned short nWidth; ///< [IN] \~chinese 图像宽 \~english Width + unsigned short nHeight; ///< [IN] \~chinese 图像高 \~english Height + + enum MvGvspPixelType enSrcPixelType; ///< [IN] \~chinese 源像素格式 \~english Source pixel format + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + enum MvGvspPixelType enDstPixelType; ///< [IN] \~chinese 目标像素格式 \~english Destination pixel format + unsigned char* pDstBuffer; ///< [OUT] \~chinese 输出数据缓存 \~english Output data buffer + unsigned int nDstLen; ///< [OUT] \~chinese 输出数据长度 \~english Output data length + unsigned int nDstBufferSize; ///< [IN] \~chinese 提供的输出缓冲区大小 \~english Provided output buffer size + + unsigned int nRes[4]; ///< \~chinese 预留 \~english Reserved + +}MV_CC_PIXEL_CONVERT_PARAM; + +/// \~chinese 保存的3D数据格式 \~english The saved format for 3D data +enum MV_SAVE_POINT_CLOUD_FILE_TYPE +{ + MV_PointCloudFile_Undefined = 0, ///< \~chinese 未定义的点云格式 \~english Undefined point cloud format + MV_PointCloudFile_PLY = 1, ///< \~chinese PLY点云格式 \~english The point cloud 
format named PLY + MV_PointCloudFile_CSV = 2, ///< \~chinese CSV点云格式 \~english The point cloud format named CSV + MV_PointCloudFile_OBJ = 3, ///< \~chinese OBJ点云格式 \~english The point cloud format named OBJ + +}; + +/// \~chinese 保存3D数据到缓存 \~english Save 3D data to buffer +typedef struct _MV_SAVE_POINT_CLOUD_PARAM_ +{ + unsigned int nLinePntNum; ///< [IN] \~chinese 行点数,即图像宽 \~english The number of points in each row,which is the width of the image + unsigned int nLineNum; ///< [IN] \~chinese 行数,即图像高 \~english The number of rows,which is the height of the image + + enum MvGvspPixelType enSrcPixelType; ///< [IN] \~chinese 输入数据的像素格式 \~english The pixel format of the input data + unsigned char* pSrcData; ///< [IN] \~chinese 输入数据缓存 \~english Input data buffer + unsigned int nSrcDataLen; ///< [IN] \~chinese 输入数据长度 \~english Input data length + + unsigned char* pDstBuf; ///< [OUT] \~chinese 输出像素数据缓存 \~english Output pixel data buffer + unsigned int nDstBufSize; ///< [IN] \~chinese 提供的输出缓冲区大小(nLinePntNum * nLineNum * (16*3 + 4) + 2048) \~english Output buffer size provided(nLinePntNum * nLineNum * (16*3 + 4) + 2048) + unsigned int nDstBufLen; ///< [OUT] \~chinese 输出像素数据缓存长度 \~english Output pixel data buffer size + enum MV_SAVE_POINT_CLOUD_FILE_TYPE enPointCloudFileType; ///< [IN] \~chinese 提供输出的点云文件类型 \~english Output point data file type provided + + unsigned int nReserved[8]; ///< \~chinese 保留字段 \~english Reserved + +}MV_SAVE_POINT_CLOUD_PARAM; + +/// \~chinese 显示帧信息 \~english Display frame information +typedef struct _MV_DISPLAY_FRAME_INFO_ +{ + void* hWnd; ///< [IN] \~chinese 窗口句柄 \~english HWND + unsigned char* pData; ///< [IN] \~chinese 显示的数据 \~english Data Buffer + unsigned int nDataLen; ///< [IN] \~chinese 数据长度 \~english Data Size + unsigned short nWidth; ///< [IN] \~chinese 图像宽 \~english Width + unsigned short nHeight; ///< [IN] \~chinese 图像高 \~english Height + enum MvGvspPixelType enPixelType; ///< [IN] \~chinese 像素格式 \~english Pixel format + + unsigned int 
enRenderMode; /// [IN] \~chinese 图像渲染方式 Windows:0-GDI(默认), 1-D3D, 2-OPENGL Linux: 0-OPENGL(默认) \~english Windows:0-GDI(default), 1-D3D, 2-OPENGL Linux: 0-OPENGL(default) + unsigned int nRes[3]; ///< \~chinese 保留 \~english Reserved + +}MV_DISPLAY_FRAME_INFO; + + + + +#endif /* _MV_OBSOLETE_CAM_PARAMS_H_ */ diff --git a/image_capture/third_party/mvs/Includes/PixelType.h b/image_capture/third_party/mvs/Includes/PixelType.h new file mode 100644 index 0000000..e4e9758 --- /dev/null +++ b/image_capture/third_party/mvs/Includes/PixelType.h @@ -0,0 +1,201 @@ + +#ifndef _MV_PIXEL_TYPE_H_ +#define _MV_PIXEL_TYPE_H_ + +/************************************************************************/ +/* GigE Vision (2.0.03) PIXEL FORMATS */ +/************************************************************************/ + +// Indicate if pixel is monochrome or RGB +#define MV_GVSP_PIX_MONO 0x01000000 +#define MV_GVSP_PIX_RGB 0x02000000 // deprecated in version 1.1 +#define MV_GVSP_PIX_COLOR 0x02000000 +#define MV_GVSP_PIX_CUSTOM 0x80000000 +#define MV_GVSP_PIX_COLOR_MASK 0xFF000000 + +// Indicate effective number of bits occupied by the pixel (including padding). +// This can be used to compute amount of memory required to store an image. 
+#define MV_PIXEL_BIT_COUNT(n) ((n) << 16) + +#define MV_GVSP_PIX_EFFECTIVE_PIXEL_SIZE_MASK 0x00FF0000 +#define MV_GVSP_PIX_EFFECTIVE_PIXEL_SIZE_SHIFT 16 + +// Pixel ID: lower 16-bit of the pixel formats +#define MV_GVSP_PIX_ID_MASK 0x0000FFFF +#define MV_GVSP_PIX_COUNT 0x46 // next Pixel ID available + +/// \addtogroup 像素格式定义 +///@{ + +///< \~chinese 图片格式定义 +enum MvGvspPixelType +{ + // Undefined pixel type +#ifdef WIN32 + PixelType_Gvsp_Undefined = 0xFFFFFFFF, ///< 未定义的像素类型 + +#else + PixelType_Gvsp_Undefined = -1, ///< 未定义的像素类型 + +#endif + // Mono buffer format defines + PixelType_Gvsp_Mono1p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(1) | 0x0037), ///< Mono1p + PixelType_Gvsp_Mono2p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(2) | 0x0038), ///< Mono2p + PixelType_Gvsp_Mono4p = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(4) | 0x0039), ///< Mono4p + PixelType_Gvsp_Mono8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0001), ///< Mono8 + PixelType_Gvsp_Mono8_Signed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0002), ///< Mono8_Signed + PixelType_Gvsp_Mono10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0003), ///< Mono10 + PixelType_Gvsp_Mono10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0004), ///< Mono10_Packed + PixelType_Gvsp_Mono12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0005), ///< Mono12 + PixelType_Gvsp_Mono12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0006), ///< Mono12_Packed + PixelType_Gvsp_Mono14 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0025), ///< Mono14 + PixelType_Gvsp_Mono16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0007), ///< Mono16 + + // Bayer buffer format defines + PixelType_Gvsp_BayerGR8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0008), ///< BayerGR8 + PixelType_Gvsp_BayerRG8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0009), ///< BayerRG8 + PixelType_Gvsp_BayerGB8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000A), ///< BayerGB8 + PixelType_Gvsp_BayerBG8 = (MV_GVSP_PIX_MONO | 
MV_PIXEL_BIT_COUNT(8) | 0x000B), ///< BayerBG8 + PixelType_Gvsp_BayerRBGG8 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0046), ///< BayerRBGG8 + PixelType_Gvsp_BayerGR10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000C), ///< BayerGR10 + PixelType_Gvsp_BayerRG10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000D), ///< BayerRG10 + PixelType_Gvsp_BayerGB10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000E), ///< BayerGB10 + PixelType_Gvsp_BayerBG10 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000F), ///< BayerBG10 + PixelType_Gvsp_BayerGR12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0010), ///< BayerGR12 + PixelType_Gvsp_BayerRG12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0011), ///< BayerRG12 + PixelType_Gvsp_BayerGB12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0012), ///< BayerGB12 + PixelType_Gvsp_BayerBG12 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0013), ///< BayerBG12 + PixelType_Gvsp_BayerGR10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0026), ///< BayerGR10_Packed + PixelType_Gvsp_BayerRG10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0027), ///< BayerRG10_Packed + PixelType_Gvsp_BayerGB10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0028), ///< BayerGB10_Packed + PixelType_Gvsp_BayerBG10_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0029), ///< BayerBG10_Packed + PixelType_Gvsp_BayerGR12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002A), ///< BayerGR12_Packed + PixelType_Gvsp_BayerRG12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002B), ///< BayerRG12_Packed + PixelType_Gvsp_BayerGB12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002C), ///< BayerGB12_Packed + PixelType_Gvsp_BayerBG12_Packed = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002D), ///< BayerBG12_Packed + PixelType_Gvsp_BayerGR16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x002E), ///< BayerGR16 + PixelType_Gvsp_BayerRG16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 
0x002F), ///< BayerRG16 + PixelType_Gvsp_BayerGB16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0030), ///< BayerGB16 + PixelType_Gvsp_BayerBG16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0031), ///< BayerBG16 + + // RGB Packed buffer format defines + PixelType_Gvsp_RGB8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0014), ///< RGB8_Packed + PixelType_Gvsp_BGR8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0015), ///< BGR8_Packed + PixelType_Gvsp_RGBA8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0016), ///< RGBA8_Packed + PixelType_Gvsp_BGRA8_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0017), ///< BGRA8_Packed + PixelType_Gvsp_RGB10_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0018), ///< RGB10_Packed + PixelType_Gvsp_BGR10_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0019), ///< BGR10_Packed + PixelType_Gvsp_RGB12_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x001A), ///< RGB12_Packed + PixelType_Gvsp_BGR12_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x001B), ///< BGR12_Packed + PixelType_Gvsp_RGB16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0033), ///< RGB16_Packed + PixelType_Gvsp_BGR16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x004B), ///< BGR16_Packed + PixelType_Gvsp_RGBA16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0064), ///< RGBA16_Packed + PixelType_Gvsp_BGRA16_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0051), ///< BGRA16_Packed + PixelType_Gvsp_RGB10V1_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x001C), ///< RGB10V1_Packed + PixelType_Gvsp_RGB10V2_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x001D), ///< RGB10V2_Packed + PixelType_Gvsp_RGB12V1_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(36) | 0X0034), ///< RGB12V1_Packed + PixelType_Gvsp_RGB565_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0035), ///< RGB565_Packed + PixelType_Gvsp_BGR565_Packed = 
(MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0X0036), ///< BGR565_Packed + + // YUV Packed buffer format defines + PixelType_Gvsp_YUV411_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x001E), ///< YUV411_Packed + PixelType_Gvsp_YUV422_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x001F), ///< YUV422_Packed + PixelType_Gvsp_YUV422_YUYV_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0032), ///< YUV422_YUYV_Packed + PixelType_Gvsp_YUV444_Packed = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0020), ///< YUV444_Packed + PixelType_Gvsp_YCBCR8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x003A), ///< YCBCR8_CBYCR + PixelType_Gvsp_YCBCR422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x003B), ///< YCBCR422_8 + PixelType_Gvsp_YCBCR422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0043), ///< YCBCR422_8_CBYCRY + PixelType_Gvsp_YCBCR411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x003C), ///< YCBCR411_8_CBYYCRYY + PixelType_Gvsp_YCBCR601_8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x003D), ///< YCBCR601_8_CBYCR + PixelType_Gvsp_YCBCR601_422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x003E), ///< YCBCR601_422_8 + PixelType_Gvsp_YCBCR601_422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0044), ///< YCBCR601_422_8_CBYCRY + PixelType_Gvsp_YCBCR601_411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x003F), ///< YCBCR601_411_8_CBYYCRYY + PixelType_Gvsp_YCBCR709_8_CBYCR = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0040), ///< YCBCR709_8_CBYCR + PixelType_Gvsp_YCBCR709_422_8 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0041), ///< YCBCR709_422_8 + PixelType_Gvsp_YCBCR709_422_8_CBYCRY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0045), ///< YCBCR709_422_8_CBYCRY + PixelType_Gvsp_YCBCR709_411_8_CBYYCRYY = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x0042), ///< YCBCR709_411_8_CBYYCRYY + + // YUV420 + PixelType_Gvsp_YUV420SP_NV12 = 
(MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x8001), ///< YUV420SP_NV12 + PixelType_Gvsp_YUV420SP_NV21 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(12) | 0x8002), ///< YUV420SP_NV21 + + // RGB Planar buffer format defines + PixelType_Gvsp_RGB8_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0021), ///< RGB8_Planar + PixelType_Gvsp_RGB10_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0022), ///< RGB10_Planar + PixelType_Gvsp_RGB12_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0023), ///< RGB12_Planar + PixelType_Gvsp_RGB16_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0024), ///< RGB16_Planar + + // 自定义的图片格式 + PixelType_Gvsp_Jpeg = (MV_GVSP_PIX_CUSTOM | MV_PIXEL_BIT_COUNT(24) | 0x0001), ///< Jpeg + + PixelType_Gvsp_Coord3D_ABC32f = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x00C0), ///< 0x026000C0X + PixelType_Gvsp_Coord3D_ABC32f_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x00C1), ///< 0x026000C1X + + PixelType_Gvsp_Coord3D_AC32f = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(40) | 0x00C2), ///< 该值被废弃,请参考PixelType_Gvsp_Coord3D_AC32f_64; the value is discarded + PixelType_Gvsp_COORD3D_DEPTH_PLUS_MASK = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(28) | 0x0001), ///< 该值被废弃; the value is discarded (已放入Chunkdata) + + PixelType_Gvsp_Coord3D_ABC32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(96) | 0x3001), ///< Coord3D_ABC32 + PixelType_Gvsp_Coord3D_AB32f = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x3002), ///< Coord3D_AB32f + PixelType_Gvsp_Coord3D_AB32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x3003), ///< Coord3D_AB32 + PixelType_Gvsp_Coord3D_AC32f_64 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x00C2), ///< Coord3D_AC32f_64 + PixelType_Gvsp_Coord3D_AC32f_Planar = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x00C3), ///< Coord3D_AC32f_Planar + PixelType_Gvsp_Coord3D_AC32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | 
MV_PIXEL_BIT_COUNT(64) | 0x3004), ///< Coord3D_AC32 + PixelType_Gvsp_Coord3D_A32f = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x00BD), ///< Coord3D_A32f + PixelType_Gvsp_Coord3D_A32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x3005), ///< Coord3D_A32 + PixelType_Gvsp_Coord3D_C32f = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x00BF), ///< Coord3D_C32f + PixelType_Gvsp_Coord3D_C32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x3006), ///< Coord3D_C32 + PixelType_Gvsp_Coord3D_ABC16 = (MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x00B9), ///< Coord3D_ABC16 + PixelType_Gvsp_Coord3D_C16 = (MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x00B8), ///< Coord3D_C16 + + PixelType_Gvsp_Float32 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(32) | 0x0001), //0x81200001 + + //无损压缩像素格式定义 + PixelType_Gvsp_HB_Mono8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0001), ///< HB_Mono8 + PixelType_Gvsp_HB_Mono10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0003), ///< HB_Mono10 + PixelType_Gvsp_HB_Mono10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0004), ///< HB_Mono10_Packed + PixelType_Gvsp_HB_Mono12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0005), ///< HB_Mono12 + PixelType_Gvsp_HB_Mono12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0006), ///< HB_Mono12_Packed + PixelType_Gvsp_HB_Mono16 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0007), ///< HB_Mono16 + PixelType_Gvsp_HB_BayerGR8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0008), ///< HB_BayerGR8 + PixelType_Gvsp_HB_BayerRG8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0009), ///< HB_BayerRG8 + PixelType_Gvsp_HB_BayerGB8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000A), ///< HB_BayerGB8 + PixelType_Gvsp_HB_BayerBG8 = 
(MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x000B), ///< HB_BayerBG8 + PixelType_Gvsp_HB_BayerRBGG8 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(8) | 0x0046), ///< HB_BayerRBGG8 + PixelType_Gvsp_HB_BayerGR10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000C), ///< HB_BayerGR10 + PixelType_Gvsp_HB_BayerRG10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000D), ///< HB_BayerRG10 + PixelType_Gvsp_HB_BayerGB10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000E), ///< HB_BayerGB10 + PixelType_Gvsp_HB_BayerBG10 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x000F), ///< HB_BayerBG10 + PixelType_Gvsp_HB_BayerGR12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0010), ///< HB_BayerGR12 + PixelType_Gvsp_HB_BayerRG12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0011), ///< HB_BayerRG12 + PixelType_Gvsp_HB_BayerGB12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0012), ///< HB_BayerGB12 + PixelType_Gvsp_HB_BayerBG12 = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(16) | 0x0013), ///< HB_BayerBG12 + PixelType_Gvsp_HB_BayerGR10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0026), ///< HB_BayerGR10_Packed + PixelType_Gvsp_HB_BayerRG10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0027), ///< HB_BayerRG10_Packed + PixelType_Gvsp_HB_BayerGB10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0028), ///< HB_BayerGB10_Packed + PixelType_Gvsp_HB_BayerBG10_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x0029), ///< HB_BayerBG10_Packed + PixelType_Gvsp_HB_BayerGR12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002A), ///< HB_BayerGR12_Packed + PixelType_Gvsp_HB_BayerRG12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | 
MV_PIXEL_BIT_COUNT(12) | 0x002B), ///< HB_BayerRG12_Packed + PixelType_Gvsp_HB_BayerGB12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002C), ///< HB_BayerGB12_Packed + PixelType_Gvsp_HB_BayerBG12_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_MONO | MV_PIXEL_BIT_COUNT(12) | 0x002D), ///< HB_BayerBG12_Packed + PixelType_Gvsp_HB_YUV422_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x001F), ///< HB_YUV422_Packed + PixelType_Gvsp_HB_YUV422_YUYV_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(16) | 0x0032), ///< HB_YUV422_YUYV_Packed + PixelType_Gvsp_HB_RGB8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0014), ///< HB_RGB8_Packed + PixelType_Gvsp_HB_BGR8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(24) | 0x0015), ///< HB_BGR8_Packed + PixelType_Gvsp_HB_RGBA8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0016), ///< HB_RGBA8_Packed + PixelType_Gvsp_HB_BGRA8_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(32) | 0x0017), ///< HB_BGRA8_Packed + PixelType_Gvsp_HB_RGB16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x0033), ///< HB_RGB16_Packed + PixelType_Gvsp_HB_BGR16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(48) | 0x004B), ///< HB_BGR16_Packed + PixelType_Gvsp_HB_RGBA16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0064), ///< HB_RGBA16_Packed + PixelType_Gvsp_HB_BGRA16_Packed = (MV_GVSP_PIX_CUSTOM | MV_GVSP_PIX_COLOR | MV_PIXEL_BIT_COUNT(64) | 0x0051), ///< HB_BGRA16_Packed + +}; +///@} + +#ifdef WIN32 +typedef __int64 int64_t; +typedef unsigned __int64 uint64_t; +#else +#include +#endif + +#endif /* _MV_PIXEL_TYPE_H_ */ diff --git a/image_capture/third_party/percipio/common/BayerISP.hpp b/image_capture/third_party/percipio/common/BayerISP.hpp new file mode 100644 index 0000000..da27676 --- 
/dev/null +++ b/image_capture/third_party/percipio/common/BayerISP.hpp @@ -0,0 +1,218 @@ +#ifndef SAMPLE_COMMON_ISP_HPP_ +#define SAMPLE_COMMON_ISP_HPP_ + +#include +#include +#include +#include +#include +#include +#include "TyIsp.h" + +/** + *The RGB image data output by some cameras is the original Bayer array. + *By calling the API provided by this file, Bayer data can be converted to BGR array. + *You can refer to the sample code: SimpleView_FetchFrame. +*/ + +static int __TYCompareFirmwareVersion(const TY_DEVICE_BASE_INFO &info, int major, int minor){ + const TY_VERSION_INFO &v = info.firmwareVersion; + if (v.major < major){ + return -1; + } + if (v.major == major && v.minor < minor){ + return -1; + } + if (v.major == major && v.minor == minor){ + return 0; + } + return 1; +} + +static TY_STATUS __TYDetectOldVer21ColorCam(TY_DEV_HANDLE dev_handle,bool *is_v21_color_device){ + TY_DEVICE_BASE_INFO info; + TY_STATUS res = TYGetDeviceInfo(dev_handle, &info); + if (res != TY_STATUS_OK){ + LOGI("get device info failed"); + return res; + } + *is_v21_color_device = false; + if (info.iface.type == TY_INTERFACE_USB){ + *is_v21_color_device = true; + } + if ((info.iface.type == TY_INTERFACE_ETHERNET || info.iface.type == TY_INTERFACE_RAW) && + __TYCompareFirmwareVersion(info, 2, 2) < 0){ + *is_v21_color_device = true; + } + return TY_STATUS_OK; +} + +static void __TYParseSizeFromImageMode(TY_IMAGE_MODE mode , int *image_size) { + const int mask = ((0x01 << 12) - 1); + int height = mode & mask; + int width = (mode >> 12) & mask; + image_size[0] = width; + image_size[1] = height; + +} + +///init color isp setting +///for bayer raw image process +static TY_STATUS ColorIspInitSetting(TY_ISP_HANDLE isp_handle, TY_DEV_HANDLE dev_handle){ + bool is_v21_color_device ; + TY_STATUS res = __TYDetectOldVer21ColorCam(dev_handle, &is_v21_color_device);//old version device has different config + if (res != TY_STATUS_OK){ + return res; + } + if (is_v21_color_device){ + 
ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_BLACK_LEVEL, 11)); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_BLACK_LEVEL_GAIN, 256.f / (256 - 11))); + } + else{ + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_BLACK_LEVEL, 0)); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_BLACK_LEVEL_GAIN, 1.f)); + bool b; + ASSERT_OK(TYHasFeature(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, &b)); + if (b){ + TYSetInt(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, 1); + } + } + TYISPSetFeature(isp_handle, TY_ISP_FEATURE_BAYER_PATTERN, TY_ISP_BAYER_AUTO); + float shading[9] = { 0.30890417098999026, 10.63355541229248, -6.433426856994629, + 0.24413758516311646, 11.739893913269043, -8.148622512817383, + 0.1255662441253662, 11.88359546661377, -7.865192413330078 }; + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_SHADING, (uint8_t*)shading, sizeof(shading))); + int shading_center[2] = { 640, 480 }; + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_SHADING_CENTER, (uint8_t*)shading_center, sizeof(shading_center))); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_CCM_ENABLE, 0));//we are not using ccm by default + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_CAM_DEV_HANDLE, (uint8_t*)&dev_handle, sizeof(dev_handle))); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_CAM_DEV_COMPONENT, int32_t(TY_COMPONENT_RGB_CAM))); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_GAMMA, 1.f)); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_AUTOBRIGHT, 1));//enable auto bright control + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_ENABLE_AUTO_EXPOSURE_GAIN, 0));//disable ae by default + int default_image_size[2] = { 1280, 960 };// image size + int current_image_size[2] = { 1280, 960 };// image size for current parameters + TY_IMAGE_MODE img_mode; +#if 1 + res = TYGetEnum(dev_handle, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, &img_mode); + if (res == TY_STATUS_OK) { + __TYParseSizeFromImageMode(img_mode, 
current_image_size); + } + TY_ENUM_ENTRY mode_entry[10]; + uint32_t num; + res = TYGetEnumEntryInfo(dev_handle, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, mode_entry, 10, &num); + if (res == TY_STATUS_OK) { + __TYParseSizeFromImageMode(mode_entry[0].value, default_image_size); + } + +#else + //some device may not support WIDTH & HEIGHT feature. image mode is recommended + TYGetInt(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_WIDTH, &image_size[0]); + TYGetInt(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_HEIGHT, &image_size[1]); +#endif + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_IMAGE_SIZE, (uint8_t*)&default_image_size, sizeof(default_image_size)));//the orignal raw image size + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_INPUT_RESAMPLE_SCALE, default_image_size[0] / current_image_size[0]));//resampled input +#if 1 + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_ENABLE_AUTO_WHITEBALANCE, 1)); //eanble auto white balance +#else + //manual wb gain control + const float wb_rgb_gain[3] = { 2.0123140811920168, 1, 1.481866478919983 }; + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_WHITEBALANCE_GAIN, (uint8_t*)wb_rgb_gain, sizeof(wb_rgb_gain))); +#endif + + //try to load specifical device config from device storage + TY_COMPONENT_ID comp_all; + ASSERT_OK(TYGetComponentIDs(dev_handle, &comp_all)); + if (!(comp_all & TY_COMPONENT_STORAGE)){ + return TY_STATUS_OK; + } + bool has_isp_block = false; + ASSERT_OK(TYHasFeature(dev_handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_ISP_BLOCK, &has_isp_block)); + if (!has_isp_block){ + return TY_STATUS_OK; + } + uint32_t sz = 0; + ASSERT_OK(TYGetByteArraySize(dev_handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_ISP_BLOCK, &sz)); + if (sz <= 0){ + return TY_STATUS_OK; + } + std::vector buff(sz); + ASSERT_OK(TYGetByteArray(dev_handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_ISP_BLOCK, &buff[0], buff.size())); + res = TYISPLoadConfig(isp_handle, &buff[0], buff.size()); + if (res == TY_STATUS_OK){ + LOGD("Load RGB ISP Config 
From Device"); + } + return TY_STATUS_OK; +} + + +static TY_STATUS ColorIspInitAutoExposure(TY_ISP_HANDLE isp_handle, TY_DEV_HANDLE dev_handle){ + bool is_v21_color_device; + TY_STATUS res = __TYDetectOldVer21ColorCam(dev_handle, &is_v21_color_device);//old version device has different config + if (res != TY_STATUS_OK){ + return res; + } + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_ENABLE_AUTO_EXPOSURE_GAIN, 1)); + + // do not enable gain auto control by default +# if 1 + int auto_gain_range[2] = { -1, -1 }; + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_AUTO_GAIN_RANGE, (uint8_t*)&auto_gain_range, sizeof(auto_gain_range))); +#else + if(is_v21_color_device){ + const int old_auto_gain_range[2] = { 33, 255 }; + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_AUTO_GAIN_RANGE, (uint8_t*)&old_auto_gain_range, sizeof(old_auto_gain_range))); + } + else{ +#define CHECK_GO_FAILED(a) {if((a)!=TY_STATUS_OK) break;} + do{ + TY_FEATURE_ID_LIST feature_id = TY_INT_GAIN; + bool val; + CHECK_GO_FAILED(TYHasFeature(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_GAIN, &val)); + if (val) { + feature_id = TY_INT_GAIN; + } + CHECK_GO_FAILED(TYHasFeature(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, &val)); + if (val) { + feature_id = TY_INT_R_GAIN; + } + int auto_gain_range[2] = { 15, 255 }; + TY_INT_RANGE range; + CHECK_GO_FAILED(TYGetIntRange(dev_handle, TY_COMPONENT_RGB_CAM, feature_id, &range)); + auto_gain_range[0] = std::min(range.min + 1, range.max); + auto_gain_range[1] = std::max(range.max - 1, range.min); + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_AUTO_GAIN_RANGE, (uint8_t*)&auto_gain_range, sizeof(auto_gain_range))); + } while(0); +#undef CHECK_GO_FAILED + } +#endif + + //constraint exposure time + int auto_expo_range[2] = { 10, 100 }; + TY_INT_RANGE range; + res = TYGetIntRange(dev_handle, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, &range); + if (res == TY_STATUS_OK) { + auto_expo_range[0] = std::min(range.min + 1, range.max); + 
auto_expo_range[1] = std::max(range.max - 1, range.min); + } + ASSERT_OK(TYISPSetFeature(isp_handle, TY_ISP_FEATURE_AUTO_EXPOSURE_RANGE, (uint8_t*)&auto_expo_range, sizeof(auto_expo_range))); + return TY_STATUS_OK; +} + + +static TY_STATUS ColorIspShowSupportedFeatures(TY_ISP_HANDLE handle){ + int sz; + TY_STATUS res = TYISPGetFeatureInfoListSize(handle,&sz); + if (res != TY_STATUS_OK){ + return res; + } + std::vector info; + info.resize(sz); + TYISPGetFeatureInfoList(handle, &info[0], info.size()); + for (int idx = 0; idx < sz; idx++){ + printf("feature name : %-50s type : %s \n", info[idx].name, info[idx].value_type); + } + return TY_STATUS_OK; +} + +#endif diff --git a/image_capture/third_party/percipio/common/CommandLineFeatureHelper.hpp b/image_capture/third_party/percipio/common/CommandLineFeatureHelper.hpp new file mode 100644 index 0000000..02080e4 --- /dev/null +++ b/image_capture/third_party/percipio/common/CommandLineFeatureHelper.hpp @@ -0,0 +1,126 @@ +#ifndef CMDLINE_FEATURE_HELPER__H_ +#define CMDLINE_FEATURE_HELPER__H_ + +#include "CommandLineParser.hpp" +#include "TYApi.h" +#include "Utils.hpp" + +/// @brief command line sgb feature param id +struct ty_fetaure_options +{ + int component_id; + int feature_id; + + ty_fetaure_options(int comp_id = 0, int _f_id = 0) + { + component_id = comp_id; + feature_id = _f_id; + } +}; + +/// @brief command line feature helper for set device feature by command line args +class CommandLineFeatureHelper +{ +public: + TyCommandlineParser cmd_parser; ///< command line parser + + /// @brief add feature param to command line parser + /// @param param command line param name + /// @param comp_id component id , 0 for not a feature setting + /// @param feat_id feature id , 0 for not a feature setting + /// @param val default value + /// @param desc describe + /// @param is_flag is a flag only , no value + void add_feature(const std::string ¶m, int comp_id, int feat_id, int val, const std::string &desc, bool is_flag = 
false) + { + cmd_parser.addItem(param, desc, is_flag, std::to_string(val), ty_fetaure_options(comp_id, feat_id)); + } + + /// @brief add feature param to command line parser + /// @param param command line param name + /// @param comp_id component id , 0 for not a feature setting + /// @param feat_id feature id , 0 for not a feature setting + /// @param val default value + /// @param desc describe + /// @param is_flag is a flag only , no value + void add_feature(const std::string ¶m, int comp_id, int feat_id, std::string val, const std::string &desc, bool is_flag = false) + { + cmd_parser.addItem(param, desc, is_flag, val, ty_fetaure_options(comp_id, feat_id)); + } + + /// @brief add feature param to command line parser + /// @param name command line param name + /// @return command line item + const TyCommandlineItem *get_feature(const std::string &name) const + { + auto res = cmd_parser.get(name); + return res; + } + + /// @brief get command line param describe + /// @return describe string + std::string usage_describe() const + { + return cmd_parser.getUsage(); + } + + /// @brief parse command line args + void parse_argv(int argc, char *argv[]) + { + cmd_parser.parse(argc, argv); + } + + /// @brief set command line param to device + /// @param hDevice device handle + void set_device_feature(TY_DEV_HANDLE hDevice) + { + // loop for all command line argv items and set to device + for (auto &kv : cmd_parser.cmd_items) + { + auto &p = kv.second; + int res = TY_STATUS_OK; + if (!p.has_set) + { + continue; + } + int feature_id = p.ctx.feature_id; + int comp_id = p.ctx.component_id; + if (comp_id == 0 && feature_id == 0) + { + // param is not a feature setting + continue; + } + // set feature by type + int type = feature_id & 0xf000; + if (type == TY_FEATURE_INT) + { + int val = p.get_int_val(); + LOGD("set feature %s (compId 0x%x featId 0x%x) to %d", p.name.c_str(), comp_id, feature_id, val); + res = TYSetInt(hDevice, comp_id, feature_id, val); + } + else if (type == 
TY_FEATURE_BOOL) + { + bool val = p.get_bool_val(); + LOGD("set feature %s (compId 0x%x featId 0x%x) to %d", p.name.c_str(), comp_id, feature_id, val); + res = TYSetBool(hDevice, comp_id, feature_id, val); + } + else if (type == TY_FEATURE_FLOAT) + { + float val = p.get_float_val(); + LOGD("set feature %s (compId 0x%x featId 0x%x) to %f", p.name.c_str(), comp_id, feature_id, val); + res = TYSetFloat(hDevice, comp_id, feature_id, val); + } + else + { + LOGE("unknow feature type %d for %s", type, p.name.c_str()); + continue; + } + if (res != TY_STATUS_OK) + { + LOGE("set feature %s (%s) FAILED with return status code %d", p.name.c_str(), p.describe.c_str(), res); + } + } + } +}; + +#endif // CMDLINE_FEATURE_HELPER__H_ \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/CommandLineParser.hpp b/image_capture/third_party/percipio/common/CommandLineParser.hpp new file mode 100644 index 0000000..e51d170 --- /dev/null +++ b/image_capture/third_party/percipio/common/CommandLineParser.hpp @@ -0,0 +1,173 @@ +#ifndef _TYP_COMMAND_LINE_PARSER_HPP +#define _TYP_COMMAND_LINE_PARSER_HPP + +#include +#include +#include + +/// @brief command line arg item +/// @tparam T context type +template +class TyCommandlineItem +{ +public: + TyCommandlineItem(const std::string &name = "", + const std::string &describe = "", + bool is_flag = false, + const std::string &default_val = "") + { + this->name = name; + this->describe = describe; + this->default_val = default_val; + this->is_flag = is_flag; + has_set = false; + curr_val = default_val; + } + std::string name, describe; ///< name and describe + std::string default_val; ///< default value + bool is_flag; ///< flag only, no value + T ctx; ///< context + + bool has_set; ///< has set by command line + std::string curr_val; ///< current arg value + + + int get_int_val() const + { + return std::stoi(curr_val); + } + + float get_float_val() const + { + return std::stof(curr_val); + } + + double get_double_val() 
const + { + return std::stod(curr_val); + } + + std::string get_str_val() const + { + return curr_val; + } + + bool get_bool_val() const + { + return curr_val == "true" || curr_val == "1"; + } +}; + + +////-------------------- + +/// @brief command line parser +/// @tparam T context type +template +class TyCommandlineParser +{ + +public: + std::map> cmd_items; ///< command line items + + /// @brief add command line item + /// @param name item name + /// @param describe item describe + /// @param is_flag is flag only + /// @param default_val default value + /// @param ctx context + void addItem(const std::string &name, + const std::string &describe, + bool is_flag = false, + const std::string &default_val = "0", + T ctx = T()) + { + TyCommandlineItem item(name, describe, is_flag, default_val); + item.ctx = ctx; + cmd_items.emplace(name, item); + } + + /// @brief clear all items + void clear() + { + cmd_items.clear(); + } + + /// @brief parse command line + /// @param argc arg count + /// @param argv arg list + /// @return 0: success, -1: failed + int parse(int argc, char *argv[]) + { + int idx = 1; + while (idx < argc) + { + std::string arg = argv[idx]; + if (arg[0] != '-') + { + continue; + } + arg = arg.substr(1); + auto find_res = cmd_items.find(arg); + if (find_res== cmd_items.end()) { + printf("TyCommandlineParser:ignore unknow param: %s\n", arg.c_str()); + idx++; + continue; + } + auto& item = find_res->second; + item.has_set = true; + item.curr_val = item.default_val; + if (idx + 1 < argc && !item.is_flag) + { + item.curr_val = argv[idx + 1]; + idx++; + } + idx++; + } + return 0; + } + + /// @brief get command line item + /// @param name item name + /// @return item + const TyCommandlineItem *get(const std::string &name) const + { + auto find_res = cmd_items.find(name); + if (find_res != cmd_items.end()) { + return &find_res->second; + } + LOGE("ERROR: not find command argv by name %s ", name.c_str()); + return nullptr; + } + + /// @brief get usage string + 
/// @return usage string + std::string getUsage() const + { + std::string usage = "Usage: \n"; + size_t max_name_len = 1; + for (auto& kv : cmd_items) { + max_name_len = std::max(kv.first.size(), max_name_len); + } + for (auto& kv : cmd_items) + { + const auto &cmd = kv.second; + std::string name = cmd.name; + if (name.size() < max_name_len) { + name.append(max_name_len - name.size(), ' '); + } + usage += " -" + name + " "; + if (!cmd.is_flag) + { + usage += " "; + } + else { + usage += " "; + } + usage += cmd.describe + " \n"; + } + return usage; + } +}; + +#endif // _TYP_COMMAND_LINE_PARSER_HPP diff --git a/image_capture/third_party/percipio/common/DepthInpainter.cpp b/image_capture/third_party/percipio/common/DepthInpainter.cpp new file mode 100644 index 0000000..ca49d33 --- /dev/null +++ b/image_capture/third_party/percipio/common/DepthInpainter.cpp @@ -0,0 +1,647 @@ +#include "DepthInpainter.hpp" +#include + +#ifdef OPENCV_DEPENDENCIES + +#include +#ifndef CV_VERSION_EPOCH +#if defined (CV_MAJOR_VERSION) && (CV_VERSION_MAJOR == 4) +#include +#include +#include +#include +#endif +#endif + +using namespace cv; + +#undef CV_MAT_ELEM_PTR_FAST +#define CV_MAT_ELEM_PTR_FAST( mat, row, col, pix_size ) \ + ((mat).data.ptr + (size_t)(mat).step*(row) + (pix_size)*(col)) + +inline float +min4( float a, float b, float c, float d ) +{ + a = MIN(a,b); + c = MIN(c,d); + return MIN(a,c); +} + +#define CV_MAT_3COLOR_ELEM(img,type,y,x,c) CV_MAT_ELEM(img,type,y,(x)*3+(c)) +#define KNOWN 0 //known outside narrow band +#define BAND 1 //narrow band (known) +#define INSIDE 2 //unknown +#define CHANGE 3 //servise + +typedef struct CvHeapElem +{ + float T; + int i,j; + struct CvHeapElem* prev; + struct CvHeapElem* next; +} +CvHeapElem; + + +class CvPriorityQueueFloat +{ +protected: + CvHeapElem *mem,*empty,*head,*tail; + int num,in; + +public: + bool Init( const CvMat* f ) + { + int i,j; + for( i = num = 0; i < f->rows; i++ ) + { + for( j = 0; j < f->cols; j++ ) + num += 
CV_MAT_ELEM(*f,uchar,i,j)!=0; + } + if (num<=0) return false; + mem = (CvHeapElem*)cvAlloc((num+2)*sizeof(CvHeapElem)); + if (mem==NULL) return false; + + head = mem; + head->i = head->j = -1; + head->prev = NULL; + head->next = mem+1; + head->T = -FLT_MAX; + empty = mem+1; + for (i=1; i<=num; i++) { + mem[i].prev = mem+i-1; + mem[i].next = mem+i+1; + mem[i].i = -1; + mem[i].T = FLT_MAX; + } + tail = mem+i; + tail->i = tail->j = -1; + tail->prev = mem+i-1; + tail->next = NULL; + tail->T = FLT_MAX; + return true; + } + + bool Add(const CvMat* f) { + int i,j; + for (i=0; irows; i++) { + for (j=0; jcols; j++) { + if (CV_MAT_ELEM(*f,uchar,i,j)!=0) { + if (!Push(i,j,0)) return false; + } + } + } + return true; + } + + bool Push(int i, int j, float T) { + CvHeapElem *tmp=empty,*add=empty; + if (empty==tail) return false; + while (tmp->prev->T>T) tmp = tmp->prev; + if (tmp!=empty) { + add->prev->next = add->next; + add->next->prev = add->prev; + empty = add->next; + add->prev = tmp->prev; + add->next = tmp; + add->prev->next = add; + add->next->prev = add; + } else { + empty = empty->next; + } + add->i = i; + add->j = j; + add->T = T; + in++; + // printf("push i %3d j %3d T %12.4e in %4d\n",i,j,T,in); + return true; + } + + bool Pop(int *i, int *j) { + CvHeapElem *tmp=head->next; + if (empty==tmp) return false; + *i = tmp->i; + *j = tmp->j; + tmp->prev->next = tmp->next; + tmp->next->prev = tmp->prev; + tmp->prev = empty->prev; + tmp->next = empty; + tmp->prev->next = tmp; + tmp->next->prev = tmp; + empty = tmp; + in--; + // printf("pop i %3d j %3d T %12.4e in %4d\n",tmp->i,tmp->j,tmp->T,in); + return true; + } + + bool Pop(int *i, int *j, float *T) { + CvHeapElem *tmp=head->next; + if (empty==tmp) return false; + *i = tmp->i; + *j = tmp->j; + *T = tmp->T; + tmp->prev->next = tmp->next; + tmp->next->prev = tmp->prev; + tmp->prev = empty->prev; + tmp->next = empty; + tmp->prev->next = tmp; + tmp->next->prev = tmp; + empty = tmp; + in--; + // printf("pop i %3d j %3d T 
%12.4e in %4d\n",tmp->i,tmp->j,tmp->T,in); + return true; + } + + CvPriorityQueueFloat(void) { + num=in=0; + mem=empty=head=tail=NULL; + } + + ~CvPriorityQueueFloat(void) + { + cvFree( &mem ); + } +}; + +inline float VectorScalMult(CvPoint2D32f v1,CvPoint2D32f v2) { + return v1.x*v2.x+v1.y*v2.y; +} + +inline float VectorLength(CvPoint2D32f v1) { + return v1.x*v1.x+v1.y*v1.y; +} + +/////////////////////////////////////////////////////////////////////////////////////////// +//HEAP::iterator Heap_Iterator; +//HEAP Heap; + +static float FastMarching_solve(int i1,int j1,int i2,int j2, const CvMat* f, const CvMat* t) +{ + double sol, a11, a22, m12; + a11=CV_MAT_ELEM(*t,float,i1,j1); + a22=CV_MAT_ELEM(*t,float,i2,j2); + m12=MIN(a11,a22); + + if( CV_MAT_ELEM(*f,uchar,i1,j1) != INSIDE ) + if( CV_MAT_ELEM(*f,uchar,i2,j2) != INSIDE ) + if( fabs(a11-a22) >= 1.0 ) + sol = 1+m12; + else + sol = (a11+a22+sqrt((double)(2-(a11-a22)*(a11-a22))))*0.5; + else + sol = 1+a11; + else if( CV_MAT_ELEM(*f,uchar,i2,j2) != INSIDE ) + sol = 1+a22; + else + sol = 1+m12; + + return (float)sol; +} + +///////////////////////////////////////////////////////////////////////////////////// + + +static void +icvCalcFMM(const CvMat *f, CvMat *t, CvPriorityQueueFloat *Heap, bool negate) { + int i, j, ii = 0, jj = 0, q; + float dist; + + while (Heap->Pop(&ii,&jj)) { + + unsigned known=(negate)?CHANGE:KNOWN; + CV_MAT_ELEM(*f,uchar,ii,jj) = (uchar)known; + + for (q=0; q<4; q++) { + i=0; j=0; + if (q==0) {i=ii-1; j=jj;} + else if(q==1) {i=ii; j=jj-1;} + else if(q==2) {i=ii+1; j=jj;} + else {i=ii; j=jj+1;} + if ((i<=0)||(j<=0)||(i>f->rows)||(j>f->cols)) continue; + + if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) { + dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t), + FastMarching_solve(i+1,j,i,j-1,f,t), + FastMarching_solve(i-1,j,i,j+1,f,t), + FastMarching_solve(i+1,j,i,j+1,f,t)); + CV_MAT_ELEM(*t,float,i,j) = dist; + CV_MAT_ELEM(*f,uchar,i,j) = BAND; + Heap->Push(i,j,dist); + } + } + } + + if (negate) { + for (i=0; 
irows; i++) { + for(j=0; jcols; j++) { + if (CV_MAT_ELEM(*f,uchar,i,j) == CHANGE) { + CV_MAT_ELEM(*f,uchar,i,j) = KNOWN; + CV_MAT_ELEM(*t,float,i,j) = -CV_MAT_ELEM(*t,float,i,j); + } + } + } + } +} + + +static void +icvTeleaInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQueueFloat *Heap ) { + int i = 0, j = 0, ii = 0, jj = 0, k, l, q, color = 0; + float dist; + + if (CV_MAT_CN(out->type)==1) { + + while (Heap->Pop(&ii,&jj)) { + + CV_MAT_ELEM(*f,uchar,ii,jj) = KNOWN; + for(q=0; q<4; q++) { + if (q==0) {i=ii-1; j=jj;} + else if(q==1) {i=ii; j=jj-1;} + else if(q==2) {i=ii+1; j=jj;} + else if(q==3) {i=ii; j=jj+1;} + if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue; + + if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) { + dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t), + FastMarching_solve(i+1,j,i,j-1,f,t), + FastMarching_solve(i-1,j,i,j+1,f,t), + FastMarching_solve(i+1,j,i,j+1,f,t)); + CV_MAT_ELEM(*t,float,i,j) = dist; + + for (color=0; color<=0; color++) { + CvPoint2D32f gradI,gradT,r; + float Ia=0,Jx=0,Jy=0,s=1.0e-20f,w,dst,lev,dir,sat; + + if (CV_MAT_ELEM(*f,uchar,i,j+1)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,i,j-1)!=INSIDE) { + gradT.x=(float)((CV_MAT_ELEM(*t,float,i,j+1)-CV_MAT_ELEM(*t,float,i,j-1)))*0.5f; + } else { + gradT.x=(float)((CV_MAT_ELEM(*t,float,i,j+1)-CV_MAT_ELEM(*t,float,i,j))); + } + } else { + if (CV_MAT_ELEM(*f,uchar,i,j-1)!=INSIDE) { + gradT.x=(float)((CV_MAT_ELEM(*t,float,i,j)-CV_MAT_ELEM(*t,float,i,j-1))); + } else { + gradT.x=0; + } + } + if (CV_MAT_ELEM(*f,uchar,i+1,j)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,i-1,j)!=INSIDE) { + gradT.y=(float)((CV_MAT_ELEM(*t,float,i+1,j)-CV_MAT_ELEM(*t,float,i-1,j)))*0.5f; + } else { + gradT.y=(float)((CV_MAT_ELEM(*t,float,i+1,j)-CV_MAT_ELEM(*t,float,i,j))); + } + } else { + if (CV_MAT_ELEM(*f,uchar,i-1,j)!=INSIDE) { + gradT.y=(float)((CV_MAT_ELEM(*t,float,i,j)-CV_MAT_ELEM(*t,float,i-1,j))); + } else { + gradT.y=0; + } + } + for (k=i-range; k<=i+range; k++) { + int 
km=k-1+(k==1),kp=k-1-(k==t->rows-2); + for (l=j-range; l<=j+range; l++) { + int lm=l-1+(l==1),lp=l-1-(l==t->cols-2); + if (k>0&&l>0&&krows-1&&lcols-1) { + if ((CV_MAT_ELEM(*f,uchar,k,l)!=INSIDE)&& + ((l-j)*(l-j)+(k-i)*(k-i)<=range*range)) { + r.y = (float)(i-k); + r.x = (float)(j-l); + + dst = (float)(1./(VectorLength(r)*sqrt(VectorLength(r)))); + lev = (float)(1./(1+fabs(CV_MAT_ELEM(*t,float,k,l)-CV_MAT_ELEM(*t,float,i,j)))); + + dir=VectorScalMult(r,gradT); + if (fabs(dir)<=0.01) dir=0.000001f; + w = (float)fabs(dst*lev*dir); + + if (CV_MAT_ELEM(*f,uchar,k,l+1)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,k,l-1)!=INSIDE) { + // gradI.x=(float)((CV_MAT_ELEM(*out,uchar,km,lp+1)-CV_MAT_ELEM(*out,uchar,km,lm-1)))*2.0f; + gradI.x=(float)((CV_MAT_ELEM(*out,uint16_t,km,lp+1)-CV_MAT_ELEM(*out,uint16_t,km,lm-1)))*2.0f; + } else { + // gradI.x=(float)((CV_MAT_ELEM(*out,uchar,km,lp+1)-CV_MAT_ELEM(*out,uchar,km,lm))); + gradI.x=(float)((CV_MAT_ELEM(*out,uint16_t,km,lp+1)-CV_MAT_ELEM(*out,uint16_t,km,lm))); + } + } else { + if (CV_MAT_ELEM(*f,uchar,k,l-1)!=INSIDE) { + // gradI.x=(float)((CV_MAT_ELEM(*out,uchar,km,lp)-CV_MAT_ELEM(*out,uchar,km,lm-1))); + gradI.x=(float)((CV_MAT_ELEM(*out,uint16_t,km,lp)-CV_MAT_ELEM(*out,uint16_t,km,lm-1))); + } else { + gradI.x=0; + } + } + if (CV_MAT_ELEM(*f,uchar,k+1,l)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,k-1,l)!=INSIDE) { + // gradI.y=(float)((CV_MAT_ELEM(*out,uchar,kp+1,lm)-CV_MAT_ELEM(*out,uchar,km-1,lm)))*2.0f; + gradI.y=(float)((CV_MAT_ELEM(*out,uint16_t,kp+1,lm)-CV_MAT_ELEM(*out,uint16_t,km-1,lm)))*2.0f; + } else { + // gradI.y=(float)((CV_MAT_ELEM(*out,uchar,kp+1,lm)-CV_MAT_ELEM(*out,uchar,km,lm))); + gradI.y=(float)((CV_MAT_ELEM(*out,uint16_t,kp+1,lm)-CV_MAT_ELEM(*out,uint16_t,km,lm))); + } + } else { + if (CV_MAT_ELEM(*f,uchar,k-1,l)!=INSIDE) { + // gradI.y=(float)((CV_MAT_ELEM(*out,uchar,kp,lm)-CV_MAT_ELEM(*out,uchar,km-1,lm))); + gradI.y=(float)((CV_MAT_ELEM(*out,uint16_t,kp,lm)-CV_MAT_ELEM(*out,uint16_t,km-1,lm))); + } else { + 
gradI.y=0; + } + } + // Ia += (float)w * (float)(CV_MAT_ELEM(*out,uchar,km,lm)); + Ia += (float)w * (float)(CV_MAT_ELEM(*out,uint16_t,km,lm)); + Jx -= (float)w * (float)(gradI.x*r.x); + Jy -= (float)w * (float)(gradI.y*r.y); + s += w; + } + } + } + } + sat = (float)((Ia/s+(Jx+Jy)/(sqrt(Jx*Jx+Jy*Jy)+1.0e-20f)+0.5f)); + { + // CV_MAT_ELEM(*out,uchar,i-1,j-1) = cv::saturate_cast(sat); + CV_MAT_ELEM(*out,uint16_t,i-1,j-1) = cv::saturate_cast(sat); + } + } + + CV_MAT_ELEM(*f,uchar,i,j) = BAND; + Heap->Push(i,j,dist); + } + } + } + } +} + + +static void +icvNSInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQueueFloat *Heap) { + int i = 0, j = 0, ii = 0, jj = 0, k, l, q; + float dist; + + if (CV_MAT_CN(out->type)==1) { + + while (Heap->Pop(&ii,&jj)) { + + CV_MAT_ELEM(*f,uchar,ii,jj) = KNOWN; + for(q=0; q<4; q++) { + if (q==0) {i=ii-1; j=jj;} + else if(q==1) {i=ii; j=jj-1;} + else if(q==2) {i=ii+1; j=jj;} + else if(q==3) {i=ii; j=jj+1;} + if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue; + + if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) { + dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t), + FastMarching_solve(i+1,j,i,j-1,f,t), + FastMarching_solve(i-1,j,i,j+1,f,t), + FastMarching_solve(i+1,j,i,j+1,f,t)); + CV_MAT_ELEM(*t,float,i,j) = dist; + + { + CvPoint2D32f gradI,r; + float Ia=0,s=1.0e-20f,w,dst,dir; + + for (k=i-range; k<=i+range; k++) { + int km=k-1+(k==1),kp=k-1-(k==t->rows-2); + for (l=j-range; l<=j+range; l++) { + int lm=l-1+(l==1),lp=l-1-(l==t->cols-2); + if (k>0&&l>0&&krows-1&&lcols-1) { + if ((CV_MAT_ELEM(*f,uchar,k,l)!=INSIDE)&& + ((l-j)*(l-j)+(k-i)*(k-i)<=range*range)) { + r.y=(float)(i-k); + r.x=(float)(j-l); + + dst = 1/(VectorLength(r)*VectorLength(r)+1); + + if (CV_MAT_ELEM(*f,uchar,k+1,l)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,k-1,l)!=INSIDE) { + // gradI.x=(float)(abs(CV_MAT_ELEM(*out,uchar,kp+1,lm)-CV_MAT_ELEM(*out,uchar,kp,lm))+ + // abs(CV_MAT_ELEM(*out,uchar,kp,lm)-CV_MAT_ELEM(*out,uchar,km-1,lm))); + 
gradI.x=(float)(abs(CV_MAT_ELEM(*out,uint16_t,kp+1,lm)-CV_MAT_ELEM(*out,uint16_t,kp,lm))+ + abs(CV_MAT_ELEM(*out,uint16_t,kp,lm)-CV_MAT_ELEM(*out,uint16_t,km-1,lm))); + } else { + // gradI.x=(float)(abs(CV_MAT_ELEM(*out,uchar,kp+1,lm)-CV_MAT_ELEM(*out,uchar,kp,lm)))*2.0f; + gradI.x=(float)(abs(CV_MAT_ELEM(*out,uint16_t,kp+1,lm)-CV_MAT_ELEM(*out,uint16_t,kp,lm)))*2.0f; + } + } else { + if (CV_MAT_ELEM(*f,uchar,k-1,l)!=INSIDE) { + // gradI.x=(float)(abs(CV_MAT_ELEM(*out,uchar,kp,lm)-CV_MAT_ELEM(*out,uchar,km-1,lm)))*2.0f; + gradI.x=(float)(abs(CV_MAT_ELEM(*out,uint16_t,kp,lm)-CV_MAT_ELEM(*out,uint16_t,km-1,lm)))*2.0f; + } else { + gradI.x=0; + } + } + if (CV_MAT_ELEM(*f,uchar,k,l+1)!=INSIDE) { + if (CV_MAT_ELEM(*f,uchar,k,l-1)!=INSIDE) { + // gradI.y=(float)(abs(CV_MAT_ELEM(*out,uchar,km,lp+1)-CV_MAT_ELEM(*out,uchar,km,lm))+ + // abs(CV_MAT_ELEM(*out,uchar,km,lm)-CV_MAT_ELEM(*out,uchar,km,lm-1))); + gradI.y=(float)(abs(CV_MAT_ELEM(*out,uint16_t,km,lp+1)-CV_MAT_ELEM(*out,uint16_t,km,lm))+ + abs(CV_MAT_ELEM(*out,uint16_t,km,lm)-CV_MAT_ELEM(*out,uint16_t,km,lm-1))); + } else { + // gradI.y=(float)(abs(CV_MAT_ELEM(*out,uchar,km,lp+1)-CV_MAT_ELEM(*out,uchar,km,lm)))*2.0f; + gradI.y=(float)(abs(CV_MAT_ELEM(*out,uint16_t,km,lp+1)-CV_MAT_ELEM(*out,uint16_t,km,lm)))*2.0f; + } + } else { + if (CV_MAT_ELEM(*f,uchar,k,l-1)!=INSIDE) { + // gradI.y=(float)(abs(CV_MAT_ELEM(*out,uchar,km,lm)-CV_MAT_ELEM(*out,uchar,km,lm-1)))*2.0f; + gradI.y=(float)(abs(CV_MAT_ELEM(*out,uint16_t,km,lm)-CV_MAT_ELEM(*out,uint16_t,km,lm-1)))*2.0f; + } else { + gradI.y=0; + } + } + + gradI.x=-gradI.x; + dir=VectorScalMult(r,gradI); + + if (fabs(dir)<=0.01) { + dir=0.000001f; + } else { + dir = (float)fabs(VectorScalMult(r,gradI)/sqrt(VectorLength(r)*VectorLength(gradI))); + } + w = dst*dir; + // Ia += (float)w * (float)(CV_MAT_ELEM(*out,uchar,km,lm)); + Ia += (float)w * (float)(CV_MAT_ELEM(*out,uint16_t,km,lm)); + s += w; + } + } + } + } + // CV_MAT_ELEM(*out,uchar,i-1,j-1) = 
cv::saturate_cast((double)Ia/s); + CV_MAT_ELEM(*out,uint16_t,i-1,j-1) = cv::saturate_cast((double)Ia/s); + } + + CV_MAT_ELEM(*f,uchar,i,j) = BAND; + Heap->Push(i,j,dist); + } + } + } + + } +} + +#define SET_BORDER1_C1(image,type,value) {\ + int i,j;\ + for(j=0; jcols; j++) {\ + CV_MAT_ELEM(*image,type,0,j) = value;\ + }\ + for (i=1; irows-1; i++) {\ + CV_MAT_ELEM(*image,type,i,0) = CV_MAT_ELEM(*image,type,i,image->cols-1) = value;\ + }\ + for(j=0; jcols; j++) {\ + CV_MAT_ELEM(*image,type,erows-1,j) = value;\ + }\ + } + +#define COPY_MASK_BORDER1_C1(src,dst,type) {\ + int i,j;\ + for (i=0; irows; i++) {\ + for(j=0; jcols; j++) {\ + if (CV_MAT_ELEM(*src,type,i,j)!=0)\ + CV_MAT_ELEM(*dst,type,i+1,j+1) = INSIDE;\ + }\ + }\ + } + + +void +_cvInpaint( const CvArr* _input_img, const CvArr* _inpaint_mask, CvArr* _output_img, + double inpaintRange, int flags ) +{ + cv::Ptr mask, band, f, t, out; + cv::Ptr Heap, Out; + IplConvKernel *el_cross, *el_range; + + CvMat input_hdr, mask_hdr, output_hdr; + CvMat* input_img, *inpaint_mask, *output_img; + int range=cvRound(inpaintRange); + int erows, ecols; + + input_img = cvGetMat( _input_img, &input_hdr ); + inpaint_mask = cvGetMat( _inpaint_mask, &mask_hdr ); + output_img = cvGetMat( _output_img, &output_hdr ); + + if( !CV_ARE_SIZES_EQ(input_img,output_img) || !CV_ARE_SIZES_EQ(input_img,inpaint_mask)) + CV_Error( CV_StsUnmatchedSizes, "All the input and output images must have the same size" ); + + if( (CV_MAT_TYPE(input_img->type) != CV_16UC1) || + !CV_ARE_TYPES_EQ(input_img,output_img) ) + CV_Error( CV_StsUnsupportedFormat, + "Only 8-bit 1-channel and 3-channel input/output images are supported" ); + + if( CV_MAT_TYPE(inpaint_mask->type) != CV_8UC1 ) + CV_Error( CV_StsUnsupportedFormat, "The mask must be 8-bit 1-channel image" ); + + range = MAX(range,1); + range = MIN(range,100); + + ecols = input_img->cols + 2; + erows = input_img->rows + 2; + + f = cvCreateMat(erows, ecols, CV_8UC1); + t = cvCreateMat(erows, ecols, CV_32FC1); 
+ band = cvCreateMat(erows, ecols, CV_8UC1); + mask = cvCreateMat(erows, ecols, CV_8UC1); + el_cross = cvCreateStructuringElementEx(3,3,1,1,CV_SHAPE_CROSS,NULL); + + cvCopy( input_img, output_img ); + cvSet(mask,cvScalar(KNOWN,0,0,0)); + COPY_MASK_BORDER1_C1(inpaint_mask,mask,uchar); + SET_BORDER1_C1(mask,uchar,0); + cvSet(f,cvScalar(KNOWN,0,0,0)); + cvSet(t,cvScalar(1.0e6f,0,0,0)); + cvDilate(mask,band,el_cross,1); // image with narrow band + cvReleaseStructuringElement(&el_cross); + Heap=new CvPriorityQueueFloat; + if (!Heap->Init(band)) + return; + cvSub(band,mask,band,NULL); + SET_BORDER1_C1(band,uchar,0); + if (!Heap->Add(band)) + return; + cvSet(f,cvScalar(BAND,0,0,0),band); + cvSet(f,cvScalar(INSIDE,0,0,0),mask); + cvSet(t,cvScalar(0,0,0,0),band); + + if( flags == CV_INPAINT_TELEA ) + { + out = cvCreateMat(erows, ecols, CV_8UC1); + el_range = cvCreateStructuringElementEx(2*range+1,2*range+1, + range,range,CV_SHAPE_RECT,NULL); + cvDilate(mask,out,el_range,1); + cvReleaseStructuringElement(&el_range); + cvSub(out,mask,out,NULL); + Out=new CvPriorityQueueFloat; + if (!Out->Init(out)) + return; + if (!Out->Add(band)) + return; + cvSub(out,band,out,NULL); + SET_BORDER1_C1(out,uchar,0); + icvCalcFMM(out,t,Out,true); + icvTeleaInpaintFMM(mask,t,output_img,range,Heap); + } + else if (flags == CV_INPAINT_NS) { + icvNSInpaintFMM(mask,t,output_img,range,Heap); + } else { + CV_Error( CV_StsBadArg, "The flags argument must be one of CV_INPAINT_TELEA or CV_INPAINT_NS" ); + } +} + +CvMat ToCvMat(const cv::Mat& m) +{ + CV_DbgAssert(m.dims <= 2); + CvMat dst = cvMat(m.rows, m.dims == 1 ? 
1 : m.cols, m.type(), m.data); + dst.step = (int)m.step[0]; + dst.type = (dst.type & ~cv::Mat::CONTINUOUS_FLAG) | (m.flags & cv::Mat::CONTINUOUS_FLAG); + return dst; +} + +void _inpaint( InputArray _src, InputArray _mask, OutputArray _dst, + double inpaintRange, int flags ) +{ + Mat src = _src.getMat(), mask = _mask.getMat(); + _dst.create( src.size(), src.type() ); + CvMat c_src = ToCvMat(src), c_mask = ToCvMat(mask), c_dst = ToCvMat(_dst.getMat()); + _cvInpaint( &c_src, &c_mask, &c_dst, inpaintRange, flags ); +} + +////////////////////////////////////////////////////////////////////////////////////// + +cv::Mat DepthInpainter::genValidMask(const cv::Mat& depth) +{ + cv::Mat orgMask = (depth == 0); + // cv::Mat mask = orgMask.clone(); + cv::Mat mask = orgMask; + + cv::Mat kernel = cv::Mat::zeros(_kernelSize, _kernelSize, CV_8U); + cv::circle(kernel, cv::Point(kernel.cols/2, kernel.rows/2), kernel.rows/2, cv::Scalar(255), -1); + cv::erode(orgMask, mask, kernel); + cv::dilate(mask, mask, kernel); + + gSpeckleFilter.Compute(mask, 0, _maxInternalHoleToBeFilled, 1); + + // revert mask + mask = mask == 0; + + return mask; +} + +void DepthInpainter::inpaint(const cv::Mat& depth, cv::Mat& out, const cv::Mat& mask) +{ + cv::Mat newDepth; + cv::Mat _mask = mask.empty() ? 
(depth == 0) : mask; + if(depth.type() == CV_8U || depth.type() == CV_8UC3){ + cv::inpaint(depth, _mask, newDepth, _inpaintRadius, cv::INPAINT_TELEA); + } else if(depth.type() == CV_16U){ + _inpaint(depth, _mask, newDepth, _inpaintRadius, cv::INPAINT_TELEA); + } + + if(mask.empty() && !_fillAll){ + // gen masked image + cv::Mat mask = genValidMask(depth); + out = cv::Mat::zeros(depth.size(), CV_16U); + newDepth.copyTo(out, mask); + } else { + out = newDepth; + } +} +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/DepthInpainter.hpp b/image_capture/third_party/percipio/common/DepthInpainter.hpp new file mode 100644 index 0000000..7ad915e --- /dev/null +++ b/image_capture/third_party/percipio/common/DepthInpainter.hpp @@ -0,0 +1,36 @@ +#ifndef XYZ_INPAINTER_HPP_ +#define XYZ_INPAINTER_HPP_ + +#ifdef OPENCV_DEPENDENCIES + +#include +#include "ImageSpeckleFilter.hpp" + +//#warn("DepthInpainter this design no longer supported by new opencv version, using opencv inpaint api for alternative") + + +class DepthInpainter +{ +public: + int _kernelSize; + int _maxInternalHoleToBeFilled; + double _inpaintRadius; + bool _fillAll; + + + DepthInpainter() + : _kernelSize(5) + , _maxInternalHoleToBeFilled(50) + , _inpaintRadius(1) + , _fillAll(true) + { + } + + void inpaint(const cv::Mat& inputDepth, cv::Mat& out, const cv::Mat& mask); + +private: + cv::Mat genValidMask(const cv::Mat& depth); +}; + +#endif +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/DepthRender.hpp b/image_capture/third_party/percipio/common/DepthRender.hpp new file mode 100644 index 0000000..26a1082 --- /dev/null +++ b/image_capture/third_party/percipio/common/DepthRender.hpp @@ -0,0 +1,249 @@ +#ifndef PERCIPIO_SAMPLE_COMMON_DEPTH_RENDER_HPP_ +#define PERCIPIO_SAMPLE_COMMON_DEPTH_RENDER_HPP_ + +#ifdef OPENCV_DEPENDENCIES +#include +#ifndef CV_VERSION_EPOCH +#if defined (CV_MAJOR_VERSION) && (CV_VERSION_MAJOR == 4) +#include 
+#include +#endif +#endif +#include +#include + + +class DepthRender { +public: + enum OutputColorType { + COLORTYPE_RAINBOW = 0, + COLORTYPE_BLUERED = 1, + COLORTYPE_GRAY = 2 + }; + + enum ColorRangeMode { + COLOR_RANGE_ABS = 0, + COLOR_RANGE_DYNAMIC = 1 + }; + + DepthRender() : needResetColorTable(true) + , color_type(COLORTYPE_BLUERED) + , range_mode(COLOR_RANGE_DYNAMIC) + , min_distance(0) + , max_distance(0) + , invalid_label(0) + {} + + void SetColorType( OutputColorType ct = COLORTYPE_BLUERED ){ + if(ct != color_type){ + needResetColorTable = true; + color_type = ct; + } + } + + void SetRangeMode( ColorRangeMode rm = COLOR_RANGE_DYNAMIC ){ + if(range_mode != rm){ + needResetColorTable = true; + range_mode = rm; + } + } + + /// for abs mode + void SetColorRange(int minDis, int maxDis){ + min_distance = minDis; + max_distance = maxDis; + } + + /// input 16UC1 output 8UC3 + void Compute(const cv::Mat &src, cv::Mat& dst ){ + dst = Compute(src); + } + cv::Mat Compute(const cv::Mat &src){ + cv::Mat src16U; + if(src.type() != CV_16U){ + src.convertTo(src16U, CV_16U); + }else{ + src16U = src; + } + + if(needResetColorTable){ + BuildColorTable(); + needResetColorTable = false; + } + + cv::Mat dst; + filtered_mask = (src16U == invalid_label); + clr_disp = src16U.clone(); + if(COLOR_RANGE_ABS == range_mode) { + TruncValue(clr_disp, filtered_mask, min_distance, max_distance); + clr_disp -= min_distance; + clr_disp = clr_disp * 255 / (max_distance - min_distance); + clr_disp.convertTo(clr_disp, CV_8UC1); + } else { + unsigned short vmax, vmin; + HistAdjustRange(clr_disp, invalid_label, min_distance, vmin, vmax); + clr_disp = (clr_disp - vmin) * 255 / (vmax - vmin); + //clr_disp = 255 - clr_disp; + clr_disp.convertTo(clr_disp, CV_8UC1); + } + + switch (color_type) { + case COLORTYPE_GRAY: + clr_disp = 255 - clr_disp; + cv::cvtColor(clr_disp, dst, cv::COLOR_GRAY2BGR); + break; + case COLORTYPE_BLUERED: + //temp = 255 - clr_disp; + CalcColorMap(clr_disp, dst); + 
//cv::applyColorMap(temp, color_img, cv::COLORMAP_COOL); + break; + case COLORTYPE_RAINBOW: + //cv::cvtColor(color_img, color_img, CV_GRAY2BGR); + cv::applyColorMap(clr_disp, dst, cv::COLORMAP_RAINBOW); + break; + } + ClearInvalidArea(dst, filtered_mask); + + return dst; + } + +private: + void CalcColorMap(const cv::Mat &src, cv::Mat &dst){ + std::vector &table = _color_lookup_table; + assert(table.size() == 256); + assert(!src.empty()); + assert(src.type() == CV_8UC1); + dst.create(src.size(), CV_8UC3); + const unsigned char* sptr = src.ptr(); + unsigned char* dptr = dst.ptr(); + for (int i = src.size().area(); i != 0; i--) { + cv::Scalar &v = table[*sptr]; + dptr[0] = (unsigned char)v.val[0]; + dptr[1] = (unsigned char)v.val[1]; + dptr[2] = (unsigned char)v.val[2]; + dptr += 3; + sptr += 1; + } + } + void BuildColorTable(){ + _color_lookup_table.resize(256); + cv::Scalar from(50, 0, 0xff), to(50, 200, 255); + for (int i = 0; i < 128; i++) { + float a = (float)i / 128; + cv::Scalar &v = _color_lookup_table[i]; + for (int j = 0; j < 3; j++) { + v.val[j] = from.val[j] * (1 - a) + to.val[j] * a; + } + } + from = to; + to = cv::Scalar(255, 104, 0); + for (int i = 128; i < 256; i++) { + float a = (float)(i - 128) / 128; + cv::Scalar &v = _color_lookup_table[i]; + for (int j = 0; j < 3; j++) { + v.val[j] = from.val[j] * (1 - a) + to.val[j] * a; + } + } + } + //keep value in range + void TruncValue(cv::Mat &img, cv::Mat &mask, short min_val, short max_val){ + assert(max_val >= min_val); + assert(img.type() == CV_16SC1); + assert(mask.type() == CV_8UC1); + short* ptr = img.ptr(); + unsigned char* mask_ptr = mask.ptr(); + for (int i = img.size().area(); i != 0; i--) { + if (*ptr > max_val) { + *ptr = max_val; + *mask_ptr = 0xff; + } else if (*ptr < min_val) { + *ptr = min_val; + *mask_ptr = 0xff; + } + ptr++; + mask_ptr++; + } + } + void ClearInvalidArea(cv::Mat &clr_disp, cv::Mat &filtered_mask){ + assert(clr_disp.type() == CV_8UC3); + assert(filtered_mask.type() == 
CV_8UC1); + assert(clr_disp.size().area() == filtered_mask.size().area()); + unsigned char* filter_ptr = filtered_mask.ptr(); + unsigned char* ptr = clr_disp.ptr(); + int len = clr_disp.size().area(); + for (int i = 0; i < len; i++) { + if (*filter_ptr != 0) { + ptr[0] = ptr[1] = ptr[2] = 0; + } + filter_ptr++; + ptr += 3; + } + } + void HistAdjustRange(const cv::Mat &dist, ushort invalid, int min_display_distance_range + , ushort &min_val, ushort &max_val) { + std::map hist; + int sz = dist.size().area(); + const ushort* ptr = dist.ptr < ushort>(); + int total_num = 0; + for (int idx = sz; idx != 0; idx--, ptr++) { + if (invalid == *ptr) { + continue; + } + total_num++; + if (hist.find(*ptr) != hist.end()) { + hist[*ptr]++; + } else { + hist.insert(std::make_pair(*ptr, 1)); + } + } + if (hist.empty()) { + min_val = 0; + max_val = 2000; + return; + } + const int delta = total_num * 0.01; + int sum = 0; + min_val = hist.begin()->first; + for (std::map::iterator it = hist.begin(); it != hist.end();it++){ + sum += it->second; + if (sum > delta) { + min_val = it->first; + break; + } + } + + sum = 0; + max_val = hist.rbegin()->first; + for (std::map::reverse_iterator s = hist.rbegin() + ; s != hist.rend(); s++) { + sum += s->second; + if (sum > delta) { + max_val = s->first; + break; + } + } + + const int min_display_dist = min_display_distance_range; + if (max_val - min_val < min_display_dist) { + int m = (max_val + min_val) / 2; + max_val = m + min_display_dist / 2; + min_val = m - min_display_dist / 2; + if (min_val < 0) { + min_val = 0; + } + } + } + + bool needResetColorTable; + OutputColorType color_type; + ColorRangeMode range_mode; + int min_distance; + int max_distance; + uint16_t invalid_label; + cv::Mat clr_disp ; + cv::Mat filtered_mask; + std::vector _color_lookup_table; +}; + +#endif +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/ImageSpeckleFilter.cpp 
b/image_capture/third_party/percipio/common/ImageSpeckleFilter.cpp new file mode 100644 index 0000000..3ded5eb --- /dev/null +++ b/image_capture/third_party/percipio/common/ImageSpeckleFilter.cpp @@ -0,0 +1,120 @@ + +#include "ImageSpeckleFilter.hpp" +#include +#include + +#ifdef WIN32 +#include +#endif + +#ifdef OPENCV_DEPENDENCIES +struct Point2s { + Point2s(short _x, short _y) { + x = _x; + y = _y; + } + short x, y; +}; + +template +void filterSpecklesImpl(cv::Mat& img, int newVal, int maxSpeckleSize, int maxDiff, std::vector &_buf) { + int width = img.cols, height = img.rows; + int npixels = width * height;//number of pixels + size_t bufSize = npixels * (int)(sizeof(Point2s) + sizeof(int) + sizeof(uint8_t));//all pixel buffer + if (_buf.size() < bufSize) { + _buf.resize((int)bufSize); + } + + uint8_t* buf = (uint8_t*)(&_buf[0]); + int i, j, dstep = img.cols;//(int)(img.step / sizeof(T)); + int* labels = (int*)buf; + buf += npixels * sizeof(labels[0]); + Point2s* wbuf = (Point2s*)buf; + buf += npixels * sizeof(wbuf[0]); + uint8_t* rtype = (uint8_t*)buf; + int curlabel = 0; + + // clear out label assignments + memset(labels, 0, npixels * sizeof(labels[0])); + + for (i = 0; i < height; i++) { + T* ds = img.ptr(i); + int* ls = labels + width * i;//label ptr for a row + + for (j = 0; j < width; j++) { + if (ds[j] != newVal) { // not a bad disparity + if (ls[j]) { // has a label, check for bad label + if (rtype[ls[j]]) // small region, zero out disparity + ds[j] = (T)newVal; + } + // no label, assign and propagate + else { + Point2s* ws = wbuf; // initialize wavefront + Point2s p((short)j, (short)i); // current pixel + curlabel++; // next label + int count = 0; // current region size + ls[j] = curlabel; + + // wavefront propagation + while (ws >= wbuf) { // wavefront not empty + count++; + // put neighbors onto wavefront + T* dpp = &img.ptr(p.y)[p.x]; + T dp = *dpp; + int* lpp = labels + width * p.y + p.x; + + if (p.x < width - 1 && !lpp[+1] && dpp[+1] != newVal && 
std::abs(dp - dpp[+1]) <= maxDiff) { + lpp[+1] = curlabel; + *ws++ = Point2s(p.x + 1, p.y); + } + + if (p.x > 0 && !lpp[-1] && dpp[-1] != newVal && std::abs(dp - dpp[-1]) <= maxDiff) { + lpp[-1] = curlabel; + *ws++ = Point2s(p.x - 1, p.y); + } + + if (p.y < height - 1 && !lpp[+width] && dpp[+dstep] != newVal && std::abs(dp - dpp[+dstep]) <= maxDiff) { + lpp[+width] = curlabel; + *ws++ = Point2s(p.x, p.y + 1); + } + + if (p.y > 0 && !lpp[-width] && dpp[-dstep] != newVal && std::abs(dp - dpp[-dstep]) <= maxDiff) { + lpp[-width] = curlabel; + *ws++ = Point2s(p.x, p.y - 1); + } + + // pop most recent and propagate + // NB: could try least recent, maybe better convergence + p = *--ws; + } + + // assign label type + if (count <= maxSpeckleSize) { // speckle region + rtype[ls[j]] = 1; // small region label + ds[j] = (T)newVal; + } else + rtype[ls[j]] = 0; // large region label + } + } + } + } +} + +//////////////////////////////////////////////////////////////////////////// + +ImageSpeckleFilter gSpeckleFilter; + +void ImageSpeckleFilter::Compute(cv::Mat &image, int newVal, int maxSpeckleSize, int maxDiff) +{ + if(image.type() == CV_8U){ + filterSpecklesImpl(image, newVal, maxSpeckleSize, maxDiff, _labelBuf); + } else if(image.type() == CV_16U){ + filterSpecklesImpl(image, newVal, maxSpeckleSize, maxDiff, _labelBuf); + } else { + char sz[10]; + sprintf(sz, "%d", image.type()); + throw std::runtime_error(std::string("ImageSpeckleFilter only support 8u and 16u, not ") + sz); + } +} + +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/ImageSpeckleFilter.hpp b/image_capture/third_party/percipio/common/ImageSpeckleFilter.hpp new file mode 100644 index 0000000..0a9782f --- /dev/null +++ b/image_capture/third_party/percipio/common/ImageSpeckleFilter.hpp @@ -0,0 +1,22 @@ +#ifndef XYZ_IMAGE_SPECKLE_FILTER_HPP_ +#define XYZ_IMAGE_SPECKLE_FILTER_HPP_ + +#ifdef OPENCV_DEPENDENCIES +#include +#include + + +class ImageSpeckleFilter +{ +public: + 
void Compute(cv::Mat &image, int newVal = 0, int maxSpeckleSize = 50, int maxDiff = 6); + +private: + std::vector _labelBuf; +}; + +extern ImageSpeckleFilter gSpeckleFilter; + +#endif + +#endif diff --git a/image_capture/third_party/percipio/common/MatViewer.cpp b/image_capture/third_party/percipio/common/MatViewer.cpp new file mode 100644 index 0000000..6f78185 --- /dev/null +++ b/image_capture/third_party/percipio/common/MatViewer.cpp @@ -0,0 +1,95 @@ +#include +#include +#include "MatViewer.hpp" + + +#ifdef OPENCV_DEPENDENCIES +int GraphicItem::globalID = 0; + + +void OpencvViewer::_onMouseCallback(int event, int x, int y, int /*flags*/, void* ustc) +{ + OpencvViewer* p = (OpencvViewer*)ustc; + + // NOTE: This callback will be called very frequently while mouse moving, + // keep it simple + + bool repaint = false; + p->onMouseCallback(p->_orgImg, event, cv::Point(x,y), repaint); + if(repaint){ + p->showImage(); + } +} + + +void OpencvViewer::showImage() +{ + _showImg = _orgImg.clone(); + for(std::map::iterator it = _items.begin() + ; it != _items.end(); it++){ + it->second->draw(_showImg); + } + cv::imshow(_win.c_str(), _showImg); + cv::setMouseCallback(_win, _onMouseCallback, this); +} + +///////////////////////////// DepthViewer /////////////////////////////////////// + + +DepthViewer::DepthViewer(const std::string& win) + : OpencvViewer(win) + , _centerDepthItem(std::string(), cv::Point(0,20), 0.5, cv::Scalar(0,255,0), 2) + , _pickedDepthItem(std::string(), cv::Point(0,40), 0.5, cv::Scalar(0,255,0), 2) +{ + OpencvViewer::addGraphicItem(&_centerDepthItem); + OpencvViewer::addGraphicItem(&_pickedDepthItem); + depth_scale_unit = 1.f; +} + + +void DepthViewer::show(const cv::Mat& img) +{ + if(img.type() != CV_16U || img.total() == 0){ + return; + } + + char str[128]; + float val = img.at(img.rows / 2, img.cols / 2)*depth_scale_unit; + sprintf(str, "Depth at center: %.1f", val); + _centerDepthItem.set(str); + + val = img.at(_fixLoc.y, _fixLoc.x)*depth_scale_unit; 
+ sprintf(str, "Depth at (%d,%d): %.1f", _fixLoc.x, _fixLoc.y , val); + _pickedDepthItem.set(str); + + _depth = img.clone(); + _renderedDepth = _render.Compute(img); + OpencvViewer::show(_renderedDepth); +} + + +void DepthViewer::onMouseCallback(cv::Mat& img, int event, const cv::Point pnt + , bool& repaint) +{ + repaint = false; + switch(event){ + case cv::EVENT_LBUTTONDOWN: { + _fixLoc = pnt; + char str[64]; + float val = _depth.at(pnt.y, pnt.x)*depth_scale_unit; + sprintf(str, "Depth at (%d,%d): %.1f", pnt.x, pnt.y, val); + printf(">>>>>>>>>>>>>>>> depth(%.1f)\n", val); + _pickedDepthItem.set(str); + repaint = true; + break; + } + case cv::EVENT_MOUSEMOVE: + // uint16_t val = _img.at(pnt.y, pnt.x); + // char str[32]; + // sprintf(str, "Depth at mouse: %d", val); + // drawText(img, str, cv::Point(0,60), 0.5, cv::Scalar(0,255,0), 2); + break; + } +} + +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/MatViewer.hpp b/image_capture/third_party/percipio/common/MatViewer.hpp new file mode 100644 index 0000000..ce6d615 --- /dev/null +++ b/image_capture/third_party/percipio/common/MatViewer.hpp @@ -0,0 +1,144 @@ +#ifndef XYZ_MAT_VIEWER_HPP_ +#define XYZ_MAT_VIEWER_HPP_ + +#ifdef OPENCV_DEPENDENCIES + +#include +#include +#include "DepthRender.hpp" + + +class GraphicItem +{ +public: + GraphicItem( + const cv::Scalar& color = cv::Scalar(255,255,255) + ) + : _id(++globalID), _color(color) {} + virtual ~GraphicItem() {} + + int id() const { return _id; } + + cv::Scalar color() const { return _color; } + void setColor(const cv::Scalar& color) { _color = color; } + + virtual void draw(cv::Mat& img) = 0; + +protected: + int _id; + cv::Scalar _color; + +private: + static int globalID; + +}; + +class GraphicRectangleItem : public GraphicItem +{ +public: + cv::Rect _rect; + + GraphicRectangleItem( + const cv::Scalar& color = cv::Scalar(255,255,255), + const cv::Rect& rect = cv::Rect() + ) + : GraphicItem(color), _rect(rect) {} + virtual 
~GraphicRectangleItem() {} + void set(const cv::Rect& rect) { _rect = rect; } + virtual void draw(cv::Mat& img){ cv::rectangle(img, _rect, color()); } +}; + +class GraphicStringItem : public GraphicItem +{ +public: + std::string _str; + cv::Point _loc; + double _scale; + int _thick; + + GraphicStringItem( + const std::string& str = std::string(), + const cv::Point loc = cv::Point(), + double scale = 0, + const cv::Scalar& color = cv::Scalar(), + int thick = 0 + ) + : GraphicItem(color), _str(str), _loc(loc), _scale(scale), _thick(thick) {} + virtual ~GraphicStringItem() {} + void set(const std::string& str) { _str = str; } + virtual void draw(cv::Mat& img){ + cv::putText(img, _str, _loc, cv::FONT_HERSHEY_SIMPLEX, _scale, _color, _thick); + } +}; + + +class OpencvViewer +{ +public: + OpencvViewer(const std::string& win) + : _win(win) + { + _has_win = 0; + //cv::namedWindow(_win); + //cv::setMouseCallback(_win, _onMouseCallback, this); + } + ~OpencvViewer() + { + if (_has_win) + { + //cv::setMouseCallback(_win, NULL, NULL); + cv::destroyWindow(_win); + } + } + + const std::string& name() const {return _win;} + + virtual void show(const cv::Mat& img) + { + _has_win = 1; + _orgImg = img.clone(); + showImage(); + } + virtual void onMouseCallback(cv::Mat& /*img*/, int /*event*/, const cv::Point /*pnt*/ + , bool& repaint) {repaint = false;} + + void addGraphicItem(GraphicItem* item) { + _items.insert(std::make_pair(item->id(), item));} + void delGraphicItem(GraphicItem* item) { _items.erase(item->id()); } + +private: + static void _onMouseCallback(int event, int x, int y, int flags, void* ustc); + + void showImage(); + + cv::Mat _orgImg; + cv::Mat _showImg; + int _has_win; + std::string _win; + std::map _items; +}; + +////////////////////////////////////////////////////////////////////////////////// + +class DepthViewer : public OpencvViewer +{ +public: + DepthViewer(const std::string& win); + virtual void show(const cv::Mat& depthImage); + virtual void 
onMouseCallback(cv::Mat& img, int event, const cv::Point pnt + , bool& repaint); + + + float depth_scale_unit; +private: + cv::Mat _depth; + cv::Mat _renderedDepth; + DepthRender _render; + GraphicStringItem _centerDepthItem; + GraphicStringItem _pickedDepthItem; + cv::Point _fixLoc; +}; + + +#endif +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/ParametersParse.cpp b/image_capture/third_party/percipio/common/ParametersParse.cpp new file mode 100644 index 0000000..6384cb8 --- /dev/null +++ b/image_capture/third_party/percipio/common/ParametersParse.cpp @@ -0,0 +1,198 @@ + +#include "ParametersParse.h" +#include "json11.hpp" + +using namespace json11; + +TY_STATUS write_int_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + if(value.is_number()) + return TYSetInt(hDevice, comp, feat, static_cast(value.number_value())); + else + return TY_STATUS_ERROR; +} + +TY_STATUS write_float_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + if(value.is_number()) + return TYSetFloat(hDevice, comp, feat, static_cast(value.number_value())); + else + return TY_STATUS_ERROR; +} + +TY_STATUS write_enum_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + if(value.is_number()) + return TYSetEnum(hDevice, comp, feat, static_cast(value.number_value())); + else + return TY_STATUS_ERROR; +} + +TY_STATUS write_bool_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + if(value.is_bool()) + return TYSetBool(hDevice, comp, feat, value.bool_value()); + else + return TY_STATUS_ERROR; +} + +bool json_parse_arrar(const Json& value, std::vector& buff) +{ + buff.clear(); + if(value.is_array()) { + size_t size = value.array_items().size(); + buff.resize(size); + for(size_t i = 0; i < size; i++) + buff[i] = static_cast(value[i].number_value()); + return 
true; + } else { + return false; + } +} + +TY_STATUS write_string_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + std::vector buff(0); + if(json_parse_arrar(value, buff)) { + buff.push_back(0); + return TYSetString(hDevice, comp, feat, &buff[0]); + } else { + return TY_STATUS_ERROR; + } +} + +TY_STATUS write_bytearray_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + std::vector buff(0); + if(json_parse_arrar(value, buff)) { + return TYSetByteArray(hDevice, comp, feat, (uint8_t*)(&buff[0]), buff.size()); + } else { + return TY_STATUS_ERROR; + } +} + +TY_STATUS write_struct_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + std::vector buff(0); + if(json_parse_arrar(value, buff)) { + return TYSetStruct(hDevice, comp, feat, (void*)(&buff[0]), buff.size()); + } else { + return TY_STATUS_ERROR; + } +} + + +TY_STATUS device_write_feature(const TY_DEV_HANDLE hDevice, TY_COMPONENT_ID comp, TY_FEATURE_ID feat, const Json& value) +{ + TY_STATUS status = TY_STATUS_OK; + TY_FEATURE_TYPE type = TYFeatureType(feat); + switch (type) + { + case TY_FEATURE_INT: + status = write_int_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_FLOAT: + status = write_float_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_ENUM: + status = write_enum_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_BOOL: + status = write_bool_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_STRING: + status = write_string_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_BYTEARRAY: + status = write_bytearray_feature(hDevice, comp, feat, value); + break; + case TY_FEATURE_STRUCT: + status = write_struct_feature(hDevice, comp, feat, value); + break; + default: + status = TY_STATUS_INVALID_FEATURE; + break; + } + return status; +} + +struct DevParam +{ + TY_COMPONENT_ID compID; + 
TY_FEATURE_ID featID; + Json feat_value; +}; + +bool isValidJsonString(const char* code) +{ + std::string err; + const auto json = Json::parse(code, err); + if(json.is_null()) return false; + return true; +} + +bool json_parse(const TY_DEV_HANDLE hDevice, const char* jscode) +{ + std::string err; + const auto json = Json::parse(jscode, err); + + Json components = json["component"]; + if(components.is_array()) { + std::vector param_list(0); + for (auto &k : components.array_items()) { + const Json& comp_id = k["id"]; + const Json& comp_desc = k["desc"]; + const Json& features = k["feature"]; + + if(!comp_id.is_string()) continue; + if(!comp_desc.is_string()) continue; + if(!features.is_array()) continue; + + const char* comp_desc_str = comp_desc.string_value().c_str(); + const char* comp_id_str = comp_id.string_value().c_str(); + + TY_COMPONENT_ID m_comp_id; + sscanf(comp_id_str,"%x",&m_comp_id); + + for (auto &f : features.array_items()) { + const Json& feat_name = f["name"]; + const Json& feat_id = f["id"]; + const Json& feat_value = f["value"]; + + if(!feat_id.is_string()) continue; + if(!feat_name.is_string()) continue; + + const char* feat_name_str = feat_name.string_value().c_str(); + const char* feat_id_str = feat_id.string_value().c_str(); + + TY_FEATURE_ID m_feat_id; + sscanf(feat_id_str,"%x",&m_feat_id); + + param_list.push_back({m_comp_id, m_feat_id, feat_value}); + } + } + + while(1) + { + size_t cnt = param_list.size(); + for(auto it = param_list.begin(); it != param_list.end(); ) + { + if(TY_STATUS_OK == device_write_feature(hDevice, it->compID, it->featID, it->feat_value)) + { + it = param_list.erase(it); + } else { + ++it; + } + } + + if(param_list.size() == 0) { + return true; + } + + if(param_list.size() == cnt) { + return false; + } + } + } + return false; +} diff --git a/image_capture/third_party/percipio/common/ParametersParse.h b/image_capture/third_party/percipio/common/ParametersParse.h new file mode 100644 index 0000000..f1eb6c7 --- 
/dev/null +++ b/image_capture/third_party/percipio/common/ParametersParse.h @@ -0,0 +1,6 @@ +#ifndef _PARAMETERS_PARSE_H_ +#define _PARAMETERS_PARSE_H_ +#include "TYApi.h" +bool isValidJsonString(const char* code); +bool json_parse(const TY_DEV_HANDLE hDevice, const char* jscode); +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/TYThread.cpp b/image_capture/third_party/percipio/common/TYThread.cpp new file mode 100644 index 0000000..9f4be08 --- /dev/null +++ b/image_capture/third_party/percipio/common/TYThread.cpp @@ -0,0 +1,83 @@ +#include "TYThread.hpp" + +#ifdef _WIN32 + +#include +class TYThreadImpl +{ +public: + TYThreadImpl() : _thread(NULL) {} + int create(TYThread::Callback_t cb, void* arg) { + DWORD dwThreadId = 0; + _thread = CreateThread( + NULL, // default security attributes + 0, // use default stack size + (LPTHREAD_START_ROUTINE)cb, // thread function name + arg, // argument to thread function + 0, // use default creation flags + &dwThreadId); // returns the thread identifier + return 0; + } + int destroy() { + // TerminateThread(_thread, 0); + switch (WaitForSingleObject(_thread, INFINITE)) + { + case WAIT_OBJECT_0: + if (CloseHandle(_thread)) { + _thread = 0; + return 0; + } + else { + return -1; + } + default: + return -2; + } + } +private: + HANDLE _thread; +}; + +#else // _WIN32 + +#include +class TYThreadImpl +{ +public: + TYThreadImpl() {} + int create(TYThread::Callback_t cb, void* arg) { + int ret = pthread_create(&_thread, NULL, cb, arg); + return ret; + } + int destroy() { + pthread_join(_thread, NULL); + return 0; + } +private: + pthread_t _thread; +}; + +#endif // _WIN32 + +//////////////////////////////////////////////////////////////////////////// + +TYThread::TYThread() +{ + impl = new TYThreadImpl(); +} + +TYThread::~TYThread() +{ + delete impl; + impl = NULL; +} + +int TYThread::create(Callback_t cb, void* arg) +{ + return impl->create(cb, arg); +} + +int TYThread::destroy() +{ + return 
impl->destroy(); +} diff --git a/image_capture/third_party/percipio/common/TYThread.hpp b/image_capture/third_party/percipio/common/TYThread.hpp new file mode 100644 index 0000000..e81e580 --- /dev/null +++ b/image_capture/third_party/percipio/common/TYThread.hpp @@ -0,0 +1,25 @@ +#ifndef XYZ_TYThread_HPP_ +#define XYZ_TYThread_HPP_ + + +class TYThreadImpl; + +class TYThread +{ +public: + typedef void* (*Callback_t)(void*); + + TYThread(); + ~TYThread(); + + int create(Callback_t cb, void* arg); + int destroy(); + +private: + TYThreadImpl* impl; +}; + + + + +#endif \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/Utils.hpp b/image_capture/third_party/percipio/common/Utils.hpp new file mode 100644 index 0000000..8eb8504 --- /dev/null +++ b/image_capture/third_party/percipio/common/Utils.hpp @@ -0,0 +1,496 @@ +#ifndef SAMPLE_COMMON_UTILS_HPP_ +#define SAMPLE_COMMON_UTILS_HPP_ + +/** + * This file excludes opencv for sample_raw. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "TYApi.h" +#include "TYThread.hpp" +#include "crc32.h" +#include "ParametersParse.h" +#include "huffman.h" + +#ifndef ASSERT +#define ASSERT(x) do{ \ + if(!(x)) { \ + LOGE("Assert failed at %s:%d", __FILE__, __LINE__); \ + LOGE(" : " #x ); \ + abort(); \ + } \ + }while(0) +#endif + +#ifndef ASSERT_OK +#define ASSERT_OK(x) do{ \ + int err = (x); \ + if(err != TY_STATUS_OK) { \ + LOGE("Assert failed: error %d(%s) at %s:%d", err, TYErrorString(err), __FILE__, __LINE__); \ + LOGE(" : " #x ); \ + abort(); \ + } \ + }while(0) +#endif + +#ifndef CHECK_RET +#define CHECK_RET(x) do{ \ + int err = (x); \ + if(err != TY_STATUS_OK) { \ + LOGD(#x " failed: error %d(%s)", err, TYErrorString(err)); \ + LOGD("at %s:%d", __FILE__, __LINE__); \ + } \ + }while(0) +#endif + +#ifdef _WIN32 +# include +# include + static inline char* getLocalTime() + { + static char local[26] = {0}; + SYSTEMTIME wtm; + struct tm tm; + 
GetLocalTime(&wtm); + tm.tm_year = wtm.wYear - 1900; + tm.tm_mon = wtm.wMonth - 1; + tm.tm_mday = wtm.wDay; + tm.tm_hour = wtm.wHour; + tm.tm_min = wtm.wMinute; + tm.tm_sec = wtm.wSecond; + tm.tm_isdst = -1; + + strftime(local, 26, "%Y-%m-%d %H:%M:%S", &tm); + + return local; + } + + static inline uint64_t getSystemTime() + { + SYSTEMTIME wtm; + struct tm tm; + GetLocalTime(&wtm); + tm.tm_year = wtm.wYear - 1900; + tm.tm_mon = wtm.wMonth - 1; + tm.tm_mday = wtm.wDay; + tm.tm_hour = wtm.wHour; + tm.tm_min = wtm.wMinute; + tm.tm_sec = wtm.wSecond; + tm. tm_isdst = -1; + return mktime(&tm) * 1000 + wtm.wMilliseconds; + } + static inline void MSleep(uint32_t ms) + { + Sleep(ms); + } +#else +# include +# include + static inline char* getLocalTime() + { + static char local[26] = {0}; + time_t time; + + struct timeval tv; + gettimeofday(&tv, NULL); + + time = tv.tv_sec; + struct tm* p_time = localtime(&time); + strftime(local, 26, "%Y-%m-%d %H:%M:%S", p_time); + + return local; + } + + static inline uint64_t getSystemTime() + { + struct timeval tv; + gettimeofday(&tv, NULL); + return tv.tv_sec*1000 + tv.tv_usec/1000; + } + static inline void MSleep(uint32_t ms) + { + usleep(ms * 1000); + } +#endif + + +#define LOGD(fmt,...) printf("%" PRIu64 " (%s) " fmt "\n", getSystemTime(), getLocalTime(), ##__VA_ARGS__) +#define LOGI(fmt,...) printf("%" PRIu64 " (%s) " fmt "\n", getSystemTime(), getLocalTime(), ##__VA_ARGS__) +#define LOGW(fmt,...) printf("%" PRIu64 " (%s) " fmt "\n", getSystemTime(), getLocalTime(), ##__VA_ARGS__) +#define LOGE(fmt,...) printf("%" PRIu64 " (%s) Error: " fmt "\n", getSystemTime(), getLocalTime(), ##__VA_ARGS__) +#define xLOGD(fmt,...) +#define xLOGI(fmt,...) +#define xLOGW(fmt,...) +#define xLOGE(fmt,...) 
+ + +#ifdef _WIN32 +# include +# define MSLEEP(x) Sleep(x) + // windows defined macro max/min +# ifdef max +# undef max +# endif +# ifdef min +# undef min +# endif +#else +# include +# include +# define MSLEEP(x) usleep((x)*1000) +#endif + +static inline const char* colorFormatName(TY_PIXEL_FORMAT fmt) +{ +#define FORMAT_CASE(a) case (a): return #a + switch(fmt){ + FORMAT_CASE(TY_PIXEL_FORMAT_UNDEFINED); + FORMAT_CASE(TY_PIXEL_FORMAT_MONO); + FORMAT_CASE(TY_PIXEL_FORMAT_RGB); + FORMAT_CASE(TY_PIXEL_FORMAT_YVYU); + FORMAT_CASE(TY_PIXEL_FORMAT_YUYV); + FORMAT_CASE(TY_PIXEL_FORMAT_DEPTH16); + FORMAT_CASE(TY_PIXEL_FORMAT_BAYER8GB); + FORMAT_CASE(TY_PIXEL_FORMAT_BAYER8BG); + FORMAT_CASE(TY_PIXEL_FORMAT_BAYER8GR); + FORMAT_CASE(TY_PIXEL_FORMAT_BAYER8RG); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_MONO10); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER10GBRG); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER10BGGR); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER10GRBG); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER10RGGB); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_MONO12); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER12GBRG); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER12BGGR); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER12GRBG); + FORMAT_CASE(TY_PIXEL_FORMAT_CSI_BAYER12RGGB); + FORMAT_CASE(TY_PIXEL_FORMAT_BGR); + FORMAT_CASE(TY_PIXEL_FORMAT_JPEG); + FORMAT_CASE(TY_PIXEL_FORMAT_MJPG); + default: return "UNKNOWN FORMAT"; + } +#undef FORMAT_CASE +} + + +static inline const TY_IMAGE_DATA* TYImageInFrame(const TY_FRAME_DATA& frame + , const TY_COMPONENT_ID comp) +{ + for(int i = 0; i < frame.validCount; i++){ + if(frame.image[i].componentID == comp){ + return &frame.image[i]; + } + } + return NULL; +} +static void *updateThreadFunc(void *userdata) +{ + TY_INTERFACE_HANDLE iface = (TY_INTERFACE_HANDLE)userdata; + TYUpdateDeviceList(iface); + return NULL; +} + +static TY_STATUS updateDevicesParallel(std::vector &ifaces, + uint64_t timeout=2000) +{ + if(ifaces.size() != 0) { + TYThread *updateThreads = new TYThread[ifaces.size()]; + 
for(int i = 0; i < ifaces.size(); i++) { + updateThreads[i].create(updateThreadFunc, ifaces[i]); + } + for(int i = 0; i < ifaces.size(); i++) { + updateThreads[i].destroy(); + } + delete [] updateThreads; + updateThreads = NULL; + } + return TY_STATUS_OK; +} + +static inline TY_STATUS selectDevice(TY_INTERFACE_TYPE iface + , const std::string& ID, const std::string& IP + , uint32_t deviceNum, std::vector& out) +{ + LOGD("Update interface list"); + ASSERT_OK( TYUpdateInterfaceList() ); + + uint32_t n = 0; + ASSERT_OK( TYGetInterfaceNumber(&n) ); + LOGD("Got %u interface list", n); + if(n == 0){ + LOGE("interface number incorrect"); + return TY_STATUS_ERROR; + } + + std::vector ifaces(n); + ASSERT_OK( TYGetInterfaceList(&ifaces[0], n, &n) ); + ASSERT( n == ifaces.size() ); + for(uint32_t i = 0; i < n; i++){ + LOGI("Found interface %u:", i); + LOGI(" name: %s", ifaces[i].name); + LOGI(" id: %s", ifaces[i].id); + LOGI(" type: 0x%x", ifaces[i].type); + if(TYIsNetworkInterface(ifaces[i].type)){ + LOGI(" MAC: %s", ifaces[i].netInfo.mac); + LOGI(" ip: %s", ifaces[i].netInfo.ip); + LOGI(" netmask: %s", ifaces[i].netInfo.netmask); + LOGI(" gateway: %s", ifaces[i].netInfo.gateway); + LOGI(" broadcast: %s", ifaces[i].netInfo.broadcast); + } + } + + out.clear(); + std::vector ifaceTypeList; + std::vector hIfaces; + ifaceTypeList.push_back(TY_INTERFACE_USB); + ifaceTypeList.push_back(TY_INTERFACE_ETHERNET); + ifaceTypeList.push_back(TY_INTERFACE_IEEE80211); + for(size_t t = 0; t < ifaceTypeList.size(); t++){ + for(uint32_t i = 0; i < ifaces.size(); i++){ + if(ifaces[i].type == ifaceTypeList[t] && (ifaces[i].type & iface) && deviceNum > out.size()){ + TY_INTERFACE_HANDLE hIface; + ASSERT_OK( TYOpenInterface(ifaces[i].id, &hIface) ); + hIfaces.push_back(hIface); + } + } + } + updateDevicesParallel(hIfaces); + for (uint32_t i = 0; i < hIfaces.size(); i++) { + TY_INTERFACE_HANDLE hIface = hIfaces[i]; + uint32_t n = 0; + TYGetDeviceNumber(hIface, &n); + if(n > 0){ + std::vector 
devs(n); + TYGetDeviceList(hIface, &devs[0], n, &n); + for(uint32_t j = 0; j < n; j++){ + if(deviceNum > out.size() && ((ID.empty() && IP.empty()) + || (!ID.empty() && devs[j].id == ID) + || (!IP.empty() && IP == devs[j].netInfo.ip))) + { + if (devs[j].iface.type == TY_INTERFACE_ETHERNET || devs[j].iface.type == TY_INTERFACE_IEEE80211) { + LOGI("*** Select %s on %s, ip %s", devs[j].id, devs[j].iface.id, devs[j].netInfo.ip); + } else { + LOGI("*** Select %s on %s", devs[j].id, devs[j].iface.id); + } + out.push_back(devs[j]); + } + } + } + TYCloseInterface(hIface); + } + + if(out.size() == 0){ + LOGE("not found any device"); + return TY_STATUS_ERROR; + } + + return TY_STATUS_OK; +} + +static inline TY_STATUS get_feature_enum_list(TY_DEV_HANDLE handle, + TY_COMPONENT_ID compID, + TY_FEATURE_ID featID, + std::vector &feature_info){ + uint32_t n = 0; + ASSERT_OK(TYGetEnumEntryCount(handle, compID, featID, &n)); + LOGD("=== %14s: entry count %d", "", n); + feature_info.clear(); + if (n == 0){ + return TY_STATUS_ERROR; + } + feature_info.resize(n); + ASSERT_OK(TYGetEnumEntryInfo(handle, compID, featID, &feature_info[0], n, &n)); + return TY_STATUS_OK; +} + +static inline TY_STATUS get_image_mode(TY_DEV_HANDLE handle + , TY_COMPONENT_ID compID + , TY_IMAGE_MODE &image_mode, int idx) +{ + std::vector image_mode_list; + ASSERT_OK(get_feature_enum_list(handle, compID, TY_ENUM_IMAGE_MODE, image_mode_list)); + if (image_mode_list.size() == 0 || idx < 0 + || idx > image_mode_list.size() -1){ + return TY_STATUS_ERROR; + } + image_mode = image_mode_list[idx].value; + return TY_STATUS_OK; +} + +static inline TY_STATUS get_default_image_mode(TY_DEV_HANDLE handle + , TY_COMPONENT_ID compID + , TY_IMAGE_MODE &image_mode) +{ + return get_image_mode(handle, compID, image_mode, 0); +} + +enum EncodingType : uint32_t +{ + HUFFMAN = 0, +}; +//10MB +#define MAX_STORAGE_SIZE (10*1024*1024) + +static inline TY_STATUS clear_storage(const TY_DEV_HANDLE handle) +{ + uint32_t block_size; + 
ASSERT_OK( TYGetByteArraySize(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, &block_size) ); + + uint8_t* blocks = new uint8_t[MAX_STORAGE_SIZE] (); + ASSERT_OK( TYSetByteArray(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, blocks, block_size) ); + + delete []blocks; + return TY_STATUS_OK; +} + +static inline TY_STATUS load_parameters_from_storage(const TY_DEV_HANDLE handle, std::string& js) +{ + uint32_t block_size; + uint8_t* blocks = new uint8_t[MAX_STORAGE_SIZE] (); + ASSERT_OK( TYGetByteArraySize(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, &block_size) ); + ASSERT_OK( TYGetByteArray(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, blocks, block_size) ); + + uint32_t crc_data = *(uint32_t*)blocks; + if(0 == crc_data || 0xffffffff == crc_data) { + LOGE("The CRC check code is empty."); + delete []blocks; + return TY_STATUS_ERROR; + } + + uint32_t crc; + uint8_t* js_code = blocks + 4; + crc = crc32_bitwise(js_code, strlen((const char*)js_code)); + if((crc != crc_data) || !isValidJsonString((const char*)js_code)) { + EncodingType type = *(EncodingType*)(blocks + 4); + ASSERT(type == HUFFMAN); + uint32_t huffman_size = *(uint32_t*)(blocks + 8); + uint8_t* huffman_ptr = (uint8_t*)(blocks + 12); + if(huffman_size > (MAX_STORAGE_SIZE - 12)) { + LOGE("Data length error."); + delete []blocks; + return TY_STATUS_ERROR; + } + + crc = crc32_bitwise(huffman_ptr, huffman_size); + if(crc_data != crc) { + LOGE("The data in the storage area has a CRC check error."); + delete []blocks; + return TY_STATUS_ERROR; + } + + std::string huffman_string(huffman_ptr, huffman_ptr + huffman_size); + if(!TextHuffmanDecompression(huffman_string, js)) { + LOGE("Huffman decoding error"); + delete []blocks; + return TY_STATUS_ERROR; + } + } else { + js = std::string((const char*)js_code); + } + + if(!json_parse(handle, (const char* )js.c_str())) { + LOGW("parameters load fail!"); + delete []blocks; + return TY_STATUS_ERROR; + } + + delete []blocks; + 
return TY_STATUS_OK; +} + +static inline TY_STATUS write_parameters_to_storage(const TY_DEV_HANDLE handle, const std::string& json_file) +{ + std::ifstream ifs(json_file); + if (!ifs.is_open()) { + LOGE("Unable to open file"); + return TY_STATUS_ERROR; + } + + std::stringstream buffer; + buffer << ifs.rdbuf(); + ifs.close(); + + std::string huffman_string; + if(!TextHuffmanCompression(buffer.str(), huffman_string)) { + LOGE("Huffman compression error"); + return TY_STATUS_ERROR; + } + + const char* str = huffman_string.data(); + uint32_t crc = crc32_bitwise(str, huffman_string.length()); + + uint32_t block_size; + ASSERT_OK( TYGetByteArraySize(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, &block_size) ); + if(block_size < huffman_string.length() + 12) { + LOGE("The configuration file is too large, the maximum size should not exceed 4000 bytes"); + return TY_STATUS_ERROR; + } + + uint8_t* blocks = new uint8_t[block_size] (); + *(uint32_t*)blocks = crc; + *(uint32_t*)(blocks + 4) = HUFFMAN; + *(uint32_t*)(blocks + 8) = huffman_string.length(); + memcpy((char*)blocks + 12, str, huffman_string.length()); + ASSERT_OK( TYSetByteArray(handle, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK, blocks, block_size) ); + + delete []blocks; + return TY_STATUS_OK; +} + +static inline void parse_firmware_errcode(TY_FW_ERRORCODE err_code) { + if (TY_FW_ERRORCODE_CAM0_NOT_DETECTED & err_code) { + LOGE("Left sensor Not Detected"); + } + if (TY_FW_ERRORCODE_CAM1_NOT_DETECTED & err_code) { + LOGE("Right sensor Not Detected"); + } + if (TY_FW_ERRORCODE_CAM2_NOT_DETECTED & err_code) { + LOGE("Color sensor Not Detected"); + } + if (TY_FW_ERRORCODE_POE_NOT_INIT & err_code) { + LOGE("POE init error"); + } + if (TY_FW_ERRORCODE_RECMAP_NOT_CORRECT & err_code) { + LOGE("RecMap error"); + } + if (TY_FW_ERRORCODE_LOOKUPTABLE_NOT_CORRECT & err_code) { + LOGE("Disparity error"); + } + if (TY_FW_ERRORCODE_DRV8899_NOT_INIT & err_code) { + LOGE("Motor init error"); + } + if 
(TY_FW_ERRORCODE_FOC_START_ERR & err_code) { + LOGE("Motor start failed"); + } + if (TY_FW_ERRORCODE_CONFIG_NOT_FOUND & err_code) { + LOGE("Config file not exist"); + } + if (TY_FW_ERRORCODE_CONFIG_NOT_CORRECT & err_code) { + LOGE("Broken Config file"); + } + if (TY_FW_ERRORCODE_XML_NOT_FOUND & err_code) { + LOGE("XML file not exist"); + } + if (TY_FW_ERRORCODE_XML_NOT_CORRECT & err_code) { + LOGE("XML Parse err"); + } + if (TY_FW_ERRORCODE_XML_OVERRIDE_FAILED & err_code) { + LOGE("Illegal XML file overrided, Only Used in Debug Mode!"); + } + if (TY_FW_ERRORCODE_CAM_INIT_FAILED & err_code) { + LOGE("Init default cam feature failed!"); + } + if (TY_FW_ERRORCODE_LASER_INIT_FAILED & err_code) { + LOGE("Init default laser feature failed!"); + } + +} +#endif diff --git a/image_capture/third_party/percipio/common/common.hpp b/image_capture/third_party/percipio/common/common.hpp new file mode 100644 index 0000000..b18e928 --- /dev/null +++ b/image_capture/third_party/percipio/common/common.hpp @@ -0,0 +1,539 @@ +#ifndef SAMPLE_COMMON_COMMON_HPP_ +#define SAMPLE_COMMON_COMMON_HPP_ + +#include "Utils.hpp" + +#include +#include + +#include +#include +#include + +#ifdef OPENCV_DEPENDENCIES +#include +#include "DepthRender.hpp" +#include "MatViewer.hpp" +#include "DepthInpainter.hpp" +#endif + +#include "TYThread.hpp" +#include "TyIsp.h" +#include "BayerISP.hpp" +#include "CommandLineParser.hpp" +#include "CommandLineFeatureHelper.hpp" + +static inline int decodeCsiRaw10(unsigned char* src, unsigned short* dst, int width, int height) +{ + if(width & 0x3) { + return -1; + } + int raw10_line_size = 5 * width / 4; + for(size_t i = 0, j = 0; i < raw10_line_size * height; i+=5, j+=4) + { + //[A2 - A9] | [B2 - B9] | [C2 - C9] | [D2 - D9] | [A0A1-B0B1-C0C1-D0D1] + dst[j + 0] = ((uint16_t)src[i + 0] << 2) | ((src[i + 4] & 0x3) >> 0); + dst[j + 1] = ((uint16_t)src[i + 1] << 2) | ((src[i + 4] & 0xc) >> 2); + dst[j + 2] = ((uint16_t)src[i + 2] << 2) | ((src[i + 4] & 0x30) >> 4); + dst[j 
+ 3] = ((uint16_t)src[i + 3] << 2) | ((src[i + 4] & 0xc0) >> 6); + } + return 0; +} + +static inline int decodeCsiRaw12(unsigned char* src, unsigned short* dst, int width, int height) +{ + if(width & 0x1) { + return -1; + } + int raw12_line_size = 3 * width / 2; + for(size_t i = 0, j = 0; i < raw12_line_size * height; i+=3, j+=2) + { + //[A4 - A11] | [B4 - B11] | [A0A1A2A3-B0B1B2B3] + dst[j + 0] = ((uint16_t)src[i + 0] << 4) | ((src[i + 2] & 0x0f) >> 0); + dst[j + 1] = ((uint16_t)src[i + 1] << 4) | ((src[i + 2] & 0xf0) >> 4); + } + return 0; +} + +static inline int decodeCsiRaw14(unsigned char* src, unsigned short* dst, int width, int height) +{ + if(width & 0x3) { + return -1; + } + int raw14_line_size = 7 * width / 4; + for(size_t i = 0, j = 0; i < raw14_line_size * height; i+=7, j+=4) + { + //[A6 - A13] | [B6 - B13] | [C6 - C13] | [D6 - D13] | [A0A1A2A3A4A5-B0B1] | [B2B3B4B5-C0C1C2C3] | [C4C5-D0D1D2D3D4D5] + dst[j + 0] = ((uint16_t)src[i + 0] << 6) | ((src[i + 4] & 0x3f) >> 0); + dst[j + 1] = ((uint16_t)src[i + 1] << 6) | ((src[i + 4] & 0xc0) >> 6) | ((src[i + 5] & 0x0f) << 2); + dst[j + 2] = ((uint16_t)src[i + 2] << 6) | ((src[i + 5] & 0xf0) >> 4) | ((src[i + 6] & 0x03) << 4); + dst[j + 3] = ((uint16_t)src[i + 3] << 6) | ((src[i + 6] & 0xfc) >> 2); + } + return 0; +} + +#ifdef OPENCV_DEPENDENCIES +static inline int parseCsiRaw10(unsigned char* src, cv::Mat &dst, int width, int height) +{ + cv::Mat m(height, width, CV_16U); + decodeCsiRaw10(src, (ushort*)m.data, width, height); + //convert valid 10bit from lsb to msb, d = s * 64 + dst = m * 64; + return 0; +} + +static inline int parseCsiRaw12(unsigned char* src, cv::Mat &dst, int width, int height) +{ + cv::Mat m(height, width, CV_16U); + decodeCsiRaw12(src, (ushort*)m.data, width, height); + //convert valid 12bit from lsb to msb, d = s * 16 + dst = m * 16; + return 0; +} + +static inline int parseIrFrame(const TY_IMAGE_DATA* img, cv::Mat* pIR) +{ + if (img->pixelFormat == TY_PIXEL_FORMAT_MONO16 || 
img->pixelFormat==TY_PIXEL_FORMAT_TOF_IR_MONO16){ + *pIR = cv::Mat(img->height, img->width, CV_16U, img->buffer).clone(); + } else if(img->pixelFormat == TY_PIXEL_FORMAT_CSI_MONO10) { + *pIR = cv::Mat(img->height, img->width, CV_16U); + parseCsiRaw10((uchar*)img->buffer, (*pIR), img->width, img->height); + } else if(img->pixelFormat == TY_PIXEL_FORMAT_MONO) { + *pIR = cv::Mat(img->height, img->width, CV_8U, img->buffer).clone(); + } else if(img->pixelFormat == TY_PIXEL_FORMAT_CSI_MONO12) { + *pIR = cv::Mat(img->height, img->width, CV_8U, img->buffer).clone(); + parseCsiRaw12((uchar*)img->buffer, (*pIR), img->width, img->height); + } + else { + return -1; + } + + return 0; +} + +static inline int parseBayer8Frame(const TY_IMAGE_DATA* img, cv::Mat* pColor, TY_ISP_HANDLE color_isp_handle = NULL) +{ + int code = cv::COLOR_BayerGB2BGR; + switch (img->pixelFormat) + { + case TY_PIXEL_FORMAT_BAYER8GBRG: + code = cv::COLOR_BayerGR2BGR; + break; + case TY_PIXEL_FORMAT_BAYER8BGGR: + code = cv::COLOR_BayerRG2BGR; + break; + case TY_PIXEL_FORMAT_BAYER8GRBG: + code = cv::COLOR_BayerGB2BGR; + break; + case TY_PIXEL_FORMAT_BAYER8RGGB: + code = cv::COLOR_BayerBG2BGR; + break; + default: + LOGE("Invalid bayer8 fmt!"); + return -1; + } + + if (!color_isp_handle){ + cv::Mat raw(img->height, img->width, CV_8U, img->buffer); + cv::cvtColor(raw, *pColor, code); + } + else{ + cv::Mat raw(img->height, img->width, CV_8U, img->buffer); + pColor->create(img->height, img->width, CV_8UC3); + int sz = img->height* img->width * 3; + TY_IMAGE_DATA out_buff = TYInitImageData(sz, pColor->data, img->width, img->height); + out_buff.pixelFormat = TY_PIXEL_FORMAT_BGR; + int res = TYISPProcessImage(color_isp_handle, img, &out_buff); + if (res != TY_STATUS_OK){ + //fall back to using opencv api + cv::Mat raw(img->height, img->width, CV_8U, img->buffer); + cv::cvtColor(raw, *pColor, code); + } + } + return 0; +} + +static inline int parseBayer10Frame(const TY_IMAGE_DATA* img, cv::Mat* pColor) +{ + int 
code = cv::COLOR_BayerGB2BGR; + switch (img->pixelFormat) + { + case TY_PIXEL_FORMAT_CSI_BAYER10GBRG: + code = cv::COLOR_BayerGR2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER10BGGR: + code = cv::COLOR_BayerRG2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER10GRBG: + code = cv::COLOR_BayerGB2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER10RGGB: + code = cv::COLOR_BayerBG2BGR; + break; + default: + LOGE("Invalid bayer10 fmt!"); + return -1; + } + cv::Mat raw16(img->height, img->width, CV_16U); + parseCsiRaw10((uchar*)img->buffer, raw16, img->width, img->height); + cv::cvtColor(raw16, *pColor, code); + + return 0; +} + +static inline int parseBayer12Frame(const TY_IMAGE_DATA* img, cv::Mat* pColor) +{ + int code = cv::COLOR_BayerGB2BGR; + switch (img->pixelFormat) + { + case TY_PIXEL_FORMAT_CSI_BAYER12GBRG: + code = cv::COLOR_BayerGR2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER12BGGR: + code = cv::COLOR_BayerRG2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER12GRBG: + code = cv::COLOR_BayerGB2BGR; + break; + case TY_PIXEL_FORMAT_CSI_BAYER12RGGB: + code = cv::COLOR_BayerBG2BGR; + break; + default: + LOGE("Invalid bayer12 fmt!"); + return -1; + } + cv::Mat raw16(img->height, img->width, CV_16U); + parseCsiRaw12((uchar*)img->buffer, raw16, img->width, img->height); + cv::cvtColor(raw16, *pColor, code); + + return 0; +} + +static inline int parseColorFrame(const TY_IMAGE_DATA* img, cv::Mat* pColor, TY_ISP_HANDLE color_isp_handle = NULL) +{ + int ret = 0; + if (img->pixelFormat == TY_PIXEL_FORMAT_JPEG){ + std::vector _v((uchar*)img->buffer, (uchar*)img->buffer + img->size); + *pColor = cv::imdecode(_v, cv::IMREAD_COLOR); + ASSERT(img->width == pColor->cols && img->height == pColor->rows); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_YVYU){ + cv::Mat yuv(img->height, img->width, CV_8UC2, img->buffer); + cv::cvtColor(yuv, *pColor, cv::COLOR_YUV2BGR_YVYU); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_YUYV){ + cv::Mat yuv(img->height, img->width, CV_8UC2, 
img->buffer); + cv::cvtColor(yuv, *pColor, cv::COLOR_YUV2BGR_YUYV); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_RGB){ + cv::Mat rgb(img->height, img->width, CV_8UC3, img->buffer); + cv::cvtColor(rgb, *pColor, cv::COLOR_RGB2BGR); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_BGR){ + *pColor = cv::Mat(img->height, img->width, CV_8UC3, img->buffer).clone(); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_BAYER8GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8RGGB) + { + ret = parseBayer8Frame(img, pColor, color_isp_handle); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10RGGB) + { + ret = parseBayer10Frame(img, pColor); + } + else if(img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12RGGB) + { + ret = parseBayer12Frame(img, pColor); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_MONO){ + cv::Mat gray(img->height, img->width, CV_8U, img->buffer); + cv::cvtColor(gray, *pColor, cv::COLOR_GRAY2BGR); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_CSI_MONO10){ + cv::Mat gray16(img->height, img->width, CV_16U); + parseCsiRaw10((uchar*)img->buffer, gray16, img->width, img->height); + *pColor = gray16.clone(); + } + + return ret; +} + +static inline int parseImage(const TY_IMAGE_DATA* img, cv::Mat* image, TY_ISP_HANDLE color_isp_handle = NULL) +{ + int ret = 0; + if (img->pixelFormat == TY_PIXEL_FORMAT_JPEG){ + std::vector _v((uchar*)img->buffer, (uchar*)img->buffer + img->size); + *image = cv::imdecode(_v, cv::IMREAD_COLOR); + ASSERT(img->width == image->cols && img->height == image->rows); + } + else 
if (img->pixelFormat == TY_PIXEL_FORMAT_YVYU){ + cv::Mat yuv(img->height, img->width, CV_8UC2, img->buffer); + cv::cvtColor(yuv, *image, cv::COLOR_YUV2BGR_YVYU); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_YUYV){ + cv::Mat yuv(img->height, img->width, CV_8UC2, img->buffer); + cv::cvtColor(yuv, *image, cv::COLOR_YUV2BGR_YUYV); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_RGB){ + cv::Mat rgb(img->height, img->width, CV_8UC3, img->buffer); + cv::cvtColor(rgb, *image, cv::COLOR_RGB2BGR); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_BGR){ + *image = cv::Mat(img->height, img->width, CV_8UC3, img->buffer).clone(); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_BAYER8GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_BAYER8RGGB) + { + ret = parseBayer8Frame(img, image, color_isp_handle); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER10RGGB) + { + ret = parseBayer10Frame(img, image); + } + else if(img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12GBRG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12BGGR || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12GRBG || + img->pixelFormat == TY_PIXEL_FORMAT_CSI_BAYER12RGGB) + { + ret = parseBayer12Frame(img, image); + } + else if(img->pixelFormat == TY_PIXEL_FORMAT_MONO) { + *image = cv::Mat(img->height, img->width, CV_8U, img->buffer).clone(); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_CSI_MONO10){ + cv::Mat gray16(img->height, img->width, CV_16U); + ret = parseCsiRaw10((uchar*)img->buffer, gray16, img->width, img->height); + *image = gray16.clone(); + } + else if(img->pixelFormat == TY_PIXEL_FORMAT_CSI_MONO12) { + cv::Mat gray16(img->height, img->width, CV_16U); + ret = parseCsiRaw12((uchar*)img->buffer, gray16, 
img->width, img->height); + *image = gray16.clone(); + } + else if (img->pixelFormat == TY_PIXEL_FORMAT_MONO16 || img->pixelFormat==TY_PIXEL_FORMAT_TOF_IR_MONO16){ + *image = cv::Mat(img->height, img->width, CV_16U, img->buffer).clone(); + } + else { + return -1; + } + + return ret; +} + +static inline int parseFrame(const TY_FRAME_DATA& frame, cv::Mat* pDepth + , cv::Mat* pLeftIR, cv::Mat* pRightIR + , cv::Mat* pColor, TY_ISP_HANDLE color_isp_handle = NULL) +{ + for (int i = 0; i < frame.validCount; i++){ + if (frame.image[i].status != TY_STATUS_OK) continue; + + // get depth image + if (pDepth && frame.image[i].componentID == TY_COMPONENT_DEPTH_CAM){ + if (frame.image[i].pixelFormat == TY_PIXEL_FORMAT_XYZ48) { + *pDepth = cv::Mat(frame.image[i].height, frame.image[i].width + , CV_16SC3, frame.image[i].buffer).clone(); + } + else { + *pDepth = cv::Mat(frame.image[i].height, frame.image[i].width + , CV_16U, frame.image[i].buffer).clone(); + } + } + // get left ir image + if (pLeftIR && frame.image[i].componentID == TY_COMPONENT_IR_CAM_LEFT){ + parseIrFrame(&frame.image[i], pLeftIR); + } + // get right ir image + if (pRightIR && frame.image[i].componentID == TY_COMPONENT_IR_CAM_RIGHT){ + parseIrFrame(&frame.image[i], pRightIR); + } + // get BGR + if (pColor && frame.image[i].componentID == TY_COMPONENT_RGB_CAM){ + parseColorFrame(&frame.image[i], pColor, color_isp_handle); + } + } + + return 0; +} + +enum{ + PC_FILE_FORMAT_XYZ = 0, +}; + +static void writePC_XYZ(const cv::Point3f* pnts, const cv::Vec3b *color, size_t n, FILE* fp) +{ + if (color){ + for (size_t i = 0; i < n; i++){ + if (!std::isnan(pnts[i].x)){ + fprintf(fp, "%f %f %f %d %d %d\n", pnts[i].x, pnts[i].y, pnts[i].z, color[i][0], color[i][1], color[i][2]); + } + } + } + else{ + for (size_t i = 0; i < n; i++){ + if (!std::isnan(pnts[i].x)){ + fprintf(fp, "%f %f %f 0 0 0\n", pnts[i].x, pnts[i].y, pnts[i].z); + } + } + } +} + +static void writePointCloud(const cv::Point3f* pnts, const cv::Vec3b *color, 
size_t n, const char* file, int format) +{ + FILE* fp = fopen(file, "w"); + if (!fp){ + return; + } + + switch (format){ + case PC_FILE_FORMAT_XYZ: + writePC_XYZ(pnts, color, n, fp); + break; + default: + break; + } + + fclose(fp); +} +#else + + +#endif + +class CallbackWrapper +{ +public: + typedef void(*TY_FRAME_CALLBACK) (TY_FRAME_DATA*, void* userdata); + + CallbackWrapper(){ + _hDevice = NULL; + _cb = NULL; + _userdata = NULL; + _exit = true; + } + + TY_STATUS TYRegisterCallback(TY_DEV_HANDLE hDevice, TY_FRAME_CALLBACK v, void* userdata) + { + _hDevice = hDevice; + _cb = v; + _userdata = userdata; + _exit = false; + _cbThread.create(&workerThread, this); + return TY_STATUS_OK; + } + + void TYUnregisterCallback() + { + if (!_exit) { + _exit = true; + _cbThread.destroy(); + } + } + +private: + static void* workerThread(void* userdata) + { + CallbackWrapper* pWrapper = (CallbackWrapper*)userdata; + TY_FRAME_DATA frame; + + while (!pWrapper->_exit) + { + int err = TYFetchFrame(pWrapper->_hDevice, &frame, 100); + if (!err) { + pWrapper->_cb(&frame, pWrapper->_userdata); + } + } + LOGI("frameCallback exit!"); + return NULL; + } + + TY_DEV_HANDLE _hDevice; + TY_FRAME_CALLBACK _cb; + void* _userdata; + + bool _exit; + TYThread _cbThread; +}; + + + +#ifdef _WIN32 +static int get_fps() { + static int fps_counter = 0; + static clock_t fps_tm = 0; + const int kMaxCounter = 250; + fps_counter++; + if (fps_counter < kMaxCounter) { + return -1; + } + int elapse = (clock() - fps_tm); + int v = (int)(((float)fps_counter) / elapse * CLOCKS_PER_SEC); + fps_tm = clock(); + + fps_counter = 0; + return v; + } +#else +static int get_fps() { + static int fps_counter = 0; + static clock_t fps_tm = 0; + const int kMaxCounter = 200; + struct timeval start; + fps_counter++; + if (fps_counter < kMaxCounter) { + return -1; + } + + gettimeofday(&start, NULL); + int elapse = start.tv_sec * 1000 + start.tv_usec / 1000 - fps_tm; + int v = (int)(((float)fps_counter) / elapse * 1000); + 
gettimeofday(&start, NULL); + fps_tm = start.tv_sec * 1000 + start.tv_usec / 1000; + + fps_counter = 0; + return v; +} +#endif + +static std::vector TYReadBinaryFile(const char* filename) +{ + // open the file: + std::ifstream file(filename, std::ios::binary); + if (!file.is_open()){ + return std::vector(); + } + // Stop eating new lines in binary mode!!! + file.unsetf(std::ios::skipws); + + // get its size: + std::streampos fileSize; + + file.seekg(0, std::ios::end); + fileSize = file.tellg(); + file.seekg(0, std::ios::beg); + + // reserve capacity + std::vector vec; + vec.reserve(fileSize); + + // read the data: + vec.insert(vec.begin(), + std::istream_iterator(file), + std::istream_iterator()); + + return vec; +} + +#endif diff --git a/image_capture/third_party/percipio/common/crc32.cpp b/image_capture/third_party/percipio/common/crc32.cpp new file mode 100644 index 0000000..24c588c --- /dev/null +++ b/image_capture/third_party/percipio/common/crc32.cpp @@ -0,0 +1,1245 @@ +// ////////////////////////////////////////////////////////// +// Crc32.cpp +// Copyright (c) 2011-2019 Stephan Brumme. All rights reserved. 
+// Slicing-by-16 contributed by Bulat Ziganshin +// Tableless bytewise CRC contributed by Hagai Gold +// see http://create.stephan-brumme.com/disclaimer.html +// + +// if running on an embedded system, you might consider shrinking the +// big Crc32Lookup table: +// - crc32_bitwise doesn't need it at all +// - crc32_halfbyte has its own small lookup table +// - crc32_1byte needs only Crc32Lookup[0] +// - crc32_4bytes needs only Crc32Lookup[0..3] +// - crc32_8bytes needs only Crc32Lookup[0..7] +// - crc32_4x8bytes needs only Crc32Lookup[0..7] +// - crc32_16bytes needs all of Crc32Lookup + + +#include "crc32.h" + +#ifndef __LITTLE_ENDIAN + #define __LITTLE_ENDIAN 1234 +#endif +#ifndef __BIG_ENDIAN + #define __BIG_ENDIAN 4321 +#endif + +// define endianess and some integer data types +#if defined(_MSC_VER) || defined(__MINGW32__) + // Windows always little endian + #define __BYTE_ORDER __LITTLE_ENDIAN + + // intrinsics / prefetching + #if defined(__MINGW32__) || defined(__clang__) + #define PREFETCH(location) __builtin_prefetch(location) + #else + #if defined(__SSE2__) + #include + #define PREFETCH(location) _mm_prefetch(location, _MM_HINT_T0) + #else + #define PREFETCH(location) ; + #endif + #endif +#else + // defines __BYTE_ORDER as __LITTLE_ENDIAN or __BIG_ENDIAN + #include + + // intrinsics / prefetching + #ifdef __GNUC__ + #define PREFETCH(location) __builtin_prefetch(location) + #else + // no prefetching + #define PREFETCH(location) ; + #endif +#endif + +// abort if byte order is undefined +#if !defined(__BYTE_ORDER) +#error undefined byte order, compile with -D__BYTE_ORDER=1234 (if little endian) or -D__BYTE_ORDER=4321 (big endian) +#endif + + +namespace +{ + /// zlib's CRC32 polynomial + const uint32_t Polynomial = 0xEDB88320; + +#if __BYTE_ORDER == __BIG_ENDIAN + /// swap endianess + static inline uint32_t swap(uint32_t x) + { + #if defined(__GNUC__) || defined(__clang__) + return __builtin_bswap32(x); + #else + return (x >> 24) | + ((x >> 8) & 0x0000FF00) | 
+ ((x << 8) & 0x00FF0000) | + (x << 24); + #endif + } +#endif + + /// Slicing-By-16 + #ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 + const size_t MaxSlice = 16; + #elif defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_8) + const size_t MaxSlice = 8; + #elif defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_4) + const size_t MaxSlice = 4; + #elif defined(CRC32_USE_LOOKUP_TABLE_BYTE) + const size_t MaxSlice = 1; + #else + #define NO_LUT // don't need Crc32Lookup at all + #endif + +} // anonymous namespace + +#ifndef NO_LUT +/// forward declaration, table is at the end of this file +extern const uint32_t Crc32Lookup[MaxSlice][256]; // extern is needed to keep compiler happy +#endif + + +/// compute CRC32 (bitwise algorithm) +uint32_t crc32_bitwise(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint8_t* current = (const uint8_t*) data; + + while (length-- != 0) + { + crc ^= *current++; + + for (int j = 0; j < 8; j++) + { + // branch-free + crc = (crc >> 1) ^ (-int32_t(crc & 1) & Polynomial); + + // branching, much slower: + //if (crc & 1) + // crc = (crc >> 1) ^ Polynomial; + //else + // crc = crc >> 1; + } + } + + return ~crc; // same as crc ^ 0xFFFFFFFF +} + + +/// compute CRC32 (half-byte algoritm) +uint32_t crc32_halfbyte(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint8_t* current = (const uint8_t*) data; + + /// look-up table for half-byte, same as crc32Lookup[0][16*i] + static const uint32_t Crc32Lookup16[16] = + { + 0x00000000,0x1DB71064,0x3B6E20C8,0x26D930AC,0x76DC4190,0x6B6B51F4,0x4DB26158,0x5005713C, + 0xEDB88320,0xF00F9344,0xD6D6A3E8,0xCB61B38C,0x9B64C2B0,0x86D3D2D4,0xA00AE278,0xBDBDF21C + }; + + while (length-- != 0) + { + crc = Crc32Lookup16[(crc ^ *current ) & 0x0F] ^ (crc >> 4); + crc = Crc32Lookup16[(crc ^ (*current >> 4)) & 0x0F] ^ (crc >> 4); + current++; + } + + return ~crc; // 
same as crc ^ 0xFFFFFFFF +} + + +#ifdef CRC32_USE_LOOKUP_TABLE_BYTE +/// compute CRC32 (standard algorithm) +uint32_t crc32_1byte(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint8_t* current = (const uint8_t*) data; + + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *current++]; + + return ~crc; // same as crc ^ 0xFFFFFFFF +} +#endif + + +/// compute CRC32 (byte algorithm) without lookup tables +uint32_t crc32_1byte_tableless(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint8_t* current = (const uint8_t*) data; + + while (length-- != 0) + { + uint8_t s = uint8_t(crc) ^ *current++; + + // Hagai Gold made me aware of this table-less algorithm and send me code + + // polynomial 0xEDB88320 can be written in binary as 11101101101110001000001100100000b + // reverse the bits (or just assume bit 0 is the first one) + // and we have bits set at position 0, 1, 2, 4, 5, 7, 8, 10, 11, 12, 16, 22, 23, 26 + // => those are the shift offsets: + //crc = (crc >> 8) ^ + // t ^ + // (t >> 1) ^ (t >> 2) ^ (t >> 4) ^ (t >> 5) ^ // == y + // (t >> 7) ^ (t >> 8) ^ (t >> 10) ^ (t >> 11) ^ // == y >> 6 + // (t >> 12) ^ (t >> 16) ^ // == z + // (t >> 22) ^ (t >> 26) ^ // == z >> 10 + // (t >> 23); + + // the fastest I can come up with: + uint32_t low = (s ^ (s << 6)) & 0xFF; + uint32_t a = (low * ((1 << 23) + (1 << 14) + (1 << 2))); + crc = (crc >> 8) ^ + (low * ((1 << 24) + (1 << 16) + (1 << 8))) ^ + a ^ + (a >> 1) ^ + (low * ((1 << 20) + (1 << 12) )) ^ + (low << 19) ^ + (low << 17) ^ + (low >> 2); + + // Hagai's code: + /*uint32_t t = (s ^ (s << 6)) << 24; + + // some temporaries to optimize XOR + uint32_t x = (t >> 1) ^ (t >> 2); + uint32_t y = x ^ (x >> 3); + uint32_t z = (t >> 12) ^ (t >> 16); + + crc = (crc >> 8) ^ + t ^ (t >> 23) ^ + y ^ (y >> 6) ^ + z ^ (z >> 10);*/ + } + 
+ return ~crc; // same as crc ^ 0xFFFFFFFF +} + + +/// compute CRC32 (byte algorithm) without lookup tables +uint32_t crc32_1byte_tableless2(const void* data, size_t length, uint32_t previousCrc32) +{ + int32_t crc = ~previousCrc32; // note: signed integer, right shift distributes sign bit into lower bits + const uint8_t* current = (const uint8_t*) data; + + while (length-- != 0) + { + crc = crc ^ *current++; + + uint32_t c = (((crc << 31) >> 31) & ((Polynomial >> 7) ^ (Polynomial >> 1))) ^ + (((crc << 30) >> 31) & ((Polynomial >> 6) ^ Polynomial)) ^ + (((crc << 29) >> 31) & (Polynomial >> 5)) ^ + (((crc << 28) >> 31) & (Polynomial >> 4)) ^ + (((crc << 27) >> 31) & (Polynomial >> 3)) ^ + (((crc << 26) >> 31) & (Polynomial >> 2)) ^ + (((crc << 25) >> 31) & (Polynomial >> 1)) ^ + (((crc << 24) >> 31) & Polynomial); + + crc = ((uint32_t)crc >> 8) ^ c; // convert to unsigned integer before right shift + } + + return ~crc; // same as crc ^ 0xFFFFFFFF +} + + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_4 +/// compute CRC32 (Slicing-by-4 algorithm) +uint32_t crc32_4bytes(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint32_t* current = (const uint32_t*) data; + + // process four bytes at once (Slicing-by-4) + while (length >= 4) + { +#if __BYTE_ORDER == __BIG_ENDIAN + uint32_t one = *current++ ^ swap(crc); + crc = Crc32Lookup[0][ one & 0xFF] ^ + Crc32Lookup[1][(one>> 8) & 0xFF] ^ + Crc32Lookup[2][(one>>16) & 0xFF] ^ + Crc32Lookup[3][(one>>24) & 0xFF]; +#else + uint32_t one = *current++ ^ crc; + crc = Crc32Lookup[0][(one>>24) & 0xFF] ^ + Crc32Lookup[1][(one>>16) & 0xFF] ^ + Crc32Lookup[2][(one>> 8) & 0xFF] ^ + Crc32Lookup[3][ one & 0xFF]; +#endif + + length -= 4; + } + + const uint8_t* currentChar = (const uint8_t*) current; + // remaining 1 to 3 bytes (standard algorithm) + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; + + return ~crc; // 
same as crc ^ 0xFFFFFFFF +} +#endif + + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_8 +/// compute CRC32 (Slicing-by-8 algorithm) +uint32_t crc32_8bytes(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint32_t* current = (const uint32_t*) data; + + // process eight bytes at once (Slicing-by-8) + while (length >= 8) + { +#if __BYTE_ORDER == __BIG_ENDIAN + uint32_t one = *current++ ^ swap(crc); + uint32_t two = *current++; + crc = Crc32Lookup[0][ two & 0xFF] ^ + Crc32Lookup[1][(two>> 8) & 0xFF] ^ + Crc32Lookup[2][(two>>16) & 0xFF] ^ + Crc32Lookup[3][(two>>24) & 0xFF] ^ + Crc32Lookup[4][ one & 0xFF] ^ + Crc32Lookup[5][(one>> 8) & 0xFF] ^ + Crc32Lookup[6][(one>>16) & 0xFF] ^ + Crc32Lookup[7][(one>>24) & 0xFF]; +#else + uint32_t one = *current++ ^ crc; + uint32_t two = *current++; + crc = Crc32Lookup[0][(two>>24) & 0xFF] ^ + Crc32Lookup[1][(two>>16) & 0xFF] ^ + Crc32Lookup[2][(two>> 8) & 0xFF] ^ + Crc32Lookup[3][ two & 0xFF] ^ + Crc32Lookup[4][(one>>24) & 0xFF] ^ + Crc32Lookup[5][(one>>16) & 0xFF] ^ + Crc32Lookup[6][(one>> 8) & 0xFF] ^ + Crc32Lookup[7][ one & 0xFF]; +#endif + + length -= 8; + } + + const uint8_t* currentChar = (const uint8_t*) current; + // remaining 1 to 7 bytes (standard algorithm) + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; + + return ~crc; // same as crc ^ 0xFFFFFFFF +} + + +/// compute CRC32 (Slicing-by-8 algorithm), unroll inner loop 4 times +uint32_t crc32_4x8bytes(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint32_t* current = (const uint32_t*) data; + + // enabling optimization (at least -O2) automatically unrolls the inner for-loop + const size_t Unroll = 4; + const size_t BytesAtOnce = 8 * Unroll; + + // process 4x eight bytes at once (Slicing-by-8) + while (length >= BytesAtOnce) + { + for (size_t unrolling = 0; 
unrolling < Unroll; unrolling++) + { +#if __BYTE_ORDER == __BIG_ENDIAN + uint32_t one = *current++ ^ swap(crc); + uint32_t two = *current++; + crc = Crc32Lookup[0][ two & 0xFF] ^ + Crc32Lookup[1][(two>> 8) & 0xFF] ^ + Crc32Lookup[2][(two>>16) & 0xFF] ^ + Crc32Lookup[3][(two>>24) & 0xFF] ^ + Crc32Lookup[4][ one & 0xFF] ^ + Crc32Lookup[5][(one>> 8) & 0xFF] ^ + Crc32Lookup[6][(one>>16) & 0xFF] ^ + Crc32Lookup[7][(one>>24) & 0xFF]; +#else + uint32_t one = *current++ ^ crc; + uint32_t two = *current++; + crc = Crc32Lookup[0][(two>>24) & 0xFF] ^ + Crc32Lookup[1][(two>>16) & 0xFF] ^ + Crc32Lookup[2][(two>> 8) & 0xFF] ^ + Crc32Lookup[3][ two & 0xFF] ^ + Crc32Lookup[4][(one>>24) & 0xFF] ^ + Crc32Lookup[5][(one>>16) & 0xFF] ^ + Crc32Lookup[6][(one>> 8) & 0xFF] ^ + Crc32Lookup[7][ one & 0xFF]; +#endif + + } + + length -= BytesAtOnce; + } + + const uint8_t* currentChar = (const uint8_t*) current; + // remaining 1 to 31 bytes (standard algorithm) + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; + + return ~crc; // same as crc ^ 0xFFFFFFFF +} +#endif // CRC32_USE_LOOKUP_TABLE_SLICING_BY_8 + + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 +/// compute CRC32 (Slicing-by-16 algorithm) +uint32_t crc32_16bytes(const void* data, size_t length, uint32_t previousCrc32) +{ + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint32_t* current = (const uint32_t*) data; + + // enabling optimization (at least -O2) automatically unrolls the inner for-loop + const size_t Unroll = 4; + const size_t BytesAtOnce = 16 * Unroll; + + while (length >= BytesAtOnce) + { + for (size_t unrolling = 0; unrolling < Unroll; unrolling++) + { +#if __BYTE_ORDER == __BIG_ENDIAN + uint32_t one = *current++ ^ swap(crc); + uint32_t two = *current++; + uint32_t three = *current++; + uint32_t four = *current++; + crc = Crc32Lookup[ 0][ four & 0xFF] ^ + Crc32Lookup[ 1][(four >> 8) & 0xFF] ^ + Crc32Lookup[ 2][(four >> 16) & 0xFF] ^ + Crc32Lookup[ 
3][(four >> 24) & 0xFF] ^ + Crc32Lookup[ 4][ three & 0xFF] ^ + Crc32Lookup[ 5][(three >> 8) & 0xFF] ^ + Crc32Lookup[ 6][(three >> 16) & 0xFF] ^ + Crc32Lookup[ 7][(three >> 24) & 0xFF] ^ + Crc32Lookup[ 8][ two & 0xFF] ^ + Crc32Lookup[ 9][(two >> 8) & 0xFF] ^ + Crc32Lookup[10][(two >> 16) & 0xFF] ^ + Crc32Lookup[11][(two >> 24) & 0xFF] ^ + Crc32Lookup[12][ one & 0xFF] ^ + Crc32Lookup[13][(one >> 8) & 0xFF] ^ + Crc32Lookup[14][(one >> 16) & 0xFF] ^ + Crc32Lookup[15][(one >> 24) & 0xFF]; +#else + uint32_t one = *current++ ^ crc; + uint32_t two = *current++; + uint32_t three = *current++; + uint32_t four = *current++; + crc = Crc32Lookup[ 0][(four >> 24) & 0xFF] ^ + Crc32Lookup[ 1][(four >> 16) & 0xFF] ^ + Crc32Lookup[ 2][(four >> 8) & 0xFF] ^ + Crc32Lookup[ 3][ four & 0xFF] ^ + Crc32Lookup[ 4][(three >> 24) & 0xFF] ^ + Crc32Lookup[ 5][(three >> 16) & 0xFF] ^ + Crc32Lookup[ 6][(three >> 8) & 0xFF] ^ + Crc32Lookup[ 7][ three & 0xFF] ^ + Crc32Lookup[ 8][(two >> 24) & 0xFF] ^ + Crc32Lookup[ 9][(two >> 16) & 0xFF] ^ + Crc32Lookup[10][(two >> 8) & 0xFF] ^ + Crc32Lookup[11][ two & 0xFF] ^ + Crc32Lookup[12][(one >> 24) & 0xFF] ^ + Crc32Lookup[13][(one >> 16) & 0xFF] ^ + Crc32Lookup[14][(one >> 8) & 0xFF] ^ + Crc32Lookup[15][ one & 0xFF]; +#endif + } + + length -= BytesAtOnce; + } + + const uint8_t* currentChar = (const uint8_t*) current; + // remaining 1 to 63 bytes (standard algorithm) + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; + + return ~crc; // same as crc ^ 0xFFFFFFFF +} + + +/// compute CRC32 (Slicing-by-16 algorithm, prefetch upcoming data blocks) +uint32_t crc32_16bytes_prefetch(const void* data, size_t length, uint32_t previousCrc32, size_t prefetchAhead) +{ + // CRC code is identical to crc32_16bytes (including unrolling), only added prefetching + // 256 bytes look-ahead seems to be the sweet spot on Core i7 CPUs + + uint32_t crc = ~previousCrc32; // same as previousCrc32 ^ 0xFFFFFFFF + const uint32_t* current = (const 
uint32_t*) data; + + // enabling optimization (at least -O2) automatically unrolls the for-loop + const size_t Unroll = 4; + const size_t BytesAtOnce = 16 * Unroll; + + while (length >= BytesAtOnce + prefetchAhead) + { + PREFETCH(((const char*) current) + prefetchAhead); + + for (size_t unrolling = 0; unrolling < Unroll; unrolling++) + { +#if __BYTE_ORDER == __BIG_ENDIAN + uint32_t one = *current++ ^ swap(crc); + uint32_t two = *current++; + uint32_t three = *current++; + uint32_t four = *current++; + crc = Crc32Lookup[ 0][ four & 0xFF] ^ + Crc32Lookup[ 1][(four >> 8) & 0xFF] ^ + Crc32Lookup[ 2][(four >> 16) & 0xFF] ^ + Crc32Lookup[ 3][(four >> 24) & 0xFF] ^ + Crc32Lookup[ 4][ three & 0xFF] ^ + Crc32Lookup[ 5][(three >> 8) & 0xFF] ^ + Crc32Lookup[ 6][(three >> 16) & 0xFF] ^ + Crc32Lookup[ 7][(three >> 24) & 0xFF] ^ + Crc32Lookup[ 8][ two & 0xFF] ^ + Crc32Lookup[ 9][(two >> 8) & 0xFF] ^ + Crc32Lookup[10][(two >> 16) & 0xFF] ^ + Crc32Lookup[11][(two >> 24) & 0xFF] ^ + Crc32Lookup[12][ one & 0xFF] ^ + Crc32Lookup[13][(one >> 8) & 0xFF] ^ + Crc32Lookup[14][(one >> 16) & 0xFF] ^ + Crc32Lookup[15][(one >> 24) & 0xFF]; +#else + uint32_t one = *current++ ^ crc; + uint32_t two = *current++; + uint32_t three = *current++; + uint32_t four = *current++; + crc = Crc32Lookup[ 0][(four >> 24) & 0xFF] ^ + Crc32Lookup[ 1][(four >> 16) & 0xFF] ^ + Crc32Lookup[ 2][(four >> 8) & 0xFF] ^ + Crc32Lookup[ 3][ four & 0xFF] ^ + Crc32Lookup[ 4][(three >> 24) & 0xFF] ^ + Crc32Lookup[ 5][(three >> 16) & 0xFF] ^ + Crc32Lookup[ 6][(three >> 8) & 0xFF] ^ + Crc32Lookup[ 7][ three & 0xFF] ^ + Crc32Lookup[ 8][(two >> 24) & 0xFF] ^ + Crc32Lookup[ 9][(two >> 16) & 0xFF] ^ + Crc32Lookup[10][(two >> 8) & 0xFF] ^ + Crc32Lookup[11][ two & 0xFF] ^ + Crc32Lookup[12][(one >> 24) & 0xFF] ^ + Crc32Lookup[13][(one >> 16) & 0xFF] ^ + Crc32Lookup[14][(one >> 8) & 0xFF] ^ + Crc32Lookup[15][ one & 0xFF]; +#endif + } + + length -= BytesAtOnce; + } + + const uint8_t* currentChar = (const uint8_t*) current; + // 
remaining 1 to 63 bytes (standard algorithm) + while (length-- != 0) + crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; + + return ~crc; // same as crc ^ 0xFFFFFFFF +} +#endif + + +/// compute CRC32 using the fastest algorithm for large datasets on modern CPUs +uint32_t crc32_fast(const void* data, size_t length, uint32_t previousCrc32) +{ +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 + return crc32_16bytes (data, length, previousCrc32); +#elif defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_8) + return crc32_8bytes (data, length, previousCrc32); +#elif defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_4) + return crc32_4bytes (data, length, previousCrc32); +#elif defined(CRC32_USE_LOOKUP_TABLE_BYTE) + return crc32_1byte (data, length, previousCrc32); +#else + return crc32_halfbyte(data, length, previousCrc32); +#endif +} + + +/// merge two CRC32 such that result = crc32(dataB, lengthB, crc32(dataA, lengthA)) +uint32_t crc32_combine(uint32_t crcA, uint32_t crcB, size_t lengthB) +{ + // based on Mark Adler's crc_combine from + // https://github.com/madler/pigz/blob/master/pigz.c + + // main idea: + // - if you have two equally-sized blocks A and B, + // then you can create a block C = A ^ B + // which has the property crc(C) = crc(A) ^ crc(B) + // - if you append length(B) zeros to A and call it A' (think of it as AAAA000) + // and prepend length(A) zeros to B and call it B' (think of it as 0000BBB) + // then exists a C' = A' ^ B' + // - remember: if you XOR someting with zero, it remains unchanged: X ^ 0 = X + // - that means C' = A concat B so that crc(A concat B) = crc(C') = crc(A') ^ crc(B') + // - the trick is to compute crc(A') based on crc(A) + // and crc(B') based on crc(B) + // - since B' starts with many zeros, the crc of those initial zeros is still zero + // - that means crc(B') = crc(B) + // - unfortunately the trailing zeros of A' change the crc, so usually crc(A') != crc(A) + // - the following code is a fast algorithm to compute crc(A') + // - 
starting with crc(A) and appending length(B) zeros, needing just log2(length(B)) iterations + // - the details are explained by the original author at + // https://stackoverflow.com/questions/23122312/crc-calculation-of-a-mostly-static-data-stream/23126768 + // + // notes: + // - I squeezed everything into one function to keep global namespace clean (original code two helper functions) + // - most original comments are still in place, I added comments where these helper functions where made inline code + // - performance-wise there isn't any differenze to the original zlib/pigz code + + // degenerated case + if (lengthB == 0) + return crcA; + + /// CRC32 => 32 bits + const uint32_t CrcBits = 32; + + uint32_t odd [CrcBits]; // odd-power-of-two zeros operator + uint32_t even[CrcBits]; // even-power-of-two zeros operator + + // put operator for one zero bit in odd + odd[0] = Polynomial; // CRC-32 polynomial + for (int i = 1; i < (int)CrcBits; i++) + odd[i] = 1 << (i - 1); + + // put operator for two zero bits in even + // same as gf2_matrix_square(even, odd); + for (int i = 0; i < (int)CrcBits; i++) + { + uint32_t vec = odd[i]; + even[i] = 0; + for (int j = 0; vec != 0; j++, vec >>= 1) + if (vec & 1) + even[i] ^= odd[j]; + } + // put operator for four zero bits in odd + // same as gf2_matrix_square(odd, even); + for (int i = 0; i < (int)CrcBits; i++) + { + uint32_t vec = even[i]; + odd[i] = 0; + for (int j = 0; vec != 0; j++, vec >>= 1) + if (vec & 1) + odd[i] ^= even[j]; + } + + // the following loop becomes much shorter if I keep swapping even and odd + uint32_t* a = even; + uint32_t* b = odd; + // apply secondLength zeros to firstCrc32 + for (; lengthB > 0; lengthB >>= 1) + { + // same as gf2_matrix_square(a, b); + for (int i = 0; i < (int)CrcBits; i++) + { + uint32_t vec = b[i]; + a[i] = 0; + for (int j = 0; vec != 0; j++, vec >>= 1) + if (vec & 1) + a[i] ^= b[j]; + } + + // apply zeros operator for this bit + if (lengthB & 1) + { + // same as firstCrc32 = 
gf2_matrix_times(a, firstCrc32); + uint32_t sum = 0; + for (int i = 0; crcA != 0; i++, crcA >>= 1) + if (crcA & 1) + sum ^= a[i]; + crcA = sum; + } + + // switch even and odd + uint32_t* t = a; a = b; b = t; + } + + // return combined crc + return crcA ^ crcB; +} + + +// ////////////////////////////////////////////////////////// +// constants + + +#ifndef NO_LUT +/// look-up table, already declared above +const uint32_t Crc32Lookup[MaxSlice][256] = +{ + //// same algorithm as crc32_bitwise + //for (int i = 0; i <= 0xFF; i++) + //{ + // uint32_t crc = i; + // for (int j = 0; j < 8; j++) + // crc = (crc >> 1) ^ ((crc & 1) * Polynomial); + // Crc32Lookup[0][i] = crc; + //} + //// ... and the following slicing-by-8 algorithm (from Intel): + //// http://www.intel.com/technology/comms/perfnet/download/CRC_generators.pdf + //// http://sourceforge.net/projects/slicing-by-8/ + //for (int slice = 1; slice < MaxSlice; slice++) + // Crc32Lookup[slice][i] = (Crc32Lookup[slice - 1][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[slice - 1][i] & 0xFF]; + { + // note: the first number of every second row corresponds to the half-byte look-up table ! 
+ 0x00000000,0x77073096,0xEE0E612C,0x990951BA,0x076DC419,0x706AF48F,0xE963A535,0x9E6495A3, + 0x0EDB8832,0x79DCB8A4,0xE0D5E91E,0x97D2D988,0x09B64C2B,0x7EB17CBD,0xE7B82D07,0x90BF1D91, + 0x1DB71064,0x6AB020F2,0xF3B97148,0x84BE41DE,0x1ADAD47D,0x6DDDE4EB,0xF4D4B551,0x83D385C7, + 0x136C9856,0x646BA8C0,0xFD62F97A,0x8A65C9EC,0x14015C4F,0x63066CD9,0xFA0F3D63,0x8D080DF5, + 0x3B6E20C8,0x4C69105E,0xD56041E4,0xA2677172,0x3C03E4D1,0x4B04D447,0xD20D85FD,0xA50AB56B, + 0x35B5A8FA,0x42B2986C,0xDBBBC9D6,0xACBCF940,0x32D86CE3,0x45DF5C75,0xDCD60DCF,0xABD13D59, + 0x26D930AC,0x51DE003A,0xC8D75180,0xBFD06116,0x21B4F4B5,0x56B3C423,0xCFBA9599,0xB8BDA50F, + 0x2802B89E,0x5F058808,0xC60CD9B2,0xB10BE924,0x2F6F7C87,0x58684C11,0xC1611DAB,0xB6662D3D, + 0x76DC4190,0x01DB7106,0x98D220BC,0xEFD5102A,0x71B18589,0x06B6B51F,0x9FBFE4A5,0xE8B8D433, + 0x7807C9A2,0x0F00F934,0x9609A88E,0xE10E9818,0x7F6A0DBB,0x086D3D2D,0x91646C97,0xE6635C01, + 0x6B6B51F4,0x1C6C6162,0x856530D8,0xF262004E,0x6C0695ED,0x1B01A57B,0x8208F4C1,0xF50FC457, + 0x65B0D9C6,0x12B7E950,0x8BBEB8EA,0xFCB9887C,0x62DD1DDF,0x15DA2D49,0x8CD37CF3,0xFBD44C65, + 0x4DB26158,0x3AB551CE,0xA3BC0074,0xD4BB30E2,0x4ADFA541,0x3DD895D7,0xA4D1C46D,0xD3D6F4FB, + 0x4369E96A,0x346ED9FC,0xAD678846,0xDA60B8D0,0x44042D73,0x33031DE5,0xAA0A4C5F,0xDD0D7CC9, + 0x5005713C,0x270241AA,0xBE0B1010,0xC90C2086,0x5768B525,0x206F85B3,0xB966D409,0xCE61E49F, + 0x5EDEF90E,0x29D9C998,0xB0D09822,0xC7D7A8B4,0x59B33D17,0x2EB40D81,0xB7BD5C3B,0xC0BA6CAD, + 0xEDB88320,0x9ABFB3B6,0x03B6E20C,0x74B1D29A,0xEAD54739,0x9DD277AF,0x04DB2615,0x73DC1683, + 0xE3630B12,0x94643B84,0x0D6D6A3E,0x7A6A5AA8,0xE40ECF0B,0x9309FF9D,0x0A00AE27,0x7D079EB1, + 0xF00F9344,0x8708A3D2,0x1E01F268,0x6906C2FE,0xF762575D,0x806567CB,0x196C3671,0x6E6B06E7, + 0xFED41B76,0x89D32BE0,0x10DA7A5A,0x67DD4ACC,0xF9B9DF6F,0x8EBEEFF9,0x17B7BE43,0x60B08ED5, + 0xD6D6A3E8,0xA1D1937E,0x38D8C2C4,0x4FDFF252,0xD1BB67F1,0xA6BC5767,0x3FB506DD,0x48B2364B, + 
0xD80D2BDA,0xAF0A1B4C,0x36034AF6,0x41047A60,0xDF60EFC3,0xA867DF55,0x316E8EEF,0x4669BE79, + 0xCB61B38C,0xBC66831A,0x256FD2A0,0x5268E236,0xCC0C7795,0xBB0B4703,0x220216B9,0x5505262F, + 0xC5BA3BBE,0xB2BD0B28,0x2BB45A92,0x5CB36A04,0xC2D7FFA7,0xB5D0CF31,0x2CD99E8B,0x5BDEAE1D, + 0x9B64C2B0,0xEC63F226,0x756AA39C,0x026D930A,0x9C0906A9,0xEB0E363F,0x72076785,0x05005713, + 0x95BF4A82,0xE2B87A14,0x7BB12BAE,0x0CB61B38,0x92D28E9B,0xE5D5BE0D,0x7CDCEFB7,0x0BDBDF21, + 0x86D3D2D4,0xF1D4E242,0x68DDB3F8,0x1FDA836E,0x81BE16CD,0xF6B9265B,0x6FB077E1,0x18B74777, + 0x88085AE6,0xFF0F6A70,0x66063BCA,0x11010B5C,0x8F659EFF,0xF862AE69,0x616BFFD3,0x166CCF45, + 0xA00AE278,0xD70DD2EE,0x4E048354,0x3903B3C2,0xA7672661,0xD06016F7,0x4969474D,0x3E6E77DB, + 0xAED16A4A,0xD9D65ADC,0x40DF0B66,0x37D83BF0,0xA9BCAE53,0xDEBB9EC5,0x47B2CF7F,0x30B5FFE9, + 0xBDBDF21C,0xCABAC28A,0x53B39330,0x24B4A3A6,0xBAD03605,0xCDD70693,0x54DE5729,0x23D967BF, + 0xB3667A2E,0xC4614AB8,0x5D681B02,0x2A6F2B94,0xB40BBE37,0xC30C8EA1,0x5A05DF1B,0x2D02EF8D, + } + +#if defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_4) || defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_8) || defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_16) + // beyond this point only relevant for Slicing-by-4, Slicing-by-8 and Slicing-by-16 + ,{ + 0x00000000,0x191B3141,0x32366282,0x2B2D53C3,0x646CC504,0x7D77F445,0x565AA786,0x4F4196C7, + 0xC8D98A08,0xD1C2BB49,0xFAEFE88A,0xE3F4D9CB,0xACB54F0C,0xB5AE7E4D,0x9E832D8E,0x87981CCF, + 0x4AC21251,0x53D92310,0x78F470D3,0x61EF4192,0x2EAED755,0x37B5E614,0x1C98B5D7,0x05838496, + 0x821B9859,0x9B00A918,0xB02DFADB,0xA936CB9A,0xE6775D5D,0xFF6C6C1C,0xD4413FDF,0xCD5A0E9E, + 0x958424A2,0x8C9F15E3,0xA7B24620,0xBEA97761,0xF1E8E1A6,0xE8F3D0E7,0xC3DE8324,0xDAC5B265, + 0x5D5DAEAA,0x44469FEB,0x6F6BCC28,0x7670FD69,0x39316BAE,0x202A5AEF,0x0B07092C,0x121C386D, + 0xDF4636F3,0xC65D07B2,0xED705471,0xF46B6530,0xBB2AF3F7,0xA231C2B6,0x891C9175,0x9007A034, + 0x179FBCFB,0x0E848DBA,0x25A9DE79,0x3CB2EF38,0x73F379FF,0x6AE848BE,0x41C51B7D,0x58DE2A3C, + 
0xF0794F05,0xE9627E44,0xC24F2D87,0xDB541CC6,0x94158A01,0x8D0EBB40,0xA623E883,0xBF38D9C2, + 0x38A0C50D,0x21BBF44C,0x0A96A78F,0x138D96CE,0x5CCC0009,0x45D73148,0x6EFA628B,0x77E153CA, + 0xBABB5D54,0xA3A06C15,0x888D3FD6,0x91960E97,0xDED79850,0xC7CCA911,0xECE1FAD2,0xF5FACB93, + 0x7262D75C,0x6B79E61D,0x4054B5DE,0x594F849F,0x160E1258,0x0F152319,0x243870DA,0x3D23419B, + 0x65FD6BA7,0x7CE65AE6,0x57CB0925,0x4ED03864,0x0191AEA3,0x188A9FE2,0x33A7CC21,0x2ABCFD60, + 0xAD24E1AF,0xB43FD0EE,0x9F12832D,0x8609B26C,0xC94824AB,0xD05315EA,0xFB7E4629,0xE2657768, + 0x2F3F79F6,0x362448B7,0x1D091B74,0x04122A35,0x4B53BCF2,0x52488DB3,0x7965DE70,0x607EEF31, + 0xE7E6F3FE,0xFEFDC2BF,0xD5D0917C,0xCCCBA03D,0x838A36FA,0x9A9107BB,0xB1BC5478,0xA8A76539, + 0x3B83984B,0x2298A90A,0x09B5FAC9,0x10AECB88,0x5FEF5D4F,0x46F46C0E,0x6DD93FCD,0x74C20E8C, + 0xF35A1243,0xEA412302,0xC16C70C1,0xD8774180,0x9736D747,0x8E2DE606,0xA500B5C5,0xBC1B8484, + 0x71418A1A,0x685ABB5B,0x4377E898,0x5A6CD9D9,0x152D4F1E,0x0C367E5F,0x271B2D9C,0x3E001CDD, + 0xB9980012,0xA0833153,0x8BAE6290,0x92B553D1,0xDDF4C516,0xC4EFF457,0xEFC2A794,0xF6D996D5, + 0xAE07BCE9,0xB71C8DA8,0x9C31DE6B,0x852AEF2A,0xCA6B79ED,0xD37048AC,0xF85D1B6F,0xE1462A2E, + 0x66DE36E1,0x7FC507A0,0x54E85463,0x4DF36522,0x02B2F3E5,0x1BA9C2A4,0x30849167,0x299FA026, + 0xE4C5AEB8,0xFDDE9FF9,0xD6F3CC3A,0xCFE8FD7B,0x80A96BBC,0x99B25AFD,0xB29F093E,0xAB84387F, + 0x2C1C24B0,0x350715F1,0x1E2A4632,0x07317773,0x4870E1B4,0x516BD0F5,0x7A468336,0x635DB277, + 0xCBFAD74E,0xD2E1E60F,0xF9CCB5CC,0xE0D7848D,0xAF96124A,0xB68D230B,0x9DA070C8,0x84BB4189, + 0x03235D46,0x1A386C07,0x31153FC4,0x280E0E85,0x674F9842,0x7E54A903,0x5579FAC0,0x4C62CB81, + 0x8138C51F,0x9823F45E,0xB30EA79D,0xAA1596DC,0xE554001B,0xFC4F315A,0xD7626299,0xCE7953D8, + 0x49E14F17,0x50FA7E56,0x7BD72D95,0x62CC1CD4,0x2D8D8A13,0x3496BB52,0x1FBBE891,0x06A0D9D0, + 0x5E7EF3EC,0x4765C2AD,0x6C48916E,0x7553A02F,0x3A1236E8,0x230907A9,0x0824546A,0x113F652B, + 0x96A779E4,0x8FBC48A5,0xA4911B66,0xBD8A2A27,0xF2CBBCE0,0xEBD08DA1,0xC0FDDE62,0xD9E6EF23, 
+ 0x14BCE1BD,0x0DA7D0FC,0x268A833F,0x3F91B27E,0x70D024B9,0x69CB15F8,0x42E6463B,0x5BFD777A, + 0xDC656BB5,0xC57E5AF4,0xEE530937,0xF7483876,0xB809AEB1,0xA1129FF0,0x8A3FCC33,0x9324FD72, + }, + + { + 0x00000000,0x01C26A37,0x0384D46E,0x0246BE59,0x0709A8DC,0x06CBC2EB,0x048D7CB2,0x054F1685, + 0x0E1351B8,0x0FD13B8F,0x0D9785D6,0x0C55EFE1,0x091AF964,0x08D89353,0x0A9E2D0A,0x0B5C473D, + 0x1C26A370,0x1DE4C947,0x1FA2771E,0x1E601D29,0x1B2F0BAC,0x1AED619B,0x18ABDFC2,0x1969B5F5, + 0x1235F2C8,0x13F798FF,0x11B126A6,0x10734C91,0x153C5A14,0x14FE3023,0x16B88E7A,0x177AE44D, + 0x384D46E0,0x398F2CD7,0x3BC9928E,0x3A0BF8B9,0x3F44EE3C,0x3E86840B,0x3CC03A52,0x3D025065, + 0x365E1758,0x379C7D6F,0x35DAC336,0x3418A901,0x3157BF84,0x3095D5B3,0x32D36BEA,0x331101DD, + 0x246BE590,0x25A98FA7,0x27EF31FE,0x262D5BC9,0x23624D4C,0x22A0277B,0x20E69922,0x2124F315, + 0x2A78B428,0x2BBADE1F,0x29FC6046,0x283E0A71,0x2D711CF4,0x2CB376C3,0x2EF5C89A,0x2F37A2AD, + 0x709A8DC0,0x7158E7F7,0x731E59AE,0x72DC3399,0x7793251C,0x76514F2B,0x7417F172,0x75D59B45, + 0x7E89DC78,0x7F4BB64F,0x7D0D0816,0x7CCF6221,0x798074A4,0x78421E93,0x7A04A0CA,0x7BC6CAFD, + 0x6CBC2EB0,0x6D7E4487,0x6F38FADE,0x6EFA90E9,0x6BB5866C,0x6A77EC5B,0x68315202,0x69F33835, + 0x62AF7F08,0x636D153F,0x612BAB66,0x60E9C151,0x65A6D7D4,0x6464BDE3,0x662203BA,0x67E0698D, + 0x48D7CB20,0x4915A117,0x4B531F4E,0x4A917579,0x4FDE63FC,0x4E1C09CB,0x4C5AB792,0x4D98DDA5, + 0x46C49A98,0x4706F0AF,0x45404EF6,0x448224C1,0x41CD3244,0x400F5873,0x4249E62A,0x438B8C1D, + 0x54F16850,0x55330267,0x5775BC3E,0x56B7D609,0x53F8C08C,0x523AAABB,0x507C14E2,0x51BE7ED5, + 0x5AE239E8,0x5B2053DF,0x5966ED86,0x58A487B1,0x5DEB9134,0x5C29FB03,0x5E6F455A,0x5FAD2F6D, + 0xE1351B80,0xE0F771B7,0xE2B1CFEE,0xE373A5D9,0xE63CB35C,0xE7FED96B,0xE5B86732,0xE47A0D05, + 0xEF264A38,0xEEE4200F,0xECA29E56,0xED60F461,0xE82FE2E4,0xE9ED88D3,0xEBAB368A,0xEA695CBD, + 0xFD13B8F0,0xFCD1D2C7,0xFE976C9E,0xFF5506A9,0xFA1A102C,0xFBD87A1B,0xF99EC442,0xF85CAE75, + 
0xF300E948,0xF2C2837F,0xF0843D26,0xF1465711,0xF4094194,0xF5CB2BA3,0xF78D95FA,0xF64FFFCD, + 0xD9785D60,0xD8BA3757,0xDAFC890E,0xDB3EE339,0xDE71F5BC,0xDFB39F8B,0xDDF521D2,0xDC374BE5, + 0xD76B0CD8,0xD6A966EF,0xD4EFD8B6,0xD52DB281,0xD062A404,0xD1A0CE33,0xD3E6706A,0xD2241A5D, + 0xC55EFE10,0xC49C9427,0xC6DA2A7E,0xC7184049,0xC25756CC,0xC3953CFB,0xC1D382A2,0xC011E895, + 0xCB4DAFA8,0xCA8FC59F,0xC8C97BC6,0xC90B11F1,0xCC440774,0xCD866D43,0xCFC0D31A,0xCE02B92D, + 0x91AF9640,0x906DFC77,0x922B422E,0x93E92819,0x96A63E9C,0x976454AB,0x9522EAF2,0x94E080C5, + 0x9FBCC7F8,0x9E7EADCF,0x9C381396,0x9DFA79A1,0x98B56F24,0x99770513,0x9B31BB4A,0x9AF3D17D, + 0x8D893530,0x8C4B5F07,0x8E0DE15E,0x8FCF8B69,0x8A809DEC,0x8B42F7DB,0x89044982,0x88C623B5, + 0x839A6488,0x82580EBF,0x801EB0E6,0x81DCDAD1,0x8493CC54,0x8551A663,0x8717183A,0x86D5720D, + 0xA9E2D0A0,0xA820BA97,0xAA6604CE,0xABA46EF9,0xAEEB787C,0xAF29124B,0xAD6FAC12,0xACADC625, + 0xA7F18118,0xA633EB2F,0xA4755576,0xA5B73F41,0xA0F829C4,0xA13A43F3,0xA37CFDAA,0xA2BE979D, + 0xB5C473D0,0xB40619E7,0xB640A7BE,0xB782CD89,0xB2CDDB0C,0xB30FB13B,0xB1490F62,0xB08B6555, + 0xBBD72268,0xBA15485F,0xB853F606,0xB9919C31,0xBCDE8AB4,0xBD1CE083,0xBF5A5EDA,0xBE9834ED, + }, + + { + 0x00000000,0xB8BC6765,0xAA09C88B,0x12B5AFEE,0x8F629757,0x37DEF032,0x256B5FDC,0x9DD738B9, + 0xC5B428EF,0x7D084F8A,0x6FBDE064,0xD7018701,0x4AD6BFB8,0xF26AD8DD,0xE0DF7733,0x58631056, + 0x5019579F,0xE8A530FA,0xFA109F14,0x42ACF871,0xDF7BC0C8,0x67C7A7AD,0x75720843,0xCDCE6F26, + 0x95AD7F70,0x2D111815,0x3FA4B7FB,0x8718D09E,0x1ACFE827,0xA2738F42,0xB0C620AC,0x087A47C9, + 0xA032AF3E,0x188EC85B,0x0A3B67B5,0xB28700D0,0x2F503869,0x97EC5F0C,0x8559F0E2,0x3DE59787, + 0x658687D1,0xDD3AE0B4,0xCF8F4F5A,0x7733283F,0xEAE41086,0x525877E3,0x40EDD80D,0xF851BF68, + 0xF02BF8A1,0x48979FC4,0x5A22302A,0xE29E574F,0x7F496FF6,0xC7F50893,0xD540A77D,0x6DFCC018, + 0x359FD04E,0x8D23B72B,0x9F9618C5,0x272A7FA0,0xBAFD4719,0x0241207C,0x10F48F92,0xA848E8F7, + 
0x9B14583D,0x23A83F58,0x311D90B6,0x89A1F7D3,0x1476CF6A,0xACCAA80F,0xBE7F07E1,0x06C36084, + 0x5EA070D2,0xE61C17B7,0xF4A9B859,0x4C15DF3C,0xD1C2E785,0x697E80E0,0x7BCB2F0E,0xC377486B, + 0xCB0D0FA2,0x73B168C7,0x6104C729,0xD9B8A04C,0x446F98F5,0xFCD3FF90,0xEE66507E,0x56DA371B, + 0x0EB9274D,0xB6054028,0xA4B0EFC6,0x1C0C88A3,0x81DBB01A,0x3967D77F,0x2BD27891,0x936E1FF4, + 0x3B26F703,0x839A9066,0x912F3F88,0x299358ED,0xB4446054,0x0CF80731,0x1E4DA8DF,0xA6F1CFBA, + 0xFE92DFEC,0x462EB889,0x549B1767,0xEC277002,0x71F048BB,0xC94C2FDE,0xDBF98030,0x6345E755, + 0x6B3FA09C,0xD383C7F9,0xC1366817,0x798A0F72,0xE45D37CB,0x5CE150AE,0x4E54FF40,0xF6E89825, + 0xAE8B8873,0x1637EF16,0x048240F8,0xBC3E279D,0x21E91F24,0x99557841,0x8BE0D7AF,0x335CB0CA, + 0xED59B63B,0x55E5D15E,0x47507EB0,0xFFEC19D5,0x623B216C,0xDA874609,0xC832E9E7,0x708E8E82, + 0x28ED9ED4,0x9051F9B1,0x82E4565F,0x3A58313A,0xA78F0983,0x1F336EE6,0x0D86C108,0xB53AA66D, + 0xBD40E1A4,0x05FC86C1,0x1749292F,0xAFF54E4A,0x322276F3,0x8A9E1196,0x982BBE78,0x2097D91D, + 0x78F4C94B,0xC048AE2E,0xD2FD01C0,0x6A4166A5,0xF7965E1C,0x4F2A3979,0x5D9F9697,0xE523F1F2, + 0x4D6B1905,0xF5D77E60,0xE762D18E,0x5FDEB6EB,0xC2098E52,0x7AB5E937,0x680046D9,0xD0BC21BC, + 0x88DF31EA,0x3063568F,0x22D6F961,0x9A6A9E04,0x07BDA6BD,0xBF01C1D8,0xADB46E36,0x15080953, + 0x1D724E9A,0xA5CE29FF,0xB77B8611,0x0FC7E174,0x9210D9CD,0x2AACBEA8,0x38191146,0x80A57623, + 0xD8C66675,0x607A0110,0x72CFAEFE,0xCA73C99B,0x57A4F122,0xEF189647,0xFDAD39A9,0x45115ECC, + 0x764DEE06,0xCEF18963,0xDC44268D,0x64F841E8,0xF92F7951,0x41931E34,0x5326B1DA,0xEB9AD6BF, + 0xB3F9C6E9,0x0B45A18C,0x19F00E62,0xA14C6907,0x3C9B51BE,0x842736DB,0x96929935,0x2E2EFE50, + 0x2654B999,0x9EE8DEFC,0x8C5D7112,0x34E11677,0xA9362ECE,0x118A49AB,0x033FE645,0xBB838120, + 0xE3E09176,0x5B5CF613,0x49E959FD,0xF1553E98,0x6C820621,0xD43E6144,0xC68BCEAA,0x7E37A9CF, + 0xD67F4138,0x6EC3265D,0x7C7689B3,0xC4CAEED6,0x591DD66F,0xE1A1B10A,0xF3141EE4,0x4BA87981, + 0x13CB69D7,0xAB770EB2,0xB9C2A15C,0x017EC639,0x9CA9FE80,0x241599E5,0x36A0360B,0x8E1C516E, 
+ 0x866616A7,0x3EDA71C2,0x2C6FDE2C,0x94D3B949,0x090481F0,0xB1B8E695,0xA30D497B,0x1BB12E1E, + 0x43D23E48,0xFB6E592D,0xE9DBF6C3,0x516791A6,0xCCB0A91F,0x740CCE7A,0x66B96194,0xDE0506F1, + } +#endif // defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_4) || defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_8) || defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_16) +#if defined (CRC32_USE_LOOKUP_TABLE_SLICING_BY_8) || defined(CRC32_USE_LOOKUP_TABLE_SLICING_BY_16) + // beyond this point only relevant for Slicing-by-8 and Slicing-by-16 + ,{ + 0x00000000,0x3D6029B0,0x7AC05360,0x47A07AD0,0xF580A6C0,0xC8E08F70,0x8F40F5A0,0xB220DC10, + 0x30704BC1,0x0D106271,0x4AB018A1,0x77D03111,0xC5F0ED01,0xF890C4B1,0xBF30BE61,0x825097D1, + 0x60E09782,0x5D80BE32,0x1A20C4E2,0x2740ED52,0x95603142,0xA80018F2,0xEFA06222,0xD2C04B92, + 0x5090DC43,0x6DF0F5F3,0x2A508F23,0x1730A693,0xA5107A83,0x98705333,0xDFD029E3,0xE2B00053, + 0xC1C12F04,0xFCA106B4,0xBB017C64,0x866155D4,0x344189C4,0x0921A074,0x4E81DAA4,0x73E1F314, + 0xF1B164C5,0xCCD14D75,0x8B7137A5,0xB6111E15,0x0431C205,0x3951EBB5,0x7EF19165,0x4391B8D5, + 0xA121B886,0x9C419136,0xDBE1EBE6,0xE681C256,0x54A11E46,0x69C137F6,0x2E614D26,0x13016496, + 0x9151F347,0xAC31DAF7,0xEB91A027,0xD6F18997,0x64D15587,0x59B17C37,0x1E1106E7,0x23712F57, + 0x58F35849,0x659371F9,0x22330B29,0x1F532299,0xAD73FE89,0x9013D739,0xD7B3ADE9,0xEAD38459, + 0x68831388,0x55E33A38,0x124340E8,0x2F236958,0x9D03B548,0xA0639CF8,0xE7C3E628,0xDAA3CF98, + 0x3813CFCB,0x0573E67B,0x42D39CAB,0x7FB3B51B,0xCD93690B,0xF0F340BB,0xB7533A6B,0x8A3313DB, + 0x0863840A,0x3503ADBA,0x72A3D76A,0x4FC3FEDA,0xFDE322CA,0xC0830B7A,0x872371AA,0xBA43581A, + 0x9932774D,0xA4525EFD,0xE3F2242D,0xDE920D9D,0x6CB2D18D,0x51D2F83D,0x167282ED,0x2B12AB5D, + 0xA9423C8C,0x9422153C,0xD3826FEC,0xEEE2465C,0x5CC29A4C,0x61A2B3FC,0x2602C92C,0x1B62E09C, + 0xF9D2E0CF,0xC4B2C97F,0x8312B3AF,0xBE729A1F,0x0C52460F,0x31326FBF,0x7692156F,0x4BF23CDF, + 0xC9A2AB0E,0xF4C282BE,0xB362F86E,0x8E02D1DE,0x3C220DCE,0x0142247E,0x46E25EAE,0x7B82771E, + 
0xB1E6B092,0x8C869922,0xCB26E3F2,0xF646CA42,0x44661652,0x79063FE2,0x3EA64532,0x03C66C82, + 0x8196FB53,0xBCF6D2E3,0xFB56A833,0xC6368183,0x74165D93,0x49767423,0x0ED60EF3,0x33B62743, + 0xD1062710,0xEC660EA0,0xABC67470,0x96A65DC0,0x248681D0,0x19E6A860,0x5E46D2B0,0x6326FB00, + 0xE1766CD1,0xDC164561,0x9BB63FB1,0xA6D61601,0x14F6CA11,0x2996E3A1,0x6E369971,0x5356B0C1, + 0x70279F96,0x4D47B626,0x0AE7CCF6,0x3787E546,0x85A73956,0xB8C710E6,0xFF676A36,0xC2074386, + 0x4057D457,0x7D37FDE7,0x3A978737,0x07F7AE87,0xB5D77297,0x88B75B27,0xCF1721F7,0xF2770847, + 0x10C70814,0x2DA721A4,0x6A075B74,0x576772C4,0xE547AED4,0xD8278764,0x9F87FDB4,0xA2E7D404, + 0x20B743D5,0x1DD76A65,0x5A7710B5,0x67173905,0xD537E515,0xE857CCA5,0xAFF7B675,0x92979FC5, + 0xE915E8DB,0xD475C16B,0x93D5BBBB,0xAEB5920B,0x1C954E1B,0x21F567AB,0x66551D7B,0x5B3534CB, + 0xD965A31A,0xE4058AAA,0xA3A5F07A,0x9EC5D9CA,0x2CE505DA,0x11852C6A,0x562556BA,0x6B457F0A, + 0x89F57F59,0xB49556E9,0xF3352C39,0xCE550589,0x7C75D999,0x4115F029,0x06B58AF9,0x3BD5A349, + 0xB9853498,0x84E51D28,0xC34567F8,0xFE254E48,0x4C059258,0x7165BBE8,0x36C5C138,0x0BA5E888, + 0x28D4C7DF,0x15B4EE6F,0x521494BF,0x6F74BD0F,0xDD54611F,0xE03448AF,0xA794327F,0x9AF41BCF, + 0x18A48C1E,0x25C4A5AE,0x6264DF7E,0x5F04F6CE,0xED242ADE,0xD044036E,0x97E479BE,0xAA84500E, + 0x4834505D,0x755479ED,0x32F4033D,0x0F942A8D,0xBDB4F69D,0x80D4DF2D,0xC774A5FD,0xFA148C4D, + 0x78441B9C,0x4524322C,0x028448FC,0x3FE4614C,0x8DC4BD5C,0xB0A494EC,0xF704EE3C,0xCA64C78C, + }, + + { + 0x00000000,0xCB5CD3A5,0x4DC8A10B,0x869472AE,0x9B914216,0x50CD91B3,0xD659E31D,0x1D0530B8, + 0xEC53826D,0x270F51C8,0xA19B2366,0x6AC7F0C3,0x77C2C07B,0xBC9E13DE,0x3A0A6170,0xF156B2D5, + 0x03D6029B,0xC88AD13E,0x4E1EA390,0x85427035,0x9847408D,0x531B9328,0xD58FE186,0x1ED33223, + 0xEF8580F6,0x24D95353,0xA24D21FD,0x6911F258,0x7414C2E0,0xBF481145,0x39DC63EB,0xF280B04E, + 0x07AC0536,0xCCF0D693,0x4A64A43D,0x81387798,0x9C3D4720,0x57619485,0xD1F5E62B,0x1AA9358E, + 
0xEBFF875B,0x20A354FE,0xA6372650,0x6D6BF5F5,0x706EC54D,0xBB3216E8,0x3DA66446,0xF6FAB7E3, + 0x047A07AD,0xCF26D408,0x49B2A6A6,0x82EE7503,0x9FEB45BB,0x54B7961E,0xD223E4B0,0x197F3715, + 0xE82985C0,0x23755665,0xA5E124CB,0x6EBDF76E,0x73B8C7D6,0xB8E41473,0x3E7066DD,0xF52CB578, + 0x0F580A6C,0xC404D9C9,0x4290AB67,0x89CC78C2,0x94C9487A,0x5F959BDF,0xD901E971,0x125D3AD4, + 0xE30B8801,0x28575BA4,0xAEC3290A,0x659FFAAF,0x789ACA17,0xB3C619B2,0x35526B1C,0xFE0EB8B9, + 0x0C8E08F7,0xC7D2DB52,0x4146A9FC,0x8A1A7A59,0x971F4AE1,0x5C439944,0xDAD7EBEA,0x118B384F, + 0xE0DD8A9A,0x2B81593F,0xAD152B91,0x6649F834,0x7B4CC88C,0xB0101B29,0x36846987,0xFDD8BA22, + 0x08F40F5A,0xC3A8DCFF,0x453CAE51,0x8E607DF4,0x93654D4C,0x58399EE9,0xDEADEC47,0x15F13FE2, + 0xE4A78D37,0x2FFB5E92,0xA96F2C3C,0x6233FF99,0x7F36CF21,0xB46A1C84,0x32FE6E2A,0xF9A2BD8F, + 0x0B220DC1,0xC07EDE64,0x46EAACCA,0x8DB67F6F,0x90B34FD7,0x5BEF9C72,0xDD7BEEDC,0x16273D79, + 0xE7718FAC,0x2C2D5C09,0xAAB92EA7,0x61E5FD02,0x7CE0CDBA,0xB7BC1E1F,0x31286CB1,0xFA74BF14, + 0x1EB014D8,0xD5ECC77D,0x5378B5D3,0x98246676,0x852156CE,0x4E7D856B,0xC8E9F7C5,0x03B52460, + 0xF2E396B5,0x39BF4510,0xBF2B37BE,0x7477E41B,0x6972D4A3,0xA22E0706,0x24BA75A8,0xEFE6A60D, + 0x1D661643,0xD63AC5E6,0x50AEB748,0x9BF264ED,0x86F75455,0x4DAB87F0,0xCB3FF55E,0x006326FB, + 0xF135942E,0x3A69478B,0xBCFD3525,0x77A1E680,0x6AA4D638,0xA1F8059D,0x276C7733,0xEC30A496, + 0x191C11EE,0xD240C24B,0x54D4B0E5,0x9F886340,0x828D53F8,0x49D1805D,0xCF45F2F3,0x04192156, + 0xF54F9383,0x3E134026,0xB8873288,0x73DBE12D,0x6EDED195,0xA5820230,0x2316709E,0xE84AA33B, + 0x1ACA1375,0xD196C0D0,0x5702B27E,0x9C5E61DB,0x815B5163,0x4A0782C6,0xCC93F068,0x07CF23CD, + 0xF6999118,0x3DC542BD,0xBB513013,0x700DE3B6,0x6D08D30E,0xA65400AB,0x20C07205,0xEB9CA1A0, + 0x11E81EB4,0xDAB4CD11,0x5C20BFBF,0x977C6C1A,0x8A795CA2,0x41258F07,0xC7B1FDA9,0x0CED2E0C, + 0xFDBB9CD9,0x36E74F7C,0xB0733DD2,0x7B2FEE77,0x662ADECF,0xAD760D6A,0x2BE27FC4,0xE0BEAC61, + 0x123E1C2F,0xD962CF8A,0x5FF6BD24,0x94AA6E81,0x89AF5E39,0x42F38D9C,0xC467FF32,0x0F3B2C97, 
+ 0xFE6D9E42,0x35314DE7,0xB3A53F49,0x78F9ECEC,0x65FCDC54,0xAEA00FF1,0x28347D5F,0xE368AEFA, + 0x16441B82,0xDD18C827,0x5B8CBA89,0x90D0692C,0x8DD55994,0x46898A31,0xC01DF89F,0x0B412B3A, + 0xFA1799EF,0x314B4A4A,0xB7DF38E4,0x7C83EB41,0x6186DBF9,0xAADA085C,0x2C4E7AF2,0xE712A957, + 0x15921919,0xDECECABC,0x585AB812,0x93066BB7,0x8E035B0F,0x455F88AA,0xC3CBFA04,0x089729A1, + 0xF9C19B74,0x329D48D1,0xB4093A7F,0x7F55E9DA,0x6250D962,0xA90C0AC7,0x2F987869,0xE4C4ABCC, + }, + + { + 0x00000000,0xA6770BB4,0x979F1129,0x31E81A9D,0xF44F2413,0x52382FA7,0x63D0353A,0xC5A73E8E, + 0x33EF4E67,0x959845D3,0xA4705F4E,0x020754FA,0xC7A06A74,0x61D761C0,0x503F7B5D,0xF64870E9, + 0x67DE9CCE,0xC1A9977A,0xF0418DE7,0x56368653,0x9391B8DD,0x35E6B369,0x040EA9F4,0xA279A240, + 0x5431D2A9,0xF246D91D,0xC3AEC380,0x65D9C834,0xA07EF6BA,0x0609FD0E,0x37E1E793,0x9196EC27, + 0xCFBD399C,0x69CA3228,0x582228B5,0xFE552301,0x3BF21D8F,0x9D85163B,0xAC6D0CA6,0x0A1A0712, + 0xFC5277FB,0x5A257C4F,0x6BCD66D2,0xCDBA6D66,0x081D53E8,0xAE6A585C,0x9F8242C1,0x39F54975, + 0xA863A552,0x0E14AEE6,0x3FFCB47B,0x998BBFCF,0x5C2C8141,0xFA5B8AF5,0xCBB39068,0x6DC49BDC, + 0x9B8CEB35,0x3DFBE081,0x0C13FA1C,0xAA64F1A8,0x6FC3CF26,0xC9B4C492,0xF85CDE0F,0x5E2BD5BB, + 0x440B7579,0xE27C7ECD,0xD3946450,0x75E36FE4,0xB044516A,0x16335ADE,0x27DB4043,0x81AC4BF7, + 0x77E43B1E,0xD19330AA,0xE07B2A37,0x460C2183,0x83AB1F0D,0x25DC14B9,0x14340E24,0xB2430590, + 0x23D5E9B7,0x85A2E203,0xB44AF89E,0x123DF32A,0xD79ACDA4,0x71EDC610,0x4005DC8D,0xE672D739, + 0x103AA7D0,0xB64DAC64,0x87A5B6F9,0x21D2BD4D,0xE47583C3,0x42028877,0x73EA92EA,0xD59D995E, + 0x8BB64CE5,0x2DC14751,0x1C295DCC,0xBA5E5678,0x7FF968F6,0xD98E6342,0xE86679DF,0x4E11726B, + 0xB8590282,0x1E2E0936,0x2FC613AB,0x89B1181F,0x4C162691,0xEA612D25,0xDB8937B8,0x7DFE3C0C, + 0xEC68D02B,0x4A1FDB9F,0x7BF7C102,0xDD80CAB6,0x1827F438,0xBE50FF8C,0x8FB8E511,0x29CFEEA5, + 0xDF879E4C,0x79F095F8,0x48188F65,0xEE6F84D1,0x2BC8BA5F,0x8DBFB1EB,0xBC57AB76,0x1A20A0C2, + 
0x8816EAF2,0x2E61E146,0x1F89FBDB,0xB9FEF06F,0x7C59CEE1,0xDA2EC555,0xEBC6DFC8,0x4DB1D47C, + 0xBBF9A495,0x1D8EAF21,0x2C66B5BC,0x8A11BE08,0x4FB68086,0xE9C18B32,0xD82991AF,0x7E5E9A1B, + 0xEFC8763C,0x49BF7D88,0x78576715,0xDE206CA1,0x1B87522F,0xBDF0599B,0x8C184306,0x2A6F48B2, + 0xDC27385B,0x7A5033EF,0x4BB82972,0xEDCF22C6,0x28681C48,0x8E1F17FC,0xBFF70D61,0x198006D5, + 0x47ABD36E,0xE1DCD8DA,0xD034C247,0x7643C9F3,0xB3E4F77D,0x1593FCC9,0x247BE654,0x820CEDE0, + 0x74449D09,0xD23396BD,0xE3DB8C20,0x45AC8794,0x800BB91A,0x267CB2AE,0x1794A833,0xB1E3A387, + 0x20754FA0,0x86024414,0xB7EA5E89,0x119D553D,0xD43A6BB3,0x724D6007,0x43A57A9A,0xE5D2712E, + 0x139A01C7,0xB5ED0A73,0x840510EE,0x22721B5A,0xE7D525D4,0x41A22E60,0x704A34FD,0xD63D3F49, + 0xCC1D9F8B,0x6A6A943F,0x5B828EA2,0xFDF58516,0x3852BB98,0x9E25B02C,0xAFCDAAB1,0x09BAA105, + 0xFFF2D1EC,0x5985DA58,0x686DC0C5,0xCE1ACB71,0x0BBDF5FF,0xADCAFE4B,0x9C22E4D6,0x3A55EF62, + 0xABC30345,0x0DB408F1,0x3C5C126C,0x9A2B19D8,0x5F8C2756,0xF9FB2CE2,0xC813367F,0x6E643DCB, + 0x982C4D22,0x3E5B4696,0x0FB35C0B,0xA9C457BF,0x6C636931,0xCA146285,0xFBFC7818,0x5D8B73AC, + 0x03A0A617,0xA5D7ADA3,0x943FB73E,0x3248BC8A,0xF7EF8204,0x519889B0,0x6070932D,0xC6079899, + 0x304FE870,0x9638E3C4,0xA7D0F959,0x01A7F2ED,0xC400CC63,0x6277C7D7,0x539FDD4A,0xF5E8D6FE, + 0x647E3AD9,0xC209316D,0xF3E12BF0,0x55962044,0x90311ECA,0x3646157E,0x07AE0FE3,0xA1D90457, + 0x579174BE,0xF1E67F0A,0xC00E6597,0x66796E23,0xA3DE50AD,0x05A95B19,0x34414184,0x92364A30, + }, + + { + 0x00000000,0xCCAA009E,0x4225077D,0x8E8F07E3,0x844A0EFA,0x48E00E64,0xC66F0987,0x0AC50919, + 0xD3E51BB5,0x1F4F1B2B,0x91C01CC8,0x5D6A1C56,0x57AF154F,0x9B0515D1,0x158A1232,0xD92012AC, + 0x7CBB312B,0xB01131B5,0x3E9E3656,0xF23436C8,0xF8F13FD1,0x345B3F4F,0xBAD438AC,0x767E3832, + 0xAF5E2A9E,0x63F42A00,0xED7B2DE3,0x21D12D7D,0x2B142464,0xE7BE24FA,0x69312319,0xA59B2387, + 0xF9766256,0x35DC62C8,0xBB53652B,0x77F965B5,0x7D3C6CAC,0xB1966C32,0x3F196BD1,0xF3B36B4F, + 
0x2A9379E3,0xE639797D,0x68B67E9E,0xA41C7E00,0xAED97719,0x62737787,0xECFC7064,0x205670FA, + 0x85CD537D,0x496753E3,0xC7E85400,0x0B42549E,0x01875D87,0xCD2D5D19,0x43A25AFA,0x8F085A64, + 0x562848C8,0x9A824856,0x140D4FB5,0xD8A74F2B,0xD2624632,0x1EC846AC,0x9047414F,0x5CED41D1, + 0x299DC2ED,0xE537C273,0x6BB8C590,0xA712C50E,0xADD7CC17,0x617DCC89,0xEFF2CB6A,0x2358CBF4, + 0xFA78D958,0x36D2D9C6,0xB85DDE25,0x74F7DEBB,0x7E32D7A2,0xB298D73C,0x3C17D0DF,0xF0BDD041, + 0x5526F3C6,0x998CF358,0x1703F4BB,0xDBA9F425,0xD16CFD3C,0x1DC6FDA2,0x9349FA41,0x5FE3FADF, + 0x86C3E873,0x4A69E8ED,0xC4E6EF0E,0x084CEF90,0x0289E689,0xCE23E617,0x40ACE1F4,0x8C06E16A, + 0xD0EBA0BB,0x1C41A025,0x92CEA7C6,0x5E64A758,0x54A1AE41,0x980BAEDF,0x1684A93C,0xDA2EA9A2, + 0x030EBB0E,0xCFA4BB90,0x412BBC73,0x8D81BCED,0x8744B5F4,0x4BEEB56A,0xC561B289,0x09CBB217, + 0xAC509190,0x60FA910E,0xEE7596ED,0x22DF9673,0x281A9F6A,0xE4B09FF4,0x6A3F9817,0xA6959889, + 0x7FB58A25,0xB31F8ABB,0x3D908D58,0xF13A8DC6,0xFBFF84DF,0x37558441,0xB9DA83A2,0x7570833C, + 0x533B85DA,0x9F918544,0x111E82A7,0xDDB48239,0xD7718B20,0x1BDB8BBE,0x95548C5D,0x59FE8CC3, + 0x80DE9E6F,0x4C749EF1,0xC2FB9912,0x0E51998C,0x04949095,0xC83E900B,0x46B197E8,0x8A1B9776, + 0x2F80B4F1,0xE32AB46F,0x6DA5B38C,0xA10FB312,0xABCABA0B,0x6760BA95,0xE9EFBD76,0x2545BDE8, + 0xFC65AF44,0x30CFAFDA,0xBE40A839,0x72EAA8A7,0x782FA1BE,0xB485A120,0x3A0AA6C3,0xF6A0A65D, + 0xAA4DE78C,0x66E7E712,0xE868E0F1,0x24C2E06F,0x2E07E976,0xE2ADE9E8,0x6C22EE0B,0xA088EE95, + 0x79A8FC39,0xB502FCA7,0x3B8DFB44,0xF727FBDA,0xFDE2F2C3,0x3148F25D,0xBFC7F5BE,0x736DF520, + 0xD6F6D6A7,0x1A5CD639,0x94D3D1DA,0x5879D144,0x52BCD85D,0x9E16D8C3,0x1099DF20,0xDC33DFBE, + 0x0513CD12,0xC9B9CD8C,0x4736CA6F,0x8B9CCAF1,0x8159C3E8,0x4DF3C376,0xC37CC495,0x0FD6C40B, + 0x7AA64737,0xB60C47A9,0x3883404A,0xF42940D4,0xFEEC49CD,0x32464953,0xBCC94EB0,0x70634E2E, + 0xA9435C82,0x65E95C1C,0xEB665BFF,0x27CC5B61,0x2D095278,0xE1A352E6,0x6F2C5505,0xA386559B, + 0x061D761C,0xCAB77682,0x44387161,0x889271FF,0x825778E6,0x4EFD7878,0xC0727F9B,0x0CD87F05, 
+ 0xD5F86DA9,0x19526D37,0x97DD6AD4,0x5B776A4A,0x51B26353,0x9D1863CD,0x1397642E,0xDF3D64B0, + 0x83D02561,0x4F7A25FF,0xC1F5221C,0x0D5F2282,0x079A2B9B,0xCB302B05,0x45BF2CE6,0x89152C78, + 0x50353ED4,0x9C9F3E4A,0x121039A9,0xDEBA3937,0xD47F302E,0x18D530B0,0x965A3753,0x5AF037CD, + 0xFF6B144A,0x33C114D4,0xBD4E1337,0x71E413A9,0x7B211AB0,0xB78B1A2E,0x39041DCD,0xF5AE1D53, + 0x2C8E0FFF,0xE0240F61,0x6EAB0882,0xA201081C,0xA8C40105,0x646E019B,0xEAE10678,0x264B06E6, + } +#endif // CRC32_USE_LOOKUP_TABLE_SLICING_BY_8 || CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 + // beyond this point only relevant for Slicing-by-16 + ,{ + 0x00000000,0x177B1443,0x2EF62886,0x398D3CC5,0x5DEC510C,0x4A97454F,0x731A798A,0x64616DC9, + 0xBBD8A218,0xACA3B65B,0x952E8A9E,0x82559EDD,0xE634F314,0xF14FE757,0xC8C2DB92,0xDFB9CFD1, + 0xACC04271,0xBBBB5632,0x82366AF7,0x954D7EB4,0xF12C137D,0xE657073E,0xDFDA3BFB,0xC8A12FB8, + 0x1718E069,0x0063F42A,0x39EEC8EF,0x2E95DCAC,0x4AF4B165,0x5D8FA526,0x640299E3,0x73798DA0, + 0x82F182A3,0x958A96E0,0xAC07AA25,0xBB7CBE66,0xDF1DD3AF,0xC866C7EC,0xF1EBFB29,0xE690EF6A, + 0x392920BB,0x2E5234F8,0x17DF083D,0x00A41C7E,0x64C571B7,0x73BE65F4,0x4A335931,0x5D484D72, + 0x2E31C0D2,0x394AD491,0x00C7E854,0x17BCFC17,0x73DD91DE,0x64A6859D,0x5D2BB958,0x4A50AD1B, + 0x95E962CA,0x82927689,0xBB1F4A4C,0xAC645E0F,0xC80533C6,0xDF7E2785,0xE6F31B40,0xF1880F03, + 0xDE920307,0xC9E91744,0xF0642B81,0xE71F3FC2,0x837E520B,0x94054648,0xAD887A8D,0xBAF36ECE, + 0x654AA11F,0x7231B55C,0x4BBC8999,0x5CC79DDA,0x38A6F013,0x2FDDE450,0x1650D895,0x012BCCD6, + 0x72524176,0x65295535,0x5CA469F0,0x4BDF7DB3,0x2FBE107A,0x38C50439,0x014838FC,0x16332CBF, + 0xC98AE36E,0xDEF1F72D,0xE77CCBE8,0xF007DFAB,0x9466B262,0x831DA621,0xBA909AE4,0xADEB8EA7, + 0x5C6381A4,0x4B1895E7,0x7295A922,0x65EEBD61,0x018FD0A8,0x16F4C4EB,0x2F79F82E,0x3802EC6D, + 0xE7BB23BC,0xF0C037FF,0xC94D0B3A,0xDE361F79,0xBA5772B0,0xAD2C66F3,0x94A15A36,0x83DA4E75, + 
0xF0A3C3D5,0xE7D8D796,0xDE55EB53,0xC92EFF10,0xAD4F92D9,0xBA34869A,0x83B9BA5F,0x94C2AE1C, + 0x4B7B61CD,0x5C00758E,0x658D494B,0x72F65D08,0x169730C1,0x01EC2482,0x38611847,0x2F1A0C04, + 0x6655004F,0x712E140C,0x48A328C9,0x5FD83C8A,0x3BB95143,0x2CC24500,0x154F79C5,0x02346D86, + 0xDD8DA257,0xCAF6B614,0xF37B8AD1,0xE4009E92,0x8061F35B,0x971AE718,0xAE97DBDD,0xB9ECCF9E, + 0xCA95423E,0xDDEE567D,0xE4636AB8,0xF3187EFB,0x97791332,0x80020771,0xB98F3BB4,0xAEF42FF7, + 0x714DE026,0x6636F465,0x5FBBC8A0,0x48C0DCE3,0x2CA1B12A,0x3BDAA569,0x025799AC,0x152C8DEF, + 0xE4A482EC,0xF3DF96AF,0xCA52AA6A,0xDD29BE29,0xB948D3E0,0xAE33C7A3,0x97BEFB66,0x80C5EF25, + 0x5F7C20F4,0x480734B7,0x718A0872,0x66F11C31,0x029071F8,0x15EB65BB,0x2C66597E,0x3B1D4D3D, + 0x4864C09D,0x5F1FD4DE,0x6692E81B,0x71E9FC58,0x15889191,0x02F385D2,0x3B7EB917,0x2C05AD54, + 0xF3BC6285,0xE4C776C6,0xDD4A4A03,0xCA315E40,0xAE503389,0xB92B27CA,0x80A61B0F,0x97DD0F4C, + 0xB8C70348,0xAFBC170B,0x96312BCE,0x814A3F8D,0xE52B5244,0xF2504607,0xCBDD7AC2,0xDCA66E81, + 0x031FA150,0x1464B513,0x2DE989D6,0x3A929D95,0x5EF3F05C,0x4988E41F,0x7005D8DA,0x677ECC99, + 0x14074139,0x037C557A,0x3AF169BF,0x2D8A7DFC,0x49EB1035,0x5E900476,0x671D38B3,0x70662CF0, + 0xAFDFE321,0xB8A4F762,0x8129CBA7,0x9652DFE4,0xF233B22D,0xE548A66E,0xDCC59AAB,0xCBBE8EE8, + 0x3A3681EB,0x2D4D95A8,0x14C0A96D,0x03BBBD2E,0x67DAD0E7,0x70A1C4A4,0x492CF861,0x5E57EC22, + 0x81EE23F3,0x969537B0,0xAF180B75,0xB8631F36,0xDC0272FF,0xCB7966BC,0xF2F45A79,0xE58F4E3A, + 0x96F6C39A,0x818DD7D9,0xB800EB1C,0xAF7BFF5F,0xCB1A9296,0xDC6186D5,0xE5ECBA10,0xF297AE53, + 0x2D2E6182,0x3A5575C1,0x03D84904,0x14A35D47,0x70C2308E,0x67B924CD,0x5E341808,0x494F0C4B, + }, + + { + 0x00000000,0xEFC26B3E,0x04F5D03D,0xEB37BB03,0x09EBA07A,0xE629CB44,0x0D1E7047,0xE2DC1B79, + 0x13D740F4,0xFC152BCA,0x172290C9,0xF8E0FBF7,0x1A3CE08E,0xF5FE8BB0,0x1EC930B3,0xF10B5B8D, + 0x27AE81E8,0xC86CEAD6,0x235B51D5,0xCC993AEB,0x2E452192,0xC1874AAC,0x2AB0F1AF,0xC5729A91, + 
0x3479C11C,0xDBBBAA22,0x308C1121,0xDF4E7A1F,0x3D926166,0xD2500A58,0x3967B15B,0xD6A5DA65, + 0x4F5D03D0,0xA09F68EE,0x4BA8D3ED,0xA46AB8D3,0x46B6A3AA,0xA974C894,0x42437397,0xAD8118A9, + 0x5C8A4324,0xB348281A,0x587F9319,0xB7BDF827,0x5561E35E,0xBAA38860,0x51943363,0xBE56585D, + 0x68F38238,0x8731E906,0x6C065205,0x83C4393B,0x61182242,0x8EDA497C,0x65EDF27F,0x8A2F9941, + 0x7B24C2CC,0x94E6A9F2,0x7FD112F1,0x901379CF,0x72CF62B6,0x9D0D0988,0x763AB28B,0x99F8D9B5, + 0x9EBA07A0,0x71786C9E,0x9A4FD79D,0x758DBCA3,0x9751A7DA,0x7893CCE4,0x93A477E7,0x7C661CD9, + 0x8D6D4754,0x62AF2C6A,0x89989769,0x665AFC57,0x8486E72E,0x6B448C10,0x80733713,0x6FB15C2D, + 0xB9148648,0x56D6ED76,0xBDE15675,0x52233D4B,0xB0FF2632,0x5F3D4D0C,0xB40AF60F,0x5BC89D31, + 0xAAC3C6BC,0x4501AD82,0xAE361681,0x41F47DBF,0xA32866C6,0x4CEA0DF8,0xA7DDB6FB,0x481FDDC5, + 0xD1E70470,0x3E256F4E,0xD512D44D,0x3AD0BF73,0xD80CA40A,0x37CECF34,0xDCF97437,0x333B1F09, + 0xC2304484,0x2DF22FBA,0xC6C594B9,0x2907FF87,0xCBDBE4FE,0x24198FC0,0xCF2E34C3,0x20EC5FFD, + 0xF6498598,0x198BEEA6,0xF2BC55A5,0x1D7E3E9B,0xFFA225E2,0x10604EDC,0xFB57F5DF,0x14959EE1, + 0xE59EC56C,0x0A5CAE52,0xE16B1551,0x0EA97E6F,0xEC756516,0x03B70E28,0xE880B52B,0x0742DE15, + 0xE6050901,0x09C7623F,0xE2F0D93C,0x0D32B202,0xEFEEA97B,0x002CC245,0xEB1B7946,0x04D91278, + 0xF5D249F5,0x1A1022CB,0xF12799C8,0x1EE5F2F6,0xFC39E98F,0x13FB82B1,0xF8CC39B2,0x170E528C, + 0xC1AB88E9,0x2E69E3D7,0xC55E58D4,0x2A9C33EA,0xC8402893,0x278243AD,0xCCB5F8AE,0x23779390, + 0xD27CC81D,0x3DBEA323,0xD6891820,0x394B731E,0xDB976867,0x34550359,0xDF62B85A,0x30A0D364, + 0xA9580AD1,0x469A61EF,0xADADDAEC,0x426FB1D2,0xA0B3AAAB,0x4F71C195,0xA4467A96,0x4B8411A8, + 0xBA8F4A25,0x554D211B,0xBE7A9A18,0x51B8F126,0xB364EA5F,0x5CA68161,0xB7913A62,0x5853515C, + 0x8EF68B39,0x6134E007,0x8A035B04,0x65C1303A,0x871D2B43,0x68DF407D,0x83E8FB7E,0x6C2A9040, + 0x9D21CBCD,0x72E3A0F3,0x99D41BF0,0x761670CE,0x94CA6BB7,0x7B080089,0x903FBB8A,0x7FFDD0B4, + 0x78BF0EA1,0x977D659F,0x7C4ADE9C,0x9388B5A2,0x7154AEDB,0x9E96C5E5,0x75A17EE6,0x9A6315D8, 
+ 0x6B684E55,0x84AA256B,0x6F9D9E68,0x805FF556,0x6283EE2F,0x8D418511,0x66763E12,0x89B4552C, + 0x5F118F49,0xB0D3E477,0x5BE45F74,0xB426344A,0x56FA2F33,0xB938440D,0x520FFF0E,0xBDCD9430, + 0x4CC6CFBD,0xA304A483,0x48331F80,0xA7F174BE,0x452D6FC7,0xAAEF04F9,0x41D8BFFA,0xAE1AD4C4, + 0x37E20D71,0xD820664F,0x3317DD4C,0xDCD5B672,0x3E09AD0B,0xD1CBC635,0x3AFC7D36,0xD53E1608, + 0x24354D85,0xCBF726BB,0x20C09DB8,0xCF02F686,0x2DDEEDFF,0xC21C86C1,0x292B3DC2,0xC6E956FC, + 0x104C8C99,0xFF8EE7A7,0x14B95CA4,0xFB7B379A,0x19A72CE3,0xF66547DD,0x1D52FCDE,0xF29097E0, + 0x039BCC6D,0xEC59A753,0x076E1C50,0xE8AC776E,0x0A706C17,0xE5B20729,0x0E85BC2A,0xE147D714, + }, + + { + 0x00000000,0xC18EDFC0,0x586CB9C1,0x99E26601,0xB0D97382,0x7157AC42,0xE8B5CA43,0x293B1583, + 0xBAC3E145,0x7B4D3E85,0xE2AF5884,0x23218744,0x0A1A92C7,0xCB944D07,0x52762B06,0x93F8F4C6, + 0xAEF6C4CB,0x6F781B0B,0xF69A7D0A,0x3714A2CA,0x1E2FB749,0xDFA16889,0x46430E88,0x87CDD148, + 0x1435258E,0xD5BBFA4E,0x4C599C4F,0x8DD7438F,0xA4EC560C,0x656289CC,0xFC80EFCD,0x3D0E300D, + 0x869C8FD7,0x47125017,0xDEF03616,0x1F7EE9D6,0x3645FC55,0xF7CB2395,0x6E294594,0xAFA79A54, + 0x3C5F6E92,0xFDD1B152,0x6433D753,0xA5BD0893,0x8C861D10,0x4D08C2D0,0xD4EAA4D1,0x15647B11, + 0x286A4B1C,0xE9E494DC,0x7006F2DD,0xB1882D1D,0x98B3389E,0x593DE75E,0xC0DF815F,0x01515E9F, + 0x92A9AA59,0x53277599,0xCAC51398,0x0B4BCC58,0x2270D9DB,0xE3FE061B,0x7A1C601A,0xBB92BFDA, + 0xD64819EF,0x17C6C62F,0x8E24A02E,0x4FAA7FEE,0x66916A6D,0xA71FB5AD,0x3EFDD3AC,0xFF730C6C, + 0x6C8BF8AA,0xAD05276A,0x34E7416B,0xF5699EAB,0xDC528B28,0x1DDC54E8,0x843E32E9,0x45B0ED29, + 0x78BEDD24,0xB93002E4,0x20D264E5,0xE15CBB25,0xC867AEA6,0x09E97166,0x900B1767,0x5185C8A7, + 0xC27D3C61,0x03F3E3A1,0x9A1185A0,0x5B9F5A60,0x72A44FE3,0xB32A9023,0x2AC8F622,0xEB4629E2, + 0x50D49638,0x915A49F8,0x08B82FF9,0xC936F039,0xE00DE5BA,0x21833A7A,0xB8615C7B,0x79EF83BB, + 0xEA17777D,0x2B99A8BD,0xB27BCEBC,0x73F5117C,0x5ACE04FF,0x9B40DB3F,0x02A2BD3E,0xC32C62FE, + 
0xFE2252F3,0x3FAC8D33,0xA64EEB32,0x67C034F2,0x4EFB2171,0x8F75FEB1,0x169798B0,0xD7194770, + 0x44E1B3B6,0x856F6C76,0x1C8D0A77,0xDD03D5B7,0xF438C034,0x35B61FF4,0xAC5479F5,0x6DDAA635, + 0x77E1359F,0xB66FEA5F,0x2F8D8C5E,0xEE03539E,0xC738461D,0x06B699DD,0x9F54FFDC,0x5EDA201C, + 0xCD22D4DA,0x0CAC0B1A,0x954E6D1B,0x54C0B2DB,0x7DFBA758,0xBC757898,0x25971E99,0xE419C159, + 0xD917F154,0x18992E94,0x817B4895,0x40F59755,0x69CE82D6,0xA8405D16,0x31A23B17,0xF02CE4D7, + 0x63D41011,0xA25ACFD1,0x3BB8A9D0,0xFA367610,0xD30D6393,0x1283BC53,0x8B61DA52,0x4AEF0592, + 0xF17DBA48,0x30F36588,0xA9110389,0x689FDC49,0x41A4C9CA,0x802A160A,0x19C8700B,0xD846AFCB, + 0x4BBE5B0D,0x8A3084CD,0x13D2E2CC,0xD25C3D0C,0xFB67288F,0x3AE9F74F,0xA30B914E,0x62854E8E, + 0x5F8B7E83,0x9E05A143,0x07E7C742,0xC6691882,0xEF520D01,0x2EDCD2C1,0xB73EB4C0,0x76B06B00, + 0xE5489FC6,0x24C64006,0xBD242607,0x7CAAF9C7,0x5591EC44,0x941F3384,0x0DFD5585,0xCC738A45, + 0xA1A92C70,0x6027F3B0,0xF9C595B1,0x384B4A71,0x11705FF2,0xD0FE8032,0x491CE633,0x889239F3, + 0x1B6ACD35,0xDAE412F5,0x430674F4,0x8288AB34,0xABB3BEB7,0x6A3D6177,0xF3DF0776,0x3251D8B6, + 0x0F5FE8BB,0xCED1377B,0x5733517A,0x96BD8EBA,0xBF869B39,0x7E0844F9,0xE7EA22F8,0x2664FD38, + 0xB59C09FE,0x7412D63E,0xEDF0B03F,0x2C7E6FFF,0x05457A7C,0xC4CBA5BC,0x5D29C3BD,0x9CA71C7D, + 0x2735A3A7,0xE6BB7C67,0x7F591A66,0xBED7C5A6,0x97ECD025,0x56620FE5,0xCF8069E4,0x0E0EB624, + 0x9DF642E2,0x5C789D22,0xC59AFB23,0x041424E3,0x2D2F3160,0xECA1EEA0,0x754388A1,0xB4CD5761, + 0x89C3676C,0x484DB8AC,0xD1AFDEAD,0x1021016D,0x391A14EE,0xF894CB2E,0x6176AD2F,0xA0F872EF, + 0x33008629,0xF28E59E9,0x6B6C3FE8,0xAAE2E028,0x83D9F5AB,0x42572A6B,0xDBB54C6A,0x1A3B93AA, + }, + + { + 0x00000000,0x9BA54C6F,0xEC3B9E9F,0x779ED2F0,0x03063B7F,0x98A37710,0xEF3DA5E0,0x7498E98F, + 0x060C76FE,0x9DA93A91,0xEA37E861,0x7192A40E,0x050A4D81,0x9EAF01EE,0xE931D31E,0x72949F71, + 0x0C18EDFC,0x97BDA193,0xE0237363,0x7B863F0C,0x0F1ED683,0x94BB9AEC,0xE325481C,0x78800473, + 
0x0A149B02,0x91B1D76D,0xE62F059D,0x7D8A49F2,0x0912A07D,0x92B7EC12,0xE5293EE2,0x7E8C728D, + 0x1831DBF8,0x83949797,0xF40A4567,0x6FAF0908,0x1B37E087,0x8092ACE8,0xF70C7E18,0x6CA93277, + 0x1E3DAD06,0x8598E169,0xF2063399,0x69A37FF6,0x1D3B9679,0x869EDA16,0xF10008E6,0x6AA54489, + 0x14293604,0x8F8C7A6B,0xF812A89B,0x63B7E4F4,0x172F0D7B,0x8C8A4114,0xFB1493E4,0x60B1DF8B, + 0x122540FA,0x89800C95,0xFE1EDE65,0x65BB920A,0x11237B85,0x8A8637EA,0xFD18E51A,0x66BDA975, + 0x3063B7F0,0xABC6FB9F,0xDC58296F,0x47FD6500,0x33658C8F,0xA8C0C0E0,0xDF5E1210,0x44FB5E7F, + 0x366FC10E,0xADCA8D61,0xDA545F91,0x41F113FE,0x3569FA71,0xAECCB61E,0xD95264EE,0x42F72881, + 0x3C7B5A0C,0xA7DE1663,0xD040C493,0x4BE588FC,0x3F7D6173,0xA4D82D1C,0xD346FFEC,0x48E3B383, + 0x3A772CF2,0xA1D2609D,0xD64CB26D,0x4DE9FE02,0x3971178D,0xA2D45BE2,0xD54A8912,0x4EEFC57D, + 0x28526C08,0xB3F72067,0xC469F297,0x5FCCBEF8,0x2B545777,0xB0F11B18,0xC76FC9E8,0x5CCA8587, + 0x2E5E1AF6,0xB5FB5699,0xC2658469,0x59C0C806,0x2D582189,0xB6FD6DE6,0xC163BF16,0x5AC6F379, + 0x244A81F4,0xBFEFCD9B,0xC8711F6B,0x53D45304,0x274CBA8B,0xBCE9F6E4,0xCB772414,0x50D2687B, + 0x2246F70A,0xB9E3BB65,0xCE7D6995,0x55D825FA,0x2140CC75,0xBAE5801A,0xCD7B52EA,0x56DE1E85, + 0x60C76FE0,0xFB62238F,0x8CFCF17F,0x1759BD10,0x63C1549F,0xF86418F0,0x8FFACA00,0x145F866F, + 0x66CB191E,0xFD6E5571,0x8AF08781,0x1155CBEE,0x65CD2261,0xFE686E0E,0x89F6BCFE,0x1253F091, + 0x6CDF821C,0xF77ACE73,0x80E41C83,0x1B4150EC,0x6FD9B963,0xF47CF50C,0x83E227FC,0x18476B93, + 0x6AD3F4E2,0xF176B88D,0x86E86A7D,0x1D4D2612,0x69D5CF9D,0xF27083F2,0x85EE5102,0x1E4B1D6D, + 0x78F6B418,0xE353F877,0x94CD2A87,0x0F6866E8,0x7BF08F67,0xE055C308,0x97CB11F8,0x0C6E5D97, + 0x7EFAC2E6,0xE55F8E89,0x92C15C79,0x09641016,0x7DFCF999,0xE659B5F6,0x91C76706,0x0A622B69, + 0x74EE59E4,0xEF4B158B,0x98D5C77B,0x03708B14,0x77E8629B,0xEC4D2EF4,0x9BD3FC04,0x0076B06B, + 0x72E22F1A,0xE9476375,0x9ED9B185,0x057CFDEA,0x71E41465,0xEA41580A,0x9DDF8AFA,0x067AC695, + 0x50A4D810,0xCB01947F,0xBC9F468F,0x273A0AE0,0x53A2E36F,0xC807AF00,0xBF997DF0,0x243C319F, 
+ 0x56A8AEEE,0xCD0DE281,0xBA933071,0x21367C1E,0x55AE9591,0xCE0BD9FE,0xB9950B0E,0x22304761, + 0x5CBC35EC,0xC7197983,0xB087AB73,0x2B22E71C,0x5FBA0E93,0xC41F42FC,0xB381900C,0x2824DC63, + 0x5AB04312,0xC1150F7D,0xB68BDD8D,0x2D2E91E2,0x59B6786D,0xC2133402,0xB58DE6F2,0x2E28AA9D, + 0x489503E8,0xD3304F87,0xA4AE9D77,0x3F0BD118,0x4B933897,0xD03674F8,0xA7A8A608,0x3C0DEA67, + 0x4E997516,0xD53C3979,0xA2A2EB89,0x3907A7E6,0x4D9F4E69,0xD63A0206,0xA1A4D0F6,0x3A019C99, + 0x448DEE14,0xDF28A27B,0xA8B6708B,0x33133CE4,0x478BD56B,0xDC2E9904,0xABB04BF4,0x3015079B, + 0x428198EA,0xD924D485,0xAEBA0675,0x351F4A1A,0x4187A395,0xDA22EFFA,0xADBC3D0A,0x36197165, + }, + + { + 0x00000000,0xDD96D985,0x605CB54B,0xBDCA6CCE,0xC0B96A96,0x1D2FB313,0xA0E5DFDD,0x7D730658, + 0x5A03D36D,0x87950AE8,0x3A5F6626,0xE7C9BFA3,0x9ABAB9FB,0x472C607E,0xFAE60CB0,0x2770D535, + 0xB407A6DA,0x69917F5F,0xD45B1391,0x09CDCA14,0x74BECC4C,0xA92815C9,0x14E27907,0xC974A082, + 0xEE0475B7,0x3392AC32,0x8E58C0FC,0x53CE1979,0x2EBD1F21,0xF32BC6A4,0x4EE1AA6A,0x937773EF, + 0xB37E4BF5,0x6EE89270,0xD322FEBE,0x0EB4273B,0x73C72163,0xAE51F8E6,0x139B9428,0xCE0D4DAD, + 0xE97D9898,0x34EB411D,0x89212DD3,0x54B7F456,0x29C4F20E,0xF4522B8B,0x49984745,0x940E9EC0, + 0x0779ED2F,0xDAEF34AA,0x67255864,0xBAB381E1,0xC7C087B9,0x1A565E3C,0xA79C32F2,0x7A0AEB77, + 0x5D7A3E42,0x80ECE7C7,0x3D268B09,0xE0B0528C,0x9DC354D4,0x40558D51,0xFD9FE19F,0x2009381A, + 0xBD8D91AB,0x601B482E,0xDDD124E0,0x0047FD65,0x7D34FB3D,0xA0A222B8,0x1D684E76,0xC0FE97F3, + 0xE78E42C6,0x3A189B43,0x87D2F78D,0x5A442E08,0x27372850,0xFAA1F1D5,0x476B9D1B,0x9AFD449E, + 0x098A3771,0xD41CEEF4,0x69D6823A,0xB4405BBF,0xC9335DE7,0x14A58462,0xA96FE8AC,0x74F93129, + 0x5389E41C,0x8E1F3D99,0x33D55157,0xEE4388D2,0x93308E8A,0x4EA6570F,0xF36C3BC1,0x2EFAE244, + 0x0EF3DA5E,0xD36503DB,0x6EAF6F15,0xB339B690,0xCE4AB0C8,0x13DC694D,0xAE160583,0x7380DC06, + 0x54F00933,0x8966D0B6,0x34ACBC78,0xE93A65FD,0x944963A5,0x49DFBA20,0xF415D6EE,0x29830F6B, + 
0xBAF47C84,0x6762A501,0xDAA8C9CF,0x073E104A,0x7A4D1612,0xA7DBCF97,0x1A11A359,0xC7877ADC, + 0xE0F7AFE9,0x3D61766C,0x80AB1AA2,0x5D3DC327,0x204EC57F,0xFDD81CFA,0x40127034,0x9D84A9B1, + 0xA06A2517,0x7DFCFC92,0xC036905C,0x1DA049D9,0x60D34F81,0xBD459604,0x008FFACA,0xDD19234F, + 0xFA69F67A,0x27FF2FFF,0x9A354331,0x47A39AB4,0x3AD09CEC,0xE7464569,0x5A8C29A7,0x871AF022, + 0x146D83CD,0xC9FB5A48,0x74313686,0xA9A7EF03,0xD4D4E95B,0x094230DE,0xB4885C10,0x691E8595, + 0x4E6E50A0,0x93F88925,0x2E32E5EB,0xF3A43C6E,0x8ED73A36,0x5341E3B3,0xEE8B8F7D,0x331D56F8, + 0x13146EE2,0xCE82B767,0x7348DBA9,0xAEDE022C,0xD3AD0474,0x0E3BDDF1,0xB3F1B13F,0x6E6768BA, + 0x4917BD8F,0x9481640A,0x294B08C4,0xF4DDD141,0x89AED719,0x54380E9C,0xE9F26252,0x3464BBD7, + 0xA713C838,0x7A8511BD,0xC74F7D73,0x1AD9A4F6,0x67AAA2AE,0xBA3C7B2B,0x07F617E5,0xDA60CE60, + 0xFD101B55,0x2086C2D0,0x9D4CAE1E,0x40DA779B,0x3DA971C3,0xE03FA846,0x5DF5C488,0x80631D0D, + 0x1DE7B4BC,0xC0716D39,0x7DBB01F7,0xA02DD872,0xDD5EDE2A,0x00C807AF,0xBD026B61,0x6094B2E4, + 0x47E467D1,0x9A72BE54,0x27B8D29A,0xFA2E0B1F,0x875D0D47,0x5ACBD4C2,0xE701B80C,0x3A976189, + 0xA9E01266,0x7476CBE3,0xC9BCA72D,0x142A7EA8,0x695978F0,0xB4CFA175,0x0905CDBB,0xD493143E, + 0xF3E3C10B,0x2E75188E,0x93BF7440,0x4E29ADC5,0x335AAB9D,0xEECC7218,0x53061ED6,0x8E90C753, + 0xAE99FF49,0x730F26CC,0xCEC54A02,0x13539387,0x6E2095DF,0xB3B64C5A,0x0E7C2094,0xD3EAF911, + 0xF49A2C24,0x290CF5A1,0x94C6996F,0x495040EA,0x342346B2,0xE9B59F37,0x547FF3F9,0x89E92A7C, + 0x1A9E5993,0xC7088016,0x7AC2ECD8,0xA754355D,0xDA273305,0x07B1EA80,0xBA7B864E,0x67ED5FCB, + 0x409D8AFE,0x9D0B537B,0x20C13FB5,0xFD57E630,0x8024E068,0x5DB239ED,0xE0785523,0x3DEE8CA6, + }, + + { + 0x00000000,0x9D0FE176,0xE16EC4AD,0x7C6125DB,0x19AC8F1B,0x84A36E6D,0xF8C24BB6,0x65CDAAC0, + 0x33591E36,0xAE56FF40,0xD237DA9B,0x4F383BED,0x2AF5912D,0xB7FA705B,0xCB9B5580,0x5694B4F6, + 0x66B23C6C,0xFBBDDD1A,0x87DCF8C1,0x1AD319B7,0x7F1EB377,0xE2115201,0x9E7077DA,0x037F96AC, + 
0x55EB225A,0xC8E4C32C,0xB485E6F7,0x298A0781,0x4C47AD41,0xD1484C37,0xAD2969EC,0x3026889A, + 0xCD6478D8,0x506B99AE,0x2C0ABC75,0xB1055D03,0xD4C8F7C3,0x49C716B5,0x35A6336E,0xA8A9D218, + 0xFE3D66EE,0x63328798,0x1F53A243,0x825C4335,0xE791E9F5,0x7A9E0883,0x06FF2D58,0x9BF0CC2E, + 0xABD644B4,0x36D9A5C2,0x4AB88019,0xD7B7616F,0xB27ACBAF,0x2F752AD9,0x53140F02,0xCE1BEE74, + 0x988F5A82,0x0580BBF4,0x79E19E2F,0xE4EE7F59,0x8123D599,0x1C2C34EF,0x604D1134,0xFD42F042, + 0x41B9F7F1,0xDCB61687,0xA0D7335C,0x3DD8D22A,0x581578EA,0xC51A999C,0xB97BBC47,0x24745D31, + 0x72E0E9C7,0xEFEF08B1,0x938E2D6A,0x0E81CC1C,0x6B4C66DC,0xF64387AA,0x8A22A271,0x172D4307, + 0x270BCB9D,0xBA042AEB,0xC6650F30,0x5B6AEE46,0x3EA74486,0xA3A8A5F0,0xDFC9802B,0x42C6615D, + 0x1452D5AB,0x895D34DD,0xF53C1106,0x6833F070,0x0DFE5AB0,0x90F1BBC6,0xEC909E1D,0x719F7F6B, + 0x8CDD8F29,0x11D26E5F,0x6DB34B84,0xF0BCAAF2,0x95710032,0x087EE144,0x741FC49F,0xE91025E9, + 0xBF84911F,0x228B7069,0x5EEA55B2,0xC3E5B4C4,0xA6281E04,0x3B27FF72,0x4746DAA9,0xDA493BDF, + 0xEA6FB345,0x77605233,0x0B0177E8,0x960E969E,0xF3C33C5E,0x6ECCDD28,0x12ADF8F3,0x8FA21985, + 0xD936AD73,0x44394C05,0x385869DE,0xA55788A8,0xC09A2268,0x5D95C31E,0x21F4E6C5,0xBCFB07B3, + 0x8373EFE2,0x1E7C0E94,0x621D2B4F,0xFF12CA39,0x9ADF60F9,0x07D0818F,0x7BB1A454,0xE6BE4522, + 0xB02AF1D4,0x2D2510A2,0x51443579,0xCC4BD40F,0xA9867ECF,0x34899FB9,0x48E8BA62,0xD5E75B14, + 0xE5C1D38E,0x78CE32F8,0x04AF1723,0x99A0F655,0xFC6D5C95,0x6162BDE3,0x1D039838,0x800C794E, + 0xD698CDB8,0x4B972CCE,0x37F60915,0xAAF9E863,0xCF3442A3,0x523BA3D5,0x2E5A860E,0xB3556778, + 0x4E17973A,0xD318764C,0xAF795397,0x3276B2E1,0x57BB1821,0xCAB4F957,0xB6D5DC8C,0x2BDA3DFA, + 0x7D4E890C,0xE041687A,0x9C204DA1,0x012FACD7,0x64E20617,0xF9EDE761,0x858CC2BA,0x188323CC, + 0x28A5AB56,0xB5AA4A20,0xC9CB6FFB,0x54C48E8D,0x3109244D,0xAC06C53B,0xD067E0E0,0x4D680196, + 0x1BFCB560,0x86F35416,0xFA9271CD,0x679D90BB,0x02503A7B,0x9F5FDB0D,0xE33EFED6,0x7E311FA0, + 0xC2CA1813,0x5FC5F965,0x23A4DCBE,0xBEAB3DC8,0xDB669708,0x4669767E,0x3A0853A5,0xA707B2D3, 
+ 0xF1930625,0x6C9CE753,0x10FDC288,0x8DF223FE,0xE83F893E,0x75306848,0x09514D93,0x945EACE5, + 0xA478247F,0x3977C509,0x4516E0D2,0xD81901A4,0xBDD4AB64,0x20DB4A12,0x5CBA6FC9,0xC1B58EBF, + 0x97213A49,0x0A2EDB3F,0x764FFEE4,0xEB401F92,0x8E8DB552,0x13825424,0x6FE371FF,0xF2EC9089, + 0x0FAE60CB,0x92A181BD,0xEEC0A466,0x73CF4510,0x1602EFD0,0x8B0D0EA6,0xF76C2B7D,0x6A63CA0B, + 0x3CF77EFD,0xA1F89F8B,0xDD99BA50,0x40965B26,0x255BF1E6,0xB8541090,0xC435354B,0x593AD43D, + 0x691C5CA7,0xF413BDD1,0x8872980A,0x157D797C,0x70B0D3BC,0xEDBF32CA,0x91DE1711,0x0CD1F667, + 0x5A454291,0xC74AA3E7,0xBB2B863C,0x2624674A,0x43E9CD8A,0xDEE62CFC,0xA2870927,0x3F88E851, + }, + + { + 0x00000000,0xB9FBDBE8,0xA886B191,0x117D6A79,0x8A7C6563,0x3387BE8B,0x22FAD4F2,0x9B010F1A, + 0xCF89CC87,0x7672176F,0x670F7D16,0xDEF4A6FE,0x45F5A9E4,0xFC0E720C,0xED731875,0x5488C39D, + 0x44629F4F,0xFD9944A7,0xECE42EDE,0x551FF536,0xCE1EFA2C,0x77E521C4,0x66984BBD,0xDF639055, + 0x8BEB53C8,0x32108820,0x236DE259,0x9A9639B1,0x019736AB,0xB86CED43,0xA911873A,0x10EA5CD2, + 0x88C53E9E,0x313EE576,0x20438F0F,0x99B854E7,0x02B95BFD,0xBB428015,0xAA3FEA6C,0x13C43184, + 0x474CF219,0xFEB729F1,0xEFCA4388,0x56319860,0xCD30977A,0x74CB4C92,0x65B626EB,0xDC4DFD03, + 0xCCA7A1D1,0x755C7A39,0x64211040,0xDDDACBA8,0x46DBC4B2,0xFF201F5A,0xEE5D7523,0x57A6AECB, + 0x032E6D56,0xBAD5B6BE,0xABA8DCC7,0x1253072F,0x89520835,0x30A9D3DD,0x21D4B9A4,0x982F624C, + 0xCAFB7B7D,0x7300A095,0x627DCAEC,0xDB861104,0x40871E1E,0xF97CC5F6,0xE801AF8F,0x51FA7467, + 0x0572B7FA,0xBC896C12,0xADF4066B,0x140FDD83,0x8F0ED299,0x36F50971,0x27886308,0x9E73B8E0, + 0x8E99E432,0x37623FDA,0x261F55A3,0x9FE48E4B,0x04E58151,0xBD1E5AB9,0xAC6330C0,0x1598EB28, + 0x411028B5,0xF8EBF35D,0xE9969924,0x506D42CC,0xCB6C4DD6,0x7297963E,0x63EAFC47,0xDA1127AF, + 0x423E45E3,0xFBC59E0B,0xEAB8F472,0x53432F9A,0xC8422080,0x71B9FB68,0x60C49111,0xD93F4AF9, + 0x8DB78964,0x344C528C,0x253138F5,0x9CCAE31D,0x07CBEC07,0xBE3037EF,0xAF4D5D96,0x16B6867E, + 
0x065CDAAC,0xBFA70144,0xAEDA6B3D,0x1721B0D5,0x8C20BFCF,0x35DB6427,0x24A60E5E,0x9D5DD5B6, + 0xC9D5162B,0x702ECDC3,0x6153A7BA,0xD8A87C52,0x43A97348,0xFA52A8A0,0xEB2FC2D9,0x52D41931, + 0x4E87F0BB,0xF77C2B53,0xE601412A,0x5FFA9AC2,0xC4FB95D8,0x7D004E30,0x6C7D2449,0xD586FFA1, + 0x810E3C3C,0x38F5E7D4,0x29888DAD,0x90735645,0x0B72595F,0xB28982B7,0xA3F4E8CE,0x1A0F3326, + 0x0AE56FF4,0xB31EB41C,0xA263DE65,0x1B98058D,0x80990A97,0x3962D17F,0x281FBB06,0x91E460EE, + 0xC56CA373,0x7C97789B,0x6DEA12E2,0xD411C90A,0x4F10C610,0xF6EB1DF8,0xE7967781,0x5E6DAC69, + 0xC642CE25,0x7FB915CD,0x6EC47FB4,0xD73FA45C,0x4C3EAB46,0xF5C570AE,0xE4B81AD7,0x5D43C13F, + 0x09CB02A2,0xB030D94A,0xA14DB333,0x18B668DB,0x83B767C1,0x3A4CBC29,0x2B31D650,0x92CA0DB8, + 0x8220516A,0x3BDB8A82,0x2AA6E0FB,0x935D3B13,0x085C3409,0xB1A7EFE1,0xA0DA8598,0x19215E70, + 0x4DA99DED,0xF4524605,0xE52F2C7C,0x5CD4F794,0xC7D5F88E,0x7E2E2366,0x6F53491F,0xD6A892F7, + 0x847C8BC6,0x3D87502E,0x2CFA3A57,0x9501E1BF,0x0E00EEA5,0xB7FB354D,0xA6865F34,0x1F7D84DC, + 0x4BF54741,0xF20E9CA9,0xE373F6D0,0x5A882D38,0xC1892222,0x7872F9CA,0x690F93B3,0xD0F4485B, + 0xC01E1489,0x79E5CF61,0x6898A518,0xD1637EF0,0x4A6271EA,0xF399AA02,0xE2E4C07B,0x5B1F1B93, + 0x0F97D80E,0xB66C03E6,0xA711699F,0x1EEAB277,0x85EBBD6D,0x3C106685,0x2D6D0CFC,0x9496D714, + 0x0CB9B558,0xB5426EB0,0xA43F04C9,0x1DC4DF21,0x86C5D03B,0x3F3E0BD3,0x2E4361AA,0x97B8BA42, + 0xC33079DF,0x7ACBA237,0x6BB6C84E,0xD24D13A6,0x494C1CBC,0xF0B7C754,0xE1CAAD2D,0x583176C5, + 0x48DB2A17,0xF120F1FF,0xE05D9B86,0x59A6406E,0xC2A74F74,0x7B5C949C,0x6A21FEE5,0xD3DA250D, + 0x8752E690,0x3EA93D78,0x2FD45701,0x962F8CE9,0x0D2E83F3,0xB4D5581B,0xA5A83262,0x1C53E98A, + }, + + { + 0x00000000,0xAE689191,0x87A02563,0x29C8B4F2,0xD4314C87,0x7A59DD16,0x539169E4,0xFDF9F875, + 0x73139F4F,0xDD7B0EDE,0xF4B3BA2C,0x5ADB2BBD,0xA722D3C8,0x094A4259,0x2082F6AB,0x8EEA673A, + 0xE6273E9E,0x484FAF0F,0x61871BFD,0xCFEF8A6C,0x32167219,0x9C7EE388,0xB5B6577A,0x1BDEC6EB, + 
0x9534A1D1,0x3B5C3040,0x129484B2,0xBCFC1523,0x4105ED56,0xEF6D7CC7,0xC6A5C835,0x68CD59A4, + 0x173F7B7D,0xB957EAEC,0x909F5E1E,0x3EF7CF8F,0xC30E37FA,0x6D66A66B,0x44AE1299,0xEAC68308, + 0x642CE432,0xCA4475A3,0xE38CC151,0x4DE450C0,0xB01DA8B5,0x1E753924,0x37BD8DD6,0x99D51C47, + 0xF11845E3,0x5F70D472,0x76B86080,0xD8D0F111,0x25290964,0x8B4198F5,0xA2892C07,0x0CE1BD96, + 0x820BDAAC,0x2C634B3D,0x05ABFFCF,0xABC36E5E,0x563A962B,0xF85207BA,0xD19AB348,0x7FF222D9, + 0x2E7EF6FA,0x8016676B,0xA9DED399,0x07B64208,0xFA4FBA7D,0x54272BEC,0x7DEF9F1E,0xD3870E8F, + 0x5D6D69B5,0xF305F824,0xDACD4CD6,0x74A5DD47,0x895C2532,0x2734B4A3,0x0EFC0051,0xA09491C0, + 0xC859C864,0x663159F5,0x4FF9ED07,0xE1917C96,0x1C6884E3,0xB2001572,0x9BC8A180,0x35A03011, + 0xBB4A572B,0x1522C6BA,0x3CEA7248,0x9282E3D9,0x6F7B1BAC,0xC1138A3D,0xE8DB3ECF,0x46B3AF5E, + 0x39418D87,0x97291C16,0xBEE1A8E4,0x10893975,0xED70C100,0x43185091,0x6AD0E463,0xC4B875F2, + 0x4A5212C8,0xE43A8359,0xCDF237AB,0x639AA63A,0x9E635E4F,0x300BCFDE,0x19C37B2C,0xB7ABEABD, + 0xDF66B319,0x710E2288,0x58C6967A,0xF6AE07EB,0x0B57FF9E,0xA53F6E0F,0x8CF7DAFD,0x229F4B6C, + 0xAC752C56,0x021DBDC7,0x2BD50935,0x85BD98A4,0x784460D1,0xD62CF140,0xFFE445B2,0x518CD423, + 0x5CFDEDF4,0xF2957C65,0xDB5DC897,0x75355906,0x88CCA173,0x26A430E2,0x0F6C8410,0xA1041581, + 0x2FEE72BB,0x8186E32A,0xA84E57D8,0x0626C649,0xFBDF3E3C,0x55B7AFAD,0x7C7F1B5F,0xD2178ACE, + 0xBADAD36A,0x14B242FB,0x3D7AF609,0x93126798,0x6EEB9FED,0xC0830E7C,0xE94BBA8E,0x47232B1F, + 0xC9C94C25,0x67A1DDB4,0x4E696946,0xE001F8D7,0x1DF800A2,0xB3909133,0x9A5825C1,0x3430B450, + 0x4BC29689,0xE5AA0718,0xCC62B3EA,0x620A227B,0x9FF3DA0E,0x319B4B9F,0x1853FF6D,0xB63B6EFC, + 0x38D109C6,0x96B99857,0xBF712CA5,0x1119BD34,0xECE04541,0x4288D4D0,0x6B406022,0xC528F1B3, + 0xADE5A817,0x038D3986,0x2A458D74,0x842D1CE5,0x79D4E490,0xD7BC7501,0xFE74C1F3,0x501C5062, + 0xDEF63758,0x709EA6C9,0x5956123B,0xF73E83AA,0x0AC77BDF,0xA4AFEA4E,0x8D675EBC,0x230FCF2D, + 0x72831B0E,0xDCEB8A9F,0xF5233E6D,0x5B4BAFFC,0xA6B25789,0x08DAC618,0x211272EA,0x8F7AE37B, 
+ 0x01908441,0xAFF815D0,0x8630A122,0x285830B3,0xD5A1C8C6,0x7BC95957,0x5201EDA5,0xFC697C34, + 0x94A42590,0x3ACCB401,0x130400F3,0xBD6C9162,0x40956917,0xEEFDF886,0xC7354C74,0x695DDDE5, + 0xE7B7BADF,0x49DF2B4E,0x60179FBC,0xCE7F0E2D,0x3386F658,0x9DEE67C9,0xB426D33B,0x1A4E42AA, + 0x65BC6073,0xCBD4F1E2,0xE21C4510,0x4C74D481,0xB18D2CF4,0x1FE5BD65,0x362D0997,0x98459806, + 0x16AFFF3C,0xB8C76EAD,0x910FDA5F,0x3F674BCE,0xC29EB3BB,0x6CF6222A,0x453E96D8,0xEB560749, + 0x839B5EED,0x2DF3CF7C,0x043B7B8E,0xAA53EA1F,0x57AA126A,0xF9C283FB,0xD00A3709,0x7E62A698, + 0xF088C1A2,0x5EE05033,0x7728E4C1,0xD9407550,0x24B98D25,0x8AD11CB4,0xA319A846,0x0D7139D7, + } +#endif // CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 +}; +#endif // NO_LUT \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/crc32.h b/image_capture/third_party/percipio/common/crc32.h new file mode 100644 index 0000000..1737091 --- /dev/null +++ b/image_capture/third_party/percipio/common/crc32.h @@ -0,0 +1,69 @@ +// ////////////////////////////////////////////////////////// +// Crc32.h +// Copyright (c) 2011-2019 Stephan Brumme. All rights reserved. 
+// Slicing-by-16 contributed by Bulat Ziganshin +// Tableless bytewise CRC contributed by Hagai Gold +// see http://create.stephan-brumme.com/disclaimer.html +// + +// if running on an embedded system, you might consider shrinking the +// big Crc32Lookup table by undefining these lines: +#define CRC32_USE_LOOKUP_TABLE_BYTE +#define CRC32_USE_LOOKUP_TABLE_SLICING_BY_4 +#define CRC32_USE_LOOKUP_TABLE_SLICING_BY_8 +#define CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 +// - crc32_bitwise doesn't need it at all +// - crc32_halfbyte has its own small lookup table +// - crc32_1byte_tableless and crc32_1byte_tableless2 don't need it at all +// - crc32_1byte needs only Crc32Lookup[0] +// - crc32_4bytes needs only Crc32Lookup[0..3] +// - crc32_8bytes needs only Crc32Lookup[0..7] +// - crc32_4x8bytes needs only Crc32Lookup[0..7] +// - crc32_16bytes needs all of Crc32Lookup +// using the aforementioned #defines the table is automatically fitted to your needs + +// uint8_t, uint32_t, int32_t +#include +// size_t +#include + +// crc32_fast selects the fastest algorithm depending on flags (CRC32_USE_LOOKUP_...) 
+/// compute CRC32 using the fastest algorithm for large datasets on modern CPUs +uint32_t crc32_fast (const void* data, size_t length, uint32_t previousCrc32 = 0); + +/// merge two CRC32 such that result = crc32(dataB, lengthB, crc32(dataA, lengthA)) +uint32_t crc32_combine (uint32_t crcA, uint32_t crcB, size_t lengthB); + +/// compute CRC32 (bitwise algorithm) +uint32_t crc32_bitwise (const void* data, size_t length, uint32_t previousCrc32 = 0); +/// compute CRC32 (half-byte algoritm) +uint32_t crc32_halfbyte(const void* data, size_t length, uint32_t previousCrc32 = 0); + +#ifdef CRC32_USE_LOOKUP_TABLE_BYTE +/// compute CRC32 (standard algorithm) +uint32_t crc32_1byte (const void* data, size_t length, uint32_t previousCrc32 = 0); +#endif + +/// compute CRC32 (byte algorithm) without lookup tables +uint32_t crc32_1byte_tableless (const void* data, size_t length, uint32_t previousCrc32 = 0); +/// compute CRC32 (byte algorithm) without lookup tables +uint32_t crc32_1byte_tableless2(const void* data, size_t length, uint32_t previousCrc32 = 0); + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_4 +/// compute CRC32 (Slicing-by-4 algorithm) +uint32_t crc32_4bytes (const void* data, size_t length, uint32_t previousCrc32 = 0); +#endif + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_8 +/// compute CRC32 (Slicing-by-8 algorithm) +uint32_t crc32_8bytes (const void* data, size_t length, uint32_t previousCrc32 = 0); +/// compute CRC32 (Slicing-by-8 algorithm), unroll inner loop 4 times +uint32_t crc32_4x8bytes(const void* data, size_t length, uint32_t previousCrc32 = 0); +#endif + +#ifdef CRC32_USE_LOOKUP_TABLE_SLICING_BY_16 +/// compute CRC32 (Slicing-by-16 algorithm) +uint32_t crc32_16bytes (const void* data, size_t length, uint32_t previousCrc32 = 0); +/// compute CRC32 (Slicing-by-16 algorithm, prefetch upcoming data blocks) +uint32_t crc32_16bytes_prefetch(const void* data, size_t length, uint32_t previousCrc32 = 0, size_t prefetchAhead = 256); +#endif \ No newline at end of file 
diff --git a/image_capture/third_party/percipio/common/huffman.cpp b/image_capture/third_party/percipio/common/huffman.cpp new file mode 100644 index 0000000..bb7ddc2 --- /dev/null +++ b/image_capture/third_party/percipio/common/huffman.cpp @@ -0,0 +1,464 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#ifndef WIN32 +#include +#endif + +#include "huffman.h" + +struct ersel{ //this structure will be used to create the translation tree + ersel *left,*right; + long int number; + unsigned char character; + std::string bit; +}; + +struct translation{ + translation *zero,*one; + unsigned char character; +}; + +bool erselcompare0(ersel a,ersel b){ + return a.number>current_bit_count); + ss.write(reinterpret_cast(¤t_byte), sizeof(current_byte)); + current_byte=uChar; +} + +//below function is writing number of files we re going to translate inside current folder to compressed file's 2 bytes +//It is done like this to make sure that it can work on little, big or middle-endian systems +void write_file_count(int file_count,unsigned char ¤t_byte,int current_bit_count,std::stringstream& ss){ + unsigned char temp=file_count%256; + write_from_uChar(temp,current_byte,current_bit_count,ss); + temp=file_count/256; + write_from_uChar(temp,current_byte,current_bit_count,ss); +} + +//This function is writing byte count of current input file to compressed file using 8 bytes +//It is done like this to make sure that it can work on little, big or middle-endian systems +void write_file_size(long int size,unsigned char ¤t_byte,int current_bit_count,std::stringstream& ss){ + for(int i=0;i<8;i++){ + write_from_uChar(size%256,current_byte,current_bit_count,ss); + size/=256; + } +} + +// Below function translates and writes bytes from current input file to the compressed file. 
+void write_the_file_content(const std::string& text, std::string *str_arr, unsigned char ¤t_byte, int ¤t_bit_count, std::stringstream& ss){ + unsigned char x; + char *str_pointer; + long size = text.length(); + x = text.at(0); + for(long int i=0;i(¤t_byte), sizeof(current_byte)); + current_bit_count=0; + } + switch(*str_pointer){ + case '1':current_byte<<=1;current_byte|=1;current_bit_count++;break; + case '0':current_byte<<=1;current_bit_count++;break; + default: std::cout<<"An error has occurred"<< std::endl <<"Process has been aborted"; + exit(2); + } + str_pointer++; + } + if(i != size - 1) { + x = (unsigned char)text.at(i + 1); + } + } +} + +//checks if next input is either a file or a folder +//returns 1 if it is a file +//returns 0 if it is a folder +bool this_is_a_file(unsigned char ¤t_byte,int ¤t_bit_count, std::stringstream& ss){ + bool val; + if(current_bit_count==0){ + ss.read((char*)¤t_byte, 1); + current_bit_count=8; + } + val=current_byte✓ + current_byte<<=1; + current_bit_count--; + return val; +} + +// process_8_bits_NUMBER reads 8 successive bits from compressed file +//(does not have to be in the same byte) +// and returns it in unsigned char form +unsigned char process_8_bits_NUMBER(unsigned char ¤t_byte,int current_bit_count, std::stringstream& ss){ + unsigned char val,temp_byte; + ss.read((char*)&temp_byte, 1); + val=current_byte|(temp_byte>>current_bit_count); + current_byte=temp_byte<<8-current_bit_count; + return val; +} + +// returns file's size +long int read_file_size(unsigned char ¤t_byte,int current_bit_count, std::stringstream& ss){ + long int size=0; + { + long int multiplier=1; + for(int i=0;i<8;i++){ + size+=process_8_bits_NUMBER(current_byte,current_bit_count,ss)*multiplier; + multiplier*=256; + } + } + return size; + // Size was written to the compressed file from least significiant byte + // to the most significiant byte to make sure system's endianness + // does not affect the process and that is why we are processing size 
information like this +} + + +// This function translates compressed file from info that is now stored in the translation tree + // then writes it to a newly created file +void translate_file(long int size,unsigned char ¤t_byte,int ¤t_bit_count,translation *root, std::stringstream& ss, std::string& text){ + translation *node; + for(long int i=0;izero||node->one){ + if(current_bit_count==0){ + ss.read((char*)¤t_byte, 1); + current_bit_count=8; + } + if(current_byte&check){ + node=node->one; + } + else{ + node=node->zero; + } + current_byte<<=1; + current_bit_count--; + } + text.at(i) = node->character; + } +} + + +// process_n_bits_TO_STRING function reads n successive bits from the compressed file +// and stores it in a leaf of the translation tree, +// after creating that leaf and sometimes after creating nodes that are binding that leaf to the tree. +void process_n_bits_TO_STRING(unsigned char ¤t_byte,int n,int ¤t_bit_count,std::stringstream& ss,translation *node,unsigned char uChar){ + for(int i=0;izero)){ + node->zero=(translation*)malloc(sizeof(translation)); + node->zero->zero=NULL; + node->zero->one=NULL; + } + node=node->zero; + break; + case 128: + if(!(node->one)){ + node->one=(translation*)malloc(sizeof(translation)); + node->one->zero=NULL; + node->one->one=NULL; + } + node=node->one; + break; + } + current_byte<<=1; + current_bit_count--; + } + node->character=uChar; +} + +// burn_tree function is used for deallocating translation tree +void burn_tree(translation *node){ + if(node->zero)burn_tree(node->zero); + if(node->one)burn_tree(node->one); + free(node); +} + +////////////////////////////////////////////////////////////////////// + +bool TextHuffmanCompression(const std::string& text, std::string& result) +{ + unsigned char x; //these are temp variables to take input from the file + long int total_size=0,size; + + std::stringstream ss; + + long int number[256]; + long int total_bits=0; + unsigned char letter_count=0; + for(long int 
*i=number;iright=NULL; + e->left=NULL; + e->number=*i; + e->character=i-number; + e++; + } + } + std::sort(array,array+letter_count,erselcompare0); + //--------------------------------------------- + + // min1 and min2 represents nodes that has minimum weights + // isleaf is the pointer that traverses through leafs and + // notleaf is the pointer that traverses through nodes that are not leafs + ersel *min1=array,*min2=array+1,*current=array+letter_count,*notleaf=array+letter_count,*isleaf=array+2; + for(int i=0;inumber=min1->number+min2->number; + current->left=min1; + current->right=min2; + min1->bit="1"; + min2->bit="0"; + current++; + + if(isleaf>=array+letter_count){ + min1=notleaf; + notleaf++; + } + else{ + if(isleaf->numbernumber){ + min1=isleaf; + isleaf++; + } + else{ + min1=notleaf; + notleaf++; + } + } + + if(isleaf>=array+letter_count){ + min2=notleaf; + notleaf++; + } + else if(notleaf>=current){ + min2=isleaf; + isleaf++; + } + else{ + if(isleaf->numbernumber){ + min2=isleaf; + isleaf++; + } + else{ + min2=notleaf; + notleaf++; + } + } + + } + + for(e=array+letter_count*2-2;e>array-1;e--){ + if(e->left){ + e->left->bit=e->bit+e->left->bit; + } + if(e->right){ + e->right->bit=e->bit+e->right->bit; + } + + } + + // In this block we are adding the bytes from root to leafs + // and after this is done every leaf will have a transformation string that corresponds to it + // Note: It is actually a very neat process. Using 4th and 5th code blocks, we are making sure that + // the most used character is using least number of bits. 
+ // Specific number of bits we re going to use for that character is determined by weight distribution + //--------------------------------------------- + + int current_bit_count=0; + unsigned char current_byte; + ss.write(reinterpret_cast(&letter_count), sizeof(letter_count)); + total_bits+=8; + //---------------------------------------- + + char *str_pointer; + unsigned char len,current_character; + std::string str_arr[256]; + for(e=array;echaracter)]=e->bit; //we are putting the transformation string to str_arr array to make the compression process more time efficient + len=e->bit.length(); + current_character=e->character; + + write_from_uChar(current_character,current_byte,current_bit_count,ss); + write_from_uChar(len,current_byte,current_bit_count,ss); + + total_bits+=len+16; + // above lines will write the byte and the number of bits + // we re going to need to represent this specific byte's transformated version + // after here we are going to write the transformed version of the number bit by bit. 
+ + str_pointer=&e->bit[0]; + while(*str_pointer){ + if(current_bit_count==8){ + ss.write(reinterpret_cast(¤t_byte), sizeof(current_byte)); + current_bit_count=0; + } + switch(*str_pointer){ + case '1':current_byte<<=1;current_byte|=1;current_bit_count++;break; + case '0':current_byte<<=1;current_bit_count++;break; + default:std::cout<<"An error has occurred"<number); + } + if(total_bits%8){ + total_bits=(total_bits/8+1)*8; + // from this point on total bits doesnt represent total bits + // instead it represents 8*number_of_bytes we are gonna use on our compressed file + } + + delete[]array; + // Above loop writes the translation script into compressed file and the str_arr array + //---------------------------------------- + + + std::cout<<"The size of the sum of ORIGINAL files is: "<total_size){ + std::cout<(¤t_byte), sizeof(current_byte)); + current_bit_count=0; + } + current_byte<<=1; + current_byte|=1; + current_bit_count++; + write_file_size(size,current_byte,current_bit_count,ss); //writes sixth + write_the_file_content(text,str_arr,current_byte,current_bit_count,ss); //writes eighth + + if(current_bit_count==8){ // here we are writing the last byte of the file + ss.write(reinterpret_cast(¤t_byte), sizeof(current_byte)); + } + else{ + current_byte<<=8-current_bit_count; + ss.write(reinterpret_cast(¤t_byte), sizeof(current_byte)); + } + + result = ss.str(); + + return true; +} + +bool TextHuffmanDecompression(const std::string& huffman, std::string& text) +{ + unsigned char letter_count=0; + std::stringstream ss(huffman); + + //---------reads .first----------- + ss.read((char*)&letter_count, 1); + + int m_letter_count; + if(letter_count==0) + m_letter_count=256; + else + m_letter_count = letter_count; + //------------------------------- + + //----------------reads .second--------------------- + // and stores transformation info into binary translation tree for later use + unsigned char current_byte=0,current_character; + int current_bit_count=0,len; + 
translation *root=(translation*)malloc(sizeof(translation)); + root->zero=NULL; + root->one=NULL; + + for(int i=0;i + +bool TextHuffmanCompression(const std::string& text, std::string& result); +bool TextHuffmanDecompression(const std::string& huffman, std::string& text); \ No newline at end of file diff --git a/image_capture/third_party/percipio/common/json11.cpp b/image_capture/third_party/percipio/common/json11.cpp new file mode 100644 index 0000000..88024e9 --- /dev/null +++ b/image_capture/third_party/percipio/common/json11.cpp @@ -0,0 +1,790 @@ +/* Copyright (c) 2013 Dropbox, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. 
+ */ + +#include "json11.hpp" +#include +#include +#include +#include +#include + +namespace json11 { + +static const int max_depth = 200; + +using std::string; +using std::vector; +using std::map; +using std::make_shared; +using std::initializer_list; +using std::move; + +/* Helper for representing null - just a do-nothing struct, plus comparison + * operators so the helpers in JsonValue work. We can't use nullptr_t because + * it may not be orderable. + */ +struct NullStruct { + bool operator==(NullStruct) const { return true; } + bool operator<(NullStruct) const { return false; } +}; + +/* * * * * * * * * * * * * * * * * * * * + * Serialization + */ + +static void dump(NullStruct, string &out) { + out += "null"; +} + +static void dump(double value, string &out) { + if (std::isfinite(value)) { + char buf[32]; + snprintf(buf, sizeof buf, "%.17g", value); + out += buf; + } else { + out += "null"; + } +} + +static void dump(int value, string &out) { + char buf[32]; + snprintf(buf, sizeof buf, "%d", value); + out += buf; +} + +static void dump(bool value, string &out) { + out += value ? 
"true" : "false"; +} + +static void dump(const string &value, string &out) { + out += '"'; + for (size_t i = 0; i < value.length(); i++) { + const char ch = value[i]; + if (ch == '\\') { + out += "\\\\"; + } else if (ch == '"') { + out += "\\\""; + } else if (ch == '\b') { + out += "\\b"; + } else if (ch == '\f') { + out += "\\f"; + } else if (ch == '\n') { + out += "\\n"; + } else if (ch == '\r') { + out += "\\r"; + } else if (ch == '\t') { + out += "\\t"; + } else if (static_cast(ch) <= 0x1f) { + char buf[8]; + snprintf(buf, sizeof buf, "\\u%04x", ch); + out += buf; + } else if (static_cast(ch) == 0xe2 && static_cast(value[i+1]) == 0x80 + && static_cast(value[i+2]) == 0xa8) { + out += "\\u2028"; + i += 2; + } else if (static_cast(ch) == 0xe2 && static_cast(value[i+1]) == 0x80 + && static_cast(value[i+2]) == 0xa9) { + out += "\\u2029"; + i += 2; + } else { + out += ch; + } + } + out += '"'; +} + +static void dump(const Json::array &values, string &out) { + bool first = true; + out += "["; + for (const auto &value : values) { + if (!first) + out += ", "; + value.dump(out); + first = false; + } + out += "]"; +} + +static void dump(const Json::object &values, string &out) { + bool first = true; + out += "{"; + for (const auto &kv : values) { + if (!first) + out += ", "; + dump(kv.first, out); + out += ": "; + kv.second.dump(out); + first = false; + } + out += "}"; +} + +void Json::dump(string &out) const { + m_ptr->dump(out); +} + +/* * * * * * * * * * * * * * * * * * * * + * Value wrappers + */ + +template +class Value : public JsonValue { +protected: + + // Constructors + explicit Value(const T &value) : m_value(value) {} + explicit Value(T &&value) : m_value(move(value)) {} + + // Get type tag + Json::Type type() const override { + return tag; + } + + // Comparisons + bool equals(const JsonValue * other) const override { + return m_value == static_cast *>(other)->m_value; + } + bool less(const JsonValue * other) const override { + return m_value < static_cast 
*>(other)->m_value; + } + + const T m_value; + void dump(string &out) const override { json11::dump(m_value, out); } +}; + +class JsonDouble final : public Value { + double number_value() const override { return m_value; } + int int_value() const override { return static_cast(m_value); } + bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } + bool less(const JsonValue * other) const override { return m_value < other->number_value(); } +public: + explicit JsonDouble(double value) : Value(value) {} +}; + +class JsonInt final : public Value { + double number_value() const override { return m_value; } + int int_value() const override { return m_value; } + bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } + bool less(const JsonValue * other) const override { return m_value < other->number_value(); } +public: + explicit JsonInt(int value) : Value(value) {} +}; + +class JsonBoolean final : public Value { + bool bool_value() const override { return m_value; } +public: + explicit JsonBoolean(bool value) : Value(value) {} +}; + +class JsonString final : public Value { + const string &string_value() const override { return m_value; } +public: + explicit JsonString(const string &value) : Value(value) {} + explicit JsonString(string &&value) : Value(move(value)) {} +}; + +class JsonArray final : public Value { + const Json::array &array_items() const override { return m_value; } + const Json & operator[](size_t i) const override; +public: + explicit JsonArray(const Json::array &value) : Value(value) {} + explicit JsonArray(Json::array &&value) : Value(move(value)) {} +}; + +class JsonObject final : public Value { + const Json::object &object_items() const override { return m_value; } + const Json & operator[](const string &key) const override; +public: + explicit JsonObject(const Json::object &value) : Value(value) {} + explicit JsonObject(Json::object &&value) : Value(move(value)) {} 
+}; + +class JsonNull final : public Value { +public: + JsonNull() : Value({}) {} +}; + +/* * * * * * * * * * * * * * * * * * * * + * Static globals - static-init-safe + */ +struct Statics { + const std::shared_ptr null = make_shared(); + const std::shared_ptr t = make_shared(true); + const std::shared_ptr f = make_shared(false); + const string empty_string; + const vector empty_vector; + const map empty_map; + Statics() {} +}; + +static const Statics & statics() { + static const Statics s {}; + return s; +} + +static const Json & static_null() { + // This has to be separate, not in Statics, because Json() accesses statics().null. + static const Json json_null; + return json_null; +} + +/* * * * * * * * * * * * * * * * * * * * + * Constructors + */ + +Json::Json() noexcept : m_ptr(statics().null) {} +Json::Json(std::nullptr_t) noexcept : m_ptr(statics().null) {} +Json::Json(double value) : m_ptr(make_shared(value)) {} +Json::Json(int value) : m_ptr(make_shared(value)) {} +Json::Json(bool value) : m_ptr(value ? 
statics().t : statics().f) {} +Json::Json(const string &value) : m_ptr(make_shared(value)) {} +Json::Json(string &&value) : m_ptr(make_shared(move(value))) {} +Json::Json(const char * value) : m_ptr(make_shared(value)) {} +Json::Json(const Json::array &values) : m_ptr(make_shared(values)) {} +Json::Json(Json::array &&values) : m_ptr(make_shared(move(values))) {} +Json::Json(const Json::object &values) : m_ptr(make_shared(values)) {} +Json::Json(Json::object &&values) : m_ptr(make_shared(move(values))) {} + +/* * * * * * * * * * * * * * * * * * * * + * Accessors + */ + +Json::Type Json::type() const { return m_ptr->type(); } +double Json::number_value() const { return m_ptr->number_value(); } +int Json::int_value() const { return m_ptr->int_value(); } +bool Json::bool_value() const { return m_ptr->bool_value(); } +const string & Json::string_value() const { return m_ptr->string_value(); } +const vector & Json::array_items() const { return m_ptr->array_items(); } +const map & Json::object_items() const { return m_ptr->object_items(); } +const Json & Json::operator[] (size_t i) const { return (*m_ptr)[i]; } +const Json & Json::operator[] (const string &key) const { return (*m_ptr)[key]; } + +double JsonValue::number_value() const { return 0; } +int JsonValue::int_value() const { return 0; } +bool JsonValue::bool_value() const { return false; } +const string & JsonValue::string_value() const { return statics().empty_string; } +const vector & JsonValue::array_items() const { return statics().empty_vector; } +const map & JsonValue::object_items() const { return statics().empty_map; } +const Json & JsonValue::operator[] (size_t) const { return static_null(); } +const Json & JsonValue::operator[] (const string &) const { return static_null(); } + +const Json & JsonObject::operator[] (const string &key) const { + auto iter = m_value.find(key); + return (iter == m_value.end()) ? 
static_null() : iter->second; +} +const Json & JsonArray::operator[] (size_t i) const { + if (i >= m_value.size()) return static_null(); + else return m_value[i]; +} + +/* * * * * * * * * * * * * * * * * * * * + * Comparison + */ + +bool Json::operator== (const Json &other) const { + if (m_ptr == other.m_ptr) + return true; + if (m_ptr->type() != other.m_ptr->type()) + return false; + + return m_ptr->equals(other.m_ptr.get()); +} + +bool Json::operator< (const Json &other) const { + if (m_ptr == other.m_ptr) + return false; + if (m_ptr->type() != other.m_ptr->type()) + return m_ptr->type() < other.m_ptr->type(); + + return m_ptr->less(other.m_ptr.get()); +} + +/* * * * * * * * * * * * * * * * * * * * + * Parsing + */ + +/* esc(c) + * + * Format char c suitable for printing in an error message. + */ +static inline string esc(char c) { + char buf[12]; + if (static_cast(c) >= 0x20 && static_cast(c) <= 0x7f) { + snprintf(buf, sizeof buf, "'%c' (%d)", c, c); + } else { + snprintf(buf, sizeof buf, "(%d)", c); + } + return string(buf); +} + +static inline bool in_range(long x, long lower, long upper) { + return (x >= lower && x <= upper); +} + +namespace { +/* JsonParser + * + * Object that tracks all state of an in-progress parse. + */ +struct JsonParser final { + + /* State + */ + const string &str; + size_t i; + string &err; + bool failed; + const JsonParse strategy; + + /* fail(msg, err_ret = Json()) + * + * Mark this parse as failed. + */ + Json fail(string &&msg) { + return fail(move(msg), Json()); + } + + template + T fail(string &&msg, const T err_ret) { + if (!failed) + err = std::move(msg); + failed = true; + return err_ret; + } + + /* consume_whitespace() + * + * Advance until the current character is non-whitespace. + */ + void consume_whitespace() { + while (str[i] == ' ' || str[i] == '\r' || str[i] == '\n' || str[i] == '\t') + i++; + } + + /* consume_comment() + * + * Advance comments (c-style inline and multiline). 
+ */ + bool consume_comment() { + bool comment_found = false; + if (str[i] == '/') { + i++; + if (i == str.size()) + return fail("unexpected end of input after start of comment", false); + if (str[i] == '/') { // inline comment + i++; + // advance until next line, or end of input + while (i < str.size() && str[i] != '\n') { + i++; + } + comment_found = true; + } + else if (str[i] == '*') { // multiline comment + i++; + if (i > str.size()-2) + return fail("unexpected end of input inside multi-line comment", false); + // advance until closing tokens + while (!(str[i] == '*' && str[i+1] == '/')) { + i++; + if (i > str.size()-2) + return fail( + "unexpected end of input inside multi-line comment", false); + } + i += 2; + comment_found = true; + } + else + return fail("malformed comment", false); + } + return comment_found; + } + + /* consume_garbage() + * + * Advance until the current character is non-whitespace and non-comment. + */ + void consume_garbage() { + consume_whitespace(); + if(strategy == JsonParse::COMMENTS) { + bool comment_found = false; + do { + comment_found = consume_comment(); + if (failed) return; + consume_whitespace(); + } + while(comment_found); + } + } + + /* get_next_token() + * + * Return the next non-whitespace character. If the end of the input is reached, + * flag an error and return 0. + */ + char get_next_token() { + consume_garbage(); + if (failed) return static_cast(0); + if (i == str.size()) + return fail("unexpected end of input", static_cast(0)); + + return str[i++]; + } + + /* encode_utf8(pt, out) + * + * Encode pt as UTF-8 and add it to out. 
+ */ + void encode_utf8(long pt, string & out) { + if (pt < 0) + return; + + if (pt < 0x80) { + out += static_cast(pt); + } else if (pt < 0x800) { + out += static_cast((pt >> 6) | 0xC0); + out += static_cast((pt & 0x3F) | 0x80); + } else if (pt < 0x10000) { + out += static_cast((pt >> 12) | 0xE0); + out += static_cast(((pt >> 6) & 0x3F) | 0x80); + out += static_cast((pt & 0x3F) | 0x80); + } else { + out += static_cast((pt >> 18) | 0xF0); + out += static_cast(((pt >> 12) & 0x3F) | 0x80); + out += static_cast(((pt >> 6) & 0x3F) | 0x80); + out += static_cast((pt & 0x3F) | 0x80); + } + } + + /* parse_string() + * + * Parse a string, starting at the current position. + */ + string parse_string() { + string out; + long last_escaped_codepoint = -1; + while (true) { + if (i == str.size()) + return fail("unexpected end of input in string", ""); + + char ch = str[i++]; + + if (ch == '"') { + encode_utf8(last_escaped_codepoint, out); + return out; + } + + if (in_range(ch, 0, 0x1f)) + return fail("unescaped " + esc(ch) + " in string", ""); + + // The usual case: non-escaped characters + if (ch != '\\') { + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = -1; + out += ch; + continue; + } + + // Handle escapes + if (i == str.size()) + return fail("unexpected end of input in string", ""); + + ch = str[i++]; + + if (ch == 'u') { + // Extract 4-byte escape sequence + string esc = str.substr(i, 4); + // Explicitly check length of the substring. The following loop + // relies on std::string returning the terminating NUL when + // accessing str[length]. Checking here reduces brittleness. 
+ if (esc.length() < 4) { + return fail("bad \\u escape: " + esc, ""); + } + for (size_t j = 0; j < 4; j++) { + if (!in_range(esc[j], 'a', 'f') && !in_range(esc[j], 'A', 'F') + && !in_range(esc[j], '0', '9')) + return fail("bad \\u escape: " + esc, ""); + } + + long codepoint = strtol(esc.data(), nullptr, 16); + + // JSON specifies that characters outside the BMP shall be encoded as a pair + // of 4-hex-digit \u escapes encoding their surrogate pair components. Check + // whether we're in the middle of such a beast: the previous codepoint was an + // escaped lead (high) surrogate, and this is a trail (low) surrogate. + if (in_range(last_escaped_codepoint, 0xD800, 0xDBFF) + && in_range(codepoint, 0xDC00, 0xDFFF)) { + // Reassemble the two surrogate pairs into one astral-plane character, per + // the UTF-16 algorithm. + encode_utf8((((last_escaped_codepoint - 0xD800) << 10) + | (codepoint - 0xDC00)) + 0x10000, out); + last_escaped_codepoint = -1; + } else { + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = codepoint; + } + + i += 4; + continue; + } + + encode_utf8(last_escaped_codepoint, out); + last_escaped_codepoint = -1; + + if (ch == 'b') { + out += '\b'; + } else if (ch == 'f') { + out += '\f'; + } else if (ch == 'n') { + out += '\n'; + } else if (ch == 'r') { + out += '\r'; + } else if (ch == 't') { + out += '\t'; + } else if (ch == '"' || ch == '\\' || ch == '/') { + out += ch; + } else { + return fail("invalid escape character " + esc(ch), ""); + } + } + } + + /* parse_number() + * + * Parse a double. + */ + Json parse_number() { + size_t start_pos = i; + + if (str[i] == '-') + i++; + + // Integer part + if (str[i] == '0') { + i++; + if (in_range(str[i], '0', '9')) + return fail("leading 0s not permitted in numbers"); + } else if (in_range(str[i], '1', '9')) { + i++; + while (in_range(str[i], '0', '9')) + i++; + } else { + return fail("invalid " + esc(str[i]) + " in number"); + } + + if (str[i] != '.' 
&& str[i] != 'e' && str[i] != 'E' + && (i - start_pos) <= static_cast(std::numeric_limits::digits10)) { + return std::atoi(str.c_str() + start_pos); + } + + // Decimal part + if (str[i] == '.') { + i++; + if (!in_range(str[i], '0', '9')) + return fail("at least one digit required in fractional part"); + + while (in_range(str[i], '0', '9')) + i++; + } + + // Exponent part + if (str[i] == 'e' || str[i] == 'E') { + i++; + + if (str[i] == '+' || str[i] == '-') + i++; + + if (!in_range(str[i], '0', '9')) + return fail("at least one digit required in exponent"); + + while (in_range(str[i], '0', '9')) + i++; + } + + return std::strtod(str.c_str() + start_pos, nullptr); + } + + /* expect(str, res) + * + * Expect that 'str' starts at the character that was just read. If it does, advance + * the input and return res. If not, flag an error. + */ + Json expect(const string &expected, Json res) { + assert(i != 0); + i--; + if (str.compare(i, expected.length(), expected) == 0) { + i += expected.length(); + return res; + } else { + return fail("parse error: expected " + expected + ", got " + str.substr(i, expected.length())); + } + } + + /* parse_json() + * + * Parse a JSON object. 
+ */ + Json parse_json(int depth) { + if (depth > max_depth) { + return fail("exceeded maximum nesting depth"); + } + + char ch = get_next_token(); + if (failed) + return Json(); + + if (ch == '-' || (ch >= '0' && ch <= '9')) { + i--; + return parse_number(); + } + + if (ch == 't') + return expect("true", true); + + if (ch == 'f') + return expect("false", false); + + if (ch == 'n') + return expect("null", Json()); + + if (ch == '"') + return parse_string(); + + if (ch == '{') { + map data; + ch = get_next_token(); + if (ch == '}') + return data; + + while (1) { + if (ch != '"') + return fail("expected '\"' in object, got " + esc(ch)); + + string key = parse_string(); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch != ':') + return fail("expected ':' in object, got " + esc(ch)); + + data[std::move(key)] = parse_json(depth + 1); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch == '}') + break; + if (ch != ',') + return fail("expected ',' in object, got " + esc(ch)); + + ch = get_next_token(); + } + return data; + } + + if (ch == '[') { + vector data; + ch = get_next_token(); + if (ch == ']') + return data; + + while (1) { + i--; + data.push_back(parse_json(depth + 1)); + if (failed) + return Json(); + + ch = get_next_token(); + if (ch == ']') + break; + if (ch != ',') + return fail("expected ',' in list, got " + esc(ch)); + + ch = get_next_token(); + (void)ch; + } + return data; + } + + return fail("expected value, got " + esc(ch)); + } +}; +}//namespace { + +Json Json::parse(const string &in, string &err, JsonParse strategy) { + JsonParser parser { in, 0, err, false, strategy }; + Json result = parser.parse_json(0); + + // Check for any trailing garbage + parser.consume_garbage(); + if (parser.failed) + return Json(); + if (parser.i != in.size()) + return parser.fail("unexpected trailing " + esc(in[parser.i])); + + return result; +} + +// Documented in json11.hpp +vector Json::parse_multi(const string &in, + 
std::string::size_type &parser_stop_pos, + string &err, + JsonParse strategy) { + JsonParser parser { in, 0, err, false, strategy }; + parser_stop_pos = 0; + vector json_vec; + while (parser.i != in.size() && !parser.failed) { + json_vec.push_back(parser.parse_json(0)); + if (parser.failed) + break; + + // Check for another object + parser.consume_garbage(); + if (parser.failed) + break; + parser_stop_pos = parser.i; + } + return json_vec; +} + +/* * * * * * * * * * * * * * * * * * * * + * Shape-checking + */ + +bool Json::has_shape(const shape & types, string & err) const { + if (!is_object()) { + err = "expected JSON object, got " + dump(); + return false; + } + + const auto& obj_items = object_items(); + for (auto & item : types) { + const auto it = obj_items.find(item.first); + if (it == obj_items.cend() || it->second.type() != item.second) { + err = "bad type for " + item.first + " in " + dump(); + return false; + } + } + + return true; +} + +} // namespace json11 diff --git a/image_capture/third_party/percipio/common/json11.hpp b/image_capture/third_party/percipio/common/json11.hpp new file mode 100644 index 0000000..0c47d05 --- /dev/null +++ b/image_capture/third_party/percipio/common/json11.hpp @@ -0,0 +1,232 @@ +/* json11 + * + * json11 is a tiny JSON library for C++11, providing JSON parsing and serialization. + * + * The core object provided by the library is json11::Json. A Json object represents any JSON + * value: null, bool, number (int or double), string (std::string), array (std::vector), or + * object (std::map). + * + * Json objects act like values: they can be assigned, copied, moved, compared for equality or + * order, etc. There are also helper methods Json::dump, to serialize a Json to a string, and + * Json::parse (static) to parse a std::string as a Json object. + * + * Internally, the various types of Json object are represented by the JsonValue class + * hierarchy. 
+ * + * A note on numbers - JSON specifies the syntax of number formatting but not its semantics, + * so some JSON implementations distinguish between integers and floating-point numbers, while + * some don't. In json11, we choose the latter. Because some JSON implementations (namely + * Javascript itself) treat all numbers as the same type, distinguishing the two leads + * to JSON that will be *silently* changed by a round-trip through those implementations. + * Dangerous! To avoid that risk, json11 stores all numbers as double internally, but also + * provides integer helpers. + * + * Fortunately, double-precision IEEE754 ('double') can precisely store any integer in the + * range +/-2^53, which includes every 'int' on most systems. (Timestamps often use int64 + * or long long to avoid the Y2038K problem; a double storing microseconds since some epoch + * will be exact for +/- 275 years.) + */ + +/* Copyright (c) 2013 Dropbox, Inc. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +#pragma once + +#include +#include +#include +#include +#include + +#ifdef _MSC_VER + #if _MSC_VER <= 1800 // VS 2013 + #ifndef noexcept + #define noexcept throw() + #endif + + #ifndef snprintf + #define snprintf _snprintf_s + #endif + #endif +#endif + +namespace json11 { + +enum JsonParse { + STANDARD, COMMENTS +}; + +class JsonValue; + +class Json final { +public: + // Types + enum Type { + NUL, NUMBER, BOOL, STRING, ARRAY, OBJECT + }; + + // Array and object typedefs + typedef std::vector array; + typedef std::map object; + + // Constructors for the various types of JSON value. + Json() noexcept; // NUL + Json(std::nullptr_t) noexcept; // NUL + Json(double value); // NUMBER + Json(int value); // NUMBER + Json(bool value); // BOOL + Json(const std::string &value); // STRING + Json(std::string &&value); // STRING + Json(const char * value); // STRING + Json(const array &values); // ARRAY + Json(array &&values); // ARRAY + Json(const object &values); // OBJECT + Json(object &&values); // OBJECT + + // Implicit constructor: anything with a to_json() function. + template + Json(const T & t) : Json(t.to_json()) {} + + // Implicit constructor: map-like objects (std::map, std::unordered_map, etc) + template ().begin()->first)>::value + && std::is_constructible().begin()->second)>::value, + int>::type = 0> + Json(const M & m) : Json(object(m.begin(), m.end())) {} + + // Implicit constructor: vector-like objects (std::list, std::vector, std::set, etc) + template ().begin())>::value, + int>::type = 0> + Json(const V & v) : Json(array(v.begin(), v.end())) {} + + // This prevents Json(some_pointer) from accidentally producing a bool. Use + // Json(bool(some_pointer)) if that behavior is desired. 
+ Json(void *) = delete; + + // Accessors + Type type() const; + + bool is_null() const { return type() == NUL; } + bool is_number() const { return type() == NUMBER; } + bool is_bool() const { return type() == BOOL; } + bool is_string() const { return type() == STRING; } + bool is_array() const { return type() == ARRAY; } + bool is_object() const { return type() == OBJECT; } + + // Return the enclosed value if this is a number, 0 otherwise. Note that json11 does not + // distinguish between integer and non-integer numbers - number_value() and int_value() + // can both be applied to a NUMBER-typed object. + double number_value() const; + int int_value() const; + + // Return the enclosed value if this is a boolean, false otherwise. + bool bool_value() const; + // Return the enclosed string if this is a string, "" otherwise. + const std::string &string_value() const; + // Return the enclosed std::vector if this is an array, or an empty vector otherwise. + const array &array_items() const; + // Return the enclosed std::map if this is an object, or an empty map otherwise. + const object &object_items() const; + + // Return a reference to arr[i] if this is an array, Json() otherwise. + const Json & operator[](size_t i) const; + // Return a reference to obj[key] if this is an object, Json() otherwise. + const Json & operator[](const std::string &key) const; + + // Serialize. + void dump(std::string &out) const; + std::string dump() const { + std::string out; + dump(out); + return out; + } + + // Parse. If parse fails, return Json() and assign an error message to err. 
+ static Json parse(const std::string & in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD); + static Json parse(const char * in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD) { + if (in) { + return parse(std::string(in), err, strategy); + } else { + err = "null input"; + return nullptr; + } + } + // Parse multiple objects, concatenated or separated by whitespace + static std::vector parse_multi( + const std::string & in, + std::string::size_type & parser_stop_pos, + std::string & err, + JsonParse strategy = JsonParse::STANDARD); + + static inline std::vector parse_multi( + const std::string & in, + std::string & err, + JsonParse strategy = JsonParse::STANDARD) { + std::string::size_type parser_stop_pos; + return parse_multi(in, parser_stop_pos, err, strategy); + } + + bool operator== (const Json &rhs) const; + bool operator< (const Json &rhs) const; + bool operator!= (const Json &rhs) const { return !(*this == rhs); } + bool operator<= (const Json &rhs) const { return !(rhs < *this); } + bool operator> (const Json &rhs) const { return (rhs < *this); } + bool operator>= (const Json &rhs) const { return !(*this < rhs); } + + /* has_shape(types, err) + * + * Return true if this is a JSON object and, for each item in types, has a field of + * the given type. If not, return false and set err to a descriptive message. + */ + typedef std::initializer_list> shape; + bool has_shape(const shape & types, std::string & err) const; + +private: + std::shared_ptr m_ptr; +}; + +// Internal class hierarchy - JsonValue objects are not exposed to users of this API. 
+class JsonValue { +protected: + friend class Json; + friend class JsonInt; + friend class JsonDouble; + virtual Json::Type type() const = 0; + virtual bool equals(const JsonValue * other) const = 0; + virtual bool less(const JsonValue * other) const = 0; + virtual void dump(std::string &out) const = 0; + virtual double number_value() const; + virtual int int_value() const; + virtual bool bool_value() const; + virtual const std::string &string_value() const; + virtual const Json::array &array_items() const; + virtual const Json &operator[](size_t i) const; + virtual const Json::object &object_items() const; + virtual const Json &operator[](const std::string &key) const; + virtual ~JsonValue() {} +}; + +} // namespace json11 diff --git a/image_capture/third_party/percipio/include/TYApi.h b/image_capture/third_party/percipio/include/TYApi.h new file mode 100644 index 0000000..bb19dbf --- /dev/null +++ b/image_capture/third_party/percipio/include/TYApi.h @@ -0,0 +1,2951 @@ +/**@file TYApi.h + * @brief TYApi.h includes camera control and data receiving interface, + * which supports configuration for image resolution, frame rate, exposure + * time, gain, working mode,etc. + * + */ + +/**@mainpage +* +* Copyright(C)2016-2023 Percipio All Rights Reserved +* +* +* +* @section Note +* Depth camera, called "device", consists of several components. Each component +* is a hardware module or virtual module, such as RGB sensor, depth sensor. +* Each component has its own features, such as image width, exposure time, etc.. +* +* NOTE: The component TY_COMPONENT_DEVICE is a virtual component that contains +* all features related to the whole device, such as trigger mode, device IP. +* +* Each frame consists of several images. Normally, all the images have identical +* timestamp, means they are captured at the same time. 
+* +* */ + +#ifndef TY_API_H_ +#define TY_API_H_ + +#include "TYDefs.h" +typedef void (*TY_EVENT_CALLBACK) (TY_EVENT_INFO*, void* userdata); +typedef void (*TY_IMU_CALLBACK) (TY_IMU_DATA*, void* userdata); + + +//------------------------------------------------------------------------------ +// inlines +//------------------------------------------------------------------------------ +static inline bool TYIsNetworkInterface(int32_t interfaceType) +{ + return (interfaceType == TY_INTERFACE_ETHERNET) || + (interfaceType == TY_INTERFACE_IEEE80211); +} + +static inline void TYIntToIPv4(uint32_t addr, uint8_t out[4]) +{ + out[0] = (addr>>24) & 0xff; + out[1] = (addr>>16) & 0xff; + out[2] = (addr>>8) & 0xff; + out[3] = (addr>>0) & 0xff; +} + +static inline uint32_t TYIPv4ToInt(uint8_t ip[4]) +{ + return (ip[0] << 24) | (ip[1] << 16) | (ip[2] << 8) | ip[3]; +} + +///init a TY_IMAGE_DATA struct +static inline TY_IMAGE_DATA TYInitImageData(size_t size, void* buffer + , size_t width, size_t height) +{ + TY_IMAGE_DATA out; + out.timestamp = 0; + out.imageIndex = 0; + out.status = 0; + out.componentID = 0; + out.size = size; + out.buffer = buffer; + out.width = width; + out.height = height; + out.pixelFormat = 0; + return out; +} + +///get feature format type from feature id +static inline TY_FEATURE_TYPE TYFeatureType(TY_FEATURE_ID id) +{ + return id & 0xf000; +} + +///deprecated: get pixel size in byte, Invalid for 10/12/14bit mode +static inline int32_t TYPixelSize(TY_IMAGE_MODE imageMode) +{ + return ((imageMode >> 28) & 0xf); +} + +///get pixel size in bits +static inline int32_t TYBitsPerPixel(TY_IMAGE_MODE imageMode) +{ + TY_PIXEL_BITS bits = imageMode & (0xf << 28); + switch(bits){ + case TY_PIXEL_16BIT: + return 16; + case TY_PIXEL_24BIT: + return 24; + case TY_PIXEL_32BIT: + return 32; + case TY_PIXEL_48BIT: + return 48; + case TY_PIXEL_64BIT: + return 64; + case TY_PIXEL_10BIT: + return 10; + case TY_PIXEL_12BIT: + return 12; + case TY_PIXEL_8BIT: + default: + return 
8; + } +} + +///get line size in bytes +static inline int32_t TYPixelLineSize(int width, TY_IMAGE_MODE imageMode) +{ + return (width * TYBitsPerPixel(imageMode)) >> 3; +} + +///make a image mode from pixel format & resolution mode +static inline TY_IMAGE_MODE TYImageMode(TY_PIXEL_FORMAT pix, TY_RESOLUTION_MODE res) +{ + return pix | res; +} + +///get a resoltuion mode from width & height +static inline TY_RESOLUTION_MODE TYResolutionMode2(int width, int height){ + return (TY_RESOLUTION_MODE)((width << 12) + height); +} + +///create a image mode from pixel format , width & height +static inline TY_IMAGE_MODE TYImageMode2(TY_PIXEL_FORMAT pix, int width,int height) +{ + return pix | TYResolutionMode2(width, height); +} + +///get pixel format from image mode +static inline TY_PIXEL_FORMAT TYPixelFormat(TY_IMAGE_MODE imageMode) +{ + return imageMode & 0xff000000; +} + +///get a resoltuion mode from image mode +static inline TY_RESOLUTION_MODE TYResolutionMode(TY_IMAGE_MODE imageMode) +{ + return imageMode & 0x00ffffff; +} + +///get image width from image mode +static inline int32_t TYImageWidth(TY_IMAGE_MODE imageMode) +{ + return TYResolutionMode(imageMode) >> 12; +} + +///get image height from image mode +static inline int32_t TYImageHeight(TY_IMAGE_MODE imageMode) +{ + return TYResolutionMode(imageMode) & 0x0fff; +} + +//------------------------------------------------------------------------------ +// C API +//------------------------------------------------------------------------------ + +//------------------------------------------------------------------------------ +// Version check +//------------------------------------------------------------------------------ +TY_CAPI _TYInitLib(void); +TY_CAPI TYLibVersion (TY_VERSION_INFO* version); +static inline TY_STATUS TYInitLib(void) +{ + TY_VERSION_INFO soVersion; + TYLibVersion(&soVersion); + if(!(soVersion.major == TY_LIB_VERSION_MAJOR && soVersion.minor >= TY_LIB_VERSION_MINOR)){ + abort(); // generate fault 
directly + } + return _TYInitLib(); +} +///@brief Get error information. +///@param [in] errorID Error id. +///@retval Error string. +TY_EXTC TY_EXPORT const char* TY_STDC TYErrorString (TY_STATUS errorID); + +///@brief Init this library. +/// +/// We make this function to be static inline, because we do a version check here. +/// Some user may use the mismatched header file and dynamic library, and +/// that's quite difficult to locate the error. +/// +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_ERROR Has been inited. +inline TY_STATUS TYInitLib (void); + +///@brief Deinit this library. +///@retval TY_STATUS_OK Succeed. +TY_CAPI TYDeinitLib (void); + +///@brief Get current library version. +///@param [out] version Version infomation to be filled. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NULL_POINTER TYLibVersion called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYLibVersion(ver); +/// ^ is NULL +/// +TY_CAPI TYLibVersion (TY_VERSION_INFO* version); + +///@brief Set log level. +///@param [in] lvl Log level. +///@retval TY_STATUS_OK Succeed. +TY_CAPI TYSetLogLevel (TY_LOG_LEVEL lvl); + +///@brief set log prefix +///@param [in] prefix Prefix string. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_PARAMETER Prefix is empty or prefix is too long +/// +/// Suggestions: +/// Prefix is empty or prefix is too long, cannot be set +/// Like this: +/// TYSetLogPrefix(prefix); +/// ^ prefix is empty or prefix is too long +/// +TY_CAPI TYSetLogPrefix (const char* prefix); + +///@brief Append log to specified file. +///@param [in] filePath Path to the log file. +///@param [in] lvl Log level. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_ERROR Failed to add file +/// +/// Suggestions: +/// Please check if the file path is correct and if you have permission to write to the file +/// +TY_CAPI TYAppendLogToFile (const char* filePath, TY_LOG_LEVEL lvl); + +///@brief Remove log file. 
+///@param [in] filePath Path to the log file. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_ERROR Failed to remove file +/// +/// Suggestions: +/// Please check if the file path is correct +/// +TY_CAPI TYRemoveLogFile (const char* filePath); + +///@brief Append log to Tcp/Udp server. +///@param [in] protocol Protocol of the server, "tcp" or "udp". +///@param [in] ip IP address of the server. +///@param [in] port Port of the server. +///@param [in] lvl Log level. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_ERROR Failed to add server +/// +/// Suggestions: +/// Please check if the ip and port are correct +/// +///@retval TY_STATUS_INVALID_PARAMETER Unsupported protocol +/// +/// Suggestions: +/// Unsupported protocol, please use tcp or udp +/// +TY_CAPI TYAppendLogToServer (const char* protocol, const char* ip, uint16_t port, TY_LOG_LEVEL lvl); + +///@brief Remove log server. +///@param [in] protocol Protocol of the server, "tcp" or "udp". +///@param [in] ip IP address of the server. +///@param [in] port Port of the server. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_ERROR Failed to remove server +/// +/// Suggestions: +/// Please check if the ip and port are correct +/// +///@retval TY_STATUS_INVALID_PARAMETER Unsupported protocol +/// +/// Suggestions: +/// Unsupported protocol, please use tcp or udp +/// +TY_CAPI TYRemoveLogServer (const char* protocol, const char* ip, uint16_t port); + +///@brief Update current interfaces. +/// call before TYGetInterfaceList +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +TY_CAPI TYUpdateInterfaceList (void); + +///@brief Get number of current interfaces. +///@param [out] pNumIfaces Number of interfaces. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_NULL_POINTER TYGetInterfaceNumber called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetInterfaceNumber(pNumIfaces); +/// ^ is NULL +/// +TY_CAPI TYGetInterfaceNumber (uint32_t* pNumIfaces); + +///@brief Get interface info list. +///@param [out] pIfaceInfos Array of interface infos to be filled. +///@param [in] bufferCount Array size of interface infos. +///@param [out] filledCount Number of filled TY_INTERFACE_INFO. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +///@retval TY_STATUS_NULL_POINTER TYGetInterfaceList called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetInterfaceList(pIfaceInfos, bufferCount, filledCount); +/// ^ or ^ is NULL +/// +TY_CAPI TYGetInterfaceList (TY_INTERFACE_INFO* pIfaceInfos, uint32_t bufferCount, uint32_t* filledCount); + +///@brief Check if has interface. +///@param [in] ifaceID Interface ID string, can be get from TY_INTERFACE_INFO. +///@param [out] value True if the interface exists. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +///@retval TY_STATUS_NULL_POINTER TYHasInterface called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYHasInterface(ifaceID, value); +/// ^ is NULL +/// +///@see TYGetInterfaceList +TY_CAPI TYHasInterface (const char* ifaceID, bool* value); + +///@brief Open specified interface. +///@param [in] ifaceID Interface ID string, can be get from TY_INTERFACE_INFO. +///@param [out] outHandle Handle of opened interface. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_NULL_POINTER TYOpenInterface called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYOpenInterface(ifaceID, outHandle); +/// ^ or ^ is NULL +/// +///@retval TY_STATUS_INVALID_INTERFACE TYOpenInterface called with invalid interface ID +/// +/// Suggestions: +/// Please check ifaceID parameter +/// Like this: +/// TYOpenInterface(ifaceID, outHandle); +/// ^ is invalid +/// Usually you get interface information by calling TYUpdateInterfaceList, TYGetInterfaceList +/// and then open interface by calling TYOpenInterface +/// When your host interface (network or USB) changes); +/// you may need to update interface list again +/// +///@see TYGetInterfaceList +TY_CAPI TYOpenInterface (const char* ifaceID, TY_INTERFACE_HANDLE* outHandle); + +///@brief Close interface. +///@param [in] ifaceHandle Interface to be closed. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +///@retval TY_STATUS_INVALID_INTERFACE TYCloseInterface called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYCloseInterface(ifaceHandle); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +TY_CAPI TYCloseInterface (TY_INTERFACE_HANDLE ifaceHandle); + +///@brief Update current connected devices. +///@param [in] ifaceHandle Interface handle. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_INVALID_INTERFACE TYUpdateDeviceList called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYUpdateDeviceList(ifaceHandle); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +TY_CAPI TYUpdateDeviceList (TY_INTERFACE_HANDLE ifaceHandle); + +///@brief Update current connected devices. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +TY_CAPI TYUpdateAllDeviceList (void); + +///@brief Get number of current connected devices. +///@param [in] ifaceHandle Interface handle. +///@param [out] deviceNumber Number of connected devices. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +///@retval TY_STATUS_INVALID_INTERFACE TYGetDeviceNumber called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYGetDeviceNumber(ifaceHandle, pDeviceNumber); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceNumber called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceNumber(ifaceHandle, deviceNumber); +/// ^ is NULL +/// +TY_CAPI TYGetDeviceNumber (TY_INTERFACE_HANDLE ifaceHandle, uint32_t* deviceNumber); + +///@brief Get device info list. +///@param [in] ifaceHandle Interface handle. +///@param [out] deviceInfos Device info array to be filled. 
+///@param [in] bufferCount Array size of deviceInfos. +///@param [out] filledDeviceCount Number of filled TY_DEVICE_BASE_INFO. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. +///@retval TY_STATUS_INVALID_INTERFACE TYGetDeviceList called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYGetDeviceList(ifaceHandle, pDeviceInfos, bufferCount, pFilledDeviceCount); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceList called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceList(ifaceHandle, pDeviceInfos, bufferCount, pFilledCount); +/// ^ is NULL or ^ is 0 or ^ is NULL +/// +TY_CAPI TYGetDeviceList (TY_INTERFACE_HANDLE ifaceHandle, TY_DEVICE_BASE_INFO* deviceInfos, uint32_t bufferCount, uint32_t* filledDeviceCount); + +///@brief Check whether the interface has the specified device. +///@param [in] ifaceHandle Interface handle. +///@param [in] deviceID Device ID string, can be get from TY_DEVICE_BASE_INFO. +///@param [out] value True if the device exists. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_INVALID_INTERFACE TYHasDevice called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYHasDevice(ifaceHandle, deviceID, value); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_NULL_POINTER TYHasDevice called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYHasDevice(ifaceHandle, deviceID, value); +/// ^ or ^ is NULL +/// +TY_CAPI TYHasDevice (TY_INTERFACE_HANDLE ifaceHandle, const char* deviceID, bool* value); + +///@brief Open device by device ID. +///@param [in] ifaceHandle Interface handle. +///@param [in] deviceID Device ID string, can be get from TY_DEVICE_BASE_INFO. +///@param [out] outDeviceHandle Handle of opened device. Valid only if TY_STATUS_OK or TY_FW_ERRORCODE returned. +///@param [out] outFwErrorcode Firmware errorcode. Valid only if TY_FW_ERRORCODE returned. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_INVALID_INTERFACE TYOpenDevice called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYOpenDevice(ifaceHandle, deviceID, outDeviceHandle, outFwErrorcode); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_NULL_POINTER TYOpenDevice called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYOpenDevice(ifaceHandle, deviceID, outDeviceHandle, outFwErrorcode); +/// ^ or ^ is NULL +/// +///@retval TY_STATUS_INVALID_PARAMETER TYOpenDevice called with invalid device ID: %s +/// +/// Suggestions: +/// Please check deviceID parameter +/// Like this: +/// TYOpenDevice(ifaceHandle, deviceID, outDeviceHandle, outFwErrorcode); +/// ^ is invalid +/// Usually you get device information by calling TYUpdateDeviceList, TYGetDeviceList +/// and then open device by calling TYOpenDevice +/// When your device online status changes); +/// you may need to update device list again +/// +///@retval TY_STATUS_BUSY Failed to open device +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera is occupied, please check if other processes on this machine (such as Percipio Viewer tool) +/// or other host machines are occupying the camera. If the camera is occupied, release the occupation. +/// 2.A third-party program is written into the camera, please contact Percipio after-sales support. 
+/// +///@retval TY_STATUS_FIRMWARE_ERROR Device opened successfully, but firmware error code is not 0 +/// +/// Suggestions: +/// Some functions of the device may have exceptions, please check the firmware error code for details +/// TY_FW_ERRORCODE outFwErrorcode; +/// TYOpenDevice(ifaceHandle, deviceID, outDeviceHandle, &outFwErrorcode); +/// if(outFwErrorcode != 0) { +/// parse_firmware_errcode(outFwErrorcode); +/// } +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to open device +/// +/// Suggestions: +/// Possible reasons: +/// 1.A third-party program is written into the camera, please contact Percipio after-sales support. +/// 2.The camera IP address is not in the same network segment as the host IP address. +/// If the camera IP address is not in the same network segment as the host IP address, the host can discover the camera across network segments, but may not be able to open it. +/// If there is a routing connection between your host and the camera, you can try to open the camera with TYOpenDeviceWithIP. +/// Otherwise, you can modify the camera IP address or the host IP address. +/// If you need to modify the camera IP address, please refer to Setting the IP address of the network depth camera. +/// 3.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +TY_CAPI TYOpenDevice (TY_INTERFACE_HANDLE ifaceHandle, const char* deviceID, TY_DEV_HANDLE* outDeviceHandle, TY_FW_ERRORCODE* outFwErrorcode=NULL); + +///@brief Open device by device IP, useful when a device is not listed. +///@param [in] ifaceHandle Interface handle. +///@param [in] IP Device IP. +///@param [out] deviceHandle Handle of opened device. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_INVALID_INTERFACE TYOpenDeviceWithIP called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYOpenDeviceWithIP(ifaceHandle, IP, outDeviceHandle); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_NULL_POINTER TYOpenDeviceWithIP called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYOpenDeviceWithIP(ifaceHandle, IP, outDeviceHandle); +/// ^ or^ is NULL +/// +///@retval TY_STATUS_INVALID_PARAMETER TYOpenDeviceWithIP called with invalid IP address +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYOpenDeviceWithIP(ifaceHandle, IP, outDeviceHandle); +/// ^ is invalid +/// A valid IP address should be like: +/// 192.168.31.1 +/// Usually you get device information by calling TYUpdateDeviceList, TYGetDeviceList +/// and then open device by calling TYOpenDevice +/// When your device online status changes, +/// you may need to update device list again +/// +///@retval TY_STATUS_BUSY Failed to open device +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera is occupied, please check if other processes on this machine (such as Percipio Viewer tool) +/// or other host machines are occupying the camera. If the camera is occupied, release the occupation. +/// 2.A third-party program is written into the camera, please contact Percipio after-sales support. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to open device +/// +/// Suggestions: +/// Possible reasons: +/// 1.A third-party program is written into the camera, please contact Percipio after-sales support. 
+/// 2.The camera IP address cannot communicate with the host IP address through routing. +/// 3.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +TY_CAPI TYOpenDeviceWithIP (TY_INTERFACE_HANDLE ifaceHandle, const char* IP, TY_DEV_HANDLE* deviceHandle); + +///@brief Get interface handle by device handle. +///@param [in] hDevice Device handle. +///@param [out] pIface Interface handle. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceInterface called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceInterface(hDevice, pIface); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceInterface called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceInterface(hDevice, pIface); +/// ^ is NULL +/// +TY_CAPI TYGetDeviceInterface (TY_DEV_HANDLE hDevice, TY_INTERFACE_HANDLE* pIface); + +///@brief Force a ethernet device to use new IP address, useful when device use persistent IP and cannot be found. +///@param [in] ifaceHandle Interface handle. +///@param [in] MAC Device MAC, should be "xx:xx:xx:xx:xx:xx". +///@param [in] newIP New IP. +///@param [in] newNetMask New subnet mask. +///@param [in] newGateway New gateway. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED TYInitLib not called. 
+///@retval TY_STATUS_INVALID_INTERFACE TYForceDeviceIP called with invalid interface handle +/// +/// Suggestions: +/// Please check interface handle +/// Like this: +/// TYForceDeviceIP(ifaceHandle, MAC, newIP, newNetMask, newGateway); +/// ^ is invalid +/// The ifaceHandle parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenInterface failed to open interface and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated interface list by calling TYUpdateInterfaceList +/// +///@retval TY_STATUS_WRONG_TYPE TYForceDeviceIP called with invalid interface type +/// +/// Suggestions: +/// Please check interface type +/// Usually you can get interface information by calling TYGetInterfaceList +/// You can use TYIsNetworkInterface to check the interface type +/// Only network interfaces can call TYForceDeviceIP +/// Like this: +/// TY_INTERFACE_INFO info; uint32_t num; +/// TYGetInterfaceList(&info, 1, &num); +/// if(TYIsNetworkInterface(info[0].type)) { +/// TY_INTERFACE_HANDLE hIface; +/// TYOpenInterface(info[0].id, &hIface); +/// TYForceDeviceIP(hIface, MAC, newIP, newNetMask, newGateway); +/// } +/// +///@retval TY_STATUS_NULL_POINTER TYForceDeviceIP called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYForceDeviceIP(ifaceHandle, MAC, newIP, newNetMask, newGateway); +/// ^ or ^ or ^ or ^ is NULL +/// +///@retval TY_STATUS_INVALID_PARAMETER Invalid MAC address: +/// +/// Suggestions: +/// Please check MAC parameter +/// Like this: +/// TYForceDeviceIP(ifaceHandle, MAC, newIP, newNetMask, newGateway); +/// ^ is invalid +/// MAC address should be six bytes of hexadecimal separated by colons +/// For example: 00:11:22:aa:bb:cc +/// +///@retval TY_STATUS_TIMEOUT Failed to force set IP +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and 
other software block the communication, and whether the packet loss rate is too high. +/// 2.There is no camera with a matching target MAC address in the network. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to force set IP +/// +/// Suggestions: +/// Possible reasons: +/// 1.New IP, NetMask, Gateway are incorrect, camera device refuses to set, or camera device is abnormal. +/// +TY_CAPI TYForceDeviceIP (TY_INTERFACE_HANDLE ifaceHandle, const char* MAC, const char* newIP, const char* newNetMask, const char* newGateway); + +///@brief Close device by device handle. +///@param [in] hDevice Device handle. +///@param [in] reboot Reboot device after close. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYCloseDevice called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYCloseDevice(hDevice, reboot); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_TIMEOUT Failed to close device +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to close device +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot be closed normally. +/// +///@retval TY_STATUS_IDLE Device has been closed. +TY_CAPI TYCloseDevice (TY_DEV_HANDLE hDevice, bool reboot=false); + +///@brief Get base info of the open device. +///@param [in] hDevice Device handle. +///@param [out] info Base info out. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceInfo called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceInfo(hDevice, info); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceInfo called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceInfo(hDevice, info); +/// ^ is NULL +/// +TY_CAPI TYGetDeviceInfo (TY_DEV_HANDLE hDevice, TY_DEVICE_BASE_INFO* info); + +///@brief Get all components IDs. +///@param [in] hDevice Device handle. +///@param [out] componentIDs All component IDs this device has. (bit flag). +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetComponentIDs called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetComponentIDs(hDevice, outComponentIDs); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetComponentIDs called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetComponentIDs(hDevice, outComponentIDs); +/// ^ is NULL +/// +///@see TY_DEVICE_COMPONENT_LIST +TY_CAPI TYGetComponentIDs (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID* componentIDs); + +///@brief Get all enabled components IDs. +///@param [in] hDevice Device handle. +///@param [out] componentIDs Enabled component IDs.(bit flag) +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetEnabledComponents called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetEnabledComponents(hDevice, componentIDs); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER componentIDs is NULL. +///@see TY_DEVICE_COMPONENT_LIST +TY_CAPI TYGetEnabledComponents (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID* componentIDs); + +///@brief Enable components. +///@param [in] hDevice Device handle. +///@param [in] componentIDs Components to be enabled. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYEnableComponents called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYEnableComponents(hDevice, componentIDs); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_PARAMETER Invalid component IDs +/// +/// Suggestions: +/// Please check componentIDs parameter +/// Like this: +/// TYEnableComponents(hDevice, componentIDs); +/// ^ is invalid +/// componentIDs should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_COMPONENT Some components specified by componentIDs are invalid. 
+///@retval TY_STATUS_BUSY Camera device is capturing
+///
+///    Suggestions:
+///    Please call TYEnableComponents when the camera device is stopped
+///    Like this:
+///        TYStopCapture(hDevice);
+///        TYEnableComponents(hDevice, componentIDs);
+///
+///@see TY_DEVICE_COMPONENT_LIST
+TY_CAPI TYEnableComponents (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentIDs);
+
+///@brief Disable components.
+///@param [in] hDevice Device handle.
+///@param [in] componentIDs Components to be disabled.
+///@retval TY_STATUS_OK Succeed.
+///@retval TY_STATUS_INVALID_HANDLE TYDisableComponents called with invalid device handle
+///
+///    Suggestions:
+///    Please check device handle
+///    Like this:
+///        TYDisableComponents(hDevice, componentIDs);
+///                            ^ is invalid
+///    The hDevice parameter you input is not recorded
+///    Possible reasons:
+///    1.TYOpenDevice failed to open device and get correct handle
+///    2.Memory in stack to store handle data is corrupted
+///    3.After getting handle, you updated device list by calling TYUpdateDeviceList
+///
+///@retval TY_STATUS_INVALID_PARAMETER Invalid component IDs
+///
+///    Suggestions:
+///    Please check componentIDs parameter
+///    Like this:
+///        TYDisableComponents(hDevice, componentIDs);
+///                                     ^ is invalid
+///    componentIDs should be the value returned by TYGetComponentIDs
+///    You can also view the components of the camera by obtaining the xml description file of the camera device
+///
+///@retval TY_STATUS_INVALID_COMPONENT Some components specified by componentIDs are invalid.
+///@retval TY_STATUS_BUSY Camera device is capturing
+///
+///    Suggestions:
+///    Please call TYDisableComponents when the camera device is stopped
+///    Like this:
+///        TYStopCapture(hDevice);
+///        TYDisableComponents(hDevice, componentIDs);
+///
+///@see TY_DEVICE_COMPONENT_LIST
+TY_CAPI TYDisableComponents (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentIDs);
+
+///@brief Get total buffer size of one frame in current configuration.
+///@param [in] hDevice Device handle. +///@param [out] bufferSize Buffer size per frame. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetFrameBufferSize called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetFrameBufferSize(hDevice, outSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetFrameBufferSize called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetFrameBufferSize(hDevice, outSize); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get frame buffer size +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get frame buffer size +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot get the frame buffer size. +/// +TY_CAPI TYGetFrameBufferSize (TY_DEV_HANDLE hDevice, uint32_t* bufferSize); + +///@brief Enqueue a user allocated buffer. +///@param [in] hDevice Device handle. +///@param [in] buffer Buffer to be enqueued. +///@param [in] bufferSize Size of the input buffer. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYEnqueueBuffer called with invalid device handle
+///
+///    Suggestions:
+///    Please check device handle
+///    Like this:
+///        TYEnqueueBuffer(hDevice, buffer, bufferSize);
+///                        ^ is invalid
+///    The hDevice parameter you input is not recorded
+///    Possible reasons:
+///    1.TYOpenDevice failed to open device and get correct handle
+///    2.Memory in stack to store handle data is corrupted
+///    3.After getting handle, you updated device list by calling TYUpdateDeviceList
+///
+///@retval TY_STATUS_NULL_POINTER TYEnqueueBuffer called with NULL pointer
+///
+///    Suggestions:
+///    Please check your code
+///    Like this:
+///        TYEnqueueBuffer(hDevice, buffer, bufferSize);
+///                                 ^ is NULL
+///
+///@retval TY_STATUS_WRONG_SIZE TYEnqueueBuffer called with wrong size
+///
+///    Suggestions:
+///    Please check your code
+///    Like this:
+///        TYEnqueueBuffer(hDevice, buffer, bufferSize);
+///                                         ^ is 0 or negative value
+///
+///@retval TY_STATUS_TIMEOUT Failed to enqueue frame buffer
+///
+///    Suggestions:
+///    Possible reasons:
+///    1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high.
+///
+///@retval TY_STATUS_DEVICE_ERROR Failed to enqueue frame buffer
+///
+///    Suggestions:
+///    Possible reasons:
+///    1.Camera device is abnormal and cannot enqueue the frame buffer.
+///
+TY_CAPI TYEnqueueBuffer (TY_DEV_HANDLE hDevice, void* buffer, uint32_t bufferSize);
+
+///@brief Clear the internal buffer queue, so that user can release all the buffer.
+///@param [in] hDevice Device handle.
+///@retval TY_STATUS_OK Succeed.
+///@retval TY_STATUS_INVALID_HANDLE TYClearBufferQueue called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYClearBufferQueue(hDevice); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_BUSY Device is capturing. +TY_CAPI TYClearBufferQueue (TY_DEV_HANDLE hDevice); + +///@brief Start capture. +///@param [in] hDevice Device handle. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYStartCapture called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYStartCapture(hDevice); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT No components are enabled +/// +/// Suggestions: +/// Please enable the components of the camera device first +/// Like this: +/// TYEnableComponents(hDevice, componentIDs); +/// TYStartCapture(hDevice); +/// +///@retval TY_STATUS_BUSY Camera device has been started. +/// +/// Suggestions: +/// Please stop the camera device first +/// Like this: +/// TYStopCapture(hDevice); +/// TYStartCapture(hDevice); +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to start camera +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot start the camera. 
+/// 2.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// 3.Camera is busy, please try again +/// +TY_CAPI TYStartCapture (TY_DEV_HANDLE hDevice); + +///@brief Stop capture. +///@param [in] hDevice Device handle. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYStopCapture called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYStopCapture(hDevice); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_IDLE Camera device has been stopped +/// +/// Suggestions: +/// The camera device has stopped, usually after starting +/// Like this: +/// TYStartCapture(hDevice); +/// TYStopCapture(hDevice); +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to stop camera +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot stop the camera. +/// +TY_CAPI TYStopCapture (TY_DEV_HANDLE hDevice); + +///@brief Send a software trigger to capture a frame when device works in trigger mode. +///@param [in] hDevice Device handle. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSendSoftTrigger called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSendSoftTrigger(hDevice); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_FEATURE Not support soft trigger. +///@retval TY_STATUS_IDLE Camera device is not started +/// +/// Suggestions: +/// Please start the camera device first +/// Like this: +/// TYStartCapture(hDevice); +/// TYSendSoftTrigger(hDevice); +/// +///@retval TY_STATUS_WRONG_MODE Not in trigger mode. +///@retval TY_STATUS_DEVICE_ERROR Failed to send soft trigger +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot send soft trigger. +/// 2.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_BUSY Failed to send soft trigger +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera is busy, the last soft trigger is not completed, please try again. +/// +TY_CAPI TYSendSoftTrigger (TY_DEV_HANDLE hDevice); + +///@brief Register device status callback. Register NULL to clean callback. +///@param [in] hDevice Device handle. +///@param [in] callback Callback function. +///@param [in] userdata User private data. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYRegisterEventCallback called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYRegisterEventCallback(hDevice, callback, userdata); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_BUSY Device is capturing. +TY_CAPI TYRegisterEventCallback (TY_DEV_HANDLE hDevice, TY_EVENT_CALLBACK callback, void* userdata); + +///@brief Register imu callback. Register NULL to clean callback. +///@param [in] hDevice Device handle. +///@param [in] callback Callback function. +///@param [in] userdata User private data. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYRegisterImuCallback called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYRegisterImuCallback(hDevice, callback, userdata); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_BUSY Device is capturing. +TY_CAPI TYRegisterImuCallback (TY_DEV_HANDLE hDevice, TY_IMU_CALLBACK callback, void* userdata); + +///@brief Fetch one frame. +///@param [in] hDevice Device handle. +///@param [out] frame Frame data to be filled. +///@param [in] timeout Timeout in milliseconds. <0 for infinite. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYFetchFrame called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYFetchFrame(hDevice, pFrame, timeout); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYFetchFrame called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYFetchFrame(hDevice, pFrame, timeout); +/// ^ is NULL +/// +///@retval TY_STATUS_IDLE Camera device is not started +/// +/// Suggestions: +/// Please start the camera device first +/// Like this: +/// TYStartCapture(hDevice); +/// TYFetchFrame(hDevice, pFrame, timeout); +/// +///@retval TY_STATUS_WRONG_MODE Callback has been registered, this function is disabled. +///@retval TY_STATUS_TIMEOUT Failed to get frame +/// +/// Suggestions: +/// Possible reasons: +/// 1.Camera device is abnormal and cannot get frame. +/// 2.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// 3.Timeout, frame acquisition timeout +/// +TY_CAPI TYFetchFrame (TY_DEV_HANDLE hDevice, TY_FRAME_DATA* frame, int32_t timeout); + +///@brief Check whether a component has a specific feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] value Whether has feature. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYHasFeature called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYHasFeature(hDevice, componentID, featureID, value); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYHasFeature(hDevice, componentID, featureID, value); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_NULL_POINTER TYHasFeature called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYHasFeature(hDevice, componentID, featureID, value); +/// ^ is NULL +/// +TY_CAPI TYHasFeature (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, bool* value); + +///@brief Get feature info. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] featureInfo Feature info. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetFeatureInfo called with invalid device handle
+///
+///    Suggestions:
+///    Please check device handle
+///    Like this:
+///        TYGetFeatureInfo(hDevice, componentID, featureID, pFeatureInfo);
+///                         ^ is invalid
+///    The hDevice parameter you input is not recorded
+///    Possible reasons:
+///    1.TYOpenDevice failed to open device and get correct handle
+///    2.Memory in stack to store handle data is corrupted
+///    3.After getting handle, you updated device list by calling TYUpdateDeviceList
+///
+///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID
+///
+///    Suggestions:
+///    Please check componentID parameter
+///    Like this:
+///        TYGetFeatureInfo(hDevice, componentID, featureID, pFeatureInfo);
+///                                  ^ is invalid
+///    componentID should be the value returned by TYGetComponentIDs
+///    You can also view the components of the camera by obtaining the xml description file of the camera device
+///
+///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID
+///
+///    Suggestions:
+///    Please check featureID parameter
+///    Like this:
+///        TYGetFeatureInfo(hDevice, componentID, featureID, pFeatureInfo);
+///                                               ^ is invalid
+///    You entered an invalid featureID parameter
+///    You can get a list of features of the camera device through TYGetFeatureList
+///    You can also view the features of the camera device by obtaining the xml description file of the camera
+///
+///@retval TY_STATUS_NULL_POINTER TYGetFeatureInfo called with NULL pointer
+///
+///    Suggestions:
+///    Please check your code
+///    Like this:
+///        TYGetFeatureInfo(hDevice, componentID, featureID, pFeatureInfo);
+///                                                          ^ is NULL
+///
+TY_CAPI TYGetFeatureInfo (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, TY_FEATURE_INFO* featureInfo);
+
+///@brief Get value range of integer feature.
+///@param [in] hDevice Device handle.
+///@param [in] componentID Component ID.
+///@param [in] featureID Feature ID.
+///@param [out] intRange Integer range to be filled. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetIntRange called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetIntRange(hDevice, componentID, featureID, pIntRange); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetIntRange(hDevice, componentID, featureID, pIntRange); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetIntRange(hDevice, componentID, featureID, pIntRange); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetIntRange(hDevice, componentID, featureID, pIntRange); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetIntRange called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetIntRange(hDevice, componentID, featureID, pIntRange); +/// ^ is NULL +/// +TY_CAPI TYGetIntRange (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, TY_INT_RANGE* intRange); + +///@brief Get value of integer feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] value Integer value. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetInt called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetInt(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetInt(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetInt(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera 
+/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetInt(hDevice, componentID, featureID, pValue); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetInt called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetInt(hDevice, componentID, featureID, pValue); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get int feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get int feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get int feature +/// +TY_CAPI TYGetInt (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, int32_t* value); + +///@brief Set value of integer feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] value Integer value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetInt called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetInt(hDevice, componentID, featureID, value); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetInt(hDevice, componentID, featureID, value); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetInt(hDevice, componentID, featureID, value); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetInt(hDevice, componentID, featureID, value); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_OUT_OF_RANGE Out of range +/// +/// Suggestions: +/// Please check the value +/// Like this: +/// TYSetInt(hDevice, componentID, featureID, value); +/// ^ is out of range +/// The value is out of range, please use TYGetIntRange to get the range or check the camera xml description file +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set int feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set int feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set int feature +/// +TY_CAPI TYSetInt (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, int32_t value); + +///@brief Get value range of float feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] floatRange Float range to be filled. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetFloatRange called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetFloatRange(hDevice, componentID, featureID, pFloatRange); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetFloatRange(hDevice, componentID, featureID, pFloatRange); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetFloatRange(hDevice, componentID, featureID, pFloatRange); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetFloatRange(hDevice, componentID, featureID, pFloatRange); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetFloatRange called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetFloatRange(hDevice, componentID, featureID, pFloatRange); +/// ^ is NULL +/// +TY_CAPI TYGetFloatRange (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, TY_FLOAT_RANGE* floatRange); + +///@brief Get value of float feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] value Float value. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetFloat called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetFloat(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetFloat(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetFloat(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file 
of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetFloat(hDevice, componentID, featureID, pValue); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetFloat called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetFloat(hDevice, componentID, featureID, pValue); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get float feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get float feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get float feature +/// +TY_CAPI TYGetFloat (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, float* value); + +///@brief Set value of float feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] value Float value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetFloat called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetFloat(hDevice, componentID, featureID, value); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetFloat(hDevice, componentID, featureID, value); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetFloat(hDevice, componentID, featureID, value); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetFloat(hDevice, componentID, featureID, value); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_OUT_OF_RANGE Out of range +/// +/// Suggestions: +/// Please check the value +/// Like this: +/// TYSetFloat(hDevice, componentID, featureID, value); +/// ^ is out of range +/// The value is out of range, please use TYGetFloatRange to get the range or check the camera xml description file +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set float feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set float feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set float feature +/// +TY_CAPI TYSetFloat (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, float value); + +///@brief Get number of enum entries. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] entryCount Entry count. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetEnumEntryCount called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetEnumEntryCount(hDevice, componentID, featureID, pEntryCount); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetEnumEntryCount(hDevice, componentID, featureID, pEntryCount); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetEnumEntryCount(hDevice, componentID, featureID, pEntryCount); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetEnumEntryCount(hDevice, componentID, featureID, pEntryCount); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetEnumEntryCount called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetEnumEntryCount(hDevice, componentID, featureID, pEntryCount); +/// ^ is NULL +/// +TY_CAPI TYGetEnumEntryCount (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint32_t* entryCount); + +///@brief Get list of enum entries. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] entries Output entries. +///@param [in] entryCount Array size of input parameter "entries". +///@param [out] filledEntryCount Number of filled entries. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetEnumEntryInfo called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetEnumEntryInfo(hDevice, componentID, featureID, entries, entryCount, filledEntryCount); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetEnumEntryInfo(hDevice, componentID, featureID, entries, entryCount, filledEntryCount); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetEnumEntryInfo(hDevice, componentID, featureID, entries, 
entryCount, filledEntryCount); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetEnumEntryInfo(hDevice, componentID, featureID, entries, entryCount, filledEntryCount); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetEnumEntryInfo called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetEnumEntryInfo(hDevice, componentID, featureID, entries, entryCount, filledEntryCount); +/// ^ or ^ is NULL +/// +TY_CAPI TYGetEnumEntryInfo (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, TY_ENUM_ENTRY* entries, uint32_t entryCount, uint32_t* filledEntryCount); + +///@brief Get current value of enum feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] value Enum value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetEnum called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetEnum(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetEnum(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetEnum(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetEnum(hDevice, componentID, featureID, pValue); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetEnum called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetEnum(hDevice, componentID, featureID, pValue); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get enum feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get enum feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get enum feature +/// +TY_CAPI TYGetEnum (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint32_t* value); + +///@brief Set value of enum feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] value Enum value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetEnum called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetEnum(hDevice, componentID, featureID, value); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetEnum(hDevice, componentID, featureID, value); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetEnum(hDevice, componentID, featureID, value); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetEnum(hDevice, componentID, featureID, value); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_INVALID_PARAMETER Out of range +/// +/// Suggestions: +/// Please check the value +/// Like this: +/// TYSetEnum(hDevice, componentID, featureID, value); +/// ^ is out of range +/// The value is out of range, please use TYGetEnumEntryInfo to get the range or check the camera xml description file +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set enum feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set enum feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set enum feature +/// +TY_CAPI TYSetEnum (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint32_t value); + +///@brief Get value of bool feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] value Bool value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetBool called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetBool(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetBool(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetBool(hDevice, componentID, featureID, pValue); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetBool(hDevice, componentID, featureID, pValue); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetBool called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetBool(hDevice, componentID, featureID, pValue); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get bool feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get bool feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get bool feature +/// +TY_CAPI TYGetBool (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, bool* value); + +///@brief Set value of bool feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] value Bool value. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetBool called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetBool(hDevice, componentID, featureID, value); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetBool(hDevice, componentID, featureID, value); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetBool(hDevice, componentID, featureID, value); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetBool(hDevice, componentID, featureID, value); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. 
+///@retval TY_STATUS_TIMEOUT Failed to set bool feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set bool feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set bool feature +/// +TY_CAPI TYSetBool (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, bool value); + +///@brief Get internal buffer size of string feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] size String length including '\0'. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetString called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// 
Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetStringLength called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetStringLength(hDevice, componentID, featureID, pLength); +/// ^ is NULL +/// +///@see TYGetString +TY_CAPI TYGetStringLength (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint32_t* size); + +///@brief Get value of string feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] buffer String buffer. +///@param [in] bufferSize Size of buffer. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetString called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetString(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetString called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetString(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get string feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get string feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get string feature +/// +///@see TYGetStringLength +TY_CAPI TYGetString (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, char* buffer, uint32_t bufferSize); + +///@brief Set value of string feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] buffer String buffer. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetString called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetString(hDevice, componentID, featureID, pBuffer); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetString(hDevice, componentID, featureID, pBuffer); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetString(hDevice, componentID, featureID, pBuffer); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetString(hDevice, componentID, featureID, pBuffer); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYSetString called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYSetString(hDevice, componentID, featureID, pBuffer); +/// ^ is NULL +/// +///@retval TY_STATUS_OUT_OF_RANGE Input string is too long. +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set string feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set string feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set string feature +/// +TY_CAPI TYSetString (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, const char* buffer); + +///@brief Get value of struct. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] pStruct Pointer of struct. +///@param [in] structSize Size of input buffer pStruct. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetStruct called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetStruct called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is NULL +/// +///@retval TY_STATUS_WRONG_SIZE Struct size mismatch +/// +/// Suggestions: +/// Please check the struct size +/// Like this: +/// TYGetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// The struct size you entered does not match +/// +///@retval TY_STATUS_TIMEOUT Failed to get struct feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get struct feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get struct feature +/// +TY_CAPI TYGetStruct (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, void* pStruct, uint32_t structSize); + +///@brief Set value of struct. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [in] pStruct Pointer of struct. +///@param [in] structSize Size of struct. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetStruct called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYSetStruct called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is NULL +/// +///@retval TY_STATUS_WRONG_SIZE Struct size mismatch +/// +/// Suggestions: +/// Please check the struct size +/// Like this: +/// TYSetStruct(hDevice, componentID, featureID, pStruct, structSize); +/// ^ is invalid +/// The struct size you entered does not match +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set struct feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set struct feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set struct feature +/// +TY_CAPI TYSetStruct (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, void* pStruct, uint32_t structSize); + +///@brief Get the size of specified byte array zone. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] pSize Size of specified byte array zone. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetByteArraySize called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetByteArraySize(hDevice, componentID, featureID, pSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetByteArraySize(hDevice, componentID, featureID, pSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetByteArraySize(hDevice, componentID, featureID, pSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetByteArraySize(hDevice, componentID, featureID, pSize); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetByteArraySize called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetByteArraySize(hDevice, componentID, featureID, pSize); +/// ^ is NULL +/// +TY_CAPI TYGetByteArraySize (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint32_t* pSize); + +///@brief Read byte array from device. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] pBuffer Byte buffer. +///@param [in] bufferSize Size of buffer. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetByteArray called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// 
You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ type mismatch +/// The feature type you entered does not match. You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetByteArray called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is NULL +/// +///@retval TY_STATUS_WRONG_SIZE Array size mismatch +/// +/// Suggestions: +/// Please check the array size +/// Like this: +/// TYGetByteArray(hDevice, componentID, featureID, buffer, bufferSize); +/// ^ is invalid +/// The array size you entered does not match +/// +///@retval TY_STATUS_TIMEOUT Failed to get byte array feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get byte array feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get byte array feature +/// +TY_CAPI TYGetByteArray (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, uint8_t* pBuffer, uint32_t bufferSize); + +///@brief Write byte array to device. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] pBuffer Byte buffer. +///@param [in] bufferSize Size of buffer. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYSetByteArray called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYSetByteArray called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is NULL +/// +///@retval TY_STATUS_WRONG_SIZE Array size mismatch +/// +/// Suggestions: +/// Please check the array size +/// Like this: +/// TYSetByteArray(hDevice, componentID, featureID, pBuffer, bufferSize); +/// ^ is invalid +/// The array size you entered does not match +/// +///@retval TY_STATUS_BUSY Device is capturing, the feature is locked. +///@retval TY_STATUS_TIMEOUT Failed to set byte array feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to set byte array feature +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot set byte array feature +/// +TY_CAPI TYSetByteArray (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, const uint8_t* pBuffer, uint32_t bufferSize); + +///@brief Get byte array attribute of the feature. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [in] featureID Feature ID. +///@param [out] pAttr Byte array attribute to be filled. +///@retval TY_STATUS_OK Succeed. 
+///@retval TY_STATUS_INVALID_HANDLE TYGetByteArrayAttr called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetByteArrayAttr(hDevice, componentID, featureID, pAttr); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetByteArrayAttr(hDevice, componentID, featureID, pAttr); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_INVALID_FEATURE Invalid feature ID +/// +/// Suggestions: +/// Please check featureID parameter +/// Like this: +/// TYGetByteArrayAttr(hDevice, componentID, featureID, pAttr); +/// ^ is invalid +/// You entered an invalid featureID parameter +/// You can get a list of features of the camera device through TYGetFeatureList +/// You can also view the features of the camera device by obtaining the xml description file of the camera +/// +///@retval TY_STATUS_NOT_PERMITTED The feature is not writable. +///@retval TY_STATUS_WRONG_TYPE Feature type mismatch +/// +/// Suggestions: +/// Please check the feature type +/// Like this: +/// TYGetByteArrayAttr(hDevice, componentID, featureID, pAttr); +/// ^ type mismatch +/// The feature type you entered does not match. 
You can use TYFeatureType to check the feature type +/// +///@retval TY_STATUS_NULL_POINTER TYGetByteArrayAttr called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetByteArrayAttr(hDevice, componentID, featureID, pAttr); +/// ^ is NULL +/// +TY_CAPI TYGetByteArrayAttr (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_ID featureID, TY_BYTEARRAY_ATTR* pAttr); + +///@brief Get the size of device features. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [out] size Size of all feature cnt. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceFeatureNumber called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceFeatureNumber(hDevice, componentID, size); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetDeviceFeatureNumber(hDevice, componentID, size); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceFeatureNumber called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceFeatureNumber(hDevice, componentID, size); +/// ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get feature number +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether 
firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get feature number +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get feature number +/// +TY_CAPI TYGetDeviceFeatureNumber (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, uint32_t* size); + +///@brief Get the all features by comp id. +///@param [in] hDevice Device handle. +///@param [in] componentID Component ID. +///@param [out] featureInfo Output feature info. +///@param [in] entryCount Array size of input parameter "featureInfo". +///@param [out] filledEntryCount Number of filled featureInfo. +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceFeatureInfo called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceFeatureInfo(hDevice, componentID, featureInfo, entryCount, filledEntryCount); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_INVALID_COMPONENT Invalid component ID +/// +/// Suggestions: +/// Please check componentID parameter +/// Like this: +/// TYGetDeviceFeatureInfo(hDevice, componentID, featureInfo, entryCount, filledEntryCount); +/// ^ is invalid +/// componentID should be the value returned by TYGetComponentIDs +/// You can also view the components of the camera by obtaining the xml description file of the camera device +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceFeatureInfo called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceFeatureInfo(hDevice, 
componentID, featureInfo, entryCount, filledEntryCount); +/// ^ or ^ is NULL +/// +///@retval TY_STATUS_TIMEOUT Failed to get feature info +/// +/// Suggestions: +/// Possible reasons: +/// 1.Network communication is abnormal, please check whether the network connection is normal, whether firewall and other software block the communication, and whether the packet loss rate is too high. +/// +///@retval TY_STATUS_DEVICE_ERROR Failed to get feature info +/// +/// Suggestions: +/// Possible reasons: +/// 1.The feature of the camera device is not available or not implemented +/// 2.Camera device is abnormal and cannot get feature info +/// +TY_CAPI TYGetDeviceFeatureInfo (TY_DEV_HANDLE hDevice, TY_COMPONENT_ID componentID, TY_FEATURE_INFO* featureInfo, uint32_t entryCount, uint32_t* filledEntryCount); + +///@brief Get the Device xml size. +///@param [in] hDevice Device handle. +///@param [out] size The size of device xml string +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED Not call TYInitLib +///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceXMLSize called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceXMLSize(hDevice, size); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceXMLSize called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceXMLSize(hDevice, size); +/// ^ is NULL +/// +TY_CAPI TYGetDeviceXMLSize (TY_DEV_HANDLE hDevice, uint32_t* size); + +///@brief Get the Device xml string. +///@param [in] hDevice Device handle. 
+///@param [out] xml The buffer to store xml +///@param [in] in_size The size of the buffer +///@param [out] out_size The actual size written into the buffer +///@retval TY_STATUS_OK Succeed. +///@retval TY_STATUS_NOT_INITED Not call TYInitLib +///@retval TY_STATUS_WRONG_SIZE XML buffer size is not enough +/// +/// Suggestions: +/// XML buffer size is not enough +/// Like this: +/// TYGetDeviceXML(hDevice, xml, in_size, out_size); +/// ^ is invalid +/// XML buffer size is not enough, please use TYGetDeviceXMLSize to get the xml size +/// +///@retval TY_STATUS_INVALID_HANDLE TYGetDeviceXML called with invalid device handle +/// +/// Suggestions: +/// Please check device handle +/// Like this: +/// TYGetDeviceXML(hDevice, xml, in_size, out_size); +/// ^ is invalid +/// The hDevice parameter you input is not recorded +/// Possible reasons: +/// 1.TYOpenDevice failed to open device and get correct handle +/// 2.Memory in stack to store handle data is corrupted +/// 3.After getting handle, you updated device list by calling TYUpdateDeviceList +/// +///@retval TY_STATUS_NULL_POINTER TYGetDeviceXML called with NULL pointer +/// +/// Suggestions: +/// Please check your code +/// Like this: +/// TYGetDeviceXML(hDevice, xml, in_size, out_size); +/// ^ or ^ is NULL +/// +TY_CAPI TYGetDeviceXML (TY_DEV_HANDLE hDevice, char *xml, const uint32_t in_size, uint32_t* out_size); + +#endif //TY_API_H_ diff --git a/image_capture/third_party/percipio/include/TYCoordinateMapper.h b/image_capture/third_party/percipio/include/TYCoordinateMapper.h new file mode 100644 index 0000000..68de264 --- /dev/null +++ b/image_capture/third_party/percipio/include/TYCoordinateMapper.h @@ -0,0 +1,560 @@ +/**@file TYCoordinateMapper.h + * @brief Coordinate Conversion API + * @note Considering performance, we leave the responsibility of parameters check to users. 
+ * @copyright Copyright(C)2016-2018 Percipio All Rights Reserved + **/ +#ifndef TY_COORDINATE_MAPPER_H_ +#define TY_COORDINATE_MAPPER_H_ + +#include <stdint.h> +#include "TYApi.h" + +typedef struct TY_PIXEL_DESC +{ + int16_t x; // x coordinate in pixels + int16_t y; // y coordinate in pixels + uint16_t depth; // depth value + uint16_t rsvd; +}TY_PIXEL_DESC; + +typedef struct TY_PIXEL_COLOR_DESC +{ + int16_t x; // x coordinate in pixels + int16_t y; // y coordinate in pixels + uint8_t bgr_ch1; // color info + uint8_t bgr_ch2; // color info + uint8_t bgr_ch3; // color info + uint8_t rsvd; +}TY_PIXEL_COLOR_DESC; + +// ------------------------------ +// base conversion +// ------------------------------ + +/// @brief Calculate 4x4 extrinsic matrix's inverse matrix. +/// @param [in] orgExtrinsic Input extrinsic matrix. +/// @param [out] invExtrinsic Inverse matrix. +/// @retval TY_STATUS_OK Succeed. +/// @retval TY_STATUS_ERROR Calculation failed. +TY_CAPI TYInvertExtrinsic (const TY_CAMERA_EXTRINSIC* orgExtrinsic, + TY_CAMERA_EXTRINSIC* invExtrinsic); + +/// @brief Map pixels on depth image to 3D points. +/// @param [in] src_calib Depth image's calibration data. +/// @param [in] depthW Width of depth image. +/// @param [in] depthH Height of depth image. +/// @param [in] depthPixels Pixels on depth image. +/// @param [in] count Number of depth pixels. +/// @param [out] point3d Output point3D. +/// @retval TY_STATUS_OK Succeed. +TY_CAPI TYMapDepthToPoint3d (const TY_CAMERA_CALIB_INFO* src_calib, + uint32_t depthW, uint32_t depthH, + const TY_PIXEL_DESC* depthPixels, uint32_t count, + TY_VECT_3F* point3d, + float f_scale_unit = 1.0f); + +/// @brief Map 3D points to pixels on depth image. Reverse operation of TYMapDepthToPoint3d. +/// @param [in] dst_calib Target depth image's calibration data. +/// @param [in] point3d Input 3D points. +/// @param [in] count Number of points. +/// @param [in] depthW Width of target depth image. +/// @param [in] depthH Height of target depth image. 
+/// @param [out] depth Output depth pixels. +/// @retval TY_STATUS_OK Succeed. +TY_CAPI TYMapPoint3dToDepth (const TY_CAMERA_CALIB_INFO* dst_calib, + const TY_VECT_3F* point3d, uint32_t count, + uint32_t depthW, uint32_t depthH, + TY_PIXEL_DESC* depth, + float f_scale_unit = 1.0f); + +/// @brief Map depth image to 3D points. 0 depth pixels maps to (NAN, NAN, NAN). +/// @param [in] src_calib Depth image's calibration data. +/// @param [in] depthW Width of depth image. +/// @param [in] depthH Height of depth image. +/// @param [in] depth Depth image. +/// @param [out] point3d Output point3D image. +/// @retval TY_STATUS_OK Succeed. +TY_CAPI TYMapDepthImageToPoint3d (const TY_CAMERA_CALIB_INFO* src_calib, + int32_t imageW, int32_t imageH, + const uint16_t* depth, + TY_VECT_3F* point3d, + float f_scale_unit = 1.0f); + +/// @brief Fill depth image empty region. +/// @param [in] depth Depth image pixels. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +TY_CAPI TYDepthImageFillEmptyRegion(uint16_t* depth, uint32_t depthW, uint32_t depthH); + +/// @brief Map 3D points to depth image. (NAN, NAN, NAN) will be skipped. +/// @param [in] dst_calib Target depth image's calibration data. +/// @param [in] point3d Input 3D points. +/// @param [in] count Number of points. +/// @param [in] depthW Width of target depth image. +/// @param [in] depthH Height of target depth image. +/// @param [in,out] depth Depth image buffer. +/// @retval TY_STATUS_OK Succeed. +TY_CAPI TYMapPoint3dToDepthImage (const TY_CAMERA_CALIB_INFO* dst_calib, + const TY_VECT_3F* point3d, uint32_t count, + uint32_t depthW, uint32_t depthH, uint16_t* depth, + float f_target_scale = 1.0f); + +/// @brief Map 3D points to another coordinate. +/// @param [in] extrinsic Extrinsic matrix. +/// @param [in] point3dFrom Source 3D points. +/// @param [in] count Number of source 3D points. +/// @param [out] point3dTo Target 3D points. 
+/// @retval TY_STATUS_OK Succeed. +TY_CAPI TYMapPoint3dToPoint3d (const TY_CAMERA_EXTRINSIC* extrinsic, + const TY_VECT_3F* point3dFrom, int32_t count, + TY_VECT_3F* point3dTo); + +// ------------------------------ +// inlines +// ------------------------------ + +/// @brief Map depth pixels to color coordinate pixels. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Depth image pixels. +/// @param [in] count Number of depth image pixels. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] mappedW Width of target depth image. +/// @param [in] mappedH Height of target depth image. +/// @param [out] mappedDepth Output pixels. +/// @retval TY_STATUS_OK Succeed. +static inline TY_STATUS TYMapDepthToColorCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, + const TY_PIXEL_DESC* depth, uint32_t count, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, + TY_PIXEL_DESC* mappedDepth, + float f_scale_unit = 1.0f); + +/// @brief Map original depth image to color coordinate depth image. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] mappedW Width of target depth image. +/// @param [in] mappedH Height of target depth image. +/// @param [out] mappedDepth Output pixels. +/// @retval TY_STATUS_OK Succeed. 
+static inline TY_STATUS TYMapDepthImageToColorCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, uint16_t* mappedDepth, + float f_scale_unit = 1.0f); + +/// @brief Create depth image to color coordinate lookup table. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] mappedW Width of target depth image. +/// @param [in] mappedH Height of target depth image. +/// @param [out] lut Output lookup table. +/// @retval TY_STATUS_OK Succeed. +static inline TY_STATUS TYCreateDepthToColorCoordinateLookupTable( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, + TY_PIXEL_DESC* lut, + float f_scale_unit = 1.0f); + +/// @brief Map original RGB pixels to depth coordinate. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Current depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] rgbW Width of RGB image. +/// @param [in] rgbH Height of RGB image. +/// @param [in] src Input RGB pixels info. +/// @param [in] cnt Input src RGB pixels cnt +/// @param [in] min_distance The min distance(mm), which is generally set to the minimum measured distance of the current camera +/// @param [in] max_distance The longest distance(mm), which is generally set to the longest measuring distance of the current camera +/// @param [out] dst Output RGB pixels info. +/// @retval TY_STATUS_OK Succeed. 
+static inline TY_STATUS TYMapRGBPixelsToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, + TY_PIXEL_COLOR_DESC* src, uint32_t cnt, + uint32_t min_distance, + uint32_t max_distance, + TY_PIXEL_COLOR_DESC* dst, + float f_scale_unit = 1.0f); + +/// @brief Map original RGB image to depth coordinate RGB image. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Current depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] rgbW Width of RGB image. +/// @param [in] rgbH Height of RGB image. +/// @param [in] inRgb Current RGB image. +/// @param [out] mappedRgb Output RGB image. +/// @retval TY_STATUS_OK Succeed. +static inline TY_STATUS TYMapRGBImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint8_t* inRgb, + uint8_t* mappedRgb, + float f_scale_unit = 1.0f); + +/// @brief Map original RGB48 image to depth coordinate RGB image. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Current depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] rgbW Width of RGB48 image. +/// @param [in] rgbH Height of RGB48 image. +/// @param [in] inRgb Current RGB48 image. +/// @param [out] mappedRgb Output RGB48 image. +/// @retval TY_STATUS_OK Succeed. 
+static inline TY_STATUS TYMapRGB48ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint16_t* inRgb, + uint16_t* mappedRgb, + float f_scale_unit = 1.0f); + +/// @brief Map original MONO16 image to depth coordinate MONO16 image. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Current depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] rgbW Width of MONO16 image. +/// @param [in] rgbH Height of MONO16 image. +/// @param [in] gray Current MONO16 image. +/// @param [out] mappedGray Output MONO16 image. +/// @retval TY_STATUS_OK Succeed. +static inline TY_STATUS TYMapMono16ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint16_t* gray, + uint16_t* mappedGray, + float f_scale_unit = 1.0f); + + +/// @brief Map original MONO8 image to depth coordinate MONO8 image. +/// @param [in] depth_calib Depth image's calibration data. +/// @param [in] depthW Width of current depth image. +/// @param [in] depthH Height of current depth image. +/// @param [in] depth Current depth image. +/// @param [in] color_calib Color image's calibration data. +/// @param [in] monoW Width of MONO8 image. +/// @param [in] monoH Height of MONO8 image. +/// @param [in] inMono Current MONO8 image. +/// @param [out] mappedMono Output MONO8 image. +/// @retval TY_STATUS_OK Succeed. 
+static inline TY_STATUS TYMapMono8ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t monoW, uint32_t monoH, const uint8_t* inMono, + uint8_t* mappedMono, + float f_scale_unit = 1.0f); + + +#define TYMAP_CHECKRET(f, bufToFree) \ + do{ \ + TY_STATUS err = (f); \ + if(err){ \ + if(bufToFree) \ + free(bufToFree); \ + return err; \ + } \ + } while(0) + + +static inline TY_STATUS TYMapDepthToColorCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, + const TY_PIXEL_DESC* depth, uint32_t count, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, + TY_PIXEL_DESC* mappedDepth, + float f_scale_unit) +{ + TY_VECT_3F* p3d = (TY_VECT_3F*)malloc(sizeof(TY_VECT_3F) * count); + TYMAP_CHECKRET(TYMapDepthToPoint3d(depth_calib, depthW, depthH, depth, count, p3d, f_scale_unit), p3d ); + TY_CAMERA_EXTRINSIC extri_inv; + TYMAP_CHECKRET(TYInvertExtrinsic(&color_calib->extrinsic, &extri_inv), p3d); + TYMAP_CHECKRET(TYMapPoint3dToPoint3d(&extri_inv, p3d, count, p3d), p3d ); + TYMAP_CHECKRET(TYMapPoint3dToDepth(color_calib, p3d, count, mappedW, mappedH, mappedDepth, f_scale_unit), p3d ); + free(p3d); + return TY_STATUS_OK; +} + + +static inline TY_STATUS TYMapDepthImageToColorCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, uint16_t* mappedDepth, float f_scale_unit) +{ + TY_VECT_3F* p3d = (TY_VECT_3F*)malloc(sizeof(TY_VECT_3F) * depthW * depthH); + TYMAP_CHECKRET(TYMapDepthImageToPoint3d(depth_calib, depthW, depthH, depth, p3d, f_scale_unit), p3d); + TY_CAMERA_EXTRINSIC extri_inv; + TYMAP_CHECKRET(TYInvertExtrinsic(&color_calib->extrinsic, &extri_inv), p3d); + TYMAP_CHECKRET(TYMapPoint3dToPoint3d(&extri_inv, p3d, depthW * depthH, p3d), p3d); + 
TYMAP_CHECKRET(TYMapPoint3dToDepthImage( + color_calib, p3d, depthW * depthH, mappedW, mappedH, mappedDepth, f_scale_unit), p3d); + free(p3d); + return TY_STATUS_OK; +} + +static inline TY_STATUS TYMapRGBPixelsToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, + TY_PIXEL_COLOR_DESC* src, uint32_t cnt, + uint32_t min_distance, + uint32_t max_distance, + TY_PIXEL_COLOR_DESC* dst, + float f_scale_unit) +{ + uint32_t m_distance_range = max_distance - min_distance; + TY_CAMERA_EXTRINSIC extri = color_calib->extrinsic; + + TY_PIXEL_DESC* pixels_array = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * m_distance_range); + TY_PIXEL_DESC* pixels_mapped_array = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * m_distance_range); + TY_VECT_3F* p3d_array = (TY_VECT_3F*)malloc(sizeof(TY_VECT_3F) * m_distance_range); + for (uint32_t i = 0; i < cnt; i++) { + for (uint32_t m = 0; m < m_distance_range; m++) { + pixels_array[m].x = src[i].x; + pixels_array[m].y = src[i].y; + pixels_array[m].depth = m + min_distance; + } + + TYMapDepthToPoint3d(color_calib, rgbW, rgbH, pixels_array, m_distance_range, &p3d_array[0], f_scale_unit); + TYMapPoint3dToPoint3d(&extri, &p3d_array[0], m_distance_range, &p3d_array[0]); + + TYMapPoint3dToDepth(depth_calib, p3d_array, m_distance_range, depthW, depthH, pixels_mapped_array, f_scale_unit); + + uint16_t m_min_delt = 0xffff; + dst[i].x = -1; + dst[i].y = -1; + for (uint32_t m = 0; m < m_distance_range; m++) { + int16_t pixel_x = pixels_mapped_array[m].x; + int16_t pixel_y = pixels_mapped_array[m].y; + uint16_t delt = abs(pixels_mapped_array[m].depth - depth[pixel_y*depthW + pixel_x]); + if (delt < m_min_delt) { + m_min_delt = delt; + if (m_min_delt < 10) { + dst[i].x = pixel_x; + dst[i].y = pixel_y; + dst[i].bgr_ch1 = src[i].bgr_ch1; + dst[i].bgr_ch2 = src[i].bgr_ch2; + dst[i].bgr_ch3 = src[i].bgr_ch3; + } + } + 
} + } + + free(pixels_array); + free(pixels_mapped_array); + free(p3d_array); + + + return TY_STATUS_OK; +} + +static inline TY_STATUS TYCreateDepthToColorCoordinateLookupTable( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t mappedW, uint32_t mappedH, + TY_PIXEL_DESC* lut, + float f_scale_unit) +{ + TY_VECT_3F* p3d = (TY_VECT_3F*)malloc(sizeof(TY_VECT_3F) * depthW * depthH); + TYMAP_CHECKRET(TYMapDepthImageToPoint3d(depth_calib, depthW, depthH, depth, p3d, f_scale_unit), p3d); + TY_CAMERA_EXTRINSIC extri_inv; + TYMAP_CHECKRET(TYInvertExtrinsic(&color_calib->extrinsic, &extri_inv), p3d); + TYMAP_CHECKRET(TYMapPoint3dToPoint3d(&extri_inv, p3d, depthW * depthH, p3d), p3d); + TYMAP_CHECKRET(TYMapPoint3dToDepth(color_calib, p3d, depthW * depthH, mappedW, mappedH, lut, f_scale_unit), p3d ); + free(p3d); + return TY_STATUS_OK; +} + +inline void TYPixelsOverlapRemove(TY_PIXEL_DESC* lut, uint32_t count, uint32_t imageW, uint32_t imageH) +{ + uint16_t* mappedDepth = (uint16_t*)calloc(imageW*imageH, sizeof(uint16_t)); + for(size_t i = 0; i < count; i++) { + if(lut[i].x < 0 || lut[i].y < 0 || lut[i].x >= imageW || lut[i].y >= imageH) continue; + uint32_t offset = lut[i].y * imageW + lut[i].x; + if(lut[i].depth && (mappedDepth[offset] == 0 || mappedDepth[offset] >= lut[i].depth)) + mappedDepth[offset] = lut[i].depth; + } + TYDepthImageFillEmptyRegion(mappedDepth, imageW, imageH); + for(size_t i = 0; i < count; i++) { + if(lut[i].x < 0 || lut[i].y < 0 || lut[i].x >= imageW || lut[i].y >= imageH) { + continue; + } else { + uint32_t offset = lut[i].y * imageW + lut[i].x; + int32_t delt = lut[i].depth - mappedDepth[offset]; + if(lut[i].depth && delt > 10) { + lut[i].x = -1; + lut[i].y = -1; + lut[i].depth = 0; + } + } + } + free(mappedDepth); +} + +static inline TY_STATUS TYMapRGBImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t 
depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint8_t* inRgb, + uint8_t* mappedRgb, float f_scale_unit) +{ + TY_PIXEL_DESC* lut = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * depthW * depthH); + TYMAP_CHECKRET(TYCreateDepthToColorCoordinateLookupTable( + depth_calib, depthW, depthH, depth, + color_calib, depthW, depthH, lut, f_scale_unit), lut); + TYPixelsOverlapRemove(lut, depthW * depthH, depthW, depthH); + + for(uint32_t depthr = 0; depthr < depthH; depthr++) + for(uint32_t depthc = 0; depthc < depthW; depthc++) + { + TY_PIXEL_DESC* plut = &lut[depthr * depthW + depthc]; + uint8_t* outPtr = &mappedRgb[depthW * depthr * 3 + depthc * 3]; + if(plut->x < 0 || plut->x >= (int)depthW || plut->y < 0 || plut->y >= (int)depthH){ + outPtr[0] = outPtr[1] = outPtr[2] = 0; + } else { + uint16_t scale_x = (uint16_t)(1.f * plut->x * rgbW / depthW + 0.5); + uint16_t scale_y = (uint16_t)(1.f * plut->y * rgbH / depthH + 0.5); + if(scale_x >= rgbW) scale_x = rgbW - 1; + if(scale_y >= rgbH) scale_y = rgbH - 1; + const uint8_t* inPtr = &inRgb[rgbW * scale_y * 3 + scale_x * 3]; + outPtr[0] = inPtr[0]; + outPtr[1] = inPtr[1]; + outPtr[2] = inPtr[2]; + } + } + free(lut); + return TY_STATUS_OK; +} + +static inline TY_STATUS TYMapRGB48ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint16_t* inRgb, + uint16_t* mappedRgb, float f_scale_unit) +{ + TY_PIXEL_DESC* lut = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * depthW * depthH); + TYMAP_CHECKRET(TYCreateDepthToColorCoordinateLookupTable( + depth_calib, depthW, depthH, depth, + color_calib, depthW, depthH, lut, f_scale_unit), lut); + TYPixelsOverlapRemove(lut, depthW * depthH, depthW, depthH); + + for(uint32_t depthr = 0; depthr < depthH; depthr++) + for(uint32_t depthc = 0; depthc < depthW; depthc++) + { + 
TY_PIXEL_DESC* plut = &lut[depthr * depthW + depthc]; + uint16_t* outPtr = &mappedRgb[depthW * depthr * 3 + depthc * 3]; + if(plut->x < 0 || plut->x >= (int)depthW || plut->y < 0 || plut->y >= (int)depthH){ + outPtr[0] = outPtr[1] = outPtr[2] = 0; + } else { + uint16_t scale_x = (uint16_t)(1.f * plut->x * rgbW / depthW + 0.5); + uint16_t scale_y = (uint16_t)(1.f * plut->y * rgbH / depthH + 0.5); + if(scale_x >= rgbW) scale_x = rgbW - 1; + if(scale_y >= rgbH) scale_y = rgbH - 1; + const uint16_t* inPtr = &inRgb[rgbW * scale_y * 3 + scale_x * 3]; + outPtr[0] = inPtr[0]; + outPtr[1] = inPtr[1]; + outPtr[2] = inPtr[2]; + } + } + free(lut); + return TY_STATUS_OK; +} + +static inline TY_STATUS TYMapMono16ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t rgbW, uint32_t rgbH, const uint16_t* gray, + uint16_t* mappedGray, float f_scale_unit) +{ + TY_PIXEL_DESC* lut = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * depthW * depthH); + TYMAP_CHECKRET(TYCreateDepthToColorCoordinateLookupTable( + depth_calib, depthW, depthH, depth, + color_calib, depthW, depthH, lut, f_scale_unit), lut); + TYPixelsOverlapRemove(lut, depthW * depthH, depthW, depthH); + + for(uint32_t depthr = 0; depthr < depthH; depthr++) + for(uint32_t depthc = 0; depthc < depthW; depthc++) + { + TY_PIXEL_DESC* plut = &lut[depthr * depthW + depthc]; + uint16_t* outPtr = &mappedGray[depthW * depthr + depthc]; + if(plut->x < 0 || plut->x >= (int)depthW || plut->y < 0 || plut->y >= (int)depthH){ + outPtr[0] = 0; + } else { + uint16_t scale_x = (uint16_t)(1.f * plut->x * rgbW / depthW + 0.5); + uint16_t scale_y = (uint16_t)(1.f * plut->y * rgbH / depthH + 0.5); + if(scale_x >= rgbW) scale_x = rgbW - 1; + if(scale_y >= rgbH) scale_y = rgbH - 1; + const uint16_t* inPtr = &gray[rgbW * scale_y + scale_x]; + outPtr[0] = inPtr[0]; + } + } + free(lut); + return TY_STATUS_OK; +} + +static inline 
TY_STATUS TYMapMono8ImageToDepthCoordinate( + const TY_CAMERA_CALIB_INFO* depth_calib, + uint32_t depthW, uint32_t depthH, const uint16_t* depth, + const TY_CAMERA_CALIB_INFO* color_calib, + uint32_t monoW, uint32_t monoH, const uint8_t* inMono, + uint8_t* mappedMono, float f_scale_unit) +{ + TY_PIXEL_DESC* lut = (TY_PIXEL_DESC*)malloc(sizeof(TY_PIXEL_DESC) * depthW * depthH); + TYMAP_CHECKRET(TYCreateDepthToColorCoordinateLookupTable( + depth_calib, depthW, depthH, depth, + color_calib, depthW, depthH, lut, f_scale_unit), lut); + TYPixelsOverlapRemove(lut, depthW * depthH, depthW, depthH); + + for(uint32_t depthr = 0; depthr < depthH; depthr++) + for(uint32_t depthc = 0; depthc < depthW; depthc++) + { + TY_PIXEL_DESC* plut = &lut[depthr * depthW + depthc]; + uint8_t* outPtr = &mappedMono[depthW * depthr + depthc]; + if(plut->x < 0 || plut->x >= (int)depthW || plut->y < 0 || plut->y >= (int)depthH){ + outPtr[0] = 0; + } else { + uint16_t scale_x = (uint16_t)(1.f * plut->x * monoW / depthW + 0.5); + uint16_t scale_y = (uint16_t)(1.f * plut->y * monoH / depthH + 0.5); + if(scale_x >= monoW) scale_x = monoW - 1; + if(scale_y >= monoH) scale_y = monoH - 1; + const uint8_t* inPtr = &inMono[monoW * scale_y + scale_x]; + outPtr[0] = inPtr[0]; + } + } + free(lut); + return TY_STATUS_OK; +} + + +#endif diff --git a/image_capture/third_party/percipio/include/TYDefs.h b/image_capture/third_party/percipio/include/TYDefs.h new file mode 100644 index 0000000..a090b0b --- /dev/null +++ b/image_capture/third_party/percipio/include/TYDefs.h @@ -0,0 +1,1224 @@ +/**@file TYDefs.h + * @brief TYDefs.h includes camera control and data receiving data definitions + * which supports configuration for image resolution, frame rate, exposure + * time, gain, working mode,etc. 
+ * + */ +#ifndef TY_DEFS_H_ +#define TY_DEFS_H_ +#include +#include + +#ifdef WIN32 +# ifndef _WIN32 +# define _WIN32 +# endif +#endif + +#ifdef _WIN32 +# ifndef _STDINT_H +# if defined(_MSC_VER) && _MSC_VER < 1600 + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; +# else +# include +# endif +# endif +#else +# include +#endif + +// copy stdbool.h here in case bool not defined or cant be found +#ifndef _STDBOOL_H +# define _STDBOOL_H +# define __bool_true_false_are_defined 1 +# ifndef __cplusplus +# define bool _Bool +# define true 1 +# define false 0 +# endif +#endif + +#ifdef _WIN32 +# include +# include +# ifdef TY_STATIC_LIB +# define TY_DLLIMPORT +# define TY_DLLEXPORT +# else +# define TY_DLLIMPORT __declspec(dllimport) +# define TY_DLLEXPORT __declspec(dllexport) +# endif +# define TY_STDC __stdcall +# define TY_CDEC __cdecl +# ifdef RGB +# undef RGB +# endif +#else +# ifdef TY_STATIC_LIB +# define TY_DLLIMPORT +# define TY_DLLEXPORT +# else +# define TY_DLLIMPORT __attribute__((visibility("default"))) +# define TY_DLLEXPORT __attribute__((visibility("default"))) +# endif +# if defined(__i386__) +# define TY_STDC __attribute__((stdcall)) +# define TY_CDEC __attribute__((cdecl)) +# else +# define TY_STDC +# define TY_CDEC +# endif +#endif + +#ifdef TY_BUILDING_LIB +# define TY_EXPORT TY_DLLEXPORT +#else +# define TY_EXPORT TY_DLLIMPORT +#endif + +#if !defined(TY_EXTC) +# if defined(__cplusplus) +# define TY_EXTC extern "C" +# else +# define TY_EXTC +# endif +#endif + +#define TY_CAPI TY_EXTC TY_EXPORT TY_STATUS TY_STDC + +#include "TYVer.h" + +//------------------------------------------------------------------------------ +///@brief API call return status +typedef enum TY_STATUS_LIST :int32_t +{ + TY_STATUS_OK = 0, + TY_STATUS_ERROR = -1001, + 
TY_STATUS_NOT_INITED = -1002, + TY_STATUS_NOT_IMPLEMENTED = -1003, + TY_STATUS_NOT_PERMITTED = -1004, + TY_STATUS_DEVICE_ERROR = -1005, + TY_STATUS_INVALID_PARAMETER = -1006, + TY_STATUS_INVALID_HANDLE = -1007, + TY_STATUS_INVALID_COMPONENT = -1008, + TY_STATUS_INVALID_FEATURE = -1009, + TY_STATUS_WRONG_TYPE = -1010, + TY_STATUS_WRONG_SIZE = -1011, + TY_STATUS_OUT_OF_MEMORY = -1012, + TY_STATUS_OUT_OF_RANGE = -1013, + TY_STATUS_TIMEOUT = -1014, + TY_STATUS_WRONG_MODE = -1015, + TY_STATUS_BUSY = -1016, + TY_STATUS_IDLE = -1017, + TY_STATUS_NO_DATA = -1018, + TY_STATUS_NO_BUFFER = -1019, + TY_STATUS_NULL_POINTER = -1020, + TY_STATUS_READONLY_FEATURE = -1021, + TY_STATUS_INVALID_DESCRIPTOR= -1022, + TY_STATUS_INVALID_INTERFACE = -1023, + TY_STATUS_FIRMWARE_ERROR = -1024, + + /* ret_code from remote device */ + TY_STATUS_DEV_EPERM = -1, + TY_STATUS_DEV_EIO = -5, + TY_STATUS_DEV_ENOMEM = -12, + TY_STATUS_DEV_EBUSY = -16, + TY_STATUS_DEV_EINVAL = -22, + /* endof ret_code from remote device */ +}TY_STATUS_LIST; +typedef int32_t TY_STATUS; + +typedef enum TY_FW_ERRORCODE_LIST:uint32_t +{ + TY_FW_ERRORCODE_CAM0_NOT_DETECTED = 0x00000001, + TY_FW_ERRORCODE_CAM1_NOT_DETECTED = 0x00000002, + TY_FW_ERRORCODE_CAM2_NOT_DETECTED = 0x00000004, + TY_FW_ERRORCODE_POE_NOT_INIT = 0x00000008, + TY_FW_ERRORCODE_RECMAP_NOT_CORRECT = 0x00000010, + TY_FW_ERRORCODE_LOOKUPTABLE_NOT_CORRECT = 0x00000020, + TY_FW_ERRORCODE_DRV8899_NOT_INIT = 0x00000040, + TY_FW_ERRORCODE_FOC_START_ERR = 0x00000080, + TY_FW_ERRORCODE_CONFIG_NOT_FOUND = 0x00010000, + TY_FW_ERRORCODE_CONFIG_NOT_CORRECT = 0x00020000, + TY_FW_ERRORCODE_XML_NOT_FOUND = 0x00040000, + TY_FW_ERRORCODE_XML_NOT_CORRECT = 0x00080000, + TY_FW_ERRORCODE_XML_OVERRIDE_FAILED = 0x00100000, + TY_FW_ERRORCODE_CAM_INIT_FAILED = 0x00200000, + TY_FW_ERRORCODE_LASER_INIT_FAILED = 0x00400000, +}TY_FW_ERRORCODE_LIST; +typedef uint32_t TY_FW_ERRORCODE; + +typedef enum TY_EVENT_LIST :int32_t +{ + TY_EVENT_DEVICE_OFFLINE = -2001, + TY_EVENT_LICENSE_ERROR 
= -2002, + TY_EVENT_FW_INIT_ERROR = -2003, +}TY_ENENT_LIST; +typedef int32_t TY_EVENT; + + +typedef void* TY_INTERFACE_HANDLE; ///buffer, dstImage->buffer is NULL. +/// @retval TY_STATUS_INVALID_PARAMETER Invalid srcImage->width, srcImage->height, dstImage->width, dstImage->height or unsupported pixel format. +TY_CAPI TYUndistortImage (const TY_CAMERA_CALIB_INFO *srcCalibInfo + , const TY_IMAGE_DATA *srcImage + , const TY_CAMERA_INTRINSIC *cameraNewIntrinsic + , TY_IMAGE_DATA *dstImage + ); + + +// ----------------------------------------------------------- +struct DepthSpeckleFilterParameters { + int max_speckle_size; // blob size smaller than this will be removed + int max_speckle_diff; // Maximum difference between neighbor disparity pixels +}; + +///buffer is NULL. +/// @retval TY_STATUS_INVALID_PARAMETER param->max_speckle_size <= 0 or param->max_speckle_diff <= 0 +TY_CAPI TYDepthSpeckleFilter (TY_IMAGE_DATA* depthImage + , const DepthSpeckleFilterParameters* param + ); + + +// ----------------------------------------------------------- +struct DepthEnhenceParameters{ + float sigma_s; ///< filter param on space + float sigma_r; ///< filter param on range + int outlier_win_sz; ///< outlier filter windows ize + float outlier_rate; +}; + +///buffer is NULL. +/// @retval TY_STATUS_INVALID_PARAMETER imageNum >= 11 or imageNum <= 0, or any image invalid +/// @retval TY_STATUS_OUT_OF_MEMORY Output image not suitable. 
+TY_CAPI TYDepthEnhenceFilter (const TY_IMAGE_DATA* depthImages + , int imageNum + , TY_IMAGE_DATA *guide + , TY_IMAGE_DATA *output + , const DepthEnhenceParameters* param + ); + + +#endif diff --git a/image_capture/third_party/percipio/include/TYVer.h b/image_capture/third_party/percipio/include/TYVer.h new file mode 100644 index 0000000..9fe1a01 --- /dev/null +++ b/image_capture/third_party/percipio/include/TYVer.h @@ -0,0 +1,3 @@ +#define TY_LIB_VERSION_MAJOR 3 +#define TY_LIB_VERSION_MINOR 6 +#define TY_LIB_VERSION_PATCH 75 diff --git a/image_capture/third_party/percipio/include/TyIsp.h b/image_capture/third_party/percipio/include/TyIsp.h new file mode 100644 index 0000000..3323fcd --- /dev/null +++ b/image_capture/third_party/percipio/include/TyIsp.h @@ -0,0 +1,109 @@ +/**@file TyIsp.h + * @breif this file Include interface declare for raw color image (bayer format) + * process functions + * + * Copyright(C)2016-2019 Percipio All Rights Reserved + * + */ + +#ifndef TY_COLOR_ISP_H_ +#define TY_COLOR_ISP_H_ +#include "TYApi.h" + +#define TYISP_CAPI TY_CAPI + +typedef void* TY_ISP_HANDLE; + +typedef enum{ + TY_ISP_FEATURE_CAM_MODEL = 0x000000, + TY_ISP_FEATURE_CAM_DEV_HANDLE = 0x000001, /// to_string & operator << (const T & val) { ss << val; return *this; } + operator std::string() const { return ss.str(); } +}; + +static std::string TY_ERROR(TY_STATUS status) +{ + return to_string() << status << "(" << TYErrorString(status) << ")."; +} + +static inline TY_STATUS searchDevice(std::vector& out, const char *inf_id = nullptr, TY_INTERFACE_TYPE type = TY_INTERFACE_ALL) +{ + out.clear(); + ASSERT_OK( TYUpdateInterfaceList() ); + + uint32_t n = 0; + ASSERT_OK( TYGetInterfaceNumber(&n) ); + if(n == 0) return TY_STATUS_ERROR; + + std::vector ifaces(n); + ASSERT_OK( TYGetInterfaceList(&ifaces[0], n, &n) ); + + bool found = false; + std::vector hIfaces; + for(uint32_t i = 0; i < ifaces.size(); i++){ + TY_INTERFACE_HANDLE hIface; + if(type & ifaces[i].type) { + //Interface 
Not setted + if (nullptr == inf_id || + //Interface been setted and matched + strcmp(inf_id, ifaces[i].id) == 0) { + ASSERT_OK( TYOpenInterface(ifaces[i].id, &hIface) ); + hIfaces.push_back(hIface); + found = true; + //Interface been setted, found and just break + if(nullptr != inf_id) { + break; + } + } + } + + } + if(!found) return TY_STATUS_ERROR; + updateDevicesParallel(hIfaces); + + for (uint32_t i = 0; i < hIfaces.size(); i++) { + TY_INTERFACE_HANDLE hIface = hIfaces[i]; + uint32_t n = 0; + TYGetDeviceNumber(hIface, &n); + if(n > 0){ + std::vector devs(n); + TYGetDeviceList(hIface, &devs[0], n, &n); + for(uint32_t j = 0; j < n; j++) { + out.push_back(devs[j]); + } + } + TYCloseInterface(hIface); + } + + if(out.size() == 0){ + std::cout << "not found any device" << std::endl; + return TY_STATUS_ERROR; + } + + return TY_STATUS_OK; +} + +namespace percipio_layer { + +TYDeviceInfo::TYDeviceInfo(const TY_DEVICE_BASE_INFO& info) +{ + _info = info; +} + +TYDeviceInfo::~TYDeviceInfo() +{ + +} + +const char* TYDeviceInfo::mac() +{ + if(!TYIsNetworkInterface(_info.iface.type)) { + return nullptr; + } + return _info.netInfo.mac; +} + +const char* TYDeviceInfo::ip() +{ + if(!TYIsNetworkInterface(_info.iface.type)) + return nullptr; + return _info.netInfo.ip; +} + +const char* TYDeviceInfo::netmask() +{ + if(!TYIsNetworkInterface(_info.iface.type)) + return nullptr; + return _info.netInfo.netmask; +} + +const char* TYDeviceInfo::gateway() +{ + if(!TYIsNetworkInterface(_info.iface.type)) + return nullptr; + return _info.netInfo.gateway; +} + +const char* TYDeviceInfo::broadcast() +{ + if(!TYIsNetworkInterface(_info.iface.type)) + return nullptr; + return _info.netInfo.broadcast; +} + +static void eventCallback(TY_EVENT_INFO *event_info, void *userdata) { + TYDevice* handle = (TYDevice*)userdata; + handle->_event_callback(event_info); +} + + TYCamInterface::TYCamInterface() + { + TYContext::getInstance(); + Reset(); + } + +TYCamInterface::~TYCamInterface() +{ + +} + 
+TY_STATUS TYCamInterface::Reset() +{ + TY_STATUS status; + status = TYUpdateInterfaceList(); + if(status != TY_STATUS_OK) return status; + + uint32_t n = 0; + status = TYGetInterfaceNumber(&n); + if(status != TY_STATUS_OK) return status; + + if(n == 0) return TY_STATUS_OK; + + ifaces.resize(n); + status = TYGetInterfaceList(&ifaces[0], n, &n); + return status; +} + +void TYCamInterface::List(std::vector& interfaces) +{ + for(auto& iter : ifaces) { + std::cout << iter.id << std::endl; + interfaces.push_back(iter.id); + } +} + +FastCamera::FastCamera() +{ + +} + +FastCamera::FastCamera(const char* sn) +{ + const char *inf = nullptr; + if (!mIfaceId.empty()) { + inf = mIfaceId.c_str(); + } + auto devList = TYContext::getInstance().queryDeviceList(inf); + if(devList->empty()) { + return; + } + + device = (sn && strlen(sn) != 0) ? devList->getDeviceBySN(sn) : devList->getDevice(0); + if(!device) { + return; + } + + TYGetComponentIDs(device->_handle, &components); +} + +TY_STATUS FastCamera::open(const char* sn) +{ + const char *inf = nullptr; + if (!mIfaceId.empty()) { + inf = mIfaceId.c_str(); + } + + auto devList = TYContext::getInstance().queryDeviceList(inf); + if(devList->empty()) { + std::cout << "deivce list is empty!" << std::endl; + return TY_STATUS_ERROR; + } + + device = (sn && strlen(sn) != 0) ? devList->getDeviceBySN(sn) : devList->getDevice(0); + if(!device) { + return TY_STATUS_ERROR; + } + + return TYGetComponentIDs(device->_handle, &components); +} + +TY_STATUS FastCamera::openByIP(const char* ip) +{ + const char *inf = nullptr; + if (!mIfaceId.empty()) { + inf = mIfaceId.c_str(); + } + + std::unique_lock lock(_dev_lock); + auto devList = TYContext::getInstance().queryNetDeviceList(inf); + if(devList->empty()) { + std::cout << "net deivce list is empty!" << std::endl; + return TY_STATUS_ERROR; + } + + device = (ip && strlen(ip) != 0) ? devList->getDeviceByIP(ip) : devList->getDevice(0); + if(!device) { + std::cout << "open device failed!" 
<< std::endl; + return TY_STATUS_ERROR; + } + + return TYGetComponentIDs(device->_handle, &components); +} + +TY_STATUS FastCamera::setIfaceId(const char* inf) +{ + mIfaceId = inf; + return TY_STATUS_OK; +} + +FastCamera::~FastCamera() +{ + if(isRuning) { + doStop(); + } +} + +void FastCamera::close() +{ + std::unique_lock lock(_dev_lock); + if(isRuning) { + doStop(); + } + + if(device) device.reset(); +} + +std::shared_ptr FastCamera::fetchFrames(uint32_t timeout_ms) +{ + TY_FRAME_DATA tyframe; + TY_STATUS status = TYFetchFrame(handle(), &tyframe, timeout_ms); + if(status != TY_STATUS_OK) { + std::cout << "Frame fetch failed with err code: " << status << "(" << TYErrorString(status) << ")."<< std::endl; + return std::shared_ptr(); + } + + std::shared_ptr frame = std::shared_ptr(new TYFrame(tyframe)); + CHECK_RET(TYEnqueueBuffer(handle(), tyframe.userBuffer, tyframe.bufferSize)); + return frame; +} + +static TY_COMPONENT_ID StreamIdx2CompID(FastCamera::stream_idx idx) +{ + TY_COMPONENT_ID comp = 0; + switch (idx) + { + case FastCamera::stream_depth: + comp = TY_COMPONENT_DEPTH_CAM; + break; + case FastCamera::stream_color: + comp = TY_COMPONENT_RGB_CAM; + break; + case FastCamera::stream_ir_left: + comp = TY_COMPONENT_IR_CAM_LEFT; + break; + case FastCamera::stream_ir_right: + comp = TY_COMPONENT_IR_CAM_RIGHT; + break; + default: + break; + } + + return comp; +} +bool FastCamera::has_stream(stream_idx idx) +{ + return components & StreamIdx2CompID(idx); +} + +TY_STATUS FastCamera::stream_enable(stream_idx idx) +{ + std::unique_lock lock(_dev_lock); + return TYEnableComponents(handle(), StreamIdx2CompID(idx)); +} + +TY_STATUS FastCamera::stream_disable(stream_idx idx) +{ + std::unique_lock lock(_dev_lock); + return TYDisableComponents(handle(), StreamIdx2CompID(idx)); +} + +TY_STATUS FastCamera::start() +{ + std::unique_lock lock(_dev_lock); + if(isRuning) { + std::cout << "Device is busy!" 
<< std::endl; + return TY_STATUS_BUSY; + } + + uint32_t stream_buffer_size; + TY_STATUS status = TYGetFrameBufferSize(handle(), &stream_buffer_size); + if(status != TY_STATUS_OK) { + std::cout << "Get frame buffer size failed with error code: " << TY_ERROR(status) << std::endl; + return status; + } + if(stream_buffer_size == 0) { + std::cout << "Frame buffer size is 0, is the data flow component not enabled?" << std::endl; + return TY_STATUS_DEVICE_ERROR; + } + + for(int i = 0; i < BUF_CNT; i++) { + stream_buffer[i].resize(stream_buffer_size); + TYEnqueueBuffer(handle(), &stream_buffer[i][0], stream_buffer_size); + } + + status = TYStartCapture(handle()); + if(TY_STATUS_OK != status) { + std::cout << "Start capture failed with error code: " << TY_ERROR(status) << std::endl; + return status; + } + + isRuning = true; + return TY_STATUS_OK; +} + +TY_STATUS FastCamera::stop() +{ + std::unique_lock lock(_dev_lock); + return doStop(); +} + +TY_STATUS FastCamera::doStop() +{ + if(!isRuning) + return TY_STATUS_IDLE; + + isRuning = false; + + TY_STATUS status = TYStopCapture(handle()); + if(TY_STATUS_OK != status) { + std::cout << "Stop capture failed with error code: " << TY_ERROR(status) << std::endl; + } + //Stop will stop receive, need TYClearBufferQueue any way + //Ignore TYClearBufferQueue ret val + TYClearBufferQueue(handle()); + for(int i = 0; i < BUF_CNT; i++) { + stream_buffer[i].clear(); + } + + return status; +} + +std::shared_ptr FastCamera::tryGetFrames(uint32_t timeout_ms) +{ + std::unique_lock lock(_dev_lock); + return fetchFrames(timeout_ms); +} + +TYDevice::TYDevice(const TY_DEV_HANDLE handle, const TY_DEVICE_BASE_INFO& info) +{ + _handle = handle; + _dev_info = info; + _event_callback = std::bind(&TYDevice::onDeviceEventCallback, this, std::placeholders::_1); + TYRegisterEventCallback(_handle, eventCallback, this); +} + +TYDevice::~TYDevice() +{ + CHECK_RET(TYCloseDevice(_handle)); +} + +void TYDevice::registerEventCallback(const TY_EVENT eventID, void* 
data, EventCallback cb) +{ + _eventCallbackMap[eventID] = {data, cb}; +} + +void TYDevice::onDeviceEventCallback(const TY_EVENT_INFO *event_info) +{ + if(_eventCallbackMap[event_info->eventId].second != nullptr) { + _eventCallbackMap[event_info->eventId].second(_eventCallbackMap[event_info->eventId].first); + } +} + +std::shared_ptr TYDevice::getDeviceInfo() +{ + return std::shared_ptr(new TYDeviceInfo(_dev_info)); +} + +std::set DeviceList::gifaces; +DeviceList::DeviceList(std::vector& devices) +{ + devs = devices; +} + +DeviceList::~DeviceList() +{ + for (TY_INTERFACE_HANDLE iface : gifaces) { + TYCloseInterface(iface); + } + gifaces.clear(); +} + +std::shared_ptr DeviceList::getDeviceInfo(int idx) +{ + if((idx < 0) || (idx > devCount())) { + std::cout << "idx out of range" << std::endl; + return nullptr; + } + + return std::shared_ptr(new TYDeviceInfo(devs[idx])); +} + +std::shared_ptr DeviceList::getDevice(int idx) +{ + if((idx < 0) || (idx > devCount())) { + std::cout << "idx out of range" << std::endl; + return nullptr; + } + + TY_INTERFACE_HANDLE hIface = NULL; + TY_DEV_HANDLE hDevice = NULL; + + TY_STATUS status = TY_STATUS_OK; + status = TYOpenInterface(devs[idx].iface.id, &hIface); + if(status != TY_STATUS_OK) { + std::cout << "Open interface failed with error code: " << TY_ERROR(status) << std::endl; + return nullptr; + } + + gifaces.insert(hIface); + std::string ifaceId = devs[idx].iface.id; + std::string open_log = std::string("open device ") + devs[idx].id + + "\non interface " + parseInterfaceID(ifaceId); + std::cout << open_log << std::endl; + status = TYOpenDevice(hIface, devs[idx].id, &hDevice); + if(status != TY_STATUS_OK) { + std::cout << "Open device < " << devs[idx].id << "> failed with error code: " << TY_ERROR(status) << std::endl; + return nullptr; + } + + TY_DEVICE_BASE_INFO info; + status = TYGetDeviceInfo(hDevice, &info); + if(status != TY_STATUS_OK) { + std::cout << "Get device info failed with error code: " << TY_ERROR(status) << 
std::endl; + return nullptr; + } + + return std::shared_ptr(new TYDevice(hDevice, info)); +} + +std::shared_ptr DeviceList::getDeviceBySN(const char* sn) +{ + TY_STATUS status = TY_STATUS_OK; + TY_INTERFACE_HANDLE hIface = NULL; + TY_DEV_HANDLE hDevice = NULL; + + if(!sn) { + std::cout << "Invalid parameters" << std::endl; + return nullptr; + } + + for(size_t i = 0; i < devs.size(); i++) { + if(strcmp(devs[i].id, sn) == 0) { + status = TYOpenInterface(devs[i].iface.id, &hIface); + if(status != TY_STATUS_OK) continue; + + gifaces.insert(hIface); + std::string ifaceId = devs[i].iface.id; + std::string open_log = std::string("open device ") + devs[i].id + + "\non interface " + parseInterfaceID(ifaceId); + std::cout << open_log << std::endl; + status = TYOpenDevice(hIface, devs[i].id, &hDevice); + if(status != TY_STATUS_OK) continue; + + TY_DEVICE_BASE_INFO info; + status = TYGetDeviceInfo(hDevice, &info); + if(status != TY_STATUS_OK) { + TYCloseDevice(hDevice); + continue; + } + return std::shared_ptr(new TYDevice(hDevice, info)); + } + } + + std::cout << "Device not found!" 
<< std::endl; + return nullptr; +} + +std::shared_ptr DeviceList::getDeviceByIP(const char* ip) +{ + TY_STATUS status = TY_STATUS_OK; + TY_INTERFACE_HANDLE hIface = NULL; + TY_DEV_HANDLE hDevice = NULL; + + if(!ip) { + std::cout << "Invalid parameters" << std::endl; + return nullptr; + } + + for(size_t i = 0; i < devs.size(); i++) { + if(TYIsNetworkInterface(devs[i].iface.type)) { + status = TYOpenInterface(devs[i].iface.id, &hIface); + if(status != TY_STATUS_OK) continue; + std::string open_log = "open device "; + if(ip && strlen(ip)) { + open_log += ip; + status = TYOpenDeviceWithIP(hIface, ip, &hDevice); + } else { + open_log += devs[i].id; + status = TYOpenDevice(hIface, devs[i].id, &hDevice); + } + std::string ifaceId = devs[i].iface.id; + open_log += "\non interface " + parseInterfaceID(ifaceId); + std::cout << open_log << std::endl; + + if(status != TY_STATUS_OK) continue; + + TY_DEVICE_BASE_INFO info; + status = TYGetDeviceInfo(hDevice, &info); + if(status != TY_STATUS_OK) { + TYCloseDevice(hDevice); + continue;; + } + + return std::shared_ptr(new TYDevice(hDevice, info)); + } + } + + std::cout << "Device not found!" 
<< std::endl; + return nullptr; +} + +std::shared_ptr TYContext::queryDeviceList(const char *iface) +{ + std::vector devs; + searchDevice(devs, iface); + return std::shared_ptr(new DeviceList(devs)); +} + +std::shared_ptr TYContext::queryNetDeviceList(const char *iface) +{ + std::vector devs; + searchDevice(devs, iface, TY_INTERFACE_ETHERNET | TY_INTERFACE_IEEE80211); + return std::shared_ptr(new DeviceList(devs)); +} + +bool TYContext::ForceNetDeviceIP(const ForceIPStyle style, const std::string& mac, const std::string& ip, const std::string& mask, const std::string& gateway) +{ + ASSERT_OK( TYUpdateInterfaceList() ); + + uint32_t n = 0; + ASSERT_OK( TYGetInterfaceNumber(&n) ); + if(n == 0) return false; + + std::vector ifaces(n); + ASSERT_OK( TYGetInterfaceList(&ifaces[0], n, &n) ); + ASSERT( n == ifaces.size() ); + + bool open_needed = false; + const char * ip_save = ip.c_str(); + const char * netmask_save = mask.c_str(); + const char * gateway_save = gateway.c_str(); + switch(style) + { + case ForceIPStyleDynamic: + if(strcmp(ip_save, "0.0.0.0") != 0) { + open_needed = true; + } + ip_save = "0.0.0.0"; + netmask_save = "0.0.0.0"; + gateway_save = "0.0.0.0"; + break; + case ForceIPStyleStatic: + open_needed = true; + break; + default: + break; + } + + bool result = false; + for(uint32_t i = 0; i < n; i++) { + if(TYIsNetworkInterface(ifaces[i].type)) { + TY_INTERFACE_HANDLE hIface; + ASSERT_OK( TYOpenInterface(ifaces[i].id, &hIface) ); + if (TYForceDeviceIP(hIface, mac.c_str(), ip.c_str(), mask.c_str(), gateway.c_str()) == TY_STATUS_OK) { + LOGD("**** Set Temporary IP/Netmask/Gateway ...Done! 
****"); + if(open_needed) { + TYUpdateDeviceList(hIface); + TY_DEV_HANDLE hDev; + if(TYOpenDeviceWithIP(hIface, ip.c_str(), &hDev) == TY_STATUS_OK){ + int32_t ip_i[4]; + uint8_t ip_b[4]; + int32_t ip32; + sscanf(ip_save, "%d.%d.%d.%d", &ip_i[0], &ip_i[1], &ip_i[2], &ip_i[3]); + ip_b[0] = ip_i[0];ip_b[1] = ip_i[1];ip_b[2] = ip_i[2];ip_b[3] = ip_i[3]; + ip32 = TYIPv4ToInt(ip_b); + ASSERT_OK( TYSetInt(hDev, TY_COMPONENT_DEVICE, TY_INT_PERSISTENT_IP, ip32) ); + sscanf(netmask_save, "%d.%d.%d.%d", &ip_i[0], &ip_i[1], &ip_i[2], &ip_i[3]); + ip_b[0] = ip_i[0];ip_b[1] = ip_i[1];ip_b[2] = ip_i[2];ip_b[3] = ip_i[3]; + ip32 = TYIPv4ToInt(ip_b); + ASSERT_OK( TYSetInt(hDev, TY_COMPONENT_DEVICE, TY_INT_PERSISTENT_SUBMASK, ip32) ); + sscanf(gateway_save, "%d.%d.%d.%d", &ip_i[0], &ip_i[1], &ip_i[2], &ip_i[3]); + ip_b[0] = ip_i[0];ip_b[1] = ip_i[1];ip_b[2] = ip_i[2];ip_b[3] = ip_i[3]; + ip32 = TYIPv4ToInt(ip_b); + ASSERT_OK( TYSetInt(hDev, TY_COMPONENT_DEVICE, TY_INT_PERSISTENT_GATEWAY, ip32) ); + + result = true; + std::cout << "**** Set Persistent IP/Netmask/Gateway ...Done! 
****" < + +#include "Frame.hpp" +#include "TYImageProc.h" + +namespace percipio_layer { + + +TYImage::TYImage() +{ + memset(&image_data, 0, sizeof(image_data)); +} + +TYImage::TYImage(const TY_IMAGE_DATA& image) : + m_isOwner(false) +{ + memcpy(&image_data, &image, sizeof(TY_IMAGE_DATA)); +} + +TYImage::TYImage(const TYImage& src) +{ + image_data.timestamp = src.timestamp(); + image_data.imageIndex = src.imageIndex(); + image_data.status = src.status(); + image_data.componentID = src.componentID(); + image_data.size = src.size(); + image_data.width = src.width(); + image_data.height = src.height(); + image_data.pixelFormat = src.pixelFormat(); + if(image_data.size) { + m_isOwner = true; + image_data.buffer = malloc(image_data.size); + memcpy(image_data.buffer, src.buffer(), image_data.size); + } +} + +TYImage::TYImage(int32_t width, int32_t height, TY_COMPONENT_ID compID, TY_PIXEL_FORMAT format, int32_t size) +{ + image_data.size = size; + image_data.width = width; + image_data.height = height; + image_data.componentID = compID; + image_data.pixelFormat = format; + if(image_data.size) { + m_isOwner = true; + image_data.buffer = calloc(image_data.size, 1); + } +} + +bool TYImage::resize(int w, int h) +{ +#ifdef OPENCV_DEPENDENCIES + cv::Mat src, dst; + switch(image_data.pixelFormat) + { + case TY_PIXEL_FORMAT_BGR: + case TY_PIXEL_FORMAT_RGB: + src = cv::Mat(cv::Size(width(), height()), CV_8UC3, buffer()); + break; + case TY_PIXEL_FORMAT_MONO: + src = cv::Mat(cv::Size(width(), height()), CV_8U, buffer()); + break; + case TY_PIXEL_FORMAT_MONO16: + src = cv::Mat(cv::Size(width(), height()), CV_16U, buffer()); + break; + case TY_PIXEL_FORMAT_BGR48: + src = cv::Mat(cv::Size(width(), height()), CV_16UC3, buffer()); + break; + case TY_PIXEL_FORMAT_RGB48: + src = cv::Mat(cv::Size(width(), height()), CV_16UC3, buffer()); + break; + case TY_PIXEL_FORMAT_DEPTH16: + src = cv::Mat(cv::Size(width(), height()), CV_16U, buffer()); + break; + default: + return false; + } + + 
if(image_data.pixelFormat == TY_PIXEL_FORMAT_DEPTH16) + cv::resize(src, dst, cv::Size(w, h), 0, 0, cv::INTER_NEAREST); + else + cv::resize(src, dst, cv::Size(w, h)); + image_data.size = dst.cols * dst.rows * dst.elemSize() * dst.channels(); + image_data.width = dst.cols; + image_data.height = dst.rows; + if(m_isOwner) free(image_data.buffer); + image_data.buffer = malloc(image_data.size); + memcpy(image_data.buffer, dst.data, image_data.size); + return true; +#else + std::cout << "not support!" << std::endl; + return false; +#endif +} + +TYImage::~TYImage() +{ + if(m_isOwner) { + free(image_data.buffer); + } +} + +ImageProcesser::ImageProcesser(const char* win, const TY_CAMERA_CALIB_INFO* calib_data, const TY_ISP_HANDLE isp_handle) +{ + win_name = win; + hasWin = false; + color_isp_handle = isp_handle; + if(calib_data != nullptr) { + _calib_data = std::shared_ptr(new TY_CAMERA_CALIB_INFO(*calib_data)); + } +} + +int ImageProcesser::parse(const std::shared_ptr& image) +{ + if(!image) return -1; + TY_PIXEL_FORMAT format = image->pixelFormat(); +#ifndef OPENCV_DEPENDENCIES + std::cout << win() << " image size : " << image->width() << " x " << image->height() << std::endl; +#endif + switch(format) { + /* + case TY_PIXEL_FORMAT_BGR: + case TY_PIXEL_FORMAT_RGB: + case TY_PIXEL_FORMAT_MONO: + case TY_PIXEL_FORMAT_MONO16: + case TY_PIXEL_FORMAT_BGR48: + case TY_PIXEL_FORMAT_RGB48: + */ + case TY_PIXEL_FORMAT_DEPTH16: + { + _image = std::shared_ptr(new TYImage(*image)); + return 0; + } + case TY_PIXEL_FORMAT_XYZ48: + { + std::vector depth_data(image->width() * image->height()); + int16_t* src = static_cast(image->buffer()); + for (int pix = 0; pix < image->width()*image->height(); pix++) { + depth_data[pix] = *(src + 3*pix + 2); + } + + _image = std::shared_ptr(new TYImage(image->width(), image->height(), image->componentID(), TY_PIXEL_FORMAT_DEPTH16, depth_data.size() * sizeof(int16_t))); + memcpy(_image->buffer(), depth_data.data(), image->size()); + return 0; + } + 
default: + { +#ifdef OPENCV_DEPENDENCIES + cv::Mat cvImage; + int32_t image_size; + TY_PIXEL_FORMAT image_fmt; + TY_COMPONENT_ID comp_id; + comp_id = image->componentID(); + parseImage(image->image(), &cvImage, color_isp_handle); + switch(cvImage.type()) + { + case CV_8U: + //MONO8 + image_size = cvImage.size().area(); + image_fmt = TY_PIXEL_FORMAT_MONO; + break; + case CV_16U: + //MONO16 + image_size = cvImage.size().area() * 2; + image_fmt = TY_PIXEL_FORMAT_MONO16; + break; + case CV_16UC3: + //BGR48 + image_size = cvImage.size().area() * 6; + image_fmt = TY_PIXEL_FORMAT_BGR48; + break; + default: + //BGR888 + image_size = cvImage.size().area() * 3; + image_fmt = TY_PIXEL_FORMAT_BGR; + break; + } + _image = std::shared_ptr(new TYImage(cvImage.cols, cvImage.rows, comp_id, image_fmt, image_size)); + memcpy(_image->buffer(), cvImage.data, image_size); + return 0; + #else + + //Without the OpenCV library, image decoding is not supported yet. + return -1; + #endif + } + } +} + + +int ImageProcesser::DepthImageRender() +{ + if(!_image) return -1; + TY_PIXEL_FORMAT format = _image->pixelFormat(); + if(format != TY_PIXEL_FORMAT_DEPTH16) return -1; + +#ifdef OPENCV_DEPENDENCIES + static DepthRender render; + cv::Mat depth = cv::Mat(_image->height(), _image->width(), CV_16U, _image->buffer()); + cv::Mat bgr = render.Compute(depth); + + _image = std::shared_ptr(new TYImage(_image->width(), _image->height(), _image->componentID(), TY_PIXEL_FORMAT_BGR, bgr.size().area() * 3)); + memcpy(_image->buffer(), bgr.data, _image->size()); + return 0; +#else + return -1; +#endif +} + +TY_STATUS ImageProcesser::doUndistortion() +{ + int ret = 0; + if(ret == 0) { + if(!_calib_data) { + std::cout << "Calib data is empty!" 
<< std::endl; + return TY_STATUS_ERROR; + } + + int32_t image_size = _image->size(); + TY_PIXEL_FORMAT image_fmt = _image->pixelFormat(); + TY_COMPONENT_ID comp_id = _image->componentID(); + + std::vector undistort_image(image_size); + + TY_IMAGE_DATA src; + src.width = _image->width(); + src.height = _image->height(); + src.size = image_size; + src.pixelFormat = image_fmt; + src.buffer = _image->buffer(); + + TY_IMAGE_DATA dst; + dst.width = _image->width(); + dst.height = _image->height(); + dst.size = image_size; + dst.pixelFormat = image_fmt; + dst.buffer = undistort_image.data(); + + TY_STATUS status = TYUndistortImage(&*_calib_data, &src, NULL, &dst); + if(status != TY_STATUS_OK) { + std::cout << "Do image undistortion failed!" << std::endl; + return status; + } + + _image = std::shared_ptr(new TYImage(_image->width(), _image->height(), comp_id, image_fmt, image_size)); + memcpy(_image->buffer(), undistort_image.data(), image_size); + return TY_STATUS_OK; + } else { + std::cout << "Image decoding failed." 
<< std::endl; + return TY_STATUS_ERROR; + } +} + +int ImageProcesser::show() +{ + if(!_image) return -1; +#ifdef OPENCV_DEPENDENCIES + cv::Mat display; + switch(_image->pixelFormat()) + { + case TY_PIXEL_FORMAT_MONO: + { + display = cv::Mat(_image->height(), _image->width(), CV_8U, _image->buffer()); + break; + } + case TY_PIXEL_FORMAT_MONO16: + { + display = cv::Mat(_image->height(), _image->width(), CV_16U, _image->buffer()); + break; + } + case TY_PIXEL_FORMAT_BGR: + { + display = cv::Mat(_image->height(), _image->width(), CV_8UC3, _image->buffer()); + break; + } + case TY_PIXEL_FORMAT_BGR48: + { + display = cv::Mat(_image->height(), _image->width(), CV_16UC3, _image->buffer()); + break; + } + case TY_PIXEL_FORMAT_DEPTH16: + { + DepthImageRender(); + display = cv::Mat(_image->height(), _image->width(), CV_8UC3, _image->buffer()); + break; + } + default: + { + break; + } + } + + if(!display.empty()) { + hasWin = true; + cv::imshow(win_name.c_str(), display); + int key = cv::waitKey(1); + return key; + } + else + std::cout << "Unknown image encoding format." 
<< std::endl; + #endif + return 0; +} + +void ImageProcesser::clear() +{ +#ifdef OPENCV_DEPENDENCIES + if (hasWin) { + cv::destroyWindow(win_name.c_str()); + } +#endif +} + +TYFrame::TYFrame(const TY_FRAME_DATA& frame) +{ + bufferSize = frame.bufferSize; + userBuffer.resize(bufferSize); + memcpy(userBuffer.data(), frame.userBuffer, bufferSize); + +#define TY_IMAGE_MOVE(src, dst, from, to) do { \ + (to) = (from); \ + (to.buffer) = reinterpret_cast((std::intptr_t(dst)) + (std::intptr_t(from.buffer) - std::intptr_t(src)));\ +}while(0) + + for (int i = 0; i < frame.validCount; i++) { + TY_IMAGE_DATA img; + if (frame.image[i].status != TY_STATUS_OK) continue; + + // get depth image + if (frame.image[i].componentID == TY_COMPONENT_DEPTH_CAM) { + TY_IMAGE_MOVE(frame.userBuffer, userBuffer.data(), frame.image[i], img); + _images[TY_COMPONENT_DEPTH_CAM] = std::shared_ptr(new TYImage(img)); + } + // get left ir image + if (frame.image[i].componentID == TY_COMPONENT_IR_CAM_LEFT) { + TY_IMAGE_MOVE(frame.userBuffer, userBuffer.data(), frame.image[i], img); + _images[TY_COMPONENT_IR_CAM_LEFT] = std::shared_ptr(new TYImage(img)); + } + // get right ir image + if (frame.image[i].componentID == TY_COMPONENT_IR_CAM_RIGHT) { + TY_IMAGE_MOVE(frame.userBuffer, userBuffer.data(), frame.image[i], img); + _images[TY_COMPONENT_IR_CAM_RIGHT] = std::shared_ptr(new TYImage(img)); + } + // get color image + if (frame.image[i].componentID == TY_COMPONENT_RGB_CAM) { + TY_IMAGE_MOVE(frame.userBuffer, userBuffer.data(), frame.image[i], img); + _images[TY_COMPONENT_RGB_CAM] = std::shared_ptr(new TYImage(img)); + } + } +} + +TYFrame::~TYFrame() +{ + +} + + +TYFrameParser::TYFrameParser(uint32_t max_queue_size, const TY_ISP_HANDLE isp_handle) +{ + _max_queue_size = max_queue_size; + isRuning = true; + + setImageProcesser(TY_COMPONENT_DEPTH_CAM, std::shared_ptr(new ImageProcesser("depth"))); + setImageProcesser(TY_COMPONENT_IR_CAM_LEFT, std::shared_ptr(new ImageProcesser("Left-IR"))); + 
setImageProcesser(TY_COMPONENT_IR_CAM_RIGHT, std::shared_ptr(new ImageProcesser("Right-IR"))); + setImageProcesser(TY_COMPONENT_RGB_CAM, std::shared_ptr(new ImageProcesser("color", nullptr, isp_handle))); + + processThread_ = std::thread(&TYFrameParser::display, this); +} + +TYFrameParser::~TYFrameParser() +{ + isRuning = false; + processThread_.join(); +} + +int TYFrameParser::setImageProcesser(TY_COMPONENT_ID id, std::shared_ptr proc) +{ + stream[id] = proc; + return 0; +} + +int TYFrameParser::doProcess(const std::shared_ptr& img) +{ + auto depth = img->depthImage(); + auto color = img->colorImage(); + auto left_ir = img->leftIRImage(); + auto right_ir = img->rightIRImage(); + + if (left_ir) { + stream[TY_COMPONENT_IR_CAM_LEFT]->parse(left_ir); + } + + if (right_ir) { + stream[TY_COMPONENT_IR_CAM_RIGHT]->parse(right_ir); + } + + if (color) { + stream[TY_COMPONENT_RGB_CAM]->parse(color); + } + + if (depth) { + stream[TY_COMPONENT_DEPTH_CAM]->parse(depth); + } + return 0; +} + +void TYFrameParser::display() +{ + int ret = 0; + while(isRuning) { + if(images.size()) { + std::unique_lock lock(_queue_lock); + std::shared_ptr img = images.front(); + + if(img) { + images.pop(); + doProcess(img); + } + } + + for(auto& iter : stream) { + ret = iter.second->show(); + if(ret > 0) { + if(func_keyboard_event) func_keyboard_event(ret, user_data); + } + } + } +} + +inline void TYFrameParser::ImageQueueSizeCheck() +{ + while(images.size() >= _max_queue_size) + images.pop(); +} + +void TYFrameParser::update(const std::shared_ptr& frame) +{ + std::unique_lock lock(_queue_lock); + if(frame) { + ImageQueueSizeCheck(); + images.push(frame); +#ifndef OPENCV_DEPENDENCIES + auto depth = frame->depthImage(); + auto color = frame->colorImage(); + auto left_ir = frame->leftIRImage(); + auto right_ir = frame->rightIRImage(); + + if (left_ir) { + auto image = left_ir; + std::cout << "Left" << " image size : " << image->width() << " x " << image->height() << std::endl; + } + + if (right_ir) { 
+ auto image = right_ir; + std::cout << "Right" << " image size : " << image->width() << " x " << image->height() << std::endl; + } + + if (color) { + auto image = color; + std::cout << "Color" << " image size : " << image->width() << " x " << image->height() << std::endl; + } + + if (depth) { + auto image = depth; + std::cout << "Depth" << " image size : " << image->width() << " x " << image->height() << std::endl; + } + +#endif + } +} +}//namespace percipio_layer diff --git a/image_capture/third_party/percipio/sample_v2/hpp/Device.hpp b/image_capture/third_party/percipio/sample_v2/hpp/Device.hpp new file mode 100644 index 0000000..c421796 --- /dev/null +++ b/image_capture/third_party/percipio/sample_v2/hpp/Device.hpp @@ -0,0 +1,239 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "Frame.hpp" + +namespace percipio_layer { + +class TYDevice; +class DeviceList; +class TYContext; +class TYFrame; +class FastCamera; + +static std::string parseInterfaceID(std::string &ifaceId) +{ + std::string type_s = ifaceId.substr(0, ifaceId.find('-')); + if ("usb" == type_s) { + //add usb specific parse if needed + } + if ("eth" == type_s || "wifi" == type_s) { + //eth-2c:f0:5d:ac:5d:6265eea8c0 + //eth-2c:f0:5d:ac:5d:62 + size_t IdLength = 18 + type_s.length(); + std::string new_id = ifaceId.substr(0, IdLength); + // 65eea8c0 + std::string ip_s = ifaceId.substr(IdLength, ifaceId.size() - IdLength); + //base = 16 + uint32_t ip = static_cast(std::stoul(ip_s, nullptr, 16)); + uint8_t *ip_arr = (uint8_t *)&ip; + new_id += " ip:"; + for(int i = 0; i < 3; i++) { + new_id += std::to_string((uint32_t) ip_arr[i]) + "."; + } + new_id += std::to_string((uint32_t) ip_arr[3]); + return new_id; + } + return ifaceId; +} + +class TYDeviceInfo +{ + public: + ~TYDeviceInfo(); + TYDeviceInfo(TYDeviceInfo const&) = delete; + void operator=(TYDeviceInfo const&) = delete; + + friend class TYDevice; + friend class DeviceList; + + 
const char* id() { return _info.id; } + const TY_INTERFACE_INFO& Interface() { return _info.iface; } + + const char* vendorName() + { + //specific Vendor name for some camera + if (strlen(_info.userDefinedName) != 0) { + return _info.userDefinedName; + } else { + return _info.vendorName; + } + } + const char* modelName() { return _info.modelName; } + const char* buildHash() { return _info.buildHash; } + const char* configVersion() { return _info.configVersion; } + + const TY_VERSION_INFO& hardwareVersion() { return _info.hardwareVersion; } + const TY_VERSION_INFO& firmwareVersion() { return _info.firmwareVersion; } + + const char* mac(); + const char* ip(); + const char* netmask(); + const char* gateway(); + const char* broadcast(); + private: + TYDeviceInfo(const TY_DEVICE_BASE_INFO& info); + TY_DEVICE_BASE_INFO _info; +}; + +typedef std::function EventCallback; +typedef std::pair event_pair; +static void eventCallback(TY_EVENT_INFO *event_info, void *userdata); +class TYDevice +{ + public: + ~TYDevice(); + void operator=(TYDevice const&) = delete; + + friend class FastCamera; + friend class TYStream; + friend class DeviceList; + friend class TYPropertyManager; + friend void eventCallback(TY_EVENT_INFO *event_info, void *userdata); + + std::shared_ptr getDeviceInfo(); + void registerEventCallback (const TY_EVENT eventID, void* data, EventCallback cb); + + private: + TYDevice(const TY_DEV_HANDLE handle, const TY_DEVICE_BASE_INFO& info); + + TY_DEV_HANDLE _handle; + TY_DEVICE_BASE_INFO _dev_info; + + std::map _eventCallbackMap; + + std::function _event_callback; + void onDeviceEventCallback(const TY_EVENT_INFO *event_info); +}; + +class DeviceList { + public: + ~DeviceList(); + DeviceList(DeviceList const&) = delete; + void operator=(DeviceList const&) = delete; + + bool empty() { return devs.size() == 0; } + int devCount() { return devs.size(); } + + std::shared_ptr getDeviceInfo(int idx); + std::shared_ptr getDevice(int idx); + std::shared_ptr getDeviceBySN(const 
char* sn); + std::shared_ptr getDeviceByIP(const char* ip); + + friend class TYContext; + private: + std::vector devs; + static std::set gifaces; + DeviceList(std::vector& devices); +}; + +enum ForceIPStyle { + ForceIPStyleDynamic = 0, + ForceIPStyleForce = 1, + ForceIPStyleStatic = 2 +}; + +class TYContext { +public: + static TYContext& getInstance() { + static TYContext instance; + return instance; + } + + TYContext(TYContext const&) = delete; + void operator=(TYContext const&) = delete; + + std::shared_ptr queryDeviceList(const char *iface = nullptr); + std::shared_ptr queryNetDeviceList(const char *iface = nullptr); + + bool ForceNetDeviceIP(const ForceIPStyle style, const std::string& mac, const std::string& ip, const std::string& mask, const std::string& gateway); + +private: + TYContext() { + ASSERT_OK(TYInitLib()); + TY_VERSION_INFO ver; + ASSERT_OK( TYLibVersion(&ver) ); + std::cout << "=== lib version: " << ver.major << "." << ver.minor << "." << ver.patch << std::endl; + } + + ~TYContext() { + ASSERT_OK(TYDeinitLib()); + } +}; + +class TYCamInterface +{ + public: + TYCamInterface(); + ~TYCamInterface(); + + TY_STATUS Reset(); + void List(std::vector& ); + private: + std::vector ifaces; +}; + +class FastCamera +{ + public: + enum stream_idx + { + stream_depth = 0x1, + stream_color = 0x2, + stream_ir_left = 0x4, + stream_ir_right = 0x8, + stream_ir = stream_ir_left + }; + friend class TYFrame; + FastCamera(); + FastCamera(const char* sn); + ~FastCamera(); + + virtual TY_STATUS open(const char* sn); + TY_STATUS setIfaceId(const char* inf); + virtual TY_STATUS openByIP(const char* ip); + virtual bool has_stream(stream_idx idx); + virtual TY_STATUS stream_enable(stream_idx idx); + virtual TY_STATUS stream_disable(stream_idx idx); + + virtual TY_STATUS start(); + virtual TY_STATUS stop(); + virtual void close(); + + std::shared_ptr tryGetFrames(uint32_t timeout_ms); + + TY_DEV_HANDLE handle() { + if (!device) { + // std::cerr << "Error: Device handle accessed 
but device is null!" << std::endl; + return 0; + } + return device->_handle; + } + + void RegisterOfflineEventCallback(EventCallback cb, void* data) { device->registerEventCallback(TY_EVENT_DEVICE_OFFLINE, data, cb); } + + private: + std::string mIfaceId; + std::mutex _dev_lock; + + TY_COMPONENT_ID components = 0; +#define BUF_CNT (3) + + bool isRuning = false; + std::shared_ptr fetchFrames(uint32_t timeout_ms); + TY_STATUS doStop(); + + std::shared_ptr device; + std::vector stream_buffer[BUF_CNT]; +}; + +} diff --git a/image_capture/third_party/percipio/sample_v2/hpp/Frame.hpp b/image_capture/third_party/percipio/sample_v2/hpp/Frame.hpp new file mode 100644 index 0000000..b094d17 --- /dev/null +++ b/image_capture/third_party/percipio/sample_v2/hpp/Frame.hpp @@ -0,0 +1,126 @@ +#pragma once + +#include + +#include +#include +#include +#include + +#include "common.hpp" + +namespace percipio_layer { + +class TYImage +{ + public: + TYImage(); + TYImage(const TY_IMAGE_DATA& image); + TYImage(const TYImage& src); + TYImage(int32_t width, int32_t height, TY_COMPONENT_ID compID, TY_PIXEL_FORMAT format, int32_t size); + + ~TYImage(); + + int32_t size() const { return image_data.size; } + int32_t width() const { return image_data.width; } + int32_t height() const { return image_data.height; } + void* buffer() const { return image_data.buffer; } + int32_t status() const { return image_data.status; } + uint64_t timestamp() const { return image_data.timestamp; } + int32_t imageIndex() const { return image_data.imageIndex; } + + bool resize(int w, int h); + + TY_PIXEL_FORMAT pixelFormat() const { return image_data.pixelFormat; } + TY_COMPONENT_ID componentID() const { return image_data.componentID; } + + const TY_IMAGE_DATA* image() const { return &image_data; } + + private: + bool m_isOwner = false; + TY_IMAGE_DATA image_data; +}; + +class TYFrame +{ + public: + ~TYFrame(); + void operator=(TYFrame const&) = delete; + TYFrame(TYFrame const&) = delete; + TYFrame(const 
TY_FRAME_DATA& frame); + + std::shared_ptr depthImage() { return _images[TY_COMPONENT_DEPTH_CAM];} + std::shared_ptr colorImage() { return _images[TY_COMPONENT_RGB_CAM];} + std::shared_ptr leftIRImage() { return _images[TY_COMPONENT_IR_CAM_LEFT];} + std::shared_ptr rightIRImage() { return _images[TY_COMPONENT_IR_CAM_RIGHT];} + + private: + int32_t bufferSize = 0; + std::vector userBuffer; + + typedef std::map> ty_image; + ty_image _images; +}; + +class ImageProcesser +{ + public: + ImageProcesser(const char* win, const TY_CAMERA_CALIB_INFO* calib_data = nullptr, const TY_ISP_HANDLE isp_handle = nullptr); + ~ImageProcesser() {clear();} + + virtual int parse(const std::shared_ptr& image); + int DepthImageRender(); + TY_STATUS doUndistortion(); + int show(); + void clear(); + + TY_ISP_HANDLE isp_handle() const { return color_isp_handle; } + + const std::shared_ptr& image() const { return _image; } + const std::string& win() { return win_name; } +protected: + std::shared_ptr _image; + + private: + std::string win_name; + TY_ISP_HANDLE color_isp_handle; + std::shared_ptr _calib_data; + bool hasWin; +}; + + +typedef void (*TYFrameKeyBoardEventCallback) (int, void*); + +typedef std::map> ty_stream; +class TYFrameParser +{ + public: + TYFrameParser(uint32_t max_queue_size = 4, const TY_ISP_HANDLE isp_handle = nullptr); + ~TYFrameParser(); + + void RegisterKeyBoardEventCallback(TYFrameKeyBoardEventCallback cb, void* data) { + user_data = data; + func_keyboard_event = cb; + } + int setImageProcesser(TY_COMPONENT_ID id, std::shared_ptr proc); + virtual int doProcess(const std::shared_ptr& frame); + void update(const std::shared_ptr& frame); + +protected: + ty_stream stream; + private: + std::mutex _queue_lock; + uint32_t _max_queue_size; + + bool isRuning; + std::thread processThread_; + + void* user_data; + TYFrameKeyBoardEventCallback func_keyboard_event; + + std::queue> images; + + inline void ImageQueueSizeCheck(); + inline void display(); +}; +} diff --git 
a/image_capture/third_party/percipio/sample_v2/如何设置分辨率和帧率.md b/image_capture/third_party/percipio/sample_v2/如何设置分辨率和帧率.md new file mode 100644 index 0000000..23ee2b1 --- /dev/null +++ b/image_capture/third_party/percipio/sample_v2/如何设置分辨率和帧率.md @@ -0,0 +1,221 @@ +# 如何设置图像分辨率和帧率 + +根据示例程序 `sample_v2` 的代码,以下是设置图像分辨率和帧率的方法: + +## 设置图像分辨率 + +有两种方式可以设置图像分辨率: + +### 方法1:使用 TY_INT_WIDTH 和 TY_INT_HEIGHT(直接设置宽高) + +```cpp +#include "TYApi.h" + +// 获取设备句柄(假设已经打开设备) +TY_DEV_HANDLE hDevice = ...; // 从 FastCamera::handle() 获取 + +// 设置宽度和高度 +TY_STATUS status; + +// 设置宽度(例如:1280) +status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_WIDTH, 1280); +if (status != TY_STATUS_OK) { + // 处理错误 +} + +// 设置高度(例如:960) +status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_HEIGHT, 960); +if (status != TY_STATUS_OK) { + // 处理错误 +} + +// 对于深度相机 +status = TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_WIDTH, 640); +status = TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_HEIGHT, 480); +``` + +### 方法2:使用 TY_ENUM_IMAGE_MODE(推荐,同时设置分辨率和像素格式) + +```cpp +#include "TYApi.h" +#include "TYDefs.h" + +TY_DEV_HANDLE hDevice = ...; + +// 获取支持的图像模式列表 +TY_ENUM_ENTRY mode_entry[10]; +uint32_t num; +TY_STATUS status = TYGetEnumEntryInfo(hDevice, TY_COMPONENT_RGB_CAM, + TY_ENUM_IMAGE_MODE, mode_entry, 10, &num); +if (status == TY_STATUS_OK) { + // 查看所有支持的模式 + for (uint32_t i = 0; i < num; i++) { + printf("Mode %d: %s (value: 0x%x)\n", i, mode_entry[i].description, mode_entry[i].value); + } + + // 设置图像模式(例如:RGB 1280x960) + // 可以使用预定义的模式,如: + // TY_IMAGE_MODE_RGB_1280x960 + // TY_IMAGE_MODE_RGB_640x480 + // TY_IMAGE_MODE_MONO_640x480 + // 等等 + + TY_IMAGE_MODE img_mode = TY_IMAGE_MODE_RGB_1280x960; + status = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, img_mode); + if (status != TY_STATUS_OK) { + // 处理错误 + } +} + +// 或者使用第一个可用的模式(默认模式) +TY_IMAGE_MODE img_mode; +status = TYGetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, &img_mode); +if (status == TY_STATUS_OK) { + // 从图像模式中解析宽高 
+ int width = TYImageWidth(img_mode); + int height = TYImageHeight(img_mode); + printf("Current mode: %dx%d\n", width, height); +} +``` + +### 使用示例代码中的辅助函数 + +在 `Utils.hpp` 中提供了辅助函数: + +```cpp +#include "Utils.hpp" + +TY_DEV_HANDLE hDevice = ...; +TY_COMPONENT_ID compID = TY_COMPONENT_RGB_CAM; +TY_IMAGE_MODE image_mode; + +// 获取默认图像模式 +TY_STATUS status = get_default_image_mode(hDevice, compID, image_mode); + +// 或者获取指定索引的模式 +status = get_image_mode(hDevice, compID, image_mode, 0); // 获取第一个模式 + +// 设置图像模式 +status = TYSetEnum(hDevice, compID, TY_ENUM_IMAGE_MODE, image_mode); +``` + +## 设置帧率 + +帧率通常通过触发参数(Trigger Parameter)来设置: + +```cpp +#include "TYApi.h" +#include "TYDefs.h" + +TY_DEV_HANDLE hDevice = ...; + +// 设置触发参数(包含帧率) +TY_TRIGGER_PARAM trigger_param; +trigger_param.mode = TY_TRIGGER_MODE_M_PER; // 主模式,周期性触发 +trigger_param.fps = 30; // 设置帧率为 30 FPS +trigger_param.rsvd = 0; + +TY_STATUS status = TYSetStruct(hDevice, TY_COMPONENT_DEVICE, + TY_STRUCT_TRIGGER_PARAM, + &trigger_param, sizeof(trigger_param)); +if (status != TY_STATUS_OK) { + // 处理错误 +} +``` + +### 触发模式说明 + +- `TY_TRIGGER_MODE_OFF`: 关闭触发(连续模式) +- `TY_TRIGGER_MODE_ON`: 单次触发 +- `TY_TRIGGER_MODE_M_PER`: 主模式,周期性发送触发信号,需要设置 `fps` 参数 + +## 完整示例 + +以下是一个完整的示例,展示如何在 `FastCamera` 类中设置分辨率和帧率: + +```cpp +#include "Device.hpp" +#include "TYApi.h" + +using namespace percipio_layer; + +// 打开相机 +FastCamera cam; +TY_STATUS status = cam.open(nullptr); // 或使用序列号 +if (status != TY_STATUS_OK) { + return; +} + +TY_DEV_HANDLE hDevice = cam.handle(); + +// 1. 设置图像分辨率(方法1:直接设置宽高) +status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_WIDTH, 1280); +status = TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_HEIGHT, 960); + +// 或者(方法2:使用图像模式) +TY_IMAGE_MODE img_mode = TY_IMAGE_MODE_RGB_1280x960; +status = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, img_mode); + +// 2. 
设置帧率 +TY_TRIGGER_PARAM trigger_param; +trigger_param.mode = TY_TRIGGER_MODE_M_PER; +trigger_param.fps = 30; // 30 FPS +trigger_param.rsvd = 0; +status = TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, + &trigger_param, sizeof(trigger_param)); + +// 3. 启用流并开始采集 +cam.stream_enable(FastCamera::stream_color); +status = cam.start(); + +// 4. 获取帧 +auto frame = cam.tryGetFrames(1000); // 超时时间 1000ms +if (frame) { + auto color_img = frame->colorImage(); + if (color_img) { + printf("Image size: %dx%d\n", color_img->width(), color_img->height()); + } +} +``` + +## 注意事项 + +1. **设置时机**:分辨率应该在启动采集(`start()`)之前设置 +2. **组件ID**:不同的组件(RGB相机、深度相机、IR相机)需要分别设置 + - `TY_COMPONENT_RGB_CAM`: RGB彩色相机 + - `TY_COMPONENT_DEPTH_CAM`: 深度相机 + - `TY_COMPONENT_IR_CAM_LEFT`: 左IR相机 + - `TY_COMPONENT_IR_CAM_RIGHT`: 右IR相机 +3. **支持的参数**:不是所有设备都支持所有分辨率和帧率,建议先查询支持的参数范围 +4. **图像模式**:使用 `TY_ENUM_IMAGE_MODE` 时,模式同时包含分辨率和像素格式信息 + +## 查询支持的参数 + +```cpp +// 查询宽度范围 +TY_INT_RANGE width_range; +TY_STATUS status = TYGetIntRange(hDevice, TY_COMPONENT_RGB_CAM, + TY_INT_WIDTH, &width_range); +if (status == TY_STATUS_OK) { + printf("Width range: %d - %d\n", width_range.min, width_range.max); +} + +// 查询所有支持的图像模式 +TY_ENUM_ENTRY mode_entry[20]; +uint32_t num; +status = TYGetEnumEntryInfo(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, + mode_entry, 20, &num); +if (status == TY_STATUS_OK) { + for (uint32_t i = 0; i < num; i++) { + printf("Mode %d: %s\n", i, mode_entry[i].description); + } +} +``` + +## 参考文件 + +- `image_capture/camera_sdk/sample_v2/cpp/Device.cpp`: 设备操作示例 +- `image_capture/camera_sdk/common/BayerISP.hpp`: 图像模式使用示例(第97-111行) +- `image_capture/camera_sdk/include/TYDefs.h`: 定义和枚举值 +- `image_capture/camera_sdk/include/TYApi.h`: API函数声明 + diff --git a/scripts/clear_redis_task_keys_DB0_DB1.bat b/scripts/clear_redis_task_keys_DB0_DB1.bat new file mode 100644 index 0000000..d67fba3 --- /dev/null +++ b/scripts/clear_redis_task_keys_DB0_DB1.bat @@ -0,0 +1,54 @@ +@echo off +REM 清空 Redis 
中的任务触发键,避免旧任务在程序重启后再次触发 +REM 依赖:redis-cli 已加入 PATH +REM 参数:可根据需要修改下方 HOST / PORT + +set HOST=127.0.0.1 +set PORT=6379 + +echo Clearing task trigger keys on %HOST%:%PORT% DB0 and result keys on DB1 ... + +for %%D in (0 1) do ( + echo - Clearing on DB %%D ... + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET vision_task_flag 0 + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET vision_task_side "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET vision_task_time "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET vision_task_beam_length 0 + + REM 仅在 DB1 追加清空结果键 + if %%D==1 ( + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET result_status "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET result_type "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET slot_occupied "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lat_mm_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lat_mm_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lat_mm_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lon_mm_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lon_mm_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET offset_lon_mm_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_left_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_left_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_left_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_right_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_right_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET hole_def_mm_right_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET rotation_angle_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET rotation_angle_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET 
rotation_angle_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET beam_def_mm_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET beam_def_mm_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET beam_def_mm_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET rack_def_mm_value "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET rack_def_mm_threshold "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET rack_def_mm_warning_alarm "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET result_barcodes "" + redis-cli -a 123456 -h %HOST% -p %PORT% -n %%D SET last_update_time "" + ) +) + +if %ERRORLEVEL% EQU 0 ( + echo Done. +) else ( + echo Failed. Please check redis-cli availability and connection. +) + diff --git a/scripts/simulate_wms_task - flag=3.bat b/scripts/simulate_wms_task - flag=3.bat new file mode 100644 index 0000000..ba268f9 --- /dev/null +++ b/scripts/simulate_wms_task - flag=3.bat @@ -0,0 +1,17 @@ +@echo off +echo [1/3] Setting task side to 'left'... +redis-cli -a 123456 SET vision_task_side "left" + +echo [2/3] Setting task time... +redis-cli -a 123456 SET vision_task_time "%date% %time%" + +echo [3/4] Setting task beam length... +redis-cli -a 123456 SET vision_task_beam_length 2180 + +echo [4/4] Triggering task (flag=3)... +redis-cli -a 123456 SET vision_task_flag 3 + +echo. +echo Task triggered! Check the application logs. +echo. +pause diff --git a/scripts/simulate_wms_task - flag=5.bat b/scripts/simulate_wms_task - flag=5.bat new file mode 100644 index 0000000..c23aa14 --- /dev/null +++ b/scripts/simulate_wms_task - flag=5.bat @@ -0,0 +1,14 @@ +@echo off +echo [1/3] Setting task side to 'left'... +redis-cli -a 123456 SET vision_task_side "left" + +echo [2/3] Setting task time... +redis-cli -a 123456 SET vision_task_time "%date% %time%" + +echo [3/3] Triggering task (flag=5)... +redis-cli -a 123456 SET vision_task_flag 5 + +echo. +echo Task triggered! Check the application logs. +echo. 
+pause diff --git a/scripts/simulate_wms_task-flag=4.bat b/scripts/simulate_wms_task-flag=4.bat new file mode 100644 index 0000000..cc5702a --- /dev/null +++ b/scripts/simulate_wms_task-flag=4.bat @@ -0,0 +1,14 @@ +@echo off +echo [1/3] Setting task side to 'left'... +redis-cli -a 123456 SET vision_task_side "left" + +echo [2/3] Setting task time... +redis-cli -a 123456 SET vision_task_time "%date% %time%" + +echo [3/3] Triggering task (flag=4)... +redis-cli -a 123456 SET vision_task_flag 4 + +echo. +echo Task triggered! Check the application logs. +echo. +pause diff --git a/scripts/读写分离脚本DB0-4个值_DB1-26个值.bat b/scripts/读写分离脚本DB0-4个值_DB1-26个值.bat new file mode 100644 index 0000000..24d8e8d --- /dev/null +++ b/scripts/读写分离脚本DB0-4个值_DB1-26个值.bat @@ -0,0 +1,109 @@ +@echo off +REM ======================================================== +REM 批量创建 Redis 测试数据 (读写分离版) +REM Task Keys -> DB 0 +REM Result Keys -> DB 1 +REM ======================================================== + +echo [1/2] Creating WMS Trigger Keys in DB 0... + +REM 1. vision_task_flag (int) +redis-cli -a 123456 -n 0 SET vision_task_flag 2 + +REM 2. vision_task_side (string) +redis-cli -a 123456 -n 0 SET vision_task_side "left" + +REM 3. vision_task_time (string) +redis-cli -a 123456 -n 0 SET vision_task_time "2025-12-08 10:22:13" + +REM 4. vision_task_beam_length (int) +redis-cli -a 123456 -n 0 SET vision_task_beam_length 0 + + +echo [2/2] Creating Vision Result Keys in DB 1 (Initialized to Defaults)... + +REM 1. result_status (string) +redis-cli -a 123456 -n 1 SET result_status "" + +REM 2. result_type (int) +redis-cli -a 123456 -n 1 SET result_type 0 + +REM 3. slot_occupied (bool) +redis-cli -a 123456 -n 1 SET slot_occupied "false" + +REM 4. offset_lat_mm_value (float) +redis-cli -a 123456 -n 1 SET offset_lat_mm_value 0.0 + +REM 5. offset_lat_mm_threshold (JSON) +redis-cli -a 123456 -n 1 SET offset_lat_mm_threshold "{}" + +REM 6. 
offset_lat_mm_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET offset_lat_mm_warning_alarm "{}" + +REM 7. offset_lon_mm_value (float) +redis-cli -a 123456 -n 1 SET offset_lon_mm_value 0.0 + +REM 8. offset_lon_mm_threshold (JSON) +redis-cli -a 123456 -n 1 SET offset_lon_mm_threshold "{}" + +REM 9. offset_lon_mm_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET offset_lon_mm_warning_alarm "{}" + +REM 10. hole_def_mm_left_value (float) +redis-cli -a 123456 -n 1 SET hole_def_mm_left_value 0.0 + +REM 11. hole_def_mm_left_threshold (JSON) +redis-cli -a 123456 -n 1 SET hole_def_mm_left_threshold "{}" + +REM 12. hole_def_mm_left_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET hole_def_mm_left_warning_alarm "{}" + +REM 13. hole_def_mm_right_value (float) +redis-cli -a 123456 -n 1 SET hole_def_mm_right_value 0.0 + +REM 14. hole_def_mm_right_threshold (JSON) +redis-cli -a 123456 -n 1 SET hole_def_mm_right_threshold "{}" + +REM 15. hole_def_mm_right_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET hole_def_mm_right_warning_alarm "{}" + +REM 16. rotation_angle_value (float) +redis-cli -a 123456 -n 1 SET rotation_angle_value 0.0 + +REM 17. rotation_angle_threshold (JSON) +redis-cli -a 123456 -n 1 SET rotation_angle_threshold "{}" + +REM 18. rotation_angle_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET rotation_angle_warning_alarm "{}" + +REM 19. beam_def_mm_value (float) +redis-cli -a 123456 -n 1 SET beam_def_mm_value 0.0 + +REM 20. beam_def_mm_threshold (JSON) +redis-cli -a 123456 -n 1 SET beam_def_mm_threshold "{}" + +REM 21. beam_def_mm_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET beam_def_mm_warning_alarm "{}" + +REM 22. rack_def_mm_value (float) +redis-cli -a 123456 -n 1 SET rack_def_mm_value 0.0 + +REM 23. rack_def_mm_threshold (JSON) +redis-cli -a 123456 -n 1 SET rack_def_mm_threshold "{}" + +REM 24. rack_def_mm_warning_alarm (JSON) +redis-cli -a 123456 -n 1 SET rack_def_mm_warning_alarm "{}" + +REM 25. 
result_barcodes (JSON) +redis-cli -a 123456 -n 1 SET result_barcodes "{}" + +REM 26. last_update_time (string) +redis-cli -a 123456 -n 1 SET last_update_time "" + +echo. +echo ========================================== +echo Data populated: +echo DB 0: 4 Tasks keys +echo DB 1: 26 Result keys (Defaults) +echo ========================================== +pause