Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update yolo example for stereo/no-stereo - Humble #447

Merged
merged 5 commits into from
Dec 7, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions depthai_examples/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,11 @@ dai_add_node_ros2(stereo_inertial_node src/stereo_inertial_publisher.cpp)
dai_add_node_ros2(feature_tracker src/feature_tracker_publisher.cpp)
dai_add_node_ros2(stereo_node src/stereo_publisher.cpp)
dai_add_node_ros2(yolov4_spatial_node src/yolov4_spatial_publisher.cpp)
dai_add_node_ros2(yolov4_node src/yolov4_publisher.cpp)

target_compile_definitions(mobilenet_node PRIVATE BLOB_NAME="${mobilenet_blob_name}")
target_compile_definitions(yolov4_spatial_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")
target_compile_definitions(yolov4_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")
target_compile_definitions(stereo_inertial_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}")

if($ENV{ROS_DISTRO} STREQUAL "galactic")
Expand All @@ -124,6 +126,7 @@ install(TARGETS
stereo_inertial_node
stereo_node
yolov4_spatial_node
yolov4_node
feature_tracker
DESTINATION lib/${PROJECT_NAME})

Expand Down
54 changes: 40 additions & 14 deletions depthai_examples/launch/yolov4_publisher.launch.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from launch import LaunchDescription, launch_description_sources
from launch.actions import IncludeLaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.conditions import IfCondition,UnlessCondition
from launch.substitutions import LaunchConfiguration
import launch_ros.actions
import launch_ros.descriptions
Expand Down Expand Up @@ -131,7 +132,14 @@ def generate_launch_description():
'monoResolution',
default_value=monoResolution,
description='Contains the resolution of the Mono Cameras. Available resolutions are 800p, 720p & 400p for OAK-D & 480p for OAK-D-Lite.')


# declare_spatial_camera_cmd = DeclareLaunchArgument(
# 'spatial_camera',
# default_value="true",
# description='Enable spatial camera'
# )


urdf_launch = IncludeLaunchDescription(
launch_description_sources.PythonLaunchDescriptionSource(
os.path.join(urdf_launch_dir, 'urdf_launch.py')),
Expand All @@ -146,15 +154,32 @@ def generate_launch_description():
'cam_pitch' : cam_pitch,
'cam_yaw' : cam_yaw}.items())
yolov4_spatial_node = launch_ros.actions.Node(
package='depthai_examples', executable='yolov4_spatial_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
{'monoResolution': monoResolution},
{'spatial_camera': spatial_camera}])
package='depthai_examples', executable='yolov4_spatial_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
{'monoResolution': monoResolution},
{'subpixel': subpixel},
{'lrCheckTresh': lrCheckTresh},
{'confidence': confidence},
],
condition=IfCondition(LaunchConfiguration('spatial_camera'))
)

yolov4_node = launch_ros.actions.Node(
package='depthai_examples', executable='yolov4_node',
output='screen',
parameters=[{'tf_prefix': tf_prefix},
{'camera_param_uri': camera_param_uri},
{'sync_nn': sync_nn},
{'nnName': nnName},
{'resourceBaseFolder': resourceBaseFolder},
],
condition=UnlessCondition(LaunchConfiguration('spatial_camera'))
)

rviz_node = launch_ros.actions.Node(
package='rviz2', executable='rviz2', output='screen',
Expand All @@ -181,11 +206,12 @@ def generate_launch_description():
ld.add_action(declare_sync_nn_cmd)
ld.add_action(urdf_launch)

if spatial_camera == True:
ld.add_action(declare_subpixel_cmd)
ld.add_action(declare_lrCheckTresh_cmd)
ld.add_action(declare_monoResolution_cmd)
ld.add_action(declare_subpixel_cmd)
ld.add_action(declare_lrCheckTresh_cmd)
ld.add_action(declare_monoResolution_cmd)

ld.add_action(yolov4_spatial_node)
ld.add_action(yolov4_node)

return ld

141 changes: 141 additions & 0 deletions depthai_examples/src/yolov4_publisher.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@

#include <cstdio>
#include <iostream>

#include "camera_info_manager/camera_info_manager.hpp"
#include "depthai_bridge/BridgePublisher.hpp"
#include "depthai_bridge/ImageConverter.hpp"
#include "depthai_bridge/ImgDetectionConverter.hpp"
#include "rclcpp/executors.hpp"
#include "rclcpp/node.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "vision_msgs/msg/detection2_d_array.hpp"

// Includes common necessary includes for development using the depthai library
#include "depthai/device/DataQueue.hpp"
#include "depthai/device/Device.hpp"
#include "depthai/pipeline/Pipeline.hpp"
#include "depthai/pipeline/node/ColorCamera.hpp"
#include "depthai/pipeline/node/DetectionNetwork.hpp"
#include "depthai/pipeline/node/XLinkOut.hpp"

// COCO 80-class label list in the index order emitted by the tiny-YOLOv4 blob.
// Kept here so downstream consumers of the "detections" topic can map the
// numeric class id in each detection to a human-readable name.
const std::vector<std::string> label_map = {
    "person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat",
    "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse",
    "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag",
    "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove",
    "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon",
    "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza",
    "donut", "cake", "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor",
    "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
    "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"};

// Builds the DepthAI pipeline: an RGB camera preview feeding a tiny-YOLOv4
// detection network, with both the frames and the detections exported over
// XLink streams named "preview" and "detections".
//
// @param syncNN  when true, the published preview frames are the network's
//                passthrough (frame-synchronized with its detections);
//                otherwise raw camera preview frames are published directly.
// @param nnPath  absolute path of the .blob file to load into the network.
// @return        the fully linked pipeline, ready to be flashed to a device.
dai::Pipeline createPipeline(bool syncNN, std::string nnPath) {
    dai::Pipeline pipeline;

    // Processing nodes.
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto yoloNet = pipeline.create<dai::node::YoloDetectionNetwork>();

    // XLink outputs back to the host.
    auto previewOut = pipeline.create<dai::node::XLinkOut>();
    auto detectionsOut = pipeline.create<dai::node::XLinkOut>();
    previewOut->setStreamName("preview");
    detectionsOut->setStreamName("detections");

    // Camera: 416x416 planar BGR preview (the network's input size) at 40 fps.
    camRgb->setPreviewSize(416, 416);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
    camRgb->setFps(40);

    // Tiny-YOLOv4 decoding parameters (COCO, 80 classes).
    yoloNet->setConfidenceThreshold(0.5f);
    yoloNet->setNumClasses(80);
    yoloNet->setCoordinateSize(4);
    yoloNet->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
    yoloNet->setAnchorMasks({{"side26", {1, 2, 3}}, {"side13", {3, 4, 5}}});
    yoloNet->setIouThreshold(0.5f);
    yoloNet->setBlobPath(nnPath);
    yoloNet->setNumInferenceThreads(2);
    yoloNet->input.setBlocking(false);

    // Wiring: camera -> network; preview stream is either the network
    // passthrough (synced) or the raw camera preview.
    camRgb->preview.link(yoloNet->input);
    if(syncNN) {
        yoloNet->passthrough.link(previewOut->input);
    } else {
        camRgb->preview.link(previewOut->input);
    }
    yoloNet->out.link(detectionsOut->input);

    return pipeline;
}

int main(int argc, char** argv) {
    rclcpp::init(argc, argv);
    auto node = rclcpp::Node::make_shared("yolov4_node");

    std::string tfPrefix, resourceBaseFolder, nnPath;
    std::string camera_param_uri;
    std::string nnName(BLOB_NAME);  // Compiled-in default blob name for the model.
    bool syncNN;

    // ROS parameters controlling the node.
    node->declare_parameter("tf_prefix", "oak");
    node->declare_parameter("camera_param_uri", camera_param_uri);
    node->declare_parameter("sync_nn", true);
    node->declare_parameter("nnName", "");
    node->declare_parameter("resourceBaseFolder", "");

    node->get_parameter("tf_prefix", tfPrefix);
    node->get_parameter("camera_param_uri", camera_param_uri);
    node->get_parameter("sync_nn", syncNN);
    node->get_parameter("resourceBaseFolder", resourceBaseFolder);

    if(resourceBaseFolder.empty()) {
        throw std::runtime_error("Send the path to the resource folder containing NNBlob in \'resourceBaseFolder\' ");
    }

    // Only override the compiled-in blob name when the user actually supplied
    // one. The parameter defaults to "" above, so comparing against the "x"
    // sentinel (as sibling examples that default to "x" do) would clobber
    // BLOB_NAME with an empty string on the default configuration and produce
    // a broken nnPath of "<folder>/". "x" is still accepted as an explicit
    // "use the default" marker for compatibility with the other launch files.
    std::string nnParam;
    node->get_parameter("nnName", nnParam);
    if(!nnParam.empty() && nnParam != "x") {
        nnName = nnParam;
    }

    nnPath = resourceBaseFolder + "/" + nnName;
    dai::Pipeline pipeline = createPipeline(syncNN, nnPath);
    dai::Device device(pipeline);

    // Host-side queues for the streams created in createPipeline().
    auto colorQueue = device.getOutputQueue("preview", 30, false);
    auto detectionQueue = device.getOutputQueue("detections", 30, false);
    auto calibrationHandler = device.readCalibration();

    // Publish preview frames on color/image with camera info from the device
    // calibration (CAM_A is the RGB socket).
    dai::rosBridge::ImageConverter rgbConverter(tfPrefix + "_rgb_camera_optical_frame", false);
    auto rgbCameraInfo = rgbConverter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::CAM_A, -1, -1);
    dai::rosBridge::BridgePublisher<sensor_msgs::msg::Image, dai::ImgFrame> rgbPublish(colorQueue,
                                                                                       node,
                                                                                       std::string("color/image"),
                                                                                       std::bind(&dai::rosBridge::ImageConverter::toRosMsg,
                                                                                                 &rgbConverter,  // since the converter has the same frame name
                                                                                                                 // and image type is also same we can reuse it
                                                                                                 std::placeholders::_1,
                                                                                                 std::placeholders::_2),
                                                                                       30,
                                                                                       rgbCameraInfo,
                                                                                       "color");

    // Publish detections on color/yolov4_detections; 416x416 matches the
    // network input size set in createPipeline().
    dai::rosBridge::ImgDetectionConverter detConverter(tfPrefix + "_rgb_camera_optical_frame", 416, 416, false);
    dai::rosBridge::BridgePublisher<vision_msgs::msg::Detection2DArray, dai::ImgDetections> detectionPublish(
        detectionQueue,
        node,
        std::string("color/yolov4_detections"),
        std::bind(&dai::rosBridge::ImgDetectionConverter::toRosMsg, &detConverter, std::placeholders::_1, std::placeholders::_2),
        30);

    detectionPublish.addPublisherCallback();
    rgbPublish.addPublisherCallback();  // addPublisherCallback works only when the dataqueue is non blocking.

    rclcpp::spin(node);

    return 0;
}
Loading
Loading