Change object_tracking to subscribe to the image topic (#43)
* Implement a subscriber for the image topic

* Convert the image from RGB to BGR, since the image topic is RGB

* Add on/off switches for the mouse and camera_node

* Change the code so it runs on the real robot

* Tune the object-tracking parameters

* Remove unnecessary member variables

* Rename the callback function

* Remove unnecessary commented-out lines

* Use rclcpp::SensorDataQoS()

* Write std::placeholders::_1 directly

* Use a ComposableNode for the raspimouse node

* Use a ComposableNode for the usb_cam node as well

* Set use_intra_process_comms to true for each node

* Remove image_pub_

* Fix the object_tracking command in the README

* Remove the Gazebo command example
YusukeKato authored Nov 1, 2023
1 parent 5546840 commit 7c323ab
Showing 7 changed files with 65 additions and 44 deletions.
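
At its core, the commit replaces the `cv::VideoCapture` polling timer with a subscription to the camera topic and converts each incoming RGB frame to BGR for OpenCV via `cv_bridge`. Below is a minimal, self-contained sketch of that pattern, not the component itself: the class and node names are invented for the sketch, while the topic name, QoS, and color conversion mirror the diff that follows.

```cpp
// Sketch only: a plain rclcpp node that subscribes the way the updated Tracker does.
#include <functional>
#include <memory>

#include "rclcpp/rclcpp.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "cv_bridge/cv_bridge.h"
#include "opencv2/imgproc.hpp"

class ImageSubscriberSketch : public rclcpp::Node
{
public:
  ImageSubscriberSketch()
  : Node("image_subscriber_sketch")
  {
    // SensorDataQoS (best-effort, small depth) matches the usb_cam publisher.
    image_sub_ = create_subscription<sensor_msgs::msg::Image>(
      "camera/color/image_raw", rclcpp::SensorDataQoS(),
      std::bind(&ImageSubscriberSketch::image_callback, this, std::placeholders::_1));
  }

private:
  void image_callback(const sensor_msgs::msg::Image::SharedPtr msg_image)
  {
    // Wrap the ROS image without copying, then convert RGB -> BGR for OpenCV.
    auto cv_img = cv_bridge::toCvShare(msg_image, msg_image->encoding);
    cv::Mat frame;
    cv::cvtColor(cv_img->image, frame, cv::COLOR_RGB2BGR);
    RCLCPP_INFO(get_logger(), "received %dx%d frame", frame.cols, frame.rows);
  }

  rclcpp::Subscription<sensor_msgs::msg::Image>::SharedPtr image_sub_;
};

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  rclcpp::spin(std::make_shared<ImageSubscriberSketch>());
  rclcpp::shutdown();
  return 0;
}
```

With `use_intra_process_comms` enabled per node in the launch file, these image messages can move between the composed nodes without serialization.
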
4 changes: 3 additions & 1 deletion CMakeLists.txt
@@ -30,6 +30,7 @@ find_package(OpenCV REQUIRED)
find_package(raspimouse_msgs REQUIRED)
find_package(raspimouse REQUIRED)
find_package(rt_usb_9axisimu_driver REQUIRED)
find_package(cv_bridge REQUIRED)


include_directories(include)
@@ -46,7 +47,8 @@ ament_target_dependencies(object_tracking_component
std_srvs
sensor_msgs
geometry_msgs
OpenCV)
OpenCV
cv_bridge)
rclcpp_components_register_nodes(object_tracking_component "object_tracking::Tracker")

add_library(line_follower_component SHARED
4 changes: 2 additions & 2 deletions README.en.md
@@ -155,10 +155,10 @@ $ ./configure_camera.bash
Then, launch nodes with the following command:

```sh
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py video_device:=/dev/video0
```

This sample publishes two topics: `raw_image` for the camera image and `result_image` for the object detection image.
This sample publishes two topics: `camera/color/image_raw` for the camera image and `result_image` for the object detection image.
These images can be viewed with [RViz](https://index.ros.org/r/rviz/)
or [rqt_image_view](https://index.ros.org/doc/ros2/Tutorials/RQt-Overview-Usage/).

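For a quick look at the published images, rqt_image_view can also be pointed at a topic from the command line, for example (one possible invocation, assuming rqt_image_view is installed; the topic can likewise be selected in the GUI):

```sh
$ ros2 run rqt_image_view rqt_image_view /result_image
```
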
4 changes: 2 additions & 2 deletions README.md
@@ -157,10 +157,10 @@ $ ./configure_camera.bash
Launch the nodes with the following command:

```sh
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py video_device:=/dev/video0
```

The camera image is published on the `raw_image` topic and the object detection image on the `result_image` topic.
The camera image is published on the `camera/color/image_raw` topic and the object detection image on the `result_image` topic.
These images can be viewed with [RViz](https://index.ros.org/r/rviz/)
or [rqt_image_view](https://index.ros.org/doc/ros2/Tutorials/RQt-Overview-Usage/).
8 changes: 2 additions & 6 deletions include/raspimouse_ros2_examples/object_tracking_component.hpp
@@ -38,23 +38,19 @@ class Tracker : public rclcpp_lifecycle::LifecycleNode
explicit Tracker(const rclcpp::NodeOptions & options);

protected:
void on_image_timer();
void image_callback(const sensor_msgs::msg::Image::SharedPtr msg_image);
void on_cmd_vel_timer();

private:
cv::VideoCapture cap_;
int device_index_;
double image_width_;
double image_height_;
bool object_is_detected_;
cv::Point2d object_normalized_point_;
double object_normalized_area_;
geometry_msgs::msg::Twist cmd_vel_;
std::shared_ptr<rclcpp_lifecycle::LifecyclePublisher<sensor_msgs::msg::Image>> image_pub_;
std::shared_ptr<rclcpp_lifecycle::LifecyclePublisher<sensor_msgs::msg::Image>> result_image_pub_;
std::shared_ptr<rclcpp_lifecycle::LifecyclePublisher<geometry_msgs::msg::Twist>> cmd_vel_pub_;
std::shared_ptr<rclcpp::Client<std_srvs::srv::SetBool>> motor_power_client_;
rclcpp::TimerBase::SharedPtr image_timer_;
rclcpp::Subscription<sensor_msgs::msg::Image>::SharedPtr image_sub_;
rclcpp::TimerBase::SharedPtr cmd_vel_timer_;

std::string mat_type2encoding(int mat_type);
47 changes: 43 additions & 4 deletions launch/object_tracking.launch.py
@@ -13,12 +13,31 @@
# limitations under the License.

import launch
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
from launch.conditions import IfCondition
from launch_ros.actions import ComposableNodeContainer
from launch_ros.actions import Node
from launch_ros.descriptions import ComposableNode


def generate_launch_description():
declare_mouse = DeclareLaunchArgument(
'mouse',
default_value="true",
description='Launch raspimouse node'
)
declare_use_camera_node = DeclareLaunchArgument(
'use_camera_node',
default_value='true',
description='Use camera node.'
)
declare_video_device = DeclareLaunchArgument(
'video_device',
default_value='/dev/video0',
description='Set video device.'
)

"""Generate launch description with multiple components."""
container = ComposableNodeContainer(
name='object_tracking_container',
@@ -29,12 +48,27 @@ def generate_launch_description():
ComposableNode(
package='raspimouse_ros2_examples',
plugin='object_tracking::Tracker',
name='tracker'),
name='tracker',
extra_arguments=[{'use_intra_process_comms': True}]),
ComposableNode(
package='raspimouse',
plugin='raspimouse::Raspimouse',
name='raspimouse',
parameters=[{'use_light_sensors': False}]),
parameters=[{'use_light_sensors': False}],
extra_arguments=[{'use_intra_process_comms': True}],
condition=IfCondition(LaunchConfiguration('mouse'))),
ComposableNode(
package='usb_cam',
plugin='usb_cam::UsbCamNode',
name='usb_cam',
remappings=[('image_raw', 'camera/color/image_raw')],
parameters=[
{'video_device': LaunchConfiguration('video_device')},
{'frame_id': 'camera_color_optical_frame'},
{'pixel_format': 'yuyv2rgb'}
],
extra_arguments=[{'use_intra_process_comms': True}],
condition=IfCondition(LaunchConfiguration('use_camera_node'))),
],
output='screen',
)
@@ -45,7 +79,12 @@ def generate_launch_description():
executable='lifecycle_node_manager',
output='screen',
parameters=[{'components': ['raspimouse', 'tracker']}]

)

return launch.LaunchDescription([container, manager])
return launch.LaunchDescription([
declare_mouse,
declare_use_camera_node,
declare_video_device,
container,
manager
])
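
The three launch arguments declared above can be set from the command line; for example (illustrative invocations using the argument names declared in this launch file):

```sh
# Select the camera device explicitly
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py video_device:=/dev/video0
# Skip the built-in usb_cam node, e.g. when another driver already publishes camera/color/image_raw
$ ros2 launch raspimouse_ros2_examples object_tracking.launch.py use_camera_node:=false
```
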
1 change: 1 addition & 0 deletions package.xml
@@ -30,6 +30,7 @@
<depend>hls_lfcd_lds_driver</depend>
<depend>nav2_map_server</depend>
<depend>slam_toolbox</depend>
<depend>cv_bridge</depend>

<test_depend>ament_lint_auto</test_depend>
<test_depend>ament_lint_common</test_depend>
41 changes: 12 additions & 29 deletions src/object_tracking_component.cpp
@@ -25,6 +25,7 @@
#include "rclcpp/rclcpp.hpp"
#include "std_msgs/msg/string.hpp"
#include "lifecycle_msgs/srv/change_state.hpp"
#include "cv_bridge/cv_bridge.h"

using namespace std::chrono_literals;
using CallbackReturn = rclcpp_lifecycle::node_interfaces::LifecycleNodeInterface::CallbackReturn;
@@ -34,37 +35,34 @@ namespace object_tracking

Tracker::Tracker(const rclcpp::NodeOptions & options)
: rclcpp_lifecycle::LifecycleNode("tracker", options),
device_index_(0), image_width_(320), image_height_(240),
object_is_detected_(false)
{
}

void Tracker::on_image_timer()
void Tracker::image_callback(const sensor_msgs::msg::Image::SharedPtr msg_image)
{
auto cv_img = cv_bridge::toCvShare(msg_image, msg_image->encoding);
auto msg = std::make_unique<sensor_msgs::msg::Image>();
auto result_msg = std::make_unique<sensor_msgs::msg::Image>();
msg->is_bigendian = false;
result_msg->is_bigendian = false;

cv::Mat frame, result_frame;
cap_ >> frame;
cv::cvtColor(cv_img->image, frame, CV_RGB2BGR);

if (!frame.empty()) {
tracking(frame, result_frame);
convert_frame_to_message(result_frame, *result_msg);
result_image_pub_->publish(std::move(result_msg));

convert_frame_to_message(frame, *msg);
image_pub_->publish(std::move(msg));
}
}

void Tracker::on_cmd_vel_timer()
{
const double LINEAR_VEL = -0.5; // unit: m/s
const double ANGULAR_VEL = -0.8; // unit: rad/s
const double TARGET_AREA = 0.3; // 0.0 ~ 1.0
const double OBJECT_AREA_THRESHOLD = 0.06; // 0.0 ~ 1.0
const double TARGET_AREA = 0.1; // 0.0 ~ 1.0
const double OBJECT_AREA_THRESHOLD = 0.01; // 0.0 ~ 1.0

// Detects an object and tracks it
// when the number of pixels of the object is greater than the threshold.
@@ -117,7 +115,7 @@ void Tracker::tracking(const cv::Mat & input_frame, cv::Mat & result_frame)
cv::Mat hsv;
cv::cvtColor(input_frame, hsv, cv::COLOR_BGR2HSV);
cv::Mat extracted_bin;
cv::inRange(hsv, cv::Scalar(9, 100, 100), cv::Scalar(29, 255, 255), extracted_bin); // Orange
cv::inRange(hsv, cv::Scalar(0, 100, 100), cv::Scalar(29, 255, 255), extracted_bin); // Red-Orange
// cv::inRange(hsv, cv::Scalar(60, 100, 100), cv::Scalar(80, 255, 255), extracted_bin); // Green
// cv::inRange(hsv, cv::Scalar(100, 100, 100), cv::Scalar(120, 255, 255), extracted_bin); // Blue
input_frame.copyTo(result_frame, extracted_bin);
@@ -173,24 +171,15 @@ CallbackReturn Tracker::on_configure(const rclcpp_lifecycle::State &)
{
RCLCPP_INFO(this->get_logger(), "on_configure() is called.");

image_timer_ = create_wall_timer(100ms, std::bind(&Tracker::on_image_timer, this));
cmd_vel_timer_ = create_wall_timer(50ms, std::bind(&Tracker::on_cmd_vel_timer, this));
// Don't actually start publishing data until activated
image_timer_->cancel();
cmd_vel_timer_->cancel();

image_pub_ = create_publisher<sensor_msgs::msg::Image>("raw_image", 1);
result_image_pub_ = create_publisher<sensor_msgs::msg::Image>("result_image", 1);
cmd_vel_pub_ = create_publisher<geometry_msgs::msg::Twist>("cmd_vel", 1);

// Initialize OpenCV video capture stream.
cap_.open(device_index_);
cap_.set(cv::CAP_PROP_FRAME_WIDTH, image_width_);
cap_.set(cv::CAP_PROP_FRAME_HEIGHT, image_height_);
if (!cap_.isOpened()) {
RCLCPP_ERROR(this->get_logger(), "Could not open video stream");
return CallbackReturn::FAILURE;
}
image_sub_ = create_subscription<sensor_msgs::msg::Image>(
"camera/color/image_raw", rclcpp::SensorDataQoS(),
std::bind(&Tracker::image_callback, this, std::placeholders::_1));

return CallbackReturn::SUCCESS;
}
@@ -210,10 +199,8 @@ CallbackReturn Tracker::on_activate(const rclcpp_lifecycle::State &)
request->data = true;
auto future_result = motor_power_client_->async_send_request(request);

image_pub_->on_activate();
result_image_pub_->on_activate();
cmd_vel_pub_->on_activate();
image_timer_->reset();
cmd_vel_timer_->reset();

return CallbackReturn::SUCCESS;
@@ -222,10 +209,8 @@ CallbackReturn Tracker::on_activate(const rclcpp_lifecycle::State &)
CallbackReturn Tracker::on_deactivate(const rclcpp_lifecycle::State &)
{
RCLCPP_INFO(this->get_logger(), "on_deactivate() is called.");
image_pub_->on_deactivate();
result_image_pub_->on_deactivate();
cmd_vel_pub_->on_deactivate();
image_timer_->cancel();
cmd_vel_timer_->cancel();

object_is_detected_ = false;
@@ -238,11 +223,10 @@ CallbackReturn Tracker::on_cleanup(const rclcpp_lifecycle::State &)
{
RCLCPP_INFO(this->get_logger(), "on_cleanup() is called.");

image_pub_.reset();
result_image_pub_.reset();
cmd_vel_pub_.reset();
image_timer_.reset();
cmd_vel_timer_.reset();
image_sub_.reset();

return CallbackReturn::SUCCESS;
}
@@ -251,11 +235,10 @@ CallbackReturn Tracker::on_shutdown(const rclcpp_lifecycle::State &)
{
RCLCPP_INFO(this->get_logger(), "on_shutdown() is called.");

image_pub_.reset();
result_image_pub_.reset();
cmd_vel_pub_.reset();
image_timer_.reset();
cmd_vel_timer_.reset();
image_sub_.reset();

return CallbackReturn::SUCCESS;
}
