Added rover catkin packages

This commit is contained in:
2017-12-03 13:46:55 -08:00
parent 9cbec88ebe
commit 08a7add471
55 changed files with 51834 additions and 0 deletions


@@ -0,0 +1,146 @@
cmake_minimum_required(VERSION 2.8.7)
project(zed_wrapper)
# if CMAKE_BUILD_TYPE is not specified, take 'Release' as default
IF(NOT CMAKE_BUILD_TYPE)
SET(CMAKE_BUILD_TYPE Release)
ENDIF(NOT CMAKE_BUILD_TYPE)
function(checkPackage package customMessage)
set(varName "${package}_FOUND")
if (NOT "${${varName}}")
string(REPLACE "_" "-" aptPackage ${package})
if("${customMessage}" STREQUAL "")
message(FATAL_ERROR "\n\n ${package} is missing, please try to install it with:\n sudo apt-get install ros-$(rosversion -d)-${aptPackage}\n\n")
else()
message(FATAL_ERROR "\n\n ${customMessage} \n\n")
endif()
endif()
endfunction(checkPackage)
find_package(ZED 2.1)
checkPackage("ZED" "ZED SDK not found, install it from:\n https://www.stereolabs.com/developers/")
exec_program(uname ARGS -p OUTPUT_VARIABLE CMAKE_SYSTEM_NAME2)
if ( CMAKE_SYSTEM_NAME2 MATCHES "aarch64" ) # Jetson TX
SET(CUDA_USE_STATIC_CUDA_RUNTIME OFF)
endif()
find_package(OpenCV 3 COMPONENTS core highgui imgproc)
checkPackage("OPENCV_CORE" "OpenCV core not found, install it from the tutorial at:\n https://www.stereolabs.com/documentation/overview/getting-started/getting-started.html")
checkPackage("OPENCV_HIGHGUI" "OpenCV highgui not found, install it from the tutorial at:\n https://www.stereolabs.com/documentation/overview/getting-started/getting-started.html")
checkPackage("OPENCV_IMGPROC" "OpenCV imgproc not found, install it from the tutorial at:\n https://www.stereolabs.com/documentation/overview/getting-started/getting-started.html")
find_package(CUDA)
checkPackage("CUDA" "CUDA not found, install it from:\n https://developer.nvidia.com/cuda-downloads")
find_package(PCL)
checkPackage("PCL" "PCL not found, try to install it with:\n sudo apt-get install libpcl1 ros-$(rosversion -d)-pcl-ros")
find_package(catkin COMPONENTS
image_transport
roscpp
rosconsole
sensor_msgs
dynamic_reconfigure
tf2_ros
pcl_conversions
nodelet
tf2_geometry_msgs
)
checkPackage("image_transport" "")
checkPackage("roscpp" "")
checkPackage("rosconsole" "")
checkPackage("sensor_msgs" "")
checkPackage("dynamic_reconfigure" "")
checkPackage("tf2_ros" "")
checkPackage("pcl_conversions" "")
checkPackage("nodelet" "")
checkPackage("tf2_geometry_msgs" "")
generate_dynamic_reconfigure_options(
cfg/Zed.cfg
)
catkin_package(
CATKIN_DEPENDS
roscpp
rosconsole
sensor_msgs
opencv
image_transport
dynamic_reconfigure
tf2_ros
tf2_geometry_msgs
pcl_conversions
)
###############################################################################
# INCLUDES
# Specify locations of header files.
include_directories(
${catkin_INCLUDE_DIRS}
${CUDA_INCLUDE_DIRS}
${ZED_INCLUDE_DIRS}
${OpenCV_INCLUDE_DIRS}
${PCL_INCLUDE_DIRS}
)
link_directories(${ZED_LIBRARY_DIR})
link_directories(${CUDA_LIBRARY_DIRS})
link_directories(${OpenCV_LIBRARY_DIRS})
link_directories(${PCL_LIBRARY_DIRS})
###############################################################################
###############################################################################
# EXECUTABLE
if(NOT DEFINED CUDA_NPP_LIBRARIES_ZED)
# To deal with the CUDA 9 npp libraries and previous versions of the ZED SDK
set(CUDA_NPP_LIBRARIES_ZED ${CUDA_npp_LIBRARY} ${CUDA_npps_LIBRARY} ${CUDA_nppi_LIBRARY})
endif()
add_definitions(-std=c++11)
list(REMOVE_ITEM PCL_LIBRARIES "vtkproj4") ## if using from pcl package (pcl/vtk bug)
set(LINK_LIBRARIES
${catkin_LIBRARIES}
${ZED_LIBRARIES}
${CUDA_LIBRARIES} ${CUDA_NPP_LIBRARIES_ZED}
${OpenCV_LIBS}
${PCL_LIBRARIES})
add_library(ZEDWrapper src/zed_wrapper_nodelet.cpp)
target_link_libraries(ZEDWrapper ${LINK_LIBRARIES})
add_dependencies(ZEDWrapper ${PROJECT_NAME}_gencfg)
add_executable(zed_wrapper_node src/zed_wrapper_node.cpp)
target_link_libraries(zed_wrapper_node ZEDWrapper ${LINK_LIBRARIES})
add_dependencies(zed_wrapper_node ${PROJECT_NAME}_gencfg)
###############################################################################
# Add all files in subdirectories of the project in
# a dummy_target so QtCreator has access to all files
FILE(GLOB_RECURSE extra_files ${CMAKE_SOURCE_DIR}/*)
add_custom_target(dummy_${PROJECT_NAME} SOURCES ${extra_files})
###############################################################################
# INSTALL
install(TARGETS
ZEDWrapper
zed_wrapper_node
ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
install(FILES
nodelet_plugins.xml
DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})
install(DIRECTORY
launch
DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})

rover/zed_wrapper/LICENSE Normal file

@@ -0,0 +1,28 @@
Copyright (c) 2017, Stereolabs
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of zed-ros-wrapper nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@@ -0,0 +1,51 @@
# Stereolabs ZED Camera - ROS Integration
This package lets you use the ZED stereo camera with ROS. It outputs the camera's left and right images, depth map, point cloud, and odometry information, and supports the use of multiple ZED cameras.
## Getting started
- First, download the latest version of the ZED SDK from [stereolabs.com](https://www.stereolabs.com/developers/)
- Download the ZED ROS wrapper [here](https://github.com/stereolabs/zed-ros-wrapper/archive/master.zip).
- For more information, check out our [ROS documentation](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html), our [ROS wiki](http://wiki.ros.org/zed-ros-wrapper) or our [blog post](https://www.stereolabs.com/blog/index.php/2015/09/07/use-your-zed-camera-with-ros/). If you want to customize the wrapper, check the [ZED API documentation](https://www.stereolabs.com/developers/documentation/API/).
### Prerequisites
- Ubuntu 16.04
- [ZED SDK **≥ 2.1**](https://www.stereolabs.com/developers/) and its dependencies ([OpenCV](http://docs.opencv.org/3.1.0/d7/d9f/tutorial_linux_install.html), [CUDA](https://developer.nvidia.com/cuda-downloads))
- [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu)
- [Point Cloud Library (PCL)](https://github.com/PointCloudLibrary/pcl)
### Build the program
The ZED ROS wrapper is a catkin package. It depends on the following ROS packages:
- tf2_ros
- tf2_geometry_msgs
- nav_msgs
- roscpp
- rosconsole
- sensor_msgs
- opencv
- image_transport
- dynamic_reconfigure
- urdf
Place the package folder `zed_wrapper` in your catkin workspace source folder `~/catkin_ws/src`.
Open a terminal and build the package:
```
cd ~/catkin_ws/
catkin_make
source ./devel/setup.bash
```
### Run the program
To launch the wrapper along with an Rviz preview, open a terminal and launch:
```
roslaunch zed_wrapper display.launch
```
To launch the wrapper without Rviz, use:
```
roslaunch zed_wrapper zed.launch
```
[More](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html)
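### Check the output
Once the wrapper is running, you can verify what it publishes from another terminal. A minimal check, assuming the default `zed` namespace set by `zed.launch`:
```
# List the topics advertised by the wrapper
rostopic list | grep zed

# Check the publishing rate of the rectified color stream
rostopic hz /zed/rgb/image_rect_color

# Print the visual odometry messages
rostopic echo /zed/odom
```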


@@ -0,0 +1,10 @@
#!/usr/bin/env python
PACKAGE = "zed_wrapper"
from dynamic_reconfigure.parameter_generator_catkin import *
gen = ParameterGenerator()
gen.add("confidence", int_t, 0, "Confidence threshold, the lower the better", 80, 1, 100)
exit(gen.generate(PACKAGE, "zed_wrapper", "Zed"))
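The `confidence` parameter declared above is exposed through dynamic_reconfigure, so it can be tuned while the node is running. An illustrative session; the node name `/zed/zed_wrapper_node` follows from the provided launch files, adjust it to your setup:
```
# Open the graphical reconfigure tool
rosrun rqt_reconfigure rqt_reconfigure

# Or set the value directly from the command line
rosrun dynamic_reconfigure dynparam set /zed/zed_wrapper_node confidence 60
```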


@@ -0,0 +1,27 @@
# Launch Files
Launch files provide a convenient way to start up multiple nodes and a master, as well as to set parameters.
Use **roslaunch** to run the ZED launch files.
```
roslaunch zed_wrapper zed.launch
```
### Start the node with one ZED
Use the **zed.launch** file to launch a single camera nodelet. It includes one instance of **zed\_camera.launch**.
```
roslaunch zed_wrapper zed.launch
```
### Start the node with multiple ZEDs
To use multiple ZED cameras, launch the **zed\_multi\_cam.launch** file, which describes the different cameras. This example includes two instances of **zed\_camera.launch**.
```
roslaunch zed_wrapper zed_multi_cam.launch
```
### Start the node with multiple ZEDs and GPUs
You can configure the wrapper to assign a GPU to each ZED. In that case, it is not possible to use identical instances of **zed\_camera.launch**, because different parameters need to be set for each ZED.
A sample **zed\_multi\_gpu.launch** file is available to show how to work with different ZEDs and GPUs.
```
roslaunch zed_wrapper zed_multi_gpu.launch
```
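Each launch file exposes its settings as roslaunch arguments, so individual values can be overridden from the command line without editing the files. For example (argument names taken from the launch files in this folder; the SVO path is a placeholder):
```
# Use a different odometry frame
roslaunch zed_wrapper zed.launch odometry_frame:=odom

# Replay a recorded SVO file with a specific camera id
roslaunch zed_wrapper zed_camera.launch svo_file:=/path/to/file.svo zed_id:=0
```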


@@ -0,0 +1,27 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<launch>
<!-- Launch ZED camera wrapper -->
<include file="$(find zed_wrapper)/launch/zed.launch" />
<!-- Launch rviz display -->
<node name="rviz" pkg="rviz" type="rviz" args="-d $(find zed_wrapper)/rviz/zed.rviz" output="screen" />
</launch>


@@ -0,0 +1,30 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<launch>
<!-- Odometry coordinate frame -->
<arg name="odometry_frame" default="map" />
<group ns="zed">
<include file="$(find zed_wrapper)/launch/zed_camera.launch">
<!-- compliant mode for rviz -->
<arg name="odometry_frame" value="$(arg odometry_frame)" />
</include>
</group>
</launch>


@@ -0,0 +1,89 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<launch>
<arg name="svo_file" default="" /><!-- <arg name="svo_file" default="path/to/svo/file.svo"> -->
<arg name="zed_id" default="0" />
<!-- GPU ID-->
<arg name="gpu_id" default="-1" />
<!-- Definition coordinate frames -->
<arg name="publish_tf" default="true" />
<arg name="odometry_frame" default="odom" />
<arg name="base_frame" default="zed_center" />
<arg name="camera_frame" default="zed_left_camera" />
<arg name="depth_frame" default="zed_depth_camera" />
<!-- Publish urdf zed -->
<arg name="publish_urdf" default="true" />
<node name="zed_wrapper_node" pkg="zed_wrapper" type="zed_wrapper_node" output="screen">
<!-- publish odometry frame -->
<param name="publish_tf" value="$(arg publish_tf)" />
<!-- Configuration frame camera -->
<param name="odometry_frame" value="$(arg odometry_frame)" />
<param name="base_frame" value="$(arg base_frame)" />
<param name="camera_frame" value="$(arg camera_frame)" />
<param name="depth_frame" value="$(arg depth_frame)" />
<!-- SVO file path -->
<param name="svo_filepath" value="$(arg svo_file)" />
<!-- ZED parameters -->
<param name="zed_id" value="$(arg zed_id)" />
<param name="resolution" value="3" />
<param name="quality" value="1" />
<param name="sensing_mode" value="0" />
<param name="frame_rate" value="60" />
<param name="odometry_db" value="" />
<param name="openni_depth_mode" value="0" />
<param name="gpu_id" value="$(arg gpu_id)" />
<param name="confidence" value="100" />
<param name="depth_stabilization" value="1" />
<!-- ROS topic names -->
<param name="rgb_topic" value="rgb/image_rect_color" />
<param name="rgb_raw_topic" value="rgb/image_raw_color" />
<param name="rgb_cam_info_topic" value="rgb/camera_info" />
<param name="left_topic" value="left/image_rect_color" />
<param name="left_raw_topic" value="left/image_raw_color" />
<param name="left_cam_info_topic" value="left/camera_info" />
<param name="right_topic" value="right/image_rect_color" />
<param name="right_raw_topic" value="right/image_raw_color" />
<param name="right_cam_info_topic" value="right/camera_info" />
<param name="depth_topic" value="depth/depth_registered" />
<param name="depth_cam_info_topic" value="depth/camera_info" />
<param name="point_cloud_topic" value="point_cloud/cloud_registered" />
<param name="odometry_topic" value="odom" />
</node>
<!-- ROS URDF description of the ZED -->
<group if="$(arg publish_urdf)">
<param name="zed_description" textfile="$(find zed_wrapper)/urdf/zed.urdf" />
<node name="zed_state_publisher" pkg="robot_state_publisher" type="state_publisher">
<remap from="robot_description" to="zed_description" />
</node>
</group>
</launch>


@@ -0,0 +1,45 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<launch>
<!-- Odometry coordinate frame -->
<arg name="odometry_frame" default="map" />
<arg name="zed_id1" default="0" />
<arg name="zed_id2" default="1" />
<!-- First ZED camera -->
<group ns="zed1">
<include file="$(find zed_wrapper)/launch/zed_camera.launch">
<arg name="zed_id" value="$(arg zed_id1)" />
<!-- compliant mode for rviz -->
<arg name="odometry_frame" value="$(arg odometry_frame)" />
</include>
</group>
<!-- Second ZED camera -->
<group ns="zed2">
<include file="$(find zed_wrapper)/launch/zed_camera.launch">
<arg name="zed_id" value="$(arg zed_id2)" />
<!-- compliant mode for rviz -->
<arg name="odometry_frame" value="$(arg odometry_frame)" />
</include>
</group>
</launch>


@@ -0,0 +1,49 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<launch>
<!-- Odometry coordinate frame -->
<arg name="odometry_frame" default="map" />
<arg name="zed_id1" default="0" />
<arg name="zed_id2" default="1" />
<!-- First ZED camera on GPU 0 -->
<group ns="zed_GPU0">
<include file="$(find zed_wrapper)/launch/zed_camera.launch">
<arg name="zed_id" value="$(arg zed_id1)" />
<!-- Set GPU -->
<arg name="gpu_id" value="0" />
<!-- compliant mode for rviz -->
<arg name="odometry_frame" value="$(arg odometry_frame)" />
</include>
</group>
<!-- Second ZED camera on GPU 1 -->
<group ns="zed_GPU1">
<include file="$(find zed_wrapper)/launch/zed_camera.launch">
<arg name="zed_id" value="$(arg zed_id2)" />
<!-- Set GPU -->
<arg name="gpu_id" value="1" />
<!-- compliant mode for rviz -->
<arg name="odometry_frame" value="$(arg odometry_frame)" />
</include>
</group>
</launch>


@@ -0,0 +1,7 @@
<library path="lib/libZEDWrapper">
<class name="zed_wrapper/ZEDWrapperNodelet"
type="zed_wrapper::ZEDWrapperNodelet"
base_class_type="nodelet::Nodelet">
<description>The ROS nodelet interface for the Stereolabs ZED camera.</description>
</class>
</library>
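The plugin description above also allows the wrapper to be loaded into an existing nodelet manager instead of running the standalone `zed_wrapper_node`. A minimal sketch using the standard nodelet tools; the manager name `zed_manager` is arbitrary:
```
# Start a nodelet manager (name is arbitrary)
rosrun nodelet nodelet manager __name:=zed_manager

# In a second terminal, load the ZED wrapper nodelet into it
rosrun nodelet nodelet load zed_wrapper/ZEDWrapperNodelet zed_manager
```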


@@ -0,0 +1,41 @@
<?xml version="1.0"?>
<package>
<name>zed_wrapper</name>
<version>1.0.0</version>
<description>zed_wrapper is a ROS wrapper for the <a href="http://www.stereolabs.com/developers/">ZED library</a>
for interfacing with the ZED camera.
</description>
<maintainer email="developers@stereolabs.com">STEREOLABS</maintainer>
<license>BSD</license>
<buildtool_depend>catkin</buildtool_depend>
<build_depend>tf2_ros</build_depend>
<build_depend>tf2_geometry_msgs</build_depend>
<build_depend>nav_msgs</build_depend>
<build_depend>roscpp</build_depend>
<build_depend>rosconsole</build_depend>
<build_depend>sensor_msgs</build_depend>
<build_depend>opencv</build_depend>
<build_depend>image_transport</build_depend>
<build_depend>dynamic_reconfigure</build_depend>
<build_depend>pcl_conversions</build_depend>
<build_depend>urdf</build_depend>
<build_depend>nodelet</build_depend>
<run_depend>tf2_ros</run_depend>
<run_depend>tf2_geometry_msgs</run_depend>
<run_depend>nav_msgs</run_depend>
<run_depend>roscpp</run_depend>
<run_depend>rosconsole</run_depend>
<run_depend>sensor_msgs</run_depend>
<run_depend>opencv</run_depend>
<run_depend>image_transport</run_depend>
<run_depend>dynamic_reconfigure</run_depend>
<run_depend>pcl_conversions</run_depend>
<run_depend>nodelet</run_depend>
<run_depend>robot_state_publisher</run_depend>
<export>
<nodelet plugin="${prefix}/nodelet_plugins.xml" />
</export>
</package>


@@ -0,0 +1,2 @@
#!/bin/bash
# Record the registered RGB-D topics (compressed color, compressed depth, camera info) and TF to a rosbag
rosbag record /camera/rgb/camera_info /camera/depth_registered/image_raw/compressedDepth /camera/rgb/image_rect_color/compressed /tf


@@ -0,0 +1,2 @@
#!/bin/bash
# Record the raw stereo pair (compressed left/right images and camera info) and TF to a rosbag
rosbag record /camera/left/camera_info /camera/right/camera_info /camera/left/image_raw/compressed /camera/right/image_raw/compressed /tf
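The resulting bags can be inspected and replayed later with the standard rosbag tools (replace `<recording>.bag` with the file written by `rosbag record`):
```
# Show the topics, duration and message counts of a recording
rosbag info <recording>.bag

# Replay it, publishing the recorded clock for nodes that use simulated time
rosbag play --clock <recording>.bag
```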


@@ -0,0 +1,301 @@
Panels:
- Class: rviz/Displays
Help Height: 78
Name: Displays
Property Tree Widget:
Expanded:
- /RobotModel1/Links1
- /TF1/Frames1
- /Odometry1/Shape1
Splitter Ratio: 0.5
Tree Height: 281
- Class: rviz/Selection
Name: Selection
- Class: rviz/Tool Properties
Expanded:
- /2D Pose Estimate1
- /2D Nav Goal1
- /Publish Point1
Name: Tool Properties
Splitter Ratio: 0.588679016
- Class: rviz/Views
Expanded:
- /Current View1
Name: Views
Splitter Ratio: 0.5
- Class: rviz/Time
Experimental: false
Name: Time
SyncMode: 0
SyncSource: Camera
Visualization Manager:
Class: ""
Displays:
- Alpha: 0.5
Cell Size: 1
Class: rviz/Grid
Color: 160; 160; 164
Enabled: true
Line Style:
Line Width: 0.0299999993
Value: Lines
Name: Grid
Normal Cell Count: 0
Offset:
X: 0
Y: 0
Z: 0
Plane: XY
Plane Cell Count: 10
Reference Frame: <Fixed Frame>
Value: true
- Alpha: 1
Class: rviz/RobotModel
Collision Enabled: false
Enabled: true
Links:
All Links Enabled: true
Expand Joint Details: false
Expand Link Details: false
Expand Tree: false
Link Tree Style: Links in Alphabetic Order
zed_center:
Alpha: 1
Show Axes: false
Show Trail: false
Value: true
zed_depth_camera:
Alpha: 1
Show Axes: false
Show Trail: false
zed_left_camera:
Alpha: 1
Show Axes: false
Show Trail: false
Value: true
zed_right_camera:
Alpha: 1
Show Axes: false
Show Trail: false
Value: true
Name: RobotModel
Robot Description: zed/zed_description
TF Prefix: ""
Update Interval: 0
Value: true
Visual Enabled: true
- Class: rviz/TF
Enabled: true
Frame Timeout: 15
Frames:
All Enabled: false
map:
Value: false
zed_center:
Value: false
zed_depth_camera:
Value: false
zed_left_camera:
Value: true
zed_right_camera:
Value: false
Marker Scale: 1
Name: TF
Show Arrows: false
Show Axes: true
Show Names: true
Tree:
map:
zed_center:
zed_left_camera:
zed_depth_camera:
{}
zed_right_camera:
{}
Update Interval: 0
Value: true
- Angle Tolerance: 0
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.300000012
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: true
Keep: 1
Name: Odometry
Position Tolerance: 0
Shape:
Alpha: 0
Axes Length: 1
Axes Radius: 0.100000001
Color: 255; 25; 0
Head Length: 0.200000003
Head Radius: 0.100000001
Shaft Length: 0.300000012
Shaft Radius: 0.0199999996
Value: Arrow
Topic: /zed/odom
Unreliable: false
Value: true
- Alpha: 1
Auto Size:
Auto Size Factor: 1
Value: true
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/DepthCloud
Color: 255; 255; 255
Color Image Topic: /zed/rgb/image_raw_color
Color Transformer: RGB8
Color Transport Hint: raw
Decay Time: 0
Depth Map Topic: /zed/depth/depth_registered
Depth Map Transport Hint: raw
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 4096
Min Color: 0; 0; 0
Min Intensity: 0
Name: DepthCloud
Occlusion Compensation:
Occlusion Time-Out: 30
Value: false
Position Transformer: XYZ
Queue Size: 5
Selectable: true
Size (Pixels): 3
Style: Flat Squares
Topic Filter: true
Use Fixed Frame: true
Use rainbow: true
Value: false
- Class: rviz/Camera
Enabled: true
Image Rendering: background and overlay
Image Topic: /zed/rgb/image_rect_color
Name: Camera
Overlay Alpha: 0.5
Queue Size: 2
Transport Hint: raw
Unreliable: false
Value: true
Visibility:
DepthCloud: true
Grid: true
Odometry: true
PointCloud2: true
RobotModel: true
TF: true
Value: false
Zoom Factor: 1
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: RGB8
Decay Time: 0
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 4096
Min Color: 0; 0; 0
Min Intensity: 0
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.00999999978
Style: Flat Squares
Topic: /zed/point_cloud/cloud_registered
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
Enabled: true
Global Options:
Background Color: 48; 48; 48
Fixed Frame: map
Frame Rate: 30
Name: root
Tools:
- Class: rviz/Interact
Hide Inactive Objects: true
- Class: rviz/MoveCamera
- Class: rviz/Select
- Class: rviz/FocusCamera
- Class: rviz/Measure
- Class: rviz/SetInitialPose
Topic: /initialpose
- Class: rviz/SetGoal
Topic: /move_base_simple/goal
- Class: rviz/PublishPoint
Single click: true
Topic: /clicked_point
Value: true
Views:
Current:
Class: rviz/Orbit
Distance: 3.33887124
Enable Stereo Rendering:
Stereo Eye Separation: 0.0599999987
Stereo Focal Distance: 1
Swap Stereo Eyes: false
Value: false
Focal Point:
X: 0.00562699512
Y: -0.0610139929
Z: -0.0259050336
Focal Shape Fixed Size: false
Focal Shape Size: 0.0500000007
Invert Z Axis: false
Name: Current View
Near Clip Distance: 0.00999999978
Pitch: 0.310398251
Target Frame: zed_left_camera
Value: Orbit (rviz)
Yaw: 3.44725299
Saved: ~
Window Geometry:
Camera:
collapsed: false
Displays:
collapsed: false
Height: 793
Hide Left Dock: false
Hide Right Dock: true
QMainWindow State: 000000ff00000000fd00000004000000000000016a0000028ffc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000001a8000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006101000001d6000000e10000001600ffffff000000010000010f00000327fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000327000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000045d0000003efc0100000002fb0000000800540069006d006501000000000000045d0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000002ed0000028f00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Time:
collapsed: false
Tool Properties:
collapsed: false
Views:
collapsed: true
Width: 1117
X: 1474
Y: 432


@@ -0,0 +1,37 @@
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2017, STEREOLABS.
//
// All rights reserved.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
#include <ros/ros.h>
#include <nodelet/loader.h>
int main(int argc, char** argv) {
ros::init(argc, argv, "zed_wrapper_node");
nodelet::Loader nodelet;
nodelet::M_string remap(ros::names::getRemappings());
nodelet::V_string nargv;
nodelet.load(ros::this_node::getName(),
"zed_wrapper/ZEDWrapperNodelet",
remap, nargv);
ros::spin();
return 0;
}


@@ -0,0 +1,813 @@
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2017, STEREOLABS.
//
// All rights reserved.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
/****************************************************************************************************
** This sample is a wrapper for the ZED library in order to use the ZED Camera with ROS. **
** A set of parameters can be specified in the launch file. **
****************************************************************************************************/
#include <csignal>
#include <cstdio>
#include <math.h>
#include <limits>
#include <thread>
#include <chrono>
#include <memory>
#include <sys/stat.h>
#include <ros/ros.h>
#include <nodelet/nodelet.h>
#include <sensor_msgs/Image.h>
#include <sensor_msgs/CameraInfo.h>
#include <sensor_msgs/distortion_models.h>
#include <sensor_msgs/image_encodings.h>
#include <image_transport/image_transport.h>
#include <dynamic_reconfigure/server.h>
#include <zed_wrapper/ZedConfig.h>
#include <nav_msgs/Odometry.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#include <tf2_ros/buffer.h>
#include <tf2_ros/transform_listener.h>
#include <tf2/LinearMath/Quaternion.h>
#include <tf2_ros/transform_broadcaster.h>
#include <geometry_msgs/TransformStamped.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <boost/make_shared.hpp>
#include <sensor_msgs/PointCloud2.h>
#include <pcl_conversions/pcl_conversions.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <sl/Camera.hpp>
using namespace std;
namespace zed_wrapper {
class ZEDWrapperNodelet : public nodelet::Nodelet {
ros::NodeHandle nh;
ros::NodeHandle nh_ns;
boost::shared_ptr<boost::thread> device_poll_thread;
image_transport::Publisher pub_rgb;
image_transport::Publisher pub_raw_rgb;
image_transport::Publisher pub_left;
image_transport::Publisher pub_raw_left;
image_transport::Publisher pub_right;
image_transport::Publisher pub_raw_right;
image_transport::Publisher pub_depth;
ros::Publisher pub_cloud;
ros::Publisher pub_rgb_cam_info;
ros::Publisher pub_left_cam_info;
ros::Publisher pub_right_cam_info;
ros::Publisher pub_depth_cam_info;
ros::Publisher pub_odom;
// tf
tf2_ros::TransformBroadcaster transform_odom_broadcaster;
std::string left_frame_id;
std::string right_frame_id;
std::string rgb_frame_id;
std::string depth_frame_id;
std::string cloud_frame_id;
std::string odometry_frame_id;
std::string base_frame_id;
std::string camera_frame_id;
// initialization Transform listener
boost::shared_ptr<tf2_ros::Buffer> tfBuffer;
boost::shared_ptr<tf2_ros::TransformListener> tf_listener;
bool publish_tf;
// Launch file parameters
int resolution;
int quality;
int sensing_mode;
int rate;
int gpu_id;
int zed_id;
int depth_stabilization;
std::string odometry_DB;
std::string svo_filepath;
//Tracking variables
sl::Pose pose;
tf2::Transform base_transform;
// zed object
sl::InitParameters param;
std::unique_ptr<sl::Camera> zed;
// flags
int confidence;
bool computeDepth;
bool grabbing = false;
int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html
// Point cloud variables
sl::Mat cloud;
string point_cloud_frame_id = "";
ros::Time point_cloud_time;
/* \brief Convert an sl::Mat to a cv::Mat
* \param mat : the sl::Mat to convert
*/
cv::Mat toCVMat(sl::Mat &mat) {
if (mat.getMemoryType() == sl::MEM_GPU)
mat.updateCPUfromGPU();
int cvType;
switch (mat.getDataType()) {
case sl::MAT_TYPE_32F_C1:
cvType = CV_32FC1;
break;
case sl::MAT_TYPE_32F_C2:
cvType = CV_32FC2;
break;
case sl::MAT_TYPE_32F_C3:
cvType = CV_32FC3;
break;
case sl::MAT_TYPE_32F_C4:
cvType = CV_32FC4;
break;
case sl::MAT_TYPE_8U_C1:
cvType = CV_8UC1;
break;
case sl::MAT_TYPE_8U_C2:
cvType = CV_8UC2;
break;
case sl::MAT_TYPE_8U_C3:
cvType = CV_8UC3;
break;
case sl::MAT_TYPE_8U_C4:
cvType = CV_8UC4;
break;
}
return cv::Mat((int) mat.getHeight(), (int) mat.getWidth(), cvType, mat.getPtr<sl::uchar1>(sl::MEM_CPU), mat.getStepBytes(sl::MEM_CPU));
}
/* \brief Test if a file exists
* \param name : the path to the file
*/
bool file_exist(const std::string& name) {
struct stat buffer;
return (stat(name.c_str(), &buffer) == 0);
}
/* \brief Image to ros message conversion
* \param img : the image to publish
* \param encodingType : the sensor_msgs::image_encodings encoding type
* \param frameId : the id of the reference frame of the image
* \param t : the ros::Time to stamp the image
*/
sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t) {
sensor_msgs::ImagePtr ptr = boost::make_shared<sensor_msgs::Image>();
sensor_msgs::Image& imgMessage = *ptr;
imgMessage.header.stamp = t;
imgMessage.header.frame_id = frameId;
imgMessage.height = img.rows;
imgMessage.width = img.cols;
imgMessage.encoding = encodingType;
int num = 1; //for endianness detection
imgMessage.is_bigendian = !(*(char *) &num == 1);
imgMessage.step = img.cols * img.elemSize();
size_t size = imgMessage.step * img.rows;
imgMessage.data.resize(size);
if (img.isContinuous())
memcpy((char*) (&imgMessage.data[0]), img.data, size);
else {
uchar* opencvData = img.data;
uchar* rosData = (uchar*) (&imgMessage.data[0]);
for (unsigned int i = 0; i < img.rows; i++) {
memcpy(rosData, opencvData, imgMessage.step);
rosData += imgMessage.step;
opencvData += img.step;
}
}
return ptr;
}
/* \brief Publish the pose of the camera with a ros Publisher
* \param base_transform : Transformation representing the camera pose from base frame
* \param pub_odom : the publisher object to use
* \param odom_frame_id : the id of the reference frame of the pose
* \param t : the ros::Time to stamp the odometry message
*/
void publishOdom(tf2::Transform base_transform, ros::Publisher &pub_odom, string odom_frame_id, ros::Time t) {
nav_msgs::Odometry odom;
odom.header.stamp = t;
odom.header.frame_id = odom_frame_id; // odom_frame
odom.child_frame_id = base_frame_id; // base_frame
// conversion from Transform to message
geometry_msgs::Transform base2 = tf2::toMsg(base_transform);
// Add all values to the odometry message
odom.pose.pose.position.x = base2.translation.x;
odom.pose.pose.position.y = base2.translation.y;
odom.pose.pose.position.z = base2.translation.z;
odom.pose.pose.orientation.x = base2.rotation.x;
odom.pose.pose.orientation.y = base2.rotation.y;
odom.pose.pose.orientation.z = base2.rotation.z;
odom.pose.pose.orientation.w = base2.rotation.w;
// Publish odometry message
pub_odom.publish(odom);
}
/* \brief Publish the pose of the camera as a transformation
* \param base_transform : Transformation representing the camera pose from base frame
* \param trans_br : the TransformBroadcaster object to use
* \param odometry_transform_frame_id : the id of the transformation
* \param t : the ros::Time to stamp the transform
*/
void publishTrackedFrame(tf2::Transform base_transform, tf2_ros::TransformBroadcaster &trans_br, string odometry_transform_frame_id, ros::Time t) {
geometry_msgs::TransformStamped transformStamped;
transformStamped.header.stamp = ros::Time::now();
transformStamped.header.frame_id = odometry_frame_id;
transformStamped.child_frame_id = odometry_transform_frame_id;
// conversion from Transform to message
transformStamped.transform = tf2::toMsg(base_transform);
// Publish transformation
trans_br.sendTransform(transformStamped);
}
/* \brief Publish a cv::Mat image with a ros Publisher
* \param img : the image to publish
* \param pub_img : the publisher object to use
* \param img_frame_id : the id of the reference frame of the image
* \param t : the ros::Time to stamp the image
*/
void publishImage(cv::Mat img, image_transport::Publisher &pub_img, string img_frame_id, ros::Time t) {
pub_img.publish(imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, img_frame_id, t));
}
/* \brief Publish a cv::Mat depth image with a ros Publisher
* \param depth : the depth image to publish
* \param pub_depth : the publisher object to use
* \param depth_frame_id : the id of the reference frame of the depth image
* \param t : the ros::Time to stamp the depth image
*/
void publishDepth(cv::Mat depth, image_transport::Publisher &pub_depth, string depth_frame_id, ros::Time t) {
string encoding;
if (openniDepthMode) {
depth *= 1000.0f;
depth.convertTo(depth, CV_16UC1); // in mm, rounded
encoding = sensor_msgs::image_encodings::TYPE_16UC1;
} else {
encoding = sensor_msgs::image_encodings::TYPE_32FC1;
}
pub_depth.publish(imageToROSmsg(depth, encoding, depth_frame_id, t));
}
/* \brief Publish a pointCloud with a ros Publisher
* \param width : the width of the point cloud
* \param height : the height of the point cloud
* \param pub_cloud : the publisher object to use
*/
void publishPointCloud(int width, int height, ros::Publisher &pub_cloud) {
pcl::PointCloud<pcl::PointXYZRGB> point_cloud;
point_cloud.width = width;
point_cloud.height = height;
int size = width*height;
point_cloud.points.resize(size);
sl::Vector4<float>* cpu_cloud = cloud.getPtr<sl::float4>();
for (int i = 0; i < size; i++) {
point_cloud.points[i].x = cpu_cloud[i][2];
point_cloud.points[i].y = -cpu_cloud[i][0];
point_cloud.points[i].z = -cpu_cloud[i][1];
point_cloud.points[i].rgb = cpu_cloud[i][3];
}
sensor_msgs::PointCloud2 output;
pcl::toROSMsg(point_cloud, output); // Convert the point cloud to a ROS message
output.header.frame_id = point_cloud_frame_id; // Set the header values of the ROS message
output.header.stamp = point_cloud_time;
output.height = height;
output.width = width;
output.is_bigendian = false;
output.is_dense = false;
pub_cloud.publish(output);
}
/* \brief Publish the information of a camera with a ros Publisher
* \param cam_info_msg : the information message to publish
* \param pub_cam_info : the publisher object to use
* \param t : the ros::Time to stamp the message
*/
void publishCamInfo(sensor_msgs::CameraInfoPtr cam_info_msg, ros::Publisher pub_cam_info, ros::Time t) {
static int seq = 0;
cam_info_msg->header.stamp = t;
cam_info_msg->header.seq = seq;
pub_cam_info.publish(cam_info_msg);
seq++;
}
/* \brief Get the information of the ZED cameras and store it in an information message
* \param zed : the sl::Camera* pointer to an instance
* \param left_cam_info_msg : the information message to fill with the left camera information
* \param right_cam_info_msg : the information message to fill with the right camera information
* \param left_frame_id : the id of the reference frame of the left camera
* \param right_frame_id : the id of the reference frame of the right camera
*/
void fillCamInfo(sl::Camera* zed, sensor_msgs::CameraInfoPtr left_cam_info_msg, sensor_msgs::CameraInfoPtr right_cam_info_msg,
string left_frame_id, string right_frame_id) {
int width = zed->getResolution().width;
int height = zed->getResolution().height;
sl::CameraInformation zedParam = zed->getCameraInformation();
float baseline = zedParam.calibration_parameters.T[0] * 0.001; // baseline converted in meters
float fx = zedParam.calibration_parameters.left_cam.fx;
float fy = zedParam.calibration_parameters.left_cam.fy;
float cx = zedParam.calibration_parameters.left_cam.cx;
float cy = zedParam.calibration_parameters.left_cam.cy;
// There is no distortion since the images are rectified
double k1 = 0;
double k2 = 0;
double k3 = 0;
double p1 = 0;
double p2 = 0;
left_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
right_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
left_cam_info_msg->D.resize(5);
right_cam_info_msg->D.resize(5);
left_cam_info_msg->D[0] = right_cam_info_msg->D[0] = k1;
left_cam_info_msg->D[1] = right_cam_info_msg->D[1] = k2;
left_cam_info_msg->D[2] = right_cam_info_msg->D[2] = k3;
left_cam_info_msg->D[3] = right_cam_info_msg->D[3] = p1;
left_cam_info_msg->D[4] = right_cam_info_msg->D[4] = p2;
left_cam_info_msg->K.fill(0.0);
right_cam_info_msg->K.fill(0.0);
left_cam_info_msg->K[0] = right_cam_info_msg->K[0] = fx;
left_cam_info_msg->K[2] = right_cam_info_msg->K[2] = cx;
left_cam_info_msg->K[4] = right_cam_info_msg->K[4] = fy;
left_cam_info_msg->K[5] = right_cam_info_msg->K[5] = cy;
left_cam_info_msg->K[8] = right_cam_info_msg->K[8] = 1.0;
left_cam_info_msg->R.fill(0.0);
right_cam_info_msg->R.fill(0.0);
left_cam_info_msg->P.fill(0.0);
right_cam_info_msg->P.fill(0.0);
left_cam_info_msg->P[0] = right_cam_info_msg->P[0] = fx;
left_cam_info_msg->P[2] = right_cam_info_msg->P[2] = cx;
left_cam_info_msg->P[5] = right_cam_info_msg->P[5] = fy;
left_cam_info_msg->P[6] = right_cam_info_msg->P[6] = cy;
left_cam_info_msg->P[10] = right_cam_info_msg->P[10] = 1.0;
right_cam_info_msg->P[3] = (-1 * fx * baseline);
left_cam_info_msg->width = right_cam_info_msg->width = width;
left_cam_info_msg->height = right_cam_info_msg->height = height;
left_cam_info_msg->header.frame_id = left_frame_id;
right_cam_info_msg->header.frame_id = right_frame_id;
}
void callback(zed_wrapper::ZedConfig &config, uint32_t level) {
NODELET_INFO("Reconfigure confidence : %d", config.confidence);
confidence = config.confidence;
}
void device_poll() {
ros::Rate loop_rate(rate);
ros::Time old_t = ros::Time::now();
bool old_image = false;
bool tracking_activated = false;
// Get the parameters of the ZED images
int width = zed->getResolution().width;
int height = zed->getResolution().height;
NODELET_DEBUG_STREAM("Image size : " << width << "x" << height);
cv::Size cvSize(width, height);
cv::Mat leftImRGB(cvSize, CV_8UC3);
cv::Mat rightImRGB(cvSize, CV_8UC3);
// Create and fill the camera information messages
sensor_msgs::CameraInfoPtr rgb_cam_info_msg(new sensor_msgs::CameraInfo());
sensor_msgs::CameraInfoPtr left_cam_info_msg(new sensor_msgs::CameraInfo());
sensor_msgs::CameraInfoPtr right_cam_info_msg(new sensor_msgs::CameraInfo());
sensor_msgs::CameraInfoPtr depth_cam_info_msg(new sensor_msgs::CameraInfo());
fillCamInfo(zed.get(), left_cam_info_msg, right_cam_info_msg, left_frame_id, right_frame_id);
rgb_cam_info_msg = depth_cam_info_msg = left_cam_info_msg; // the reference camera is the Left one (next to the ZED logo)
sl::RuntimeParameters runParams;
runParams.sensing_mode = static_cast<sl::SENSING_MODE> (sensing_mode);
sl::TrackingParameters trackParams;
trackParams.area_file_path = odometry_DB.c_str();
sl::Mat leftZEDMat, rightZEDMat, depthZEDMat;
// Main loop
while (nh_ns.ok()) {
// Check for subscribers
int rgb_SubNumber = pub_rgb.getNumSubscribers();
int rgb_raw_SubNumber = pub_raw_rgb.getNumSubscribers();
int left_SubNumber = pub_left.getNumSubscribers();
int left_raw_SubNumber = pub_raw_left.getNumSubscribers();
int right_SubNumber = pub_right.getNumSubscribers();
int right_raw_SubNumber = pub_raw_right.getNumSubscribers();
int depth_SubNumber = pub_depth.getNumSubscribers();
int cloud_SubNumber = pub_cloud.getNumSubscribers();
int odom_SubNumber = pub_odom.getNumSubscribers();
bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber + left_raw_SubNumber + right_SubNumber + right_raw_SubNumber + depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0;
runParams.enable_point_cloud = false;
if (cloud_SubNumber > 0)
runParams.enable_point_cloud = true;
// Run the loop only if there is some subscribers
if (runLoop) {
if ( (depth_stabilization || odom_SubNumber > 0) && !tracking_activated) { //Start the tracking
if (odometry_DB != "" && !file_exist(odometry_DB)) {
odometry_DB = "";
NODELET_WARN("odometry_DB path doesn't exist or is unreachable.");
}
zed->enableTracking(trackParams);
tracking_activated = true;
} else if (!depth_stabilization && odom_SubNumber == 0 && tracking_activated) { //Stop the tracking
zed->disableTracking();
tracking_activated = false;
}
computeDepth = (depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0; // Detect if one of the subscribers needs the depth information
ros::Time t = ros::Time::now(); // Get current time
grabbing = true;
if (computeDepth) {
int actual_confidence = zed->getConfidenceThreshold();
if (actual_confidence != confidence)
zed->setConfidenceThreshold(confidence);
runParams.enable_depth = true; // Ask to compute the depth
} else
runParams.enable_depth = false; // Ask to not compute the depth
old_image = zed->grab(runParams);
grabbing = false;
if (old_image) { // Detect if an error occurred (for example: the ZED has been disconnected) and re-initialize the ZED
NODELET_DEBUG("Wait for a new image to proceed");
std::this_thread::sleep_for(std::chrono::milliseconds(2));
if ((t - old_t).toSec() > 5) {
// delete the old object before constructing a new one
zed.reset();
zed.reset(new sl::Camera());
NODELET_INFO("Re-openning the ZED");
sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
while (err != sl::SUCCESS) {
err = zed->open(param); // Try to initialize the ZED
NODELET_INFO_STREAM(errorCode2str(err));
std::this_thread::sleep_for(std::chrono::milliseconds(2000));
}
tracking_activated = false;
if (depth_stabilization || odom_SubNumber > 0) { //Start the tracking
if (odometry_DB != "" && !file_exist(odometry_DB)) {
odometry_DB = "";
NODELET_WARN("odometry_DB path doesn't exist or is unreachable.");
}
zed->enableTracking(trackParams);
tracking_activated = true;
}
}
continue;
}
old_t = ros::Time::now();
// Publish the left == rgb image if someone has subscribed to
if (left_SubNumber > 0 || rgb_SubNumber > 0) {
// Retrieve RGBA Left image
zed->retrieveImage(leftZEDMat, sl::VIEW_LEFT);
cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
if (left_SubNumber > 0) {
publishCamInfo(left_cam_info_msg, pub_left_cam_info, t);
publishImage(leftImRGB, pub_left, left_frame_id, t);
}
if (rgb_SubNumber > 0) {
publishCamInfo(rgb_cam_info_msg, pub_rgb_cam_info, t);
publishImage(leftImRGB, pub_rgb, rgb_frame_id, t); // rgb is the left image
}
}
// Publish the left_raw == rgb_raw image if someone has subscribed to
if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) {
// Retrieve RGBA Left image
zed->retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED);
cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
if (left_raw_SubNumber > 0) {
publishCamInfo(left_cam_info_msg, pub_left_cam_info, t);
publishImage(leftImRGB, pub_raw_left, left_frame_id, t);
}
if (rgb_raw_SubNumber > 0) {
publishCamInfo(rgb_cam_info_msg, pub_rgb_cam_info, t);
publishImage(leftImRGB, pub_raw_rgb, rgb_frame_id, t);
}
}
// Publish the right image if someone has subscribed to
if (right_SubNumber > 0) {
// Retrieve RGBA Right image
zed->retrieveImage(rightZEDMat, sl::VIEW_RIGHT);
cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
publishCamInfo(right_cam_info_msg, pub_right_cam_info, t);
publishImage(rightImRGB, pub_right, right_frame_id, t);
}
// Publish the right raw image if someone has subscribed to it
if (right_raw_SubNumber > 0) {
// Retrieve RGBA Right image
zed->retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED);
cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
publishCamInfo(right_cam_info_msg, pub_right_cam_info, t);
publishImage(rightImRGB, pub_raw_right, right_frame_id, t);
}
// Publish the depth image if someone has subscribed to
if (depth_SubNumber > 0) {
zed->retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH);
publishCamInfo(depth_cam_info_msg, pub_depth_cam_info, t);
publishDepth(toCVMat(depthZEDMat), pub_depth, depth_frame_id, t); // in meters
}
// Publish the point cloud if someone has subscribed to
if (cloud_SubNumber > 0) {
// Run the point cloud conversion asynchronously to avoid slowing down the whole program
// Retrieve raw pointCloud data
zed->retrieveMeasure(cloud, sl::MEASURE_XYZBGRA);
point_cloud_frame_id = cloud_frame_id;
point_cloud_time = t;
publishPointCloud(width, height, pub_cloud);
}
// Transform from base to sensor
tf2::Transform base_to_sensor;
// Look up the transformation from base frame to camera link
try {
// Save the transformation from base to frame
geometry_msgs::TransformStamped b2s = tfBuffer->lookupTransform(base_frame_id, camera_frame_id, t);
// Get the TF2 transformation
tf2::fromMsg(b2s.transform, base_to_sensor);
} catch (tf2::TransformException &ex) {
ROS_WARN_THROTTLE(10.0, "The tf from '%s' to '%s' does not seem to be available, "
"will assume it as identity!",
base_frame_id.c_str(),
camera_frame_id.c_str());
ROS_DEBUG("Transform error: %s", ex.what());
base_to_sensor.setIdentity();
}
// Publish the odometry if someone has subscribed to
if (odom_SubNumber > 0) {
zed->getPosition(pose);
// Transform ZED pose in TF2 Transformation
tf2::Transform camera_transform;
geometry_msgs::Transform c2s;
sl::Translation translation = pose.getTranslation();
c2s.translation.x = translation(2);
c2s.translation.y = -translation(0);
c2s.translation.z = -translation(1);
sl::Orientation quat = pose.getOrientation();
c2s.rotation.x = quat(2);
c2s.rotation.y = -quat(0);
c2s.rotation.z = -quat(1);
c2s.rotation.w = quat(3);
tf2::fromMsg(c2s, camera_transform);
// Transformation from camera sensor to base frame
base_transform = base_to_sensor * camera_transform * base_to_sensor.inverse();
// Publish odometry message
publishOdom(base_transform, pub_odom, odometry_frame_id, t);
}
// Publish odometry tf only if enabled
if(publish_tf) {
//Note, the frame is published, but its values will only change if someone has subscribed to odom
publishTrackedFrame(base_transform, transform_odom_broadcaster, base_frame_id, t); //publish the tracked Frame
}
loop_rate.sleep();
} else {
// Publish odometry tf only if enabled
if(publish_tf) {
publishTrackedFrame(base_transform, transform_odom_broadcaster, base_frame_id, ros::Time::now()); //publish the tracked Frame before the sleep
}
std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait
}
} // while loop
zed.reset();
}
boost::shared_ptr<dynamic_reconfigure::Server<zed_wrapper::ZedConfig>> server;
void onInit() {
// Launch file parameters
resolution = sl::RESOLUTION_HD720;
quality = sl::DEPTH_MODE_PERFORMANCE;
sensing_mode = sl::SENSING_MODE_STANDARD;
rate = 30;
gpu_id = -1;
zed_id = 0;
odometry_DB = "";
nh = getMTNodeHandle();
nh_ns = getMTPrivateNodeHandle();
// Set default coordinate frames
// If unknown, the left and right frames are set to the same camera coordinate frame
nh_ns.param<std::string>("odometry_frame", odometry_frame_id, "odometry_frame");
nh_ns.param<std::string>("base_frame", base_frame_id, "base_frame");
nh_ns.param<std::string>("camera_frame", camera_frame_id, "camera_frame");
nh_ns.param<std::string>("depth_frame", depth_frame_id, "depth_frame");
// Get parameters from launch file
nh_ns.getParam("resolution", resolution);
nh_ns.getParam("quality", quality);
nh_ns.getParam("sensing_mode", sensing_mode);
nh_ns.getParam("frame_rate", rate);
nh_ns.getParam("odometry_DB", odometry_DB);
nh_ns.getParam("openni_depth_mode", openniDepthMode);
nh_ns.getParam("gpu_id", gpu_id);
nh_ns.getParam("zed_id", zed_id);
nh_ns.getParam("depth_stabilization", depth_stabilization);
// Publish odometry tf
nh_ns.param<bool>("publish_tf", publish_tf, true);
// Print the configured frame names
ROS_INFO_STREAM("odometry_frame: " << odometry_frame_id);
ROS_INFO_STREAM("base_frame: " << base_frame_id);
ROS_INFO_STREAM("camera_frame: " << camera_frame_id);
ROS_INFO_STREAM("depth_frame: " << depth_frame_id);
// Status of odometry TF
ROS_INFO_STREAM("Publish " << odometry_frame_id << " [" << (publish_tf ? "TRUE" : "FALSE") << "]");
std::string img_topic = "image_rect_color";
std::string img_raw_topic = "image_raw_color";
// Set the default topic names
string left_topic = "left/" + img_topic;
string left_raw_topic = "left/" + img_raw_topic;
string left_cam_info_topic = "left/camera_info";
left_frame_id = camera_frame_id;
string right_topic = "right/" + img_topic;
string right_raw_topic = "right/" + img_raw_topic;
string right_cam_info_topic = "right/camera_info";
right_frame_id = camera_frame_id;
string rgb_topic = "rgb/" + img_topic;
string rgb_raw_topic = "rgb/" + img_raw_topic;
string rgb_cam_info_topic = "rgb/camera_info";
rgb_frame_id = depth_frame_id;
string depth_topic = "depth/";
if (openniDepthMode) {
NODELET_INFO_STREAM("Openni depth mode activated");
depth_topic += "depth_raw_registered";
} else {
depth_topic += "depth_registered";
}
string depth_cam_info_topic = "depth/camera_info";
string point_cloud_topic = "point_cloud/cloud_registered";
cloud_frame_id = camera_frame_id;
string odometry_topic = "odom";
nh_ns.getParam("rgb_topic", rgb_topic);
nh_ns.getParam("rgb_raw_topic", rgb_raw_topic);
nh_ns.getParam("rgb_cam_info_topic", rgb_cam_info_topic);
nh_ns.getParam("left_topic", left_topic);
nh_ns.getParam("left_raw_topic", left_raw_topic);
nh_ns.getParam("left_cam_info_topic", left_cam_info_topic);
nh_ns.getParam("right_topic", right_topic);
nh_ns.getParam("right_raw_topic", right_raw_topic);
nh_ns.getParam("right_cam_info_topic", right_cam_info_topic);
nh_ns.getParam("depth_topic", depth_topic);
nh_ns.getParam("depth_cam_info_topic", depth_cam_info_topic);
nh_ns.getParam("point_cloud_topic", point_cloud_topic);
nh_ns.getParam("odometry_topic", odometry_topic);
nh_ns.param<std::string>("svo_filepath", svo_filepath, std::string());
// Initialization transformation listener
tfBuffer.reset( new tf2_ros::Buffer );
tf_listener.reset( new tf2_ros::TransformListener(*tfBuffer) );
// Create the ZED object
zed.reset(new sl::Camera());
// Initialize tf2 transformation
base_transform.setIdentity();
// Try to initialize the ZED
if (!svo_filepath.empty())
param.svo_input_filename = svo_filepath.c_str();
else {
param.camera_fps = rate;
param.camera_resolution = static_cast<sl::RESOLUTION> (resolution);
param.camera_linux_id = zed_id;
}
param.coordinate_units = sl::UNIT_METER;
param.coordinate_system = sl::COORDINATE_SYSTEM_IMAGE;
param.depth_mode = static_cast<sl::DEPTH_MODE> (quality);
param.sdk_verbose = true;
param.sdk_gpu_id = gpu_id;
param.depth_stabilization = depth_stabilization;
sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
while (err != sl::SUCCESS) {
err = zed->open(param);
NODELET_INFO_STREAM(errorCode2str(err));
std::this_thread::sleep_for(std::chrono::milliseconds(2000));
}
//Reconfigure confidence
server = boost::make_shared<dynamic_reconfigure::Server<zed_wrapper::ZedConfig>>();
dynamic_reconfigure::Server<zed_wrapper::ZedConfig>::CallbackType f;
f = boost::bind(&ZEDWrapperNodelet::callback, this, _1, _2);
server->setCallback(f);
nh_ns.getParam("confidence", confidence);
// Create all the publishers
// Image publishers
image_transport::ImageTransport it_zed(nh);
pub_rgb = it_zed.advertise(rgb_topic, 1); //rgb
NODELET_INFO_STREAM("Advertized on topic " << rgb_topic);
pub_raw_rgb = it_zed.advertise(rgb_raw_topic, 1); //rgb raw
NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic);
pub_left = it_zed.advertise(left_topic, 1); //left
NODELET_INFO_STREAM("Advertized on topic " << left_topic);
pub_raw_left = it_zed.advertise(left_raw_topic, 1); //left raw
NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic);
pub_right = it_zed.advertise(right_topic, 1); //right
NODELET_INFO_STREAM("Advertized on topic " << right_topic);
pub_raw_right = it_zed.advertise(right_raw_topic, 1); //right raw
NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic);
pub_depth = it_zed.advertise(depth_topic, 1); //depth
NODELET_INFO_STREAM("Advertized on topic " << depth_topic);
//PointCloud publisher
pub_cloud = nh.advertise<sensor_msgs::PointCloud2> (point_cloud_topic, 1);
NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic);
// Camera info publishers
pub_rgb_cam_info = nh.advertise<sensor_msgs::CameraInfo>(rgb_cam_info_topic, 1); //rgb
NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic);
pub_left_cam_info = nh.advertise<sensor_msgs::CameraInfo>(left_cam_info_topic, 1); //left
NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic);
pub_right_cam_info = nh.advertise<sensor_msgs::CameraInfo>(right_cam_info_topic, 1); //right
NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic);
pub_depth_cam_info = nh.advertise<sensor_msgs::CameraInfo>(depth_cam_info_topic, 1); //depth
NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic);
//Odometry publisher
pub_odom = nh.advertise<nav_msgs::Odometry>(odometry_topic, 1);
NODELET_INFO_STREAM("Advertized on topic " << odometry_topic);
device_poll_thread = boost::shared_ptr<boost::thread>
(new boost::thread(boost::bind(&ZEDWrapperNodelet::device_poll, this)));
}
}; // class ZEDWrapperNodelet
} // namespace
#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(zed_wrapper::ZEDWrapperNodelet, nodelet::Nodelet);

Binary file not shown.


@@ -0,0 +1,73 @@
<?xml version="1.0"?>
<!--
Copyright (c) 2017, STEREOLABS.
All rights reserved.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<robot name="zed_camera">
<link name="zed_left_camera">
<visual>
<origin xyz="0 0 0" rpy="0 1.57 0"/>
<geometry>
<cylinder radius="0.007" length=".031"/>
</geometry>
<material name="dark_grey">
<color rgba="0.1 0.1 0.1 1"/>
</material>
</visual>
</link>
<link name="zed_center">
<visual>
<origin xyz="0 0 0" rpy="0 0 0"/>
<geometry>
<mesh filename="package://zed_wrapper/urdf/ZED.stl" />
</geometry>
<material name="light_grey">
<color rgba="0.8 0.8 0.8 0.8"/>
</material>
</visual>
</link>
<link name="zed_right_camera">
<visual>
<origin xyz="0 0 0" rpy="0 1.57 0"/>
<geometry>
<cylinder radius="0.007" length=".031"/>
</geometry>
<material name="dark_grey">
<color rgba="0.1 0.1 0.1 1"/>
</material>
</visual>
</link>
<link name="zed_depth_camera" />
<joint name="zed_left_camera_joint" type="fixed">
<parent link="zed_center"/>
<child link="zed_left_camera"/>
<origin xyz="0 0.06 0" rpy="0 0 0" />
</joint>
<joint name="zed_depth_camera_joint" type="fixed">
<parent link="zed_left_camera"/>
<child link="zed_depth_camera"/>
<origin xyz="0 0 0" rpy="-1.5707963267948966 0 -1.5707963267948966" />
</joint>
<joint name="zed_right_camera_joint" type="fixed">
<parent link="zed_center"/>
<child link="zed_right_camera"/>
<origin xyz="0 -0.06 0" rpy="0 0 0" />
</joint>
</robot>
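With `publish_urdf` enabled, the fixed joints above are broadcast by `robot_state_publisher`, while the odometry transform comes from the wrapper itself. One way to inspect the resulting tree at runtime, using the standard tf tools (not part of this package):
```
# Print the static transform between the camera links
rosrun tf tf_echo zed_center zed_left_camera

# Generate a PDF (frames.pdf) of the whole TF tree
rosrun tf view_frames
```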