Skip to content

Commit 08bec80

Browse files
iserverobotics-bonaalexlin2
authored and committed
feat: add Docker build and deployment setup
- Multi-stage Dockerfile (build + runtime) for all ROS 2 packages - docker-compose.yml for deployment (Docker 24.x compatible) - build.sh to build, tag, and push to iserverobotics/nav_autonomy - .env for hardware configuration (lidar, motor, network) - .dockerignore to exclude build artifacts from context
1 parent 3f203de commit 08bec80

File tree

5 files changed

+524
-0
lines changed

5 files changed

+524
-0
lines changed

.dockerignore

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
# Keep the Docker build context small: exclude VCS metadata, colcon build
# artifacts, and repo content that is never needed inside the image.
.git
build
install
log
docker
map
base_station
chrony_conf
desktop_buttons
img
*.md
LICENSE

docker/.env

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
# Hardware Configuration Environment Variables
# Customize these values for your hardware setup before running docker compose.

# ============================================
# Docker Runtime
# ============================================
# Uncomment to enable GPU support (requires nvidia-container-toolkit)
#DOCKER_RUNTIME=nvidia

# Image tag to run (default: latest)
IMAGE_TAG=latest

# ============================================
# ROS Configuration
# ============================================
# ROS domain ID; give each robot a distinct value in multi-robot setups
ROS_DOMAIN_ID=42

# ============================================
# Mid-360 Lidar Configuration
# ============================================
# Network interface connected to the lidar (e.g., eth0, enp0s3)
# Find with: ip addr show
LIDAR_INTERFACE=eth0

# Processing computer IP address on the lidar subnet
# Must be on the same subnet as the lidar (e.g., 192.168.1.5)
LIDAR_COMPUTER_IP=192.168.1.5

# Gateway IP address for the lidar subnet
LIDAR_GATEWAY=192.168.1.1

# Full IP address of your Mid-360 lidar
# Common pattern: 192.168.1.1XX where XX = last 2 digits of the serial number
LIDAR_IP=192.168.1.116

# ============================================
# Motor Controller Configuration
# ============================================
# Serial device for the motor controller
# Check with: ls /dev/ttyACM* or ls /dev/ttyUSB*
MOTOR_SERIAL_DEVICE=/dev/ttyACM0

# ============================================
# Network Communication
# ============================================
# Enable WiFi buffer optimization for wireless data transmission
ENABLE_WIFI_BUFFER=false

# ============================================
# Navigation Options
# ============================================
# Enable RViz visualization (set to true for debugging)
USE_RVIZ=false

# Map path for localization mode (leave empty for SLAM/mapping mode)
# Set to a file prefix (no .pcd extension), e.g., /ros2_ws/maps/warehouse
MAP_PATH=

# ============================================
# Device Group IDs
# ============================================
# Group ID for /dev/input devices (joystick)
# Find with: getent group input | cut -d: -f3
INPUT_GID=995

# Group ID for serial devices
# Find with: getent group dialout | cut -d: -f3
DIALOUT_GID=20

docker/Dockerfile

Lines changed: 278 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,278 @@
# syntax=docker/dockerfile:1
# =============================================================================
# Navigation Autonomy Stack Docker Image
# =============================================================================
#
# Multi-stage build for ROS 2 navigation with SLAM support.
# Includes arise_slam and FASTLIO2.
#
# Build:
#   ./docker/build.sh            # Build for ROS 2 Jazzy (default)
#   ./docker/build.sh --humble   # Build for ROS 2 Humble
#
# NOTE: this Dockerfile relies on BuildKit-only features (heredoc RUN bodies,
# RUN --mount), so the `# syntax` directive above pins a frontend that
# supports them; build with BuildKit enabled (default in Docker 23+).
# =============================================================================

# ROS distribution to build against; override with --build-arg ROS_DISTRO=humble
ARG ROS_DISTRO=jazzy
# Automatically set by BuildKit from --platform (e.g. amd64, arm64).
# Declared before the first FROM so it is usable in every FROM line below.
ARG TARGETARCH

# Platform-specific base images:
#   amd64 -> desktop-full (ships rviz2 + GUI tooling),
#   arm64 -> ros-base to keep the image small on embedded boards
#            (rviz2 is added separately where needed).
FROM osrf/ros:${ROS_DISTRO}-desktop-full AS base-amd64
FROM ros:${ROS_DISTRO}-ros-base AS base-arm64
# =============================================================================
# STAGE 1: Build
# =============================================================================
FROM base-${TARGETARCH} AS builder

# Global ARGs must be redeclared inside a stage to be visible in RUN steps.
ARG ROS_DISTRO
# ARG (not ENV): build-time-only knob; must not leak into derived images.
ARG DEBIAN_FRONTEND=noninteractive
ENV ROS_DISTRO=${ROS_DISTRO}
ENV WORKSPACE=/ros2_ws

# Install build toolchain plus the shared-library dependencies required by
# the SLAM packages (PCL, glog/gflags, ATLAS, Eigen, SuiteSparse).
RUN apt-get update && apt-get install -y --no-install-recommends \
    git \
    cmake \
    build-essential \
    python3-colcon-common-extensions \
    libpcl-dev \
    libgoogle-glog-dev \
    libgflags-dev \
    libatlas-base-dev \
    libeigen3-dev \
    libsuitesparse-dev \
    ros-${ROS_DISTRO}-pcl-ros \
    ros-${ROS_DISTRO}-cv-bridge \
    && rm -rf /var/lib/apt/lists/*

# On arm64, ros-base doesn't include rviz2; install it separately so the
# workspace's rviz plugin packages still compile.
ARG TARGETARCH
RUN if [ "${TARGETARCH}" = "arm64" ]; then \
        apt-get update && apt-get install -y --no-install-recommends \
        ros-${ROS_DISTRO}-rviz2 \
        && rm -rf /var/lib/apt/lists/*; \
    fi

# On arm64, build or-tools from source (the pre-built binaries vendored in
# the workspace are x86_64 only). ARG rather than ENV: build-time-only value.
ARG OR_TOOLS_VERSION=9.8
RUN if [ "${TARGETARCH}" = "arm64" ]; then \
        echo "Building or-tools v${OR_TOOLS_VERSION} from source for arm64..." && \
        apt-get update && apt-get install -y --no-install-recommends \
        lsb-release \
        wget \
        && rm -rf /var/lib/apt/lists/* && \
        cd /tmp && \
        wget -q https://github.com/google/or-tools/archive/refs/tags/v${OR_TOOLS_VERSION}.tar.gz && \
        tar xzf v${OR_TOOLS_VERSION}.tar.gz && \
        cd or-tools-${OR_TOOLS_VERSION} && \
        cmake -S . -B build \
            -DCMAKE_BUILD_TYPE=Release \
            -DBUILD_DEPS=ON \
            -DBUILD_SAMPLES=OFF \
            -DBUILD_EXAMPLES=OFF \
            -DBUILD_FLATZINC=OFF \
            -DUSE_SCIP=OFF \
            -DUSE_COINOR=OFF && \
        # cap parallelism at 4 jobs: or-tools link steps can OOM small boards
        cmake --build build --config Release -j$(($(nproc) > 4 ? 4 : $(nproc))) && \
        cmake --install build --prefix /opt/or-tools && \
        rm -rf /tmp/or-tools-${OR_TOOLS_VERSION} /tmp/v${OR_TOOLS_VERSION}.tar.gz; \
    fi

# Create workspace and copy source (COPY is placed after the dependency
# layers above so source edits don't invalidate the apt/or-tools cache).
RUN mkdir -p ${WORKSPACE}/src
COPY src ${WORKSPACE}/src

# On arm64, swap the vendored x86_64 or-tools libs/headers for the arm64 build.
RUN if [ "${TARGETARCH}" = "arm64" ] && [ -d "/opt/or-tools" ]; then \
        echo "Replacing x86_64 or-tools with arm64 build..." && \
        OR_TOOLS_DIR=${WORKSPACE}/src/exploration_planner/tare_planner/or-tools && \
        if [ -d "${OR_TOOLS_DIR}" ]; then \
            rm -rf ${OR_TOOLS_DIR}/lib/*.so* ${OR_TOOLS_DIR}/lib/*.a && \
            cp -r /opt/or-tools/lib/* ${OR_TOOLS_DIR}/lib/ && \
            rm -rf ${OR_TOOLS_DIR}/include && \
            cp -r /opt/or-tools/include ${OR_TOOLS_DIR}/ && \
            ldconfig; \
        fi; \
    fi

# Compatibility shim: Humble ships cv_bridge headers as .h, Jazzy as .hpp;
# symlink so source written for Jazzy builds on Humble too.
RUN if [ "${ROS_DISTRO}" = "humble" ]; then \
        CV_BRIDGE_DIR=$(find /opt/ros/humble/include -name "cv_bridge.h" -printf "%h\n" 2>/dev/null | head -1) && \
        if [ -n "$CV_BRIDGE_DIR" ]; then \
            ln -sf "$CV_BRIDGE_DIR/cv_bridge.h" "$CV_BRIDGE_DIR/cv_bridge.hpp"; \
        fi; \
    fi

# Build and install Livox-SDK2 (lidar vendor SDK); drop the build tree in the
# same layer so it never reaches the image.
RUN cd ${WORKSPACE}/src/utilities/livox_ros_driver2/Livox-SDK2 && \
    mkdir -p build && cd build && \
    cmake .. && make -j$(nproc) && make install && ldconfig && \
    rm -rf ${WORKSPACE}/src/utilities/livox_ros_driver2/Livox-SDK2/build

# Build and install Sophus (Lie-group library used by the SLAM packages)
RUN cd ${WORKSPACE}/src/slam/dependency/Sophus && \
    mkdir -p build && cd build && \
    cmake .. -DBUILD_TESTS=OFF && make -j$(nproc) && make install && \
    rm -rf ${WORKSPACE}/src/slam/dependency/Sophus/build

# Build and install Ceres Solver (nonlinear least squares)
RUN cd ${WORKSPACE}/src/slam/dependency/ceres-solver && \
    mkdir -p build && cd build && \
    cmake .. && make -j$(nproc) && make install && \
    rm -rf ${WORKSPACE}/src/slam/dependency/ceres-solver/build

# Build and install GTSAM; MARCH_NATIVE off so binaries are portable across
# CPUs of the target architecture, system Eigen to match the ROS packages.
RUN cd ${WORKSPACE}/src/slam/dependency/gtsam && \
    mkdir -p build && cd build && \
    cmake .. -DGTSAM_USE_SYSTEM_EIGEN=ON -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF && \
    make -j$(nproc) && make install && ldconfig && \
    rm -rf ${WORKSPACE}/src/slam/dependency/gtsam/build

# Build the ROS 2 workspace itself
RUN /bin/bash -c "source /opt/ros/${ROS_DISTRO}/setup.bash && \
    cd ${WORKSPACE} && \
    colcon build --cmake-args -DCMAKE_BUILD_TYPE=Release"
# =============================================================================
# STAGE 2: Runtime
# =============================================================================
# ROS_DISTRO / TARGETARCH are declared before the first FROM, so they are
# directly usable in this FROM line — no redeclaration needed between stages.
FROM base-${TARGETARCH} AS runtime

ARG ROS_DISTRO
# ARG (not ENV): keeps apt noninteractive during the build without baking
# DEBIAN_FRONTEND into the container's runtime environment.
ARG DEBIAN_FRONTEND=noninteractive
ENV ROS_DISTRO=${ROS_DISTRO}
ENV WORKSPACE=/ros2_ws
ENV RMW_IMPLEMENTATION=rmw_fastrtps_cpp

# Runtime dependencies: ROS packages, SLAM shared libs, X11/GL for rviz2,
# plus small network/serial/joystick diagnostic tools used on the robot.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ros-${ROS_DISTRO}-pcl-ros \
    ros-${ROS_DISTRO}-cv-bridge \
    ros-${ROS_DISTRO}-rviz2 \
    ros-${ROS_DISTRO}-joy \
    ros-${ROS_DISTRO}-rmw-fastrtps-cpp \
    libpcl-dev \
    libgoogle-glog-dev \
    libgflags-dev \
    libatlas-base-dev \
    libeigen3-dev \
    libsuitesparse-dev \
    libx11-6 \
    libxext6 \
    libxrender1 \
    libgl1 \
    libglib2.0-0 \
    iputils-ping \
    net-tools \
    iproute2 \
    usbutils \
    joystick \
    && rm -rf /var/lib/apt/lists/*

# Copy libraries installed by the builder via `make install`
# (Livox-SDK2, Sophus, Ceres, GTSAM)
COPY --from=builder /usr/local/lib /usr/local/lib
COPY --from=builder /usr/local/include /usr/local/include
RUN ldconfig

# Copy the built ROS workspace (install space only — no source, no build dirs)
COPY --from=builder ${WORKSPACE}/install ${WORKSPACE}/install

# Copy rviz config files from source
COPY --from=builder ${WORKSPACE}/src/base_autonomy/vehicle_simulator/rviz ${WORKSPACE}/src/base_autonomy/vehicle_simulator/rviz
COPY --from=builder ${WORKSPACE}/src/route_planner/far_planner/rviz ${WORKSPACE}/src/route_planner/far_planner/rviz
COPY --from=builder ${WORKSPACE}/src/exploration_planner/tare_planner/rviz ${WORKSPACE}/src/exploration_planner/tare_planner/rviz

# Copy lidar config (the entrypoint may regenerate MID360_config.json here)
COPY --from=builder ${WORKSPACE}/src/utilities/livox_ros_driver2/config ${WORKSPACE}/src/utilities/livox_ros_driver2/config

# Copy SLAM config files (arise_slam + FASTLIO2) out of the builder's source
# tree via a bind mount — only the explicit cp's below land in this layer.
# NOTE(review): literal /ros2_ws in the mount flags, not ${WORKSPACE} —
# variable expansion inside --mount options is frontend-version dependent.
RUN --mount=from=builder,source=/ros2_ws/src,target=/tmp/src \
    mkdir -p ${WORKSPACE}/src/slam/arise_slam_mid360 && \
    cp -r /tmp/src/slam/arise_slam_mid360/config ${WORKSPACE}/src/slam/arise_slam_mid360/ 2>/dev/null || true && \
    mkdir -p ${WORKSPACE}/src/slam/FASTLIO2_ROS2 && \
    for pkg in fastlio2 localizer pgo hba; do \
        if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/config" ]; then \
            mkdir -p ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg && \
            cp -r /tmp/src/slam/FASTLIO2_ROS2/$pkg/config ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg/; \
        fi; \
        if [ -d "/tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz" ]; then \
            cp -r /tmp/src/slam/FASTLIO2_ROS2/$pkg/rviz ${WORKSPACE}/src/slam/FASTLIO2_ROS2/$pkg/; \
        fi; \
    done

# Copy system launch scripts
COPY system_real_robot_with_route_planner.sh ${WORKSPACE}/
COPY system_real_robot.sh ${WORKSPACE}/
COPY system_real_robot_with_exploration_planner.sh ${WORKSPACE}/
COPY system_bagfile.sh ${WORKSPACE}/
COPY system_bagfile_with_route_planner.sh ${WORKSPACE}/
COPY system_bagfile_with_exploration_planner.sh ${WORKSPACE}/

# Create directories for maps and logs (mount points for compose volumes)
RUN mkdir -p ${WORKSPACE}/maps ${WORKSPACE}/logs

# Set up shell environment for interactive `docker exec` sessions
RUN echo "source /opt/ros/${ROS_DISTRO}/setup.bash" >> ~/.bashrc && \
    echo "source ${WORKSPACE}/install/setup.bash" >> ~/.bashrc && \
    echo "export RMW_IMPLEMENTATION=rmw_fastrtps_cpp" >> ~/.bashrc

# Entrypoint script (heredoc delimiter is quoted, so ${...} below are
# expanded at container start, not at build time).
# NOTE(review): `ip addr`/`sysctl` inside the script need NET_ADMIN /
# privileged mode at runtime; failures are deliberately swallowed (|| true)
# so the container still starts without those capabilities.
RUN cat > /ros_entrypoint.sh <<'ENTRYPOINT_EOF'
#!/bin/bash
set -e

# Source ROS environment
source /opt/ros/${ROS_DISTRO}/setup.bash
source ${WORKSPACE}/install/setup.bash
export RMW_IMPLEMENTATION=rmw_fastrtps_cpp

# Configure lidar network interface if specified
if [ -n "${LIDAR_INTERFACE}" ] && [ -n "${LIDAR_COMPUTER_IP}" ]; then
    ip addr add ${LIDAR_COMPUTER_IP}/24 dev ${LIDAR_INTERFACE} 2>/dev/null || true
    ip link set ${LIDAR_INTERFACE} up 2>/dev/null || true
fi

# Generate MID360_config.json if lidar IPs are set
if [ -n "${LIDAR_COMPUTER_IP}" ] && [ -n "${LIDAR_IP}" ]; then
    cat > ${WORKSPACE}/src/utilities/livox_ros_driver2/config/MID360_config.json <<EOF
{
  "lidar_summary_info": { "lidar_type": 8 },
  "MID360": {
    "lidar_net_info": {
      "cmd_data_port": 56100, "push_msg_port": 56200,
      "point_data_port": 56300, "imu_data_port": 56400, "log_data_port": 56500
    },
    "host_net_info": {
      "cmd_data_ip": "${LIDAR_COMPUTER_IP}", "cmd_data_port": 56101,
      "push_msg_ip": "${LIDAR_COMPUTER_IP}", "push_msg_port": 56201,
      "point_data_ip": "${LIDAR_COMPUTER_IP}", "point_data_port": 56301,
      "imu_data_ip": "${LIDAR_COMPUTER_IP}", "imu_data_port": 56401,
      "log_data_ip": "${LIDAR_COMPUTER_IP}", "log_data_port": 56501
    }
  },
  "lidar_configs": [{
    "ip": "${LIDAR_IP}",
    "pcl_data_type": 1, "pattern_mode": 0,
    "extrinsic_parameter": { "roll": 0.0, "pitch": 0.0, "yaw": 0.0, "x": 0, "y": 0, "z": 0 }
  }]
}
EOF
    cp ${WORKSPACE}/src/utilities/livox_ros_driver2/config/MID360_config.json \
       ${WORKSPACE}/install/livox_ros_driver2/share/livox_ros_driver2/config/MID360_config.json 2>/dev/null || true
    echo "Generated MID360_config.json (LIDAR_IP=${LIDAR_IP}, COMPUTER_IP=${LIDAR_COMPUTER_IP})"
fi

# WiFi buffer optimization
if [ "${ENABLE_WIFI_BUFFER}" = "true" ]; then
    sysctl -w net.core.rmem_max=67108864 net.core.rmem_default=67108864 2>/dev/null || true
    sysctl -w net.core.wmem_max=67108864 net.core.wmem_default=67108864 2>/dev/null || true
fi

exec "$@"
ENTRYPOINT_EOF
RUN chmod +x /ros_entrypoint.sh

WORKDIR ${WORKSPACE}
ENTRYPOINT ["/ros_entrypoint.sh"]
CMD ["bash"]

0 commit comments

Comments
 (0)