# Base image with ROS Jazzy desktop full.
# Tag is pinned to a distro; consider pinning by digest for full reproducibility.
FROM osrf/ros:jazzy-desktop-full

# Build-time only: suppress interactive apt prompts. Using ARG (not ENV) keeps
# this out of the runtime environment of containers started from this image.
ARG DEBIAN_FRONTEND=noninteractive

# Runtime environment, grouped in one instruction:
#   ROS_DISTRO  - ROS 2 distribution sourced by entrypoint and shells
#   WORKSPACE   - colcon workspace for the autonomy stack
#   DIMOS_PATH  - install location of the dimos repository
ENV ROS_DISTRO=jazzy \
    WORKSPACE=/ros2_ws \
    DIMOS_PATH=/workspace/dimos
| 9 | + |
# Install all system dependencies in a single layer and purge the apt lists in
# the same RUN so the package index never persists in the image.
# Package roles: ROS (ros-jazzy-pcl-ros) · toolchain (build-essential, cmake,
# git, git-lfs, python3-colcon-common-extensions) · PCL/SLAM libs (libpcl-dev,
# libgoogle-glog-dev, libgflags-dev, libatlas-base-dev, libeigen3-dev,
# libsuitesparse-dev) · X11/RViz (x11-apps, xorg, openbox) · networking
# (iputils-ping, net-tools, iproute2, ethtool) · USB/serial (usbutils, udev) ·
# time sync (chrony) · editors (nano, vim) · Python (python3-pip,
# python3-setuptools, python3-venv) · dimos media deps (ffmpeg,
# portaudio19-dev, libsndfile1) · OpenCV (libgl1, libglib2.0-0) ·
# Open3D (libgomp1) · TurboJPEG (libturbojpeg0-dev).
RUN apt-get update && apt-get install -y \
        build-essential \
        chrony \
        cmake \
        ethtool \
        ffmpeg \
        git \
        git-lfs \
        iproute2 \
        iputils-ping \
        libatlas-base-dev \
        libeigen3-dev \
        libgflags-dev \
        libgl1 \
        libglib2.0-0 \
        libgomp1 \
        libgoogle-glog-dev \
        libpcl-dev \
        libsndfile1 \
        libsuitesparse-dev \
        libturbojpeg0-dev \
        nano \
        net-tools \
        openbox \
        portaudio19-dev \
        python3-colcon-common-extensions \
        python3-pip \
        python3-setuptools \
        python3-venv \
        ros-jazzy-pcl-ros \
        udev \
        usbutils \
        vim \
        x11-apps \
        xorg \
    && rm -rf /var/lib/apt/lists/*
| 61 | + |
# Copy the autonomy stack repository (cloned into the build context by build.sh).
# COPY creates the destination path (including parents) and WORKDIR creates
# missing directories, so no separate `RUN mkdir -p` layer is needed.
COPY docker/navigation/ros-navigation-autonomy-stack ${WORKSPACE}/src/ros-navigation-autonomy-stack

# All subsequent workspace build steps run from the colcon workspace root.
WORKDIR ${WORKSPACE}
| 70 | + |
# Set up ROS environment for interactive shells.
# NOTE(review): this appends to root's ~/.bashrc, so it only affects
# interactive bash sessions; the entrypoint script sources ROS explicitly
# and does not depend on this line.
RUN echo "source /opt/ros/${ROS_DISTRO}/setup.bash" >> ~/.bashrc
| 73 | + |
# Build and install all native hardware/SLAM dependencies in one layer.
# Each library is configured, compiled, installed, and then its build tree is
# removed IN THE SAME RUN — object files deleted in a later layer would still
# occupy space in this one, so cleanup must happen here to keep the image slim.
RUN \
    # Livox-SDK2 for the Mid-360 lidar (installs shared libs; ldconfig refreshes cache)
    cd ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/utilities/livox_ros_driver2/Livox-SDK2 && \
    mkdir -p build && cd build && \
    cmake .. && make -j$(nproc) && make install && ldconfig && \
    cd .. && rm -rf build && \
    # Sophus (Lie group library; tests disabled to cut build time)
    cd ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/dependency/Sophus && \
    mkdir -p build && cd build && \
    cmake .. -DBUILD_TESTS=OFF && make -j$(nproc) && make install && \
    cd .. && rm -rf build && \
    # Ceres Solver (nonlinear least squares, used by the SLAM stack)
    cd ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/dependency/ceres-solver && \
    mkdir -p build && cd build && \
    cmake .. && make -j$(nproc) && make install && \
    cd .. && rm -rf build && \
    # GTSAM (factor graphs). System Eigen keeps ABI consistent with the other
    # libs above; -march=native is disabled so binaries stay portable across hosts.
    cd ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/slam/dependency/gtsam && \
    mkdir -p build && cd build && \
    cmake .. -DGTSAM_USE_SYSTEM_EIGEN=ON -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF && \
    make -j$(nproc) && make install && ldconfig && \
    cd .. && rm -rf build
| 93 | + |
# Build the autonomy stack. bash (not the default /bin/sh) is required because
# ROS's setup.bash is a bash script; the underlay must be sourced before colcon runs.
RUN /bin/bash -c "cd ${WORKSPACE} && \
    source /opt/ros/${ROS_DISTRO}/setup.bash && \
    colcon build --symlink-install --cmake-args -DCMAKE_BUILD_TYPE=Release"
| 98 | + |
# Source the workspace overlay in interactive shells (layered on top of the
# base ROS setup appended earlier).
RUN echo "source ${WORKSPACE}/install/setup.bash" >> ~/.bashrc

# Create directory for Unity environment models. Left empty at build time;
# presumably populated at run time (bind mount or download) — TODO confirm.
RUN mkdir -p ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/base_autonomy/vehicle_simulator/mesh/unity
| 104 | + |
# Create a Python virtual environment in /opt (NOT under /workspace/dimos) so
# it survives a host bind mount over ${DIMOS_PATH}: the container always runs
# with its own dependencies, independent of the host checkout. The venv is
# created BEFORE copying sources so this layer stays cached when only
# application code changes.
RUN python3 -m venv /opt/dimos-venv

# Activate the virtual environment in interactive shells
RUN echo "source /opt/dimos-venv/bin/activate" >> ~/.bashrc

# Copy the dimos repository (COPY creates ${DIMOS_PATH} and parents as needed,
# so no separate mkdir layer is required)
COPY . ${DIMOS_PATH}/

# Install Python dependencies for dimos into the venv.
# --no-cache-dir keeps pip's download cache out of the image layer.
WORKDIR ${DIMOS_PATH}
RUN /bin/bash -c "source /opt/dimos-venv/bin/activate && \
    pip install --no-cache-dir --upgrade pip setuptools wheel && \
    pip install --no-cache-dir -e .[cpu,dev] 'mmengine>=0.10.3' 'mmcv>=2.1.0'"
| 122 | + |
# Copy helper scripts with the executable bit set at copy time.
# COPY --chmod (BuildKit) avoids a follow-up `RUN chmod`, which would duplicate
# the files' contents into an extra layer.
COPY --chmod=0755 docker/navigation/run_both.sh /usr/local/bin/run_both.sh
COPY --chmod=0755 docker/navigation/ros_launch_wrapper.py /usr/local/bin/ros_launch_wrapper.py
| 127 | + |
# Set up udev rules for USB devices (motor controller).
# The rule matches a serial device with USB VID 0x0483 / PID 0x5740 and makes
# the tty node world-read/write; `|| true` keeps the build going even if
# usermod has nothing to do (root is typically already in no groups to add).
# NOTE(review): udev rules inside a container only take effect if udev runs in
# the container (or the device is set up on the host) — confirm the runtime
# uses --privileged/udev or applies this rule host-side.
RUN echo 'SUBSYSTEM=="tty", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", MODE="0666", GROUP="dialout"' > /etc/udev/rules.d/99-motor-controller.rules && \
    usermod -a -G dialout root || true
| 131 | + |
# Set up entrypoint script.
# MECHANISM (fragile — do not reformat casually): the script is emitted by one
# single-quoted `echo`. The trailing "\n\" on each line relies on (a) the
# Dockerfile parser joining `\`-continued lines into one command and (b) the
# default /bin/sh (dash) builtin echo expanding the literal "\n" escapes into
# newlines. Because the string is single-quoted, ${...} references are NOT
# expanded at build time; they resolve when the container starts, from the ENV
# values baked into the image (ROS_DISTRO, WORKSPACE) and from `docker run -e`
# settings (HARDWARE_MODE, ENABLE_WIFI_BUFFER, LIDAR_* , ROBOT_CONFIG_PATH).
# WHAT THE SCRIPT DOES at container start:
#   1. marks /workspace/dimos as a git safe.directory (bind mounts change ownership)
#   2. sources ROS, the workspace overlay, and the dimos venv
#   3. defaults ROBOT_CONFIG_PATH to "mechanum_drive"
#   4. if HARDWARE_MODE=true: optionally enlarges net buffers, configures the
#      lidar NIC, and generates MID360_config.json via an embedded heredoc
#   5. `exec "$@"` replaces the shell so the command becomes PID 1
RUN echo '#!/bin/bash\n\
set -e\n\
\n\
git config --global --add safe.directory /workspace/dimos\n\
\n\
# Source ROS setup\n\
source /opt/ros/${ROS_DISTRO}/setup.bash\n\
source ${WORKSPACE}/install/setup.bash\n\
\n\
# Activate Python virtual environment for dimos\n\
source /opt/dimos-venv/bin/activate\n\
\n\
# Export ROBOT_CONFIG_PATH for autonomy stack\n\
export ROBOT_CONFIG_PATH="${ROBOT_CONFIG_PATH:-mechanum_drive}"\n\
\n\
# Hardware-specific configurations\n\
if [ "${HARDWARE_MODE}" = "true" ]; then\n\
    # Set network buffer sizes for WiFi data transmission (if needed)\n\
    if [ "${ENABLE_WIFI_BUFFER}" = "true" ]; then\n\
        sysctl -w net.core.rmem_max=67108864 net.core.rmem_default=67108864 2>/dev/null || true\n\
        sysctl -w net.core.wmem_max=67108864 net.core.wmem_default=67108864 2>/dev/null || true\n\
    fi\n\
    \n\
    # Configure network interface for Mid-360 lidar if specified\n\
    if [ -n "${LIDAR_INTERFACE}" ] && [ -n "${LIDAR_COMPUTER_IP}" ]; then\n\
        ip addr add ${LIDAR_COMPUTER_IP}/24 dev ${LIDAR_INTERFACE} 2>/dev/null || true\n\
        ip link set ${LIDAR_INTERFACE} up 2>/dev/null || true\n\
        if [ -n "${LIDAR_GATEWAY}" ]; then\n\
            ip route add default via ${LIDAR_GATEWAY} dev ${LIDAR_INTERFACE} 2>/dev/null || true\n\
        fi\n\
    fi\n\
    \n\
    # Generate MID360_config.json if LIDAR_COMPUTER_IP and LIDAR_IP are set\n\
    if [ -n "${LIDAR_COMPUTER_IP}" ] && [ -n "${LIDAR_IP}" ]; then\n\
        cat > ${WORKSPACE}/src/ros-navigation-autonomy-stack/src/utilities/livox_ros_driver2/config/MID360_config.json <<EOF\n\
{\n\
    "lidar_summary_info": {\n\
        "lidar_type": 8\n\
    },\n\
    "MID360": {\n\
        "lidar_net_info": {\n\
            "cmd_data_port": 56100,\n\
            "push_msg_port": 56200,\n\
            "point_data_port": 56300,\n\
            "imu_data_port": 56400,\n\
            "log_data_port": 56500\n\
        },\n\
        "host_net_info": {\n\
            "cmd_data_ip": "${LIDAR_COMPUTER_IP}",\n\
            "cmd_data_port": 56101,\n\
            "push_msg_ip": "${LIDAR_COMPUTER_IP}",\n\
            "push_msg_port": 56201,\n\
            "point_data_ip": "${LIDAR_COMPUTER_IP}",\n\
            "point_data_port": 56301,\n\
            "imu_data_ip": "${LIDAR_COMPUTER_IP}",\n\
            "imu_data_port": 56401,\n\
            "log_data_ip": "${LIDAR_COMPUTER_IP}",\n\
            "log_data_port": 56501\n\
        }\n\
    },\n\
    "lidar_configs": [\n\
        {\n\
            "ip": "${LIDAR_IP}",\n\
            "pcl_data_type": 1,\n\
            "pattern_mode": 0,\n\
            "extrinsic_parameter": {\n\
                "roll": 0.0,\n\
                "pitch": 0.0,\n\
                "yaw": 0.0,\n\
                "x": 0,\n\
                "y": 0,\n\
                "z": 0\n\
            }\n\
        }\n\
    ]\n\
}\n\
EOF\n\
    echo "Generated MID360_config.json with LIDAR_COMPUTER_IP=${LIDAR_COMPUTER_IP} and LIDAR_IP=${LIDAR_IP}"\n\
    fi\n\
    \n\
fi\n\
\n\
# Execute the command\n\
exec "$@"' > /ros_entrypoint.sh && \
    chmod +x /ros_entrypoint.sh
| 218 | + |
# Run every container command through the entrypoint so ROS, the workspace
# overlay, the venv, and optional hardware setup are configured first.
# Exec (JSON-array) form avoids an extra /bin/sh wrapper and lets the script's
# final `exec "$@"` hand PID 1 to the user command.
ENTRYPOINT ["/ros_entrypoint.sh"]

# Default command: interactive shell (overridable at `docker run`)
CMD ["bash"]