diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..65d41db --- /dev/null +++ b/Dockerfile @@ -0,0 +1,79 @@ +FROM tiryoh/ros-desktop-vnc:noetic + +ENV DEBCONF_NOWARNINGS=yes +ENV DEBIAN_FRONTEND noninteractive +ENV ROS_PYTHON_VERSION 3 +SHELL ["/bin/bash", "-c"] + +RUN sed -i 's@archive.ubuntu.com@ftp.jaist.ac.jp/pub/Linux@g' /etc/apt/sources.list + +RUN apt-key adv --keyserver 'hkp://keyserver.ubuntu.com:80' --recv-keys F42ED6FBAB17C654 && \ + apt-get update && \ + apt-get upgrade -y && \ + apt-get install --no-install-recommends curl -y && \ + curl -k https://raw.githubusercontent.com/ros/rosdistro/master/ros.key | sudo apt-key add - && \ + apt-get install --no-install-recommends -y \ + build-essential \ + dkms \ + openssh-server \ + ros-noetic-joy ros-noetic-teleop-twist-joy \ + ros-noetic-teleop-twist-keyboard ros-noetic-laser-proc \ + ros-noetic-rgbd-launch ros-noetic-rosserial-arduino \ + ros-noetic-rosserial-python ros-noetic-rosserial-client \ + ros-noetic-rosserial-msgs ros-noetic-amcl ros-noetic-map-server \ + ros-noetic-move-base ros-noetic-urdf ros-noetic-xacro \ + ros-noetic-compressed-image-transport ros-noetic-rqt* ros-noetic-rviz \ + ros-noetic-gmapping ros-noetic-navigation ros-noetic-interactive-markers \ + ros-noetic-dynamixel-sdk \ + ros-noetic-multirobot-map-merge \ + ros-noetic-image-transport \ + ros-noetic-cv-bridge \ + ros-noetic-vision-opencv \ + ros-noetic-gazebo-ros \ + ros-noetic-turtlebot3 \ + ros-noetic-turtlebot3-msgs \ + python3-opencv \ + libopencv-dev \ + ros-noetic-image-proc \ + ros-noetic-robot \ + nano gedit \ + python3-pip \ + python-dev && \ + apt-get -y autoremove && \ + apt-get -y clean && \ + rm -rf /var/lib/apt/lists/* && \ + pip3 install --upgrade pip && \ + pip3 install opencv-contrib-python + + +RUN mkdir /var/run/sshd && \ + echo 'root:ubuntu' | chpasswd && \ + sed -i 's/PermitRootLogin prohibit-password/PermitRootLogin yes/' /etc/ssh/sshd_config && \ + sed 
's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd + +EXPOSE 22 + +RUN mkdir -p /home/ubuntu/catkin_ws/src && \ + /bin/bash -c "source /opt/ros/noetic/setup.bash ; cd /home/ubuntu/catkin_ws/src ; catkin_init_workspace" && \ + /bin/bash -c "source /opt/ros/noetic/setup.bash ; cd /home/ubuntu/catkin_ws && catkin build" && \ + echo "source /opt/ros/noetic/setup.bash" >> ~/.bashrc && \ + echo "source /home/ubuntu/catkin_ws/devel/setup.bash" >> ~/.bashrc && \ + echo "export ROS_PACKAGE_PATH=\${ROS_PACKAGE_PATH}:/home/ubuntu/catkin_ws" >> ~/.bashrc && \ + echo "export ROS_WORKSPACE=/home/ubuntu/catkin_ws" >> ~/.bashrc && \ + echo "alias cm='cd ~/catkin_ws;catkin build'" >> ~/.bashrc && \ + echo "alias cs='cd ~/catkin_ws/src'" >> ~/.bashrc && \ + echo "export TURTLEBOT3_MODEL=burger" >> ~/.bashrc && \ + . ~/.bashrc && \ + cd /home/ubuntu/catkin_ws && \ + catkin build + +RUN cd /home/ubuntu/catkin_ws/src && \ + git clone -b noetic-devel https://github.com/ROBOTIS-GIT/turtlebot3_simulations.git && \ + git clone -b noetic-devel https://github.com/ROBOTIS-GIT/turtlebot3_autorace_2020.git + +COPY ./startup.sh /startup.sh +COPY ./scripts /home/ubuntu/scripts +COPY ./src /home/ubuntu/catkin_ws/src + +RUN cd /home/ubuntu/catkin_ws && \ + catkin build diff --git a/README.md b/README.md new file mode 100644 index 0000000..55ee5b1 --- /dev/null +++ b/README.md @@ -0,0 +1,107 @@ +# Turtlebot3_noetic Dockerイメージ +## Extrinsic Camera Calibration +```bash +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch mode:=calibration +rqt +``` +Execute rqt_reconfigure +```bash +rosrun rqt_reconfigure rqt_reconfigure +``` + +## Lane Detection +Calibration +```bash +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch 
+roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch +roslaunch turtlebot3_autorace_detect detect_lane.launch mode:=calibration +rqt +``` +Start lane detection +```bash +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch +roslaunch turtlebot3_autorace_detect detect_lane.launch +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch +``` +## Traffic Sign Detection +```bash +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch +roslaunch turtlebot3_teleop turtlebot3_teleop_key.launch +rqt_image_view +``` +SELECT_MISSION intersection, construction, parking, level_crossing, tunnel +```bash +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch +roslaunch turtlebot3_autorace_detect detect_sign.launch mission:=SELECT_MISSION +rqt_image_view +``` + + + +# Management + +本レポジトリでは、GitHub flowに従います. +clone, push, pull, mergeの流れを説明しておきます。 + +## 1. Clone +本レポジトリは、/home/kbknにcloneしてください。 + +```bash +cd /home/kbkn +git clone http://192.168.100.210/git/ikko/turtlebot3_noetic.git +``` + +## 2. Branchを作成 +masterブランチ直下に作業内容がわかるような、任意の名前のブランチを作成してください。 +尚、以下のには、任意のブランチ名が入ります。 + +```bash +git checkout master +git checkout -b +``` +## 3. Coding +開発してください。その後、コメントをつけておきます。 +```bash +git add . +git commit -m "fix bug" +``` + + +## 4. Push +自分で作成したブランチにPushしてください。 + +```bash +git push origin +``` +## 5. Pull Request + +https://192.168.100.210/gitbucket/ + +Gitbucket上で、Pull Requestを作成してください。 + +Pull Requestの結果、 + +許可されたら(リーダが、masterブランチへmergeします。) +もし、開発が終了していなければ、再び手順3. へ。 +開発が終了していれば、ブランチを削除してください。 + +```bash +git checkout master +git branch -d +git push --delete origin +``` + +許可されなかったら(GitHub上のPullRequestに、いちゃもんメッセージが付けられます。) +手順3. へ。頑張ってください。 + +## 6. Pull +手順5. 
が終わっている方のみ、Pullしてください。 + +```bash +git pull origin master +``` +以上が、GitHub-flowの手順です。参考リンクにわかりやすいフローチャートがあります。ぜひ見てください。 diff --git a/build.sh b/build.sh new file mode 100755 index 0000000..950d489 --- /dev/null +++ b/build.sh @@ -0,0 +1,3 @@ +#!/bin/bash +docker image prune +docker build --rm -t turtlebot3_noetic . diff --git a/commit.sh b/commit.sh new file mode 100755 index 0000000..fb410da --- /dev/null +++ b/commit.sh @@ -0,0 +1 @@ +docker commit $(docker ps -q) turtlebot3_noetic diff --git a/run.sh b/run.sh new file mode 100755 index 0000000..4af57d7 --- /dev/null +++ b/run.sh @@ -0,0 +1 @@ +docker run --rm -p 6080:80 -p 2222:22 --shm-size=512m -e HOME=/home/ubuntu -e QT_X11_NO_MITSHM=1 -e RESOLUTION=1920x1080 -e SHELL=/bin/bash --entrypoint '/startup.sh' turtlebot3_noetic diff --git a/scripts/autorace2020.sh b/scripts/autorace2020.sh new file mode 100755 index 0000000..5f4cee4 --- /dev/null +++ b/scripts/autorace2020.sh @@ -0,0 +1,28 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.8" y_pos:="-1.747" z_pos:="0" yaw:="0" & +sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +#sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +#roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch mode:=action & +#sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch & +roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=traffic_light & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=intersection & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=construction & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=parking & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=level_crossing & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=tunnel & + +rqt_image_view & +read -p 
"Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 3" & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & diff --git a/scripts/autorace2020_all.sh b/scripts/autorace2020_all.sh new file mode 100755 index 0000000..a5facd1 --- /dev/null +++ b/scripts/autorace2020_all.sh @@ -0,0 +1,29 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.8" y_pos:="-1.747" z_pos:="0" yaw:="0" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 + +#roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch mode:=action & +#sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core_all.launch & + +roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=all & + +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=intersection & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=construction & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=parking & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=level_crossing & +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=tunnel & + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 3" & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & diff --git a/scripts/autorace2020_construction.sh b/scripts/autorace2020_construction.sh new file mode 100755 index 0000000..2fb647b --- /dev/null +++ b/scripts/autorace2020_construction.sh @@ -0,0 +1,25 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.246" y_pos:="-0.1145" z_pos:="0" yaw:="1.57" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & + +#roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +#roslaunch turtlebot3_autorace_detect detect_sign.launch mission:=construction & +#roslaunch turtlebot3_autorace_detect detect_lane.launch mode:=action & +#roslaunch turtlebot3_autorace_detect detect_construction.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_moving.launch & + +roslaunch turtlebot3_autorace_core turtlebot3_autorace_construction.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_construction_lane.sh b/scripts/autorace2020_construction_lane.sh new file mode 100755 index 0000000..e7aacb4 --- /dev/null +++ b/scripts/autorace2020_construction_lane.sh @@ -0,0 +1,26 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.246" y_pos:="-0.1145" z_pos:="0" yaw:="1.57" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & + +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=construction & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_sign.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +roslaunch turtlebot3_autorace_detect detect_construction.launch & +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_moving.launch & + + + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_intersection.sh b/scripts/autorace2020_intersection.sh new file mode 100755 index 0000000..f39e1ac --- /dev/null +++ b/scripts/autorace2020_intersection.sh @@ -0,0 +1,20 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="1.7518" y_pos:="-1.642" z_pos:="0" yaw:="1.57" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_intersection.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & +#sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & diff --git a/scripts/autorace2020_intersection_lane.sh b/scripts/autorace2020_intersection_lane.sh new file mode 100755 index 0000000..720bfa1 --- /dev/null +++ b/scripts/autorace2020_intersection_lane.sh @@ -0,0 +1,26 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="1.7518" y_pos:="-1.642" z_pos:="0" yaw:="1.57" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=intersection & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_sign.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +roslaunch turtlebot3_autorace_detect detect_intersection.launch & +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +#roslaunch turtlebot3_autorace_driving 
turtlebot3_autorace_control_moving.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & +#sleep 3 +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & diff --git a/scripts/autorace2020_level_crossing.sh b/scripts/autorace2020_level_crossing.sh new file mode 100755 index 0000000..c38193e --- /dev/null +++ b/scripts/autorace2020_level_crossing.sh @@ -0,0 +1,19 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="-1.1821" y_pos:="1.756" z_pos:="0" yaw:="3.14" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 + +roslaunch turtlebot3_autorace_core turtlebot3_autorace_level_crossing.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_level_crossing_lane.sh b/scripts/autorace2020_level_crossing_lane.sh new file mode 100755 index 0000000..c0f3512 --- /dev/null +++ b/scripts/autorace2020_level_crossing_lane.sh @@ -0,0 +1,25 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="-1.1821" y_pos:="1.756" z_pos:="0" yaw:="3.14" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 + +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=level_crossing & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_sign.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +roslaunch turtlebot3_autorace_detect detect_level_crossing.launch & 
+roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_moving.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_parking.sh b/scripts/autorace2020_parking.sh new file mode 100755 index 0000000..629593f --- /dev/null +++ b/scripts/autorace2020_parking.sh @@ -0,0 +1,20 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="1.6464" y_pos:="1.756" z_pos:="0" yaw:="3.14" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & + +sleep 3 + +roslaunch turtlebot3_autorace_core turtlebot3_autorace_parking.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_parking_lane.sh b/scripts/autorace2020_parking_lane.sh new file mode 100755 index 0000000..d6b5fe6 --- /dev/null +++ b/scripts/autorace2020_parking_lane.sh @@ -0,0 +1,26 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="1.6464" y_pos:="1.756" z_pos:="0" yaw:="3.14" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & + +sleep 3 + +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=parking & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_sign.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & 
+roslaunch turtlebot3_autorace_detect detect_parking.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_moving.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_traffic_light.sh b/scripts/autorace2020_traffic_light.sh new file mode 100755 index 0000000..9be7ecf --- /dev/null +++ b/scripts/autorace2020_traffic_light.sh @@ -0,0 +1,23 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.8" y_pos:="-1.747" z_pos:="0" yaw:="0" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 + +#roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +#roslaunch turtlebot3_autorace_detect detect_lane.launch mode:=action & +#roslaunch turtlebot3_autorace_detect detect_traffic_light.launch mode:=action & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +roslaunch turtlebot3_autorace_core turtlebot3_autorace_traffic_light.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 3" & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & diff --git a/scripts/autorace2020_traffic_light_lane.sh b/scripts/autorace2020_traffic_light_lane.sh new file mode 100755 index 0000000..5868d4a --- /dev/null +++ b/scripts/autorace2020_traffic_light_lane.sh @@ -0,0 +1,18 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="0.8" y_pos:="-1.747" z_pos:="0" yaw:="0" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +#roslaunch turtlebot3_autorace_detect detect_traffic_light.launch & +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +rqt_image_view & +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +read -p "Press [Enter] key to start." +echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 3" & diff --git a/scripts/autorace2020_tunnel.sh b/scripts/autorace2020_tunnel.sh new file mode 100755 index 0000000..f19f56a --- /dev/null +++ b/scripts/autorace2020_tunnel.sh @@ -0,0 +1,19 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="-0.7" y_pos:="1.24" z_pos:="0" yaw:="0" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 + +roslaunch turtlebot3_autorace_core turtlebot3_autorace_tunnel.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/autorace2020_tunnel_lane.sh b/scripts/autorace2020_tunnel_lane.sh new file mode 100755 index 0000000..4bb9048 --- /dev/null +++ b/scripts/autorace2020_tunnel_lane.sh @@ -0,0 +1,26 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch x_pos:="-0.7" y_pos:="1.24" z_pos:="0" yaw:="0" & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 + +#roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch mission:=tunnel & +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch & +roslaunch turtlebot3_autorace_detect detect_sign.launch & +roslaunch turtlebot3_autorace_detect detect_lane.launch & +roslaunch turtlebot3_autorace_detect detect_tunnel.launch & +roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_lane.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_tunnel.launch & +#roslaunch turtlebot3_autorace_driving turtlebot3_autorace_control_moving.launch & + +rqt_image_view & +read -p "Press [Enter] key to start." 
+echo " 3" +sleep 1 +echo " 2" +sleep 1 +echo " 1" +sleep 1 +echo " 0 Start " +rostopic pub -1 /core/decided_mode std_msgs/UInt8 "data: 2" & diff --git a/scripts/collisionavoidance.sh b/scripts/collisionavoidance.sh new file mode 100755 index 0000000..d97473a --- /dev/null +++ b/scripts/collisionavoidance.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_simulation.launch diff --git a/scripts/copy4machinelearning.sh b/scripts/copy4machinelearning.sh new file mode 100755 index 0000000..da0a52b --- /dev/null +++ b/scripts/copy4machinelearning.sh @@ -0,0 +1 @@ +cp turtlebot3_burger.gazebo.xacro ~/catkin_ws/src/turtlebot3/turtlebot3_description/urdf/turtlebot3_burger.gazebo.xacro diff --git a/scripts/fakenode.sh b/scripts/fakenode.sh new file mode 100755 index 0000000..dfc4b1e --- /dev/null +++ b/scripts/fakenode.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_fake turtlebot3_fake.launch diff --git a/scripts/killros.sh b/scripts/killros.sh new file mode 100755 index 0000000..a945732 --- /dev/null +++ b/scripts/killros.sh @@ -0,0 +1 @@ +pkill -9 -f ros diff --git a/scripts/launchautorace.sh b/scripts/launchautorace.sh new file mode 100755 index 0000000..6f47ee5 --- /dev/null +++ b/scripts/launchautorace.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace.launch diff --git a/scripts/launchautoracecore.sh b/scripts/launchautoracecore.sh new file mode 100755 index 0000000..4f5d1dc --- /dev/null +++ b/scripts/launchautoracecore.sh @@ -0,0 +1,4 @@ +export AUTO_EX_CALIB=action +export AUTO_DT_CALIB=action +export TURTLEBOT3_MODEL=burger +roslaunch turtlebot3_autorace_core turtlebot3_autorace_core.launch diff --git a/scripts/launchautoracemission.sh b/scripts/launchautoracemission.sh new file mode 100755 index 0000000..9dbee3d --- /dev/null +++ b/scripts/launchautoracemission.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_mission.launch diff --git a/scripts/launchemptyworld.sh b/scripts/launchemptyworld.sh new file mode 100755 index 
0000000..aea3388 --- /dev/null +++ b/scripts/launchemptyworld.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_empty_world.launch diff --git a/scripts/launchrviz2.sh b/scripts/launchrviz2.sh new file mode 100755 index 0000000..b8f9bdf --- /dev/null +++ b/scripts/launchrviz2.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_gazebo_rviz.launch diff --git a/scripts/launchturtleboteach.sh b/scripts/launchturtleboteach.sh new file mode 100755 index 0000000..bb7af67 --- /dev/null +++ b/scripts/launchturtleboteach.sh @@ -0,0 +1,3 @@ +ROS_NAMESPACE=tb3_0 roslaunch turtlebot3_slam turtlebot3_gmapping.launch set_base_frame:=tb3_0/base_footprint set_odom_frame:=tb3_0/odom set_map_frame:=tb3_0/map +ROS_NAMESPACE=tb3_1 roslaunch turtlebot3_slam turtlebot3_gmapping.launch set_base_frame:=tb3_1/base_footprint set_odom_frame:=tb3_1/odom set_map_frame:=tb3_1/map +ROS_NAMESPACE=tb3_2 roslaunch turtlebot3_slam turtlebot3_gmapping.launch set_base_frame:=tb3_2/base_footprint set_odom_frame:=tb3_2/odom set_map_frame:=tb3_2/map diff --git a/scripts/launchturtlebothouse.sh b/scripts/launchturtlebothouse.sh new file mode 100755 index 0000000..45df2ee --- /dev/null +++ b/scripts/launchturtlebothouse.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_house.launch diff --git a/scripts/launchturtlebotworld.sh b/scripts/launchturtlebotworld.sh new file mode 100755 index 0000000..4a56175 --- /dev/null +++ b/scripts/launchturtlebotworld.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo turtlebot3_world.launch diff --git a/scripts/multimapmerge.sh b/scripts/multimapmerge.sh new file mode 100755 index 0000000..c83df92 --- /dev/null +++ b/scripts/multimapmerge.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_gazebo multi_map_merge.launch diff --git a/scripts/multirunslam.sh b/scripts/multirunslam.sh new file mode 100755 index 0000000..e4c63a2 --- /dev/null +++ b/scripts/multirunslam.sh @@ -0,0 +1,4 @@ +ROS_NAMESPACE=tb3_0 roslaunch turtlebot3_slam turtlebot3_gmapping.launch 
set_base_frame:=tb3_0/base_footprint set_odom_frame:=tb3_0/odom set_map_frame:=tb3_0/map +ROS_NAMESPACE=tb3_1 roslaunch turtlebot3_slam turtlebot3_gmapping.launch set_base_frame:=tb3_1/base_footprint set_odom_frame:=tb3_1/odom set_map_frame:=tb3_1/map +ROS_NAMESPACE=tb3_2 roslaunch turtlebot3_slam turtlebot3_gmapping.launch set_base_frame:=tb3_2/base_footprint set_odom_frame:=tb3_2/odom set_map_frame:=tb3_2/map + diff --git a/scripts/multirviz.sh b/scripts/multirviz.sh new file mode 100755 index 0000000..0e90b98 --- /dev/null +++ b/scripts/multirviz.sh @@ -0,0 +1 @@ +rosrun rviz rviz -d `rospack find turtlebot3_gazebo`/rviz/multi_turtlebot3_slam.rviz diff --git a/scripts/multiteleop0.sh b/scripts/multiteleop0.sh new file mode 100755 index 0000000..ff485a9 --- /dev/null +++ b/scripts/multiteleop0.sh @@ -0,0 +1 @@ +ROS_NAMESPACE=tb3_0 rosrun turtlebot3_teleop turtlebot3_teleop_key diff --git a/scripts/multiteleop1.sh b/scripts/multiteleop1.sh new file mode 100755 index 0000000..2a1803c --- /dev/null +++ b/scripts/multiteleop1.sh @@ -0,0 +1 @@ +ROS_NAMESPACE=tb3_1 rosrun turtlebot3_teleop turtlebot3_teleop_key diff --git a/scripts/multiteleop2.sh b/scripts/multiteleop2.sh new file mode 100755 index 0000000..9e5d2e1 --- /dev/null +++ b/scripts/multiteleop2.sh @@ -0,0 +1 @@ +ROS_NAMESPACE=tb3_2 rosrun turtlebot3_teleop turtlebot3_teleop_key diff --git a/scripts/multitourtlebot.sh b/scripts/multitourtlebot.sh new file mode 100755 index 0000000..d1da3c9 --- /dev/null +++ b/scripts/multitourtlebot.sh @@ -0,0 +1,2 @@ +roslaunch turtlebot3_gazebo multi_turtlebot3.launch + diff --git a/scripts/runnavigationnode.sh b/scripts/runnavigationnode.sh new file mode 100755 index 0000000..0dad7ad --- /dev/null +++ b/scripts/runnavigationnode.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_navigation turtlebot3_navigation.launch map_file:=$HOME/map.yaml diff --git a/scripts/runslam.sh b/scripts/runslam.sh new file mode 100755 index 0000000..5585e7d --- /dev/null +++ b/scripts/runslam.sh @@ -0,0 
+1 @@ +roslaunch turtlebot3_slam turtlebot3_slam.launch slam_methods:=gmapping diff --git a/scripts/savemap.sh b/scripts/savemap.sh new file mode 100755 index 0000000..9d114c8 --- /dev/null +++ b/scripts/savemap.sh @@ -0,0 +1 @@ +rosrun map_server map_saver -f ~/map diff --git a/scripts/telop.sh b/scripts/telop.sh new file mode 100755 index 0000000..7570ccc --- /dev/null +++ b/scripts/telop.sh @@ -0,0 +1 @@ +roslaunch turtlebot3_teleop turtlebot3_teleop_key.launch diff --git a/scripts/traffic_light.sh b/scripts/traffic_light.sh new file mode 100755 index 0000000..cee4e6e --- /dev/null +++ b/scripts/traffic_light.sh @@ -0,0 +1,15 @@ +roslaunch turtlebot3_gazebo turtlebot3_autorace_2020.launch & +sleep 3 +roslaunch turtlebot3_autorace_camera intrinsic_camera_calibration.launch & +sleep 3 +roslaunch turtlebot3_autorace_camera extrinsic_camera_calibration.launch mode:=calibration & +sleep 3 +roslaunch turtlebot3_autorace_detect detect_traffic_light.launch & +sleep 3 +roslaunch turtlebot3_autorace_core turtlebot3_autorace_mission.launch & +sleep 3 +rqt & +sleep 3 +rqt_image_view & +./telop.sh + diff --git a/scripts/turtlebot3_burger.gazebo.xacro b/scripts/turtlebot3_burger.gazebo.xacro new file mode 100644 index 0000000..4dd8f43 --- /dev/null +++ b/scripts/turtlebot3_burger.gazebo.xacro @@ -0,0 +1,135 @@ + + + + + + + Gazebo/DarkGrey + + + + 0.1 + 0.1 + 500000.0 + 10.0 + 0.001 + 0.1 + 1 0 0 + Gazebo/FlatBlack + + + + 0.1 + 0.1 + 500000.0 + 10.0 + 0.001 + 0.1 + 1 0 0 + Gazebo/FlatBlack + + + + 0.1 + 0.1 + 1000000.0 + 100.0 + 0.001 + 1.0 + Gazebo/FlatBlack + + + + + true + $(arg imu_visual) + + Gazebo/FlatBlack + + + + + cmd_vel + odom + odom + world + true + base_footprint + false + true + true + false + 30 + wheel_left_joint + wheel_right_joint + 0.160 + 0.066 + 1 + 10 + na + + + + + + true + imu_link + imu_link + imu + imu_service + 0.0 + 0 + + + gaussian + + 0.0 + 2e-4 + 0.0000075 + 0.0000008 + + + 0.0 + 1.7e-2 + 0.1 + 0.001 + + + + + + + + Gazebo/FlatBlack + + 0 0 0 
0 0 0 + $(arg laser_visual) + 5 + + + + 24 + 1 + 0.0 + 6.28319 + + + + 0.120 + 3.5 + 0.015 + + + gaussian + 0.0 + 0.01 + + + + scan + base_scan + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_camera.taz b/src/turtlebot3_autorace_2020/turtlebot3_autorace_camera.taz new file mode 100644 index 0000000..a1ee4ba --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_camera.taz Binary files differ diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_construction.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_construction.launch new file mode 100755 index 0000000..2a96b05 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_construction.launch @@ -0,0 +1,5 @@ + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_core.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_core.launch new file mode 100755 index 0000000..a3a4575 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_core.launch @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_intersection.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_intersection.launch new file mode 100755 index 0000000..d39dd73 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_intersection.launch @@ -0,0 +1,5 @@ + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_level_crossing.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_level_crossing.launch new file mode 100755 index 0000000..f697fd8 --- /dev/null +++ 
b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_level_crossing.launch @@ -0,0 +1,5 @@ + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_mission.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_mission.launch new file mode 100644 index 0000000..bc69a4f --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_mission.launch @@ -0,0 +1,3 @@ + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_parking.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_parking.launch new file mode 100755 index 0000000..5529c60 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_parking.launch @@ -0,0 +1,5 @@ + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_traffic_light.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_traffic_light.launch new file mode 100755 index 0000000..a3a4575 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_traffic_light.launch @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_tunnel.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_tunnel.launch new file mode 100755 index 0000000..6cd118e --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/launch/turtlebot3_autorace_tunnel.launch @@ -0,0 +1,5 @@ + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_mode_decider b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_mode_decider new file mode 100755 index 0000000..6f16472 --- /dev/null +++ 
b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_mode_decider @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +from enum import Enum +from std_msgs.msg import UInt8 +from turtlebot3_autorace_msgs.msg import MovingParam + +class CoreModeDecider(): + def __init__(self): + # subscribes : invoking object detected + self.sub_traffic_light = rospy.Subscriber('/detect/traffic_light', UInt8, self.cbInvokedByTrafficLight, queue_size=1) + self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1) + self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1) + # publishes : decided mode + self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1) + self.InvokedObject = Enum('InvokedObject', 'traffic_light traffic_sign') + self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking tunnel') + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light intersection construction parking_lot level_crossing tunnel') + self.fnInitMode() + # Invoke if 
traffic light is detected + def cbInvokedByTrafficLight(self, invoke_type_msg): + rospy.loginfo("Invoke Traffic light detected") + self.fnDecideMode(self.InvokedObject.traffic_light.value, invoke_type_msg) + def cbInvokedByTrafficSign(self, traffic_sign_msg): + rospy.loginfo("Invoke Traffic sign detected") + self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_msg) + def cbReturnedMode(self, mode): + rospy.loginfo("Init Mode") + self.fnInitMode() + def fnInitMode(self): # starts only when the program is started initially or any mission is completed + self.current_mode = self.CurrentMode.lane_following.value + self.fnPublishMode() + def fnDecideMode(self, invoked_object, msg_data): # starts only when the traffic sign / traffic light is detected & current_mode is lane_following + rospy.loginfo("mode --all_core_mode_decider-----------------------------") + rospy.loginfo("mode current_mode=%d",self.current_mode) + rospy.loginfo("mode invoked_object=%d",invoked_object) + rospy.loginfo("mode msg_data=%d",msg_data.data) + rospy.loginfo("mode-------------------------------") + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("mode current_mode %d",self.current_mode) + if invoked_object == self.InvokedObject.traffic_light.value: # Traffic Light detected + rospy.loginfo("mode invoked object %d",invoked_object) + if msg_data.data == self.CurrentMode.intersection.value: + rospy.loginfo("mode detect traffic_light and chenge mode") + self.current_mode = self.CurrentMode.intersection.value + elif invoked_object == self.InvokedObject.traffic_sign.value: # Any Sign detected + rospy.loginfo("mode Any Sign detected") + if msg_data.data == self.TrafficSign.intersection.value: + rospy.loginfo("detect sign : intersection") + self.current_mode = self.CurrentMode.intersection.value + elif msg_data.data == self.TrafficSign.left.value: + rospy.loginfo("detect sign : intersection left") + self.current_mode = self.CurrentMode.intersection.value + 
elif msg_data.data == self.TrafficSign.right.value: + rospy.loginfo("detect sign : inteersection right") + self.current_mode = self.CurrentMode.intersection.value + elif msg_data.data == self.TrafficSign.construction.value: + rospy.loginfo("detect sign : construction") + self.current_mode = self.CurrentMode.construction.value + elif msg_data.data == self.TrafficSign.parking_lot.value: + rospy.loginfo("detect sign : parking_log") + self.current_mode = self.CurrentMode.parking_lot.value + elif msg_data.data == self.TrafficSign.level_crossing.value: + rospy.loginfo("detect sign : level_crossing") + self.current_mode = self.CurrentMode.level_crossing.value + elif msg_data.data == self.TrafficSign.tunnel.value: + rospy.loginfo("detect sign : tunnel") + self.current_mode = self.CurrentMode.tunnel.value + else: + pass + elif self.current_mode == self.CurrentMode.traffic_light.value: + if invoked_object == self.InvokedObject.traffic_light.value: # Traffic Light detected + rospy.loginfo("traffic_light to lane_following") + self.current_mode = self.CurrentMode.lane_follwoing.value + else: + pass + else: + pass + self.fnPublishMode() + + def fnPublishMode(self): + decided_mode = UInt8() + decided_mode.data = self.current_mode + self.pub_decided_mode.publish(decided_mode) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_mode_decider') + node = CoreModeDecider() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_node_controller b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_node_controller new file mode 100755 index 0000000..ead94f1 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/all_core_node_controller @@ -0,0 +1,720 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + # subscribes : status returned + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + self.sub_intersection_stamped = rospy.Subscriber('/detect/intersection_stamped', UInt8, self.cbIntersectionStamped, queue_size=1) + self.sub_construction_stamped = rospy.Subscriber('/detect/construction_stamped', UInt8, self.cbconstructionStamped, queue_size=1) + self.sub_parking_stamped = rospy.Subscriber('/detect/parking_stamped', UInt8, self.cbParkingStamped, queue_size=1) + self.sub_level_crossing_stamped = rospy.Subscriber('/detect/level_crossing_stamped', UInt8, self.cbLevelCrossingStamped, queue_size=1) + self.sub_tunnel_stamped = rospy.Subscriber('/detect/tunnel_stamped', UInt8, self.cbTunnelStamped, queue_size=1) + + # publishes orders + self.pub_traffic_light_order = rospy.Publisher('/detect/traffic_light_order', UInt8, queue_size=1) + 
self.pub_intersection_order = rospy.Publisher('/detect/intersection_order', UInt8, queue_size=1) + self.pub_parking_order = rospy.Publisher('/detect/parking_order', UInt8, queue_size=1) + self.pub_level_crossing_order = rospy.Publisher('/detect/level_crossing_order', UInt8, queue_size=1) + self.pub_tunnel_order = rospy.Publisher('/detect/tunnel_order', UInt8, queue_size=1) + + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light intersection construction parking_lot level_crossing tunnel') + self.current_mode = self.CurrentMode.idle.value + + self.InvokedObject = Enum('InvokedObject', 'traffic_light traffic_sign') + + self.StepOfTrafficLight = Enum('StepOfTrafficLight', 'searching_traffic_light in_traffic_light') + self.StepOfIntersection = Enum('StepOfIntersection', 'detect_direction exit') + self.StepOfConstruction = Enum('StepOfConstruction', 'find_obstacle avoid_obstacle exit') + self.StepOfParking = Enum('StepOfParking', 'parking exit') + self.StepOfTunnel = Enum('StepOfTunnel', 'searching_tunnel_sign go_in_to_tunnel navigation go_out_from_tunnel exit') + + self.current_step_traffic_light = self.StepOfTrafficLight.searching_traffic_light.value + self.current_step_intersection = self.StepOfIntersection.detect_direction.value + self.current_step_construction = self.StepOfConstruction.find_obstacle.value + self.current_step_parking = self.StepOfParking.parking.value + self.current_step_tunnel = self.StepOfTunnel.searching_tunnel_sign.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_lane launch_detect_traffic_light launch_control_lane launch_detect_sign_intersection launch_detect_sign_construction launch_detect_sign_parking launch_detect_sign_level_crossing launch_detect_sign_tunnel launch_detect_intersection launch_control_moving launch_detect_parking launch_detect_construction launch_detect_level launch_detect_tunnel 
launch_control_tunnel') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_sign_intersection_launched = False + self.launch_detect_sign_construction_launched = False + self.launch_detect_sign_parking_launched = False + self.launch_detect_sign_level_crossing_launched = False + self.launch_detect_sign_tunnel_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_traffic_light_launched = False + self.launch_detect_intersection_launched = False + self.launch_detect_construction_launched = False + self.launch_detect_parking = False + self.launch_detect_level_launched = False + self.launch_control_moving_launched = False + self.launch_control_lane_launched = False + self.launch_detect_tunnel_launched = False + self.launch_control_tunnel_launched = False + + self.is_triggered = False + self.is_go = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, mode_msg): + rospy.loginfo("node starts the progress with %d", mode_msg.data) + self.current_mode = mode_msg.data + self.is_triggered = True + + def cbIntersectionStamped(self, intersection_msg): + rospy.loginfo("intersection Step changed from %d", self.current_step_intersection) + self.current_step_intersection = intersection_msg.data + + if self.current_step_intersection == self.StepOfIntersection.exit.value: + self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def cbconstructionStamped(self, construction_msg): + rospy.loginfo("construction Step changed from %d", self.current_step_construction) + self.current_step_construction = construction_msg.data + + if self.current_step_construction == self.StepOfConstruction.exit.value: + 
self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + # Which step is in Tunnel + def cbTunnelStamped(self, tunnel_msg): + rospy.loginfo("Tunnel Step changed from %d", self.current_step_tunnel) + + self.current_step_tunnel = tunnel_msg.data + + rospy.loginfo("into %d", self.current_step_tunnel) + + if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value: + self.current_mode = self.CurrentMode.tunnel.value + msg_mode_return = UInt8 + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def cbParkingStamped(self, parking_msg): + rospy.loginfo("Parking Step changed from %d", self.current_step_parking) + self.current_step_parking = parking_msg.data + + if self.current_step_parking == self.StepOfParking.exit.value: + self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def cbLevelCrossingStamped(self, level_crossing_msg): + rospy.loginfo("LevelCrossing Step changed from %d", self.current_step_level_crossing) + + self.current_step_level_crossing = level_crossing_msg.data + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + self.current_mode = self.CurrentMode.level_crossing.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def fnControlNode(self): + # lane_following + rospy.loginfo("node all_core_node_controller current_mode=%d",self.current_mode) + rospy.loginfo("node Current step traffic_light=%d",self.current_step_traffic_light) + rospy.loginfo("node Current step construction=%d",self.current_step_construction) + rospy.loginfo("node 
Current step parking=%d",self.current_step_parking) + rospy.loginfo("node Current step tunnel=%d",self.current_step_tunnel) + + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("New trigger for lane_following") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_intersection.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_construction.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_parking.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_level_crossing.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_traffic_light.value, True) + self.fnLaunch(self.Launcher.launch_detect_intersection.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + + # traffic_light + elif self.current_mode == self.CurrentMode.traffic_light.value: + rospy.loginfo("New trigger for traffic_light") + msg_pub_traffic_light_order = UInt8() + if self.current_step_traffic_light == self.StepOfTrafficLight.searching_traffic_light.value: + rospy.loginfo("Current step : searching_traffic_light") + rospy.loginfo("Go to next step : in_traffic_light %d",self.is_go) + + msg_pub_traffic_light_order.data = self.StepOfTrafficLight.in_traffic_light.value + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_traffic_light.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_intersection.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_construction.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_parking.value, False) + self.fnLaunch(self.Launcher.launch_detect_sign_level_crossing.value, False) + 
self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_intersection.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + if self.is_go == False: + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + else: + rospy.loginfo("---------------------------------------------------") + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + rospy.sleep(2) + #self.current_mode = self.CurrentMode.lane_following.value + self.current_mode = self.CurrentMode.intersection.value + #self.current_step_traffic_light = self.StepOfTrafficLight.in_traffic_light.value + self.current_step_intersection = self.StepOfIntersection.detect_direction.value + rospy.sleep(2) + self.current_mode = self.CurrentMode.intersection.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + rospy.sleep(2) + self.pub_traffic_light_order.publish(msg_pub_traffic_light_order) + + # intersection + elif self.current_mode == self.CurrentMode.intersection.value: + rospy.loginfo("New trigger for intersection") + msg_pub_intersection_order = UInt8() + + if self.current_step_intersection == self.StepOfIntersection.detect_direction.value: + rospy.loginfo("Current step : searching_intersection_sign") + rospy.loginfo("Go to next step : exit") + + msg_pub_intersection_order.data = self.StepOfIntersection.detect_direction.value + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_intersection.value, True) + self.fnLaunch(self.Launcher.launch_detect_intersection.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_intersection == self.StepOfIntersection.exit.value: + rospy.loginfo("Current step : exit") + + msg_pub_intersection_order.data = 
self.StepOfIntersection.exit.value + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_intersection.value, True) + self.fnLaunch(self.Launcher.launch_detect_intersection.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(3) + self.pub_intersection_order.publish(msg_pub_intersection_order) + + # construction + elif self.current_mode == self.CurrentMode.construction.value: + rospy.loginfo("New trigger for construction") + msg_pub_construction_order = UInt8() + + if self.current_step_construction == self.StepOfConstruction.find_obstacle.value: + rospy.loginfo("Current step : find_obstacle") + rospy.loginfo("Go to next setp : avoid_obstacle") + + msg_pub_construction_order.data = self.StepOfConstruction.find_obstacle.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_construction.value, False) + self.fnLaunch(self.Launcher.launch_detect_construction.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + elif self.current_step_construction == self.StepOfConstruction.avoid_obstacle.value: + rospy.loginfo("Current step : avoid_obstacle") + rospy.loginfo("Go to next step : exit") + + msg_pub_construction_order.data = self.StepOfConstruction.avoid_obstacle.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_construction.value, False) + self.fnLaunch(self.Launcher.launch_detect_construction.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif 
self.current_step_construction == self.StepOfConstruction.exit.value: + rospy.loginfo("Current step : exit") + + msg_pub_construction_order.data = self.StepOfConstruction.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_construction.value, True) + self.fnLaunch(self.Launcher.launch_detect_construction.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + self.pub_construction_order.publish(msg_pub_construction_order) + + # parking + elif self.current_mode == self.CurrentMode.parking.value: + rospy.loginfo("New trigger for parking") + msg_pub_parking_order = UInt8() + + if self.current_step_parking == self.StepOfParking.parking.value: + rospy.loginfo("Current step : parking") + rospy.loginfo("Go to next step : finish parking") + + msg_pub_parking_order.data = self.StepOfParking.parking.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_parking.value, False) + self.fnLaunch(self.Launcher.launch_detect_parking.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_parking == self.StepOfParking.exit.value: + rospy.loginfo("Current step : finish parking") + + msg_pub_parking_order.data = self.StepOfParking.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_sign_parking.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_parking.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + 
self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + self.pub_parking_order.publish(msg_pub_parking_order) + + # level_crossing + elif self.current_mode == self.CurrentMode.level_crossing.value: + rospy.loginfo("New trigger for level_crossing") + msg_pub_level_crossing_order = UInt8() + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + rospy.loginfo("Current step : pass_level") + rospy.loginfo("Go to next step : exit") + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.pass_level.value + + self.fnLaunch(self.Launcher.launch_detect_sign_level_crossing.value, False) + self.fnLaunch(self.Launcher.launch_detect_level.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_level_crossing == self.StepOfLevelCrossing.exit.value: + rospy.loginfo("Current step : exit") + + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.exit.value + + self.fnLaunch(self.Launcher.launch_detect_sign_level_crossing.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_level.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + self.pub_level_crossing_order.publish(msg_pub_level_crossing_order) + rospy.sleep(2) + + # tunnel + elif self.current_mode == self.CurrentMode.tunnel.value: + rospy.loginfo("New trigger for tunnel") + msg_pub_tunnel_order = UInt8() + + if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value: + rospy.loginfo("Current step : searching_tunnel_sign") + rospy.loginfo("Go to next step : go_in_to_tunnel") + + msg_pub_tunnel_order.data = self.StepOfTunnel.go_in_to_tunnel.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, 
False) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_tunnel == self.StepOfTunnel.go_in_to_tunnel.value: + rospy.loginfo("Current step : go_in_to_tunnel") + rospy.loginfo("Go to next step : navigation") + + msg_pub_tunnel_order.data = self.StepOfTunnel.navigation.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, False) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + elif self.current_step_tunnel == self.StepOfTunnel.navigation.value: + rospy.loginfo("Current step : navigation") + rospy.loginfo("Go to next step : go_out_from_tunnel") + + msg_pub_tunnel_order.data = self.StepOfTunnel.go_out_from_tunnel.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_tunnel == self.StepOfTunnel.go_out_from_tunnel.value: + rospy.loginfo("Current step : go_out_from_tunnel") + rospy.loginfo("Go to next step : exit") + 
+ msg_pub_tunnel_order.data = self.StepOfTunnel.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + elif self.current_step_tunnel == self.StepOfTunnel.exit.value: + rospy.loginfo("Current step : exit") + rospy.loginfo("Go to next step : searching_tunnel_sign") + + msg_pub_tunnel_order.data = self.StepOfTunnel.searching_tunneROSLaunchParentvalue + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign_tunnel.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + + self.pub_tunnel_order.publish(msg_pub_tunnel_order) + + self.is_triggered = False + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + + elif launch_num == 
self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign_intersection.value: + if is_start == True: + if self.launch_detect_sign_intersection_launched == False: + self.launch_detect_sign_intersection = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign_intersection = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_intersection.launch"]) + self.launch_detect_sign_intersection_launched = True + self.launch_detect_sign_intersection.start() + else: + pass + else: + if self.launch_detect_sign_intersection_launched == True: + self.launch_detect_sign_intersection_launched = False + self.launch_detect_sign_intersection.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign_construction.value: + if is_start == True: + if self.launch_detect_sign_construction_launched == False: + self.launch_detect_sign_construction = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign_construction = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_construction.launch"]) + self.launch_detect_sign_construction_launched = True + self.launch_detect_sign_construction.start() + else: + pass + else: + if self.launch_detect_sign_construction_launched == True: + self.launch_detect_sign_construction_launched = False + self.launch_detect_sign_construction.shutdown() + else: + pass 
+ elif launch_num == self.Launcher.launch_detect_sign_parking.value: + if is_start == True: + if self.launch_detect_sign_parking_launched == False: + self.launch_detect_sign_parking = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign_parking = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_parking.launch"]) + self.launch_detect_sign_parking_launched = True + self.launch_detect_sign_parking.start() + else: + pass + else: + if self.launch_detect_sign_parking_launched == True: + self.launch_detect_sign_parking_launched = False + self.launch_detect_sign_parking.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign_level_crossing.value: + if is_start == True: + if self.launch_detect_sign_level_crossing_launched == False: + self.launch_detect_sign_level_crossing = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign_level_crossing = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_level_crossing.launch"]) + self.launch_detect_sign_level_crossing_launched = True + self.launch_detect_sign_level_crossing.start() + else: + pass + else: + if self.launch_detect_sign_level_crossing_launched == True: + self.launch_detect_sign_level_crossing_launched = False + self.launch_detect_sign_level_crossing.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign_tunnel.value: + if is_start == True: + if self.launch_detect_sign_tunnel_launched == False: + self.launch_detect_sign_tunnel = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign_tunnel = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_tunnel.launch"]) + self.launch_detect_sign_tunnel_launched = True + self.launch_detect_sign_tunnel.start() + else: + pass + else: + if self.launch_detect_sign_tunnel_launched == True: + self.launch_detect_sign_tunnel_launched = 
False + self.launch_detect_sign_tunnel.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_traffic_light.value: + if is_start == True: + if self.launch_detect_traffic_light_launched == False: + self.launch_detect_traffic_light = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_traffic_light = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_traffic_light.launch"]) + self.launch_detect_traffic_light_launched = True + self.launch_detect_traffic_light.start() + else: + pass + else: + if self.launch_detect_traffic_light_launched == True: + self.launch_detect_traffic_light_launched = False + self.launch_detect_traffic_light.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_intersection.value: + if is_start == True: + if self.launch_detect_intersection_launched == False: + self.launch_detect_intersection = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_intersection = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_intersection.launch"]) + self.launch_detect_intersection_launched = True + self.launch_detect_intersection.start() + else: + pass + else: + if self.launch_detect_intersection_launched == True: + self.launch_detect_intersection_launched = False + self.launch_detect_intersection.shutdown() + pass + elif launch_num == self.Launcher.launch_detect_construction.value: + if is_start == True: + if self.launch_detect_construction_launched == False: + self.launch_detect_construction = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_construction = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_construction.launch"]) + self.launch_detect_construction_launched = True + self.launch_detect_construction.start() + else: + pass + else: + if self.launch_detect_construction_launched == True: + self.launch_detect_construction_launched 
= False + self.launch_detect_construction.shutdown() + pass + elif launch_num == self.Launcher.launch_detect_level.value: + if is_start == True: + if self.launch_detect_level_launched == False: + self.launch_detect_level = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_level = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_level_crossing.launch"]) + self.launch_detect_level_launched = True + self.launch_detect_level.start() + else: + pass + else: + if self.launch_detect_level_launched == True: + self.launch_detect_level_launched = False + self.launch_detect_level.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_parking.value: + if is_start == True: + if self.launch_detect_parking_launched == False: + self.launch_detect_parking = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_parking = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_parking.launch"]) + self.launch_detect_parking_launched = True + self.launch_detect_parking.start() + else: + pass + else: + if self.launch_detect_parking_launched == True: + self.launch_detect_parking_launched = False + self.launch_detect_parking.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_tunnel.value: + if is_start == True: + if 
self.launch_detect_tunnel_launched == False: + self.launch_detect_tunnel = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_tunnel = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_tunnel.launch"]) + self.launch_detect_tunnel_launched = True + self.launch_detect_tunnel.start() + else: + pass + else: + if self.launch_detect_tunnel_launched == True: + self.launch_detect_tunnel_launched = False + self.launch_detect_tunnel.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + elif launch_num == self.Launcher.launch_control_tunnel.value: + if is_start == True: + if self.launch_control_tunnel_launched == False: + self.launch_control_tunnel = roslaunch.scriptapi.ROSLaunch() + self.launch_control_tunnel = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_tunnel.launch"]) + self.launch_control_tunnel_launched = True + self.launch_control_tunnel.start() + else: + pass + else: + if self.launch_control_tunnel_launched == True: + self.launch_control_tunnel_launched = False + self.launch_control_tunnel.shutdown() + else: + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git 
a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/construction_core_mode_decider b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/construction_core_mode_decider new file mode 100755 index 0000000..05bd134 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/construction_core_mode_decider @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
from enum import Enum
from std_msgs.msg import UInt8


class CoreModeDecider():
    """Chooses the robot's operating mode for the construction course.

    Listens for detected traffic signs (/detect/traffic_sign) and for mission
    completion (/core/returned_mode), and publishes the chosen mode on
    /core/decided_mode as a UInt8 matching the CurrentMode enum values.
    """

    def __init__(self):
        # Inputs: a detected sign, or a mission node handing control back.
        self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1)
        self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1)

        # Output: the decided mode for the node controller.
        self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1)

        self.InvokedObject = Enum('InvokedObject', 'traffic_sign')
        self.TrafficSign = Enum('TrafficSign', 'construction')
        self.CurrentMode = Enum('CurrentMode', 'idle lane_following construction')

        self.fnInitMode()

    def cbInvokedByTrafficSign(self, traffic_sign_type_msg):
        """A traffic sign was detected: re-evaluate the current mode."""
        rospy.loginfo("invoke sign")
        self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg)
        rospy.loginfo("Traffic sign detected")

    def cbReturnedMode(self, mode):
        """A mission completed: fall back to lane following."""
        rospy.loginfo("Init Mode")
        self.fnInitMode()

    def fnInitMode(self):
        """Reset to lane following (startup and after each mission)."""
        self.current_mode = self.CurrentMode.lane_following.value
        self.fnPublishMode()

    def fnDecideMode(self, invoked_object, msg_data):
        """Switch to construction mode when its sign is seen while lane following.

        Only a lane-following robot may be redirected; in any other mode the
        detection is ignored. The (possibly unchanged) mode is re-published
        whenever the robot is lane following.
        """
        rospy.loginfo("DecideMode")
        if self.current_mode != self.CurrentMode.lane_following.value:
            return

        rospy.loginfo("currentmode : lane following")
        if invoked_object == self.InvokedObject.traffic_sign.value:
            rospy.loginfo("currentmode : any sign detected")
            if msg_data.data == self.TrafficSign.construction.value:
                rospy.loginfo("currentmode : construction detected")
                self.current_mode = self.CurrentMode.construction.value
        self.fnPublishMode()

    def fnPublishMode(self):
        """Publish the current mode on /core/decided_mode."""
        mode_msg = UInt8()
        mode_msg.data = self.current_mode
        self.pub_decided_mode.publish(mode_msg)

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_mode_decider')
    node = CoreModeDecider()
    node.main()
################################################################################

# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim

import rospy, roslaunch
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8, Float64


class CoreNodeController():
    """Mission controller for the construction course.

    Subscribes to the decided mode (/core/decided_mode) and the construction
    step (/detect/construction_stamped), and starts/stops the camera,
    detection and driving launch files that each step requires. Publishes
    step orders on /detect/construction_order and returns control to the
    mode decider via /core/returned_mode when the mission exits.
    """

    def __init__(self):
        # The launch files live above turtlebot3_autorace_core/nodes/, so the
        # package-local suffix is stripped from this script's directory.
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        # subscribes : status returned
        self.sub_construction_stamped = rospy.Subscriber('/detect/construction_stamped', UInt8, self.cbconstructionStamped, queue_size=1)
        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)

        # publishes orders
        self.pub_construction_order = rospy.Publisher('/detect/construction_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following construction')
        self.current_mode = self.CurrentMode.lane_following.value

        self.StepOfConstruction = Enum('StepOfConstruction', 'find_obstacle avoid_obstacle exit')
        self.current_step_construction = self.StepOfConstruction.find_obstacle.value

        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_construction launch_control_moving')
        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        # Launcher enum value -> (instance-attribute base name, launch file
        # path relative to the autorace package tree). fnLaunch drives all
        # launchers from this table instead of six copy-pasted branches.
        self.launch_table = {
            self.Launcher.launch_camera_ex_calib.value:
                ('launch_camera', 'turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch'),
            self.Launcher.launch_detect_sign.value:
                ('launch_detect_sign', 'turtlebot3_autorace_detect/launch/detect_sign_construction.launch'),
            self.Launcher.launch_detect_lane.value:
                ('launch_detect_lane', 'turtlebot3_autorace_detect/launch/detect_lane.launch'),
            self.Launcher.launch_detect_construction.value:
                ('launch_detect_construction', 'turtlebot3_autorace_detect/launch/detect_construction.launch'),
            self.Launcher.launch_control_lane.value:
                ('launch_control_lane', 'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch'),
            self.Launcher.launch_control_moving.value:
                ('launch_control_moving', 'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch'),
        }

        self.launch_camera_launched = False
        self.launch_detect_sign_launched = False
        self.launch_detect_lane_launched = False
        self.launch_detect_construction_launched = False
        self.launch_control_lane_launched = False
        self.launch_control_moving_launched = False

        self.is_triggered = False

        # Poll at 10 Hz for the node's lifetime; callbacks set is_triggered
        # to request a reconfiguration of the running launch files.
        loop_rate = rospy.Rate(10)  # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered == True:
                self.fnControlNode()

            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        """Store the decided mode and schedule fnControlNode."""
        rospy.loginfo("starts the progress with %d", mode_msg.data)

        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbconstructionStamped(self, construction_msg):
        """Advance the construction step; on 'exit' return control to lane
        following via /core/returned_mode."""
        rospy.loginfo("construction Step changed from %d", self.current_step_construction)
        self.current_step_construction = construction_msg.data

        if self.current_step_construction == self.StepOfConstruction.exit.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)

        self.is_triggered = True

    def fnControlNode(self):
        """Start/stop the launch files required by the current mode/step and
        (in construction mode) publish the current step order."""
        # lane_following
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("Current step : searching construction sign")
            rospy.loginfo("Go to next step : construction")

            self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

            self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
            self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
            self.fnLaunch(self.Launcher.launch_detect_construction.value, False)

            self.fnLaunch(self.Launcher.launch_control_lane.value, True)
            self.fnLaunch(self.Launcher.launch_control_moving.value, False)

        # construction
        elif self.current_mode == self.CurrentMode.construction.value:
            rospy.loginfo("New trigger for construction")
            msg_pub_construction_order = UInt8()

            if self.current_step_construction == self.StepOfConstruction.find_obstacle.value:
                rospy.loginfo("Current step : find_obstacle")
                # fixed typo: was "Go to next setp : avoid_obstacle"
                rospy.loginfo("Go to next step : avoid_obstacle")

                msg_pub_construction_order.data = self.StepOfConstruction.find_obstacle.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, False)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, True)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            elif self.current_step_construction == self.StepOfConstruction.avoid_obstacle.value:
                rospy.loginfo("Current step : avoid_obstacle")
                rospy.loginfo("Go to next step : exit")

                msg_pub_construction_order.data = self.StepOfConstruction.avoid_obstacle.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, False)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, True)

                # During obstacle avoidance the moving controller drives the
                # robot instead of the lane controller.
                self.fnLaunch(self.Launcher.launch_control_lane.value, False)
                self.fnLaunch(self.Launcher.launch_control_moving.value, True)

            elif self.current_step_construction == self.StepOfConstruction.exit.value:
                rospy.loginfo("Current step : exit")

                msg_pub_construction_order.data = self.StepOfConstruction.exit.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, False)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            # Give the (re)started launch files time to come up before
            # publishing the order for this step.
            rospy.sleep(2)
            self.pub_construction_order.publish(msg_pub_construction_order)

    def fnLaunch(self, launch_num, is_start):
        """Idempotently start (is_start=True) or shut down (is_start=False)
        the launch file associated with ``launch_num``.

        ``launch_num`` is a Launcher enum *value*; unknown values are ignored.
        Replaces six near-identical if/elif branches and drops the dead
        ``roslaunch.scriptapi.ROSLaunch()`` objects the originals created and
        immediately overwrote.
        """
        if launch_num not in self.launch_table:
            return

        attr_name, launch_file = self.launch_table[launch_num]
        flag_name = attr_name + '_launched'

        if is_start:
            if not getattr(self, flag_name):
                launcher = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + launch_file])
                setattr(self, attr_name, launcher)
                # Flag is set before start(), matching the original ordering.
                setattr(self, flag_name, True)
                launcher.start()
        else:
            if getattr(self, flag_name):
                setattr(self, flag_name, False)
                getattr(self, attr_name).shutdown()

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# NOTE(review): this ".org" file appears to be a backup copy of
# construction_core_node_controller kept for reference (it differs in its
# initial mode and detect_sign launch file) -- confirm it is intentionally
# installed with the package.

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim

import rospy, roslaunch
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8, Float64

class CoreNodeController():
    # Mission controller for the construction course: starts/stops the
    # camera, detection and driving launch files each mission step requires.

    def __init__(self):
        # The launch files live above turtlebot3_autorace_core/nodes/, so the
        # package-local suffix is stripped from this script's directory.
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        # subscribes : status returned
        self.sub_construction_stamped = rospy.Subscriber('/detect/construction_stamped', UInt8, self.cbconstructionStamped, queue_size=1)
        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)

        # publishes orders
        self.pub_construction_order = rospy.Publisher('/detect/construction_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following construction')
        # NOTE(review): starts in 'idle' here, while the non-.org counterpart
        # starts in 'lane_following'.
        self.current_mode = self.CurrentMode.idle.value

        self.StepOfConstruction = Enum('StepOfConstruction', 'find_obstacle avoid_obstacle exit')
        self.current_step_construction = self.StepOfConstruction.find_obstacle.value

        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_construction launch_control_moving')
        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        # One "launched" flag per launch file, used by fnLaunch for
        # idempotent start/shutdown.
        self.launch_camera_launched = False
        self.launch_detect_sign_launched = False
        self.launch_detect_lane_launched = False
        self.launch_detect_construction_launched = False
        self.launch_control_lane_launched = False
        self.launch_control_moving_launched = False

        self.is_triggered = False

        # Poll at 10 Hz; callbacks set is_triggered to request that the set
        # of running launch files be reconfigured.
        loop_rate = rospy.Rate(10) # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered == True:
                self.fnControlNode()

            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        # Stores the decided mode and schedules fnControlNode.
        rospy.loginfo("starts the progress with %d", mode_msg.data)

        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbconstructionStamped(self, construction_msg):
        # Advances the construction step; on 'exit' hands control back to
        # lane following via /core/returned_mode.
        rospy.loginfo("construction Step changed from %d", self.current_step_construction)
        self.current_step_construction = construction_msg.data

        if self.current_step_construction == self.StepOfConstruction.exit.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)

        self.is_triggered = True

    def fnControlNode(self):
        # Starts/stops the launch files required by the current mode/step and
        # (in construction mode) publishes the current step order.
        # lane_following
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("Current step : searching construction sign")
            rospy.loginfo("Go to next step : construction")

            self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

            self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
            self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
            self.fnLaunch(self.Launcher.launch_detect_construction.value, False)

            self.fnLaunch(self.Launcher.launch_control_lane.value, True)
            self.fnLaunch(self.Launcher.launch_control_moving.value, False)

        # construction
        elif self.current_mode == self.CurrentMode.construction.value:
            rospy.loginfo("New trigger for construction")
            msg_pub_construction_order = UInt8()

            if self.current_step_construction == self.StepOfConstruction.find_obstacle.value:
                rospy.loginfo("Current step : find_obstacle")
                rospy.loginfo("Go to next setp : avoid_obstacle")  # NOTE(review): "setp" typo in log text

                msg_pub_construction_order.data = self.StepOfConstruction.find_obstacle.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, False)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, True)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            elif self.current_step_construction == self.StepOfConstruction.avoid_obstacle.value:
                rospy.loginfo("Current step : avoid_obstacle")
                rospy.loginfo("Go to next step : exit")

                msg_pub_construction_order.data = self.StepOfConstruction.avoid_obstacle.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, False)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, True)

                # Obstacle avoidance: the moving controller drives instead of
                # the lane controller.
                self.fnLaunch(self.Launcher.launch_control_lane.value, False)
                self.fnLaunch(self.Launcher.launch_control_moving.value, True)

            elif self.current_step_construction == self.StepOfConstruction.exit.value:
                rospy.loginfo("Current step : exit")

                msg_pub_construction_order.data = self.StepOfConstruction.exit.value

                self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_construction.value, False)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            # Give the (re)started launch files time to come up before
            # publishing the order for this step.
            rospy.sleep(2)
            self.pub_construction_order.publish(msg_pub_construction_order)

    def fnLaunch(self, launch_num, is_start):
        # Idempotently starts (is_start=True) or shuts down (is_start=False)
        # the launch file selected by launch_num (a Launcher enum value).
        # NOTE(review): each branch creates a scriptapi.ROSLaunch() that is
        # immediately overwritten by the ROSLaunchParent -- dead assignment.
        if launch_num == self.Launcher.launch_camera_ex_calib.value:
            if is_start == True:
                if self.launch_camera_launched == False:
                    self.launch_camera = roslaunch.scriptapi.ROSLaunch()
                    self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"])
                    self.launch_camera_launched = True
                    self.launch_camera.start()
                else:
                    pass
            else:
                if self.launch_camera_launched == True:
                    self.launch_camera_launched = False
                    self.launch_camera.shutdown()
                else:
                    pass
        elif launch_num == self.Launcher.launch_detect_sign.value:
            if is_start == True:
                if self.launch_detect_sign_launched == False:
                    self.launch_detect_sign = roslaunch.scriptapi.ROSLaunch()
                    # NOTE(review): launches detect_sign.launch, while the
                    # non-.org counterpart uses detect_sign_construction.launch.
                    self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign.launch"])
                    self.launch_detect_sign_launched = True
                    self.launch_detect_sign.start()
                else:
                    pass
            else:
                if self.launch_detect_sign_launched == True:
                    self.launch_detect_sign_launched = False
                    self.launch_detect_sign.shutdown()
                else:
                    pass
        elif launch_num == self.Launcher.launch_detect_lane.value:
            if is_start == True:
                if self.launch_detect_lane_launched == False:
                    self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch()
                    self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"])
                    self.launch_detect_lane_launched = True
                    self.launch_detect_lane.start()
                else:
                    pass
            else:
                if self.launch_detect_lane_launched == True:
                    self.launch_detect_lane_launched = False
                    self.launch_detect_lane.shutdown()
                else:
                    pass

        elif launch_num == self.Launcher.launch_detect_construction.value:
            if is_start == True:
                if self.launch_detect_construction_launched == False:
                    self.launch_detect_construction = roslaunch.scriptapi.ROSLaunch()
                    self.launch_detect_construction = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_construction.launch"])
                    self.launch_detect_construction_launched = True
                    self.launch_detect_construction.start()
                else:
                    pass
            else:
                if self.launch_detect_construction_launched == True:
                    self.launch_detect_construction_launched = False
                    self.launch_detect_construction.shutdown()
                # NOTE(review): bare "pass" here (no "else:") -- inconsistent
                # with the sibling branches, though behaviorally harmless.
                pass

        elif launch_num == self.Launcher.launch_control_lane.value:
            if is_start == True:
                if self.launch_control_lane_launched == False:
                    self.launch_control_lane = roslaunch.scriptapi.ROSLaunch()
                    self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"])
                    self.launch_control_lane_launched = True
                    self.launch_control_lane.start()
                else:
                    pass
            else:
                if self.launch_control_lane_launched == True:
                    self.launch_control_lane_launched = False
                    self.launch_control_lane.shutdown()
                else:
                    pass

        elif launch_num == self.Launcher.launch_control_moving.value:
            if is_start == True:
                if self.launch_control_moving_launched == False:
                    self.launch_control_moving = roslaunch.scriptapi.ROSLaunch()
                    self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"])
                    self.launch_control_moving_launched = True
                    self.launch_control_moving.start()
                else:
                    pass
            else:
                if self.launch_control_moving_launched == True:
                    self.launch_control_moving_launched = False
                    self.launch_control_moving.shutdown()
                # NOTE(review): bare "pass" here (no "else:") as well.
                pass

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy
import numpy as np
from enum import Enum
from std_msgs.msg import UInt8


class CoreModeDecider():
    """Maps detected traffic signs onto course missions and publishes the
    decided mode on /core/decided_mode."""

    def __init__(self):
        # Inputs: detected signs, and missions handing control back.
        self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1)
        self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1)
        # Output: the decided mode for the node controller.
        self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1)

        self.InvokedObject = Enum('InvokedObject', 'traffic_sign traffic_light')
        self.TrafficSign = Enum('TrafficSign', 'divide stop parking tunnel')
        self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light parking_lot level_crossing tunnel intersection construction')

        self.fnInitMode()

    def cbInvokedByTrafficSign(self, traffic_sign_type_msg):
        """A traffic sign was detected: re-evaluate the current mode."""
        self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg)
        rospy.loginfo("Traffic sign detected")

    def cbReturnedMode(self, mode):
        """A mission completed: fall back to lane following."""
        rospy.loginfo("Init Mode")
        self.fnInitMode()

    def fnInitMode(self):
        """Reset to lane following (startup and after each mission)."""
        self.current_mode = self.CurrentMode.lane_following.value
        self.fnPublishMode()

    def fnDecideMode(self, invoked_object, msg_data):
        """Select the mission matching the detected sign.

        Only acts while lane following: stop -> level_crossing,
        parking -> parking_lot, tunnel -> tunnel; any other sign leaves the
        mode unchanged. Re-publishes the mode whenever lane following.
        """
        if self.current_mode != self.CurrentMode.lane_following.value:
            return

        if invoked_object == self.InvokedObject.traffic_sign.value:
            sign_to_mode = {
                self.TrafficSign.stop.value: self.CurrentMode.level_crossing.value,
                self.TrafficSign.parking.value: self.CurrentMode.parking_lot.value,
                self.TrafficSign.tunnel.value: self.CurrentMode.tunnel.value,
            }
            self.current_mode = sign_to_mode.get(msg_data.data, self.current_mode)
        self.fnPublishMode()

    def fnPublishMode(self):
        """Publish the current mode on /core/decided_mode."""
        mode_msg = UInt8()
        mode_msg.data = self.current_mode
        self.pub_decided_mode.publish(mode_msg)

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_mode_decider')
    node = CoreModeDecider()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import rospy, roslaunch
import numpy as np
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8


class CoreNodeController():
    """Starts and stops the mission launch files according to the decided mode.

    Listens on /core/decided_mode, and on the per-mission "*_stamped" step
    topics; reacts by (de)activating the matching detect/control launch files
    and publishing the next step order for the active mission.

    Fixes relative to the original:
      * ``StepOfTrafficLight`` and the three ``current_step_*`` attributes that
        were read before assignment are now defined in ``__init__``.
      * The ``Launcher`` enum now contains every member that ``fnControlNode``
        and ``fnLaunch`` reference (``launch_detect_intersection``,
        ``launch_detect_construction``, ``launch_control_moving``); previously
        several references raised AttributeError.
      * ``fnLaunch`` is table-driven with one launch-state record per launcher,
        instead of aliasing five different launchers onto the single
        ``launch_detect_sign`` flag.
    """

    def __init__(self):
        # Path to the autorace packages, derived from this script's location.
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)
        self.traffic_light = rospy.Subscriber('/detect/traffic_light', UInt8, self.cbTrafficLight, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light parking_lot level_crossing tunnel intersection construction')

        self.sub_intersection_stamped = rospy.Subscriber('/detect/intersection_stamped', UInt8, self.cbIntersectionStamped, queue_size=1)
        self.sub_construction_stamped = rospy.Subscriber('/detect/construction_stamped', UInt8, self.cbConstructionStamped, queue_size=1)
        self.sub_parking_lot_stamped = rospy.Subscriber('/detect/parking_lot_stamped', UInt8, self.cbParkingLotStamped, queue_size=1)
        self.sub_level_crossing_stamped = rospy.Subscriber('/detect/level_crossing_stamped', UInt8, self.cbLevelCrossingStamped, queue_size=1)
        self.sub_tunnel_stamped = rospy.Subscriber('/detect/tunnel_stamped', UInt8, self.cbTunnelStamped, queue_size=1)

        self.pub_traffic_light_order = rospy.Publisher('/detect/traffic_light_order', UInt8, queue_size=1)
        self.pub_intersection_order = rospy.Publisher('/detect/intersection_order', UInt8, queue_size=1)
        self.pub_construction_order = rospy.Publisher('/detect/construction_order', UInt8, queue_size=1)
        self.pub_parking_lot_order = rospy.Publisher('/detect/parking_lot_order', UInt8, queue_size=1)
        self.pub_level_crossing_order = rospy.Publisher('/detect/level_crossing_order', UInt8, queue_size=1)
        self.pub_tunnel_order = rospy.Publisher('/detect/tunnel_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        # FIX: appended the terminal members ('intersection', 'construction')
        # that the stamped callbacks compare against but the original enums
        # never declared (AttributeError at runtime).
        self.StepOfIntersection = Enum('StepOfIntersection', 'searching_intersection_sign searching_parking_point_line searching_nonreserved_parking_area parking intersection')
        self.StepOfConstruction = Enum('StepOfConstruction', 'searching_construction_sign searching_parking_point_line searching_nonreserved_parking_area parking construction')
        self.StepOfParkingLot = Enum('StepOfParkingLot', 'searching_parking_sign searching_parking_point_line searching_nonreserved_parking_area parking')
        self.StepOfLevelCrossing = Enum('StepOfLevelCrossing', 'searching_stop_sign searching_level_crossing watching_level_crossing stop pass_level_crossing')
        self.StepOfTunnel = Enum('StepOfTunnel', 'searching_tunnel_sign go_in_to_tunnel navigation go_out_from_tunnel exit')
        # FIX: fnControlNode used StepOfTrafficLight without it being defined.
        self.StepOfTrafficLight = Enum('StepOfTrafficLight', 'searching_traffic_light in_traffic_light pass_traffic_light')

        # FIX: members now match every name used by fnControlNode / fnLaunch
        # (the original declared 'detect_intersection detect_construction' but
        # the code referenced 'launch_detect_*', and 'launch_control_moving'
        # was referenced without being declared at all).
        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_traffic_light launch_control_traffic_light launch_detect_parking launch_control_parking launch_detect_level_crossing launch_control_level_crossing launch_detect_tunnel launch_control_tunnel launch_detect_intersection launch_detect_construction launch_control_moving')

        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        # Launcher value -> launch file (relative to ros_package_path).
        # Launchers without an entry are deliberate no-ops, matching the
        # original fnLaunch which had no branch for them
        # (launch_detect_parking, launch_control_parking,
        # launch_control_level_crossing, launch_control_traffic_light).
        self._launch_files = {
            self.Launcher.launch_camera_ex_calib.value: "turtlebot3_autorace_camera/launch/turtlebot3_autorace_extrinsic_camera_calibration.launch",
            self.Launcher.launch_detect_sign.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_sign.launch",
            self.Launcher.launch_detect_lane.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_lane.launch",
            self.Launcher.launch_control_lane.value: "turtlebot3_autorace_control/launch/turtlebot3_autorace_control_lane.launch",
            self.Launcher.launch_detect_traffic_light.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_traffic_light.launch",
            self.Launcher.launch_detect_level_crossing.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_level_crossing.launch",
            self.Launcher.launch_detect_tunnel.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_tunnel.launch",
            self.Launcher.launch_control_tunnel.value: "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_tunnel.launch",
            self.Launcher.launch_detect_intersection.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_intersection.launch",
            self.Launcher.launch_detect_construction.value: "turtlebot3_autorace_detect/launch/turtlebot3_autorace_detect_construction.launch",
            self.Launcher.launch_control_moving.value: "turtlebot3_autorace_control/launch/turtlebot3_autorace_control_moving.launch",
        }
        # FIX: per-launcher state instead of one shared launch_detect_sign flag.
        self._launch_parents = {}                                    # value -> ROSLaunchParent
        self._launched = {num: False for num in self._launch_files}  # value -> running?

        self.current_step_parking_lot = self.StepOfParkingLot.searching_parking_sign.value
        self.current_step_level_crossing = self.StepOfLevelCrossing.searching_stop_sign.value
        self.current_step_tunnel = self.StepOfTunnel.searching_tunnel_sign.value
        # FIX: these three were read (in loginfo / fnControlNode) before
        # ever being assigned.
        self.current_step_intersection = self.StepOfIntersection.searching_intersection_sign.value
        self.current_step_construction = self.StepOfConstruction.searching_construction_sign.value
        self.current_step_traffic_light = self.StepOfTrafficLight.searching_traffic_light.value

        self.current_mode = self.CurrentMode.idle.value
        self.is_triggered = False
        self.is_go = False

        loop_rate = rospy.Rate(10)  # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered:
                self.fnControlNode()
            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        # A new mode was decided; process it on the next loop iteration.
        rospy.loginfo("starts the progress with %d", mode_msg.data)
        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbTrafficLight(self, mode_msg):
        # Any traffic-light message marks the light as "go".
        self.is_go = True

    def cbParkingLotStamped(self, parking_lot_msg):
        # Parking mission advanced a step; on completion, return to lane following.
        rospy.loginfo("ParkingLot Step changed from %d", self.current_step_parking_lot)
        self.current_step_parking_lot = parking_lot_msg.data
        rospy.loginfo("into %d", self.current_step_parking_lot)
        if self.current_step_parking_lot == self.StepOfParkingLot.parking.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.current_step_parking_lot = self.StepOfParkingLot.searching_parking_sign.value
            rospy.loginfo("pub_mode_return")
            self.pub_mode_return.publish(msg_mode_return)
        self.is_triggered = True

    def cbIntersectionStamped(self, intersection_msg):
        # Intersection mission advanced a step; on completion, return to lane following.
        rospy.loginfo("Intersection Step changed from %d", self.current_step_intersection)
        self.current_step_intersection = intersection_msg.data
        rospy.loginfo("into %d", self.current_step_intersection)
        if self.current_step_intersection == self.StepOfIntersection.intersection.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            # FIX: the reset went through the wrong enum (StepOfParkingLot).
            self.current_step_intersection = self.StepOfIntersection.searching_intersection_sign.value
            rospy.loginfo("pub_mode_return")
            self.pub_mode_return.publish(msg_mode_return)
        self.is_triggered = True

    def cbConstructionStamped(self, construction_msg):
        # Construction mission advanced a step; on completion, return to lane following.
        rospy.loginfo("Construction Step changed from %d", self.current_step_construction)
        self.current_step_construction = construction_msg.data
        rospy.loginfo("into %d", self.current_step_construction)
        if self.current_step_construction == self.StepOfConstruction.construction.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.current_step_construction = self.StepOfConstruction.searching_construction_sign.value
            rospy.loginfo("pub_mode_return")
            self.pub_mode_return.publish(msg_mode_return)
        self.is_triggered = True

    def cbLevelCrossingStamped(self, level_crossing_msg):
        # Level-crossing mission advanced a step.
        rospy.loginfo("LevelCrossing Step changed from %d", self.current_step_level_crossing)
        self.current_step_level_crossing = level_crossing_msg.data
        rospy.loginfo("into %d", self.current_step_level_crossing)
        if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level_crossing.value:
            self.current_mode = self.CurrentMode.level_crossing.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)
        self.is_triggered = True

    def cbTunnelStamped(self, tunnel_msg):
        # Tunnel mission advanced a step.
        rospy.loginfo("Tunnel Step changed from %d", self.current_step_tunnel)
        self.current_step_tunnel = tunnel_msg.data
        rospy.loginfo("into %d", self.current_step_tunnel)
        if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value:
            self.current_mode = self.CurrentMode.tunnel.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)
        self.is_triggered = True

    def _fnLaunchMany(self, states):
        # Helper: apply a {Launcher member: bool} table through fnLaunch.
        for launcher, is_start in states.items():
            self.fnLaunch(launcher.value, is_start)

    def fnControlNode(self):
        """Apply the launch configuration for the current mode/step and, for
        mission modes, publish the next step order after a 2 s settle delay."""
        L = self.Launcher
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("New trigger for lane_following")
            self._fnLaunchMany({
                L.launch_camera_ex_calib: True,
                L.launch_detect_lane: True,
                L.launch_detect_sign: True,
                L.launch_detect_traffic_light: True,
                L.launch_detect_intersection: False,
                L.launch_detect_construction: False,
                L.launch_detect_parking: False,
                L.launch_detect_level_crossing: False,
                L.launch_detect_tunnel: False,
                L.launch_control_lane: True,
                L.launch_control_parking: False,
                L.launch_control_tunnel: False,
            })

        elif self.current_mode == self.CurrentMode.traffic_light.value:
            rospy.loginfo("New trigger for traffic_light")
            msg_pub_traffic_light_order = UInt8()
            if self.current_step_traffic_light == self.StepOfTrafficLight.searching_traffic_light.value:
                rospy.loginfo("Current step : searching_traffic_light")
                rospy.loginfo("Go to next step : in_traffic_light")
                msg_pub_traffic_light_order.data = self.StepOfTrafficLight.in_traffic_light.value
                self._fnLaunchMany({
                    L.launch_detect_lane: True,
                    L.launch_detect_sign: False,
                    L.launch_detect_traffic_light: True,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_parking: False,
                    L.launch_detect_level_crossing: False,
                    L.launch_detect_tunnel: False,
                    L.launch_control_lane: True,
                    L.launch_control_parking: False,
                    L.launch_control_level_crossing: False,
                })
            elif self.current_step_traffic_light == self.StepOfTrafficLight.in_traffic_light.value:
                rospy.loginfo("Current step : in_traffic_light")
                rospy.loginfo("Go to next step : pass_traffic_light")
                # FIX: the original forgot to set the order data in this branch,
                # so it always published 0.
                msg_pub_traffic_light_order.data = self.StepOfTrafficLight.pass_traffic_light.value
                self._fnLaunchMany({
                    L.launch_detect_lane: True,
                    L.launch_detect_sign: True,
                    L.launch_detect_traffic_light: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_parking: False,
                    L.launch_detect_level_crossing: False,
                    L.launch_detect_tunnel: False,
                    L.launch_control_lane: True,
                    L.launch_control_parking: False,
                    L.launch_control_level_crossing: False,
                })
            rospy.sleep(2)
            self.pub_traffic_light_order.publish(msg_pub_traffic_light_order)

        elif self.current_mode == self.CurrentMode.parking_lot.value:
            rospy.loginfo("New trigger for parking_lot")
            msg_pub_parking_lot_order = UInt8()
            if self.current_step_parking_lot == self.StepOfParkingLot.searching_parking_sign.value:
                rospy.loginfo("Current step : searching_parking_sign")
                rospy.loginfo("Go to next step : searching_parking_point_line")
                msg_pub_parking_lot_order.data = self.StepOfParkingLot.searching_parking_point_line.value
                self._fnLaunchMany({
                    L.launch_control_lane: True,
                    L.launch_control_parking: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: True,
                    L.launch_detect_parking: True,
                    L.launch_detect_traffic_light: False,
                })
            elif self.current_step_parking_lot == self.StepOfParkingLot.searching_parking_point_line.value:
                rospy.loginfo("Current step : searching_parking_point_line")
                rospy.loginfo("Go to next step : searching_nonreserved_parking_area")
                msg_pub_parking_lot_order.data = self.StepOfParkingLot.searching_nonreserved_parking_area.value
                self._fnLaunchMany({
                    L.launch_control_lane: True,
                    L.launch_control_parking: True,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: True,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_parking: True,
                    L.launch_detect_traffic_light: False,
                })
            elif self.current_step_parking_lot == self.StepOfParkingLot.searching_nonreserved_parking_area.value:
                rospy.loginfo("Current step : searching_nonreserved_parking_area")
                rospy.loginfo("Go to next step : parking")
                msg_pub_parking_lot_order.data = self.StepOfParkingLot.parking.value
                self._fnLaunchMany({
                    L.launch_control_lane: False,
                    L.launch_control_parking: True,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_parking: True,
                    L.launch_detect_traffic_light: False,
                })
            elif self.current_step_parking_lot == self.StepOfParkingLot.parking.value:
                rospy.loginfo("Current step : parking")
                rospy.loginfo("Go to next step : searching_parking_sign")
                msg_pub_parking_lot_order.data = self.StepOfParkingLot.searching_parking_sign.value
                self._fnLaunchMany({
                    L.launch_control_parking: False,
                    L.launch_detect_sign: True,
                    L.launch_detect_lane: True,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_parking: False,
                    L.launch_control_lane: True,
                    L.launch_detect_traffic_light: False,
                })
            rospy.sleep(2)
            self.pub_parking_lot_order.publish(msg_pub_parking_lot_order)

        elif self.current_mode == self.CurrentMode.level_crossing.value:
            rospy.loginfo("New trigger for level_crossing")
            msg_pub_level_crossing_order = UInt8()
            # All steps share the same launch table except the last; each step
            # orders the next step in the StepOfLevelCrossing sequence.
            base_states = {
                L.launch_control_lane: True,
                L.launch_detect_sign: False,
                L.launch_detect_lane: True,
                L.launch_detect_intersection: False,
                L.launch_detect_construction: False,
                L.launch_detect_level_crossing: True,
                L.launch_detect_traffic_light: False,
            }
            if self.current_step_level_crossing == self.StepOfLevelCrossing.searching_stop_sign.value:
                rospy.loginfo("Current step : searching_stop_sign")
                rospy.loginfo("Go to next step : searching_level_crossing")
                msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.searching_level_crossing.value
                self._fnLaunchMany(base_states)
            elif self.current_step_level_crossing == self.StepOfLevelCrossing.searching_level_crossing.value:
                rospy.loginfo("Current step : searching_level_crossing")
                rospy.loginfo("Go to next step : watching_level_crossing")
                msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.watching_level_crossing.value
                self._fnLaunchMany(base_states)
            elif self.current_step_level_crossing == self.StepOfLevelCrossing.watching_level_crossing.value:
                rospy.loginfo("Current step : watching_level_crossing")
                rospy.loginfo("Go to next step : stop")
                msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.stop.value
                self._fnLaunchMany(base_states)
            elif self.current_step_level_crossing == self.StepOfLevelCrossing.stop.value:
                rospy.loginfo("Current step : stop")
                rospy.loginfo("Go to next step : pass_level_crossing")
                msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.pass_level_crossing.value
                self._fnLaunchMany(base_states)
            elif self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level_crossing.value:
                rospy.loginfo("Current step : pass_level_crossing")
                rospy.loginfo("Go to next step : searching_stop_sign")
                msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.searching_stop_sign.value
                # Re-enable sign detection once the crossing is passed.
                self._fnLaunchMany(dict(base_states, **{L.launch_detect_sign: True}))
            rospy.sleep(2)
            self.pub_level_crossing_order.publish(msg_pub_level_crossing_order)

        elif self.current_mode == self.CurrentMode.tunnel.value:
            rospy.loginfo("New trigger for tunnel")
            msg_pub_tunnel_order = UInt8()
            if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value:
                rospy.loginfo("Current step : searching_tunnel_sign")
                rospy.loginfo("Go to next step : go_in_to_tunnel")
                msg_pub_tunnel_order.data = self.StepOfTunnel.go_in_to_tunnel.value
                self._fnLaunchMany({
                    L.launch_detect_tunnel: True,
                    L.launch_control_lane: False,
                    L.launch_camera_ex_calib: False,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_level_crossing: False,
                    L.launch_detect_traffic_light: False,
                    L.launch_control_tunnel: False,
                })
            elif self.current_step_tunnel == self.StepOfTunnel.go_in_to_tunnel.value:
                rospy.loginfo("Current step : go_in_to_tunnel")
                rospy.loginfo("Go to next step : navigation")
                msg_pub_tunnel_order.data = self.StepOfTunnel.navigation.value
                self._fnLaunchMany({
                    L.launch_camera_ex_calib: False,
                    L.launch_control_lane: False,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: False,
                    L.launch_detect_level_crossing: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_traffic_light: False,
                    L.launch_detect_tunnel: True,
                    L.launch_control_tunnel: True,
                })
            elif self.current_step_tunnel == self.StepOfTunnel.navigation.value:
                rospy.loginfo("Current step : navigation")
                rospy.loginfo("Go to next step : go_out_from_tunnel")
                msg_pub_tunnel_order.data = self.StepOfTunnel.go_out_from_tunnel.value
                self._fnLaunchMany({
                    L.launch_camera_ex_calib: True,
                    L.launch_control_lane: False,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_sign: False,
                    L.launch_detect_lane: False,
                    L.launch_detect_level_crossing: False,
                    L.launch_detect_traffic_light: False,
                    L.launch_detect_tunnel: True,
                    L.launch_control_tunnel: False,
                })
            elif self.current_step_tunnel == self.StepOfTunnel.go_out_from_tunnel.value:
                rospy.loginfo("Current step : go_out_from_tunnel")
                rospy.loginfo("Go to next step : exit")
                msg_pub_tunnel_order.data = self.StepOfTunnel.exit.value
                self._fnLaunchMany({
                    L.launch_detect_tunnel: False,
                    L.launch_camera_ex_calib: True,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_lane: True,
                    L.launch_detect_sign: True,
                    L.launch_detect_traffic_light: True,
                    L.launch_control_lane: True,
                    L.launch_detect_level_crossing: False,
                    L.launch_control_tunnel: False,
                })
            elif self.current_step_tunnel == self.StepOfTunnel.exit.value:
                rospy.loginfo("Current step : exit")
                rospy.loginfo("Go to next step : searching_tunnel_sign")
                msg_pub_tunnel_order.data = self.StepOfTunnel.searching_tunnel_sign.value
                self._fnLaunchMany({
                    L.launch_detect_tunnel: False,
                    L.launch_camera_ex_calib: True,
                    L.launch_detect_intersection: False,
                    L.launch_detect_construction: False,
                    L.launch_detect_lane: True,
                    L.launch_detect_sign: True,
                    L.launch_detect_traffic_light: True,
                    L.launch_control_lane: True,
                    L.launch_detect_level_crossing: False,
                    L.launch_control_tunnel: False,
                })
            rospy.sleep(2)
            self.pub_tunnel_order.publish(msg_pub_tunnel_order)

        # Processed; wait for the next trigger.
        self.is_triggered = False

    def fnLaunch(self, launch_num, is_start):
        """Start (is_start=True) or shut down (is_start=False) the launch file
        mapped to launch_num; idempotent if already in the requested state.

        launch_num: a Launcher member's .value.
        """
        launch_file = self._launch_files.get(launch_num)
        if launch_file is None:
            # No launch file mapped for this launcher in this fork — keep the
            # original behavior of silently doing nothing.
            return
        if is_start:
            if not self._launched[launch_num]:
                parent = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + launch_file])
                self._launch_parents[launch_num] = parent
                self._launched[launch_num] = True
                parent.start()
        else:
            if self._launched[launch_num]:
                self._launched[launch_num] = False
                self._launch_parents.pop(launch_num).shutdown()

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#################################################################################

# Authors: Gilbert, Ashe kim

import rospy
import os
import time
import numpy as np
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Pose
from std_srvs.srv import Empty
from gazebo_msgs.srv import SpawnModel, DeleteModel


class ControlMission():
    """Drives the Gazebo mission props for the autorace course.

    Spawns/deletes traffic lights, intersection direction signs, the
    level-crossing bars and a parked turtlebot, stepping through an internal
    ``traffic_state`` machine (1..8) driven by timers and by the robot's
    odometry.
    """

    def __init__(self):
        self.reset_proxy = rospy.ServiceProxy('/gazebo/reset_simulation', Empty)
        self.sub_odom = rospy.Subscriber('odom', Odometry, self.getOdom, queue_size=1)
        self.rate = rospy.Rate(5)
        self.initial_pose = Pose()
        self.traffic_state = 1  # state machine: 1..4 traffic light/intersection, 5..8 level crossing
        self.loadMissionModel()
        self.setTraffic()
        self.controlMission()  # blocks until shutdown

    def getOdom(self, msg):
        """Advance the level-crossing states when the robot reaches the bar."""
        pose_x = msg.pose.pose.position.x
        pose_y = msg.pose.pose.position.y

        # down_bar: robot arrived at the lowered bar -> trigger bar-up (state 6)
        if abs(pose_x + 1.4) < 0.15 and abs(pose_y - 1.25) < 0.05 and self.traffic_state == 5:
            self.traffic_state = 6

        # up_bar: robot passed the raised bar -> start the bar-down timer (state 8)
        # NOTE(review): unlike the down_bar check above, pose_y here has no
        # abs() — true for any pose_y < 1.3. Possibly intentional (approach
        # from below), possibly a missing abs(); confirm before changing.
        elif abs(pose_x + 1.3) < 0.15 and (pose_y - 1.25) < 0.05 and self.traffic_state == 7:
            self.traffic_state = 8
            self.current_time = time.time()

    def _readModel(self, path):
        # Read an SDF model file; the original leaked every file handle.
        with open(path, 'r') as model_file:
            return model_file.read()

    def loadMissionModel(self):
        """Load all mission SDF models from the turtlebot3_gazebo package."""
        model_dir_path = os.path.dirname(os.path.realpath(__file__))
        model_dir_path = model_dir_path.replace(
            '/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes',
            '/turtlebot3_simulations/turtlebot3_gazebo/models/turtlebot3_autorace_2020')

        turtlebot_dir_path = os.path.dirname(os.path.realpath(__file__))
        turtlebot_dir_path = turtlebot_dir_path.replace(
            '/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes',
            '/turtlebot3_simulations/turtlebot3_gazebo/models/turtlebot3_burger/model.sdf')

        self.red_light_model = self._readModel(model_dir_path + '/traffic_light_red/model.sdf')
        self.yellow_light_model = self._readModel(model_dir_path + '/traffic_light_yellow/model.sdf')
        self.green_light_model = self._readModel(model_dir_path + '/traffic_light_green/model.sdf')
        self.traffic_left_model = self._readModel(model_dir_path + '/traffic_left/model.sdf')
        self.traffic_right_model = self._readModel(model_dir_path + '/traffic_right/model.sdf')
        self.up_bar_model = self._readModel(model_dir_path + '/traffic_bar_up/model.sdf')
        self.down_bar_model = self._readModel(model_dir_path + '/traffic_bar_down/model.sdf')

        # NOTE(review): this replaces EVERY '0' character in the SDF (including
        # numeric values), not just a name — looks fragile; verify intent.
        self.parking_model = self._readModel(turtlebot_dir_path).replace('0', '1')

    def setTraffic(self):
        """Spawn the static props: both direction signs, the lowered bar, and
        a parked turtlebot in a randomly chosen parking spot."""
        rospy.wait_for_service('gazebo/spawn_sdf_model')
        spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
        spawn_model_prox(
            'traffic_left',
            self.traffic_left_model,
            "robotos_name_space",
            self.initial_pose,
            "world")
        spawn_model_prox(
            'traffic_right',
            self.traffic_right_model,
            "robotos_name_space",
            self.initial_pose,
            "world")
        spawn_model_prox(
            'down_bar',
            self.down_bar_model,
            "robotos_name_space",
            self.initial_pose,
            "world")

        # Randomly occupy one of the two parking spots with a parked robot.
        parking_pose = Pose()
        parking_stop = np.random.rand()
        parking_pose.position.x = 0.73 if parking_stop < 0.5 else 0.23
        parking_pose.position.y = 0.8
        parking_pose.position.z = 0.03
        parking_pose.orientation.x = 0
        parking_pose.orientation.y = 0
        # NOTE(review): (0, 0, -1, -1) is not a unit quaternion; Gazebo may
        # normalize it, but confirm the intended yaw.
        parking_pose.orientation.z = -1
        parking_pose.orientation.w = -1
        # 'praking_turtlebot3' (sic) — model name kept as-is; other tools may
        # reference it by this exact (misspelled) name.
        spawn_model_prox(
            'praking_turtlebot3',
            self.parking_model,
            "robotos_name_space",
            parking_pose,
            "world")

    def controlMission(self):
        """Run the traffic-light / level-crossing state machine until shutdown.

        States 5 and 7 have no branch here: they wait for getOdom() to advance
        them based on the robot's position.
        """
        while not rospy.is_shutdown():
            if self.traffic_state == 1:  # turn on red light
                rospy.wait_for_service('gazebo/spawn_sdf_model')
                spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
                spawn_model_prox(
                    'traffic_light_red',
                    self.red_light_model,
                    "robotos_name_space",
                    self.initial_pose,
                    "world")
                self.traffic_state = 2
                self.current_time = time.time()

            elif self.traffic_state == 2:
                if abs(self.current_time - time.time()) > 2:  # turn on yellow light after 2s.
                    rospy.wait_for_service('gazebo/spawn_sdf_model')
                    spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
                    spawn_model_prox(
                        'traffic_light_yellow',
                        self.yellow_light_model,
                        "robotos_name_space",
                        self.initial_pose,
                        "world")
                    del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
                    del_model_prox('traffic_light_red')
                    self.traffic_state = 3
                    self.current_time = time.time()

            elif self.traffic_state == 3:
                if abs(self.current_time - time.time()) > 5:  # turn on green light after 5s.
                    rospy.wait_for_service('gazebo/spawn_sdf_model')
                    spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
                    spawn_model_prox('traffic_light_green', self.green_light_model, "robotos_name_space",
                                     self.initial_pose, "world")
                    del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
                    del_model_prox('traffic_light_yellow')
                    self.traffic_state = 4

            elif self.traffic_state == 4:
                if abs(self.current_time - time.time()) > 5:  # intersections
                    rospy.wait_for_service('gazebo/spawn_sdf_model')
                    del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
                    # Randomly remove one sign, leaving the other as the
                    # direction the robot must take.
                    intersection_direction = np.random.rand()
                    if intersection_direction < 0.5:
                        del_model_prox('traffic_right')
                    else:
                        del_model_prox('traffic_left')
                    self.traffic_state = 5  # now wait for getOdom() -> 6

            elif self.traffic_state == 6:  # robot at the bar: raise it (swap down_bar -> up_bar)
                rospy.wait_for_service('gazebo/spawn_sdf_model')
                spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
                spawn_model_prox('up_bar', self.up_bar_model, "robotos_name_space",
                                 self.initial_pose, "world")
                del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
                del_model_prox('down_bar')
                self.traffic_state = 7  # wait for getOdom() -> 8

            elif self.traffic_state == 8:  # lower the bar again, then finish
                if abs(self.current_time - time.time()) > 10:
                    rospy.wait_for_service('gazebo/spawn_sdf_model')
                    spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
                    spawn_model_prox('down_bar', self.down_bar_model, "robotos_name_space",
                                     self.initial_pose, "world")
                    del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
                    del_model_prox('up_bar')
                    rospy.signal_shutdown('shutdown')
            self.rate.sleep()


def main():
    rospy.init_node('mission_control')
    try:
        # ControlMission's constructor runs the whole mission loop.
        controlmission = ControlMission()
    except rospy.ROSInterruptException:
        pass


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
from enum import Enum
from std_msgs.msg import UInt8
from geometry_msgs.msg import Twist
from turtlebot3_autorace_msgs.msg import MovingParam


class CoreModeDecider():
    """Picks the operating mode (lane following vs. intersection) from signs."""

    def __init__(self):
        # Inputs: detected traffic signs plus the mode the controller hands back.
        self.sub_traffic_sign = rospy.Subscriber(
            '/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1)
        self.sub_returned_mode = rospy.Subscriber(
            '/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1)

        # Output: the decided mode, consumed by the core node controller.
        self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1)

        self.InvokedObject = Enum('InvokedObject', 'traffic_sign')
        self.TrafficSign = Enum('TrafficSign', 'intersection left right')
        self.CurrentMode = Enum('CurrentMode', 'idle lane_following intersection')

        self.fnInitMode()

    def cbInvokedByTrafficSign(self, traffic_sign_type_msg):
        """A traffic sign was detected: possibly switch the current mode."""
        rospy.loginfo("invoke sign")
        self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg)
        rospy.loginfo("Traffic sign detected")

    def cbReturnedMode(self, mode):
        """The controller finished a mission: fall back to lane following."""
        rospy.loginfo("Init Mode")
        self.fnInitMode()

    def fnInitMode(self):
        # Runs at start-up and whenever a mission completes.
        self.current_mode = self.CurrentMode.lane_following.value
        self.fnPublishMode()

    def fnDecideMode(self, invoked_object, msg_data):
        """Switch to intersection mode when its sign arrives while lane following."""
        if self.current_mode != self.CurrentMode.lane_following.value:
            return
        rospy.loginfo("lane_following")
        if invoked_object != self.InvokedObject.traffic_sign.value:
            return
        rospy.loginfo("Any Sign detected")
        if msg_data.data == self.TrafficSign.intersection.value:
            self.current_mode = self.CurrentMode.intersection.value
            rospy.loginfo("detect sign : intersection")
        self.fnPublishMode()

    def fnPublishMode(self):
        # Broadcast the (possibly unchanged) current mode.
        decided_mode = UInt8()
        decided_mode.data = self.current_mode
        self.pub_decided_mode.publish(decided_mode)

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_mode_decider')
    node = CoreModeDecider()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND.
################################################################################

# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim

import rospy, roslaunch
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8, Float64


class CoreNodeController():
    """Orchestrates the intersection mission by starting/stopping launch files.

    Listens to the decided mode and the intersection step, then brings the
    matching detect/control launch files up or down.
    """

    def __init__(self):
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        # subscribes : status returned
        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)
        self.sub_intersection_stamped = rospy.Subscriber('/detect/intersection_stamped', UInt8, self.cbIntersectionStamped, queue_size=1)

        # publishes orders
        self.pub_intersection_order = rospy.Publisher('/detect/intersection_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following intersection')
        self.current_mode = self.CurrentMode.idle.value

        self.StepOfIntersection = Enum('StepOfIntersection', 'detect_direction exit')
        self.current_step_intersection = self.StepOfIntersection.detect_direction.value

        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_detect_intersection launch_control_lane launch_control_moving')
        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        # One (attribute stem, launch file) entry per launcher.  fnLaunch is
        # table-driven instead of six copies of the same if/else ladder; the
        # original also created a roslaunch.scriptapi.ROSLaunch() that was
        # immediately overwritten (dead store) in every branch.
        self._launch_table = {
            self.Launcher.launch_camera_ex_calib.value:
                ('launch_camera', 'turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch'),
            self.Launcher.launch_detect_sign.value:
                ('launch_detect_sign', 'turtlebot3_autorace_detect/launch/detect_sign_intersection.launch'),
            self.Launcher.launch_detect_lane.value:
                ('launch_detect_lane', 'turtlebot3_autorace_detect/launch/detect_lane.launch'),
            self.Launcher.launch_detect_intersection.value:
                ('launch_detect_intersection', 'turtlebot3_autorace_detect/launch/detect_intersection.launch'),
            self.Launcher.launch_control_lane.value:
                ('launch_control_lane', 'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch'),
            self.Launcher.launch_control_moving.value:
                ('launch_control_moving', 'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch'),
        }

        self.launch_camera_launched = False
        self.launch_detect_sign_launched = False
        self.launch_detect_lane_launched = False
        self.launch_detect_intersection_launched = False
        self.launch_control_lane_launched = False
        self.launch_control_moving_launched = False

        self.is_triggered = False

        loop_rate = rospy.Rate(10)  # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered:
                self.fnControlNode()
            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        """A new mode was decided: remember it and trigger fnControlNode."""
        rospy.loginfo("starts the progress with %d", mode_msg.data)
        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbIntersectionStamped(self, intersection_msg):
        """The intersection step advanced; on exit, return to lane following."""
        rospy.loginfo("intersection Step changed from %d", self.current_step_intersection)
        self.current_step_intersection = intersection_msg.data

        if self.current_step_intersection == self.StepOfIntersection.exit.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)

        self.is_triggered = True

    def fnControlNode(self):
        """Bring launch files up/down according to the current mode and step."""
        # lane_following
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("New trigger for lane_following")
            self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

            self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
            self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
            self.fnLaunch(self.Launcher.launch_detect_intersection.value, False)

            self.fnLaunch(self.Launcher.launch_control_lane.value, True)
            self.fnLaunch(self.Launcher.launch_control_moving.value, False)

        # intersection
        elif self.current_mode == self.CurrentMode.intersection.value:
            rospy.loginfo("New trigger for intersection")
            msg_pub_intersection_order = UInt8()

            if self.current_step_intersection == self.StepOfIntersection.detect_direction.value:
                rospy.loginfo("Current step : searching_intersection_sign")
                rospy.loginfo("Go to next step : exit")

                msg_pub_intersection_order.data = self.StepOfIntersection.detect_direction.value

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_intersection.value, True)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, True)

            elif self.current_step_intersection == self.StepOfIntersection.exit.value:
                rospy.loginfo("Current step : exit")

                msg_pub_intersection_order.data = self.StepOfIntersection.exit.value

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_intersection.value, False)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            # Give the launched nodes time to come up before ordering the step.
            rospy.sleep(3)
            self.pub_intersection_order.publish(msg_pub_intersection_order)

    def fnLaunch(self, launch_num, is_start):
        """Start or shut down the roslaunch parent identified by ``launch_num``.

        Same contract as the original ladder: starting is a no-op when already
        launched, shutting down is a no-op when not launched, and the
        ``<attr>_launched`` flag is flipped before start()/shutdown().
        """
        entry = self._launch_table.get(launch_num)
        if entry is None:
            return
        attr, launch_file = entry
        flag_attr = attr + '_launched'
        if is_start:
            if not getattr(self, flag_attr):
                parent = roslaunch.parent.ROSLaunchParent(
                    self.uuid, [self.ros_package_path + launch_file])
                setattr(self, attr, parent)
                setattr(self, flag_attr, True)
                parent.start()
        else:
            if getattr(self, flag_attr):
                setattr(self, flag_attr, False)
                getattr(self, attr).shutdown()

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND.
################################################################################

# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim

import rospy, roslaunch
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8, Float64


class CoreNodeController():
    """Intersection-mission node controller (.org backup: uses detect_sign.launch).

    Starts/stops the detect and control launch files according to the decided
    mode and the current intersection step.
    """

    def __init__(self):
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        # subscribes : status returned
        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)
        self.sub_intersection_stamped = rospy.Subscriber('/detect/intersection_stamped', UInt8, self.cbIntersectionStamped, queue_size=1)

        # publishes orders
        self.pub_intersection_order = rospy.Publisher('/detect/intersection_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following intersection')
        self.current_mode = self.CurrentMode.idle.value

        self.StepOfIntersection = Enum('StepOfIntersection', 'detect_direction exit')
        self.current_step_intersection = self.StepOfIntersection.detect_direction.value

        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_detect_intersection launch_control_lane launch_control_moving')
        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        self.launch_camera_launched = False
        self.launch_detect_sign_launched = False
        self.launch_detect_lane_launched = False
        self.launch_detect_intersection_launched = False
        self.launch_control_lane_launched = False
        self.launch_control_moving_launched = False

        self.is_triggered = False

        loop_rate = rospy.Rate(10)  # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered:
                self.fnControlNode()
            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        """Record the newly decided mode and trigger fnControlNode."""
        rospy.loginfo("starts the progress with %d", mode_msg.data)
        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbIntersectionStamped(self, intersection_msg):
        """Track the intersection step; on exit, hand the mode back."""
        rospy.loginfo("intersection Step changed from %d", self.current_step_intersection)
        self.current_step_intersection = intersection_msg.data

        if self.current_step_intersection == self.StepOfIntersection.exit.value:
            self.current_mode = self.CurrentMode.lane_following.value
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)

        self.is_triggered = True

    def fnControlNode(self):
        """Bring launch files up/down according to the current mode and step."""
        # lane_following
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("New trigger for lane_following")
            self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True)

            self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
            self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
            self.fnLaunch(self.Launcher.launch_detect_intersection.value, False)

            self.fnLaunch(self.Launcher.launch_control_lane.value, True)
            self.fnLaunch(self.Launcher.launch_control_moving.value, False)

        # intersection
        elif self.current_mode == self.CurrentMode.intersection.value:
            rospy.loginfo("New trigger for intersection")
            msg_pub_intersection_order = UInt8()

            if self.current_step_intersection == self.StepOfIntersection.detect_direction.value:
                rospy.loginfo("Current step : searching_intersection_sign")
                rospy.loginfo("Go to next step : exit")

                msg_pub_intersection_order.data = self.StepOfIntersection.detect_direction.value

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_intersection.value, True)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, True)

            elif self.current_step_intersection == self.StepOfIntersection.exit.value:
                rospy.loginfo("Current step : exit")

                msg_pub_intersection_order.data = self.StepOfIntersection.exit.value

                self.fnLaunch(self.Launcher.launch_detect_lane.value, True)
                self.fnLaunch(self.Launcher.launch_detect_sign.value, True)
                self.fnLaunch(self.Launcher.launch_detect_intersection.value, False)

                self.fnLaunch(self.Launcher.launch_control_lane.value, True)
                self.fnLaunch(self.Launcher.launch_control_moving.value, False)

            # Let the launched nodes settle before publishing the order.
            rospy.sleep(3)
            self.pub_intersection_order.publish(msg_pub_intersection_order)

    def _apply(self, attr, launch_file, is_start):
        """Start or stop one roslaunch parent, tracked via ``<attr>_launched``.

        Replaces six copy-pasted if/else ladders; the original also created a
        roslaunch.scriptapi.ROSLaunch() that was immediately overwritten (dead
        store) in every start branch.
        """
        flag = attr + '_launched'
        if is_start:
            if not getattr(self, flag):
                setattr(self, attr, roslaunch.parent.ROSLaunchParent(
                    self.uuid, [self.ros_package_path + launch_file]))
                setattr(self, flag, True)
                getattr(self, attr).start()
        elif getattr(self, flag):
            setattr(self, flag, False)
            getattr(self, attr).shutdown()

    def fnLaunch(self, launch_num, is_start):
        """Dispatch ``launch_num`` to its launch file; contract unchanged."""
        if launch_num == self.Launcher.launch_camera_ex_calib.value:
            self._apply('launch_camera',
                        'turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch', is_start)
        elif launch_num == self.Launcher.launch_detect_sign.value:
            self._apply('launch_detect_sign',
                        'turtlebot3_autorace_detect/launch/detect_sign.launch', is_start)
        elif launch_num == self.Launcher.launch_detect_lane.value:
            self._apply('launch_detect_lane',
                        'turtlebot3_autorace_detect/launch/detect_lane.launch', is_start)
        elif launch_num == self.Launcher.launch_detect_intersection.value:
            self._apply('launch_detect_intersection',
                        'turtlebot3_autorace_detect/launch/detect_intersection.launch', is_start)
        elif launch_num == self.Launcher.launch_control_lane.value:
            self._apply('launch_control_lane',
                        'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch', is_start)
        elif launch_num == self.Launcher.launch_control_moving.value:
            self._apply('launch_control_moving',
                        'turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch', is_start)

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
from enum import Enum
from std_msgs.msg import UInt8


class CoreModeDecider():
    """Chooses the operating mode for the level-crossing mission."""

    def __init__(self):
        # Triggers: detected signs, and the mode the controller hands back.
        self.sub_traffic_sign = rospy.Subscriber(
            '/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1)
        self.sub_returned_mode = rospy.Subscriber(
            '/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1)

        # Output: the decided mode, consumed by the node controller.
        self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1)

        self.InvokedObject = Enum('InvokedObject', 'traffic_sign')
        self.TrafficSign = Enum('TrafficSign', 'stop')
        self.CurrentMode = Enum('CurrentMode', 'idle lane_following level_crossing')

        self.fnInitMode()

    def cbInvokedByTrafficSign(self, traffic_sign_type_msg):
        """A traffic sign was detected: possibly switch the current mode."""
        rospy.loginfo("invoke sign")
        self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg)
        rospy.loginfo("Traffic sign detected")

    def cbReturnedMode(self, mode):
        """Mission finished: go back to lane following."""
        rospy.loginfo("Init Mode")
        self.fnInitMode()

    def fnInitMode(self):
        # Runs at start-up and whenever a mission completes.
        self.current_mode = self.CurrentMode.lane_following.value
        self.fnPublishMode()

    def fnDecideMode(self, invoked_object, msg_data):
        """A stop sign seen while lane following flips us to level_crossing."""
        if self.current_mode != self.CurrentMode.lane_following.value:
            return
        rospy.loginfo("lane_following")
        if invoked_object != self.InvokedObject.traffic_sign.value:
            return
        rospy.loginfo("traffic sign")
        if msg_data.data == self.TrafficSign.stop.value:
            self.current_mode = self.CurrentMode.level_crossing.value
            rospy.loginfo("level crossing")
        self.fnPublishMode()

    def fnPublishMode(self):
        # Broadcast the (possibly unchanged) current mode.
        decided_mode = UInt8()
        decided_mode.data = self.current_mode
        self.pub_decided_mode.publish(decided_mode)

    def main(self):
        rospy.spin()


if __name__ == '__main__':
    rospy.init_node('core_mode_decider')
    node = CoreModeDecider()
    node.main()
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_level_crossing_stamped = rospy.Subscriber('/detect/level_crossing_stamped', UInt8, self.cbLevelCrossingStamped, queue_size=1) + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_level_crossing_order = rospy.Publisher('/detect/level_crossing_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following level_crossing') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfLevelCrossing = Enum('StepOfLevelCrossing', 'pass_level exit') + self.current_step_level_crossing = self.StepOfLevelCrossing.pass_level.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_level launch_control_moving') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_sign_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_level_launched = False + self.launch_control_lane_launched = False + self.launch_control_moving_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, 
mode_msg): + rospy.loginfo("starts the progress with %d", mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = True + + # Which step is in Level Crossing + def cbLevelCrossingStamped(self, level_crossing_msg): + rospy.loginfo("LevelCrossing Step changed from %d", self.current_step_level_crossing) + + self.current_step_level_crossing = level_crossing_msg.data + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + self.current_mode = self.CurrentMode.level_crossing.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("New trigger for lane_following") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_level.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + # level_crossing + elif self.current_mode == self.CurrentMode.level_crossing.value: + rospy.loginfo("New trigger for level_crossing") + msg_pub_level_crossing_order = UInt8() + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + rospy.loginfo("Current step : pass_level") + rospy.loginfo("Go to next step : exit") + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.pass_level.value + + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + self.fnLaunch(self.Launcher.launch_detect_level.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif 
self.current_step_level_crossing == self.StepOfLevelCrossing.exit.value: + rospy.loginfo("Current step : exit") + + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.exit.value + + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_level.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + self.pub_level_crossing_order.publish(msg_pub_level_crossing_order) + rospy.sleep(2) + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_sign.value: + if is_start == True: + if self.launch_detect_sign_launched == False: + self.launch_detect_sign = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_level_crossing.launch"]) + self.launch_detect_sign_launched = True + self.launch_detect_sign.start() + else: + pass + else: + if self.launch_detect_sign_launched == True: + self.launch_detect_sign_launched = False + self.launch_detect_sign.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = 
roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_level.value: + if is_start == True: + if self.launch_detect_level_launched == False: + self.launch_detect_level = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_level = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_level_crossing.launch"]) + self.launch_detect_level_launched = True + self.launch_detect_level.start() + else: + pass + else: + if self.launch_detect_level_launched == True: + self.launch_detect_level_launched = False + self.launch_detect_level.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + 
"turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/level_crossing_core_node_controller.org b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/level_crossing_core_node_controller.org new file mode 100755 index 0000000..437ef25 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/level_crossing_core_node_controller.org @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_level_crossing_stamped = rospy.Subscriber('/detect/level_crossing_stamped', UInt8, self.cbLevelCrossingStamped, queue_size=1) + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_level_crossing_order = rospy.Publisher('/detect/level_crossing_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following level_crossing') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfLevelCrossing = Enum('StepOfLevelCrossing', 'pass_level exit') + self.current_step_level_crossing = self.StepOfLevelCrossing.pass_level.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_level launch_control_moving') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_sign_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_level_launched = False + self.launch_control_lane_launched = False + self.launch_control_moving_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, 
mode_msg): + rospy.loginfo("starts the progress with %d", mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = True + + # Which step is in Level Crossing + def cbLevelCrossingStamped(self, level_crossing_msg): + rospy.loginfo("LevelCrossing Step changed from %d", self.current_step_level_crossing) + + self.current_step_level_crossing = level_crossing_msg.data + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + self.current_mode = self.CurrentMode.level_crossing.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("New trigger for lane_following") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_level.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + # level_crossing + elif self.current_mode == self.CurrentMode.level_crossing.value: + rospy.loginfo("New trigger for level_crossing") + msg_pub_level_crossing_order = UInt8() + + if self.current_step_level_crossing == self.StepOfLevelCrossing.pass_level.value: + rospy.loginfo("Current step : pass_level") + rospy.loginfo("Go to next step : exit") + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.pass_level.value + + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + self.fnLaunch(self.Launcher.launch_detect_level.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif 
self.current_step_level_crossing == self.StepOfLevelCrossing.exit.value: + rospy.loginfo("Current step : exit") + + msg_pub_level_crossing_order.data = self.StepOfLevelCrossing.exit.value + + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_level.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + self.pub_level_crossing_order.publish(msg_pub_level_crossing_order) + rospy.sleep(2) + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_sign.value: + if is_start == True: + if self.launch_detect_sign_launched == False: + self.launch_detect_sign = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign.launch"]) + self.launch_detect_sign_launched = True + self.launch_detect_sign.start() + else: + pass + else: + if self.launch_detect_sign_launched == True: + self.launch_detect_sign_launched = False + self.launch_detect_sign.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + 
self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_level.value: + if is_start == True: + if self.launch_detect_level_launched == False: + self.launch_detect_level = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_level = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_level_crossing.launch"]) + self.launch_detect_level_launched = True + self.launch_detect_level.start() + else: + pass + else: + if self.launch_detect_level_launched == True: + self.launch_detect_level_launched = False + self.launch_detect_level.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + 
"turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_mode_decider b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_mode_decider new file mode 100755 index 0000000..cd0753b --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_mode_decider @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +from enum import Enum +from std_msgs.msg import UInt8 + +class CoreModeDecider(): + def __init__(self): + # subscribes : invoking object detected + self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1) + self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1) + + # publishes : decided mode + self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1) + + self.InvokedObject = Enum('InvokedObject', 'traffic_sign') + self.TrafficSign = Enum('TrafficSign', 'parking') + self.CurrentMode = Enum('CurrentMode', 'idle lane_following parking') + + self.fnInitMode() + + # Invoke if traffic sign is detected + def cbInvokedByTrafficSign(self, traffic_sign_type_msg): + rospy.loginfo("invoke sign") + self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg) + rospy.loginfo("Traffic sign detected") + + def cbReturnedMode(self, mode): + rospy.loginfo("Init Mode") + self.fnInitMode() + + def fnInitMode(self): # starts only when the program is started initially or any mission is completed + self.current_mode = self.CurrentMode.lane_following.value + self.fnPublishMode() + + def fnDecideMode(self, invoked_object, msg_data): # starts only when the traffic sign / traffic light is detected & current_mode is lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("currentmode : lane following") + if invoked_object == self.InvokedObject.traffic_sign.value: # Any Sign detected + rospy.loginfo("currentmode : any sign detected") + if msg_data.data == self.TrafficSign.parking.value: # Parking Sign detected + rospy.loginfo("currentmode : parking detected") + self.current_mode = self.CurrentMode.parking.value + else: + pass + + self.fnPublishMode() + else: + pass 
+ + def fnPublishMode(self): + decided_mode = UInt8() + decided_mode.data = self.current_mode + self.pub_decided_mode.publish(decided_mode) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_mode_decider') + node = CoreModeDecider() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller new file mode 100755 index 0000000..797cf6d --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_parking_stamped = rospy.Subscriber('/detect/parking_stamped', UInt8, self.cbParkingStamped, queue_size=1) + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_parking_order = rospy.Publisher('/detect/parking_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following parking') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfParking = Enum('StepOfParking', 'parking exit') + self.current_step_parking = self.StepOfParking.parking.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_parking launch_control_moving') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_sign_launched = False + self.launch_detect_parking_launched = False + self.launch_control_lane_launched = False + self.launch_control_moving_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, mode_msg): + rospy.loginfo("starts the progress with %d", 
mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = True + + def cbParkingStamped(self, parking_msg): + rospy.loginfo("Parking Step changed from %d", self.current_step_parking) + self.current_step_parking = parking_msg.data + + if self.current_step_parking == self.StepOfParking.exit.value: + self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("Current step : searching sign") + rospy.loginfo("Go to next step : parking") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_parking.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + # parking + elif self.current_mode == self.CurrentMode.parking.value: + rospy.loginfo("New trigger for parking") + msg_pub_parking_order = UInt8() + + if self.current_step_parking == self.StepOfParking.parking.value: + rospy.loginfo("Current step : parking") + rospy.loginfo("Go to next step : finish parking") + + msg_pub_parking_order.data = self.StepOfParking.parking.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + self.fnLaunch(self.Launcher.launch_detect_parking.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_parking == self.StepOfParking.exit.value: + rospy.loginfo("Current step : finish 
parking") + + msg_pub_parking_order.data = self.StepOfParking.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_parking.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + self.pub_parking_order.publish(msg_pub_parking_order) + + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign.value: + if is_start == True: + if self.launch_detect_sign_launched == False: + self.launch_detect_sign = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_parking.launch"]) + self.launch_detect_sign_launched = True + self.launch_detect_sign.start() + else: + pass + else: + if self.launch_detect_sign_launched == True: + self.launch_detect_sign_launched = False + self.launch_detect_sign.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = 
roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_parking.value: + if is_start == True: + if self.launch_detect_parking_launched == False: + self.launch_detect_parking = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_parking = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_parking.launch"]) + self.launch_detect_parking_launched = True + self.launch_detect_parking.start() + else: + pass + else: + if self.launch_detect_parking_launched == True: + self.launch_detect_parking_launched = False + self.launch_detect_parking.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + 
self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller.org b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller.org new file mode 100755 index 0000000..29639c4 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/parking_core_node_controller.org @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_parking_stamped = rospy.Subscriber('/detect/parking_stamped', UInt8, self.cbParkingStamped, queue_size=1) + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_parking_order = rospy.Publisher('/detect/parking_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following parking') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfParking = Enum('StepOfParking', 'parking exit') + self.current_step_parking = self.StepOfParking.parking.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_control_lane launch_detect_parking launch_control_moving') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_sign_launched = False + self.launch_detect_parking_launched = False + self.launch_control_lane_launched = False + self.launch_control_moving_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, mode_msg): + rospy.loginfo("starts the progress with %d", 
mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = True + + def cbParkingStamped(self, parking_msg): + rospy.loginfo("Parking Step changed from %d", self.current_step_parking) + self.current_step_parking = parking_msg.data + + if self.current_step_parking == self.StepOfParking.exit.value: + self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("Current step : searching sign") + rospy.loginfo("Go to next step : parking") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_parking.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + # parking + elif self.current_mode == self.CurrentMode.parking.value: + rospy.loginfo("New trigger for parking") + msg_pub_parking_order = UInt8() + + if self.current_step_parking == self.StepOfParking.parking.value: + rospy.loginfo("Current step : parking") + rospy.loginfo("Go to next step : finish parking") + + msg_pub_parking_order.data = self.StepOfParking.parking.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + self.fnLaunch(self.Launcher.launch_detect_parking.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_parking == self.StepOfParking.exit.value: + rospy.loginfo("Current step : finish 
parking") + + msg_pub_parking_order.data = self.StepOfParking.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_parking.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + self.pub_parking_order.publish(msg_pub_parking_order) + + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_sign.value: + if is_start == True: + if self.launch_detect_sign_launched == False: + self.launch_detect_sign = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign.launch"]) + self.launch_detect_sign_launched = True + self.launch_detect_sign.start() + else: + pass + else: + if self.launch_detect_sign_launched == True: + self.launch_detect_sign_launched = False + self.launch_detect_sign.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = 
roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + elif launch_num == self.Launcher.launch_detect_parking.value: + if is_start == True: + if self.launch_detect_parking_launched == False: + self.launch_detect_parking = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_parking = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_parking.launch"]) + self.launch_detect_parking_launched = True + self.launch_detect_parking.start() + else: + pass + else: + if self.launch_detect_parking_launched == True: + self.launch_detect_parking_launched = False + self.launch_detect_parking.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + 
self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_mode_decider b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_mode_decider new file mode 100755 index 0000000..34ed17d --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_mode_decider @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +from enum import Enum +from std_msgs.msg import UInt8 +from turtlebot3_autorace_msgs.msg import MovingParam + +class CoreModeDecider(): + def __init__(self): + # subscribes : invoking object detected + self.sub_traffic_light = rospy.Subscriber('/detect/traffic_light', UInt8, self.cbInvokedByTrafficLight, queue_size=1) + self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1) + + # publishes : decided mode + self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1) + + self.InvokedObject = Enum('InvokedObject', 'traffic_light') + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light') + + self.fnInitMode() + + # Invoke if traffic light is detected + def cbInvokedByTrafficLight(self, traffic_light_type_msg): + rospy.loginfo("invoke light") + self.fnDecideMode(self.InvokedObject.traffic_light.value, traffic_light_type_msg) + rospy.loginfo("Traffic light detected") + + def cbReturnedMode(self, mode): + rospy.loginfo("Init Mode") + self.fnInitMode() + + def fnInitMode(self): # starts only when the program is started initially or any mission is completed + self.current_mode = self.CurrentMode.lane_following.value + self.fnPublishMode() + + def fnDecideMode(self, invoked_object, msg_data): # starts only when the traffic sign / traffic light is detected & current_mode is lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + if invoked_object == self.InvokedObject.traffic_light.value: # Traffic Light detected + self.current_mode = self.CurrentMode.traffic_light.value + else: + pass + self.fnPublishMode() + + elif self.current_mode == self.CurrentMode.traffic_light.value: + if invoked_object == self.InvokedObject.traffic_light.value: + self.current_mode = self.CurrentMode.lane_following.value + 
else: + pass + self.fnPublishMode() + + else: + pass + + def fnPublishMode(self): + decided_mode = UInt8() + decided_mode.data = self.current_mode + self.pub_decided_mode.publish(decided_mode) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_mode_decider') + node = CoreModeDecider() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_node_controller b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_node_controller new file mode 100755 index 0000000..f29f304 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/traffic_light_core_node_controller @@ -0,0 +1,191 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_traffic_light_order = rospy.Publisher('/detect/traffic_light_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + self.pub_timer_start = rospy.Publisher('/detect/timer/start', Float64, queue_size= 1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfTrafficLight = Enum('StepOfTrafficLight', 'searching_traffic_light in_traffic_light') + self.current_step_traffic_light = self.StepOfTrafficLight.searching_traffic_light.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_lane launch_detect_traffic_light launch_driving_lane') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_traffic_light_launched = False + self.launch_driving_lane_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, mode_msg): + rospy.loginfo("starts the progress with %d", mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = 
True + + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("New trigger for lane_following") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_traffic_light.value, False) + + self.fnLaunch(self.Launcher.launch_driving_lane.value, True) + + # traffic_light + elif self.current_mode == self.CurrentMode.traffic_light.value: + rospy.loginfo("New trigger for traffic_light") + msg_pub_traffic_light_order = UInt8() + + if self.current_step_traffic_light == self.StepOfTrafficLight.searching_traffic_light.value: + rospy.loginfo("Current step : searching_traffic_light") + rospy.loginfo("Go to next step : in_traffic_light detect lane traffic_light driving lane") + + msg_pub_traffic_light_order.data = self.StepOfTrafficLight.in_traffic_light.value + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_traffic_light.value, True) + + self.fnLaunch(self.Launcher.launch_driving_lane.value, True) + + + elif self.current_step_traffic_light == self.StepOfTrafficLight.in_traffic_light.value: + rospy.loginfo("Current step : in_traffic_light") + rospy.loginfo("Go to next step : pass_traffic_light") + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_traffic_light.value, False) + + self.fnLaunch(self.Launcher.launch_driving_lane.value, True) + + self.current_mode = self.CurrentMode.lane_following.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + rospy.sleep(2) + + self.pub_traffic_light_order.publish(msg_pub_traffic_light_order) + + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + 
self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_traffic_light.value: + if is_start == True: + if self.launch_detect_traffic_light_launched == False: + self.launch_detect_traffic_light = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_traffic_light = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_traffic_light.launch"]) + self.launch_detect_traffic_light_launched = True + self.launch_detect_traffic_light.start() + else: + pass + else: + if self.launch_detect_traffic_light_launched == True: + self.launch_detect_traffic_light_launched = False + self.launch_detect_traffic_light.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_driving_lane.value: + if is_start == True: + if self.launch_driving_lane_launched == False: + self.launch_driving_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_driving_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + 
"turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_driving_lane_launched = True + self.launch_driving_lane.start() + else: + pass + else: + if self.launch_driving_lane_launched == True: + self.launch_driving_lane_launched = False + self.launch_driving_lane.shutdown() + else: + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_mode_decider b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_mode_decider new file mode 100755 index 0000000..812c4ae --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_mode_decider @@ -0,0 +1,81 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +from enum import Enum +from std_msgs.msg import UInt8 + +class CoreModeDecider(): + def __init__(self): + # subscribes : invoking object detected + self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1) + self.sub_returned_mode = rospy.Subscriber('/core/returned_mode', UInt8, self.cbReturnedMode, queue_size=1) + + # publishes : decided mode + self.pub_decided_mode = rospy.Publisher('/core/decided_mode', UInt8, queue_size=1) + + self.InvokedObject = Enum('InvokedObject', 'traffic_sign') + self.TrafficSign = Enum('TrafficSign', 'tunnel') + self.CurrentMode = Enum('CurrentMode', 'idle lane_following tunnel') + + self.fnInitMode() + + # Invoke if traffic sign is detected + def cbInvokedByTrafficSign(self, traffic_sign_type_msg): + rospy.loginfo("invoke sign") + self.fnDecideMode(self.InvokedObject.traffic_sign.value, traffic_sign_type_msg) + rospy.loginfo("Traffic sign detected") + + def cbReturnedMode(self, mode): + rospy.loginfo("Init Mode") + self.fnInitMode() + + def fnInitMode(self): # starts only when the program is started initially or any mission is completed + self.current_mode = self.CurrentMode.lane_following.value + self.fnPublishMode() + + def fnDecideMode(self, invoked_object, msg_data): # starts only when the traffic sign / traffic light is detected & current_mode is lane_following + rospy.loginfo("decide mode") + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("current mode : lane following") + if invoked_object == self.InvokedObject.traffic_sign.value: # Any Sign detected + rospy.loginfo("invoked object : traffic sign") + if msg_data.data == self.TrafficSign.tunnel.value: # Tunnel Sign detected + rospy.loginfo("traffic sign : tunnel sign") + self.current_mode = self.CurrentMode.tunnel.value + else: + pass + 
self.fnPublishMode() + else: + pass + + def fnPublishMode(self): + decided_mode = UInt8() + decided_mode.data = self.current_mode + self.pub_decided_mode.publish(decided_mode) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_mode_decider') + node = CoreModeDecider() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_node_controller b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_node_controller new file mode 100755 index 0000000..2b8dd1a --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_node_controller @@ -0,0 +1,316 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy, roslaunch +import subprocess +import os +import sys +from enum import Enum +from std_msgs.msg import UInt8, Float64 + +class CoreNodeController(): + def __init__(self): + self.ros_package_path = os.path.dirname(os.path.realpath(__file__)) + self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '') + + # subscribes : status returned + self.sub_tunnel_stamped = rospy.Subscriber('/detect/tunnel_stamped', UInt8, self.cbTunnelStamped, queue_size=1) + self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1) + + # publishes orders + self.pub_tunnel_order = rospy.Publisher('/detect/tunnel_order', UInt8, queue_size=1) + self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following tunnel') + self.current_mode = self.CurrentMode.idle.value + + self.StepOfTunnel = Enum('StepOfTunnel', 'searching_tunnel_sign go_in_to_tunnel navigation go_out_from_tunnel exit') + self.current_step_tunnel = self.StepOfTunnel.searching_tunnel_sign.value + + self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_detect_tunnel launch_control_lane launch_control_tunnel launch_control_moving') + self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False) + + self.launch_camera_launched = False + self.launch_detect_sign_launched = False + self.launch_detect_lane_launched = False + self.launch_detect_tunnel_launched = False + self.launch_control_lane_launched = False + self.launch_control_tunnel_launched = False + self.launch_control_moving_launched = False + + self.is_triggered = False + + loop_rate = rospy.Rate(10) # 10hz + while not rospy.is_shutdown(): + if self.is_triggered == True: + 
self.fnControlNode() + + loop_rate.sleep() + + def cbReceiveMode(self, mode_msg): + rospy.loginfo("starts the progress with %d", mode_msg.data) + + self.current_mode = mode_msg.data + self.is_triggered = True + + + # Which step is in Tunnel + def cbTunnelStamped(self, tunnel_msg): + rospy.loginfo("Tunnel Step changed from %d", self.current_step_tunnel) + + self.current_step_tunnel = tunnel_msg.data + + rospy.loginfo("into %d", self.current_step_tunnel) + + if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value: + self.current_mode = self.CurrentMode.tunnel.value + msg_mode_return = UInt8() + msg_mode_return.data = self.current_mode + self.pub_mode_return.publish(msg_mode_return) + + self.is_triggered = True + + def fnControlNode(self): + # lane_following + if self.current_mode == self.CurrentMode.lane_following.value: + rospy.loginfo("New trigger for lane_following") + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + + # tunnel + elif self.current_mode == self.CurrentMode.tunnel.value: + rospy.loginfo("New trigger for tunnel") + msg_pub_tunnel_order = UInt8() + + if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value: + rospy.loginfo("Current step : searching_tunnel_sign") + rospy.loginfo("Go to next step : go_in_to_tunnel") + + msg_pub_tunnel_order.data = self.StepOfTunnel.go_in_to_tunnel.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, False) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + 
self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_tunnel == self.StepOfTunnel.go_in_to_tunnel.value: + rospy.loginfo("Current step : go_in_to_tunnel") + rospy.loginfo("Go to next step : navigation") + + msg_pub_tunnel_order.data = self.StepOfTunnel.navigation.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, False) + + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, True) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + elif self.current_step_tunnel == self.StepOfTunnel.navigation.value: + rospy.loginfo("Current step : navigation") + rospy.loginfo("Go to next step : go_out_from_tunnel") + + msg_pub_tunnel_order.data = self.StepOfTunnel.go_out_from_tunnel.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_sign.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, False) + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, False) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, True) + + elif self.current_step_tunnel == self.StepOfTunnel.go_out_from_tunnel.value: + rospy.loginfo("Current step : go_out_from_tunnel") + rospy.loginfo("Go to next step : exit") + + msg_pub_tunnel_order.data = self.StepOfTunnel.exit.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + 
self.fnLaunch(self.Launcher.launch_detect_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + elif self.current_step_tunnel == self.StepOfTunnel.exit.value: + rospy.loginfo("Current step : exit") + rospy.loginfo("Go to next step : searching_tunnel_sign") + + msg_pub_tunnel_order.data = self.StepOfTunnel.searching_tunnel_sign.value + + self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, True) + + self.fnLaunch(self.Launcher.launch_detect_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_detect_lane.value, True) + self.fnLaunch(self.Launcher.launch_detect_sign.value, True) + + self.fnLaunch(self.Launcher.launch_control_lane.value, True) + self.fnLaunch(self.Launcher.launch_control_tunnel.value, False) + self.fnLaunch(self.Launcher.launch_control_moving.value, False) + + rospy.sleep(2) + + self.pub_tunnel_order.publish(msg_pub_tunnel_order) + + self.is_triggered = False + + def fnLaunch(self, launch_num, is_start): + if launch_num == self.Launcher.launch_camera_ex_calib.value: + if is_start == True: + if self.launch_camera_launched == False: + self.launch_camera = roslaunch.scriptapi.ROSLaunch() + self.launch_camera = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch"]) + self.launch_camera_launched = True + self.launch_camera.start() + else: + pass + else: + if self.launch_camera_launched == True: + self.launch_camera_launched = False + self.launch_camera.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_sign.value: + if is_start == True: + if self.launch_detect_sign_launched == False: + self.launch_detect_sign = 
roslaunch.scriptapi.ROSLaunch() + self.launch_detect_sign = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_sign_tunnel.launch"]) + self.launch_detect_sign_launched = True + self.launch_detect_sign.start() + else: + pass + else: + if self.launch_detect_sign_launched == True: + self.launch_detect_sign_launched = False + self.launch_detect_sign.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_lane.value: + if is_start == True: + if self.launch_detect_lane_launched == False: + self.launch_detect_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_lane.launch"]) + self.launch_detect_lane_launched = True + self.launch_detect_lane.start() + else: + pass + else: + if self.launch_detect_lane_launched == True: + self.launch_detect_lane_launched = False + self.launch_detect_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_detect_tunnel.value: + if is_start == True: + if self.launch_detect_tunnel_launched == False: + self.launch_detect_tunnel = roslaunch.scriptapi.ROSLaunch() + self.launch_detect_tunnel = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_detect/launch/detect_tunnel.launch"]) + self.launch_detect_tunnel_launched = True + self.launch_detect_tunnel.start() + else: + pass + else: + if self.launch_detect_tunnel_launched == True: + self.launch_detect_tunnel_launched = False + self.launch_detect_tunnel.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_lane.value: + if is_start == True: + if self.launch_control_lane_launched == False: + self.launch_control_lane = roslaunch.scriptapi.ROSLaunch() + self.launch_control_lane = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + 
"turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch"]) + self.launch_control_lane_launched = True + self.launch_control_lane.start() + else: + pass + else: + if self.launch_control_lane_launched == True: + self.launch_control_lane_launched = False + self.launch_control_lane.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_tunnel.value: + if is_start == True: + if self.launch_control_tunnel_launched == False: + self.launch_control_tunnel = roslaunch.scriptapi.ROSLaunch() + self.launch_control_tunnel = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_tunnel.launch"]) + self.launch_control_tunnel_launched = True + self.launch_control_tunnel.start() + else: + pass + else: + if self.launch_control_tunnel_launched == True: + self.launch_control_tunnel_launched = False + self.launch_control_tunnel.shutdown() + else: + pass + + elif launch_num == self.Launcher.launch_control_moving.value: + if is_start == True: + if self.launch_control_moving_launched == False: + self.launch_control_moving = roslaunch.scriptapi.ROSLaunch() + self.launch_control_moving = roslaunch.parent.ROSLaunchParent(self.uuid, [self.ros_package_path + "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch"]) + self.launch_control_moving_launched = True + self.launch_control_moving.start() + else: + pass + else: + if self.launch_control_moving_launched == True: + self.launch_control_moving_launched = False + self.launch_control_moving.shutdown() + pass + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('core_node_controller') + node = CoreNodeController() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_node_controller.org b/src/turtlebot3_autorace_2020/turtlebot3_autorace_core/nodes/tunnel_core_node_controller.org new file mode 100755 index 0000000..19ab96e --- 
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim

import rospy, roslaunch
import subprocess
import os
import sys
from enum import Enum
from std_msgs.msg import UInt8, Float64

class CoreNodeController():
    """Supervisor for the tunnel mission.

    Listens for the decided driving mode and the current tunnel step, and
    starts/stops the camera-calibration, detection and control launch files
    accordingly via roslaunch parents.
    """

    def __init__(self):
        # Package root: this file lives in <root>/turtlebot3_autorace_core/nodes,
        # and launch files are addressed relative to <root>.
        self.ros_package_path = os.path.dirname(os.path.realpath(__file__))
        self.ros_package_path = self.ros_package_path.replace('turtlebot3_autorace_core/nodes', '')

        # subscribes : status returned
        self.sub_tunnel_stamped = rospy.Subscriber('/detect/tunnel_stamped', UInt8, self.cbTunnelStamped, queue_size=1)
        self.sub_mode_control = rospy.Subscriber('/core/decided_mode', UInt8, self.cbReceiveMode, queue_size=1)

        # publishes orders
        self.pub_tunnel_order = rospy.Publisher('/detect/tunnel_order', UInt8, queue_size=1)
        self.pub_mode_return = rospy.Publisher('/core/returned_mode', UInt8, queue_size=1)

        self.CurrentMode = Enum('CurrentMode', 'idle lane_following tunnel')
        self.current_mode = self.CurrentMode.idle.value

        self.StepOfTunnel = Enum('StepOfTunnel', 'searching_tunnel_sign go_in_to_tunnel navigation go_out_from_tunnel exit')
        self.current_step_tunnel = self.StepOfTunnel.searching_tunnel_sign.value

        self.Launcher = Enum('Launcher', 'launch_camera_ex_calib launch_detect_sign launch_detect_lane launch_detect_tunnel launch_control_lane launch_control_tunnel launch_control_moving')
        self.uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)

        # Launch-file path (relative to the package root) for every launcher.
        self.launch_files = {
            self.Launcher.launch_camera_ex_calib.value: "turtlebot3_autorace_camera/launch/extrinsic_camera_calibration.launch",
            self.Launcher.launch_detect_sign.value:     "turtlebot3_autorace_detect/launch/detect_sign.launch",
            self.Launcher.launch_detect_lane.value:     "turtlebot3_autorace_detect/launch/detect_lane.launch",
            self.Launcher.launch_detect_tunnel.value:   "turtlebot3_autorace_detect/launch/detect_tunnel.launch",
            self.Launcher.launch_control_lane.value:    "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_lane.launch",
            self.Launcher.launch_control_tunnel.value:  "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_tunnel.launch",
            self.Launcher.launch_control_moving.value:  "turtlebot3_autorace_driving/launch/turtlebot3_autorace_control_moving.launch",
        }
        self.launchers = {}                                       # launcher value -> ROSLaunchParent
        self.launched = {key: False for key in self.launch_files}  # launcher value -> running?

        self.is_triggered = False

        loop_rate = rospy.Rate(10)  # 10hz
        while not rospy.is_shutdown():
            if self.is_triggered:
                self.fnControlNode()
            loop_rate.sleep()

    def cbReceiveMode(self, mode_msg):
        """Stores the externally decided driving mode and schedules a re-plan."""
        rospy.loginfo("starts the progress with %d", mode_msg.data)
        self.current_mode = mode_msg.data
        self.is_triggered = True

    def cbTunnelStamped(self, tunnel_msg):
        """Tracks which step of the tunnel mission is active."""
        rospy.loginfo("Tunnel Step changed from %d", self.current_step_tunnel)

        self.current_step_tunnel = tunnel_msg.data

        rospy.loginfo("into %d", self.current_step_tunnel)

        if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value:
            self.current_mode = self.CurrentMode.tunnel.value
            # BUG FIX: the original published the UInt8 *class* (missing
            # parentheses) instead of a message instance.
            msg_mode_return = UInt8()
            msg_mode_return.data = self.current_mode
            self.pub_mode_return.publish(msg_mode_return)

        self.is_triggered = True

    def fnSetLaunchers(self, camera, sign, lane, tunnel, control_lane, control_tunnel, control_moving):
        """Brings every launcher to the requested on/off state (order-independent)."""
        self.fnLaunch(self.Launcher.launch_camera_ex_calib.value, camera)
        self.fnLaunch(self.Launcher.launch_detect_sign.value, sign)
        self.fnLaunch(self.Launcher.launch_detect_lane.value, lane)
        self.fnLaunch(self.Launcher.launch_detect_tunnel.value, tunnel)
        self.fnLaunch(self.Launcher.launch_control_lane.value, control_lane)
        self.fnLaunch(self.Launcher.launch_control_tunnel.value, control_tunnel)
        self.fnLaunch(self.Launcher.launch_control_moving.value, control_moving)

    def fnControlNode(self):
        """Applies the launcher configuration for the current mode/step."""
        # lane_following
        if self.current_mode == self.CurrentMode.lane_following.value:
            rospy.loginfo("New trigger for lane_following")
            self.fnSetLaunchers(camera=True, sign=True, lane=True, tunnel=False,
                                control_lane=True, control_tunnel=False, control_moving=False)

        # tunnel
        elif self.current_mode == self.CurrentMode.tunnel.value:
            rospy.loginfo("New trigger for tunnel")
            msg_pub_tunnel_order = UInt8()

            if self.current_step_tunnel == self.StepOfTunnel.searching_tunnel_sign.value:
                rospy.loginfo("Current step : searching_tunnel_sign")
                rospy.loginfo("Go to next step : go_in_to_tunnel")
                msg_pub_tunnel_order.data = self.StepOfTunnel.go_in_to_tunnel.value
                self.fnSetLaunchers(camera=False, sign=False, lane=False, tunnel=True,
                                    control_lane=False, control_tunnel=False, control_moving=True)

            elif self.current_step_tunnel == self.StepOfTunnel.go_in_to_tunnel.value:
                rospy.loginfo("Current step : go_in_to_tunnel")
                rospy.loginfo("Go to next step : navigation")
                msg_pub_tunnel_order.data = self.StepOfTunnel.navigation.value
                self.fnSetLaunchers(camera=False, sign=False, lane=False, tunnel=True,
                                    control_lane=False, control_tunnel=True, control_moving=False)

            elif self.current_step_tunnel == self.StepOfTunnel.navigation.value:
                rospy.loginfo("Current step : navigation")
                rospy.loginfo("Go to next step : go_out_from_tunnel")
                msg_pub_tunnel_order.data = self.StepOfTunnel.go_out_from_tunnel.value
                self.fnSetLaunchers(camera=True, sign=False, lane=False, tunnel=True,
                                    control_lane=False, control_tunnel=False, control_moving=True)

            elif self.current_step_tunnel == self.StepOfTunnel.go_out_from_tunnel.value:
                rospy.loginfo("Current step : go_out_from_tunnel")
                rospy.loginfo("Go to next step : exit")
                msg_pub_tunnel_order.data = self.StepOfTunnel.exit.value
                self.fnSetLaunchers(camera=True, sign=True, lane=True, tunnel=False,
                                    control_lane=True, control_tunnel=False, control_moving=False)

            elif self.current_step_tunnel == self.StepOfTunnel.exit.value:
                rospy.loginfo("Current step : exit")
                rospy.loginfo("Go to next step : searching_tunnel_sign")
                # BUG FIX: the original read the garbled attribute
                # 'searching_tunneROSLaunchParentvalue', which would raise
                # AttributeError at runtime.
                msg_pub_tunnel_order.data = self.StepOfTunnel.searching_tunnel_sign.value
                self.fnSetLaunchers(camera=True, sign=True, lane=True, tunnel=False,
                                    control_lane=True, control_tunnel=False, control_moving=False)

            rospy.sleep(2)
            self.pub_tunnel_order.publish(msg_pub_tunnel_order)

        self.is_triggered = False

    def fnLaunch(self, launch_num, is_start):
        """Starts or shuts down one launcher, tracked by its Launcher enum value.

        The original duplicated this start/stop logic once per launcher (and
        created a dead scriptapi.ROSLaunch that was immediately overwritten);
        a table lookup keeps the behavior identical.
        """
        if launch_num not in self.launch_files:
            return

        if is_start:
            if not self.launched[launch_num]:
                parent = roslaunch.parent.ROSLaunchParent(
                    self.uuid, [self.ros_package_path + self.launch_files[launch_num]])
                self.launchers[launch_num] = parent
                self.launched[launch_num] = True
                parent.start()
        else:
            if self.launched[launch_num]:
                self.launched[launch_num] = False
                self.launchers[launch_num].shutdown()

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('core_node_controller')
    node = CoreNodeController()
    node.main()
b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_level_crossing.launch @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_parking.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_parking.launch new file mode 100644 index 0000000..cae115d --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_parking.launch @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign.launch new file mode 100644 index 0000000..f259265 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign.launch @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_construction.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_construction.launch new file mode 100644 index 0000000..ed89977 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_construction.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_intersection.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_intersection.launch new file mode 100644 index 0000000..f259265 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_intersection.launch @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_level_crossing.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_level_crossing.launch new file 
mode 100644 index 0000000..409dad7 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_level_crossing.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_parking.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_parking.launch new file mode 100644 index 0000000..e5b0be4 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_parking.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_tunnel.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_tunnel.launch new file mode 100644 index 0000000..b712674 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_sign_tunnel.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_traffic_light.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_traffic_light.launch new file mode 100644 index 0000000..13caae5 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_traffic_light.launch @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_tunnel.launch b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_tunnel.launch new file mode 100644 index 0000000..2412284 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/launch/detect_tunnel.launch @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_construction b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_construction new file mode 100755 index 
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import os
from enum import Enum
from std_msgs.msg import UInt8, Float64
from sensor_msgs.msg import LaserScan
from turtlebot3_autorace_msgs.msg import MovingParam

class DetectContruction():
    """Construction mission: wait for a LIDAR obstacle, then drive a fixed
    zig-zag escape path by publishing relative move commands.

    (Class name keeps the upstream 'Contruction' spelling: it is the node's
    public identifier.)
    """

    def __init__(self):
        # subscribes state
        self.sub_scan_obstacle = rospy.Subscriber('/detect/scan', LaserScan, self.cbScanObstacle, queue_size=1)
        self.sub_construction_order = rospy.Subscriber('/detect/construction_order', UInt8, self.cbConstructionOrder, queue_size=1)
        self.sub_moving_completed = rospy.Subscriber('/control/moving/complete', UInt8, self.cbMovingComplete, queue_size = 1)

        # publishes state
        self.pub_construction_return = rospy.Publisher('/detect/construction_stamped', UInt8, queue_size=1)
        self.pub_moving = rospy.Publisher('/control/moving/state', MovingParam, queue_size= 1)
        self.pub_max_vel = rospy.Publisher('/control/max_vel', Float64, queue_size = 1)

        self.StepOfConstruction = Enum('StepOfConstruction', 'find_obstacle avoid_obstacle exit')
        self.is_obstacle_detected = False   # set by cbScanObstacle
        self.is_moving_complete = False     # set by cbMovingComplete

    def cbMovingComplete(self, data):
        """Completion flag from the moving controller; consumed by _fnWaitMovingComplete."""
        self.is_moving_complete = True

    def _fnWaitMovingComplete(self):
        """Blocks until the current move finishes, then clears the flag.

        The original spun in a tight `while True` loop; the short sleep keeps
        the same semantics without burning a CPU core (callbacks run on
        separate threads in rospy).
        """
        while not self.is_moving_complete:
            rospy.sleep(0.05)
        self.is_moving_complete = False

    def _fnMove(self, label, moving_type, angular, linear):
        """Publishes one relative move command and waits for its completion."""
        rospy.loginfo(label)
        msg_moving = MovingParam()
        msg_moving.moving_type = moving_type
        msg_moving.moving_value_angular = angular
        msg_moving.moving_value_linear = linear
        self.pub_moving.publish(msg_moving)
        self._fnWaitMovingComplete()

    def cbConstructionOrder(self, order):
        """Executes the step requested by the core controller and reports back."""
        msg_pub_construction_return = UInt8()

        if order.data == self.StepOfConstruction.find_obstacle.value:
            rospy.loginfo("find obstacle")

            # wait for the LIDAR callback to flag an obstacle
            while not self.is_obstacle_detected:
                rospy.sleep(0.05)
            rospy.loginfo("Encounter obstacle")

            # stop lane following before the avoidance maneuver
            msg_pub_max_vel = Float64()
            msg_pub_max_vel.data = 0.0
            self.pub_max_vel.publish(msg_pub_max_vel)

            rospy.sleep(1)

            msg_pub_construction_return.data = self.StepOfConstruction.avoid_obstacle.value

        elif order.data == self.StepOfConstruction.avoid_obstacle.value:
            rospy.loginfo("avoid obstacle")

            # Fixed zig-zag escape: (log label, moving_type, angular[deg], linear[m]).
            # The original wrote these ten blocks out by hand.
            maneuvers = [
                ("go left",     2, 90,  0.0),
                ("go straight", 4, 0.0, 0.25),
                ("go right",    3, 90,  0.0),
                ("go straight", 4, 0.0, 0.5),
                ("go right",    3, 90,  0.0),
                ("go straight", 4, 0.0, 0.4),
                ("go left",     2, 90,  0.0),
                ("go straight", 4, 0.0, 0.4),
                ("go left",     2, 80,  0.0),
                ("go straight", 4, 0.0, 0.4),
            ]
            for i, (label, moving_type, angular, linear) in enumerate(maneuvers):
                self._fnMove(label, moving_type, angular, linear)
                # the original paused 1s between moves but not after the last one
                if i < len(maneuvers) - 1:
                    rospy.sleep(1)

            rospy.loginfo("construction finished")
            msg_pub_construction_return.data = self.StepOfConstruction.exit.value

        elif order.data == self.StepOfConstruction.exit.value:
            rospy.loginfo("construction finished")
            msg_pub_construction_return.data = self.StepOfConstruction.exit.value

        self.pub_construction_return.publish(msg_pub_construction_return)
        rospy.sleep(3)

    def cbScanObstacle(self, scan):
        """Flags an obstacle when any range in a +/-25 degree front window is
        closer than 0.2 m (ranges <= 0.01 are treated as invalid readings)."""
        angle_scan = 25
        scan_start = 0 - angle_scan
        scan_end = 0 + angle_scan
        threshold_distance = 0.2
        is_obstacle_detected = False

        # negative indices wrap around, covering the sector on both sides of 0 degrees
        for i in range(scan_start, scan_end):
            if scan.ranges[i] < threshold_distance and scan.ranges[i] > 0.01:
                is_obstacle_detected = True

        self.is_obstacle_detected = is_obstacle_detected

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_contruction')
    node = DetectContruction()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
import os
import cv2
from enum import Enum
from std_msgs.msg import UInt8
from sensor_msgs.msg import Image, CompressedImage
from cv_bridge import CvBridge, CvBridgeError

class DetectSign():
    """Detects the 'construction' traffic sign in camera frames using SIFT
    features matched against a reference image with a FLANN kd-tree, and
    publishes the sign id plus a debug image."""

    def __init__(self):
        self.fnPreproc()

        self.sub_image_type = "raw"         # choose image type: "compressed" or "raw"
        self.pub_image_type = "compressed"  # choose image type: "compressed" or "raw"

        # subscribes
        if self.sub_image_type == "compressed":
            self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindTrafficSign, queue_size = 1)
        elif self.sub_image_type == "raw":
            self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindTrafficSign, queue_size = 1)

        # publishes
        self.pub_traffic_sign = rospy.Publisher('/detect/traffic_sign', UInt8, queue_size=1)

        if self.pub_image_type == "compressed":
            self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1)
        elif self.pub_image_type == "raw":
            self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output', Image, queue_size = 1)

        self.cvBridge = CvBridge()
        self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking tunnel')
        self.counter = 1  # frame counter used to skip frames in cbFindTrafficSign

    def fnPreproc(self):
        """Initializes the SIFT detector, the reference-sign descriptors and
        the FLANN matcher."""
        self.sift = cv2.SIFT_create()

        # reference images live in <package>/image/ next to this nodes/ dir
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('turtlebot3_autorace_detect/nodes', 'turtlebot3_autorace_detect/')
        dir_path += 'image/'

        self.img_construction = cv2.imread(dir_path + 'construction.png', 0)
        self.kp_construction, self.des_construction = self.sift.detectAndCompute(self.img_construction, None)

        FLANN_INDEX_KDTREE = 0
        index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
        search_params = dict(checks = 50)

        self.flann = cv2.FlannBasedMatcher(index_params, search_params)

    def fnCalcMSE(self, arr1, arr2):
        """Mean squared error between two equally-shaped point arrays."""
        squared_diff = (arr1 - arr2) ** 2
        # renamed from 'sum': don't shadow the builtin
        total = np.sum(squared_diff)
        num_all = arr1.shape[0] * arr1.shape[1]  # arrays must share a shape
        return total / num_all

    def cbFindTrafficSign(self, image_msg):
        """Matches the incoming frame against the construction sign; publishes
        the sign id and a debug image when the match passes the thresholds."""
        # Process only every 3rd frame to bound CPU load (the original comment
        # claimed 1/5; the code keeps 1 of 3).
        if self.counter % 3 != 0:
            self.counter += 1
            return
        else:
            self.counter = 1

        if self.sub_image_type == "compressed":
            # converting compressed image to opencv image
            np_arr = np.frombuffer(image_msg.data, np.uint8)
            cv_image_input = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
        elif self.sub_image_type == "raw":
            cv_image_input = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8")

        MIN_MATCH_COUNT = 8
        MIN_MSE_DECISION = 50000

        # find the keypoints and descriptors with SIFT
        kp1, des1 = self.sift.detectAndCompute(cv_image_input, None)
        # BUG FIX: frames without any keypoints yield des1 = None, which made
        # flann.knnMatch raise; skip such frames instead of crashing the node.
        if des1 is None:
            return

        matches_construction = self.flann.knnMatch(des1, self.des_construction, k=2)

        image_out_num = 1  # 1 = raw passthrough, 2 = annotated match image

        # Lowe's ratio test
        good_construction = []
        for m, n in matches_construction:
            if m.distance < 0.7 * n.distance:
                good_construction.append(m)

        if len(good_construction) > MIN_MATCH_COUNT:
            src_pts = np.float32([kp1[m.queryIdx].pt for m in good_construction]).reshape(-1, 1, 2)
            dst_pts = np.float32([self.kp_construction[m.trainIdx].pt for m in good_construction]).reshape(-1, 1, 2)

            M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
            matches_construction = mask.ravel().tolist()

            mse = self.fnCalcMSE(src_pts, dst_pts)
            if mse < MIN_MSE_DECISION:
                msg_sign = UInt8()
                msg_sign.data = self.TrafficSign.construction.value

                self.pub_traffic_sign.publish(msg_sign)

                rospy.loginfo("construction")
                image_out_num = 2
        else:
            matches_construction = None
            rospy.loginfo("not found")

        if image_out_num == 1:
            if self.pub_image_type == "compressed":
                # publishes traffic sign image in compressed type
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(cv_image_input, "jpg"))
            elif self.pub_image_type == "raw":
                # publishes traffic sign image in raw type
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(cv_image_input, "bgr8"))

        elif image_out_num == 2:
            draw_params_construction = dict(matchColor = (255,0,0),  # draw matches in blue (BGR)
                                            singlePointColor = None,
                                            matchesMask = matches_construction,  # draw only inliers
                                            flags = 2)

            final_construction = cv2.drawMatches(cv_image_input, kp1, self.img_construction, self.kp_construction, good_construction, None, **draw_params_construction)

            if self.pub_image_type == "compressed":
                # publishes traffic sign image in compressed type
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_construction, "jpg"))
            elif self.pub_image_type == "raw":
                # publishes traffic sign image in raw type
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_construction, "bgr8"))

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_sign')
    node = DetectSign()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import os
from enum import Enum
from std_msgs.msg import UInt8
from turtlebot3_autorace_msgs.msg import MovingParam

class DetectIntersection():
    """Intersection mission: waits for a left/right sign from the sign
    detector, then drives the corresponding fixed turn sequence."""

    def __init__(self):
        # subscribes state
        self.sub_traffic_sign = rospy.Subscriber('/detect/traffic_sign', UInt8, self.cbInvokedByTrafficSign, queue_size=1)
        self.sub_intersection_order = rospy.Subscriber('/detect/intersection_order', UInt8, self.cbIntersectionOrder, queue_size=2)
        self.sub_moving_completed = rospy.Subscriber('/control/moving/complete', UInt8, self.cbMovingComplete, queue_size = 1)

        # publisher state
        self.pub_intersection_return = rospy.Publisher('/detect/intersection_stamped', UInt8, queue_size=1)
        self.pub_moving = rospy.Publisher('/control/moving/state', MovingParam, queue_size= 1)

        self.StepOfIntersection = Enum('StepOfIntersection', 'detect_direction exit')
        # NOTE(review): this 3-value enum must stay aligned with the ordering
        # used by the sign publisher on /detect/traffic_sign — confirm against
        # the detect_*_sign nodes.
        self.TrafficSign = Enum('TrafficSign', 'intersection left right')

        self.is_moving_complete = False        # set by cbMovingComplete
        self.is_intersection_detected = False  # armed by cbIntersectionOrder while waiting for a sign
        self.is_left_detected = False
        self.is_right_detected = False

    def cbInvokedByTrafficSign(self, traffic_sign_type_msg):
        """Latches a driving direction while the intersection step is armed."""
        if self.is_intersection_detected:
            if traffic_sign_type_msg.data == self.TrafficSign.left.value:
                self.is_left_detected = True
                self.is_right_detected = False
            elif traffic_sign_type_msg.data == self.TrafficSign.right.value:
                self.is_right_detected = True
                self.is_left_detected = False

            # BUG FIX: the original wrote `self.is_intersection_detected == False`
            # (a no-op comparison) where an assignment was intended, so the
            # latch was never disarmed. Disarm only once a direction is
            # actually latched, so an unrelated sign cannot deadlock the wait.
            if self.is_left_detected or self.is_right_detected:
                self.is_intersection_detected = False

    def cbMovingComplete(self, data):
        """Completion flag from the moving controller; consumed by _fnWaitMovingComplete."""
        self.is_moving_complete = True

    def _fnWaitMovingComplete(self):
        """Blocks until the current move finishes (with a short sleep instead
        of the original busy spin), then clears the flag."""
        while not self.is_moving_complete:
            rospy.sleep(0.05)
        self.is_moving_complete = False

    def _fnMove(self, label, moving_type, angular, linear):
        """Publishes one relative move, waits for completion, pauses 1s
        (matching the original per-move cadence)."""
        rospy.loginfo(label)
        msg_moving = MovingParam()
        msg_moving.moving_type = moving_type
        msg_moving.moving_value_angular = angular
        msg_moving.moving_value_linear = linear
        self.pub_moving.publish(msg_moving)
        self._fnWaitMovingComplete()
        rospy.sleep(1)

    def cbIntersectionOrder(self, order):
        """Executes the step requested by the core controller and reports back."""
        msg_pub_intersection_return = UInt8()

        if order.data == self.StepOfIntersection.detect_direction.value:
            # arm the sign latch, then wait for a left/right decision
            self.is_intersection_detected = True
            while not (self.is_left_detected or self.is_right_detected):
                rospy.sleep(0.05)

            if self.is_right_detected:
                self._fnMove("go straight", 4, 0, 0.15)
                self._fnMove("go right",    3, 45, 0.0)
                self._fnMove("go straight", 4, 0, 0.1)
            elif self.is_left_detected:
                self._fnMove("go straight", 4, 0, 0.15)
                self._fnMove("go left",     2, 85, 0.0)
                self._fnMove("go straight", 4, 0, 0.2)

            rospy.loginfo("moving finished")
            msg_pub_intersection_return.data = self.StepOfIntersection.exit.value

        elif order.data == self.StepOfIntersection.exit.value:
            rospy.loginfo("Now finished")
            rospy.sleep(2)
            msg_pub_intersection_return.data = self.StepOfIntersection.exit.value

        self.pub_intersection_return.publish(msg_pub_intersection_return)
        rospy.sleep(3)

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_intersection')
    node = DetectIntersection()
    node.main()
+# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +import os +import cv2 +from enum import Enum +from std_msgs.msg import UInt8 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError + +class DetectSign(): + def __init__(self): + self.fnPreproc() + + self.sub_image_type = "raw" # you can choose image type "compressed", "raw" + self.pub_image_type = "compressed" # you can choose image type "compressed", "raw" + + #subscribes + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindTrafficSign, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindTrafficSign, queue_size = 1) + + #publishes + self.pub_traffic_sign = rospy.Publisher('/detect/traffic_sign', UInt8, queue_size=1) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + self.cvBridge = CvBridge() + self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking tunnel') + self.counter = 1 + + def fnPreproc(self): + # Initiate SIFT detector + self.sift = cv2.SIFT_create() + + dir_path = os.path.dirname(os.path.realpath(__file__)) + dir_path = dir_path.replace('turtlebot3_autorace_detect/nodes', 'turtlebot3_autorace_detect/') + dir_path += 'image/' + + 
self.img_intersection = cv2.imread(dir_path + 'intersection.png',0) + self.img_left = cv2.imread(dir_path + 'left.png',0) + self.img_right = cv2.imread(dir_path + 'right.png',0) + + self.kp_intersection, self.des_intersection = self.sift.detectAndCompute(self.img_intersection, None) + self.kp_left, self.des_left = self.sift.detectAndCompute(self.img_left, None) + self.kp_right, self.des_right = self.sift.detectAndCompute(self.img_right, None) + + FLANN_INDEX_KDTREE = 0 + index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5) + search_params = dict(checks = 50) + + self.flann = cv2.FlannBasedMatcher(index_params, search_params) + + def fnCalcMSE(self, arr1, arr2): + squared_diff = (arr1 - arr2) ** 2 + sum = np.sum(squared_diff) + num_all = arr1.shape[0] * arr1.shape[1] #cv_image_input and 2 should have same shape + err = sum / num_all + return err + + def cbFindTrafficSign(self, image_msg): + # drop the frame to 1/5 (6fps) because of the processing speed. This is up to your computer's operating power. 
+ if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + #converting compressed image to opencv image + np_arr = np.frombuffer(image_msg.data, np.uint8) + cv_image_input = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + elif self.sub_image_type == "raw": + cv_image_input = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + MIN_MATCH_COUNT = 9 #9 + MIN_MSE_DECISION = 50000 + + # find the keypoints and descriptors with SIFT + kp1, des1 = self.sift.detectAndCompute(cv_image_input,None) + + matches_intersection = self.flann.knnMatch(des1,self.des_intersection,k=2) + matches_left = self.flann.knnMatch(des1,self.des_left,k=2) + matches_right = self.flann.knnMatch(des1,self.des_right,k=2) + + image_out_num = 1 + + good_intersection = [] + for m,n in matches_intersection: + if m.distance < 0.7*n.distance: + good_intersection.append(m) + if len(good_intersection)>MIN_MATCH_COUNT: + src_pts = np.float32([kp1[m.queryIdx].pt for m in good_intersection ]).reshape(-1,1,2) + dst_pts = np.float32([self.kp_intersection[m.trainIdx].pt for m in good_intersection]).reshape(-1,1,2) + + M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0) + matches_intersection = mask.ravel().tolist() + + mse = self.fnCalcMSE(src_pts, dst_pts) + if mse < MIN_MSE_DECISION: + msg_sign = UInt8() + msg_sign.data = self.TrafficSign.intersection.value + + self.pub_traffic_sign.publish(msg_sign) + rospy.loginfo("detect intersection sign") + image_out_num = 2 + + good_left = [] + for m,n in matches_left: + if m.distance < 0.7*n.distance: + good_left.append(m) + if len(good_left)>MIN_MATCH_COUNT: + src_pts = np.float32([kp1[m.queryIdx].pt for m in good_left ]).reshape(-1,1,2) + dst_pts = np.float32([self.kp_left[m.trainIdx].pt for m in good_left]).reshape(-1,1,2) + + M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0) + matches_left = mask.ravel().tolist() + + mse = self.fnCalcMSE(src_pts, dst_pts) + if mse < MIN_MSE_DECISION: 
+ msg_sign = UInt8() + msg_sign.data = self.TrafficSign.left.value + + self.pub_traffic_sign.publish(msg_sign) + rospy.loginfo("detect left sign") + image_out_num = 3 + + else: + matches_left = None + # rospy.loginfo("nothing detected") + + good_right = [] + for m,n in matches_right: + if m.distance < 0.7*n.distance: + good_right.append(m) + if len(good_right)>MIN_MATCH_COUNT: + src_pts = np.float32([kp1[m.queryIdx].pt for m in good_right ]).reshape(-1,1,2) + dst_pts = np.float32([self.kp_right[m.trainIdx].pt for m in good_right]).reshape(-1,1,2) + + M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0) + matches_right = mask.ravel().tolist() + + mse = self.fnCalcMSE(src_pts, dst_pts) + if mse < MIN_MSE_DECISION: + msg_sign = UInt8() + msg_sign.data = self.TrafficSign.right.value + + self.pub_traffic_sign.publish(msg_sign) + rospy.loginfo("detect right sign") + image_out_num = 4 + + else: + matches_right = None + # rospy.loginfo("nothing detected") + + if image_out_num == 1: + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(cv_image_input, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(cv_image_input, "bgr8")) + + elif image_out_num == 2: + draw_params_intersection = dict(matchColor = (255,0,0), # draw matches in green color + singlePointColor = None, + matchesMask = matches_intersection, # draw only inliers + flags = 2) + + final_intersection = cv2.drawMatches(cv_image_input,kp1,self.img_intersection,self.kp_intersection,good_intersection,None,**draw_params_intersection) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_intersection, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic 
sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_intersection, "bgr8")) + + elif image_out_num == 3: + draw_params_left = dict(matchColor = (255,0,0), # draw matches in green color + singlePointColor = None, + matchesMask = matches_left, # draw only inliers + flags = 2) + + final_left = cv2.drawMatches(cv_image_input,kp1,self.img_left,self.kp_left,good_left,None,**draw_params_left) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_left, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_left, "bgr8")) + + elif image_out_num == 4: + draw_params_right = dict(matchColor = (255,0,0), # draw matches in green color + singlePointColor = None, + matchesMask = matches_right, # draw only inliers + flags = 2) + + fianl_right = cv2.drawMatches(cv_image_input,kp1,self.img_right,self.kp_right,good_right,None,**draw_params_right) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(fianl_right, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(fianl_right, "bgr8")) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_sign') + node = DetectSign() + node.main() + + + + + + + + + + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_lane b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_lane new file mode 100755 index 0000000..69f1950 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_lane @@ -0,0 +1,491 @@ +#!/usr/bin/env python +# 
-*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Authors: Leon Jung, Gilbert, Ashe Kim, Special Thanks : Roger Sacchelli + +import rospy +import numpy as np +import cv2 +from cv_bridge import CvBridge +from std_msgs.msg import UInt8, Float64 +from sensor_msgs.msg import Image, CompressedImage +from dynamic_reconfigure.server import Server +from turtlebot3_autorace_detect.cfg import DetectLaneParamsConfig + +class DetectLane(): + def __init__(self): + self.hue_white_l = rospy.get_param("~detect/lane/white/hue_l", 0) + self.hue_white_h = rospy.get_param("~detect/lane/white/hue_h", 179) + self.saturation_white_l = rospy.get_param("~detect/lane/white/saturation_l", 0) + self.saturation_white_h = rospy.get_param("~detect/lane/white/saturation_h", 70) + self.lightness_white_l = rospy.get_param("~detect/lane/white/lightness_l", 105) + self.lightness_white_h = rospy.get_param("~detect/lane/white/lightness_h", 255) + + self.hue_yellow_l = rospy.get_param("~detect/lane/yellow/hue_l", 10) + self.hue_yellow_h = rospy.get_param("~detect/lane/yellow/hue_h", 127) + self.saturation_yellow_l = rospy.get_param("~detect/lane/yellow/saturation_l", 70) + self.saturation_yellow_h = rospy.get_param("~detect/lane/yellow/saturation_h", 255) + self.lightness_yellow_l = 
rospy.get_param("~detect/lane/yellow/lightness_l", 95) + self.lightness_yellow_h = rospy.get_param("~detect/lane/yellow/lightness_h", 255) + + self.is_calibration_mode = rospy.get_param("~is_detection_calibration_mode", False) + if self.is_calibration_mode == True: + srv_detect_lane = Server(DetectLaneParamsConfig, self.cbGetDetectLaneParam) + + self.sub_image_type = "raw" # you can choose image type "compressed", "raw" + self.pub_image_type = "compressed" # you can choose image type "compressed", "raw" + + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindLane, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindLane, queue_size = 1) + + if self.pub_image_type == "compressed": + # publishes lane image in compressed type + self.pub_image_lane = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes lane image in raw type + self.pub_image_lane = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes lane image in compressed type + self.pub_image_white_lane = rospy.Publisher('/detect/image_output_sub1/compressed', CompressedImage, queue_size = 1) + self.pub_image_yellow_lane = rospy.Publisher('/detect/image_output_sub2/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes lane image in raw type + self.pub_image_white_lane = rospy.Publisher('/detect/image_output_sub1', Image, queue_size = 1) + self.pub_image_yellow_lane = rospy.Publisher('/detect/image_output_sub2', Image, queue_size = 1) + + self.pub_lane = rospy.Publisher('/detect/lane', Float64, queue_size = 1) + + # subscribes state : yellow line 
reliability + self.pub_yellow_line_reliability = rospy.Publisher('/detect/yellow_line_reliability', UInt8, queue_size=1) + + # subscribes state : white line reliability + self.pub_white_line_reliability = rospy.Publisher('/detect/white_line_reliability', UInt8, queue_size=1) + + self.cvBridge = CvBridge() + + self.counter = 1 + + self.window_width = 1000. + self.window_height = 600. + + self.reliability_white_line = 100 + self.reliability_yellow_line = 100 + + def cbGetDetectLaneParam(self, config, level): + rospy.loginfo("[Detect Lane] Detect Lane Calibration Parameter reconfigured to") + rospy.loginfo("hue_white_l : %d", config.hue_white_l) + rospy.loginfo("hue_white_h : %d", config.hue_white_h) + rospy.loginfo("saturation_white_l : %d", config.saturation_white_l) + rospy.loginfo("saturation_white_h : %d", config.saturation_white_h) + rospy.loginfo("lightness_white_l : %d", config.lightness_white_l) + rospy.loginfo("lightness_white_h : %d", config.lightness_white_h) + rospy.loginfo("hue_yellow_l : %d", config.hue_yellow_l) + rospy.loginfo("hue_yellow_h : %d", config.hue_yellow_h) + rospy.loginfo("saturation_yellow_l : %d", config.saturation_yellow_l) + rospy.loginfo("saturation_yellow_h : %d", config.saturation_yellow_h) + rospy.loginfo("lightness_yellow_l : %d", config.lightness_yellow_l) + rospy.loginfo("lightness_yellow_h : %d", config.lightness_yellow_h) + + self.hue_white_l = config.hue_white_l + self.hue_white_h = config.hue_white_h + self.saturation_white_l = config.saturation_white_l + self.saturation_white_h = config.saturation_white_h + self.lightness_white_l = config.lightness_white_l + self.lightness_white_h = config.lightness_white_h + + self.hue_yellow_l = config.hue_yellow_l + self.hue_yellow_h = config.hue_yellow_h + self.saturation_yellow_l = config.saturation_yellow_l + self.saturation_yellow_h = config.saturation_yellow_h + self.lightness_yellow_l = config.lightness_yellow_l + self.lightness_yellow_h = config.lightness_yellow_h + + return 
config + + def cbFindLane(self, image_msg): + # Change the frame rate by yourself. Now, it is set to 1/3 (10fps). + # Unappropriate value of frame rate may cause huge delay on entire recognition process. + # This is up to your computer's operating power. + if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + #converting compressed image to opencv image + np_arr = np.frombuffer(image_msg.data, np.uint8) + cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + elif self.sub_image_type == "raw": + cv_image = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + # find White and Yellow Lanes + white_fraction, cv_white_lane = self.maskWhiteLane(cv_image) + yellow_fraction, cv_yellow_lane = self.maskYellowLane(cv_image) + + try: + if yellow_fraction > 3000: + self.left_fitx, self.left_fit = self.fit_from_lines(self.left_fit, cv_yellow_lane) + self.mov_avg_left = np.append(self.mov_avg_left,np.array([self.left_fit]), axis=0) + + if white_fraction > 3000: + self.right_fitx, self.right_fit = self.fit_from_lines(self.right_fit, cv_white_lane) + self.mov_avg_right = np.append(self.mov_avg_right,np.array([self.right_fit]), axis=0) + except: + if yellow_fraction > 3000: + self.left_fitx, self.left_fit = self.sliding_windown(cv_yellow_lane, 'left') + self.mov_avg_left = np.array([self.left_fit]) + + if white_fraction > 3000: + self.right_fitx, self.right_fit = self.sliding_windown(cv_white_lane, 'right') + self.mov_avg_right = np.array([self.right_fit]) + + MOV_AVG_LENGTH = 5 + + self.left_fit = np.array([np.mean(self.mov_avg_left[::-1][:, 0][0:MOV_AVG_LENGTH]), + np.mean(self.mov_avg_left[::-1][:, 1][0:MOV_AVG_LENGTH]), + np.mean(self.mov_avg_left[::-1][:, 2][0:MOV_AVG_LENGTH])]) + self.right_fit = np.array([np.mean(self.mov_avg_right[::-1][:, 0][0:MOV_AVG_LENGTH]), + np.mean(self.mov_avg_right[::-1][:, 1][0:MOV_AVG_LENGTH]), + np.mean(self.mov_avg_right[::-1][:, 2][0:MOV_AVG_LENGTH])]) + + if 
self.mov_avg_left.shape[0] > 1000: + self.mov_avg_left = self.mov_avg_left[0:MOV_AVG_LENGTH] + + if self.mov_avg_right.shape[0] > 1000: + self.mov_avg_right = self.mov_avg_right[0:MOV_AVG_LENGTH] + + self.make_lane(cv_image, white_fraction, yellow_fraction) + + def maskWhiteLane(self, image): + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_white_l + Hue_h = self.hue_white_h + Saturation_l = self.saturation_white_l + Saturation_h = self.saturation_white_h + Lightness_l = self.lightness_white_l + Lightness_h = self.lightness_white_h + + # define range of white color in HSV + lower_white = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_white = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only white colors + mask = cv2.inRange(hsv, lower_white, upper_white) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + fraction_num = np.count_nonzero(mask) + + if self.is_calibration_mode == False: + if fraction_num > 35000: + if self.lightness_white_l < 250: + self.lightness_white_l += 5 + elif fraction_num < 5000: + if self.lightness_white_l > 50: + self.lightness_white_l -= 5 + + how_much_short = 0 + + for i in range(0, 600): + if np.count_nonzero(mask[i,::]) > 0: + how_much_short += 1 + + how_much_short = 600 - how_much_short + + if how_much_short > 100: + if self.reliability_white_line >= 5: + self.reliability_white_line -= 5 + elif how_much_short <= 100: + if self.reliability_white_line <= 99: + self.reliability_white_line += 5 + + msg_white_line_reliability = UInt8() + msg_white_line_reliability.data = self.reliability_white_line + self.pub_white_line_reliability.publish(msg_white_line_reliability) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes white lane filtered image in compressed type + self.pub_image_white_lane.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif 
self.pub_image_type == "raw": + # publishes white lane filtered image in raw type + self.pub_image_white_lane.publish(self.cvBridge.cv2_to_imgmsg(mask, "bgr8")) + + return fraction_num, mask + + def maskYellowLane(self, image): + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_yellow_l + Hue_h = self.hue_yellow_h + Saturation_l = self.saturation_yellow_l + Saturation_h = self.saturation_yellow_h + Lightness_l = self.lightness_yellow_l + Lightness_h = self.lightness_yellow_h + + # define range of yellow color in HSV + lower_yellow = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_yellow = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only yellow colors + mask = cv2.inRange(hsv, lower_yellow, upper_yellow) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + fraction_num = np.count_nonzero(mask) + + if self.is_calibration_mode == False: + if fraction_num > 35000: + if self.lightness_yellow_l < 250: + self.lightness_yellow_l += 20 + elif fraction_num < 5000: + if self.lightness_yellow_l > 90: + self.lightness_yellow_l -= 20 + + how_much_short = 0 + + for i in range(0, 600): + if np.count_nonzero(mask[i,::]) > 0: + how_much_short += 1 + + how_much_short = 600 - how_much_short + + if how_much_short > 100: + if self.reliability_yellow_line >= 5: + self.reliability_yellow_line -= 5 + elif how_much_short <= 100: + if self.reliability_yellow_line <= 99: + self.reliability_yellow_line += 5 + + msg_yellow_line_reliability = UInt8() + msg_yellow_line_reliability.data = self.reliability_yellow_line + self.pub_yellow_line_reliability.publish(msg_yellow_line_reliability) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes yellow lane filtered image in compressed type + self.pub_image_yellow_lane.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes 
yellow lane filtered image in raw type + self.pub_image_yellow_lane.publish(self.cvBridge.cv2_to_imgmsg(mask, "bgr8")) + + return fraction_num, mask + + def fit_from_lines(self, lane_fit, image): + nonzero = image.nonzero() + nonzeroy = np.array(nonzero[0]) + nonzerox = np.array(nonzero[1]) + margin = 100 + lane_inds = ((nonzerox > (lane_fit[0] * (nonzeroy ** 2) + lane_fit[1] * nonzeroy + lane_fit[2] - margin)) & ( + nonzerox < (lane_fit[0] * (nonzeroy ** 2) + lane_fit[1] * nonzeroy + lane_fit[2] + margin))) + + # Again, extract line pixel positions + x = nonzerox[lane_inds] + y = nonzeroy[lane_inds] + + # Fit a second order polynomial to each + lane_fit = np.polyfit(y, x, 2) + + # Generate x and y values for plotting + ploty = np.linspace(0, image.shape[0] - 1, image.shape[0]) + lane_fitx = lane_fit[0] * ploty ** 2 + lane_fit[1] * ploty + lane_fit[2] + + return lane_fitx, lane_fit + + def sliding_windown(self, img_w, left_or_right): + histogram = np.sum(img_w[int(img_w.shape[0] / 2):, :], axis=0) + + # Create an output image to draw on and visualize the result + out_img = np.dstack((img_w, img_w, img_w)) * 255 + + # Find the peak of the left and right halves of the histogram + # These will be the starting point for the left and right lines + midpoint = int(histogram.shape[0] / 2) + + if left_or_right == 'left': + lane_base = np.argmax(histogram[:midpoint]) + elif left_or_right == 'right': + lane_base = np.argmax(histogram[midpoint:]) + midpoint + + # Choose the number of sliding windows + nwindows = 20 + + # Set height of windows + window_height = int(img_w.shape[0] / nwindows) + + # Identify the x and y positions of all nonzero pixels in the image + nonzero = img_w.nonzero() + nonzeroy = np.array(nonzero[0]) + nonzerox = np.array(nonzero[1]) + + # Current positions to be updated for each window + x_current = lane_base + + # Set the width of the windows +/- margin + margin = 50 + + # Set minimum number of pixels found to recenter window + minpix = 50 + + # 
Create empty lists to receive lane pixel indices + lane_inds = [] + + # Step through the windows one by one + for window in range(nwindows): + # Identify window boundaries in x and y + win_y_low = img_w.shape[0] - (window + 1) * window_height + win_y_high = img_w.shape[0] - window * window_height + win_x_low = x_current - margin + win_x_high = x_current + margin + + # Draw the windows on the visualization image + cv2.rectangle(out_img, (win_x_low, win_y_low), (win_x_high, win_y_high), (0, 255, 0), 2) + + # Identify the nonzero pixels in x and y within the window + good_lane_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & (nonzerox >= win_x_low) & ( + nonzerox < win_x_high)).nonzero()[0] + + # Append these indices to the lists + lane_inds.append(good_lane_inds) + + # If you found > minpix pixels, recenter next window on their mean position + if len(good_lane_inds) > minpix: + x_current = int(np.mean(nonzerox[good_lane_inds])) + + # Concatenate the arrays of indices + lane_inds = np.concatenate(lane_inds) + + # Extract line pixel positions + x = nonzerox[lane_inds] + y = nonzeroy[lane_inds] + + # Fit a second order polynomial to each + try: + lane_fit = np.polyfit(y, x, 2) + self.lane_fit_bef = lane_fit + except: + lane_fit = self.lane_fit_bef + + # Generate x and y values for plotting + ploty = np.linspace(0, img_w.shape[0] - 1, img_w.shape[0]) + lane_fitx = lane_fit[0] * ploty ** 2 + lane_fit[1] * ploty + lane_fit[2] + + return lane_fitx, lane_fit + + def make_lane(self, cv_image, white_fraction, yellow_fraction): + # Create an image to draw the lines on + warp_zero = np.zeros((cv_image.shape[0], cv_image.shape[1], 1), dtype=np.uint8) + + color_warp = np.dstack((warp_zero, warp_zero, warp_zero)) + color_warp_lines = np.dstack((warp_zero, warp_zero, warp_zero)) + + ploty = np.linspace(0, cv_image.shape[0] - 1, cv_image.shape[0]) + + if yellow_fraction > 3000: + pts_left = np.array([np.flipud(np.transpose(np.vstack([self.left_fitx, ploty])))]) + 
cv2.polylines(color_warp_lines, np.int_([pts_left]), isClosed=False, color=(0, 0, 255), thickness=25) + + if white_fraction > 3000: + pts_right = np.array([np.transpose(np.vstack([self.right_fitx, ploty]))]) + cv2.polylines(color_warp_lines, np.int_([pts_right]), isClosed=False, color=(255, 255, 0), thickness=25) + + self.is_center_x_exist = True + + if self.reliability_white_line > 50 and self.reliability_yellow_line > 50: + if white_fraction > 3000 and yellow_fraction > 3000: + centerx = np.mean([self.left_fitx, self.right_fitx], axis=0) + pts = np.hstack((pts_left, pts_right)) + pts_center = np.array([np.transpose(np.vstack([centerx, ploty]))]) + + cv2.polylines(color_warp_lines, np.int_([pts_center]), isClosed=False, color=(0, 255, 255), thickness=12) + + # Draw the lane onto the warped blank image + cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0)) + + if white_fraction > 3000 and yellow_fraction <= 3000: + centerx = np.subtract(self.right_fitx, 320) + pts_center = np.array([np.transpose(np.vstack([centerx, ploty]))]) + + cv2.polylines(color_warp_lines, np.int_([pts_center]), isClosed=False, color=(0, 255, 255), thickness=12) + + if white_fraction <= 3000 and yellow_fraction > 3000: + centerx = np.add(self.left_fitx, 320) + pts_center = np.array([np.transpose(np.vstack([centerx, ploty]))]) + + cv2.polylines(color_warp_lines, np.int_([pts_center]), isClosed=False, color=(0, 255, 255), thickness=12) + + elif self.reliability_white_line <= 50 and self.reliability_yellow_line > 50: + centerx = np.add(self.left_fitx, 320) + pts_center = np.array([np.transpose(np.vstack([centerx, ploty]))]) + + cv2.polylines(color_warp_lines, np.int_([pts_center]), isClosed=False, color=(0, 255, 255), thickness=12) + + elif self.reliability_white_line > 50 and self.reliability_yellow_line <= 50: + centerx = np.subtract(self.right_fitx, 320) + pts_center = np.array([np.transpose(np.vstack([centerx, ploty]))]) + + cv2.polylines(color_warp_lines, np.int_([pts_center]), 
isClosed=False, color=(0, 255, 255), thickness=12) + + else: + self.is_center_x_exist = False + pass + + # Combine the result with the original image + final = cv2.addWeighted(cv_image, 1, color_warp, 0.2, 0) + final = cv2.addWeighted(final, 1, color_warp_lines, 1, 0) + + if self.pub_image_type == "compressed": + if self.is_center_x_exist == True: + # publishes lane center + msg_desired_center = Float64() + msg_desired_center.data = centerx.item(350) + self.pub_lane.publish(msg_desired_center) + + self.pub_image_lane.publish(self.cvBridge.cv2_to_compressed_imgmsg(final, "jpg")) + + elif self.pub_image_type == "raw": + if self.is_center_x_exist == True: + # publishes lane center + msg_desired_center = Float64() + msg_desired_center.data = centerx.item(350) + self.pub_lane.publish(msg_desired_center) + + self.pub_image_lane.publish(self.cvBridge.cv2_to_imgmsg(final, "bgr8")) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_lane') + node = DetectLane() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing new file mode 100755 index 0000000..58b5d7d --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing @@ -0,0 +1,353 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Authors: Leon Jung, [AuTURBO] Kihoon Kim (https://github.com/auturbo), Gilbert, Ashe Kim + +import rospy +import numpy as np +import cv2 +import math +from enum import Enum +from std_msgs.msg import UInt8, Float64 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError +from dynamic_reconfigure.server import Server +from turtlebot3_autorace_detect.cfg import DetectLevelParamsConfig + +def fnCalcDistanceDot2Line(a, b, c, x0, y0): + distance = abs(x0*a + y0*b + c)/math.sqrt(a*a + b*b) + return distance + +def fnCalcDistanceDot2Dot(x1, y1, x2, y2): + distance = math.sqrt((x2-x1)*(x2-x1) + (y2-y1)*(y2-y1)) + return distance + +def fnArrangeIndexOfPoint(arr): + new_arr = arr[:] + arr_idx = [0] * len(arr) + for i in range(len(arr)): + arr_idx[i] = i + + for i in range(len(arr)): + for j in range(i+1, len(arr)): + if arr[i] < arr[j]: + buffer = arr_idx[j] + arr_idx[j] = arr_idx[i] + arr_idx[i] = buffer + buffer = new_arr[j] + new_arr[j] = new_arr[i] + new_arr[i] = buffer + return arr_idx + +def fnCheckLinearity(point1, point2, point3): + threshold_linearity = 50 + x1, y1 = point1 + x2, y2 = point3 + if x2-x1 != 0: + a = (y2-y1)/(x2-x1) + else: + a = 1000 + b = -1 + c = y1 - a*x1 + err = fnCalcDistanceDot2Line(a, b, c, point2[0], point2[1]) + + if err < threshold_linearity: + return True + else: + return False + +def fnCheckDistanceIsEqual(point1, point2, point3): + + threshold_distance_equality = 3 + distance1 = fnCalcDistanceDot2Dot(point1[0], point1[1], point2[0], point2[1]) + distance2 = fnCalcDistanceDot2Dot(point2[0], point2[1], point3[0], point3[1]) + std = np.std([distance1, distance2]) + + if std < threshold_distance_equality: + return True + else: + return False + +class DetectLevel(): + def __init__(self): + self.hue_red_l = 
rospy.get_param("~detect/level/red/hue_l", 0) + self.hue_red_h = rospy.get_param("~detect/level/red/hue_h", 179) + self.saturation_red_l = rospy.get_param("~detect/level/red/saturation_l", 24) + self.saturation_red_h = rospy.get_param("~detect/level/red/saturation_h", 255) + self.lightness_red_l = rospy.get_param("~detect/level/red/lightness_l", 207) + self.lightness_red_h = rospy.get_param("~detect/level/red/lightness_h", 162) + + self.is_calibration_mode = rospy.get_param("~is_detection_calibration_mode", False) + if self.is_calibration_mode == True: + srv_detect_lane = Server(DetectLevelParamsConfig, self.cbGetDetectLevelParam) + + self.sub_image_type = "raw" # you can choose image type "compressed", "raw" + self.pub_image_type = "compressed" # you can choose image type "compressed", "raw" + + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbGetImage, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbGetImage, queue_size = 1) + + if self.pub_image_type == "compressed": + # publishes level image in compressed type + self.pub_image_level = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes level image in raw type + self.pub_image_level = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes color filtered image in compressed type + self.pub_image_color_filtered = rospy.Publisher('/detect/image_output_sub1/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes color filtered image in raw type + self.pub_image_color_filtered = rospy.Publisher('/detect/image_output_sub1', Image, queue_size = 1) + + 
self.sub_level_crossing_order = rospy.Subscriber('/detect/level_crossing_order', UInt8, self.cbLevelCrossingOrder, queue_size=1) + self.sub_level_crossing_finished = rospy.Subscriber('/control/level_crossing_finished', UInt8, self.cbLevelCrossingFinished, queue_size = 1) + + self.pub_level_crossing_return = rospy.Publisher('/detect/level_crossing_stamped', UInt8, queue_size=1) + self.pub_max_vel = rospy.Publisher('/control/max_vel', Float64, queue_size = 1) + + self.StepOfLevelCrossing = Enum('StepOfLevelCrossing', 'pass_level exit') + self.is_level_crossing_finished = False + self.stop_bar_count = 0 + self.counter = 1 + + self.cvBridge = CvBridge() + self.cv_image = None + + rospy.sleep(1) + + loop_rate = rospy.Rate(15) + while not rospy.is_shutdown(): + self.fnFindLevel() + + loop_rate.sleep() + + def cbGetDetectLevelParam(self, config, level): + rospy.loginfo("[Detect Level] Detect Level Calibration Parameter reconfigured to") + rospy.loginfo("hue_red_l : %d", config.hue_red_l) + rospy.loginfo("hue_red_h : %d", config.hue_red_h) + rospy.loginfo("saturation_red_l : %d", config.saturation_red_l) + rospy.loginfo("saturation_red_h : %d", config.saturation_red_h) + rospy.loginfo("lightness_red_l : %d", config.lightness_red_l) + rospy.loginfo("lightness_red_h : %d", config.lightness_red_h) + + self.hue_red_l = config.hue_red_l + self.hue_red_h = config.hue_red_h + self.saturation_red_l = config.saturation_red_l + self.saturation_red_h = config.saturation_red_h + self.lightness_red_l = config.lightness_red_l + self.lightness_red_h = config.lightness_red_h + + return config + + def cbGetImage(self, image_msg): + # Change the frame rate by yourself. Now, it is set to 1/3 (10fps). + # Unappropriate value of frame rate may cause huge delay on entire recognition process. + # This is up to your computer's operating power. 
+ if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + np_arr = np.frombuffer(image_msg.data, np.uint8) + self.cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + else: + self.cv_image = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + def cbLevelCrossingOrder(self, order): + pub_level_crossing_return = UInt8() + + if order.data == self.StepOfLevelCrossing.pass_level.value: + + while True: + is_level_detected, _, _ = self.fnFindLevel() + if is_level_detected == True: + rospy.loginfo("Level Detect") + msg_pub_max_vel = Float64() + msg_pub_max_vel.data = 0.03 + self.pub_max_vel.publish(msg_pub_max_vel) + break + else: + pass + + while True: + _, is_level_close, _ = self.fnFindLevel() + if is_level_close == True: + rospy.loginfo("STOP") + msg_pub_max_vel = Float64() + msg_pub_max_vel.data = 0.0 + self.pub_max_vel.publish(msg_pub_max_vel) + break + else: + pass + + while True: + _, _, is_level_opened = self.fnFindLevel() + if is_level_opened == True: + rospy.loginfo("GO") + msg_pub_max_vel = Float64() + msg_pub_max_vel.data = 0.05 + self.pub_max_vel.publish(msg_pub_max_vel) + break + else: + pass + + pub_level_crossing_return.data = self.StepOfLevelCrossing.exit.value + + elif order.data == self.StepOfLevelCrossing.exit.value: + rospy.loginfo("Now pass_level") + pub_level_crossing_return.data = self.StepOfLevelCrossing.exit.value + + self.pub_level_crossing_return.publish(pub_level_crossing_return) + rospy.sleep(3) + + def fnFindLevel(self): + cv_image_mask = self.fnMaskRedOfLevel() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + return self.fnFindRectOfLevel(cv_image_mask) + + def fnMaskRedOfLevel(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_red_l + Hue_h = self.hue_red_h + Saturation_l = self.saturation_red_l + Saturation_h = self.saturation_red_h + Lightness_l = self.lightness_red_l + 
Lightness_h = self.lightness_red_h + + # define range of red color in HSV + lower_red = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_red = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only red colors + mask = cv2.inRange(hsv, lower_red, upper_red) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes yellow lane filtered image in compressed type + self.pub_image_color_filtered.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes yellow lane filtered image in raw type + self.pub_image_color_filtered.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnFindRectOfLevel(self, mask): + is_level_detected = False + is_level_close = False + is_level_opened = False + + params=cv2.SimpleBlobDetector_Params() + # Change thresholds + params.minThreshold = 0 + params.maxThreshold = 255 + + # Filter by Area. 
+ params.filterByArea = True + params.minArea = 200 + params.maxArea = 2000 + + # Filter by Circularity + params.filterByCircularity = True + params.minCircularity = 0.3 + + # Filter by Convexity + params.filterByConvexity = True + params.minConvexity = 0.9 + + det=cv2.SimpleBlobDetector_create(params) + keypts=det.detect(mask) + frame=cv2.drawKeypoints(self.cv_image,keypts,np.array([]),(0,255,255),cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + mean_x = 0.0 + mean_y = 0.0 + + # if detected 3 red rectangular + if len(keypts) == 3: + for i in range(3): + mean_x = mean_x + keypts[i].pt[0]/3 + mean_y = mean_y + keypts[i].pt[1]/3 + arr_distances = [0]*3 + for i in range(3): + arr_distances[i] = fnCalcDistanceDot2Dot(mean_x, mean_y, keypts[i].pt[0], keypts[i].pt[1]) + + # finding thr farthest point from the center + idx1, idx2, idx3 = fnArrangeIndexOfPoint(arr_distances) + frame = cv2.line(frame, (int(keypts[idx1].pt[0]), int(keypts[idx1].pt[1])), (int(keypts[idx2].pt[0]), int(keypts[idx2].pt[1])), (255, 0, 0), 5) + frame = cv2.line(frame, (int(mean_x), int(mean_y)), (int(mean_x), int(mean_y)), (255, 255, 0), 5) + point1 = [int(keypts[idx1].pt[0]), int(keypts[idx1].pt[1]-1)] + point2 = [int(keypts[idx3].pt[0]), int(keypts[idx3].pt[1]-1)] + point3 = [int(keypts[idx2].pt[0]), int(keypts[idx2].pt[1]-1)] + + # test linearity and distance equality. 
If satisfy the both, continue to next process + is_rects_linear = fnCheckLinearity(point1, point2, point3) + is_rects_dist_equal = fnCheckDistanceIsEqual(point1, point2, point3) + + if is_rects_linear == True or is_rects_dist_equal == True: + # finding the angle of line + distance_bar2car = 50 / fnCalcDistanceDot2Dot(point1[0], point1[1], point2[0], point2[1]) + + self.stop_bar_count = 50 + if distance_bar2car > 1.0: + is_level_detected = True + self.stop_bar_state = 'slowdown' + else: + is_level_close = True + self.stop_bar_state = 'stop' + + elif len(keypts) <= 1: + is_level_opened = True + self.stop_bar_state = 'go' + + if self.pub_image_type == "compressed": + # publishes level image in compressed type + self.pub_image_level.publish(self.cvBridge.cv2_to_compressed_imgmsg(frame, "jpg")) + + elif self.pub_image_type == "raw": + # publishes level image in raw type + self.pub_image_level.publish(self.cvBridge.cv2_to_imgmsg(frame, "bgr8")) + + return is_level_detected, is_level_close, is_level_opened + + def cbLevelCrossingFinished(self, level_crossing_finished_msg): + self.is_level_crossing_finished = True + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_level') + node = DetectLevel() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing_sign b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing_sign new file mode 100755 index 0000000..082661e --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_level_crossing_sign @@ -0,0 +1,185 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +import os +import cv2 +from enum import Enum +from std_msgs.msg import UInt8 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError + +class DetectSign(): + def __init__(self): + self.fnPreproc() + + self.sub_image_type = "raw" # you can choose image type "compressed", "raw" + self.pub_image_type = "compressed" # you can choose image type "compressed", "raw" + + #subscribes + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindTrafficSign, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindTrafficSign, queue_size = 1) + + # publishes + self.pub_traffic_sign = rospy.Publisher('/detect/traffic_sign', UInt8, queue_size=1) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + self.cvBridge = CvBridge() + self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking 
tunnel') + self.counter = 1 + + def fnPreproc(self): + # Initiate SIFT detector + self.sift = cv2.SIFT_create() + + dir_path = os.path.dirname(os.path.realpath(__file__)) + dir_path = dir_path.replace('turtlebot3_autorace_detect/nodes', 'turtlebot3_autorace_detect/') + dir_path += 'image/' + + self.img_stop = cv2.imread(dir_path + 'stop.png',0) # trainImage1 + self.kp_stop, self.des_stop = self.sift.detectAndCompute(self.img_stop,None) + + FLANN_INDEX_KDTREE = 0 + index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5) + search_params = dict(checks = 50) + + self.flann = cv2.FlannBasedMatcher(index_params, search_params) + + def fnCalcMSE(self, arr1, arr2): + squared_diff = (arr1 - arr2) ** 2 + sum = np.sum(squared_diff) + num_all = arr1.shape[0] * arr1.shape[1] #cv_image_input and 2 should have same shape + err = sum / num_all + return err + + def cbFindTrafficSign(self, image_msg): + # drop the frame to 1/5 (6fps) because of the processing speed. This is up to your computer's operating power. 
+ if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + #converting compressed image to opencv image + np_arr = np.frombuffer(image_msg.data, np.uint8) + cv_image_input = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + elif self.sub_image_type == "raw": + cv_image_input = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + MIN_MATCH_COUNT = 4 #9 + MIN_MSE_DECISION = 50000 + + # find the keypoints and descriptors with SIFT + kp1, des1 = self.sift.detectAndCompute(cv_image_input,None) + + matches_stop = self.flann.knnMatch(des1,self.des_stop,k=2) + + image_out_num = 1 + + good_stop = [] + for m,n in matches_stop: + if m.distance < 0.7*n.distance: + good_stop.append(m) + + if len(good_stop)>MIN_MATCH_COUNT: + src_pts = np.float32([ kp1[m.queryIdx].pt for m in good_stop ]).reshape(-1,1,2) + dst_pts = np.float32([ self.kp_stop[m.trainIdx].pt for m in good_stop ]).reshape(-1,1,2) + + M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0) + matches_stop = mask.ravel().tolist() + + mse = self.fnCalcMSE(src_pts, dst_pts) + if mse < MIN_MSE_DECISION: + msg_sign = UInt8() + msg_sign.data = self.TrafficSign.stop.value + + self.pub_traffic_sign.publish(msg_sign) + + rospy.loginfo("stop") + image_out_num = 2 + + else: + matches_stop = None + rospy.loginfo("nothing") + + if image_out_num == 1: + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(cv_image_input, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(cv_image_input, "bgr8")) + + elif image_out_num == 2: + draw_params2 = dict(matchColor = (0,0,255), # draw matches in green color + singlePointColor = None, + matchesMask = matches_stop, # draw only inliers + flags = 2) + + final_stop = 
cv2.drawMatches(cv_image_input,kp1,self.img_stop,self.kp_stop,good_stop,None,**draw_params2) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_stop, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_stop, "bgr8")) + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_sign') + node = DetectSign() + node.main() + + + + + + + + + + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking new file mode 100755 index 0000000..347e749 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking @@ -0,0 +1,329 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import os +from enum import Enum +from std_msgs.msg import UInt8, Float64 +from sensor_msgs.msg import LaserScan +from turtlebot3_autorace_msgs.msg import MovingParam + +class DetectParking(): + def __init__(self): + # subscribes state + self.sub_scan_obstacle = rospy.Subscriber('/detect/scan', LaserScan, self.cbScanObstacle, queue_size=1) + self.sub_parking_order = rospy.Subscriber('/detect/parking_order', UInt8, self.cbParkingOrder, queue_size=1) + self.sub_moving_completed = rospy.Subscriber('/control/moving/complete', UInt8, self.cbMovingComplete, queue_size = 1) + + # publishes state + self.pub_parking_return = rospy.Publisher('/detect/parking_stamped', UInt8, queue_size=1) + self.pub_moving = rospy.Publisher('/control/moving/state', MovingParam, queue_size= 1) + self.pub_max_vel = rospy.Publisher('/control/max_vel', Float64, queue_size = 1) + + self.StepOfParking = Enum('StepOfParking', 'parking exit') + self.start_obstacle_detection = False + self.is_obstacle_detected_R = False + self.is_obstacle_detected_L = False + self.is_moving_complete = False + + def cbMovingComplete(self, data): + self.is_moving_complete = True + + def cbParkingOrder(self, order): + msg_pub_parking_return = UInt8() + + if order.data == self.StepOfParking.parking.value: + rospy.loginfo("Now motion") + msg_pub_max_vel = Float64() + msg_pub_max_vel.data = 0.00 + self.pub_max_vel.publish(msg_pub_max_vel) + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.45 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go left") + msg_moving = MovingParam() + msg_moving.moving_type=2 + 
msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=1.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + self.start_obstacle_detection = True + while True: + if self.is_obstacle_detected_L == True or self.is_obstacle_detected_R == True: + break + + if self.is_obstacle_detected_R == False: + rospy.loginfo("right parking is clear") + rospy.loginfo("go right") + msg_moving = MovingParam() + msg_moving.moving_type=3 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.20 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go back straight") + msg_moving = MovingParam() + msg_moving.moving_type= 5 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.20 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go right") + msg_moving = MovingParam() + msg_moving.moving_type= 3 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + 
self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.9 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go left") + msg_moving = MovingParam() + msg_moving.moving_type=2 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type=4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.2 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + elif self.is_obstacle_detected_L == False: + rospy.loginfo("left parking is clear") + rospy.loginfo("go left") + msg_moving = MovingParam() + msg_moving.moving_type=2 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.20 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go back straight") + msg_moving = MovingParam() + msg_moving.moving_type= 5 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.20 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete 
= False + + rospy.sleep(1) + + rospy.loginfo("go left") + msg_moving = MovingParam() + msg_moving.moving_type= 2 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type= 4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=1.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go left") + msg_moving = MovingParam() + msg_moving.moving_type=2 + msg_moving.moving_value_angular=90 + msg_moving.moving_value_linear=0.0 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.sleep(1) + + rospy.loginfo("go straight") + msg_moving = MovingParam() + msg_moving.moving_type=4 + msg_moving.moving_value_angular=0.0 + msg_moving.moving_value_linear=0.2 + self.pub_moving.publish(msg_moving) + while True: + if self.is_moving_complete == True: + break + self.is_moving_complete = False + + rospy.loginfo("parking finished") + msg_pub_parking_return.data = self.StepOfParking.exit.value + + elif order.data == self.StepOfParking.exit.value: + rospy.loginfo("parking finished") + msg_pub_parking_return.data = self.StepOfParking.exit.value + + self.pub_parking_return.publish(msg_pub_parking_return) + rospy.sleep(3) + + def cbScanObstacle(self, scan): + angle_scan = 30 + + scan_start_left = 90 - angle_scan + scan_end_left = 90 + angle_scan + + scan_start_right = 270 - angle_scan + scan_end_right = 270 + angle_scan + + threshold_distance = 0.5 + + if self.start_obstacle_detection == True: + for i in range(scan_start_left, scan_end_left): + if scan.ranges[i] < threshold_distance and scan.ranges[i] > 0.01: + 
self.is_obstacle_detected_L = True + rospy.loginfo("left detected") + + + for i in range(scan_start_right, scan_end_right): + if scan.ranges[i] < threshold_distance and scan.ranges[i] > 0.01: + self.is_obstacle_detected_R = True + rospy.loginfo("right detected") + self.start_obstacle_detection = False + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_parking') + node = DetectParking() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking_sign b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking_sign new file mode 100755 index 0000000..ab9681a --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_parking_sign @@ -0,0 +1,186 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +import os +import cv2 +from enum import Enum +from std_msgs.msg import UInt8 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError + +class DetectSign(): + def __init__(self): + self.fnPreproc() + + self.sub_image_type = "raw" # you can choose image type "compressed", "raw" + self.pub_image_type = "compressed" # you can choose image type "compressed", "raw" + + #subscribes + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindTrafficSign, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindTrafficSign, queue_size = 1) + + # publishes + self.pub_traffic_sign = rospy.Publisher('/detect/traffic_sign', UInt8, queue_size=1) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + self.cvBridge = CvBridge() + self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking tunnel') + self.counter = 1 + + def fnPreproc(self): + # Initiate SIFT detector + self.sift = cv2.SIFT_create() + + dir_path = os.path.dirname(os.path.realpath(__file__)) + dir_path = dir_path.replace('turtlebot3_autorace_detect/nodes', 'turtlebot3_autorace_detect/') + dir_path += 'image/' + + self.img_parking = cv2.imread(dir_path + 'parking.png',0) # trainImage2 + self.kp_parking, self.des_parking = 
self.sift.detectAndCompute(self.img_parking,None) + + FLANN_INDEX_KDTREE = 0 + index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5) + search_params = dict(checks = 50) + + self.flann = cv2.FlannBasedMatcher(index_params, search_params) + + def fnCalcMSE(self, arr1, arr2): + squared_diff = (arr1 - arr2) ** 2 + sum = np.sum(squared_diff) + num_all = arr1.shape[0] * arr1.shape[1] #cv_image_input and 2 should have same shape + err = sum / num_all + return err + + def cbFindTrafficSign(self, image_msg): + # drop the frame to 1/5 (6fps) because of the processing speed. This is up to your computer's operating power. + if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + #converting compressed image to opencv image + np_arr = np.frombuffer(image_msg.data, np.uint8) + cv_image_input = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + elif self.sub_image_type == "raw": + cv_image_input = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + MIN_MATCH_COUNT = 9 + MIN_MSE_DECISION = 50000 + + # find the keypoints and descriptors with SIFT + kp1, des1 = self.sift.detectAndCompute(cv_image_input,None) + + matches_parking = self.flann.knnMatch(des1,self.des_parking,k=2) + image_out_num = 1 + + good_parking = [] + for m,n in matches_parking: + if m.distance < 0.7*n.distance: + good_parking.append(m) + + if len(good_parking)>MIN_MATCH_COUNT: + src_pts = np.float32([ kp1[m.queryIdx].pt for m in good_parking ]).reshape(-1,1,2) + dst_pts = np.float32([ self.kp_parking[m.trainIdx].pt for m in good_parking ]).reshape(-1,1,2) + + M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0) + matchesMask_parking = mask.ravel().tolist() + + mse = self.fnCalcMSE(src_pts, dst_pts) + if mse < MIN_MSE_DECISION: + msg_sign = UInt8() + msg_sign.data = self.TrafficSign.parking.value + + self.pub_traffic_sign.publish(msg_sign) + + rospy.loginfo("parking") + image_out_num = 2 + + else: + matchesMask_parking = None + 
rospy.loginfo("nothing") + + + if image_out_num == 1: + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(cv_image_input, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(cv_image_input, "bgr8")) + + elif image_out_num == 2: + draw_params = dict(matchColor = (255,0,0), # draw matches in green color + singlePointColor = None, + matchesMask = matchesMask_parking, # draw only inliers + flags = 2) + + final_parking = cv2.drawMatches(cv_image_input,kp1,self.img_parking,self.kp_parking,good_parking,None,**draw_params) + + if self.pub_image_type == "compressed": + # publishes traffic sign image in compressed type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_parking, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic sign image in raw type + self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_parking, "bgr8")) + + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_sign') + node = DetectSign() + node.main() + + + + + + + + + + + + + + + + + + + diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light new file mode 100755 index 0000000..2c5c70c --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light @@ -0,0 +1,419 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +import cv2 +from enum import Enum +from std_msgs.msg import UInt8, Float64 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError +from dynamic_reconfigure.server import Server +from turtlebot3_autorace_detect.cfg import DetectTrafficLightParamsConfig + +class DetectTrafficLight(): + def __init__(self): + self.hue_red_l = rospy.get_param("~detect/lane/red/hue_l", 0) + self.hue_red_h = rospy.get_param("~detect/lane/red/hue_h", 26) + self.saturation_red_l = rospy.get_param("~detect/lane/red/saturation_l", 239) + self.saturation_red_h = rospy.get_param("~detect/lane/red/saturation_h", 255) + self.lightness_red_l = rospy.get_param("~detect/lane/red/lightness_l", 123) + self.lightness_red_h = rospy.get_param("~detect/lane/red/lightness_h", 250) + + self.hue_yellow_l = rospy.get_param("~detect/lane/yellow/hue_l", 19) + self.hue_yellow_h = rospy.get_param("~detect/lane/yellow/hue_h", 33) + self.saturation_yellow_l = rospy.get_param("~detect/lane/yellow/saturation_l", 237) + self.saturation_yellow_h = rospy.get_param("~detect/lane/yellow/saturation_h", 255) + self.lightness_yellow_l = rospy.get_param("~detect/lane/yellow/lightness_l", 231) + self.lightness_yellow_h = rospy.get_param("~detect/lane/yellow/lightness_h", 255) + + self.hue_green_l = rospy.get_param("~detect/lane/green/hue_l", 
40) + self.hue_green_h = rospy.get_param("~detect/lane/green/hue_h", 113) + self.saturation_green_l = rospy.get_param("~detect/lane/green/saturation_l", 210) + self.saturation_green_h = rospy.get_param("~detect/lane/green/saturation_h", 255) + self.lightness_green_l = rospy.get_param("~detect/lane/green/lightness_l", 131) + self.lightness_green_h = rospy.get_param("~detect/lane/green/lightness_h", 255) + + self.is_calibration_mode = rospy.get_param("~is_detection_calibration_mode", False) + if self.is_calibration_mode == True: + srv_detect_lane = Server(DetectTrafficLightParamsConfig, self.cbGetDetectTrafficLightParam) + + self.sub_image_type = "compressed" # "compressed" / "raw" + self.pub_image_type = "compressed" # "compressed" / "raw" + + self.counter = 1 + + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbGetImage, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbGetImage, queue_size = 1) + + if self.pub_image_type == "compressed": + # publishes compensated image in compressed type + self.pub_image_traffic_light = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes compensated image in raw type + self.pub_image_traffic_light = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes light image in compressed type + self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1/compressed', CompressedImage, queue_size = 1) + self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2/compressed', CompressedImage, queue_size = 1) + self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3/compressed', 
CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes light image in raw type + self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1', Image, queue_size = 1) + self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2', Image, queue_size = 1) + self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3', Image, queue_size = 1) + + + self.sub_traffic_light_finished = rospy.Subscriber('/control/traffic_light_finished', UInt8, self.cbTrafficLightFinished, queue_size = 1) + + self.pub_traffic_light_return = rospy.Publisher('/detect/traffic_light_stamped', UInt8, queue_size=1) + self.pub_traffic_start = rospy.Publisher('/control/traffic_light_start', UInt8, queue_size = 1) + self.pub_traffic_light = rospy.Publisher('/detect/traffic_light', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light intersection construction parking_lot level_crossing tunnel') + self.InvokedObject = Enum('InvokedObject', 'traffic_light traffic_sign') + self.cvBridge = CvBridge() + self.cv_image = None + + self.is_image_available = False + self.is_traffic_light_finished = False + + self.green_count = 0 + self.yellow_count = 0 + self.red_count = 0 + self.stop_count = 0 + self.off_traffic = False + rospy.sleep(1) + + loop_rate = rospy.Rate(10) + while not rospy.is_shutdown(): + if self.is_image_available == True: + if self.is_traffic_light_finished == False: + self.fnFindTrafficLight() + + loop_rate.sleep() + + def cbGetDetectTrafficLightParam(self, config, level): + rospy.loginfo("[Detect Traffic Light] Detect Traffic Light Calibration Parameter reconfigured to") + rospy.loginfo("hue_red_l : %d", config.hue_red_l) + rospy.loginfo("hue_red_h : %d", config.hue_red_h) + rospy.loginfo("saturation_red_l : %d", config.saturation_red_l) + rospy.loginfo("saturation_red_h : %d", config.saturation_red_h) + rospy.loginfo("lightness_red_l : %d", config.lightness_red_l) + 
rospy.loginfo("lightness_red_h : %d", config.lightness_red_h) + + rospy.loginfo("hue_yellow_l : %d", config.hue_yellow_l) + rospy.loginfo("hue_yellow_h : %d", config.hue_yellow_h) + rospy.loginfo("saturation_yellow_l : %d", config.saturation_yellow_l) + rospy.loginfo("saturation_yellow_h : %d", config.saturation_yellow_h) + rospy.loginfo("lightness_yellow_l : %d", config.lightness_yellow_l) + rospy.loginfo("lightness_yellow_h : %d", config.lightness_yellow_h) + + rospy.loginfo("hue_green_l : %d", config.hue_green_l) + rospy.loginfo("hue_green_h : %d", config.hue_green_h) + rospy.loginfo("saturation_green_l : %d", config.saturation_green_l) + rospy.loginfo("saturation_green_h : %d", config.saturation_green_h) + rospy.loginfo("lightness_green_l : %d", config.lightness_green_l) + rospy.loginfo("lightness_green_h : %d", config.lightness_green_h) + + self.hue_red_l = config.hue_red_l + self.hue_red_h = config.hue_red_h + self.saturation_red_l = config.saturation_red_l + self.saturation_red_h = config.saturation_red_h + self.lightness_red_l = config.lightness_red_l + self.lightness_red_h = config.lightness_red_h + + self.hue_yellow_l = config.hue_yellow_l + self.hue_yellow_h = config.hue_yellow_h + self.saturation_yellow_l = config.saturation_yellow_l + self.saturation_yellow_h = config.saturation_yellow_h + self.lightness_yellow_l = config.lightness_yellow_l + self.lightness_yellow_h = config.lightness_yellow_h + + self.hue_green_l = config.hue_green_l + self.hue_green_h = config.hue_green_h + self.saturation_green_l = config.saturation_green_l + self.saturation_green_h = config.saturation_green_h + self.lightness_green_l = config.lightness_green_l + self.lightness_green_h = config.lightness_green_h + + return config + + + def cbGetImage(self, image_msg): + # drop the frame to 1/5 (6fps) because of the processing speed. This is up to your computer's operating power. 
+ if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + np_arr = np.frombuffer(image_msg.data, np.uint8) + self.cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + else: + self.cv_image = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + self.is_image_available = True + + def fnFindTrafficLight(self): + cv_image_mask = self.fnMaskGreenTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + status1 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'green') + if status1 == 1 or status1 == 5: + rospy.loginfo("detect GREEN") + self.stop_count = 0 + self.green_count += 1 + else: + self.green_count = 0 + + cv_image_mask = self.fnMaskYellowTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + status2 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'yellow') + if status2 == 2: + rospy.loginfo("detect YELLOW") + self.yellow_count += 1 + else: + self.yellow_count = 0 + + cv_image_mask = self.fnMaskRedTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + status3 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'red') + if status3 == 3: + rospy.loginfo("detect RED") + self.red_count += 1 + elif status3 == 4: + self.red_count = 0 + self.stop_count += 1 + else: + self.red_count = 0 + self.stop_count = 0 + + # rospy.loginfo("RGB = %d %d %d stop = %d status =%d",self.red_count,self.yellow_count,self.green_count,self.stop_count,status1) + if self.green_count >= 3: + rospy.loginfo("GREEN") + cv2.putText(self.cv_image,"GREEN", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (80, 255, 0)) + msg_sign = UInt8() + msg_sign.data = self.CurrentMode.intersection.value + rospy.loginfo("Publish intersection from detect") + self.pub_traffic_light.publish(msg_sign) + self.is_traffic_light_finished = True + + + if self.yellow_count >= 3: + rospy.loginfo("YELLOW") + cv2.putText(self.cv_image,"YELLOW", (self.point_col, self.point_low), 
cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 255)) + + if self.red_count >= 3: + rospy.loginfo("RED") + cv2.putText(self.cv_image,"RED", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) + + if self.stop_count >= 8: + rospy.loginfo("STOP") + self.off_traffic = True + cv2.putText(self.cv_image,"STOP", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) + + if self.pub_image_type == "compressed": + # publishes traffic light image in compressed type + self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(self.cv_image, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic light image in raw type + self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_imgmsg(self.cv_image, "bgr8")) + + def fnMaskRedTrafficLight(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_red_l + Hue_h = self.hue_red_h + Saturation_l = self.saturation_red_l + Saturation_h = self.saturation_red_h + Lightness_l = self.lightness_red_l + Lightness_h = self.lightness_red_h + + # define range of red color in HSV + lower_red = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_red = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only red colors + mask = cv2.inRange(hsv, lower_red, upper_red) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes red light filtered image in compressed type + self.pub_image_red_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes red light filtered image in raw type + self.pub_image_red_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnMaskYellowTrafficLight(self): + image = np.copy(self.cv_image) + + # 
Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_yellow_l + Hue_h = self.hue_yellow_h + Saturation_l = self.saturation_yellow_l + Saturation_h = self.saturation_yellow_h + Lightness_l = self.lightness_yellow_l + Lightness_h = self.lightness_yellow_h + + # define range of yellow color in HSV + lower_yellow = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_yellow = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only yellow colors + mask = cv2.inRange(hsv, lower_yellow, upper_yellow) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes yellow light filtered image in compressed type + self.pub_image_yellow_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes yellow light filtered image in raw type + self.pub_image_yellow_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnMaskGreenTrafficLight(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_green_l + Hue_h = self.hue_green_h + Saturation_l = self.saturation_green_l + Saturation_h = self.saturation_green_h + Lightness_l = self.lightness_green_l + Lightness_h = self.lightness_green_h + + # define range of green color in HSV + lower_green = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_green = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only green colors + mask = cv2.inRange(hsv, lower_green, upper_green) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes green light filtered image in compressed type + 
self.pub_image_green_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes green light filtered image in raw type + self.pub_image_green_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnFindCircleOfTrafficLight(self, mask, find_color): + status = 0 + + params=cv2.SimpleBlobDetector_Params() + # Change thresholds + params.minThreshold = 0 + params.maxThreshold = 255 + + # Filter by Area. + params.filterByArea = True + params.minArea = 50 + params.maxArea = 600 + + # Filter by Circularity + params.filterByCircularity = True + params.minCircularity = 0.4 + + # Filter by Convexity + params.filterByConvexity = True + params.minConvexity = 0.6 + + det=cv2.SimpleBlobDetector_create(params) + keypts=det.detect(mask) + frame=cv2.drawKeypoints(self.cv_image,keypts,np.array([]),(0,255,255),cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + col1 = 180 + col2 = 270 + col3 = 305 + + low1 = 50 + low2 = 170 + low3 = 170 + + # if detected more than 1 light + for i in range(len(keypts)): + self.point_col = int(keypts[i].pt[0]) + self.point_low = int(keypts[i].pt[1]) + if self.point_col > col1 and self.point_col < col2 and self.point_low > low1 and self.point_low < low2: + if find_color == 'green': + status = 1 + elif find_color == 'yellow': + status = 2 + elif find_color == 'red': + status = 3 + elif self.point_col > col2 and self.point_col < col3 and self.point_low > low1 and self.point_low < low3: + if find_color == 'red': + status = 4 + elif find_color == 'green': + status = 5 +# else: +# status = 6 + return status + + def cbTrafficLightFinished(self, traffic_light_finished_msg): + self.is_traffic_light_finished = True + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_traffic_light') + node = DetectTrafficLight() + node.main() diff --git 
a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light.org b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light.org new file mode 100755 index 0000000..61b8203 --- /dev/null +++ b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light.org @@ -0,0 +1,421 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +################################################################################ +# Copyright 2018 ROBOTIS CO., LTD. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# Author: Leon Jung, Gilbert, Ashe Kim + +import rospy +import numpy as np +import cv2 +from enum import Enum +from std_msgs.msg import UInt8, Float64 +from sensor_msgs.msg import Image, CompressedImage +from cv_bridge import CvBridge, CvBridgeError +from dynamic_reconfigure.server import Server +from turtlebot3_autorace_detect.cfg import DetectTrafficLightParamsConfig + +class DetectTrafficLight(): + def __init__(self): + self.hue_red_l = rospy.get_param("~detect/lane/red/hue_l", 0) + self.hue_red_h = rospy.get_param("~detect/lane/red/hue_h", 26) + self.saturation_red_l = rospy.get_param("~detect/lane/red/saturation_l", 239) + self.saturation_red_h = rospy.get_param("~detect/lane/red/saturation_h", 255) + self.lightness_red_l = rospy.get_param("~detect/lane/red/lightness_l", 123) + self.lightness_red_h = rospy.get_param("~detect/lane/red/lightness_h", 250) + + self.hue_yellow_l = rospy.get_param("~detect/lane/yellow/hue_l", 19) + self.hue_yellow_h = rospy.get_param("~detect/lane/yellow/hue_h", 33) + self.saturation_yellow_l = rospy.get_param("~detect/lane/yellow/saturation_l", 237) + self.saturation_yellow_h = rospy.get_param("~detect/lane/yellow/saturation_h", 255) + self.lightness_yellow_l = rospy.get_param("~detect/lane/yellow/lightness_l", 231) + self.lightness_yellow_h = rospy.get_param("~detect/lane/yellow/lightness_h", 255) + + self.hue_green_l = rospy.get_param("~detect/lane/green/hue_l", 40) + self.hue_green_h = rospy.get_param("~detect/lane/green/hue_h", 113) + self.saturation_green_l = rospy.get_param("~detect/lane/green/saturation_l", 210) + self.saturation_green_h = rospy.get_param("~detect/lane/green/saturation_h", 255) + self.lightness_green_l = rospy.get_param("~detect/lane/green/lightness_l", 131) + self.lightness_green_h = rospy.get_param("~detect/lane/green/lightness_h", 255) + + self.is_calibration_mode = rospy.get_param("~is_detection_calibration_mode", False) 
+ if self.is_calibration_mode == True: + srv_detect_lane = Server(DetectTrafficLightParamsConfig, self.cbGetDetectTrafficLightParam) + + self.sub_image_type = "compressed" # "compressed" / "raw" + self.pub_image_type = "compressed" # "compressed" / "raw" + + self.counter = 1 + + if self.sub_image_type == "compressed": + # subscribes compressed image + self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbGetImage, queue_size = 1) + elif self.sub_image_type == "raw": + # subscribes raw image + self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbGetImage, queue_size = 1) + + if self.pub_image_type == "compressed": + # publishes compensated image in compressed type + self.pub_image_traffic_light = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes compensated image in raw type + self.pub_image_traffic_light = rospy.Publisher('/detect/image_output', Image, queue_size = 1) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes light image in compressed type + self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1/compressed', CompressedImage, queue_size = 1) + self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2/compressed', CompressedImage, queue_size = 1) + self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3/compressed', CompressedImage, queue_size = 1) + elif self.pub_image_type == "raw": + # publishes light image in raw type + self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1', Image, queue_size = 1) + self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2', Image, queue_size = 1) + self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3', Image, queue_size = 1) + + + self.sub_traffic_light_finished = rospy.Subscriber('/control/traffic_light_finished', UInt8, 
self.cbTrafficLightFinished, queue_size = 1) + + self.pub_traffic_light_return = rospy.Publisher('/detect/traffic_light_stamped', UInt8, queue_size=1) + self.pub_parking_start = rospy.Publisher('/control/traffic_light_start', UInt8, queue_size = 1) + self.pub_traffic_light = rospy.Publisher('/detect/traffic_light', UInt8, queue_size=1) + + self.CurrentMode = Enum('CurrentMode', 'idle lane_following traffic_light') + + self.cvBridge = CvBridge() + self.cv_image = None + + self.is_image_available = False + self.is_traffic_light_finished = False + + self.green_count = 0 + self.yellow_count = 0 + self.red_count = 0 + self.stop_count = 0 + self.off_traffic = False + rospy.sleep(1) + + loop_rate = rospy.Rate(10) + while not rospy.is_shutdown(): + if self.is_image_available == True: + if self.is_traffic_light_finished == False: + self.fnFindTrafficLight() + + loop_rate.sleep() + + def cbGetDetectTrafficLightParam(self, config, level): + rospy.loginfo("[Detect Traffic Light] Detect Traffic Light Calibration Parameter reconfigured to") + rospy.loginfo("hue_red_l : %d", config.hue_red_l) + rospy.loginfo("hue_red_h : %d", config.hue_red_h) + rospy.loginfo("saturation_red_l : %d", config.saturation_red_l) + rospy.loginfo("saturation_red_h : %d", config.saturation_red_h) + rospy.loginfo("lightness_red_l : %d", config.lightness_red_l) + rospy.loginfo("lightness_red_h : %d", config.lightness_red_h) + + rospy.loginfo("hue_yellow_l : %d", config.hue_yellow_l) + rospy.loginfo("hue_yellow_h : %d", config.hue_yellow_h) + rospy.loginfo("saturation_yellow_l : %d", config.saturation_yellow_l) + rospy.loginfo("saturation_yellow_h : %d", config.saturation_yellow_h) + rospy.loginfo("lightness_yellow_l : %d", config.lightness_yellow_l) + rospy.loginfo("lightness_yellow_h : %d", config.lightness_yellow_h) + + rospy.loginfo("hue_green_l : %d", config.hue_green_l) + rospy.loginfo("hue_green_h : %d", config.hue_green_h) + rospy.loginfo("saturation_green_l : %d", config.saturation_green_l) + 
rospy.loginfo("saturation_green_h : %d", config.saturation_green_h) + rospy.loginfo("lightness_green_l : %d", config.lightness_green_l) + rospy.loginfo("lightness_green_h : %d", config.lightness_green_h) + + self.hue_red_l = config.hue_red_l + self.hue_red_h = config.hue_red_h + self.saturation_red_l = config.saturation_red_l + self.saturation_red_h = config.saturation_red_h + self.lightness_red_l = config.lightness_red_l + self.lightness_red_h = config.lightness_red_h + + self.hue_yellow_l = config.hue_yellow_l + self.hue_yellow_h = config.hue_yellow_h + self.saturation_yellow_l = config.saturation_yellow_l + self.saturation_yellow_h = config.saturation_yellow_h + self.lightness_yellow_l = config.lightness_yellow_l + self.lightness_yellow_h = config.lightness_yellow_h + + self.hue_green_l = config.hue_green_l + self.hue_green_h = config.hue_green_h + self.saturation_green_l = config.saturation_green_l + self.saturation_green_h = config.saturation_green_h + self.lightness_green_l = config.lightness_green_l + self.lightness_green_h = config.lightness_green_h + + return config + + + def cbGetImage(self, image_msg): + # drop the frame to 1/5 (6fps) because of the processing speed. This is up to your computer's operating power. 
+ if self.counter % 3 != 0: + self.counter += 1 + return + else: + self.counter = 1 + + if self.sub_image_type == "compressed": + np_arr = np.frombuffer(image_msg.data, np.uint8) + self.cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) + else: + self.cv_image = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8") + + self.is_image_available = True + + def fnFindTrafficLight(self): + cv_image_mask = self.fnMaskGreenTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + status1 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'green') + if status1 == 1 or status1 == 5: + rospy.loginfo("detect GREEN") + self.stop_count = 0 + self.green_count += 1 + else: + self.green_count = 0 + + cv_image_mask = self.fnMaskYellowTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + status2 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'yellow') + if status2 == 2: + rospy.loginfo("detect YELLOW") + self.yellow_count += 1 + else: + self.yellow_count = 0 + + cv_image_mask = self.fnMaskRedTrafficLight() + cv_image_mask = cv2.GaussianBlur(cv_image_mask,(5,5),0) + + status3 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'red') + if status3 == 3: + rospy.loginfo("detect RED") + self.red_count += 1 + elif status3 == 4: + self.red_count = 0 + self.stop_count += 1 + else: + self.red_count = 0 + self.stop_count = 0 + + if self.green_count >= 3: + rospy.loginfo("GREEN") + cv2.putText(self.cv_image,"GREEN", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (80, 255, 0)) + msg_sign = UInt8() + msg_sign.data = self.CurrentMode.lane_following.value + + self.pub_traffic_light.publish(msg_sign) + self.is_traffic_light_finished = True + + + if self.yellow_count >= 3: + rospy.loginfo("YELLOW") + cv2.putText(self.cv_image,"YELLOW", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 255)) + + if self.red_count >= 3: + rospy.loginfo("RED") + cv2.putText(self.cv_image,"RED", (self.point_col, self.point_low), 
cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) + + if self.stop_count >= 8: + rospy.loginfo("STOP") + self.off_traffic = True + cv2.putText(self.cv_image,"STOP", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255)) + + if self.pub_image_type == "compressed": + # publishes traffic light image in compressed type + self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(self.cv_image, "jpg")) + + elif self.pub_image_type == "raw": + # publishes traffic light image in raw type + self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_imgmsg(self.cv_image, "bgr8")) + + def fnMaskRedTrafficLight(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_red_l + Hue_h = self.hue_red_h + Saturation_l = self.saturation_red_l + Saturation_h = self.saturation_red_h + Lightness_l = self.lightness_red_l + Lightness_h = self.lightness_red_h + + # define range of red color in HSV + lower_red = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_red = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only red colors + mask = cv2.inRange(hsv, lower_red, upper_red) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes red light filtered image in compressed type + self.pub_image_red_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes red light filtered image in raw type + self.pub_image_red_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnMaskYellowTrafficLight(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_yellow_l + Hue_h = self.hue_yellow_h + Saturation_l = self.saturation_yellow_l + 
Saturation_h = self.saturation_yellow_h + Lightness_l = self.lightness_yellow_l + Lightness_h = self.lightness_yellow_h + + # define range of yellow color in HSV + lower_yellow = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_yellow = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only yellow colors + mask = cv2.inRange(hsv, lower_yellow, upper_yellow) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes yellow light filtered image in compressed type + self.pub_image_yellow_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes yellow light filtered image in raw type + self.pub_image_yellow_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnMaskGreenTrafficLight(self): + image = np.copy(self.cv_image) + + # Convert BGR to HSV + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + Hue_l = self.hue_green_l + Hue_h = self.hue_green_h + Saturation_l = self.saturation_green_l + Saturation_h = self.saturation_green_h + Lightness_l = self.lightness_green_l + Lightness_h = self.lightness_green_h + + # define range of green color in HSV + lower_green = np.array([Hue_l, Saturation_l, Lightness_l]) + upper_green = np.array([Hue_h, Saturation_h, Lightness_h]) + + # Threshold the HSV image to get only green colors + mask = cv2.inRange(hsv, lower_green, upper_green) + + # Bitwise-AND mask and original image + res = cv2.bitwise_and(image, image, mask = mask) + + if self.is_calibration_mode == True: + if self.pub_image_type == "compressed": + # publishes green light filtered image in compressed type + self.pub_image_green_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg")) + + elif self.pub_image_type == "raw": + # publishes green light filtered image in raw type + 
self.pub_image_green_light.publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8")) + + mask = cv2.bitwise_not(mask) + + return mask + + def fnFindCircleOfTrafficLight(self, mask, find_color): + status = 0 + + params=cv2.SimpleBlobDetector_Params() + # Change thresholds + params.minThreshold = 0 + params.maxThreshold = 255 + + # Filter by Area. + params.filterByArea = True + params.minArea = 50 + params.maxArea = 600 + + # Filter by Circularity + params.filterByCircularity = True + params.minCircularity = 0.4 + + # Filter by Convexity + params.filterByConvexity = True + params.minConvexity = 0.6 + + det=cv2.SimpleBlobDetector_create(params) + keypts=det.detect(mask) + frame=cv2.drawKeypoints(self.cv_image,keypts,np.array([]),(0,255,255),cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS) + + col1 = 180 + col2 = 270 + col3 = 305 + + low1 = 50 + low2 = 170 + low3 = 170 + + # if detected more than 1 light + for i in range(len(keypts)): + self.point_col = int(keypts[i].pt[0]) + self.point_low = int(keypts[i].pt[1]) + if self.point_col > col1 and self.point_col < col2 and self.point_low > low1 and self.point_low < low2: + if find_color == 'green': + status = 1 + elif find_color == 'yellow': + status = 2 + elif find_color == 'red': + status = 3 + elif self.point_col > col2 and self.point_col < col3 and self.point_low > low1 and self.point_low < low3: + if find_color == 'red': + status = 4 + elif find_color == 'green': + status = 5 + else: + status = 6 + + return status + + def cbTrafficLightFinished(self, traffic_light_finished_msg): + self.is_traffic_light_finished = True + + def main(self): + rospy.spin() + +if __name__ == '__main__': + rospy.init_node('detect_traffic_light') + node = DetectTrafficLight() + node.main() diff --git a/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light.work b/src/turtlebot3_autorace_2020/turtlebot3_autorace_detect/nodes/detect_traffic_light.work new file mode 100755 index 0000000..a3b6d9d --- /dev/null +++ 
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
import cv2
from enum import Enum
from std_msgs.msg import UInt8, Float64
from sensor_msgs.msg import Image, CompressedImage
from cv_bridge import CvBridge, CvBridgeError
from dynamic_reconfigure.server import Server
from turtlebot3_autorace_detect.cfg import DetectTrafficLightParamsConfig

class DetectTrafficLight():
    """Detects the red / yellow / green traffic light in the camera image.

    Each frame is HSV-thresholded per color, the inverted mask is fed to a
    blob detector, and blobs inside fixed image regions are counted over
    consecutive frames (debounce) before the result is published on
    /detect/traffic_light.
    """

    # Per-color HSV threshold defaults; attribute names become e.g. hue_red_l.
    _HSV_DEFAULTS = {
        'red':    {'hue_l': 0,  'hue_h': 26,  'saturation_l': 239, 'saturation_h': 255, 'lightness_l': 123, 'lightness_h': 250},
        'yellow': {'hue_l': 19, 'hue_h': 33,  'saturation_l': 237, 'saturation_h': 255, 'lightness_l': 231, 'lightness_h': 255},
        'green':  {'hue_l': 40, 'hue_h': 113, 'saturation_l': 210, 'saturation_h': 255, 'lightness_l': 131, 'lightness_h': 255},
    }

    def __init__(self):
        # Load every HSV bound from the parameter server, falling back to the
        # table above (equivalent to the former 18 explicit get_param calls).
        for color, bounds in self._HSV_DEFAULTS.items():
            for key, default in bounds.items():
                channel, bound = key.rsplit('_', 1)
                setattr(self, '%s_%s_%s' % (channel, color, bound),
                        rospy.get_param("~detect/lane/%s/%s" % (color, key), default))

        self.is_calibration_mode = rospy.get_param("~is_detection_calibration_mode", False)
        if self.is_calibration_mode == True:
            # Dynamic reconfigure lets the HSV bounds be tuned live.
            srv_detect_lane = Server(DetectTrafficLightParamsConfig, self.cbGetDetectTrafficLightParam)

        self.sub_image_type = "compressed"  # "compressed" / "raw"
        self.pub_image_type = "compressed"  # "compressed" / "raw"

        self.counter = 1

        if self.sub_image_type == "compressed":
            # subscribes compressed image
            self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbGetImage, queue_size = 1)
        elif self.sub_image_type == "raw":
            # subscribes raw image
            self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbGetImage, queue_size = 1)

        if self.pub_image_type == "compressed":
            # publishes the annotated image in compressed type
            self.pub_image_traffic_light = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1)
        elif self.pub_image_type == "raw":
            # publishes the annotated image in raw type
            self.pub_image_traffic_light = rospy.Publisher('/detect/image_output', Image, queue_size = 1)

        if self.is_calibration_mode == True:
            # Per-color debug mask publishers (only exist in calibration mode).
            if self.pub_image_type == "compressed":
                self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1/compressed', CompressedImage, queue_size = 1)
                self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2/compressed', CompressedImage, queue_size = 1)
                self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3/compressed', CompressedImage, queue_size = 1)
            elif self.pub_image_type == "raw":
                self.pub_image_red_light = rospy.Publisher('/detect/image_output_sub1', Image, queue_size = 1)
                self.pub_image_yellow_light = rospy.Publisher('/detect/image_output_sub2', Image, queue_size = 1)
                self.pub_image_green_light = rospy.Publisher('/detect/image_output_sub3', Image, queue_size = 1)

        self.sub_traffic_light_finished = rospy.Subscriber('/control/traffic_light_finished', UInt8, self.cbTrafficLightFinished, queue_size = 1)

        self.pub_traffic_light_return = rospy.Publisher('/detect/traffic_light_stamped', UInt8, queue_size=1)
        self.pub_traffic_start = rospy.Publisher('/control/traffic_light_start', UInt8, queue_size = 1)
        self.pub_traffic_light = rospy.Publisher('/detect/traffic_light', UInt8, queue_size=1)

        self.InvokedObject = Enum('InvokedObject', 'traffic_light traffic_sign')

        self.cvBridge = CvBridge()
        self.cv_image = None

        self.is_image_available = False
        self.is_traffic_light_finished = False

        # Consecutive-frame vote counters per color.
        self.green_count = 0
        self.yellow_count = 0
        self.red_count = 0
        self.stop_count = 0
        self.off_traffic = False
        rospy.sleep(1)

        # Detection loop: 10 Hz until shutdown. Note __init__ never returns
        # until the node shuts down (original behavior, kept).
        loop_rate = rospy.Rate(10)
        while not rospy.is_shutdown():
            if self.is_image_available == True:
                if self.is_traffic_light_finished == False:
                    self.fnFindTrafficLight()

            loop_rate.sleep()

    def cbGetDetectTrafficLightParam(self, config, level):
        """Dynamic-reconfigure callback: log and adopt the new HSV bounds."""
        rospy.loginfo("[Detect Traffic Light] Detect Traffic Light Calibration Parameter reconfigured to")
        # Config field names match our attribute names exactly, so one loop
        # replaces the former 18 loginfo lines + 18 assignments.
        for color in ('red', 'yellow', 'green'):
            for channel in ('hue', 'saturation', 'lightness'):
                for bound in ('l', 'h'):
                    name = '%s_%s_%s' % (channel, color, bound)
                    rospy.loginfo("%s : %d", name, getattr(config, name))
                    setattr(self, name, getattr(config, name))

        return config

    def cbGetImage(self, image_msg):
        """Image callback: keep one frame in three to bound processing load.

        NOTE(review): the original comment claimed a 1/5 drop, but the code
        keeps every third frame.
        """
        if self.counter % 3 != 0:
            self.counter += 1
            return
        else:
            self.counter = 1

        if self.sub_image_type == "compressed":
            np_arr = np.frombuffer(image_msg.data, np.uint8)
            self.cv_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
        else:
            self.cv_image = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8")

        self.is_image_available = True

    def fnFindTrafficLight(self):
        """Run the three color detectors on the current frame and publish."""
        cv_image_mask = self.fnMaskGreenTrafficLight()
        cv_image_mask = cv2.GaussianBlur(cv_image_mask, (5, 5), 0)

        status1 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'green')
        rospy.loginfo("traffic status = %d", status1)
        if status1 == 1 or status1 == 5:
            rospy.loginfo("detect GREEN")
            self.stop_count = 0
            self.green_count += 1
        else:
            self.green_count = 0

        cv_image_mask = self.fnMaskYellowTrafficLight()
        cv_image_mask = cv2.GaussianBlur(cv_image_mask, (5, 5), 0)

        status2 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'yellow')
        if status2 == 2:
            rospy.loginfo("detect YELLOW")
            self.yellow_count += 1
        else:
            self.yellow_count = 0

        cv_image_mask = self.fnMaskRedTrafficLight()
        cv_image_mask = cv2.GaussianBlur(cv_image_mask, (5, 5), 0)

        status3 = self.fnFindCircleOfTrafficLight(cv_image_mask, 'red')
        if status3 == 3:
            rospy.loginfo("detect RED")
            self.red_count += 1
        elif status3 == 4:
            # Red in the right-hand region: count toward "stop".
            self.red_count = 0
            self.stop_count += 1
        else:
            self.red_count = 0
            self.stop_count = 0

        # Require three consecutive detections before acting (debounce).
        # point_col / point_low are set whenever a count is non-zero, since
        # the counters only increment after a blob was found.
        if self.green_count >= 3:
            rospy.loginfo("GREEN")
            cv2.putText(self.cv_image, "GREEN", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (80, 255, 0))
            msg_sign = UInt8()
            msg_sign.data = self.InvokedObject.traffic_light.value

            self.pub_traffic_light.publish(msg_sign)
            self.is_traffic_light_finished = True

        if self.yellow_count >= 3:
            rospy.loginfo("YELLOW")
            cv2.putText(self.cv_image, "YELLOW", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 255, 255))

        if self.red_count >= 3:
            rospy.loginfo("RED")
            cv2.putText(self.cv_image, "RED", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255))

        if self.stop_count >= 8:
            rospy.loginfo("STOP")
            self.off_traffic = True
            cv2.putText(self.cv_image, "STOP", (self.point_col, self.point_low), cv2.FONT_HERSHEY_DUPLEX, 0.5, (0, 0, 255))

        if self.pub_image_type == "compressed":
            # publishes traffic light image in compressed type
            self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_compressed_imgmsg(self.cv_image, "jpg"))
        elif self.pub_image_type == "raw":
            # publishes traffic light image in raw type
            self.pub_image_traffic_light.publish(self.cvBridge.cv2_to_imgmsg(self.cv_image, "bgr8"))

    def _fnMaskColor(self, color, pub_attr):
        """Shared implementation of the three fnMask*TrafficLight methods.

        Thresholds the current frame with `color`'s HSV bounds and returns the
        inverted mask (lit pixels become 0, which the blob detector expects).
        In calibration mode the raw mask is also published on the debug topic
        named by `pub_attr` (the publisher only exists in that mode).
        """
        image = np.copy(self.cv_image)

        # Convert BGR to HSV
        hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)

        lower = np.array([getattr(self, 'hue_%s_l' % color),
                          getattr(self, 'saturation_%s_l' % color),
                          getattr(self, 'lightness_%s_l' % color)])
        upper = np.array([getattr(self, 'hue_%s_h' % color),
                          getattr(self, 'saturation_%s_h' % color),
                          getattr(self, 'lightness_%s_h' % color)])

        # Threshold the HSV image to get only this color.
        mask = cv2.inRange(hsv, lower, upper)

        if self.is_calibration_mode == True:
            if self.pub_image_type == "compressed":
                getattr(self, pub_attr).publish(self.cvBridge.cv2_to_compressed_imgmsg(mask, "jpg"))
            elif self.pub_image_type == "raw":
                getattr(self, pub_attr).publish(self.cvBridge.cv2_to_imgmsg(mask, "mono8"))

        return cv2.bitwise_not(mask)

    def fnMaskRedTrafficLight(self):
        """Inverted binary mask of the red HSV range of the current frame."""
        return self._fnMaskColor('red', 'pub_image_red_light')

    def fnMaskYellowTrafficLight(self):
        """Inverted binary mask of the yellow HSV range of the current frame."""
        return self._fnMaskColor('yellow', 'pub_image_yellow_light')

    def fnMaskGreenTrafficLight(self):
        """Inverted binary mask of the green HSV range of the current frame."""
        return self._fnMaskColor('green', 'pub_image_green_light')

    def fnFindCircleOfTrafficLight(self, mask, find_color):
        """Blob-detect circular lights in `mask` and classify them by position.

        Returns: 0 = nothing, 1/5 = green (main / right region), 2 = yellow,
        3 = red (main region), 4 = red in the right "stop" region, 6 = a blob
        outside both regions. Side effect: records the last blob position in
        self.point_col / self.point_low for text overlays.
        """
        status = 0

        params = cv2.SimpleBlobDetector_Params()
        # Accept the full intensity range of the binary mask.
        params.minThreshold = 0
        params.maxThreshold = 255

        # Keep only light-sized blobs.
        params.filterByArea = True
        params.minArea = 50
        params.maxArea = 600

        # Lights are round.
        params.filterByCircularity = True
        params.minCircularity = 0.4

        params.filterByConvexity = True
        params.minConvexity = 0.6

        detector = cv2.SimpleBlobDetector_create(params)
        keypts = detector.detect(mask)

        # Fixed pixel windows where the traffic light appears in the camera
        # view — assumes the autorace course camera placement; TODO confirm.
        col1, col2, col3 = 180, 270, 305
        low1, low2, low3 = 50, 170, 170

        for keypt in keypts:
            self.point_col = int(keypt.pt[0])
            self.point_low = int(keypt.pt[1])
            if col1 < self.point_col < col2 and low1 < self.point_low < low2:
                if find_color == 'green':
                    status = 1
                elif find_color == 'yellow':
                    status = 2
                elif find_color == 'red':
                    status = 3
            elif col2 < self.point_col < col3 and low1 < self.point_low < low3:
                if find_color == 'red':
                    status = 4
                elif find_color == 'green':
                    status = 5
            else:
                status = 6

        return status

    def cbTrafficLightFinished(self, traffic_light_finished_msg):
        # The control node signals the traffic-light sequence is done.
        self.is_traffic_light_finished = True

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_traffic_light')
    node = DetectTrafficLight()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Ryu Woon Jung (Leon), Gilbert, Ashe Kim

import rospy
import math
import tf
from enum import Enum
from std_msgs.msg import UInt8
from geometry_msgs.msg import Twist, PoseStamped, PoseWithCovarianceStamped
from nav_msgs.msg import Odometry
from move_base_msgs.msg import MoveBaseActionResult
from turtlebot3_autorace_msgs.msg import MovingParam

class DetectTunnel():
    """Sequences the tunnel mission: scripted entry maneuver, move_base
    navigation through the tunnel, and a scripted exit. Each completed step is
    acknowledged on /detect/tunnel_stamped."""

    def __init__(self):
        # subscribes state
        self.sub_tunnel_order = rospy.Subscriber('/detect/tunnel_order', UInt8, self.cbTunnelOrder, queue_size=1)
        self.sub_moving_completed = rospy.Subscriber('/control/moving/complete', UInt8, self.cbMovingComplete, queue_size = 1)
        self.sub_arrival_status = rospy.Subscriber("/move_base/result", MoveBaseActionResult, self.cbGetNavigationResult, queue_size=1)
        self.sub_odom = rospy.Subscriber('/odom', Odometry, self.cbOdom, queue_size=1)

        # publishes state
        self.pub_tunnel_return = rospy.Publisher('/detect/tunnel_stamped', UInt8, queue_size=1)
        self.pub_moving = rospy.Publisher('/control/moving/state', MovingParam, queue_size= 1)
        self.pub_goal_pose_stamped = rospy.Publisher("/move_base_simple/goal", PoseStamped, queue_size=1)

        self.StepOfTunnel = Enum('StepOfTunnel', 'searching_tunnel_sign go_in_to_tunnel navigation go_out_from_tunnel exit')
        self.is_navigation_finished = False
        self.is_tunnel_finished = False
        self.is_moving_complete = False
        self.last_current_theta = 0.0

    def cbGetNavigationResult(self, msg_nav_result):
        # actionlib status 3 == SUCCEEDED
        if msg_nav_result.status.status == 3:
            rospy.loginfo("Reached")
            self.is_navigation_finished = True

    def cbMovingComplete(self, data):
        self.is_moving_complete = True

    def _fnMoveAndWait(self, moving_type, angular, linear):
        """Publish one MovingParam command and block until the control node
        reports completion on /control/moving/complete.

        The flag is set from a subscriber callback in another thread, so we
        sleep instead of busy-spinning (the original `while True` loop burned
        a full CPU core while waiting).
        """
        msg_moving = MovingParam()
        msg_moving.moving_type = moving_type
        msg_moving.moving_value_angular = angular
        msg_moving.moving_value_linear = linear
        self.pub_moving.publish(msg_moving)
        while not self.is_moving_complete:
            rospy.sleep(0.05)
        self.is_moving_complete = False

    def cbTunnelOrder(self, order):
        """Execute the step requested on /detect/tunnel_order, then ack it."""
        pub_tunnel_return = UInt8()

        if order.data == self.StepOfTunnel.searching_tunnel_sign.value:
            rospy.loginfo("Now lane_following")
            pub_tunnel_return.data = self.StepOfTunnel.searching_tunnel_sign.value

        elif order.data == self.StepOfTunnel.go_in_to_tunnel.value:
            rospy.loginfo("Now go_in_to_tunnel")
            rospy.sleep(1)

            # Scripted entry: forward, turn left 45, forward again.
            rospy.loginfo("go straight")
            self._fnMoveAndWait(4, 0.0, 0.2)
            rospy.sleep(1)

            rospy.loginfo("go left")
            self._fnMoveAndWait(2, 45, 0.0)
            rospy.sleep(1)

            rospy.loginfo("go straight")
            self._fnMoveAndWait(4, 0.0, 0.15)
            rospy.sleep(1)

            rospy.loginfo("go_in_to_tunnel finished")
            pub_tunnel_return.data = self.StepOfTunnel.go_in_to_tunnel.value

        elif order.data == self.StepOfTunnel.navigation.value:
            rospy.loginfo("Now navigation")
            # NOTE(review): this initial pose is built but never published —
            # presumably it was meant to go to /initialpose for AMCL; confirm
            # before relying on it. Kept to preserve original behavior.
            initialPose = PoseWithCovarianceStamped()
            initialPose.header.frame_id = "map"
            initialPose.header.stamp = rospy.Time.now()
            initialPose.pose.pose = self.odom_msg.pose.pose

            self.fnPubGoalPose()

            # Wait for move_base to report arrival (flag set by callback).
            while not self.is_navigation_finished:
                rospy.sleep(0.1)

            pub_tunnel_return.data = self.StepOfTunnel.navigation.value
            rospy.loginfo("navigation!")

        elif order.data == self.StepOfTunnel.go_out_from_tunnel.value:
            rospy.loginfo("Now go_out_from_tunnel")

            rospy.loginfo("go straight")
            self._fnMoveAndWait(4, 0.0, 0.45)
            rospy.sleep(1)

            pub_tunnel_return.data = self.StepOfTunnel.go_out_from_tunnel.value

        elif order.data == self.StepOfTunnel.exit.value:
            rospy.loginfo("Now exit")
            pub_tunnel_return.data = self.StepOfTunnel.exit.value

        self.pub_tunnel_return.publish(pub_tunnel_return)

    def cbOdom(self, odom_msg):
        """Track the robot pose; unwrap yaw so it stays continuous across +/-pi."""
        quaternion = (odom_msg.pose.pose.orientation.x, odom_msg.pose.pose.orientation.y, odom_msg.pose.pose.orientation.z, odom_msg.pose.pose.orientation.w)
        self.current_theta = self.euler_from_quaternion(quaternion)
        self.odom_msg = odom_msg
        if (self.current_theta - self.last_current_theta) < -math.pi:
            self.current_theta = 2. * math.pi + self.current_theta
            self.last_current_theta = math.pi
        elif (self.current_theta - self.last_current_theta) > math.pi:
            self.current_theta = -2. * math.pi + self.current_theta
            self.last_current_theta = -math.pi
        else:
            self.last_current_theta = self.current_theta

        self.current_pos_x = odom_msg.pose.pose.position.x
        self.current_pos_y = odom_msg.pose.pose.position.y

    def euler_from_quaternion(self, quaternion):
        """Return only the yaw component of the quaternion."""
        theta = tf.transformations.euler_from_quaternion(quaternion)[2]
        return theta

    def fnPubGoalPose(self):
        """Send the fixed in-map goal (the tunnel exit) to move_base."""
        goalPoseStamped = PoseStamped()

        goalPoseStamped.header.frame_id = "map"
        goalPoseStamped.header.stamp = rospy.Time.now()

        # Hard-coded tunnel-exit coordinates on the autorace map.
        goalPoseStamped.pose.position.x = -0.3
        goalPoseStamped.pose.position.y = -1.78
        goalPoseStamped.pose.position.z = 0.0

        goalPoseStamped.pose.orientation.x = 0.0
        goalPoseStamped.pose.orientation.y = 0.0
        goalPoseStamped.pose.orientation.z = 0.0
        goalPoseStamped.pose.orientation.w = 1.0

        self.pub_goal_pose_stamped.publish(goalPoseStamped)

    def cbTunnelFinished(self, tunnel_finished_msg):
        self.is_tunnel_finished = True

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_tunnel')
    node = DetectTunnel()
    node.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

################################################################################
# Copyright 2018 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

# Author: Leon Jung, Gilbert, Ashe Kim

import rospy
import numpy as np
import os
import cv2
from enum import Enum
from std_msgs.msg import UInt8
from sensor_msgs.msg import Image, CompressedImage
from cv_bridge import CvBridge, CvBridgeError

class DetectSign():
    """Detects the tunnel traffic sign using SIFT features + FLANN matching
    against a stored template, publishing the result on /detect/traffic_sign."""

    def __init__(self):
        self.fnPreproc()

        self.sub_image_type = "raw"         # you can choose image type "compressed", "raw"
        self.pub_image_type = "compressed"  # you can choose image type "compressed", "raw"

        # subscribes
        if self.sub_image_type == "compressed":
            self.sub_image_original = rospy.Subscriber('/detect/image_input/compressed', CompressedImage, self.cbFindTrafficSign, queue_size = 1)
        elif self.sub_image_type == "raw":
            self.sub_image_original = rospy.Subscriber('/detect/image_input', Image, self.cbFindTrafficSign, queue_size = 1)

        # publishes
        self.pub_traffic_sign = rospy.Publisher('/detect/traffic_sign', UInt8, queue_size=1)

        if self.pub_image_type == "compressed":
            self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output/compressed', CompressedImage, queue_size = 1)
        elif self.pub_image_type == "raw":
            self.pub_image_traffic_sign = rospy.Publisher('/detect/image_output', Image, queue_size = 1)

        self.cvBridge = CvBridge()
        self.TrafficSign = Enum('TrafficSign', 'intersection left right construction stop parking tunnel')
        self.counter = 1

    def fnPreproc(self):
        """Load the tunnel-sign template and precompute its SIFT features."""
        self.sift = cv2.SIFT_create()

        # Template lives next to this package: .../turtlebot3_autorace_detect/image/
        dir_path = os.path.dirname(os.path.realpath(__file__))
        dir_path = dir_path.replace('turtlebot3_autorace_detect/nodes', 'turtlebot3_autorace_detect/')
        dir_path += 'image/'

        self.img_tunnel = cv2.imread(dir_path + 'tunnel.png', 0)  # grayscale template
        self.kp_tunnel, self.des_tunnel = self.sift.detectAndCompute(self.img_tunnel, None)

        FLANN_INDEX_KDTREE = 0
        index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
        search_params = dict(checks = 50)

        self.flann = cv2.FlannBasedMatcher(index_params, search_params)

    def fnCalcMSE(self, arr1, arr2):
        """Mean squared error between two equally-shaped coordinate arrays."""
        squared_diff = (arr1 - arr2) ** 2
        total = np.sum(squared_diff)  # renamed from `sum`: don't shadow the builtin
        num_all = arr1.shape[0] * arr1.shape[1]  # both inputs must share this shape
        err = total / num_all
        return err

    def cbFindTrafficSign(self, image_msg):
        """Match the current frame against the tunnel template; publish result."""
        # Keep one frame in three to bound the processing load.
        if self.counter % 3 != 0:
            self.counter += 1
            return
        else:
            self.counter = 1

        if self.sub_image_type == "compressed":
            # converting compressed image to opencv image
            np_arr = np.frombuffer(image_msg.data, np.uint8)
            cv_image_input = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
        elif self.sub_image_type == "raw":
            cv_image_input = self.cvBridge.imgmsg_to_cv2(image_msg, "bgr8")

        MIN_MATCH_COUNT = 5
        MIN_MSE_DECISION = 50000

        # find the keypoints and descriptors with SIFT
        kp1, des1 = self.sift.detectAndCompute(cv_image_input, None)
        if des1 is None:
            # Featureless frame: knnMatch would raise on a None query set.
            return

        matches_tunnel = self.flann.knnMatch(des1, self.des_tunnel, k=2)

        image_out_num = 1

        # Lowe's ratio test; knnMatch may return fewer than k neighbours per
        # query, so guard the unpack (the original crashed on such pairs).
        good_tunnel = []
        for pair in matches_tunnel:
            if len(pair) < 2:
                continue
            m, n = pair
            if m.distance < 0.7 * n.distance:
                good_tunnel.append(m)

        if len(good_tunnel) > MIN_MATCH_COUNT:
            src_pts = np.float32([kp1[m.queryIdx].pt for m in good_tunnel]).reshape(-1, 1, 2)
            dst_pts = np.float32([self.kp_tunnel[m.trainIdx].pt for m in good_tunnel]).reshape(-1, 1, 2)

            M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
            matchesMask_tunnel = mask.ravel().tolist()

            mse = self.fnCalcMSE(src_pts, dst_pts)
            if mse < MIN_MSE_DECISION:
                msg_sign = UInt8()
                msg_sign.data = self.TrafficSign.tunnel.value

                self.pub_traffic_sign.publish(msg_sign)

                rospy.loginfo("tunnel")
                image_out_num = 4
        else:
            matchesMask_tunnel = None
            rospy.loginfo("nothing")

        if image_out_num == 1:
            # No sign found: republish the input frame as-is.
            if self.pub_image_type == "compressed":
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(cv_image_input, "jpg"))
            elif self.pub_image_type == "raw":
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(cv_image_input, "bgr8"))

        elif image_out_num == 4:
            # (255,0,0) is blue in BGR — the original comment said "green".
            draw_params_tunnel = dict(matchColor = (255, 0, 0),
                                      singlePointColor = None,
                                      matchesMask = matchesMask_tunnel,  # draw only inliers
                                      flags = 2)

            final_tunnel = cv2.drawMatches(cv_image_input, kp1, self.img_tunnel, self.kp_tunnel, good_tunnel, None, **draw_params_tunnel)

            if self.pub_image_type == "compressed":
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_compressed_imgmsg(final_tunnel, "jpg"))
            elif self.pub_image_type == "raw":
                self.pub_image_traffic_sign.publish(self.cvBridge.cv2_to_imgmsg(final_tunnel, "bgr8"))

    def main(self):
        rospy.spin()

if __name__ == '__main__':
    rospy.init_node('detect_sign')
    node = DetectSign()
    node.main()
#!/bin/bash
# Container entrypoint: configure sshd / x11vnc / nginx from environment
# variables, then hand off to supervisord (under tini) as PID 1.

/usr/sbin/sshd -D &

if [ -n "$VNC_PASSWORD" ]; then
    echo -n "$VNC_PASSWORD" > /.password1
    # Quote the substitution so passwords with spaces survive word splitting.
    x11vnc -storepasswd "$(cat /.password1)" /.password2
    chmod 400 /.password*
    sed -i 's/^command=x11vnc.*/& -rfbauth \/.password2/' /etc/supervisor/conf.d/supervisord.conf
    # Do not leave the plaintext password in the environment.
    export VNC_PASSWORD=
fi

if [ -n "$X11VNC_ARGS" ]; then
    sed -i "s/^command=x11vnc.*/& ${X11VNC_ARGS}/" /etc/supervisor/conf.d/supervisord.conf
fi

if [ -n "$OPENBOX_ARGS" ]; then
    sed -i "s#^command=/usr/bin/openbox.*#& ${OPENBOX_ARGS}#" /etc/supervisor/conf.d/supervisord.conf
fi

if [ -n "$RESOLUTION" ]; then
    sed -i "s/1024x768/$RESOLUTION/" /usr/local/bin/xvfb.sh
fi

USER=${USER:-root}
HOME=/root
if [ "$USER" != "root" ]; then
    echo "* enable custom user: $USER"
    useradd --create-home --shell /bin/bash --user-group --groups adm,sudo "$USER"
    if [ -z "$PASSWORD" ]; then
        echo " set default password to \"ubuntu\""
        PASSWORD=ubuntu
    fi
    HOME=/home/$USER
    echo "$USER:$PASSWORD" | chpasswd
    cp -r /root/{.gtkrc-2.0,.asoundrc} "${HOME}"
    [ -d "/dev/snd" ] && chgrp -R adm /dev/snd
fi
sed -i -e "s|%USER%|$USER|" -e "s|%HOME%|$HOME|" /etc/supervisor/conf.d/supervisord.conf

# home folder
mkdir -p "$HOME/.config/pcmanfm/LXDE/"
ln -sf /usr/local/share/doro-lxde-wallpapers/desktop-items-0.conf "$HOME/.config/pcmanfm/LXDE/"
chown -R "$USER:$USER" "$HOME"

# nginx workers
sed -i 's|worker_processes .*|worker_processes 1;|' /etc/nginx/nginx.conf

# nginx ssl
if [ -n "$SSL_PORT" ] && [ -e "/etc/nginx/ssl/nginx.key" ]; then
    echo "* enable SSL"
    sed -i 's|#_SSL_PORT_#\(.*\)443\(.*\)|\1'"$SSL_PORT"'\2|' /etc/nginx/sites-enabled/default
    sed -i 's|#_SSL_PORT_#||' /etc/nginx/sites-enabled/default
fi

# nginx http base authentication
if [ -n "$HTTP_PASSWORD" ]; then
    echo "* enable HTTP base authentication"
    htpasswd -bc /etc/nginx/.htpasswd "$USER" "$HTTP_PASSWORD"
    sed -i 's|#_HTTP_PASSWORD_#||' /etc/nginx/sites-enabled/default
fi

# dynamic prefix path renaming
if [ -n "$RELATIVE_URL_ROOT" ]; then
    echo "* enable RELATIVE_URL_ROOT: $RELATIVE_URL_ROOT"
    sed -i 's|#_RELATIVE_URL_ROOT_||' /etc/nginx/sites-enabled/default
    sed -i 's|_RELATIVE_URL_ROOT_|'"$RELATIVE_URL_ROOT"'|' /etc/nginx/sites-enabled/default
fi

# clear secrets from the environment before starting long-lived processes
PASSWORD=
HTTP_PASSWORD=

# exec so supervisord (under tini) replaces this shell as PID 1 and receives
# signals from `docker stop` directly.
exec /bin/tini -- /usr/bin/supervisord -n -c /etc/supervisor/supervisord.conf