diff --git a/.github/workflows/build-image.yaml b/.github/workflows/build-image.yaml index a44f2f82a..34b7022d7 100644 --- a/.github/workflows/build-image.yaml +++ b/.github/workflows/build-image.yaml @@ -7,6 +7,7 @@ on: branches: [ master ] release: types: [ created ] + workflow_dispatch: jobs: build: diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 92d9f9277..946004b95 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,6 +5,7 @@ on: branches: [ '*' ] pull_request: branches: [ master ] + workflow_dispatch: jobs: # melodic: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a9dc26a82..ef99d42a1 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -5,16 +5,13 @@ on: branches: [ '*' ] pull_request: branches: [ '*' ] + workflow_dispatch: permissions: contents: read pages: write id-token: write -concurrency: - group: "pages" - cancel-in-progress: true - defaults: run: shell: bash @@ -75,6 +72,9 @@ jobs: deploy-docs: if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }} + concurrency: + group: "pages" + cancel-in-progress: true environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} @@ -82,5 +82,8 @@ jobs: needs: docs steps: - name: Deploy to GitHub Pages + env: + FREEZE_DOCS: ${{ secrets.FREEZE_DOCS }} + if: ${{ !env.FREEZE_DOCS }} id: deployment uses: actions/deploy-pages@v1 diff --git a/.github/workflows/editorconfig.yaml b/.github/workflows/editorconfig.yaml index 2e4a1a48e..8e6fd491c 100644 --- a/.github/workflows/editorconfig.yaml +++ b/.github/workflows/editorconfig.yaml @@ -5,6 +5,7 @@ on: branches: [ '*' ] pull_request: branches: [ master ] + workflow_dispatch: jobs: editorconfig: diff --git a/README.md b/README.md index 0f9c156ae..2e695db1d 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Clover drone is used on a wide range of educational events, including [Copter Ha Preconfigured image for Raspberry Pi 
with installed and configured software, ready to fly, is available [in the Releases section](https://github.com/CopterExpress/clover/releases). -![GitHub Workflow Status](https://img.shields.io/github/workflow/status/CopterExpress/clover/CI) +![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/CopterExpress/clover/build-image.yaml?branch=master) ![GitHub all releases](https://img.shields.io/github/downloads/CopterExpress/clover/total) Image features: diff --git a/aruco_pose/README.md b/aruco_pose/README.md index d5c88b1f8..1e8990681 100644 --- a/aruco_pose/README.md +++ b/aruco_pose/README.md @@ -43,7 +43,8 @@ It's recommended to run it within the same nodelet manager with the camera nodel * `~frame_id_prefix` (*string*) – prefix for TF transforms names, marker's ID is appended (default: `aruco_`) * `~length` (*double*) – markers' sides length * `~length_override` (*map*) – lengths of markers with specified ids -* `~known_tilt` (*string*) – known tilt (pitch and roll) of all the markers as a frame +* `~known_vertical` (*string*) – known vertical (Z axis) of all the markers as a frame +* `~flip_vertical` – flip vertical vector ### Topics @@ -71,7 +72,8 @@ It's recommended to run it within the same nodelet manager with the camera nodel * `~map` – path to text file with markers list * `~frame_id` – published frame id (default: `aruco_map`) -* `~known_tilt` – known tilt (pitch and roll) of markers map as a frame +* `~known_vertical` – known vertical (Z axis) of markers map as a frame +* `~flip_vertical` – flip vertical vector * `~image_width` – debug image width (default: 2000) * `~image_height` – debug image height (default: 2000) * `~image_margin` – debug image margin (default: 200) diff --git a/aruco_pose/cfg/Detector.cfg b/aruco_pose/cfg/Detector.cfg index 581bc2622..98a29dce6 100755 --- a/aruco_pose/cfg/Detector.cfg +++ b/aruco_pose/cfg/Detector.cfg @@ -4,7 +4,10 @@ PACKAGE = "aruco_pose" from 
dynamic_reconfigure.parameter_generator_catkin import * import cv2.aruco -p = cv2.aruco.DetectorParameters_create() +try: + p = cv2.aruco.DetectorParameters_create() +except AttributeError: + p = cv2.aruco.DetectorParameters() gen = ParameterGenerator() diff --git a/aruco_pose/package.xml b/aruco_pose/package.xml index 039777534..8ee135c51 100644 --- a/aruco_pose/package.xml +++ b/aruco_pose/package.xml @@ -1,7 +1,7 @@ - + aruco_pose - 0.23.0 + 0.24.0 Positioning with ArUco markers Oleg Kalachev @@ -28,6 +28,8 @@ sensor_msgs rostest dynamic_reconfigure + python-docopt + python3-docopt image_publisher ros_pytest diff --git a/aruco_pose/src/aruco_detect.cpp b/aruco_pose/src/aruco_detect.cpp index fe1ad5be2..07199419c 100644 --- a/aruco_pose/src/aruco_detect.cpp +++ b/aruco_pose/src/aruco_detect.cpp @@ -50,6 +50,7 @@ #include #include +#include "draw.h" #include "utils.h" #include #include @@ -71,11 +72,12 @@ class ArucoDetect : public nodelet::Nodelet { ros::Publisher markers_pub_, vis_markers_pub_; ros::Subscriber map_markers_sub_; ros::ServiceServer set_markers_srv_; - bool estimate_poses_, send_tf_, auto_flip_; + bool estimate_poses_, send_tf_, flip_vertical_, auto_flip_, use_map_markers_; + bool waiting_for_map_; double length_; ros::Duration transform_timeout_; std::unordered_map length_override_; - std::string frame_id_prefix_, known_tilt_; + std::string frame_id_prefix_, known_vertical_; Mat camera_matrix_, dist_coeffs_; aruco_pose::MarkerArray array_; std::unordered_set map_markers_ids_; @@ -95,6 +97,8 @@ class ArucoDetect : public nodelet::Nodelet { dictionary = nh_priv_.param("dictionary", 2); estimate_poses_ = nh_priv_.param("estimate_poses", true); send_tf_ = nh_priv_.param("send_tf", true); + use_map_markers_ = nh_priv_.param("use_map_markers", false); + waiting_for_map_ = use_map_markers_; if (estimate_poses_ && !nh_priv_.getParam("length", length_)) { NODELET_FATAL("can't estimate marker's poses as ~length parameter is not defined"); ros::shutdown(); 
@@ -102,7 +106,8 @@ class ArucoDetect : public nodelet::Nodelet { readLengthOverride(nh_priv_); transform_timeout_ = ros::Duration(nh_priv_.param("transform_timeout", 0.02)); - known_tilt_ = nh_priv_.param("known_tilt", ""); + known_vertical_ = nh_priv_.param("known_vertical", nh_priv_.param("known_tilt", std::string(""))); // known_tilt is an old name + flip_vertical_ = nh_priv_.param("flip_vertical", false); auto_flip_ = nh_priv_.param("auto_flip", false); frame_id_prefix_ = nh_priv_.param("frame_id_prefix", "aruco_"); @@ -133,14 +138,15 @@ class ArucoDetect : public nodelet::Nodelet { void imageCallback(const sensor_msgs::ImageConstPtr& msg, const sensor_msgs::CameraInfoConstPtr &cinfo) { if (!enabled_) return; + if (waiting_for_map_) return; - Mat image = cv_bridge::toCvShare(msg, "bgr8")->image; + Mat image = cv_bridge::toCvShare(msg)->image; vector ids; vector> corners, rejected; vector rvecs, tvecs; vector obj_points; - geometry_msgs::TransformStamped snap_to; + geometry_msgs::TransformStamped vertical; // Detect markers cv::aruco::detectMarkers(image, dictionary_, corners, ids, parameters_, rejected); @@ -175,12 +181,12 @@ class ArucoDetect : public nodelet::Nodelet { } } - if (!known_tilt_.empty()) { + if (!known_vertical_.empty()) { try { - snap_to = tf_buffer_->lookupTransform(msg->header.frame_id, known_tilt_, - msg->header.stamp, transform_timeout_); + vertical = tf_buffer_->lookupTransform(msg->header.frame_id, known_vertical_, + msg->header.stamp, transform_timeout_); } catch (const tf2::TransformException& e) { - NODELET_WARN_THROTTLE(5, "can't snap: %s", e.what()); + NODELET_WARN_THROTTLE(5, "can't retrieve known vertical: %s", e.what()); } } } @@ -201,9 +207,9 @@ class ArucoDetect : public nodelet::Nodelet { if (estimate_poses_) { fillPose(marker.pose, rvecs[i], tvecs[i]); - // snap orientation (if enabled and snap frame available) - if (!known_tilt_.empty() && !snap_to.header.frame_id.empty()) { - snapOrientation(marker.pose.orientation, 
snap_to.transform.rotation, auto_flip_); + // apply known vertical (if enabled and vertical frame available) + if (!known_vertical_.empty() && !vertical.header.frame_id.empty()) { + applyVertical(marker.pose.orientation, vertical.transform.rotation, false, auto_flip_); } if (send_tf_) { @@ -259,8 +265,7 @@ class ArucoDetect : public nodelet::Nodelet { cv::aruco::drawDetectedMarkers(debug, corners, ids); // draw markers if (estimate_poses_) for (unsigned int i = 0; i < ids.size(); i++) - cv::aruco::drawAxis(debug, camera_matrix_, dist_coeffs_, - rvecs[i], tvecs[i], getMarkerLength(ids[i])); + _drawAxis(debug, camera_matrix_, dist_coeffs_, rvecs[i], tvecs[i], getMarkerLength(ids[i])); cv_bridge::CvImage out_msg; out_msg.header.frame_id = msg->header.frame_id; @@ -395,7 +400,13 @@ class ArucoDetect : public nodelet::Nodelet { map_markers_ids_.clear(); for (auto const& marker : msg.markers) { map_markers_ids_.insert(marker.id); + if (use_map_markers_) { + if (length_override_.find(marker.id) == length_override_.end()) { + length_override_[marker.id] = marker.length; + } + } } + waiting_for_map_ = false; } void paramCallback(aruco_pose::DetectorConfig &config, uint32_t level) diff --git a/aruco_pose/src/aruco_map.cpp b/aruco_pose/src/aruco_map.cpp index 41250be8a..e4292c0e9 100644 --- a/aruco_pose/src/aruco_map.cpp +++ b/aruco_pose/src/aruco_map.cpp @@ -81,9 +81,9 @@ class ArucoMap : public nodelet::Nodelet { bool enabled_ = true; std::string type_; visualization_msgs::MarkerArray vis_array_; - std::string known_tilt_, map_, markers_frame_, markers_parent_frame_; + std::string known_vertical_, map_, markers_frame_, markers_parent_frame_; int image_width_, image_height_, image_margin_; - bool auto_flip_, image_axis_; + bool flip_vertical_, auto_flip_, image_axis_, put_markers_count_to_covariance_; public: virtual void onInit() @@ -104,12 +104,14 @@ class ArucoMap : public nodelet::Nodelet { type_ = nh_priv_.param("type", "map"); transform_.child_frame_id = 
nh_priv_.param("frame_id", "aruco_map"); - known_tilt_ = nh_priv_.param("known_tilt", ""); + known_vertical_ = nh_priv_.param("known_vertical", nh_priv_.param("known_tilt", std::string(""))); // known_tilt is an old name + flip_vertical_ = nh_priv_.param("flip_vertical", false); auto_flip_ = nh_priv_.param("auto_flip", false); image_width_ = nh_priv_.param("image_width" , 2000); image_height_ = nh_priv_.param("image_height", 2000); image_margin_ = nh_priv_.param("image_margin", 200); image_axis_ = nh_priv_.param("image_axis", true); + put_markers_count_to_covariance_ = nh_priv_.param("put_markers_count_to_covariance", false); markers_parent_frame_ = nh_priv_.param("markers/frame_id", transform_.child_frame_id); markers_frame_ = nh_priv_.param("markers/child_frame_id_prefix", ""); @@ -177,7 +179,21 @@ class ArucoMap : public nodelet::Nodelet { corners.push_back(marker_corners); } - if (known_tilt_.empty()) { + if (put_markers_count_to_covariance_) { + // HACK: pass markers count using covariance field + int valid_markers = 0; + for (auto const &marker : markers->markers) { + for (auto const &board_marker : board_->ids) { + if (board_marker == marker.id) { + valid_markers++; + break; + } + } + } + pose_.pose.covariance[0] = valid_markers; + } + + if (known_vertical_.empty()) { // simple estimation valid = cv::aruco::estimatePoseBoard(corners, ids, board_, camera_matrix_, dist_coeffs_, rvec, tvec, false); @@ -191,7 +207,7 @@ class ArucoMap : public nodelet::Nodelet { } else { Mat obj_points, img_points; - // estimation with "snapping" + // estimation with known vertical cv::aruco::getBoardObjectAndImagePoints(board_, corners, ids, obj_points, img_points); if (obj_points.empty()) goto publish_debug; @@ -203,11 +219,11 @@ class ArucoMap : public nodelet::Nodelet { fillTransform(transform_.transform, rvec, tvec); try { - geometry_msgs::TransformStamped snap_to = tf_buffer_.lookupTransform(markers->header.frame_id, - known_tilt_, markers->header.stamp, 
ros::Duration(0.02)); - snapOrientation(transform_.transform.rotation, snap_to.transform.rotation, auto_flip_); + geometry_msgs::TransformStamped vertical = tf_buffer_.lookupTransform(markers->header.frame_id, + known_vertical_, markers->header.stamp, ros::Duration(0.02)); + applyVertical(transform_.transform.rotation, vertical.transform.rotation, flip_vertical_, auto_flip_); } catch (const tf2::TransformException& e) { - NODELET_WARN_THROTTLE(1, "can't snap: %s", e.what()); + NODELET_WARN_THROTTLE(1, "can't retrieve known vertical: %s", e.what()); } geometry_msgs::TransformStamped shift; @@ -503,7 +519,7 @@ class ArucoMap : public nodelet::Nodelet { vis_marker.pose.position.x = x; vis_marker.pose.position.y = y; vis_marker.pose.position.z = z; - tf::quaternionTFToMsg(q, marker.pose.orientation); + tf::quaternionTFToMsg(q, vis_marker.pose.orientation); vis_marker.frame_locked = true; vis_array_.markers.push_back(vis_marker); diff --git a/aruco_pose/src/utils.h b/aruco_pose/src/utils.h index b7ae04321..cef041bfd 100644 --- a/aruco_pose/src/utils.h +++ b/aruco_pose/src/utils.h @@ -106,26 +106,25 @@ inline bool isFlipped(tf::Quaternion& q) return (abs(pitch) > M_PI / 2) || (abs(roll) > M_PI / 2); } -/* Set roll and pitch from "from" to "to", keeping yaw */ -inline void snapOrientation(geometry_msgs::Quaternion& to, const geometry_msgs::Quaternion& from, bool auto_flip = false) +/* Apply a vertical to an orientation */ +inline void applyVertical(geometry_msgs::Quaternion& orientation, const geometry_msgs::Quaternion& vertical, + bool flip_vertical = false, bool auto_flip = false) // editorconfig-checker-disable-line { - tf::Quaternion _from, _to; - tf::quaternionMsgToTF(from, _from); - tf::quaternionMsgToTF(to, _to); - - if (auto_flip) { - if (!isFlipped(_from)) { - static const tf::Quaternion flip = tf::createQuaternionFromRPY(M_PI, 0, 0); - _from *= flip; // flip "from" - } + tf::Quaternion _vertical, _orientation; + tf::quaternionMsgToTF(vertical, _vertical); + 
tf::quaternionMsgToTF(orientation, _orientation); + + if (flip_vertical || (auto_flip && !isFlipped(_orientation))) { + static const tf::Quaternion flip = tf::createQuaternionFromRPY(M_PI, 0, 0); + _vertical *= flip; // flip vertical } - auto diff = tf::Matrix3x3(_to).transposeTimes(tf::Matrix3x3(_from)); + auto diff = tf::Matrix3x3(_orientation).transposeTimes(tf::Matrix3x3(_vertical)); double _, yaw; diff.getRPY(_, _, yaw); auto q = tf::createQuaternionFromRPY(0, 0, -yaw); - _from = _from * q; // set yaw from "to" to "from" - tf::quaternionTFToMsg(_from, to); // set "from" to "to" + _vertical = _vertical * q; // set yaw from orientation to vertical + tf::quaternionTFToMsg(_vertical, orientation); // set vertical to orientation } inline void transformToPose(const geometry_msgs::Transform& transform, geometry_msgs::Pose& pose) diff --git a/aruco_pose/test/basic.py b/aruco_pose/test/basic.py index 94203ef9d..171f70902 100755 --- a/aruco_pose/test/basic.py +++ b/aruco_pose/test/basic.py @@ -6,7 +6,7 @@ from geometry_msgs.msg import PoseWithCovarianceStamped from sensor_msgs.msg import Image from aruco_pose.msg import MarkerArray -from visualization_msgs.msg import MarkerArray as VisMarkerArray +from visualization_msgs.msg import MarkerArray as VisMarkerArray, Marker as VisMarker @pytest.fixture @@ -199,6 +199,36 @@ def test_map_markers(node): def test_map_visualization(node): vis = rospy.wait_for_message('aruco_map/visualization', VisMarkerArray, timeout=5) + assert len(vis.markers) == 7 + assert vis.markers[0].header.frame_id == 'aruco_map' + assert vis.markers[0].type == VisMarker.CUBE + assert vis.markers[0].action == VisMarker.ADD + assert vis.markers[0].pose.position.x == 0 + assert vis.markers[0].pose.position.y == 0 + assert vis.markers[0].pose.position.z == 0 + assert vis.markers[0].pose.orientation.x == 0 + assert vis.markers[0].pose.orientation.y == 0 + assert vis.markers[0].pose.orientation.z == 0 + assert vis.markers[0].pose.orientation.w == 1 + assert 
vis.markers[0].scale.x == approx(0.33) + assert vis.markers[0].scale.y == approx(0.33) + assert vis.markers[0].scale.z == approx(0.001) + assert vis.markers[1].pose.position.x == 1 + assert vis.markers[1].pose.position.y == 0 + assert vis.markers[1].pose.position.z == 0 + assert vis.markers[1].pose.orientation.x == 0 + assert vis.markers[1].pose.orientation.y == 0 + assert vis.markers[1].pose.orientation.z == 0 + assert vis.markers[1].pose.orientation.w == 1 + # non-zero yaw marker: + assert vis.markers[4].scale.x == approx(0.5) + assert vis.markers[4].pose.position.x == approx(0.5) + assert vis.markers[4].pose.position.y == 2 + assert vis.markers[4].pose.position.z == 0 + assert vis.markers[4].pose.orientation.x == 0 + assert vis.markers[4].pose.orientation.y == 0 + assert vis.markers[4].pose.orientation.z == approx(0.5646424733950354) + assert vis.markers[4].pose.orientation.w == approx(0.8253356149096783) def test_map_debug(node): img = rospy.wait_for_message('aruco_map/debug', Image, timeout=5) diff --git a/builder/assets/noetic-rosdep-clover.yaml b/builder/assets/noetic-rosdep-clover.yaml index a595b87e4..93aa3508b 100644 --- a/builder/assets/noetic-rosdep-clover.yaml +++ b/builder/assets/noetic-rosdep-clover.yaml @@ -16,3 +16,723 @@ web_video_server: ws281x: debian: buster: [ros-noetic-ws281x] +catkin: + debian: + buster: [ros-noetic-catkin] +genmsg: + debian: + buster: [ros-noetic-genmsg] +gencpp: + debian: + buster: [ros-noetic-gencpp] +geneus: + debian: + buster: [ros-noetic-geneus] +genlisp: + debian: + buster: [ros-noetic-genlisp] +gennodejs: + debian: + buster: [ros-noetic-gennodejs] +genpy: + debian: + buster: [ros-noetic-genpy] +bond_core: + debian: + buster: [ros-noetic-bond-core] +cmake_modules: + debian: + buster: [ros-noetic-cmake-modules] +class_loader: + debian: + buster: [ros-noetic-class-loader] +common_msgs: + debian: + buster: [ros-noetic-common-msgs] +common_tutorials: + debian: + buster: [ros-noetic-common-tutorials] +cpp_common: + debian: 
+ buster: [ros-noetic-cpp-common] +desktop: + debian: + buster: [ros-noetic-desktop] +diagnostics: + debian: + buster: [ros-noetic-diagnostics] +executive_smach: + debian: + buster: [ros-noetic-executive-smach] +geometry: + debian: + buster: [ros-noetic-geometry] +geometry_tutorials: + debian: + buster: [ros-noetic-geometry-tutorials] +gl_dependency: + debian: + buster: [ros-noetic-gl-dependency] +image_common: + debian: + buster: [ros-noetic-image-common] +image_pipeline: + debian: + buster: [ros-noetic-image-pipeline] +image_transport_plugins: + debian: + buster: [ros-noetic-image-transport-plugins] +laser_pipeline: + debian: + buster: [ros-noetic-laser-pipeline] +mavlink: + debian: + buster: [ros-noetic-mavlink] +media_export: + debian: + buster: [ros-noetic-media-export] +message_generation: + debian: + buster: [ros-noetic-message-generation] +message_runtime: + debian: + buster: [ros-noetic-message-runtime] +mk: + debian: + buster: [ros-noetic-mk] +nodelet_core: + debian: + buster: [ros-noetic-nodelet-core] +orocos_kdl: + debian: + buster: [ros-noetic-orocos-kdl] +perception: + debian: + buster: [ros-noetic-perception] +perception_pcl: + debian: + buster: [ros-noetic-perception-pcl] +python_orocos_kdl: + debian: + buster: [ros-noetic-python-orocos-kdl] +qt_dotgraph: + debian: + buster: [ros-noetic-qt-dotgraph] +qt_gui: + debian: + buster: [ros-noetic-qt-gui] +qt_gui_py_common: + debian: + buster: [ros-noetic-qt-gui-py-common] +qwt_dependency: + debian: + buster: [ros-noetic-qwt-dependency] +robot: + debian: + buster: [ros-noetic-robot] +ros: + debian: + buster: [ros-noetic-ros] +ros_base: + debian: + buster: [ros-noetic-ros-base] +ros_comm: + debian: + buster: [ros-noetic-ros-comm] +ros_core: + debian: + buster: [ros-noetic-ros-core] +ros_environment: + debian: + buster: [ros-noetic-ros-environment] +ros_tutorials: + debian: + buster: [ros-noetic-ros-tutorials] +rosapi: + debian: + buster: [ros-noetic-rosapi] +rosbag_migration_rule: + debian: + buster: 
[ros-noetic-rosbag-migration-rule] +rosbash: + debian: + buster: [ros-noetic-rosbash] +rosboost_cfg: + debian: + buster: [ros-noetic-rosboost-cfg] +rosbridge_server: + debian: + buster: [ros-noetic-rosbridge-server] +rosbridge_suite: + debian: + buster: [ros-noetic-rosbridge-suite] +rosbuild: + debian: + buster: [ros-noetic-rosbuild] +rosclean: + debian: + buster: [ros-noetic-rosclean] +roscpp_core: + debian: + buster: [ros-noetic-roscpp-core] +roscpp_traits: + debian: + buster: [ros-noetic-roscpp-traits] +roscreate: + debian: + buster: [ros-noetic-roscreate] +rosgraph: + debian: + buster: [ros-noetic-rosgraph] +roslang: + debian: + buster: [ros-noetic-roslang] +roslint: + debian: + buster: [ros-noetic-roslint] +roslisp: + debian: + buster: [ros-noetic-roslisp] +rosmake: + debian: + buster: [ros-noetic-rosmake] +rosmaster: + debian: + buster: [ros-noetic-rosmaster] +rospack: + debian: + buster: [ros-noetic-rospack] +roslib: + debian: + buster: [ros-noetic-roslib] +rosparam: + debian: + buster: [ros-noetic-rosparam] +rospy: + debian: + buster: [ros-noetic-rospy] +rosserial: + debian: + buster: [ros-noetic-rosserial] +rosserial_msgs: + debian: + buster: [ros-noetic-rosserial-msgs] +rosserial_python: + debian: + buster: [ros-noetic-rosserial-python] +rosservice: + debian: + buster: [ros-noetic-rosservice] +rostime: + debian: + buster: [ros-noetic-rostime] +roscpp_serialization: + debian: + buster: [ros-noetic-roscpp-serialization] +python_qt_binding: + debian: + buster: [ros-noetic-python-qt-binding] +roslaunch: + debian: + buster: [ros-noetic-roslaunch] +rosunit: + debian: + buster: [ros-noetic-rosunit] +angles: + debian: + buster: [ros-noetic-angles] +libmavconn: + debian: + buster: [ros-noetic-libmavconn] +rosconsole: + debian: + buster: [ros-noetic-rosconsole] +pluginlib: + debian: + buster: [ros-noetic-pluginlib] +qt_gui_cpp: + debian: + buster: [ros-noetic-qt-gui-cpp] +resource_retriever: + debian: + buster: [ros-noetic-resource-retriever] +rosconsole_bridge: + 
debian: + buster: [ros-noetic-rosconsole-bridge] +roslz4: + debian: + buster: [ros-noetic-roslz4] +rosserial_client: + debian: + buster: [ros-noetic-rosserial-client] +rostest: + debian: + buster: [ros-noetic-rostest] +rqt_action: + debian: + buster: [ros-noetic-rqt-action] +rqt_bag: + debian: + buster: [ros-noetic-rqt-bag] +rqt_bag_plugins: + debian: + buster: [ros-noetic-rqt-bag-plugins] +rqt_common_plugins: + debian: + buster: [ros-noetic-rqt-common-plugins] +rqt_console: + debian: + buster: [ros-noetic-rqt-console] +rqt_dep: + debian: + buster: [ros-noetic-rqt-dep] +rqt_graph: + debian: + buster: [ros-noetic-rqt-graph] +rqt_gui: + debian: + buster: [ros-noetic-rqt-gui] +rqt_logger_level: + debian: + buster: [ros-noetic-rqt-logger-level] +rqt_moveit: + debian: + buster: [ros-noetic-rqt-moveit] +rqt_msg: + debian: + buster: [ros-noetic-rqt-msg] +rqt_nav_view: + debian: + buster: [ros-noetic-rqt-nav-view] +rqt_plot: + debian: + buster: [ros-noetic-rqt-plot] +rqt_pose_view: + debian: + buster: [ros-noetic-rqt-pose-view] +rqt_publisher: + debian: + buster: [ros-noetic-rqt-publisher] +rqt_py_console: + debian: + buster: [ros-noetic-rqt-py-console] +rqt_reconfigure: + debian: + buster: [ros-noetic-rqt-reconfigure] +rqt_robot_dashboard: + debian: + buster: [ros-noetic-rqt-robot-dashboard] +rqt_robot_monitor: + debian: + buster: [ros-noetic-rqt-robot-monitor] +rqt_robot_plugins: + debian: + buster: [ros-noetic-rqt-robot-plugins] +rqt_robot_steering: + debian: + buster: [ros-noetic-rqt-robot-steering] +rqt_runtime_monitor: + debian: + buster: [ros-noetic-rqt-runtime-monitor] +rqt_service_caller: + debian: + buster: [ros-noetic-rqt-service-caller] +rqt_shell: + debian: + buster: [ros-noetic-rqt-shell] +rqt_srv: + debian: + buster: [ros-noetic-rqt-srv] +rqt_tf_tree: + debian: + buster: [ros-noetic-rqt-tf-tree] +rqt_top: + debian: + buster: [ros-noetic-rqt-top] +rqt_topic: + debian: + buster: [ros-noetic-rqt-topic] +rqt_web: + debian: + buster: [ros-noetic-rqt-web] +smach: 
+ debian: + buster: [ros-noetic-smach] +smclib: + debian: + buster: [ros-noetic-smclib] +std_msgs: + debian: + buster: [ros-noetic-std-msgs] +actionlib_msgs: + debian: + buster: [ros-noetic-actionlib-msgs] +bond: + debian: + buster: [ros-noetic-bond] +diagnostic_msgs: + debian: + buster: [ros-noetic-diagnostic-msgs] +geometry_msgs: + debian: + buster: [ros-noetic-geometry-msgs] +eigen_conversions: + debian: + buster: [ros-noetic-eigen-conversions] +kdl_conversions: + debian: + buster: [ros-noetic-kdl-conversions] +nav_msgs: + debian: + buster: [ros-noetic-nav-msgs] +rosbridge_msgs: + debian: + buster: [ros-noetic-rosbridge-msgs] +rosgraph_msgs: + debian: + buster: [ros-noetic-rosgraph-msgs] +rosmsg: + debian: + buster: [ros-noetic-rosmsg] +rqt_py_common: + debian: + buster: [ros-noetic-rqt-py-common] +shape_msgs: + debian: + buster: [ros-noetic-shape-msgs] +smach_msgs: + debian: + buster: [ros-noetic-smach-msgs] +std_srvs: + debian: + buster: [ros-noetic-std-srvs] +tf2_msgs: + debian: + buster: [ros-noetic-tf2-msgs] +tf2: + debian: + buster: [ros-noetic-tf2] +tf2_eigen: + debian: + buster: [ros-noetic-tf2-eigen] +trajectory_msgs: + debian: + buster: [ros-noetic-trajectory-msgs] +control_msgs: + debian: + buster: [ros-noetic-control-msgs] +urdf_parser_plugin: + debian: + buster: [ros-noetic-urdf-parser-plugin] +urdfdom_py: + debian: + buster: [ros-noetic-urdfdom-py] +uuid_msgs: + debian: + buster: [ros-noetic-uuid-msgs] +geographic_msgs: + debian: + buster: [ros-noetic-geographic-msgs] +vision_opencv: + debian: + buster: [ros-noetic-vision-opencv] +visualization_msgs: + debian: + buster: [ros-noetic-visualization-msgs] +visualization_tutorials: + debian: + buster: [ros-noetic-visualization-tutorials] +viz: + debian: + buster: [ros-noetic-viz] +webkit_dependency: + debian: + buster: [ros-noetic-webkit-dependency] +xmlrpcpp: + debian: + buster: [ros-noetic-xmlrpcpp] +roscpp: + debian: + buster: [ros-noetic-roscpp] +bondcpp: + debian: + buster: [ros-noetic-bondcpp] 
+bondpy: + debian: + buster: [ros-noetic-bondpy] +nodelet: + debian: + buster: [ros-noetic-nodelet] +nodelet_tutorial_math: + debian: + buster: [ros-noetic-nodelet-tutorial-math] +pluginlib_tutorials: + debian: + buster: [ros-noetic-pluginlib-tutorials] +roscpp_tutorials: + debian: + buster: [ros-noetic-roscpp-tutorials] +rosout: + debian: + buster: [ros-noetic-rosout] +camera_calibration: + debian: + buster: [ros-noetic-camera-calibration] +diagnostic_aggregator: + debian: + buster: [ros-noetic-diagnostic-aggregator] +diagnostic_updater: + debian: + buster: [ros-noetic-diagnostic-updater] +diagnostic_common_diagnostics: + debian: + buster: [ros-noetic-diagnostic-common-diagnostics] +dynamic_reconfigure: + debian: + buster: [ros-noetic-dynamic-reconfigure] +filters: + debian: + buster: [ros-noetic-filters] +joint_state_publisher: + debian: + buster: [ros-noetic-joint-state-publisher] +message_filters: + debian: + buster: [ros-noetic-message-filters] +rosauth: + debian: + buster: [ros-noetic-rosauth] +rosbag_storage: + debian: + buster: [ros-noetic-rosbag-storage] +rosnode: + debian: + buster: [ros-noetic-rosnode] +rospy_tutorials: + debian: + buster: [ros-noetic-rospy-tutorials] +rosshow: + debian: + buster: [ros-noetic-rosshow] +rostopic: + debian: + buster: [ros-noetic-rostopic] +rqt_gui_cpp: + debian: + buster: [ros-noetic-rqt-gui-cpp] +rqt_gui_py: + debian: + buster: [ros-noetic-rqt-gui-py] +self_test: + debian: + buster: [ros-noetic-self-test] +smach_ros: + debian: + buster: [ros-noetic-smach-ros] +tf2_py: + debian: + buster: [ros-noetic-tf2-py] +topic_tools: + debian: + buster: [ros-noetic-topic-tools] +rosbag: + debian: + buster: [ros-noetic-rosbag] +actionlib: + debian: + buster: [ros-noetic-actionlib] +actionlib_tutorials: + debian: + buster: [ros-noetic-actionlib-tutorials] +diagnostic_analysis: + debian: + buster: [ros-noetic-diagnostic-analysis] +nodelet_topic_tools: + debian: + buster: [ros-noetic-nodelet-topic-tools] +roswtf: + debian: + buster: 
[ros-noetic-roswtf] +rqt_launch: + debian: + buster: [ros-noetic-rqt-launch] +sensor_msgs: + debian: + buster: [ros-noetic-sensor-msgs] +camera_calibration_parsers: + debian: + buster: [ros-noetic-camera-calibration-parsers] +cv_bridge: + debian: + buster: [ros-noetic-cv-bridge] +image_geometry: + debian: + buster: [ros-noetic-image-geometry] +image_transport: + debian: + buster: [ros-noetic-image-transport] +camera_info_manager: + debian: + buster: [ros-noetic-camera-info-manager] +compressed_depth_image_transport: + debian: + buster: [ros-noetic-compressed-depth-image-transport] +compressed_image_transport: + debian: + buster: [ros-noetic-compressed-image-transport] +cv_camera: + debian: + buster: [ros-noetic-cv-camera] +image_proc: + debian: + buster: [ros-noetic-image-proc] +image_publisher: + debian: + buster: [ros-noetic-image-publisher] +map_msgs: + debian: + buster: [ros-noetic-map-msgs] +mavros_msgs: + debian: + buster: [ros-noetic-mavros-msgs] +pcl_msgs: + debian: + buster: [ros-noetic-pcl-msgs] +pcl_conversions: + debian: + buster: [ros-noetic-pcl-conversions] +polled_camera: + debian: + buster: [ros-noetic-polled-camera] +rqt_image_view: + debian: + buster: [ros-noetic-rqt-image-view] +stereo_msgs: + debian: + buster: [ros-noetic-stereo-msgs] +image_view: + debian: + buster: [ros-noetic-image-view] +rosbridge_library: + debian: + buster: [ros-noetic-rosbridge-library] +stereo_image_proc: + debian: + buster: [ros-noetic-stereo-image-proc] +tf2_ros: + debian: + buster: [ros-noetic-tf2-ros] +depth_image_proc: + debian: + buster: [ros-noetic-depth-image-proc] +mavros: + debian: + buster: [ros-noetic-mavros] +tf: + debian: + buster: [ros-noetic-tf] +interactive_markers: + debian: + buster: [ros-noetic-interactive-markers] +interactive_marker_tutorials: + debian: + buster: [ros-noetic-interactive-marker-tutorials] +laser_geometry: + debian: + buster: [ros-noetic-laser-geometry] +laser_assembler: + debian: + buster: [ros-noetic-laser-assembler] +laser_filters: 
+ debian: + buster: [ros-noetic-laser-filters] +pcl_ros: + debian: + buster: [ros-noetic-pcl-ros] +tf2_geometry_msgs: + debian: + buster: [ros-noetic-tf2-geometry-msgs] +image_rotate: + debian: + buster: [ros-noetic-image-rotate] +tf2_kdl: + debian: + buster: [ros-noetic-tf2-kdl] +tf2_web_republisher: + debian: + buster: [ros-noetic-tf2-web-republisher] +tf_conversions: + debian: + buster: [ros-noetic-tf-conversions] +theora_image_transport: + debian: + buster: [ros-noetic-theora-image-transport] +turtlesim: + debian: + buster: [ros-noetic-turtlesim] +turtle_actionlib: + debian: + buster: [ros-noetic-turtle-actionlib] +turtle_tf: + debian: + buster: [ros-noetic-turtle-tf] +turtle_tf2: + debian: + buster: [ros-noetic-turtle-tf2] +urdf: + debian: + buster: [ros-noetic-urdf] +kdl_parser: + debian: + buster: [ros-noetic-kdl-parser] +kdl_parser_py: + debian: + buster: [ros-noetic-kdl-parser-py] +mavros_extras: + debian: + buster: [ros-noetic-mavros-extras] +robot_state_publisher: + debian: + buster: [ros-noetic-robot-state-publisher] +rviz: + debian: + buster: [ros-noetic-rviz] +librviz_tutorial: + debian: + buster: [ros-noetic-librviz-tutorial] +rqt_rviz: + debian: + buster: [ros-noetic-rqt-rviz] +rviz_plugin_tutorials: + debian: + buster: [ros-noetic-rviz-plugin-tutorials] +rviz_python_tutorial: + debian: + buster: [ros-noetic-rviz-python-tutorial] +urdf_tutorial: + debian: + buster: [ros-noetic-urdf-tutorial] +usb_cam: + debian: + buster: [ros-noetic-usb-cam] +visualization_marker_tutorials: + debian: + buster: [ros-noetic-visualization-marker-tutorials] +vl53l1x: + debian: + buster: [ros-noetic-vl53l1x] +xacro: + debian: + buster: [ros-noetic-xacro] +ddynamic_reconfigure: + debian: + buster: [ros-noetic-ddynamic-reconfigure] +librealsense2: + debian: + buster: [ros-noetic-librealsense2] +realsense2_camera: + debian: + buster: [ros-noetic-realsense2-camera] +realsense2_description: + debian: + buster: [ros-noetic-realsense2-description] diff --git 
a/builder/image-ros.sh b/builder/image-ros.sh index afa369b50..8acd9e170 100755 --- a/builder/image-ros.sh +++ b/builder/image-ros.sh @@ -49,7 +49,7 @@ echo_stamp() { my_travis_retry() { local result=0 local count=1 - local max_count=50 + local max_count=5 while [ $count -le $max_count ]; do [ $result -ne 0 ] && { echo -e "\nThe command \"$@\" failed. Retrying, $count of $max_count.\n" >&2 @@ -125,6 +125,8 @@ cd /home/pi/catkin_ws/src/clover builder/assets/install_gitbook.sh gitbook install gitbook build +# replace assets copy to assets symlink to save space +rm -rf _book/assets && ln -s ../docs/assets _book/assets touch node_modules/CATKIN_IGNORE docs/CATKIN_IGNORE _book/CATKIN_IGNORE clover/www/CATKIN_IGNORE apps/CATKIN_IGNORE # ignore documentation files by catkin echo_stamp "Installing additional ROS packages" @@ -137,7 +139,10 @@ my_travis_retry apt-get install -y --no-install-recommends \ ros-${ROS_DISTRO}-ws281x \ ros-${ROS_DISTRO}-rosshow \ ros-${ROS_DISTRO}-cmake-modules \ - ros-${ROS_DISTRO}-image-view + ros-${ROS_DISTRO}-image-view \ + ros-${ROS_DISTRO}-image-geometry \ + ros-${ROS_DISTRO}-nodelet-topic-tools \ + ros-${ROS_DISTRO}-stereo-msgs # TODO move GeographicLib datasets to Mavros debian package echo_stamp "Install GeographicLib datasets (needed for mavros)" \ @@ -151,6 +156,9 @@ catkin_make run_tests #&& catkin_test_results echo_stamp "Change permissions for catkin_ws" chown -Rf pi:pi /home/pi/catkin_ws +echo_stamp "Update www" +sudo -u pi sh -c ". 
devel/setup.sh && rosrun clover www" + echo_stamp "Make \$HOME/examples symlink" ln -s "$(catkin_find clover examples --first-only)" /home/pi chown -Rf pi:pi /home/pi/examples diff --git a/builder/image-validate.sh b/builder/image-validate.sh index c4c8a45b6..cf11647cd 100755 --- a/builder/image-validate.sh +++ b/builder/image-validate.sh @@ -37,3 +37,7 @@ apt-cache show openvpn echo "Move /etc/ld.so.preload back to its original position" mv /etc/ld.so.preload.disabled-for-build /etc/ld.so.preload + +echo "Largest packages installed" +sudo -E sh -c 'apt-get install -y debian-goodies' +dpigs -H -n 100 diff --git a/builder/test/tests.py b/builder/test/tests.py index 2af198529..2af206016 100755 --- a/builder/test/tests.py +++ b/builder/test/tests.py @@ -2,6 +2,7 @@ # validate all required modules installed +import os import rospy from geometry_msgs.msg import PoseStamped from sensor_msgs.msg import Range, BatteryState @@ -22,6 +23,7 @@ from led_msgs.srv import SetLEDs from led_msgs.msg import LEDStateArray, LEDState from aruco_pose.msg import Marker, MarkerArray, Point2D +from clover import long_callback import dynamic_reconfigure.client @@ -31,9 +33,15 @@ import VL53L1X import pymavlink from pymavlink import mavutil -import rpi_ws281x -import pigpio +from image_geometry import PinholeCameraModel, StereoCameraModel # from espeak import espeak from pyzbar import pyzbar +import docopt +import geopy +import flask print(cv2.getBuildInformation()) + +if not os.environ.get('VM'): + import rpi_ws281x + import pigpio diff --git a/builder/test/tests.sh b/builder/test/tests.sh index 8eaf9decf..1d0ccc18a 100755 --- a/builder/test/tests.sh +++ b/builder/test/tests.sh @@ -6,16 +6,10 @@ set -ex # validate required software is installed -python --version python2 --version python3 --version -ipython --version ipython3 --version -# ptvsd does not have a stand-alone binary -python -m ptvsd --version -python3 -m ptvsd --version - node -v npm -v @@ -25,42 +19,77 @@ lsof -v git --version 
vim --version pip --version -pip2 --version pip3 --version tcpdump --version monkey --version -pigpiod -v -i2cdetect -V -butterfly -h # espeak --version -mjpg_streamer --version systemctl --version +if [ -z $VM ]; then + # rpi only software + python --version + ipython --version + pip2 --version + # `python` is python2 for now + [[ $(python -c 'import sys;print(sys.version_info.major)') == "2" ]] + + # ptvsd does not have a stand-alone binary + python -m ptvsd --version + python3 -m ptvsd --version + + pigpiod -v + i2cdetect -V + butterfly -h + mjpg_streamer --version +fi + # ros stuff roscore -h rosversion clover rosversion aruco_pose -rosversion vl53l1x rosversion mavros rosversion mavros_extras rosversion ws281x rosversion led_msgs rosversion dynamic_reconfigure rosversion tf2_web_republisher -rosversion compressed_image_transport -rosversion rosbridge_suite -rosversion rosserial +rosversion rosbridge_server rosversion usb_cam rosversion cv_camera rosversion web_video_server -rosversion rosshow rosversion nodelet rosversion image_view -# validate some versions -[[ $(rosversion cv_camera) == "0.5.1" ]] # patched version with init fix [[ $(rosversion ws281x) == "0.0.13" ]] +if [ -z $VM ]; then + rosversion compressed_image_transport + rosversion rosshow + rosversion vl53l1x + rosversion rosserial + [[ $(rosversion cv_camera) == "0.5.1" ]] # patched version with init fix +fi + +# determine user home directory +[ $VM ] && H="/home/clover" || H="/home/pi" + +# test basic ros tool work +source $H/catkin_ws/devel/setup.bash +roscd +rosrun +rosmsg +rossrv +rosnode || [ $? -eq 64 ] # usage output code is 64 +rostopic || [ $? -eq 64 ] +rosservice || [ $? 
-eq 64 ] +rosparam +roslaunch -h + # validate examples are present -[[ $(ls /home/pi/examples/*) ]] +[[ $(ls $H/examples/*) ]] + +# validate web tools present +[ -d $H/.ros/www ] +[ "$(readlink $H/.ros/www/clover)" = "$H/catkin_ws/src/clover/clover/www" ] +[ "$(readlink $H/.ros/www/clover_blocks)" = "$H/catkin_ws/src/clover/clover_blocks/www" ] diff --git a/check_assets_size.py b/check_assets_size.py index ae52b9678..c6055b989 100755 --- a/check_assets_size.py +++ b/check_assets_size.py @@ -18,7 +18,7 @@ def human_size(num, suffix='B'): 'qgc-battery.png', 'qgc-radio.png', 'qgc-cal-acc.png', 'qgc-esc.png', 'qgc-cal-compass.png', \ 'qgc.png', 'qgc-parameters.png', 'clever4-front-white-large.png', 'qgc-modes.png', \ 'qgc-requires-setup.png', 'clever4-front-white.png', 'clever4-kit-white.png', '26_1.png', 'battery_holder.stl', \ - 'camera_case.stl', 'camera_mount.stl' + 'camera_case.stl', 'camera_mount.stl', 'grip_right.stl', 'grip_left.stl' code = 0 diff --git a/clover/CMakeLists.txt b/clover/CMakeLists.txt index abfaa9c33..7eca3f4c1 100644 --- a/clover/CMakeLists.txt +++ b/clover/CMakeLists.txt @@ -53,7 +53,7 @@ find_package(OpenCV ${_opencv_version} REQUIRED ## Uncomment this if the package has a setup.py. This macro ensures ## modules and global scripts declared therein get installed ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html -# catkin_python_setup() +catkin_python_setup() ################################################ ## Declare ROS messages, services and actions ## @@ -80,11 +80,10 @@ find_package(OpenCV ${_opencv_version} REQUIRED ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 
## Generate messages in the 'msg' folder -# add_message_files( -# FILES -# Message1.msg -# Message2.msg -# ) +add_message_files( + FILES + State.msg +) ## Generate services in the 'srv' folder add_service_files( @@ -92,6 +91,9 @@ add_service_files( GetTelemetry.srv Navigate.srv NavigateGlobal.srv + SetAltitude.srv + SetYaw.srv + SetYawRate.srv SetPosition.srv SetVelocity.srv SetAttitude.srv @@ -306,4 +308,5 @@ endif() if (CATKIN_ENABLE_TESTING) find_package(rostest REQUIRED) add_rostest(test/basic.test) + add_rostest(test/offboard.test) endif() diff --git a/clover/examples/camera.py b/clover/examples/camera.py new file mode 100644 index 000000000..335321467 --- /dev/null +++ b/clover/examples/camera.py @@ -0,0 +1,64 @@ +# Information: https://clover.coex.tech/camera + +# Example on basic working with the camera and image processing: + +# - cuts out a central square from the camera image; +# - publishes this cropped image to the topic `/cv/center`; +# - computes the average color of it; +# - prints its name to the console. 
+ +import rospy +import cv2 +from sensor_msgs.msg import Image +from cv_bridge import CvBridge +from clover import long_callback + +rospy.init_node('cv') +bridge = CvBridge() + +printed_color = None +center_pub = rospy.Publisher('~center', Image, queue_size=1) + +def get_color_name(h): + if h < 15: return 'red' + elif h < 30: return 'orange' + elif h < 60: return 'yellow' + elif h < 90: return 'green' + elif h < 120: return 'cyan' + elif h < 150: return 'blue' + elif h < 170: return 'magenta' + else: return 'red' + + +@long_callback +def image_callback(msg): + img = bridge.imgmsg_to_cv2(msg, 'bgr8') + + # convert to HSV to work with color hue + img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + + # cut out a central square + w = img.shape[1] + h = img.shape[0] + r = 20 + center = img_hsv[h // 2 - r:h // 2 + r, w // 2 - r:w // 2 + r] + + # compute and print the average hue + mean_hue = center[:, :, 0].mean() + color = get_color_name(mean_hue) + global printed_color + if color != printed_color: + print(color) + printed_color = color + + # publish the cropped image + center = cv2.cvtColor(center, cv2.COLOR_HSV2BGR) + center_pub.publish(bridge.cv2_to_imgmsg(center, 'bgr8')) + +# process every frame: +image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback, queue_size=1) + +# process 5 frames per second: +# image_sub = rospy.Subscriber('main_camera/image_raw_throttled', Image, image_callback, queue_size=1) + +rospy.spin() diff --git a/clover/examples/navigate_wait.py b/clover/examples/navigate_wait.py index 99637caf2..acb37f707 100644 --- a/clover/examples/navigate_wait.py +++ b/clover/examples/navigate_wait.py @@ -16,11 +16,8 @@ set_rates = rospy.ServiceProxy('set_rates', srv.SetRates) land = rospy.ServiceProxy('land', Trigger) -def navigate_wait(x=0, y=0, z=0, yaw=float('nan'), yaw_rate=0, speed=0.5, \ - frame_id='body', tolerance=0.2, auto_arm=False): - - res = navigate(x=x, y=y, z=z, yaw=yaw, yaw_rate=yaw_rate, speed=speed, \ - frame_id=frame_id, 
auto_arm=auto_arm) +def navigate_wait(x=0, y=0, z=0, yaw=math.nan, speed=0.5, frame_id='body', tolerance=0.2, auto_arm=False): + res = navigate(x=x, y=y, z=z, yaw=yaw, speed=speed, frame_id=frame_id, auto_arm=auto_arm) if not res.success: return res diff --git a/clover/examples/red_circle.py b/clover/examples/red_circle.py new file mode 100644 index 000000000..fec548e26 --- /dev/null +++ b/clover/examples/red_circle.py @@ -0,0 +1,91 @@ +# This example makes the drone find and follow the red circle. +# To test in the simulator, place 'Red Circle' model on the floor. +# More information: https://clover.coex.tech/red_circle + +# Input topic: main_camera/image_raw (camera image) +# Output topics: +# cv/mask (red color mask) +# cv/red_circle (position of the center of the red circle in 3D space) + +import rospy +import cv2 +import numpy as np +from math import nan +from sensor_msgs.msg import Image, CameraInfo +from geometry_msgs.msg import PointStamped, Point +from cv_bridge import CvBridge +from clover import long_callback, srv +import tf2_ros +import tf2_geometry_msgs +import image_geometry + +rospy.init_node('cv', disable_signals=True) # disable signals to allow interrupting with ctrl+c + +get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) +set_position = rospy.ServiceProxy('set_position', srv.SetPosition) + +bridge = CvBridge() + +tf_buffer = tf2_ros.Buffer() +tf_listener = tf2_ros.TransformListener(tf_buffer) + +mask_pub = rospy.Publisher('~mask', Image, queue_size=1) +point_pub = rospy.Publisher('~red_circle', PointStamped, queue_size=1) + +# read camera info +camera_model = image_geometry.PinholeCameraModel() +camera_model.fromCameraInfo(rospy.wait_for_message('main_camera/camera_info', CameraInfo)) + + +def img_xy_to_point(xy, dist): + xy_rect = camera_model.rectifyPoint(xy) + ray = camera_model.projectPixelTo3dRay(xy_rect) + return Point(x=ray[0] * dist, y=ray[1] * dist, z=dist) + +def get_center_of_mass(mask): + M = cv2.moments(mask) + if 
M['m00'] == 0: + return None + return M['m10'] // M['m00'], M['m01'] // M['m00'] + +follow_red_circle = False + +@long_callback +def image_callback(msg): + img = bridge.imgmsg_to_cv2(msg, 'bgr8') + img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + + # we need to use two ranges for red color + mask1 = cv2.inRange(img_hsv, (0, 150, 150), (15, 255, 255)) + mask2 = cv2.inRange(img_hsv, (160, 150, 150), (180, 255, 255)) + + # combine two masks using bitwise OR + mask = cv2.bitwise_or(mask1, mask2) + + # publish the mask + if mask_pub.get_num_connections() > 0: + mask_pub.publish(bridge.cv2_to_imgmsg(mask, 'mono8')) + + # calculate x and y of the circle + xy = get_center_of_mass(mask) + if xy is None: + return + + # calculate and publish the position of the circle in 3D space + altitude = get_telemetry('terrain').z + xy3d = img_xy_to_point(xy, altitude) + target = PointStamped(header=msg.header, point=xy3d) + point_pub.publish(target) + + if follow_red_circle: + # follow the target + setpoint = tf_buffer.transform(target, 'map', timeout=rospy.Duration(0.2)) + set_position(x=setpoint.point.x, y=setpoint.point.y, z=nan, yaw=nan, frame_id=setpoint.header.frame_id) + +# process each camera frame: +image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback, queue_size=1) + +rospy.loginfo('Hit enter to follow the red circle') +input() +follow_red_circle = True +rospy.spin() diff --git a/clover/launch/aruco.launch b/clover/launch/aruco.launch index 61328f946..0a540645d 100644 --- a/clover/launch/aruco.launch +++ b/clover/launch/aruco.launch @@ -18,8 +18,9 @@ - - + + + @@ -35,8 +36,8 @@ - - + + diff --git a/clover/launch/clover.launch b/clover/launch/clover.launch index 1efe1b89d..aa5db4b1f 100644 --- a/clover/launch/clover.launch +++ b/clover/launch/clover.launch @@ -45,13 +45,13 @@ + - - + @@ -72,6 +72,9 @@ + + + @@ -86,8 +89,4 @@ - - - - diff --git a/clover/launch/led.launch b/clover/launch/led.launch index 3e9238979..c2923ce95 100644 --- 
a/clover/launch/led.launch +++ b/clover/launch/led.launch @@ -21,7 +21,8 @@ - + + diff --git a/clover/launch/main_camera.launch b/clover/launch/main_camera.launch index 312a0f158..0b97e1dda 100644 --- a/clover/launch/main_camera.launch +++ b/clover/launch/main_camera.launch @@ -4,6 +4,9 @@ + + + @@ -43,4 +46,15 @@ + + + + + + + + + + diff --git a/clover/launch/mavros.launch b/clover/launch/mavros.launch index 8b445d62b..397731b95 100644 --- a/clover/launch/mavros.launch +++ b/clover/launch/mavros.launch @@ -77,9 +77,6 @@ covariance: 1 # cm - - - diff --git a/clover/launch/simulator.launch b/clover/launch/simulator.launch index 12475a8f1..46e810bde 100644 --- a/clover/launch/simulator.launch +++ b/clover/launch/simulator.launch @@ -1,4 +1,4 @@ - + diff --git a/clover/msg/State.msg b/clover/msg/State.msg new file mode 100644 index 000000000..038df4fce --- /dev/null +++ b/clover/msg/State.msg @@ -0,0 +1,38 @@ +uint8 MODE_NONE = 0 +uint8 MODE_NAVIGATE = 1 +uint8 MODE_NAVIGATE_GLOBAL = 2 +uint8 MODE_POSITION = 3 +uint8 MODE_VELOCITY = 4 +uint8 MODE_ATTITUDE = 5 +uint8 MODE_RATES = 6 + +uint8 YAW_MODE_YAW = 0 +uint8 YAW_MODE_YAW_RATE = 1 +uint8 YAW_MODE_YAW_TOWARDS = 2 + +# type of offboard control +uint8 mode +uint8 yaw_mode + +# targets +float32 x +float32 y +float32 z +float32 speed +float32 lat +float32 lon +float32 vx +float32 vy +float32 vz +float32 roll +float32 pitch +float32 yaw +float32 roll_rate +float32 pitch_rate +float32 yaw_rate +float32 thrust + +# frames of reference +string xy_frame_id +string z_frame_id +string yaw_frame_id diff --git a/clover/package.xml b/clover/package.xml index 1c49e2b1a..397ea808a 100644 --- a/clover/package.xml +++ b/clover/package.xml @@ -1,7 +1,7 @@ clover - 0.23.0 + 0.24.0 The Clover package Oleg Kalachev @@ -42,9 +42,9 @@ python-lxml python3-lxml dynamic_reconfigure + image_proc python-pymavlink - - + ros_pytest diff --git a/clover/requirements.txt b/clover/requirements.txt index 81d0fda62..4ca1cc994 100644 --- 
a/clover/requirements.txt +++ b/clover/requirements.txt @@ -1,5 +1,4 @@ -flask==1.1.1 -docopt==0.6.2 -geopy==1.11.0 -smbus2==0.3.0 -VL53L1X==0.0.5 +flask +geopy +smbus2 +VL53L1X diff --git a/clover/setup.py b/clover/setup.py new file mode 100644 index 000000000..afdfcd709 --- /dev/null +++ b/clover/setup.py @@ -0,0 +1,11 @@ +## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD + +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +# fetch values from package.xml +setup_args = generate_distutils_setup( + packages=['clover'], + package_dir={'': 'src'}) + +setup(**setup_args) diff --git a/clover/src/autotest/autotest_aruco.py b/clover/src/autotest/autotest_aruco.py index 920eda47b..0fbe6fa7d 100755 --- a/clover/src/autotest/autotest_aruco.py +++ b/clover/src/autotest/autotest_aruco.py @@ -13,7 +13,12 @@ rospy.init_node('autotest_aruco', disable_signals=True) # disable signals to allow interrupting with ctrl+c -flow_client = dynamic_reconfigure.client.Client('optical_flow') +try: + flow_client = dynamic_reconfigure.client.Client('optical_flow', timeout=2) +except rospy.ROSException: + flow_client = None + print('Cannot configure optical flow, skip') + get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) navigate = handle_response(rospy.ServiceProxy('navigate', srv.Navigate)) land = handle_response(rospy.ServiceProxy('land', Trigger)) @@ -30,11 +35,8 @@ def print_current_map_position(): dist = rospy.wait_for_message('rangefinder/range', Range).range print('Map position:\tx={:.1f}\ty={:.1f}\tz={:.1f}\tyaw={:.1f}\tdist={:.2f}'.format(telem.x, telem.y, telem.z, telem.yaw, dist)) -def navigate_wait(x=0, y=0, z=0, yaw=float('nan'), yaw_rate=0, speed=0.5, \ - frame_id='body', tolerance=0.2, auto_arm=False): - - res = navigate(x=x, y=y, z=z, yaw=yaw, yaw_rate=yaw_rate, speed=speed, \ - frame_id=frame_id, auto_arm=auto_arm) +def navigate_wait(x=0, y=0, z=0, yaw=math.nan, speed=0.5, frame_id='body', 
tolerance=0.2, auto_arm=False): + res = navigate(x=x, y=y, z=z, yaw=yaw, speed=speed, frame_id=frame_id, auto_arm=auto_arm) if not res.success: return res @@ -67,12 +69,13 @@ def navigate_wait(x=0, y=0, z=0, yaw=float('nan'), yaw_rate=0, speed=0.5, \ navigate_wait(x=center_x, y=center_y, z=1.5, speed=5, frame_id='aruco_map') print_current_map_position() -input('Disable optical flow and keep hovering [enter] ') -flow_client.update_configuration({'enabled': False}) -rospy.sleep(5) +if flow_client: + input('Disable optical flow and keep hovering [enter] ') + flow_client.update_configuration({'enabled': False}) + rospy.sleep(5) -input('Enable optical flow back [enter] ') -flow_client.update_configuration({'enabled': True}) + input('Enable optical flow back [enter] ') + flow_client.update_configuration({'enabled': True}) input('Go to side 1 %g 2 heading top [enter] ' % (center_y)) navigate_wait(x=1, y=center_y, z=2, yaw=1.57, frame_id='aruco_map') diff --git a/clover/src/autotest/autotest_flight.py b/clover/src/autotest/autotest_flight.py index 19adfdbff..371db7df5 100755 --- a/clover/src/autotest/autotest_flight.py +++ b/clover/src/autotest/autotest_flight.py @@ -2,7 +2,7 @@ import rospy import math -from math import nan +from math import nan, inf import signal import sys from clover import srv @@ -15,6 +15,8 @@ get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) navigate = handle_response(rospy.ServiceProxy('navigate', srv.Navigate)) navigate_global = handle_response(rospy.ServiceProxy('navigate_global', srv.NavigateGlobal)) +set_yaw = handle_response(rospy.ServiceProxy('set_yaw', srv.SetYaw)) +set_yaw_rate = handle_response(rospy.ServiceProxy('set_yaw_rate', srv.SetYawRate)) set_position = handle_response(rospy.ServiceProxy('set_position', srv.SetPosition)) set_velocity = handle_response(rospy.ServiceProxy('set_velocity', srv.SetVelocity)) set_attitude = handle_response(rospy.ServiceProxy('set_attitude', srv.SetAttitude)) @@ -28,11 +30,8 @@ def 
interrupt(sig, frame): signal.signal(signal.SIGINT, interrupt) -def navigate_wait(x=0, y=0, z=0, yaw=nan, yaw_rate=0, speed=0.5, \ - frame_id='body', tolerance=0.2, auto_arm=False): - - res = navigate(x=x, y=y, z=z, yaw=yaw, yaw_rate=yaw_rate, speed=speed, \ - frame_id=frame_id, auto_arm=auto_arm) +def navigate_wait(x=0, y=0, z=0, yaw=nan, speed=0.5, frame_id='body', tolerance=0.2, auto_arm=False): + res = navigate(x=x, y=y, z=z, yaw=yaw, speed=speed, frame_id=frame_id, auto_arm=auto_arm) if not res.success: return res @@ -69,17 +68,17 @@ def print_distance(): rospy.sleep(2) set_position(frame_id='body') -input('Rotate right 90° [enter] ') -navigate(yaw=-math.pi / 2, frame_id='navigate_target') +input('Rotate right 90° using set_yaw [enter] ') +set_yaw(yaw=-math.pi / 2, frame_id='navigate_target') rospy.sleep(3) input('Use set_attitude to fly backwards [enter]') -set_attitude(pitch=-0.3, roll=0, yaw=0, thrust=0.5, frame_id='body') +set_attitude(roll=0, pitch=-0.3, yaw=0, thrust=0.5, frame_id='body') rospy.sleep(0.3) set_position(frame_id='body') input('Use set_attitude to fly right [enter]') -set_attitude(pitch=0, roll=0.3, yaw=0, thrust=0.5, frame_id='body') +set_attitude(roll=0.3, pitch=0, yaw=0, thrust=0.5, frame_id='body') rospy.sleep(0.5) set_position(frame_id='body') @@ -88,13 +87,13 @@ def print_distance(): rospy.sleep(0.4) set_position(frame_id='body') -input('Rotate 360° to the right using yaw_rate [enter]') -set_position(x=nan, y=nan, z=nan, frame_id='body', yaw=nan, yaw_rate=-1) +input('Rotate 360° to the right using set_yaw_rate [enter]') +set_yaw_rate(yaw_rate=-1) rospy.sleep(2 * math.pi) set_position(frame_id='body') -input('Return to start point [enter]') -navigate_wait(x=start.x, y=start.y, z=start.z, yaw=start.yaw, speed=1, frame_id='map') +input('Return to start point heading forward [enter]') +navigate_wait(x=start.x, y=start.y, z=start.z, yaw=inf, speed=1, frame_id='map') input('Land [enter]') land() diff --git a/clover/src/clover/__init__.py 
b/clover/src/clover/__init__.py new file mode 100644 index 000000000..6d398c424 --- /dev/null +++ b/clover/src/clover/__init__.py @@ -0,0 +1,35 @@ +import rospy +from threading import Thread, Event + +def long_callback(fn): + """ + Decorator fixing a rospy issue for long-running topic callbacks, primarily + for image processing. + + See: https://github.com/ros/ros_comm/issues/1901. + + Usage example: + + @long_callback + def image_callback(msg): + # perform image processing + # ... + + rospy.Subscriber('main_camera/image_raw', Image, image_callback, queue_size=1) + """ + e = Event() + + def thread(): + while not rospy.is_shutdown(): + e.wait() + e.clear() + fn(thread.current_msg) + + thread.current_msg = None + Thread(target=thread, daemon=True).start() + + def wrapper(msg): + thread.current_msg = msg + e.set() + + return wrapper diff --git a/clover/src/led.cpp b/clover/src/led.cpp index 8d437c055..b9927900f 100644 --- a/clover/src/led.cpp +++ b/clover/src/led.cpp @@ -309,18 +309,22 @@ int main(int argc, char **argv) nh_priv.param("notify/low_battery/threshold", low_battery_threshold, 3.7); nh_priv.param("notify/error/ignore", error_ignore, {}); - ros::service::waitForService("set_leds"); // cannot work without set_leds service - set_leds_srv = nh.serviceClient("set_leds", true); + std::string led; // led namespace + nh_priv.param("led", led, std::string("led")); + if (!led.empty()) led += "/"; + + ros::service::waitForService(led + "set_leds"); // cannot work without set_leds service + set_leds_srv = nh.serviceClient(led + "set_leds", true); // wait for leds count info - handleState(*ros::topic::waitForMessage("state", nh)); + handleState(*ros::topic::waitForMessage(led + "state", nh)); - auto state_sub = nh.subscribe("state", 1, &handleState); + auto state_sub = nh.subscribe(led + "state", 1, &handleState); - auto set_effect = nh.advertiseService("set_effect", &setEffect); + auto set_effect = nh.advertiseService(led + "set_effect", &setEffect); - auto 
mavros_state_sub = nh.subscribe("/mavros/state", 1, &handleMavrosState); - auto battery_sub = nh.subscribe("/mavros/battery", 1, &handleBattery); + auto mavros_state_sub = nh.subscribe("mavros/state", 1, &handleMavrosState); + auto battery_sub = nh.subscribe("mavros/battery", 1, &handleBattery); auto rosout_sub = nh.subscribe("/rosout_agg", 1, &handleLog); timer = nh.createTimer(ros::Duration(0), &proceed, false, false); diff --git a/clover/src/optical_flow.cpp b/clover/src/optical_flow.cpp index 7207f516a..36a98b6d5 100644 --- a/clover/src/optical_flow.cpp +++ b/clover/src/optical_flow.cpp @@ -25,6 +25,7 @@ #include #include #include +#include #include #include #include @@ -57,6 +58,9 @@ class OpticalFlow : public nodelet::Nodelet std::unique_ptr tf_listener_; bool calc_flow_gyro_; float flow_gyro_default_; + bool disable_on_vpe_; + ros::Subscriber vpe_sub_; + ros::Time last_vpe_time_; std::shared_ptr> dyn_srv_; void onInit() @@ -87,6 +91,11 @@ class OpticalFlow : public nodelet::Nodelet img_sub_ = it.subscribeCamera("image_raw", 1, &OpticalFlow::flow, this); + disable_on_vpe_ = nh_priv.param("disable_on_vpe", false); + if (disable_on_vpe_) { + vpe_sub_ = nh.subscribe("mavros/vision_pose/pose", 1, &OpticalFlow::vpeCallback, this); + } + dyn_srv_ = std::make_shared>(nh_priv); dynamic_reconfigure::Server::CallbackType cb; @@ -121,6 +130,12 @@ class OpticalFlow : public nodelet::Nodelet { if (!enabled_) return; + if (disable_on_vpe_ && + !last_vpe_time_.isZero() && + (msg->header.stamp - last_vpe_time_).toSec() < 0.1) { + return; + } + parseCameraInfo(cinfo); auto img = cv_bridge::toCvShare(msg, "mono8")->image; @@ -236,6 +251,14 @@ class OpticalFlow : public nodelet::Nodelet prev_ = curr_.clone(); prev_stamp_ = msg->header.stamp; + // Publish estimated angular velocity + geometry_msgs::TwistStamped velo; + velo.header.stamp = msg->header.stamp; + velo.header.frame_id = fcu_frame_id_; + velo.twist.angular.x = flow_fcu.vector.x / integration_time.toSec(); + 
velo.twist.angular.y = flow_fcu.vector.y / integration_time.toSec(); + velo_pub_.publish(velo); + publish_debug: // Publish debug image if (img_pub_.getNumSubscribers() > 0) { @@ -248,14 +271,6 @@ class OpticalFlow : public nodelet::Nodelet out_msg.image = img; img_pub_.publish(out_msg.toImageMsg()); } - - // Publish estimated angular velocity - geometry_msgs::TwistStamped velo; - velo.header.stamp = msg->header.stamp; - velo.header.frame_id = fcu_frame_id_; - velo.twist.angular.x = flow_fcu.vector.x / integration_time.toSec(); - velo.twist.angular.y = flow_fcu.vector.y / integration_time.toSec(); - velo_pub_.publish(velo); } } @@ -284,6 +299,10 @@ class OpticalFlow : public nodelet::Nodelet prev_ = Mat(); // clear previous frame } } + + void vpeCallback(const geometry_msgs::PoseStamped& vpe) { + last_vpe_time_ = vpe.header.stamp; + } }; PLUGINLIB_EXPORT_CLASS(OpticalFlow, nodelet::Nodelet) diff --git a/clover/src/selfcheck.py b/clover/src/selfcheck.py index 159fcad2e..274594b79 100755 --- a/clover/src/selfcheck.py +++ b/clover/src/selfcheck.py @@ -9,13 +9,14 @@ # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. 
-import os +import os, sys import math import subprocess import re from collections import OrderedDict import traceback -from threading import Event +import threading +from threading import Event, Thread, Lock import numpy import rospy import tf2_ros @@ -27,24 +28,16 @@ from mavros_msgs.srv import ParamGet from geometry_msgs.msg import PoseStamped, TwistStamped, PoseWithCovarianceStamped, Vector3Stamped from visualization_msgs.msg import MarkerArray as VisualizationMarkerArray +from diagnostic_msgs.msg import DiagnosticArray import tf.transformations as t from aruco_pose.msg import MarkerArray from mavros import mavlink import locale -# TODO: check attitude is present -# TODO: disk free space -# TODO: map, base_link, body -# TODO: rc service -# TODO: perform commander check, ekf2 status on PX4 -# TODO: check if FCU params setter succeed -# TODO: selfcheck ROS service (with blacklists for checks) - - rospy.init_node('selfcheck') -os.environ['ROSCONSOLE_FORMAT']='[${severity}]: ${message}' +os.environ['ROSCONSOLE_FORMAT']='${message}' # use user's locale to convert numbers, etc locale.setlocale(locale.LC_ALL, '') @@ -53,46 +46,68 @@ tf_listener = tf2_ros.TransformListener(tf_buffer) -failures = [] -infos = [] -current_check = None +thread_local = threading.local() +reports_lock = Lock() + + +# formatting colors +if sys.stdout.isatty(): + GREY = '\033[90m' + GREEN = '\033[92m' + RED = '\033[31m' + END = '\033[0m' +else: + GREY = GREEN = RED = END = '' def failure(text, *args): msg = text % args - rospy.logwarn('%s: %s', current_check, msg) - failures.append(msg) + thread_local.reports += [{'failure': msg}] def info(text, *args): msg = text % args - rospy.loginfo('%s: %s', current_check, msg) - infos.append(msg) + thread_local.reports += [{'info': msg}] def check(name): def inner(fn): def wrapper(*args, **kwargs): - failures[:] = [] - infos[:] = [] - global current_check - current_check = name + start = rospy.get_time() + thread_local.reports = [] try: fn(*args, 
**kwargs) except Exception as e: traceback.print_exc() rospy.logerr('%s: exception occurred', name) - return - if not failures and not infos: - rospy.loginfo('%s: OK', name) + with reports_lock: + for report in thread_local.reports: + if 'failure' in report: + rospy.logerr('%s: %s', name, report['failure']) + elif 'info' in report: + rospy.loginfo(GREY + name + END + ': ' + report['info']) + if not thread_local.reports: + rospy.loginfo(GREY + name + END + ': ' + GREEN + 'OK' + END) + if rospy.get_param('~time', False): + rospy.loginfo('%s: %.1f sec', name, rospy.get_time() - start) return wrapper return inner +def ff(value, precision=2): + # safely format float or int + if value is None: + return RED + '???' + END + if isinstance(value, float): + return ('{:.' + str(precision + 1) + '}').format(value) + elif isinstance(value, int): + return str(value) + + param_get = rospy.ServiceProxy('mavros/param/get', ParamGet) -def get_param(name): +def get_param(name, default=None, strict=True): try: res = param_get(param_id=name) except rospy.ServiceException as e: @@ -100,13 +115,19 @@ def get_param(name): return None if not res.success: - failure('unable to retrieve PX4 parameter %s', name) + if strict: + failure('unable to retrieve PX4 parameter %s', name) + return default else: if res.value.integer != 0: return res.value.integer return res.value.real +def get_paramf(name, precision=2): + return ff(get_param(name), precision) + + recv_event = Event() link = mavutil.mavlink.MAVLink('', 255, 1) mavlink_pub = rospy.Publisher('mavlink/to', Mavlink, queue_size=1) @@ -151,6 +172,24 @@ def mavlink_exec(cmd, timeout=3.0): return mavlink_recv +def read_diagnostics(name, key): + e = Event() + def cb(msg): + for status in msg.status: + if status.name.lower() == name.lower(): + for value in status.values: + if value.key.lower() == key.lower(): + cb.value = value.value + e.set() + return + + cb.value = None + sub = rospy.Subscriber('/diagnostics', DiagnosticArray, cb) + e.wait(1.0) # 
wait to read all the diagnostics from nodes publishing them + sub.unregister() + return cb.value + + BOARD_ROTATIONS = { 0: 'no rotation', 1: 'yaw 45°', @@ -196,34 +235,36 @@ def check_fcu(): state = rospy.wait_for_message('mavros/state', State, timeout=3) if not state.connected: failure('no connection to the FCU (check wiring)') + info('fcu_url = %s', rospy.get_param('mavros/fcu_url', '?')) return - clover_tag = re.compile(r'-cl[oe]ver\.\d+$') - clover_fw = False - - # Make sure the console is available to us - mavlink_exec('\n') - version_str = mavlink_exec('ver all') - if version_str == '': - info('no version data available from SITL') - - for line in version_str.split('\n'): - if line.startswith('FW version: '): - info(line[len('FW version: '):]) - elif line.startswith('FW git tag: '): # only Clover's firmware - tag = line[len('FW git tag: '):] - clover_fw = clover_tag.search(tag) - info(tag) - elif line.startswith('HW arch: '): - info(line[len('HW arch: '):]) - - if not clover_fw: - info('not Clover PX4 firmware, check https://clover.coex.tech/firmware') + if not is_process_running('px4', exact=True): # can't use px4 console in SITL + clover_tag = re.compile(r'-cl[oe]ver\.\d+$') + clover_fw = False + + # Make sure the console is available to us + mavlink_exec('\n') + version_str = mavlink_exec('ver all') + if version_str == '': + info('no version data available from SITL') + + for line in version_str.split('\n'): + if line.startswith('FW version: '): + info(line[len('FW version: '):]) + elif line.startswith('FW git tag: '): # only Clover's firmware + tag = line[len('FW git tag: '):] + clover_fw = clover_tag.search(tag) + info(tag) + elif line.startswith('HW arch: '): + info(line[len('HW arch: '):]) + + if not clover_fw: + info('not Clover PX4 firmware, check https://clover.coex.tech/firmware') est = get_param('SYS_MC_EST_GROUP') if est == 1: info('selected estimator: LPE') - fuse = get_param('LPE_FUSION') + fuse = int(get_param('LPE_FUSION')) if fuse & (1 << 
4): info('LPE_FUSION: land detector fusion is enabled') else: @@ -255,21 +296,35 @@ def check_fcu(): if cbrk_usb_chk != 197848: failure('set parameter CBRK_USB_CHK to 197848 for flying with USB connected') + if not is_process_running('px4', exact=True): # skip battery check in SITL + try: + battery = rospy.wait_for_message('mavros/battery', BatteryState, timeout=3) + if not battery.cell_voltage: + failure('cell voltage is not available, https://clover.coex.tech/power') + else: + cell = battery.cell_voltage[0] + if cell > 4.3 or cell < 3.0: + failure('incorrect cell voltage: %.2f V, https://clover.coex.tech/power', cell) + elif cell < 3.7: + failure('critically low cell voltage: %.2f V, recharge battery', cell) + except rospy.ROSException: + failure('no battery state') + + # time sync check try: - battery = rospy.wait_for_message('mavros/battery', BatteryState, timeout=3) - if not battery.cell_voltage: - failure('cell voltage is not available, https://clover.coex.tech/power') - else: - cell = battery.cell_voltage[0] - if cell > 4.3 or cell < 3.0: - failure('incorrect cell voltage: %.2f V, https://clover.coex.tech/power', cell) - elif cell < 3.7: - failure('critically low cell voltage: %.2f V, recharge battery', cell) - except rospy.ROSException: - failure('no battery state') + info('time sync offset: %.2f s', float(read_diagnostics('mavros: Time Sync', 'Estimated time offset (s)'))) + except: + failure('cannot read time sync offset') except rospy.ROSException: - failure('no MAVROS state (check wiring)') + failure('no MAVROS state') + fcu_url = rospy.get_param('mavros/fcu_url', '?') + if fcu_url == '/dev/px4fmu': + if not os.path.exists('/lib/udev/rules.d/99-px4fmu.rules'): + info('udev rules are not installed, install udev rules or change usb_device to /dev/ttyACM0 in mavros.launch') + else: + info('udev did\'t recognize px4fmu device, check wiring or change usb_device to /dev/ttyACM0 in mavros.launch') + info('fcu_url = %s', rospy.get_param('mavros/fcu_url', '?')) 
def describe_direction(v): @@ -346,19 +401,24 @@ def is_process_running(binary, exact=False, full=False): @check('ArUco markers') def check_aruco(): + markers = None + if is_process_running('aruco_detect', full=True): try: - info('aruco_detect/length = %g m', rospy.get_param('aruco_detect/length')) + info('aruco_detect/length = %g m', rospy.get_param('aruco_detect/length', '?')) except KeyError: failure('aruco_detect/length parameter is not set') - known_tilt = rospy.get_param('aruco_detect/known_tilt', '') - if known_tilt == 'map': - known_tilt += ' (ALL markers are on the floor)' - elif known_tilt == 'map_flipped': - known_tilt += ' (ALL markers are on the ceiling)' - info('aruco_detector/known_tilt = %s', known_tilt) + known_vertical = rospy.get_param('aruco_detect/known_vertical', '') + flip_vertical = rospy.get_param('aruco_detect/flip_vertical', False) + description = '' + if known_vertical == 'map' and not flip_vertical: + description = ' (all markers are on the floor)' + elif known_vertical == 'map' and flip_vertical: + description = ' (all markers are on the ceiling)' + info('aruco_detect/known_vertical = %s', known_vertical) + info('aruco_detect/flip_vertical = %s%s', flip_vertical, description) try: - rospy.wait_for_message('aruco_detect/markers', MarkerArray, timeout=1) + markers = rospy.wait_for_message('aruco_detect/markers', MarkerArray, timeout=0.8) except rospy.ROSException: failure('no markers detection') return @@ -367,42 +427,61 @@ def check_aruco(): return if is_process_running('aruco_map', full=True): - known_tilt = rospy.get_param('aruco_map/known_tilt', '') - if known_tilt == 'map': - known_tilt += ' (marker\'s map is on the floor)' - elif known_tilt == 'map_flipped': - known_tilt += ' (marker\'s map is on the ceiling)' - info('aruco_map/known_tilt = %s', known_tilt) + known_vertical = rospy.get_param('aruco_map/known_vertical', '') + flip_vertical = rospy.get_param('aruco_map/flip_vertical', False) + description = '' + if known_vertical == 
'map' and not flip_vertical: + description += ' (markers map is on the floor)' + elif known_vertical == 'map' and flip_vertical: + description += ' (markers map is on the ceiling)' + info('aruco_map/known_vertical = %s', known_vertical) + info('aruco_map/flip_vertical = %s%s', flip_vertical, description) try: - visualization = rospy.wait_for_message('aruco_map/visualization', VisualizationMarkerArray, timeout=1) + visualization = rospy.wait_for_message('aruco_map/visualization', VisualizationMarkerArray, timeout=0.8) info('map has %s markers', len(visualization.markers)) except: failure('cannot read aruco_map/visualization topic') try: - rospy.wait_for_message('aruco_map/pose', PoseWithCovarianceStamped, timeout=1) + rospy.wait_for_message('aruco_map/pose', PoseWithCovarianceStamped, timeout=0.8) except rospy.ROSException: - failure('no map detection') + if not markers: + info('no map detection as no markers detection') + elif not markers.markers: + info('no map detection as no markers detected') + else: + failure('no map detection') else: info('aruco_map is not running') +def is_on_the_floor(): + try: + dist = rospy.wait_for_message('rangefinder/range', Range, timeout=1) + return dist.range < 0.3 + except rospy.ROSException: + return False + + @check('Vision position estimate') def check_vpe(): vis = None try: - vis = rospy.wait_for_message('mavros/vision_pose/pose', PoseStamped, timeout=1) + vis = rospy.wait_for_message('mavros/vision_pose/pose', PoseStamped, timeout=0.8) except rospy.ROSException: try: - vis = rospy.wait_for_message('mavros/mocap/pose', PoseStamped, timeout=1) + vis = rospy.wait_for_message('mavros/mocap/pose', PoseStamped, timeout=0.8) except rospy.ROSException: - failure('no VPE or MoCap messages') - # check if vpe_publisher is running - try: - subprocess.check_output(['pgrep', '-x', 'vpe_publisher']) - except subprocess.CalledProcessError: - return # it's not running, skip following checks + if not is_process_running('vpe_publisher', 
full=True): + info('no vision position estimate, vpe_publisher is not running') + elif rospy.get_param('aruco_map/known_vertical', '') == 'map' \ + and rospy.get_param('aruco_map/flip_vertical', False): + failure('no vision position estimate, markers are on the ceiling') + elif is_on_the_floor(): + info('no vision position estimate, the drone is on the floor') + else: + failure('no vision position estimate') # check PX4 settings est = get_param('SYS_MC_EST_GROUP') @@ -414,26 +493,37 @@ def check_vpe(): if vision_yaw_w == 0: failure('vision yaw weight is zero, change ATT_W_EXT_HDG parameter') else: - info('Vision yaw weight: %.2f', vision_yaw_w) - fuse = get_param('LPE_FUSION') + info('vision yaw weight: %s', ff(vision_yaw_w)) + fuse = int(get_param('LPE_FUSION')) if not fuse & (1 << 2): failure('vision position fusion is disabled, change LPE_FUSION parameter') delay = get_param('LPE_VIS_DELAY') if delay != 0: - failure('LPE_VIS_DELAY parameter is %s, but it should be zero', delay) - info('LPE_VIS_XY is %.2f m, LPE_VIS_Z is %.2f m', get_param('LPE_VIS_XY'), get_param('LPE_VIS_Z')) + failure('LPE_VIS_DELAY = %s, but it should be zero', delay) + info('LPE_VIS_XY = %s m, LPE_VIS_Z = %s m', get_paramf('LPE_VIS_XY'), get_paramf('LPE_VIS_Z')) elif est == 2: - fuse = get_param('EKF2_AID_MASK') - if not fuse & (1 << 3): - failure('vision position fusion is disabled, change EKF2_AID_MASK parameter') - if not fuse & (1 << 4): - failure('vision yaw fusion is disabled, change EKF2_AID_MASK parameter') + ev_ctrl = get_param('EKF2_EV_CTRL', strict=False) + if ev_ctrl is not None: # PX4 after v1.14 + ev_ctrl = int(ev_ctrl) + if not ev_ctrl & (1 << 0): + failure('vision horizontal position fusion is disabled, change EKF2_EV_CTRL parameter') + if not ev_ctrl & (1 << 1): + failure('vision vertical position fusion is disabled, change EKF2_EV_CTRL parameter') + if not ev_ctrl & (1 << 3): + failure('vision yaw fusion is disabled, change EKF2_EV_CTRL parameter') + else: # PX4 before 
v1.14 + fuse = int(get_param('EKF2_AID_MASK')) + if not fuse & (1 << 3): + failure('vision position fusion is disabled, change EKF2_AID_MASK parameter') + if not fuse & (1 << 4): + failure('vision yaw fusion is disabled, change EKF2_AID_MASK parameter') + delay = get_param('EKF2_EV_DELAY') if delay != 0: - failure('EKF2_EV_DELAY is %.2f, but it should be zero', delay) - info('EKF2_EVA_NOISE is %.3f, EKF2_EVP_NOISE is %.3f', - get_param('EKF2_EVA_NOISE'), - get_param('EKF2_EVP_NOISE')) + failure('EKF2_EV_DELAY = %.2f, but it should be zero', delay) + info('EKF2_EVA_NOISE = %s, EKF2_EVP_NOISE = %s', + get_paramf('EKF2_EVA_NOISE', 3), + get_paramf('EKF2_EVP_NOISE', 3)) if not vis: return @@ -531,15 +621,25 @@ def check_velocity(): @check('Global position (GPS)') def check_global_position(): try: - rospy.wait_for_message('mavros/global_position/global', NavSatFix, timeout=1) + rospy.wait_for_message('mavros/global_position/global', NavSatFix, timeout=0.8) except rospy.ROSException: info('no global position') - if get_param('SYS_MC_EST_GROUP') == 2 and (get_param('EKF2_AID_MASK') & (1 << 0)): - failure('enabled GPS fusion may suppress vision position aiding') + if get_param('SYS_MC_EST_GROUP') == 2: + gps_ctrl = get_param('EKF2_GPS_CTRL', strict=False) + if gps_ctrl is not None: # PX4 after v1.14 + if int(gps_ctrl) & (1 << 0): + failure('GPS fusion enabled may suppress vision position aiding, change EKF2_GPS_CTRL') + else: # PX4 before v1.14 + if int(get_param('EKF2_AID_MASK', 0)) & (1 << 0): + failure('GPS fusion enabled may suppress vision position aiding, change EKF2_AID_MASK') @check('Optical flow') def check_optical_flow(): + if not is_process_running('optical_flow', full=True): + info('optical_flow is not running') + return + # TODO:check FPS! 
try: rospy.wait_for_message('mavros/px4flow/raw/send', OpticalFlowRad, timeout=0.5) @@ -547,40 +647,49 @@ def check_optical_flow(): # check PX4 settings rot = get_param('SENS_FLOW_ROT') if rot != 0: - failure('SENS_FLOW_ROT parameter is %s, but it should be zero', rot) + failure('SENS_FLOW_ROT = %s, but it should be zero', rot) est = get_param('SYS_MC_EST_GROUP') if est == 1: - fuse = get_param('LPE_FUSION') + fuse = int(get_param('LPE_FUSION')) if not fuse & (1 << 1): failure('optical flow fusion is disabled, change LPE_FUSION parameter') if not fuse & (1 << 1): failure('flow gyro compensation is disabled, change LPE_FUSION parameter') - scale = get_param('LPE_FLW_SCALE') + scale = get_param('LPE_FLW_SCALE', 1) if not numpy.isclose(scale, 1.0): - failure('LPE_FLW_SCALE parameter is %.2f, but it should be 1.0', scale) - - info('LPE_FLW_QMIN is %s, LPE_FLW_R is %.4f, LPE_FLW_RR is %.4f, SENS_FLOW_MINHGT is %.3f, SENS_FLOW_MAXHGT is %.3f', - get_param('LPE_FLW_QMIN'), - get_param('LPE_FLW_R'), - get_param('LPE_FLW_RR'), - get_param('SENS_FLOW_MINHGT'), - get_param('SENS_FLOW_MAXHGT')) + failure('LPE_FLW_SCALE = %.2f, but it should be 1.0', scale) + + info('LPE_FLW_QMIN = %s, LPE_FLW_R = %s, LPE_FLW_RR = %s', + get_paramf('LPE_FLW_QMIN'), + get_paramf('LPE_FLW_R', 4), + get_paramf('LPE_FLW_RR', 4)) elif est == 2: - fuse = get_param('EKF2_AID_MASK') - if not fuse & (1 << 1): - failure('optical flow fusion is disabled, change EKF2_AID_MASK parameter') - delay = get_param('EKF2_OF_DELAY') + of_ctrl = get_param('EKF2_OF_CTRL', strict=False) + if of_ctrl is not None: # PX4 after v1.14 + if of_ctrl == 0: + failure('optical flow fusion is disabled, change EKF2_OF_CTRL') + else: # PX4 before v1.14 + fuse = int(get_param('EKF2_AID_MASK', 0)) + if not fuse & (1 << 1): + failure('optical flow fusion is disabled, change EKF2_AID_MASK parameter') + delay = get_param('EKF2_OF_DELAY', 0) if delay != 0: - failure('EKF2_OF_DELAY is %.2f, but it should be zero', delay) - 
info('EKF2_OF_QMIN is %s, EKF2_OF_N_MIN is %.4f, EKF2_OF_N_MAX is %.4f, SENS_FLOW_MINHGT is %.3f, SENS_FLOW_MAXHGT is %.3f', - get_param('EKF2_OF_QMIN'), - get_param('EKF2_OF_N_MIN'), - get_param('EKF2_OF_N_MAX'), - get_param('SENS_FLOW_MINHGT'), - get_param('SENS_FLOW_MAXHGT')) + failure('EKF2_OF_DELAY = %.2f, but it should be zero', delay) + info('EKF2_OF_QMIN = %s, EKF2_OF_N_MIN = %s, EKF2_OF_N_MAX = %s', + get_paramf('EKF2_OF_QMIN'), + get_paramf('EKF2_OF_N_MIN', 4), + get_paramf('EKF2_OF_N_MAX', 4)) + info('SENS_FLOW_MINHGT = %s, SENS_FLOW_MAXHGT = %s', get_paramf('SENS_FLOW_MINHGT', 3), get_paramf('SENS_FLOW_MAXHGT', 3)) except rospy.ROSException: - failure('no optical flow data (from Raspberry)') + if rospy.get_param('optical_flow/disable_on_vpe', False): + try: + rospy.wait_for_message('mavros/vision_pose/pose', PoseStamped, timeout=1) + info('no optical flow as disable_on_vpe is true') + except: + failure('no optical flow on RPi, disable_on_vpe is true, but no vision pose also') + else: + failure('no optical flow on RPi') @check('Rangefinder') @@ -604,23 +713,26 @@ def check_rangefinder(): est = get_param('SYS_MC_EST_GROUP') if est == 1: - fuse = get_param('LPE_FUSION') + fuse = int(get_param('LPE_FUSION', 0)) if not fuse & (1 << 5): info('"pub agl as lpos down" in LPE_FUSION is disabled, NOT operating over flat surface') else: info('"pub agl as lpos down" in LPE_FUSION is enabled, operating over flat surface') elif est == 2: - hgt = get_param('EKF2_HGT_MODE') + hgt = get_param('EKF2_HGT_REF', strict=False) + if hgt is None: # PX4 before v1.14 + hgt = get_param('EKF2_HGT_MODE') if hgt != 2: info('EKF2_HGT_MODE != Range sensor, NOT operating over flat surface') else: info('EKF2_HGT_MODE = Range sensor, operating over flat surface') - aid = get_param('EKF2_RNG_AID') - if aid != 1: - info('EKF2_RNG_AID != 1, range sensor aiding disabled') - else: - info('EKF2_RNG_AID = 1, range sensor aiding enabled') + aid = get_param('EKF2_RNG_AID', strict=False) + if aid 
is not None: # PX4 before v1.14 + if aid != 1: + info('EKF2_RNG_AID != 1, range sensor aiding disabled') + else: + info('EKF2_RNG_AID = 1, range sensor aiding enabled') @check('Boot duration') @@ -638,7 +750,7 @@ def check_boot_duration(): @check('CPU usage') def check_cpu_usage(): - WHITELIST = 'nodelet', 'gzclient', 'gzserver' + WHITELIST = 'nodelet', 'gzclient', 'gzserver', 'selfcheck.py' CMD = "top -n 1 -b -i | tail -n +8 | awk '{ printf(\"%-8s\\t%-8s\\t%-8s\\n\", $1, $9, $12); }'" output = subprocess.check_output(CMD, shell=True).decode() processes = output.split('\n') @@ -707,7 +819,10 @@ def check_image(): try: info('version: %s', open('/etc/clover_version').read().strip()) except IOError: - info('no /etc/clover_version file, not the Clover image?') + try: + info('VM version: %s', open('/etc/clover_vm_version').read().strip()) + except IOError: + info('no /etc/clover_version file, not the Clover image?') @check('Preflight status') @@ -818,26 +933,47 @@ def check_board(): info('could not open /proc/device-tree/model, not a Raspberry Pi?') +def parallel_for(fns): + threads = [] + for fn in fns: + thread = Thread(target=fn) + thread.start() + threads.append(thread) + for thread in threads: + thread.join() + + +def consequentially_for(fns): + for fn in fns: + fn() + + def selfcheck(): - check_image() - check_board() - check_clover_service() - check_network() - check_fcu() - check_imu() - check_local_position() - check_velocity() - check_global_position() - check_preflight_status() - check_main_camera() - check_aruco() - check_simpleoffboard() - check_optical_flow() - check_vpe() - check_rangefinder() - check_rpi_health() - check_cpu_usage() - check_boot_duration() + checks = [ + check_image, + check_board, + check_clover_service, + check_network, + check_fcu, + check_imu, + check_local_position, + check_velocity, + check_global_position, + check_preflight_status, + check_main_camera, + check_aruco, + check_simpleoffboard, + check_optical_flow, + check_vpe, + 
check_rangefinder, + check_rpi_health, + check_cpu_usage, + check_boot_duration, + ] + if rospy.get_param('~parallel', False): + parallel_for(checks) + else: + consequentially_for(checks) if __name__ == '__main__': diff --git a/clover/src/simple_offboard.cpp b/clover/src/simple_offboard.cpp index b8f570332..6aeecb65b 100644 --- a/clover/src/simple_offboard.cpp +++ b/clover/src/simple_offboard.cpp @@ -23,12 +23,14 @@ #include #include #include +#include #include #include #include #include #include #include +#include #include #include #include @@ -37,14 +39,19 @@ #include #include #include +#include #include #include #include +#include +#include +#include #include #include #include #include +#include using std::string; using std::isnan; @@ -54,6 +61,7 @@ using namespace clover; using mavros_msgs::PositionTarget; using mavros_msgs::AttitudeTarget; using mavros_msgs::Thrust; +using mavros_msgs::Altitude; // tf2 tf2_ros::Buffer tf_buffer; @@ -79,35 +87,43 @@ float default_speed; bool auto_release; bool land_only_in_offboard, nav_from_sp, check_kill_switch; std::map reference_frames; +string terrain_frame_mode; // Publishers -ros::Publisher attitude_pub, attitude_raw_pub, position_pub, position_raw_pub, rates_pub, thrust_pub; +ros::Publisher attitude_pub, attitude_raw_pub, position_pub, position_raw_pub, rates_pub, thrust_pub, state_pub; // Service clients ros::ServiceClient arming, set_mode; // Containers ros::Timer setpoint_timer; -tf::Quaternion tq; PoseStamped position_msg; PositionTarget position_raw_msg; -AttitudeTarget att_raw_msg; -Thrust thrust_msg; -TwistStamped rates_msg; +//TwistStamped rates_msg; TransformStamped target, setpoint; geometry_msgs::TransformStamped body; +geometry_msgs::TransformStamped terrain; // State PoseStamped nav_start; -PoseStamped setpoint_position, setpoint_position_transformed; -Vector3Stamped setpoint_velocity, setpoint_velocity_transformed; -QuaternionStamped setpoint_attitude, setpoint_attitude_transformed; -float 
setpoint_yaw_rate; +PointStamped setpoint_position; +PointStamped setpoint_altitude; +Vector3Stamped setpoint_velocity; +float setpoint_yaw, setpoint_roll, setpoint_pitch; +Vector3 setpoint_rates; +string yaw_frame_id; +float setpoint_thrust; float nav_speed; +float setpoint_lat = NAN, setpoint_lon = NAN; bool busy = false; bool wait_armed = false; bool nav_from_sp_flag = false; +// Last published +PoseStamped setpoint_pose_local; +Vector3Stamped setpoint_velocity_local; +float yaw_local; + enum setpoint_type_t { NONE, NAVIGATE, @@ -115,7 +131,10 @@ enum setpoint_type_t { POSITION, VELOCITY, ATTITUDE, - RATES + RATES, + _ALTITUDE, + _YAW, + _YAW_RATE, }; enum setpoint_type_t setpoint_type = NONE; @@ -170,7 +189,7 @@ void handleLocalPosition(const PoseStamped& pose) { local_position = pose; publishBodyFrame(); - // TODO: terrain?, home? + // TODO: home? } // wait for transform without interrupting publishing setpoints @@ -188,6 +207,29 @@ inline bool waitTransform(const string& target, const string& source, return false; } +void publishTerrain(const double distance, const ros::Time& stamp) +{ + if (!waitTransform(local_frame, body.child_frame_id, stamp, ros::Duration(0.1))) return; + + auto t = tf_buffer.lookupTransform(local_frame, body.child_frame_id, stamp); + t.child_frame_id = terrain.child_frame_id; + t.transform.translation.z -= distance; + static_transform_broadcaster->sendTransform(t); +} + +void handleAltitude(const Altitude& alt) +{ + if (!std::isfinite(alt.bottom_clearance)) return; + publishTerrain(alt.bottom_clearance, alt.header.stamp); +} + +void handleRange(const Range& range) +{ + if (!std::isfinite(range.range)) return; + // TODO: check it's facing down + publishTerrain(range.range, range.header.stamp); +} + #define TIMEOUT(msg, timeout) (msg.header.stamp.isZero() || (ros::Time::now() - msg.header.stamp > timeout)) bool getTelemetry(GetTelemetry::Request& req, GetTelemetry::Response& res) @@ -207,11 +249,11 @@ bool 
getTelemetry(GetTelemetry::Request& req, GetTelemetry::Response& res) res.vx = NAN; res.vy = NAN; res.vz = NAN; - res.pitch = NAN; res.roll = NAN; + res.pitch = NAN; res.yaw = NAN; - res.pitch_rate = NAN; res.roll_rate = NAN; + res.pitch_rate = NAN; res.yaw_rate = NAN; res.voltage = NAN; res.cell_voltage = NAN; @@ -341,20 +383,20 @@ inline float getDistance(const Point& from, const Point& to) return hypot(from.x - to.x, from.y - to.y, from.z - to.z); } -void getNavigateSetpoint(const ros::Time& stamp, float speed, Point& nav_setpoint) +void getNavigateSetpoint(const ros::Time& stamp, const float speed, Point& nav_setpoint) { if (wait_armed) { // don't start navigating if we're waiting arming nav_start.header.stamp = stamp; } - float distance = getDistance(nav_start.pose.position, setpoint_position_transformed.pose.position); + float distance = getDistance(nav_start.pose.position, setpoint_pose_local.pose.position); float time = distance / speed; float passed = std::min((stamp - nav_start.header.stamp).toSec() / time, 1.0); - nav_setpoint.x = nav_start.pose.position.x + (setpoint_position_transformed.pose.position.x - nav_start.pose.position.x) * passed; - nav_setpoint.y = nav_start.pose.position.y + (setpoint_position_transformed.pose.position.y - nav_start.pose.position.y) * passed; - nav_setpoint.z = nav_start.pose.position.z + (setpoint_position_transformed.pose.position.z - nav_start.pose.position.z) * passed; + nav_setpoint.x = nav_start.pose.position.x + (setpoint_pose_local.pose.position.x - nav_start.pose.position.x) * passed; + nav_setpoint.y = nav_start.pose.position.y + (setpoint_pose_local.pose.position.y - nav_start.pose.position.y) * passed; + nav_setpoint.z = nav_start.pose.position.z + (setpoint_pose_local.pose.position.z - nav_start.pose.position.z) * passed; } PoseStamped globalToLocal(double lat, double lon) @@ -385,44 +427,101 @@ PoseStamped globalToLocal(double lat, double lon) return pose; } +// publish navigate_target frame +void 
publishTarget(ros::Time stamp, bool _static = false) +{ + bool single_frame = (setpoint_position.header.frame_id == setpoint_altitude.header.frame_id); + + // handle yaw for target frame + if (setpoint_yaw_type == YAW || setpoint_yaw_type == YAW_RATE) { // use last set yaw for yaw_rate + if (setpoint_altitude.header.frame_id == yaw_frame_id) { + target.transform.rotation = tf::createQuaternionMsgFromYaw(setpoint_yaw); + } else { + single_frame = false; + target.transform.rotation = tf::createQuaternionMsgFromYaw(yaw_local); + } + } else if (setpoint_yaw_type == TOWARDS) { + single_frame = false; + target.transform.rotation = tf::createQuaternionMsgFromYaw(yaw_local); + } + + if (_static && single_frame) { + // publish at user's command, if all frames are the same + target.header.frame_id = setpoint_position.header.frame_id; + target.header.stamp = stamp; + target.transform.translation.x = setpoint_position.point.x; + target.transform.translation.y = setpoint_position.point.y; + target.transform.translation.z = setpoint_position.point.z; + + } else if (!_static) { + // publish at each iteration, if frames are different + target.header = setpoint_pose_local.header; + target.transform.translation.x = setpoint_pose_local.pose.position.x; + target.transform.translation.y = setpoint_pose_local.pose.position.y; + target.transform.translation.z = setpoint_pose_local.pose.position.z; + } + + static_transform_broadcaster->sendTransform(target); +} + void publish(const ros::Time stamp) { if (setpoint_type == NONE) return; position_raw_msg.header.stamp = stamp; - thrust_msg.header.stamp = stamp; - rates_msg.header.stamp = stamp; - try { - // transform position and/or yaw - if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == POSITION || setpoint_type == VELOCITY || setpoint_type == ATTITUDE) { - setpoint_position.header.stamp = stamp; - tf_buffer.transform(setpoint_position, setpoint_position_transformed, local_frame, ros::Duration(0.05)); + // 
transform position + if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == POSITION) { + setpoint_position.header.stamp = stamp; + setpoint_altitude.header.stamp = stamp; + // transform xy + try { + auto xy = tf_buffer.transform(setpoint_position, local_frame, ros::Duration(0.05)); + setpoint_pose_local.header = xy.header; + setpoint_pose_local.pose.position.x = xy.point.x; + setpoint_pose_local.pose.position.y = xy.point.y; + } catch (tf2::TransformException& ex) { + // can't transform xy, use last known + ROS_WARN_THROTTLE(10, "can't transform: %s", ex.what()); } - - // transform velocity - if (setpoint_type == VELOCITY) { - setpoint_velocity.header.stamp = stamp; - tf_buffer.transform(setpoint_velocity, setpoint_velocity_transformed, local_frame, ros::Duration(0.05)); + // transform altitude + try { + setpoint_pose_local.pose.position.z = tf_buffer.transform(setpoint_altitude, local_frame, ros::Duration(0.05)).point.z; + } catch (tf2::TransformException& ex) { + // can't transform altitude, use last known + ROS_WARN_THROTTLE(10, "can't transform: %s", ex.what()); } + } - } catch (const tf2::TransformException& e) { - ROS_WARN_THROTTLE(10, "can't transform"); + // transform yaw + if (setpoint_yaw_type == YAW) { + try { + QuaternionStamped q; + q.header.stamp = stamp; + q.header.frame_id = yaw_frame_id; + q.quaternion = tf::createQuaternionMsgFromYaw(setpoint_yaw); + yaw_local = tf2::getYaw(tf_buffer.transform(q, local_frame, ros::Duration(0.05)).quaternion); + } catch (tf2::TransformException& ex) { + // can't transform yaw, use last known + ROS_WARN_THROTTLE(10, "can't transform: %s", ex.what()); + } } + // compute navigate setpoint if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL) { - position_msg.pose.orientation = setpoint_position_transformed.pose.orientation; // copy yaw getNavigateSetpoint(stamp, nav_speed, position_msg.pose.position); if (setpoint_yaw_type == TOWARDS) { - double yaw_towards = 
atan2(position_msg.pose.position.y - nav_start.pose.position.y, - position_msg.pose.position.x - nav_start.pose.position.x); - position_msg.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(0, 0, yaw_towards); + yaw_local = atan2(position_msg.pose.position.y - nav_start.pose.position.y, + position_msg.pose.position.x - nav_start.pose.position.x); } + + position_msg.pose.orientation = tf::createQuaternionMsgFromYaw(yaw_local); } if (setpoint_type == POSITION) { - position_msg = setpoint_position_transformed; + position_msg = setpoint_pose_local; + position_msg.pose.orientation = tf::createQuaternionMsgFromYaw(yaw_local); } if (setpoint_type == POSITION || setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL) { @@ -439,14 +538,14 @@ void publish(const ros::Time stamp) PositionTarget::IGNORE_AFY + PositionTarget::IGNORE_AFZ + PositionTarget::IGNORE_YAW; - position_raw_msg.yaw_rate = setpoint_yaw_rate; + position_raw_msg.yaw_rate = setpoint_rates.z; position_raw_msg.position = position_msg.pose.position; position_raw_pub.publish(position_raw_msg); } // publish setpoint frame if (!setpoint.child_frame_id.empty()) { - if (setpoint.header.stamp == position_msg.header.stamp) { + if (setpoint.header.stamp >= position_msg.header.stamp) { return; // avoid TF_REPEATED_DATA warnings } @@ -458,9 +557,22 @@ void publish(const ros::Time stamp) setpoint.header.stamp = position_msg.header.stamp; transform_broadcaster->sendTransform(setpoint); } + + // publish dynamic target frame + publishTarget(stamp); } if (setpoint_type == VELOCITY) { + // transform velocity to local frame + setpoint_velocity.header.stamp = stamp; + try { + setpoint_velocity_local = tf_buffer.transform(setpoint_velocity, local_frame, ros::Duration(0.05)); + } catch (tf2::TransformException& ex) { + // can't transform velocity, use last known + ROS_WARN_THROTTLE(10, "can't transform: %s", ex.what()); + } + + // publish velocity position_raw_msg.type_mask = PositionTarget::IGNORE_PX + 
PositionTarget::IGNORE_PY + PositionTarget::IGNORE_PZ + @@ -468,14 +580,22 @@ void publish(const ros::Time stamp) PositionTarget::IGNORE_AFY + PositionTarget::IGNORE_AFZ; position_raw_msg.type_mask += setpoint_yaw_type == YAW ? PositionTarget::IGNORE_YAW_RATE : PositionTarget::IGNORE_YAW; - position_raw_msg.velocity = setpoint_velocity_transformed.vector; - position_raw_msg.yaw = tf2::getYaw(setpoint_position_transformed.pose.orientation); - position_raw_msg.yaw_rate = setpoint_yaw_rate; + position_raw_msg.velocity = setpoint_velocity_local.vector; + position_raw_msg.yaw = yaw_local; + position_raw_msg.yaw_rate = setpoint_rates.z; position_raw_pub.publish(position_raw_msg); } if (setpoint_type == ATTITUDE) { - attitude_pub.publish(setpoint_position_transformed); + PoseStamped msg; + msg.header.stamp = stamp; + msg.header.frame_id = local_frame; + msg.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(setpoint_roll, setpoint_pitch, yaw_local); + attitude_pub.publish(msg); + + Thrust thrust_msg; + thrust_msg.header.stamp = stamp; + thrust_msg.thrust = setpoint_thrust; thrust_pub.publish(thrust_msg); } @@ -484,11 +604,12 @@ void publish(const ros::Time stamp) // thrust_pub.publish(thrust_msg); // mavros rates topics waits for rates in local frame // use rates in body frame for simplicity + AttitudeTarget att_raw_msg; att_raw_msg.header.stamp = stamp; att_raw_msg.header.frame_id = fcu_frame; att_raw_msg.type_mask = AttitudeTarget::IGNORE_ATTITUDE; - att_raw_msg.body_rate = rates_msg.twist.angular; - att_raw_msg.thrust = thrust_msg.thrust; + att_raw_msg.body_rate = setpoint_rates; + att_raw_msg.thrust = setpoint_thrust; attitude_raw_pub.publish(att_raw_msg); } } @@ -528,10 +649,59 @@ inline void checkState() throw std::runtime_error("No connection to FCU, https://clover.coex.tech/connection"); } +void publishState() +{ + clover::State msg; + msg.mode = setpoint_type; + msg.yaw_mode = setpoint_yaw_type; + + if (setpoint_position.header.frame_id.empty()) { + msg.x 
= NAN; + msg.y = NAN; + msg.z = NAN; + } else { + msg.x = setpoint_position.point.x; + msg.y = setpoint_position.point.y; + msg.z = setpoint_altitude.point.z; + } + + msg.speed = nav_speed; + msg.lat = setpoint_lat; + msg.lon = setpoint_lon; + msg.vx = setpoint_velocity.vector.x; + msg.vy = setpoint_velocity.vector.y; + msg.vz = setpoint_velocity.vector.z; + msg.roll = setpoint_roll; + msg.pitch = setpoint_pitch; + msg.yaw = !yaw_frame_id.empty() ? setpoint_yaw : NAN; + + msg.roll_rate = setpoint_rates.x; + msg.pitch_rate = setpoint_rates.y; + msg.yaw_rate = setpoint_rates.z; + msg.thrust = setpoint_thrust; + + if (setpoint_type == VELOCITY) { + msg.xy_frame_id = setpoint_velocity.header.frame_id; + msg.z_frame_id = setpoint_velocity.header.frame_id; + } else { + msg.xy_frame_id = setpoint_position.header.frame_id; + msg.z_frame_id = setpoint_altitude.header.frame_id; + } + msg.yaw_frame_id = yaw_frame_id; + + state_pub.publish(msg); +} + +inline float safe(float value) { + return std::isfinite(value) ? 
value : 0; +} + #define ENSURE_FINITE(var) { if (!std::isfinite(var)) throw std::runtime_error(#var " argument cannot be NaN or Inf"); } +#define ENSURE_NON_INF(var) { if (std::isinf(var)) throw std::runtime_error(#var " argument cannot be Inf"); } + bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, float vy, float vz, - float pitch, float roll, float yaw, float pitch_rate, float roll_rate, float yaw_rate, // editorconfig-checker-disable-line + float roll, float pitch, float yaw, float roll_rate, float pitch_rate, float yaw_rate, // editorconfig-checker-disable-line float lat, float lon, float thrust, float speed, string frame_id, bool auto_arm, // editorconfig-checker-disable-line uint8_t& success, string& message) // editorconfig-checker-disable-line { @@ -558,69 +728,40 @@ bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, fl auto search = reference_frames.find(frame_id); const string& reference_frame = search == reference_frames.end() ? 
frame_id : search->second; - // Serve "partial" commands - - if (!auto_arm && std::isfinite(yaw) && - isnan(x) && isnan(y) && isnan(z) && isnan(vx) && isnan(vy) && isnan(vz) && - isnan(pitch) && isnan(roll) && isnan(thrust) && - isnan(lat) && isnan(lon)) { - // change only the yaw - if (setpoint_type == POSITION || setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == VELOCITY) { - if (!waitTransform(setpoint_position.header.frame_id, frame_id, stamp, transform_timeout)) - throw std::runtime_error("Can't transform from " + frame_id + " to " + setpoint_position.header.frame_id); - - message = "Changing yaw only"; + ENSURE_NON_INF(x); + ENSURE_NON_INF(y); + ENSURE_NON_INF(z); + ENSURE_NON_INF(speed); // TODO: allow inf + ENSURE_NON_INF(vx); + ENSURE_NON_INF(vy); + ENSURE_NON_INF(vz); + ENSURE_NON_INF(roll); + ENSURE_NON_INF(pitch); + ENSURE_NON_INF(roll_rate); + ENSURE_NON_INF(pitch_rate); + ENSURE_NON_INF(yaw_rate); + ENSURE_NON_INF(thrust); - QuaternionStamped q; - q.header.frame_id = frame_id; - q.header.stamp = stamp; - q.quaternion = tf::createQuaternionMsgFromYaw(yaw); // TODO: pitch=0, roll=0 is not totally correct - setpoint_position.pose.orientation = tf_buffer.transform(q, setpoint_position.header.frame_id).quaternion; - setpoint_yaw_type = YAW; - goto publish_setpoint; - } else { - throw std::runtime_error("Setting yaw is possible only when position or velocity setpoints active"); - } + if (sp_type == NAVIGATE_GLOBAL) { + ENSURE_FINITE(lat); + ENSURE_FINITE(lon); } - if (!auto_arm && std::isfinite(yaw_rate) && - isnan(x) && isnan(y) && isnan(z) && isnan(vx) && isnan(vy) && isnan(vz) && - isnan(pitch) && isnan(roll) && isnan(yaw) && isnan(thrust) && - isnan(lat) && isnan(lon)) { - // change only the yaw rate - if (setpoint_type == POSITION || setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == VELOCITY) { - message = "Changing yaw rate only"; + if (isfinite(x) != isfinite(y)) { + throw 
std::runtime_error("x and y can be set only together"); + } - setpoint_yaw_type = YAW_RATE; - setpoint_yaw_rate = yaw_rate; - goto publish_setpoint; - } else { - throw std::runtime_error("Setting yaw rate is possible only when position or velocity setpoints active"); + if (isfinite(yaw_rate)) { + if (sp_type > RATES && setpoint_type == ATTITUDE) { + throw std::runtime_error("Yaw rate cannot be set in attitude mode."); } } - // Serve normal commands - - if (sp_type == NAVIGATE || sp_type == POSITION) { - ENSURE_FINITE(x); - ENSURE_FINITE(y); - ENSURE_FINITE(z); - } else if (sp_type == NAVIGATE_GLOBAL) { - ENSURE_FINITE(lat); - ENSURE_FINITE(lon); - ENSURE_FINITE(z); - } else if (sp_type == VELOCITY) { - ENSURE_FINITE(vx); - ENSURE_FINITE(vy); - ENSURE_FINITE(vz); - } else if (sp_type == ATTITUDE) { - ENSURE_FINITE(pitch); - ENSURE_FINITE(roll); - ENSURE_FINITE(thrust); - } else if (sp_type == RATES) { - ENSURE_FINITE(pitch_rate); - ENSURE_FINITE(roll_rate); - ENSURE_FINITE(thrust); + // set_altitude + if (sp_type == _ALTITUDE) { + if (setpoint_type == VELOCITY || setpoint_type == ATTITUDE || setpoint_type == RATES) { + throw std::runtime_error("Altitude cannot be set in velocity, attitude or rates mode."); + } } if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL) { @@ -634,20 +775,13 @@ bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, fl speed = default_speed; } - if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL || sp_type == POSITION || sp_type == VELOCITY) { - if (yaw_rate != 0 && !std::isnan(yaw)) - throw std::runtime_error("Yaw value should be NaN for setting yaw rate"); - - if (std::isnan(yaw_rate) && std::isnan(yaw)) - throw std::runtime_error("Both yaw and yaw_rate cannot be NaN"); - } - if (sp_type == NAVIGATE_GLOBAL) { if (TIMEOUT(global_position, global_position_timeout)) throw std::runtime_error("No global position"); } - if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL || sp_type == POSITION || sp_type == 
VELOCITY || sp_type == ATTITUDE) { + // if any value need to be transformed to reference frame + if (isfinite(x) || isfinite(y) || isfinite(z) || isfinite(vx) || isfinite(vy) || isfinite(vz) || isfinite(yaw)) { // make sure transform from frame_id to reference frame available if (!waitTransform(reference_frame, frame_id, stamp, transform_timeout)) throw std::runtime_error("Can't transform from " + frame_id + " to " + reference_frame); @@ -664,15 +798,27 @@ bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, fl auto xy_in_req_frame = tf_buffer.transform(pose_local, frame_id); x = xy_in_req_frame.pose.position.x; y = xy_in_req_frame.pose.position.y; + setpoint_lat = lat; + setpoint_lon = lon; } // Everything fine - switch setpoint type - setpoint_type = sp_type; + if (sp_type <= RATES) { + setpoint_type = sp_type; + } - if (sp_type != NAVIGATE && sp_type != NAVIGATE_GLOBAL) { + if (setpoint_type != NAVIGATE && setpoint_type != NAVIGATE_GLOBAL) { nav_from_sp_flag = false; } + bool to_auto_arm = auto_arm && (state.mode != "OFFBOARD" || !state.armed); + if (to_auto_arm || setpoint_type == VELOCITY || setpoint_type == ATTITUDE || setpoint_type == RATES) { + // invalidate position setpoint + setpoint_position.header.frame_id = ""; + setpoint_altitude.header.frame_id = ""; + yaw_frame_id = ""; + } + if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL) { // starting point if (nav_from_sp && nav_from_sp_flag) { @@ -681,89 +827,139 @@ bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, fl } else { nav_start = local_position; } - nav_speed = speed; + + if (!isnan(speed)) { + nav_speed = speed; + } + nav_from_sp_flag = true; } - // if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL || sp_type == POSITION || sp_type == VELOCITY) { - // if (std::isnan(yaw) && yaw_rate == 0) { - // // keep yaw unchanged - // // TODO: this is incorrect, because we need yaw in desired frame - // yaw = 
tf2::getYaw(local_position.pose.orientation); - // } - // } - - if (sp_type == POSITION || sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL || sp_type == VELOCITY || sp_type == ATTITUDE) { - // destination point and/or attitude - PoseStamped ps; - ps.header.frame_id = frame_id; - ps.header.stamp = stamp; - ps.pose.position.x = x; - ps.pose.position.y = y; - ps.pose.position.z = z; - ps.pose.orientation.w = 1.0; // Ensure quaternion is always valid - - if (sp_type == ATTITUDE) { - ps.pose.position.x = 0; - ps.pose.position.y = 0; - ps.pose.position.z = 0; - ps.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw); - } else if (std::isnan(yaw)) { - setpoint_yaw_type = YAW_RATE; - setpoint_yaw_rate = yaw_rate; - } else if (std::isinf(yaw) && yaw > 0) { + // handle position + if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == POSITION) { + + PointStamped desired; + desired.header.frame_id = frame_id; + desired.header.stamp = stamp; + desired.point.x = safe(x); + desired.point.y = safe(y); + desired.point.z = safe(z); + + // transform to reference frame + desired = tf_buffer.transform(desired, reference_frame); + + // set horizontal position + if (isfinite(x) && isfinite(y)) { + setpoint_position = desired; + } else if (setpoint_position.header.frame_id.empty()) { + // TODO: use transform for current stamp + setpoint_position.header = local_position.header; + setpoint_position.point = local_position.pose.position; + } + + // set altitude + if (isfinite(z)) { + setpoint_altitude = desired; + } else if (setpoint_altitude.header.frame_id.empty()) { + setpoint_altitude.header = local_position.header; + setpoint_altitude.point = local_position.pose.position; + } + } + + // handle velocity + if (sp_type == VELOCITY) { + // TODO: allow setting different modes by altitude and xy + Vector3Stamped desired; + desired.header.frame_id = frame_id; + desired.header.stamp = stamp; + desired.vector.x = safe(vx); + 
desired.vector.y = safe(vy); + desired.vector.z = safe(vz); + + // transform to reference frame + desired = tf_buffer.transform(desired, reference_frame); + setpoint_velocity.header = desired.header; + + // set horizontal velocity + if (isfinite(vx) && isfinite(vy)) { + setpoint_velocity.vector.x = desired.vector.x; + setpoint_velocity.vector.y = desired.vector.y; + } + + // set vertical velocity + if (isfinite(vz)) { + setpoint_velocity.vector.z = desired.vector.z; + } + } + + // handle yaw + if (sp_type == NAVIGATE || sp_type == NAVIGATE_GLOBAL || sp_type == POSITION || sp_type == VELOCITY || sp_type == ATTITUDE || sp_type == _YAW) { + if (isfinite(yaw)) { + setpoint_yaw_type = YAW; + QuaternionStamped desired; + desired.header.frame_id = frame_id; + desired.header.stamp = stamp; + desired.quaternion = tf::createQuaternionMsgFromYaw(yaw); + + // transform to reference frame + desired = tf_buffer.transform(desired, reference_frame); + setpoint_yaw = tf2::getYaw(desired.quaternion); + yaw_frame_id = reference_frame; + + } else if (isinf(yaw) && yaw > 0) { // yaw towards setpoint_yaw_type = TOWARDS; - yaw = 0; - setpoint_yaw_rate = 0; - } else { + + } else if (yaw_frame_id.empty() || sp_type == _YAW) { + // yaw is nan and not set previously OR set_yaw(yaw=nan) was called setpoint_yaw_type = YAW; - setpoint_yaw_rate = 0; - ps.pose.orientation = tf::createQuaternionMsgFromYaw(yaw); + setpoint_yaw = tf2::getYaw(local_position.pose.orientation); // set yaw to current yaw + yaw_frame_id = local_position.header.frame_id; } + } - tf_buffer.transform(ps, setpoint_position, reference_frame); + // handle roll + if (isfinite(roll)) { + setpoint_roll = roll; } - if (sp_type == VELOCITY) { - Vector3Stamped vel; - vel.header.frame_id = frame_id; - vel.header.stamp = stamp; - vel.vector.x = vx; - vel.vector.y = vy; - vel.vector.z = vz; - tf_buffer.transform(vel, setpoint_velocity, reference_frame); + // handle pitch + if (isfinite(pitch)) { + setpoint_pitch = pitch; + } + + // 
handle yaw rate + if (isfinite(yaw_rate)) { + setpoint_yaw_type = YAW_RATE; + setpoint_rates.z = yaw_rate; + } + + // handle pitch rate + if (isfinite(roll_rate)) { + setpoint_rates.x = roll_rate; } - if (sp_type == ATTITUDE || sp_type == RATES) { - thrust_msg.thrust = thrust; + // handle roll rate + if (isfinite(pitch_rate)) { + setpoint_rates.y = pitch_rate; } - if (sp_type == RATES) { - rates_msg.twist.angular.x = roll_rate; - rates_msg.twist.angular.y = pitch_rate; - rates_msg.twist.angular.z = yaw_rate; + // handle thrust + if (isfinite(thrust)) { + setpoint_thrust = thrust; } wait_armed = auto_arm; -publish_setpoint: publish(stamp); // calculate initial transformed messages first setpoint_timer.start(); - // publish target frame - if (!target.child_frame_id.empty()) { - if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == POSITION) { - target.header.frame_id = setpoint_position.header.frame_id; - target.header.stamp = stamp; - target.transform.translation.x = setpoint_position.pose.position.x; - target.transform.translation.y = setpoint_position.pose.position.y; - target.transform.translation.z = setpoint_position.pose.position.z; - target.transform.rotation = setpoint_position.pose.orientation; - static_transform_broadcaster->sendTransform(target); - } + if (setpoint_type == NAVIGATE || setpoint_type == NAVIGATE_GLOBAL || setpoint_type == POSITION) { + publishTarget(stamp, true); } + publishState(); + if (auto_arm) { offboardAndArm(); wait_armed = false; @@ -788,27 +984,39 @@ bool serve(enum setpoint_type_t sp_type, float x, float y, float z, float vx, fl } bool navigate(Navigate::Request& req, Navigate::Response& res) { - return serve(NAVIGATE, req.x, req.y, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, req.yaw_rate, NAN, NAN, NAN, req.speed, req.frame_id, req.auto_arm, res.success, res.message); + return serve(NAVIGATE, req.x, req.y, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, NAN, NAN, NAN, NAN, req.speed, 
req.frame_id, req.auto_arm, res.success, res.message); } bool navigateGlobal(NavigateGlobal::Request& req, NavigateGlobal::Response& res) { - return serve(NAVIGATE_GLOBAL, NAN, NAN, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, req.yaw_rate, req.lat, req.lon, NAN, req.speed, req.frame_id, req.auto_arm, res.success, res.message); + return serve(NAVIGATE_GLOBAL, NAN, NAN, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, NAN, req.lat, req.lon, NAN, req.speed, req.frame_id, req.auto_arm, res.success, res.message); +} + +bool setAltitude(SetAltitude::Request& req, SetAltitude::Response& res) { + return serve(_ALTITUDE, NAN, NAN, req.z, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.frame_id, false, res.success, res.message); +} + +bool setYaw(SetYaw::Request& req, SetYaw::Response& res) { + return serve(_YAW, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.frame_id, false, res.success, res.message); +} + +bool setYawRate(SetYawRate::Request& req, SetYawRate::Response& res) { + return serve(_YAW_RATE, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.yaw_rate, NAN, NAN, NAN, NAN, "", false, res.success, res.message); } bool setPosition(SetPosition::Request& req, SetPosition::Response& res) { - return serve(POSITION, req.x, req.y, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, req.yaw_rate, NAN, NAN, NAN, NAN, req.frame_id, req.auto_arm, res.success, res.message); + return serve(POSITION, req.x, req.y, req.z, NAN, NAN, NAN, NAN, NAN, req.yaw, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.frame_id, req.auto_arm, res.success, res.message); } bool setVelocity(SetVelocity::Request& req, SetVelocity::Response& res) { - return serve(VELOCITY, NAN, NAN, NAN, req.vx, req.vy, req.vz, NAN, NAN, req.yaw, NAN, NAN, req.yaw_rate, NAN, NAN, NAN, NAN, req.frame_id, req.auto_arm, res.success, res.message); + return serve(VELOCITY, NAN, NAN, NAN, req.vx, req.vy, req.vz, NAN, NAN, req.yaw, NAN, NAN, NAN, NAN, NAN, 
NAN, NAN, req.frame_id, req.auto_arm, res.success, res.message); } bool setAttitude(SetAttitude::Request& req, SetAttitude::Response& res) { - return serve(ATTITUDE, NAN, NAN, NAN, NAN, NAN, NAN, req.pitch, req.roll, req.yaw, NAN, NAN, NAN, NAN, NAN, req.thrust, NAN, req.frame_id, req.auto_arm, res.success, res.message); + return serve(ATTITUDE, NAN, NAN, NAN, NAN, NAN, NAN, req.roll, req.pitch, req.yaw, NAN, NAN, NAN, NAN, NAN, req.thrust, NAN, req.frame_id, req.auto_arm, res.success, res.message); } bool setRates(SetRates::Request& req, SetRates::Response& res) { - return serve(RATES, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.pitch_rate, req.roll_rate, req.yaw_rate, NAN, NAN, req.thrust, NAN, "", req.auto_arm, res.success, res.message); + return serve(RATES, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, NAN, req.roll_rate, req.pitch_rate, req.yaw_rate, NAN, NAN, req.thrust, NAN, "", req.auto_arm, res.success, res.message); } bool land(std_srvs::Trigger::Request& req, std_srvs::Trigger::Response& res) @@ -840,9 +1048,7 @@ bool land(std_srvs::Trigger::Request& req, std_srvs::Trigger::Response& res) auto start = ros::Time::now(); while (ros::ok()) { if (state.mode == "AUTO.LAND") { - res.success = true; - busy = false; - return true; + break; } if (ros::Time::now() - start > land_timeout) throw std::runtime_error("Land request timed out"); @@ -851,6 +1057,18 @@ bool land(std_srvs::Trigger::Request& req, std_srvs::Trigger::Response& res) r.sleep(); } + // stop setpoints and invalidate position setpoint + setpoint_timer.stop(); + setpoint_type = NONE; + setpoint_position.header.frame_id = ""; + setpoint_altitude.header.frame_id = ""; + yaw_frame_id = ""; + publishState(); + + res.success = true; + busy = false; + return true; + } catch (const std::exception& e) { res.message = e.what(); ROS_INFO("%s", e.what()); @@ -860,6 +1078,18 @@ bool land(std_srvs::Trigger::Request& req, std_srvs::Trigger::Response& res) return false; } +bool release(std_srvs::Trigger::Request& 
req, std_srvs::Trigger::Response& res) +{ + setpoint_timer.stop(); + setpoint_type = NONE; + setpoint_position.header.frame_id = ""; + setpoint_altitude.header.frame_id = ""; + yaw_frame_id = ""; + publishState(); + res.success = true; + return true; +} + int main(int argc, char **argv) { ros::init(argc, argv, "simple_offboard"); @@ -881,6 +1111,8 @@ int main(int argc, char **argv) nh_priv.param("check_kill_switch", check_kill_switch, true); nh_priv.param("default_speed", default_speed, 0.5f); nh_priv.param("body_frame", body.child_frame_id, "body"); + nh_priv.param("terrain_frame", terrain.child_frame_id, "terrain"); + nh_priv.param("terrain_frame_mode", terrain_frame_mode, "altitude"); nh_priv.getParam("reference_frames", reference_frames); // Default reference frames @@ -916,6 +1148,20 @@ int main(int argc, char **argv) auto manual_control_sub = nh.subscribe(mavros + "/manual_control/control", 1, &handleMessage); auto local_position_sub = nh.subscribe(mavros + "/local_position/pose", 1, &handleLocalPosition); + ros::Subscriber altitude_sub; + if (!body.child_frame_id.empty() && !terrain.child_frame_id.empty()) { + terrain.header.frame_id = local_frame; + if (terrain_frame_mode == "altitude") { + altitude_sub = nh.subscribe(mavros + "/altitude", 1, &handleAltitude); + } else if (terrain_frame_mode == "range") { + string range_topic = nh_priv.param("range_topic", string("rangefinder/range")); + altitude_sub = nh.subscribe(range_topic, 1, &handleRange); + } else { + ROS_FATAL("Unknown terrain_frame_mode: %s, valid values: altitude, range", terrain_frame_mode.c_str()); + ros::shutdown(); + } + } + // Setpoint publishers position_pub = nh.advertise(mavros + "/setpoint_position/local", 1); position_raw_pub = nh.advertise(mavros + "/setpoint_raw/local", 1); @@ -924,15 +1170,22 @@ int main(int argc, char **argv) rates_pub = nh.advertise(mavros + "/setpoint_attitude/cmd_vel", 1); thrust_pub = nh.advertise(mavros + "/setpoint_attitude/thrust", 1); + // State publisher + 
state_pub = nh_priv.advertise("state", 1, true); + // Service servers auto gt_serv = nh.advertiseService("get_telemetry", &getTelemetry); auto na_serv = nh.advertiseService("navigate", &navigate); auto ng_serv = nh.advertiseService("navigate_global", &navigateGlobal); + auto sl_serv = nh.advertiseService("set_altitude", &setAltitude); + auto ya_serv = nh.advertiseService("set_yaw", &setYaw); + auto yr_serv = nh.advertiseService("set_yaw_rate", &setYawRate); auto sp_serv = nh.advertiseService("set_position", &setPosition); auto sv_serv = nh.advertiseService("set_velocity", &setVelocity); auto sa_serv = nh.advertiseService("set_attitude", &setAttitude); auto sr_serv = nh.advertiseService("set_rates", &setRates); auto ld_serv = nh.advertiseService("land", &land); + auto rl_serv = nh_priv.advertiseService("release", &release); // Setpoint timer setpoint_timer = nh.createTimer(ros::Duration(1 / nh_priv.param("setpoint_rate", 30.0)), &publishSetpoint, false, false); @@ -940,7 +1193,7 @@ int main(int argc, char **argv) position_msg.header.frame_id = local_frame; position_raw_msg.header.frame_id = local_frame; position_raw_msg.coordinate_frame = PositionTarget::FRAME_LOCAL_NED; - rates_msg.header.frame_id = fcu_frame; + //rates_msg.header.frame_id = fcu_frame; ROS_INFO("ready"); ros::spin(); diff --git a/clover/src/vpe_publisher.cpp b/clover/src/vpe_publisher.cpp index 4a7778d78..4802310d0 100644 --- a/clover/src/vpe_publisher.cpp +++ b/clover/src/vpe_publisher.cpp @@ -11,12 +11,14 @@ #include #include +#include #include #include #include #include #include #include +#include #include #include #include @@ -25,7 +27,7 @@ using std::string; using namespace geometry_msgs; -bool reset_flag = false; +bool reset_flag = true; // offset should be reset on the start string local_frame_id, frame_id, child_frame_id, offset_frame_id; tf2_ros::Buffer tf_buffer; ros::Publisher vpe_pub; @@ -66,6 +68,13 @@ inline Pose getPose(const PoseStampedConstPtr& pose) { return pose->pose; } inline 
Pose getPose(const PoseWithCovarianceStampedConstPtr& pose) { return pose->pose.pose; } +inline void keepYaw(Quaternion& quaternion) +{ + tf::Quaternion q; + q.setRPY(0, 0, tf::getYaw(quaternion)); + tf::quaternionTFToMsg(q, quaternion); +} + template void callback(const T& msg) { @@ -88,10 +97,29 @@ void callback(const T& msg) if (!offset_frame_id.empty()) { if (reset_flag || msg->header.stamp - vpe.header.stamp > offset_timeout) { // calculate the offset - offset = tf_buffer.lookupTransform(local_frame_id, frame_id, - msg->header.stamp, ros::Duration(0.02)); - // offset.header.frame_id = vpe.header.frame_id; - offset.child_frame_id = offset_frame_id; + if (!frame_id.empty()) { + // calculate from TF + offset = tf_buffer.lookupTransform(local_frame_id, frame_id, + msg->header.stamp, ros::Duration(0.02)); + // offset.header.frame_id = vpe.header.frame_id; + offset.child_frame_id = offset_frame_id; + + } else { + // calculate transform between pose in vpe frame and pose in local frame + TransformStamped local_pose = tf_buffer.lookupTransform(local_frame_id, child_frame_id, + msg->header.stamp, ros::Duration(0.02)); + keepYaw(local_pose.transform.rotation); + + tf::Transform vpeTransform, poseTransform; + tf::poseMsgToTF(vpe.pose, vpeTransform); + tf::transformMsgToTF(local_pose.transform, poseTransform); + tf::Transform offset_tf = vpeTransform.inverseTimes(poseTransform); + tf::transformTFToMsg(offset_tf, offset.transform); + offset.header.frame_id = local_frame_id; + offset.header.stamp = msg->header.stamp; + offset.child_frame_id = offset_frame_id; + } + br.sendTransform(offset); reset_flag = false; ROS_INFO("offset reset"); @@ -122,8 +150,9 @@ int main(int argc, char **argv) { tf2_ros::TransformListener tf_listener(tf_buffer); - nh_priv.param("frame_id", frame_id, ""); - nh_priv.param("offset_frame_id", offset_frame_id, ""); + nh_priv.param("frame_id", frame_id, ""); // name for used visual pose frame + nh_priv.param("offset_frame_id", offset_frame_id, ""); // 
name for published offset frame + nh.param("mavros/local_position/frame_id", local_frame_id, "map"); nh.param("mavros/local_position/tf/child_frame_id", child_frame_id, "base_link"); offset_timeout = ros::Duration(nh_priv.param("offset_timeout", 3.0)); diff --git a/clover/src/www b/clover/src/www new file mode 100755 index 000000000..7a66c28e2 --- /dev/null +++ b/clover/src/www @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +export ROSWWW_DEFAULT=clover +rosrun roswww_static update diff --git a/clover/srv/GetTelemetry.srv b/clover/srv/GetTelemetry.srv index b9ae484e5..a5b6e9e7c 100644 --- a/clover/srv/GetTelemetry.srv +++ b/clover/srv/GetTelemetry.srv @@ -13,11 +13,11 @@ float32 alt float32 vx float32 vy float32 vz -float32 pitch float32 roll +float32 pitch float32 yaw -float32 pitch_rate float32 roll_rate +float32 pitch_rate float32 yaw_rate float32 voltage float32 cell_voltage diff --git a/clover/srv/Navigate.srv b/clover/srv/Navigate.srv index f607a437b..7a4943ede 100644 --- a/clover/srv/Navigate.srv +++ b/clover/srv/Navigate.srv @@ -2,7 +2,6 @@ float32 x float32 y float32 z float32 yaw -float32 yaw_rate float32 speed string frame_id bool auto_arm diff --git a/clover/srv/NavigateGlobal.srv b/clover/srv/NavigateGlobal.srv index 69fbc4444..5822f2b0f 100644 --- a/clover/srv/NavigateGlobal.srv +++ b/clover/srv/NavigateGlobal.srv @@ -2,7 +2,6 @@ float64 lat float64 lon float32 z float32 yaw -float32 yaw_rate float32 speed string frame_id bool auto_arm diff --git a/clover/srv/SetAltitude.srv b/clover/srv/SetAltitude.srv new file mode 100644 index 000000000..1b817ec56 --- /dev/null +++ b/clover/srv/SetAltitude.srv @@ -0,0 +1,5 @@ +float32 z +string frame_id +--- +bool success +string message diff --git a/clover/srv/SetAttitude.srv b/clover/srv/SetAttitude.srv index b41c1072b..ef8cf8b26 100644 --- a/clover/srv/SetAttitude.srv +++ b/clover/srv/SetAttitude.srv @@ -1,5 +1,5 @@ -float32 pitch float32 roll +float32 pitch float32 yaw float32 thrust string frame_id diff --git 
a/clover/srv/SetPosition.srv b/clover/srv/SetPosition.srv index 20c73f052..80a01b295 100644 --- a/clover/srv/SetPosition.srv +++ b/clover/srv/SetPosition.srv @@ -2,7 +2,6 @@ float32 x float32 y float32 z float32 yaw -float32 yaw_rate string frame_id bool auto_arm --- diff --git a/clover/srv/SetRates.srv b/clover/srv/SetRates.srv index f6ebddf9b..b284a5c8a 100644 --- a/clover/srv/SetRates.srv +++ b/clover/srv/SetRates.srv @@ -1,5 +1,5 @@ -float32 pitch_rate float32 roll_rate +float32 pitch_rate float32 yaw_rate float32 thrust bool auto_arm diff --git a/clover/srv/SetVelocity.srv b/clover/srv/SetVelocity.srv index 25e174ac4..908be1b84 100644 --- a/clover/srv/SetVelocity.srv +++ b/clover/srv/SetVelocity.srv @@ -2,7 +2,6 @@ float32 vx float32 vy float32 vz float32 yaw -float32 yaw_rate string frame_id bool auto_arm --- diff --git a/clover/srv/SetYaw.srv b/clover/srv/SetYaw.srv new file mode 100644 index 000000000..3e61d323f --- /dev/null +++ b/clover/srv/SetYaw.srv @@ -0,0 +1,5 @@ +float32 yaw +string frame_id +--- +bool success +string message diff --git a/clover/srv/SetYawRate.srv b/clover/srv/SetYawRate.srv new file mode 100644 index 000000000..82a21640b --- /dev/null +++ b/clover/srv/SetYawRate.srv @@ -0,0 +1,4 @@ +float32 yaw_rate +--- +bool success +string message diff --git a/clover/test/basic.py b/clover/test/basic.py index 5d9d6c297..088d6cfed 100755 --- a/clover/test/basic.py +++ b/clover/test/basic.py @@ -3,6 +3,7 @@ import pytest from mavros_msgs.msg import State from clover import srv +import time @pytest.fixture() def node(): @@ -24,6 +25,7 @@ def test_simple_offboard_services_available(): rospy.wait_for_service('set_attitude', timeout=5) rospy.wait_for_service('set_rates', timeout=5) rospy.wait_for_service('land', timeout=5) + rospy.wait_for_service('simple_offboard/release', timeout=5) def test_web_video_server(node): try: @@ -59,3 +61,18 @@ def wait_print(): t.join() assert wait_print.result == 'test' + +def test_long_callback(): + from clover import 
long_callback + from time import sleep + + # very basic test for long_callback + @long_callback + def cb(i): + cb.counter += i + cb.counter = 0 + cb(2) + sleep(0.1) + cb(3) + sleep(1) + assert cb.counter == 5 diff --git a/clover/test/basic.test b/clover/test/basic.test index 3aac7f104..a761f00a9 100755 --- a/clover/test/basic.test +++ b/clover/test/basic.test @@ -37,6 +37,19 @@ + + + + + + + + + + + + diff --git a/clover/test/offboard.py b/clover/test/offboard.py new file mode 100755 index 000000000..02f69e5a1 --- /dev/null +++ b/clover/test/offboard.py @@ -0,0 +1,437 @@ +import rospy +import pytest +from pytest import approx +import threading +import mavros_msgs.msg +from mavros_msgs.srv import SetMode +from geometry_msgs.msg import PoseStamped +from clover import srv +from clover.msg import State +from std_srvs.srv import Trigger +from math import nan, inf +import tf2_ros +import tf2_geometry_msgs + +@pytest.fixture() +def node(): + return rospy.init_node('offboard_test', anonymous=True) + +@pytest.fixture +def tf_buffer(): + buf = tf2_ros.Buffer() + tf2_ros.TransformListener(buf) + return buf + +def get_state(): + return rospy.wait_for_message('/simple_offboard/state', State, timeout=1) + +def get_navigate_target(tf_buffer): + target = tf_buffer.lookup_transform('map', 'navigate_target', rospy.get_rostime(), rospy.Duration(1)) + assert target.child_frame_id == 'navigate_target' + return target + +def test_offboard(node, tf_buffer): + navigate = rospy.ServiceProxy('navigate', srv.Navigate) + set_position = rospy.ServiceProxy('set_position', srv.SetPosition) + set_altitude = rospy.ServiceProxy('set_altitude', srv.SetAltitude) + set_yaw = rospy.ServiceProxy('set_yaw', srv.SetYaw) + set_yaw_rate = rospy.ServiceProxy('set_yaw_rate', srv.SetYawRate) + set_velocity = rospy.ServiceProxy('set_velocity', srv.SetVelocity) + set_attitude = rospy.ServiceProxy('set_attitude', srv.SetAttitude) + set_rates = rospy.ServiceProxy('set_rates', srv.SetRates) + get_telemetry = 
rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) + land = rospy.ServiceProxy('land', Trigger) + + res = navigate() + assert res.success == False + assert res.message.startswith('State timeout') + + telem = get_telemetry() + assert telem.connected == False + + # mocked state publisher + state_pub = rospy.Publisher('/mavros/state', mavros_msgs.msg.State, latch=True, queue_size=1) + state_msg = mavros_msgs.msg.State(mode='OFFBOARD', armed=True) + + def publish_state(): + r = rospy.Rate(2) + while not rospy.is_shutdown(): + state_msg.header.stamp = rospy.Time.now() + state_pub.publish(state_msg) + r.sleep() + + # start publishing state + threading.Thread(target=publish_state, daemon=True).start() + rospy.sleep(0.5) + + # set_mode service mock + def set_mode(req): + state_msg.mode = req.custom_mode # set mocked mode to requested + return True, + + rospy.Service('/mavros/set_mode', SetMode, set_mode) + + telem = get_telemetry() + assert telem.connected == False + + res = navigate() + assert res.success == False + assert res.message.startswith('No connection to FCU') + + state_msg.connected = True + rospy.sleep(1) + + telem = get_telemetry() + assert telem.connected == True + + res = navigate() + assert res.success == False + assert res.message.startswith('No local position') + + local_position_pub = rospy.Publisher('/mavros/local_position/pose', PoseStamped, latch=True, queue_size=1) + local_position_msg = PoseStamped() + local_position_msg.header.frame_id = 'map' + local_position_msg.pose.position.x = 1 + local_position_msg.pose.position.y = 2 + local_position_msg.pose.position.z = 3 + local_position_msg.pose.orientation.w = 1 + + def publish_local_position(): + r = rospy.Rate(30) + while not rospy.is_shutdown(): + local_position_msg.header.stamp = rospy.Time.now() + local_position_pub.publish(local_position_msg) + r.sleep() + + # start publishing local position + threading.Thread(target=publish_local_position, daemon=True).start() + rospy.sleep(0.5) + + # check 
body frame + body = tf_buffer.lookup_transform('map', 'body', rospy.get_rostime(), rospy.Duration(1)) + assert body.child_frame_id == 'body' + assert body.transform.translation.x == approx(1) + assert body.transform.translation.y == approx(2) + assert body.transform.translation.z == approx(3) + + res = navigate(x=3, y=2, z=1, frame_id='map') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 3 + assert state.y == 2 + assert state.z == 1 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + target = get_navigate_target(tf_buffer) + assert target.header.frame_id == 'map' + assert target.transform.translation.x == approx(3) + assert target.transform.translation.y == approx(2) + assert target.transform.translation.z == approx(1) + assert target.transform.rotation.x == 0 + assert target.transform.rotation.y == 0 + assert target.transform.rotation.z == 0 + assert target.transform.rotation.w == 1 + + # try to set only the y + res = navigate(x=nan, y=1, z=nan) + assert res.success == False + assert res.message.startswith('x and y can be set only together') + + # set z in body frame + res = navigate(x=nan, y=nan, z=1, frame_id='body') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 3 + assert state.y == 2 + assert state.z == 4 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + + # set xy in test frame + res = navigate(x=1, y=2, z=nan, frame_id='test') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 1 + assert state.y == 2 + assert state.z == 4 + assert state.yaw == 0 + assert 
state.xy_frame_id == 'test' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'test' + + # auto_arm should not invalidate the setpoint if not effective + res = navigate(x=nan, y=nan, z=1, frame_id='map', auto_arm=True) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 1 + assert state.y == 2 + assert state.z == 1 + assert state.yaw == 0 + assert state.xy_frame_id == 'test' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + + # auto_arm should invalidate the setpoint if effective + state_msg.mode = 'STABILIZED' # pretend we are not in OFFBOARD mode + rospy.sleep(1) + res = navigate(x=nan, y=nan, z=1, frame_id='map', auto_arm=True) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 1 + assert state.y == 2 + assert state.z == 1 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + state_msg.mode = 'OFFBOARD' + rospy.sleep(1) + + # set_attitude should invalidate the setpoint + res = set_attitude() + assert res.success == True + + res = navigate(x=5, y=6, z=nan, yaw=nan, frame_id='map') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 3 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + + # test set_altitude + res = set_altitude(z=7, frame_id='test') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 7 + assert state.yaw == 0 + assert state.xy_frame_id == 
'map' + assert state.z_frame_id == 'test' + assert state.yaw_frame_id == 'map' + + # test set_yaw + res = set_yaw(yaw=0.5, frame_id='test2') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 7 + assert state.yaw == 0.5 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test' + assert state.yaw_frame_id == 'test2' + + # test set_yaw_rate + res = set_yaw_rate(yaw_rate=2) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.x == 5 + assert state.y == 6 + assert state.z == 7 + assert state.yaw_rate == 2 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test' + + # navigate(yaw=nan) should keep yaw rate mode + res = navigate(x=nan, y=nan, z=nan, yaw=nan) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.x == 5 + assert state.y == 6 + assert state.z == 7 + assert state.yaw_rate == 2 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test' + + # set_yaw(nan) should change back to yaw mode + res = set_yaw(yaw=nan) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_NAVIGATE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.yaw == 0 + assert state.yaw_frame_id == 'map' + + # test set_position + res = set_position(x=nan, y=nan, z=13, yaw=nan, frame_id='test2') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_POSITION + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 13 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test2' + assert state.yaw_frame_id == 'map' + + # set_altitude should 
not change the mode + res = set_altitude(z=3, frame_id='test') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_POSITION + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 3 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test' + assert state.yaw_frame_id == 'map' + + # set_yaw should not change the main mode + res = set_yaw(yaw=1, frame_id='test2') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_POSITION + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.x == 5 + assert state.y == 6 + assert state.z == 3 + assert state.yaw == 1 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'test' + assert state.yaw_frame_id == 'test2' + + # test set_velocity + res = set_velocity(vx=1, frame_id='body') + state = get_state() + assert state.mode == State.MODE_VELOCITY + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.vx == 1 + assert state.vy == 0 + assert state.vz == 0 + assert state.yaw == 0 + assert state.xy_frame_id == 'map' + assert state.z_frame_id == 'map' + assert state.yaw_frame_id == 'map' + + # set_altitude should not work in velocity mode + res = set_altitude(z=3, frame_id='test') + assert res.success == False + assert res.message.startswith('Altitude cannot be set in') + + # test set_attitude + res = set_attitude(roll=0.1, pitch=0.2, yaw=0.3, thrust=0.5) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_ATTITUDE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.roll == approx(0.1) + assert state.pitch == approx(0.2) + assert state.yaw == approx(0.3) + assert state.thrust == approx(0.5) + assert state.yaw_frame_id == 'map' + msg = rospy.wait_for_message('/mavros/setpoint_attitude/attitude', PoseStamped, timeout=3) + # Tait-Bryan ZYX angle (rzyx) converted to quaternion + assert msg.pose.orientation.x == 
approx(0.0342708) + assert msg.pose.orientation.y == approx(0.10602051) + assert msg.pose.orientation.z == approx(0.14357218) + assert msg.pose.orientation.w == approx(0.98334744) + msg = rospy.wait_for_message('/mavros/setpoint_attitude/thrust', mavros_msgs.msg.Thrust, timeout=3) + assert msg.thrust == approx(0.5) + + # set_yaw should work in attitude mode + res = set_yaw(yaw=0.7, frame_id='test2') + assert res.success == True + state = get_state() + assert state.mode == State.MODE_ATTITUDE + assert state.yaw_mode == State.YAW_MODE_YAW + assert state.roll == approx(0.1) + assert state.pitch == approx(0.2) + assert state.yaw == approx(0.7) + assert state.thrust == approx(0.5) + assert state.yaw_frame_id == 'test2' + + # set_yaw_rate should not work in attitude mode + res = set_yaw_rate(yaw_rate=0.3) + assert res.success == False + assert res.message.startswith('Yaw rate cannot be set in') + + # test set_rates + res = set_rates(roll_rate=nan, pitch_rate=nan, yaw_rate=0.3, thrust=0.6) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_RATES + assert state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.roll_rate == approx(0) + assert state.pitch_rate == approx(0) + assert state.yaw_rate == approx(0.3) + assert state.thrust == approx(0.6) + msg = rospy.wait_for_message('/mavros/setpoint_raw/attitude', mavros_msgs.msg.AttitudeTarget, timeout=3) + assert msg.thrust == approx(0.6) + + res = set_rates(roll_rate=0.3, pitch_rate=0.2, yaw_rate=0.1, thrust=0.4) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_RATES + assert state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.roll_rate == approx(0.3) + assert state.pitch_rate == approx(0.2) + assert state.yaw_rate == approx(0.1) + assert state.thrust == approx(0.4) + + res = set_rates(roll_rate=nan, pitch_rate=nan, yaw_rate=nan, thrust=0.3) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_RATES + assert 
state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.roll_rate == approx(0.3) + assert state.pitch_rate == approx(0.2) + assert state.yaw_rate == approx(0.1) + assert state.thrust == approx(0.3) + msg = rospy.wait_for_message('/mavros/setpoint_raw/attitude', mavros_msgs.msg.AttitudeTarget, timeout=3) + assert msg.type_mask == mavros_msgs.msg.AttitudeTarget.IGNORE_ATTITUDE + assert msg.body_rate.x == approx(0.3) + assert msg.body_rate.y == approx(0.2) + assert msg.body_rate.z == approx(0.1) + + # set_yaw_rate should work in rates mode + res = set_yaw_rate(yaw_rate=0.4) + assert res.success == True + state = get_state() + assert state.mode == State.MODE_RATES + assert state.yaw_mode == State.YAW_MODE_YAW_RATE + assert state.roll_rate == approx(0.3) + assert state.pitch_rate == approx(0.2) + assert state.yaw_rate == approx(0.4) + assert state.thrust == approx(0.3) + + res = set_rates(roll_rate=inf) + assert res.success == False + assert res.message == 'roll_rate argument cannot be Inf' + + # test land service + res = land() + assert res.success == True + assert state_msg.mode == 'AUTO.LAND' # check that the mode was set correctly diff --git a/clover/test/offboard.test b/clover/test/offboard.test new file mode 100644 index 000000000..470fe4fea --- /dev/null +++ b/clover/test/offboard.test @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/clover/udev/99-px4fmu.rules b/clover/udev/99-px4fmu.rules index 7e430b7d9..6fcef0984 100644 --- a/clover/udev/99-px4fmu.rules +++ b/clover/udev/99-px4fmu.rules @@ -1,17 +1,54 @@ -# PixHawk (px4fmu-v2), px4fmu-v3 +# Source files: PX4-Autopilot/boards/**/nuttx-config/nsh/defconfig + +# Additional info: +# https://docs.px4.io/main/en/flight_controller/ +# https://github.com/mavlink/qgroundcontrol/blob/master/src/comm/USBBoardInfo.json + +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0001", ATTRS{product}=="PX4 GNF405", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0001", 
ATTRS{product}=="PX4 OmnibusF4SD", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0016", ATTRS{product}=="PX4 Crazyflie v2.0", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1FC9", ATTRS{idProduct}=="001c", ATTRS{product}=="PX4 FMUK66 v3.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1FC9", ATTRS{idProduct}=="001c", ATTRS{product}=="PX4 FMUK66 E", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1FC9", ATTRS{idProduct}=="001d", ATTRS{product}=="PX4 FMURT1062 v1.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0001", ATTRS{product}=="DiatoneMambaF405 MK2", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="a32f", ATTRS{product}=="PX4 FMU ModalAI FCv1", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="a330", ATTRS{product}=="PX4 FMU ModalAI FCv2", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0012", ATTRS{product}=="PX4 FMU UVify Core", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="0050", ATTRS{product}=="PX4 KakuteH7", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="0050", ATTRS{product}=="PX4 KakuteH7v2", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="004b", ATTRS{product}=="PX4 DurandalV1", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0050", ATTRS{product}=="PX4 KakuteF7", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="0050", ATTRS{product}=="PX4 KakuteH7Mini-nand", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="004E", ATTRS{product}=="PX4 PIX32V5", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0061", ATTRS{product}=="PX4 ATL Mantis-EDU", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3163", ATTRS{idProduct}=="004c", 
ATTRS{product}=="PX4 CUAV Nora", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3163", ATTRS{idProduct}=="004c", ATTRS{product}=="PX4 CUAV X7Pro", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1B8C", ATTRS{idProduct}=="0036", ATTRS{product}=="MatekH743-mini", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1B8C", ATTRS{idProduct}=="0036", ATTRS{product}=="MatekH743", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="120A", ATTRS{idProduct}=="1004", ATTRS{product}=="Matekgnssm9nf4", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1209", ATTRS{idProduct}=="1013", ATTRS{product}=="MatekH743", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="0037", ATTRS{product}=="PX4 FMU SmartAP AIRLink", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="2DAE", ATTRS{idProduct}=="1058", ATTRS{product}=="CubeOrange+", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="2DAE", ATTRS{idProduct}=="1012", ATTRS{product}=="CubeYellow", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="2DAE", ATTRS{idProduct}=="1016", ATTRS{product}=="CubeOrange", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3185", ATTRS{idProduct}=="0035", ATTRS{product}=="PX4 FMU v6X.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3185", ATTRS{idProduct}=="0038", ATTRS{product}=="PX4 FMU v6C.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3185", ATTRS{idProduct}=="0033", ATTRS{product}=="PX4 FMU v5X.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="1B8C", ATTRS{idProduct}=="0036", ATTRS{product}=="PX4 FMU v6U.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0013", ATTRS{product}=="PX4 FMU v4.x PRO", SYMLINK+="px4fmu" SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0011", ATTRS{product}=="PX4 FMU v2.x", SYMLINK+="px4fmu" -# PixRacer (px4fmu-v4) SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0012", ATTRS{product}=="PX4 FMU v4.x", 
SYMLINK+="px4fmu" -# px4fmu-v5 SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0032", ATTRS{product}=="PX4 FMU v5.x", SYMLINK+="px4fmu" -# auav -SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0021", ATTRS{product}=="PX4 AUAV x2.1", SYMLINK+="px4fmu" -# crazyflie -SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0016", ATTRS{product}=="PX4 Crazyflie v2.0", SYMLINK+="px4fmu" -# px4fmu-v4pro -SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0013", ATTRS{product}=="PX4 FMU v4.x PRO", SYMLINK+="px4fmu" -# Omnibus -SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0001", ATTRS{product}=="PX4 OmnibusF4SD", SYMLINK+="px4fmu" -# CUAV X7 Pro -SUBSYSTEM=="tty", ATTRS{idVendor}=="3163", ATTRS{idProduct}=="004c", ATTRS{product}=="PX4 CUAV X7Pro", SYMLINK+="px4fmu" - +SUBSYSTEM=="tty", ATTRS{idVendor}=="3162", ATTRS{idProduct}=="004b", ATTRS{product}=="PX4 SP RACING H7 EXTREME", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0030", ATTRS{product}=="MindPX FMU v2.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="3185", ATTRS{idProduct}=="0039", ATTRS{product}=="ARK FMU v6X.x", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0016", ATTRS{product}=="PX4 FreeFly RTK GPS", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="1024", ATTRS{product}=="mRoControlZeroH7 OEM", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="1017", ATTRS{product}=="mRoPixracerPro", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="1023", ATTRS{product}=="mRoControlZeroH7", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="008D", ATTRS{product}=="mRoControlZeroF7", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0021", ATTRS{product}=="PX4 AUAV X2.1", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", 
ATTRS{idProduct}=="1022", ATTRS{product}=="mRoControlZero Classic", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="26ac", ATTRS{idProduct}=="0088", ATTRS{product}=="mRo x2.1-777", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="35a7", ATTRS{idProduct}=="0002", ATTRS{product}=="FCC-R1", SYMLINK+="px4fmu" +SUBSYSTEM=="tty", ATTRS{idVendor}=="35a7", ATTRS{idProduct}=="0001", ATTRS{product}=="FCC-K1", SYMLINK+="px4fmu" diff --git a/clover_blocks/README.md b/clover_blocks/README.md index f00cb86c9..51d2dfaa9 100644 --- a/clover_blocks/README.md +++ b/clover_blocks/README.md @@ -47,6 +47,7 @@ http:///clover_blocks/?navigate_tolerance=0.5&sleep_time=0.1 * `~running` ([*std_msgs/Bool*](http://docs.ros.org/noetic/api/std_msgs/html/msg/Bool.html)) – indicates if the program is currently running. * `~block` ([*std_msgs/String*](http://docs.ros.org/noetic/api/std_msgs/html/msg/String.html)) – current executing block (maximum topic rate is limited). +* `~print` ([*std_msgs/String*](http://docs.ros.org/noetic/api/std_msgs/html/msg/String.html)) – user program output messages (published in *print* blocks). * `~error` ([*std_msgs/String*](http://docs.ros.org/noetic/api/std_msgs/html/msg/String.html)) – user program errors and exceptions. * `~prompt` ([*clover_blocks/Prompt*](msg/Prompt.msg)) – user input request (includes random request ID string). 
diff --git a/clover_blocks/package.xml b/clover_blocks/package.xml index 384d75f95..0423c4565 100644 --- a/clover_blocks/package.xml +++ b/clover_blocks/package.xml @@ -1,7 +1,7 @@ clover_blocks - 0.23.0 + 0.24.0 Blockly programming support for Clover Oleg Kalachev MIT diff --git a/clover_blocks/www/blocks.js b/clover_blocks/www/blocks.js index b10e315ff..1ddb528f9 100644 --- a/clover_blocks/www/blocks.js +++ b/clover_blocks/www/blocks.js @@ -15,6 +15,7 @@ const COLOR_GPIO = 200; const DOCS_URL = 'https://clover.coex.tech/en/blocks.html'; var frameIds = [["body", "BODY"], ["markers map", "ARUCO_MAP"], ["marker", "ARUCO"], ["last navigate target", "NAVIGATE_TARGET"], ["map", "MAP"]]; +var frameIdsWithTerrain = frameIds.concat([["terrain", "TERRAIN"]]); function considerFrameId(e) { if (!(e instanceof Blockly.Events.Change || e instanceof Blockly.Events.Create)) return; @@ -22,7 +23,7 @@ function considerFrameId(e) { var frameId = this.getFieldValue('FRAME_ID'); // set appropriate coordinates labels if (this.getInput('X')) { // block has x-y-z fields - if (frameId == 'BODY' || frameId == 'NAVIGATE_TARGET' || frameId == 'BASE_LINK') { + if (frameId == 'BODY' || frameId == 'NAVIGATE_TARGET' || frameId == 'BASE_LINK' || frameId == 'TERRAIN') { this.getInput('X').fieldRow[0].setValue('forward'); this.getInput('Y').fieldRow[0].setValue('left'); this.getInput('Z').fieldRow[0].setValue('up'); @@ -59,8 +60,8 @@ function updateSetpointBlock(e) { this.getInput('VY').setVisible(velocity); this.getInput('VZ').setVisible(velocity); this.getInput('YAW').setVisible(attitude); - this.getInput('PITCH').setVisible(attitude); this.getInput('ROLL').setVisible(attitude); + this.getInput('PITCH').setVisible(attitude); this.getInput('THRUST').setVisible(attitude); this.getInput('RELATIVE_TO').setVisible(type != 'RATES'); @@ -73,7 +74,7 @@ function updateSetpointBlock(e) { Blockly.Blocks['navigate'] = { init: function () { - let navFrameId = frameIds.slice(); + let navFrameId = 
frameIdsWithTerrain.slice(); navFrameId.push(['global', 'GLOBAL_LOCAL']) navFrameId.push(['global, WGS 84 alt.', 'GLOBAL']) this.appendDummyInput() @@ -163,14 +164,14 @@ Blockly.Blocks['setpoint'] = { this.appendValueInput("VZ") .setCheck("Number") .appendField("vz"); - this.appendValueInput("PITCH") - .setCheck("Number") - .appendField("pitch") - .setVisible(false); this.appendValueInput("ROLL") .setCheck("Number") .appendField("roll") .setVisible(false); + this.appendValueInput("PITCH") + .setCheck("Number") + .appendField("pitch") + .setVisible(false); this.appendValueInput("YAW") .setCheck("Number") .appendField("yaw") @@ -213,7 +214,7 @@ Blockly.Blocks['get_position'] = { .appendField("current") .appendField(new Blockly.FieldDropdown([["x", "X"], ["y", "Y"], ["z", "Z"], ["vx", "VX"], ["vy", "VY"], ["vz", "VZ"]]), "FIELD") .appendField("relative to") - .appendField(new Blockly.FieldDropdown(frameIds), "FRAME_ID"); + .appendField(new Blockly.FieldDropdown(frameIdsWithTerrain), "FRAME_ID"); this.appendValueInput("ID") .setCheck("Number") .appendField("with ID") @@ -247,7 +248,7 @@ Blockly.Blocks['get_attitude'] = { init: function () { this.appendDummyInput() .appendField("current") - .appendField(new Blockly.FieldDropdown([["pitch", "PITCH"], ["roll", "ROLL"], ["pitch rate", "PITCH_RATE"], ["roll rate", "ROLL_RATE"], ["yaw rate", "YAW_RATE"]]), "FIELD"); + .appendField(new Blockly.FieldDropdown([["roll", "ROLL"], ["pitch", "PITCH"], ["roll rate", "ROLL_RATE"], ["pitch rate", "PITCH_RATE"], ["yaw rate", "YAW_RATE"]]), "FIELD"); this.setOutput(true, "Number"); this.setColour(COLOR_STATE); this.setTooltip("Returns current orientation or angle rates in degree or degree per second (not radian)."); @@ -268,6 +269,19 @@ Blockly.Blocks['voltage'] = { } }; +Blockly.Blocks['get_rc'] = { + init: function () { + this.appendDummyInput() + .appendField("RC channel") + this.appendValueInput("CHANNEL") + .setCheck("Number"); + this.setInputsInline(true); + this.setOutput(true, 
"Number"); + this.setColour(COLOR_STATE); + this.setTooltip("Returns current RC channel value."); + this.setHelpUrl(DOCS_URL + '#' + this.type); + } +} Blockly.Blocks['armed'] = { init: function () { @@ -509,7 +523,7 @@ Blockly.Blocks['distance'] = { .appendField("z"); this.appendDummyInput() .appendField("relative to") - .appendField(new Blockly.FieldDropdown([["markers map", "ARUCO_MAP"], ["marker", "ARUCO"], ["last navigate target", "NAVIGATE_TARGET"]]), "FRAME_ID"); + .appendField(new Blockly.FieldDropdown([["markers map", "ARUCO_MAP"], ["marker", "ARUCO"], ["last navigate target", "NAVIGATE_TARGET"], ["terrain", "TERRAIN"]]), "FRAME_ID"); this.appendValueInput("ID") .setCheck("Number") .appendField("with ID") diff --git a/clover_blocks/www/index.html b/clover_blocks/www/index.html index 8f394198a..dfc85186b 100644 --- a/clover_blocks/www/index.html +++ b/clover_blocks/www/index.html @@ -69,8 +69,8 @@ 0 0 0 - 0 0 + 0 0 0.5 0 @@ -100,6 +100,9 @@ + + 0 + diff --git a/clover_blocks/www/python.js b/clover_blocks/www/python.js index 1589cfdf3..bc0ac0ba7 100644 --- a/clover_blocks/www/python.js +++ b/clover_blocks/www/python.js @@ -81,7 +81,10 @@ function generateROSDefinitions() { code += `get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry)\n`; code += `navigate = rospy.ServiceProxy('navigate', srv.Navigate)\n`; if (rosDefinitions.navigateGlobal) { - code += `navigate_global = rospy.ServiceProxy('navigate_global', srv.NavigateGlobal)\n`; + code += `navigate_global = rospy.ServiceProxy('navigate_global', srv.NavigateGlobal)\n`; + } + if (rosDefinitions.setYaw) { + code += `set_yaw = rospy.ServiceProxy('set_yaw', srv.SetYaw)\n`; } if (rosDefinitions.setVelocity) { code += `set_velocity = rospy.ServiceProxy('set_velocity', srv.SetVelocity)\n`; @@ -276,10 +279,11 @@ Blockly.Python.angle = function(block) { } Blockly.Python.set_yaw = function(block) { + rosDefinitions.setYaw = true; simpleOffboard(); let yaw = Blockly.Python.valueToCode(block, 'YAW', 
Blockly.Python.ORDER_NONE); let frameId = buildFrameId(block); - let code = `navigate(x=float('nan'), y=float('nan'), z=float('nan'), yaw=${yaw}, frame_id=${frameId})\n`; + let code = `set_yaw(yaw=${yaw}, frame_id=${frameId})\n`; if (block.getFieldValue('WAIT') == 'TRUE') { rosDefinitions.waitYaw = true; simpleOffboard(); @@ -328,11 +332,11 @@ Blockly.Python.setpoint = function(block) { } else if (type == 'ATTITUDE') { rosDefinitions.setAttitude = true; simpleOffboard(); - return `set_attitude(pitch=${pitch}, roll=${roll}, yaw=${yaw}, thrust=${thrust}, frame_id=${frameId})\n`; + return `set_attitude(roll=${roll}, pitch=${pitch}, yaw=${yaw}, thrust=${thrust}, frame_id=${frameId})\n`; } else if (type == 'RATES') { rosDefinitions.setRates = true; simpleOffboard(); - return `set_rates(pitch_rate=${pitch}, roll_rate=${roll}, yaw_rate=${yaw}, thrust=${thrust})\n`; + return `set_rates(roll_rate=${roll}, pitch_rate=${pitch}, yaw_rate=${yaw}, thrust=${thrust})\n`; } } @@ -398,6 +402,12 @@ Blockly.Python.voltage = function(block) { return [code, Blockly.Python.ORDER_FUNCTION_CALL]; } +Blockly.Python.get_rc = function(block) { + Blockly.Python.definitions_['import_rcin'] = 'from mavros_msgs.msg import RCIn'; + var channel = Blockly.Python.valueToCode(block, 'CHANNEL', Blockly.Python.ORDER_NONE); + return [`rospy.wait_for_message('mavros/rc/in', RCIn).channels[${channel}]`, Blockly.Python.ORDER_FUNCTION_CALL] +} + function parseColor(color) { return { r: parseInt(color.substr(2, 2), 16), diff --git a/clover_description/package.xml b/clover_description/package.xml index 1e5db705d..b5defff32 100644 --- a/clover_description/package.xml +++ b/clover_description/package.xml @@ -1,6 +1,6 @@ clover_description - 0.23.0 + 0.24.0 The clover_description package provides URDF models of the Clover series of quadcopters. 
Alexey Rogachevskiy diff --git a/clover_description/urdf/sensors/distance_sensor.urdf.xacro b/clover_description/urdf/sensors/distance_sensor.urdf.xacro index 4b539a29e..591c2cfd5 100644 --- a/clover_description/urdf/sensors/distance_sensor.urdf.xacro +++ b/clover_description/urdf/sensors/distance_sensor.urdf.xacro @@ -2,7 +2,7 @@ - + @@ -58,7 +58,7 @@ /rangefinder/range rangefinder infrared - 0.01 + ${fov} 0.001 ${rate} ${range_min} diff --git a/clover_simulation/airframes/4500_clover b/clover_simulation/airframes/4500_clover index 0039c50d0..57c7ca81d 100644 --- a/clover_simulation/airframes/4500_clover +++ b/clover_simulation/airframes/4500_clover @@ -31,7 +31,7 @@ param set-default EKF2_OF_DELAY 0 param set-default EKF2_OF_QMIN 10 param set-default EKF2_OF_N_MIN 0.05 param set-default EKF2_OF_N_MAX 0.2 -param set-default EKF2_HGT_MODE 2 # 0 = baro, 1 = gps, 2 = range, 3 = vision +param set-default EKF2_HGT_MODE 3 # 0 = baro, 1 = gps, 2 = range, 3 = vision param set-default EKF2_EVA_NOISE 0.1 param set-default EKF2_EVP_NOISE 0.1 param set-default EKF2_EV_DELAY 0 diff --git a/clover_simulation/models/red_circle/materials/scripts/red_circle.material b/clover_simulation/models/red_circle/materials/scripts/red_circle.material new file mode 100644 index 000000000..9741c598f --- /dev/null +++ b/clover_simulation/models/red_circle/materials/scripts/red_circle.material @@ -0,0 +1,16 @@ +material red_circle +{ + technique + { + pass + { + scene_blend alpha_blend + texture_unit + { + texture red_circle.png + filtering none + scale 1.0 1.0 + } + } + } +} diff --git a/clover_simulation/models/red_circle/materials/textures/red_circle.png b/clover_simulation/models/red_circle/materials/textures/red_circle.png new file mode 100644 index 000000000..66c963f09 Binary files /dev/null and b/clover_simulation/models/red_circle/materials/textures/red_circle.png differ diff --git a/clover_simulation/models/red_circle/model.config b/clover_simulation/models/red_circle/model.config new 
file mode 100644 index 000000000..87e956009 --- /dev/null +++ b/clover_simulation/models/red_circle/model.config @@ -0,0 +1,13 @@ + + + Red Circle + 1.0 + red_circle.sdf + + Oleg Kalachev + okalachev@gmail.com + + + Red circle of size 40 cm on the floor for recognizing by a drone + + diff --git a/clover_simulation/models/red_circle/red_circle.sdf b/clover_simulation/models/red_circle/red_circle.sdf new file mode 100644 index 000000000..caf0008e4 --- /dev/null +++ b/clover_simulation/models/red_circle/red_circle.sdf @@ -0,0 +1,24 @@ + + + + true + + 0 0 1e-3 0 0 0 + + false + + + 0.4 0.4 1e-3 + + + + + + + + + diff --git a/clover_simulation/models/red_circle/red_circle.svg b/clover_simulation/models/red_circle/red_circle.svg new file mode 100644 index 000000000..3321ece0f --- /dev/null +++ b/clover_simulation/models/red_circle/red_circle.svg @@ -0,0 +1,7 @@ + + + + red_circle + + + diff --git a/clover_simulation/package.xml b/clover_simulation/package.xml index a7d27462f..65885cc4b 100644 --- a/clover_simulation/package.xml +++ b/clover_simulation/package.xml @@ -1,6 +1,6 @@ - + clover_simulation - 0.23.0 + 0.24.0 The clover_simulation package provides worlds and launch files for Gazebo. Oleg Kalachev @@ -22,6 +22,8 @@ gazebo_ros gazebo_plugins rospy + python-docopt + python3-docopt diff --git a/clover_simulation/src/sim_leds.cpp b/clover_simulation/src/sim_leds.cpp index fb188af7b..f281d99ae 100644 --- a/clover_simulation/src/sim_leds.cpp +++ b/clover_simulation/src/sim_leds.cpp @@ -65,7 +65,8 @@ class LedController } role = (ros::this_node::getName() == "/gazebo") ? Role::Server : Role::Client; - ROS_INFO_NAMED(("LedController_" + robotNamespace).c_str(), "LedController has started (as %s)", role == Role::Client ? "client" : "server"); + ROS_INFO_NAMED(("LedController_" + robotNamespace).c_str(), "LedController has started (as %s) in namespace '%s'", + role == Role::Client ? 
"client" : "server", robotNamespace.c_str()); nh.reset(new ros::NodeHandle(robotNamespace)); @@ -109,7 +110,6 @@ LedController& get(std::string robotNamespace) std::lock_guard lock(controllerMutex); auto it = controllers.find(robotNamespace); if (it == controllers.end()) { - gzwarn << "Creating new LED controller for namespace " << robotNamespace << "\n"; controllers[robotNamespace].reset(new LedController(robotNamespace)); return *controllers[robotNamespace]; } diff --git a/docs/assets/ftl/acp_workflow1.png b/docs/assets/ftl/acp_workflow1.png new file mode 100644 index 000000000..45d15f974 Binary files /dev/null and b/docs/assets/ftl/acp_workflow1.png differ diff --git a/docs/assets/ftl/acp_workflow2.png b/docs/assets/ftl/acp_workflow2.png new file mode 100644 index 000000000..7f098c736 Binary files /dev/null and b/docs/assets/ftl/acp_workflow2.png differ diff --git a/docs/assets/ftl/acp_workflow3.png b/docs/assets/ftl/acp_workflow3.png new file mode 100644 index 000000000..09eb7fbab Binary files /dev/null and b/docs/assets/ftl/acp_workflow3.png differ diff --git a/docs/assets/ftl/acp_workflow4.png b/docs/assets/ftl/acp_workflow4.png new file mode 100644 index 000000000..a954e8a1e Binary files /dev/null and b/docs/assets/ftl/acp_workflow4.png differ diff --git a/docs/assets/mocap_clover/block_ROS.jpg b/docs/assets/mocap_clover/block_ROS.jpg new file mode 100644 index 000000000..4813abdc1 Binary files /dev/null and b/docs/assets/mocap_clover/block_ROS.jpg differ diff --git a/docs/assets/mocap_clover/block_udp.jpg b/docs/assets/mocap_clover/block_udp.jpg new file mode 100644 index 000000000..3113fd6e8 Binary files /dev/null and b/docs/assets/mocap_clover/block_udp.jpg differ diff --git a/docs/assets/mocap_clover/drone_approach_small.jpg b/docs/assets/mocap_clover/drone_approach_small.jpg new file mode 100644 index 000000000..9532a16fb Binary files /dev/null and b/docs/assets/mocap_clover/drone_approach_small.jpg differ diff --git 
a/docs/assets/mocap_clover/px4_control_structure.jpg b/docs/assets/mocap_clover/px4_control_structure.jpg new file mode 100644 index 000000000..ab055aa4e Binary files /dev/null and b/docs/assets/mocap_clover/px4_control_structure.jpg differ diff --git a/docs/assets/mocap_clover/semi_logo_small.jpg b/docs/assets/mocap_clover/semi_logo_small.jpg new file mode 100644 index 000000000..cfc957e7d Binary files /dev/null and b/docs/assets/mocap_clover/semi_logo_small.jpg differ diff --git a/docs/assets/stl/grip_left.stl b/docs/assets/stl/grip_left.stl new file mode 100644 index 000000000..a8a65687c Binary files /dev/null and b/docs/assets/stl/grip_left.stl differ diff --git a/docs/assets/stl/grip_right.stl b/docs/assets/stl/grip_right.stl new file mode 100644 index 000000000..5f802c0b6 Binary files /dev/null and b/docs/assets/stl/grip_right.stl differ diff --git a/docs/assets/swarm_in_blocks_2/capa_swarm_23_banner.png b/docs/assets/swarm_in_blocks_2/capa_swarm_23_banner.png new file mode 100644 index 000000000..6903afa7d Binary files /dev/null and b/docs/assets/swarm_in_blocks_2/capa_swarm_23_banner.png differ diff --git a/docs/en/SUMMARY.md b/docs/en/SUMMARY.md index ab2c3239e..d0fc7ce7e 100644 --- a/docs/en/SUMMARY.md +++ b/docs/en/SUMMARY.md @@ -36,7 +36,7 @@ * [Optical Flow](optical_flow.md) * [Autonomous flight (OFFBOARD)](simple_offboard.md) * [Coordinate systems (frames)](frames.md) - * [Code snippets](snippets.md) + * [Code examples](snippets.md) * [Interfacing with a laser rangefinder](laser.md) * [LED strip](leds.md) * [Working with GPIO](gpio.md) @@ -105,6 +105,12 @@ * [Video contest](video_contest.md) * [Educational contests](educational_contests.md) * [Clover-based projects](projects.md) + * [Clover Cloud Platform](clover-cloud-platform.md) + * [Autonomous Racing Drone](djs_phoenix_chetak.md) + * [Motion Capture System](mocap_clover.md) + * [Swarm in Blocks 2](swarm_in_blocks_2.md) + * [Advanced Clover 2](advanced_clover_simulator_platform.md) + * [Network of 
charging stations](liceu128.md) * [Swarm-in-blocks](swarm_in_blocks.md) * [Obstacle avoidance using artificial potential fields method](obstacle-avoidance-potential-fields.md) * [The Clover Rescue Project](clover-rescue-team.md) diff --git a/docs/en/advanced_clover_simulator_platform.md b/docs/en/advanced_clover_simulator_platform.md new file mode 100644 index 000000000..0647e3b94 --- /dev/null +++ b/docs/en/advanced_clover_simulator_platform.md @@ -0,0 +1,161 @@ +# Advanced Clover 3: The Platform + +[CopterHack-2023](copterhack2023.md), team **FTL**. + +## Team Information + +```cpp +#include "veryInterestingCommandDescription.h" +``` + +Team members: + +- Maxim Ramanouski, [@max8rr8](https://t.me/max8rr8). + +Country: Belarus. + +## Project Description + +Last year at CopterHack 2022, we created a [project](../ru/advanced_clover_simulator.html) that simplified the simulation of Clover, and in 2021, we created a [project](../ru/advanced_clover.html) that simplified the development of products for Clover (IDE and library for writing). The time has come to combine them and achieve unlimited power. + +### Project Idea + +The idea of the project is to combine CloverIDE and CloverSim (a tool for running Clover simulations). Thus, a platform is planned that allows developing products based on Clover using a simulator and an advanced IDE. The platform will include the following features: + +- Add a web interface that allows using CloverSim without touching the command line. +- Work both in the browser (without installing anything) and from CLI. +- Have a course that covers different aspects of clover. +- Simplify installation, especially in WSL. +- Running a simulation on a remote device (more powerful computer or cloud). + +### Project videos + +Video presentation of the project: [link](https://www.youtube.com/watch?v=T4RU9sfxsSI). + +Live presentation at CopterHack: TBD. + +CLI demonstration: [link](https://www.youtube.com/watch?v=Ao-ukR58sSQ). 
+ +## Installation + +Installation process is described in the [project documentation](https://ftl-team.github.io/clover_sim/#/?id=installation). + +## Usage + +The CloverSim platform offers a seamless workflow for users: + +1. Users can effortlessly select or create a workspace and task and + launch them with ease. + + ![Step 1 screenshot](../assets/ftl/acp_workflow1.png) + +2. After launching the simulation, users are presented with CloverSim WebUI that + provides them with an intuitive way to view their scores and progress, + control the simulator, and access task descriptions and scoring information. + From it users can open terminal, gzweb and more importantly they can easily + access the CloverSim IDE to solve task. + + ![Step 2 screenshot](../assets/ftl/acp_workflow2.png) + +3. The IDE provides a full suite of tools and features for writing and + debugging code. One example is autocompletion to help streamline the + development process, making it more efficient and effective. + + ![Step 3 screenshot](../assets/ftl/acp_workflow3.png) + +4. Users can launch their programs with ease and monitor its progress via + the GZWeb, CopterStatus, and SimulatorStatus views of the IDE. + + ![Step 4 screenshot](../assets/ftl/acp_workflow4.png) + +5. Users can track their progress and scores in real-time and effortlessly + restart the simulator if necessary. Additionally, different randomization + seed can be set to check various inputs and outcomes. + +We also have video demonstration/tutorial: [link](https://www.youtube.com/watch?v=aPOPHD3M3ZM). + +## More features + +- Easy installation process. +- Efficient simulation launch, surpassing traditional virtual machines. +- Generation of dynamic Gazebo worlds with randomization based on seed. +- Real-time task completion verification and score presentation. +- Execution with security in isolated containers. +- Multiple project capability without the need for multiple virtual machine images. 
+- WebUI for ease of use, removing the need to use the command line. +- IDE similar to VSCode with support for C++ and Python, including autocompletion and autoformatting. +- Custom-patched GZWeb with bug fixes and additional features, including the display of the Clover LED strip. +- GZWeb provides a follow-objects feature superior to that of Gazebo. +- IDE includes tools to interact with ROS, such as topic visualization, service calling, and image topic visualization. +- IDE also includes Copter Status, displaying most of the drone's information, including position, camera, and LED strip, in one view. +- IDE integrates with the simulator by providing control from it, viewing task descriptions, and opening GZWeb. + +We also have developed a learning course based on CloverSim: [link](https://github.com/FTL-team/CloverSim_course). It currently has the following tasks: + +- 1_thesquare - First task of CloverSim course with goal to fly square. +- 2_iseeall - Task that teaches how to interact with camera. +- 3_landmid - Find and land onto randomly positioned object. +- 4_flybyline - Flying along the line. +- 5_posknown - Find position of objects relative to ArUco map. + +## More details + +At this point, our platform consists of four major parts: + +- [CloverSim](https://github.com/FTL-team/clover_sim) - tool that manages simulation. +- [CloverSim Basefs](https://github.com/FTL-team/clover_sim_basefs) - container image that is used in simulator. +- [Clover IDE](https://github.com/FTL-team/cloverIDE) - clover ide tools and theia. +- [CloverSim course](https://github.com/FTL-team/CloverSim_course) - course with tasks based on our platform. + +### CloverSim + +The simulation architecture is a continuation of work from CopterHack 2022, but while 2022 version was closer to Proof-of-Concept, the updated version is more robust. 
+There are three major differences in the simulator architecture:
+
+- Replacement of `systemd-nspawn` with `runc` provides us a higher degree of container control and seamless support of non-systemd systems, for example WSL.
+- Migration to squashfs images, which greatly reduced the size of an installed CloverSim from 13 gigabytes to just 3.5 gigabytes.
+- Tasks are now mounted instead of being copied, and are also built before starting.
+
+Because of the way catkin_make works, it is incredibly slow when new packages are added (the whole cmake configuration is rerun for all packages). catkin_make provides a way to build only some packages, but it caches these packages, and to reset this cache you need to rebuild the whole workspace. But we have found a solution: `catkin_make -DCATKIN_WHITELIST_PACKAGES="task;CloverSim" --build build_CloverSim`. This command builds only the CloverSim and task packages in a separate build directory, which drastically reduces the time that catkin_make takes, and keeps the expected behavior of catkin_make without arguments.
+- Clover LED strip is rendered: our gzweb connects to ROS and pulls LED data from there to render the LED strip like Gazebo does.
+- Users can now follow objects like in Gazebo, but even better.
+- Reconnect on disconnect: when the simulator is restarted, gzweb loses its connection, and it can now automatically reconnect.
+
+The patched gzweb is available here: [FTL-team/gzweb](https://github.com/FTL-team/gzweb).
+
+### CloverIDE
+
+CloverIDE got some updates too:
+
+- We have updated theia and the extensions used.
+- Better C++ support via clangd.
+- Clover IDE tools can now reconnect after a simulator restart.
+- Copter Status now displays LED strip status.
+- Tools UI has better support for different themes.
+
+But the most important change is the CloverSim integration: there are new tools (task description, simulator control and gzweb). The gzweb tool is just an iframe (though it's very cool to have it in the IDE).
+
+Task description and simulator control are more interesting, as they have to interact with both the IDE and CloverSim. To support different versions, we use quite an interesting trick: the extension webview, after being initialized, dynamically loads JavaScript from CloverSim. That provides better integration between the two.
+
+### CloverSim course
+
+CloverSim course is a new part of our platform. It uses the robust task API of CloverSim to create a practical learning course. It currently teaches different aspects of clover development that I encountered during my participation in different contests involving clover. But we are happy to accept suggestions about other aspects we should teach in our course.
+
+## Conclusion
+
+This project is the final (or maybe there is more?) project of our advanced clover saga. AdvancedClover is a project that is easy to use and greatly improves the experience of learning about clover, participating in clover-based competitions and developing clover-based projects. We thank the COEX team for their support and look forward to further cooperation.
diff --git a/docs/en/aruco_marker.md b/docs/en/aruco_marker.md index f2208f256..4fec49587 100644 --- a/docs/en/aruco_marker.md +++ b/docs/en/aruco_marker.md @@ -72,12 +72,6 @@ Sample code to fly to a point 1 metre to the left and 2 metres above marker with navigate(frame_id='aruco_7', x=-1, y=0, z=2) ``` -Sample code to rotate counterclockwise while hovering 1.5 metres above marker id 10: - -```python -navigate(frame_id='aruco_10', x=0, y=0, z=1.5, yaw_rate=0.5) -``` - Note that if the required marker isn't detected for 0.5 seconds after the `navigate` command, the command will be ignored. These frames may also be used in other services that accept TF frames (like `get_telemetry`). The following code will get the drone's position relative to the marker with id 3: diff --git a/docs/en/blocks.md b/docs/en/blocks.md index f7b1665a5..6e888c0a3 100644 --- a/docs/en/blocks.md +++ b/docs/en/blocks.md @@ -2,7 +2,7 @@ -Visual blocks programming feature has been added to the [RPi image](image.md), starting with the version **0.21**. Blocks programming is implemented using [Google Blockly](https://developers.google.com/blockly) platform. Blocks programming integration can lower the entry barrier to a minimum. +Visual blocks programming feature has been added to the [RPi image](image.md), starting with the version **0.21**. Blocks programming is implemented using [Google Blockly](https://developers.google.com/blockly) library. Blocks programming integration can lower the entry barrier to a minimum. ## Configuration diff --git a/docs/en/camera.md b/docs/en/camera.md index bbf68d87c..259b13d69 100644 --- a/docs/en/camera.md +++ b/docs/en/camera.md @@ -1,5 +1,7 @@ # Working with the camera +> **Note** The following applies to the [image version **0.24**](https://github.com/CopterExpress/clover/releases/tag/v0.24), which is not yet released. 
Older documentation is still available for [for version **0.23**](https://github.com/CopterExpress/clover/blob/f78a03ec8943b596d5a99b893188a159d5319888/docs/en/camera.md). + Make sure the camera is enabled in the `~/catkin_ws/src/clover/clover/launch/clover.launch` file: ```xml @@ -14,7 +16,7 @@ The `clover` service must be restarted after the launch-file has been edited: sudo systemctl restart clover ``` -You may use rqt or [web_video_server](web_video_server.md) to view the camera stream. +You may use [rqt](rviz.md) or [web_video_server](web_video_server.md) to view the camera stream. ## Troubleshooting @@ -52,8 +54,6 @@ The [SD card image](image.md) comes with a preinstalled [OpenCV](https://opencv. ### Python -Main article: http://wiki.ros.org/cv_bridge/Tutorials/ConvertingBetweenROSImagesAndOpenCVImagesPython. - An example of creating a subscriber for a topic with an image from the main camera for processing with OpenCV: ```python @@ -61,12 +61,14 @@ import rospy import cv2 from sensor_msgs.msg import Image from cv_bridge import CvBridge +from clover import long_callback -rospy.init_node('computer_vision_sample') +rospy.init_node('cv') bridge = CvBridge() +@long_callback def image_callback(data): - cv_image = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image + img = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image # Do any image processing with cv2... image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback) @@ -74,19 +76,31 @@ image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback) rospy.spin() ``` +> **Note** Image processing may take significant time to finish. This can cause an [issue](https://github.com/ros/ros_comm/issues/1901) in rospy library, which would lead to processing stale camera frames. To solve this problem you need to use `long_callback` decorator from `clover` library, as in the example above. 
+ +#### Limiting CPU usage + +When using the `main_camera/image_raw` topic, the script will process the maximum number of frames from the camera, actively utilizing the CPU (up to 100%). In tasks, where processing each camera frame is not critical, you can use the topic, where the frames are published at rate 5 Hz: `main_camera/image_raw_throttled`: + +```python +image_sub = rospy.Subscriber('main_camera/image_raw_throttled', Image, image_callback, queue_size=1) +``` + +#### Publishing images + To debug image processing, you can publish a separate topic with the processed image: ```python image_pub = rospy.Publisher('~debug', Image) ``` -Publishing the processed image (at the end of the image_callback function): +Publishing the processed image: ```python -image_pub.publish(bridge.cv2_to_imgmsg(cv_image, 'bgr8')) +image_pub.publish(bridge.cv2_to_imgmsg(img, 'bgr8')) ``` -The obtained images can be viewed using [web_video_server](web_video_server.md). +The published images can be viewed using [web_video_server](web_video_server.md) or [rqt](rviz.md). #### Retrieving one frame @@ -97,7 +111,7 @@ import rospy from sensor_msgs.msg import Image from cv_bridge import CvBridge -rospy.init_node('computer_vision_sample') +rospy.init_node('cv') bridge = CvBridge() # ... 
@@ -119,38 +133,32 @@ QR codes recognition in Python: ```python import rospy from pyzbar import pyzbar +import cv2 from cv_bridge import CvBridge from sensor_msgs.msg import Image +from clover import long_callback +rospy.init_node('cv') bridge = CvBridge() -rospy.init_node('barcode_test') - -# Image subscriber callback function -def image_callback(data): - cv_image = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image - barcodes = pyzbar.decode(cv_image) +@long_callback +def image_callback(msg): + img = bridge.imgmsg_to_cv2(msg, 'bgr8') + barcodes = pyzbar.decode(img) for barcode in barcodes: - b_data = barcode.data.decode("utf-8") + b_data = barcode.data.decode('utf-8') b_type = barcode.type (x, y, w, h) = barcode.rect xc = x + w/2 yc = y + h/2 - print("Found {} with data {} with center at x={}, y={}".format(b_type, b_data, xc, yc)) + print('Found {} with data {} with center at x={}, y={}'.format(b_type, b_data, xc, yc)) -image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback, queue_size=1) +image_sub = rospy.Subscriber('main_camera/image_raw_throttled', Image, image_callback, queue_size=1) rospy.spin() ``` -The script will take up to 100% CPU capacity. To slow down the script artificially, you can use [throttling](http://wiki.ros.org/topic_tools/throttle) of frames from the camera, for example, at 5 Hz (`main_camera.launch`): - -```xml - -``` - -The topic for the subscriber in this case should be changed for `main_camera/image_raw_throttled`. +> **Hint** See other computer vision examples in the `~/examples` directory of the [RPi image](image.md). ## Video recording diff --git a/docs/en/clover-cloud-platform.md b/docs/en/clover-cloud-platform.md new file mode 100644 index 000000000..50eaba445 --- /dev/null +++ b/docs/en/clover-cloud-platform.md @@ -0,0 +1,93 @@ +# Clover Cloud Platform + +[CopterHack-2023](copterhack2023.md), team **Clover Cloud Team**. 
+ +The list of our team members: + +* Кирилл Лещинский / Kirill Leshchinskiy, [@k_leshchinskiy](https://t.me/k_leshchinskiy) - Team Lead. +* Кузнецов Михаил / Mikhail Kuznetsov, [@bruhfloppa](https://t.me/bruhfloppa) - Frontend Developer. +* Даниил Валишин / Daniil Valishin, [@Astel_1](https://t.me/Astel_1) - Backend Developer. + +## Table of contents + +* [Introduction](#introduction) +* [Usability](#usability) +* [How to work with our platform?](#how-to-work-with-our-platform) +* [About the development of the platform](#about-the-development-of-the-platform) +* [Conclusion](#conclusion) + +## Video demonstration + +

+ +

+ +## Introduction + +Clover Cloud Platform is an innovative platform that enables users to access COEX Clover drone simulation online, without the need to download any programs or virtual machines. + +> **Note** Visit our [documentation](https://docs.clovercloud.software) to learn all about the platform, its development and how to use it. + +## Unleash Your Coding Power: Develop Autonomous Flight Code at Lightning Speed on Clover Cloud Platform + +If you're a developer working on autonomous flight projects, you know how time-consuming and distracting all of the routine activities can be. Between managing your hardware, debugging, and configuring your environment, it can feel like the real work of coding gets lost in the shuffle. + +That's where our platform comes in. Our streamlined interface and powerful tools make it easy to tackle all of those essential tasks so you can focus on what really matters: developing flawless, high-performance code for your autonomous flight project. + +So why wait to unleash your coding power? Sign up for our platform today and discover the difference it can make in the speed, quality, and focus of your autonomous flight coding work. + +## Usability + +Our platform is incredibly user-friendly and provides seamless access to the simulation in just a few clicks. Together with a simulator that displays simulation data accurately and without delay, there is a map editor allows users to edit the ArUco marker map and add or modify other objects on the scene directly within the simulation window. Additionally, users can create pre-configured workspaces complete with autonomous flight code and simulation scene configuration. Each user can also create their templates or apply a pre-made one to their workspace in just a few clicks. In addition to its other features, Clover Cloud Platform provides users with a convenient code editor for autonomous flight coding. 
Users can write code in the built-in editor and run it directly from the editor, viewing program output in real-time in the terminal. The platform also includes a file manager that simplifies file manipulation tasks, further enhancing the user's overall experience. With these tools at your fingertips, Clover Cloud Platform delivers an unparalleled level of accessibility and convenience for autonomous flight simulation. + +

+ Workspace screenshot +

+ +## The CodeSandbox for COEX Clover + +You can describe the usability and relevance of our platform in another way. Have you heard of CodeSandbox? Our platform offers the same convenience, flexibility, and accessibility as CodeSandbox, but is specifically designed to work with the COEX Clover drone simulation. + +## How to work with our platform? + +Let's dive into the sea of functionality that our platform offers. Detailed description of each feature is available in our [documentation](https://docs.clovercloud.software), here we will provide a general overview of the platform. + +### Creating an account + +First, you should create an account on our site. You can do this by clicking on this [link](https://clovercloud.software/signup). + +### Instance management + +After creating an account, you will be taken to the [dashboard](https://clovercloud.software/instances). Here you can create, start, stop and delete workspaces. + +>Workspaces are containers with Gazebo simulator and our software that provide data flow for simulation visualization, as well as handle requests from file manager, code editor and terminal. + +

+ Instance management +

+ +### Workspace overview + +In the workspace, in addition to the simulator, you have a file manager, code editor and terminal. There is also an editing mode in the simulator - one of the key features of our project. It allows you to quickly and conveniently edit the simulation scene, namely: move ArUco markers, change their size, change id of the marker, load instead of marker picture, add new markers or delete them. You can also add 3d objects to the scene and change their position, size and color. Below is an example of working with our workspace. + +

+ Workspace overview +

+### Templates
+
+Templates are another key feature of our platform. Is there something you can't do and you want to see how to properly perform a task? Look for the right template with ready-made code in the Template Browser and apply it to your workspace! Each user can create a template with autonomous flight code and a simulator configuration and share it.
+
+## About the development of the platform
+
+Our team has worked tirelessly to develop a simple yet multifunctional platform. We utilized the most modern standards and tools and implemented numerous optimization methods to ensure seamless performance and error-free operation. The frontend programming language chosen was JavaScript with the React framework; as a design system we utilize the Material Design style for an elegant and intuitive user interface. With the help of GitHub Actions the website is built and deployed to Firebase hosting. The platform's backend is written in Python and contains multiple simultaneously running scripts. User data is secured and stored in a MongoDB database. Communication between the server and site is enabled through web sockets and the socket.io library, guaranteeing lightning-fast data transfer with minimal lag.
Advanced users benefit from access to their workspace from anywhere in the world and on any device, along with a convenient code-sharing system. In the future, we plan to add more new features to our platform, scale our network to serve a greater number of users, and collaborate with COEX to integrate their Clover quadcopter documentation into our platform, offering users a very simple and user-friendly way to learn to program autonomous drone flight. We also want to express gratitude to the COEX customer support team for their assistance in resolving complex issues that arose during development. diff --git a/docs/en/copterhack2023.md b/docs/en/copterhack2023.md index 2c5cb2f96..1e6bdf451 100644 --- a/docs/en/copterhack2023.md +++ b/docs/en/copterhack2023.md @@ -8,6 +8,36 @@ To learn more about the articles of the CopterHack finalist teams follow the lin The proposed projects are supposed to be open-source and be compatible with the Clover quadcopter platform. Teams-participants are supposed to work on their projects throughout the competition, bringing them closer to the state of the finished product while being assisted by industry experts through lectures and regular feedback. +Final of the CopterHack 2022 was held on May 27, 2023. The winner team was the team 🇷🇺 **[Clover Cloud Platform](clover-cloud-platform.md)**. 
+ +## Full stream of the final + + + +## Projects of the contest's participants {#participants} + +|Place|Team|Project|Points| +|:-:|-|-|-| +|1|🇷🇺 Clover Cloud Team|[Clover Cloud Platform](clover-cloud-platform.md)|21.7| +|2|🇧🇾 FTL|[Advanced Clover 2](advanced_clover_simulator_platform.md)|21| +|3|🇨🇦 Clover with Motion Capture System|[Clover with Motion Capture System](mocap_clover.md)|20.5| +|4|🇧🇷 Atena|[Swarm in Blocks 2](swarm_in_blocks_2.md)|20.3| +|5|🇷🇺 C305|[Система радио-навигации](../ru/nav-beacon.html)|17.5| +|6|🇮🇳 DJS PHOENIX|[Autonomous Racing Drone](djs_phoenix_chetak.md)|14.6| +|7|🇷🇺 Lyceum №128|[Network of Clover charging stations](liceu128.md)|13.7| +|✕|🇰🇬 Zavarka|[Система обмена грузами с помощью конвейера](https://github.com/aiurobotics/clover/blob/conveyance/docs/ru/conveyance.md)|| +|✕|🇷🇺 FSOTM|[Drone Interceptor](https://github.com/deadln/clover/blob/interceptor/docs/ru/interceptor.md)|| +|✕|🇰🇬 Homelesses|[Trash Collector](https://github.com/Isa-jesus/clover/blob/trash-collector/docs/ru/show_maker.md)|| +|✕|🇷🇺 Digital otters|[Digital otters](https://github.com/Mentalsupernova/clover_cool/blob/new-article.md/docs/ru/new-article.md)|| +|✕|🇷🇺 Light Flight|[Сопровождение БПЛА при посадке](https://github.com/SirSerow/clover_inertial_ns/blob/inertial-1/Description.md)|| +|✕|🇰🇬 LiveSavers|[LiveSavers](https://github.com/Sarvar00/clover/blob/livesavers/docs/ru/livesaver.md)|| +|✕|🇷🇺 XenCOM|[Bound by fate](https://github.com/xenkek/clover/blob/xenkek-patch-1/docs/ru/bound_by_fate.md)|| +|✕|🇷🇺 Ava_Clover|[DoubleClover](https://github.com/bessiaka/clover/blob/Ava_Clover/docs/ru/soosocta.md)|| +|✕|🇷🇺 TPU_1|[Совместная транспортировка груза](https://github.com/shamoleg/clover/blob/tpu_1/docs/ru/tpu_1.md)|| +|✕|🇷🇺 TPU_2|[Алгоритм полета сквозь лесную местность](https://github.com/shamoleg/clover/blob/tpu_2/docs/ru/tpu_2.md)| | + +See all points by criteria in the [full 
table](https://docs.google.com/spreadsheets/d/1qTpW8zFVdSEGnbtOvMgQD6DcYwu8URFt1RKOCeUaOe8). + ## CopterHack 2023 stages The qualifying and project development stages will be held in an online format, however, the final round will be in a hybrid mode (offline + online). The competition involves monthly updates from the teams with regular feedback from the jury. All teams are required to prepare a final video and presentation on the project's results to participate in the final stage. diff --git a/docs/en/djs_phoenix_chetak.md b/docs/en/djs_phoenix_chetak.md new file mode 100644 index 000000000..6ebb0bf3d --- /dev/null +++ b/docs/en/djs_phoenix_chetak.md @@ -0,0 +1,55 @@ +# Autonomous Racing Drone: CHETAK + +[CopterHack-2023](copterhack2023.md), team **DJS PHOENIX**. + +## Team Information + +![Without bg](https://user-images.githubusercontent.com/93365067/195974501-0acef6b7-e4ea-4c47-bd7a-615caf73a625.png) + +We are the DJS Phoenix, the official drone team of Dwarkadas. J. Sanghvi College of Engineering + +The list of team members: + +* Shubham Mehta, @Just_me_05, Mentor. +* Harshal Warde, @kryptonisinert, Mechanical. +* Parth Sawjiyani, @Non_Active, Mechanical. +* Soham Dalvi, @devilsfootprint_1973, Mechanical. +* Vedant Patel, @VedantMP, Mechanical. +* Harsh Shah, @harssshhhhh, Mechanical. +* Lisha Mehta, @lishamehta, Mechanical. +* Shubh Pokarne, @Shubhpokarne, Electronics. +* Tushar Nagda, @tushar_n11, Electronics. +* Deep Tank, @Kraven, Electronics. +* Khushi Sanghvi, @Cryptoknigghtt, Programmer. +* Harshil Shah, @divine_fossil, Programmer. +* Omkar Parab, @Omkar_parab21, Programmer. +* Madhura Korgaonkar, @Madhura221, Programmer. +* Shruti Shah, @Shrutishah22, Programmer. +* Aditi Dubey, @aditi_0503, Marketing. +* Krisha Lakhani, @krishalakhani, Marketing. + +## Project Description + +This year, our team DJS Phoenix, presents to you a fully Autonomous Racing Drone. The drone scans for ArUco tags on the gates and passes through them. 
+ +### Project Idea + +This project proposes to develop an autonomous racing drone that can navigate through complex courses at high speeds while avoiding obstacles and detecting changes in the environment. In racing competitions, autonomous drones can compete in high-speed, precision races that challenge their agility, speed, and accuracy. These competitions could be held in indoor arenas or outdoor tracks, and they could attract enthusiasts and spectators from all over the world. With their advanced capabilities, autonomous racing drones could usher in a new era of racing events that are more exciting and challenging than ever before. From racing competitions to search and rescue operations, the autonomous racing drone can be used in a wide range of applications that benefit individuals, businesses, and society as a whole. + +## Potential Outcome + +### Problem + +In many industries and applications, there is a need for fast, efficient, and safe movement of goods and information. Drones have become an increasingly popular tool for a wide range of applications, from aerial photography to surveying and monitoring. However, operating a drone requires a certain level of skill and experience, which can be a barrier for individuals or businesses who want to take advantage of this technology. Additionally, traditional drones can be expensive and time-consuming to operate, limiting their accessibility and effectiveness. Therefore, there is a need for a more user-friendly and affordable solution that can expand the use of drones to new audiences and applications. + +### Solution + +The solution to the above problem statement is an autonomous racing drone. An autonomous racing drone is equipped with a camera that scans the ArUco tags for gate detection which is supported by software used in autonomous flights that enable it to navigate through a predetermined course while avoiding obstacles and achieving high speeds. 
Unlike traditional drones, an autonomous racing drone does not require manual control, making it an ideal solution for those who do not have the skills or experience to operate a drone. Its autonomous capabilities make it a more accessible and user-friendly solution than traditional drones, enabling individuals or businesses to take advantage of this technology without requiring extensive training or expertise.
+Applications deadline: November 30, 2022. ### Prizes @@ -105,7 +105,7 @@ The course is evaluated according to a separate, publicly available lesson submi The application to the contest is performed via the [Google Form](https://docs.google.com/forms/d/e/1FAIpQLSdf2Q68X4hPnFE9f3EP95AxPNnzHKqIsFHtTRT6EBKiH93wzg/viewform) where the link to the video course should be attached. -Applications deadline: September 1, 2022. +Applications deadline: November 30, 2022. ### Prizes diff --git a/docs/en/frames.md b/docs/en/frames.md index 0482fe32c..a2620deba 100644 --- a/docs/en/frames.md +++ b/docs/en/frames.md @@ -9,6 +9,7 @@ Main frames in the `clover` package: * `base_link` is rigidly bound to the drone. It is shown by the simplified drone model on the image above; * `body` is bound to the drone, but its Z axis points up regardless of the drone's pitch and roll. It is shown by the red, blue and green lines in the illustration; * `navigate_target` is bound to the current navigation target (as set by the [navigate](simple_offboard.md#navigate) service); +* `terrain` is bound to the floor at the current drone position (see the [set_altitude](simple_offboard.md#set_altitude) service); * `setpoint` is current position setpoint; * `main_camera_optical` is the coordinate system, [linked to the main camera](camera_setup.md#frame); diff --git a/docs/en/liceu128.md b/docs/en/liceu128.md new file mode 100644 index 000000000..3e4557dc2 --- /dev/null +++ b/docs/en/liceu128.md @@ -0,0 +1,45 @@ +# "QCS" - the network of Clover charging stations + +[CopterHack-2023](copterhack2023.md), team **Lyceum 128**. + +## Network realisation + +Our charging stations use Python web server created with Django framework. On that server we storage information about charging stations: + +- Position (GPS + ArUco marker). +- Possibility to drone landing. +- Drone info (If it's on it). + +To connect to server we use API with special personal key for every drone and station. 
The key can be regenerated if it becomes public.
+
+If you want to test a station without a drone, you can use the API Debug page. You must be logged into your account to open it.
+
+### Electronics in the station
+
+There are an Arduino Mega and a Wemos D1 on the station.
+
+![scheme](https://github.com/Juli-Shvetsova/clover/assets/78372613/3ab05a79-0046-463b-83dd-4db06115909b)
+
+The Wemos D1 connects to the server to collect information and perform tasks. The Arduino Mega receives signals from the Wemos and makes physical updates, such as moving the landing platform, LED indication, and more.
+
+After completing a mission, the Wemos sends a request to the server to confirm the updates on the server.
+
+## Clover flight
+
+We're using a recursive landing algorithm to achieve a successful landing. A small ArUco marker is on the landing platform. The camera can use this marker from a height of about 25 cm. Then the drone performs a standard landing.
+
+## Visit our landing and API page
+
+[https://qcs.pythonanywhere.com/](https://qcs.pythonanywhere.com/)
+
+## Source code
+
+The source code of this project is available on our [GitHub page](https://github.com/qcs-charge/).
+ IMAGE ALT TEXT +
+ +## Table of Contents + +* [Team Information](#item-one) +* [Educational Document](#item-two) +* [Introduction](#item-three) +* [Project Description](#item-four) +* [Hardware](#item-hardware) +* [Data Transfer](#item-transfer) +* [Examples](#item-examples) +* [Trajectory Tracking](#item-figure8) +* [Auto-Tuning](#item-auto) +* [Conclusion](#item-last) + +## Team Information {#item-one} + +The list of team members: + +* Sean Smith, @ssmith_81, roboticist and developer: [GitHub](https://github.com/ssmith-81), [Linkedin](https://www.linkedin.com/in/sean-smith-61920915a/). + +## Educational Document {#item-two} + +**My Gitbook, with detailed step by step analysis of the proposed project during the CopterHack 2023 competition can be found:** +[MoCap Clover Gitbook](https://0406hockey.gitbook.io/mocap-clover/). + +This page gives a broad overview on the motivation and purpose behind this project, it also provides research and industry based knowledge around UAV application that the reader may find interesting. If the user is interested in the technical details and implementation then refer to the educational Gitbook document. + +## Introduction {#item-three} + +Aerial robotics has become a common focus in research and industry over the past few decades. Many technical developments in research require a controlled test environment to isolate certain characteristics of the system for analysis. This typically takes place indoors to eliminate unwanted disturbances allowing results to be more predictable. 
Removing localization and pose feedback concerns can be accomplished with motion capture (MoCap) systems that track unmanned aerial vehicles (UAVs) pose with high precision as stated: + +"OptiTrack’s drone and ground robot tracking systems consistently produce positional error less than 0.3mm and rotational error less than 0.05°" [[reference](https://optitrack.com/applications/robotics/#:~:text=Exceptional%203D%20precision%20and%20accuracy&text=OptiTrack's%20drone%20and%20ground%20robot,error%20less%20than%200.05%C2%B0)]. + + + +This enables researchers to study the dynamics and behavior of UAVs in different environments, evaluate their performance, and develop advanced control algorithms for improved flight stability, autonomy, and safety. Research facilities around the world tend to built research drones from the ground up using off-the-shelf components with open source platforms such as PX4. While the end goal is the same: transferring pose feedback to the flight controller along with high level commands, the platforms and methods can vary significantly depending on factors such as onboard and offboard computing frameworks and data transfer methods. Many developers have a detailed background and understanding of the theoretical components of their research, however, adapting hardware configurations to their own platform such as sensor feedback and sensor fusion is not obvious. The purpose of this project is to provide detailed documentation on integrating the Clover platform with the MoCap system along with examples to familiarize users with the hardware, sensor fusion, high and low level controller development, and trajectory tracking. + + + +## Project Description {#item-four} + +In this article, we will provide an overview of MoCap systems for tracking UAV pose in research applications, highlighting their significance, advantages, and potential impacts in the field of UAV controller development. 
+ +## Document structure + +The Motion Capture System educational document is divided into three main sections outside of the Introduction and Conclusion. Each section and its purpose is listed: + +### Hardware {#item-hardware} + +The main goal in this section is to educate the reader on the MoCap system hardware and software. This can be further divided into several steps including camera placement, marker placement, and system calibration. A summary of the process is provided: + +| Task | Description | +| --------- | ----------- | +| Camera Placement | Position the motion capture cameras in strategic locations around the area where the UAV will be flying. The number of cameras and their placement will depend on the size of the area and the desired capture volume. Typically, cameras are placed on tripods or mounted on walls or ceilings at specific heights and angles to capture the UAV's movements from different perspectives. **A simple 4-camera setup example is provided in the educational document**. | +| Marker Placement | Attach OptiTrack markers to the UAV in specific locations. OptiTrack markers are small reflective spheres that are used as reference points for the motion capture system to track the UAV's position and movements. **An example placement on the Clover is shown in the educational document**. +| System Calibration | Perform system calibration to establish the spatial relationship between the cameras and the markers. This involves capturing a calibration sequence, during which a known pattern or object is moved in the capture volume. The system uses this data to calculate the precise positions and orientations of the cameras and markers in 3D space, which is crucial for accurate motion capture. | + +With these components completed correctly, you are well on your way to commanding indoor autonomous missions like this: +

+ Alt text +

+ + +Overall, configuring a motion capture system for UAV research requires careful planning, precise marker placement, accurate system calibration, and thorough validation to ensure accurate and reliable data collection for your research purposes. For more information, refer to the [informative documentation](https://0406hockey.gitbook.io/mocap-clover/hardware/motion-capture-setup-optitrack). + +### Data Transfer {#item-transfer} + +With the data acquired from the MoCap system, the main goal in this section is to transfer it to the Raspberry Pi onboard the Clover and remap it to the flight controller/PX4 for control. A summary of the steps are listed: + +

+ Alt text +

+ +* Data Acquisition: The motion capture system continuously tracks the position and orientation (pose) of the UAV using markers attached to the UAV and cameras positioned in the capture volume. The system calculates the 3D pose of the UAV in real-time and can be viewed through the motive software. +* Data Transmission: The pose data is transmitted from the motion capture system to a Raspberry Pi using VRPN and a ROS network. While this works, I have implemented a strictly UDP data transmission method where highlighting the setup process and ease of use will be a future development, both configurations can be seen in the below figures. The Raspberry Pi acts as an intermediary for processing and relaying the data to the flight controller onboard the UAV using MAVROS. The connection can be established using USB or UART, I chose UART in my setups. + +

+ ROS Block + ROS Block + Fig.1(a) - Left figure: ROS network experimental setup topology. Legend: Black dotted line is the provided local network; Blue solid line is the Clover pose transmission where the final transmission from laptop to Pi is over a ROS network; Red line is hardware connections; MAVLink arrow is communication via a MAVLink protocol. .
+ Fig.1(b) - Right figure: UDP transmission experimental setup topology. Legend: Black dotted line is the provided local network; Black solid line is the UDP client-server drone pose transmission; Light blue line is the pose data transmission; Red line is hardware connections; Purple line is communication via secure shell protocol and ROS network communication; MAVLink arrow is communication via a MAVLink protocol. . +

+
+* Data Processing: The Raspberry Pi receives the pose data from the motion capture system over a ROS network on a VRPN ROS topic; this was initially parsed from the sensor readings into position and attitude.
+* Data Remapping: Once the pose data is processed, the Raspberry Pi maps it to a gateway/MAVROS topic sending it to the flight controller onboard the UAV. All coordinate transformations (ENU->NED) are taken care of with MAVROS.
+* Flight Control Update: The flight controller onboard the UAV receives the remapped pose data and uses it to update the UAV's flight control algorithms. The updated pose information can be used to adjust the UAV's flight trajectory, orientation, or other control parameters to achieve the desired flight behavior or control objectives based on the motion capture system feedback.
+* Closed-Loop Control: The flight controller continuously receives pose feedback from the motion capture system via the Raspberry Pi, and uses it to update the UAV's flight control commands in a closed-loop fashion (PX4 uses a cascaded PID control system with more details provided in the educational document). This allows the UAV to maintain precise position and orientation control based on the real-time pose data provided by the motion capture system.
+
+Overall, sending pose feedback from a motion capture system to a Raspberry Pi and remapping the data to the flight controller onboard a UAV involves acquiring, processing, and transmitting the pose data in a compatible format to enable real-time closed-loop control of the UAV based on the motion capture system's feedback.
+
+### Examples {#item-examples}
+
+This section provides two practical examples to help the user better understand the Clover platform, sensor fusion, UAV applications such as trajectory tracking, high level commands, and low level control. 
The reader will become familiar with an abundance of state-of-the-art open source UAV platforms/technologies such as: + +| Platform | Description | +| ----------- | ----------- | +| PX4 | PX4 is an open-source flight control software for drones and other unmanned vehicles used on the Clover. It supports a wide range of platforms and sensors and is used in commercial and research applications. | +| Robot Operating System (ROS) |ROS is an open-source software framework for building robotic systems. It provides a set of libraries and tools for developing and managing robot software and is widely used in drone and robotics research. | +| MAVLink| MAVLink is a lightweight messaging protocol for communicating with unmanned systems. It is widely used in drone and robotics applications and provides a flexible and extensible communication framework.| +|QGroundControl (QGC)| QGC is an open-source ground control station software for drones and other unmanned vehicles. It provides a user-friendly interface for managing and monitoring drone flights and is widely used in commercial and research applications. | + + + +1. **A figure-8 high-level trajectory generation**: this example is outlined for both Software in the Loop (SITL) simulations and hardware testing with the Clover platform. Check out this interesting example from my [trajectory tracking section](https://0406hockey.gitbook.io/mocap-clover/examples/flight-tests/complex-trajectory-tracking)! + +

+ Alt text +

+

+ Fig.2 - Lemniscate of Bernoulli [reference]. +

+ +Here's a summary of the importance of trajectory tracking for UAV applications: + +* *Navigation and Path Planning*: Trajectory tracking allows UAVs to follow pre-defined paths or trajectories, which is essential for tasks such as aerial mapping, surveying, inspection, and monitoring. +* *Precision and Safety*: Trajectory tracking enables precise control of the UAV's position, velocity, and orientation, which is crucial for maintaining safe and stable flight operations. Precise trajectory tracking allows UAVs to avoid obstacles, maintain safe distances from other objects or aircraft, and operate in confined or complex environments with high precision, reducing the risk of collisions or accidents. +* *Autonomy and Scalability*: Trajectory tracking enables UAV autonomy, allowing them to operate independently without constant operator intervention. This enables UAVs to perform repetitive or complex tasks autonomously, freeing up human operators to focus on higher-level decision-making or supervisory roles. Trajectory tracking also facilitates scalable operations, where multiple UAVs can follow coordinated trajectories to perform collaborative tasks, such as swarm operations or coordinated data collection. +* *Flexibility and Adaptability*: Trajectory tracking allows UAVs to adapt their flight paths or trajectories in real-time based on changing conditions or objectives. UAVs can dynamically adjust their trajectories to accommodate changes in environmental conditions, mission requirements, or operational constraints, allowing for flexible and adaptive operations in dynamic or unpredictable environments. + +In summary, trajectory tracking is crucial for UAV applications as it enables precise navigation, safety, efficiency, autonomy, and scalability, while optimizing payload performance and adaptability to changing conditions. 
It plays a fundamental role in ensuring that UAVs can accomplish their missions effectively and safely, making it a critical component of UAV operations in various industries and domains. 
+
+
+
+2. **Clover adaptive auto-tuning**: The second example shows the user how to implement the adaptive auto-tune module provided by PX4 to tune the low-level controllers or attitude control module. You can take a look into how this is accomplished with the Clover platform in the [auto-tuning section](https://0406hockey.gitbook.io/mocap-clover/examples/auto-tuning).
+

+ Alt text +

+

+ Fig.3 - Cascaded PX4 control system [reference]. +

+
+This is a much faster and easier way to tune a real drone and provides good tuning for most airframes. Manual tuning is recommended when auto-tuning does not work, or when fine-tuning is essential. However, the process is tedious and not easy, especially for users with limited control background and experience. The Clover airframe provides sufficient base settings where auto-tuning can further improve performance depending on the Clover being used.
+
+Here's a summary of the importance of low-level controller performance for UAV applications:
+
+* *Flight Stability and Safety*: The low-level controller, typically implemented as a PID (Proportional-Integral-Derivative) or similar control algorithm, governs the UAV's attitude and position control. Properly tuning the low-level controller ensures that the UAV remains stable during flight, with accurate and responsive control inputs. This is essential for safe and reliable UAV operations, as it helps prevent undesired oscillations, overshooting, or instability that can lead to crashes or accidents.
+* *Control Precision and Responsiveness*: Accurate control is crucial for achieving precise and responsive UAV maneuvers, such as smooth trajectory tracking, precise hovering, or dynamic maneuvers. Proper tuning of the low-level controller allows for precise control of the UAV's attitude, position, and velocity, enabling it to accurately follow desired flight trajectories, respond to changing conditions or commands, and perform complex flight maneuvers with high precision.
+* *Adaptability and Robustness*: UAV operations can be subject to varying environmental conditions, payload configurations, or operational requirements. Proper low-level controller tuning allows for adaptability and robustness, enabling the UAV to perform reliably and accurately across a wide range of conditions or mission requirements. 
Tuning the controller parameters can help account for changes in payload mass, wind conditions, or other external factors, ensuring stable and responsive flight performance. + +

+ Alt text +

+
+In summary, low-level controller tuning is crucial for UAV applications as it directly affects flight stability, control precision, payload performance, energy efficiency, adaptability, and compliance with safety and regulatory requirements. It is an essential step in optimizing the performance and safety of UAV operations, ensuring reliable and effective flight control for various applications across different industries and domains.
+
+## Conclusion {#item-last}
+
+Over the course of this project I was able to extend my knowledge in robotic applications while enduring many ups and downs along the way. This greatly helped me with my research when testing controller development was required. The motivation behind this documentation is to improve this experience for other researchers, robotic developers, or hobbyists that have a desire to learn fundamental robotic applications, which are beginning to shape the world we know today. These details can be explored in a [GitBook](https://0406hockey.gitbook.io/mocap-clover/) for those who are interested.
+
+I provided many details on the interworking components required to achieve an indoor autonomous flight setup with the COEX Clover platform. With an extensive background in UAV control, I tried to provide a basic understanding of this for the reader's benefit. There are many more sections I would like to include along with improving upon the existing ones. A few examples include firmware testing with hardware in the loop simulations, advanced trajectory generation, and an extensive list of flight examples for the Gazebo simulator with application to hardware.
+
+Lastly, I would like to thank the entire COEX team that made this project possible by providing a wonderful platform with support. I would like to give a special thanks to [Oleg Kalachev](https://github.com/okalachev) for helping me debug and succeed through applied learning. 
With that being said, I hope you all enjoy the resourceful content provided, and I plan on releasing more detailed documents on other interesting topics as I progress through my research and development. + + diff --git a/docs/en/models.md b/docs/en/models.md index 3d6f96917..17f2bec49 100644 --- a/docs/en/models.md +++ b/docs/en/models.md @@ -198,6 +198,15 @@ This page contains models and drawings of some of the drone parts. They can be u +### 3D print + +#### Mechanical gripper + +* **Left claw**: [`grip_left.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/stl/grip_left.stl). +* **Right claw**: [`grip_right.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/stl/grip_right.stl). + +Material: SBS Glass. Infill: 100%. Quantity: 1 pcs. + ## Clover 4 ### 3D print diff --git a/docs/en/parameters.md b/docs/en/parameters.md index caef83fd4..0895a8ee4 100644 --- a/docs/en/parameters.md +++ b/docs/en/parameters.md @@ -39,17 +39,27 @@ In case of using EKF2 (official firmware): |Parameter|Value|Comment| |-|-|-| -|`EKF2_AID_MASK`|26|Checkboxes: *flow* + *vision position* + *vision yaw*.
Details: [Optical Flow](optical_flow.md), [ArUco markers](aruco_map.md), [GPS](gps.md).| +|`EKF2_AID_MASK`\*|26|Checkboxes: *flow* + *vision position* + *vision yaw*.
Details: [Optical Flow](optical_flow.md), [ArUco markers](aruco_map.md), [GPS](gps.md).| |`EKF2_OF_DELAY`|0|| |`EKF2_OF_QMIN`|10|| |`EKF2_OF_N_MIN`|0.05|| |`EKF2_OF_N_MAX`|0.2|| -|`EKF2_HGT_MODE`|2 (*Range sensor*)|If the [rangefinder](laser.md) is present and flying over horizontal floor| +|`EKF2_HGT_MODE`\*|3 (*Vision*)|If the [rangefinder](laser.md) is present and flying over horizontal floor – 2 (*Range sensor*)| |`EKF2_EVA_NOISE`|0.1|| |`EKF2_EVP_NOISE`|0.1|| |`EKF2_EV_DELAY`|0|| |`EKF2_MAG_TYPE`|5 (*None*)|Disabling usage of the magnetometer (when navigating indoor)| +\* — starting from PX4 version 1.14, the parameters marked with an asterisk are replaced with the following: + +|Parameter|Value|Comment| +|-|-|-| +|`EKF2_EV_CTRL`|11|Checkboxes: *Horizontal position* + *Vertical position* + *Yaw*| +|`EKF2_GPS_CTRL`|0|All checkboxes are disabled| +|`EKF2_BARO_CTRL`|0 (*Disabled*)|Barometer is disabled| +|`EKF2_OF_CTRL`|1 (*Enabled*)|Optical flow is enabled| +|`EKF2_HGT_REF`|3 (*Vision*)|If the [rangefinder](laser.md) is present and flying over horizontal floor – 2 (*Range sensor*)| + > **Info** See also: list of default parameters of the [Clover simulator](simulation.md): https://github.com/CopterExpress/clover/blob/master/clover_simulation/airframes/4500_clover. @@ -60,8 +70,8 @@ The `SYS_MC_EST_GROUP` parameter defines the estimator subsystem to use. Estimator subsystem is a group of modules that calculates the current state of the copter using readings from the sensors. 
The copter state includes: -* Angle rate of the copter – pitch_rate, roll_rate, yaw_rate; -* Copter orientation (in the local coordinate system) – pitch, roll, yaw (one of presentations); +* Angle rate of the copter – roll_rate, pitch_rate, yaw_rate; +* Copter orientation (in the local coordinate system) – roll, pitch, yaw (one of presentations); * Copter position (in the local coordinate system) – x, y, z; * Copter speed (in the local coordinate system) – vx, vy, vz; * Global coordinates of the copter – latitude, longitude, altitude; diff --git a/docs/en/simple_offboard.md b/docs/en/simple_offboard.md index 832bd3637..8c3c665a9 100644 --- a/docs/en/simple_offboard.md +++ b/docs/en/simple_offboard.md @@ -1,5 +1,7 @@ # Autonomous flight +> **Note** The following applies to the [image version **0.24**](https://github.com/CopterExpress/clover/releases/tag/v0.24), which is not yet released. Older documentation is still available for [for version **0.23**](https://github.com/CopterExpress/clover/blob/f78a03ec8943b596d5a99b893188a159d5319888/docs/en/simple_offboard.md). + The `simple_offboard` module of the `clover` package is intended for simplified programming of the autonomous drone flight (`OFFBOARD` [flight mode](modes.md)). It allows setting the desired flight tasks, and automatically transforms [coordinates between frames](frames.md). `simple_offboard` is a high level system for interacting with the flight controller. For a more low level system, see [mavros](mavros.md). 
@@ -20,6 +22,9 @@ rospy.init_node('flight') # 'flight' is name of your ROS node get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) navigate = rospy.ServiceProxy('navigate', srv.Navigate) navigate_global = rospy.ServiceProxy('navigate_global', srv.NavigateGlobal) +set_altitude = rospy.ServiceProxy('set_altitude', srv.SetAltitude) +set_yaw = rospy.ServiceProxy('set_yaw', srv.SetYaw) +set_yaw_rate = rospy.ServiceProxy('set_yaw_rate', srv.SetYawRate) set_position = rospy.ServiceProxy('set_position', srv.SetPosition) set_velocity = rospy.ServiceProxy('set_velocity', srv.SetVelocity) set_attitude = rospy.ServiceProxy('set_attitude', srv.SetAttitude) @@ -51,11 +56,11 @@ Response format: * `lat, lon` – drone latitude and longitude *(degrees)*, requires [GPS](gps.md) module; * `alt` – altitude in the global coordinate system (according to [WGS-84](https://ru.wikipedia.org/wiki/WGS_84) standard, not AMSL!), requires [GPS](gps.md) module; * `vx, vy, vz` – drone velocity *(m/s)*; -* `pitch` – pitch angle *(radians)*; * `roll` – roll angle *(radians)*; +* `pitch` – pitch angle *(radians)*; * `yaw` — yaw angle *(radians)*; -* `pitch_rate` — angular pitch velocity *(rad/s)*; * `roll_rate` – angular roll velocity *(rad/s)*; +* `pitch_rate` — angular pitch velocity *(rad/s)*; * `yaw_rate` – angular yaw velocity *(rad/s)*; * `voltage` – total battery voltage *(V)*; * `cell_voltage` – battery cell voltage *(V)*. @@ -100,10 +105,9 @@ Parameters: * `x`, `y`, `z` — coordinates *(m)*; * `yaw` — yaw angle *(radians)*; -* `yaw_rate` – angular yaw velocity (will be used if yaw is set to `NaN`) *(rad/s)*; * `speed` – flight speed (setpoint speed) *(m/s)*; * `auto_arm` – switch the drone to `OFFBOARD` mode and arm automatically (**the drone will take off**); -* `frame_id` – [coordinate system](frames.md) for values `x`, `y`, `z`, `vx`, `vy`, `vz`. Example: `map`, `body`, `aruco_map`. Default value: `map`. 
+* `frame_id` – [coordinate system](frames.md) for values `x`, `y`, `z` and `yaw`. Example: `map`, `body`, `aruco_map`. Default value: `map`. > **Note** If you don't want to change your current yaw set the `yaw` parameter to `NaN` (angular velocity by default is 0). @@ -119,7 +123,7 @@ Flying in a straight line to point 5:0 (altitude 2) in the local system of coord navigate(x=5, y=0, z=3, speed=0.8) ``` -Flying to point 5:0 without changing the yaw angle (`yaw` = `NaN`, `yaw_rate` = 0): +Flying to point 5:0 without changing the yaw angle: ```python navigate(x=5, y=0, z=3, speed=0.8, yaw=float('nan')) @@ -149,22 +153,10 @@ Flying to point 3:2 (with the altitude of 2 m) in the [ArUco map](aruco.md) coor navigate(x=3, y=2, z=2, speed=1, frame_id='aruco_map') ``` -Rotating on the spot at the speed of 0.5 rad/s (counterclockwise): - -```python -navigate(x=0, y=0, z=0, yaw=float('nan'), yaw_rate=0.5, frame_id='body') -``` - -Flying 3 meters forwards at the speed of 0.5 m/s, yaw-rotating at the speed of 0.2 rad/s: - -```python -navigate(x=3, y=0, z=0, speed=0.5, yaw=float('nan'), yaw_rate=0.2, frame_id='body') -``` - Ascending to the altitude of 2 m (command line): ```(bash) -rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, yaw_rate: 0.0, speed: 0.5, frame_id: 'body', auto_arm: true}" +rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, speed: 0.5, frame_id: 'body', auto_arm: true}" ``` > **Note** Consider using the `navigate_target` frame instead of `body` for missions that primarily use relative movements forward/back/left/right. This negates inaccuracies in relative point calculations. 
@@ -178,7 +170,6 @@ Parameters: * `lat`, `lon` — latitude and longitude *(degrees)*; * `z` — altitude *(m)*; * `yaw` — yaw angle *(radians)*; -* `yaw_rate` – angular yaw velocity (used for setting the yaw to `NaN`) *(rad/s)*; * `speed` – flight speed (setpoint speed) *(m/s)*; * `auto_arm` – switch the drone to `OFFBOARD` and arm automatically (**the drone will take off**); * `frame_id` – [coordinate system](frames.md) for `z` and `yaw` (Default value: `map`). @@ -191,7 +182,7 @@ Flying to a global point at the speed of 5 m/s, while maintaining current altitu navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, frame_id='body') ``` -Flying to a global point without changing the yaw angle (`yaw` = `NaN`, `yaw_rate` = 0): +Flying to a global point without changing the yaw angle: ```python navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, yaw=float('nan'), frame_id='body') @@ -200,7 +191,71 @@ navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, yaw=float('nan'), fr Flying to a global point (command line): ```bash -rosservice call /navigate_global "{lat: 55.707033, lon: 37.725010, z: 0.0, yaw: 0.0, yaw_rate: 0.0, speed: 5.0, frame_id: 'body', auto_arm: false}" +rosservice call /navigate_global "{lat: 55.707033, lon: 37.725010, z: 0.0, yaw: 0.0, speed: 5.0, frame_id: 'body', auto_arm: false}" +``` + +### set_altitude + +Change the desired flight altitude. The service is used to set the altitude and its coordinate system independently, after calling [`navigate`](#navigate) or [`set_position`](#setposition). + +Parameters: + +* `z` – flight altitude *(m)*; +* `frame_id` – [coordinate system](frames.md) for computing the altitude. 
+ +Set the desired altitude to 2 m relative to the floor: + +```python +set_altitude(z=2, frame_id='terrain') +``` + +Set the desired altitude to 1 m relative to [the ArUco map](aruco.md): + +```python +set_altitude(z=1, frame_id='aruco_map') +``` + +### set_yaw + +Change the desired yaw angle (and its coordinate system), keeping the previous command in effect. + +Parameters: + +* `yaw` — yaw angle *(radians)*; +* `frame_id` – [coordinate system](frames.md) for computing the yaw. + +Rotate by 90 degrees clockwise (the previous command continues): + +```python +set_yaw(yaw=math.radians(-90), frame_id='body') +``` + +Set the desired yaw angle to zero relative to [the ArUco map](aruco.md): + +```python +set_yaw(yaw=0, frame_id='aruco_map') +``` + +Stop yaw rotation (caused by [`set_yaw_rate`](#setyawrate) call): + +```python +set_yaw(yaw=float('nan')) +``` + +### set_yaw_rate + +The the desired angular yaw velocity, keeping the previous command in effect. + +Parameters: + +* `yaw_rate` – angular yaw velocity *(rad/s)*; + +The positive direction of `yaw_rate` rotation (when viewed from the top) is counterclockwise. + +Start yaw rotation at 0.5 rad/s (the previous command continues): + +```python +set_yaw_rate(yaw_rate=0.5) ``` ### set_position @@ -213,7 +268,6 @@ Parameters: * `x`, `y`, `z` — point coordinates *(m)*; * `yaw` — yaw angle *(radians)*; -* `yaw_rate` – angular yaw velocity (used for setting the yaw to NaN) *(rad/s)*; * `auto_arm` – switch the drone to `OFFBOARD` and arm automatically (**the drone will take off**); * `frame_id` – [coordinate system](frames.md) for `x`, `y`, `z` and `yaw` parameters (Default value: `map`). @@ -235,19 +289,12 @@ Assigning the target point 1 m ahead of the current position: set_position(x=1, y=0, z=0, frame_id='body') ``` -Rotating on the spot at the speed of 0.5 rad/s: - -```python -set_position(x=0, y=0, z=0, frame_id='body', yaw=float('nan'), yaw_rate=0.5) -``` - ### set_velocity Set speed and yaw setpoints. 
* `vx`, `vy`, `vz` – flight speed *(m/s)*; * `yaw` — yaw angle *(radians)*; -* `yaw_rate` – angular yaw velocity (used for setting the yaw to NaN) *(rad/s)*; * `auto_arm` – switch the drone to `OFFBOARD` and arm automatically (**the drone will take off**); * `frame_id` – [coordinate system](frames.md) for `vx`, `vy`, `vz` and `yaw` (Default value: `map`). @@ -261,26 +308,26 @@ set_velocity(vx=1, vy=0.0, vz=0, frame_id='body') ### set_attitude -Set pitch, roll, yaw and throttle level (similar to [the `STABILIZED` mode](modes.md)). This service may be used for lower level control of the drone behavior, or controlling the drone when no reliable data on its position is available. +Set roll, pitch, yaw and throttle level (similar to [the `STABILIZED` mode](modes.md)). This service may be used for lower level control of the drone behavior, or controlling the drone when no reliable data on its position is available. Parameters: -* `pitch`, `roll`, `yaw` – requested pitch, roll, and yaw angle *(radians)*; +* `roll`, `pitch`, `yaw` – requested roll, pitch, and yaw angle *(radians)*; * `thrust` — throttle level, ranges from 0 (no throttle, propellers are stopped) to 1 (full throttle). * `auto_arm` – switch the drone to `OFFBOARD` mode and arm automatically (**the drone will take off**); * `frame_id` – [coordinate system](frames.md) for `yaw` (Default value: `map`). ### set_rates -Set pitch, roll, and yaw rates and the throttle level (similar to [the `ACRO` mode](modes.md)). This is the lowest drone control level (excluding direct control of motor rotation speed). This service may be used to automatically perform aerobatic tricks (e.g., flips). +Set roll, pitch, and yaw rates and the throttle level (similar to [the `ACRO` mode](modes.md)). This is the lowest drone control level (excluding direct control of motor rotation speed). This service may be used to automatically perform aerobatic tricks (e.g., flips). 
Parameters: -* `pitch_rate`, `roll_rate`, `yaw_rate` – pitch, roll, and yaw rates *(rad/s)*; +* `roll_rate`, `pitch_rate`, `yaw_rate` – pitch, roll, and yaw rates *(rad/s)*; * `thrust` — throttle level, ranges from 0 (no throttle, propellers are stopped) to 1 (full throttle). * `auto_arm` – switch the drone to `OFFBOARD` and arm automatically (**the drone will take off**); -The positive direction of `yaw_rate` rotation (when viewed from the top) is counterclockwise,`pitch_rate` rotation is forward, `roll_rate` rotation is to the left. +The positive direction of `yaw_rate` rotation (when viewed from the top) is counterclockwise, `pitch_rate` rotation is forward, `roll_rate` rotation is to the left. ### land @@ -305,6 +352,16 @@ rosservice call /land "{}" > **Caution** In recent PX4 versions, the vehicle will be switched out of LAND mode to manual mode, if the remote control sticks are moved significantly. +### release + +If it's necessary to pause sending setpoint messages, use the `simple_offboard/release` service: + +```python +release = rospy.ServiceProxy('simple_offboard/release', Trigger) + +release() +``` + ## Additional materials * [ArUco-based position estimation and navigation](aruco.md). diff --git a/docs/en/simulation_m1.md b/docs/en/simulation_m1.md index c434264bb..64c34aa9f 100644 --- a/docs/en/simulation_m1.md +++ b/docs/en/simulation_m1.md @@ -9,7 +9,7 @@ The recommended virtual machine hypervisor is [UTM app](https://mac.getutm.app/) 1. Download UTM App from the official site [mac.getutm.app](https://mac.getutm.app/) and install it. -2. Download Ubuntu Linux 20.04 installation iso-file for ARM64 architecture using the link: https://cdimage.ubuntu.com/focal/daily-live/current/focal-desktop-arm64.iso. +2. Download Ubuntu Linux 20.04 installation iso-file for ARM64 architecture using the link: https://clovervm.ams3.digitaloceanspaces.com/focal-desktop-arm64.iso. 3. 
Create a new virtual machine in UTM, using the following settings: * **Type**: Virtualize. diff --git a/docs/en/simulation_native.md b/docs/en/simulation_native.md index 35df908f8..d28bc4b3a 100644 --- a/docs/en/simulation_native.md +++ b/docs/en/simulation_native.md @@ -147,6 +147,8 @@ sudo systemctl enable roscore sudo systemctl start roscore ``` +### Web tools setup + Install any web server to serve Clover's web tools (`~/.ros/www` directory), e. g. Monkey: ```bash @@ -158,3 +160,11 @@ sudo cp ~/catkin_ws/src/clover/builder/assets/monkey.service /etc/systemd/system sudo systemctl enable monkey sudo systemctl start monkey ``` + +Create `~/.ros/www` using the following command: + +```bash +rosrun clover www +``` + +If the set of packages containing a web part (through `www` directory) is changed, the above command also must be run. diff --git a/docs/en/snippets.md b/docs/en/snippets.md index af2e36c97..052270b59 100644 --- a/docs/en/snippets.md +++ b/docs/en/snippets.md @@ -144,7 +144,7 @@ Determine whether the copter is turned upside-down: PI_2 = math.pi / 2 telem = get_telemetry() -flipped = abs(telem.pitch) > PI_2 or abs(telem.roll) > PI_2 +flipped = abs(telem.roll) > PI_2 or abs(telem.pitch) > PI_2 ``` ### # {#angle-hor} @@ -155,8 +155,8 @@ Calculate the copter horizontal angle: PI_2 = math.pi / 2 telem = get_telemetry() -flipped = not -PI_2 <= telem.pitch <= PI_2 or not -PI_2 <= telem.roll <= PI_2 -angle_to_horizon = math.atan(math.hypot(math.tan(telem.pitch), math.tan(telem.roll))) +flipped = not -PI_2 <= telem.roll <= PI_2 or not -PI_2 <= telem.pitch <= PI_2 +angle_to_horizon = math.atan(math.hypot(math.tan(telem.roll), math.tan(telem.pitch))) if flipped: angle_to_horizon = math.pi - angle_to_horizon ``` @@ -207,9 +207,9 @@ def pose_update(pose): # Processing new data of copter's position pass -rospy.Subscriber('/mavros/local_position/pose', PoseStamped, pose_update) -rospy.Subscriber('/mavros/local_position/velocity', TwistStamped, velocity_update) 
-rospy.Subscriber('/mavros/battery', BatteryState, battery_update) +rospy.Subscriber('mavros/local_position/pose', PoseStamped, pose_update) +rospy.Subscriber('mavros/local_position/velocity', TwistStamped, velocity_update) +rospy.Subscriber('mavros/battery', BatteryState, battery_update) rospy.Subscriber('mavros/rc/in', RCIn, rc_callback) rospy.spin() @@ -324,7 +324,7 @@ def flip(): while True: telem = get_telemetry() - flipped = abs(telem.pitch) > PI_2 or abs(telem.roll) > PI_2 + flipped = abs(telem.roll) > PI_2 or abs(telem.pitch) > PI_2 if flipped: break @@ -349,7 +349,7 @@ from pymavlink import mavutil from mavros_msgs.srv import CommandLong from mavros_msgs.msg import State -send_command = rospy.ServiceProxy('/mavros/cmd/command', CommandLong) +send_command = rospy.ServiceProxy('mavros/cmd/command', CommandLong) def calibrate_gyro(): rospy.loginfo('Calibrate gyro') @@ -480,3 +480,11 @@ param_set(param_id='COM_FLTMODE1', value=ParamValue(integer=8)) # Set parameter of type FLOAT: param_set(param_id='MPC_Z_P', value=ParamValue(real=1.5)) ``` + +### # {#is-simulation} + +Check, if the code is running inside a [Gazebo simulation](simulation.md): + +```python +is_simulation = rospy.get_param('/use_sim_time', False) +``` diff --git a/docs/en/sonar.md b/docs/en/sonar.md index 296b93f92..600db1107 100644 --- a/docs/en/sonar.md +++ b/docs/en/sonar.md @@ -1,14 +1,14 @@ -# Working with the ultrasonic distance gage +# Working with the ultrasonic distance sensor -Ultrasonic distance gage (*"sonar"*) is a distance gage based on the principle of measuring the time of a sound wave (about 40 kHz) propagation to the obstacle and back. The sonar can measure the distance up to 1.5 – 3 m with the accuracy of several centimeters. +Ultrasonic distance sensor (*"sonar"*) is a distance sensor based on the principle of measuring the time of a sound wave (about 40 kHz) propagation to the obstacle and back. 
The sonar can measure the distance up to 1.5 – 3 m with the accuracy of several centimeters. -## Distance gage HC-SR04 +## HC-SR04 distance sensor hc-sr04 ## Installation -The distance gage is attached to the body using double-sided tape. For obtaining acceptable results, the use of vibro-insulation is required. A piece of PU foam may be used for vibro-insulation. +The distance sensor is attached to the body using double-sided tape. For obtaining acceptable results, the use of vibro-insulation is required. A piece of PU foam may be used for vibro-insulation. ### Connection @@ -24,17 +24,17 @@ Connect HC-SR04 to Raspberry Pi according to the connection diagram. Use 1.0 and ### Reading the data -To read the data from distance gage HC-SR04 library for working with GPIO is used – [`pigpio`](http://abyz.me.uk/rpi/pigpio/index.html). This library is pre-installed in the [Clover image](image.md), starting with version **v0.14**. For older versions of the image, use [an installation guide](http://abyz.me.uk/rpi/pigpio/download.html). +To read the data from distance sensor HC-SR04 library for working with GPIO is used – [`pigpio`](http://abyz.me.uk/rpi/pigpio/index.html). This library is pre-installed in the [Clover image](image.md), starting with version **v0.14**. For older versions of the image, use [an installation guide](http://abyz.me.uk/rpi/pigpio/download.html). To work with `pigpio`, start appropriate daemon: -```(bash) +```bash sudo systemctl start pigpiod.service ``` You can also enable `pigpiod` auto launch on system startup: -```(bash) +```bash sudo systemctl enable pigpiod.service ``` @@ -113,15 +113,15 @@ An example of charts of initial and filtered data: The source code of the ROS-node used for building the chart can be found [on Gist](https://gist.github.com/okalachev/feb2d7235f5c9636802c3cda43add253). -## Distance gage RCW-0001 +## RCW-0001 distance sensor -Ultrasonic distance gage RCW-0001 is compatible with distance gage HC-SR04. 
Use the instruction above to connect and work with it. +The RCW-0001 distance sensor is compatible with distance sensor HC-SR04. Use the instruction above to connect and work with it. ## Flight -An example of a flight program with the use of [simple_offboard](simple_offboard.md), which makes the copter fly forward until the connected ultrasonic distance gage detects an obstacle: +An example of a flight program with the use of [simple_offboard](simple_offboard.md), which makes the copter fly forward until the connected ultrasonic distance sensor detects an obstacle: ```python set_velocity(vx=0.5, frame_id='body', auto_arm=True) # flying forward at the velocity of 0.5 mps diff --git a/docs/en/swarm_in_blocks_2.md b/docs/en/swarm_in_blocks_2.md new file mode 100644 index 000000000..3d2bc2383 --- /dev/null +++ b/docs/en/swarm_in_blocks_2.md @@ -0,0 +1,247 @@ + + +Swarm in Blocks + +

+ Swarm in Blocks +

+ +[CopterHack-2023](copterhack2023.md), team **Atena**. + +## Project Status[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#project-status) + + + + + + + + + +
GitHub last commitGitHub commit activityGitHub contributorsGitHub language count
+ +## Final Video ![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg) + +

+ +

+ +--- + +## Table of contents[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#table-of-contents) + +- [Introduction](#Introduction) +- [Getting started](#getting-started) +- [Usage modes](#Usage-modes) +- [New Swarm Features](#New-Swarm-Features) +- [Conclusion](#Conclusion) + +--- + +## Introduction[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#Introduction) + +Nowadays, **swarms of drones** are getting more and more applications and being used in several different areas, from agriculture to surveillance and rescues. But controlling a high amount of drones isn't a simple task, demanding a lot of studies and complex software. + +Swarm in Blocks (from its origin in 2022) was born looking to make a *high-level interface based on the blocks language*, to make handling swarms simple, without requiring advanced knowledge in all the necessary platforms, creating tools to allow a lot of applications based on the user needs and also using the Clover platform. + +In 2023, Swarm in Blocks has taken an even bigger step, looking to fulfill our biggest vision **"It's never been easier to Swarm"**, we seek to transcend the local scope of the past project and explore the biggest problems for implementing a Swarm. For CopterHack 2023, we present Swarm in Blocks 2.0, an even more complete platform with the purpose of facing the biggest difficulties of a Swarm in a simple and polished way. + +

+ +

+ +### Swarm in Blocks 2022 + +Swarm in Blocks is a CopterHack 2022 project. It's a high-level interface based on the blocks language, which consists of fitting code parts, like a puzzle. Each script represents a functionality, for example, conditional structures, loops, or functions that receive parameters and return an instruction to the swarm. + +

+ +

+ +

+ +

+ +For more information on our project from last year, see our final article in [Swarm in Blocks 2022](https://clover.coex.tech/en/swarm_in_blocks.html). In addition, we also recommend watching our final video from last year, [Swarm in Blocks 2022 - Final Video](https://www.youtube.com/watch?v=5C-1rRnyiE8). + +Even with the huge facilities that the block platform offers, we realized that this was just the *tip of the iceberg* when it comes to deploying real swarms. Several other operational and conceptual problems in validating a real swarm still haunted the general public. With that, this year's project comes precisely with the purpose of **tackling the main problems in validating a Swarm in a simple and polished way**. + +### What's new + +As already mentioned, of the various problems that can increase the complexity of a real swarm, we decided to deal with the ones that most afflicted us and reintegrated our solutions into our central platform, building a single extremely complete and cohesive platform. 
+ +| Problem | Our Solution | +| -------- | -------- | +| Possible collision between drones (lack of safety especially for large Swarms) | Collision Avoidance System | +| Giant clutter to keep track of all Clovers in a swarm individually (several terminals, many simultaneous computers with several people to keep track of) | Swarm Station | +| Lack of basic features for handling a swarm pre-implemented in the Clover platform (such as access to battery data and raspberry computational power) | Full integration of low level data in our Swarm Station | +| Lack of security in indoor tests regarding the limitation of physical space (walls and objects) in the Swarm region | Safe Area Pop Up in Swarm Station | +| Decentralization of information and platforms for access | Web Homepage | +| Difficulty configuring physical drones for swarm | Our complete documentation with pre-designed settings for swarms in our repo image | +| Lack of a center for reports of successful tests with swarms of drones for the Clover platform describing the test conditions (odometry, etc.) | Show off section in our Gitbook | + +And many other solutions are also featured on our platform, for more information please check the solutions described clearly and in detail throughout our **Gitbook**. We recommend reading in order to understand the fundamental precepts of our platform. + +📖 **Access our [Gitbook](https://swarm-in-blocks.gitbook.io/swarm-in-blocks/background-theory/systems)!** + +💻 **Access our [GitHub](https://github.com/Grupo-SEMEAR-USP/swarm_in_blocks.git)!** + + + +--- + +## Getting started[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#getting-started) + +Our platform was made to be extremely intuitive and easy to use. 
To start (after completing the installation we suggested in our gitbook), you can run the command: + +```bash +roslaunch swarm_in_blocks simulation.launch num:=2 +``` + +After that, you can open your browser and access our homepage by typing `localhost` in the search bar. + + + +--- + +## Usage modes[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#Usage-modes) + +The Swarm in Blocks can be programmed either with the blocks interface or directly in Python and we developed three main launch modes, each one focused on a different application of the project, they are: + +- *Planning Mode:* Its main goal is to allow the user to check the drones' layout, save and load formations, before starting the simulator or using real clovers. In order to need less computational power and avoid possible errors during the simulation. +- *Simulation Mode:* In this mode happens the simulation indeed, starting the Gazebo, the necessary ROS nodes and some other tools. It allows applying the developed features, which will be explained ahead and see how they would behave in real life. +- *Navigation Mode:* The last mode will support executing everything developed in real clovers so that it's possible to control a swarm with block programming. The biggest obstacle yet is the practical testing of this mode, due to the financial difficulty of acquiring a Clover swarm. + + + +--- + +## New Swarm Features[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#New-Swarm-Features) + +With our vision of solving the problems that most plague the deployment of a real swarm, we have developed several features (and even integrated platforms), below we will list our main developments: + +### Homepage + +Like last year, we really wanted to make it easier for the user to go through our platform. That's why this year we decided to restructure our Homepage, gathering our main features and functionalities. + +

+ +

+ +### Swarm Station + +The main feature from our platform is the *Swarm Station*, which is a **3D Web Visualizer** that shows in real time all the necessary information regarding the drones' state, such as real time positioning and visualization, which clover is connected, the topics available and a lot more. Also, you can define a safe area to ensure each drone's safety, forcing them to land in case they cross the forbidden area. The front end runs completely on the web browser, saving processing and installation resources. It also comes with a web terminal, allowing the user to open several instances of a terminal emulation in just one click. + +

+ +

+ +This package uses the ROS suite `rosbridge_server` to establish a communication between the ROS environment and the web server. + +To run it, we recommend using **Firefox** browser to assure stability. But feel free to test it on other navigators. + +If you launched our `simulation.launch` from the `swarm_in_blocks` package, then you just have to run + +```bash +roslaunch swarm_station swarm_station.launch +``` + +Otherwise, you have to make sure that the `rosbridge_websocket` is running on port `9090`: + +```bash +roslaunch rosbridge_server rosbridge_websocket.launch port:=9090 +``` + +For more detailed instructions on how to use each single feature from the Swarm Station, check our [Gitbook page about the station](https://swarm-in-blocks.gitbook.io/swarm-in-blocks/). + +### Swarm Collision Avoidance + +When many drones move close to each other, collisions are very likely to occur. To avoid this problem, an algorithm was developed to avoid collisions between drones. When analyzing a collision, 3 types of scenario are possible, the case where one clover is stationary and the other in motion, the case where both are in motion and with parallel trajectories, and finally the case where both are in motion and with non-parallel trajectory. + +To turn on the collision avoidance, it is necessary to run: + +```bash +rosrun swarm_collision_avoidance swarm_collision_avoidance_node.py +``` + +

+ +

+ +### Rasp Package + +The Raspberry package was developed to instantiate a node that will be responsible for collecting essential processing, memory and temperature information from the raspberry and send it to the Swarm Station. It's the package that should be put on the `catkin_ws/src/` directory of each Raspberry Pi, because it also contains the `realClover.launch` needed to launch the swarm on real life. + +### Swarm FPV + +This package is a reformulation of one of the CopterHack 2022 implementations, the **Swarm First Person Viewer**. This year, we decided to restart its structure, making it run also completely on the web to integrate with the Swarm Station. It also depends on the `rosbridge_websocket` running on the port `9090` (default). + +

+ +

+ +### Real Swarm + +In order to fly a real swarm using clover, we decided to take an approach of putting every clover on the same ROS network / environment so that the master could talk to each one of them. + +We did this by separating each drone topics / nodes / services with namespaces. The goal is to achieve the same effect as the simulation that we've done in [**CopterHack-2022**](copterhack2022.md), so each drone would have its own `/cloverID` namespace, and the ID is the identifier for each drone. + +In other words, instead of just `simple_offboard` node for a single drone, we'd now have `/clover0/simple_offboard`, `/clover1/simple_offboard` and so on. + +To launch it, you need to first stop clover's default daemon, and then connect all Raspberries to the same network. After that, you should connect all their `roscore` to the same IP address (the master's), and then launch the `realClover.launch` file passing the `ID` argument as a parameter. Again, for more detailed information on how this works, please check out our [gitbook](https://swarm-in-blocks.gitbook.io/swarm-in-blocks/): + +```bash +sudo systemctl stop clover +roslaunch rasp_pkg realClover.launch ID:=0 +``` + +

+ +

+ +> **Note** We are aware that in the video the calibration of the drone control is not ideal, however, the objective of this test was really to validate the operation of the swarm in a real environment (which was actually done). + + + +--- + +## Conclusion[![](https://raw.githubusercontent.com/aregtech/areg-sdk/master/docs/img/pin.svg)](#Conclusion) + +Engineering and robotics challenges have always been the main driver of Team Athena, from which we seek to impact society through innovation. Last year, during CopterHack 2022, there was no lack of challenges of this type, and in them we grew and exceeded our limits, all to deliver the best possible project: **Swarm in Blocks**. All the motivation to facilitate a task as complex as the manipulation of swarms of drones, even through block programming, delighted us a lot and we hope that it delights all our users. + +With that came the Swarm in Blocks 2.0, which brought with it innovations that optimized the clover's flight control and that could allow for greater emotions in the handling of the drone, in addition to focusing on greater flight safety. +The Swarm in Blocks 2.0 presents new features for this year, such as the Web terminal, First Person View (FPV), Collision Avoidance, Clover UI and Swarm Station. +However, the work will not stop there. Our goal is to further improve our system and next steps include validating Collision Avoidance outside the simulated world and performing performance tests with network communication solutions to optimize Real Swarm. + +Finally, we thank the entire COEX team that made CopterHack 2023 possible and all the support given during the competition. We are Team Atena, creator of the Swarm in Blocks platform and we appreciate all your attention! 
+ + + +--- + +## The Atena Team + +Atena Team 2023 (Swarm in Blocks 2.0): + +- Agnes Bressan de Almeida : [GitHub](https://github.com/AgnesBressan), [LinkedIn](https://www.linkedin.com/in/agnes-bressan-148615262/) +- Felipe Tommaselli: [GitHub](https://github.com/Felipe-Tommaselli), [LinkedIn](https://www.linkedin.com/in/felipe-tommaselli-385a9b1a4/) +- Gabriel Ribeiro Rodrigues Dessotti : [GitHub](https://github.com/dessotti1), [LinkedIn](https://www.linkedin.com/in/gabriel-ribeiro-rodrigues-dessotti-8884a3216) +- José Carlos Andrade do Nascimento: [GitHub](https://github.com/joseCarlosAndrade), [LinkedIn](https://www.linkedin.com/in/jos%C3%A9-carlos-andrade-do-nascimento-71186421a) +- Lucas Sales Duarte : [GitHub](https://github.com/LucasDuarte026), [LinkedIn](https://www.linkedin.com/in/lucas-sales-duarte-a963071a1) +- Matheus Della Rocca Martins : [GitHub](https://github.com/MatheusDrm), [LinkedIn](https://www.linkedin.com/in/matheus-martins-9aba09212/) +- Nathan Fernandes Vilas Boas : [GitHub](https://github.com/uspnathan), [LinkedIn](https://www.linkedin.com/mwlite/in/nathan-fernandes-vilas-boas-047616262) + +

+ +

+ +In honor of Atena Team 2022: + +- Guilherme Soares Silvestre : [GitHub](https://github.com/guisoares9), [LinkedIn](https://www.linkedin.com/in/guilherme-soares-silvestre-76570118b/) +- Eduardo Morelli Fares: [GitHub](https://github.com/faresedu), [LinkedIn](https://www.linkedin.com/in/eduardo-fares-a271561a0/) +- Felipe Tommaselli: [GitHub](https://github.com/Felipe-Tommaselli), [LinkedIn](https://www.linkedin.com/in/felipe-tommaselli-385a9b1a4/) +- João Aires C. F. Marsicano: [GitHub](https://github.com/Playergeek181), [LinkedIn](https://www.linkedin.com/in/joao-aires-correa-fernandes-marciano-53b426195/) +- José Carlos Andrade do Nascimento: [GitHub](https://github.com/joseCarlosAndrade), [LinkedIn](https://www.linkedin.com/in/jos%C3%A9-carlos-andrade-do-nascimento-71186421a) +- Rafael Saud C. Ferro: [GitHub](https://github.com/Rafael-Saud), [LinkedIn](https://www.linkedin.com/in/rafael-saud/) + + diff --git a/docs/en/wall_aruco.md b/docs/en/wall_aruco.md index 0fe2bc737..130e005a3 100644 --- a/docs/en/wall_aruco.md +++ b/docs/en/wall_aruco.md @@ -49,10 +49,10 @@ If you are using the marker map, where the markers have equal distances along th After you fill out the map, you need to apply it. To do it, edit the file `aruco.launch`, located in `~/catkin_ws/src/clover/clover/launch/`. Change the line ``, where `map_name.txt` is the name of your map file. -If you are using markers that are not linked to horizontal surfaces (floor, ceiling), you must disable the parameter `known_tilt` both in the module `aruco_detect` and `aruco_map` in the same file. To do it automatically, enter: +If you are using markers that are not linked to horizontal surfaces (floor, ceiling), you must blank the `placement` argument in the same file: -```bash -sed -i "/known_tilt/s/value=\".*\"/value=\"\"/" /home/pi/catkin_ws/src/clover/clover/launch/aruco.launch +```xml + ``` After all the settings, call `sudo systemctl restart clover` to restart the `clover` service. 
diff --git a/docs/ru/SUMMARY.md b/docs/ru/SUMMARY.md index 347b86358..580e31a2a 100644 --- a/docs/ru/SUMMARY.md +++ b/docs/ru/SUMMARY.md @@ -120,6 +120,7 @@ * [Конкурс видео](video_contest.md) * [Образовательные конкурсы](educational_contests.md) * [Проекты на базе Клевера](projects.md) + * [Система радионавигации](nav-beacon.md) * [Advanced Clover Simulator](advanced_clover_simulator.md) * [Copter For Space](c4s.md) * [CopterCat CM4](copter_cat.md) diff --git a/docs/ru/aruco_marker.md b/docs/ru/aruco_marker.md index d75995444..d8f7d5cd9 100644 --- a/docs/ru/aruco_marker.md +++ b/docs/ru/aruco_marker.md @@ -84,12 +84,6 @@ navigate(frame_id='aruco_5', x=0, y=0, z=1) navigate(frame_id='aruco_7', x=-1, y=0, z=2) ``` -Вращаться против часовой стрелки на высоте 1.5 метра над маркером 10: - -```python -navigate(frame_id='aruco_10', x=0, y=0, z=1.5, yaw_rate=0.5) -``` - Если необходимый маркер не появится в поле зрения в течение полусекунды, дрон продолжит выполнять предыдущую команду. Подобные значения `frame_id` можно использовать и в других сервисах, например `get_telemetry`. Получение расположения дрона относительно маркера 3: diff --git a/docs/ru/camera.md b/docs/ru/camera.md index 7d90084e6..7cf15231d 100644 --- a/docs/ru/camera.md +++ b/docs/ru/camera.md @@ -1,5 +1,7 @@ # Работа с камерой +> **Note** Эта статья описывает работу с [образом версии **0.24**](https://github.com/CopterExpress/clover/releases/tag/v0.24), который пока находится в стадии тестирования. Для версии **0.23** доступна [более старая документация](https://github.com/CopterExpress/clover/blob/f78a03ec8943b596d5a99b893188a159d5319888/docs/ru/camera.md). + Для работы с основной камерой необходимо убедиться что она включена в файле `~/catkin_ws/src/clover/clover/launch/clover.launch`: @@ -54,8 +56,6 @@ raspistill -o test.jpg ### Python -Основная статья: http://wiki.ros.org/cv_bridge/Tutorials/ConvertingBetweenROSImagesAndOpenCVImagesPython. 
- Пример создания подписчика на топик с изображением с основной камеры для обработки с использованием OpenCV: ```python @@ -63,12 +63,14 @@ import rospy import cv2 from sensor_msgs.msg import Image from cv_bridge import CvBridge +from clover import long_callback -rospy.init_node('computer_vision_sample') +rospy.init_node('cv') bridge = CvBridge() +@long_callback def image_callback(data): - cv_image = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image + img = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image # Do any image processing with cv2... image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback) @@ -76,19 +78,31 @@ image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback) rospy.spin() ``` +> **Note** Обработка изображения может занимать значительное время. Это может вызвать [проблему](https://github.com/ros/ros_comm/issues/1901) в библиотеке rospy, которая приведет к обработке устаревших кадров с камеры. Для решения этой проблемы необходимо использовать декоратор `long_callback` из библиотеки `clover`, как в примере выше. + +#### Ограничение использования CPU + +При использовании топика `main_camera/image_raw` скрипт будет обрабатывать максимальное количество кадров с камеры, активно используя CPU (вплоть до 100%). 
В задачах, где обработка каждого кадра не критична, можно использовать топик, где кадры публикуются с частотой 5 Гц: `main_camera/image_raw_throttled`: + +```python +image_sub = rospy.Subscriber('main_camera/image_raw_throttled', Image, image_callback, queue_size=1) +``` + +#### Публикация изображений + Для отладки обработки изображения можно публиковать отдельный топик с обработанным изображением: ```python image_pub = rospy.Publisher('~debug', Image) ``` -Публикация обработанного изображения (в конце функции image_callback): +Публикация обработанного изображения: ```python -image_pub.publish(bridge.cv2_to_imgmsg(cv_image, 'bgr8')) +image_pub.publish(bridge.cv2_to_imgmsg(img, 'bgr8')) ``` -Получаемые изображения можно просматривать используя [web_video_server](web_video_server.md). +Получаемые изображения можно просматривать используя [web_video_server](web_video_server.md) или [rqt](rviz.md). #### Получение одного кадра @@ -99,12 +113,12 @@ import rospy from sensor_msgs.msg import Image from cv_bridge import CvBridge -rospy.init_node('computer_vision_sample') +rospy.init_node('cv') bridge = CvBridge() # ... 
-# Получение кадра: +# Retrieve a frame: img = bridge.imgmsg_to_cv2(rospy.wait_for_message('main_camera/image_raw', Image), 'bgr8') ``` @@ -121,38 +135,32 @@ img = bridge.imgmsg_to_cv2(rospy.wait_for_message('main_camera/image_raw', Image ```python import rospy from pyzbar import pyzbar +import cv2 from cv_bridge import CvBridge from sensor_msgs.msg import Image +from clover import long_callback +rospy.init_node('cv') bridge = CvBridge() -rospy.init_node('barcode_test') - -# Image subscriber callback function -def image_callback(data): - cv_image = bridge.imgmsg_to_cv2(data, 'bgr8') # OpenCV image - barcodes = pyzbar.decode(cv_image) +@long_callback +def image_callback(msg): + img = bridge.imgmsg_to_cv2(msg, 'bgr8') + barcodes = pyzbar.decode(img) for barcode in barcodes: - b_data = barcode.data.decode("utf-8") + b_data = barcode.data.decode('utf-8') b_type = barcode.type (x, y, w, h) = barcode.rect xc = x + w/2 yc = y + h/2 - print("Found {} with data {} with center at x={}, y={}".format(b_type, b_data, xc, yc)) + print('Found {} with data {} with center at x={}, y={}'.format(b_type, b_data, xc, yc)) -image_sub = rospy.Subscriber('main_camera/image_raw', Image, image_callback, queue_size=1) +image_sub = rospy.Subscriber('main_camera/image_raw_throttled', Image, image_callback, queue_size=1) rospy.spin() ``` -Скрипт будет занимать 100% процессора. Для искусственного замедления работы скрипта можно запустить [throttling](http://wiki.ros.org/topic_tools/throttle) кадров с камеры, например, в 5 Гц (`main_camera.launch`): - -```xml - -``` - -Топик для подписчика в этом случае необходимо поменять на `main_camera/image_raw_throttled`. +> **Hint** Смотрите другие примеры по работе с компьютерным зрением в каталоге `~/examples` [образа для RPi](image.md). 
## Запись видео diff --git a/docs/ru/camera_setup.md b/docs/ru/camera_setup.md index 83c87d138..cc987752d 100644 --- a/docs/ru/camera_setup.md +++ b/docs/ru/camera_setup.md @@ -66,7 +66,7 @@ #### Камера направлена вверх, шлейф вперёд ```xml - + ``` diff --git a/docs/ru/copterhack2023.md b/docs/ru/copterhack2023.md index 5d2484c46..955e87f16 100644 --- a/docs/ru/copterhack2023.md +++ b/docs/ru/copterhack2023.md @@ -8,6 +8,36 @@ CopterHack 2023 — это международный конкурс по ра На конкурс принимаются проекты с открытым исходным кодом и совместимые с платформой квадрокоптера "Клевер". На протяжении конкурса команды работают на собственными идеями и разработками, приближая их к состоянию готового продукта. В этом участникам помогают эксперты отрасли через лекции и регулярную обратную связь. +Финал конкурса CopterHack 2022 прошел 27 мая 2023. Победителями стала команда 🇷🇺 **[Clover Cloud Platform](../en/clover-cloud-platform.html)**. + +## Полный стрим финала + + + +## Проекты участников конкурса {#participants} + +|Место|Команда|Проект|Балл| +|:-:|-|-|-| +|1|🇷🇺 Clover Cloud Team|[Clover Cloud Platform](../en/clover-cloud-platform.html)|21.7| +|2|🇧🇾 FTL|[Advanced Clover 2](../en/advanced_clover_simulator_platform.html)|21| +|3|🇨🇦 Clover with Motion Capture System|[Clover with Motion Capture System](../en/mocap_clover.html)|20.5| +|4|🇧🇷 Atena|[Swarm in Blocks 2](../en/swarm_in_blocks_2.html)|20.3| +|5|🇷🇺 C305|[Система радио-навигации](nav-beacon.md)|17.5| +|6|🇮🇳 DJS PHOENIX|[Autonomous Racing Drone](../en/djs_phoenix_chetak.html)|14.6| +|7|🇷🇺 Лицей №128|[Платформа для зарядки квадрокоптера](../en/liceu128.html)|13.7| +|✕|🇰🇬 Zavarka|[Система обмена грузами с помощью конвейера](https://github.com/aiurobotics/clover/blob/conveyance/docs/ru/conveyance.md)|| +|✕|🇷🇺 FSOTM|[Дрон-перехватчик](https://github.com/deadln/clover/blob/interceptor/docs/ru/interceptor.md)|| +|✕|🇰🇬 
Бездомные|[Дрон-бездомный](https://github.com/Isa-jesus/clover/blob/trash-collector/docs/ru/show_maker.md)|| +|✕|🇷🇺 Digital otters|[Digital otters](https://github.com/Mentalsupernova/clover_cool/blob/new-article.md/docs/ru/new-article.md)|| +|✕|🇷🇺 Light Flight|[Сопровождение БПЛА при посадке](https://github.com/SirSerow/clover_inertial_ns/blob/inertial-1/Description.md)|| +|✕|🇰🇬 LiveSavers|[LiveSavers](https://github.com/Sarvar00/clover/blob/livesavers/docs/ru/livesaver.md)|| +|✕|🇷🇺 XenCOM|[Bound by fate](https://github.com/xenkek/clover/blob/xenkek-patch-1/docs/ru/bound_by_fate.md)|| +|✕|🇷🇺 Ava_Clover|[DoubleClover](https://github.com/bessiaka/clover/blob/Ava_Clover/docs/ru/soosocta.md)|| +|✕|🇷🇺 TPU_1|[Совместная транспортировка груза](https://github.com/shamoleg/clover/blob/tpu_1/docs/ru/tpu_1.md)|| +|✕|🇷🇺 TPU_2|[Алгоритм полета сквозь лесную местность](https://github.com/shamoleg/clover/blob/tpu_2/docs/ru/tpu_2.md)| | + +Смотрите все оценки по критериям в [полной таблице](https://docs.google.com/spreadsheets/d/1qTpW8zFVdSEGnbtOvMgQD6DcYwu8URFt1RKOCeUaOe8). + ## Этапы CopterHack 2023 Отборочный и проектный этапы конкурса проходят в онлайн-формате, формат проведения финала – гибридный (оффлайн + онлайн). Конкурс подразумевает ежемесячные апдейты от команд с получением регулярной обратной связи от жюри. Для участия в заключительном этапе необходимо подготовить финальное видео и презентацию о результатах проекта. diff --git a/docs/ru/educational_contests.md b/docs/ru/educational_contests.md index bbab6f342..48b0e12bd 100644 --- a/docs/ru/educational_contests.md +++ b/docs/ru/educational_contests.md @@ -26,7 +26,7 @@ Прием заявок осуществляется через [Google Форму](https://docs.google.com/forms/d/e/1FAIpQLScE2kN5dO2OYNSM8hOYzOa5Qvh2uDdd9Fjx8OnL1W93bfEBgw/viewform). -Дедлайн подачи заявок: 1 сентября 2022 года. +Дедлайн подачи заявок: 30 ноября 2022 года. 
### Призы @@ -64,7 +64,7 @@ Прием заявок осуществляется через [Google Форму](https://docs.google.com/forms/d/e/1FAIpQLSdelVy6yQ1iN6u88KeiEIKGj7gGaM0xccSt2tiYKB46ICmjkQ/viewform). -Дедлайн подачи заявок: 1 сентября 2022 года. +Дедлайн подачи заявок: 30 ноября 2022 года. ### Призы @@ -106,7 +106,7 @@ Прием заявок осуществляется через [Google Форму](https://docs.google.com/forms/d/e/1FAIpQLSdf2Q68X4hPnFE9f3EP95AxPNnzHKqIsFHtTRT6EBKiH93wzg/viewform). -Дедлайн подачи заявок: 1 сентября 2022 года +Дедлайн подачи заявок: 30 ноября 2022 года ### Призы diff --git a/docs/ru/frames.md b/docs/ru/frames.md index 19a8b1197..d96a0b899 100644 --- a/docs/ru/frames.md +++ b/docs/ru/frames.md @@ -11,6 +11,7 @@ * `base_link` — координаты относительно квадрокоптера: схематичное изображение квадрокоптера на иллюстрации; * `body` — координаты относительно квадрокоптера без учета наклонов по тангажу и крену: красная, синяя и зеленая линии на иллюстрации; * `navigate_target` – координаты точки, в которую сейчас летит дрон (с использованием [navigate](simple_offboard.md#navigate)); +* `terrain` – координаты относительно пола в текущей позиции коптера (см. сервис [set_altitude](simple_offboard.md#set_altitude)) * `setpoint` – текущий setpoint по позиции; * `main_camera_optical` – система координат, [связанная с основной камерой](camera_setup.md#frame). diff --git a/docs/ru/models.md b/docs/ru/models.md index 6a925c71d..2428edce4 100644 --- a/docs/ru/models.md +++ b/docs/ru/models.md @@ -200,13 +200,20 @@ ### 3D печать +#### Механический захват + +* **Левая клешня**: [`grip_left.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/stl/grip_left.stl). +* **Правая клешня**: [`grip_right.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/stl/grip_right.stl). + +Материал: SBS Glass. Заполнение 100%. Количество: 1 шт. 
+ #### Груз для магнитного захвата -* Груз для магнитного захвата: [`load_for_magnetic_grip.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/load_for_magnetic_grip.stl) -* Дополнение-для-подставки-груза: [`add-on_for_load_support.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/add-on_for_load_support.stl) -* Подставка под теннисный мяч для магнитного захвата: [`tennis_ball_stand_for_magnetic_grip.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/tennis_ball_stand_for_magnetic_grip.stl). +* **Груз для магнитного захвата**: [`load_for_magnetic_grip.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/load_for_magnetic_grip.stl). +* **Дополнение для подставки груза**: [`add-on_for_load_support.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/add-on_for_load_support.stl). +* **Подставка под теннисный мяч для магнитного захвата**: [`tennis_ball_stand_for_magnetic_grip.stl`](https://github.com/CopterExpress/clover/raw/master/docs/assets/grip_load/tennis_ball_stand_for_magnetic_grip.stl). -Материал: PETG. Заполнение 100%. Количество: 1шт. +Материал: PETG. Заполнение 100%. Количество: 1 шт. ## Клевер 4 diff --git a/docs/ru/nav-beacon.md b/docs/ru/nav-beacon.md new file mode 100644 index 000000000..6ff8c931a --- /dev/null +++ b/docs/ru/nav-beacon.md @@ -0,0 +1,49 @@ +# Система радио-навигации + +[CopterHack-2023](copterhack2023.md), команда **C305**. + +## Информация о команде + +Мы команда студентов, представляющая Центр Проектной Деятельности Дальневосточного федерального университета (C305). + +Состав команды: + +* Антонов Георгий, @SonSobaki, инженер. +* Филимонов Сергей, @Lukerrr, программист. +* Смадыч Никита, @Sm_nikita, инженер-программист. +* Максим Харченко, @milian_c305, программист. 
+ +## Описание проекта + +### Идея проекта + +Проект направлен на разработку системы позиционирования внутри помещений для квадрокоптеров с использованием широкополосных передатчиков DWM1000. + +Разрабатываемая система позиционирования использует передатчики DWM1000, которые обеспечивают широкополосную связь на радиочастотах. Она предназначена для обеспечения точного и надежного позиционирования квадрокоптеров внутри зданий, где оптические системы могут оказаться ограниченными или неэффективными. + +Одной из ключевых особенностей этой системы является ее способность обеспечивать высокую точность позиционирования и дальность действия. Широкополосные передатчики позволяют достичь высокой разрешающей способности и низкой задержки передачи данных, что особенно важно в случае быстрого и точного позиционирования квадрокоптеров. Благодаря использованию радиочастотных сигналов, система не подвержена помехам от окружающей среды, таких как освещение или преграды. Это обеспечивает стабильную работу системы в различных условиях и позволяет использовать ее в помещениях с ограниченной видимостью или сложным рельефом. + +В целом, разрабатываемая система предлагает более точное, надежное и экономичное решение по сравнению с оптическими системами. + +### Ключевые особенности + +* Точность системы позиционирования +-0.1м. +* Поддержка indoor и outdoor навигации. +* Лёгкая масштабируемость системы. +* Лёгкое развёртывание и лёгкая настройка системы. 
+ +### Презентационный ролик + +[![](https://img.youtube.com/vi/ra45vH3IFuI/0.jpg)](https://www.youtube.com/watch?v=ra45vH3IFuI) + +### Документация к проекту + +* [Работа с UWB модулями](https://github.com/NikitaS2001/dw1000-stm32/blob/main/README.md) +* [Запуск ROS пакета позиционирования](https://github.com/NikitaS2001/dwm1000_pose/blob/main/README.md) +* [Настройка системы позиционирования](https://github.com/NikitaS2001/dwm1000_pose/blob/main/docs/ru/navigation_system_setup.md) + +### Ресурсы проекта + +* [Исходный код прошивок UWB модулей](https://github.com/NikitaS2001/dw1000-stm32) +* [Исходный код ROS пакета позиционирования](https://github.com/NikitaS2001/dwm1000_pose) +* [Модели корпуса UWB модулей](https://github.com/NikitaS2001/dw1000-stm32/tree/main/3D) diff --git a/docs/ru/parameters.md b/docs/ru/parameters.md index 312de3e06..1b53a4a73 100644 --- a/docs/ru/parameters.md +++ b/docs/ru/parameters.md @@ -39,17 +39,27 @@ |Параметр|Значение|Примечание| |-|-|-| -|`EKF2_AID_MASK`|26|Чекбоксы: *flow* + *vision position* + *vision yaw*.
Подробнее: [Optical Flow](optical_flow.md), [ArUco-маркеры](aruco_map.md), [GPS](gps.md).| +|`EKF2_AID_MASK`\*|26|Чекбоксы: *flow* + *vision position* + *vision yaw*.
Подробнее: [Optical Flow](optical_flow.md), [ArUco-маркеры](aruco_map.md), [GPS](gps.md).| |`EKF2_OF_DELAY`|0|| |`EKF2_OF_QMIN`|10|| |`EKF2_OF_N_MIN`|0.05|| |`EKF2_OF_N_MAX`|0.2|| -|`EKF2_HGT_MODE`|2 (*Range sensor*)|При наличии [дальномера](laser.md) и полете над ровным полом| +|`EKF2_HGT_MODE`\*|3 (*Vision*)|При наличии [дальномера](laser.md) и полете над ровным полом — 2 (*Range sensor*)| |`EKF2_EVA_NOISE`|0.1|| |`EKF2_EVP_NOISE`|0.1|| |`EKF2_EV_DELAY`|0|| |`EKF2_MAG_TYPE`|5 (*None*)|Выключение магнитометра (при навигации внутри помещения)| +\* — начиная с версии PX4 1.14 помеченные звездочкой параметры заменены на следующие: + +|Параметр|Значение|Примечание| +|-|-|-| +|`EKF2_EV_CTRL`|11|Чекбоксы: *Horizontal position* + *Vertical position* + *Yaw*| +|`EKF2_GPS_CTRL`|0|Все чекбоксы сняты| +|`EKF2_BARO_CTRL`|0 (*Disabled*)|Барометр отключен| +|`EKF2_OF_CTRL`|1 (*Enabled*)|Optical flow включен| +|`EKF2_HGT_REF`|3 (*Vision*)|При наличии [дальномера](laser.md) и полете над ровным полом — 2 (*Range sensor*)| + > **Info** См. также: список параметров по умолчанию в [симуляторе](simulation.md): https://github.com/CopterExpress/clover/blob/master/clover_simulation/airframes/4500_clover. @@ -60,8 +70,8 @@ Estimator это подсистема, которая вычисляет текущее состояние (state) коптера, используя показания с датчиков. 
В состояние коптера входит: -* угловая скорость коптера – pitch_rate, roll_rate, yaw_rate; -* ориентация коптера (в локальной системе координат) – pitch (тангаж), roll (крен), yaw (рысканье) (одно из представлений); +* угловая скорость коптера – roll_rate, pitch_rate, yaw_rate; +* ориентация коптера (в локальной системе координат) – roll (крен), pitch (тангаж), yaw (рысканье) (одно из представлений); * позиция коптера (в локальной системе координат) – x, y, z; * скорость коптера (в локальной системе координат) – vx, vy, vz; * глобальные координаты коптера – latitude, longitude, altitude; diff --git a/docs/ru/simple_offboard.md b/docs/ru/simple_offboard.md index 29938e253..e554abb38 100644 --- a/docs/ru/simple_offboard.md +++ b/docs/ru/simple_offboard.md @@ -1,5 +1,7 @@ # Автономный полет +> **Note** Эта статья описывает работу с [образом версии **0.24**](https://github.com/CopterExpress/clover/releases/tag/v0.24), который пока находится в стадии тестирования. Для версии **0.23** доступна [более старая документация](https://github.com/CopterExpress/clover/blob/f78a03ec8943b596d5a99b893188a159d5319888/docs/ru/simple_offboard.md). + Модуль `simple_offboard` пакета `clover` предназначен для упрощенного программирования автономного полета дрона ([режим](modes.md) `OFFBOARD`). Он позволяет устанавливать желаемые полетные задачи и автоматически трансформирует [систему координат](frames.md). `simple_offboard` является высокоуровневым способом взаимодействия с полетным контроллером. Для более низкоуровневой работы см. [mavros](mavros.md). 
@@ -20,6 +22,9 @@ rospy.init_node('flight') get_telemetry = rospy.ServiceProxy('get_telemetry', srv.GetTelemetry) navigate = rospy.ServiceProxy('navigate', srv.Navigate) navigate_global = rospy.ServiceProxy('navigate_global', srv.NavigateGlobal) +set_altitude = rospy.ServiceProxy('set_altitude', srv.SetAltitude) +set_yaw = rospy.ServiceProxy('set_yaw', srv.SetYaw) +set_yaw_rate = rospy.ServiceProxy('set_yaw_rate', srv.SetYawRate) set_position = rospy.ServiceProxy('set_position', srv.SetPosition) set_velocity = rospy.ServiceProxy('set_velocity', srv.SetVelocity) set_attitude = rospy.ServiceProxy('set_attitude', srv.SetAttitude) @@ -51,11 +56,11 @@ land = rospy.ServiceProxy('land', Trigger) * `lat, lon` – широта, долгота *(градусы)*, необходимо наличие [GPS](gps.md); * `alt` – высота в глобальной системе координат (стандарт [WGS-84](https://ru.wikipedia.org/wiki/WGS_84), не AMSL!), необходимо наличие [GPS](gps.md); * `vx, vy, vz` – скорость коптера *(м/с)*; -* `pitch` – угол по тангажу *(радианы)*; * `roll` – угол по крену *(радианы)*; +* `pitch` – угол по тангажу *(радианы)*; * `yaw` – угол по рысканью *(радианы)*; -* `pitch_rate` – угловая скорость по тангажу *(рад/с)*; * `roll_rate` – угловая скорость по крену *(рад/с)*; +* `pitch_rate` – угловая скорость по тангажу *(рад/с)*; * `yaw_rate` – угловая скорость по рысканью *(рад/с)*; * `voltage` – общее напряжение аккумулятора *(В)*; * `cell_voltage` – напряжение аккумулятора на ячейку *(В)*. @@ -100,7 +105,6 @@ rosservice call /get_telemetry "{frame_id: ''}" * `x`, `y`, `z` – координаты *(м)*; * `yaw` – угол по рысканью *(радианы)*; -* `yaw_rate` – угловая скорость по рысканью (применяется при установке yaw в `NaN`) *(рад/с)*; * `speed` – скорость полета (скорость движения setpoint) *(м/с)*; * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); * `frame_id` – [система координат](frames.md), в которой заданы `x`, `y`, `z` и `yaw` (по умолчанию: `map`). 
@@ -119,7 +123,7 @@ navigate(x=0, y=0, z=1.5, speed=0.5, frame_id='body', auto_arm=True) navigate(x=5, y=0, z=3, speed=0.8) ``` -Полет в точку 5:0 без изменения угла по рысканью (`yaw` = `NaN`, `yaw_rate` = 0): +Полет в точку 5:0 без изменения угла по рысканью: ```python navigate(x=5, y=0, z=3, speed=0.8, yaw=float('nan')) @@ -149,22 +153,10 @@ navigate(yaw=math.radians(-90), frame_id='body') navigate(x=3, y=2, z=2, speed=1, frame_id='aruco_map') ``` -Вращение на месте со скоростью 0.5 рад/c (против часовой): - -```python -navigate(x=0, y=0, z=0, yaw=float('nan'), yaw_rate=0.5, frame_id='body') -``` - -Полет вперед 3 метра со скоростью 0.5 м/с, вращаясь по рысканью со скоростью 0.2 рад/с: - -```python -navigate(x=3, y=0, z=0, speed=0.5, yaw=float('nan'), yaw_rate=0.2, frame_id='body') -``` - Взлет на высоту 2 м (командная строка): ```bash -rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, yaw_rate: 0.0, speed: 0.5, frame_id: 'body', auto_arm: true}" +rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, speed: 0.5, frame_id: 'body', auto_arm: true}" ``` > **Note** При программировании миссии дрона в терминах "вперед-назад-влево-вправо" рекомендуется использовать систему координат `navigate_target` вместо `body`, чтобы не учитывать неточность прилета дрона в предыдущую целевую точку при вычислении следующей. @@ -178,12 +170,11 @@ rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, yaw_rate: 0.0, speed * `lat`, `lon` – широта и долгота *(градусы)*; * `z` – высота *(м)*; * `yaw` – угол по рысканью *(радианы)*; -* `yaw_rate` – угловая скорость по рысканью (при установке yaw в `NaN`) *(рад/с)*; * `speed` – скорость полета (скорость движения setpoint) *(м/с)*; * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); * `frame_id` – [система координат](frames.md), в которой заданы `z` и `yaw` (по умолчанию: `map`). 
-> **Note** Для полета без изменения угла по рысканью достаточно установить `yaw` в `NaN` (значение угловой скорости по умолчанию – 0). +> **Note** Для полета без изменения угла по рысканью достаточно установить `yaw` в `NaN`. Полет в глобальную точку со скоростью 5 м/с, оставаясь на текущей высоте (`yaw` установится в 0, коптер сориентируется передом на восток): @@ -191,7 +182,7 @@ rosservice call /navigate "{x: 0.0, y: 0.0, z: 2, yaw: 0.0, yaw_rate: 0.0, speed navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, frame_id='body') ``` -Полет в глобальную точку без изменения угла по рысканью (`yaw` = `NaN`, `yaw_rate` = 0): +Полет в глобальную точку без изменения угла по рысканью: ```python navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, yaw=float('nan'), frame_id='body') @@ -200,7 +191,71 @@ navigate_global(lat=55.707033, lon=37.725010, z=0, speed=5, yaw=float('nan'), fr Полет в глобальную точку (командная строка): ```bash -rosservice call /navigate_global "{lat: 55.707033, lon: 37.725010, z: 0.0, yaw: 0.0, yaw_rate: 0.0, speed: 5.0, frame_id: 'body', auto_arm: false}" +rosservice call /navigate_global "{lat: 55.707033, lon: 37.725010, z: 0.0, yaw: 0.0, speed: 5.0, frame_id: 'body', auto_arm: false}" +``` + +### set_altitude + +Изменить целевую высоту полета. Сервис используется для независимой установки высоты (и системы координат для расчета высоты) в режимах полета [`navigate`](#navigate) и [`set_position`](#setposition). + +Параметры: + +* `z` – высота полета *(м)*; +* `frame_id` – [система координат](frames.md) для расчета высоты полета. + +Установить высоту полета в 2 м относительно пола: + +```python +set_altitude(z=2, frame_id='terrain') +``` + +Установить высоту полета в 1 м относительно [маркерного поля](aruco.md): + +```python +set_altitude(z=1, frame_id='aruco_map') +``` + +### set_yaw + +Изменить целевой угол по рысканью (и систему координат для его расчета), оставив предыдущую команду в силе. 
+ +Параметры: + +* yaw – угол по рысканью *(радианы)*; +* frame_id – [система координат](frames.md) для расчета угла по рысканью. + +Повернуться на 90 градусов по часовой (продолжая выполнять предыдущую команду): + +```python +set_yaw(yaw=math.radians(-90), frame_id='body') +``` + +Установить угол по рысканью в ноль в системе координат [маркерного поля](aruco.md): + +```python +set_yaw(yaw=0, frame_id='aruco_map') +``` + +Остановить вращение по рысканью (при использовании [`set_yaw_rate`](#setyawrate)): + +```python +set_yaw(yaw=float('nan')) +``` + +### set_yaw_rate + +Изменить целевую угловую скорость по рысканью, оставив предыдущую команду в силе. + +Параметры: + +* yaw_rate – угловая скорость по рысканью *(рад/с)*. + +Положительное направление вращения (при виде сверху) – против часовой. + +Начать вращение на месте со скоростью 0.5 рад/c против часовой (продолжая выполнять предыдущую команду): + +```python +set_yaw_rate(yaw_rate=0.5) ``` ### set_position @@ -213,7 +268,6 @@ rosservice call /navigate_global "{lat: 55.707033, lon: 37.725010, z: 0.0, yaw: * `x`, `y`, `z` – координаты точки *(м)*; * `yaw` – угол по рысканью *(радианы)*; -* `yaw_rate` – угловая скорость по рысканью (при установке yaw в NaN) *(рад/с)*; * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); * `frame_id` – [система координат](frames.md), в которой заданы `x`, `y`, `z` и `yaw` (по умолчанию: `map`). @@ -235,19 +289,12 @@ set_position(x=0, y=0, z=3, frame_id='body') set_position(x=1, y=0, z=0, frame_id='body') ``` -Вращение на месте со скоростью 0.5 рад/c: - -```python -set_position(x=0, y=0, z=0, frame_id='body', yaw=float('nan'), yaw_rate=0.5) -``` - ### set_velocity Установить скорости и рысканье. 
* `vx`, `vy`, `vz` – требуемая скорость полета *(м/с)*; * `yaw` – угол по рысканью *(радианы)*; -* `yaw_rate` – угловая скорость по рысканью (при установке yaw в NaN) *(рад/с)*; * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); * `frame_id` – [система координат](frames.md), в которой заданы `vx`, `vy`, `vz` и `yaw` (по умолчанию: `map`). @@ -265,7 +312,7 @@ set_velocity(vx=1, vy=0.0, vz=0, frame_id='body') Параметры: -* `pitch`, `roll`, `yaw` – необходимый угол по тангажу, крену и рысканью *(радианы)*; +* `roll`, `pitch`, `yaw` – необходимый угол по крену, тангажу и рысканью *(радианы)*; * `thrust` – уровень газа от 0 (нет газа, пропеллеры остановлены) до 1 (полный газ); * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); * `frame_id` – [система координат](frames.md), в которой задан `yaw` (по умолчанию: `map`). @@ -276,7 +323,7 @@ set_velocity(vx=1, vy=0.0, vz=0, frame_id='body') Параметры: -* `pitch_rate`, `roll_rate`, `yaw_rate` – угловая скорость по тангажу, крену и рыканью *(рад/с)*; +* `roll_rate`, `pitch_rate`, `yaw_rate` – угловая скорость по крену, тангажу и рысканью *(рад/с)*; * `thrust` – уровень газа от 0 (нет газа, пропеллеры остановлены) до 1 (полный газ). * `auto_arm` – перевести коптер в `OFFBOARD` и заармить автоматически (**коптер взлетит**); @@ -305,6 +352,16 @@ rosservice call /land "{}" > **Caution** В более новых версиях PX4 коптер выйдет из режима LAND в ручной режим, если сильно перемещать стики. +### release + +В случае необходимости приостановки отправки setpoint-сообщений, используйте сервис `simple_offboard/release`: + +```python +release = rospy.ServiceProxy('simple_offboard/release', Trigger) + +release() +``` + ## Дополнительные материалы * [Полеты в поле ArUco-маркеров](aruco.md). 
diff --git a/docs/ru/simulation_m1.md b/docs/ru/simulation_m1.md index 714a6bdd1..978b071f3 100644 --- a/docs/ru/simulation_m1.md +++ b/docs/ru/simulation_m1.md @@ -9,7 +9,7 @@ 1. Скачайте UTM с официального сайта [mac.getutm.app](https://mac.getutm.app/) и установите его. -2. Скачайте исходный образ установщика Ubuntu 20.04 для архитектуры ARM64 по ссылке: https://cdimage.ubuntu.com/focal/daily-live/current/focal-desktop-arm64.iso. +2. Скачайте исходный образ установщика Ubuntu 20.04 для архитектуры ARM64 по ссылке: https://clovervm.ams3.digitaloceanspaces.com/focal-desktop-arm64.iso. 3. Создайте новую виртуальную машину в UTM, выбирая следующие настройки: * **Тип**: Virtualize. diff --git a/docs/ru/simulation_native.md b/docs/ru/simulation_native.md index a3f738ae1..10dce3794 100644 --- a/docs/ru/simulation_native.md +++ b/docs/ru/simulation_native.md @@ -147,6 +147,8 @@ sudo systemctl enable roscore sudo systemctl start roscore ``` +### Конфигурация веб-инструментов + Установите любой веб-сервер, чтобы раздавать веб-инструменты Клевера (директория `~/.ros/www`), например, Monkey: ```bash @@ -158,3 +160,11 @@ sudo cp ~/catkin_ws/src/clover/builder/assets/monkey.service /etc/systemd/system sudo systemctl enable monkey sudo systemctl start monkey ``` + +Создайте директорию `~/.ros/www` следующей командой: + +```bash +rosrun clover www +``` + +При обновлении набора пакетов, содержащих веб-часть (через каталог `www`), также необходимо выполнение данной команды. 
diff --git a/docs/ru/snippets.md b/docs/ru/snippets.md index 85af2531f..89d15d60f 100644 --- a/docs/ru/snippets.md +++ b/docs/ru/snippets.md @@ -154,7 +154,7 @@ new_pose = tf_buffer.transform(pose, frame_id, transform_timeout) PI_2 = math.pi / 2 telem = get_telemetry() -flipped = abs(telem.pitch) > PI_2 or abs(telem.roll) > PI_2 +flipped = abs(telem.roll) > PI_2 or abs(telem.pitch) > PI_2 ``` ### # {#angle-hor} @@ -165,7 +165,7 @@ flipped = abs(telem.pitch) > PI_2 or abs(telem.roll) > PI_2 PI_2 = math.pi / 2 telem = get_telemetry() -flipped = not -PI_2 <= telem.pitch <= PI_2 or not -PI_2 <= telem.roll <= PI_2 +flipped = not -PI_2 <= telem.roll <= PI_2 or not -PI_2 <= telem.pitch <= PI_2 angle_to_horizon = math.atan(math.hypot(math.tan(telem.pitch), math.tan(telem.roll))) if flipped: angle_to_horizon = math.pi - angle_to_horizon @@ -217,9 +217,9 @@ def pose_update(pose): # Обработка новых данных о позиции коптера pass -rospy.Subscriber('/mavros/local_position/pose', PoseStamped, pose_update) -rospy.Subscriber('/mavros/local_position/velocity', TwistStamped, velocity_update) -rospy.Subscriber('/mavros/battery', BatteryState, battery_update) +rospy.Subscriber('mavros/local_position/pose', PoseStamped, pose_update) +rospy.Subscriber('mavros/local_position/velocity', TwistStamped, velocity_update) +rospy.Subscriber('mavros/battery', BatteryState, battery_update) rospy.Subscriber('mavros/rc/in', RCIn, rc_callback) rospy.spin() @@ -335,7 +335,7 @@ def flip(): while True: telem = get_telemetry() - flipped = abs(telem.pitch) > PI_2 or abs(telem.roll) > PI_2 + flipped = abs(telem.roll) > PI_2 or abs(telem.pitch) > PI_2 if flipped: break @@ -360,7 +360,7 @@ from pymavlink import mavutil from mavros_msgs.srv import CommandLong from mavros_msgs.msg import State -send_command = rospy.ServiceProxy('/mavros/cmd/command', CommandLong) +send_command = rospy.ServiceProxy('mavros/cmd/command', CommandLong) def calibrate_gyro(): rospy.loginfo('Calibrate gyro') @@ -491,3 +491,11 @@ 
param_set(param_id='COM_FLTMODE1', value=ParamValue(integer=8)) # Изменить параметр типа FLOAT: param_set(param_id='MPC_Z_P', value=ParamValue(real=1.5)) ``` + +### # {#is-simulation} + +Проверить, что код запущен в [симуляции Gazebo](simulation.md): + +```python +is_simulation = rospy.get_param('/use_sim_time', False) +``` diff --git a/docs/ru/testing.md b/docs/ru/testing.md index aff01f867..2a8275e9b 100644 --- a/docs/ru/testing.md +++ b/docs/ru/testing.md @@ -23,6 +23,7 @@ * **Корректная работа optical flow и всех его топиков, полет по optical flow** * **Полет по полю маркеров** * **Корректная установка OpenCV – возможность использования из Python и C++** +* Работа примера с компьютерным зрением: `red_circle.py` * **Отсутствие неожиданного жора памяти и CPU (можно контролировать с помощью `selfcheck.py` или `htop`)** * Автоматическая перекалибровка камеры при изменении разрешения @@ -46,6 +47,13 @@ * **Показывает возникающие ошибки и опечатки, допущенные в .launch файлах** * **Проверка на throttling** +### Автоматизированные тесты + +* **Корректная работа автоматизированных тестов**: + * Тест автономного полета: `rosrun clover autotest_flight.py` + * Тест автономного полета по маркерам: `rosrun clover autotest_aruco.py` + * Тест LED-ленты: `rosrun clover autotest_led.py` + ### Тесты simple_offboard * **Корректная работа simple_offboard – взлет, полет в точку в любом фрейме, отсутствие проблем с `yaw` и `yaw_rate`** @@ -53,6 +61,7 @@ * **В фрейме `aruco_map`** * **В фрейме `map`** * **В фрейме `navigate_target`** +* **В фрейме `terrain`** * Корректное выполнения флипа * **Возможность лететь к отдельным маркерам в карте, которые вне кадра и в кадре** * **Корректное детектирование статуса kill switch при выполнение команды с флагом `auto_arm`** @@ -70,11 +79,6 @@ * Полет по Optical Flow над 1 маркером * `aruco_map` не падает в случае маленьких размеров карты и маркеров -### Тесты [pigpiod](gpio.md) - -* Корректная работа pigpiod, возможность работы с сонаром, 
сервой и электромагнитом по мануалу -* Одновременная работа pigpiod и rpi_ws281x (правильная работа светодиодной ленты и сервы) - ### Тесты [LED-ленты](leds.md) * **Работает нода LED ленты на RPi 4** @@ -83,6 +87,11 @@ * **Низкоуровневое управление отдельными диодами** * **Высокоуровневое управление эффектами** +### Тесты [pigpiod](gpio.md) + +* Корректная работа pigpiod, возможность работы с сонаром, сервой и электромагнитом по мануалу +* Одновременная работа pigpiod и rpi_ws281x (правильная работа светодиодной ленты и сервы) + ### [Блочное программирование](blocks.md) * Корректная работа функционала блочного программирования @@ -95,6 +104,7 @@ * ROS ноды не падают в случае потери всех соединений (удобно проверять с экраном) * Работает `rosshow` * Работает `espeak` +* Работает аргумент `rectify` в `main_camera.launch` * *Работает LIRC* * *Работа iOS-пульта из коробки* * *Работа Android-пульта из коробки* diff --git a/docs/ru/wall_aruco.md b/docs/ru/wall_aruco.md index c26213946..ca3bd97ac 100644 --- a/docs/ru/wall_aruco.md +++ b/docs/ru/wall_aruco.md @@ -51,10 +51,10 @@ sed -i "/direction_y/s/default=\".*\"/default=\"\"/" /home/pi/catkin_ws/src/clov После того, как вы заполните карту, необходимо применить ее — для этого отредактируйте файл `aruco.launch`, расположенный в `~/catkin_ws/src/clover/clover/launch/`. Измените в нем строку ``, где `map_name.txt` название вашего файла с картой. -При использовании маркеров, не привязанных к горизонтальным плоскостям(пол, потолок), необходимо отключить параметр `known_tilt` как в модуле `aruco_detect`, так и в модуле `aruco_map` в том же файле. 
Для того, чтобы сделать это автоматически, введите: +При использовании маркеров, не привязанных к горизонтальным плоскостям (пол, потолок), необходимо также сделать пустым значение аргумента `placement` в том же файле: -```bash -sed -i "/known_tilt/s/value=\".*\"/value=\"\"/" /home/pi/catkin_ws/src/clover/clover/launch/aruco.launch +```xml + ``` После всех настроек вызовите `sudo systemctl restart clover` для перезагрузки сервиса `clover`. diff --git a/redirects.json b/redirects.json index 07d1d70c5..a851c75e7 100644 --- a/redirects.json +++ b/redirects.json @@ -35,7 +35,7 @@ { "from": "snippets.html", "to": "ru/snippets.html" }, { "from": "camera_frame.html", "to": "ru/camera_setup.html" }, { "from": "ru/camera_frame.html", "to": "camera_setup.html" }, - { "from": "camera.html", "to": "ru/camera.html" }, + { "from": "camera.html", "to": "en/camera.html" }, { "from": "led.html", "to": "en/leds.html" }, { "from": "leds.html", "to": "ru/leds.html" }, { "from": "rviz.html", "to": "ru/rviz.html" }, @@ -51,7 +51,7 @@ { "from": "firmware/", "to": "en/firmware.html" }, { "from": "simple_offboard/", "to": "en/simple_offboard.html" }, { "from": "offboard/", "to": "en/simple_offboard.html" }, - { "from": "camera/", "to": "ru/camera.html" }, + { "from": "camera/", "to": "en/camera.html" }, { "from": "snippets/", "to": "ru/snippets.html" }, { "from": "optical_flow/", "to": "ru/optical_flow.html" }, { "from": "laser/", "to": "ru/laser.html" }, @@ -65,6 +65,7 @@ { "from": "clover_vm/", "to": "en/simulation_vm.html" }, { "from": "gpio/", "to": "en/gpio.html" }, { "from": "blocks/", "to": "en/blocks.html" }, + { "from": "red_circle/", "to": "en/camera.html" }, { "from": "ru/microsd_images.html", "to": "image.html" }, { "from": "en/microsd_images.html", "to": "image.html" }, diff --git a/roswww_static/CMakeLists.txt b/roswww_static/CMakeLists.txt index bb9e1fc72..f44f98ea2 100644 --- a/roswww_static/CMakeLists.txt +++ b/roswww_static/CMakeLists.txt @@ -5,8 +5,6 @@ 
find_package(catkin REQUIRED) catkin_package() -install(DIRECTORY launch DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) - -catkin_install_python(PROGRAMS main.py +catkin_install_python(PROGRAMS src/update DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/roswww_static/README.md b/roswww_static/README.md index c2eea67f6..92eb35f48 100644 --- a/roswww_static/README.md +++ b/roswww_static/README.md @@ -6,12 +6,14 @@ Note: you should configure your web server to make it follow symlinks. ## Instructions -* Run `main.py` node and it will generate the symlinks and index file. +* Run `update` script and it will generate the symlinks and index file: `rosrun roswww_static update`. * Point your static web server path to `~/.ros/www`. -You can rerun `main.py` if the list of installed packages changes. +You can rerun `update` if the list of installed packages changes. ## Parameters -* `index` – path for index page, otherwise packages list would be generated. -* `default_package` – if set then the index page would redirect to this package's page. +Parameters are passed through environment variables: + +* `ROSWWW_INDEX` – path for index page, otherwise packages list would be generated. +* `ROSWWW_DEFAULT` – if set then the index page would redirect to this package's page. 
diff --git a/roswww_static/launch/example.launch b/roswww_static/launch/example.launch deleted file mode 100644 index 3732081f5..000000000 --- a/roswww_static/launch/example.launch +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/roswww_static/package.xml b/roswww_static/package.xml index b98dba6a5..09f001596 100644 --- a/roswww_static/package.xml +++ b/roswww_static/package.xml @@ -1,7 +1,7 @@ roswww_static - 0.23.0 + 0.24.0 Static web pages for ROS packages Oleg Kalachev MIT diff --git a/roswww_static/main.py b/roswww_static/src/update similarity index 82% rename from roswww_static/main.py rename to roswww_static/src/update index 0b1685099..6fbd06654 100755 --- a/roswww_static/main.py +++ b/roswww_static/src/update @@ -13,17 +13,15 @@ import os import shutil -import rospy import rospkg -rospy.init_node('roswww_static') - rospack = rospkg.RosPack() www = rospkg.get_ros_home() + '/www' -index_file = rospy.get_param('~index_file', None) -default_package = rospy.get_param('~default_package', None) +index_file = os.environ.get('ROSWWW_INDEX') +default_package = os.environ.get('ROSWWW_DEFAULT') +print('using www dir: ' + www) shutil.rmtree(www, ignore_errors=True) # reset www directory content os.mkdir(www) @@ -34,7 +32,7 @@ for name in packages: path = rospack.get_path(name) if os.path.exists(path + '/www'): - rospy.loginfo('found www path for %s package', name) + print('found www path for %s package' % name) os.symlink(path + '/www', www + '/' + name) index += '
  • {name}
  • '.format(name=name) @@ -42,7 +40,7 @@ redirect_html = ''.format(name=default_package) open(www + '/index.html', 'w').write(redirect_html) elif index_file is not None: - rospy.loginfo('symlinking index file') + print('symlinking index file') os.symlink(index_file, www + '/index.html') else: open(www + '/index.html', 'w').write(index)