
Commit

Merge branch 'PR/feature/so3_attitude_control' into develop/delta/devel
sugikazu75 committed Oct 14, 2024
2 parents bd30187 + c0d532b commit 3ae4f66
Showing 4 changed files with 53 additions and 44 deletions.
@@ -102,7 +102,7 @@ namespace sensor_plugin
 double sensor_dt_;

 /* imu */
-tf::Vector3 wz_b_; /* unit z axis of the world frame, expressed in the baselink frame */
+tf::Vector3 g_b_; /* the *opposite* gravity vector, expressed in the baselink frame */
 tf::Vector3 omega_; /* the angular velocity in the body frame */
 tf::Vector3 mag_; /* the magnetometer vector in the body frame */
 /* acc */
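Note: per the comments above, g_b_ holds the unit z axis of the world frame (the direction opposite to gravity) expressed in the baselink frame, a full tilt representation that avoids the RPY singularity at pitch = ±90°. As a minimal sketch of the relation (illustration only, not part of this commit; the function name is hypothetical):

#include <tf/LinearMath/Matrix3x3.h>
#include <tf/LinearMath/Vector3.h>

// Recover the quantity stored in g_b_ from a known baselink orientation.
// R_wb maps body-frame vectors into the world frame.
tf::Vector3 worldZInBody(const tf::Matrix3x3& R_wb)
{
  return R_wb.transpose() * tf::Vector3(0, 0, 1); // R^T * e_z
}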
62 changes: 38 additions & 24 deletions aerial_robot_estimation/src/sensor/imu.cpp
@@ -49,7 +49,7 @@ namespace sensor_plugin
 sensor_plugin::SensorBase(),
 calib_count_(200),
 acc_b_(0, 0, 0),
-wz_b_(0, 0, 0),
+g_b_(0, 0, 0),
 omega_(0, 0, 0),
 mag_(0, 0, 0),
 acc_bias_b_(0, 0, 0),
@@ -113,7 +113,7 @@ namespace sensor_plugin
 acc_b_[i] = imu_msg->acc_data[i]; // baselink frame
 omega_[i] = imu_msg->gyro_data[i]; // baselink frame
 mag_[i] = imu_msg->mag_data[i]; // baselink frame
-wz_b_[i] = imu_msg->angles[i]; // workaround to avoid the singularity of RPY Euler angles.
+g_b_[i] = imu_msg->angles[i]; // workaround to avoid the singularity of RPY Euler angles.
 }

 if(first_flag)
@@ -159,11 +159,11 @@ namespace sensor_plugin
 tf::Transform cog2baselink_tf;
 tf::transformKDLToTF(robot_model_->getCog2Baselink<KDL::Frame>(), cog2baselink_tf);

-// convert to CoG frame, since in the current system the CoG frame never becomes singular in the RPY Euler representation
-tf::Vector3 wz_c = cog2baselink_tf.getBasis() * wz_b_;
-tf::Vector3 cog_euler;
-cog_euler[0] = atan2(wz_c.y() , wz_c.z());
-cog_euler[1] = atan2(-wz_c.x() , sqrt(wz_c.y() * wz_c.y() + wz_c.z() * wz_c.z()));
+tf::Vector3 wz_b = g_b_.normalize();
+
+tf::Vector3 mag = mag_.normalize();
+tf::Vector3 wx_b = mag.cross(wz_b);
+wx_b.normalize();

 // 1. egomotion estimate mode
 // check whether we have a valid rotation from the VO sensor
@@ -175,36 +175,50 @@

 if (vo_handler->rotValid())
 {
-  tf::Matrix3x3 cog_rot_by_vo = vo_handler->getRawBaselinkTF().getBasis() * cog2baselink_tf.getBasis().inverse();
-  // we replace the yaw angle from the spinal with the value from VO.
-  double r,p,y;
-  cog_rot_by_vo.getRPY(r, p, y);
-  cog_euler[2] = y;
+  tf::Matrix3x3 vo_rot = vo_handler->getRawBaselinkTF().getBasis();
+  // we replace wx_b with the value from VO.
+  wx_b = vo_rot.transpose() * tf::Vector3(1,0,0);
   break;
 }
 }
 }

-cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]);
-estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE));
-base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) * cog2baselink_tf.getBasis();
-estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE));
+tf::Vector3 wy_b = wz_b.cross(wx_b);
+wy_b.normalize();
+tf::Matrix3x3 rot(wx_b.x(), wx_b.y(), wx_b.z(),
+                  wy_b.x(), wy_b.y(), wy_b.z(),
+                  wz_b.x(), wz_b.y(), wz_b.z());
+
+base_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = rot;
+estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, rot);
+
+tf::Matrix3x3 rot_c = rot * cog2baselink_tf.getBasis().transpose();
+cog_rot_.at(aerial_robot_estimation::EGOMOTION_ESTIMATE) = rot_c;
+estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, rot_c);

 estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EGOMOTION_ESTIMATE, cog2baselink_tf.getBasis() * omega_);
 estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE, omega_);

 // 2. experiment estimate mode
 if(estimator_->getStateStatus(State::CoG::Rot, aerial_robot_estimation::GROUND_TRUTH))
 {
-  tf::Matrix3x3 gt_cog_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH) * cog2baselink_tf.getBasis().inverse();
-  double r,p,y;
-  gt_cog_rot.getRPY(r, p, y);
-  cog_euler[2] = y;
+  tf::Matrix3x3 gt_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH);
+  // we replace wx_b with the value from ground truth.
+  wx_b = gt_rot.transpose() * tf::Vector3(1,0,0);
 }
-cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE).setRPY(cog_euler[0], cog_euler[1], cog_euler[2]);
-estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE));
-base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) * cog2baselink_tf.getBasis();
-estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE));
+
+wy_b = wz_b.cross(wx_b);
+wy_b.normalize();
+rot = tf::Matrix3x3(wx_b.x(), wx_b.y(), wx_b.z(),
+                    wy_b.x(), wy_b.y(), wy_b.z(),
+                    wz_b.x(), wz_b.y(), wz_b.z());
+
+base_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = rot;
+estimator_->setOrientation(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, rot);
+
+rot_c = rot * cog2baselink_tf.getBasis().transpose();
+cog_rot_.at(aerial_robot_estimation::EXPERIMENT_ESTIMATE) = rot_c;
+estimator_->setOrientation(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, rot_c);

 estimator_->setAngularVel(Frame::COG, aerial_robot_estimation::EXPERIMENT_ESTIMATE, cog2baselink_tf.getBasis() * omega_);
 estimator_->setAngularVel(Frame::BASELINK, aerial_robot_estimation::EXPERIMENT_ESTIMATE, omega_);
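The new attitude construction above is essentially a TRIAD-style orthonormalization: the (opposite) gravity direction fixes the world z axis in the body frame, the magnetometer fixes a horizontal axis via a cross product, and the remaining axis completes a right-handed frame. A self-contained sketch of the same construction (illustration only; the function name is hypothetical):

#include <tf/LinearMath/Matrix3x3.h>
#include <tf/LinearMath/Vector3.h>

// Build a body->world rotation from the opposite-gravity vector and the
// magnetometer reading, both expressed in the body frame.
tf::Matrix3x3 rotationFromGravityAndMag(const tf::Vector3& g_b, const tf::Vector3& mag_b)
{
  tf::Vector3 wz_b = g_b.normalized();               // world z axis in body frame
  tf::Vector3 wx_b = mag_b.normalized().cross(wz_b); // horizontal axis fixed by the magnetometer
  wx_b.normalize();
  tf::Vector3 wy_b = wz_b.cross(wx_b);               // completes the right-handed triad
  // Rows are the world axes expressed in the body frame, so the matrix
  // maps body-frame vectors into the world frame.
  return tf::Matrix3x3(wx_b.x(), wx_b.y(), wx_b.z(),
                       wy_b.x(), wy_b.y(), wy_b.z(),
                       wz_b.x(), wz_b.y(), wz_b.z());
}

Unlike the previous RPY pipeline, nothing here passes through Euler angles, so the estimate stays well defined at pitch = ±90°.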
31 changes: 13 additions & 18 deletions aerial_robot_estimation/src/sensor/vo.cpp
@@ -237,37 +237,32 @@ namespace sensor_plugin
 }
 }

-/** step1: ^{w}H_{b'}, b': level frame of b **/
-tf::Transform w_bdash_f;
+/** step1: ^{w}H_{b} **/
+tf::Transform w_b_f;
 tf::Matrix3x3 base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE);
-double r,p,y;
-base_rot.getRPY(r, p, y); // we assume the baselink orientation in the initial phase is not entirely vertical
-w_bdash_f.setRotation(tf::createQuaternionFromYaw(y));
+w_b_f.setBasis(base_rot);

 tf::Vector3 baselink_pos = estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::EGOMOTION_ESTIMATE);
 if(estimator_->getStateStatus(State::X_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
-  w_bdash_f.getOrigin().setX(baselink_pos.x());
+  w_b_f.getOrigin().setX(baselink_pos.x());
 if(estimator_->getStateStatus(State::Y_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
-  w_bdash_f.getOrigin().setY(baselink_pos.y());
+  w_b_f.getOrigin().setY(baselink_pos.y());
 if(estimator_->getStateStatus(State::Z_BASE, aerial_robot_estimation::EGOMOTION_ESTIMATE))
-  w_bdash_f.getOrigin().setZ(baselink_pos.z());
+  w_b_f.getOrigin().setZ(baselink_pos.z());

 /* set the offset if we know the ground truth */
 if(estimator_->getStateStatus(State::Base::Rot, aerial_robot_estimation::GROUND_TRUTH))
 {
-  w_bdash_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH));
+  w_b_f.setOrigin(estimator_->getPos(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH));
   base_rot = estimator_->getOrientation(Frame::BASELINK, aerial_robot_estimation::GROUND_TRUTH);
-  base_rot.getRPY(r, p, y);
-  w_bdash_f.setRotation(tf::createQuaternionFromYaw(y));
+  w_b_f.setBasis(base_rot);
 }

-/** step2: ^{vo}H_{b'} **/
-tf::Transform vo_bdash_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b}
-vo_bdash_f.getBasis().getRPY(r,p,y);
-vo_bdash_f.setRotation(tf::createQuaternionFromYaw(y)); // ^{vo}H_{b'}
+/** step2: ^{vo}H_{b} **/
+tf::Transform vo_b_f = raw_sensor_tf * sensor_tf_.inverse(); // ^{vo}H_{b}

-/** step3: ^{w}H_{vo} = ^{w}H_{b'} * ^{b'}H_{vo} **/
-world_offset_tf_ = w_bdash_f * vo_bdash_f.inverse();
+/** step3: ^{w}H_{vo} = ^{w}H_{b} * ^{b}H_{vo} **/
+world_offset_tf_ = w_b_f * vo_b_f.inverse();

 /* publish the offset tf if necessary */
 geometry_msgs::TransformStamped static_transformStamped;
@@ -277,7 +272,7 @@
 tf::transformTFToMsg(world_offset_tf_, static_transformStamped.transform);
 static_broadcaster_.sendTransform(static_transformStamped);

-tf::Vector3 init_pos = w_bdash_f.getOrigin();
+tf::Vector3 init_pos = w_b_f.getOrigin();

 for(auto& fuser : estimator_->getFuser(aerial_robot_estimation::EGOMOTION_ESTIMATE))
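Step 3 above is plain frame composition: given ^{w}H_{b} (the current baselink pose in the world frame) and ^{vo}H_{b} (the same pose as reported by the VO sensor), the constant offset is ^{w}H_{vo} = ^{w}H_{b} * (^{vo}H_{b})^{-1}. Since the b' (level-frame) projection has been dropped, the offset now carries the full SO(3) rotation instead of yaw only. A minimal sketch (illustration only; names are hypothetical):

#include <tf/transform_datatypes.h>

// Compute the static world->VO offset, then use it to re-express any
// subsequent raw VO baselink pose in the world frame.
tf::Transform computeWorldToVoOffset(const tf::Transform& w_H_b,
                                     const tf::Transform& vo_H_b)
{
  return w_H_b * vo_H_b.inverse(); // ^{w}H_{vo}
}

// usage: tf::Transform w_H_b_new = world_offset * vo_H_b_new;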
@@ -192,7 +192,7 @@ class AttitudeEstimate
 #endif
 for(int i = 0; i < 3 ; i ++)
 {
-  imu_msg_.mag_data[i] = estimator_->getMag(Frame::BODY)[i];
+  imu_msg_.mag_data[i] = estimator_->getEstM(Frame::BODY)[i];
   imu_msg_.acc_data[i] = estimator_->getAcc(Frame::BODY)[i];
   imu_msg_.gyro_data[i] = estimator_->getAngular(Frame::BODY)[i];
 #if ESTIMATE_TYPE == COMPLEMENTARY
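The last hunk swaps the raw magnetometer sample (getMag) for the estimator's magnetic vector (getEstM) when publishing imu_msg_, presumably the body-frame magnetic direction maintained by the attitude filter rather than the noisy raw reading. One common way to maintain such an estimate is to propagate the body-frame vector with the gyro and blend it toward the raw sample; the sketch below shows that idea under stated assumptions (it is not spinal's actual implementation, and the function name is hypothetical):

#include <tf/LinearMath/Vector3.h>

// Complementary estimate of a world-fixed vector m seen in the body frame:
// its body-frame derivative is dm/dt = -omega x m = m x omega.
tf::Vector3 updateEstM(tf::Vector3 m_est, const tf::Vector3& m_raw,
                       const tf::Vector3& omega, double dt, double gain)
{
  m_est += m_est.cross(omega) * dt;             // gyro propagation
  m_est = m_est.lerp(m_raw.normalized(), gain); // slow pull toward the raw sample
  return m_est.normalized();
}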
