Update slam_tracker.hpp to 4.0.0 (feature info pose extension)

This commit is contained in:
Mateo de Mayo 2022-07-13 20:16:00 +00:00
parent e36bead967
commit 41b04f6292
7 changed files with 189 additions and 58 deletions

View File

@ -383,8 +383,8 @@ target_sources(basalt
) )
target_link_libraries(basalt target_link_libraries(basalt
PUBLIC ${STD_CXX_FS} basalt::opencv basalt::basalt-headers TBB::tbb pangolin basalt::cli11 PUBLIC ${STD_CXX_FS} basalt::opencv basalt::basalt-headers TBB::tbb pangolin basalt::cli11 basalt::monado
PRIVATE basalt::magic_enum rosbag apriltag opengv nlohmann::json fmt::fmt basalt::monado) PRIVATE basalt::magic_enum rosbag apriltag opengv nlohmann::json fmt::fmt)
target_include_directories(basalt PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) target_include_directories(basalt PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include)
target_compile_definitions(basalt PUBLIC ${BASALT_COMPILE_DEFINITIONS}) target_compile_definitions(basalt PUBLIC ${BASALT_COMPILE_DEFINITIONS})
#target_compile_definitions(basalt PUBLIC BASALT_DISABLE_ASSERTS) #target_compile_definitions(basalt PUBLIC BASALT_DISABLE_ASSERTS)

View File

@ -119,7 +119,9 @@ Run an OpenXR app like `hello_xr` with the following environment variables set
export EUROC_PATH=/path/to/euroc/V1_01_easy/ # Set euroc dataset path. You can get a dataset from http://robotics.ethz.ch/~asl-datasets/ijrr_euroc_mav_dataset/vicon_room1/V1_01_easy/V1_01_easy.zip export EUROC_PATH=/path/to/euroc/V1_01_easy/ # Set euroc dataset path. You can get a dataset from http://robotics.ethz.ch/~asl-datasets/ijrr_euroc_mav_dataset/vicon_room1/V1_01_easy/V1_01_easy.zip
export EUROC_LOG=debug export EUROC_LOG=debug
export EUROC_HMD=false # if false, a fake controller will be tracked, else a fake HMD export EUROC_HMD=false # if false, a fake controller will be tracked, else a fake HMD
export EUROC_PLAY_FROM_START=true
export SLAM_LOG=debug export SLAM_LOG=debug
export SLAM_SUBMIT_FROM_START=true
export SLAM_CONFIG=$bsltdeps/basalt/data/monado/euroc.toml # Point to Basalt config file for Euroc export SLAM_CONFIG=$bsltdeps/basalt/data/monado/euroc.toml # Point to Basalt config file for Euroc
export OXR_DEBUG_GUI=1 # We will need the debug ui to start streaming the dataset export OXR_DEBUG_GUI=1 # We will need the debug ui to start streaming the dataset
``` ```

View File

@ -45,12 +45,14 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <basalt/calibration/calibration.hpp> #include <basalt/calibration/calibration.hpp>
#include <basalt/camera/stereographic_param.hpp> #include <basalt/camera/stereographic_param.hpp>
#include <basalt/utils/sophus_utils.hpp> #include <basalt/utils/sophus_utils.hpp>
#include <slam_tracker.hpp>
#include <tbb/concurrent_queue.h> #include <tbb/concurrent_queue.h>
namespace basalt { namespace basalt {
using KeypointId = size_t; using KeypointId = size_t;
using xrt::auxiliary::tracking::slam::timestats;
struct OpticalFlowInput { struct OpticalFlowInput {
using Ptr = std::shared_ptr<OpticalFlowInput>; using Ptr = std::shared_ptr<OpticalFlowInput>;
@ -58,20 +60,9 @@ struct OpticalFlowInput {
int64_t t_ns; int64_t t_ns;
std::vector<ImageData> img_data; std::vector<ImageData> img_data;
// Keep track of internal timestamps for this input timestats stats; //!< Keeps track of internal metrics for this t_ns
bool timing_enabled = false; void addTime(const char* name, int64_t custom_ts = INT64_MIN) {
std::vector<int64_t> tss{}; stats.addTime(name, custom_ts);
void addTime(const char* /* name */, int64_t custom_ts = INT64_MIN) {
if (!timing_enabled) {
return;
}
if (custom_ts != INT64_MIN) {
tss.push_back(custom_ts);
} else {
auto ts = std::chrono::steady_clock::now().time_since_epoch().count();
tss.push_back(ts);
}
} }
}; };

View File

@ -63,6 +63,23 @@ string pose2str(const pose &p) {
return str; return str;
} }
static const vector<string> timing_titles{
"frame_ts",
"tracker_received",
"tracker_pushed",
"frames_received", // < Basalt computation starts
"opticalflow_produced",
"vio_start",
"imu_preintegrated",
"landmarks_updated",
"optimized",
"marginalized",
"pose_produced", // Basalt computation ends
"tracker_consumer_received",
"tracker_consumer_pushed",
"monado_dequeued",
};
struct slam_tracker::implementation { struct slam_tracker::implementation {
EIGEN_MAKE_ALIGNED_OPERATOR_NEW EIGEN_MAKE_ALIGNED_OPERATOR_NEW
@ -110,14 +127,20 @@ struct slam_tracker::implementation {
MargDataSaver::Ptr marg_data_saver; MargDataSaver::Ptr marg_data_saver;
// slam_tracker features // slam_tracker features
unordered_set<int> supported_features{F_ADD_CAMERA_CALIBRATION, F_ADD_IMU_CALIBRATION, F_ENABLE_POSE_EXT_TIMING}; unordered_set<int> supported_features{
F_ADD_CAMERA_CALIBRATION,
F_ADD_IMU_CALIBRATION,
F_ENABLE_POSE_EXT_TIMING,
F_ENABLE_POSE_EXT_FEATURES,
};
// Additional calibration data // Additional calibration data
vector<cam_calibration> added_cam_calibs{}; vector<cam_calibration> added_cam_calibs{};
vector<imu_calibration> added_imu_calibs{}; vector<imu_calibration> added_imu_calibs{};
// Pose timing // Enabled pose extensions
bool pose_timing_enabled = false; bool pose_timing_enabled = false;
bool pose_features_enabled = false;
public: public:
implementation(const string &unified_config) { implementation(const string &unified_config) {
@ -186,10 +209,20 @@ struct slam_tracker::implementation {
p.timestamp = state->t_ns; p.timestamp = state->t_ns;
p.next = nullptr; p.next = nullptr;
if (pose_timing_enabled) { auto *next = &p.next;
auto pose_timing = make_shared<pose_ext_timing>();
pose_timing->timestamps = state->input_images->tss; if (state->input_images->stats.timing_enabled) {
p.next = pose_timing; pose_ext_timing_data petd = state->input_images->stats;
auto pose_timing = make_shared<pose_ext_timing>(petd);
*next = pose_timing;
next = &pose_timing->next;
}
if (state->input_images->stats.features_enabled) {
pose_ext_features_data pefd = state->input_images->stats;
auto pose_features = make_shared<pose_ext_features>(pefd);
*next = pose_features;
next = &pose_features->next;
} }
return p; return p;
@ -241,7 +274,7 @@ struct slam_tracker::implementation {
apply_cam_calibration(c); apply_cam_calibration(c);
} }
bool calib_from_monado = added_cam_calibs.size() == 2; bool calib_from_monado = added_cam_calibs.size() == NUM_CAMS;
bool view_offset_unknown = calib.view_offset(0) == 0 && calib.view_offset(1) == 0; bool view_offset_unknown = calib.view_offset(0) == 0 && calib.view_offset(1) == 0;
if (calib_from_monado || view_offset_unknown) { if (calib_from_monado || view_offset_unknown) {
compute_view_offset(); compute_view_offset();
@ -331,11 +364,24 @@ struct slam_tracker::implementation {
int i = -1; int i = -1;
if (s.is_left) { if (s.is_left) {
partial_frame = make_shared<OpticalFlowInput>(); partial_frame = make_shared<OpticalFlowInput>();
partial_frame->timing_enabled = pose_timing_enabled;
partial_frame->addTime("frame_ts", s.timestamp);
partial_frame->addTime("tracker_received");
partial_frame->img_data.resize(NUM_CAMS); partial_frame->img_data.resize(NUM_CAMS);
partial_frame->t_ns = s.timestamp; partial_frame->t_ns = s.timestamp;
// Initialize stats
partial_frame->stats.ts = s.timestamp;
if (pose_timing_enabled) {
partial_frame->stats.timing_enabled = true;
partial_frame->stats.timing.reserve(timing_titles.size());
partial_frame->stats.timing_titles = &timing_titles;
partial_frame->addTime("frame_ts", s.timestamp);
partial_frame->addTime("tracker_received");
}
if (pose_features_enabled) {
partial_frame->stats.features_enabled = true;
partial_frame->stats.features_per_cam.resize(NUM_CAMS);
}
i = 0; i = 0;
} else { } else {
ASSERT(partial_frame->t_ns == s.timestamp, "Left and right frame timestamps differ: %ld != %ld", ASSERT(partial_frame->t_ns == s.timestamp, "Left and right frame timestamps differ: %ld != %ld",
@ -384,7 +430,11 @@ struct slam_tracker::implementation {
shared_ptr<FPARAMS_AIC> casted_params = static_pointer_cast<FPARAMS_AIC>(params); shared_ptr<FPARAMS_AIC> casted_params = static_pointer_cast<FPARAMS_AIC>(params);
add_imu_calibration(*casted_params); add_imu_calibration(*casted_params);
} else if (feature_id == FID_EPET) { } else if (feature_id == FID_EPET) {
result = enable_pose_ext_timing(); shared_ptr<FPARAMS_EPET> casted_params = static_pointer_cast<FPARAMS_EPET>(params);
result = enable_pose_ext_timing(*casted_params);
} else if (feature_id == FID_EPEF) {
shared_ptr<FPARAMS_EPEF> casted_params = static_pointer_cast<FPARAMS_EPEF>(params);
enable_pose_ext_features(*casted_params);
} else { } else {
return false; return false;
} }
@ -510,24 +560,12 @@ struct slam_tracker::implementation {
calib_data_ready.imu = true; calib_data_ready.imu = true;
} }
shared_ptr<vector<string>> enable_pose_ext_timing() { shared_ptr<vector<string>> enable_pose_ext_timing(bool enable) {
pose_timing_enabled = true; pose_timing_enabled = enable;
vector<string> titles{"frame_ts", return make_shared<vector<string>>(timing_titles);
"tracker_received",
"tracker_pushed",
"frames_received", // < Basalt computation starts
"opticalflow_produced",
"vio_start",
"imu_preintegrated",
"landmarks_updated",
"optimized",
"marginalized",
"pose_produced", // Basalt computation ends
"tracker_consumer_received",
"tracker_consumer_pushed",
"monado_dequeued"};
return make_shared<vector<string>>(titles);
} }
void enable_pose_ext_features(bool enable) { pose_features_enabled = enable; }
}; };
slam_tracker::slam_tracker(const string &config_file) { impl = make_unique<slam_tracker::implementation>(config_file); } slam_tracker::slam_tracker(const string &config_file) { impl = make_unique<slam_tracker::implementation>(config_file); }

View File

@ -53,6 +53,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <tbb/parallel_reduce.h> #include <tbb/parallel_reduce.h>
#include <chrono> #include <chrono>
#include <slam_tracker.hpp>
namespace basalt { namespace basalt {
@ -486,6 +487,14 @@ bool SqrtKeypointVioEstimator<Scalar_>::measure(
optimize_and_marg(opt_flow_meas->input_images, num_points_connected, optimize_and_marg(opt_flow_meas->input_images, num_points_connected,
lost_landmaks); lost_landmaks);
const size_t num_cams = opt_flow_meas->observations.size();
const bool features_ext = opt_flow_meas->input_images->stats.features_enabled;
std::vector<Eigen::aligned_vector<Eigen::Vector4d>> projections{};
if (features_ext || out_vis_queue) {
projections.resize(num_cams);
computeProjections(projections, last_state_t_ns);
}
if (out_state_queue) { if (out_state_queue) {
PoseVelBiasStateWithLin p = frame_states.at(last_state_t_ns); PoseVelBiasStateWithLin p = frame_states.at(last_state_t_ns);
@ -494,6 +503,21 @@ bool SqrtKeypointVioEstimator<Scalar_>::measure(
data->input_images = opt_flow_meas->input_images; data->input_images = opt_flow_meas->input_images;
data->input_images->addTime("pose_produced"); data->input_images->addTime("pose_produced");
if (features_ext) {
for (size_t i = 0; i < num_cams; i++) {
for (const Eigen::Vector4d& v : projections[i]) {
using Feature =
xrt::auxiliary::tracking::slam::pose_ext_features_data::feature;
Feature lm{};
lm.id = v.w();
lm.u = v.x();
lm.v = v.y();
lm.depth = v.z();
data->input_images->stats.addFeature(i, lm);
}
}
}
out_state_queue->push(data); out_state_queue->push(data);
} }
@ -513,8 +537,7 @@ bool SqrtKeypointVioEstimator<Scalar_>::measure(
get_current_points(data->points, data->point_ids); get_current_points(data->points, data->point_ids);
data->projections.resize(opt_flow_meas->observations.size()); data->projections = projections;
computeProjections(data->projections, last_state_t_ns);
data->opt_flow_res = prev_opt_flow_res[last_state_t_ns]; data->opt_flow_res = prev_opt_flow_res[last_state_t_ns];

View File

@ -143,7 +143,7 @@ size_t last_frame_processed = 0;
tbb::concurrent_unordered_map<int64_t, int, std::hash<int64_t>> timestamp_to_id; tbb::concurrent_unordered_map<int64_t, int, std::hash<int64_t>> timestamp_to_id;
std::mutex m; std::mutex m;
std::condition_variable cv; std::condition_variable cvar;
bool step_by_step = false; bool step_by_step = false;
size_t max_frames = 0; size_t max_frames = 0;
@ -169,7 +169,7 @@ void feed_images() {
if (step_by_step) { if (step_by_step) {
std::unique_lock<std::mutex> lk(m); std::unique_lock<std::mutex> lk(m);
cv.wait(lk); cvar.wait(lk);
} }
basalt::OpticalFlowInput::Ptr data(new basalt::OpticalFlowInput); basalt::OpticalFlowInput::Ptr data(new basalt::OpticalFlowInput);
@ -829,7 +829,7 @@ bool next_step() {
if (show_frame < int(vio_dataset->get_image_timestamps().size()) - 1) { if (show_frame < int(vio_dataset->get_image_timestamps().size()) - 1) {
show_frame = show_frame + 1; show_frame = show_frame + 1;
show_frame.Meta().gui_changed = true; show_frame.Meta().gui_changed = true;
cv.notify_one(); cvar.notify_one();
return true; return true;
} else { } else {
return false; return false;

View File

@ -19,15 +19,17 @@
#include <opencv2/core/mat.hpp> #include <opencv2/core/mat.hpp>
#include <cstdint> #include <cstdint>
#include <iostream>
#include <memory> #include <memory>
#include <string> #include <string>
#include <vector> #include <vector>
#include <chrono>
namespace xrt::auxiliary::tracking::slam { namespace xrt::auxiliary::tracking::slam {
// For implementation: same as IMPLEMENTATION_VERSION_* // For implementation: same as IMPLEMENTATION_VERSION_*
// For user: expected IMPLEMENTATION_VERSION_*. Should be checked in runtime. // For user: expected IMPLEMENTATION_VERSION_*. Should be checked in runtime.
constexpr int HEADER_VERSION_MAJOR = 3; //!< API Breakages constexpr int HEADER_VERSION_MAJOR = 4; //!< API Breakages
constexpr int HEADER_VERSION_MINOR = 0; //!< Backwards compatible API changes constexpr int HEADER_VERSION_MINOR = 0; //!< Backwards compatible API changes
constexpr int HEADER_VERSION_PATCH = 0; //!< Backw. comp. .h-implemented changes constexpr int HEADER_VERSION_PATCH = 0; //!< Backw. comp. .h-implemented changes
@ -199,7 +201,7 @@ struct cam_calibration {
double fx, fy; //!< Focal point double fx, fy; //!< Focal point
double cx, cy; //!< Principal point double cx, cy; //!< Principal point
std::string distortion_model; //!< Models like: none, rt4, rt5, rt8, kb4 std::string distortion_model; //!< Models like: none, rt4, rt5, rt8, kb4
std::vector<double> distortion; //!< Parameters for the distortion_model std::vector<double> distortion{}; //!< Parameters for the distortion_model
cv::Matx<double, 4, 4> t_imu_cam; //!< Transformation from IMU to camera cv::Matx<double, 4, 4> t_imu_cam; //!< Transformation from IMU to camera
}; };
@ -254,10 +256,17 @@ DEFINE_FEATURE(ADD_IMU_CALIBRATION, AIC, 2, imu_calibration, void)
/*! /*!
* Feature ENABLE_POSE_EXT_TIMING * Feature ENABLE_POSE_EXT_TIMING
* *
* Use it after constructor but before `start()`. * Enable/disable adding internal timestamps to the estimated poses.
* Returns a vector with names for the timestamps in `pose_ext_timing`. * Returns a vector with names for the timestamps in `pose_ext_timing`.
*/ */
DEFINE_FEATURE(ENABLE_POSE_EXT_TIMING, EPET, 3, void, std::vector<std::string>) DEFINE_FEATURE(ENABLE_POSE_EXT_TIMING, EPET, 3, bool, std::vector<std::string>)
/*!
* Feature ENABLE_POSE_EXT_FEATURES
*
* Enable/disable adding feature information to the estimated poses.
*/
DEFINE_FEATURE(ENABLE_POSE_EXT_FEATURES, EPEF, 4, bool, void)
/* /*
* Pose extensions * Pose extensions
@ -272,6 +281,7 @@ DEFINE_FEATURE(ENABLE_POSE_EXT_TIMING, EPET, 3, void, std::vector<std::string>)
enum class pose_ext_type : int { enum class pose_ext_type : int {
UNDEFINED = 0, UNDEFINED = 0,
TIMING = 1, TIMING = 1,
FEATURES = 2,
}; };
struct pose_extension { struct pose_extension {
@ -290,12 +300,79 @@ pose::find_pose_extension(pose_ext_type required_type) const {
return pe; return pe;
} }
struct pose_ext_timing : pose_extension { // Timing pose extension
//! Internal pipeline timestamps of interest when generating the pose. In struct pose_ext_timing_data {
//! steady clock ns. Must have the same number of elements in the same run. //! Internal pipeline stage timestamps of interest when generating the pose.
std::vector<std::int64_t> timestamps{}; //! In steady clock ns. Must have the same number of elements in the same run.
std::vector<std::int64_t> timing{};
//! Names of each timing stage. Should point to static memory.
const std::vector<std::string> *timing_titles = nullptr;
};
struct pose_ext_timing : pose_extension, pose_ext_timing_data {
pose_ext_timing() : pose_extension{pose_ext_type::TIMING} {} pose_ext_timing() : pose_extension{pose_ext_type::TIMING} {}
pose_ext_timing(const pose_ext_timing_data &petd)
: pose_extension{pose_ext_type::TIMING}, pose_ext_timing_data{petd} {}
};
// Features pose extension
struct pose_ext_features_data {
struct feature {
std::int64_t id;
float u;
float v;
float depth;
};
std::vector<std::vector<feature>> features_per_cam{};
};
struct pose_ext_features : pose_extension, pose_ext_features_data {
pose_ext_features() : pose_extension{pose_ext_type::FEATURES} {}
pose_ext_features(const pose_ext_features_data &pefd)
: pose_extension{pose_ext_type::FEATURES}, pose_ext_features_data{pefd} {}
};
/*!
* Utility object to keep track of different stats for a particular timestamp.
* Stats usually correspond with a particular pose extension.
*/
struct timestats : pose_ext_timing_data, pose_ext_features_data {
using ptr = std::shared_ptr<timestats>;
std::int64_t ts = -1;
bool timing_enabled = false;
bool features_enabled = false;
void addTime(const char *name, int64_t ts = INT64_MIN) {
if (!timing_enabled) {
return;
}
if (timing_titles) {
std::string expected = timing_titles->at(timing.size());
if (expected != name) {
std::cout << "Invalid timing stage\n";
std::cout << "expected: " << expected;
std::cout << ", got: " << name << std::endl;
exit(EXIT_FAILURE);
}
}
if (ts == INT64_MIN) {
ts = std::chrono::steady_clock::now().time_since_epoch().count();
}
timing.push_back(ts);
}
void addFeature(size_t cam, const feature &f) {
if (!features_enabled) {
return;
}
if (cam >= features_per_cam.size()) {
features_per_cam.resize(cam + 1);
}
features_per_cam.at(cam).push_back(f);
}
}; };
} // namespace xrt::auxiliary::tracking::slam } // namespace xrt::auxiliary::tracking::slam