Merge branch 'realsense' into 'master'
Added Realsense VIO

See merge request basalt/basalt!17
commit 53287c5588
@@ -247,8 +247,11 @@ target_link_libraries(basalt_vio basalt)

find_package(realsense2 QUIET)

if(realsense2_FOUND)
  add_executable(basalt_rs_t265_record src/rs_t265_record.cpp)
  add_executable(basalt_rs_t265_record src/rs_t265_record.cpp src/device/rs_t265.cpp)
  target_link_libraries(basalt_rs_t265_record basalt realsense2::realsense2 ${OpenCV_LIBS})

  add_executable(basalt_rs_t265_vio src/rs_t265_vio.cpp src/device/rs_t265.cpp)
  target_link_libraries(basalt_rs_t265_vio basalt realsense2::realsense2)
endif()
@@ -7,6 +7,7 @@
    "config.optical_flow_max_iterations": 5,
    "config.optical_flow_epipolar_error": 0.005,
    "config.optical_flow_levels": 3,
    "config.optical_flow_skip_frames": 1,
    "config.vio_max_states": 3,
    "config.vio_max_kfs": 7,
    "config.vio_min_frames_after_kf": 5,

@@ -7,6 +7,7 @@
    "config.optical_flow_max_iterations": 5,
    "config.optical_flow_epipolar_error": 0.005,
    "config.optical_flow_levels": 3,
    "config.optical_flow_skip_frames": 1,
    "config.vio_max_states": 3,
    "config.vio_max_kfs": 7,
    "config.vio_min_frames_after_kf": 5,
@@ -0,0 +1,118 @@
/**
BSD 3-Clause License

This file is part of the Basalt project.
https://gitlab.com/VladyslavUsenko/basalt.git

Copyright (c) 2019, Vladyslav Usenko, Michael Loipführer and Nikolaus Demmel.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#pragma once

#include <math.h>
#include <atomic>
#include <cstring>
#include <fstream>
#include <iomanip>
#include <mutex>
#include <thread>

#include <librealsense2/rs.hpp>

#include <pangolin/image/typed_image.h>
#include <pangolin/pangolin.h>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

#include <tbb/concurrent_queue.h>

#include <basalt/imu/imu_types.h>
#include <basalt/optical_flow/optical_flow.h>
#include <basalt/calibration/calibration.hpp>
#include <experimental/filesystem>

namespace fs = std::experimental::filesystem;

namespace basalt {

struct RsIMUData {
  double timestamp;
  Eigen::Vector3d data;

  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};

struct RsPoseData {
  int64_t t_ns;
  Sophus::SE3d data;

  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};

class Device {
 public:
  using Ptr = std::shared_ptr<Device>;

  static constexpr int IMU_RATE = 200;
  static constexpr int NUM_CAMS = 2;

  Device(bool manual_exposure, int skip_frames, int webp_quality,
         double exposure_value = 10.0);
  ~Device();
  void start();

  bool setExposure(double exposure);  // in milliseconds
  void setSkipFrames(int skip);
  void setWebpQuality(int quality);

  std::shared_ptr<basalt::Calibration<double>> exportCalibration();

  OpticalFlowInput::Ptr last_img_data;
  tbb::concurrent_bounded_queue<OpticalFlowInput::Ptr>* image_data_queue =
      nullptr;
  tbb::concurrent_bounded_queue<ImuData::Ptr>* imu_data_queue = nullptr;
  tbb::concurrent_bounded_queue<RsPoseData>* pose_data_queue = nullptr;

 private:
  bool manual_exposure;
  int skip_frames;
  int webp_quality;

  rs2::config config;
  rs2::pipeline pipe;
  rs2::context context;
  rs2::pipeline_profile profile;

  Eigen::deque<RsIMUData> gyro_data_queue;
  std::shared_ptr<RsIMUData> prev_accel_data;

  std::shared_ptr<basalt::Calibration<double>> calib;
};

}  // namespace basalt
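For orientation, a minimal sketch of how this new Device is meant to be wired up: the owner allocates the bounded queues, points the Device at them, and calls start(). This mirrors what rs_t265_record.cpp does later in this diff; the main() wrapper and the queue capacities here are illustrative only.

    #include <basalt/device/rs_t265.h>

    int main() {
      // Consumers pull from these queues in worker threads; capacities are illustrative.
      tbb::concurrent_bounded_queue<basalt::OpticalFlowInput::Ptr> image_data_queue;
      tbb::concurrent_bounded_queue<basalt::ImuData::Ptr> imu_data_queue;
      tbb::concurrent_bounded_queue<basalt::RsPoseData> pose_data_queue;
      image_data_queue.set_capacity(1000);
      imu_data_queue.set_capacity(10000);
      pose_data_queue.set_capacity(10000);

      // manual_exposure=false also enables the on-device pose stream (see rs_t265.cpp).
      basalt::Device::Ptr device(new basalt::Device(false, 1, 90, 10.0));
      device->image_data_queue = &image_data_queue;
      device->imu_data_queue = &imu_data_queue;
      device->pose_data_queue = &pose_data_queue;

      device->start();  // registers the librealsense callback and starts streaming
      // ... pop from the queues in worker threads, as rs_t265_record.cpp does ...
    }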
@@ -67,7 +67,7 @@ class FrameToFrameOpticalFlow : public OpticalFlowBase {

  FrameToFrameOpticalFlow(const VioConfig& config,
                          const basalt::Calibration<double>& calib)
      : t_ns(-1), last_keypoint_id(0), config(config) {
      : t_ns(-1), frame_counter(0), last_keypoint_id(0), config(config) {
    input_queue.set_capacity(10);

    this->calib = calib.cast<Scalar>();

@@ -165,9 +165,11 @@ class FrameToFrameOpticalFlow : public OpticalFlowBase {
      filterPoints();
    }

    if (output_queue) {
    if (output_queue && frame_counter % config.optical_flow_skip_frames == 0) {
      output_queue->push(transforms);
    }

    frame_counter++;
  }

  void trackPoints(

@@ -365,6 +367,8 @@ class FrameToFrameOpticalFlow : public OpticalFlowBase {
 private:
  int64_t t_ns;

  size_t frame_counter;

  KeypointId last_keypoint_id;

  VioConfig config;
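The new throttling means that tracking still runs on every frame, but only every optical_flow_skip_frames-th result is forwarded to the estimator. A small self-contained sketch of just the counter logic (illustrative values, not the basalt classes):

    #include <cstdio>

    int main() {
      const int optical_flow_skip_frames = 2;  // forward every 2nd frame
      int frame_counter = 0;
      for (int i = 0; i < 6; i++) {
        bool forward = (frame_counter % optical_flow_skip_frames == 0);
        std::printf("frame %d -> %s\n", i,
                    forward ? "pushed to output_queue" : "tracked but not pushed");
        frame_counter++;
      }
      return 0;  // frames 0, 2, 4 are forwarded for skip_frames == 2
    }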
@@ -65,7 +65,11 @@ class PatchOpticalFlow : public OpticalFlowBase {

  PatchOpticalFlow(const VioConfig& config,
                   const basalt::Calibration<double>& calib)
      : t_ns(-1), last_keypoint_id(0), config(config), calib(calib) {
      : t_ns(-1),
        frame_counter(0),
        last_keypoint_id(0),
        config(config),
        calib(calib) {
    patches.reserve(3000);
    input_queue.set_capacity(10);

@@ -151,9 +155,10 @@ class PatchOpticalFlow : public OpticalFlowBase {
      filterPoints();
    }

    if (output_queue) {
    if (output_queue && frame_counter % config.optical_flow_skip_frames == 0) {
      output_queue->push(transforms);
    }
    frame_counter++;
  }

  void trackPoints(

@@ -357,6 +362,8 @@ class PatchOpticalFlow : public OpticalFlowBase {
 private:
  int64_t t_ns;

  size_t frame_counter;

  KeypointId last_keypoint_id;

  VioConfig config;
@@ -50,6 +50,7 @@ struct VioConfig {
  int optical_flow_max_iterations;
  int optical_flow_levels;
  float optical_flow_epipolar_error;
  int optical_flow_skip_frames;

  int vio_max_states;
  int vio_max_kfs;
@@ -65,14 +65,17 @@ class KeypointVioEstimator : public VioEstimatorBase,

  static constexpr double prior_weight = 1e8;

  KeypointVioEstimator(int64_t t_ns, const Sophus::SE3d& T_w_i,
                       const Eigen::Vector3d& vel_w_i,
                       const Eigen::Vector3d& bg, const Eigen::Vector3d& ba,
                       double int_std_dev, const Eigen::Vector3d& g,
  KeypointVioEstimator(double int_std_dev, const Eigen::Vector3d& g,
                       const basalt::Calibration<double>& calib,
                       const VioConfig& config);

  ~KeypointVioEstimator() { processing_thread->join(); }
  void initialize(int64_t t_ns, const Sophus::SE3d& T_w_i,
                  const Eigen::Vector3d& vel_w_i, const Eigen::Vector3d& bg,
                  const Eigen::Vector3d& ba);

  void initialize(const Eigen::Vector3d& bg, const Eigen::Vector3d& ba);

  virtual ~KeypointVioEstimator() { processing_thread->join(); }

  void addIMUToQueue(const ImuData::Ptr& data);
  void addVisionToQueue(const OpticalFlowResult::Ptr& data);

@@ -190,6 +193,8 @@ class KeypointVioEstimator : public VioEstimatorBase,
    return res;
  }

  const Sophus::SE3d& getT_w_i_init() { return T_w_i_init; }

  EIGEN_MAKE_ALIGNED_OPERATOR_NEW

 private:

@@ -218,6 +223,9 @@ class KeypointVioEstimator : public VioEstimatorBase,
  size_t max_states;
  size_t max_kfs;

  Sophus::SE3d T_w_i_init;

  bool initialized;
  bool opt_started;

  VioConfig config;
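In short, state initialization moves out of the constructor into initialize(), so an estimator can now be created before the first pose/velocity/bias estimate is known (e.g. when running live on the T265). The updated call site in setup_vio() further down in this diff follows exactly this pattern; condensed here, with the surrounding variables assumed to exist as in that function:

    // t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias, gt_accel_bias,
    // g, calib and config are assumed to be defined as in setup_vio().
    vio.reset(new basalt::KeypointVioEstimator(0.0001, g, calib, config));
    vio->initialize(t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias.front(),
                    gt_accel_bias.front());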
@@ -84,15 +84,24 @@ class VioEstimatorBase {
  tbb::concurrent_bounded_queue<MargData::Ptr>* out_marg_queue = nullptr;
  tbb::concurrent_bounded_queue<VioVisualizationData::Ptr>* out_vis_queue =
      nullptr;

  virtual void initialize(int64_t t_ns, const Sophus::SE3d& T_w_i,
                          const Eigen::Vector3d& vel_w_i,
                          const Eigen::Vector3d& bg,
                          const Eigen::Vector3d& ba) = 0;

  virtual void initialize(const Eigen::Vector3d& bg,
                          const Eigen::Vector3d& ba) = 0;

  virtual const Sophus::SE3d& getT_w_i_init() = 0;
};

class VioEstimatorFactory {
 public:
  static VioEstimatorBase::Ptr getVioEstimator(
      const VioConfig& config, const Calibration<double>& cam, int64_t t_ns,
      const Sophus::SE3d& T_w_i, const Eigen::Vector3d& vel_w_i,
      const Eigen::Vector3d& bg, const Eigen::Vector3d& ba, double int_std_dev,
      const Eigen::Vector3d& g);
  static VioEstimatorBase::Ptr getVioEstimator(const VioConfig& config,
                                               const Calibration<double>& cam,
                                               double int_std_dev,
                                               const Eigen::Vector3d& g);
};

double alignSVD(const std::vector<int64_t>& filter_t_ns,
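A sketch of how the slimmed-down factory signature is meant to be used: the estimator is created from config, calibration, noise weight and gravity alone, and the state is set afterwards through one of the initialize() overloads. The gravity vector and noise value below are placeholders; calib and config are assumed to be loaded elsewhere.

    // calib (basalt::Calibration<double>) and config (basalt::VioConfig) are
    // assumed to exist; 0.0001 and the gravity vector are placeholder values.
    Eigen::Vector3d g(0, 0, -9.81);
    basalt::VioEstimatorBase::Ptr vio =
        basalt::VioEstimatorFactory::getVioEstimator(config, calib, 0.0001, g);

    // Biases-only initialization; the full pose/velocity overload is still
    // available when a prior (e.g. ground truth or the T265 pose stream) exists.
    vio->initialize(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero());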
@@ -0,0 +1,33 @@
#!/usr/bin/env python3

import sys
import math
import os
import webp

import pandas as pd
import numpy as np
from matplotlib import pyplot as plt

dataset_path = sys.argv[1]

print(dataset_path)

timestamps = np.loadtxt(dataset_path + '/mav0/cam0/data.csv', usecols=[0], delimiter=',', dtype=np.int64)
exposures = np.loadtxt(dataset_path + '/mav0/cam0/exposure.csv', usecols=[1], delimiter=',', dtype=np.int64)
pixel_avgs = list()


# check image data.
img_extensions = ['.png', '.jpg', '.webp']
for timestamp in timestamps:
    path = dataset_path + '/mav0/cam0/data/' + str(timestamp)
    img = webp.imread(dataset_path + '/mav0/cam0/data/' + str(timestamp) + '.webp')
    pixel_avgs.append(np.mean(img))

plt.plot(exposures, pixel_avgs)
plt.ylabel('Img Mean')
plt.xlabel('Exposure')
plt.show()
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

import sys
import math

@@ -8,41 +8,42 @@ import numpy as np

dataset_path = sys.argv[1]

print dataset_path
print(dataset_path)

timestamps = {}
exposures = {}


for sensor in ['cam0', 'cam1', 'imu0']:
    data = np.loadtxt(dataset_path + '/mav0/' + sensor + '/data.csv', usecols=[0], delimiter=',', dtype=np.int64)
    timestamps[sensor] = data

# check if dataset is OK...
for key, value in timestamps.iteritems():
for key, value in timestamps.items():
    times = value * 1e-9
    min_t = times.min()
    max_t = times.max()
    interval = max_t - min_t
    diff = times[1:] - times[:-1]
    print '=========================================='
    print 'sensor', key
    print 'min timestamp', min_t
    print 'max timestamp', max_t
    print 'interval', interval
    print 'hz', times.shape[0]/interval
    print 'min time between consecutive msgs', diff.min()
    print 'max time between consecutive msgs', diff.max()
    print('==========================================')
    print('sensor', key)
    print('min timestamp', min_t)
    print('max timestamp', max_t)
    print('interval', interval)
    print('hz', times.shape[0] / interval)
    print('min time between consecutive msgs', diff.min())
    print('max time between consecutive msgs', diff.max())
    for i, d in enumerate(diff):
        # Note: 0.001 is just a hacky heuristic, since we have nothing faster than 1000Hz. Should maybe be topic-specific.
        if d < 0.001:
            print("ERROR: Difference on consecutive measurements too small: {} - {} = {}".format(times[i+1], times[i], d))
            print("ERROR: Difference on consecutive measurements too small: {} - {} = {}".format(times[i + 1], times[i],
                                                                                                 d) + ' in sensor ' + key)

# check if we have all images for timestamps
timestamp_to_topic = {}

for key, value in timestamps.iteritems():
    if not key.startswith('cam'): continue
for key, value in timestamps.items():
    if not key.startswith('cam'):
        continue
    for v in value:
        if v not in timestamp_to_topic:
            timestamp_to_topic[v] = list()

@@ -50,13 +51,13 @@ for key, value in timestamps.iteritems():

for key in timestamp_to_topic.keys():
    if len(timestamp_to_topic[key]) != 2:
        print 'timestamp', key, 'has topics', timestamp_to_topic[key]

        print('timestamp', key, 'has topics', timestamp_to_topic[key])

# check image data.
img_extensions = ['.png', '.jpg', '.webp']
for key, value in timestamps.iteritems():
    if not key.startswith('cam'): continue
for key, value in timestamps.items():
    if not key.startswith('cam'):
        continue
    for v in value:
        path = dataset_path + '/mav0/' + key + '/data/' + str(v)
        img_exists = False

@@ -77,6 +78,3 @@ for key, value in timestamps.iteritems():
        idx = np.searchsorted(exposure_data[:, 0], v)
        if exposure_data[idx, 0] != v:
            print('No exposure data for ' + key + ' at timestamp ' + str(v))
@@ -0,0 +1,335 @@
/**
BSD 3-Clause License

This file is part of the Basalt project.
https://gitlab.com/VladyslavUsenko/basalt.git

Copyright (c) 2019, Vladyslav Usenko, Michael Loipführer and Nikolaus Demmel.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <basalt/device/rs_t265.h>

std::string get_date();

namespace basalt {

Device::Device(bool manual_exposure, int skip_frames, int webp_quality,
               double exposure_value)
    : manual_exposure(manual_exposure),
      skip_frames(skip_frames),
      webp_quality(webp_quality) {
  rs2::log_to_console(RS2_LOG_SEVERITY_ERROR);
  pipe = rs2::pipeline(context);

  config.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
  config.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
  config.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
  config.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);
  if (!manual_exposure) {
    config.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
  }

  if (context.query_devices().size() == 0) {
    std::cout << "Waiting for device to be connected" << std::endl;
    rs2::device_hub hub(context);
    hub.wait_for_device();
  }

  for (auto& s : context.query_devices()[0].query_sensors()) {
    std::cout << "Sensor " << s.get_info(RS2_CAMERA_INFO_NAME)
              << ". Supported options:" << std::endl;

    for (const auto& o : s.get_supported_options()) {
      std::cout << "\t" << rs2_option_to_string(o) << std::endl;
    }
  }

  if (manual_exposure) {
    auto device = context.query_devices()[0];
    std::cout << "Device " << device.get_info(RS2_CAMERA_INFO_NAME)
              << " connected" << std::endl;
    auto sens = device.query_sensors()[0];

    std::cout << "Enabling manual exposure control" << std::endl;
    sens.set_option(rs2_option::RS2_OPTION_ENABLE_AUTO_EXPOSURE, 0);
    sens.set_option(rs2_option::RS2_OPTION_EXPOSURE, exposure_value * 1000);
  }
}

Device::~Device() = default;

void Device::start() {
  auto callback = [&](const rs2::frame& frame) {
    exportCalibration();

    if (auto fp = frame.as<rs2::motion_frame>()) {
      auto motion = frame.as<rs2::motion_frame>();

      if (motion && motion.get_profile().stream_type() == RS2_STREAM_GYRO &&
          motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
        RsIMUData d;
        d.timestamp = motion.get_timestamp();
        d.data << motion.get_motion_data().x, motion.get_motion_data().y,
            motion.get_motion_data().z;

        gyro_data_queue.emplace_back(d);
      } else if (motion &&
                 motion.get_profile().stream_type() == RS2_STREAM_ACCEL &&
                 motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
        RsIMUData d;
        d.timestamp = motion.get_timestamp();
        d.data << motion.get_motion_data().x, motion.get_motion_data().y,
            motion.get_motion_data().z;

        if (!prev_accel_data.get()) {
          prev_accel_data.reset(new RsIMUData(d));
        } else {
          BASALT_ASSERT(d.timestamp > prev_accel_data->timestamp);

          while (!gyro_data_queue.empty() && gyro_data_queue.front().timestamp <
                                                 prev_accel_data->timestamp) {
            std::cout << "Skipping gyro data. Timestamp before the first accel "
                         "measurement.";
            gyro_data_queue.pop_front();
          }

          while (!gyro_data_queue.empty() &&
                 gyro_data_queue.front().timestamp < d.timestamp) {
            RsIMUData gyro_data = gyro_data_queue.front();
            gyro_data_queue.pop_front();

            double w0 = (d.timestamp - gyro_data.timestamp) /
                        (d.timestamp - prev_accel_data->timestamp);

            double w1 = (gyro_data.timestamp - prev_accel_data->timestamp) /
                        (d.timestamp - prev_accel_data->timestamp);

            Eigen::Vector3d accel_interpolated =
                w0 * prev_accel_data->data + w1 * d.data;

            basalt::ImuData::Ptr data;
            data.reset(new basalt::ImuData);
            data->t_ns = gyro_data.timestamp * 1e6;
            data->accel = accel_interpolated;
            data->gyro = gyro_data.data;

            const double accel_noise_std =
                calib->dicreete_time_accel_noise_std();
            const double gyro_noise_std = calib->dicreete_time_gyro_noise_std();

            data->accel_cov.setConstant(accel_noise_std * accel_noise_std);
            data->gyro_cov.setConstant(gyro_noise_std * gyro_noise_std);

            if (imu_data_queue) imu_data_queue->push(data);
          }

          prev_accel_data.reset(new RsIMUData(d));
        }
      }
    } else if (auto fs = frame.as<rs2::frameset>()) {
      OpticalFlowInput::Ptr data(new OpticalFlowInput);
      data->img_data.resize(NUM_CAMS);

      for (int i = 0; i < NUM_CAMS; i++) {
        auto f = fs[i];
        if (!f.as<rs2::video_frame>()) {
          std::cout << "Weird Frame, skipping" << std::endl;
          continue;
        }
        auto vf = f.as<rs2::video_frame>();

        data->t_ns = vf.get_timestamp() * 1e6;

        data->img_data[i].exposure =
            vf.get_frame_metadata(RS2_FRAME_METADATA_ACTUAL_EXPOSURE) * 1e-6;

        data->img_data[i].img.reset(new basalt::ManagedImage<uint16_t>(
            vf.get_width(), vf.get_height()));

        const uint8_t* data_in = (const uint8_t*)vf.get_data();
        uint16_t* data_out = data->img_data[i].img->ptr;

        size_t full_size = vf.get_width() * vf.get_height();
        for (size_t j = 0; j < full_size; j++) {
          int val = data_in[j];
          val = val << 8;
          data_out[j] = val;
        }
      }

      last_img_data = data;
      if (image_data_queue) image_data_queue->push(data);

    } else if (auto pf = frame.as<rs2::pose_frame>()) {
      auto data = pf.get_pose_data();

      RsPoseData pdata;
      pdata.t_ns = pf.get_timestamp() * 1e6;

      Eigen::Vector3d trans(data.translation.x, data.translation.y,
                            data.translation.z);
      Eigen::Quaterniond quat(data.rotation.w, data.rotation.x, data.rotation.y,
                              data.rotation.z);

      pdata.data = Sophus::SE3d(quat, trans);

      if (pose_data_queue) pose_data_queue->push(pdata);
    }
  };

  profile = pipe.start(config, callback);
}

bool Device::setExposure(double exposure) {
  if (!manual_exposure) return false;

  auto device = context.query_devices()[0];
  auto sens = device.query_sensors()[0];
  sens.set_option(rs2_option::RS2_OPTION_EXPOSURE, exposure * 1000);
  return true;
}

void Device::setSkipFrames(int skip) { skip_frames = skip; }

void Device::setWebpQuality(int quality) { webp_quality = quality; }

std::shared_ptr<basalt::Calibration<double>> Device::exportCalibration() {
  using Scalar = double;

  if (calib.get()) return calib;

  calib.reset(new basalt::Calibration<Scalar>);
  calib->imu_update_rate = IMU_RATE;

  auto accel_stream = profile.get_stream(RS2_STREAM_ACCEL);
  auto gyro_stream = profile.get_stream(RS2_STREAM_GYRO);
  auto cam0_stream = profile.get_stream(RS2_STREAM_FISHEYE, 1);
  auto cam1_stream = profile.get_stream(RS2_STREAM_FISHEYE, 2);

  // get gyro extrinsics
  if (auto gyro = gyro_stream.as<rs2::motion_stream_profile>()) {
    rs2_motion_device_intrinsic intrinsics = gyro.get_motion_intrinsics();

    Eigen::Matrix<Scalar, 12, 1> gyro_bias_full;
    gyro_bias_full << intrinsics.data[0][3], intrinsics.data[1][3],
        intrinsics.data[2][3], intrinsics.data[0][0], intrinsics.data[1][0],
        intrinsics.data[2][0], intrinsics.data[0][1], intrinsics.data[1][1],
        intrinsics.data[2][1], intrinsics.data[0][2], intrinsics.data[1][2],
        intrinsics.data[2][2];
    basalt::CalibGyroBias<Scalar> gyro_bias;
    gyro_bias.getParam() = gyro_bias_full;
    calib->calib_gyro_bias = gyro_bias;

    // std::cout << "Gyro Bias\n" << gyro_bias_full << std::endl;

    calib->gyro_noise_std = std::sqrt(std::max(
        std::max(intrinsics.noise_variances[0], intrinsics.noise_variances[1]),
        intrinsics.noise_variances[2]));

    calib->gyro_bias_std = std::sqrt(std::max(
        std::max(intrinsics.bias_variances[0], intrinsics.bias_variances[1]),
        intrinsics.bias_variances[2]));

    // std::cout << "Gyro noise var: " << intrinsics.noise_variances[0]
    //           << " bias var: " << intrinsics.bias_variances[0] << std::endl;
  } else {
    std::abort();
  }

  // get accel extrinsics
  if (auto accel = accel_stream.as<rs2::motion_stream_profile>()) {
    rs2_motion_device_intrinsic intrinsics = accel.get_motion_intrinsics();
    Eigen::Matrix<Scalar, 9, 1> accel_bias_full;
    accel_bias_full << intrinsics.data[0][3], intrinsics.data[1][3],
        intrinsics.data[2][3], intrinsics.data[0][0], intrinsics.data[1][0],
        intrinsics.data[2][0], intrinsics.data[1][1], intrinsics.data[2][1],
        intrinsics.data[2][2];
    basalt::CalibAccelBias<Scalar> accel_bias;
    accel_bias.getParam() = accel_bias_full;
    calib->calib_accel_bias = accel_bias;

    // std::cout << "Gyro Bias\n" << accel_bias_full << std::endl;

    calib->accel_noise_std = sqrt(std::max(
        std::max(intrinsics.noise_variances[0], intrinsics.noise_variances[1]),
        intrinsics.noise_variances[2]));

    calib->accel_bias_std = sqrt(std::max(
        std::max(intrinsics.bias_variances[0], intrinsics.bias_variances[1]),
        intrinsics.bias_variances[2]));

    // std::cout << "Accel noise var: " << intrinsics.noise_variances[0]
    //           << " bias var: " << intrinsics.bias_variances[0] << std::endl;
  } else {
    std::abort();
  }

  // get camera ex-/intrinsics
  for (const auto& cam_stream : {cam0_stream, cam1_stream}) {
    if (auto cam = cam_stream.as<rs2::video_stream_profile>()) {
      // extrinsics
      rs2_extrinsics ex = cam.get_extrinsics_to(gyro_stream);
      Eigen::Matrix3f rot = Eigen::Map<Eigen::Matrix3f>(ex.rotation);
      Eigen::Vector3f trans = Eigen::Map<Eigen::Vector3f>(ex.translation);

      Eigen::Quaterniond q(rot.cast<double>());
      basalt::Calibration<Scalar>::SE3 T_i_c(q, trans.cast<double>());

      // std::cout << "T_i_c\n" << T_i_c.matrix() << std::endl;

      calib->T_i_c.push_back(T_i_c);

      // get resolution
      Eigen::Vector2i resolution;
      resolution << cam.width(), cam.height();
      calib->resolution.push_back(resolution);

      // intrinsics
      rs2_intrinsics intrinsics = cam.get_intrinsics();
      basalt::KannalaBrandtCamera4<Scalar>::VecN params;
      params << intrinsics.fx, intrinsics.fy, intrinsics.ppx, intrinsics.ppy,
          intrinsics.coeffs[0], intrinsics.coeffs[1], intrinsics.coeffs[2],
          intrinsics.coeffs[3];

      // std::cout << "Camera intrinsics: " << params.transpose() << std::endl;

      basalt::GenericCamera<Scalar> camera;
      basalt::KannalaBrandtCamera4 kannala_brandt(params);
      camera.variant = kannala_brandt;

      calib->intrinsics.push_back(camera);
    } else {
      std::abort();
    }
  }

  return calib;
}

}  // namespace basalt
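The accelerometer-to-gyro alignment in Device::start() above linearly interpolates the two accelerometer samples that bracket each gyro timestamp, so that every emitted ImuData carries a gyro measurement and an acceleration at the same time. In the notation of the code (prev_accel_data at time $t_0$, the current accel sample at time $t_1$, a gyro sample at $t_g$ with $t_0 \le t_g < t_1$):

    $w_0 = \frac{t_1 - t_g}{t_1 - t_0}, \qquad
     w_1 = \frac{t_g - t_0}{t_1 - t_0}, \qquad
     a(t_g) = w_0\, a(t_0) + w_1\, a(t_1)$

Since $w_0 + w_1 = 1$, the interpolated acceleration equals $a(t_0)$ when $t_g = t_0$ and approaches $a(t_1)$ as $t_g \to t_1$; gyro samples older than the previous accel sample are dropped, as the loop before the interpolation shows.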
@@ -708,9 +708,9 @@ void setup_vio() {
  basalt::VioConfig config;
  config.vio_debug = true;

  vio.reset(new basalt::KeypointVioEstimator(
      t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias.front(),
      gt_accel_bias.front(), 0.0001, g, calib, config));
  vio.reset(new basalt::KeypointVioEstimator(0.0001, g, calib, config));
  vio->initialize(t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias.front(),
                  gt_accel_bias.front());

  vio->setMaxStates(10000);
  vio->setMaxKfs(10000);
@@ -1,4 +1,40 @@
#include <atomic>
/**
BSD 3-Clause License

This file is part of the Basalt project.
https://gitlab.com/VladyslavUsenko/basalt.git

Copyright (c) 2019, Vladyslav Usenko, Michael Loipführer and Nikolaus Demmel.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <math.h>
#include <atomic>
#include <cstring>
#include <fstream>
#include <iomanip>
@@ -13,101 +49,200 @@
#include <pangolin/image/typed_image.h>
#include <pangolin/pangolin.h>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>

#include <tbb/concurrent_queue.h>

#include <basalt/device/rs_t265.h>
#include <basalt/serialization/headers_serialization.h>
#include <CLI/CLI.hpp>
#include <basalt/calibration/calibration.hpp>
#include <cereal/archives/json.hpp>
#include <experimental/filesystem>

namespace fs = std::experimental::filesystem;

constexpr int NUM_CAMS = 2;
constexpr int UI_WIDTH = 200;

pangolin::DataLog imu_log;
basalt::Device::Ptr t265_device;

std::shared_ptr<pangolin::DataLog> imu_log;

pangolin::Var<int> webp_quality("ui.webp_quality", 90, 0, 101);
pangolin::Var<int> skip_frames("ui.skip_frames", 1, 1, 10);
pangolin::Var<float> exposure("ui.exposure", 5.0, 1, 20);

struct ImageData {
  using Ptr = std::shared_ptr<ImageData>;

  int cam_id;
  double exposure_time;
  int64_t timestamp;
  cv::Mat image;
};

struct RsIMUData {
  double timestamp;
  Eigen::Vector3d data;

  EIGEN_MAKE_ALIGNED_OPERATOR_NEW
};

ImageData::Ptr last_images[NUM_CAMS];
tbb::concurrent_bounded_queue<ImageData::Ptr> image_save_queue;
float exposure;
std::string dataset_dir;
std::string dataset_folder;
std::string result_dir;
tbb::concurrent_bounded_queue<basalt::OpticalFlowInput::Ptr> image_data_queue;
tbb::concurrent_bounded_queue<basalt::ImuData::Ptr> imu_data_queue;
tbb::concurrent_bounded_queue<basalt::RsPoseData> pose_data_queue;

std::atomic<bool> stop_workers;
std::atomic<bool> record;
std::atomic<bool> recording;

std::ofstream cam_data[NUM_CAMS], exposure_data[NUM_CAMS], imu0_data;
std::string dataset_dir;

std::string get_date();
static constexpr int NUM_CAMS = basalt::Device::NUM_CAMS;
static constexpr int NUM_WORKERS = 8;

std::ofstream cam_data[NUM_CAMS], exposure_data[NUM_CAMS], imu0_data, pose_data;

std::vector<std::thread> worker_threads;
std::thread imu_worker_thread, pose_worker_thread;

// manual exposure mode, if not enabled will also record pose data
bool manual_exposure;

void image_save_worker() {
  ImageData::Ptr img;
  basalt::OpticalFlowInput::Ptr img;

  while (!stop_workers) {
    if (image_save_queue.try_pop(img)) {
    if (image_data_queue.try_pop(img)) {
      if (recording)
        for (size_t cam_id = 0; cam_id < NUM_CAMS; ++cam_id) {
#if CV_MAJOR_VERSION >= 3
          std::string filename = dataset_folder + "mav0/cam" +
                                 std::to_string(img->cam_id) + "/data/" +
                                 std::to_string(img->timestamp) + ".webp";

          std::vector<int> compression_params = {cv::IMWRITE_WEBP_QUALITY,
                                                 webp_quality};
          cv::imwrite(filename, img->image, compression_params);
          cam_data[cam_id] << img->t_ns << "," << img->t_ns << ".webp"
                           << std::endl;
#else
          std::string filename = dataset_folder + "mav0/cam" +
                                 std::to_string(img->cam_id) + "/data/" +
                                 std::to_string(img->timestamp) + ".jpg";

          std::vector<int> compression_params = {cv::IMWRITE_JPEG_QUALITY,
                                                 webp_quality};
          cv::imwrite(filename, img->image, compression_params);
          cam_data[cam_id] << img->t_ns << "," << img->t_ns << ".jpg"
                           << std::endl;
#endif

          exposure_data[cam_id] << img->t_ns << ","
                                << int64_t(img->img_data[cam_id].exposure * 1e9)
                                << std::endl;
        }

      for (size_t cam_id = 0; cam_id < NUM_CAMS; ++cam_id) {
        basalt::ManagedImage<uint16_t>::Ptr image_raw =
            img->img_data[cam_id].img;

        if (!image_raw.get()) continue;

        cv::Mat image(image_raw->h, image_raw->w, CV_8U);

        uint8_t *dst = image.ptr();
        const uint16_t *src = image_raw->ptr;

        for (size_t i = 0; i < image_raw->size(); i++) {
          dst[i] = (src[i] >> 8);
        }

#if CV_MAJOR_VERSION >= 3
        std::string filename = dataset_dir + "mav0/cam" +
                               std::to_string(cam_id) + "/data/" +
                               std::to_string(img->t_ns) + ".webp";

        std::vector<int> compression_params = {cv::IMWRITE_WEBP_QUALITY,
                                               webp_quality};
        cv::imwrite(filename, image, compression_params);
#else
        std::string filename = dataset_folder + "mav0/cam" +
                               std::to_string(cam_id) + "/data/" +
                               std::to_string(img->t_ns) + ".jpg";

        std::vector<int> compression_params = {cv::IMWRITE_JPEG_QUALITY,
                                               webp_quality};
        cv::imwrite(filename, image, compression_params);
#endif
      }
    } else {
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }
}

void toggle_recording() {
  record = !record;
  if (record) {
    dataset_folder = dataset_dir + "dataset_" + get_date() + "/";
    fs::create_directory(dataset_folder);
    fs::create_directory(dataset_folder + "mav0/");
    fs::create_directory(dataset_folder + "mav0/cam0/");
    fs::create_directory(dataset_folder + "mav0/cam0/data/");
    fs::create_directory(dataset_folder + "mav0/cam1/");
    fs::create_directory(dataset_folder + "mav0/cam1/data/");
    fs::create_directory(dataset_folder + "mav0/imu0/");
void imu_save_worker() {
  basalt::ImuData::Ptr data;

    cam_data[0].open(dataset_folder + "mav0/cam0/data.csv");
    cam_data[1].open(dataset_folder + "mav0/cam1/data.csv");
    exposure_data[0].open(dataset_folder + "mav0/cam0/exposure.csv");
    exposure_data[1].open(dataset_folder + "mav0/cam1/exposure.csv");
    imu0_data.open(dataset_folder + "mav0/imu0/data.csv");
  while (!stop_workers) {
    if (imu_data_queue.try_pop(data)) {
      if (imu_log.get())
        imu_log->Log(data->accel[0], data->accel[1], data->accel[2]);

      if (recording) {
        imu0_data << data->t_ns << "," << data->gyro[0] << "," << data->gyro[1]
                  << "," << data->gyro[2] << "," << data->accel[0] << ","
                  << data->accel[1] << "," << data->accel[2] << "\n";
      }

    } else {
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }
}

void pose_save_worker() {
  basalt::RsPoseData data;

  while (!stop_workers) {
    if (pose_data_queue.try_pop(data)) {
      if (recording) {
        pose_data << data.t_ns << "," << data.data.translation().x() << ","
                  << data.data.translation().y() << ","
                  << data.data.translation().z() << ","
                  << data.data.unit_quaternion().w() << ","
                  << data.data.unit_quaternion().x() << ","
                  << data.data.unit_quaternion().y() << ","
                  << data.data.unit_quaternion().z() << std::endl;
      }

    } else {
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }
}

void save_calibration(const basalt::Device::Ptr &device) {
  auto calib = device->exportCalibration();

  if (calib) {
    std::ofstream os(dataset_dir + "/calibration.json");
    cereal::JSONOutputArchive archive(os);

    archive(*calib);
  }
}

inline std::string get_date() {
  constexpr int MAX_DATE = 64;
  time_t now;
  char the_date[MAX_DATE];

  the_date[0] = '\0';

  now = time(nullptr);

  if (now != -1) {
    strftime(the_date, MAX_DATE, "%Y_%m_%d_%H_%M_%S", gmtime(&now));
  }

  return std::string(the_date);
}

void startRecording(const std::string &dir_path) {
  if (!recording) {
    dataset_dir = dir_path + "dataset_" + get_date() + "/";

    fs::create_directory(dataset_dir);
    fs::create_directory(dataset_dir + "mav0/");
    fs::create_directory(dataset_dir + "mav0/cam0/");
    fs::create_directory(dataset_dir + "mav0/cam0/data/");
    fs::create_directory(dataset_dir + "mav0/cam1/");
    fs::create_directory(dataset_dir + "mav0/cam1/data/");
    fs::create_directory(dataset_dir + "mav0/imu0/");

    cam_data[0].open(dataset_dir + "mav0/cam0/data.csv");
    cam_data[1].open(dataset_dir + "mav0/cam1/data.csv");
    exposure_data[0].open(dataset_dir + "mav0/cam0/exposure.csv");
    exposure_data[1].open(dataset_dir + "mav0/cam1/exposure.csv");
    imu0_data.open(dataset_dir + "mav0/imu0/data.csv");

    if (!manual_exposure) {
      fs::create_directory(dataset_dir + "mav0/realsense0/");
      pose_data.open(dataset_dir + "mav0/realsense0/data.csv");
      pose_data << "#timestamp [ns], p_RS_R_x [m], p_RS_R_y [m], p_RS_R_z [m], "
                   "q_RS_w [], q_RS_x [], q_RS_y [], q_RS_z []\n";
    }

    cam_data[0] << "#timestamp [ns], filename\n";
    cam_data[1] << "#timestamp [ns], filename\n";
@@ -117,121 +252,47 @@ void toggle_recording() {
                 "s^-1],w_RS_S_z [rad s^-1],a_RS_S_x [m s^-2],a_RS_S_y "
                 "[m s^-2],a_RS_S_z [m s^-2]\n";

    std::cout << "Started recording dataset in " << dataset_folder << std::endl;
    save_calibration(t265_device);
    t265_device->image_data_queue->clear();
    t265_device->imu_data_queue->clear();

    std::cout << "Started recording dataset in " << dataset_dir << std::endl;

    recording = true;
  } else {
    std::cout << "Already recording" << std::endl;
  }
}

void stopRecording() {
  if (recording) {
    recording = false;
    cam_data[0].close();
    cam_data[1].close();
    exposure_data[0].close();
    exposure_data[1].close();
    imu0_data.close();

    std::cout << "Stopped recording dataset in " << dataset_folder << std::endl;
    std::cout << "Stopped recording dataset in " << dataset_dir << std::endl;
  }
}

void export_device_calibration(rs2::pipeline_profile &profile,
                               const std::string &out_path) {
  using Scalar = double;

  std::shared_ptr<basalt::Calibration<Scalar>> calib;
  calib.reset(new basalt::Calibration<Scalar>);

  auto accel_stream = profile.get_stream(RS2_STREAM_ACCEL);
  auto gyro_stream = profile.get_stream(RS2_STREAM_GYRO);
  auto cam0_stream = profile.get_stream(RS2_STREAM_FISHEYE, 1);
  auto cam1_stream = profile.get_stream(RS2_STREAM_FISHEYE, 2);

  // get gyro extrinsics
  if (auto gyro = gyro_stream.as<rs2::motion_stream_profile>()) {
    // TODO: gyro
    rs2_motion_device_intrinsic intrinsics = gyro.get_motion_intrinsics();

    std::cout << " Scale X cross axis cross axis Bias X \n";
    std::cout << " cross axis Scale Y cross axis Bias Y \n";
    std::cout << " cross axis cross axis Scale Z Bias Z \n";
    for (auto &i : intrinsics.data) {
      for (int j = 0; j < 4; j++) {
        std::cout << i[j] << " ";
      }
      std::cout << "\n";
    }

    std::cout << "Variance of noise for X, Y, Z axis \n";
    for (float noise_variance : intrinsics.noise_variances)
      std::cout << noise_variance << " ";
    std::cout << "\n";

    std::cout << "Variance of bias for X, Y, Z axis \n";
    for (float bias_variance : intrinsics.bias_variances)
      std::cout << bias_variance << " ";
    std::cout << "\n";
void toggleRecording(const std::string &dir_path) {
  if (recording) {
    stopRecording();
  } else {
    throw std::exception();
    startRecording(dir_path);
  }

  // get accel extrinsics
  if (auto gyro = accel_stream.as<rs2::motion_stream_profile>()) {
    // TODO: accel
    // rs2_motion_device_intrinsic intrinsics = accel.get_motion_intrinsics();
  } else {
    throw std::exception();
  }

  // get camera ex-/intrinsics
  for (const auto &cam_stream : {cam0_stream, cam1_stream}) {
    if (auto cam = cam_stream.as<rs2::video_stream_profile>()) {
      // extrinsics
      rs2_extrinsics ex = cam.get_extrinsics_to(gyro_stream);
      Eigen::Matrix3f rot = Eigen::Map<Eigen::Matrix3f>(ex.rotation);
      Eigen::Vector3f trans = Eigen::Map<Eigen::Vector3f>(ex.translation);

      Eigen::Quaterniond q(rot.cast<double>());
      basalt::Calibration<Scalar>::SE3 T_i_c(q, trans.cast<double>());

      std::cout << "T_i_c\n" << T_i_c.matrix() << std::endl;

      calib->T_i_c.push_back(T_i_c);

      // get resolution
      Eigen::Vector2i resolution;
      resolution << cam.width(), cam.height();
      calib->resolution.push_back(resolution);

      // intrinsics
      rs2_intrinsics intrinsics = cam.get_intrinsics();
      basalt::KannalaBrandtCamera4<Scalar>::VecN params;
      params << intrinsics.fx, intrinsics.fy, intrinsics.ppx, intrinsics.ppy,
          intrinsics.coeffs[0], intrinsics.coeffs[1], intrinsics.coeffs[2],
          intrinsics.coeffs[3];

      std::cout << "params: " << params.transpose() << std::endl;

      basalt::GenericCamera<Scalar> camera;
      basalt::KannalaBrandtCamera4 kannala_brandt(params);
      camera.variant = kannala_brandt;

      calib->intrinsics.push_back(camera);
    } else {
      throw std::exception();  // TODO: better exception
    }
  }

  // serialize and store calibration
  // std::ofstream os(out_path + "calibration.json");
  // cereal::JSONOutputArchive archive(os);

  // archive(*calib);
}

int main(int argc, char *argv[]) {
  CLI::App app{"Record RealSense T265 Data"};

  app.add_option("--dataset-dir", dataset_dir, "Path to dataset");
  app.add_option("--exposure", exposure,
                 "If set will enable manual exposure, value in microseconds.");
  app.add_option("--result-dir", result_dir,
                 "If set will enable manual exposure, value in microseconds.");
  std::string dataset_path;

  app.add_option("--dataset-path", dataset_path, "Path to dataset");
  app.add_flag("--manual-exposure", manual_exposure,
               "If set will enable manual exposure.");

  try {
    app.parse(argc, argv);
@@ -241,185 +302,38 @@ int main(int argc, char *argv[]) {

  bool show_gui = true;

  image_save_queue.set_capacity(5000);

  stop_workers = false;
  std::vector<std::thread> worker_threads;
  for (int i = 0; i < 8; i++) {
    worker_threads.emplace_back(image_save_worker);
  if (worker_threads.empty()) {
    for (int i = 0; i < NUM_WORKERS; i++) {
      worker_threads.emplace_back(image_save_worker);
    }
  }

  std::string color_mode;
  imu_worker_thread = std::thread(imu_save_worker);
  pose_worker_thread = std::thread(pose_save_worker);

  image_data_queue.set_capacity(1000);
  imu_data_queue.set_capacity(10000);
  pose_data_queue.set_capacity(10000);

  // realsense
  rs2::log_to_console(RS2_LOG_SEVERITY_ERROR);
  rs2::context ctx;
  rs2::pipeline pipe(ctx);
  rs2::config cfg;
  t265_device.reset(
      new basalt::Device(manual_exposure, skip_frames, webp_quality, exposure));

  // Add streams of gyro and accelerometer to configuration
  cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
  cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
  cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
  cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);
  t265_device->image_data_queue = &image_data_queue;
  t265_device->imu_data_queue = &imu_data_queue;
  t265_device->pose_data_queue = &pose_data_queue;

  // Using the device_hub we can block the program until a device connects
  // rs2::device_hub device_hub(ctx);

  auto devices = ctx.query_devices();
  if (devices.size() == 0) {
    std::abort();
  }
  auto device = devices[0];
  std::cout << "Device " << device.get_info(RS2_CAMERA_INFO_NAME)
            << " connected" << std::endl;
  auto sens = device.query_sensors()[0];

  if (exposure > 0) {
    std::cout << "Setting exposure to: " << exposure << " microseconds"
              << std::endl;
    sens.set_option(rs2_option::RS2_OPTION_ENABLE_AUTO_EXPOSURE, 0);
    sens.set_option(rs2_option::RS2_OPTION_EXPOSURE, (float)exposure);
  }

  std::mutex data_mutex;

  rs2::motion_frame last_gyro_meas = rs2::motion_frame(rs2::frame());
  Eigen::deque<RsIMUData> gyro_data_queue;

  std::shared_ptr<RsIMUData> prev_accel_data;

  int processed_frame = 0;

  auto callback = [&](const rs2::frame &frame) {
    std::lock_guard<std::mutex> lock(data_mutex);

    if (auto fp = frame.as<rs2::motion_frame>()) {
      auto motion = frame.as<rs2::motion_frame>();

      if (motion && motion.get_profile().stream_type() == RS2_STREAM_GYRO &&
          motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
        RsIMUData d;
        d.timestamp = motion.get_timestamp();
        d.data << motion.get_motion_data().x, motion.get_motion_data().y,
            motion.get_motion_data().z;

        gyro_data_queue.emplace_back(d);
      }

      if (motion && motion.get_profile().stream_type() == RS2_STREAM_ACCEL &&
          motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
        RsIMUData d;
        d.timestamp = motion.get_timestamp();
        d.data << motion.get_motion_data().x, motion.get_motion_data().y,
            motion.get_motion_data().z;

        if (!prev_accel_data.get()) {
          prev_accel_data.reset(new RsIMUData(d));
        } else {
          BASALT_ASSERT(d.timestamp > prev_accel_data->timestamp);

          while (!gyro_data_queue.empty() && gyro_data_queue.front().timestamp <
                                                 prev_accel_data->timestamp) {
            std::cout << "Skipping gyro data. Timestamp before the first accel "
                         "measurement.";
            gyro_data_queue.pop_front();
          }

          while (!gyro_data_queue.empty() &&
                 gyro_data_queue.front().timestamp < d.timestamp) {
            RsIMUData gyro_data = gyro_data_queue.front();
            gyro_data_queue.pop_front();

            double w0 = (d.timestamp - gyro_data.timestamp) /
                        (d.timestamp - prev_accel_data->timestamp);

            double w1 = (gyro_data.timestamp - prev_accel_data->timestamp) /
                        (d.timestamp - prev_accel_data->timestamp);

            Eigen::Vector3d accel_interpolated =
                w0 * prev_accel_data->data + w1 * d.data;

            if (record) {
              int64_t timestamp = gyro_data.timestamp * 1e6;
              imu0_data << timestamp << "," << gyro_data.data[0] << ","
                        << gyro_data.data[1] << "," << gyro_data.data[2] << ","
                        << accel_interpolated[0] << "," << accel_interpolated[1]
                        << "," << accel_interpolated[2] << "\n";
            }

            imu_log.Log(accel_interpolated[0], accel_interpolated[1],
                        accel_interpolated[2]);
          }

          prev_accel_data.reset(new RsIMUData(d));
        }
      }
    }

    if (auto fs = frame.as<rs2::frameset>()) {
      processed_frame++;
      if (processed_frame % int(skip_frames) != 0) return;

      for (int i = 0; i < NUM_CAMS; i++) {
        auto f = fs[i];
        if (!f.as<rs2::video_frame>()) {
          std::cout << "Weird Frame, skipping" << std::endl;
          continue;
        }
        auto vf = f.as<rs2::video_frame>();

        last_images[i].reset(new ImageData);
        last_images[i]->image = cv::Mat(vf.get_height(), vf.get_width(), CV_8U);
        std::memcpy(
            last_images[i]->image.ptr(), vf.get_data(),
            vf.get_width() * vf.get_height() * vf.get_bytes_per_pixel());

        last_images[i]->exposure_time =
            vf.get_frame_metadata(RS2_FRAME_METADATA_ACTUAL_EXPOSURE);

        last_images[i]->timestamp = vf.get_timestamp() * 1e6;
        last_images[i]->cam_id = i;

        if (record) {
          image_save_queue.push(last_images[i]);

          cam_data[i] << last_images[i]->timestamp << ","
                      << last_images[i]->timestamp << ".webp" << std::endl;

          exposure_data[i] << last_images[i]->timestamp << ","
                           << int64_t(vf.get_frame_metadata(
                                          RS2_FRAME_METADATA_ACTUAL_EXPOSURE) *
                                      1e3)
                           << std::endl;
        }
      }
    }
  };

  // Start streaming through the callback
  rs2::pipeline_profile profiles = pipe.start(cfg, callback);

  {
    auto sensors = profiles.get_device().query_sensors();

    for (auto &s : sensors) {
      std::cout << "Sensor " << s.get_info(RS2_CAMERA_INFO_NAME)
                << ". Supported options:" << std::endl;

      for (const auto &o : s.get_supported_options()) {
        std::cout << "\t" << rs2_option_to_string(o) << std::endl;
      }
    }
  }

  export_device_calibration(profiles, result_dir);
  t265_device->start();
  imu_log.reset(new pangolin::DataLog);

  if (show_gui) {
    pangolin::CreateWindowAndBind("Record RealSense T265", 1200, 800);

    pangolin::Var<std::function<void(void)>> record_btn("ui.record",
                                                        toggle_recording);
    pangolin::Var<std::function<void(void)>> record_btn(
        "ui.record", [&] { return toggleRecording(dataset_path); });
    pangolin::Var<std::function<void(void)>> export_calibration(
        "ui.export_calib", [&] { return save_calibration(t265_device); });

    std::atomic<int64_t> record_t_ns;
    record_t_ns = 0;
@@ -438,7 +352,7 @@ int main(int argc, char *argv[]) {
                                          pangolin::Attach::Pix(UI_WIDTH));

    std::vector<std::shared_ptr<pangolin::ImageView>> img_view;
    while (img_view.size() < NUM_CAMS) {
    while (img_view.size() < basalt::Device::NUM_CAMS) {
      int idx = img_view.size();
      std::shared_ptr<pangolin::ImageView> iv(new pangolin::ImageView);
@@ -448,47 +362,36 @@ int main(int argc, char *argv[]) {
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

        if (last_images[idx].get())
        if (t265_device->last_img_data.get())
          pangolin::GlFont::I()
              .Text("Exposure: %.3f ms.",
                    last_images[idx]->exposure_time / 1000.0)
                    t265_device->last_img_data->img_data[idx].exposure * 1000.0)
              .Draw(30, 30);

        if (idx == 0) {
          pangolin::GlFont::I()
              .Text("Queue: %d.", image_save_queue.size())
              .Text("Queue: %d.", image_data_queue.size())
              .Draw(30, 60);
        }

        if (idx == 0 && record) {
        if (idx == 0 && recording) {
          pangolin::GlFont::I().Text("Recording").Draw(30, 90);
        }
      };

      iv->OnSelectionCallback =
          [&](pangolin::ImageView::OnSelectionEventData o) {
            int64_t curr_t_ns = std::chrono::high_resolution_clock::now()
                                    .time_since_epoch()
                                    .count();
            if (std::abs(record_t_ns - curr_t_ns) > int64_t(2e9)) {
              toggle_recording();
              record_t_ns = curr_t_ns;
            }
          };

      img_view.push_back(iv);
      img_view_display.AddDisplay(*iv);
    }

    imu_log.Clear();
    imu_log->Clear();

    std::vector<std::string> labels;
    labels.push_back(std::string("accel x"));
    labels.push_back(std::string("accel y"));
    labels.push_back(std::string("accel z"));
    imu_log.SetLabels(labels);
    imu_log->SetLabels(labels);

    pangolin::Plotter plotter(&imu_log, 0.0f, 2000.0f, -15.0f, 15.0f, 0.1f,
    pangolin::Plotter plotter(imu_log.get(), 0.0f, 2000.0f, -15.0f, 15.0f, 0.1f,
                              0.1f);
    plotter.SetBounds(0.0, 1.0, 0.0, 1.0);
    plotter.Track("$i");
@@ -507,16 +410,30 @@ int main(int argc, char *argv[]) {
      {
        pangolin::GlPixFormat fmt;
        fmt.glformat = GL_LUMINANCE;
        fmt.gltype = GL_UNSIGNED_BYTE;
        fmt.scalable_internal_format = GL_LUMINANCE8;
        fmt.gltype = GL_UNSIGNED_SHORT;
        fmt.scalable_internal_format = GL_LUMINANCE16;

        for (size_t cam_id = 0; cam_id < NUM_CAMS; cam_id++) {
          if (last_images[cam_id].get())
            img_view[cam_id]->SetImage(last_images[cam_id]->image.ptr(),
                                       last_images[cam_id]->image.cols,
                                       last_images[cam_id]->image.rows,
                                       last_images[cam_id]->image.step, fmt);
        }
        if (t265_device->last_img_data.get())
          for (size_t cam_id = 0; cam_id < basalt::Device::NUM_CAMS; cam_id++) {
            if (t265_device->last_img_data->img_data[cam_id].img.get())
              img_view[cam_id]->SetImage(
                  t265_device->last_img_data->img_data[cam_id].img->ptr,
                  t265_device->last_img_data->img_data[cam_id].img->w,
                  t265_device->last_img_data->img_data[cam_id].img->h,
                  t265_device->last_img_data->img_data[cam_id].img->pitch, fmt);
          }
      }

      if (manual_exposure && exposure.GuiChanged()) {
        t265_device->setExposure(exposure);
      }

      if (webp_quality.GuiChanged()) {
        t265_device->setWebpQuality(webp_quality);
      }

      if (skip_frames.GuiChanged()) {
        t265_device->setSkipFrames(skip_frames);
      }

      pangolin::FinishFrame();
@ -525,26 +442,12 @@ int main(int argc, char *argv[]) {
|
|||
}
|
||||
}
|
||||
|
||||
if (recording) stopRecording();
|
||||
stop_workers = true;
|
||||
for (auto &t : worker_threads) {
|
||||
t.join();
|
||||
}
|
||||
|
||||
for (auto &t : worker_threads) t.join();
|
||||
imu_worker_thread.join();
|
||||
pose_worker_thread.join();
|
||||
|
||||
return EXIT_SUCCESS;
|
||||
}
|
||||
|
||||
std::string get_date() {
|
||||
constexpr int MAX_DATE = 64;
|
||||
time_t now;
|
||||
char the_date[MAX_DATE];
|
||||
|
||||
the_date[0] = '\0';
|
||||
|
||||
now = time(NULL);
|
||||
|
||||
if (now != -1) {
|
||||
strftime(the_date, MAX_DATE, "%Y_%m_%d_%H_%M_%S", gmtime(&now));
|
||||
}
|
||||
|
||||
return std::string(the_date);
|
||||
}
|
||||
|
|
|
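Note on the exposure overlay above: the old code divided exposure_time by 1000 while the new code multiplies img_data[idx].exposure by 1000, so both print milliseconds only if the old field was in microseconds and the new per-frame exposure value is in seconds. A minimal sketch of that assumption (the variable names below are placeholders, not part of the commit):

    // Hedged sketch: assumed units behind the "/ 1000.0" -> "* 1000.0" change.
    #include <cstdio>
    int main() {
      double exposure_time_us = 5900.0;  // old field, assumed microseconds
      double exposure_s = 0.0059;        // new exposure field, assumed seconds
      std::printf("Exposure: %.3f ms.\n", exposure_time_us / 1000.0);  // old overlay
      std::printf("Exposure: %.3f ms.\n", exposure_s * 1000.0);        // new overlay
      return 0;
    }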
src/rs_t265_vio.cpp
@ -0,0 +1,488 @@
/**
BSD 3-Clause License

This file is part of the Basalt project.
https://gitlab.com/VladyslavUsenko/basalt.git

Copyright (c) 2019, Vladyslav Usenko, Michael Loipführer and Nikolaus Demmel.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <iostream>
#include <memory>
#include <thread>

#include <sophus/se3.hpp>

#include <tbb/concurrent_unordered_map.h>
#include <tbb/tbb.h>

#include <pangolin/display/image_view.h>
#include <pangolin/gl/gldraw.h>
#include <pangolin/image/image.h>
#include <pangolin/image/image_io.h>
#include <pangolin/image/typed_image.h>
#include <pangolin/pangolin.h>

#include <CLI/CLI.hpp>

#include <basalt/device/rs_t265.h>
#include <basalt/io/dataset_io.h>
#include <basalt/io/marg_data_io.h>
#include <basalt/spline/se3_spline.h>
#include <basalt/vi_estimator/vio_estimator.h>
#include <basalt/calibration/calibration.hpp>

#include <basalt/serialization/headers_serialization.h>

#include <basalt/utils/vis_utils.h>

// GUI functions
void draw_image_overlay(pangolin::View& v, size_t cam_id);
void draw_scene();
void load_data(const std::string& calib_path);
void draw_plots();

// Pangolin variables
constexpr int UI_WIDTH = 200;

basalt::Device::Ptr t265_device;

using Button = pangolin::Var<std::function<void(void)>>;

pangolin::DataLog imu_data_log, vio_data_log, error_data_log;
pangolin::Plotter* plotter;

pangolin::Var<bool> show_obs("ui.show_obs", true, false, true);
pangolin::Var<bool> show_ids("ui.show_ids", false, false, true);

pangolin::Var<bool> show_est_pos("ui.show_est_pos", true, false, true);
pangolin::Var<bool> show_est_vel("ui.show_est_vel", false, false, true);
pangolin::Var<bool> show_est_bg("ui.show_est_bg", false, false, true);
pangolin::Var<bool> show_est_ba("ui.show_est_ba", false, false, true);

pangolin::Var<bool> show_gt("ui.show_gt", true, false, true);

pangolin::Var<bool> follow("ui.follow", true, false, true);

// Visualization variables
basalt::VioVisualizationData::Ptr curr_vis_data;

tbb::concurrent_bounded_queue<basalt::VioVisualizationData::Ptr> out_vis_queue;
tbb::concurrent_bounded_queue<basalt::PoseVelBiasState::Ptr> out_state_queue;

std::vector<int64_t> vio_t_ns;
Eigen::vector<Eigen::Vector3d> vio_t_w_i;

std::string marg_data_path;

std::mutex m;
bool step_by_step = false;
std::atomic<bool> terminate;
int64_t curr_t_ns = -1;

// VIO variables
basalt::Calibration<double> calib;

basalt::VioConfig vio_config;
basalt::OpticalFlowBase::Ptr opt_flow_ptr;
basalt::VioEstimatorBase::Ptr vio;
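The two tbb::concurrent_bounded_queue globals above are the glue of the live pipeline: device, optical flow and estimator exchange shared_ptr payloads through bounded queues, and (at least in the t4 consumer further down) an empty pointer is treated as the shutdown sentinel. A minimal, self-contained sketch of that pattern, with a hypothetical Item payload standing in for the Basalt types:

    #include <tbb/concurrent_queue.h>
    #include <iostream>
    #include <memory>
    #include <thread>

    struct Item { int value; };  // hypothetical payload

    int main() {
      tbb::concurrent_bounded_queue<std::shared_ptr<Item>> queue;
      queue.set_capacity(10);  // producer blocks when the consumer falls behind

      std::thread consumer([&]() {
        std::shared_ptr<Item> item;
        while (true) {
          queue.pop(item);         // blocks until data is available
          if (!item.get()) break;  // empty pointer acts as the shutdown sentinel
          std::cout << "got " << item->value << std::endl;
        }
      });

      for (int i = 0; i < 3; i++) queue.push(std::make_shared<Item>(Item{i}));
      queue.push(nullptr);  // signal shutdown
      consumer.join();
      return 0;
    }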
int main(int argc, char** argv) {
  terminate = false;
  bool show_gui = true;
  bool print_queue = false;
  std::string cam_calib_path;
  std::string config_path;
  int num_threads = 0;

  CLI::App app{"RealSense T265 Live Vio"};

  app.add_option("--show-gui", show_gui, "Show GUI");
  app.add_option("--cam-calib", cam_calib_path,
                 "Ground-truth camera calibration used for simulation.");

  app.add_option("--marg-data", marg_data_path,
                 "Path to folder where marginalization data will be stored.");

  app.add_option("--print-queue", print_queue, "Print queue.");
  app.add_option("--config-path", config_path, "Path to config file.");
  app.add_option("--num-threads", num_threads, "Number of threads.");
  app.add_option("--step-by-step", step_by_step, "Path to config file.");

  if (num_threads > 0) {
    tbb::task_scheduler_init init(num_threads);
  }

  try {
    app.parse(argc, argv);
  } catch (const CLI::ParseError& e) {
    return app.exit(e);
  }

  if (!config_path.empty()) {
    vio_config.load(config_path);
  } else {
    vio_config.optical_flow_skip_frames = 2;
  }

  // realsense
  t265_device.reset(
      new basalt::Device(false, 1, 90, 10.0));  // TODO: add options?

  // startup device and load calibration
  t265_device->start();
  if (cam_calib_path.empty()) {
    calib = *t265_device->exportCalibration();
  } else {
    load_data(cam_calib_path);
  }

  opt_flow_ptr = basalt::OpticalFlowFactory::getOpticalFlow(vio_config, calib);
  t265_device->image_data_queue = &opt_flow_ptr->input_queue;

  vio = basalt::VioEstimatorFactory::getVioEstimator(vio_config, calib, 0.0001,
                                                     basalt::constants::g);
  vio->initialize(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero());
  t265_device->imu_data_queue = &vio->imu_data_queue;

  opt_flow_ptr->output_queue = &vio->vision_data_queue;
  if (show_gui) vio->out_vis_queue = &out_vis_queue;
  vio->out_state_queue = &out_state_queue;

  // basalt::MargDataSaver::Ptr marg_data_saver;
  //
  // if (!marg_data_path.empty()) {
  //   marg_data_saver.reset(new basalt::MargDataSaver(marg_data_path));
  //   vio->out_marg_queue = &marg_data_saver->in_marg_queue;
  // }

  vio_data_log.Clear();

  std::shared_ptr<std::thread> t3;

  if (show_gui)
    t3.reset(new std::thread([&]() {
      while (true) {
        out_vis_queue.pop(curr_vis_data);
      }

      std::cout << "Finished t3" << std::endl;
    }));

  std::thread t4([&]() {
    basalt::PoseVelBiasState::Ptr data;

    while (true) {
      out_state_queue.pop(data);

      if (!data.get()) break;

      int64_t t_ns = data->t_ns;

      if (curr_t_ns < 0) curr_t_ns = t_ns;

      // std::cerr << "t_ns " << t_ns << std::endl;
      Sophus::SE3d T_w_i = data->T_w_i;
      Eigen::Vector3d vel_w_i = data->vel_w_i;
      Eigen::Vector3d bg = data->bias_gyro;
      Eigen::Vector3d ba = data->bias_accel;

      vio_t_ns.emplace_back(data->t_ns);
      vio_t_w_i.emplace_back(T_w_i.translation());

      if (show_gui) {
        std::vector<float> vals;
        vals.push_back((t_ns - curr_t_ns) * 1e-9);

        for (int i = 0; i < 3; i++) vals.push_back(vel_w_i[i]);
        for (int i = 0; i < 3; i++) vals.push_back(T_w_i.translation()[i]);
        for (int i = 0; i < 3; i++) vals.push_back(bg[i]);
        for (int i = 0; i < 3; i++) vals.push_back(ba[i]);

        vio_data_log.Log(vals);
      }
    }

    std::cout << "Finished t4" << std::endl;
  });

  std::shared_ptr<std::thread> t5;

  if (print_queue) {
    t5.reset(new std::thread([&]() {
      while (true) {
        std::cout << "opt_flow_ptr->input_queue "
                  << opt_flow_ptr->input_queue.size()
                  << " opt_flow_ptr->output_queue "
                  << opt_flow_ptr->output_queue->size() << " out_state_queue "
                  << out_state_queue.size() << std::endl;
        sleep(1);
      }
    }));
  }
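For reading draw_plots() later in this file: each row that t4 logs into vio_data_log starts with the time offset and is followed by velocity, position, gyro bias and accel bias, which is why the position series below use "$4".."$6". A small reading aid (not code from the commit):

    // Column layout of a vio_data_log row as pushed by t4 ("$N" in the plotter
    // refers to the N-th value of a row).
    enum VioLogColumn {
      kTimeSec = 0,                     // (t_ns - curr_t_ns) * 1e-9
      kVelX = 1, kVelY = 2, kVelZ = 3,  // vel_w_i
      kPosX = 4, kPosY = 5, kPosZ = 6,  // T_w_i.translation()
      kGyroBiasX = 7, kGyroBiasY = 8, kGyroBiasZ = 9,
      kAccelBiasX = 10, kAccelBiasY = 11, kAccelBiasZ = 12
    };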
  if (show_gui) {
    pangolin::CreateWindowAndBind("RS T265 Vio", 1800, 1000);

    glEnable(GL_DEPTH_TEST);

    pangolin::View& img_view_display =
        pangolin::CreateDisplay()
            .SetBounds(0.4, 1.0, pangolin::Attach::Pix(UI_WIDTH), 0.4)
            .SetLayout(pangolin::LayoutEqual);

    pangolin::View& plot_display = pangolin::CreateDisplay().SetBounds(
        0.0, 0.4, pangolin::Attach::Pix(UI_WIDTH), 1.0);

    plotter =
        new pangolin::Plotter(&imu_data_log, 0.0, 100, -3.0, 3.0, 0.01f, 0.01f);
    plot_display.AddDisplay(*plotter);

    pangolin::CreatePanel("ui").SetBounds(0.0, 1.0, 0.0,
                                          pangolin::Attach::Pix(UI_WIDTH));

    std::vector<std::shared_ptr<pangolin::ImageView>> img_view;
    while (img_view.size() < calib.intrinsics.size()) {
      std::shared_ptr<pangolin::ImageView> iv(new pangolin::ImageView);

      size_t idx = img_view.size();
      img_view.push_back(iv);

      img_view_display.AddDisplay(*iv);
      iv->extern_draw_function =
          std::bind(&draw_image_overlay, std::placeholders::_1, idx);
    }

    Eigen::Vector3d cam_p(-0.5, -3, -5);
    cam_p = vio->getT_w_i_init().so3() * calib.T_i_c[0].so3() * cam_p;

    pangolin::OpenGlRenderState camera(
        pangolin::ProjectionMatrix(640, 480, 400, 400, 320, 240, 0.001, 10000),
        pangolin::ModelViewLookAt(cam_p[0], cam_p[1], cam_p[2], 0, 0, 0,
                                  pangolin::AxisZ));

    pangolin::View& display3D =
        pangolin::CreateDisplay()
            .SetAspect(-640 / 480.0)
            .SetBounds(0.4, 1.0, 0.4, 1.0)
            .SetHandler(new pangolin::Handler3D(camera));

    while (!pangolin::ShouldQuit()) {
      glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

      if (follow) {
        if (curr_vis_data.get()) {
          auto T_w_i = curr_vis_data->states.back();
          T_w_i.so3() = Sophus::SO3d();

          camera.Follow(T_w_i.matrix());
        }
      }

      display3D.Activate(camera);
      glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

      draw_scene();

      img_view_display.Activate();

      {
        pangolin::GlPixFormat fmt;
        fmt.glformat = GL_LUMINANCE;
        fmt.gltype = GL_UNSIGNED_SHORT;
        fmt.scalable_internal_format = GL_LUMINANCE16;

        if (curr_vis_data.get() && curr_vis_data->opt_flow_res.get() &&
            curr_vis_data->opt_flow_res->input_images.get()) {
          auto& img_data = curr_vis_data->opt_flow_res->input_images->img_data;

          for (size_t cam_id = 0; cam_id < basalt::Device::NUM_CAMS; cam_id++) {
            if (img_data[cam_id].img.get())
              img_view[cam_id]->SetImage(
                  img_data[cam_id].img->ptr, img_data[cam_id].img->w,
                  img_data[cam_id].img->h, img_data[cam_id].img->pitch, fmt);
          }
        }

        draw_plots();
      }

      if (show_est_vel.GuiChanged() || show_est_pos.GuiChanged() ||
          show_est_ba.GuiChanged() || show_est_bg.GuiChanged()) {
        draw_plots();
      }

      pangolin::FinishFrame();
    }
  }

  terminate = true;
  if (t3.get()) t3->join();
  t4.join();

  return 0;
}
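On shutdown, main() sets terminate and joins t3 and t4. t4 returns once out_state_queue delivers an empty pointer, but the t3 loop above never breaks, so its trailing "Finished t3" print is unreachable as committed; for that join to return, a sentinel check along the following lines would presumably be needed (a sketch, not part of this commit):

    // Hedged sketch of a t3 loop with an exit condition (not in the committed code):
    while (true) {
      out_vis_queue.pop(curr_vis_data);
      if (!curr_vis_data.get()) break;  // empty pointer = shutdown sentinel
    }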
void draw_image_overlay(pangolin::View& v, size_t cam_id) {
  if (show_obs) {
    glLineWidth(1.0);
    glColor3f(1.0, 0.0, 0.0);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    if (curr_vis_data.get() && cam_id < curr_vis_data->projections.size()) {
      const auto& points = curr_vis_data->projections[cam_id];

      if (!points.empty()) {
        double min_id = points[0][2], max_id = points[0][2];

        for (const auto& points2 : curr_vis_data->projections)
          for (const auto& p : points2) {
            min_id = std::min(min_id, p[2]);
            max_id = std::max(max_id, p[2]);
          }

        for (const auto& c : points) {
          const float radius = 6.5;

          float r, g, b;
          getcolor(c[2] - min_id, max_id - min_id, b, g, r);
          glColor3f(r, g, b);

          pangolin::glDrawCirclePerimeter(c[0], c[1], radius);

          if (show_ids)
            pangolin::GlFont::I().Text("%d", int(c[3])).Draw(c[0], c[1]);
        }
      }

      glColor3f(1.0, 0.0, 0.0);
      pangolin::GlFont::I()
          .Text("Tracked %d points", points.size())
          .Draw(5, 20);
    }
  }
}

void draw_scene() {
  glPointSize(3);
  glColor3f(1.0, 0.0, 0.0);
  glEnable(GL_BLEND);
  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

  glColor3ubv(cam_color);
  Eigen::vector<Eigen::Vector3d> sub_gt(vio_t_w_i.begin(), vio_t_w_i.end());
  pangolin::glDrawLineStrip(sub_gt);

  if (curr_vis_data.get()) {
    for (const auto& p : curr_vis_data->states)
      for (const auto& t_i_c : calib.T_i_c)
        render_camera((p * t_i_c).matrix(), 2.0f, state_color, 0.1f);

    for (const auto& p : curr_vis_data->frames)
      for (const auto& t_i_c : calib.T_i_c)
        render_camera((p * t_i_c).matrix(), 2.0f, pose_color, 0.1f);

    for (const auto& t_i_c : calib.T_i_c)
      render_camera((curr_vis_data->states.back() * t_i_c).matrix(), 2.0f,
                    cam_color, 0.1f);

    glColor3ubv(pose_color);
    pangolin::glDrawPoints(curr_vis_data->points);
  }

  pangolin::glDrawAxis(Sophus::SE3d().matrix(), 1.0);
}

void load_data(const std::string& calib_path) {
  std::ifstream os(calib_path, std::ios::binary);

  if (os.is_open()) {
    cereal::JSONInputArchive archive(os);
    archive(calib);
    std::cout << "Loaded camera with " << calib.intrinsics.size() << " cameras"
              << std::endl;

  } else {
    std::cerr << "could not load camera calibration " << calib_path
              << std::endl;
    std::abort();
  }
}

void draw_plots() {
  plotter->ClearSeries();
  plotter->ClearMarkers();

  if (show_est_pos) {
    plotter->AddSeries("$0", "$4", pangolin::DrawingModeLine,
                       pangolin::Colour::Red(), "position x", &vio_data_log);
    plotter->AddSeries("$0", "$5", pangolin::DrawingModeLine,
                       pangolin::Colour::Green(), "position y", &vio_data_log);
    plotter->AddSeries("$0", "$6", pangolin::DrawingModeLine,
                       pangolin::Colour::Blue(), "position z", &vio_data_log);
  }

  if (show_est_vel) {
    plotter->AddSeries("$0", "$1", pangolin::DrawingModeLine,
                       pangolin::Colour::Red(), "velocity x", &vio_data_log);
    plotter->AddSeries("$0", "$2", pangolin::DrawingModeLine,
                       pangolin::Colour::Green(), "velocity y", &vio_data_log);
    plotter->AddSeries("$0", "$3", pangolin::DrawingModeLine,
                       pangolin::Colour::Blue(), "velocity z", &vio_data_log);
  }

  if (show_est_bg) {
    plotter->AddSeries("$0", "$7", pangolin::DrawingModeLine,
                       pangolin::Colour::Red(), "gyro bias x", &vio_data_log);
    plotter->AddSeries("$0", "$8", pangolin::DrawingModeLine,
                       pangolin::Colour::Green(), "gyro bias y", &vio_data_log);
    plotter->AddSeries("$0", "$9", pangolin::DrawingModeLine,
                       pangolin::Colour::Blue(), "gyro bias z", &vio_data_log);
  }

  if (show_est_ba) {
    plotter->AddSeries("$0", "$10", pangolin::DrawingModeLine,
                       pangolin::Colour::Red(), "accel bias x", &vio_data_log);
    plotter->AddSeries("$0", "$11", pangolin::DrawingModeLine,
                       pangolin::Colour::Green(), "accel bias y",
                       &vio_data_log);
    plotter->AddSeries("$0", "$12", pangolin::DrawingModeLine,
                       pangolin::Colour::Blue(), "accel bias z", &vio_data_log);
  }

  if (t265_device->last_img_data.get()) {
    double t = t265_device->last_img_data->t_ns * 1e-9;
    plotter->AddMarker(pangolin::Marker::Vertical, t, pangolin::Marker::Equal,
                       pangolin::Colour::White());
  }
}
@ -51,6 +51,7 @@ VioConfig::VioConfig() {
  optical_flow_max_iterations = 5;
  optical_flow_levels = 3;
  optical_flow_epipolar_error = 0.005;
  optical_flow_skip_frames = 1;

  vio_max_states = 3;
  vio_max_kfs = 7;

@ -106,6 +107,7 @@ void serialize(Archive& ar, basalt::VioConfig& config) {
  ar(CEREAL_NVP(config.optical_flow_max_iterations));
  ar(CEREAL_NVP(config.optical_flow_epipolar_error));
  ar(CEREAL_NVP(config.optical_flow_levels));
  ar(CEREAL_NVP(config.optical_flow_skip_frames));

  ar(CEREAL_NVP(config.vio_max_states));
  ar(CEREAL_NVP(config.vio_max_kfs));
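The new optical_flow_skip_frames option defaults to 1 (process every frame); the live T265 tool sets it to 2 when no config file is given. The optical-flow frontend itself is not part of this diff, but the option is presumably consumed as a simple frame decimator, roughly as in the sketch below (an assumption, not the actual frontend code):

    // Hedged sketch: hypothetical use of optical_flow_skip_frames as a decimator.
    struct FrameDecimator {
      int skip_frames = 1;  // from VioConfig::optical_flow_skip_frames
      int counter = 0;

      // Returns true for one frame out of every `skip_frames`;
      // skip_frames == 1 keeps every frame.
      bool should_process() { return (counter++ % skip_frames) == 0; }
    };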
@ -47,21 +47,17 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
namespace basalt {

KeypointVioEstimator::KeypointVioEstimator(
    int64_t t_ns, const Sophus::SE3d& T_w_i, const Eigen::Vector3d& vel_w_i,
    const Eigen::Vector3d& bg, const Eigen::Vector3d& ba, double int_std_dev,
    const Eigen::Vector3d& g, const basalt::Calibration<double>& calib,
    const VioConfig& config)
    : take_kf(true), frames_after_kf(0), g(g), config(config) {
    double int_std_dev, const Eigen::Vector3d& g,
    const basalt::Calibration<double>& calib, const VioConfig& config)
    : take_kf(true),
      frames_after_kf(0),
      g(g),
      initialized(false),
      config(config) {
  this->obs_std_dev = config.vio_obs_std_dev;
  this->huber_thresh = config.vio_obs_huber_thresh;
  this->calib = calib;

  last_state_t_ns = t_ns;

  imu_meas[t_ns] = IntegratedImuMeasurement(t_ns, bg, ba);
  frame_states[t_ns] =
      PoseVelBiasStateWithLin(t_ns, T_w_i, vel_w_i, bg, ba, true);

  // Setup marginalization
  marg_H.setZero(POSE_VEL_BIAS_SIZE, POSE_VEL_BIAS_SIZE);
  marg_b.setZero(POSE_VEL_BIAS_SIZE);

@ -77,10 +73,6 @@ KeypointVioEstimator::KeypointVioEstimator(

  std::cout << "marg_H\n" << marg_H << std::endl;

  marg_order.abs_order_map[t_ns] = std::make_pair(0, POSE_VEL_BIAS_SIZE);
  marg_order.total_size = POSE_VEL_BIAS_SIZE;
  marg_order.items = 1;

  gyro_bias_weight.setConstant(1.0 /
                               (calib.gyro_bias_std * calib.gyro_bias_std));
  accel_bias_weight.setConstant(1.0 /

@ -93,8 +85,29 @@ KeypointVioEstimator::KeypointVioEstimator(

  vision_data_queue.set_capacity(10);
  imu_data_queue.set_capacity(300);
}

  auto proc_func = [&] {
void KeypointVioEstimator::initialize(int64_t t_ns, const Sophus::SE3d& T_w_i,
                                      const Eigen::Vector3d& vel_w_i,
                                      const Eigen::Vector3d& bg,
                                      const Eigen::Vector3d& ba) {
  initialized = true;
  T_w_i_init = T_w_i;

  imu_meas[t_ns] = IntegratedImuMeasurement(t_ns, bg, ba);
  frame_states[t_ns] =
      PoseVelBiasStateWithLin(t_ns, T_w_i, vel_w_i, bg, ba, true);

  marg_order.abs_order_map[t_ns] = std::make_pair(0, POSE_VEL_BIAS_SIZE);
  marg_order.total_size = POSE_VEL_BIAS_SIZE;
  marg_order.items = 1;

  initialize(bg, ba);
}

void KeypointVioEstimator::initialize(const Eigen::Vector3d& bg,
                                      const Eigen::Vector3d& ba) {
  auto proc_func = [&, bg, ba] {
    OpticalFlowResult::Ptr prev_frame, curr_frame;
    IntegratedImuMeasurement::Ptr meas;

@ -104,6 +117,31 @@ KeypointVioEstimator::KeypointVioEstimator(
    while (true) {
      vision_data_queue.pop(curr_frame);

      if (!initialized) {
        Eigen::Vector3d vel_w_i_init;
        vel_w_i_init.setZero();

        T_w_i_init.setQuaternion(Eigen::Quaterniond::FromTwoVectors(
            data->accel, Eigen::Vector3d::UnitZ()));

        last_state_t_ns = curr_frame->t_ns;
        imu_meas[last_state_t_ns] =
            IntegratedImuMeasurement(last_state_t_ns, bg, ba);
        frame_states[last_state_t_ns] = PoseVelBiasStateWithLin(
            last_state_t_ns, T_w_i_init, vel_w_i_init, bg, ba, true);

        marg_order.abs_order_map[last_state_t_ns] =
            std::make_pair(0, POSE_VEL_BIAS_SIZE);
        marg_order.total_size = POSE_VEL_BIAS_SIZE;
        marg_order.items = 1;

        std::cout << "Setting up filter: t_ns " << last_state_t_ns << std::endl;
        std::cout << "T_w_i\n" << T_w_i_init.matrix() << std::endl;
        std::cout << "vel_w_i " << vel_w_i_init.transpose() << std::endl;

        initialized = true;
      }

      if (!curr_frame.get()) {
        break;
      }

@ -347,6 +385,8 @@ bool KeypointVioEstimator::measure(const OpticalFlowResult::Ptr& opt_flow_meas,
    data->projections.resize(opt_flow_meas->observations.size());
    computeProjections(data->projections);

    data->opt_flow_res = prev_opt_flow_res[last_state_t_ns];

    out_vis_queue->push(data);
  }
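The in-loop initialization above aligns the first state with gravity: Eigen::Quaterniond::FromTwoVectors(accel, UnitZ()) yields the world-from-IMU rotation that maps the first accelerometer sample (which, while the device is roughly static, measures the reaction to gravity and so points "up" in the IMU frame) onto the world +Z axis. A small self-contained check of that property, with a made-up accelerometer reading:

    #include <iostream>
    #include <Eigen/Geometry>

    int main() {
      // First accelerometer sample of a roughly static, slightly tilted device.
      Eigen::Vector3d accel(0.3, -0.2, 9.78);

      // Same construction as in KeypointVioEstimator: rotation that sends the
      // measured specific force to the world +Z axis.
      Eigen::Quaterniond q = Eigen::Quaterniond::FromTwoVectors(
          accel, Eigen::Vector3d::UnitZ());

      // The rotated sample now points straight up, i.e. ~ (0, 0, |accel|).
      std::cout << (q * accel).transpose() << std::endl;
      return 0;
    }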
@ -40,14 +40,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
namespace basalt {

VioEstimatorBase::Ptr VioEstimatorFactory::getVioEstimator(
    const VioConfig& config, const Calibration<double>& cam, int64_t t_ns,
    const Sophus::SE3d& T_w_i, const Eigen::Vector3d& vel_w_i,
    const Eigen::Vector3d& bg, const Eigen::Vector3d& ba, double int_std_dev,
    const VioConfig& config, const Calibration<double>& cam, double int_std_dev,
    const Eigen::Vector3d& g) {
  KeypointVioEstimator::Ptr res;

  res.reset(new KeypointVioEstimator(t_ns, T_w_i, vel_w_i, bg, ba, int_std_dev,
                                     g, cam, config));
  res.reset(new KeypointVioEstimator(int_std_dev, g, cam, config));

  res->setMaxKfs(config.vio_max_kfs);
  res->setMaxStates(config.vio_max_states);
src/vio.cpp
29
src/vio.cpp
|
@ -248,33 +248,10 @@ int main(int argc, char** argv) {
|
|||
}
|
||||
|
||||
const int64_t start_t_ns = vio_dataset->get_image_timestamps().front();
|
||||
Sophus::SE3d T_w_i_init;
|
||||
Eigen::Vector3d vel_w_i_init;
|
||||
|
||||
{
|
||||
int64_t t_init_ns = vio_dataset->get_image_timestamps()[0];
|
||||
|
||||
{
|
||||
T_w_i_init.setQuaternion(Eigen::Quaterniond::FromTwoVectors(
|
||||
vio_dataset->get_accel_data()[0].data, Eigen::Vector3d::UnitZ()));
|
||||
|
||||
std::cout << "T_w_i_init\n" << T_w_i_init.matrix() << std::endl;
|
||||
std::cout
|
||||
<< "accel_w "
|
||||
<< (T_w_i_init * vio_dataset->get_accel_data()[0].data).transpose()
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
vel_w_i_init.setZero();
|
||||
|
||||
std::cout << "Setting up filter: t_ns " << t_init_ns << std::endl;
|
||||
std::cout << "T_w_i\n" << T_w_i_init.matrix() << std::endl;
|
||||
std::cout << "vel_w_i " << vel_w_i_init.transpose() << std::endl;
|
||||
|
||||
vio = basalt::VioEstimatorFactory::getVioEstimator(
|
||||
vio_config, calib, t_init_ns, T_w_i_init, vel_w_i_init,
|
||||
Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero(), 0.0001,
|
||||
basalt::constants::g);
|
||||
vio_config, calib, 0.0001, basalt::constants::g);
|
||||
vio->initialize(Eigen::Vector3d::Zero(), Eigen::Vector3d::Zero());
|
||||
|
||||
opt_flow_ptr->output_queue = &vio->vision_data_queue;
|
||||
if (show_gui) vio->out_vis_queue = &out_vis_queue;
|
||||
|
@ -408,7 +385,7 @@ int main(int argc, char** argv) {
|
|||
}
|
||||
|
||||
Eigen::Vector3d cam_p(-0.5, -3, -5);
|
||||
cam_p = T_w_i_init.so3() * calib.T_i_c[0].so3() * cam_p;
|
||||
cam_p = vio->getT_w_i_init().so3() * calib.T_i_c[0].so3() * cam_p;
|
||||
|
||||
pangolin::OpenGlRenderState camera(
|
||||
pangolin::ProjectionMatrix(640, 480, 400, 400, 320, 240, 0.001, 10000),
|
||||
|
|
|
@ -871,9 +871,9 @@ void setup_vio() {

  basalt::VioConfig config;

  vio.reset(new basalt::KeypointVioEstimator(
      t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias.front(),
      gt_accel_bias.front(), 0.0001, g, calib, config));
  vio.reset(new basalt::KeypointVioEstimator(0.0001, g, calib, config));
  vio->initialize(t_init_ns, T_w_i_init, vel_w_i_init, gt_gyro_bias.front(),
                  gt_accel_bias.front());

  vio->setMaxStates(3);
  vio->setMaxKfs(5);
@ -1 +1 @@
Subproject commit 2fd7e3787332587e8972135804ae0a78d9f7c932
Subproject commit 7f32ad9e6a7739c903c7a4a8ea31f483b5410af1