Add initial realsense support

parent 7d29d019ec
commit 9c4a4fa726
@@ -179,7 +179,7 @@ endif()
 find_package(TBB REQUIRED)
 include_directories(${TBB_INCLUDE_DIR})

-find_package(OpenCV REQUIRED core imgproc calib3d)
+find_package(OpenCV REQUIRED core imgproc calib3d highgui)
 include_directories(${OpenCV_INCLUDE_DIR})
 message(STATUS "Found OpenCV headers in: ${OpenCV_INCLUDE_DIR}")
 message(STATUS "Found OpenCV_LIBS: ${OpenCV_LIBS}")

@@ -244,6 +244,13 @@ target_link_libraries(basalt_opt_flow basalt)
 add_executable(basalt_vio src/vio.cpp)
 target_link_libraries(basalt_vio basalt)

+find_package(realsense2 QUIET)
+if(realsense2_FOUND)
+add_executable(basalt_rs_t265_record src/rs_t265_record.cpp)
+target_link_libraries(basalt_rs_t265_record basalt realsense2::realsense2 ${OpenCV_LIBS})
+endif()
+
+

 install(TARGETS basalt_calibrate basalt_calibrate_imu basalt_vio_sim basalt_mapper_sim basalt_mapper_sim_naive basalt_mapper basalt_opt_flow basalt_vio basalt
 EXPORT BasaltTargets

@@ -64,8 +64,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <basalt/camera/generic_camera.hpp>
|
||||
#include <basalt/camera/stereographic_param.hpp>
|
||||
|
||||
#include <pangolin/image/image_io.h>
|
||||
|
||||
namespace basalt {
|
||||
|
||||
inline bool file_exists(const std::string &name) {
|
||||
|
|
|
@@ -40,6 +40,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <experimental/filesystem>
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
#include <opencv2/highgui.hpp>
|
||||
|
||||
namespace basalt {
|
||||
|
||||
class EurocVioDataset : public VioDataset {
|
||||
|
@@ -63,6 +65,8 @@ class EurocVioDataset : public VioDataset {
|
|||
|
||||
int64_t mocap_to_imu_offset_ns;
|
||||
|
||||
std::vector<std::unordered_map<int64_t, double>> exposure_times;
|
||||
|
||||
public:
|
||||
~EurocVioDataset(){};
|
||||
|
||||
|
@@ -90,27 +94,46 @@ class EurocVioDataset : public VioDataset {
|
|||
path + folder[i] + "data/" + image_path[t_ns];
|
||||
|
||||
if (file_exists(full_image_path)) {
|
||||
pangolin::TypedImage img = pangolin::LoadImage(full_image_path);
|
||||
cv::Mat img = cv::imread(full_image_path, cv::IMREAD_UNCHANGED);
|
||||
|
||||
if (img.fmt.bpp == 8) {
|
||||
res[i].img.reset(new ManagedImage<uint16_t>(img.w, img.h));
|
||||
if (img.type() == CV_8UC1) {
|
||||
res[i].img.reset(new ManagedImage<uint16_t>(img.cols, img.rows));
|
||||
|
||||
const uint8_t *data_in = img.ptr;
|
||||
const uint8_t *data_in = img.ptr();
|
||||
uint16_t *data_out = res[i].img->ptr;
|
||||
|
||||
for (size_t i = 0; i < img.size(); i++) {
|
||||
size_t full_size = img.cols * img.rows;
|
||||
for (size_t i = 0; i < full_size; i++) {
|
||||
int val = data_in[i];
|
||||
val = val << 8;
|
||||
data_out[i] = val;
|
||||
}
|
||||
} else if (img.fmt.bpp == 16) {
|
||||
res[i].img.reset(new ManagedImage<uint16_t>(img.w, img.h));
|
||||
std::memcpy(res[i].img->ptr, img.ptr, img.size() * sizeof(uint16_t));
|
||||
} else if (img.type() == CV_8UC3) {
|
||||
res[i].img.reset(new ManagedImage<uint16_t>(img.cols, img.rows));
|
||||
|
||||
const uint8_t *data_in = img.ptr();
|
||||
uint16_t *data_out = res[i].img->ptr;
|
||||
|
||||
size_t full_size = img.cols * img.rows;
|
||||
for (size_t i = 0; i < full_size; i++) {
|
||||
int val = data_in[i * 3];
|
||||
val = val << 8;
|
||||
data_out[i] = val;
|
||||
}
|
||||
} else if (img.type() == CV_16UC1) {
|
||||
res[i].img.reset(new ManagedImage<uint16_t>(img.cols, img.rows));
|
||||
std::memcpy(res[i].img->ptr, img.ptr(),
|
||||
img.cols * img.rows * sizeof(uint16_t));
|
||||
|
||||
} else {
|
||||
std::cerr << "img.fmt.bpp " << img.fmt.bpp << std::endl;
|
||||
std::cerr << "img.fmt.bpp " << img.type() << std::endl;
|
||||
std::abort();
|
||||
}
|
||||
|
||||
auto exp_it = exposure_times[i].find(t_ns);
|
||||
if (exp_it != exposure_times[i].end()) {
|
||||
res[i].exposure = exp_it->second;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
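Note on the hunk above: the EuRoC loader now reads images with cv::imread instead of pangolin::LoadImage and widens 8-bit data into the internal 16-bit ManagedImage by shifting each sample into the high byte. A minimal standalone sketch of that conversion, using the hypothetical helper name widen_to_16bit (not part of the commit) and assuming a continuous CV_8UC1 matrix:

#include <cstdint>
#include <vector>
#include <opencv2/core.hpp>

// Hypothetical helper: widen an 8-bit grayscale image to 16 bits by shifting
// each value into the high byte, as the CV_8UC1 branch in the hunk does.
std::vector<uint16_t> widen_to_16bit(const cv::Mat &img8) {
  std::vector<uint16_t> out(size_t(img8.cols) * size_t(img8.rows));
  const uint8_t *in = img8.ptr();  // assumes a continuous matrix
  for (size_t i = 0; i < out.size(); i++) {
    out[i] = uint16_t(in[i]) << 8;  // 0..255 -> 0..65280
  }
  return out;
}

The CV_8UC3 branch follows the same pattern but samples only the first channel of each packed pixel (data_in[i * 3]).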
@@ -144,6 +167,16 @@ class EurocIO : public DatasetIoInterface {
|
|||
} else if (file_exists(path + "/mav0/mocap0/data.csv")) {
|
||||
read_gt_data_pose(path + "/mav0/mocap0/");
|
||||
}
|
||||
|
||||
data->exposure_times.resize(data->num_cams);
|
||||
if (file_exists(path + "/mav0/cam0/exposure.csv")) {
|
||||
std::cout << "Loading exposure times for cam0" << std::endl;
|
||||
read_exposure(path + "/mav0/cam0/", data->exposure_times[0]);
|
||||
}
|
||||
if (file_exists(path + "/mav0/cam1/exposure.csv")) {
|
||||
std::cout << "Loading exposure times for cam1" << std::endl;
|
||||
read_exposure(path + "/mav0/cam1/", data->exposure_times[1]);
|
||||
}
|
||||
}
|
||||
|
||||
void reset() { data.reset(); }
|
||||
|
@@ -151,6 +184,27 @@ class EurocIO : public DatasetIoInterface {
|
|||
VioDatasetPtr get_data() { return data; }
|
||||
|
||||
private:
|
||||
void read_exposure(const std::string &path,
|
||||
std::unordered_map<int64_t, double> &exposure_data) {
|
||||
exposure_data.clear();
|
||||
|
||||
std::ifstream f(path + "exposure.csv");
|
||||
std::string line;
|
||||
while (std::getline(f, line)) {
|
||||
if (line[0] == '#') continue;
|
||||
|
||||
std::stringstream ss(line);
|
||||
|
||||
char tmp;
|
||||
int64_t timestamp, exposure_int;
|
||||
Eigen::Vector3d gyro, accel;
|
||||
|
||||
ss >> timestamp >> tmp >> exposure_int;
|
||||
|
||||
exposure_data[timestamp] = exposure_int * 1e-9;
|
||||
}
|
||||
}
|
||||
|
||||
void read_image_timestamps(const std::string &path) {
|
||||
std::ifstream f(path + "data.csv");
|
||||
std::string line;
|
||||
|
|
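The read_exposure() helper added above parses lines of the form "<timestamp_ns>,<exposure_ns>" and stores the exposure in seconds. A self-contained sketch of that parsing step, with the hypothetical name parse_exposure_line:

#include <cstdint>
#include <sstream>
#include <string>
#include <unordered_map>

// Illustrative only: parse one exposure.csv line into the timestamp -> seconds
// map, mirroring the loop in read_exposure() above.
void parse_exposure_line(const std::string &line,
                         std::unordered_map<int64_t, double> &exposure_data) {
  if (line.empty() || line[0] == '#') return;  // skip the CSV header/comments
  std::stringstream ss(line);
  char comma;
  int64_t timestamp_ns, exposure_ns;
  ss >> timestamp_ns >> comma >> exposure_ns;
  exposure_data[timestamp_ns] = exposure_ns * 1e-9;  // nanoseconds -> seconds
}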
|
@@ -40,8 +40,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <basalt/calibration/calibration.hpp>
|
||||
#include <basalt/camera/stereographic_param.hpp>
|
||||
|
||||
#include <pangolin/image/managed_image.h>
|
||||
|
||||
#include <tbb/tbb.h>
|
||||
|
||||
namespace basalt {
|
||||
|
|
|
@@ -66,12 +66,25 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
||||
#include <memory>
|
||||
|
||||
#include <pangolin/image/image.h>
|
||||
#include <pangolin/image/managed_image.h>
|
||||
#include <pangolin/image/typed_image.h>
|
||||
#include <basalt/utils/assert.h>
|
||||
|
||||
// Renamed Pangolin defines to avoid clash
|
||||
#define BASALT_HOST_DEVICE
|
||||
#define BASALT_EXTENSION_IMAGE
|
||||
#ifdef BASALT_ENABLE_BOUNDS_CHECKS
|
||||
#define BASALT_BOUNDS_ASSERT(...) BASALT_ASSERT(##__VA_ARGS__)
|
||||
#else
|
||||
#define BASALT_BOUNDS_ASSERT(...) ((void)0)
|
||||
#endif
|
||||
|
||||
namespace basalt {
|
||||
|
||||
template <typename T>
|
||||
struct CopyObject {
|
||||
CopyObject(const T& obj) : obj(obj) {}
|
||||
const T& obj;
|
||||
};
|
||||
|
||||
inline void PitchedCopy(char* dst, unsigned int dst_pitch_bytes,
|
||||
const char* src, unsigned int src_pitch_bytes,
|
||||
unsigned int width_bytes, unsigned int height) {
|
||||
|
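The hunk above ends at the declaration of PitchedCopy, whose body lies outside the diff context. A typical pitched copy walks the image row by row so that source and destination may use different strides; the sketch below (hypothetical name PitchedCopySketch, assumed behaviour rather than the committed implementation) illustrates the idea:

#include <cstring>

// Assumed behaviour: copy width_bytes from each of `height` rows while
// honouring the two, possibly different, row pitches.
inline void PitchedCopySketch(char *dst, unsigned int dst_pitch_bytes,
                              const char *src, unsigned int src_pitch_bytes,
                              unsigned int width_bytes, unsigned int height) {
  if (dst_pitch_bytes == width_bytes && src_pitch_bytes == width_bytes) {
    std::memcpy(dst, src, size_t(width_bytes) * height);  // contiguous fast path
  } else {
    for (unsigned int row = 0; row < height; ++row) {
      std::memcpy(dst + size_t(row) * dst_pitch_bytes,
                  src + size_t(row) * src_pitch_bytes, width_bytes);
    }
  }
}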
@@ -93,42 +106,37 @@ struct Image {
|
|||
inline Image(T* ptr, size_t w, size_t h, size_t pitch)
|
||||
: pitch(pitch), ptr(ptr), w(w), h(h) {}
|
||||
|
||||
PANGO_HOST_DEVICE inline size_t SizeBytes() const { return pitch * h; }
|
||||
BASALT_HOST_DEVICE inline size_t SizeBytes() const { return pitch * h; }
|
||||
|
||||
PANGO_HOST_DEVICE inline size_t Area() const { return w * h; }
|
||||
BASALT_HOST_DEVICE inline size_t Area() const { return w * h; }
|
||||
|
||||
PANGO_HOST_DEVICE inline bool IsValid() const { return ptr != 0; }
|
||||
BASALT_HOST_DEVICE inline bool IsValid() const { return ptr != 0; }
|
||||
|
||||
PANGO_HOST_DEVICE inline bool IsContiguous() const {
|
||||
BASALT_HOST_DEVICE inline bool IsContiguous() const {
|
||||
return w * sizeof(T) == pitch;
|
||||
}
|
||||
|
||||
pangolin::Image<T> toPangoImage() {
|
||||
pangolin::Image<T> img(ptr, w, h, pitch);
|
||||
return img;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////
|
||||
// Iterators
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
PANGO_HOST_DEVICE inline T* begin() { return ptr; }
|
||||
BASALT_HOST_DEVICE inline T* begin() { return ptr; }
|
||||
|
||||
PANGO_HOST_DEVICE inline T* end() { return RowPtr(h - 1) + w; }
|
||||
BASALT_HOST_DEVICE inline T* end() { return RowPtr(h - 1) + w; }
|
||||
|
||||
PANGO_HOST_DEVICE inline const T* begin() const { return ptr; }
|
||||
BASALT_HOST_DEVICE inline const T* begin() const { return ptr; }
|
||||
|
||||
PANGO_HOST_DEVICE inline const T* end() const { return RowPtr(h - 1) + w; }
|
||||
BASALT_HOST_DEVICE inline const T* end() const { return RowPtr(h - 1) + w; }
|
||||
|
||||
PANGO_HOST_DEVICE inline size_t size() const { return w * h; }
|
||||
BASALT_HOST_DEVICE inline size_t size() const { return w * h; }
|
||||
|
||||
//////////////////////////////////////////////////////
|
||||
// Image transforms
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
template <typename UnaryOperation>
|
||||
PANGO_HOST_DEVICE inline void Transform(UnaryOperation unary_op) {
|
||||
PANGO_ASSERT(IsValid());
|
||||
BASALT_HOST_DEVICE inline void Transform(UnaryOperation unary_op) {
|
||||
BASALT_ASSERT(IsValid());
|
||||
|
||||
for (size_t y = 0; y < h; ++y) {
|
||||
T* el = RowPtr(y);
|
||||
|
@@ -139,32 +147,32 @@ struct Image {
|
|||
}
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline void Fill(const T& val) {
|
||||
BASALT_HOST_DEVICE inline void Fill(const T& val) {
|
||||
Transform([&](const T&) { return val; });
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline void Replace(const T& oldval, const T& newval) {
|
||||
BASALT_HOST_DEVICE inline void Replace(const T& oldval, const T& newval) {
|
||||
Transform([&](const T& val) { return (val == oldval) ? newval : val; });
|
||||
}
|
||||
|
||||
inline void Memset(unsigned char v = 0) {
|
||||
PANGO_ASSERT(IsValid());
|
||||
BASALT_ASSERT(IsValid());
|
||||
if (IsContiguous()) {
|
||||
::pangolin::Memset((char*)ptr, v, pitch * h);
|
||||
std::memset((char*)ptr, v, pitch * h);
|
||||
} else {
|
||||
for (size_t y = 0; y < h; ++y) {
|
||||
::pangolin::Memset((char*)RowPtr(y), v, pitch);
|
||||
std::memset((char*)RowPtr(y), v, pitch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
inline void CopyFrom(const Image<T>& img) {
|
||||
if (IsValid() && img.IsValid()) {
|
||||
PANGO_ASSERT(w >= img.w && h >= img.h);
|
||||
BASALT_ASSERT(w >= img.w && h >= img.h);
|
||||
PitchedCopy((char*)ptr, pitch, (char*)img.ptr, img.pitch,
|
||||
std::min(img.w, w) * sizeof(T), std::min(img.h, h));
|
||||
} else if (img.IsValid() != IsValid()) {
|
||||
PANGO_ASSERT(false && "Cannot copy from / to an unasigned image.");
|
||||
BASALT_ASSERT(false && "Cannot copy from / to an unasigned image.");
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -173,9 +181,9 @@ struct Image {
|
|||
//////////////////////////////////////////////////////
|
||||
|
||||
template <typename BinaryOperation>
|
||||
PANGO_HOST_DEVICE inline T Accumulate(const T init,
|
||||
BinaryOperation binary_op) {
|
||||
PANGO_ASSERT(IsValid());
|
||||
BASALT_HOST_DEVICE inline T Accumulate(const T init,
|
||||
BinaryOperation binary_op) {
|
||||
BASALT_ASSERT(IsValid());
|
||||
|
||||
T val = init;
|
||||
for (size_t y = 0; y < h; ++y) {
|
||||
|
@@ -189,7 +197,7 @@ struct Image {
|
|||
}
|
||||
|
||||
std::pair<T, T> MinMax() const {
|
||||
PANGO_ASSERT(IsValid());
|
||||
BASALT_ASSERT(IsValid());
|
||||
|
||||
std::pair<T, T> minmax(std::numeric_limits<T>::max(),
|
||||
std::numeric_limits<T>::lowest());
|
||||
|
@@ -220,43 +228,43 @@ struct Image {
|
|||
// Direct Pixel Access
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
PANGO_HOST_DEVICE inline T* RowPtr(size_t y) {
|
||||
BASALT_HOST_DEVICE inline T* RowPtr(size_t y) {
|
||||
return (T*)((unsigned char*)(ptr) + y * pitch);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline const T* RowPtr(size_t y) const {
|
||||
BASALT_HOST_DEVICE inline const T* RowPtr(size_t y) const {
|
||||
return (T*)((unsigned char*)(ptr) + y * pitch);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline T& operator()(size_t x, size_t y) {
|
||||
PANGO_BOUNDS_ASSERT(InBounds(x, y));
|
||||
BASALT_HOST_DEVICE inline T& operator()(size_t x, size_t y) {
|
||||
BASALT_BOUNDS_ASSERT(InBounds(x, y));
|
||||
return RowPtr(y)[x];
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline const T& operator()(size_t x, size_t y) const {
|
||||
PANGO_BOUNDS_ASSERT(InBounds(x, y));
|
||||
BASALT_HOST_DEVICE inline const T& operator()(size_t x, size_t y) const {
|
||||
BASALT_BOUNDS_ASSERT(InBounds(x, y));
|
||||
return RowPtr(y)[x];
|
||||
}
|
||||
|
||||
template <typename TVec>
|
||||
PANGO_HOST_DEVICE inline T& operator()(const TVec& p) {
|
||||
PANGO_BOUNDS_ASSERT(InBounds(p[0], p[1]));
|
||||
BASALT_HOST_DEVICE inline T& operator()(const TVec& p) {
|
||||
BASALT_BOUNDS_ASSERT(InBounds(p[0], p[1]));
|
||||
return RowPtr(p[1])[p[0]];
|
||||
}
|
||||
|
||||
template <typename TVec>
|
||||
PANGO_HOST_DEVICE inline const T& operator()(const TVec& p) const {
|
||||
PANGO_BOUNDS_ASSERT(InBounds(p[0], p[1]));
|
||||
BASALT_HOST_DEVICE inline const T& operator()(const TVec& p) const {
|
||||
BASALT_BOUNDS_ASSERT(InBounds(p[0], p[1]));
|
||||
return RowPtr(p[1])[p[0]];
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline T& operator[](size_t ix) {
|
||||
PANGO_BOUNDS_ASSERT(InImage(ptr + ix));
|
||||
BASALT_HOST_DEVICE inline T& operator[](size_t ix) {
|
||||
BASALT_BOUNDS_ASSERT(InImage(ptr + ix));
|
||||
return ptr[ix];
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline const T& operator[](size_t ix) const {
|
||||
PANGO_BOUNDS_ASSERT(InImage(ptr + ix));
|
||||
BASALT_HOST_DEVICE inline const T& operator[](size_t ix) const {
|
||||
BASALT_BOUNDS_ASSERT(InImage(ptr + ix));
|
||||
return ptr[ix];
|
||||
}
|
||||
|
||||
|
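All of the accessors renamed above route through RowPtr, which advances by pitch bytes rather than by w elements, so rows may be padded. A compact illustration of that addressing scheme (hypothetical helper pixel_at, not from the commit):

#include <cstddef>

// Illustrative only: pitch is given in bytes, so row addressing must go
// through a byte pointer before casting back to T*.
template <typename T>
T &pixel_at(T *ptr, size_t pitch_bytes, size_t x, size_t y) {
  T *row = reinterpret_cast<T *>(reinterpret_cast<unsigned char *>(ptr) +
                                 y * pitch_bytes);
  return row[x];
}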
@@ -346,21 +354,22 @@ struct Image {
|
|||
// Bounds Checking
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
PANGO_HOST_DEVICE
|
||||
BASALT_HOST_DEVICE
|
||||
bool InImage(const T* ptest) const {
|
||||
return ptr <= ptest && ptest < RowPtr(h);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline bool InBounds(int x, int y) const {
|
||||
BASALT_HOST_DEVICE inline bool InBounds(int x, int y) const {
|
||||
return 0 <= x && x < (int)w && 0 <= y && y < (int)h;
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline bool InBounds(float x, float y, float border) const {
|
||||
BASALT_HOST_DEVICE inline bool InBounds(float x, float y,
|
||||
float border) const {
|
||||
return border <= x && x < (w - border) && border <= y && y < (h - border);
|
||||
}
|
||||
|
||||
template <typename TVec, typename TBorder>
|
||||
PANGO_HOST_DEVICE inline bool InBounds(
|
||||
BASALT_HOST_DEVICE inline bool InBounds(
|
||||
const TVec& p, const TBorder border = (TBorder)0) const {
|
||||
return border <= p[0] && p[0] < ((int)w - border) && border <= p[1] &&
|
||||
p[1] < ((int)h - border);
|
||||
|
@@ -370,24 +379,24 @@ struct Image {
|
|||
// Obtain slices / subimages
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
PANGO_HOST_DEVICE inline const Image<const T> SubImage(size_t x, size_t y,
|
||||
size_t width,
|
||||
size_t height) const {
|
||||
PANGO_ASSERT((x + width) <= w && (y + height) <= h);
|
||||
BASALT_HOST_DEVICE inline const Image<const T> SubImage(size_t x, size_t y,
|
||||
size_t width,
|
||||
size_t height) const {
|
||||
BASALT_ASSERT((x + width) <= w && (y + height) <= h);
|
||||
return Image<const T>(RowPtr(y) + x, width, height, pitch);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline Image<T> SubImage(size_t x, size_t y, size_t width,
|
||||
size_t height) {
|
||||
PANGO_ASSERT((x + width) <= w && (y + height) <= h);
|
||||
BASALT_HOST_DEVICE inline Image<T> SubImage(size_t x, size_t y, size_t width,
|
||||
size_t height) {
|
||||
BASALT_ASSERT((x + width) <= w && (y + height) <= h);
|
||||
return Image<T>(RowPtr(y) + x, width, height, pitch);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline Image<T> Row(int y) const {
|
||||
BASALT_HOST_DEVICE inline Image<T> Row(int y) const {
|
||||
return SubImage(0, y, w, 1);
|
||||
}
|
||||
|
||||
PANGO_HOST_DEVICE inline Image<T> Col(int x) const {
|
||||
BASALT_HOST_DEVICE inline Image<T> Col(int x) const {
|
||||
return SubImage(x, 0, 1, h);
|
||||
}
|
||||
|
||||
|
@@ -396,15 +405,15 @@ struct Image {
|
|||
//////////////////////////////////////////////////////
|
||||
|
||||
template <typename TRecast>
|
||||
PANGO_HOST_DEVICE inline Image<TRecast> Reinterpret() const {
|
||||
PANGO_ASSERT(sizeof(TRecast) == sizeof(T),
|
||||
"sizeof(TRecast) must match sizeof(T): % != %",
|
||||
sizeof(TRecast), sizeof(T));
|
||||
BASALT_HOST_DEVICE inline Image<TRecast> Reinterpret() const {
|
||||
BASALT_ASSERT_STREAM(sizeof(TRecast) == sizeof(T),
|
||||
"sizeof(TRecast) must match sizeof(T): "
|
||||
<< sizeof(TRecast) << " != " << sizeof(T));
|
||||
return UnsafeReinterpret<TRecast>();
|
||||
}
|
||||
|
||||
template <typename TRecast>
|
||||
PANGO_HOST_DEVICE inline Image<TRecast> UnsafeReinterpret() const {
|
||||
BASALT_HOST_DEVICE inline Image<TRecast> UnsafeReinterpret() const {
|
||||
return Image<TRecast>((TRecast*)ptr, w, h, pitch);
|
||||
}
|
||||
|
||||
|
@@ -444,7 +453,7 @@ struct Image {
|
|||
size_t w;
|
||||
size_t h;
|
||||
|
||||
PANGO_EXTENSION_IMAGE
|
||||
BASALT_EXTENSION_IMAGE
|
||||
};
|
||||
|
||||
template <class T>
|
||||
|
@@ -491,30 +500,15 @@ class ManagedImage : public Image<T> {
|
|||
img.ptr = nullptr;
|
||||
}
|
||||
|
||||
// Move constructor
|
||||
inline ManagedImage(pangolin::ManagedImage<T, Allocator>&& img) {
|
||||
*this = std::move(img);
|
||||
}
|
||||
|
||||
// Move assignment
|
||||
inline void operator=(pangolin::ManagedImage<T, Allocator>&& img) {
|
||||
Deallocate();
|
||||
Image<T>::pitch = img.pitch;
|
||||
Image<T>::ptr = img.ptr;
|
||||
Image<T>::w = img.w;
|
||||
Image<T>::h = img.h;
|
||||
img.ptr = nullptr;
|
||||
}
|
||||
|
||||
// Explicit copy constructor
|
||||
template <typename TOther>
|
||||
ManagedImage(const pangolin::CopyObject<TOther>& other) {
|
||||
ManagedImage(const CopyObject<TOther>& other) {
|
||||
CopyFrom(other.obj);
|
||||
}
|
||||
|
||||
// Explicit copy assignment
|
||||
template <typename TOther>
|
||||
void operator=(const pangolin::CopyObject<TOther>& other) {
|
||||
void operator=(const CopyObject<TOther>& other) {
|
||||
CopyFrom(other.obj);
|
||||
}
|
||||
|
||||
|
@@ -677,8 +671,6 @@ class ManagedImagePyr {
|
|||
return Eigen::Matrix<S, 2, 1>(x, y);
|
||||
}
|
||||
|
||||
inline pangolin::Image<T> toPangoImage() { return image.toPangoImage(); }
|
||||
|
||||
private:
|
||||
inline Image<T> lvl_internal(size_t lvl) {
|
||||
size_t x = (lvl == 0) ? 0 : orig_w;
|
||||
|
@@ -693,19 +685,4 @@ class ManagedImagePyr {
|
|||
ManagedImage<T> image;
|
||||
};
|
||||
|
||||
inline void rgb_to_gray(const pangolin::TypedImage& rgb,
|
||||
basalt::ManagedImage<uint8_t>& gray) {
|
||||
gray.Reinitialise(rgb.w, rgb.h);
|
||||
|
||||
for (size_t x = 0; x < rgb.w; x++) {
|
||||
for (size_t y = 0; y < rgb.h; y++) {
|
||||
double val = 0.2989 * (double)rgb(3 * x + 0, y) +
|
||||
0.5870 * (double)rgb(3 * x + 1, y) +
|
||||
0.1140 * (double)rgb(3 * x + 2, y);
|
||||
|
||||
gray(x, y) = val;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace basalt
|
||||
|
|
|
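The new rgb_to_gray helper above weights the channels with the usual 0.2989/0.5870/0.1140 luma coefficients. For reference, the per-pixel computation in isolation (hypothetical helper rgb_pixel_to_gray, assuming packed 8-bit RGB input):

#include <cstdint>

// Illustrative: convert one packed RGB8 pixel to an 8-bit gray value using the
// same channel weights as rgb_to_gray() above.
inline uint8_t rgb_pixel_to_gray(const uint8_t *rgb /* points at R, G, B */) {
  double val = 0.2989 * rgb[0] + 0.5870 * rgb[1] + 0.1140 * rgb[2];
  return static_cast<uint8_t>(val);
}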
@@ -40,8 +40,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
#include <Eigen/Dense>
|
||||
#include <sophus/se3.hpp>
|
||||
|
||||
#include <pangolin/image/managed_image.h>
|
||||
|
||||
#include <basalt/utils/image.h>
|
||||
#include <basalt/utils/sophus_utils.hpp>
|
||||
|
||||
|
|
|
@@ -0,0 +1,82 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import math
|
||||
import os
|
||||
|
||||
import numpy as np
|
||||
|
||||
dataset_path = sys.argv[1]
|
||||
|
||||
print dataset_path
|
||||
|
||||
timestamps = {}
|
||||
exposures = {}
|
||||
|
||||
|
||||
for sensor in ['cam0', 'cam1', 'imu0']:
|
||||
data = np.loadtxt(dataset_path + '/mav0/' + sensor + '/data.csv', usecols=[0], delimiter=',', dtype=np.int64)
|
||||
timestamps[sensor] = data
|
||||
|
||||
# check if dataset is OK...
|
||||
for key, value in timestamps.iteritems():
|
||||
times = value * 1e-9
|
||||
min_t = times.min()
|
||||
max_t = times.max()
|
||||
interval = max_t - min_t
|
||||
diff = times[1:] - times[:-1]
|
||||
print '=========================================='
|
||||
print 'sensor', key
|
||||
print 'min timestamp', min_t
|
||||
print 'max timestamp', max_t
|
||||
print 'interval', interval
|
||||
print 'hz', times.shape[0]/interval
|
||||
print 'min time between consecutive msgs', diff.min()
|
||||
print 'max time between consecutive msgs', diff.max()
|
||||
for i, d in enumerate(diff):
|
||||
# Note: 0.001 is just a hacky heuristic, since we have nothing faster than 1000Hz. Should maybe be topic-specific.
|
||||
if d < 0.001:
|
||||
print("ERROR: Difference on consecutive measurements too small: {} - {} = {}".format(times[i+1], times[i], d))
|
||||
|
||||
# check if we have all images for timestamps
|
||||
timestamp_to_topic = {}
|
||||
|
||||
for key, value in timestamps.iteritems():
|
||||
if not key.startswith('cam'): continue
|
||||
for v in value:
|
||||
if v not in timestamp_to_topic:
|
||||
timestamp_to_topic[v] = list()
|
||||
timestamp_to_topic[v].append(key)
|
||||
|
||||
for key in timestamp_to_topic.keys():
|
||||
if len(timestamp_to_topic[key]) != 2:
|
||||
print 'timestamp', key, 'has topics', timestamp_to_topic[key]
|
||||
|
||||
|
||||
# check image data.
|
||||
img_extensions = ['.png', '.jpg', '.webp']
|
||||
for key, value in timestamps.iteritems():
|
||||
if not key.startswith('cam'): continue
|
||||
for v in value:
|
||||
path = dataset_path + '/mav0/' + key + '/data/' + str(v)
|
||||
img_exists = False
|
||||
for e in img_extensions:
|
||||
if os.path.exists(dataset_path + '/mav0/' + key + '/data/' + str(v) + e):
|
||||
img_exists = True
|
||||
|
||||
if not img_exists:
|
||||
print('No image data for ' + key + ' at timestamp ' + str(v))
|
||||
|
||||
exposure_file = dataset_path + '/mav0/' + key + '/exposure.csv'
|
||||
if not os.path.exists(exposure_file):
|
||||
print('No exposure data for ' + key)
|
||||
continue
|
||||
|
||||
exposure_data = np.loadtxt(exposure_file, delimiter=',', dtype=np.int64)
|
||||
for v in value:
|
||||
idx = np.searchsorted(exposure_data[:, 0], v)
|
||||
if exposure_data[idx, 0] != v:
|
||||
print('No exposure data for ' + key + ' at timestamp ' + str(v))
|
||||
|
||||
|
||||
|
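The validation script above uses np.searchsorted to confirm that every camera timestamp has a matching exposure entry. The same membership test expressed in C++ (hypothetical helper has_exposure_entry, assuming a sorted timestamp vector):

#include <algorithm>
#include <cstdint>
#include <vector>

// Illustrative analogue of the np.searchsorted check: true if t_ns occurs in
// the sorted list of exposure timestamps.
bool has_exposure_entry(const std::vector<int64_t> &sorted_exposure_ts,
                        int64_t t_ns) {
  auto it = std::lower_bound(sorted_exposure_ts.begin(),
                             sorted_exposure_ts.end(), t_ns);
  return it != sorted_exposure_ts.end() && *it == t_ns;
}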
|
@@ -180,6 +180,11 @@ void CamCalib::computeVign() {
|
|||
it->second.corners_proj_success[k]) {
|
||||
double val = img_vec[j].img->interp(pos);
|
||||
val /= std::numeric_limits<uint16_t>::max();
|
||||
|
||||
if (img_vec[j].exposure > 0) {
|
||||
val *= 0.001 / img_vec[j].exposure; // bring to common exposure
|
||||
}
|
||||
|
||||
rv[k][2] = val;
|
||||
} else {
|
||||
rv[k][2] = -1;
|
||||
|
|
|
@@ -35,6 +35,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
||||
#include <basalt/calibration/vignette.h>
|
||||
|
||||
#include <opencv2/highgui.hpp>
|
||||
|
||||
namespace basalt {
|
||||
|
||||
VignetteEstimator::VignetteEstimator(
|
||||
|
@@ -280,8 +282,7 @@ void VignetteEstimator::compute_data_log(pangolin::DataLog &vign_data_log) {
|
|||
|
||||
void VignetteEstimator::save_vign_png(const std::string &path) {
|
||||
for (size_t k = 0; k < vio_dataset->get_num_cams(); k++) {
|
||||
pangolin::ManagedImage<uint16_t> vign_img(resolutions[k][0],
|
||||
resolutions[k][1]);
|
||||
ManagedImage<uint16_t> vign_img(resolutions[k][0], resolutions[k][1]);
|
||||
vign_img.Fill(0);
|
||||
|
||||
Eigen::Vector2d oc = optical_centers[k];
|
||||
|
@@ -298,9 +299,12 @@ void VignetteEstimator::save_vign_png(const std::string &path) {
|
|||
}
|
||||
}
|
||||
|
||||
pangolin::SaveImage(vign_img.UnsafeReinterpret<uint8_t>(),
|
||||
pangolin::PixelFormatFromString("GRAY16LE"),
|
||||
path + "/vingette_" + std::to_string(k) + ".png");
|
||||
// pangolin::SaveImage(vign_img.UnsafeReinterpret<uint8_t>(),
|
||||
// pangolin::PixelFormatFromString("GRAY16LE"),
|
||||
// path + "/vingette_" + std::to_string(k) + ".png");
|
||||
|
||||
cv::Mat img(vign_img.h, vign_img.w, CV_16U, vign_img.ptr);
|
||||
cv::imwrite(path + "/vingette_" + std::to_string(k) + ".png", img);
|
||||
}
|
||||
}
|
||||
} // namespace basalt
|
||||
|
|
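save_vign_png now wraps the existing 16-bit buffer in a cv::Mat header and writes it with cv::imwrite, replacing pangolin::SaveImage. A standalone sketch of that pattern (hypothetical helper write_gray16_png; note the Mat only references the buffer, so it must outlive the call; assumes OpenCV 3+ with imgcodecs):

#include <cstdint>
#include <string>
#include <opencv2/imgcodecs.hpp>

// Illustrative: write a 16-bit grayscale buffer to PNG without copying pixels.
void write_gray16_png(const std::string &path, const uint16_t *data, int width,
                      int height) {
  // The Mat header only references `data`; no pixel data is copied here.
  cv::Mat img(height, width, CV_16U, const_cast<uint16_t *>(data));
  cv::imwrite(path, img);  // PNG supports 16-bit single-channel images
}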
|
@@ -244,14 +244,15 @@ int main(int argc, char** argv) {
|
|||
|
||||
for (size_t cam_id = 0; cam_id < calib.intrinsics.size(); cam_id++) {
|
||||
if (img_vec[cam_id].img.get()) {
|
||||
auto img = img_vec[cam_id].img->toPangoImage();
|
||||
auto img = img_vec[cam_id].img;
|
||||
|
||||
pangolin::GlPixFormat fmt;
|
||||
fmt.glformat = GL_LUMINANCE;
|
||||
fmt.gltype = GL_UNSIGNED_SHORT;
|
||||
fmt.scalable_internal_format = GL_LUMINANCE16;
|
||||
|
||||
img_view[cam_id]->SetImage(img.ptr, img.w, img.h, img.pitch, fmt);
|
||||
img_view[cam_id]->SetImage(img->ptr, img->w, img->h, img->pitch,
|
||||
fmt);
|
||||
} else {
|
||||
img_view[cam_id]->Clear();
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,541 @@
|
|||
#include <atomic>
|
||||
#include <cstring>
|
||||
#include <fstream>
|
||||
#include <iomanip>
|
||||
#include <thread>
|
||||
|
||||
#include <librealsense2/rs.hpp>
|
||||
|
||||
#include <pangolin/display/image_view.h>
|
||||
#include <pangolin/gl/gldraw.h>
|
||||
#include <pangolin/image/image.h>
|
||||
#include <pangolin/image/image_io.h>
|
||||
#include <pangolin/image/typed_image.h>
|
||||
#include <pangolin/pangolin.h>
|
||||
|
||||
#include <opencv2/core.hpp>
|
||||
#include <opencv2/highgui.hpp>
|
||||
|
||||
#include <tbb/concurrent_queue.h>
|
||||
|
||||
#include <CLI/CLI.hpp>
|
||||
#include <basalt/calibration/calibration.hpp>
|
||||
#include <cereal/archives/json.hpp>
|
||||
#include <experimental/filesystem>
|
||||
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
constexpr int NUM_CAMS = 2;
|
||||
constexpr int UI_WIDTH = 200;
|
||||
|
||||
pangolin::DataLog imu_log;
|
||||
|
||||
pangolin::Var<int> webp_quality("ui.webp_quality", 90, 0, 101);
|
||||
pangolin::Var<int> skip_frames("ui.skip_frames", 1, 1, 10);
|
||||
|
||||
struct ImageData {
|
||||
using Ptr = std::shared_ptr<ImageData>;
|
||||
|
||||
int cam_id;
|
||||
double exposure_time;
|
||||
int64_t timestamp;
|
||||
cv::Mat image;
|
||||
};
|
||||
|
||||
struct RsIMUData {
|
||||
double timestamp;
|
||||
Eigen::Vector3d data;
|
||||
|
||||
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
|
||||
};
|
||||
|
||||
ImageData::Ptr last_images[NUM_CAMS];
|
||||
tbb::concurrent_bounded_queue<ImageData::Ptr> image_save_queue;
|
||||
float exposure;
|
||||
std::string dataset_dir;
|
||||
std::string dataset_folder;
|
||||
std::string result_dir;
|
||||
|
||||
std::atomic<bool> stop_workers;
|
||||
std::atomic<bool> record;
|
||||
|
||||
std::ofstream cam_data[NUM_CAMS], exposure_data[NUM_CAMS], imu0_data;
|
||||
|
||||
std::string get_date();
|
||||
|
||||
void image_save_worker() {
|
||||
ImageData::Ptr img;
|
||||
|
||||
while (!stop_workers) {
|
||||
if (image_save_queue.try_pop(img)) {
|
||||
std::string filename = dataset_folder + "mav0/cam" +
|
||||
std::to_string(img->cam_id) + "/data/" +
|
||||
std::to_string(img->timestamp) + ".webp";
|
||||
|
||||
std::vector<int> compression_params = {cv::IMWRITE_WEBP_QUALITY,
|
||||
webp_quality};
|
||||
|
||||
cv::imwrite(filename, img->image, compression_params);
|
||||
} else {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(10));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void toggle_recording() {
|
||||
record = !record;
|
||||
if (record) {
|
||||
dataset_folder = dataset_dir + "dataset_" + get_date() + "/";
|
||||
fs::create_directory(dataset_folder);
|
||||
fs::create_directory(dataset_folder + "mav0/");
|
||||
fs::create_directory(dataset_folder + "mav0/cam0/");
|
||||
fs::create_directory(dataset_folder + "mav0/cam0/data/");
|
||||
fs::create_directory(dataset_folder + "mav0/cam1/");
|
||||
fs::create_directory(dataset_folder + "mav0/cam1/data/");
|
||||
fs::create_directory(dataset_folder + "mav0/imu0/");
|
||||
|
||||
cam_data[0].open(dataset_folder + "mav0/cam0/data.csv");
|
||||
cam_data[1].open(dataset_folder + "mav0/cam1/data.csv");
|
||||
exposure_data[0].open(dataset_folder + "mav0/cam0/exposure.csv");
|
||||
exposure_data[1].open(dataset_folder + "mav0/cam1/exposure.csv");
|
||||
imu0_data.open(dataset_folder + "mav0/imu0/data.csv");
|
||||
|
||||
cam_data[0] << "#timestamp [ns], filename\n";
|
||||
cam_data[1] << "#timestamp [ns], filename\n";
|
||||
exposure_data[0] << "#timestamp [ns], exposure time[ns]\n";
|
||||
exposure_data[1] << "#timestamp [ns], exposure time[ns]\n";
|
||||
imu0_data << "#timestamp [ns],w_RS_S_x [rad s^-1],w_RS_S_y [rad "
|
||||
"s^-1],w_RS_S_z [rad s^-1],a_RS_S_x [m s^-2],a_RS_S_y "
|
||||
"[m s^-2],a_RS_S_z [m s^-2]\n";
|
||||
|
||||
std::cout << "Started recording dataset in " << dataset_folder << std::endl;
|
||||
|
||||
} else {
|
||||
cam_data[0].close();
|
||||
cam_data[1].close();
|
||||
exposure_data[0].close();
|
||||
exposure_data[1].close();
|
||||
imu0_data.close();
|
||||
|
||||
std::cout << "Stopped recording dataset in " << dataset_folder << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
void export_device_calibration(rs2::pipeline_profile &profile,
|
||||
const std::string &out_path) {
|
||||
using Scalar = double;
|
||||
|
||||
std::shared_ptr<basalt::Calibration<Scalar>> calib;
|
||||
calib.reset(new basalt::Calibration<Scalar>);
|
||||
|
||||
auto accel_stream = profile.get_stream(RS2_STREAM_ACCEL);
|
||||
auto gyro_stream = profile.get_stream(RS2_STREAM_GYRO);
|
||||
auto cam0_stream = profile.get_stream(RS2_STREAM_FISHEYE, 1);
|
||||
auto cam1_stream = profile.get_stream(RS2_STREAM_FISHEYE, 2);
|
||||
|
||||
// get gyro extrinsics
|
||||
if (auto gyro = gyro_stream.as<rs2::motion_stream_profile>()) {
|
||||
// TODO: gyro
|
||||
rs2_motion_device_intrinsic intrinsics = gyro.get_motion_intrinsics();
|
||||
|
||||
std::cout << " Scale X cross axis cross axis Bias X \n";
|
||||
std::cout << " cross axis Scale Y cross axis Bias Y \n";
|
||||
std::cout << " cross axis cross axis Scale Z Bias Z \n";
|
||||
for (auto &i : intrinsics.data) {
|
||||
for (int j = 0; j < 4; j++) {
|
||||
std::cout << i[j] << " ";
|
||||
}
|
||||
std::cout << "\n";
|
||||
}
|
||||
|
||||
std::cout << "Variance of noise for X, Y, Z axis \n";
|
||||
for (float noise_variance : intrinsics.noise_variances)
|
||||
std::cout << noise_variance << " ";
|
||||
std::cout << "\n";
|
||||
|
||||
std::cout << "Variance of bias for X, Y, Z axis \n";
|
||||
for (float bias_variance : intrinsics.bias_variances)
|
||||
std::cout << bias_variance << " ";
|
||||
std::cout << "\n";
|
||||
} else {
|
||||
throw std::exception();
|
||||
}
|
||||
|
||||
// get accel extrinsics
|
||||
if (auto gyro = accel_stream.as<rs2::motion_stream_profile>()) {
|
||||
// TODO: accel
|
||||
// rs2_motion_device_intrinsic intrinsics = accel.get_motion_intrinsics();
|
||||
} else {
|
||||
throw std::exception();
|
||||
}
|
||||
|
||||
// get camera ex-/intrinsics
|
||||
for (const auto &cam_stream : {cam0_stream, cam1_stream}) {
|
||||
if (auto cam = cam_stream.as<rs2::video_stream_profile>()) {
|
||||
// extrinsics
|
||||
rs2_extrinsics ex = cam.get_extrinsics_to(gyro_stream);
|
||||
Eigen::Matrix3f rot = Eigen::Map<Eigen::Matrix3f>(ex.rotation);
|
||||
Eigen::Vector3f trans = Eigen::Map<Eigen::Vector3f>(ex.translation);
|
||||
|
||||
Eigen::Quaterniond q(rot.cast<double>());
|
||||
basalt::Calibration<Scalar>::SE3 T_i_c(q, trans.cast<double>());
|
||||
|
||||
std::cout << "T_i_c\n" << T_i_c.matrix() << std::endl;
|
||||
|
||||
calib->T_i_c.push_back(T_i_c);
|
||||
|
||||
// get resolution
|
||||
Eigen::Vector2i resolution;
|
||||
resolution << cam.width(), cam.height();
|
||||
calib->resolution.push_back(resolution);
|
||||
|
||||
// intrinsics
|
||||
rs2_intrinsics intrinsics = cam.get_intrinsics();
|
||||
basalt::KannalaBrandtCamera4<Scalar>::VecN params;
|
||||
params << intrinsics.fx, intrinsics.fy, intrinsics.ppx, intrinsics.ppy,
|
||||
intrinsics.coeffs[0], intrinsics.coeffs[1], intrinsics.coeffs[2],
|
||||
intrinsics.coeffs[3];
|
||||
|
||||
std::cout << "params: " << params.transpose() << std::endl;
|
||||
|
||||
basalt::GenericCamera<Scalar> camera;
|
||||
basalt::KannalaBrandtCamera4 kannala_brandt(params);
|
||||
camera.variant = kannala_brandt;
|
||||
|
||||
calib->intrinsics.push_back(camera);
|
||||
} else {
|
||||
throw std::exception(); // TODO: better exception
|
||||
}
|
||||
}
|
||||
|
||||
// serialize and store calibration
|
||||
// std::ofstream os(out_path + "calibration.json");
|
||||
// cereal::JSONOutputArchive archive(os);
|
||||
|
||||
// archive(*calib);
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
CLI::App app{"Record RealSense T265 Data"};
|
||||
|
||||
app.add_option("--dataset-dir", dataset_dir, "Path to dataset");
|
||||
app.add_option("--exposure", exposure,
|
||||
"If set will enable manual exposure, value in microseconds.");
|
||||
app.add_option("--result-dir", result_dir,
|
||||
"If set will enable manual exposure, value in microseconds.");
|
||||
|
||||
try {
|
||||
app.parse(argc, argv);
|
||||
} catch (const CLI::ParseError &e) {
|
||||
return app.exit(e);
|
||||
}
|
||||
|
||||
bool show_gui = true;
|
||||
|
||||
image_save_queue.set_capacity(5000);
|
||||
|
||||
stop_workers = false;
|
||||
std::vector<std::thread> worker_threads;
|
||||
for (int i = 0; i < 8; i++) {
|
||||
worker_threads.emplace_back(image_save_worker);
|
||||
}
|
||||
|
||||
std::string color_mode;
|
||||
|
||||
// realsense
|
||||
rs2::log_to_console(RS2_LOG_SEVERITY_ERROR);
|
||||
rs2::context ctx;
|
||||
rs2::pipeline pipe(ctx);
|
||||
rs2::config cfg;
|
||||
|
||||
// Add streams of gyro and accelerometer to configuration
|
||||
cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
|
||||
cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
|
||||
cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
|
||||
cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);
|
||||
|
||||
// Using the device_hub we can block the program until a device connects
|
||||
// rs2::device_hub device_hub(ctx);
|
||||
|
||||
auto devices = ctx.query_devices();
|
||||
if (devices.size() == 0) {
|
||||
std::abort();
|
||||
}
|
||||
auto device = devices[0];
|
||||
std::cout << "Device " << device.get_info(RS2_CAMERA_INFO_NAME)
|
||||
<< " connected" << std::endl;
|
||||
auto sens = device.query_sensors()[0];
|
||||
|
||||
if (exposure > 0) {
|
||||
std::cout << "Setting exposure to: " << exposure << " microseconds"
|
||||
<< std::endl;
|
||||
sens.set_option(rs2_option::RS2_OPTION_ENABLE_AUTO_EXPOSURE, 0);
|
||||
sens.set_option(rs2_option::RS2_OPTION_EXPOSURE, (float)exposure);
|
||||
}
|
||||
|
||||
std::mutex data_mutex;
|
||||
|
||||
rs2::motion_frame last_gyro_meas = rs2::motion_frame(rs2::frame());
|
||||
Eigen::deque<RsIMUData> gyro_data_queue;
|
||||
|
||||
std::shared_ptr<RsIMUData> prev_accel_data;
|
||||
|
||||
int processed_frame = 0;
|
||||
|
||||
auto callback = [&](const rs2::frame &frame) {
|
||||
std::lock_guard<std::mutex> lock(data_mutex);
|
||||
|
||||
if (auto fp = frame.as<rs2::motion_frame>()) {
|
||||
auto motion = frame.as<rs2::motion_frame>();
|
||||
|
||||
if (motion && motion.get_profile().stream_type() == RS2_STREAM_GYRO &&
|
||||
motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
|
||||
RsIMUData d;
|
||||
d.timestamp = motion.get_timestamp();
|
||||
d.data << motion.get_motion_data().x, motion.get_motion_data().y,
|
||||
motion.get_motion_data().z;
|
||||
|
||||
gyro_data_queue.emplace_back(d);
|
||||
}
|
||||
|
||||
if (motion && motion.get_profile().stream_type() == RS2_STREAM_ACCEL &&
|
||||
motion.get_profile().format() == RS2_FORMAT_MOTION_XYZ32F) {
|
||||
RsIMUData d;
|
||||
d.timestamp = motion.get_timestamp();
|
||||
d.data << motion.get_motion_data().x, motion.get_motion_data().y,
|
||||
motion.get_motion_data().z;
|
||||
|
||||
if (!prev_accel_data.get()) {
|
||||
prev_accel_data.reset(new RsIMUData(d));
|
||||
} else {
|
||||
BASALT_ASSERT(d.timestamp > prev_accel_data->timestamp);
|
||||
|
||||
while (!gyro_data_queue.empty() && gyro_data_queue.front().timestamp <
|
||||
prev_accel_data->timestamp) {
|
||||
std::cout << "Skipping gyro data. Timestamp before the first accel "
|
||||
"measurement.";
|
||||
gyro_data_queue.pop_front();
|
||||
}
|
||||
|
||||
while (!gyro_data_queue.empty() &&
|
||||
gyro_data_queue.front().timestamp < d.timestamp) {
|
||||
RsIMUData gyro_data = gyro_data_queue.front();
|
||||
gyro_data_queue.pop_front();
|
||||
|
||||
double w0 = (d.timestamp - gyro_data.timestamp) /
|
||||
(d.timestamp - prev_accel_data->timestamp);
|
||||
|
||||
double w1 = (gyro_data.timestamp - prev_accel_data->timestamp) /
|
||||
(d.timestamp - prev_accel_data->timestamp);
|
||||
|
||||
Eigen::Vector3d accel_interpolated =
|
||||
w0 * prev_accel_data->data + w1 * d.data;
|
||||
|
||||
if (record) {
|
||||
int64_t timestamp = gyro_data.timestamp * 1e6;
|
||||
imu0_data << timestamp << "," << gyro_data.data[0] << ","
|
||||
<< gyro_data.data[1] << "," << gyro_data.data[2] << ","
|
||||
<< accel_interpolated[0] << "," << accel_interpolated[1]
|
||||
<< "," << accel_interpolated[2] << "\n";
|
||||
}
|
||||
|
||||
imu_log.Log(accel_interpolated[0], accel_interpolated[1],
|
||||
accel_interpolated[2]);
|
||||
}
|
||||
|
||||
prev_accel_data.reset(new RsIMUData(d));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (auto fs = frame.as<rs2::frameset>()) {
|
||||
processed_frame++;
|
||||
if (processed_frame % int(skip_frames) != 0) return;
|
||||
|
||||
for (int i = 0; i < NUM_CAMS; i++) {
|
||||
auto f = fs[i];
|
||||
if (!f.as<rs2::video_frame>()) {
|
||||
std::cout << "Weird Frame, skipping" << std::endl;
|
||||
continue;
|
||||
}
|
||||
auto vf = f.as<rs2::video_frame>();
|
||||
|
||||
last_images[i].reset(new ImageData);
|
||||
last_images[i]->image = cv::Mat(vf.get_height(), vf.get_width(), CV_8U);
|
||||
std::memcpy(
|
||||
last_images[i]->image.ptr(), vf.get_data(),
|
||||
vf.get_width() * vf.get_height() * vf.get_bytes_per_pixel());
|
||||
|
||||
last_images[i]->exposure_time =
|
||||
vf.get_frame_metadata(RS2_FRAME_METADATA_ACTUAL_EXPOSURE);
|
||||
|
||||
last_images[i]->timestamp = vf.get_timestamp() * 1e6;
|
||||
last_images[i]->cam_id = i;
|
||||
|
||||
if (record) {
|
||||
image_save_queue.push(last_images[i]);
|
||||
|
||||
cam_data[i] << last_images[i]->timestamp << ","
|
||||
<< last_images[i]->timestamp << ".webp" << std::endl;
|
||||
|
||||
exposure_data[i] << last_images[i]->timestamp << ","
|
||||
<< int64_t(vf.get_frame_metadata(
|
||||
RS2_FRAME_METADATA_ACTUAL_EXPOSURE) *
|
||||
1e3)
|
||||
<< std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Start streaming through the callback
|
||||
rs2::pipeline_profile profiles = pipe.start(cfg, callback);
|
||||
|
||||
{
|
||||
auto sensors = profiles.get_device().query_sensors();
|
||||
|
||||
for (auto &s : sensors) {
|
||||
std::cout << "Sensor " << s.get_info(RS2_CAMERA_INFO_NAME)
|
||||
<< ". Supported options:" << std::endl;
|
||||
|
||||
for (const auto &o : s.get_supported_options()) {
|
||||
std::cout << "\t" << rs2_option_to_string(o) << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export_device_calibration(profiles, result_dir);
|
||||
|
||||
if (show_gui) {
|
||||
pangolin::CreateWindowAndBind("Record RealSense T265", 1200, 800);
|
||||
|
||||
pangolin::Var<std::function<void(void)>> record_btn("ui.record",
|
||||
toggle_recording);
|
||||
|
||||
std::atomic<int64_t> record_t_ns;
|
||||
record_t_ns = 0;
|
||||
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
|
||||
pangolin::View &img_view_display =
|
||||
pangolin::CreateDisplay()
|
||||
.SetBounds(0.4, 1.0, pangolin::Attach::Pix(UI_WIDTH), 1.0)
|
||||
.SetLayout(pangolin::LayoutEqual);
|
||||
|
||||
pangolin::View &plot_display = pangolin::CreateDisplay().SetBounds(
|
||||
0.0, 0.4, pangolin::Attach::Pix(UI_WIDTH), 1.0);
|
||||
|
||||
pangolin::CreatePanel("ui").SetBounds(0.0, 1.0, 0.0,
|
||||
pangolin::Attach::Pix(UI_WIDTH));
|
||||
|
||||
std::vector<std::shared_ptr<pangolin::ImageView>> img_view;
|
||||
while (img_view.size() < NUM_CAMS) {
|
||||
int idx = img_view.size();
|
||||
std::shared_ptr<pangolin::ImageView> iv(new pangolin::ImageView);
|
||||
|
||||
iv->extern_draw_function = [&, idx](pangolin::View &v) {
|
||||
glLineWidth(1.0);
|
||||
glColor3f(1.0, 0.0, 0.0); // red
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
|
||||
|
||||
if (last_images[idx].get())
|
||||
pangolin::GlFont::I()
|
||||
.Text("Exposure: %.3f ms.",
|
||||
last_images[idx]->exposure_time / 1000.0)
|
||||
.Draw(30, 30);
|
||||
|
||||
if (idx == 0) {
|
||||
pangolin::GlFont::I()
|
||||
.Text("Queue: %d.", image_save_queue.size())
|
||||
.Draw(30, 60);
|
||||
}
|
||||
|
||||
if (idx == 0 && record) {
|
||||
pangolin::GlFont::I().Text("Recording").Draw(30, 90);
|
||||
}
|
||||
};
|
||||
|
||||
iv->OnSelectionCallback =
|
||||
[&](pangolin::ImageView::OnSelectionEventData o) {
|
||||
int64_t curr_t_ns = std::chrono::high_resolution_clock::now()
|
||||
.time_since_epoch()
|
||||
.count();
|
||||
if (std::abs(record_t_ns - curr_t_ns) > int64_t(2e9)) {
|
||||
toggle_recording();
|
||||
record_t_ns = curr_t_ns;
|
||||
}
|
||||
};
|
||||
|
||||
img_view.push_back(iv);
|
||||
img_view_display.AddDisplay(*iv);
|
||||
}
|
||||
|
||||
imu_log.Clear();
|
||||
|
||||
std::vector<std::string> labels;
|
||||
labels.push_back(std::string("accel x"));
|
||||
labels.push_back(std::string("accel y"));
|
||||
labels.push_back(std::string("accel z"));
|
||||
imu_log.SetLabels(labels);
|
||||
|
||||
pangolin::Plotter plotter(&imu_log, 0.0f, 2000.0f, -15.0f, 15.0f, 0.1f,
|
||||
0.1f);
|
||||
plotter.SetBounds(0.0, 1.0, 0.0, 1.0);
|
||||
plotter.Track("$i");
|
||||
|
||||
plot_display.AddDisplay(plotter);
|
||||
|
||||
plotter.ClearSeries();
|
||||
plotter.AddSeries("$i", "$0", pangolin::DrawingModeLine,
|
||||
pangolin::Colour::Red(), "accel x");
|
||||
plotter.AddSeries("$i", "$1", pangolin::DrawingModeLine,
|
||||
pangolin::Colour::Green(), "accel y");
|
||||
plotter.AddSeries("$i", "$2", pangolin::DrawingModeLine,
|
||||
pangolin::Colour::Blue(), "accel z");
|
||||
|
||||
while (!pangolin::ShouldQuit()) {
|
||||
{
|
||||
pangolin::GlPixFormat fmt;
|
||||
fmt.glformat = GL_LUMINANCE;
|
||||
fmt.gltype = GL_UNSIGNED_BYTE;
|
||||
fmt.scalable_internal_format = GL_LUMINANCE8;
|
||||
|
||||
for (size_t cam_id = 0; cam_id < NUM_CAMS; cam_id++) {
|
||||
if (last_images[cam_id].get())
|
||||
img_view[cam_id]->SetImage(last_images[cam_id]->image.ptr(),
|
||||
last_images[cam_id]->image.cols,
|
||||
last_images[cam_id]->image.rows,
|
||||
last_images[cam_id]->image.step, fmt);
|
||||
}
|
||||
}
|
||||
|
||||
pangolin::FinishFrame();
|
||||
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(15));
|
||||
}
|
||||
}
|
||||
|
||||
stop_workers = true;
|
||||
for (auto &t : worker_threads) {
|
||||
t.join();
|
||||
}
|
||||
|
||||
return EXIT_SUCCESS;
|
||||
}
|
||||
|
||||
std::string get_date() {
|
||||
constexpr int MAX_DATE = 64;
|
||||
time_t now;
|
||||
char the_date[MAX_DATE];
|
||||
|
||||
the_date[0] = '\0';
|
||||
|
||||
now = time(NULL);
|
||||
|
||||
if (now != -1) {
|
||||
strftime(the_date, MAX_DATE, "%Y_%m_%d_%H_%M_%S", gmtime(&now));
|
||||
}
|
||||
|
||||
return std::string(the_date);
|
||||
}
|
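Two implementation notes on the new rs_t265_record.cpp above. First, image saving is decoupled from the camera callback through a bounded TBB queue drained by a pool of worker threads, so webp encoding cannot stall frame delivery. A reduced sketch of that producer/consumer structure (hypothetical names, not the recorder's actual types):

#include <atomic>
#include <chrono>
#include <string>
#include <thread>
#include <vector>

#include <tbb/concurrent_queue.h>

// Illustrative reduction of the recorder's save pipeline: the callback pushes
// work items, worker threads pop and process them until told to stop.
tbb::concurrent_bounded_queue<std::string> work_queue;
std::atomic<bool> stop_flag{false};

void worker() {
  std::string item;
  while (!stop_flag) {
    if (work_queue.try_pop(item)) {
      // ... encode / write `item` here ...
    } else {
      std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }
  }
}

int main() {
  work_queue.set_capacity(5000);  // bound memory use, as the recorder does
  std::vector<std::thread> workers;
  for (int i = 0; i < 8; i++) workers.emplace_back(worker);

  work_queue.push("frame_0.webp");  // producer side (the camera callback)

  stop_flag = true;
  for (auto &t : workers) t.join();
}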
|
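Second, the IMU callback emits one combined sample per gyro measurement by linearly interpolating the slower accelerometer stream between the two surrounding accel samples. The weights used there reduce to the following sketch (hypothetical helper interpolate_accel):

#include <Eigen/Dense>

// Illustrative: interpolate an accel sample at gyro time t, given the accel
// samples a0 at t0 and a1 at t1 with t0 <= t <= t1, using the same weights as
// the callback above.
Eigen::Vector3d interpolate_accel(double t, double t0, const Eigen::Vector3d &a0,
                                  double t1, const Eigen::Vector3d &a1) {
  const double w0 = (t1 - t) / (t1 - t0);  // weight of the older accel sample
  const double w1 = (t - t0) / (t1 - t0);  // weight of the newer accel sample
  return w0 * a0 + w1 * a1;
}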
@@ -20,6 +20,6 @@ include_directories(../basalt-headers/thirdparty/Sophus)
 add_library(apriltag STATIC ${APRILTAG_SRCS} src/apriltag.cpp)

 target_include_directories(apriltag PUBLIC include)
-target_link_libraries(apriltag PUBLIC ${OpenCV_LIBS} pangolin)
+target_link_libraries(apriltag PUBLIC ${OpenCV_LIBS})
