diff --git a/CMakeLists.txt b/CMakeLists.txt index 9093fba..a7b041f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -97,7 +97,7 @@ set(BASALT_CXX_FLAGS "-Wall -Wextra -Werror -Wno-error=unused-parameter -ftempla # clang-specific compile flags if(CMAKE_CXX_COMPILER_ID STREQUAL "Clang" OR CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang") - set(BASALT_CXX_FLAGS "${BASALT_CXX_FLAGS} -Wno-exceptions -fcolor-diagnostics -Wno-error=deprecated-declarations") + set(BASALT_CXX_FLAGS "${BASALT_CXX_FLAGS} -Wno-exceptions -fcolor-diagnostics -Wno-error=deprecated-declarations -Wno-error=defaulted-function-deleted") else() set(BASALT_CXX_FLAGS "${BASALT_CXX_FLAGS} -Wno-error=maybe-uninitialized") endif() diff --git a/README.md b/README.md index 60e773a..527e638 100644 --- a/README.md +++ b/README.md @@ -57,6 +57,7 @@ NOTE: It is possible to compile the code on Ubuntu 16.04, but you need to instal ## Usage * [Camera, IMU and Mocap calibration. (TUM-VI, Euroc, UZH-FPV and Kalibr datasets)](doc/Calibration.md) +* [Tutorial on Camera-IMU and Motion capture calibration with Realsense T265.](doc/Realsense.md) * [Visual-inertial odometry and mapping. (TUM-VI and Euroc datasets)](doc/VioMapping.md) * [Visual odometry (no IMU). (KITTI dataset)](doc/Vo.md) * [Simulation tools to test different components of the system.](doc/Simulation.md) diff --git a/doc/Realsense.md b/doc/Realsense.md new file mode 100644 index 0000000..8d141c3 --- /dev/null +++ b/doc/Realsense.md @@ -0,0 +1,157 @@ +# Tutorial on Camera-IMU and Motion Capture Calibration with Realsense T265 + +![Realsense](/doc/img/realsense_setup.jpg) + +In this tutorial we explain how to perform photometric and geometric calibration of the multi-camera setup and then calibrate the transformations between cameras, IMU and the motion capture marker setup. To make sure the calibration is successful we recommend rigidly attaching markers to the camera as shown on the figure above. 
+ +## Dataset +We provide a set of example datasets for the calibration. Even if you plan to record your own datasets (covered in the next section), we recommend downloading and trying the provided examples: +``` +mkdir ~/t265_calib_data +cd ~/t265_calib_data +wget http://vision.in.tum.de/tumvi/t265/response_calib.zip +wget http://vision.in.tum.de/tumvi/t265/cam_calib.zip +wget http://vision.in.tum.de/tumvi/t265/imu_calib.zip +wget http://vision.in.tum.de/tumvi/t265/sequence0.zip +unzip response_calib +unzip cam_calib +unzip imu_calib +unzip sequence0 +``` + +## Recording Own Dataset +You can record your own sequences using the `basalt_rs_t265_record` executable: +``` +basalt_rs_t265_record --dataset-path ~/t265_calib_data/ --manual-exposure +``` +* `--dataset-path` specifies the location where the recorded dataset will be stored. In this case it will be stored in `~/t265_calib_data//`. +* `--manual-exposure` disables the autoexposure. In this tutorial the autoexposure is disabled for all calibration sequences, but for the VIO sequence (sequence0) we enable it. + +![t265_record](/doc/img/t265_record.png) + +The GUI elements have the following meaning: +* `webp_quality` compression quality. The highest value (101) means lossless compression. For photometric calibration it is important not to have any compression artifacts, so we record these calibration sequences with lossless compression. +* `skip_frames` reduces the framerate of the recorded dataset by skipping frames. +* `exposure` controls the exposure time of the cameras. +* `record` starts/stops the recording of the dataset. If you run the system on a slow PC pay attention to the number of messages in the queue. If it goes beyond the limit the recorder will start dropping frames. +* `export_calibration` exports factory calibration in the basalt format. + +After recording the dataset it is a good practice to verify the quality of the dataset. 
You can do this by running: +``` +basalt_verify_dataset.py -d ~/t265_calib_data// +``` +It will report the actual frequencies of the recorded sensor messages and warn you if any files with image data are missing. + +Every sequence required for the calibration should have certain properties to enable successful calibration. Pay attention to the **Important for recording the dataset** subsections and inspect the provided examples before recording your own sequences. + +## Response calibration +In this project we assume a camera has a linear response function (intensity of the pixel is linearly proportional to the amount of light captured by the pixel). In this section we will verify this for the Realsense T265 cameras. We will need to record a static scene with different exposure times. + +**Important for recording the dataset:** +* Keep the camera static and make sure that nothing in the scene moves during the recording. +* Move `webp_quality` slider to the highest value to enable lossless compression. +* Optionally set the `skip_frames` slider to 3-5 to speed up the dataset recording. +* Start recording and slowly move the exposure slider up and down. Stop recording. +* Rename the dataset to `response_calib`. + +Run the response function calibration: +``` +basalt_response_calib.py -d ~/t265_calib_data/response_calib +``` +You should see the response function and the irradiance image similar to the one shown below. For the details of the algorithm see Section 2.3.1 of [[arXiv:1607.02555]](https://arxiv.org/abs/1607.02555). The results suggest that the response function used in the camera is linear. +![t265_inv_resp_irradiance](/doc/img/t265_inv_resp_irradiance.png) + +## Multi-Camera Geometric and Vignette Calibration +For the camera calibration we need to record a dataset with a static aprilgrid pattern. + +**Important for recording the dataset:** +* Move `webp_quality` slider to the highest value to enable lossless compression (important for vignette calibration). 
+* Set the `skip_frames` slider to 5 to speed up the dataset recording. +* Move the camera slowly to reduce the motion blur. +* Cover the entire field of view of the camera with the calibration pattern. Try to observe the pattern from different angles. +* Make sure you do not cast shadows on the pattern (important for vignette calibration). +* Rename the dataset to `cam_calib`. + +Run the calibration executable: +``` +basalt_calibrate --dataset-path ~/t265_calib_data/cam_calib --dataset-type euroc --result-path ~/t265_calib_results/ --aprilgrid /usr/etc/basalt/aprilgrid_6x6.json --cam-types kb4 kb4 +``` +To perform the calibration follow these steps: +* `load_dataset` load the dataset. +* `detect_corners` detect corners. If the corners were detected before the cached detections will be loaded at the previous step so there is no need to re-run the detection. +* `init_cam_intr` initialize camera intrinsics. +* `init_cam_poses` initialize camera poses using the current intrinsics estimate. +* `init_cam_extr` initialize transformations between multiple cameras. +* `init_opt` initialize optimization. +* `opt_until_converge` optimize until convergence. +* `opt_cam_poses` some initial poses computed from the initialized intrinsics can be far from optimum and not converge to the right minimum. To improve the final result we can re-initialize poses with optimized intrinsics. +* `init_opt` initialize optimization with new initial poses. +* `opt_until_converge` optimize until convergence. +* `compute_vign` after optimizing geometric models compute the vignetting of the cameras. +* `save_calib` save calibration file to the `~/t265_calib_results/calibration.json`. + +![t265_cam_calib](/doc/img/t265_cam_calib.png) + + +## IMU and Motion Capture Calibration +After calibrating cameras we can proceed to geometric and time calibration of the cameras, IMU and motion capture system. Setting up the motion capture system is specific to your setup. 
For the motion capture recording we use [ros_vrpn_client](https://github.com/ethz-asl/ros_vrpn_client) with [basalt_capture_mocap.py](scripts/basalt_capture_mocap.py). We record the data to the `mocap0` folder and then move it to the `mav0` directory of the camera dataset. This script is provided as an example. Motion capture setup is different in every particular case. + +**Important for recording the dataset:** +* Set the `skip_frames` slider to 1 to use the full framerate. +* Reduce the exposure time to reduce the motion blur. +* Move the setup such that all axes of the accelerometer and gyro are excited. This means moving with acceleration along the X, Y and Z axes and rotating around those axes. +* Do not forget to simultaneously record motion capture data. +* Rename the dataset to `imu_calib`. + +Run the calibration executable: +``` +basalt_calibrate_imu --dataset-path ~/t265_calib_data/imu_calib --dataset-type euroc --result-path ~/t265_calib_results/ --aprilgrid /usr/etc/basalt/aprilgrid_6x6.json --accel-noise-std 0.00818 --gyro-noise-std 0.00226 --accel-bias-std 0.01 --gyro-bias-std 0.0007 +``` + +To perform the calibration follow these steps: +* `load_dataset` load the dataset. +* `detect_corners` detect corners. If the corners were detected before the cached detections will be loaded at the previous step so there is no need to re-run the detection. +* `init_cam_poses` initialize camera poses. +* `init_cam_imu` initialize the transformation between cameras and the IMU. +* `init_opt` initialize optimization. +* `opt_until_converge` optimize until convergence. +* Enable `opt_cam_time_offset` and `opt_imu_scale` to optimize the time offset between cameras and the IMU and the IMU scale. +* `opt_until_converge` optimize until convergence. +* `init_mocap` initialize transformation between the motion capture marker frame and the IMU and the transformation between the aprilgrid pattern and the motion capture system origin. +* Enable `opt_mocap`. 
+* `opt_until_converge` optimize until convergence. +* `save_calib` save calibration file to the `~/t265_calib_results/calibration.json`. +* `save_mocap_calib` save motion capture system calibration file to the `~/t265_calib_results/mocap_calibration.json`. + +![t265_imu_calib](/doc/img/t265_imu_calib.png) + + +## Generating Time-Aligned Ground Truth +Since the motion capture system and the PC where the dataset was recorded might not have the same clock we need to perform the time synchronization. Additionally we need to transform the coordinate frame of the GT data to the IMU frame (originally it is in the coordinate frame attached to the markers). + +The raw motion capture data is stored in the `mav0/mocap0/` folder. We can find the time offset by minimizing the error between gyro measurements and rotational velocities computed from the motion capture data. The resulting trajectory (time aligned and transformed to the IMU frame) will be written to `mav0/gt/` and automatically loaded when available. +``` +basalt_time_alignment --dataset-path ~/t265_calib_data/sequence0 --dataset-type euroc --calibration ~/t265_calib_results/calibration.json --mocap-calibration ~/t265_calib_results/mocap_calibration.json --save-gt +``` +You should be able to see that, despite some noise, rotational velocity computed from the motion capture data aligns well with gyro measurements. +![t265_time_align_gyro](/doc/img/t265_time_align_gyro.png) + +You can also switch to the error function plot and see that there is a clear minimum corresponding to the computed time offset. 
![t265_time_align_error](/doc/img/t265_time_align_error.png) + +## Running Visual-Inertial Odometry +Now we can run the visual-inertial odometry on the recorded dataset: +``` +basalt_vio --dataset-path ~/t265_calib_data/sequence0 --cam-calib ~/t265_calib_results/calibration.json --dataset-type euroc --config-path /usr/etc/basalt/euroc_config.json --show-gui 1 +``` +After the system processes the whole sequence you can use the `align_se3` button to align the trajectory to the ground-truth data and compute RMS ATE. +![t265_vio](/doc/img/t265_vio.png) + + +## Running Visual-Inertial Odometry Live +It is also possible to run the odometry live with the camera. If no calibration files are provided the factory calibration will be used. +``` +basalt_rs_t265_vio --cam-calib ~/t265_calib_results/calibration.json --config-path /usr/etc/basalt/euroc_config.json +``` \ No newline at end of file diff --git a/doc/img/realsense_setup.jpg b/doc/img/realsense_setup.jpg new file mode 100644 index 0000000..f34b6d2 Binary files /dev/null and b/doc/img/realsense_setup.jpg differ diff --git a/doc/img/t265_cam_calib.png b/doc/img/t265_cam_calib.png new file mode 100644 index 0000000..f8d519e Binary files /dev/null and b/doc/img/t265_cam_calib.png differ diff --git a/doc/img/t265_imu_calib.png b/doc/img/t265_imu_calib.png new file mode 100644 index 0000000..f622008 Binary files /dev/null and b/doc/img/t265_imu_calib.png differ diff --git a/doc/img/t265_inv_resp_irradiance.png b/doc/img/t265_inv_resp_irradiance.png new file mode 100644 index 0000000..1f5d5f2 Binary files /dev/null and b/doc/img/t265_inv_resp_irradiance.png differ diff --git a/doc/img/t265_record.png b/doc/img/t265_record.png new file mode 100644 index 0000000..290d032 Binary files /dev/null and b/doc/img/t265_record.png differ diff --git a/doc/img/t265_time_align_error.png b/doc/img/t265_time_align_error.png new file mode 100644 index 0000000..9cbcdca Binary files /dev/null and b/doc/img/t265_time_align_error.png differ diff --git 
a/doc/img/t265_time_align_gyro.png b/doc/img/t265_time_align_gyro.png new file mode 100644 index 0000000..5eb3d7b Binary files /dev/null and b/doc/img/t265_time_align_gyro.png differ diff --git a/doc/img/t265_vio.png b/doc/img/t265_vio.png new file mode 100644 index 0000000..6249a28 Binary files /dev/null and b/doc/img/t265_vio.png differ diff --git a/scripts/basalt_response_calib.py b/scripts/basalt_response_calib.py index 23e1f21..e0d7a40 100755 --- a/scripts/basalt_response_calib.py +++ b/scripts/basalt_response_calib.py @@ -91,15 +91,14 @@ for iter in range(5): -plt.figure() -plt.plot(inv_resp[:-1]) -plt.ylabel('Irradiance Value') -plt.xlabel('Image Intensity') -plt.title('Inverse Responce Function') +fig, (ax1, ax2) = plt.subplots(1, 2) +ax1.plot(inv_resp[:-1]) +ax1.set(xlabel='Image Intensity', ylabel='Irradiance Value') +ax1.set_title('Inverse Response Function') -plt.figure() -plt.imshow(irradiance) -plt.title('Irradiance Image') + +ax2.imshow(irradiance) +ax2.set_title('Irradiance Image') plt.show() diff --git a/src/time_alignment.cpp b/src/time_alignment.cpp index f461063..bea3823 100644 --- a/src/time_alignment.cpp +++ b/src/time_alignment.cpp @@ -66,6 +66,8 @@ int main(int argc, char **argv) { std::string output_gyro_path; std::string output_mocap_path; + bool save_new_gt = false; + double max_offset_s = 10.0; bool show_gui = true; @@ -95,6 +97,8 @@ int main(int argc, char **argv) { "Maximum offset for a grid search in seconds."); app.add_flag("--show-gui", show_gui, "Show GUI for debugging"); + app.add_flag("--save-gt", save_new_gt, + "Save time aligned data to mav0/gt/ folder"); try { app.parse(argc, argv); @@ -322,8 +326,8 @@ int main(int argc, char **argv) { } } - int64_t min_time = vio_dataset->get_gyro_data().front().timestamp_ns; - int64_t max_time = vio_dataset->get_gyro_data().back().timestamp_ns; + const int64_t min_time = vio_dataset->get_gyro_data().front().timestamp_ns; + const int64_t max_time = 
vio_dataset->get_gyro_data().back().timestamp_ns; if (output_gyro_path != "") { std::cout << "Writing gyro values to '" << output_gyro_path << "'" @@ -351,6 +355,30 @@ int main(int argc, char **argv) { } } + if (save_new_gt) { + fs::create_directory(dataset_path + "/mav0/gt/"); + + std::ofstream f(dataset_path + "/mav0/gt/data.csv"); + + for (size_t i = 0; i < vio_dataset->get_gt_timestamps().size(); i++) { + const int64_t corrected_time = vio_dataset->get_gt_timestamps()[i] + + vio_dataset->get_mocap_to_imu_offset_ns() + + best_offset_refined_ns; + + if (corrected_time >= min_time && corrected_time <= max_time) { + const Sophus::SE3d &p = vio_dataset->get_gt_pose_data()[i]; + + f << corrected_time << ',' << p.translation().x() << ',' + << p.translation().y() << ',' << p.translation().z() << ',' + << p.unit_quaternion().w() << ',' << p.unit_quaternion().x() << ',' + << p.unit_quaternion().y() << ',' << p.unit_quaternion().z() + << std::endl; + } + } + + f.close(); + } + if (show_gui) { static constexpr int UI_WIDTH = 280; diff --git a/src/vio.cpp b/src/vio.cpp index 985d9b3..a4ed462 100644 --- a/src/vio.cpp +++ b/src/vio.cpp @@ -94,7 +94,6 @@ pangolin::Var show_est_bg("ui.show_est_bg", false, false, true); pangolin::Var show_est_ba("ui.show_est_ba", false, false, true); pangolin::Var show_gt("ui.show_gt", true, false, true); -pangolin::Var show_device_gt("ui.show_device_gt", true, false, true); Button next_step_btn("ui.next_step", &next_step); Button prev_step_btn("ui.prev_step", &prev_step);