Switched to HashBow instead of DBoW3

parent a738f46b13
commit 0e073978b9
@@ -193,7 +193,6 @@ include_directories(thirdparty/basalt-headers/thirdparty/cereal/include)
 include_directories(thirdparty/basalt-headers/include)
 include_directories(thirdparty/CLI11/include)
 include_directories(thirdparty/fast/include)
-include_directories(thirdparty/DBoW3/src/)
 
 include_directories(include)
 
@@ -215,7 +214,7 @@ add_library(basalt SHARED
   src/utils/keypoints.cpp)
 
 
-target_link_libraries(basalt PUBLIC ${TBB_LIBRARIES} ${STD_CXX_FS} ${OpenCV_LIBS} pangolin PRIVATE rosbag apriltag opengv DBoW3)
+target_link_libraries(basalt PUBLIC ${TBB_LIBRARIES} ${STD_CXX_FS} ${OpenCV_LIBS} pangolin PRIVATE rosbag apriltag opengv)
 
 
 add_executable(basalt_calibrate src/calibrate.cpp)
@@ -259,7 +258,7 @@ install(TARGETS basalt_calibrate basalt_calibrate_imu basalt_vio_sim basalt_mapp
   ARCHIVE DESTINATION ${CMAKE_INSTALL_PREFIX}/lib)
 
 file(GLOB CONFIG_FILES "${CMAKE_CURRENT_SOURCE_DIR}/data/*.json")
-install(FILES ${CONFIG_FILES} ${CMAKE_CURRENT_SOURCE_DIR}/data/basalt-data/orbvoc.dbow3
+install(FILES ${CONFIG_FILES}
   DESTINATION ${CMAKE_INSTALL_PREFIX}/etc/basalt)
 
 
@@ -25,6 +25,7 @@
   "config.mapper_ransac_threshold": 5e-5,
   "config.mapper_min_track_length": 5,
   "config.mapper_max_hamming_distance": 70,
-  "config.mapper_second_best_test_ratio": 1.2
+  "config.mapper_second_best_test_ratio": 1.2,
+  "config.mapper_bow_num_bits": 16
   }
 }
@@ -47,9 +47,9 @@ For evaluation the button `align_svd` is used. It aligns the GT trajectory with
 ### Visual-inertial mapping
 To run the mapping tool execute the following command:
 ```
-basalt_mapper --cam-calib /usr/etc/basalt/euroc_ds_calib.json --marg-data euroc_marg_data --vocabulary /usr/etc/basalt/orbvoc.dbow3
+basalt_mapper --cam-calib /usr/etc/basalt/euroc_ds_calib.json --marg-data euroc_marg_data
 ```
-Here `--marg-data` is the folder with the results from VIO and `--vocabulary` is the path to DBoW3 vocabulary.
+Here `--marg-data` is the folder with the results from VIO.
 
 This opens the GUI and extracts non-linear factors from the marginalization data.
 ![MH_05_MAPPING](/doc/img/MH_05_MAPPING.png)
@@ -112,6 +112,6 @@ basalt_vio --dataset-path dataset-magistrale1_512_16/ --cam-calib /usr/etc/basal
 ### Visual-inertial mapping
 To run the mapping tool execute the following command:
 ```
-basalt_mapper --cam-calib /usr/etc/basalt/tumvi_512_ds_calib.json --marg-data tumvi_marg_data --vocabulary /usr/etc/basalt/orbvoc.dbow3
+basalt_mapper --cam-calib /usr/etc/basalt/tumvi_512_ds_calib.json --marg-data tumvi_marg_data
 ```
 ![magistrale1_mapping](/doc/img/magistrale1_mapping.png)
@@ -0,0 +1,161 @@
+#pragma once
+
+#include <algorithm>
+#include <array>
+#include <bitset>
+#include <cmath>
+#include <iostream>
+#include <unordered_map>
+#include <vector>
+
+#include <basalt/utils/common_types.h>
+
+#include <tbb/concurrent_unordered_map.h>
+
+namespace basalt {
+
+template <size_t N>
+class HashBow {
+ public:
+  HashBow(size_t num_bits) : num_bits(num_bits < 32 ? num_bits : 32) {
+    static_assert(N < 512,
+                  "This implementation of HashBow only supports descriptor "
+                  "lengths below 512.");
+  }
+
+  inline FeatureHash compute_hash(const std::bitset<N>& descriptor) const {
+    FeatureHash res;
+    for (size_t i = 0; i < num_bits; ++i) {
+      res[i] = descriptor[word_bit_permutation[i]];
+    }
+    return res;
+  }
+
+  inline void compute_bow(const std::vector<std::bitset<N>>& descriptors,
+                          std::vector<FeatureHash>& hashes,
+                          HashBowVector& bow_vector) const {
+    size_t descriptors_size = descriptors.size();
+    hashes.resize(descriptors_size);
+
+    bow_vector.clear();
+    bow_vector.reserve(descriptors_size);
+
+    for (size_t i = 0; i < descriptors_size; i++) {
+      hashes[i] = compute_hash(descriptors[i]);
+
+      FeatureHash bow_word = hashes[i];
+      bow_vector[bow_word] += 1.0;
+    }
+
+    double sum_squared = 0;
+    for (const auto& kv : bow_vector) {
+      sum_squared += kv.second * kv.second;
+    }
+
+    double norm = std::sqrt(sum_squared);
+
+    for (auto& kv : bow_vector) {
+      kv.second /= norm;
+    }
+  }
+
+  inline void add_to_database(const TimeCamId& tcid,
+                              const HashBowVector& bow_vector) {
+    for (const auto& kv : bow_vector) {
+      std::pair<TimeCamId, double> p = std::make_pair(tcid, kv.second);
+      inverted_index.emplace(kv.first, p);
+    }
+  }
+
+  inline void querry_database(
+      const HashBowVector& bow_vector, size_t num_results,
+      std::vector<std::pair<TimeCamId, double>>& results,
+      const int64_t* max_t_ns = nullptr) const {
+    results.clear();
+
+    std::unordered_map<TimeCamId, double, tbb::tbb_hash<TimeCamId>> scores;
+
+    for (const auto& kv : bow_vector) {
+      const auto range_it = inverted_index.equal_range(kv.first);
+
+      for (auto it = range_it.first; it != range_it.second; ++it) {
+        // if there is a maximum query time select only the frames that have
+        // timestamp below max_t_ns
+        if (!max_t_ns || it->second.first.first < (*max_t_ns))
+          scores[it->second.first] += kv.second * it->second.second;
+      }
+    }
+
+    results.reserve(scores.size());
+
+    for (const auto& kv : scores) results.emplace_back(kv);
+
+    std::sort(results.begin(), results.end(),
+              [](const auto& a, const auto& b) { return a.second > b.second; });
+
+    if (results.size() > num_results) results.resize(num_results);
+  }
+
+ protected:
+  constexpr static const size_t random_bit_permutation[512] = {
+      484, 458, 288, 170, 215, 424, 41, 38, 293, 96, 172, 428, 508, 52, 370,
+      1, 182, 472, 89, 339, 273, 234, 98, 217, 73, 195, 307, 306, 113, 429,
+      161, 443, 364, 439, 301, 247, 325, 24, 490, 366, 75, 7, 464, 232, 49,
+      196, 144, 69, 470, 387, 3, 86, 361, 313, 396, 356, 94, 201, 291, 360,
+      107, 251, 413, 393, 296, 124, 308, 146, 298, 160, 121, 302, 151, 345, 336,
+      26, 63, 238, 79, 267, 262, 437, 433, 350, 53, 134, 194, 452, 114, 54,
+      82, 214, 191, 242, 482, 37, 432, 311, 130, 460, 422, 221, 271, 192, 474,
+      46, 289, 34, 20, 95, 463, 499, 159, 272, 481, 129, 448, 173, 323, 258,
+      416, 229, 334, 510, 461, 263, 362, 346, 39, 500, 381, 401, 492, 299, 33,
+      169, 241, 11, 254, 449, 199, 486, 400, 365, 70, 436, 108, 19, 233, 505,
+      152, 6, 480, 468, 278, 426, 253, 471, 328, 327, 139, 29, 27, 488, 332,
+      290, 412, 164, 259, 352, 222, 186, 32, 319, 410, 211, 405, 187, 213, 507,
+      205, 395, 62, 178, 36, 140, 87, 491, 351, 450, 314, 77, 342, 132, 133,
+      477, 103, 389, 206, 197, 324, 485, 425, 297, 231, 123, 447, 126, 9, 64,
+      181, 40, 14, 5, 261, 431, 333, 223, 4, 138, 220, 76, 44, 300, 331,
+      78, 193, 497, 403, 435, 275, 147, 66, 368, 141, 451, 225, 250, 61, 18,
+      444, 208, 380, 109, 255, 337, 372, 212, 359, 457, 31, 398, 354, 219, 117,
+      248, 392, 203, 88, 479, 509, 149, 120, 145, 51, 15, 367, 190, 163, 417,
+      454, 329, 183, 390, 83, 404, 249, 81, 264, 445, 317, 179, 244, 473, 71,
+      111, 118, 209, 171, 224, 459, 446, 104, 13, 377, 200, 414, 198, 420, 226,
+      153, 384, 25, 441, 305, 338, 316, 483, 184, 402, 48, 131, 502, 252, 469,
+      12, 167, 243, 373, 35, 127, 341, 455, 379, 210, 340, 128, 430, 57, 434,
+      330, 415, 494, 142, 355, 282, 322, 65, 105, 421, 68, 409, 466, 245, 59,
+      269, 112, 386, 257, 256, 93, 174, 16, 60, 143, 343, 115, 506, 276, 10,
+      496, 489, 235, 47, 136, 22, 165, 204, 42, 465, 440, 498, 312, 504, 116,
+      419, 185, 303, 218, 353, 283, 374, 2, 177, 137, 240, 102, 309, 292, 85,
+      453, 388, 397, 438, 281, 279, 442, 110, 55, 101, 100, 150, 375, 406, 157,
+      23, 0, 237, 376, 236, 216, 8, 154, 91, 456, 423, 176, 427, 284, 30,
+      84, 349, 335, 56, 270, 227, 286, 168, 239, 122, 478, 162, 475, 166, 17,
+      348, 285, 175, 155, 266, 382, 304, 268, 180, 295, 125, 371, 467, 277, 294,
+      58, 347, 72, 280, 50, 287, 511, 80, 260, 326, 495, 45, 106, 399, 369,
+      503, 357, 315, 418, 487, 99, 43, 320, 188, 407, 246, 501, 119, 158, 274,
+      408, 230, 358, 90, 148, 363, 207, 344, 265, 462, 189, 310, 385, 67, 28,
+      383, 378, 156, 394, 97, 476, 493, 321, 411, 228, 21, 391, 202, 92, 318,
+      74, 135};
+
+  constexpr static std::array<size_t, FEATURE_HASH_MAX_SIZE>
+  compute_permutation() {
+    std::array<size_t, FEATURE_HASH_MAX_SIZE> res{};
+    size_t j = 0;
+    for (size_t i = 0; i < 512 && j < FEATURE_HASH_MAX_SIZE; ++i) {
+      if (random_bit_permutation[i] < N) {
+        res[j] = random_bit_permutation[i];
+        j++;
+      }
+    }
+
+    return res;
+  }
+
+  constexpr static const std::array<size_t, FEATURE_HASH_MAX_SIZE>
+      word_bit_permutation = compute_permutation();
+
+  size_t num_bits;
+
+  tbb::concurrent_unordered_multimap<FeatureHash, std::pair<TimeCamId, double>,
+                                     std::hash<FeatureHash>>
+      inverted_index;
+};
+
+}  // namespace basalt
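
The core of the new header is `compute_hash`: each 256-bit descriptor is reduced to a `num_bits`-bit `FeatureHash` by sampling a fixed subset of its bits via `word_bit_permutation`, and the hash itself acts as the bag-of-words "word", so no trained vocabulary is needed. A self-contained sketch of that idea (illustration only, not the code above: here the bit subset is drawn at runtime from a seeded RNG, whereas the header bakes in a fixed permutation table so hashes stay stable across runs and machines):

```cpp
#include <algorithm>
#include <array>
#include <bitset>
#include <iostream>
#include <numeric>
#include <random>

int main() {
  constexpr size_t N = 256;        // descriptor length
  constexpr size_t num_bits = 16;  // default mapper_bow_num_bits

  // Fixed random permutation of bit positions; the first num_bits
  // entries choose which descriptor bits survive into the hash.
  std::array<size_t, N> perm;
  std::iota(perm.begin(), perm.end(), size_t(0));
  std::shuffle(perm.begin(), perm.end(), std::mt19937(42));

  auto hash = [&](const std::bitset<N>& d) {
    std::bitset<32> h;
    for (size_t i = 0; i < num_bits; ++i) h[i] = d[perm[i]];
    return h.to_ulong();
  };

  std::bitset<N> a;
  a[3] = a[77] = a[200] = true;
  std::bitset<N> b = a;
  b.flip(5);  // 1-bit Hamming perturbation

  // With only 16 of 256 bits sampled, a small perturbation rarely hits
  // a sampled position, so similar descriptors usually share a word.
  std::cout << std::hex << hash(a) << " vs " << hash(b) << "\n";
}
```

Because only a small fraction of the bits is sampled, descriptors within a small Hamming distance usually collide into the same word, which is what makes the raw hash usable as a vocabulary without any training data.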
@@ -48,6 +48,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 namespace basalt {
 
+/// ids for 2D features detected in images
+using FeatureId = int;
+
 /// identifies a frame of multiple images (stereo pair)
 using FrameId = int64_t;
 
@@ -61,8 +64,9 @@ inline std::ostream& operator<<(std::ostream& os, const TimeCamId& tcid) {
   return os;
 }
 
-/// ids for 2D features detected in images
-using FeatureId = int;
+constexpr static const size_t FEATURE_HASH_MAX_SIZE = 32;
+using FeatureHash = std::bitset<FEATURE_HASH_MAX_SIZE>;
+using HashBowVector = std::unordered_map<FeatureHash, double>;
 
 /// keypoint positions and descriptors for an image
 struct KeypointsData {
@@ -77,6 +81,9 @@ struct KeypointsData {
   std::vector<std::bitset<256>> corner_descriptors;
 
   Eigen::vector<Eigen::Vector4d> corners_3d;
 
+  std::vector<FeatureHash> hashes;
+  HashBowVector bow_vector;
+
 };
 
 /// feature corners is a collection of { imageId => KeypointsData }
@@ -65,11 +65,6 @@ void computeAngles(const basalt::Image<const uint16_t>& img_raw,
 void computeDescriptors(const basalt::Image<const uint16_t>& img_raw,
                         KeypointsData& kd);
 
-void matchFastHelper(const std::vector<std::bitset<256>>& corner_descriptors_1,
-                     const std::vector<std::bitset<256>>& corner_descriptors_2,
-                     std::map<int, int>& matches, int threshold,
-                     double test_dist);
-
 void matchDescriptors(const std::vector<std::bitset<256>>& corner_descriptors_1,
                       const std::vector<std::bitset<256>>& corner_descriptors_2,
                       std::vector<std::pair<int, int>>& matches, int threshold,
@@ -71,5 +71,6 @@ struct VioConfig {
   double mapper_min_track_length;
   double mapper_max_hamming_distance;
   double mapper_second_best_test_ratio;
+  int mapper_bow_num_bits;
 };
 }  // namespace basalt
@@ -49,12 +49,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <tbb/parallel_for.h>
 #include <tbb/parallel_reduce.h>
 
 #include <mutex>
 
-#include <DBoW3.h>
-
 namespace basalt {
 
+template <size_t N>
+class HashBow;
+
 class NfrMapper : public BundleAdjustmentBase {
  public:
   using Ptr = std::shared_ptr<NfrMapper>;
@@ -164,8 +163,7 @@ class NfrMapper : public BundleAdjustmentBase {
     const Eigen::map<int64_t, PoseStateWithLin>* frame_poses;
   };
 
-  NfrMapper(const basalt::Calibration<double>& calib, const VioConfig& config,
-            const std::string& vocabulary = "");
+  NfrMapper(const basalt::Calibration<double>& calib, const VioConfig& config);
 
   void addMargData(basalt::MargData::Ptr& data);
 
@@ -199,19 +197,13 @@ class NfrMapper : public BundleAdjustmentBase {
 
   std::unordered_map<int64_t, OpticalFlowInput::Ptr> img_data;
 
-  tbb::concurrent_unordered_map<
-      TimeCamId, std::pair<DBoW3::BowVector, DBoW3::FeatureVector>>
-      bow_data;
-
   Corners feature_corners;
 
   Matches feature_matches;
 
   FeatureTracks feature_tracks;
 
-  DBoW3::Database bow_database;
-
-  std::unordered_map<int, TimeCamId> bow_id_to_tcid;
+  std::shared_ptr<HashBow<256>> hash_bow_database;
 
   VioConfig config;
 };
@@ -19,7 +19,7 @@ for d in ${DATASETS[$CI_NODE_INDEX-1]}; do
     --result-path $folder_name/vio_$d --marg-data eval_tmp_marg_data
 
   basalt_mapper --show-gui 0 --cam-calib /usr/etc/basalt/euroc_eucm_calib.json --marg-data eval_tmp_marg_data \
-    --vocabulary /usr/etc/basalt/orbvoc.dbow3 --result-path $folder_name/mapper_$d
+    --result-path $folder_name/mapper_$d
   rm -rf eval_tmp_marg_data
 done
 
@@ -140,7 +140,6 @@ Button align_btn("ui.aling_svd", &alignButton);
 pangolin::OpenGlRenderState camera;
 
 std::string marg_data_path;
-std::string vocabulary;
 
 int main(int argc, char** argv) {
   bool show_gui = true;
@@ -160,8 +159,6 @@ int main(int argc, char** argv) {
 
   app.add_option("--config-path", config_path, "Path to config file.");
 
-  app.add_option("--vocabulary", vocabulary, "Path to vocabulary.")->required();
-
   app.add_option("--result-path", result_path, "Path to config file.");
 
   try {
@@ -537,7 +534,7 @@ void load_data(const std::string& calib_path, const std::string& cache_path) {
     }
   }
 
-  nrf_mapper.reset(new basalt::NfrMapper(calib, vio_config, vocabulary));
+  nrf_mapper.reset(new basalt::NfrMapper(calib, vio_config));
 
   basalt::MargDataLoader mdl;
   tbb::concurrent_bounded_queue<basalt::MargData::Ptr> marg_queue;
@@ -33,6 +33,8 @@ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 
+#include <unordered_set>
+
 #include <basalt/utils/keypoints.h>
 
 #include <opencv2/features2d/features2d.hpp>
@@ -305,7 +307,7 @@ void computeDescriptors(const basalt::Image<const uint16_t>& img_raw,
 
 void matchFastHelper(const std::vector<std::bitset<256>>& corner_descriptors_1,
                      const std::vector<std::bitset<256>>& corner_descriptors_2,
-                     std::map<int, int>& matches, int threshold,
+                     std::unordered_map<int, int>& matches, int threshold,
                      double test_dist) {
   matches.clear();
 
@@ -338,7 +340,7 @@ void matchDescriptors(const std::vector<std::bitset<256>>& corner_descriptors_1,
                       double dist_2_best) {
   matches.clear();
 
-  std::map<int, int> matches_1_2, matches_2_1;
+  std::unordered_map<int, int> matches_1_2, matches_2_1;
   matchFastHelper(corner_descriptors_1, corner_descriptors_2, matches_1_2,
                   threshold, dist_2_best);
   matchFastHelper(corner_descriptors_2, corner_descriptors_1, matches_2_1,
@@ -397,8 +399,8 @@ void findInliersRansac(const KeypointsData& kd1, const KeypointsData& kd2,
   ransac.sac_model_->selectWithinDistance(nonlinear_transformation,
                                           ransac.threshold_, ransac.inliers_);
 
-  // Sanity check if the number of inliers decreased, but only warn if it is by
-  // 3 or more, since some small fluctuation is expected.
+  // Sanity check if the number of inliers decreased, but only warn if it is
+  // by 3 or more, since some small fluctuation is expected.
   if (ransac.inliers_.size() + 2 < num_inliers_ransac) {
     std::cout << "Warning: non-linear refinement reduced the relative pose "
                  "ransac inlier count from "
@@ -65,12 +65,13 @@ VioConfig::VioConfig() {
   mapper_obs_huber_thresh = 1.5;
   mapper_detection_num_points = 800;
   mapper_num_frames_to_match = 30;
-  mapper_frames_to_match_threshold = 0.3;
+  mapper_frames_to_match_threshold = 0.04;
   mapper_min_matches = 20;
   mapper_ransac_threshold = 5e-5;
   mapper_min_track_length = 5;
   mapper_max_hamming_distance = 70;
   mapper_second_best_test_ratio = 1.2;
+  mapper_bow_num_bits = 16;
 }
 
 void VioConfig::save(const std::string& filename) {
@@ -126,5 +127,6 @@ void serialize(Archive& ar, basalt::VioConfig& config) {
   ar(CEREAL_NVP(config.mapper_min_track_length));
   ar(CEREAL_NVP(config.mapper_max_hamming_distance));
   ar(CEREAL_NVP(config.mapper_second_best_test_ratio));
+  ar(CEREAL_NVP(config.mapper_bow_num_bits));
 }
 }  // namespace cereal
@@ -39,21 +39,17 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <basalt/utils/tracks.h>
 #include <basalt/vi_estimator/nfr_mapper.h>
 
-#include <DBoW3.h>
+#include <basalt/hash_bow/hash_bow.h>
 
 namespace basalt {
 
-NfrMapper::NfrMapper(const Calibration<double>& calib, const VioConfig& config,
-                     const std::string& vocabulary)
+NfrMapper::NfrMapper(const Calibration<double>& calib, const VioConfig& config)
     : config(config) {
   this->calib = calib;
   this->obs_std_dev = config.mapper_obs_std_dev;
   this->huber_thresh = config.mapper_obs_huber_thresh;
 
-  if (!vocabulary.empty()) {
-    DBoW3::Vocabulary voc(vocabulary);
-    bow_database.setVocabulary(voc);
-  }
+  hash_bow_database.reset(new HashBow<256>(config.mapper_bow_num_bits));
 }
 
 void NfrMapper::addMargData(MargData::Ptr& data) {
@@ -378,7 +374,7 @@ void NfrMapper::detect_keypoints() {
     if (kv->second.get()) {
       for (size_t i = 0; i < kv->second->img_data.size(); i++) {
         TimeCamId tcid(kv->first, i);
-        KeypointsData kd;
+        KeypointsData& kd = feature_corners[tcid];
 
         if (!kv->second->img_data[i].img.get()) continue;
 
@@ -394,18 +390,13 @@ void NfrMapper::detect_keypoints() {
         calib.intrinsics[tcid.second].unproject(kd.corners, kd.corners_3d,
                                                 success);
 
-        feature_corners[tcid] = kd;
+        hash_bow_database->compute_bow(kd.corner_descriptors, kd.hashes,
+                                       kd.bow_vector);
 
-        auto& bow = bow_data[tcid];
+        hash_bow_database->add_to_database(tcid, kd.bow_vector);
 
-        if (bow_database.usingDirectIndex()) {
-          bow_database.getVocabulary()->transform(
-              kd.corner_descriptors, bow.first, bow.second,
-              bow_database.getDirectIndexLevels());
-        } else {
-          bow_database.getVocabulary()->transform(kd.corner_descriptors,
-                                                  bow.first);
-        }
+        // std::cout << "bow " << kd.bow_vector.size() << " desc "
+        //           << kd.corner_descriptors.size() << std::endl;
       }
     }
   }
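
With the trained vocabulary gone, the per-frame bag-of-words bookkeeping reduces to the two calls shown above. A minimal sketch of the flow, assuming the basalt headers introduced in this commit (`process_frame`, `t_ns`, and `cam_id` are hypothetical names, not repo code):

```cpp
// Sketch only: relies on basalt/hash_bow/hash_bow.h from this commit.
#include <basalt/hash_bow/hash_bow.h>
#include <basalt/utils/common_types.h>

#include <cstdint>
#include <utility>
#include <vector>

void process_frame(basalt::HashBow<256>& db, basalt::KeypointsData& kd,
                   int64_t t_ns, size_t cam_id) {
  basalt::TimeCamId tcid(t_ns, cam_id);

  // Hash every descriptor and build the L2-normalized word-frequency vector.
  db.compute_bow(kd.corner_descriptors, kd.hashes, kd.bow_vector);

  // Insert the frame into the concurrent inverted index.
  db.add_to_database(tcid, kd.bow_vector);

  // Later, in match_all(): query for loop-closure candidates, restricted
  // to frames with timestamps strictly below this frame's own.
  std::vector<std::pair<basalt::TimeCamId, double>> results;
  db.querry_database(kd.bow_vector, /*num_results=*/30, results, &tcid.first);
}
```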
@@ -413,28 +404,13 @@ void NfrMapper::detect_keypoints() {
 
   auto t2 = std::chrono::high_resolution_clock::now();
 
-  for (const auto& kv : bow_data) {
-    int bow_id;
-    if (bow_database.usingDirectIndex()) {
-      bow_id = bow_database.add(kv.second.first, kv.second.second);
-    } else {
-      bow_id = bow_database.add(kv.second.first);
-    }
-    bow_id_to_tcid[bow_id] = kv.first;
-  }
-
-  auto t3 = std::chrono::high_resolution_clock::now();
-
   auto elapsed1 =
       std::chrono::duration_cast<std::chrono::microseconds>(t2 - t1);
-  auto elapsed2 =
-      std::chrono::duration_cast<std::chrono::microseconds>(t3 - t2);
 
   std::cout << "Processed " << feature_corners.size() << " frames."
             << std::endl;
 
-  std::cout << "Detection time: " << elapsed1.count() * 1e-6
-            << "s. Adding to DB time: " << elapsed2.count() * 1e-6 << "s."
+  std::cout << "Detection time: " << elapsed1.count() * 1e-6 << "s."
             << std::endl;
 }
@@ -502,33 +478,34 @@ void NfrMapper::match_all() {
   tbb::blocked_range<size_t> keys_range(0, keys.size());
   auto compute_pairs = [&](const tbb::blocked_range<size_t>& r) {
     for (size_t i = r.begin(); i != r.end(); ++i) {
-      DBoW3::QueryResults q;
+      const TimeCamId& tcid = keys[i];
+      const KeypointsData& kd = feature_corners.at(tcid);
 
-      auto it = bow_data.find(keys[i]);
+      std::vector<std::pair<TimeCamId, double>> results;
 
-      if (it != bow_data.end()) {
-        bow_database.query(it->second.first, q,
-                           config.mapper_num_frames_to_match);
+      hash_bow_database->querry_database(kd.bow_vector,
+                                         config.mapper_num_frames_to_match,
+                                         results, &tcid.first);
 
-        for (const auto& r : q) {
-          // Match only previous frames
-
-          size_t j = id_to_key_idx.at(bow_id_to_tcid.at(r.Id));
-          if (r.Score > config.mapper_frames_to_match_threshold &&
-              keys[i].first < keys[j].first) {
+      // std::cout << "Closest frames for " << tcid << ": ";
+      for (const auto& otcid_score : results) {
+        // std::cout << otcid_score.first << "(" << otcid_score.second << ") ";
+        if (otcid_score.first.first != tcid.first &&
+            otcid_score.second > config.mapper_frames_to_match_threshold) {
           match_pair m;
           m.i = i;
-          m.j = j;
-          m.score = r.Score;
+          m.j = id_to_key_idx.at(otcid_score.first);
+          m.score = otcid_score.second;
 
           ids_to_match.emplace_back(m);
         }
       }
-      }
+      // std::cout << std::endl;
     }
   };
 
   tbb::parallel_for(keys_range, compute_pairs);
   // compute_pairs(keys_range);
 
   auto t2 = std::chrono::high_resolution_clock::now();
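
Since `compute_bow` L2-normalizes every `HashBowVector`, the score that `querry_database` accumulates over shared words is the cosine similarity of the two frames' bag-of-words vectors, which is what `mapper_frames_to_match_threshold` (now 0.04) is compared against. A tiny standalone check with made-up values:

```cpp
#include <iostream>
#include <map>

int main() {
  // Two toy BoW vectors over word ids 0..2, already L2-normalized.
  std::map<int, double> q{{0, 0.6}, {1, 0.8}};
  std::map<int, double> d{{1, 1.0}};

  // Inverted-index style accumulation: only words present in both
  // vectors contribute, exactly as in HashBow::querry_database.
  double score = 0;
  for (const auto& [w, qv] : q) {
    auto it = d.find(w);
    if (it != d.end()) score += qv * it->second;
  }
  std::cout << "cosine similarity = " << score << "\n";  // 0.8
}
```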
@@ -2,7 +2,6 @@ cmake_minimum_required(VERSION 3.10)
 
 add_subdirectory(ros EXCLUDE_FROM_ALL)
 add_subdirectory(apriltag EXCLUDE_FROM_ALL)
-add_subdirectory(DBoW3 EXCLUDE_FROM_ALL)
 
 
 set(BUILD_SHARED_LIBS OFF CACHE BOOL "Enable BUILD_SHARED_LIBS")
@@ -1,15 +0,0 @@
-cmake_minimum_required(VERSION 2.8)
-PROJECT(DBoW3)
-set(PROJECT_VERSION "0.0.1")
-string(REGEX MATCHALL "[0-9]" PROJECT_VERSION_PARTS "${PROJECT_VERSION}")
-list(GET PROJECT_VERSION_PARTS 0 PROJECT_VERSION_MAJOR)
-list(GET PROJECT_VERSION_PARTS 1 PROJECT_VERSION_MINOR)
-list(GET PROJECT_VERSION_PARTS 2 PROJECT_VERSION_PATCH)
-set(PROJECT_SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}")
-
-#------------------------------------------------
-# DIRS
-#------------------------------------------------
-ADD_SUBDIRECTORY(src)
-set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)
-
@@ -1,44 +0,0 @@
-DBoW3: bag-of-words library for C++ with generic descriptors
-
-Copyright (c) 2015 Dorian Galvez-Lopez <http://doriangalvez.com> (Universidad de Zaragoza)
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-1. Redistributions of source code must retain the above copyright
-   notice, this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-3. Neither the name of copyright holders nor the names of its
-   contributors may be used to endorse or promote products derived
-   from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-''AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-
-If you use it in an academic work, please cite:
-
-  @ARTICLE{GalvezTRO12,
-    author={G\'alvez-L\'opez, Dorian and Tard\'os, J. D.},
-    journal={IEEE Transactions on Robotics},
-    title={Bags of Binary Words for Fast Place Recognition in Image Sequences},
-    year={2012},
-    month={October},
-    volume={28},
-    number={5},
-    pages={1188--1197},
-    doi={10.1109/TRO.2012.2197158},
-    ISSN={1552-3098}
-  }
@@ -1,7 +0,0 @@
-You should have received this DBoW3 version along with ORB-SLAM2 (https://github.com/raulmur/ORB_SLAM2).
-See the original DBoW3 library at: https://github.com/dorian3d/DBoW3
-All files included in this version are BSD, see LICENSE.txt
-
-We also use Random.h, Random.cpp, Timestamp.pp and Timestamp.h from DLib/DUtils.
-See the original DLib library at: https://github.com/dorian3d/DLib
-All files included in this version are BSD, see LICENSE.txt
@@ -1,136 +0,0 @@
-/**
- * File: BowVector.cpp
- * Date: March 2011
- * Author: Dorian Galvez-Lopez
- * Description: bag of words vector
- * License: see the LICENSE.txt file
- *
- */
-
-#include <algorithm>
-#include <cmath>
-#include <fstream>
-#include <iostream>
-#include <vector>
-
-#include "BowVector.h"
-
-namespace DBoW3 {
-
-// --------------------------------------------------------------------------
-
-BowVector::BowVector(void) {}
-
-// --------------------------------------------------------------------------
-
-BowVector::~BowVector(void) {}
-
-// --------------------------------------------------------------------------
-
-void BowVector::addWeight(WordId id, WordValue v) {
-  BowVector::iterator vit = this->lower_bound(id);
-
-  if (vit != this->end() && !(this->key_comp()(id, vit->first))) {
-    vit->second += v;
-  } else {
-    this->insert(vit, BowVector::value_type(id, v));
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void BowVector::addIfNotExist(WordId id, WordValue v) {
-  BowVector::iterator vit = this->lower_bound(id);
-
-  if (vit == this->end() || (this->key_comp()(id, vit->first))) {
-    this->insert(vit, BowVector::value_type(id, v));
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void BowVector::normalize(LNorm norm_type) {
-  double norm = 0.0;
-  BowVector::iterator it;
-
-  if (norm_type == DBoW3::L1) {
-    for (it = begin(); it != end(); ++it) norm += fabs(it->second);
-  } else {
-    for (it = begin(); it != end(); ++it) norm += it->second * it->second;
-    norm = sqrt(norm);
-  }
-
-  if (norm > 0.0) {
-    for (it = begin(); it != end(); ++it) it->second /= norm;
-  }
-}
-
-// --------------------------------------------------------------------------
-
-std::ostream &operator<<(std::ostream &out, const BowVector &v) {
-  BowVector::const_iterator vit;
-  // std::vector<unsigned int>::const_iterator iit;
-  unsigned int i = 0;
-  const size_t N = v.size();
-  for (vit = v.begin(); vit != v.end(); ++vit, ++i) {
-    out << "<" << vit->first << ", " << vit->second << ">";
-
-    if (i < N - 1) out << ", ";
-  }
-  return out;
-}
-
-// --------------------------------------------------------------------------
-
-void BowVector::saveM(const std::string &filename, size_t W) const {
-  std::fstream f(filename.c_str(), std::ios::out);
-
-  WordId last = 0;
-  BowVector::const_iterator bit;
-  for (bit = this->begin(); bit != this->end(); ++bit) {
-    for (; last < bit->first; ++last) {
-      f << "0 ";
-    }
-    f << bit->second << " ";
-
-    last = bit->first + 1;
-  }
-  for (; last < (WordId)W; ++last) f << "0 ";
-
-  f.close();
-}
-// --------------------------------------------------------------------------
-
-void BowVector::toStream(std::ostream &str) const {
-  uint32_t s = size();
-  str.write((char *)&s, sizeof(s));
-  for (auto d : *this) {
-    str.write((char *)&d.first, sizeof(d.first));
-    str.write((char *)&d.second, sizeof(d.second));
-  }
-}
-// --------------------------------------------------------------------------
-
-void BowVector::fromStream(std::istream &str) {
-  clear();
-  uint32_t s;
-
-  str.read((char *)&s, sizeof(s));
-  for (uint32_t i = 0; i < s; i++) {
-    WordId wid;
-    WordValue wv;
-    str.read((char *)&wid, sizeof(wid));
-    str.read((char *)&wv, sizeof(wv));
-    insert(std::make_pair(wid, wv));
-  }
-}
-
-uint64_t BowVector::getSignature() const {
-  uint64_t sig = 0;
-  for (auto ww : *this) sig += ww.first + 1e6 * ww.second;
-  return sig;
-}
-
-// --------------------------------------------------------------------------
-
-}  // namespace DBoW3
@@ -1,117 +0,0 @@
-/**
- * File: BowVector.h
- * Date: March 2011
- * Author: Dorian Galvez-Lopez
- * Description: bag of words vector
- * License: see the LICENSE.txt file
- *
- */
-
-#ifndef __D_T_BOW_VECTOR__
-#define __D_T_BOW_VECTOR__
-
-#include <map>
-#include <vector>
-#include "exports.h"
-#if _WIN32
-#include <cstdint>
-#endif
-namespace DBoW3 {
-
-/// Id of words
-typedef unsigned int WordId;
-
-/// Value of a word
-typedef double WordValue;
-
-/// Id of nodes in the vocabulary tree
-typedef unsigned int NodeId;
-
-/// L-norms for normalization
-enum LNorm
-{
-  L1,
-  L2
-};
-
-/// Weighting type
-enum WeightingType
-{
-  TF_IDF,
-  TF,
-  IDF,
-  BINARY
-};
-
-/// Scoring type
-enum ScoringType
-{
-  L1_NORM,
-  L2_NORM,
-  CHI_SQUARE,
-  KL,
-  BHATTACHARYYA,
-  DOT_PRODUCT
-};
-
-/// Vector of words to represent images
-class DBOW_API BowVector:
-  public std::map<WordId, WordValue>
-{
-public:
-
-  /**
-   * Constructor
-   */
-  BowVector(void);
-
-  /**
-   * Destructor
-   */
-  ~BowVector(void);
-
-  /**
-   * Adds a value to a word value existing in the vector, or creates a new
-   * word with the given value
-   * @param id word id to look for
-   * @param v value to create the word with, or to add to existing word
-   */
-  void addWeight(WordId id, WordValue v);
-
-  /**
-   * Adds a word with a value to the vector only if this does not exist yet
-   * @param id word id to look for
-   * @param v value to give to the word if this does not exist
-   */
-  void addIfNotExist(WordId id, WordValue v);
-
-  /**
-   * L1-Normalizes the values in the vector
-   * @param norm_type norm used
-   */
-  void normalize(LNorm norm_type);
-
-  /**
-   * Prints the content of the bow vector
-   * @param out stream
-   * @param v
-   */
-  friend std::ostream& operator<<(std::ostream &out, const BowVector &v);
-
-  /**
-   * Saves the bow vector as a vector in a matlab file
-   * @param filename
-   * @param W number of words in the vocabulary
-   */
-  void saveM(const std::string &filename, size_t W) const;
-
-  // returns a unique number from the configuration
-  uint64_t getSignature() const;
-  // serialization
-  void toStream(std::ostream &str) const;
-  void fromStream(std::istream &str);
-};
-
-} // namespace DBoW3
-
-#endif
@@ -1,20 +0,0 @@
-FILE(GLOB hdrs_base "*.h" )
-FILE(GLOB srcs_base "*.c*")
-
-FILE(GLOB hdrs ${hdrs_base} )
-FILE(GLOB srcs ${srcs_base} )
-
-
-ADD_LIBRARY(${PROJECT_NAME} ${srcs} ${hdrs})
-INCLUDE_DIRECTORIES(${CMAKE_CURRENT_BINARY_DIR} )
-
-SET_TARGET_PROPERTIES(${PROJECT_NAME} PROPERTIES # create *nix style library versions + symbolic links
-    DEFINE_SYMBOL DBOW_DSO_EXPORTS
-    VERSION ${PROJECT_VERSION}
-    SOVERSION ${PROJECT_SOVERSION}
-    CLEAN_DIRECT_OUTPUT 1 # allow creating static and shared libs without conflicts
-    OUTPUT_NAME "${PROJECT_NAME}${PROJECT_DLLVERSION}" # avoid conflicts between library and binary target names
-)
-
-TARGET_LINK_LIBRARIES(${PROJECT_NAME} ${REQUIRED_LIBRARIES} ${OpenCV_LIBS})
-
@@ -1,68 +0,0 @@
-/*
- * File: DBoW3.h
- * Date: November 2011
- * Author: Dorian Galvez-Lopez
- * Description: Generic include file for the DBoW3 classes and
- *   the specialized vocabularies and databases
- * License: see the LICENSE.txt file
- *
- */
-
-/*! \mainpage DBoW3 Library
- *
- * DBoW3 library for C++:
- * Bag-of-word image database for image retrieval.
- *
- * Written by Rafael Muñoz Salinas,
- * University of Cordoba (Spain)
- *
- *
- * \section requirements Requirements
- * This library requires the OpenCV libraries,
- * as well as the boost::dynamic_bitset class.
- *
- * \section citation Citation
- * If you use this software in academic works, please cite:
- <pre>
- @@ARTICLE{GalvezTRO12,
-  author={Galvez-Lopez, Dorian and Tardos, J. D.},
-  journal={IEEE Transactions on Robotics},
-  title={Bags of Binary Words for Fast Place Recognition in Image Sequences},
-  year={2012},
-  month={October},
-  volume={28},
-  number={5},
-  pages={1188--1197},
-  doi={10.1109/TRO.2012.2197158},
-  ISSN={1552-3098}
- }
- </pre>
- *
- * \section license License
- * This file is licensed under a Creative Commons
- * Attribution-NonCommercial-ShareAlike 3.0 license.
- * This file can be freely used and users can use, download and edit this file
- * provided that credit is attributed to the original author. No users are
- * permitted to use this file for commercial purposes unless explicit permission
- * is given by the original author. Derivative works must be licensed using the
- * same or similar license.
- * Check http://creativecommons.org/licenses/by-nc-sa/3.0/ to obtain further
- * details.
- *
- */
-
-#ifndef __D_T_DBOW3__
-#define __D_T_DBOW3__
-
-/// Includes all the data structures to manage vocabularies and image databases
-
-#include "Vocabulary.h"
-#include "Database.h"
-#include "BowVector.h"
-#include "FeatureVector.h"
-#include "QueryResults.h"
-
-
-
-#endif
-
@@ -1,855 +0,0 @@
-#include "Database.h"
-
-#include <unordered_map>
-
-namespace DBoW3 {
-
-// For query functions
-static int MIN_COMMON_WORDS = 5;
-
-// --------------------------------------------------------------------------
-
-Database::Database(bool use_di, int di_levels)
-    : m_voc(NULL), m_use_di(use_di), m_dilevels(di_levels), m_nentries(0) {}
-
-// --------------------------------------------------------------------------
-
-Database::Database(const Vocabulary &voc, bool use_di, int di_levels)
-    : m_voc(NULL), m_use_di(use_di), m_dilevels(di_levels) {
-  setVocabulary(voc);
-  clear();
-}
-
-// --------------------------------------------------------------------------
-
-Database::Database(const Database &db) : m_voc(NULL) { *this = db; }
-
-// --------------------------------------------------------------------------
-
-Database::Database(const std::string &filename) : m_voc(NULL) {
-  load(filename);
-}
-
-// --------------------------------------------------------------------------
-
-Database::Database(const char *filename) : m_voc(NULL) { load(filename); }
-
-// --------------------------------------------------------------------------
-
-Database::~Database(void) { delete m_voc; }
-
-// --------------------------------------------------------------------------
-
-Database &Database::operator=(const Database &db) {
-  if (this != &db) {
-    m_dfile = db.m_dfile;
-    m_dilevels = db.m_dilevels;
-    m_ifile = db.m_ifile;
-    m_nentries = db.m_nentries;
-    m_use_di = db.m_use_di;
-    if (db.m_voc != 0) setVocabulary(*db.m_voc);
-  }
-  return *this;
-}
-
-// --------------------------------------------------------------------------
-
-EntryId Database::add(const cv::Mat &features, BowVector *bowvec,
-                      FeatureVector *fvec) {
-  std::vector<cv::Mat> vf(features.rows);
-  for (int r = 0; r < features.rows; r++) vf[r] = features.rowRange(r, r + 1);
-  return add(vf, bowvec, fvec);
-}
-
-EntryId Database::add(const std::vector<cv::Mat> &features, BowVector *bowvec,
-                      FeatureVector *fvec) {
-  BowVector aux;
-  BowVector &v = (bowvec ? *bowvec : aux);
-
-  if (m_use_di && fvec != NULL) {
-    m_voc->transform(features, v, *fvec, m_dilevels);  // with features
-    return add(v, *fvec);
-  } else if (m_use_di) {
-    FeatureVector fv;
-    m_voc->transform(features, v, fv, m_dilevels);  // with features
-    return add(v, fv);
-  } else if (fvec != NULL) {
-    m_voc->transform(features, v, *fvec, m_dilevels);  // with features
-    return add(v);
-  } else {
-    m_voc->transform(features, v);  // with features
-    return add(v);
-  }
-}
-
-EntryId Database::add(const std::vector<std::bitset<256>> &features,
-                      BowVector *bowvec, FeatureVector *fvec) {
-  BowVector aux;
-  BowVector &v = (bowvec ? *bowvec : aux);
-
-  if (m_use_di && fvec != NULL) {
-    m_voc->transform(features, v, *fvec, m_dilevels);  // with features
-    return add(v, *fvec);
-  } else if (m_use_di) {
-    FeatureVector fv;
-    m_voc->transform(features, v, fv, m_dilevels);  // with features
-    return add(v, fv);
-  } else if (fvec != NULL) {
-    m_voc->transform(features, v, *fvec, m_dilevels);  // with features
-    return add(v);
-  } else {
-    m_voc->transform(features, v);  // with features
-    return add(v);
-  }
-}
-
-// ---------------------------------------------------------------------------
-
-EntryId Database::add(const BowVector &v, const FeatureVector &fv) {
-  EntryId entry_id = m_nentries++;
-
-  BowVector::const_iterator vit;
-  // std::vector<unsigned int>::const_iterator iit;
-
-  if (m_use_di) {
-    // update direct file
-    if (entry_id == m_dfile.size()) {
-      m_dfile.push_back(fv);
-    } else {
-      m_dfile[entry_id] = fv;
-    }
-  }
-
-  // update inverted file
-  for (vit = v.begin(); vit != v.end(); ++vit) {
-    const WordId &word_id = vit->first;
-    const WordValue &word_weight = vit->second;
-
-    IFRow &ifrow = m_ifile[word_id];
-    ifrow.push_back(IFPair(entry_id, word_weight));
-  }
-
-  return entry_id;
-}
-
-// --------------------------------------------------------------------------
-
-void Database::setVocabulary(const Vocabulary &voc) {
-  delete m_voc;
-  m_voc = new Vocabulary(voc);
-  clear();
-}
-
-// --------------------------------------------------------------------------
-
-void Database::setVocabulary(const Vocabulary &voc, bool use_di,
-                             int di_levels) {
-  m_use_di = use_di;
-  m_dilevels = di_levels;
-  delete m_voc;
-  m_voc = new Vocabulary(voc);
-  clear();
-}
-
-// --------------------------------------------------------------------------
-
-void Database::clear() {
-  // resize vectors
-  m_ifile.resize(0);
-  m_ifile.resize(m_voc->size());
-  m_dfile.resize(0);
-  m_nentries = 0;
-}
-
-// --------------------------------------------------------------------------
-
-void Database::allocate(int nd, int ni) {
-  // m_ifile already contains |words| items
-  if (ni > 0) {
-    for (auto rit = m_ifile.begin(); rit != m_ifile.end(); ++rit) {
-      int n = (int)rit->size();
-      if (ni > n) {
-        rit->resize(ni);
-        rit->resize(n);
-      }
-    }
-  }
-
-  if (m_use_di && (int)m_dfile.size() < nd) {
-    m_dfile.resize(nd);
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void Database::query(const cv::Mat &features, QueryResults &ret,
-                     int max_results, int max_id) const {
-  std::vector<cv::Mat> vf(features.rows);
-  for (int r = 0; r < features.rows; r++) vf[r] = features.rowRange(r, r + 1);
-  query(vf, ret, max_results, max_id);
-}
-
-void Database::query(const std::vector<cv::Mat> &features, QueryResults &ret,
-                     int max_results, int max_id) const {
-  BowVector vec;
-  m_voc->transform(features, vec);
-  query(vec, ret, max_results, max_id);
-}
-
-void Database::query(const std::vector<std::bitset<256>> &features,
-                     QueryResults &ret, int max_results, int max_id) const {
-  BowVector vec;
-  m_voc->transform(features, vec);
-  query(vec, ret, max_results, max_id);
-}
-
-// --------------------------------------------------------------------------
-
-void Database::query(const BowVector &vec, QueryResults &ret, int max_results,
-                     int max_id) const {
-  ret.resize(0);
-
-  switch (m_voc->getScoringType()) {
-    case L1_NORM:
-      queryL1(vec, ret, max_results, max_id);
-      break;
-
-    case L2_NORM:
-      queryL2(vec, ret, max_results, max_id);
-      break;
-
-    case CHI_SQUARE:
-      queryChiSquare(vec, ret, max_results, max_id);
-      break;
-
-    case KL:
-      queryKL(vec, ret, max_results, max_id);
-      break;
-
-    case BHATTACHARYYA:
-      queryBhattacharyya(vec, ret, max_results, max_id);
-      break;
-
-    case DOT_PRODUCT:
-      queryDotProduct(vec, ret, max_results, max_id);
-      break;
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void Database::queryL1(const BowVector &vec, QueryResults &ret, int max_results,
-                       int max_id) const {
-  BowVector::const_iterator vit;
-
-  std::unordered_map<EntryId, double> pairs;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &qvalue = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (const auto &rit : row) {
-      const EntryId &entry_id = rit.entry_id;
-      const WordValue &dvalue = rit.word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        double value = fabs(qvalue - dvalue) - fabs(qvalue) - fabs(dvalue);
-
-        auto it = pairs.find(entry_id);
-        if (it != pairs.end()) {
-          it->second += value;
-        } else {
-          pairs.emplace(entry_id, value);
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // move to vector
-  ret.reserve(pairs.size());
-  for (const auto &pit : pairs) {
-    ret.push_back(Result(pit.first, pit.second));
-  }
-
-  // resulting "scores" are now in [-2 best .. 0 worst]
-
-  // sort vector in ascending order of score
-  std::sort(ret.begin(), ret.end());
-  // (ret is inverted now --the lower the better--)
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-
-  // complete and scale score to [0 worst .. 1 best]
-  // ||v - w||_{L1} = 2 + Sum(|v_i - w_i| - |v_i| - |w_i|)
-  //   for all i | v_i != 0 and w_i != 0
-  // (Nister, 2006)
-  // scaled_||v - w||_{L1} = 1 - 0.5 * ||v - w||_{L1}
-  QueryResults::iterator qit;
-  for (qit = ret.begin(); qit != ret.end(); qit++)
-    qit->Score = -qit->Score / 2.0;
-}
-
-// --------------------------------------------------------------------------
-
-void Database::queryL2(const BowVector &vec, QueryResults &ret, int max_results,
-                       int max_id) const {
-  BowVector::const_iterator vit;
-
-  std::map<EntryId, double> pairs;
-  std::map<EntryId, double>::iterator pit;
-
-  // map<EntryId, int> counters;
-  // map<EntryId, int>::iterator cit;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &qvalue = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (auto rit = row.begin(); rit != row.end(); ++rit) {
-      const EntryId entry_id = rit->entry_id;
-      const WordValue &dvalue = rit->word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        double value = -qvalue * dvalue;  // minus sign for sorting trick
-
-        pit = pairs.lower_bound(entry_id);
-        // cit = counters.lower_bound(entry_id);
-        if (pit != pairs.end() && !(pairs.key_comp()(entry_id, pit->first))) {
-          pit->second += value;
-          // cit->second += 1;
-        } else {
-          pairs.insert(pit,
-                       std::map<EntryId, double>::value_type(entry_id, value));
-
-          // counters.insert(cit,
-          //                 map<EntryId, int>::value_type(entry_id, 1));
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // move to vector
-  ret.reserve(pairs.size());
-  // cit = counters.begin();
-  for (pit = pairs.begin(); pit != pairs.end(); ++pit)  //, ++cit)
-  {
-    ret.push_back(Result(pit->first, pit->second));  // / cit->second));
-  }
-
-  // resulting "scores" are now in [-1 best .. 0 worst]
-
-  // sort vector in ascending order of score
-  std::sort(ret.begin(), ret.end());
-  // (ret is inverted now --the lower the better--)
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-
-  // complete and scale score to [0 worst .. 1 best]
-  // ||v - w||_{L2} = sqrt( 2 - 2 * Sum(v_i * w_i)
-  //   for all i | v_i != 0 and w_i != 0 )
-  // (Nister, 2006)
-  QueryResults::iterator qit;
-  for (qit = ret.begin(); qit != ret.end(); qit++) {
-    if (qit->Score <= -1.0)  // rounding error
-      qit->Score = 1.0;
-    else
-      qit->Score = 1.0 - sqrt(1.0 + qit->Score);  // [0..1]
-    // the + sign is ok, it is due to - sign in
-    // value = - qvalue * dvalue
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void Database::queryChiSquare(const BowVector &vec, QueryResults &ret,
-                              int max_results, int max_id) const {
-  BowVector::const_iterator vit;
-
-  std::map<EntryId, std::pair<double, int>> pairs;
-  std::map<EntryId, std::pair<double, int>>::iterator pit;
-
-  std::map<EntryId, std::pair<double, double>> sums;  // < sum vi, sum wi >
-  std::map<EntryId, std::pair<double, double>>::iterator sit;
-
-  // In the current implementation, we suppose vec is not normalized
-
-  // map<EntryId, double> expected;
-  // map<EntryId, double>::iterator eit;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &qvalue = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (auto rit = row.begin(); rit != row.end(); ++rit) {
-      const EntryId entry_id = rit->entry_id;
-      const WordValue &dvalue = rit->word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        // (v-w)^2/(v+w) - v - w = -4 vw/(v+w)
-        // we move the 4 out
-        double value = 0;
-        if (qvalue + dvalue != 0.0)  // words may have weight zero
-          value = -qvalue * dvalue / (qvalue + dvalue);
-
-        pit = pairs.lower_bound(entry_id);
-        sit = sums.lower_bound(entry_id);
-        // eit = expected.lower_bound(entry_id);
-        if (pit != pairs.end() && !(pairs.key_comp()(entry_id, pit->first))) {
-          pit->second.first += value;
-          pit->second.second += 1;
-          // eit->second += dvalue;
-          sit->second.first += qvalue;
-          sit->second.second += dvalue;
-        } else {
-          pairs.insert(pit,
-                       std::map<EntryId, std::pair<double, int>>::value_type(
-                           entry_id, std::make_pair(value, 1)));
-          // expected.insert(eit,
-          //                 map<EntryId, double>::value_type(entry_id, dvalue));
-
-          sums.insert(sit,
-                      std::map<EntryId, std::pair<double, double>>::value_type(
-                          entry_id, std::make_pair(qvalue, dvalue)));
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // move to vector
-  ret.reserve(pairs.size());
-  sit = sums.begin();
-  for (pit = pairs.begin(); pit != pairs.end(); ++pit, ++sit) {
-    if (pit->second.second >= MIN_COMMON_WORDS) {
-      ret.push_back(Result(pit->first, pit->second.first));
-      ret.back().nWords = pit->second.second;
-      ret.back().sumCommonVi = sit->second.first;
-      ret.back().sumCommonWi = sit->second.second;
-      ret.back().expectedChiScore =
-          2 * sit->second.second / (1 + sit->second.second);
-    }
-
-    // ret.push_back(Result(pit->first, pit->second));
-  }
-
-  // resulting "scores" are now in [-2 best .. 0 worst]
-  // we have to add +2 to the scores to obtain the chi square score
-
-  // sort vector in ascending order of score
-  std::sort(ret.begin(), ret.end());
-  // (ret is inverted now --the lower the better--)
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-
-  // complete and scale score to [0 worst .. 1 best]
-  QueryResults::iterator qit;
-  for (qit = ret.begin(); qit != ret.end(); qit++) {
-    // this takes the 4 into account
-    qit->Score = -2. * qit->Score;  // [0..1]
-
-    qit->chiScore = qit->Score;
-  }
-}
-
-// --------------------------------------------------------------------------
-
-void Database::queryKL(const BowVector &vec, QueryResults &ret, int max_results,
-                       int max_id) const {
-  BowVector::const_iterator vit;
-
-  std::map<EntryId, double> pairs;
-  std::map<EntryId, double>::iterator pit;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &vi = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (auto rit = row.begin(); rit != row.end(); ++rit) {
-      const EntryId entry_id = rit->entry_id;
-      const WordValue &wi = rit->word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        double value = 0;
-        if (vi != 0 && wi != 0) value = vi * log(vi / wi);
-
-        pit = pairs.lower_bound(entry_id);
-        if (pit != pairs.end() && !(pairs.key_comp()(entry_id, pit->first))) {
-          pit->second += value;
-        } else {
-          pairs.insert(pit,
-                       std::map<EntryId, double>::value_type(entry_id, value));
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // resulting "scores" are now in [-X worst .. 0 best .. X worst]
-  // but we cannot make sure which ones are better without calculating
-  // the complete score
-
-  // complete scores and move to vector
-  ret.reserve(pairs.size());
-  for (pit = pairs.begin(); pit != pairs.end(); ++pit) {
-    EntryId eid = pit->first;
-    double value = 0.0;
-
-    for (vit = vec.begin(); vit != vec.end(); ++vit) {
-      const WordValue &vi = vit->second;
-      const IFRow &row = m_ifile[vit->first];
-
-      if (vi != 0) {
-        if (row.end() == find(row.begin(), row.end(), eid)) {
-          value += vi * (log(vi) - GeneralScoring::LOG_EPS);
-        }
-      }
-    }
-
-    pit->second += value;
-
-    // to vector
-    ret.push_back(Result(pit->first, pit->second));
-  }
-
-  // real scores are now in [0 best .. X worst]
-
-  // sort vector in ascending order
-  // (scores are inverted now --the lower the better--)
-  std::sort(ret.begin(), ret.end());
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-
-  // cannot scale scores
-}
-
-// --------------------------------------------------------------------------
-
-void Database::queryBhattacharyya(const BowVector &vec, QueryResults &ret,
-                                  int max_results, int max_id) const {
-  BowVector::const_iterator vit;
-
-  // map<EntryId, double> pairs;
-  // map<EntryId, double>::iterator pit;
-
-  std::map<EntryId, std::pair<double, int>> pairs;  // <eid, <score, counter> >
-  std::map<EntryId, std::pair<double, int>>::iterator pit;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &qvalue = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (auto rit = row.begin(); rit != row.end(); ++rit) {
-      const EntryId entry_id = rit->entry_id;
-      const WordValue &dvalue = rit->word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        double value = sqrt(qvalue * dvalue);
-
-        pit = pairs.lower_bound(entry_id);
-        if (pit != pairs.end() && !(pairs.key_comp()(entry_id, pit->first))) {
-          pit->second.first += value;
-          pit->second.second += 1;
-        } else {
-          pairs.insert(pit,
-                       std::map<EntryId, std::pair<double, int>>::value_type(
-                           entry_id, std::make_pair(value, 1)));
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // move to vector
-  ret.reserve(pairs.size());
-  for (pit = pairs.begin(); pit != pairs.end(); ++pit) {
-    if (pit->second.second >= MIN_COMMON_WORDS) {
-      ret.push_back(Result(pit->first, pit->second.first));
-      ret.back().nWords = pit->second.second;
-      ret.back().bhatScore = pit->second.first;
-    }
-  }
-
-  // scores are already in [0..1]
-
-  // sort vector in descending order
-  std::sort(ret.begin(), ret.end(), Result::gt);
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-}
-
-// ---------------------------------------------------------------------------
-
-void Database::queryDotProduct(const BowVector &vec, QueryResults &ret,
-                               int max_results, int max_id) const {
-  BowVector::const_iterator vit;
-
-  std::map<EntryId, double> pairs;
-  std::map<EntryId, double>::iterator pit;
-
-  for (vit = vec.begin(); vit != vec.end(); ++vit) {
-    const WordId word_id = vit->first;
-    const WordValue &qvalue = vit->second;
-
-    const IFRow &row = m_ifile[word_id];
-
-    // IFRows are sorted in ascending entry_id order
-
-    for (auto rit = row.begin(); rit != row.end(); ++rit) {
-      const EntryId entry_id = rit->entry_id;
-      const WordValue &dvalue = rit->word_weight;
-
-      if ((int)entry_id < max_id || max_id == -1) {
-        double value;
-        if (this->m_voc->getWeightingType() == BINARY)
-          value = 1;
-        else
-          value = qvalue * dvalue;
-
-        pit = pairs.lower_bound(entry_id);
-        if (pit != pairs.end() && !(pairs.key_comp()(entry_id, pit->first))) {
-          pit->second += value;
-        } else {
-          pairs.insert(pit,
-                       std::map<EntryId, double>::value_type(entry_id, value));
-        }
-      }
-
-    }  // for each inverted row
-  }    // for each query word
-
-  // move to vector
-  ret.reserve(pairs.size());
-  for (pit = pairs.begin(); pit != pairs.end(); ++pit) {
-    ret.push_back(Result(pit->first, pit->second));
-  }
-
-  // scores are the greater the better
-
-  // sort vector in descending order
-  std::sort(ret.begin(), ret.end(), Result::gt);
-
-  // cut vector
-  if (max_results > 0 && (int)ret.size() > max_results) ret.resize(max_results);
-
-  // these scores cannot be scaled
-}
-
-// ---------------------------------------------------------------------------
-
-const FeatureVector &Database::retrieveFeatures(EntryId id) const {
-  assert(id < size());
-  return m_dfile[id];
-}
-
-// --------------------------------------------------------------------------
-
-void Database::save(const std::string &filename) const {
-  cv::FileStorage fs(filename.c_str(), cv::FileStorage::WRITE);
-  if (!fs.isOpened()) throw std::string("Could not open file ") + filename;
-
-  save(fs);
-}
-
-// --------------------------------------------------------------------------
-
-void Database::save(cv::FileStorage &fs, const std::string &name) const {
-  // Format YAML:
-  // vocabulary { ... see TemplatedVocabulary::save }
-  // database
-  // {
-  //   nEntries:
-  //   usingDI:
-  //   diLevels:
-  //   invertedIndex
-  //   [
-  //     [
-  //       {
-  //         imageId:
-  //         weight:
-  //       }
-  //     ]
-  //   ]
-  //   directIndex
-  //   [
-  //     [
-  //       {
-  //         nodeId:
-  //         features: [ ]
-  //       }
-  //     ]
-  //   ]
-  //
-  // invertedIndex[i] is for the i-th word
-  // directIndex[i] is for the i-th entry
-  // directIndex may be empty if not using direct index
-  //
-  // imageId's and nodeId's must be stored in ascending order
-  // (according to the construction of the indexes)
|
||||
|
||||
m_voc->save(fs);
|
||||
|
||||
fs << name << "{";
|
||||
|
||||
fs << "nEntries" << m_nentries;
|
||||
fs << "usingDI" << (m_use_di ? 1 : 0);
|
||||
fs << "diLevels" << m_dilevels;
|
||||
|
||||
fs << "invertedIndex"
|
||||
<< "[";
|
||||
|
||||
for (auto iit = m_ifile.begin(); iit != m_ifile.end(); ++iit) {
|
||||
fs << "["; // word of IF
|
||||
for (auto irit = iit->begin(); irit != iit->end(); ++irit) {
|
||||
fs << "{:"
|
||||
<< "imageId" << (int)irit->entry_id << "weight" << irit->word_weight
|
||||
<< "}";
|
||||
}
|
||||
fs << "]"; // word of IF
|
||||
}
|
||||
|
||||
fs << "]"; // invertedIndex
|
||||
|
||||
fs << "directIndex"
|
||||
<< "[";
|
||||
|
||||
for (auto dit = m_dfile.begin(); dit != m_dfile.end(); ++dit) {
|
||||
fs << "["; // entry of DF
|
||||
|
||||
for (auto drit = dit->begin(); drit != dit->end(); ++drit) {
|
||||
NodeId nid = drit->first;
|
||||
const std::vector<unsigned int> &features = drit->second;
|
||||
|
||||
// save info of last_nid
|
||||
fs << "{";
|
||||
fs << "nodeId" << (int)nid;
|
||||
// msvc++ 2010 with opencv 2.3.1 does not allow FileStorage::operator<<
|
||||
// with vectors of unsigned int
|
||||
fs << "features"
|
||||
<< "[" << *(const std::vector<int> *)(&features) << "]";
|
||||
fs << "}";
|
||||
}
|
||||
|
||||
fs << "]"; // entry of DF
|
||||
}
|
||||
|
||||
fs << "]"; // directIndex
|
||||
|
||||
fs << "}"; // database
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
void Database::load(const std::string &filename) {
|
||||
cv::FileStorage fs(filename.c_str(), cv::FileStorage::READ);
|
||||
if (!fs.isOpened()) throw std::string("Could not open file ") + filename;
|
||||
|
||||
load(fs);
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
|
||||
void Database::load(const cv::FileStorage &fs, const std::string &name) {
|
||||
// load voc first
|
||||
// subclasses must instantiate m_voc before calling this ::load
|
||||
if (!m_voc) m_voc = new Vocabulary;
|
||||
|
||||
m_voc->load(fs);
|
||||
|
||||
// load database now
|
||||
clear(); // resizes inverted file
|
||||
|
||||
cv::FileNode fdb = fs[name];
|
||||
|
||||
m_nentries = (int)fdb["nEntries"];
|
||||
m_use_di = (int)fdb["usingDI"] != 0;
|
||||
m_dilevels = (int)fdb["diLevels"];
|
||||
|
||||
cv::FileNode fn = fdb["invertedIndex"];
|
||||
for (WordId wid = 0; wid < fn.size(); ++wid) {
|
||||
cv::FileNode fw = fn[wid];
|
||||
|
||||
for (unsigned int i = 0; i < fw.size(); ++i) {
|
||||
EntryId eid = (int)fw[i]["imageId"];
|
||||
WordValue v = fw[i]["weight"];
|
||||
|
||||
m_ifile[wid].push_back(IFPair(eid, v));
|
||||
}
|
||||
}
|
||||
|
||||
if (m_use_di) {
|
||||
fn = fdb["directIndex"];
|
||||
|
||||
m_dfile.resize(fn.size());
|
||||
assert(m_nentries == (int)fn.size());
|
||||
|
||||
FeatureVector::iterator dit;
|
||||
for (EntryId eid = 0; eid < fn.size(); ++eid) {
|
||||
cv::FileNode fe = fn[eid];
|
||||
|
||||
m_dfile[eid].clear();
|
||||
for (unsigned int i = 0; i < fe.size(); ++i) {
|
||||
NodeId nid = (int)fe[i]["nodeId"];
|
||||
|
||||
dit = m_dfile[eid].insert(m_dfile[eid].end(),
|
||||
make_pair(nid, std::vector<unsigned int>()));
|
||||
|
||||
// this failed to compile with some opencv versions (2.3.1)
|
||||
// fe[i]["features"] >> dit->second;
|
||||
|
||||
// this was ok until OpenCV 2.4.1
|
||||
// std::vector<int> aux;
|
||||
// fe[i]["features"] >> aux; // OpenCV < 2.4.1
|
||||
// dit->second.resize(aux.size());
|
||||
// std::copy(aux.begin(), aux.end(), dit->second.begin());
|
||||
|
||||
cv::FileNode ff = fe[i]["features"][0];
|
||||
dit->second.reserve(ff.size());
|
||||
|
||||
cv::FileNodeIterator ffit;
|
||||
for (ffit = ff.begin(); ffit != ff.end(); ++ffit) {
|
||||
dit->second.push_back((int)*ffit);
|
||||
}
|
||||
}
|
||||
} // for each entry
|
||||
} // if use_id
|
||||
}
|
||||
|
||||
std::ostream &operator<<(std::ostream &os, const Database &db) {
|
||||
os << "Database: Entries = " << db.size() << ", "
|
||||
"Using direct index = "
|
||||
<< (db.usingDirectIndex() ? "yes" : "no");
|
||||
|
||||
if (db.usingDirectIndex())
|
||||
os << ", Direct index levels = " << db.getDirectIndexLevels();
|
||||
|
||||
os << ". " << *db.getVocabulary();
|
||||
return os;
|
||||
}
|
||||
} // namespace DBoW3
|
|
@@ -1,354 +0,0 @@
/**
 * File: Database.h
 * Date: March 2011
 * Modified By Rafael Muñoz in 2016
 * Author: Dorian Galvez-Lopez
 * Description: database of images
 * License: see the LICENSE.txt file
 *
 */

#ifndef __D_T_DATABASE__
#define __D_T_DATABASE__

#include <bitset>
#include <fstream>
#include <list>
#include <numeric>
#include <set>
#include <string>
#include <vector>

#include "BowVector.h"
#include "FeatureVector.h"
#include "QueryResults.h"
#include "ScoringObject.h"
#include "Vocabulary.h"
#include "exports.h"

namespace DBoW3 {

/// Database
class DBOW_API Database {
 public:
  /**
   * Creates an empty database without vocabulary
   * @param use_di a direct index is used to store feature indexes
   * @param di_levels levels to go up the vocabulary tree to select the
   *   node id to store in the direct index when adding images
   */
  explicit Database(bool use_di = true, int di_levels = 0);

  /**
   * Creates a database with the given vocabulary
   * @param T class inherited from Vocabulary
   * @param voc vocabulary
   * @param use_di a direct index is used to store feature indexes
   * @param di_levels levels to go up the vocabulary tree to select the
   *   node id to store in the direct index when adding images
   */
  explicit Database(const Vocabulary &voc, bool use_di = true,
                    int di_levels = 0);

  /**
   * Copy constructor. Copies the vocabulary too
   * @param db object to copy
   */
  Database(const Database &db);

  /**
   * Creates the database from a file
   * @param filename
   */
  Database(const std::string &filename);

  /**
   * Creates the database from a file
   * @param filename
   */
  Database(const char *filename);

  /**
   * Destructor
   */
  virtual ~Database(void);

  /**
   * Copies the given database and its vocabulary
   * @param db database to copy
   */
  Database &operator=(const Database &db);

  /**
   * Sets the vocabulary to use and clears the content of the database.
   * @param T class inherited from Vocabulary
   * @param voc vocabulary to copy
   */
  void setVocabulary(const Vocabulary &voc);

  /**
   * Sets the vocabulary to use and the direct index parameters, and clears
   * the content of the database
   * @param T class inherited from Vocabulary
   * @param voc vocabulary to copy
   * @param use_di a direct index is used to store feature indexes
   * @param di_levels levels to go up the vocabulary tree to select the
   *   node id to store in the direct index when adding images
   */
  void setVocabulary(const Vocabulary &voc, bool use_di, int di_levels = 0);

  /**
   * Returns a pointer to the vocabulary used
   * @return vocabulary
   */
  inline const Vocabulary *getVocabulary() const { return m_voc; }

  /**
   * Allocates some memory for the direct and inverted indexes
   * @param nd number of expected image entries in the database
   * @param ni number of expected words per image
   * @note Use 0 to ignore a parameter
   */
  void allocate(int nd = 0, int ni = 0);

  /**
   * Adds an entry to the database and returns its index
   * @param features features of the new entry
   * @param bowvec if given, the bow vector of these features is returned
   * @param fvec if given, the vector of nodes and feature indexes is returned
   * @return id of new entry
   */
  EntryId add(const std::vector<std::bitset<256>> &features,
              BowVector *bowvec = NULL, FeatureVector *fvec = NULL);

  /**
   * Adds an entry to the database and returns its index
   * @param features features of the new entry
   * @param bowvec if given, the bow vector of these features is returned
   * @param fvec if given, the vector of nodes and feature indexes is returned
   * @return id of new entry
   */
  EntryId add(const std::vector<cv::Mat> &features, BowVector *bowvec = NULL,
              FeatureVector *fvec = NULL);

  /**
   * Adds an entry to the database and returns its index
   * @param features features of the new entry, one per row
   * @param bowvec if given, the bow vector of these features is returned
   * @param fvec if given, the vector of nodes and feature indexes is returned
   * @return id of new entry
   */
  EntryId add(const cv::Mat &features, BowVector *bowvec = NULL,
              FeatureVector *fvec = NULL);

  /**
   * Adds an entry to the database and returns its index
   * @param vec bow vector
   * @param fec feature vector to add the entry. Only necessary if using the
   *   direct index
   * @return id of new entry
   */
  EntryId add(const BowVector &vec, const FeatureVector &fec = FeatureVector());

  /**
   * Empties the database
   */
  inline void clear();

  /**
   * Returns the number of entries in the database
   * @return number of entries in the database
   */
  unsigned int size() const { return m_nentries; }

  /**
   * Checks if the direct index is being used
   * @return true iff using direct index
   */
  bool usingDirectIndex() const { return m_use_di; }

  /**
   * Returns the di levels when using direct index
   * @return di levels
   */
  int getDirectIndexLevels() const { return m_dilevels; }

  /**
   * Queries the database with some features
   * @param features query features
   * @param ret (out) query results
   * @param max_results number of results to return. <= 0 means all
   * @param max_id only entries with id <= max_id are returned in ret.
   *   < 0 means all
   */
  void query(const std::vector<cv::Mat> &features, QueryResults &ret,
             int max_results = 1, int max_id = -1) const;

  void query(const std::vector<std::bitset<256>> &features, QueryResults &ret,
             int max_results = 1, int max_id = -1) const;

  /**
   * Queries the database with some features
   * @param features query features, one per row
   * @param ret (out) query results
   * @param max_results number of results to return. <= 0 means all
   * @param max_id only entries with id <= max_id are returned in ret.
   *   < 0 means all
   */
  void query(const cv::Mat &features, QueryResults &ret, int max_results = 1,
             int max_id = -1) const;

  /**
   * Queries the database with a vector
   * @param vec bow vector already normalized
   * @param ret results
   * @param max_results number of results to return. <= 0 means all
   * @param max_id only entries with id <= max_id are returned in ret.
   *   < 0 means all
   */
  void query(const BowVector &vec, QueryResults &ret, int max_results = 1,
             int max_id = -1) const;

  /**
   * Returns the feature vector associated with a database entry
   * @param id entry id (must be < size())
   * @return const reference to map of nodes and their associated features in
   *   the given entry
   */
  const FeatureVector &retrieveFeatures(EntryId id) const;

  /**
   * Stores the database in a file
   * @param filename
   */
  void save(const std::string &filename) const;

  /**
   * Loads the database from a file
   * @param filename
   */
  void load(const std::string &filename);

  /**
   * Stores the database in the given file storage structure
   * @param fs
   * @param name node name
   */
  virtual void save(cv::FileStorage &fs,
                    const std::string &name = "database") const;

  /**
   * Loads the database from the given file storage structure
   * @param fs
   * @param name node name
   */
  virtual void load(const cv::FileStorage &fs,
                    const std::string &name = "database");

  // --------------------------------------------------------------------------

  /**
   * Writes printable information of the database
   * @param os stream to write to
   * @param db
   */
  DBOW_API friend std::ostream &operator<<(std::ostream &os,
                                           const Database &db);

 protected:
  /// Query with L1 scoring
  void queryL1(const BowVector &vec, QueryResults &ret, int max_results,
               int max_id) const;

  /// Query with L2 scoring
  void queryL2(const BowVector &vec, QueryResults &ret, int max_results,
               int max_id) const;

  /// Query with Chi square scoring
  void queryChiSquare(const BowVector &vec, QueryResults &ret, int max_results,
                      int max_id) const;

  /// Query with Bhattacharyya scoring
  void queryBhattacharyya(const BowVector &vec, QueryResults &ret,
                          int max_results, int max_id) const;

  /// Query with KL divergence scoring
  void queryKL(const BowVector &vec, QueryResults &ret, int max_results,
               int max_id) const;

  /// Query with dot product scoring
  void queryDotProduct(const BowVector &vec, QueryResults &ret, int max_results,
                       int max_id) const;

 protected:
  /* Inverted file declaration */

  /// Item of IFRow
  struct IFPair {
    /// Entry id
    EntryId entry_id;

    /// Word weight in this entry
    WordValue word_weight;

    /**
     * Creates an empty pair
     */
    IFPair() {}

    /**
     * Creates an inverted file pair
     * @param eid entry id
     * @param wv word weight
     */
    IFPair(EntryId eid, WordValue wv) : entry_id(eid), word_weight(wv) {}

    /**
     * Compares the entry ids
     * @param eid
     * @return true iff this entry id is the same as eid
     */
    inline bool operator==(EntryId eid) const { return entry_id == eid; }
  };

  /// Row of InvertedFile
  typedef std::vector<IFPair> IFRow;
  // IFRows are sorted in ascending entry_id order

  /// Inverted index
  typedef std::vector<IFRow> InvertedFile;
  // InvertedFile[word_id] --> inverted file of that word

  /* Direct file declaration */

  /// Direct index
  typedef std::vector<FeatureVector> DirectFile;
  // DirectFile[entry_id] --> [ directentry, ... ]

 protected:
  /// Associated vocabulary
  Vocabulary *m_voc;

  /// Flag to use direct index
  bool m_use_di;

  /// Levels to go up the vocabulary tree to select nodes to store
  /// in the direct index
  int m_dilevels;

  /// Inverted file (must have size() == |words|)
  InvertedFile m_ifile;

  /// Direct file (resized for allocation)
  DirectFile m_dfile;

  /// Number of valid entries in m_dfile
  int m_nentries;
};

// --------------------------------------------------------------------------

}  // namespace DBoW3

#endif
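For context, a minimal usage sketch of the removed `DBoW3::Database` API declared above. Illustration only, not part of this commit; the vocabulary path and the descriptor source are hypothetical.

```
// Illustration only -- assumes the DBoW3 headers above and some trained
// vocabulary file on disk (the path below is hypothetical).
#include "Database.h"

void place_recognition_example(const std::vector<cv::Mat> &descriptors) {
  DBoW3::Vocabulary voc("orbvoc.dbow3");       // load a trained vocabulary
  DBoW3::Database db(voc, /*use_di=*/false, /*di_levels=*/0);

  DBoW3::EntryId id = db.add(descriptors);     // index one image
  (void)id;

  DBoW3::QueryResults results;
  db.query(descriptors, results, /*max_results=*/4);  // top-4 similar entries
}
```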
@@ -1,239 +0,0 @@
/**
 * File: DescManip.cpp
 * Date: June 2012
 * Author: Dorian Galvez-Lopez
 * Description: functions for ORB descriptors
 * License: see the LICENSE.txt file
 *
 */

#include <limits.h>
#include <stdint.h>
#include <cstring>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include "DescManip.h"

using namespace std;

namespace DBoW3 {

// --------------------------------------------------------------------------

void DescManip::meanValue(const std::vector<cv::Mat> &descriptors,
                          cv::Mat &mean) {
  if (descriptors.empty()) return;

  if (descriptors.size() == 1) {
    mean = descriptors[0].clone();
    return;
  }
  // binary descriptor
  if (descriptors[0].type() == CV_8U) {
    // determine number of bytes of the binary descriptor
    int L = getDescSizeBytes(descriptors[0]);
    vector<int> sum(L * 8, 0);

    for (size_t i = 0; i < descriptors.size(); ++i) {
      const cv::Mat &d = descriptors[i];
      const unsigned char *p = d.ptr<unsigned char>();

      for (int j = 0; j < d.cols; ++j, ++p) {
        if (*p & (1 << 7)) ++sum[j * 8];
        if (*p & (1 << 6)) ++sum[j * 8 + 1];
        if (*p & (1 << 5)) ++sum[j * 8 + 2];
        if (*p & (1 << 4)) ++sum[j * 8 + 3];
        if (*p & (1 << 3)) ++sum[j * 8 + 4];
        if (*p & (1 << 2)) ++sum[j * 8 + 5];
        if (*p & (1 << 1)) ++sum[j * 8 + 6];
        if (*p & (1)) ++sum[j * 8 + 7];
      }
    }

    mean = cv::Mat::zeros(1, L, CV_8U);
    unsigned char *p = mean.ptr<unsigned char>();

    const int N2 = (int)descriptors.size() / 2 + descriptors.size() % 2;
    for (size_t i = 0; i < sum.size(); ++i) {
      if (sum[i] >= N2) {
        // set bit
        *p |= 1 << (7 - (i % 8));
      }

      if (i % 8 == 7) ++p;
    }
  }
  // non binary descriptor
  else {
    assert(descriptors[0].type() == CV_32F);  // ensure it is float

    mean.create(1, descriptors[0].cols, descriptors[0].type());
    mean.setTo(cv::Scalar::all(0));
    float inv_s = 1. / double(descriptors.size());
    for (size_t i = 0; i < descriptors.size(); i++)
      mean += descriptors[i] * inv_s;
  }
}

// --------------------------------------------------------------------------
// static inline uint32_t distance_8uc1(const cv::Mat &a, const cv::Mat &b);

double DescManip::distance(const cv::Mat &a, const cv::Mat &b) {
  // binary descriptor
  if (a.type() == CV_8U) {
    // Bit count function got from:
    // http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetKernighan
    // This implementation assumes that a.cols (CV_8U) % sizeof(uint64_t) == 0

    const uint64_t *pa, *pb;
    pa = a.ptr<uint64_t>();  // a & b are actually CV_8U
    pb = b.ptr<uint64_t>();

    uint64_t v, ret = 0;
    for (size_t i = 0; i < a.cols / sizeof(uint64_t); ++i, ++pa, ++pb) {
      v = *pa ^ *pb;
      v = v - ((v >> 1) & (uint64_t) ~(uint64_t)0 / 3);
      v = (v & (uint64_t) ~(uint64_t)0 / 15 * 3) +
          ((v >> 2) & (uint64_t) ~(uint64_t)0 / 15 * 3);
      v = (v + (v >> 4)) & (uint64_t) ~(uint64_t)0 / 255 * 15;
      ret += (uint64_t)(v * ((uint64_t) ~(uint64_t)0 / 255)) >>
             (sizeof(uint64_t) - 1) * CHAR_BIT;
    }

    return ret;
  } else {
    double sqd = 0.;
    assert(a.type() == CV_32F);
    assert(a.rows == 1);
    const float *a_ptr = a.ptr<float>(0);
    const float *b_ptr = b.ptr<float>(0);
    for (int i = 0; i < a.cols; i++)
      sqd += (a_ptr[i] - b_ptr[i]) * (a_ptr[i] - b_ptr[i]);
    return sqd;
  }
}

// --------------------------------------------------------------------------

std::string DescManip::toString(const cv::Mat &a) {
  stringstream ss;
  // introduce a magic value that marks the DBoW3 string format
  ss << "dbw3 ";
  // save size and type

  ss << a.type() << " " << a.cols << " ";

  if (a.type() == CV_8U) {
    const unsigned char *p = a.ptr<unsigned char>();
    for (int i = 0; i < a.cols; ++i, ++p) ss << (int)*p << " ";
  } else {
    const float *p = a.ptr<float>();
    for (int i = 0; i < a.cols; ++i, ++p) ss << *p << " ";
  }

  return ss.str();
}

// --------------------------------------------------------------------------

void DescManip::fromString(cv::Mat &a, const std::string &s) {
  // check if the dbw3 magic is present
  string ss_aux;
  ss_aux.reserve(10);
  for (size_t i = 0; i < 10 && i < s.size(); i++) ss_aux.push_back(s[i]);
  if (ss_aux.find("dbw3") == std::string::npos) {  // old format without magic
    // READ UNTIL END
    stringstream ss(s);
    int val;
    vector<uchar> data;
    data.reserve(100);
    while (ss >> val) data.push_back(val);
    // copy to a
    a.create(1, data.size(), CV_8UC1);
    memcpy(a.ptr<char>(0), &data[0], data.size());
  } else {
    // the string is "dbw3 <type> <cols> ...": consume the magic token first
    string magic;
    int type, cols;
    stringstream ss(s);
    ss >> magic >> type >> cols;
    a.create(1, cols, type);
    if (type == CV_8UC1) {
      unsigned char *p = a.ptr<unsigned char>();
      int n;
      for (int i = 0; i < a.cols; ++i, ++p)
        if (ss >> n) *p = (unsigned char)n;
    } else {
      float *p = a.ptr<float>();
      for (int i = 0; i < a.cols; ++i, ++p)
        if (!(ss >> *p))
          cerr << "Error reading. Unexpected EOF. DescManip::fromString"
               << endl;
    }
  }
}

// --------------------------------------------------------------------------

void DescManip::toMat32F(const std::vector<cv::Mat> &descriptors,
                         cv::Mat &mat) {
  if (descriptors.empty()) {
    mat.release();
    return;
  }

  if (descriptors[0].type() == CV_8UC1) {
    const size_t N = descriptors.size();
    int L = getDescSizeBytes(descriptors[0]);
    mat.create(N, L * 8, CV_32F);
    float *p = mat.ptr<float>();

    for (size_t i = 0; i < N; ++i) {
      const int C = descriptors[i].cols;
      const unsigned char *desc = descriptors[i].ptr<unsigned char>();

      for (int j = 0; j < C; ++j, p += 8) {
        p[0] = (desc[j] & (1 << 7) ? 1 : 0);
        p[1] = (desc[j] & (1 << 6) ? 1 : 0);
        p[2] = (desc[j] & (1 << 5) ? 1 : 0);
        p[3] = (desc[j] & (1 << 4) ? 1 : 0);
        p[4] = (desc[j] & (1 << 3) ? 1 : 0);
        p[5] = (desc[j] & (1 << 2) ? 1 : 0);
        p[6] = (desc[j] & (1 << 1) ? 1 : 0);
        p[7] = desc[j] & (1);
      }
    }
  } else {
    assert(descriptors[0].type() == CV_32F);
    const int N = descriptors.size();
    int L = descriptors[0].cols;
    mat.create(N, L, CV_32F);
    for (int i = 0; i < N; ++i)
      memcpy(mat.ptr<float>(i), descriptors[i].ptr<float>(0),
             sizeof(float) * L);
  }
}

void DescManip::toStream(const cv::Mat &m, std::ostream &str) {
  assert(m.rows == 1 || m.isContinuous());
  int type = m.type();
  int cols = m.cols;
  int rows = m.rows;
  str.write((char *)&cols, sizeof(cols));
  str.write((char *)&rows, sizeof(rows));
  str.write((char *)&type, sizeof(type));
  str.write((char *)m.ptr<char>(0), m.elemSize() * m.cols);
}

void DescManip::fromStream(cv::Mat &m, std::istream &str) {
  int type, cols, rows;
  str.read((char *)&cols, sizeof(cols));
  str.read((char *)&rows, sizeof(rows));
  str.read((char *)&type, sizeof(type));
  m.create(rows, cols, type);
  str.read((char *)m.ptr<char>(0), m.elemSize() * m.cols);
}

// --------------------------------------------------------------------------

}  // namespace DBoW3
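The SWAR bit-count used in `DescManip::distance` above is compact but opaque. Here is the same trick extracted into a standalone sketch, illustration only, checked against `std::bitset`:

```
// Illustration only -- the SWAR popcount from DescManip::distance, as a
// standalone helper. Each step widens the per-field partial bit sums.
#include <bitset>
#include <cassert>
#include <cstdint>

inline uint64_t popcount64(uint64_t v) {
  v = v - ((v >> 1) & ~uint64_t(0) / 3);              // 2-bit field sums
  v = (v & ~uint64_t(0) / 15 * 3) +
      ((v >> 2) & ~uint64_t(0) / 15 * 3);             // 4-bit field sums
  v = (v + (v >> 4)) & ~uint64_t(0) / 255 * 15;       // 8-bit field sums
  return (v * (~uint64_t(0) / 255)) >> 56;            // fold sums into top byte
}

int main() {
  const uint64_t x = 0x0123456789ABCDEFULL;
  assert(popcount64(x) == std::bitset<64>(x).count());
  return 0;
}
```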
@@ -1,99 +0,0 @@
/**
 * File: DescManip.h
 * Date: November 2011
 * Author: Dorian Galvez-Lopez
 * Description: functions to manipulate descriptors
 * License: see the LICENSE.txt file
 *
 */

#ifndef __D_T_DESCMANIP__
#define __D_T_DESCMANIP__

#include <climits>
#include <cstdint>
#include <opencv2/core/core.hpp>
#include <string>
#include <vector>
#include "exports.h"

namespace DBoW3 {

/// Class to manipulate descriptors (calculating means, differences and IO
/// routines)
class DBOW_API DescManip {
 public:
  /**
   * Calculates the mean value of a set of descriptors
   * @param descriptors
   * @param mean mean descriptor
   */
  static void meanValue(const std::vector<cv::Mat> &descriptors, cv::Mat &mean);

  /**
   * Calculates the distance between two descriptors
   * @param a
   * @param b
   * @return distance
   */
  static double distance(const cv::Mat &a, const cv::Mat &b);
  static inline uint32_t distance_8uc1(const cv::Mat &a, const cv::Mat &b);

  /**
   * Returns a string version of the descriptor
   * @param a descriptor
   * @return string version
   */
  static std::string toString(const cv::Mat &a);

  /**
   * Returns a descriptor from a string
   * @param a descriptor
   * @param s string version
   */
  static void fromString(cv::Mat &a, const std::string &s);

  /**
   * Returns a mat with the descriptors in float format
   * @param descriptors
   * @param mat (out) NxL 32F matrix
   */
  static void toMat32F(const std::vector<cv::Mat> &descriptors, cv::Mat &mat);

  /** io routines */
  static void toStream(const cv::Mat &m, std::ostream &str);
  static void fromStream(cv::Mat &m, std::istream &str);

 public:
  /** Returns the number of bytes of the descriptor;
   * used for binary descriptors only */
  static size_t getDescSizeBytes(const cv::Mat &d) {
    return d.cols * d.elemSize();
  }
};

uint32_t DescManip::distance_8uc1(const cv::Mat &a, const cv::Mat &b) {
  // binary descriptor

  // Bit count function got from:
  // http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetKernighan
  // This implementation assumes that a.cols (CV_8U) % sizeof(uint64_t) == 0

  const uint64_t *pa, *pb;
  pa = a.ptr<uint64_t>();  // a & b are actually CV_8U
  pb = b.ptr<uint64_t>();

  uint64_t v, ret = 0;
  size_t n = a.cols / sizeof(uint64_t);
  for (size_t i = 0; i < n; ++i, ++pa, ++pb) {
    v = *pa ^ *pb;
    v = v - ((v >> 1) & (uint64_t) ~(uint64_t)0 / 3);
    v = (v & (uint64_t) ~(uint64_t)0 / 15 * 3) +
        ((v >> 2) & (uint64_t) ~(uint64_t)0 / 15 * 3);
    v = (v + (v >> 4)) & (uint64_t) ~(uint64_t)0 / 255 * 15;
    ret += (uint64_t)(v * ((uint64_t) ~(uint64_t)0 / 255)) >>
           (sizeof(uint64_t) - 1) * CHAR_BIT;
  }
  return ret;
}
}  // namespace DBoW3

#endif
@@ -1,85 +0,0 @@
/**
 * File: FeatureVector.cpp
 * Date: November 2011
 * Author: Dorian Galvez-Lopez
 * Description: feature vector
 * License: see the LICENSE.txt file
 *
 */

#include "FeatureVector.h"
#include <map>
#include <vector>
#include <iostream>

namespace DBoW3 {

// ---------------------------------------------------------------------------

FeatureVector::FeatureVector(void)
{
}

// ---------------------------------------------------------------------------

FeatureVector::~FeatureVector(void)
{
}

// ---------------------------------------------------------------------------

void FeatureVector::addFeature(NodeId id, unsigned int i_feature)
{
  FeatureVector::iterator vit = this->lower_bound(id);

  if(vit != this->end() && vit->first == id)
  {
    vit->second.push_back(i_feature);
  }
  else
  {
    vit = this->insert(vit, FeatureVector::value_type(id,
      std::vector<unsigned int>() ));
    vit->second.push_back(i_feature);
  }
}

// ---------------------------------------------------------------------------

std::ostream& operator<<(std::ostream &out,
  const FeatureVector &v)
{
  if(!v.empty())
  {
    FeatureVector::const_iterator vit = v.begin();

    const std::vector<unsigned int>* f = &vit->second;

    out << "<" << vit->first << ": [";
    if(!f->empty()) out << (*f)[0];
    for(unsigned int i = 1; i < f->size(); ++i)
    {
      out << ", " << (*f)[i];
    }
    out << "]>";

    for(++vit; vit != v.end(); ++vit)
    {
      f = &vit->second;

      out << ", <" << vit->first << ": [";
      if(!f->empty()) out << (*f)[0];
      for(unsigned int i = 1; i < f->size(); ++i)
      {
        out << ", " << (*f)[i];
      }
      out << "]>";
    }
  }

  return out;
}

// ---------------------------------------------------------------------------

} // namespace DBoW3
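A tiny sketch of how the class declared in FeatureVector.h below groups local-feature indexes by vocabulary-tree node (illustration only; assumes the DBoW3 headers are on the include path):

```
// Illustration only: FeatureVector maps a node id to the indexes of the
// image features that fell under that node.
#include <iostream>
#include "FeatureVector.h"

int main() {
  DBoW3::FeatureVector fv;
  fv.addFeature(/*node id=*/3, /*feature index=*/0);
  fv.addFeature(3, 7);
  fv.addFeature(5, 2);
  std::cout << fv << std::endl;  // prints: <3: [0, 7]>, <5: [2]>
  return 0;
}
```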
@@ -1,55 +0,0 @@
/**
 * File: FeatureVector.h
 * Date: November 2011
 * Author: Dorian Galvez-Lopez
 * Description: feature vector
 * License: see the LICENSE.txt file
 *
 */

#ifndef __D_T_FEATURE_VECTOR__
#define __D_T_FEATURE_VECTOR__

#include "BowVector.h"
#include <map>
#include <vector>
#include "exports.h"
namespace DBoW3 {

/// Vector of nodes with indexes of local features
class DBOW_API FeatureVector:
  public std::map<NodeId, std::vector<unsigned int> >
{
public:

  /**
   * Constructor
   */
  FeatureVector(void);

  /**
   * Destructor
   */
  ~FeatureVector(void);

  /**
   * Adds a feature to an existing node, or adds a new node with an initial
   * feature
   * @param id node id to add or to modify
   * @param i_feature index of feature to add to the given node
   */
  void addFeature(NodeId id, unsigned int i_feature);

  /**
   * Sends a string version of the feature vector through the stream
   * @param out stream
   * @param v feature vector
   */
  friend std::ostream& operator<<(std::ostream &out, const FeatureVector &v);

};

} // namespace DBoW3

#endif
@@ -1,63 +0,0 @@
/**
 * File: QueryResults.cpp
 * Date: March, November 2011
 * Author: Dorian Galvez-Lopez
 * Description: structure to store results of database queries
 * License: see the LICENSE.txt file
 *
 */

#include <iostream>
#include <fstream>
#include "QueryResults.h"

using namespace std;

namespace DBoW3
{

// ---------------------------------------------------------------------------

ostream & operator<<(ostream& os, const Result& ret )
{
  os << "<EntryId: " << ret.Id << ", Score: " << ret.Score << ">";
  return os;
}

// ---------------------------------------------------------------------------

ostream & operator<<(ostream& os, const QueryResults& ret )
{
  if(ret.size() == 1)
    os << "1 result:" << endl;
  else
    os << ret.size() << " results:" << endl;

  QueryResults::const_iterator rit;
  for(rit = ret.begin(); rit != ret.end(); ++rit)
  {
    os << *rit;
    if(rit + 1 != ret.end()) os << endl;
  }
  return os;
}

// ---------------------------------------------------------------------------

void QueryResults::saveM(const std::string &filename) const
{
  fstream f(filename.c_str(), ios::out);

  QueryResults::const_iterator qit;
  for(qit = begin(); qit != end(); ++qit)
  {
    f << qit->Id << " " << qit->Score << endl;
  }

  f.close();
}

// ---------------------------------------------------------------------------

} // namespace DBoW3
@@ -1,205 +0,0 @@
/**
 * File: QueryResults.h
 * Date: March, November 2011
 * Author: Dorian Galvez-Lopez
 * Description: structure to store results of database queries
 * License: see the LICENSE.txt file
 *
 */

#ifndef __D_T_QUERY_RESULTS__
#define __D_T_QUERY_RESULTS__

#include <vector>
#include "exports.h"
namespace DBoW3 {

/// Id of entries of the database
typedef unsigned int EntryId;

/// Single result of a query
class DBOW_API Result
{
public:

  /// Entry id
  EntryId Id;

  /// Score obtained
  double Score;

  /// debug
  int nWords; // words in common
  // !!! this is filled only by Bhatt score!
  // (and for BCMatching, BCThresholding then)

  double bhatScore, chiScore;
  /// debug

  // only done by ChiSq and BCThresholding
  double sumCommonVi;
  double sumCommonWi;
  double expectedChiScore;
  /// debug

  /**
   * Empty constructor
   */
  inline Result(){}

  /**
   * Creates a result with the given data
   * @param _id entry id
   * @param _score score
   */
  inline Result(EntryId _id, double _score): Id(_id), Score(_score){}

  /**
   * Compares the scores of two results
   * @return true iff this.score < r.score
   */
  inline bool operator<(const Result &r) const
  {
    return this->Score < r.Score;
  }

  /**
   * Compares the scores of two results
   * @return true iff this.score > r.score
   */
  inline bool operator>(const Result &r) const
  {
    return this->Score > r.Score;
  }

  /**
   * Compares the entry id of the result
   * @return true iff this.id == id
   */
  inline bool operator==(EntryId id) const
  {
    return this->Id == id;
  }

  /**
   * Compares the score of this entry with a given one
   * @param s score to compare with
   * @return true iff this score < s
   */
  inline bool operator<(double s) const
  {
    return this->Score < s;
  }

  /**
   * Compares the score of this entry with a given one
   * @param s score to compare with
   * @return true iff this score > s
   */
  inline bool operator>(double s) const
  {
    return this->Score > s;
  }

  /**
   * Compares the score of two results
   * @param a
   * @param b
   * @return true iff a.Score > b.Score
   */
  static inline bool gt(const Result &a, const Result &b)
  {
    return a.Score > b.Score;
  }

  /**
   * Compares the scores of two results
   * @return true iff a.Score > b.Score
   */
  inline static bool ge(const Result &a, const Result &b)
  {
    return a.Score > b.Score;
  }

  /**
   * Returns true iff a.Score >= b.Score
   * @param a
   * @param b
   * @return true iff a.Score >= b.Score
   */
  static inline bool geq(const Result &a, const Result &b)
  {
    return a.Score >= b.Score;
  }

  /**
   * Returns true iff a.Score >= s
   * @param a
   * @param s
   * @return true iff a.Score >= s
   */
  static inline bool geqv(const Result &a, double s)
  {
    return a.Score >= s;
  }


  /**
   * Returns true iff a.Id < b.Id
   * @param a
   * @param b
   * @return true iff a.Id < b.Id
   */
  static inline bool ltId(const Result &a, const Result &b)
  {
    return a.Id < b.Id;
  }

  /**
   * Prints a string version of the result
   * @param os ostream
   * @param ret Result to print
   */
  friend std::ostream & operator<<(std::ostream& os, const Result& ret );
};

/// Multiple results from a query
class QueryResults: public std::vector<Result>
{
public:

  /**
   * Multiplies all the scores in the vector by factor
   * @param factor
   */
  inline void scaleScores(double factor);

  /**
   * Prints a string version of the results
   * @param os ostream
   * @param ret QueryResults to print
   */
  DBOW_API friend std::ostream & operator<<(std::ostream& os, const QueryResults& ret );

  /**
   * Saves a matlab file with the results
   * @param filename
   */
  void saveM(const std::string &filename) const;

};

// --------------------------------------------------------------------------

inline void QueryResults::scaleScores(double factor)
{
  for(QueryResults::iterator qit = begin(); qit != end(); ++qit)
    qit->Score *= factor;
}

// --------------------------------------------------------------------------

} // namespace DBoW3

#endif
@@ -1,315 +0,0 @@
/**
 * File: ScoringObject.cpp
 * Date: November 2011
 * Author: Dorian Galvez-Lopez
 * Description: functions to compute bow scores
 * License: see the LICENSE.txt file
 *
 */

#include <cfloat>
#include <cmath>
#include "Vocabulary.h"
#include "BowVector.h"

using namespace DBoW3;

// If you change the type of WordValue, make sure you change also the
// epsilon value (this is needed by the KL method)
const double GeneralScoring::LOG_EPS = log(DBL_EPSILON); // FLT_EPSILON

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double L1Scoring::score(const BowVector &v1, const BowVector &v2) const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      score += fabs(vi - wi) - fabs(vi) - fabs(wi);

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      v1_it = v1.lower_bound(v2_it->first);
      // v1_it = (first element >= v2_it.id)
    }
    else
    {
      // move v2 forward
      v2_it = v2.lower_bound(v1_it->first);
      // v2_it = (first element >= v1_it.id)
    }
  }

  // ||v - w||_{L1} = 2 + Sum(|v_i - w_i| - |v_i| - |w_i|)
  //   for all i | v_i != 0 and w_i != 0
  // (Nister, 2006)
  // scaled_||v - w||_{L1} = 1 - 0.5 * ||v - w||_{L1}
  score = -score/2.0;

  return score; // [0..1]
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double L2Scoring::score(const BowVector &v1, const BowVector &v2) const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      score += vi * wi;

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      v1_it = v1.lower_bound(v2_it->first);
      // v1_it = (first element >= v2_it.id)
    }
    else
    {
      // move v2 forward
      v2_it = v2.lower_bound(v1_it->first);
      // v2_it = (first element >= v1_it.id)
    }
  }

  // ||v - w||_{L2} = sqrt( 2 - 2 * Sum(v_i * w_i) )
  //   for all i | v_i != 0 and w_i != 0 )
  // (Nister, 2006)
  if(score >= 1) // rounding errors
    score = 1.0;
  else
    score = 1.0 - sqrt(1.0 - score); // [0..1]

  return score;
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double ChiSquareScoring::score(const BowVector &v1, const BowVector &v2)
  const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  // all the items are taken into account

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      // (v-w)^2/(v+w) - v - w = -4 vw/(v+w)
      // we move the -4 out
      if(vi + wi != 0.0) score += vi * wi / (vi + wi);

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      v1_it = v1.lower_bound(v2_it->first);
    }
    else
    {
      // move v2 forward
      v2_it = v2.lower_bound(v1_it->first);
    }
  }

  // this takes the -4 into account
  score = 2. * score; // [0..1]

  return score;
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double KLScoring::score(const BowVector &v1, const BowVector &v2) const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  // all the items of v are taken into account

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      if(vi != 0 && wi != 0) score += vi * log(vi/wi);

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      score += vi * (log(vi) - LOG_EPS);
      ++v1_it;
    }
    else
    {
      // move v2_it forward, do not add any score
      v2_it = v2.lower_bound(v1_it->first);
      // v2_it = (first element >= v1_it.id)
    }
  }

  // sum rest of items of v
  for(; v1_it != v1_end; ++v1_it)
    if(v1_it->second != 0)
      score += v1_it->second * (log(v1_it->second) - LOG_EPS);

  return score; // cannot be scaled
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double BhattacharyyaScoring::score(const BowVector &v1,
  const BowVector &v2) const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      score += sqrt(vi * wi);

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      v1_it = v1.lower_bound(v2_it->first);
      // v1_it = (first element >= v2_it.id)
    }
    else
    {
      // move v2 forward
      v2_it = v2.lower_bound(v1_it->first);
      // v2_it = (first element >= v1_it.id)
    }
  }

  return score; // already scaled
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------

double DotProductScoring::score(const BowVector &v1,
  const BowVector &v2) const
{
  BowVector::const_iterator v1_it, v2_it;
  const BowVector::const_iterator v1_end = v1.end();
  const BowVector::const_iterator v2_end = v2.end();

  v1_it = v1.begin();
  v2_it = v2.begin();

  double score = 0;

  while(v1_it != v1_end && v2_it != v2_end)
  {
    const WordValue& vi = v1_it->second;
    const WordValue& wi = v2_it->second;

    if(v1_it->first == v2_it->first)
    {
      score += vi * wi;

      // move v1 and v2 forward
      ++v1_it;
      ++v2_it;
    }
    else if(v1_it->first < v2_it->first)
    {
      // move v1 forward
      v1_it = v1.lower_bound(v2_it->first);
      // v1_it = (first element >= v2_it.id)
    }
    else
    {
      // move v2 forward
      v2_it = v2.lower_bound(v1_it->first);
      // v2_it = (first element >= v1_it.id)
    }
  }

  return score; // cannot scale
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------
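All of these scorers walk two sorted BowVectors with the same `lower_bound` skip-ahead pattern. A self-contained sketch of that pattern for the Bhattacharyya coefficient, illustration only, with a plain `std::map` standing in for `DBoW3::BowVector` (which is also an ordered map):

```
#include <cmath>
#include <cstdio>
#include <map>

// Sketch of the sorted-map intersection used by the scorers above, applied
// to the Bhattacharyya coefficient sum(sqrt(v_i * w_i)) over common words.
double bhattacharyya(const std::map<unsigned, double>& v,
                     const std::map<unsigned, double>& w) {
  double score = 0;
  auto vit = v.begin();
  auto wit = w.begin();
  while (vit != v.end() && wit != w.end()) {
    if (vit->first == wit->first) {        // common word: accumulate
      score += std::sqrt(vit->second * wit->second);
      ++vit;
      ++wit;
    } else if (vit->first < wit->first) {  // skip ahead to next candidate
      vit = v.lower_bound(wit->first);
    } else {
      wit = w.lower_bound(vit->first);
    }
  }
  return score;  // in [0..1] for L1-normalized vectors
}

int main() {
  std::map<unsigned, double> a{{1, 0.5}, {3, 0.5}}, b{{1, 0.5}, {2, 0.5}};
  std::printf("%f\n", bhattacharyya(a, b));  // 0.500000
  return 0;
}
```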
@@ -1,95 +0,0 @@
/**
 * File: ScoringObject.h
 * Date: November 2011
 * Author: Dorian Galvez-Lopez
 * Description: functions to compute bow scores
 * License: see the LICENSE.txt file
 *
 */

#ifndef __D_T_SCORING_OBJECT__
#define __D_T_SCORING_OBJECT__

#include "BowVector.h"
#include "exports.h"
namespace DBoW3 {

/// Base class of scoring functions
class DBOW_API GeneralScoring
{
public:
  /**
   * Computes the score between two vectors. Vectors must be sorted and
   * normalized if necessary
   * @param v (in/out)
   * @param w (in/out)
   * @return score
   */
  virtual double score(const BowVector &v, const BowVector &w) const = 0;

  /**
   * Returns whether a vector must be normalized before scoring according
   * to the scoring scheme
   * @param norm norm to use
   * @return true iff must normalize
   */
  virtual bool mustNormalize(LNorm &norm) const = 0;

  /// Log of epsilon
  static const double LOG_EPS;
  // If you change the type of WordValue, make sure you change also the
  // epsilon value (this is needed by the KL method)

  virtual ~GeneralScoring() {} //!< Required for virtual base classes
};

/**
 * Macro for defining Scoring classes
 * @param NAME name of class
 * @param MUSTNORMALIZE if vectors must be normalized to compute the score
 * @param NORM type of norm to use when MUSTNORMALIZE
 */
#define __SCORING_CLASS(NAME, MUSTNORMALIZE, NORM) \
  NAME: public GeneralScoring \
  { public: \
    /** \
     * Computes score between two vectors \
     * @param v \
     * @param w \
     * @return score between v and w \
     */ \
    virtual double score(const BowVector &v, const BowVector &w) const; \
    \
    /** \
     * Says if a vector must be normalized according to the scoring function \
     * @param norm (out) if true, norm to use \
     * @return true iff vectors must be normalized \
     */ \
    virtual inline bool mustNormalize(LNorm &norm) const \
      { norm = NORM; return MUSTNORMALIZE; } \
  }

/// L1 Scoring object
class __SCORING_CLASS(L1Scoring, true, L1);

/// L2 Scoring object
class __SCORING_CLASS(L2Scoring, true, L2);

/// Chi square Scoring object
class __SCORING_CLASS(ChiSquareScoring, true, L1);

/// KL divergence Scoring object
class __SCORING_CLASS(KLScoring, true, L1);

/// Bhattacharyya Scoring object
class __SCORING_CLASS(BhattacharyyaScoring, true, L1);

/// Dot product Scoring object
class __SCORING_CLASS(DotProductScoring, false, L1);

#undef __SCORING_CLASS

} // namespace DBoW3

#endif
File diff suppressed because it is too large
@ -1,468 +0,0 @@
|
|||
/**
|
||||
* File: Vocabulary.h
|
||||
* Date: February 2011
|
||||
* Author: Dorian Galvez-Lopez
|
||||
* Description: templated vocabulary
|
||||
* License: see the LICENSE.txt file
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef __D_T__VOCABULARY__
|
||||
#define __D_T__VOCABULARY__
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include <algorithm>
|
||||
#include <bitset>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <limits>
|
||||
#include <numeric>
|
||||
#include <opencv2/core/core.hpp>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include "BowVector.h"
|
||||
#include "FeatureVector.h"
|
||||
#include "ScoringObject.h"
|
||||
#include "exports.h"
|
||||
namespace DBoW3 {
|
||||
/// Vocabulary
|
||||
class DBOW_API Vocabulary {
|
||||
friend class FastSearch;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Initiates an empty vocabulary
|
||||
* @param k branching factor
|
||||
* @param L depth levels
|
||||
* @param weighting weighting type
|
||||
* @param scoring scoring type
|
||||
*/
|
||||
Vocabulary(int k = 10, int L = 5, WeightingType weighting = TF_IDF,
|
||||
ScoringType scoring = L1_NORM);
|
||||
|
||||
/**
|
||||
* Creates the vocabulary by loading a file
|
||||
* @param filename
|
||||
*/
|
||||
Vocabulary(const std::string &filename);
|
||||
|
||||
/**
|
||||
* Creates the vocabulary by loading a file
|
||||
* @param filename
|
||||
*/
|
||||
Vocabulary(const char *filename);
|
||||
|
||||
/**
|
||||
* Copy constructor
|
||||
* @param voc
|
||||
*/
|
||||
Vocabulary(const Vocabulary &voc);
|
||||
|
||||
/**
|
||||
* Destructor
|
||||
*/
|
||||
virtual ~Vocabulary();
|
||||
|
||||
/**
|
||||
* Assigns the given vocabulary to this by copying its data and removing
|
||||
* all the data contained by this vocabulary before
|
||||
* @param voc
|
||||
* @return reference to this vocabulary
|
||||
*/
|
||||
Vocabulary &operator=(const Vocabulary &voc);
|
||||
|
||||
/**
|
||||
* Creates a vocabulary from the training features with the already
|
||||
* defined parameters
|
||||
* @param training_features
|
||||
*/
|
||||
virtual void create(
|
||||
const std::vector<std::vector<cv::Mat>> &training_features);
|
||||
/**
|
||||
* Creates a vocabulary from the training features with the already
|
||||
* defined parameters
|
||||
* @param training_features. Each row of a matrix is a feature
|
||||
*/
|
||||
virtual void create(const std::vector<cv::Mat> &training_features);
|
||||
|
||||
/**
|
||||
* Creates a vocabulary from the training features, setting the branching
|
||||
* factor and the depth levels of the tree
|
||||
* @param training_features
|
||||
* @param k branching factor
|
||||
* @param L depth levels
|
||||
*/
|
||||
virtual void create(
|
||||
const std::vector<std::vector<cv::Mat>> &training_features, int k, int L);
|
||||
|
||||
/**
|
||||
* Creates a vocabulary from the training features, setting the branching
|
||||
* factor nad the depth levels of the tree, and the weighting and scoring
|
||||
* schemes
|
||||
*/
|
||||
virtual void create(
|
||||
const std::vector<std::vector<cv::Mat>> &training_features, int k, int L,
|
||||
WeightingType weighting, ScoringType scoring);
|
||||
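// Training sketch (added for illustration; training_features is a
// hypothetical std::vector<std::vector<cv::Mat>>, one descriptor set per
// training image; save() is declared further below):
//   DBoW3::Vocabulary voc;                    // defaults: k = 10, L = 5
//   voc.create(training_features, 9, 3);      // at most 9^3 = 729 words
//   voc.save("small_voc.dbow3");              // placeholder filename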
|
||||
/**
|
||||
* Returns the number of words in the vocabulary
|
||||
* @return number of words
|
||||
*/
|
||||
virtual inline unsigned int size() const {
|
||||
return (unsigned int)m_words.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the vocabulary is empty (i.e. it has not been trained)
|
||||
* @return true iff the vocabulary is empty
|
||||
*/
|
||||
virtual inline bool empty() const { return m_words.empty(); }
|
||||
|
||||
/** Clears the vocabulary object
|
||||
*/
|
||||
void clear();
|
||||
/**
|
||||
* Transforms a set of descriptors into a bow vector
|
||||
* @param features
|
||||
* @param v (out) bow vector of weighted words
|
||||
*/
|
||||
virtual void transform(const std::vector<cv::Mat> &features,
|
||||
BowVector &v) const;
|
||||
|
||||
virtual void transform(const std::vector<std::bitset<256>> &features,
|
||||
BowVector &v) const;
|
||||
|
||||
/**
|
||||
* Transforms a set of descriptors into a bow vector
|
||||
* @param features, one per row
|
||||
* @param v (out) bow vector of weighted words
|
||||
*/
|
||||
virtual void transform(const cv::Mat &features, BowVector &v) const;
|
||||
/**
|
||||
* Transform a set of descriptors into a bow vector and a feature vector
|
||||
* @param features
|
||||
* @param v (out) bow vector
|
||||
* @param fv (out) feature vector of nodes and feature indexes
|
||||
* @param levelsup levels to go up the vocabulary tree to get the node index
|
||||
*/
|
||||
virtual void transform(const std::vector<cv::Mat> &features, BowVector &v,
|
||||
FeatureVector &fv, int levelsup) const;
|
||||
|
||||
/**
|
||||
* Transforms a single feature into a word (without weight)
|
||||
* @param feature
|
||||
* @return word id
|
||||
*/
|
||||
virtual WordId transform(const cv::Mat &feature) const;
|
||||
|
||||
void transform(const std::vector<std::bitset<256>> &features, BowVector &v,
|
||||
FeatureVector &fv, int levelsup) const;
|
||||
|
||||
/**
|
||||
* Returns the score of two vectors
|
||||
* @param a vector
|
||||
* @param b vector
|
||||
* @return score between vectors
|
||||
* @note the vectors must be already sorted and normalized if necessary
|
||||
*/
|
||||
double score(const BowVector &a, const BowVector &b) const {
|
||||
return m_scoring_object->score(a, b);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the id of the node that is "levelsup" levels from the word given
|
||||
* @param wid word id
|
||||
* @param levelsup 0..L
|
||||
* @return node id. if levelsup is 0, returns the node id associated to the
|
||||
* word id
|
||||
*/
|
||||
virtual NodeId getParentNode(WordId wid, int levelsup) const;
|
||||
|
||||
/**
|
||||
* Returns the ids of all the words that are under the given node id,
|
||||
* by traversing any of the branches that goes down from the node
|
||||
* @param nid starting node id
|
||||
* @param words ids of words
|
||||
*/
|
||||
void getWordsFromNode(NodeId nid, std::vector<WordId> &words) const;
|
||||
|
||||
/**
|
||||
* Returns the branching factor of the tree (k)
|
||||
* @return k
|
||||
*/
|
||||
inline int getBranchingFactor() const { return m_k; }
|
||||
|
||||
/**
|
||||
* Returns the depth levels of the tree (L)
|
||||
* @return L
|
||||
*/
|
||||
inline int getDepthLevels() const { return m_L; }
|
||||
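// Note (added for clarity): a tree with branching factor k and depth L has
// at most k^L leaves, i.e. words; the constructor defaults k = 10, L = 5
// allow up to 10^5 = 100000 words.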
|
||||
/**
|
||||
* Returns the effective depth of the tree: the average depth of its leaves
|
||||
* @return average of depth levels of leaves
|
||||
*/
|
||||
float getEffectiveLevels() const;
|
||||
|
||||
/**
|
||||
* Returns the descriptor of a word
|
||||
* @param wid word id
|
||||
* @return descriptor
|
||||
*/
|
||||
virtual inline cv::Mat getWord(WordId wid) const;
|
||||
|
||||
/**
|
||||
* Returns the weight of a word
|
||||
* @param wid word id
|
||||
* @return weight
|
||||
*/
|
||||
virtual inline WordValue getWordWeight(WordId wid) const;
|
||||
|
||||
/**
|
||||
* Returns the weighting method
|
||||
* @return weighting method
|
||||
*/
|
||||
inline WeightingType getWeightingType() const { return m_weighting; }
|
||||
|
||||
/**
|
||||
* Returns the scoring method
|
||||
* @return scoring method
|
||||
*/
|
||||
inline ScoringType getScoringType() const { return m_scoring; }
|
||||
|
||||
/**
|
||||
* Changes the weighting method
|
||||
* @param type new weighting type
|
||||
*/
|
||||
inline void setWeightingType(WeightingType type);
|
||||
|
||||
/**
|
||||
* Changes the scoring method
|
||||
* @param type new scoring type
|
||||
*/
|
||||
void setScoringType(ScoringType type);
|
||||
|
||||
/**
|
||||
* Saves the vocabulary into a file. If filename extension contains .yml,
|
||||
* OpenCV YAML format is used; otherwise, a binary format is employed
|
||||
* @param filename
|
||||
*/
|
||||
void save(const std::string &filename, bool binary_compressed = true) const;
|
||||
|
||||
/**
|
||||
* Loads the vocabulary from a file created with save
|
||||
* @param filename.
|
||||
*/
|
||||
void load(const std::string &filename);
|
||||
|
||||
/**
|
||||
* Saves the vocabulary to a file storage structure
|
||||
* @param fn node in file storage
|
||||
*/
|
||||
virtual void save(cv::FileStorage &fs,
|
||||
const std::string &name = "vocabulary") const;
|
||||
|
||||
/**
|
||||
* Loads the vocabulary from a file storage node
|
||||
* @param fn first node
|
||||
* @param subname name of the child node of fn where the tree is stored.
|
||||
* If not given, the fn node is used instead
|
||||
*/
|
||||
virtual void load(const cv::FileStorage &fs,
|
||||
const std::string &name = "vocabulary");
|
||||
|
||||
/**
|
||||
* Stops those words whose weight is below minWeight.
|
||||
* Words are stopped by setting their weight to 0. They are not returned
|
||||
* later when transforming image features into vectors.
|
||||
* Note that when using IDF or TF_IDF, the weight is the idf part, which
|
||||
* is equivalent to -log(f), where f is the frequency of the word
|
||||
* (f = Ni/N, Ni: number of training images where the word is present,
|
||||
* N: number of training images).
|
||||
* Note that the old weight is forgotten, and subsequent calls to this
|
||||
* function with a lower minWeight have no effect.
|
||||
* @return number of words stopped now
|
||||
*/
|
||||
virtual int stopWords(double minWeight);
|
||||
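// Worked example (added for clarity): under IDF/TF_IDF, a word present in 10
// of 1000 training images has weight -log(10/1000) = log(100) ≈ 4.6, while a
// word present in 900 of 1000 images has weight -log(0.9) ≈ 0.105;
// stopWords(0.2) would therefore stop only the very frequent word.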
|
||||
/** Returns the size of the descriptor employed. If the Vocabulary is empty,
|
||||
* returns -1
|
||||
*/
|
||||
int getDescritorSize() const;
|
||||
/** Returns the type of the descriptor employed, normally (8U_C1, 32F_C1)
|
||||
*/
|
||||
int getDescritorType() const;
|
||||
// I/O to/from a stream
|
||||
void toStream(std::ostream &str, bool compressed = true) const;
|
||||
void fromStream(std::istream &str);
|
||||
|
||||
protected:
|
||||
/// reference to descriptor
|
||||
typedef const cv::Mat pDescriptor;
|
||||
|
||||
/// Tree node
|
||||
struct Node {
|
||||
/// Node id
|
||||
NodeId id;
|
||||
/// Weight if the node is a word
|
||||
WordValue weight;
|
||||
/// Children
|
||||
std::vector<NodeId> children;
|
||||
/// Parent node (undefined in case of root)
|
||||
NodeId parent;
|
||||
/// Node descriptor
|
||||
cv::Mat descriptor;
|
||||
|
||||
/// Word id if the node is a word
|
||||
WordId word_id;
|
||||
|
||||
/**
|
||||
* Empty constructor
|
||||
*/
|
||||
Node() : id(0), weight(0), parent(0), word_id(0) {}
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
* @param _id node id
|
||||
*/
|
||||
Node(NodeId _id) : id(_id), weight(0), parent(0), word_id(0) {}
|
||||
|
||||
/**
|
||||
* Returns whether the node is a leaf node
|
||||
* @return true iff the node is a leaf
|
||||
*/
|
||||
inline bool isLeaf() const { return children.empty(); }
|
||||
};
|
||||
|
||||
protected:
|
||||
/**
|
||||
* Creates an instance of the scoring object according to m_scoring
|
||||
*/
|
||||
void createScoringObject();
|
||||
|
||||
/**
|
||||
* Returns a set of pointers to descriptors
|
||||
* @param training_features all the features
|
||||
* @param features (out) pointers to the training features
|
||||
*/
|
||||
void getFeatures(const std::vector<std::vector<cv::Mat>> &training_features,
|
||||
std::vector<cv::Mat> &features) const;
|
||||
|
||||
/**
|
||||
* Returns the word id associated to a feature
|
||||
* @param feature
|
||||
* @param id (out) word id
|
||||
* @param weight (out) word weight
|
||||
* @param nid (out) if given, id of the node "levelsup" levels up
|
||||
* @param levelsup
|
||||
*/
|
||||
virtual void transform(const cv::Mat &feature, WordId &id, WordValue &weight,
|
||||
NodeId *nid, int levelsup = 0) const;
|
||||
/**
|
||||
* Returns the word id associated to a feature
|
||||
* @param feature
|
||||
* @param id (out) word id
|
||||
* @param weight (out) word weight
|
||||
* @param nid (out) if given, id of the node "levelsup" levels up
|
||||
* @param levelsup
|
||||
*/
|
||||
virtual void transform(const cv::Mat &feature, WordId &id,
|
||||
WordValue &weight) const;
|
||||
|
||||
void transform(const std::bitset<256> &feature, WordId &word_id,
|
||||
WordValue &weight) const;
|
||||
|
||||
virtual void transform(const std::bitset<256> &feature, WordId &word_id,
|
||||
WordValue &weight, NodeId *nid, int levelsup) const;
|
||||
|
||||
/**
|
||||
* Returns the word id associated to a feature
|
||||
* @param feature
|
||||
* @param id (out) word id
|
||||
*/
|
||||
virtual void transform(const cv::Mat &feature, WordId &id) const;
|
||||
|
||||
/**
|
||||
* Creates a level in the tree, under the parent, by running kmeans with
|
||||
* a descriptor set, and recursively creates the subsequent levels too
|
||||
* @param parent_id id of parent node
|
||||
* @param descriptors descriptors to run the kmeans on
|
||||
* @param current_level current level in the tree
|
||||
*/
|
||||
void HKmeansStep(NodeId parent_id, const std::vector<cv::Mat> &descriptors,
|
||||
int current_level);
|
||||
|
||||
/**
|
||||
* Creates k clusters from the given descriptors with some seeding algorithm.
|
||||
* @note In this class, kmeans++ is used, but this function should be
|
||||
* overridden by inherited classes.
|
||||
*/
|
||||
virtual void initiateClusters(const std::vector<cv::Mat> &descriptors,
|
||||
std::vector<cv::Mat> &clusters) const;
|
||||
|
||||
/**
|
||||
* Creates k clusters from the given descriptor sets by running the
|
||||
* initial step of kmeans++
|
||||
* @param descriptors
|
||||
* @param clusters resulting clusters
|
||||
*/
|
||||
void initiateClustersKMpp(const std::vector<cv::Mat> &descriptors,
|
||||
std::vector<cv::Mat> &clusters) const;
|
||||
|
||||
/**
|
||||
* Create the words of the vocabulary once the tree has been built
|
||||
*/
|
||||
void createWords();
|
||||
|
||||
/**
|
||||
* Sets the weights of the nodes of tree according to the given features.
|
||||
* Before calling this function, the nodes and the words must be already
|
||||
* created (by calling HKmeansStep and createWords)
|
||||
* @param features
|
||||
*/
|
||||
void setNodeWeights(const std::vector<std::vector<cv::Mat>> &features);
|
||||
|
||||
/**
|
||||
* Writes printable information of the vocabulary
|
||||
* @param os stream to write to
|
||||
* @param voc
|
||||
*/
|
||||
DBOW_API friend std::ostream &operator<<(std::ostream &os,
|
||||
const Vocabulary &voc);
|
||||
|
||||
/**Loads from ORBSLAM txt files
|
||||
*/
|
||||
void load_fromtxt(const std::string &filename);
|
||||
|
||||
protected:
|
||||
/// Branching factor
|
||||
int m_k;
|
||||
|
||||
/// Depth levels
|
||||
int m_L;
|
||||
|
||||
/// Weighting method
|
||||
WeightingType m_weighting;
|
||||
|
||||
/// Scoring method
|
||||
ScoringType m_scoring;
|
||||
|
||||
/// Object for computing scores
|
||||
GeneralScoring *m_scoring_object;
|
||||
|
||||
/// Tree nodes
|
||||
std::vector<Node> m_nodes;
|
||||
|
||||
/// Words of the vocabulary (tree leaves)
|
||||
/// this condition holds: m_words[wid]->word_id == wid
|
||||
std::vector<Node *> m_words;
|
||||
|
||||
public:
|
||||
// for debug (REMOVE)
|
||||
inline Node *getNodeWord(uint32_t idx) { return m_words[idx]; }
|
||||
};
|
||||
|
||||
} // namespace DBoW3
|
||||
|
||||
#endif
|
|
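// Usage sketch (added for illustration, not part of the original header):
// load a pretrained vocabulary, turn two hypothetical descriptor sets into
// BoW vectors, and compare them. All calls match the declarations above;
// the vocabulary path is a placeholder.
#include "Vocabulary.h"

double compare_images(const std::vector<cv::Mat> &desc_a,
                      const std::vector<cv::Mat> &desc_b) {
  DBoW3::Vocabulary voc("vocabulary.dbow3");  // loads the file on construction
  DBoW3::BowVector va, vb;
  voc.transform(desc_a, va);  // descriptors -> weighted word histogram
  voc.transform(desc_b, vb);
  return voc.score(va, vb);   // similarity under the vocabulary's scoring type
}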
@@ -1,51 +0,0 @@
|
|||
/*****************************
|
||||
Copyright 2014 Rafael Muñoz Salinas. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are
|
||||
permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this list of
|
||||
conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
of conditions and the following disclaimer in the documentation and/or other materials
|
||||
provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR
|
||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
The views and conclusions contained in the software and documentation are those of the
|
||||
authors and should not be interpreted as representing official policies, either expressed
|
||||
or implied, of Rafael Muñoz Salinas.
|
||||
********************************/
|
||||
|
||||
|
||||
|
||||
#ifndef __DBOW_CORE_TYPES_H__
|
||||
#define __DBOW_CORE_TYPES_H__
|
||||
|
||||
#if !defined _CRT_SECURE_NO_DEPRECATE && _MSC_VER > 1300
|
||||
#define _CRT_SECURE_NO_DEPRECATE /* to avoid multiple Visual Studio 2005 warnings */
|
||||
#endif
|
||||
|
||||
#if (defined WIN32 || defined _WIN32 || defined WINCE) && defined DBOW_DSO_EXPORTS
|
||||
#define DBOW_API __declspec(dllexport)
|
||||
#pragma warning ( disable : 4251 ) //disable warning on templates with dll linkage.
|
||||
#pragma warning ( disable : 4290 ) //disable warning due to exception specifications.
|
||||
#pragma warning ( disable : 4996 ) //disable warning regarding unsafe vsprintf.
|
||||
#pragma warning ( disable : 4244 ) //disable warning on conversions with loss of data.
|
||||
|
||||
#else
|
||||
#define DBOW_API
|
||||
#endif
|
||||
|
||||
|
||||
#define DBOW_VERSION "3.0.0"
|
||||
#endif
|
|
@@ -1,848 +0,0 @@
|
|||
// Fast data compression library
|
||||
// Copyright (C) 2006-2011 Lasse Mikkel Reinhold
|
||||
// lar@quicklz.com
|
||||
//
|
||||
// QuickLZ can be used for free under the GPL 1, 2 or 3 license (where anything
|
||||
// released into public must be open source) or under a commercial license if such
|
||||
// has been acquired (see http://www.quicklz.com/order.html). The commercial license
|
||||
// does not cover derived or ported versions created by third parties under GPL.
|
||||
|
||||
// 1.5.0 final
|
||||
|
||||
#include "quicklz.h"
|
||||
|
||||
#if QLZ_VERSION_MAJOR != 1 || QLZ_VERSION_MINOR != 5 || QLZ_VERSION_REVISION != 0
|
||||
#error quicklz.c and quicklz.h have different versions
|
||||
#endif
|
||||
|
||||
#if (defined(__X86__) || defined(__i386__) || defined(i386) || defined(_M_IX86) || defined(__386__) || defined(__x86_64__) || defined(_M_X64))
|
||||
#define X86X64
|
||||
#endif
|
||||
|
||||
#define MINOFFSET 2
|
||||
#define UNCONDITIONAL_MATCHLEN 6
|
||||
#define UNCOMPRESSED_END 4
|
||||
#define CWORD_LEN 4
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL == 1 && defined QLZ_PTR_64 && QLZ_STREAMING_BUFFER == 0
|
||||
#define OFFSET_BASE source
|
||||
#define CAST (ui32)(size_t)
|
||||
#else
|
||||
#define OFFSET_BASE 0
|
||||
#define CAST
|
||||
#endif
|
||||
|
||||
int qlz_get_setting(int setting)
|
||||
{
|
||||
switch (setting)
|
||||
{
|
||||
case 0: return QLZ_COMPRESSION_LEVEL;
|
||||
case 1: return sizeof(qlz_state_compress);
|
||||
case 2: return sizeof(qlz_state_decompress);
|
||||
case 3: return QLZ_STREAMING_BUFFER;
|
||||
#ifdef QLZ_MEMORY_SAFE
|
||||
case 6: return 1;
|
||||
#else
|
||||
case 6: return 0;
|
||||
#endif
|
||||
case 7: return QLZ_VERSION_MAJOR;
|
||||
case 8: return QLZ_VERSION_MINOR;
|
||||
case 9: return QLZ_VERSION_REVISION;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
static int same(const unsigned char *src, size_t n)
|
||||
{
|
||||
while(n > 0 && *(src + n) == *src)
|
||||
n--;
|
||||
return n == 0 ? 1 : 0;
|
||||
}
|
||||
#endif
|
||||
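// Note (added for clarity): same(src, n) returns 1 iff the n bytes following
// src all equal *src. qlz_compress_core uses it to detect byte runs so that
// matches at offset 1 can be accepted despite the MINOFFSET restriction.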
|
||||
static void reset_table_compress(qlz_state_compress *state)
|
||||
{
|
||||
int i;
|
||||
for(i = 0; i < QLZ_HASH_VALUES; i++)
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
state->hash[i].offset = 0;
|
||||
#else
|
||||
state->hash_counter[i] = 0;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
static void reset_table_decompress(qlz_state_decompress *state)
|
||||
{
|
||||
int i;
|
||||
(void)state;
|
||||
(void)i;
|
||||
#if QLZ_COMPRESSION_LEVEL == 2
|
||||
for(i = 0; i < QLZ_HASH_VALUES; i++)
|
||||
{
|
||||
state->hash_counter[i] = 0;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
static __inline ui32 hash_func(ui32 i)
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 2
|
||||
return ((i >> 9) ^ (i >> 13) ^ i) & (QLZ_HASH_VALUES - 1);
|
||||
#else
|
||||
return ((i >> 12) ^ i) & (QLZ_HASH_VALUES - 1);
|
||||
#endif
|
||||
}
|
||||
|
||||
static __inline ui32 fast_read(void const *src, ui32 bytes)
|
||||
{
|
||||
#ifndef X86X64
|
||||
unsigned char *p = (unsigned char*)src;
|
||||
switch (bytes)
|
||||
{
|
||||
case 4:
|
||||
return(*p | *(p + 1) << 8 | *(p + 2) << 16 | *(p + 3) << 24);
|
||||
case 3:
|
||||
return(*p | *(p + 1) << 8 | *(p + 2) << 16);
|
||||
case 2:
|
||||
return(*p | *(p + 1) << 8);
|
||||
case 1:
|
||||
return(*p);
|
||||
}
|
||||
return 0;
|
||||
#else
|
||||
if (bytes >= 1 && bytes <= 4)
|
||||
return *((ui32*)src);
|
||||
else
|
||||
return 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
static __inline ui32 hashat(const unsigned char *src)
|
||||
{
|
||||
ui32 fetch, hash;
|
||||
fetch = fast_read(src, 3);
|
||||
hash = hash_func(fetch);
|
||||
return hash;
|
||||
}
|
||||
|
||||
static __inline void fast_write(ui32 f, void *dst, size_t bytes)
|
||||
{
|
||||
#ifndef X86X64
|
||||
unsigned char *p = (unsigned char*)dst;
|
||||
|
||||
switch (bytes)
|
||||
{
|
||||
case 4:
|
||||
*p = (unsigned char)f;
|
||||
*(p + 1) = (unsigned char)(f >> 8);
|
||||
*(p + 2) = (unsigned char)(f >> 16);
|
||||
*(p + 3) = (unsigned char)(f >> 24);
|
||||
return;
|
||||
case 3:
|
||||
*p = (unsigned char)f;
|
||||
*(p + 1) = (unsigned char)(f >> 8);
|
||||
*(p + 2) = (unsigned char)(f >> 16);
|
||||
return;
|
||||
case 2:
|
||||
*p = (unsigned char)f;
|
||||
*(p + 1) = (unsigned char)(f >> 8);
|
||||
return;
|
||||
case 1:
|
||||
*p = (unsigned char)f;
|
||||
return;
|
||||
}
|
||||
#else
|
||||
switch (bytes)
|
||||
{
|
||||
case 4:
|
||||
*((ui32*)dst) = f;
|
||||
return;
|
||||
case 3:
|
||||
*((ui32*)dst) = f;
|
||||
return;
|
||||
case 2:
|
||||
*((ui16 *)dst) = (ui16)f;
|
||||
return;
|
||||
case 1:
|
||||
*((unsigned char*)dst) = (unsigned char)f;
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
size_t qlz_size_decompressed(const char *source)
|
||||
{
|
||||
ui32 n, r;
|
||||
n = (((*source) & 2) == 2) ? 4 : 1;
|
||||
r = fast_read(source + 1 + n, n);
|
||||
r = r & (0xffffffff >> ((4 - n)*8));
|
||||
return r;
|
||||
}
|
||||
|
||||
size_t qlz_size_compressed(const char *source)
|
||||
{
|
||||
ui32 n, r;
|
||||
n = (((*source) & 2) == 2) ? 4 : 1;
|
||||
r = fast_read(source + 1, n);
|
||||
r = r & (0xffffffff >> ((4 - n)*8));
|
||||
return r;
|
||||
}
|
||||
|
||||
size_t qlz_size_header(const char *source)
|
||||
{
|
||||
size_t n = 2*((((*source) & 2) == 2) ? 4 : 1) + 1;
|
||||
return n;
|
||||
}
|
||||
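// Note (added for clarity): bit 1 of the first header byte selects the header
// layout that the three functions above decode: a short header stores the
// compressed and decompressed sizes in one byte each (3 bytes total), a long
// header in four bytes each (9 bytes total).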
|
||||
|
||||
static __inline void memcpy_up(unsigned char *dst, const unsigned char *src, ui32 n)
|
||||
{
|
||||
// Caution if modifying memcpy_up! Overlap of dst and src must be specially handled.
|
||||
#ifndef X86X64
|
||||
unsigned char *end = dst + n;
|
||||
while(dst < end)
|
||||
{
|
||||
*dst = *src;
|
||||
dst++;
|
||||
src++;
|
||||
}
|
||||
#else
|
||||
ui32 f = 0;
|
||||
do
|
||||
{
|
||||
*(ui32 *)(dst + f) = *(ui32 *)(src + f);
|
||||
f += MINOFFSET + 1;
|
||||
}
|
||||
while (f < n);
|
||||
#endif
|
||||
}
|
||||
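// Note (added for clarity): unlike memcpy, memcpy_up must tolerate dst and
// src overlapping, because an LZ match may replicate bytes it has just
// produced; hence the forward byte-by-byte / small-stride copies above.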
|
||||
static __inline void update_hash(qlz_state_decompress *state, const unsigned char *s)
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
ui32 hash;
|
||||
hash = hashat(s);
|
||||
state->hash[hash].offset = s;
|
||||
state->hash_counter[hash] = 1;
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
ui32 hash;
|
||||
unsigned char c;
|
||||
hash = hashat(s);
|
||||
c = state->hash_counter[hash];
|
||||
state->hash[hash].offset[c & (QLZ_POINTERS - 1)] = s;
|
||||
c++;
|
||||
state->hash_counter[hash] = c;
|
||||
#endif
|
||||
(void)state;
|
||||
(void)s;
|
||||
}
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL <= 2
|
||||
static void update_hash_upto(qlz_state_decompress *state, unsigned char **lh, const unsigned char *max)
|
||||
{
|
||||
while(*lh < max)
|
||||
{
|
||||
(*lh)++;
|
||||
update_hash(state, *lh);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
static size_t qlz_compress_core(const unsigned char *source, unsigned char *destination, size_t size, qlz_state_compress *state)
|
||||
{
|
||||
const unsigned char *last_byte = source + size - 1;
|
||||
const unsigned char *src = source;
|
||||
unsigned char *cword_ptr = destination;
|
||||
unsigned char *dst = destination + CWORD_LEN;
|
||||
ui32 cword_val = 1U << 31;
|
||||
const unsigned char *last_matchstart = last_byte - UNCONDITIONAL_MATCHLEN - UNCOMPRESSED_END;
|
||||
ui32 fetch = 0;
|
||||
unsigned int lits = 0;
|
||||
|
||||
(void) lits;
|
||||
|
||||
if(src <= last_matchstart)
|
||||
fetch = fast_read(src, 3);
|
||||
|
||||
while(src <= last_matchstart)
|
||||
{
|
||||
if ((cword_val & 1) == 1)
|
||||
{
|
||||
// store uncompressed if compression ratio is too low
|
||||
if (src > source + (size >> 1) && dst - destination > src - source - ((src - source) >> 5))
|
||||
return 0;
|
||||
|
||||
fast_write((cword_val >> 1) | (1U << 31), cword_ptr, CWORD_LEN);
|
||||
|
||||
cword_ptr = dst;
|
||||
dst += CWORD_LEN;
|
||||
cword_val = 1U << 31;
|
||||
fetch = fast_read(src, 3);
|
||||
}
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
{
|
||||
const unsigned char *o;
|
||||
ui32 hash, cached;
|
||||
|
||||
hash = hash_func(fetch);
|
||||
cached = fetch ^ state->hash[hash].cache;
|
||||
state->hash[hash].cache = fetch;
|
||||
|
||||
o = state->hash[hash].offset + OFFSET_BASE;
|
||||
state->hash[hash].offset = CAST(src - OFFSET_BASE);
|
||||
|
||||
#ifdef X86X64
|
||||
if ((cached & 0xffffff) == 0 && o != OFFSET_BASE && (src - o > MINOFFSET || (src == o + 1 && lits >= 3 && src > source + 3 && same(src - 3, 6))))
|
||||
{
|
||||
if(cached != 0)
|
||||
{
|
||||
#else
|
||||
if (cached == 0 && o != OFFSET_BASE && (src - o > MINOFFSET || (src == o + 1 && lits >= 3 && src > source + 3 && same(src - 3, 6))))
|
||||
{
|
||||
if (*(o + 3) != *(src + 3))
|
||||
{
|
||||
#endif
|
||||
hash <<= 4;
|
||||
cword_val = (cword_val >> 1) | (1U << 31);
|
||||
fast_write((3 - 2) | hash, dst, 2);
|
||||
src += 3;
|
||||
dst += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
const unsigned char *old_src = src;
|
||||
size_t matchlen;
|
||||
hash <<= 4;
|
||||
|
||||
cword_val = (cword_val >> 1) | (1U << 31);
|
||||
src += 4;
|
||||
|
||||
if(*(o + (src - old_src)) == *src)
|
||||
{
|
||||
src++;
|
||||
if(*(o + (src - old_src)) == *src)
|
||||
{
|
||||
size_t q = last_byte - UNCOMPRESSED_END - (src - 5) + 1;
|
||||
size_t remaining = q > 255 ? 255 : q;
|
||||
src++;
|
||||
while(*(o + (src - old_src)) == *src && (size_t)(src - old_src) < remaining)
|
||||
src++;
|
||||
}
|
||||
}
|
||||
|
||||
matchlen = src - old_src;
|
||||
if (matchlen < 18)
|
||||
{
|
||||
fast_write((ui32)(matchlen - 2) | hash, dst, 2);
|
||||
dst += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
fast_write((ui32)(matchlen << 16) | hash, dst, 3);
|
||||
dst += 3;
|
||||
}
|
||||
}
|
||||
fetch = fast_read(src, 3);
|
||||
lits = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
lits++;
|
||||
*dst = *src;
|
||||
src++;
|
||||
dst++;
|
||||
cword_val = (cword_val >> 1);
|
||||
#ifdef X86X64
|
||||
fetch = fast_read(src, 3);
|
||||
#else
|
||||
fetch = (fetch >> 8 & 0xffff) | (*(src + 2) << 16);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
#elif QLZ_COMPRESSION_LEVEL >= 2
|
||||
{
|
||||
const unsigned char *o, *offset2;
|
||||
ui32 hash, matchlen, k, m, best_k = 0;
|
||||
unsigned char c;
|
||||
size_t remaining = (last_byte - UNCOMPRESSED_END - src + 1) > 255 ? 255 : (last_byte - UNCOMPRESSED_END - src + 1);
|
||||
(void)best_k;
|
||||
|
||||
|
||||
//hash = hashat(src);
|
||||
fetch = fast_read(src, 3);
|
||||
hash = hash_func(fetch);
|
||||
|
||||
c = state->hash_counter[hash];
|
||||
|
||||
offset2 = state->hash[hash].offset[0];
|
||||
if(offset2 < src - MINOFFSET && c > 0 && ((fast_read(offset2, 3) ^ fetch) & 0xffffff) == 0)
|
||||
{
|
||||
matchlen = 3;
|
||||
if(*(offset2 + matchlen) == *(src + matchlen))
|
||||
{
|
||||
matchlen = 4;
|
||||
while(*(offset2 + matchlen) == *(src + matchlen) && matchlen < remaining)
|
||||
matchlen++;
|
||||
}
|
||||
}
|
||||
else
|
||||
matchlen = 0;
|
||||
for(k = 1; k < QLZ_POINTERS && c > k; k++)
|
||||
{
|
||||
o = state->hash[hash].offset[k];
|
||||
#if QLZ_COMPRESSION_LEVEL == 3
|
||||
if(((fast_read(o, 3) ^ fetch) & 0xffffff) == 0 && o < src - MINOFFSET)
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
if(*(src + matchlen) == *(o + matchlen) && ((fast_read(o, 3) ^ fetch) & 0xffffff) == 0 && o < src - MINOFFSET)
|
||||
#endif
|
||||
{
|
||||
m = 3;
|
||||
while(*(o + m) == *(src + m) && m < remaining)
|
||||
m++;
|
||||
#if QLZ_COMPRESSION_LEVEL == 3
|
||||
if ((m > matchlen) || (m == matchlen && o > offset2))
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
if (m > matchlen)
|
||||
#endif
|
||||
{
|
||||
offset2 = o;
|
||||
matchlen = m;
|
||||
best_k = k;
|
||||
}
|
||||
}
|
||||
}
|
||||
o = offset2;
|
||||
state->hash[hash].offset[c & (QLZ_POINTERS - 1)] = src;
|
||||
c++;
|
||||
state->hash_counter[hash] = c;
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL == 3
|
||||
if(matchlen > 2 && src - o < 131071)
|
||||
{
|
||||
ui32 u;
|
||||
size_t offset = src - o;
|
||||
|
||||
for(u = 1; u < matchlen; u++)
|
||||
{
|
||||
hash = hashat(src + u);
|
||||
c = state->hash_counter[hash]++;
|
||||
state->hash[hash].offset[c & (QLZ_POINTERS - 1)] = src + u;
|
||||
}
|
||||
|
||||
cword_val = (cword_val >> 1) | (1U << 31);
|
||||
src += matchlen;
|
||||
|
||||
if(matchlen == 3 && offset <= 63)
|
||||
{
|
||||
*dst = (unsigned char)(offset << 2);
|
||||
dst++;
|
||||
}
|
||||
else if (matchlen == 3 && offset <= 16383)
|
||||
{
|
||||
ui32 f = (ui32)((offset << 2) | 1);
|
||||
fast_write(f, dst, 2);
|
||||
dst += 2;
|
||||
}
|
||||
else if (matchlen <= 18 && offset <= 1023)
|
||||
{
|
||||
ui32 f = ((matchlen - 3) << 2) | ((ui32)offset << 6) | 2;
|
||||
fast_write(f, dst, 2);
|
||||
dst += 2;
|
||||
}
|
||||
|
||||
else if(matchlen <= 33)
|
||||
{
|
||||
ui32 f = ((matchlen - 2) << 2) | ((ui32)offset << 7) | 3;
|
||||
fast_write(f, dst, 3);
|
||||
dst += 3;
|
||||
}
|
||||
else
|
||||
{
|
||||
ui32 f = ((matchlen - 3) << 7) | ((ui32)offset << 15) | 3;
|
||||
fast_write(f, dst, 4);
|
||||
dst += 4;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
*dst = *src;
|
||||
src++;
|
||||
dst++;
|
||||
cword_val = (cword_val >> 1);
|
||||
}
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
|
||||
if(matchlen > 2)
|
||||
{
|
||||
cword_val = (cword_val >> 1) | (1U << 31);
|
||||
src += matchlen;
|
||||
|
||||
if (matchlen < 10)
|
||||
{
|
||||
ui32 f = best_k | ((matchlen - 2) << 2) | (hash << 5);
|
||||
fast_write(f, dst, 2);
|
||||
dst += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
ui32 f = best_k | (matchlen << 16) | (hash << 5);
|
||||
fast_write(f, dst, 3);
|
||||
dst += 3;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
*dst = *src;
|
||||
src++;
|
||||
dst++;
|
||||
cword_val = (cword_val >> 1);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
}
|
||||
while (src <= last_byte)
|
||||
{
|
||||
if ((cword_val & 1) == 1)
|
||||
{
|
||||
fast_write((cword_val >> 1) | (1U << 31), cword_ptr, CWORD_LEN);
|
||||
cword_ptr = dst;
|
||||
dst += CWORD_LEN;
|
||||
cword_val = 1U << 31;
|
||||
}
|
||||
#if QLZ_COMPRESSION_LEVEL < 3
|
||||
if (src <= last_byte - 3)
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
ui32 hash, fetch;
|
||||
fetch = fast_read(src, 3);
|
||||
hash = hash_func(fetch);
|
||||
state->hash[hash].offset = CAST(src - OFFSET_BASE);
|
||||
state->hash[hash].cache = fetch;
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
ui32 hash;
|
||||
unsigned char c;
|
||||
hash = hashat(src);
|
||||
c = state->hash_counter[hash];
|
||||
state->hash[hash].offset[c & (QLZ_POINTERS - 1)] = src;
|
||||
c++;
|
||||
state->hash_counter[hash] = c;
|
||||
#endif
|
||||
}
|
||||
#endif
|
||||
*dst = *src;
|
||||
src++;
|
||||
dst++;
|
||||
cword_val = (cword_val >> 1);
|
||||
}
|
||||
|
||||
while((cword_val & 1) != 1)
|
||||
cword_val = (cword_val >> 1);
|
||||
|
||||
fast_write((cword_val >> 1) | (1U << 31), cword_ptr, CWORD_LEN);
|
||||
|
||||
// minimum size must be 9 bytes so that the qlz_size functions can always read a full 9-byte header
|
||||
return dst - destination < 9 ? 9 : dst - destination;
|
||||
}
|
||||
|
||||
static size_t qlz_decompress_core(const unsigned char *source, unsigned char *destination, size_t size, qlz_state_decompress *state, const unsigned char *history)
|
||||
{
|
||||
const unsigned char *src = source + qlz_size_header((const char *)source);
|
||||
unsigned char *dst = destination;
|
||||
const unsigned char *last_destination_byte = destination + size - 1;
|
||||
ui32 cword_val = 1;
|
||||
const unsigned char *last_matchstart = last_destination_byte - UNCONDITIONAL_MATCHLEN - UNCOMPRESSED_END;
|
||||
unsigned char *last_hashed = destination - 1;
|
||||
const unsigned char *last_source_byte = source + qlz_size_compressed((const char *)source) - 1;
|
||||
static const ui32 bitlut[16] = {4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0};  // trailing zero bits of the low nibble (= consecutive literal flags), capped at 4
|
||||
|
||||
(void) last_source_byte;
|
||||
(void) last_hashed;
|
||||
(void) state;
|
||||
(void) history;
|
||||
|
||||
for(;;)
|
||||
{
|
||||
ui32 fetch;
|
||||
|
||||
if (cword_val == 1)
|
||||
{
|
||||
#ifdef QLZ_MEMORY_SAFE
|
||||
if(src + CWORD_LEN - 1 > last_source_byte)
|
||||
return 0;
|
||||
#endif
|
||||
cword_val = fast_read(src, CWORD_LEN);
|
||||
src += CWORD_LEN;
|
||||
}
|
||||
|
||||
#ifdef QLZ_MEMORY_SAFE
|
||||
if(src + 4 - 1 > last_source_byte)
|
||||
return 0;
|
||||
#endif
|
||||
|
||||
fetch = fast_read(src, 4);
|
||||
|
||||
if ((cword_val & 1) == 1)
|
||||
{
|
||||
ui32 matchlen;
|
||||
const unsigned char *offset2;
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
ui32 hash;
|
||||
cword_val = cword_val >> 1;
|
||||
hash = (fetch >> 4) & 0xfff;
|
||||
offset2 = (const unsigned char *)(size_t)state->hash[hash].offset;
|
||||
|
||||
if((fetch & 0xf) != 0)
|
||||
{
|
||||
matchlen = (fetch & 0xf) + 2;
|
||||
src += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
matchlen = *(src + 2);
|
||||
src += 3;
|
||||
}
|
||||
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
ui32 hash;
|
||||
unsigned char c;
|
||||
cword_val = cword_val >> 1;
|
||||
hash = (fetch >> 5) & 0x7ff;
|
||||
c = (unsigned char)(fetch & 0x3);
|
||||
offset2 = state->hash[hash].offset[c];
|
||||
|
||||
if((fetch & (28)) != 0)
|
||||
{
|
||||
matchlen = ((fetch >> 2) & 0x7) + 2;
|
||||
src += 2;
|
||||
}
|
||||
else
|
||||
{
|
||||
matchlen = *(src + 2);
|
||||
src += 3;
|
||||
}
|
||||
|
||||
#elif QLZ_COMPRESSION_LEVEL == 3
|
||||
ui32 offset;
|
||||
cword_val = cword_val >> 1;
|
||||
if ((fetch & 3) == 0)
|
||||
{
|
||||
offset = (fetch & 0xff) >> 2;
|
||||
matchlen = 3;
|
||||
src++;
|
||||
}
|
||||
else if ((fetch & 2) == 0)
|
||||
{
|
||||
offset = (fetch & 0xffff) >> 2;
|
||||
matchlen = 3;
|
||||
src += 2;
|
||||
}
|
||||
else if ((fetch & 1) == 0)
|
||||
{
|
||||
offset = (fetch & 0xffff) >> 6;
|
||||
matchlen = ((fetch >> 2) & 15) + 3;
|
||||
src += 2;
|
||||
}
|
||||
else if ((fetch & 127) != 3)
|
||||
{
|
||||
offset = (fetch >> 7) & 0x1ffff;
|
||||
matchlen = ((fetch >> 2) & 0x1f) + 2;
|
||||
src += 3;
|
||||
}
|
||||
else
|
||||
{
|
||||
offset = (fetch >> 15);
|
||||
matchlen = ((fetch >> 7) & 255) + 3;
|
||||
src += 4;
|
||||
}
|
||||
|
||||
offset2 = dst - offset;
|
||||
#endif
|
||||
|
||||
#ifdef QLZ_MEMORY_SAFE
|
||||
if(offset2 < history || offset2 > dst - MINOFFSET - 1)
|
||||
return 0;
|
||||
|
||||
if(matchlen > (ui32)(last_destination_byte - dst - UNCOMPRESSED_END + 1))
|
||||
return 0;
|
||||
#endif
|
||||
|
||||
memcpy_up(dst, offset2, matchlen);
|
||||
dst += matchlen;
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL <= 2
|
||||
update_hash_upto(state, &last_hashed, dst - matchlen);
|
||||
last_hashed = dst - 1;
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
if (dst < last_matchstart)
|
||||
{
|
||||
unsigned int n = bitlut[cword_val & 0xf];
|
||||
#ifdef X86X64
|
||||
*(ui32 *)dst = *(ui32 *)src;
|
||||
#else
|
||||
memcpy_up(dst, src, 4);
|
||||
#endif
|
||||
cword_val = cword_val >> n;
|
||||
dst += n;
|
||||
src += n;
|
||||
#if QLZ_COMPRESSION_LEVEL <= 2
|
||||
update_hash_upto(state, &last_hashed, dst - 3);
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
while(dst <= last_destination_byte)
|
||||
{
|
||||
if (cword_val == 1)
|
||||
{
|
||||
src += CWORD_LEN;
|
||||
cword_val = 1U << 31;
|
||||
}
|
||||
#ifdef QLZ_MEMORY_SAFE
|
||||
if(src >= last_source_byte + 1)
|
||||
return 0;
|
||||
#endif
|
||||
*dst = *src;
|
||||
dst++;
|
||||
src++;
|
||||
cword_val = cword_val >> 1;
|
||||
}
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL <= 2
|
||||
update_hash_upto(state, &last_hashed, last_destination_byte - 3); // todo, use constant
|
||||
#endif
|
||||
return size;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
size_t qlz_compress(const void *source, char *destination, size_t size, qlz_state_compress *state)
|
||||
{
|
||||
size_t r;
|
||||
ui32 compressed;
|
||||
size_t base;
|
||||
|
||||
if(size == 0 || size > 0xffffffff - 400)
|
||||
return 0;
|
||||
|
||||
if(size < 216)
|
||||
base = 3;
|
||||
else
|
||||
base = 9;
|
||||
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
if (state->stream_counter + size - 1 >= QLZ_STREAMING_BUFFER)
|
||||
#endif
|
||||
{
|
||||
reset_table_compress(state);
|
||||
r = base + qlz_compress_core((const unsigned char *)source, (unsigned char*)destination + base, size, state);
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
reset_table_compress(state);
|
||||
#endif
|
||||
if(r == base)
|
||||
{
|
||||
memcpy(destination + base, source, size);
|
||||
r = size + base;
|
||||
compressed = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
compressed = 1;
|
||||
}
|
||||
state->stream_counter = 0;
|
||||
}
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
else
|
||||
{
|
||||
unsigned char *src = state->stream_buffer + state->stream_counter;
|
||||
|
||||
memcpy(src, source, size);
|
||||
r = base + qlz_compress_core(src, (unsigned char*)destination + base, size, state);
|
||||
|
||||
if(r == base)
|
||||
{
|
||||
memcpy(destination + base, src, size);
|
||||
r = size + base;
|
||||
compressed = 0;
|
||||
reset_table_compress(state);
|
||||
}
|
||||
else
|
||||
{
|
||||
compressed = 1;
|
||||
}
|
||||
state->stream_counter += size;
|
||||
}
|
||||
#endif
|
||||
if(base == 3)
|
||||
{
|
||||
*destination = (unsigned char)(0 | compressed);
|
||||
*(destination + 1) = (unsigned char)r;
|
||||
*(destination + 2) = (unsigned char)size;
|
||||
}
|
||||
else
|
||||
{
|
||||
*destination = (unsigned char)(2 | compressed);
|
||||
fast_write((ui32)r, destination + 1, 4);
|
||||
fast_write((ui32)size, destination + 5, 4);
|
||||
}
|
||||
|
||||
*destination |= (QLZ_COMPRESSION_LEVEL << 2);
|
||||
*destination |= (1 << 6);
|
||||
*destination |= ((QLZ_STREAMING_BUFFER == 0 ? 0 : (QLZ_STREAMING_BUFFER == 100000 ? 1 : (QLZ_STREAMING_BUFFER == 1000000 ? 2 : 3))) << 4);
|
||||
|
||||
// header byte layout: 76543210
|
||||
// 01SSLLHC  (S: streaming-buffer setting, L: compression level, H: long-header flag, C: compressed flag)
|
||||
|
||||
return r;
|
||||
}
|
||||
|
||||
size_t qlz_decompress(const char *source, void *destination, qlz_state_decompress *state)
|
||||
{
|
||||
size_t dsiz = qlz_size_decompressed(source);
|
||||
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
if (state->stream_counter + qlz_size_decompressed(source) - 1 >= QLZ_STREAMING_BUFFER)
|
||||
#endif
|
||||
{
|
||||
if((*source & 1) == 1)
|
||||
{
|
||||
reset_table_decompress(state);
|
||||
dsiz = qlz_decompress_core((const unsigned char *)source, (unsigned char *)destination, dsiz, state, (const unsigned char *)destination);
|
||||
}
|
||||
else
|
||||
{
|
||||
memcpy(destination, source + qlz_size_header(source), dsiz);
|
||||
}
|
||||
state->stream_counter = 0;
|
||||
reset_table_decompress(state);
|
||||
}
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
else
|
||||
{
|
||||
unsigned char *dst = state->stream_buffer + state->stream_counter;
|
||||
if((*source & 1) == 1)
|
||||
{
|
||||
dsiz = qlz_decompress_core((const unsigned char *)source, dst, dsiz, state, (const unsigned char *)state->stream_buffer);
|
||||
}
|
||||
else
|
||||
{
|
||||
memcpy(dst, source + qlz_size_header(source), dsiz);
|
||||
reset_table_decompress(state);
|
||||
}
|
||||
memcpy(destination, dst, dsiz);
|
||||
state->stream_counter += dsiz;
|
||||
}
|
||||
#endif
|
||||
return dsiz;
|
||||
}
|
||||
|
|
@@ -1,150 +0,0 @@
|
|||
#ifndef QLZ_HEADER
|
||||
#define QLZ_HEADER
|
||||
|
||||
// Fast data compression library
|
||||
// Copyright (C) 2006-2011 Lasse Mikkel Reinhold
|
||||
// lar@quicklz.com
|
||||
//
|
||||
// QuickLZ can be used for free under the GPL 1, 2 or 3 license (where anything
|
||||
// released into public must be open source) or under a commercial license if such
|
||||
// has been acquired (see http://www.quicklz.com/order.html). The commercial license
|
||||
// does not cover derived or ported versions created by third parties under GPL.
|
||||
|
||||
// You can edit following user settings. Data must be decompressed with the same
|
||||
// setting of QLZ_COMPRESSION_LEVEL and QLZ_STREAMING_BUFFER as it was compressed
|
||||
// (see manual). If QLZ_STREAMING_BUFFER > 0, scratch buffers must be initially
|
||||
// zeroed out (see manual). First #ifndef makes it possible to define settings from
|
||||
// the outside like the compiler command line.
|
||||
|
||||
// 1.5.0 final
|
||||
|
||||
#ifndef QLZ_COMPRESSION_LEVEL
|
||||
|
||||
// 1 gives fastest compression speed. 3 gives fastest decompression speed and best
|
||||
// compression ratio.
|
||||
#define QLZ_COMPRESSION_LEVEL 1
|
||||
//#define QLZ_COMPRESSION_LEVEL 2
|
||||
//#define QLZ_COMPRESSION_LEVEL 3
|
||||
|
||||
// If > 0, zero out both states prior to first call to qlz_compress() or qlz_decompress()
|
||||
// and decompress packets in the same order as they were compressed
|
||||
#define QLZ_STREAMING_BUFFER 0
|
||||
//#define QLZ_STREAMING_BUFFER 100000
|
||||
//#define QLZ_STREAMING_BUFFER 1000000
|
||||
|
||||
// Guarantees that decompression of corrupted data cannot crash. Decreases decompression
|
||||
// speed 10-20%. Compression speed not affected.
|
||||
//#define QLZ_MEMORY_SAFE
|
||||
#endif
|
||||
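// Example (added for illustration): the settings above can instead be set
// from the compiler command line, e.g.
//   cc -DQLZ_COMPRESSION_LEVEL=3 -DQLZ_MEMORY_SAFE -c quicklz.c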
|
||||
#define QLZ_VERSION_MAJOR 1
|
||||
#define QLZ_VERSION_MINOR 5
|
||||
#define QLZ_VERSION_REVISION 0
|
||||
|
||||
// Using size_t, memset() and memcpy()
|
||||
#include <string.h>
|
||||
|
||||
// Verify compression level
|
||||
#if QLZ_COMPRESSION_LEVEL != 1 && QLZ_COMPRESSION_LEVEL != 2 && QLZ_COMPRESSION_LEVEL != 3
|
||||
#error QLZ_COMPRESSION_LEVEL must be 1, 2 or 3
|
||||
#endif
|
||||
|
||||
typedef unsigned int ui32;
|
||||
typedef unsigned short int ui16;
|
||||
|
||||
// Decrease QLZ_POINTERS for level 3 to increase compression speed. Do not touch any other values!
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
#define QLZ_POINTERS 1
|
||||
#define QLZ_HASH_VALUES 4096
|
||||
#elif QLZ_COMPRESSION_LEVEL == 2
|
||||
#define QLZ_POINTERS 4
|
||||
#define QLZ_HASH_VALUES 2048
|
||||
#elif QLZ_COMPRESSION_LEVEL == 3
|
||||
#define QLZ_POINTERS 16
|
||||
#define QLZ_HASH_VALUES 4096
|
||||
#endif
|
||||
|
||||
// Detect if pointer size is 64-bit. It's not fatal if some 64-bit target is not detected because this is only for adding an optional 64-bit optimization.
|
||||
#if defined _LP64 || defined __LP64__ || defined __64BIT__ || _ADDR64 || defined _WIN64 || defined __arch64__ || __WORDSIZE == 64 || (defined __sparc && defined __sparcv9) || defined __x86_64 || defined __amd64 || defined __x86_64__ || defined _M_X64 || defined _M_IA64 || defined __ia64 || defined __IA64__
|
||||
#define QLZ_PTR_64
|
||||
#endif
|
||||
|
||||
// hash entry
|
||||
typedef struct
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
ui32 cache;
|
||||
#if defined QLZ_PTR_64 && QLZ_STREAMING_BUFFER == 0
|
||||
unsigned int offset;
|
||||
#else
|
||||
const unsigned char *offset;
|
||||
#endif
|
||||
#else
|
||||
const unsigned char *offset[QLZ_POINTERS];
|
||||
#endif
|
||||
|
||||
} qlz_hash_compress;
|
||||
|
||||
typedef struct
|
||||
{
|
||||
#if QLZ_COMPRESSION_LEVEL == 1
|
||||
const unsigned char *offset;
|
||||
#else
|
||||
const unsigned char *offset[QLZ_POINTERS];
|
||||
#endif
|
||||
} qlz_hash_decompress;
|
||||
|
||||
|
||||
// states
|
||||
typedef struct
|
||||
{
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
unsigned char stream_buffer[QLZ_STREAMING_BUFFER];
|
||||
#endif
|
||||
size_t stream_counter;
|
||||
qlz_hash_compress hash[QLZ_HASH_VALUES];
|
||||
unsigned char hash_counter[QLZ_HASH_VALUES];
|
||||
} qlz_state_compress;
|
||||
|
||||
|
||||
#if QLZ_COMPRESSION_LEVEL == 1 || QLZ_COMPRESSION_LEVEL == 2
|
||||
typedef struct
|
||||
{
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
unsigned char stream_buffer[QLZ_STREAMING_BUFFER];
|
||||
#endif
|
||||
qlz_hash_decompress hash[QLZ_HASH_VALUES];
|
||||
unsigned char hash_counter[QLZ_HASH_VALUES];
|
||||
size_t stream_counter;
|
||||
} qlz_state_decompress;
|
||||
#elif QLZ_COMPRESSION_LEVEL == 3
|
||||
typedef struct
|
||||
{
|
||||
#if QLZ_STREAMING_BUFFER > 0
|
||||
unsigned char stream_buffer[QLZ_STREAMING_BUFFER];
|
||||
#endif
|
||||
#if QLZ_COMPRESSION_LEVEL <= 2
|
||||
qlz_hash_decompress hash[QLZ_HASH_VALUES];
|
||||
#endif
|
||||
size_t stream_counter;
|
||||
} qlz_state_decompress;
|
||||
#endif
|
||||
|
||||
|
||||
#if defined (__cplusplus)
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
// Public functions of QuickLZ
|
||||
size_t qlz_size_decompressed(const char *source);
|
||||
size_t qlz_size_compressed(const char *source);
|
||||
size_t qlz_compress(const void *source, char *destination, size_t size, qlz_state_compress *state);
|
||||
size_t qlz_decompress(const char *source, void *destination, qlz_state_decompress *state);
|
||||
int qlz_get_setting(int setting);
|
||||
|
||||
#if defined (__cplusplus)
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
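// Usage sketch (added for illustration, not part of the original header):
// round-trip a buffer through the public API declared above. Per the QuickLZ
// manual, the destination buffer must be at least size + 400 bytes.
#include "quicklz.h"

#include <cstring>
#include <iostream>
#include <vector>

int main() {
  const char input[] = "to be or not to be, that is the question";
  const size_t n = sizeof(input);

  std::vector<char> packed(n + 400);           // size + 400, per the manual
  qlz_state_compress cstate = {};              // states start zeroed
  const size_t clen = qlz_compress(input, packed.data(), n, &cstate);

  std::vector<char> unpacked(qlz_size_decompressed(packed.data()));
  qlz_state_decompress dstate = {};
  const size_t dlen = qlz_decompress(packed.data(), unpacked.data(), &dstate);

  std::cout << n << " -> " << clen << " -> " << dlen << " bytes\n";
  return std::memcmp(input, unpacked.data(), n) == 0 ? 0 : 1;
}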
|
@@ -1,159 +0,0 @@
|
|||
#ifndef DBoW3_TIMERS_H
|
||||
#define DBoW3_TIMERS_H
|
||||
|
||||
#include <chrono>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
namespace DBoW3 {
|
||||
|
||||
// timer
|
||||
struct ScopeTimer {
|
||||
std::chrono::high_resolution_clock::time_point begin, end;
|
||||
|
||||
std::string name;
|
||||
bool use;
|
||||
enum SCALE { NSEC, MSEC, SEC };
|
||||
SCALE sc;
|
||||
ScopeTimer(std::string name_, bool use_ = true, SCALE _sc = MSEC) {
|
||||
name = name_;
|
||||
use = use_;
|
||||
sc = _sc;
|
||||
begin = std::chrono::high_resolution_clock::now();
|
||||
}
|
||||
~ScopeTimer() {
|
||||
if (use) {
|
||||
end = std::chrono::high_resolution_clock::now();
|
||||
double fact = 1;
|
||||
std::string str;
|
||||
switch (sc) {
|
||||
case NSEC:
|
||||
fact = 1;
|
||||
str = "ns";
|
||||
break;
|
||||
case MSEC:
|
||||
fact = 1e6;
|
||||
str = "ms";
|
||||
break;
|
||||
case SEC:
|
||||
fact = 1e9;
|
||||
str = "s";
|
||||
break;
|
||||
};
|
||||
|
||||
std::cout << "Time (" << name << ")= "
|
||||
<< double(std::chrono::duration_cast<std::chrono::nanoseconds>(
|
||||
end - begin)
|
||||
.count()) /
|
||||
fact
|
||||
<< str << std::endl;
|
||||
}
|
||||
}
|
||||
};
|
||||
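// Usage sketch (added for illustration; extract_features is hypothetical):
//   {
//     ScopeTimer t("features");   // timing starts in the constructor
//     extract_features(image);
//   }                             // destructor prints "Time (features)= ...ms"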
|
||||
struct ScopedTimerEvents {
|
||||
enum SCALE { NSEC, MSEC, SEC };
|
||||
SCALE sc;
|
||||
std::vector<std::chrono::high_resolution_clock::time_point> vtimes;
|
||||
std::vector<std::string> names;
|
||||
std::string _name;
|
||||
|
||||
ScopedTimerEvents(std::string name = "", bool start = true,
|
||||
SCALE _sc = MSEC) {
|
||||
if (start) add("start");
|
||||
sc = _sc;
|
||||
_name = name;
|
||||
}
|
||||
|
||||
void add(std::string name) {
|
||||
vtimes.push_back(std::chrono::high_resolution_clock::now());
|
||||
names.push_back(name);
|
||||
}
|
||||
void addspaces(std::vector<std::string> &str) {
|
||||
// get max size
|
||||
size_t m = 0;  // largest size seen so far; size_t(-1) would be SIZE_MAX and the padding loop below would never terminate
|
||||
for (auto &s : str) m = std::max(s.size(), m);
|
||||
for (auto &s : str) {
|
||||
while (s.size() < m) s.push_back(' ');
|
||||
}
|
||||
}
|
||||
|
||||
~ScopedTimerEvents() {
|
||||
double fact = 1;
|
||||
std::string str;
|
||||
switch (sc) {
|
||||
case NSEC:
|
||||
fact = 1;
|
||||
str = "ns";
|
||||
break;
|
||||
case MSEC:
|
||||
fact = 1e6;
|
||||
str = "ms";
|
||||
break;
|
||||
case SEC:
|
||||
fact = 1e9;
|
||||
str = "s";
|
||||
break;
|
||||
};
|
||||
|
||||
add("total");
|
||||
addspaces(names);
|
||||
for (size_t i = 1; i < vtimes.size(); i++) {
|
||||
std::cout << "Time(" << _name << ")-" << names[i] << " "
|
||||
<< double(std::chrono::duration_cast<std::chrono::nanoseconds>(
|
||||
vtimes[i] - vtimes[i - 1])
|
||||
.count()) /
|
||||
fact
|
||||
<< str << " "
|
||||
<< double(std::chrono::duration_cast<std::chrono::nanoseconds>(
|
||||
vtimes[i] - vtimes[0])
|
||||
.count()) /
|
||||
fact
|
||||
<< str << std::endl;
|
||||
}
|
||||
}
|
||||
};
|
||||
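// Usage sketch (added for illustration; detect/match are hypothetical):
//   ScopedTimerEvents ev("pipeline");   // records a "start" event
//   detect(); ev.add("detect");
//   match();  ev.add("match");
//   // destructor prints per-step and cumulative times plus "total"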
|
||||
struct Timer {
|
||||
enum SCALE { NSEC, MSEC, SEC };
|
||||
|
||||
std::chrono::high_resolution_clock::time_point _s;
|
||||
double sum = 0, n = 0;
|
||||
std::string _name;
|
||||
Timer() {}
|
||||
|
||||
Timer(std::string name) : _name(name) {}
|
||||
void setName(std::string name) { _name = name; }
|
||||
void start() { _s = std::chrono::high_resolution_clock::now(); }
|
||||
void end() {
|
||||
auto e = std::chrono::high_resolution_clock::now();
|
||||
sum += double(
|
||||
std::chrono::duration_cast<std::chrono::nanoseconds>(e - _s).count());
|
||||
n++;
|
||||
}
|
||||
|
||||
void print(SCALE sc = MSEC) {
|
||||
double fact = 1;
|
||||
std::string str;
|
||||
switch (sc) {
|
||||
case NSEC:
|
||||
fact = 1;
|
||||
str = "ns";
|
||||
break;
|
||||
case MSEC:
|
||||
fact = 1e6;
|
||||
str = "ms";
|
||||
break;
|
||||
case SEC:
|
||||
fact = 1e9;
|
||||
str = "s";
|
||||
break;
|
||||
};
|
||||
std::cout << "Time(" << _name << ")= " << (sum / n) / fact << str
|
||||
<< std::endl;
|
||||
}
|
||||
};
|
||||
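// Usage sketch (added for illustration; do_match is hypothetical):
//   Timer t("match");
//   for (int i = 0; i < 100; ++i) { t.start(); do_match(); t.end(); }
//   t.print();   // prints the mean duration over the measured intervals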
}  // namespace DBoW3
|
||||
|
||||
#endif
|