Last active
January 2, 2018 15:15
-
-
Save csukuangfj/2cbe9374a28df0f2d709c88325357a53 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/** | |
* @brief compare the parsing time between yml file and hdf file for the SED model. | |
* @author Fangjun Kuang | |
* @date January, 2018 | |
*/ | |
#include <opencv2/core.hpp> | |
#include <opencv2/ximgproc.hpp> | |
#include <opencv2/hdf.hpp> | |
#include <stdio.h> | |
static int g_compress_level = 9; // HDF5 gzip compression level: 0 (none) to 9 (maximum)

/*! random forest used to detect edges.
 *  Two global instances are compared by this benchmark: `__rf` is filled from
 *  the YAML model, `rf` from the generated HDF5 file.
 *  NOTE(review): `__rf` is a reserved identifier in C++ (leading double
 *  underscore is reserved for the implementation) — consider renaming. */
static struct RandomForest
{
    /*! random forest options, e.g. number of trees */
    struct RandomForestOptions
    {
        // model params
        int numberOfOutputChannels;       /*!< number of edge orientation bins for output;
                                           *   derived, not read from the model file */
        int patchSize;                    /*!< width of image patches */
        int patchInnerSize;               /*!< width of predicted part inside patch */
        // feature params
        int regFeatureSmoothingRadius;    /*!< radius for smoothing of regular features
                                           *   (using convolution with triangle filter) */
        int ssFeatureSmoothingRadius;     /*!< radius for smoothing of additional features
                                           *   (using convolution with triangle filter) */
        int shrinkNumber;                 /*!< amount to shrink channels */
        int numberOfGradientOrientations; /*!< number of orientations per gradient scale */
        int gradientSmoothingRadius;      /*!< radius for smoothing of gradients
                                           *   (using convolution with triangle filter) */
        int gradientNormalizationRadius;  /*!< gradient normalization radius */
        int selfsimilarityGridSize;       /*!< number of self similarity cells */
        // detection params
        int numberOfTrees;                /*!< number of trees in forest to train */
        int numberOfTreesToEvaluate;      /*!< number of trees to evaluate per location */
        int stride;                       /*!< stride at which to compute edges */
    } options;

    int numberOfTreeNodes;               /*!< nodes per tree: childs.size() / numberOfTrees */

    std::vector <int> featureIds;     /*!< feature coordinate thresholded at k-th node */
    std::vector <float> thresholds;   /*!< threshold applied to featureIds[k] at k-th node */
    std::vector <int> childs;         /*!< k --> child[k] - 1, child[k] */
    std::vector <int> edgeBoundaries; /*!< per-tree "edgeBoundaries" sequences from the model,
                                       *   concatenated (exact semantics not documented here) */
    std::vector <int> edgeBins;       /*!< per-tree "edgeBins" sequences from the model,
                                       *   concatenated (exact semantics not documented here) */
} __rf, rf;
static void read_yml_model(const cv::String& filename) | |
{ | |
cv::FileStorage modelFile(filename, cv::FileStorage::READ); | |
CV_Assert( modelFile.isOpened() ); | |
__rf.options.stride | |
= modelFile["options"]["stride"]; | |
__rf.options.shrinkNumber | |
= modelFile["options"]["shrinkNumber"]; | |
__rf.options.patchSize | |
= modelFile["options"]["patchSize"]; | |
__rf.options.patchInnerSize | |
= modelFile["options"]["patchInnerSize"]; | |
__rf.options.numberOfGradientOrientations | |
= modelFile["options"]["numberOfGradientOrientations"]; | |
__rf.options.gradientSmoothingRadius | |
= modelFile["options"]["gradientSmoothingRadius"]; | |
__rf.options.regFeatureSmoothingRadius | |
= modelFile["options"]["regFeatureSmoothingRadius"]; | |
__rf.options.ssFeatureSmoothingRadius | |
= modelFile["options"]["ssFeatureSmoothingRadius"]; | |
__rf.options.gradientNormalizationRadius | |
= modelFile["options"]["gradientNormalizationRadius"]; | |
__rf.options.selfsimilarityGridSize | |
= modelFile["options"]["selfsimilarityGridSize"]; | |
__rf.options.numberOfTrees | |
= modelFile["options"]["numberOfTrees"]; | |
__rf.options.numberOfTreesToEvaluate | |
= modelFile["options"]["numberOfTreesToEvaluate"]; | |
__rf.options.numberOfOutputChannels = | |
2*(__rf.options.numberOfGradientOrientations + 1) + 3; | |
//-------------------------------------------- | |
cv::FileNode childs = modelFile["childs"]; | |
cv::FileNode featureIds = modelFile["featureIds"]; | |
std::vector <int> currentTree; | |
for(cv::FileNodeIterator it = childs.begin(); | |
it != childs.end(); ++it) | |
{ | |
(*it) >> currentTree; | |
std::copy(currentTree.begin(), currentTree.end(), | |
std::back_inserter(__rf.childs)); | |
} | |
for(cv::FileNodeIterator it = featureIds.begin(); | |
it != featureIds.end(); ++it) | |
{ | |
(*it) >> currentTree; | |
std::copy(currentTree.begin(), currentTree.end(), | |
std::back_inserter(__rf.featureIds)); | |
} | |
cv::FileNode thresholds = modelFile["thresholds"]; | |
std::vector <float> fcurrentTree; | |
for(cv::FileNodeIterator it = thresholds.begin(); | |
it != thresholds.end(); ++it) | |
{ | |
(*it) >> fcurrentTree; | |
std::copy(fcurrentTree.begin(), fcurrentTree.end(), | |
std::back_inserter(__rf.thresholds)); | |
} | |
cv::FileNode edgeBoundaries = modelFile["edgeBoundaries"]; | |
cv::FileNode edgeBins = modelFile["edgeBins"]; | |
for(cv::FileNodeIterator it = edgeBoundaries.begin(); | |
it != edgeBoundaries.end(); ++it) | |
{ | |
(*it) >> currentTree; | |
std::copy(currentTree.begin(), currentTree.end(), | |
std::back_inserter(__rf.edgeBoundaries)); | |
} | |
for(cv::FileNodeIterator it = edgeBins.begin(); | |
it != edgeBins.end(); ++it) | |
{ | |
(*it) >> currentTree; | |
std::copy(currentTree.begin(), currentTree.end(), | |
std::back_inserter(__rf.edgeBins)); | |
} | |
__rf.numberOfTreeNodes = int( __rf.childs.size() ) / __rf.options.numberOfTrees; | |
} | |
/**
 * @brief Write the forest parsed into `__rf` out as a compressed HDF5 file.
 * @param filename path of the HDF5 file to create
 *
 * Scalar options are stored as root-level attributes; each vector payload is
 * stored as a chunked, gzip-compressed 1xN dataset.
 */
static void generate_hdf_model(const cv::String& filename)
{
    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open(filename);

    // scalar options -> attributes
    h5io->atwrite(__rf.options.stride,                       "options.stride");
    h5io->atwrite(__rf.options.shrinkNumber,                 "options.shrinkNumber");
    h5io->atwrite(__rf.options.patchSize,                    "options.patchSize");
    h5io->atwrite(__rf.options.patchInnerSize,               "options.patchInnerSize");
    h5io->atwrite(__rf.options.numberOfGradientOrientations, "options.numberOfGradientOrientations");
    h5io->atwrite(__rf.options.gradientSmoothingRadius,      "options.gradientSmoothingRadius");
    h5io->atwrite(__rf.options.regFeatureSmoothingRadius,    "options.regFeatureSmoothingRadius");
    h5io->atwrite(__rf.options.ssFeatureSmoothingRadius,     "options.ssFeatureSmoothingRadius");
    h5io->atwrite(__rf.options.gradientNormalizationRadius,  "options.gradientNormalizationRadius");
    h5io->atwrite(__rf.options.selfsimilarityGridSize,       "options.selfsimilarityGridSize");
    h5io->atwrite(__rf.options.numberOfTrees,                "options.numberOfTrees");
    h5io->atwrite(__rf.options.numberOfTreesToEvaluate,      "options.numberOfTreesToEvaluate");

    // vector payloads -> 1xN chunked datasets, gzip-compressed
    const int ndims = 2;
    int chunk_dims[2] = {1, 100*1024};
    int sizes[2] = {1, 0};

    sizes[1] = (int) __rf.childs.size();
    h5io->dscreate(ndims, sizes, CV_32SC1, "childs", g_compress_level, chunk_dims);
    h5io->dswrite(__rf.childs, "childs");

    sizes[1] = (int) __rf.featureIds.size();
    h5io->dscreate(ndims, sizes, CV_32SC1, "featureIds", g_compress_level, chunk_dims);
    h5io->dswrite(__rf.featureIds, "featureIds");

    sizes[1] = (int) __rf.thresholds.size();
    h5io->dscreate(ndims, sizes, CV_32FC1, "thresholds", g_compress_level, chunk_dims);
    h5io->dswrite(__rf.thresholds, "thresholds");

    sizes[1] = (int) __rf.edgeBoundaries.size();
    h5io->dscreate(ndims, sizes, CV_32SC1, "edgeBoundaries", g_compress_level, chunk_dims);
    h5io->dswrite(__rf.edgeBoundaries, "edgeBoundaries");

    sizes[1] = (int) __rf.edgeBins.size();
    h5io->dscreate(ndims, sizes, CV_32SC1, "edgeBins", g_compress_level, chunk_dims);
    h5io->dswrite(__rf.edgeBins, "edgeBins");

    h5io->close();
}
/**
 * @brief Load the model back from an HDF5 file into the global forest `rf`.
 * @param filename path of the HDF5 file produced by generate_hdf_model()
 */
static void read_hdf_model(const cv::String& filename)
{
    cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open(filename);

    auto& opt = rf.options; // shorthand for the scalar option fields
    h5io->atread(&opt.stride,                       "options.stride");
    h5io->atread(&opt.shrinkNumber,                 "options.shrinkNumber");
    h5io->atread(&opt.patchSize,                    "options.patchSize");
    h5io->atread(&opt.patchInnerSize,               "options.patchInnerSize");
    h5io->atread(&opt.numberOfGradientOrientations, "options.numberOfGradientOrientations");
    h5io->atread(&opt.gradientSmoothingRadius,      "options.gradientSmoothingRadius");
    h5io->atread(&opt.regFeatureSmoothingRadius,    "options.regFeatureSmoothingRadius");
    h5io->atread(&opt.ssFeatureSmoothingRadius,     "options.ssFeatureSmoothingRadius");
    h5io->atread(&opt.gradientNormalizationRadius,  "options.gradientNormalizationRadius");
    h5io->atread(&opt.selfsimilarityGridSize,       "options.selfsimilarityGridSize");
    h5io->atread(&opt.numberOfTrees,                "options.numberOfTrees");
    h5io->atread(&opt.numberOfTreesToEvaluate,      "options.numberOfTreesToEvaluate");

    // derived quantity; mirrors the computation done for the YAML model
    opt.numberOfOutputChannels = 2*(opt.numberOfGradientOrientations + 1) + 3;

    h5io->dsread(rf.childs,         "childs");
    h5io->dsread(rf.featureIds,     "featureIds");
    h5io->dsread(rf.thresholds,     "thresholds");
    h5io->dsread(rf.edgeBoundaries, "edgeBoundaries");
    h5io->dsread(rf.edgeBins,       "edgeBins");

    rf.numberOfTreeNodes = int(rf.childs.size()) / rf.options.numberOfTrees;
    h5io->close();
}
static void check_results() | |
{ | |
CV_Assert(__rf.options.stride == rf.options.stride); | |
CV_Assert(__rf.options.shrinkNumber == rf.options.shrinkNumber); | |
CV_Assert(__rf.options.patchSize == rf.options.patchSize); | |
CV_Assert(__rf.options.patchInnerSize == rf.options.patchInnerSize); | |
CV_Assert(__rf.options.numberOfGradientOrientations == rf.options.numberOfGradientOrientations); | |
CV_Assert(__rf.options.gradientSmoothingRadius == rf.options.gradientSmoothingRadius); | |
CV_Assert(__rf.options.regFeatureSmoothingRadius == rf.options.regFeatureSmoothingRadius); | |
CV_Assert(__rf.options.ssFeatureSmoothingRadius == rf.options.ssFeatureSmoothingRadius); | |
CV_Assert(__rf.options.gradientNormalizationRadius == rf.options.gradientNormalizationRadius); | |
CV_Assert(__rf.options.selfsimilarityGridSize == rf.options.selfsimilarityGridSize); | |
CV_Assert(__rf.options.numberOfTrees == rf.options.numberOfTrees); | |
CV_Assert(__rf.options.numberOfTreesToEvaluate == rf.options.numberOfTreesToEvaluate); | |
CV_Assert(__rf.options.numberOfOutputChannels == rf.options.numberOfOutputChannels); | |
CV_Assert(__rf.numberOfTreeNodes == rf.numberOfTreeNodes); | |
CV_Assert(__rf.featureIds.size() == rf.featureIds.size()); | |
for (size_t i = 0; i < rf.featureIds.size(); i++) | |
CV_Assert(__rf.featureIds[i] == rf.featureIds[i]); | |
CV_Assert(__rf.thresholds.size() == rf.thresholds.size()); | |
for (size_t i = 0; i < rf.thresholds.size(); i++) | |
CV_Assert(cv::abs(__rf.thresholds[i] - rf.thresholds[i]) < 1e-7); | |
CV_Assert(__rf.childs.size() == rf.childs.size()); | |
for (size_t i = 0; i < rf.childs.size(); i++) | |
CV_Assert(__rf.childs[i] == rf.childs[i]); | |
CV_Assert(__rf.edgeBoundaries.size() == rf.edgeBoundaries.size()); | |
for (size_t i = 0; i < rf.edgeBoundaries.size(); i++) | |
CV_Assert(__rf.edgeBoundaries[i] == rf.edgeBoundaries[i]); | |
CV_Assert(__rf.edgeBins.size() == rf.edgeBins.size()); | |
for (size_t i = 0; i < rf.edgeBins.size(); i++) | |
CV_Assert(__rf.edgeBins[i] == rf.edgeBins[i]); | |
} | |
int main() | |
{ | |
// refer to https://github.com/opencv/opencv_extra/blob/master/testdata/cv/ximgproc/model.yml.gz | |
cv::String filename_yml = "model.yml.gz"; // downloaded from the above address | |
cv::String filename_hdf = "model.h5"; // generated by the program | |
remove(filename_hdf.c_str()); | |
cv::TickMeter tm; | |
printf("Start to read yml file....\n"); | |
tm.start(); | |
read_yml_model(filename_yml); | |
tm.stop(); | |
printf("Finish reading yml file in %.3f seconds\n", tm.getTimeSec()); | |
tm.reset(); | |
printf("Start to generate hdf file....\n"); | |
tm.start(); | |
generate_hdf_model(filename_hdf); | |
tm.stop(); | |
printf("Finish generating hdf file in %.3f seconds\n", tm.getTimeSec()); | |
tm.reset(); | |
printf("Start to read hdf file....\n"); | |
tm.start(); | |
read_hdf_model(filename_hdf); | |
tm.stop(); | |
printf("Finish reading hdf file in %.3f seconds\n", tm.getTimeSec()); | |
check_results(); | |
return 0; | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment