Added possibility of getting any intermediate blob with thrifty memory management
arrybn authored and mshabunin committed Jun 16, 2017
1 parent b18e357 commit aa0d806
Showing 18 changed files with 350 additions and 205 deletions.
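At a glance, the Net API migration in this commit: setNetInputs becomes setInputsNames, setBlob becomes setInput, and forward() now returns the requested blob instead of requiring a separate getBlob call. A minimal sketch of the new call sequence, assuming net is an already loaded dnn::Net and inputBlob is a preprocessed NCHW Mat, as in the samples below:

    net.setInput(inputBlob, "data");  // replaces net.setBlob(".data", inputBlob)
    Mat prob = net.forward("prob");   // replaces net.forward(); net.getBlob("prob")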
48 changes: 29 additions & 19 deletions modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -337,19 +337,35 @@ namespace dnn //! This namespace is used for dnn module functionality.
* In fact, this layer provides the only way to pass user data into the network.
* As any other layer, this layer can label its outputs and this function provides an easy way to do this.
*/
CV_WRAP void setNetInputs(const std::vector<String> &inputBlobNames);
CV_WRAP void setInputsNames(const std::vector<String> &inputBlobNames);

/** @brief Initializes and allocates all layers. */
CV_WRAP void allocate();

/** @brief Runs forward pass to compute output of layer @p toLayer.
/** @brief Runs forward pass to compute output of layer with name @p outputName.
* @param outputName name for layer which output is needed to get
* @return blob for first output of specified layer.
* @details By default runs forward pass for the whole network.
*/
CV_WRAP void forward(LayerId toLayer = String());
/** @brief Runs forward pass to compute output of layer @p toLayer, but computations start from @p startLayer */
void forward(LayerId startLayer, LayerId toLayer);
/** @overload */
void forward(const std::vector<LayerId> &startLayers, const std::vector<LayerId> &toLayers);
CV_WRAP Mat forward(const String& outputName = String());

/** @brief Runs forward pass to compute output of layer with name @p outputName.
* @param outputBlobs contains all output blobs for specified layer.
* @param outputName name for layer which output is needed to get
* @details If @p outputName is empty, runs forward pass for the whole network.
*/
CV_WRAP void forward(std::vector<Mat>& outputBlobs, const String& outputName = String());

/** @brief Runs forward pass to compute outputs of layers listed in @p outBlobNames.
* @param outputBlobs contains blobs for first outputs of specified layers.
* @param outBlobNames names for layers which outputs are needed to get
*/
CV_WRAP void forward(std::vector<Mat>& outputBlobs,
const std::vector<String>& outBlobNames);

/** @brief Runs forward pass to compute outputs of layers listed in @p outBlobNames.
* @param outputBlobs contains all output blobs for each layer specified in @p outBlobNames.
* @param outBlobNames names for layers which outputs are needed to get
*/
CV_WRAP void forward(std::vector<std::vector<Mat> >& outputBlobs,
const std::vector<String>& outBlobNames);

//TODO:
/** @brief Optimized forward.
@@ -369,7 +385,7 @@ namespace dnn //! This namespace is used for dnn module functionality.
* specific target. For layers that not represented in scheduling file
* or if no manual scheduling used at all, automatic scheduling will be applied.
*/
void compileHalide(const std::string& scheduler = "");
void setHalideScheduler(const String& scheduler);

/**
* @brief Ask network to use specific computation backend where it supported.
@@ -379,19 +395,13 @@ namespace dnn //! This namespace is used for dnn module functionality.
void setPreferableBackend(int backendId);

/** @brief Sets the new value for the layer output blob
* @param outputName descriptor of the updating layer output blob.
* @param name descriptor of the updating layer output blob.
* @param blob new blob.
* @see connect(String, String) to know format of the descriptor.
* @note If updating blob is not empty then @p blob must have the same shape,
* because network reshaping is not implemented yet.
*/
CV_WRAP void setBlob(String outputName, const Mat &blob);

/** @brief Returns the layer output blob.
* @param outputName the descriptor of the returning layer output blob.
* @see connect(String, String)
*/
CV_WRAP Mat getBlob(String outputName);
CV_WRAP void setInput(const Mat &blob, const String& name = "");

/** @brief Sets the new value for the learned param of the layer.
* @param layer name or id of the layer.
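For reference, a sketch of calling the three new multi-output overloads declared above; the layer names here are hypothetical placeholders:

    std::vector<Mat> outs;
    net.forward(outs, "conv5");            // all output blobs of layer "conv5"

    std::vector<String> names;
    names.push_back("conv4");
    names.push_back("conv5");
    std::vector<Mat> firstOuts;
    net.forward(firstOuts, names);         // first output of each listed layer

    std::vector<std::vector<Mat> > allOuts;
    net.forward(allOuts, names);           // every output of each listed layer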
1 change: 1 addition & 0 deletions modules/dnn/misc/python/pyopencv_dnn.hpp
@@ -3,6 +3,7 @@ typedef dnn::DictValue LayerId;
typedef std::vector<dnn::MatShape> vector_MatShape;
typedef std::vector<std::vector<dnn::MatShape> > vector_vector_MatShape;
typedef std::vector<size_t> vector_size_t;
typedef std::vector<std::vector<Mat> > vector_vector_Mat;

template<>
bool pyopencv_to(PyObject *o, dnn::DictValue &dv, const char *name)
6 changes: 2 additions & 4 deletions modules/dnn/samples/caffe_googlenet.cpp
@@ -119,16 +119,14 @@ int main(int argc, char **argv)
//! [Prepare blob]

//! [Set input blob]
net.setBlob(".data", inputBlob); //set the network input
net.setInput(inputBlob, "data"); //set the network input
//! [Set input blob]

//! [Make forward pass]
net.forward(); //compute output
Mat prob = net.forward("prob"); //compute output
//! [Make forward pass]

//! [Gather output]
Mat prob = net.getBlob("prob"); //gather output of "prob" layer

int classId;
double classProb;
getMaxClass(prob, &classId, &classProb);//find the best class
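The getMaxClass helper called here amounts to a minMaxLoc over the flattened probability blob; a sketch of that reduction, assuming prob is a 1xN blob of class scores:

    static void getMaxClass(const Mat &probBlob, int *classId, double *classProb)
    {
        Mat probMat = probBlob.reshape(1, 1); // flatten to a single row
        Point classNumber;
        minMaxLoc(probMat, NULL, classProb, NULL, &classNumber);
        *classId = classNumber.x;             // column index = class id
    }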
7 changes: 2 additions & 5 deletions modules/dnn/samples/fcn_semsegm.cpp
@@ -134,19 +134,16 @@ int main(int argc, char **argv)
//! [Prepare blob]

//! [Set input blob]
net.setBlob(".data", inputBlob); //set the network input
net.setInput(inputBlob, "data"); //set the network input
//! [Set input blob]

//! [Make forward pass]
double t = (double)cv::getTickCount();
net.forward(); //compute output
Mat score = net.forward("score"); //compute output
t = (double)cv::getTickCount() - t;
printf("processing time: %.1fms\n", t*1000./getTickFrequency());
//! [Make forward pass]

//! [Gather output]
Mat score = net.getBlob("score");

Mat colorize;
colorizeSegmentation(score, colors, colorize);
Mat show;
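colorizeSegmentation boils down to a per-pixel argmax over the class planes of the score blob, followed by a color lookup; a hedged sketch of the argmax step, assuming score has shape [1, numClasses, H, W] (FLT_MAX comes from <cfloat>):

    const int numClasses = score.size[1], H = score.size[2], W = score.size[3];
    Mat maxVal(H, W, CV_32F, Scalar(-FLT_MAX)); // running per-pixel maximum
    Mat maxIdx(H, W, CV_8U, Scalar(0));         // winning class id per pixel
    for (int ch = 0; ch < numClasses; ch++)
    {
        Mat plane(H, W, CV_32F, score.ptr<float>(0, ch)); // view of one class plane
        Mat mask = plane > maxVal;              // pixels where this class wins
        plane.copyTo(maxVal, mask);
        maxIdx.setTo(ch, mask);
    }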
8 changes: 4 additions & 4 deletions modules/dnn/samples/squeezenet_halide.cpp
@@ -93,23 +93,23 @@ int main(int argc, char **argv)
//! [Prepare blob]

//! [Set input blob]
net.setBlob("", inputBlob); // Set the network input.
net.setInput(inputBlob); // Set the network input.
//! [Set input blob]

//! [Enable Halide backend]
net.setPreferableBackend(DNN_BACKEND_HALIDE); // Tell engine to use Halide where it possible.
//! [Enable Halide backend]

//! [Compile Halide pipeline]
net.compileHalide(); // Compile Halide pipeline.
// net.compileHalide(); // Compile Halide pipeline.
//! [Compile Halide pipeline]

//! [Make forward pass]
net.forward(); // Compute output.
Mat prob = net.forward("prob"); // Compute output.
//! [Make forward pass]

//! [Gather output]
Mat prob = net.getBlob("prob"); // Gather output of "prob" layer.
// net.getBlob(); // Gather output of "prob" layer.

int classId;
double classProb;
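Note the sample no longer compiles the Halide pipeline explicitly; the commented-out compileHalide() call suggests compilation now happens lazily. A sketch of the remaining flow, under that assumption:

    net.setPreferableBackend(DNN_BACKEND_HALIDE); // select the backend once
    Mat prob = net.forward("prob"); // first forward presumably builds the pipeline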
6 changes: 2 additions & 4 deletions modules/dnn/samples/ssd_object_detection.cpp
@@ -108,15 +108,13 @@ int main(int argc, char** argv)
//! [Prepare blob]

//! [Set input blob]
net.setBlob(".data", inputBlob); //set the network input
net.setInput(inputBlob, "data"); //set the network input
//! [Set input blob]

//! [Make forward pass]
net.forward(); //compute output
Mat detection = net.forward("detection_out"); //compute output
//! [Make forward pass]

//! [Gather output]
Mat detection = net.getBlob("detection_out");
Mat detectionMat(detection.size[2], detection.size[3], CV_32F, detection.ptr<float>());

float confidenceThreshold = parser.get<float>("min_confidence");
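detectionMat wraps the detection_out blob as an Nx7 matrix; by SSD convention each row holds [imageId, classId, confidence, left, top, right, bottom] with the box in relative coordinates. A hedged sketch of the thresholding loop, where frame is a hypothetical name for the input image:

    for (int i = 0; i < detectionMat.rows; i++)
    {
        float confidence = detectionMat.at<float>(i, 2);
        if (confidence > confidenceThreshold)
        {
            // scale relative box coordinates to pixels
            int left   = int(detectionMat.at<float>(i, 3) * frame.cols);
            int top    = int(detectionMat.at<float>(i, 4) * frame.rows);
            int right  = int(detectionMat.at<float>(i, 5) * frame.cols);
            int bottom = int(detectionMat.at<float>(i, 6) * frame.rows);
            rectangle(frame, Point(left, top), Point(right, bottom), Scalar(0, 255, 0));
        }
    }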
9 changes: 3 additions & 6 deletions modules/dnn/samples/tf_inception.cpp
@@ -26,7 +26,7 @@ const String keys =
"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip }"
"{model m |tensorflow_inception_graph.pb| path to TensorFlow .pb model file }"
"{image i || path to image file }"
"{i_blob | .input | input blob name) }"
"{i_blob | input | input blob name) }"
"{o_blob | softmax2 | output blob name) }"
"{c_names c | imagenet_comp_graph_label_strings.txt | path to file with classnames for class id }"
"{result r || path to save output blob (optional, binary format, NCHW order) }"
@@ -101,21 +101,18 @@ int main(int argc, char **argv)
//! [Prepare blob]
inputBlob -= 117.0;
//! [Set input blob]
net.setBlob(inBlobName, inputBlob); //set the network input
net.setInput(inputBlob, inBlobName); //set the network input
//! [Set input blob]

cv::TickMeter tm;
tm.start();

//! [Make forward pass]
net.forward(); //compute output
Mat result = net.forward(outBlobName); //compute output
//! [Make forward pass]

tm.stop();

//! [Gather output]
Mat result = net.getBlob(outBlobName); //gather output of "prob" layer

if (!resultFile.empty()) {
CV_Assert(result.isContinuous());

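The CV_Assert(result.isContinuous()) guard exists because the blob is dumped as one flat buffer in NCHW order; a sketch of that write, assuming <fstream> is available:

    std::ofstream fout(resultFile.c_str(), std::ios::out | std::ios::binary);
    fout.write((const char*)result.ptr<float>(),
               result.total() * sizeof(float)); // flat NCHW dump
    fout.close();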
19 changes: 3 additions & 16 deletions modules/dnn/samples/torch_enet.cpp
@@ -73,32 +73,19 @@ int main(int argc, char **argv)
//! [Prepare blob]

//! [Set input blob]
net.setBlob("", inputBlob); //set the network input
net.setInput(inputBlob, ""); //set the network input
//! [Set input blob]

const int N = 3;
TickMeter tm;

//! [Make forward pass]
for( int i = 0; i < N; i++ )
{
TickMeter tm_;
tm_.start();
net.forward(); //compute output
tm_.stop();
if( i == 0 || tm_.getTimeTicks() < tm.getTimeTicks() )
tm = tm_;
}

//! [Gather output]

String oBlob = net.getLayerNames().back();
if (!parser.get<String>("o_blob").empty())
{
oBlob = parser.get<String>("o_blob");
}

Mat result = net.getBlob(oBlob); //gather output of "prob" layer
//! [Make forward pass]
Mat result = net.forward(oBlob);

if (!resultFile.empty()) {
CV_Assert(result.isContinuous());
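The removed best-of-N timing loop still works against the new API, since forward() now returns the blob directly; a sketch restating it:

    const int N = 3;
    TickMeter tm;
    Mat result;
    for (int i = 0; i < N; i++)
    {
        TickMeter tm_;
        tm_.start();
        result = net.forward(oBlob); // forward now returns the output blob
        tm_.stop();
        if (i == 0 || tm_.getTimeTicks() < tm.getTimeTicks())
            tm = tm_; // keep the fastest run
    }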
2 changes: 1 addition & 1 deletion modules/dnn/src/caffe/caffe_importer.cpp
@@ -277,7 +277,7 @@ class CaffeImporter : public Importer
addedBlobs.push_back(BlobNote(net.input(inNum), 0, inNum));
netInputs[inNum] = net.input(inNum);
}
dstNet.setNetInputs(netInputs);
dstNet.setInputsNames(netInputs);
}

for (int li = 0; li < layersSize; li++)
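The renamed call keeps the same contract: the importer registers the network's input names once, and callers later address them through setInput. A minimal sketch with a hypothetical single input:

    std::vector<String> netInputs;
    netInputs.push_back("data");        // hypothetical input name
    dstNet.setInputsNames(netInputs);   // was dstNet.setNetInputs(netInputs)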