This repository was archived by the owner on Nov 17, 2023. It is now read-only.

Added nullptr check for LeakyRelu gamma parameter. #10886

Merged
merged 4 commits on May 11, 2018
6 changes: 6 additions & 0 deletions Jenkinsfile
@@ -260,6 +260,9 @@ try {
stash includes: 'build/cpp-package/example/googlenet', name: 'cpp_googlenet'
stash includes: 'build/cpp-package/example/lenet_with_mxdataiter', name: 'cpp_lenet_with_mxdataiter'
stash includes: 'build/cpp-package/example/resnet', name: 'cpp_resnet'
+ stash includes: 'build/cpp-package/example/mlp', name: 'cpp_mlp'
+ stash includes: 'build/cpp-package/example/mlp_cpu', name: 'cpp_mlp_cpu'
+ stash includes: 'build/cpp-package/example/mlp_gpu', name: 'cpp_mlp_gpu'
stash includes: 'build/cpp-package/example/test_score', name: 'cpp_test_score'
stash includes: 'build/cpp-package/example/test_optimizer', name: 'cpp_test_optimizer'
}
@@ -791,6 +794,9 @@ try {
unstash 'cpp_googlenet'
unstash 'cpp_lenet_with_mxdataiter'
unstash 'cpp_resnet'
+ unstash 'cpp_mlp'
+ unstash 'cpp_mlp_cpu'
+ unstash 'cpp_mlp_gpu'
unstash 'cpp_test_score'
unstash 'cpp_test_optimizer'
sh "ci/build.py --nvidiadocker --platform ubuntu_gpu /work/runtime_functions.sh integrationtest_ubuntu_gpu_cpp_package"
24 changes: 12 additions & 12 deletions cpp-package/example/mlp_cpu.cpp
@@ -21,6 +21,7 @@
* Xin Li yakumolx@gmail.com
*/
#include <chrono>
#include "utils.h"
#include "mxnet-cpp/MxNetCpp.h"

using namespace mxnet::cpp;
@@ -55,18 +56,17 @@ int main(int argc, char** argv) {
const float learning_rate = 0.1;
const float weight_decay = 1e-2;

- auto train_iter = MXDataIter("MNISTIter")
-     .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
-     .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
-     .SetParam("batch_size", batch_size)
-     .SetParam("flat", 1)
-     .CreateDataIter();
- auto val_iter = MXDataIter("MNISTIter")
-     .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
-     .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
-     .SetParam("batch_size", batch_size)
-     .SetParam("flat", 1)
-     .CreateDataIter();
+ std::vector<std::string> data_files = { "./data/mnist_data/train-images-idx3-ubyte",
+                                         "./data/mnist_data/train-labels-idx1-ubyte",
+                                         "./data/mnist_data/t10k-images-idx3-ubyte",
+                                         "./data/mnist_data/t10k-labels-idx1-ubyte"
+                                       };
+
+ auto train_iter = MXDataIter("MNISTIter");
+ setDataIter(&train_iter, "Train", data_files, batch_size);
+
+ auto val_iter = MXDataIter("MNISTIter");
+ setDataIter(&val_iter, "Label", data_files, batch_size);

auto net = mlp(layers);

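The example now builds its MNIST iterators through a setDataIter helper pulled in via utils.h instead of repeating the SetParam chains; the same change is applied to mlp_gpu.cpp below. The helper itself is not part of this diff, so the sketch that follows is only a guess at its shape, reconstructed from the removed SetParam calls and the call sites above: the setDataIter name, the "Train"/"Label" mode strings, and the file order in data_files come from the diff, while the body and the exact signature are assumptions.

// Hypothetical reconstruction of a setDataIter-style helper; not the actual utils.h code.
// It applies the same parameters the removed code set explicitly (image, label,
// batch_size, flat), picking the train or test files based on the mode string.
#include <string>
#include <vector>
#include "mxnet-cpp/MxNetCpp.h"

inline void setDataIter(mxnet::cpp::MXDataIter *iter, const std::string &useType,
                        const std::vector<std::string> &data_files, int batch_size) {
  // Assumed file order: train images, train labels, test images, test labels.
  if (useType == "Train") {
    iter->SetParam("image", data_files[0]);
    iter->SetParam("label", data_files[1]);
  } else {  // "Label" selects the validation/test split
    iter->SetParam("image", data_files[2]);
    iter->SetParam("label", data_files[3]);
  }
  iter->SetParam("batch_size", batch_size);
  iter->SetParam("flat", 1);
  iter->CreateDataIter();
}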
24 changes: 12 additions & 12 deletions cpp-package/example/mlp_gpu.cpp
@@ -21,6 +21,7 @@
* Xin Li yakumolx@gmail.com
*/
#include <chrono>
#include "utils.h"
#include "mxnet-cpp/MxNetCpp.h"

using namespace mxnet::cpp;
@@ -55,18 +56,17 @@ int main(int argc, char** argv) {
const float learning_rate = 0.1;
const float weight_decay = 1e-2;

- auto train_iter = MXDataIter("MNISTIter")
-     .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
-     .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
-     .SetParam("batch_size", batch_size)
-     .SetParam("flat", 1)
-     .CreateDataIter();
- auto val_iter = MXDataIter("MNISTIter")
-     .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
-     .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
-     .SetParam("batch_size", batch_size)
-     .SetParam("flat", 1)
-     .CreateDataIter();
+ std::vector<std::string> data_files = { "./data/mnist_data/train-images-idx3-ubyte",
+                                         "./data/mnist_data/train-labels-idx1-ubyte",
+                                         "./data/mnist_data/t10k-images-idx3-ubyte",
+                                         "./data/mnist_data/t10k-labels-idx1-ubyte"
+                                       };
+
+ auto train_iter = MXDataIter("MNISTIter");
+ setDataIter(&train_iter, "Train", data_files, batch_size);
+
+ auto val_iter = MXDataIter("MNISTIter");
+ setDataIter(&val_iter, "Label", data_files, batch_size);

auto net = mlp(layers);

8 changes: 5 additions & 3 deletions cpp-package/include/mxnet-cpp/operator.hpp
@@ -159,9 +159,11 @@ inline void Operator::Invoke(NDArray &output) {
}

inline Operator &Operator::SetInput(const std::string &name, Symbol symbol) {
- input_keys_.push_back(name.c_str());
- input_symbols_.push_back(symbol.GetHandle());
- return *this;
+ if (symbol.GetHandle()) {
+   input_keys_.push_back(name.c_str());
+   input_symbols_.push_back(symbol.GetHandle());
+ }
+ return *this;
}

inline Operator &Operator::SetInput(const std::string &name, NDArray ndarray) {
2 changes: 1 addition & 1 deletion cpp-package/include/mxnet-cpp/symbol.h
@@ -138,7 +138,7 @@ class Symbol {
/*!
* \return the SymbolHandle
*/
- SymbolHandle GetHandle() const { return blob_ptr_->handle_; }
+ SymbolHandle GetHandle() const { return (blob_ptr_) ? blob_ptr_->handle_ : NULL; }
/*!
* \brief construct an operator Symbol, with given input Symbol and config
* \param name the name of the Symbol
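Taken together, the operator.hpp and symbol.h changes make it safe to pass a default-constructed Symbol as an optional operator input: GetHandle() no longer dereferences an empty blob_ptr_, and SetInput() silently drops an input whose handle is null. That is the situation the PR title refers to, where LeakyRelu's optional gamma parameter may be left unset. A minimal sketch of the pattern this enables (the "leaky" act_type and the variable names are illustrative, not part of this PR):

#include "mxnet-cpp/MxNetCpp.h"

using namespace mxnet::cpp;

int main() {
  Symbol data = Symbol::Variable("data");
  Symbol gamma;  // default-constructed: blob_ptr_ is empty, so GetHandle() now yields NULL

  // Before this fix, feeding gamma here either crashed inside GetHandle() or pushed a
  // null handle into the compose call; with the checks above, the null input is skipped.
  Symbol out = Operator("LeakyReLU")
                   .SetParam("act_type", "leaky")
                   .SetInput("data", data)
                   .SetInput("gamma", gamma)
                   .CreateSymbol("leaky_out");
  return 0;
}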
9 changes: 9 additions & 0 deletions cpp-package/tests/ci_test.sh
@@ -36,6 +36,15 @@ cp ../../build/cpp-package/example/lenet_with_mxdataiter .
cp ../../build/cpp-package/example/resnet .
./resnet 5

+ cp ../../build/cpp-package/example/mlp .
+ ./mlp
+
+ cp ../../build/cpp-package/example/mlp_cpu .
+ ./mlp_cpu
+
+ cp ../../build/cpp-package/example/mlp_gpu .
+ ./mlp_gpu
+
cp ../../build/cpp-package/example/test_optimizer .
./test_optimizer
