MXNet是一种开源的深度学习框架,核心代码是由C++实现。MXNet官网推荐使用VS2015或VS2017编译,因为源码中使用了一些C++14的特性,VS2013是不支持的。这里通过VS2017编译,步骤如下:
1. 编译OpenCV,版本为3.4.2,可参考 https://blog.csdn.net/fengbingchun/article/details/78163217 ,注意加入opencv_contrib模块;
2. 编译OpenBLAS,版本为0.3.3,可参考: https://blog.csdn.net/fengbingchun/article/details/55509764 ;
3. 编译dmlc-core,版本为0.3;
4. 下载mshadow,注意不能是1.0或1.1版本,只能是master,因为它们的文件名不一致,里面仅有头文件;
5. 编译tvm,版本为0.4,在编译MXNet时,目前仅需要nnvm/src下的c_api, core, pass三个目录的文件参与;
6. 编译dlpack,版本为最新master,commit为bee4d1d;
7. 编译MXNet,版本为1.3.0;
8. 使用mxnet/cpp-package/scripts/OpWrapperGenerator.py生成mxnet/cpp-package/include/mxnet-cpp目录下的op.h文件操作步骤:
(1). 将lib/rel/x64目录下的libmxnet.dll和libopenblas.dll两个动态库拷贝到mxnet/cpp-package/scripts/目录下;
(2). 在mxnet/cpp-package/scripts/目录下打开命令行提示符,执行:
python OpWrapperGenerator.py libmxnet.dll
(3). 修改生成的op.h文件中的两处UpSampling函数:将参数scale修改为int scale;将参数num_filter = 0修改为int num_filter = 0。注:若通过以下命令以递归方式clone源码后再生成op.h文件,则不需要作任何修改,具体原因后面再查找:
git clone --recursive https://github.com/apache/incubator-mxnet
注意:
(1). 关于MXNet中的依赖库介绍和使用可参考:https://blog.csdn.net/fengbingchun/article/details/84981969
(2). 为了正常编译整个工程,部分源码作了微小的调整;
(3). 所有项目依赖的版本如下:
1. OpenBLAS:
commit: fd8d186
version: 0.3.3
date: 2018.08.31
url: https://github.com/xianyi/OpenBLAS/releases
2. dlpack:
commit: bee4d1d
version: master
date: 2018.08.24
url: https://github.com/dmlc/dlpack
3. mshadow:
commit: 2e3a895
version: master
date: 2018.11.08
url: https://github.com/dmlc/mshadow
4. dmlc-core:
commit: 85c1180
version: 0.3
date: 2018.07.18
url: https://github.com/dmlc/dmlc-core/releases
5. tvm:
commit: 60769b7
version: 0.4
date: 2018.09.04
url: https://github.com/dmlc/tvm/releases
6. HalideIR:
commit: a08e26e
version: master
date: 2018.11.28
url: https://github.com/dmlc/HalideIR
7. mxnet:
commit: b3be92f
version: 1.3.0
date: 2018.09.12
url: https://github.com/apache/incubator-mxnet/releases
(4). 整个项目可以从https://github.com/fengbingchun/MXNet_Test clone到E:/GitCode目录下直接编译即可。
下面测试代码是用生成的MXNet.dll动态库训练MNIST:
#include "funset.hpp"
#include <chrono>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include "mxnet-cpp/MxNetCpp.h"
namespace {
// Returns true when the file at `filename` exists and can be opened for reading.
bool isFileExists(const std::string &filename)
{
	std::ifstream stream(filename);
	return stream.good();
}
// Verifies that every path in `data_files` exists on disk.
// Logs (via the MXNet LG macro) and returns false at the first missing file;
// returns true when all files are present.
// Fix: restore the element type `<std::string>` that was lost from the
// vector parameter (the bare `std::vector &` does not compile).
bool check_datafiles(const std::vector<std::string> &data_files)
{
	for (size_t index = 0; index < data_files.size(); index++) {
		if (!(isFileExists(data_files[index]))) {
			LG << "Error: File does not exist: " << data_files[index];
			return false;
		}
	}
	return true;
}
// Configures a MNIST MXDataIter.
// `useType` selects which files from `data_files` are used: "Train" picks
// indices {0,1} (train images/labels), "Label" picks {2,3} (test images/labels).
// `data_files` must therefore hold {train images, train labels, test images,
// test labels} in that order. Returns false if any file is missing.
// Fixes: restore the lost `<std::string>` template argument on the vector
// parameter; pass `useType` by const reference instead of by value.
bool setDataIter(mxnet::cpp::MXDataIter *iter, const std::string &useType, const std::vector<std::string> &data_files, int batch_size)
{
	if (!check_datafiles(data_files))
		return false;

	iter->SetParam("batch_size", batch_size);
	iter->SetParam("shuffle", 1);
	iter->SetParam("flat", 1); // flatten each 28x28 image to a 784-vector

	if (useType == "Train") {
		iter->SetParam("image", data_files[0]);
		iter->SetParam("label", data_files[1]);
	} else if (useType == "Label") {
		iter->SetParam("image", data_files[2]);
		iter->SetParam("label", data_files[3]);
	}

	iter->CreateDataIter();
	return true;
}
} // namespace
////////////////////////////// mnist ////////////////////////
/* reference:
https://mxnet.incubator.apache.org/tutorials/c%2B%2B/basics.html
mxnet_source/cpp-package/example/mlp_cpu.cpp
*/
namespace {
// Builds a multi-layer perceptron symbol graph: one FullyConnected layer per
// entry in `layers` (the entry is the layer width), ReLU activations between
// hidden layers, and a SoftmaxOutput on top of the final layer.
// Inputs are the variables "X" (data) and "label".
// Fixes: restore the lost template arguments — `<int>` on the `layers`
// parameter and `<mxnet::cpp::Symbol>` on the three local vectors.
mxnet::cpp::Symbol mlp(const std::vector<int> &layers)
{
	auto x = mxnet::cpp::Symbol::Variable("X");
	auto label = mxnet::cpp::Symbol::Variable("label");

	std::vector<mxnet::cpp::Symbol> weights(layers.size());
	std::vector<mxnet::cpp::Symbol> biases(layers.size());
	std::vector<mxnet::cpp::Symbol> outputs(layers.size());

	for (size_t i = 0; i < layers.size(); ++i) {
		weights[i] = mxnet::cpp::Symbol::Variable("w" + std::to_string(i));
		biases[i] = mxnet::cpp::Symbol::Variable("b" + std::to_string(i));
		mxnet::cpp::Symbol fc = mxnet::cpp::FullyConnected(i == 0 ? x : outputs[i - 1], weights[i], biases[i], layers[i]);
		// ReLU on hidden layers only; the last layer feeds SoftmaxOutput raw.
		outputs[i] = i == layers.size() - 1 ? fc : mxnet::cpp::Activation(fc, mxnet::cpp::ActivationActType::kRelu);
	}
	return mxnet::cpp::SoftmaxOutput(outputs.back(), label);
}
} // namespace
int test_mnist_train()
{
const int image_size = 28;
const std::vector layers{ 128, 64, 10 };
const int batch_size = 100;
const int max_epoch = 20;
const float learning_rate = 0.1;
const float weight_decay = 1e-2;
std::vector data_files = { "E:/GitCode/MXNet_Test/data/mnist/train-images.idx3-ubyte",
"E:/GitCode/MXNet_Test/data/mnist/train-labels.idx1-ubyte",
"E:/GitCode/MXNet_Test/data/mnist/t10k-images.idx3-ubyte",
"E:/GitCode/MXNet_Test/data/mnist/t10k-labels.idx1-ubyte"};
auto train_iter = mxnet::cpp::MXDataIter("MNISTIter");
setDataIter(&train_iter, "Train", data_files, batch_size);
auto val_iter = mxnet::cpp::MXDataIter("MNISTIter");
setDataIter(&val_iter, "Label", data_files, batch_size);
auto net = mlp(layers);
mxnet::cpp::Context ctx = mxnet::cpp::Context::cpu(); // Use CPU for training
std::map args;
args["X"] = mxnet::cpp::NDArray(mxnet::cpp::Shape(batch_size, image_size*image_size), ctx);
args["label"] = mxnet::cpp::NDArray(mxnet::cpp::Shape(batch_size), ctx);
// Let MXNet infer shapes other parameters such as weights
net.InferArgsMap(ctx, &args, args);
// Initialize all parameters with uniform distribution U(-0.01, 0.01)
auto initializer = mxnet::cpp::Uniform(0.01);
for (auto& arg : args) {
// arg.first is parameter name, and arg.second is the value
initializer(arg.first, &arg.second);
}
// Create sgd optimizer
mxnet::cpp::Optimizer* opt = mxnet::cpp::OptimizerRegistry::Find("sgd");
opt->SetParam("rescale_grad", 1.0 / batch_size)->SetParam("lr", learning_rate)->SetParam("wd", weight_decay);
// Create executor by binding parameters to the model
auto *exec = net.SimpleBind(ctx, args);
auto arg_names = net.ListArguments();
// Start training
for (int iter = 0; iter < max_epoch; ++iter) {
int samples = 0;
train_iter.Reset();
auto tic = std::chrono::system_clock::now();
while (train_iter.Next()) {
samples += batch_size;
auto data_batch = train_iter.GetDataBatch();
// Set data and label
data_batch.data.CopyTo(&args["X"]);
data_batch.label.CopyTo(&args["label"]);
// Compute gradients
exec->Forward(true);
exec->Backward();
// Update parameters
for (size_t i = 0; i < arg_names.size(); ++i) {
if (arg_names[i] == "X" || arg_names[i] == "label") continue;
opt->Update(i, exec->arg_arrays[i], exec->grad_arrays[i]);
}
}
auto toc = std::chrono::system_clock::now();
mxnet::cpp::Accuracy acc;
val_iter.Reset();
while (val_iter.Next()) {
auto data_batch = val_iter.GetDataBatch();
data_batch.data.CopyTo(&args["X"]);
data_batch.label.CopyTo(&args["label"]);
// Forward pass is enough as no gradient is needed when evaluating
exec->Forward(false);
acc.Update(data_batch.label, exec->outputs[0]);
}
float duration = std::chrono::duration_cast
(toc - tic).count() / 1000.0;
LG << "Epoch: " << iter << " " << samples / duration << " samples/sec Accuracy: " << acc.Get();
}
std::string json_file{ "E:/GitCode/MXNet_Test/data/mnist.json" };
std::string param_file{"E:/GitCode/MXNet_Test/data/mnist.params"};
net.Save(json_file);
mxnet::cpp::NDArray::Save(param_file, exec->arg_arrays);
delete exec;
MXNotifyShutdown();
return 0;
}
执行结果如下:
在Windows上编译MXNet_Test工程时注意事项:
(1). clone MXNet_Test到E:/GitCode目录下;
(2). MXNet_Test使用VS2017既可在Windows 7 x64上编译,也可在Windows 10 x64上编译;
(3). 由于每个人的机子上VS2017安装的Windows SDK版本不同,可能会导致出现"error MSB8036: 找不到 Windows SDK 版本10.0.17134.0"类似的错误,解决方法是:选中指定的项目,打开属性页,在配置属性->常规->Windows SDK版本中重新选择你已安装的Windows SDK版本即可。
GitHub: https://github.com/fengbingchun/MXNet_Test