本文节选自我的博客:C++ 调用 pytorch 模型
记一次C++调用pytorch模型踩坑记录,跨语言是AI中急需解决的问题,本文采用#include &lt;Python.h&gt;(CPython 提供的 C API)
在C++中嵌入解释器来执行python代码
这样就可以运行出模型结果
// NOTE(review): the three system headers below lost their angle-bracket
// contents in extraction; restored from the names the code uses
// (Py_*, qDebug, QMutex) — confirm against the original project.
#include <Python.h>   // CPython embedding API; must precede standard headers per CPython docs

#include "workthread.h"

#include <QDebug>
#include <QMutex>     // QMutex / QMutexLocker
QMutex mutex;
// Nothing to set up beyond QThread's own default construction.
WorkThread::WorkThread() = default;
void WorkThread::run()
{
mutex.lock();
Py_Initialize(); //初始化Python解释器
if ( !Py_IsInitialized() )
{
qDebug()<<"python初始化失败";
}
else
{
//PyRun_SimpleString("import sys"); //执行单句Python语句,用于给出调用模块的路径,否则将无法找到相应的调用模块
//PyRun_SimpleString("print(sys.path)"); //在C++中执行python语句
PyRun_SimpleString("import torch");
PyRun_SimpleString("import numpy as np");
PyRun_SimpleString("from torch.autograd import Variable");
PyRun_SimpleString("from torchvision import transforms");
PyRun_SimpleString("from PIL import Image");
PyRun_SimpleString("img = Image.open('1.jpg')");
PyRun_SimpleString("transform = transforms.Compose([transforms.Resize((300, 300), interpolation=2),transforms.ToTensor(),transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])])");
PyRun_SimpleString("img = transform(img)");
PyRun_SimpleString("img = img.unsqueeze(0)");
PyRun_SimpleString("im = Variable(img)");
PyRun_SimpleString("Resnet_model = torch.load('Resnet18_1.pkl', map_location=lambda storage, loc: storage)");
PyRun_SimpleString("Resnet_model.eval()");
PyRun_SimpleString("pre = Resnet_model(im)");
PyRun_SimpleString("numpy_array = pre.detach().numpy()");
PyRun_SimpleString("numpy_array = np.squeeze(numpy_array, axis=0)");
PyRun_SimpleString("output_np = np.argmax(numpy_array, axis=0)");
PyRun_SimpleString("pre_data = output_np.astype(np.uint8)");
PyRun_SimpleString("print(pre_data)");
PyRun_SimpleString("print('end')");
PyObject* pModule = PyImport_ImportModule("use_model"); // 这里的test_py就是创建的python文件
if (!pModule)
{
qDebug()<< "Cant open python file!\n" << endl;
}
else
{
PyObject* pFunhello= PyObject_GetAttrString(pModule,"hello"); // 这里的hellow就是python文件定义的函数
PyRun_SimpleString("print('!!!!!!!')"); //在C++中执行python语句
if(!pFunhello)
{
qDebug()<<"Get function hello failed"<< endl;
}
PyObject_CallFunction(pFunhello,NULL);
}
Py_Finalize();
}
mutex.unlock();
}
有任何疑问和想法,欢迎在评论区与我交流。