python版本onnx模型多输入

onnx模型有两个输入的情况(运行时取回全部输出):

import numpy as np
import onnxruntime

def use_onnx_model(model_path, input1=None, input2=None):
    """Run inference with a two-input ONNX model.

    Parameters
    ----------
    model_path : str
        Path to the ``.onnx`` model file.
    input1, input2 : array-like
        Values fed to the model's first and second inputs. Each is wrapped
        in an extra leading (batch) dimension and cast to float32 —
        assumes the model expects float32 tensors; confirm against the model.

    Returns
    -------
    list
        One numpy array per model output, in ``session.get_outputs()`` order.
    """
    # Session tuning knobs must go on a SessionOptions object —
    # InferenceSession() does not accept them as keyword arguments.
    opts = onnxruntime.SessionOptions()
    opts.enable_cpu_mem_arena = True
    opts.enable_mem_pattern = True
    opts.enable_profiling = False
    # Enums, not strings/builtins: 'parallel' -> ORT_PARALLEL,
    # 'all' -> ORT_ENABLE_ALL (the original passed the builtin `all`).
    opts.execution_mode = onnxruntime.ExecutionMode.ORT_PARALLEL
    opts.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_ENABLE_ALL
    opts.inter_op_num_threads = 2
    opts.intra_op_num_threads = 2
    session = onnxruntime.InferenceSession(model_path, sess_options=opts)

    # Avoid shadowing the builtins `input`/`output` in the comprehensions.
    in_names = [inp.name for inp in session.get_inputs()]
    out_names = [out.name for out in session.get_outputs()]

    feed = {in_names[0]: np.array([input1], dtype=np.float32),
            in_names[1]: np.array([input2], dtype=np.float32)}
    # The original computed the result but never returned it.
    return session.run(out_names, feed)

你可能感兴趣的:(python)