import numpy as np
import onnxruntime

# Random float32 input matching the model's expected NCHW shape.
input_data = np.random.rand(1, 3, 224, 224).astype(np.float32)

# Load the model and run inference, requesting the tensor named "output".
sess = onnxruntime.InferenceSession("add_model.onnx")
result = sess.run(["output"], {"input": input_data})
print(result)
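The post does not ship add_model.onnx itself. The sketch below is an assumption of mine, not part of the original: it uses the onnx helper API to build a minimal compatible model, a single Add node whose tensors are named "input" and "output" and shaped (1, 3, 224, 224) to match the inference call above, so that snippet has something to load.

import numpy as np
import onnx
from onnx import TensorProto, helper, numpy_helper

# Hypothetical builder for a toy "add_model.onnx"; names and shapes mirror
# the inference snippet above (input "input", output "output").
inp = helper.make_tensor_value_info("input", TensorProto.FLOAT, [1, 3, 224, 224])
out = helper.make_tensor_value_info("output", TensorProto.FLOAT, [1, 3, 224, 224])

# Constant tensor of ones that the Add node adds to the input.
ones = numpy_helper.from_array(np.ones((1, 3, 224, 224), dtype=np.float32), name="ones")

node = helper.make_node("Add", inputs=["input", "ones"], outputs=["output"])
graph = helper.make_graph([node], "add_graph", [inp], [out], initializer=[ones])

# Pin an opset so the saved model loads across a range of onnxruntime versions.
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 13)])
onnx.checker.check_model(model)
onnx.save(model, "add_model.onnx")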
import numpy as np
import onnxruntime as rt

# Dummy input data (all ones) with Inception v3's expected 1x3x299x299 shape.
input_data = np.ones((1, 3, 299, 299), dtype=np.float32)

# Create the runtime session.
sess = rt.InferenceSession("inception_v3.onnx")

# Query input/output names and the output shape from the loaded model.
input_name = sess.get_inputs()[0].name
print("input name", input_name)
output_name = sess.get_outputs()[0].name
print("output name", output_name)
output_shape = sess.get_outputs()[0].shape
print("output shape", output_shape)

# Run the model forward.
res = sess.run([output_name], {input_name: input_data})
out = np.array(res)
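For a classifier like Inception v3 the raw output is typically a vector of class scores, so a common next step is to turn it into top-5 predictions. The sketch below is mine, not from the original post: the helper name top_k_predictions and the assumption that the output is a flat vector of logits (e.g., 1000 ImageNet classes) are illustrative; check the printed output shape above first.

import numpy as np

def top_k_predictions(scores, k=5):
    # Flatten to a 1-D score vector, apply a numerically stable softmax,
    # and return the k highest-probability (class_index, probability) pairs.
    scores = np.asarray(scores, dtype=np.float32).reshape(-1)
    exp = np.exp(scores - scores.max())
    probs = exp / exp.sum()
    top = probs.argsort()[::-1][:k]
    return [(int(i), float(probs[i])) for i in top]

# res[0] is the first (and only) requested output from sess.run above.
print(top_k_predictions(res[0]))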
Source: https://www.cnblogs.com/sinferwu/p/17024543.html