From 25e205604e7eafa83867a15cfda526461fe58455 Mon Sep 17 00:00:00 2001
From: Boris Fomitchev
Date: Tue, 8 May 2018 20:18:10 -0700
Subject: ONNX export is working

---
 test.py | 40 +++++++++++++++++++++++++++++-----------
 1 file changed, 29 insertions(+), 11 deletions(-)

(limited to 'test.py')

diff --git a/test.py b/test.py
index 1effb08..203d887 100755
--- a/test.py
+++ b/test.py
@@ -8,6 +8,8 @@ from models.models import create_model
 import util.util as util
 from util.visualizer import Visualizer
 from util import html
+import torch
+from run_engine import run_trt_engine, run_onnx
 
 opt = TestOptions().parse(save=False)
 opt.nThreads = 1 # test code only supports nThreads = 1
@@ -17,30 +19,46 @@ opt.no_flip = True # no flip
 
 data_loader = CreateDataLoader(opt)
 dataset = data_loader.load_data()
-model = create_model(opt)
 visualizer = Visualizer(opt)
 # create website
 web_dir = os.path.join(opt.results_dir, opt.name, '%s_%s' % (opt.phase, opt.which_epoch))
 webpage = html.HTML(web_dir, 'Experiment = %s, Phase = %s, Epoch = %s' % (opt.name, opt.phase, opt.which_epoch))
 # test
+
+if not opt.engine and not opt.onnx:
+    model = create_model(opt)
+    if opt.data_type == 16:
+        model.half()
+    elif opt.data_type == 8:
+        model.type(torch.uint8)
+
+    if opt.verbose:
+        print(model)
+
+
 for i, data in enumerate(dataset):
     if i >= opt.how_many:
         break
     if opt.data_type == 16:
-        model.half()
         data['label'] = data['label'].half()
         data['inst'] = data['inst'].half()
     elif opt.data_type == 8:
-        model.type(torch.uint8)
-
+        data['label'] = data['label'].uint8()
+        data['inst'] = data['inst'].uint8()
     if opt.export_onnx:
-        assert opt.export_onnx.endswith(".onnx"), "Export model file should end with .onnx"
-        if opt.verbose:
-            print(model)
-        generated = torch.onnx.export(model, [data['label'], data['inst']],
-                                      opt.export_onnx, verbose=True)
-
-    generated = model.inference(data['label'], data['inst'])
+        print ("Exporting to ONNX: ", opt.export_onnx)
+        assert opt.export_onnx.endswith("onnx"), "Export model file should end with .onnx"
+        torch.onnx.export(model, [data['label'], data['inst']],
+                          opt.export_onnx, verbose=True)
+        exit(0)
+    minibatch = 1
+    if opt.engine:
+        generated = run_trt_engine(opt.engine, minibatch, [data['label'], data['inst']])
+    elif opt.onnx:
+        generated = run_onnx(opt.onnx, opt.data_type, minibatch, [data['label'], data['inst']])
+    else:
+        generated = model.inference(data['label'], data['inst'])
+
     visuals = OrderedDict([('input_label', util.tensor2label(data['label'][0], opt.label_nc)),
                            ('synthesized_image', util.tensor2im(generated.data[0]))])
     img_path = data['path']
-- 
cgit v1.2.3-70-g09d2