From 35677eca7df56407ff0c8d6980ff8d7aec987f1a Mon Sep 17 00:00:00 2001
From: Thiago Crepaldi
Date: Fri, 22 Sep 2023 10:32:13 -0400
Subject: [PATCH] Update InferenceSession call to latest format (#7982)

Co-authored-by: Philip Meier
---
 .github/workflows/tests.yml | 2 +-
 test/test_onnx.py           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 22b20e37c42..22e1a4ac18d 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -124,7 +124,7 @@ jobs:
         conda activate ci
 
         echo '::group::Install ONNX'
-        pip install --progress-bar=off onnx onnxruntime!=1.16.0
+        pip install --progress-bar=off onnx onnxruntime
         echo '::endgroup::'
 
         echo '::group::Install testing utilities'
diff --git a/test/test_onnx.py b/test/test_onnx.py
index 19ed13b1a6d..0350c817ff8 100644
--- a/test/test_onnx.py
+++ b/test/test_onnx.py
@@ -79,7 +79,7 @@ def to_numpy(tensor):
         inputs = list(map(to_numpy, inputs))
         outputs = list(map(to_numpy, outputs))
 
-        ort_session = onnxruntime.InferenceSession(onnx_io.getvalue())
+        ort_session = onnxruntime.InferenceSession(onnx_io.getvalue(), providers=onnxruntime.get_available_providers())
         # compute onnxruntime output prediction
         ort_inputs = {ort_session.get_inputs()[i].name: inpt for i, inpt in enumerate(inputs)}
         ort_outs = ort_session.run(None, ort_inputs)
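
Note (illustrative, not part of the patch): the change above passes an explicit
providers list because recent onnxruntime releases expect the execution
providers to be named when an InferenceSession is created, and
get_available_providers() keeps the call hardware-agnostic. The minimal sketch
below mirrors that call pattern end to end; the tiny Linear model, tensor
shapes, and buffer name are made up for demonstration and do not come from the
torchvision test suite.

    # Sketch of the updated InferenceSession call; assumes torch, numpy and
    # onnxruntime are installed. The model here is purely illustrative.
    import io

    import numpy as np
    import torch
    import onnxruntime

    model = torch.nn.Linear(4, 2)
    model.eval()

    # Export to an in-memory buffer, mirroring how the test reads onnx_io.getvalue().
    onnx_io = io.BytesIO()
    torch.onnx.export(model, torch.randn(1, 4), onnx_io)

    # Pass the providers explicitly, as the patched test now does.
    ort_session = onnxruntime.InferenceSession(
        onnx_io.getvalue(), providers=onnxruntime.get_available_providers()
    )

    # Feed a float32 input under the exported input name and run inference.
    ort_inputs = {ort_session.get_inputs()[0].name: np.random.randn(1, 4).astype(np.float32)}
    ort_outs = ort_session.run(None, ort_inputs)
    print(ort_outs[0].shape)  # expected: (1, 2)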