[WASI-NN] Add support for an ONNX Runtime backend using `ort` (#7691)
* add an initial implementation for the ONNX Runtime backend of wasi-nn
* vet: audit ONNX dependencies. This change is the result of a long slog through the dependencies of the `ort` library. The only missing dependency is `compact_str`, which needs further discussion.
* vet: add ONNX audit entry for compact_str 0.7.1
* refactor tests to break out onnx and openvino
* mark the wasi-nn onnx example as publish = false
* update the ONNX classification example
* do not use the wasi-nn onnx feature on riscv or s390 (see the sketch after this list)
* prtest:full fix running WASI-NN ONNX tests across arch/os

Signed-off-by: David Justice <david@devigned.com>
Co-authored-by: Andrew Brown <andrew.brown@intel.com>
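One of the bullets above excludes the wasi-nn onnx feature on riscv and s390 targets. The sketch below only illustrates how such a gate can be written with `cfg`; the feature name, module name, and exact placement are assumptions for illustration, not the literal change in this commit.

```rust
// Hypothetical sketch: compile the ONNX-backed code only when the `onnx`
// feature is enabled and the target is neither riscv64 nor s390x, the two
// architectures the commit excludes.
#[cfg(all(
    feature = "onnx",
    not(any(target_arch = "riscv64", target_arch = "s390x"))
))]
mod onnx_backend {
    // ONNX Runtime (`ort`) integration would live behind this gate.
}
```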
Showing 23 changed files with 11,088 additions and 860 deletions.
crates/test-programs/src/bin/nn_image_classification_onnx.rs (55 additions, 0 deletions)
@@ -0,0 +1,55 @@
use anyhow::Result;
use std::fs;
use wasi_nn::*;

pub fn main() -> Result<()> {
    let model = fs::read("fixture/model.onnx").unwrap();
    println!("[ONNX] Read model, size in bytes: {}", model.len());

    let graph =
        GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU).build_from_bytes([&model])?;

    let mut context = graph.init_execution_context()?;
    println!(
        "[ONNX] Created wasi-nn execution context with ID: {}",
        context
    );

    // Prepare WASI-NN tensor - Tensor data is always a bytes vector
    // Load a tensor that precisely matches the graph input tensor
    let data = fs::read("fixture/tensor.bgr").unwrap();
    println!("[ONNX] Read input tensor, size in bytes: {}", data.len());
    context.set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &data)?;

    // Execute the inferencing
    context.compute()?;
    println!("[ONNX] Executed graph inference");

    // Retrieve the output.
    let mut output_buffer = vec![0f32; 1000];
    context.get_output(0, &mut output_buffer[..])?;
    println!(
        "[ONNX] Found results, sorted top 5: {:?}",
        &sort_results(&output_buffer)[..5]
    );

    Ok(())
}

// Sort the buffer of probabilities. The graph places the match probability for
// each class at the index for that class (e.g. the probability of class 42 is
// placed at buffer[42]). Here we convert to a wrapping InferenceResult and sort
// the results.
fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
    let mut results: Vec<InferenceResult> = buffer
        .iter()
        .enumerate()
        .map(|(c, p)| InferenceResult(c, *p))
        .collect();
    results.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
    results
}

// A wrapper for class ID and match probabilities.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);
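The example above prints raw (class index, probability) pairs. If a labels file were shipped next to the other fixtures, the indices could be mapped to names; the following is only a hypothetical sketch, and `fixture/labels.txt` is not part of this commit.

```rust
use std::fs;

// Hypothetical helper (not part of this commit): map a class index from the
// sorted results back to a human-readable name, assuming a newline-separated
// labels file exists alongside the model and tensor fixtures.
fn label_for(class: usize) -> Option<String> {
    let labels = fs::read_to_string("fixture/labels.txt").ok()?;
    labels.lines().nth(class).map(str::to_string)
}

fn main() {
    // e.g. print the label for the top class index produced by sort_results
    if let Some(name) = label_for(42) {
        println!("class 42 => {name}");
    }
}
```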