Skip to content

Commit

Permalink
wasi-nn: use resources
Browse files Browse the repository at this point in the history
Recent discussion in the wasi-nn proposal (see [wasi-nn#59], e.g.) has
concluded that the right approach for representing wasi-nn "things"
(tensors, graph, etc.) is with a component model _resource_. This
sweeping change brings Wasmtime's implementation in line with that
decision.

Initially I had structured this PR to remove all of the WITX-based
implementation (bytecodealliance#8530). But, after consulting in a Zulip [thread] on
what other WASI proposals aim to do, this PR pivoted to support _both_
the WITX-based and WIT-based ABIs (e.g., preview1 era versus preview2,
component model era). What is clear is that the WITX-based specification
will remain "frozen in time" while the WIT-based implementation moves
forward.

What that means for this PR is a "split world" paradigm. In many places,
we have to distinguish between the `wit` and `witx` versions of the same
thing. This change isn't the end state yet: it's a big step forward
towards bringing Wasmtime back in line with the WIT spec but, despite my
best efforts, doesn't fully fix all the TODOs left behind over several
years of development. I have, however, taken the liberty to refactor and
fix various parts as I came across them (e.g., the ONNX backend). I plan
to continue working on this in future PRs to figure out a good error
paradigm (the current one is too wordy) and device residence.

[wasi-nn#59]: WebAssembly/wasi-nn#59
[thread]: https://bytecodealliance.zulipchat.com/#narrow/stream/219900-wasi/topic/wasi-nn's.20preview1.20vs.20preview2.20timeline

prtest:full
  • Loading branch information
abrown committed Jun 26, 2024
1 parent f4b49b8 commit ee21e9c
Show file tree
Hide file tree
Showing 34 changed files with 1,539 additions and 740 deletions.
50 changes: 20 additions & 30 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions crates/bench-api/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,7 @@ struct BenchState {
struct HostState {
wasi: WasiCtx,
#[cfg(feature = "wasi-nn")]
wasi_nn: wasmtime_wasi_nn::WasiNnCtx,
wasi_nn: wasmtime_wasi_nn::witx::WasiNnCtx,
}

impl BenchState {
Expand Down Expand Up @@ -509,7 +509,7 @@ impl BenchState {
#[cfg(feature = "wasi-nn")]
wasi_nn: {
let (backends, registry) = wasmtime_wasi_nn::preload(&[])?;
wasmtime_wasi_nn::WasiNnCtx::new(backends, registry)
wasmtime_wasi_nn::witx::WasiNnCtx::new(backends, registry)
},
};

Expand Down
5 changes: 4 additions & 1 deletion crates/test-programs/artifacts/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,10 @@ fn build_and_generate_tests() {
}

// Generate a component from each test.
if kind == "nn" || target == "dwarf_imported_memory" || target == "dwarf_shared_memory" {
if target == "dwarf_imported_memory"
|| target == "dwarf_shared_memory"
|| target.starts_with("nn_witx")
{
continue;
}
let adapter = match target.as_str() {
Expand Down
16 changes: 0 additions & 16 deletions crates/test-programs/src/bin/nn_image_classification_winml.rs

This file was deleted.

Original file line number Diff line number Diff line change
@@ -1,18 +1,20 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let model = fs::read("fixture/model.onnx")
.context("the model file to be mapped to the fixture directory")?;
let graph =
GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU).build_from_bytes([&model])?;
let graph = wit::load(
&[model],
wit::GraphEncoding::Onnx,
wit::ExecutionTarget::Cpu,
)?;
let tensor = fs::read("fixture/000000062808.rgb")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = wit::classify(graph, ("input", tensor), "output")?;
let top_five = &sort_results(&results)[..5];
// 963 is meat loaf, meatloaf.
// 963 is "meat loaf, meatloaf."
// https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L963
assert_eq!(top_five[0].class_id(), 963);
println!("found results, sorted top 5: {:?}", top_five);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, wit};

/// Classify an image with an OpenVINO model through the WIT-based (component
/// model) wasi-nn ABI.
///
/// Reads the model IR (XML topology plus binary weights) and a raw BGR tensor
/// from the preopened `fixture` directory, runs inference on the CPU, and
/// prints the five highest-scoring classes.
pub fn main() -> Result<()> {
    // OpenVINO graphs are assembled from two blobs: the topology and weights.
    let topology = fs::read("fixture/model.xml")
        .context("the model file to be mapped to the fixture directory")?;
    let parameters = fs::read("fixture/model.bin")
        .context("the weights file to be mapped to the fixture directory")?;
    let graph = wit::load(
        &[topology, parameters],
        wit::GraphEncoding::Openvino,
        wit::ExecutionTarget::Cpu,
    )?;

    // Feed the raw tensor to the "input" node; scores come back from the
    // final reshape node of the MobilenetV2 prediction head.
    let input = fs::read("fixture/tensor.bgr")
        .context("the tensor file to be mapped to the fixture directory")?;
    let scores = wit::classify(
        graph,
        ("input", input),
        "MobilenetV2/Predictions/Reshape_1",
    )?;

    let sorted = sort_results(&scores);
    println!("found results, sorted top 5: {:?}", &sorted[..5]);
    Ok(())
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, wit};

/// Classify an image using a graph preloaded into the wasi-nn registry,
/// retrieved by name via the WIT-based (component model) ABI.
///
/// The runtime is expected to have registered a graph under "fixtures";
/// the raw tensor is read from the preopened `fixture` directory.
pub fn main() -> Result<()> {
    // No model bytes are shipped here: the graph is looked up by name.
    let graph = wit::load_by_name("fixtures")?;

    let input: Vec<u8> = fs::read("fixture/tensor.bgr")
        .context("the tensor file to be mapped to the fixture directory")?;
    let scores = wit::classify(
        graph,
        ("input", input),
        "MobilenetV2/Predictions/Reshape_1",
    )?;

    // Print only the five best classifications.
    let sorted = sort_results(&scores);
    println!("found results, sorted top 5: {:?}", &sorted[..5]);
    Ok(())
}
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
.build_from_cache("fixtures")?;
let graph = wit::load_by_name("mobilenet")?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = wit::classify(graph, ("input", tensor), "output")?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
assert_eq!(top_five[0].class_id(), 284);
Ok(())
}
22 changes: 22 additions & 0 deletions crates/test-programs/src/bin/nn_witx_image_classification_onnx.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

/// Classify an image with an ONNX model through the legacy WITX-based
/// (preview1-era) wasi-nn ABI.
///
/// Loads the model and a raw RGB tensor from the preopened `fixture`
/// directory, runs inference on the CPU, checks the top classification, and
/// prints the five best results.
pub fn main() -> Result<()> {
    let model_bytes = fs::read("fixture/model.onnx")
        .context("the model file to be mapped to the fixture directory")?;
    let graph = witx::load(
        &[&model_bytes],
        witx::GraphEncoding::Onnx,
        witx::ExecutionTarget::CPU,
    )?;

    let input = fs::read("fixture/000000062808.rgb")
        .context("the tensor file to be mapped to the fixture directory")?;
    let scores = witx::classify(graph, input)?;
    let sorted = sort_results(&scores);

    // 963 is "meat loaf, meatloaf."
    // https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L963
    assert_eq!(sorted[0].class_id(), 963);
    println!("found results, sorted top 5: {:?}", &sorted[..5]);
    Ok(())
}
Original file line number Diff line number Diff line change
@@ -1,18 +1,20 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, witx};

pub fn main() -> Result<()> {
let xml = fs::read("fixture/model.xml")
.context("the model file to be mapped to the fixture directory")?;
let weights = fs::read("fixture/model.bin")
.context("the weights file to be mapped to the fixture directory")?;
let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
.build_from_bytes([&xml, &weights])?;
let graph = witx::load(
&[&xml, &weights],
witx::GraphEncoding::Openvino,
witx::ExecutionTarget::CPU,
)?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = witx::classify(graph, tensor)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

/// Classify an image using a named, preloaded OpenVINO graph through the
/// legacy WITX-based (preview1-era) wasi-nn ABI.
///
/// The runtime is expected to have registered a graph under "fixtures";
/// the raw tensor is read from the preopened `fixture` directory.
pub fn main() -> Result<()> {
    // The WITX ABI still requires encoding/target hints even for named loads.
    let graph = witx::load_by_name(
        "fixtures",
        witx::GraphEncoding::Openvino,
        witx::ExecutionTarget::CPU,
    )?;

    let input: Vec<u8> = fs::read("fixture/tensor.bgr")
        .context("the tensor file to be mapped to the fixture directory")?;
    let scores = witx::classify(graph, input)?;

    // Print only the five best classifications.
    let sorted = sort_results(&scores);
    println!("found results, sorted top 5: {:?}", &sorted[..5]);
    Ok(())
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

/// Classify an image using a named, preloaded ONNX graph through the legacy
/// WITX-based (preview1-era) wasi-nn ABI.
///
/// The runtime is expected to have registered a graph under "mobilenet"; the
/// raw tensor is read from the preopened `fixture` directory. The top result
/// is asserted to be class 284 after printing the five best scores.
pub fn main() -> Result<()> {
    // The WITX ABI still requires encoding/target hints even for named loads.
    let graph = witx::load_by_name(
        "mobilenet",
        witx::GraphEncoding::Onnx,
        witx::ExecutionTarget::CPU,
    )?;

    let input = fs::read("fixture/tensor.bgr")
        .context("the tensor file to be mapped to the fixture directory")?;
    let scores = witx::classify(graph, input)?;
    let sorted = sort_results(&scores);

    println!("found results, sorted top 5: {:?}", &sorted[..5]);
    assert_eq!(sorted[0].class_id(), 284);
    Ok(())
}
Loading

0 comments on commit ee21e9c

Please sign in to comment.