
wasi-nn: use resources #8873

Merged: 5 commits, Jun 27, 2024
50 changes: 20 additions & 30 deletions Cargo.lock

Generated file; diff not rendered by default.

6 changes: 5 additions & 1 deletion ci/vendor-wit.sh
@@ -36,5 +36,9 @@ cp -r $dst crates/wasi-http/wit
# slightly different than above.
repo=https://raw.githubusercontent.com/WebAssembly/wasi-nn
revision=e2310b
curl -L $repo/$revision/wit/wasi-nn.wit -o crates/wasi-nn/wit/wasi-nn.wit
curl -L $repo/$revision/wasi-nn.witx -o crates/wasi-nn/witx/wasi-nn.witx
# TODO: the in-tree `wasi-nn` implementation does not yet fully support the
# latest WIT specification on `main`. To create a baseline for moving forward,
# the in-tree WIT incorporates some but not all of the upstream changes. This
# TODO can be removed once the implementation catches up with the spec.
# curl -L $repo/$revision/wit/wasi-nn.wit -o crates/wasi-nn/wit/wasi-nn.wit
4 changes: 2 additions & 2 deletions crates/bench-api/src/lib.rs
@@ -418,7 +418,7 @@ struct BenchState {
struct HostState {
wasi: WasiCtx,
#[cfg(feature = "wasi-nn")]
wasi_nn: wasmtime_wasi_nn::WasiNnCtx,
wasi_nn: wasmtime_wasi_nn::witx::WasiNnCtx,
}

impl BenchState {
@@ -509,7 +509,7 @@ impl BenchState {
#[cfg(feature = "wasi-nn")]
wasi_nn: {
let (backends, registry) = wasmtime_wasi_nn::preload(&[])?;
wasmtime_wasi_nn::WasiNnCtx::new(backends, registry)
wasmtime_wasi_nn::witx::WasiNnCtx::new(backends, registry)
},
};

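For orientation, here is a minimal sketch (not part of this diff) of how an embedder such as the bench API might assemble the relocated witx-flavored context. Only `preload` and `witx::WasiNnCtx::new` appear in the hunk above; the `witx::add_to_linker` call and the `Host` struct below are assumptions for illustration.

use anyhow::Result;
use wasmtime::{Engine, Linker};

// Hypothetical host state mirroring the `HostState` struct in the hunk above.
struct Host {
    wasi_nn: wasmtime_wasi_nn::witx::WasiNnCtx,
}

fn build_host_and_linker(engine: &Engine) -> Result<(Host, Linker<Host>)> {
    // Load the default backends and an empty graph registry, as in the hunk above.
    let (backends, registry) = wasmtime_wasi_nn::preload(&[])?;
    let host = Host {
        wasi_nn: wasmtime_wasi_nn::witx::WasiNnCtx::new(backends, registry),
    };

    let mut linker = Linker::new(engine);
    // Assumed API: a linker-wiring function living next to `witx::WasiNnCtx`;
    // the exact path and signature are not shown in this diff.
    wasmtime_wasi_nn::witx::add_to_linker(&mut linker, |h: &mut Host| &mut h.wasi_nn)?;
    Ok((host, linker))
}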
5 changes: 4 additions & 1 deletion crates/test-programs/artifacts/build.rs
@@ -90,7 +90,10 @@ fn build_and_generate_tests() {
}

// Generate a component from each test.
if kind == "nn" || target == "dwarf_imported_memory" || target == "dwarf_shared_memory" {
if target == "dwarf_imported_memory"
|| target == "dwarf_shared_memory"
|| target.starts_with("nn_witx")
{
continue;
}
let adapter = match target.as_str() {
16 changes: 0 additions & 16 deletions crates/test-programs/src/bin/nn_image_classification_winml.rs

This file was deleted.

@@ -1,18 +1,20 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let model = fs::read("fixture/model.onnx")
.context("the model file to be mapped to the fixture directory")?;
let graph =
GraphBuilder::new(GraphEncoding::Onnx, ExecutionTarget::CPU).build_from_bytes([&model])?;
let graph = wit::load(
&[model],
wit::GraphEncoding::Onnx,
wit::ExecutionTarget::Cpu,
)?;
let tensor = fs::read("fixture/000000062808.rgb")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = wit::classify(graph, ("input", tensor), "output")?;
let top_five = &sort_results(&results)[..5];
// 963 is meat loaf, meatloaf.
// 963 is "meat loaf, meatloaf."
// https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L963
assert_eq!(top_five[0].class_id(), 963);
println!("found results, sorted top 5: {:?}", top_five);
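The `wit::classify` helper used above addresses inputs and outputs by name, reflecting the resource-based WIT interface this PR targets. A rough sketch of what such a helper could look like over wit-bindgen-style guest bindings follows; the binding names (`Graph`, `Tensor`, `TensorType`, `Error`, `init_execution_context`, `set_input`, `get_output`) and the tensor shape are assumptions for illustration, not code from this PR.

// Sketch only: assumes wit-bindgen-style guest bindings for the resource-based
// wasi-nn interface are in scope (Graph, Tensor, TensorType, Error) and that
// kebab-case WIT names map to the snake_case methods used here.
fn classify(graph: Graph, input: (&str, Vec<u8>), output_name: &str) -> Result<Vec<f32>, Error> {
    // With resources, an execution context is created from the graph resource
    // itself rather than from a bare integer handle.
    let context = graph.init_execution_context()?;

    // Tensors are resources as well; the dimensions are a placeholder for a
    // 224x224 RGB image in FP32.
    let tensor = Tensor::new(&[1, 3, 224, 224], TensorType::Fp32, &input.1);
    context.set_input(input.0, tensor)?;
    context.compute()?;

    // Outputs are looked up by name and decoded from little-endian FP32 bytes.
    let bytes = context.get_output(output_name)?.data();
    Ok(bytes
        .chunks_exact(4)
        .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]]))
        .collect())
}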
@@ -0,0 +1,25 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let xml = fs::read("fixture/model.xml")
.context("the model file to be mapped to the fixture directory")?;
let weights = fs::read("fixture/model.bin")
.context("the weights file to be mapped to the fixture directory")?;
let graph = wit::load(
&[xml, weights],
wit::GraphEncoding::Openvino,
wit::ExecutionTarget::Cpu,
)?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = wit::classify(
graph,
("input", tensor),
"MobilenetV2/Predictions/Reshape_1",
)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
}
@@ -0,0 +1,17 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let graph = wit::load_by_name("fixtures")?;
let tensor: Vec<u8> = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = wit::classify(
graph,
("input", tensor),
"MobilenetV2/Predictions/Reshape_1",
)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
}
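`wit::load_by_name("fixtures")` only succeeds if the host has registered a graph under that name; none of that registration appears in this diff. The sketch below assumes `wasmtime_wasi_nn::preload` accepts (encoding, directory) string pairs, generalizing the empty `preload(&[])` call seen in the bench-api hunk; the "openvino" label and the path are placeholders.

// Host-side sketch (assumed signature): preload a model directory so that the
// resulting registry can answer guest `load-by-name` requests.
fn build_registry() -> anyhow::Result<()> {
    // The tuple shape, the "openvino" label, and the path are all assumptions.
    let preloads = [("openvino".to_string(), "path/to/fixtures".to_string())];
    let (_backends, _registry) = wasmtime_wasi_nn::preload(&preloads)?;
    // The backends and registry would then be handed to the wasi-nn context,
    // as in the bench-api hunk earlier in this diff.
    Ok(())
}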
@@ -1,15 +1,14 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, wit};

pub fn main() -> Result<()> {
let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
.build_from_cache("fixtures")?;
let graph = wit::load_by_name("mobilenet")?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = wit::classify(graph, ("input", tensor), "output")?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
assert_eq!(top_five[0].class_id(), 284);
Ok(())
}
@@ -0,0 +1,22 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

pub fn main() -> Result<()> {
let model = fs::read("fixture/model.onnx")
.context("the model file to be mapped to the fixture directory")?;
let graph = witx::load(
&[&model],
witx::GraphEncoding::Onnx,
witx::ExecutionTarget::CPU,
)?;
let tensor = fs::read("fixture/000000062808.rgb")
.context("the tensor file to be mapped to the fixture directory")?;
let results = witx::classify(graph, tensor)?;
let top_five = &sort_results(&results)[..5];
// 963 is "meat loaf, meatloaf."
// https://github.com/onnx/models/blob/bec48b6a70e5e9042c0badbaafefe4454e072d08/validated/vision/classification/synset.txt#L963
assert_eq!(top_five[0].class_id(), 963);
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
}
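For contrast with the name-addressed WIT helper, the `witx::classify` helper used by these witx-flavored tests (the `nn_witx_*` targets that build.rs above skips from componentization) follows the older handle-based witx ABI: inputs and outputs are addressed by numeric index, and output bytes are copied into a caller-provided buffer. A rough sketch under those assumptions; the binding names, tensor shape, and buffer size below are illustrative, not code from this PR.

// Sketch only: assumes low-level witx-style bindings (integer handles, raw
// buffer output copy) roughly like those of the published wasi-nn crate.
fn classify(graph: Graph, data: Vec<u8>) -> Result<Vec<f32>, Error> {
    let context = init_execution_context(graph)?;
    let tensor = Tensor {
        dimensions: &[1, 3, 224, 224], // placeholder shape for an RGB image
        r#type: TensorType::Fp32,
        data: &data,
    };
    // Inputs and outputs are plain indices in the witx ABI, not names.
    set_input(context, 0, tensor)?;
    compute(context)?;

    // `get_output` copies raw bytes into a caller-provided buffer and returns
    // how many bytes were written; 1001 classes is a placeholder size.
    let mut out = vec![0u8; 1001 * 4];
    let written = unsafe { get_output(context, 0, out.as_mut_ptr(), out.len() as u32)? };
    out.truncate(written as usize);
    Ok(out
        .chunks_exact(4)
        .map(|b| f32::from_le_bytes([b[0], b[1], b[2], b[3]]))
        .collect())
}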
@@ -1,18 +1,20 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{classify, sort_results};
use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding};
use test_programs::nn::{sort_results, witx};

pub fn main() -> Result<()> {
let xml = fs::read("fixture/model.xml")
.context("the model file to be mapped to the fixture directory")?;
let weights = fs::read("fixture/model.bin")
.context("the weights file to be mapped to the fixture directory")?;
let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
.build_from_bytes([&xml, &weights])?;
let graph = witx::load(
&[&xml, &weights],
witx::GraphEncoding::Openvino,
witx::ExecutionTarget::CPU,
)?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = classify(graph, tensor)?;
let results = witx::classify(graph, tensor)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
@@ -0,0 +1,17 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

pub fn main() -> Result<()> {
let graph = witx::load_by_name(
"fixtures",
witx::GraphEncoding::Openvino,
witx::ExecutionTarget::CPU,
)?;
let tensor: Vec<u8> = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = witx::classify(graph, tensor)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
Ok(())
}
@@ -0,0 +1,18 @@
use anyhow::{Context, Result};
use std::fs;
use test_programs::nn::{sort_results, witx};

pub fn main() -> Result<()> {
let graph = witx::load_by_name(
"mobilenet",
witx::GraphEncoding::Onnx,
witx::ExecutionTarget::CPU,
)?;
let tensor = fs::read("fixture/tensor.bgr")
.context("the tensor file to be mapped to the fixture directory")?;
let results = witx::classify(graph, tensor)?;
let top_five = &sort_results(&results)[..5];
println!("found results, sorted top 5: {:?}", top_five);
assert_eq!(top_five[0].class_id(), 284);
Ok(())
}