From b46b8504204d207c8a5d557cebff63e23b4ff6c4 Mon Sep 17 00:00:00 2001 From: Mikhail Katychev Date: Tue, 8 Oct 2024 10:29:23 -0500 Subject: [PATCH 1/5] cargo clippy --all-targets --all-features --fix -- -W clippy::uninlined_format_args -W clippy::pedantic -W clippy::nursery --- c14n/src/_c14n_term.rs | 2 +- c14n/src/hash.rs | 4 +- c14n/src/lib.rs | 2 +- c14n/src/rdfc10.rs | 66 ++++++++++++++-------------- iri/benches/bench1.rs | 12 ++--- iri/src/_regex.rs | 8 ++-- iri/src/_serde.rs | 6 +-- iri/src/_wrap_macro.rs | 4 +- iri/src/_wrapper.rs | 22 +++++----- iri/src/lib.rs | 2 +- iri/src/resolve.rs | 18 ++++---- isomorphism/src/dataset.rs | 6 +-- isomorphism/src/iso_term.rs | 2 +- isomorphism/src/test.rs | 44 +++++++++---------- jsonld/src/context.rs | 2 +- jsonld/src/error.rs | 12 ++--- jsonld/src/loader/chain_loader.rs | 4 +- jsonld/src/loader/closure_loader.rs | 4 +- jsonld/src/loader/file_url_loader.rs | 6 +-- jsonld/src/loader/static_loader.rs | 2 +- jsonld/src/loader_factory.rs | 2 +- jsonld/src/options.rs | 54 +++++++++++------------ jsonld/src/parser.rs | 10 ++--- jsonld/src/parser/adapter.rs | 6 +-- jsonld/src/parser/source.rs | 8 ++-- jsonld/src/serializer.rs | 34 +++++++------- jsonld/src/serializer/engine.rs | 42 +++++++----------- jsonld/src/serializer/rdf_object.rs | 28 ++++++------ jsonld/src/util_traits.rs | 8 ++-- jsonld/src/vocabulary.rs | 2 +- resource/src/lib.rs | 2 +- resource/src/loader/_error.rs | 12 ++--- resource/src/loader/_local.rs | 8 ++-- resource/src/loader/_no.rs | 2 +- resource/src/loader/_trait.rs | 8 ++-- resource/src/resource/_error.rs | 20 ++++----- resource/src/resource/_iter.rs | 4 +- resource/src/resource/_struct.rs | 18 ++++---- rio/src/model.rs | 22 +++++----- rio/src/parser.rs | 10 ++--- rio/src/serializer.rs | 6 +-- sophia/examples/canonicalize.rs | 6 +-- sophia/examples/jsonld-context.rs | 9 ++-- sophia/examples/parse.rs | 34 +++++++------- sophia/examples/serialize.rs | 12 ++--- term/src/_generic.rs | 20 ++++----- term/src/_macro.rs | 8 ++-- turtle/src/parser/gnq.rs | 2 +- turtle/src/parser/gtrig.rs | 10 ++--- turtle/src/parser/nq.rs | 2 +- turtle/src/parser/nt.rs | 2 +- turtle/src/parser/trig.rs | 6 +-- turtle/src/parser/turtle.rs | 2 +- turtle/src/serializer/_pretty.rs | 24 +++++----- turtle/src/serializer/nq.rs | 20 ++++----- turtle/src/serializer/nt.rs | 47 ++++++++++---------- turtle/src/serializer/trig.rs | 49 ++++++++++----------- turtle/src/serializer/turtle.rs | 50 ++++++++++----------- xml/src/parser.rs | 2 +- xml/src/serializer.rs | 26 +++++------ 60 files changed, 426 insertions(+), 439 deletions(-) diff --git a/c14n/src/_c14n_term.rs b/c14n/src/_c14n_term.rs index 1c92e79b..d3035e37 100644 --- a/c14n/src/_c14n_term.rs +++ b/c14n/src/_c14n_term.rs @@ -8,7 +8,7 @@ pub enum C14nTerm { Blank(BnodeId>), Other(T), } -use C14nTerm::*; +use C14nTerm::{Blank, Other}; impl Term for C14nTerm { type BorrowTerm<'x> = &'x Self where Self: 'x; diff --git a/c14n/src/hash.rs b/c14n/src/hash.rs index 541f3072..de1c7fda 100644 --- a/c14n/src/hash.rs +++ b/c14n/src/hash.rs @@ -23,7 +23,7 @@ impl HashFunction for Sha256 { type Output = [u8; 32]; fn initialize() -> Self { - Sha256(sha2::Sha256::new()) + Self(sha2::Sha256::new()) } fn update(&mut self, data: impl AsRef<[u8]>) { @@ -42,7 +42,7 @@ impl HashFunction for Sha384 { type Output = [u8; 48]; fn initialize() -> Self { - Sha384(sha2::Sha384::new()) + Self(sha2::Sha384::new()) } fn update(&mut self, data: impl AsRef<[u8]>) { diff --git a/c14n/src/lib.rs b/c14n/src/lib.rs index 31da7d47..5848c27c 100644 
--- a/c14n/src/lib.rs +++ b/c14n/src/lib.rs @@ -7,7 +7,7 @@ //! //! TODO list: //! - [x] check that UTF-8 byte-by-byte ordering is indeed equivalent to code point ordering. -//! - [ ] use c14n in sophia_isomorphism, replacing the current incomplete algorithm +//! - [ ] use c14n in `sophia_isomorphism`, replacing the current incomplete algorithm //! //! [Sophia]: https://docs.rs/sophia/latest/sophia/ //! [RDF]: https://www.w3.org/TR/rdf-primer/ diff --git a/c14n/src/rdfc10.rs b/c14n/src/rdfc10.rs index b7e42bf7..1a35a95b 100644 --- a/c14n/src/rdfc10.rs +++ b/c14n/src/rdfc10.rs @@ -176,7 +176,7 @@ pub fn relabel_with<'a, H: HashFunction, D: SetDataset>( } } // Step 3 - for (bnid, quads) in state.b2q.iter() { + for (bnid, quads) in &state.b2q { let hash = hash_first_degree_quads::(bnid, &quads[..]); let bnid2 = Rc::clone(bnid); state.h2b.entry(hash).or_default().push(bnid2); @@ -187,7 +187,7 @@ pub fn relabel_with<'a, H: HashFunction, D: SetDataset>( let mut next_h2b = BTreeMap::new(); // TODO once BTreeMap::drain_filter is stabilized, // use it in the loop below instead of reinserting elements into a new map - for (hash, bnids) in state.h2b.into_iter() { + for (hash, bnids) in state.h2b { debug_assert!(!bnids.is_empty()); if bnids.len() > 1 { next_h2b.insert(hash, bnids); @@ -254,7 +254,7 @@ struct C14nState<'a, H: HashFunction, T: Term> { canonical: BnodeIssuer, /// Not specified in the spec: memoizing the results of hash 1st degree b2h: BTreeMap, H::Output>, - /// Not specified in the spec: maximum recursion factor in hash_n_degree_quads + /// Not specified in the spec: maximum recursion factor in `hash_n_degree_quads` depth_factor: f32, /// Not specified in the spec: maximum number of nodes on which permutations will be computed permutation_limit: usize, @@ -272,7 +272,7 @@ impl<'a, H: HashFunction, T: Term> C14nState<'a, H, T> { } } - /// Implements https://www.w3.org/TR/rdf-canon/#hash-related-blank-node + /// Implements <https://www.w3.org/TR/rdf-canon/#hash-related-blank-node> fn hash_related_bnode( &self, related: &str, @@ -301,7 +301,7 @@ impl<'a, H: HashFunction, T: Term> C14nState<'a, H, T> { input.finalize() } - /// Implements https://www.w3.org/TR/rdf-canon/#hash-nd-quads + /// Implements <https://www.w3.org/TR/rdf-canon/#hash-nd-quads> fn hash_n_degree_quads( &self, identifier: &str, @@ -336,7 +336,7 @@ impl<'a, H: HashFunction, T: Term> C14nState<'a, H, T> { let mut data_to_hash = H::initialize(); // Step 5 let mut ret_issuer: Option = None; - for (related_hash, mut blank_node) in hn.into_iter() { + for (related_hash, mut blank_node) in hn { data_to_hash.update(hex(&related_hash)); let mut chosen_path = String::new(); let mut chosen_issuer: Option = None; @@ -420,15 +420,15 @@ struct BnodeIssuer { } impl BnodeIssuer { - fn new(prefix: BnodeId<&'static str>) -> Self { - BnodeIssuer { + const fn new(prefix: BnodeId<&'static str>) -> Self { + Self { prefix, issued: BTreeMap::new(), issued_order: vec![], } } - /// Implements https://www.w3.org/TR/rdf-canon/#issue-identifier + /// Implements <https://www.w3.org/TR/rdf-canon/#issue-identifier> /// modified to also return a boolean indicating whether the issued identifier /// was newly created (true) or if it existed before (false) fn issue(&mut self, bnid: &str) -> (&str, bool) { @@ -445,7 +445,7 @@ impl BnodeIssuer { } } -/// Implements https://www.w3.org/TR/rdf-canon/#hash-1d-quads +/// Implements <https://www.w3.org/TR/rdf-canon/#hash-1d-quads> /// with the difference that the C14n state is not passed; /// instead, the quad list corresponding to bnid is passed directly fn hash_first_degree_quads(bnid: &str, quads: &[&Q]) -> H::Output { @@ -465,7 +465,7 @@ fn hash_first_degree_quads(bnid: &str, quads: &[&Q]) - .collect();
nquads.sort_unstable(); let mut hasher = H::initialize(); - for line in nquads.into_iter() { + for line in nquads { hasher.update(&line); } let ret = hasher.finalize(); @@ -491,13 +491,13 @@ fn nq_for_hash(term: T, buffer: &mut String, ref_bnid: &str) { fn hex(hash: &impl AsRef<[u8]>) -> String { let mut digest = String::with_capacity(64); for b in hash.as_ref() { - write!(&mut digest, "{:02x}", b).unwrap(); + write!(&mut digest, "{b:02x}").unwrap(); } digest } fn smaller_path(path1: &str, path2: &str) -> bool { - use std::cmp::Ordering::*; + use std::cmp::Ordering::{Equal, Greater, Less}; match Ord::cmp(&path1.len(), &path2.len()) { Less => true, Equal => path1 < path2, @@ -528,13 +528,13 @@ mod test { "_:e0 .", "_:e1 .", ]); - let exp = r#" _:c14n0 . + let exp = r" _:c14n0 . _:c14n1 . _:c14n0 . _:c14n1 . -"#; +"; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -549,14 +549,14 @@ _:c14n1 . "_:e1 _:e3 .", "_:e2 _:e3 .", ]); - let exp = r#" _:c14n2 . + let exp = r" _:c14n2 . _:c14n3 . _:c14n0 _:c14n1 . _:c14n2 _:c14n1 . _:c14n3 _:c14n0 . -"#; +"; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -571,14 +571,14 @@ _:c14n3 _:c14n0 . "_:e3 _:e4 .", "_:e4 _:e0 .", ]); - let exp = r#"_:c14n0 _:c14n4 . + let exp = r"_:c14n0 _:c14n4 . _:c14n1 _:c14n0 . _:c14n2 _:c14n1 . _:c14n3 _:c14n2 . _:c14n4 _:c14n3 . -"#; +"; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -630,7 +630,7 @@ _:c14n4 _:c14n3 . "_:e4 _:e2 .", "_:e4 _:e3 .", ]); - let exp = r#"_:c14n0 _:c14n1 . + let exp = r"_:c14n0 _:c14n1 . _:c14n0 _:c14n2 . _:c14n0 _:c14n3 . _:c14n0 _:c14n4 . @@ -650,9 +650,9 @@ _:c14n4 _:c14n0 . _:c14n4 _:c14n1 . _:c14n4 _:c14n2 . _:c14n4 _:c14n3 . -"#; +"; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -700,14 +700,14 @@ _:c14n4 _:c14n3 . "_:e3 _:e4 .", "_:e4 _:e2 .", ]); - let exp = r#"_:c14n0 _:c14n1 . + let exp = r"_:c14n0 _:c14n1 . _:c14n1 _:c14n0 . _:c14n2 _:c14n4 . _:c14n3 _:c14n2 . _:c14n4 _:c14n3 . -"#; +"; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -729,7 +729,7 @@ _:c14n4 _:c14n3 . _:c14n0 . "#; let got = c14n_nquads(&dataset).unwrap(); - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } @@ -740,7 +740,7 @@ _:c14n4 _:c14n3 . } /// Simplistic Quad parser, useful for writing test cases. - /// It is based on eq_quad below. + /// It is based on `eq_quad` below. fn ez_quads<'a>(lines: &[&'a str]) -> std::collections::HashSet>> { lines.iter().map(|line| ez_quad(line)).collect() } @@ -804,15 +804,15 @@ _:c14n4 _:c14n3 . "_:e0 .", "_:e1 .", ]); - let exp = r#" _:c14n1 . + let exp = r" _:c14n1 . _:c14n0 . _:c14n0 . _:c14n1 . 
-"#; +"; let mut got = Vec::::new(); normalize_sha384(&dataset, &mut got).unwrap(); let got = unsafe { String::from_utf8_unchecked(got) }; - println!(">>>> GOT\n{}>>>> EXPECTED\n{}<<<<", got, exp); + println!(">>>> GOT\n{got}>>>> EXPECTED\n{exp}<<<<"); assert!(got == exp); } } diff --git a/iri/benches/bench1.rs b/iri/benches/bench1.rs index be87e33c..2b97af7a 100644 --- a/iri/benches/bench1.rs +++ b/iri/benches/bench1.rs @@ -1,12 +1,12 @@ //! This benchmark is used to compare the time it takes to create -//! * borrowing MownStr's vs. standard &str references -//! * owning MownStr's vs. Strings +//! * borrowing `MownStr`'s vs. standard &str references +//! * owning `MownStr`'s vs. Strings //! //! The results of `borrowed_mownstr` should therefore be compared to `refs`, //! and that of `owned_mownstr` should be compared to `strings`. use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion}; -use sophia_iri::resolve::*; +use sophia_iri::resolve::{BaseIri, BaseIriRef}; fn parse(c: &mut Criterion) { c.bench_with_input( @@ -25,7 +25,7 @@ fn parse(c: &mut Criterion) { black_box(BaseIriRef::new(*iri).is_ok()); } } - }) + }); }, ); } @@ -42,7 +42,7 @@ fn resolve_from_scratch(c: &mut Criterion) { black_box(&base.resolve(*rel).unwrap()); } } - }) + }); }, ); } @@ -58,7 +58,7 @@ fn resolve_mutualized(c: &mut Criterion) { black_box(i.0.resolve(*rel).unwrap()); } } - }) + }); }, ); } diff --git a/iri/src/_regex.rs b/iri/src/_regex.rs index 5e0c0de0..09a9db1d 100644 --- a/iri/src/_regex.rs +++ b/iri/src/_regex.rs @@ -8,7 +8,7 @@ use regex::Regex; /// is not `None`. /// Future implementations may be smarter about this. #[inline] -pub fn is_valid_suffixed_iri_ref(ns: &str, suffix: Option<&str>) -> bool { +#[must_use] pub fn is_valid_suffixed_iri_ref(ns: &str, suffix: Option<&str>) -> bool { match suffix { None => is_valid_iri_ref(ns), Some(suffix) => { @@ -22,19 +22,19 @@ pub fn is_valid_suffixed_iri_ref(ns: &str, suffix: Option<&str>) -> bool { /// Check whether `txt` is a valid (absolute or relative) IRI reference. #[inline] -pub fn is_valid_iri_ref(txt: &str) -> bool { +#[must_use] pub fn is_valid_iri_ref(txt: &str) -> bool { IRI_REGEX.is_match(txt) || IRELATIVE_REF_REGEX.is_match(txt) } /// Check whether `txt` is an absolute IRI reference. #[inline] -pub fn is_absolute_iri_ref(txt: &str) -> bool { +#[must_use] pub fn is_absolute_iri_ref(txt: &str) -> bool { IRI_REGEX.is_match(txt) } /// Check whether `txt` is a relative IRI reference. 
#[inline] -pub fn is_relative_iri_ref(txt: &str) -> bool { +#[must_use] pub fn is_relative_iri_ref(txt: &str) -> bool { IRELATIVE_REF_REGEX.is_match(txt) } diff --git a/iri/src/_serde.rs b/iri/src/_serde.rs index fb1c8e76..d621147a 100644 --- a/iri/src/_serde.rs +++ b/iri/src/_serde.rs @@ -1,4 +1,4 @@ -use super::*; +use super::{Iri, IriRef, Result}; use serde::{ de::{Error, Unexpected}, Deserialize, Serialize, @@ -11,7 +11,7 @@ impl<'a, T: Borrow + Deserialize<'a>> Deserialize<'a> for Iri { D: serde::Deserializer<'a>, { let inner: T = T::deserialize(deserializer)?; - Iri::new(inner) + Self::new(inner) .map_err(|err| D::Error::invalid_value(Unexpected::Str(&err.0), &"valid IRI")) } } @@ -31,7 +31,7 @@ impl<'a, T: Borrow + Deserialize<'a>> Deserialize<'a> for IriRef { D: serde::Deserializer<'a>, { let inner: T = T::deserialize(deserializer)?; - IriRef::new(inner) + Self::new(inner) .map_err(|err| D::Error::invalid_value(Unexpected::Str(&err.0), &"valid IRI reference")) } } diff --git a/iri/src/_wrap_macro.rs b/iri/src/_wrap_macro.rs index 05d01f7d..7c5aae5f 100644 --- a/iri/src/_wrap_macro.rs +++ b/iri/src/_wrap_macro.rs @@ -243,7 +243,7 @@ macro_rules! wrap { "If it is not, it may result in undefined behaviour.", )] #[allow(dead_code)] - pub const fn new_unchecked_const(inner: &'static $bid) -> Self { + #[must_use] pub const fn new_unchecked_const(inner: &'static $bid) -> Self { $wid(inner) } } @@ -415,7 +415,7 @@ pub mod test_wrap_borrowing { // only check that this compiles #[allow(dead_code)] fn new_unchecked() { - let _: Foo = Foo::new_unchecked("".into()); + let _: Foo = Foo::new_unchecked(String::new()); } // only check that this compiles diff --git a/iri/src/_wrapper.rs b/iri/src/_wrapper.rs index ff322006..10f6e22f 100644 --- a/iri/src/_wrapper.rs +++ b/iri/src/_wrapper.rs @@ -1,7 +1,7 @@ //! I provide generic wrappers around `Borrow` types, //! guaranteeing that their underlying string is a valid IRI or IRI reference. 
use super::resolve::{BaseIri, BaseIriRef}; -use super::{InvalidIri, IsIri, IsIriRef, *}; +use super::{InvalidIri, IsIri, IsIriRef, Result, is_absolute_iri_ref, is_valid_iri_ref, wrap}; use std::borrow::Borrow; use std::fmt::Display; @@ -108,46 +108,46 @@ mod test { #[test] fn iri() { for (txt, (abs, ..)) in POSITIVE_IRIS { - assert!(Iri::new(*txt).is_ok() == *abs) + assert!(Iri::new(*txt).is_ok() == *abs); } for txt in NEGATIVE_IRIS { - assert!(Iri::new(*txt).is_err()) + assert!(Iri::new(*txt).is_err()); } } #[test] fn iri_box() { for (txt, (abs, ..)) in POSITIVE_IRIS { - assert!(Iri::new(Box::from(txt as &str)).is_ok() == *abs) + assert!(Iri::new(Box::from(txt as &str)).is_ok() == *abs); } for txt in NEGATIVE_IRIS { - assert!(Iri::new(Box::from(txt as &str)).is_err()) + assert!(Iri::new(Box::from(txt as &str)).is_err()); } } #[test] fn iri_ref() { for (txt, _) in POSITIVE_IRIS { - assert!(IriRef::new(*txt).is_ok()) + assert!(IriRef::new(*txt).is_ok()); } for txt in NEGATIVE_IRIS { - assert!(IriRef::new(*txt).is_err()) + assert!(IriRef::new(*txt).is_err()); } for (txt, _) in RELATIVE_IRIS { - assert!(IriRef::new(*txt).is_ok()) + assert!(IriRef::new(*txt).is_ok()); } } #[test] fn iri_ref_box() { for (txt, _) in POSITIVE_IRIS { - assert!(IriRef::new(Box::from(txt as &str)).is_ok()) + assert!(IriRef::new(Box::from(txt as &str)).is_ok()); } for txt in NEGATIVE_IRIS { - assert!(IriRef::new(Box::from(txt as &str)).is_err()) + assert!(IriRef::new(Box::from(txt as &str)).is_err()); } for (txt, _) in RELATIVE_IRIS { - assert!(IriRef::new(Box::from(txt as &str)).is_ok()) + assert!(IriRef::new(Box::from(txt as &str)).is_ok()); } } diff --git a/iri/src/lib.rs b/iri/src/lib.rs index 23d5a5ef..c4c47321 100644 --- a/iri/src/lib.rs +++ b/iri/src/lib.rs @@ -7,7 +7,7 @@ //! //! # Feature gates //! -//! - **test_data** exposes the [`test`](`mod@test`) module, +//! - **`test_data`** exposes the [`test`](`mod@test`) module, //! which contains arrays of good and bad IRIs, //! useful for testing purposes, possibly in other crates. //! diff --git a/iri/src/resolve.rs b/iri/src/resolve.rs index e889f8bd..ee202083 100644 --- a/iri/src/resolve.rs +++ b/iri/src/resolve.rs @@ -42,7 +42,7 @@ impl> BaseIri { iri: R, buf: &'a mut String, ) -> R::OutputAbs { - R::output_abs(self.0.resolve_into(iri.borrow(), buf).map(|_| &buf[..])) + R::output_abs(self.0.resolve_into(iri.borrow(), buf).map(|()| &buf[..])) } } @@ -88,7 +88,7 @@ impl> BaseIriRef { iri: R, buf: &'a mut String, ) -> R::OutputRel { - R::output_rel(self.0.resolve_into(iri.borrow(), buf).map(|_| &buf[..])) + R::output_rel(self.0.resolve_into(iri.borrow(), buf).map(|()| &buf[..])) } /// Convert this to a [`BaseIri`]. 
@@ -176,7 +176,7 @@ mod test { let rbi = BaseIri::new(*txt); if parsed.0 { - assert!(rbi.is_ok(), "<{}> → {:?}", txt, rbi); + assert!(rbi.is_ok(), "<{txt}> → {rbi:?}"); let bi = rbi.unwrap(); assert_eq!(bi.scheme(), parsed.1.unwrap()); assert_eq!(bi.authority(), parsed.2); @@ -189,7 +189,7 @@ mod test { assert_eq!(bi, Iri::new(*txt).unwrap().to_base()); assert_eq!(bi, Iri::new(*txt).unwrap().as_base()); } else { - assert!(rbi.is_err(), "<{}> → {:?}", txt, rbi); + assert!(rbi.is_err(), "<{txt}> → {rbi:?}"); } } } @@ -198,9 +198,9 @@ mod test { fn negative() { for txt in NEGATIVE_IRIS { let rpir = BaseIriRef::new(*txt); - assert!(rpir.is_err(), "<{}> → {:?}", txt, rpir); + assert!(rpir.is_err(), "<{txt}> → {rpir:?}"); let rpi = BaseIri::new(*txt); - assert!(rpi.is_err(), "<{}> → {:?}", txt, rpi); + assert!(rpi.is_err(), "<{txt}> → {rpi:?}"); } } @@ -208,13 +208,13 @@ mod test { fn relative() { for (rel, abs) in RELATIVE_IRIS { let rbir = BaseIriRef::new(*rel); - assert!(rbir.is_ok(), "<{}> → {:?}", rel, rbir); + assert!(rbir.is_ok(), "<{rel}> → {rbir:?}"); let rbi = BaseIri::new(*rel); if rel != abs { - assert!(rbi.is_err(), "<{}> → {:?}", rel, rbi); + assert!(rbi.is_err(), "<{rel}> → {rbi:?}"); } else { - assert!(rbi.is_ok(), "<{}> → {:?}", rel, rbi); + assert!(rbi.is_ok(), "<{rel}> → {rbi:?}"); assert_eq!(rbir.unwrap().as_iri_ref(), rbi.unwrap().as_iri_ref()); } } diff --git a/isomorphism/src/dataset.rs b/isomorphism/src/dataset.rs index d1832143..2edf9988 100644 --- a/isomorphism/src/dataset.rs +++ b/isomorphism/src/dataset.rs @@ -1,5 +1,5 @@ -use super::hash::*; -use super::iso_term::*; +use super::hash::hash_quad_with; +use super::iso_term::{IsoTerm, cmp_quads}; use sophia_api::quad::{iter_spog, Quad}; use sophia_api::{ dataset::{DTerm, Dataset}, @@ -137,7 +137,7 @@ fn make_map<'a, T: Term>( fn make_equivalence_classes(map: &HashMap<&str, u64>) -> HashMap { let mut ret = HashMap::new(); - for (_, v) in map.iter() { + for (_, v) in map { let n = ret.entry(*v).or_insert(0); *n += 1; } diff --git a/isomorphism/src/iso_term.rs b/isomorphism/src/iso_term.rs index 084eb8ae..976a759d 100644 --- a/isomorphism/src/iso_term.rs +++ b/isomorphism/src/iso_term.rs @@ -67,7 +67,7 @@ impl Term for IsoTerm { self.0.eq(other) } fn hash(&self, state: &mut H) { - self.0.hash(state) + self.0.hash(state); } fn into_term(self) -> U { self.0.into_term() diff --git a/isomorphism/src/test.rs b/isomorphism/src/test.rs index bec78767..3d6e9493 100644 --- a/isomorphism/src/test.rs +++ b/isomorphism/src/test.rs @@ -150,7 +150,7 @@ fn quoted_triple() -> Result<(), Box> { fn make_chain(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); let nodes: Vec<_> = (0..ids.len()) - .map(|i| MyTerm::Bnode(&ids[i..i + 1])) + .map(|i| MyTerm::Bnode(&ids[i..=i])) .collect(); let mut dataset = Vec::with_capacity(ids.len() - 1); for i in 1..nodes.len() { @@ -237,11 +237,11 @@ fn cycle_almost_pathological() -> Result<(), Box> { fn make_clique(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); let nodes: Vec<_> = (0..ids.len()) - .map(|i| MyTerm::Bnode(&ids[i..i + 1])) + .map(|i| MyTerm::Bnode(&ids[i..=i])) .collect(); let mut dataset = Vec::with_capacity(ids.len() * ids.len()); - for n1 in nodes.iter() { - for n2 in nodes.iter() { + for n1 in &nodes { + for n2 in &nodes { dataset.push([*n1, rel, *n2, *n1]); } } @@ -265,7 +265,7 @@ fn clique() -> Result<(), Box> { fn make_tree(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); let nodes: Vec<_> = (0..ids.len()) - 
.map(|i| MyTerm::Bnode(&ids[i..i + 1])) + .map(|i| MyTerm::Bnode(&ids[i..=i])) .collect(); let mut dataset = Vec::with_capacity(ids.len() * ids.len()); let mut i = 0; @@ -335,11 +335,11 @@ impl Term for MyTerm { fn kind(&self) -> TermKind { match self { - MyTerm::Iri(_) => TermKind::Iri, - MyTerm::Bnode(_) => TermKind::BlankNode, - MyTerm::String(_) => TermKind::Literal, - MyTerm::Number(_) => TermKind::Literal, - MyTerm::Triple(_) => TermKind::Triple, + Self::Iri(_) => TermKind::Iri, + Self::Bnode(_) => TermKind::BlankNode, + Self::String(_) => TermKind::Literal, + Self::Number(_) => TermKind::Literal, + Self::Triple(_) => TermKind::Triple, } } @@ -348,7 +348,7 @@ impl Term for MyTerm { } fn iri(&self) -> Option>> { - if let MyTerm::Iri(iri) = *self { + if let Self::Iri(iri) = *self { Some(IriRef::new_unchecked(iri.into())) } else { None @@ -356,7 +356,7 @@ impl Term for MyTerm { } fn bnode_id(&self) -> Option>> { - if let MyTerm::Bnode(id) = *self { + if let Self::Bnode(id) = *self { Some(BnodeId::new_unchecked(id.into())) } else { None @@ -364,19 +364,19 @@ impl Term for MyTerm { } fn lexical_form(&self) -> Option> { - if let MyTerm::String(val) = *self { + if let Self::String(val) = *self { Some(val.into()) - } else if let MyTerm::Number(n) = *self { - Some(format!("{}", n).into()) + } else if let Self::Number(n) = *self { + Some(format!("{n}").into()) } else { None } } fn datatype(&self) -> Option>> { - if let MyTerm::String(_) = *self { + if let Self::String(_) = *self { xsd::string.iri() - } else if let MyTerm::Number(_) = *self { + } else if let Self::Number(_) = *self { xsd::integer.iri() } else { None @@ -387,15 +387,15 @@ impl Term for MyTerm { None } - fn triple(&self) -> Option<[MyTerm; 3]> { - if let MyTerm::Triple(spo) = *self { + fn triple(&self) -> Option<[Self; 3]> { + if let Self::Triple(spo) = *self { let spo: Vec<_> = spo .split(' ') .map(|t| { if t.starts_with('#') { - MyTerm::Iri(t) + Self::Iri(t) } else { - MyTerm::Bnode(t) + Self::Bnode(t) } }) .collect(); @@ -405,7 +405,7 @@ impl Term for MyTerm { } } - fn to_triple(self) -> Option<[MyTerm; 3]> { + fn to_triple(self) -> Option<[Self; 3]> { self.triple() } } diff --git a/jsonld/src/context.rs b/jsonld/src/context.rs index 917fd7ef..04c65443 100644 --- a/jsonld/src/context.rs +++ b/jsonld/src/context.rs @@ -49,7 +49,7 @@ impl TryIntoContextRef for &str { let iri = ArcIri::new_unchecked("x-string://".into()); let doc = Value::parse_str(self, |span| locspan::Location::new(iri.clone(), span))?; let context = Value::extract_context(doc) - .map_err(|e| format!("Could not extract @context: {}", e))?; + .map_err(|e| format!("Could not extract @context: {e}"))?; let rdoc = RemoteDocument::new(None, None, context); Ok(RemoteDocumentReference::Loaded(rdoc)) } diff --git a/jsonld/src/error.rs b/jsonld/src/error.rs index 32c4bb79..05841ee7 100644 --- a/jsonld/src/error.rs +++ b/jsonld/src/error.rs @@ -40,13 +40,13 @@ pub enum JsonLdError { /// Poisonned lock /// - /// NB: PoisonError is generic, so we keep the message only + /// NB: `PoisonError` is generic, so we keep the message only #[error("poisonned lock for document loader: {0}")] PoisonnedLock(String), /// An expansion error was encountered while parsing /// - /// NB: ExpandError is generic, so we keep the message only + /// NB: `ExpandError` is generic, so we keep the message only #[error("error while expanding: {0}")] ExpandError(String), @@ -58,20 +58,20 @@ pub enum JsonLdError { impl From>, Location>> for JsonLdError { fn from(other: Meta>, Location>) -> Self { let 
Meta(error, location) = other; - JsonLdError::InvalidJson { error, location } + Self::InvalidJson { error, location } } } impl From, Span>> for JsonLdError { fn from(other: Meta, Span>) -> Self { let Meta(error, location) = other; - JsonLdError::InvalidJsonLiteral { error, location } + Self::InvalidJsonLiteral { error, location } } } impl From> for JsonLdError { fn from(value: PoisonError) -> Self { - JsonLdError::PoisonnedLock(format!("{value}")) + Self::PoisonnedLock(format!("{value}")) } } @@ -80,6 +80,6 @@ where ExpandError: Display, { fn from(value: ExpandError) -> Self { - JsonLdError::ExpandError(format!("{value}")) + Self::ExpandError(format!("{value}")) } } diff --git a/jsonld/src/loader/chain_loader.rs b/jsonld/src/loader/chain_loader.rs index dad2ac64..3d9c55e0 100644 --- a/jsonld/src/loader/chain_loader.rs +++ b/jsonld/src/loader/chain_loader.rs @@ -11,8 +11,8 @@ pub struct ChainLoader(L1, L2); impl ChainLoader { /// Build a new chain loader - pub fn new(l1: L1, l2: L2) -> Self { - ChainLoader(l1, l2) + pub const fn new(l1: L1, l2: L2) -> Self { + Self(l1, l2) } } diff --git a/jsonld/src/loader/closure_loader.rs b/jsonld/src/loader/closure_loader.rs index ed06d0d8..a19dcd6c 100644 --- a/jsonld/src/loader/closure_loader.rs +++ b/jsonld/src/loader/closure_loader.rs @@ -1,4 +1,4 @@ -use super::*; +use super::{Arc, Iri, Value}; use json_ld::future::{BoxFuture, FutureExt}; use json_ld::{Loader, RemoteDocument}; use json_syntax::Parse; @@ -59,7 +59,7 @@ where F: Send + FnMut(Iri) -> BoxFuture<'f, Result>, { /// Creates a new closure loader with the given closure. - pub fn new(f: F) -> Self { + pub const fn new(f: F) -> Self { Self { closure: f } } } diff --git a/jsonld/src/loader/file_url_loader.rs b/jsonld/src/loader/file_url_loader.rs index a679d027..b0e158ae 100644 --- a/jsonld/src/loader/file_url_loader.rs +++ b/jsonld/src/loader/file_url_loader.rs @@ -1,4 +1,4 @@ -use super::*; +use super::{Arc, Iri, Value}; use json_ld::future::{BoxFuture, FutureExt}; use json_ld::{Loader, RemoteDocument}; use json_syntax::Parse; @@ -45,7 +45,7 @@ impl Loader>, Location>>> for FileUrlLoader { let url_parsed = Url::parse(url_str).map_err(Self::Error::InvalidUrl)?; let path = url_parsed .to_file_path() - .map_err(|_| Self::Error::BadFileUrl(url_str.into()))?; + .map_err(|()| Self::Error::BadFileUrl(url_str.into()))?; let file = File::open(path).map_err(Self::Error::IO)?; let mut buf_reader = BufReader::new(file); let mut contents = String::new(); @@ -68,7 +68,7 @@ impl Loader>, Location>>> for FileUrlLoader { impl FileUrlLoader { /// Creates a new file system loader with the given content `parser`. - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Self::default() } } diff --git a/jsonld/src/loader/static_loader.rs b/jsonld/src/loader/static_loader.rs index d1068202..f6dda524 100644 --- a/jsonld/src/loader/static_loader.rs +++ b/jsonld/src/loader/static_loader.rs @@ -33,7 +33,7 @@ impl Default for StaticLoader { impl StaticLoader { /// Creates a new [`StaticLoader`] - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Self::default() } diff --git a/jsonld/src/loader_factory.rs b/jsonld/src/loader_factory.rs index acd4339e..1e34a85d 100644 --- a/jsonld/src/loader_factory.rs +++ b/jsonld/src/loader_factory.rs @@ -45,7 +45,7 @@ where { /// Create a new [`DefaultLoaderFactory`]. 
#[inline] - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Self::default() } } diff --git a/jsonld/src/options.rs b/jsonld/src/options.rs index 31708bf0..340942e0 100644 --- a/jsonld/src/options.rs +++ b/jsonld/src/options.rs @@ -14,7 +14,7 @@ use locspan::Location; use locspan::Span; use sophia_iri::Iri; -use crate::context::*; +use crate::context::{ContextRef, IntoContextRef, TryIntoContextRef}; use crate::loader::NoLoader; use crate::loader_factory::ClosureLoaderFactory; use crate::loader_factory::DefaultLoaderFactory; @@ -43,7 +43,7 @@ pub struct JsonLdOptions { impl JsonLdOptions> { /// Build a new JSON-LD options. - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Self::default() } } @@ -60,7 +60,7 @@ impl JsonLdOptions { /// /// [`compactArrays`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-compactarrays /// [compaction]: https://www.w3.org/TR/json-ld11-api/#dfn-compact - pub fn compact_arrays(&self) -> bool { + pub const fn compact_arrays(&self) -> bool { self.inner.compact_arrays } @@ -68,7 +68,7 @@ impl JsonLdOptions { /// /// [`compactToRelative`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-compacttorelative /// [compacting]: https://www.w3.org/TR/json-ld11-api/#dfn-compact - pub fn compact_to_relative(&self) -> bool { + pub const fn compact_to_relative(&self) -> bool { self.inner.compact_to_relative } @@ -76,14 +76,14 @@ impl JsonLdOptions { /// The returned factory can yield a [`documentLoader`]. /// /// [`documentLoader`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-documentloader - pub fn document_loader_factory(&self) -> &LF { + pub const fn document_loader_factory(&self) -> &LF { &self.loader_factory } /// [`expandContext`] is a context that is used to initialize the active context when expanding a document. /// /// [`expandContext`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-expandcontext - pub fn expand_context(&self) -> Option<&ContextRef> { + pub const fn expand_context(&self) -> Option<&ContextRef> { self.inner.expand_context.as_ref() } @@ -92,7 +92,7 @@ impl JsonLdOptions { /// If false, order is not considered in processing. /// /// [`ordered`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-ordered - pub fn ordered(&self) -> bool { + pub const fn ordered(&self) -> bool { self.inner.ordered } @@ -106,14 +106,14 @@ impl JsonLdOptions { /// [`JsonLd1_1`]: ProcessingMode::JsonLd1_1 /// [JSON-LD 1.0]: https://json-ld.org/spec/FCGS/json-ld-syntax/20130222/ /// [JSON-LD 1.1]: https://www.w3.org/TR/json-ld11/ - pub fn processing_mode(&self) -> ProcessingMode { + pub const fn processing_mode(&self) -> ProcessingMode { self.inner.processing_mode } /// [`produceGeneralizedRdf`] authorizes the JSON-LD to emit blank nodes for triple predicates, otherwise they will be omitted. /// /// [`produceGeneralizedRdf`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-producegeneralizedrdf - pub fn produce_generalized_rdf(&self) -> bool { + pub const fn produce_generalized_rdf(&self) -> bool { self.inner.produce_generalized_rdf } @@ -121,7 +121,7 @@ impl JsonLdOptions { /// a base direction are transformed to and from RDF. /// /// [`rdfDirection`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-rdfdirection - pub fn rdf_direction(&self) -> Option { + pub const fn rdf_direction(&self) -> Option { self.inner.rdf_direction } @@ -129,7 +129,7 @@ impl JsonLdOptions { /// to use native JSON values in value objects avoiding the need for an explicit `@type`. 
/// /// [`useNativeTypes`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-usenativetypes - pub fn use_native_types(&self) -> bool { + pub const fn use_native_types(&self) -> bool { self.use_native_types } @@ -138,7 +138,7 @@ impl JsonLdOptions { /// `@type`. /// /// [`useRdfType`]: https://www.w3.org/TR/json-ld11-api/#dom-jsonldoptions-userdftype - pub fn use_rdf_type(&self) -> bool { + pub const fn use_rdf_type(&self) -> bool { self.use_rdf_type } @@ -146,25 +146,25 @@ impl JsonLdOptions { /// /// NB: this is not a standard option of JSON-LD, /// but a specific option of the [`json_ld` crate](json_ld). - pub fn expansion_policy(&self) -> Policy { + pub const fn expansion_policy(&self) -> Policy { self.inner.expansion_policy } /// Return the number of spaces to use for indentation in the JSON output. /// /// NB: this is not a standard option of JSON-LD. - pub fn spaces(&self) -> u16 { + pub const fn spaces(&self) -> u16 { self.spaces } /// The context to be used to compact the output, if any. /// /// NB: this is not a standard option of JSON-LD. - pub fn compact_context(&self) -> Option<&ContextRef> { + pub const fn compact_context(&self) -> Option<&ContextRef> { self.compact_context.as_ref() } - pub(crate) fn inner(&self) -> &InnerOptions { + pub(crate) const fn inner(&self) -> &InnerOptions { &self.inner } @@ -187,13 +187,13 @@ impl JsonLdOptions { } /// Change the [`compact_arrays`](Self::compact_arrays) flag - pub fn with_compact_arrays(mut self, compact_arrays: bool) -> Self { + pub const fn with_compact_arrays(mut self, compact_arrays: bool) -> Self { self.inner.compact_arrays = compact_arrays; self } /// Change the [`compact_to_relative`](Self::compact_to_relative) flag - pub fn with_compact_to_relative(mut self, compact_to_relative: bool) -> Self { + pub const fn with_compact_to_relative(mut self, compact_to_relative: bool) -> Self { self.inner.compact_to_relative = compact_to_relative; self } @@ -328,19 +328,19 @@ impl JsonLdOptions { } /// Change the [`ordered`](Self::ordered) flag - pub fn with_ordered(mut self, ordered: bool) -> Self { + pub const fn with_ordered(mut self, ordered: bool) -> Self { self.inner.ordered = ordered; self } /// Change the [`processing_mode`](Self::processing_mode) - pub fn with_processing_mode(mut self, version: ProcessingMode) -> Self { + pub const fn with_processing_mode(mut self, version: ProcessingMode) -> Self { self.inner.processing_mode = version; self } /// Change the [`produce_generalized_rdf`](Self::produce_generalized_rdf) flag - pub fn with_produce_generalized_rdf(mut self, produce_generalized_rdf: bool) -> Self { + pub const fn with_produce_generalized_rdf(mut self, produce_generalized_rdf: bool) -> Self { self.inner.produce_generalized_rdf = produce_generalized_rdf; self } @@ -348,7 +348,7 @@ impl JsonLdOptions { /// Change the [`rdf_direction`](Self::rdf_direction) /// /// See also [`with_no_rdf_direction`](Self::with_no_rdf_direction). - pub fn with_rdf_direction(mut self, rdf_direction: RdfDirection) -> Self { + pub const fn with_rdf_direction(mut self, rdf_direction: RdfDirection) -> Self { self.inner.rdf_direction = Some(rdf_direction); self } @@ -356,31 +356,31 @@ impl JsonLdOptions { /// Change the [`rdf_direction`](Self::rdf_direction) /// /// See also [`with_rdf_direction`](Self::with_rdf_direction).
- pub fn with_no_rdf_direction(mut self) -> Self { + pub const fn with_no_rdf_direction(mut self) -> Self { self.inner.rdf_direction = None; self } /// Change the [`use_native_types`](Self::use_native_types) flag - pub fn with_use_native_types(mut self, flag: bool) -> Self { + pub const fn with_use_native_types(mut self, flag: bool) -> Self { self.use_native_types = flag; self } /// Change the [`use_rdf_type`](Self::use_rdf_type) flag - pub fn with_use_rdf_type(mut self, flag: bool) -> Self { + pub const fn with_use_rdf_type(mut self, flag: bool) -> Self { self.use_rdf_type = flag; self } /// Change the [expansion policy](Self::expansion_policy) - pub fn with_expansion_policy(mut self, policy: Policy) -> Self { + pub const fn with_expansion_policy(mut self, policy: Policy) -> Self { self.inner.expansion_policy = policy; self } /// Changes the [`spaces`](Self::spaces) option - pub fn with_spaces(mut self, spaces: u16) -> Self { + pub const fn with_spaces(mut self, spaces: u16) -> Self { self.spaces = spaces; self } diff --git a/jsonld/src/parser.rs b/jsonld/src/parser.rs index 34ee7a8e..4f569cf9 100644 --- a/jsonld/src/parser.rs +++ b/jsonld/src/parser.rs @@ -51,8 +51,8 @@ impl Default for JsonLdParser> { impl JsonLdParser> { /// Make a new [`JsonLdParser`] with the default options - pub fn new() -> Self { - JsonLdParser { + #[must_use] pub fn new() -> Self { + Self { options: JsonLdOptions::default(), } } @@ -60,12 +60,12 @@ impl JsonLdParser> { impl JsonLdParser { /// Make a new [`JsonLdParser`] with the given options - pub fn new_with_options(options: JsonLdOptions) -> Self { - JsonLdParser { options } + pub const fn new_with_options(options: JsonLdOptions) -> Self { + Self { options } } /// Borrow the options of this parser - pub fn options(&self) -> &JsonLdOptions { + pub const fn options(&self) -> &JsonLdOptions { &self.options } diff --git a/jsonld/src/parser/adapter.rs b/jsonld/src/parser/adapter.rs index 4c87e9f3..e7b10f80 100644 --- a/jsonld/src/parser/adapter.rs +++ b/jsonld/src/parser/adapter.rs @@ -71,19 +71,19 @@ impl SophiaTerm for RdfTerm { impl From for RdfTerm { fn from(value: RdfO) -> Self { - RdfTerm(value) + Self(value) } } impl From for RdfTerm { fn from(value: RdfS) -> Self { - RdfTerm(Term::Id(value)) + Self(Term::Id(value)) } } impl From for RdfTerm { fn from(value: ArcIri) -> Self { - RdfTerm(Term::Id(Id::Iri(value))) + Self(Term::Id(Id::Iri(value))) } } diff --git a/jsonld/src/parser/source.rs b/jsonld/src/parser/source.rs index 1445a965..d1a71a3d 100644 --- a/jsonld/src/parser/source.rs +++ b/jsonld/src/parser/source.rs @@ -22,7 +22,7 @@ pub enum JsonLdQuadSource { impl JsonLdQuadSource { pub(crate) fn from_err>(err: E) -> Self { - JsonLdQuadSource::Err(Some(err.into())) + Self::Err(Some(err.into())) } } @@ -37,14 +37,14 @@ impl Source for JsonLdQuadSource { F: FnMut(Self::Item<'_>) -> Result<(), E>, { match self { - JsonLdQuadSource::Quads(quads) => { + Self::Quads(quads) => { if let Some(quad) = quads.next() { - f(quad).map(|_| true).map_err(SinkError) + f(quad).map(|()| true).map_err(SinkError) } else { Ok(false) } } - JsonLdQuadSource::Err(opt) => { + Self::Err(opt) => { if let Some(err) = opt.take() { Err(SourceError(err)) } else { diff --git a/jsonld/src/serializer.rs b/jsonld/src/serializer.rs index 75dc5386..2dcfd5ec 100644 --- a/jsonld/src/serializer.rs +++ b/jsonld/src/serializer.rs @@ -3,13 +3,13 @@ //! //!
[`Serialize RDF as JSON-LD Algorithm`]: https://www.w3.org/TR/json-ld11-api/#serialize-rdf-as-json-ld-algorithm -use crate::error::*; +use crate::error::JsonLdError; use crate::loader::NoLoader; -use crate::options::*; +use crate::options::JsonLdOptions; use json_syntax::print::Indent; use json_syntax::print::{Options, Print}; use json_syntax::Value as JsonValue; -use sophia_api::serializer::*; +use sophia_api::serializer::{QuadSerializer, Stringifier}; use sophia_api::source::{QuadSource, SinkError, StreamResult}; mod engine; @@ -38,12 +38,12 @@ impl JsonLdSerializer { impl JsonLdSerializer { /// Build a new JSON-LD serializer writing to `write`, with the given options. - pub fn new_with_options(target: W, options: JsonLdOptions) -> Self { - JsonLdSerializer { target, options } + pub const fn new_with_options(target: W, options: JsonLdOptions) -> Self { + Self { options, target } } /// Borrow this serializer's options. - pub fn options(&self) -> &JsonLdOptions { + pub const fn options(&self) -> &JsonLdOptions { &self.options } @@ -115,25 +115,25 @@ pub struct JsonTarget(JsonValue<()>); impl Jsonifier { /// Create a new serializer which targets a [`JsonValue`]. #[inline] - pub fn new_jsonifier() -> Self { - JsonLdSerializer::new(JsonTarget(JsonValue::Null)) + #[must_use] pub fn new_jsonifier() -> Self { + Self::new(JsonTarget(JsonValue::Null)) } } impl Jsonifier { /// Create a new serializer which targets a [`JsonValue`] with custom options. #[inline] - pub fn new_jsonifier_with_options(options: JsonLdOptions) -> Self { - JsonLdSerializer::new_with_options(JsonTarget(JsonValue::Null), options) + pub const fn new_jsonifier_with_options(options: JsonLdOptions) -> Self { + Self::new_with_options(JsonTarget(JsonValue::Null), options) } - /// Get a reference to the converted JsonValue + /// Get a reference to the converted `JsonValue` #[inline] - pub fn as_json(&self) -> &JsonValue<()> { + pub const fn as_json(&self) -> &JsonValue<()> { &self.target.0 } - /// Extract the converted JsonValue + /// Extract the converted `JsonValue` #[inline] pub fn to_json(&mut self) -> JsonValue<()> { let mut ret = JsonValue::Null; @@ -165,16 +165,16 @@ pub type JsonLdStringifier = JsonLdSerializer, L>; impl JsonLdStringifier { /// Create a new serializer which targets a string. #[inline] - pub fn new_stringifier() -> Self { - JsonLdSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } } impl JsonLdStringifier { /// Create a new serializer which targets a string with custom options.
#[inline] - pub fn new_stringifier_with_options(options: JsonLdOptions) -> Self { - JsonLdSerializer::new_with_options(Vec::new(), options) + pub const fn new_stringifier_with_options(options: JsonLdOptions) -> Self { + Self::new_with_options(Vec::new(), options) } } diff --git a/jsonld/src/serializer/engine.rs b/jsonld/src/serializer/engine.rs index 9905af55..75582de2 100644 --- a/jsonld/src/serializer/engine.rs +++ b/jsonld/src/serializer/engine.rs @@ -1,7 +1,7 @@ -use super::rdf_object::*; -use crate::error::*; -use crate::options::{ProcessingMode::*, *}; -use crate::util_traits::*; +use super::rdf_object::RdfObject; +use crate::error::JsonLdError; +use crate::options::{ProcessingMode::{JsonLd1_0, JsonLd1_1}, JsonLdOptions, RdfDirection}; +use crate::util_traits::{HashMapUtil, QuadJsonLdUtil, TermJsonLdUtil, VecUtil}; use json_syntax::object::Object; use json_syntax::{Parse, Value as JsonValue}; use locspan::Meta; @@ -63,7 +63,7 @@ impl<'a, L> Engine<'a, L> { if !q.is_jsonld() { return Ok(()); } - let g_id = q.g().map(|g| g.as_id()).unwrap_or_else(|| Box::from(" ")); + let g_id = q.g().map_or_else(|| Box::from(" "), |g| g.as_id()); let s_id = q.s().as_id(); let is = self.index(g_id.clone(), s_id.clone()); if q.g().is_some() { @@ -122,21 +122,18 @@ impl<'a, L> Engine<'a, L> { where T: Term, { - match o.kind() { - TermKind::Literal => RdfObject::try_from_term(o).unwrap(), - _ => { - let o_id = o.as_id(); - RdfObject::Node(self.index(g_id.to_string(), o_id.clone()), o_id) - } + if o.kind() == TermKind::Literal { RdfObject::try_from_term(o).unwrap() } else { + let o_id = o.as_id(); + RdfObject::Node(self.index(g_id.to_string(), o_id.clone()), o_id) } } - /// Get the result as a JsonValue. + /// Get the result as a `JsonValue`. pub fn into_json(mut self) -> Result, JsonLdError> { // check all list_seeds to mark them, if appropriate, as list nodes, // and also recursively mark other list nodes (traversing back rdf:rest links) let list_seeds = std::mem::take(&mut self.list_seeds); - for inode in list_seeds.into_iter() { + for inode in list_seeds { self.mark_list_node(inode); } // check that candidate compound literals are indeed compound literals @@ -244,7 +241,7 @@ impl<'a, L> Engine<'a, L> { ) -> Result, JsonLdError> { let mut obj = Object::new(); push_entry(&mut obj, "@id", id.into()); - for (key, vals) in node.iter() { + for (key, vals) in node { if key.as_ref() == "@graph" { continue; } @@ -395,16 +392,14 @@ impl<'a, L> Engine<'a, L> { fn is_list_node(node: &HashMap, Vec>) -> bool { 2 <= node.len() && node.len() <= 3 - && node.get(RDF_FIRST).map(|v| v.len() == 1).unwrap_or(false) + && node.get(RDF_FIRST).is_some_and(|v| v.len() == 1) && node .get(RDF_REST) - .map(|v| v.len() == 1 && v[0].is_node()) - .unwrap_or(false) + .is_some_and(|v| v.len() == 1 && v[0].is_node()) && (node.len() == 2 || node .get("@type") - .map(|v| v.len() == 1 && v[0].eq_node(RDF_LIST)) - .unwrap_or(false)) + .is_some_and(|v| v.len() == 1 && v[0].eq_node(RDF_LIST))) } // check if node is a compound literal fn is_compound_literal(node: &HashMap, Vec>) -> bool { 2 <= node.len() && node.len() <= 3 && node .get(RDF_DIRECTION) - .map(|v| v.len() == 1 && v[0].is_literal()) - .unwrap_or(false) + .is_some_and(|v| v.len() == 1 && v[0].is_literal()) && node .get(RDF_VALUE) - .map(|v| v.len() == 1 && v[0].is_literal()) - .unwrap_or(false) + .is_some_and(|v| v.len() == 1 && v[0].is_literal()) && (node.len() == 2 || node .get(RDF_LANGUAGE) - .map(|v| v.len() == 1 && v[0].is_literal()) - .unwrap_or(false) +
.is_some_and(|v| v.len() == 1 && v[0].is_literal())) } const NS_18N: &str = "https://www.w3.org/ns/i18n#"; diff --git a/jsonld/src/serializer/rdf_object.rs b/jsonld/src/serializer/rdf_object.rs index d7a53174..5f053eb9 100644 --- a/jsonld/src/serializer/rdf_object.rs +++ b/jsonld/src/serializer/rdf_object.rs @@ -1,7 +1,7 @@ -//! A private enum type used internally by JsonLdSerializer +//! A private enum type used internally by `JsonLdSerializer` use sophia_api::term::{IriRef, LanguageTag, TermKind, TryFromTerm}; -#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum RdfObject { LangString(Box, LanguageTag>), TypedLiteral(Box, IriRef>), @@ -9,33 +9,33 @@ pub enum RdfObject { } impl RdfObject { - pub fn is_literal(&self) -> bool { + pub const fn is_literal(&self) -> bool { matches!( self, - RdfObject::LangString(..) | RdfObject::TypedLiteral(..) + Self::LangString(..) | Self::TypedLiteral(..) ) } - pub fn is_node(&self) -> bool { - matches!(self, RdfObject::Node(..)) + pub const fn is_node(&self) -> bool { + matches!(self, Self::Node(..)) } pub fn is_iri(&self) -> bool { - matches!(self, RdfObject::Node(_, id) if !id.starts_with("_:")) + matches!(self, Self::Node(_, id) if !id.starts_with("_:")) } pub fn eq_node(&self, other_id: &str) -> bool { - matches!(self, RdfObject::Node(_, id) if id.as_ref()==other_id) + matches!(self, Self::Node(_, id) if id.as_ref()==other_id) } pub fn as_str(&self) -> &str { match self { - RdfObject::LangString(lit, _) => lit.as_ref(), - RdfObject::TypedLiteral(lit, _) => lit.as_ref(), - RdfObject::Node(_, id) => id, + Self::LangString(lit, _) => lit.as_ref(), + Self::TypedLiteral(lit, _) => lit.as_ref(), + Self::Node(_, id) => id, } } } impl From<(usize, String)> for RdfObject { fn from(other: (usize, String)) -> Self { - RdfObject::Node(other.0, other.1.into()) + Self::Node(other.0, other.1.into()) } } @@ -48,10 +48,10 @@ impl TryFromTerm for RdfObject { let lex: Box = term.lexical_form().unwrap().into(); if let Some(tag) = term.language_tag() { let tag = tag.map_unchecked(Into::into); - Ok(RdfObject::LangString(lex, tag)) + Ok(Self::LangString(lex, tag)) } else { let dt = term.datatype().unwrap().map_unchecked(Into::into); - Ok(RdfObject::TypedLiteral(lex, dt)) + Ok(Self::TypedLiteral(lex, dt)) } } _ => Err(RdfObjectError {}), diff --git a/jsonld/src/util_traits.rs b/jsonld/src/util_traits.rs index f7af0700..6eeaf7bb 100644 --- a/jsonld/src/util_traits.rs +++ b/jsonld/src/util_traits.rs @@ -1,7 +1,7 @@ -//! Utility traits used internally by JsonLdSerializer +//! Utility traits used internally by `JsonLdSerializer` use sophia_api::quad::Quad; -use sophia_api::term::{Term, TermKind::*}; -use std::collections::hash_map::Entry::*; +use sophia_api::term::{Term, TermKind::{BlankNode, Iri, Literal}}; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::HashMap; pub trait TermJsonLdUtil { @@ -44,7 +44,7 @@ impl QuadJsonLdUtil for Q { self.s().is_subject() && self.p().is_iri() && self.o().is_object() - && self.g().map(|g| g.is_subject()).unwrap_or(true) + && self.g().map_or(true, |g| g.is_subject()) } } diff --git a/jsonld/src/vocabulary.rs b/jsonld/src/vocabulary.rs index 1f0860a2..334285ea 100644 --- a/jsonld/src/vocabulary.rs +++ b/jsonld/src/vocabulary.rs @@ -112,7 +112,7 @@ impl LanguageTagVocabularyMut for ArcVoc { /// Self-explanatory bnode index for JSON-LD processing. /// -/// We are not using Sophia's BnodeId because it does not allocate the '_:'. 
+/// We are not using Sophia's `BnodeId` because it does not allocate the '_:'. /// Since instances of this type are always created via [`ArcVoc`] from a valid bnode identifier, /// we don't need to implement any validity check. #[derive(Clone, Debug, Eq, Hash, PartialEq)] diff --git a/resource/src/lib.rs b/resource/src/lib.rs index 508961d5..3932f428 100644 --- a/resource/src/lib.rs +++ b/resource/src/lib.rs @@ -83,7 +83,7 @@ mod test { .unwrap() } - /// Test impl of TypedResource + /// Test impl of `TypedResource` pub struct WithId(Resource); impl TryFrom> for WithId { diff --git a/resource/src/loader/_error.rs b/resource/src/loader/_error.rs index f33febd0..91aa2e68 100644 --- a/resource/src/loader/_error.rs +++ b/resource/src/loader/_error.rs @@ -25,13 +25,13 @@ pub enum LoaderError { impl LoaderError { /// Return the IRI that caused this error - pub fn iri(&self) -> IriBuf { + #[must_use] pub fn iri(&self) -> IriBuf { let iri = match self { - LoaderError::UnsupportedIri(iri, _) => iri, - LoaderError::NotFound(iri) => iri, - LoaderError::IoError(iri, _) => iri, - LoaderError::CantGuessSyntax(iri) => iri, - LoaderError::ParseError(iri, _) => iri, + Self::UnsupportedIri(iri, _) => iri, + Self::NotFound(iri) => iri, + Self::IoError(iri, _) => iri, + Self::CantGuessSyntax(iri) => iri, + Self::ParseError(iri, _) => iri, }; iri.clone() } diff --git a/resource/src/loader/_local.rs b/resource/src/loader/_local.rs index 2e7e9bf6..fdb9a7f2 100644 --- a/resource/src/loader/_local.rs +++ b/resource/src/loader/_local.rs @@ -1,4 +1,4 @@ -use super::{util::*, *}; +use super::{util::{IriBuf, iri_buf}, Loader, LoaderError}; use sophia_iri::Iri; use std::borrow::Borrow; use std::fmt::Debug; @@ -24,7 +24,7 @@ impl LocalLoader { .into_iter() .map(|(iri, path)| Self::check(iri, path)) .collect::, LocalLoaderError>>()?; - Ok(LocalLoader { + Ok(Self { caches: checked_caches, }) } @@ -41,7 +41,7 @@ impl LocalLoader { } /// Wrap this loader into an `Arc`. - pub fn arced(self) -> Arc { + #[must_use] pub fn arced(self) -> Arc { Arc::new(self) } @@ -94,7 +94,7 @@ impl Loader for LocalLoader { #[cfg(feature = "xml")] "rdf", ] { - let alt = Iri::new_unchecked(format!("{}.{}", iri, ext)); + let alt = Iri::new_unchecked(format!("{iri}.{ext}")); if let Ok(res) = self.get(alt) { return Ok(res); } diff --git a/resource/src/loader/_no.rs b/resource/src/loader/_no.rs index c91fe962..b05b7b52 100644 --- a/resource/src/loader/_no.rs +++ b/resource/src/loader/_no.rs @@ -1,4 +1,4 @@ -use super::*; +use super::{Loader, LoaderError}; use sophia_iri::Iri; use std::borrow::Borrow; use std::fmt::Debug; diff --git a/resource/src/loader/_trait.rs b/resource/src/loader/_trait.rs index 98ae2466..55331861 100644 --- a/resource/src/loader/_trait.rs +++ b/resource/src/loader/_trait.rs @@ -1,4 +1,4 @@ -use super::{util::*, *}; +use super::{util::iri_buf, LoaderError}; use crate::{Resource, ResourceError, TypedResource}; #[cfg(feature = "jsonld")] use futures_util::FutureExt; @@ -38,7 +38,7 @@ pub trait Loader: Sync + Sized { let bufread = io::BufReader::new(&data[..]); match &ctype[..] 
{ "text/turtle" => turtle::TurtleParser { - base: Some(iri.as_ref().map_unchecked(|t| t.to_string())), + base: Some(iri.as_ref().map_unchecked(std::string::ToString::to_string)), } .parse(bufread) .collect_triples() @@ -54,7 +54,7 @@ pub trait Loader: Sync + Sized { use sophia_api::prelude::{Quad, QuadParser, QuadSource}; use sophia_jsonld::{loader::ClosureLoader, JsonLdOptions, JsonLdParser}; let options = JsonLdOptions::new() - .with_base(iri.as_ref().map_unchecked(|t| t.into())) + .with_base(iri.as_ref().map_unchecked(std::convert::Into::into)) .with_document_loader_factory(ClosureLoaderFactory::new(|| { ClosureLoader::new(|url| { async move { @@ -79,7 +79,7 @@ pub trait Loader: Sync + Sized { #[cfg(feature = "xml")] "application/rdf+xml" => sophia_xml::parser::RdfXmlParser { - base: Some(iri.as_ref().map_unchecked(|t| t.to_string())), + base: Some(iri.as_ref().map_unchecked(std::string::ToString::to_string)), } .parse(bufread) .collect_triples() diff --git a/resource/src/resource/_error.rs b/resource/src/resource/_error.rs index 681bc3a1..71822866 100644 --- a/resource/src/resource/_error.rs +++ b/resource/src/resource/_error.rs @@ -81,15 +81,15 @@ where /// (*not* the resource from which it was discovered). pub fn resource_id(&self) -> SimpleTerm { match self { - ResourceError::IriNotAbsolute(iriref) => iriref.as_simple(), - ResourceError::LoaderError(err) => err.iri().into_term(), - ResourceError::GraphError { id, .. } => id.as_simple(), - ResourceError::NoValueFor { id, .. } => id.as_simple(), - ResourceError::UnexpectedMultipleValueFor { id, .. } => id.as_simple(), - ResourceError::MissingType { id, .. } => id.as_simple(), - ResourceError::UnexpectedKind { id, .. } => id.as_simple(), - ResourceError::UnexpectedDatatype { id, .. } => id.as_simple(), - ResourceError::UnexpectedValue { id, .. } => id.as_simple(), + Self::IriNotAbsolute(iriref) => iriref.as_simple(), + Self::LoaderError(err) => err.iri().into_term(), + Self::GraphError { id, .. } => id.as_simple(), + Self::NoValueFor { id, .. } => id.as_simple(), + Self::UnexpectedMultipleValueFor { id, .. } => id.as_simple(), + Self::MissingType { id, .. } => id.as_simple(), + Self::UnexpectedKind { id, .. } => id.as_simple(), + Self::UnexpectedDatatype { id, .. } => id.as_simple(), + Self::UnexpectedValue { id, .. } => id.as_simple(), } } } @@ -99,7 +99,7 @@ where E: Error + Send + Sync + 'static, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + write!(f, "{self:?}") } } diff --git a/resource/src/resource/_iter.rs b/resource/src/resource/_iter.rs index 1ffbee4e..a991b875 100644 --- a/resource/src/resource/_iter.rs +++ b/resource/src/resource/_iter.rs @@ -1,6 +1,6 @@ use std::marker::PhantomData; -use super::{Resource, ResourceError::NoValueFor, *}; +use super::{Resource, ResourceError::NoValueFor, ResourceError, ResourceResult, TypedResource}; use crate::Loader; use sophia_api::{graph::CollectibleGraph, prelude::*, term::SimpleTerm}; @@ -19,7 +19,7 @@ impl LadderTermIterator { /// `start` is expected to be `Some(Ok(first_blank_node)); /// if it is `Some(Err(_))`, the iterator will yield this error; /// if it is `None`, the iterator will be empty. 
- pub fn new( + pub const fn new( start: Option, G>>, value: SimpleTerm<'static>, next: SimpleTerm<'static>, diff --git a/resource/src/resource/_struct.rs b/resource/src/resource/_struct.rs index 5860800f..d915397f 100644 --- a/resource/src/resource/_struct.rs +++ b/resource/src/resource/_struct.rs @@ -8,7 +8,7 @@ use sophia_iri::is_absolute_iri_ref; use std::borrow::Borrow; use std::sync::Arc; -use super::{ResourceError::*, *}; +use super::{ResourceError::{GraphError, IriNotAbsolute, LoaderError, NoValueFor, UnexpectedMultipleValueFor}, LadderResourceIterator, LadderTermIterator, LadderTypedIterator, ResourceError, ResourceResult, TypedResource}; /// A [`Resource`] represents a specific node in a given graph. #[derive(Debug)] @@ -27,7 +27,7 @@ where /// Constructor pub fn new(id: T, base: Option>, graph: Arc, loader: Arc) -> Self { let id = id.into_term(); - Resource { + Self { id, base, graph, @@ -36,22 +36,22 @@ where } /// The identifying term of this resource - pub fn id(&self) -> &SimpleTerm<'static> { + #[must_use] pub const fn id(&self) -> &SimpleTerm<'static> { &self.id } /// The URL of the underlying graph of this resource - pub fn base(&self) -> Option<&Iri> { + #[must_use] pub const fn base(&self) -> Option<&Iri> { self.base.as_ref() } /// The underlying graph of this resource - pub fn graph(&self) -> &Arc { + #[must_use] pub const fn graph(&self) -> &Arc { &self.graph } /// The loader used to load neighbouring resources - pub fn loader(&self) -> &Arc { + #[must_use] pub const fn loader(&self) -> &Arc { &self.loader } @@ -443,7 +443,7 @@ where let current = match self.get_term(predicate) { Err(NoValueFor { .. }) => None, Err(err) => Some(Err(err)), - Ok(id) => Some(Ok(Resource { + Ok(id) => Some(Ok(Self { id, base: self.base.clone(), graph: self.graph.clone(), @@ -571,7 +571,7 @@ where } } } - Ok(Resource::new( + Ok(Self::new( t.borrow_term(), self.base.clone(), self.graph.clone(), @@ -591,7 +591,7 @@ impl Clone for Resource { } } -pub(crate) fn to_iri>(iri_ref: IriRef) -> Result, IriRef>> { +pub fn to_iri>(iri_ref: IriRef) -> Result, IriRef>> { if is_absolute_iri_ref(iri_ref.as_str()) { Ok(Iri::new_unchecked(iri_ref.unwrap())) } else { diff --git a/rio/src/model.rs b/rio/src/model.rs index 0883d0e3..887d9583 100644 --- a/rio/src/model.rs +++ b/rio/src/model.rs @@ -8,7 +8,7 @@ //! which ensures the validity of the underlying data. //! //! The [`Trusted`] wrapper is used to materialize the fact that we trust the underlying data of Rio types. -use rio_api::model::{Quad as RioQuad, Term as RioTerm, Triple as RioTriple, *}; +use rio_api::model::{Quad as RioQuad, Term as RioTerm, Triple as RioTriple, BlankNode, GeneralizedQuad, GeneralizedTerm, GraphName, Literal, NamedNode, Variable}; use sophia_api::ns::{rdf, xsd}; use sophia_api::quad::{QBorrowTerm, Quad, Spog}; use sophia_api::term::{BnodeId, LanguageTag, Term, TermKind, VarName}; @@ -104,7 +104,7 @@ impl<'a> Term for Trusted> { } fn lexical_form(l: Literal) -> MownStr { - use Literal::*; + use Literal::{LanguageTaggedString, Simple, Typed}; let value = match l { Simple { value } => value, LanguageTaggedString { value, .. } => value, @@ -114,7 +114,7 @@ fn lexical_form(l: Literal) -> MownStr { } fn datatype(l: Literal) -> IriRef { - use Literal::*; + use Literal::{LanguageTaggedString, Simple, Typed}; let dt = match l { Simple { .. } => xsd::string.iriref(), LanguageTaggedString { .. 
} => rdf::langString.iriref(), @@ -206,7 +206,7 @@ impl<'a> Term for Trusted> { type BorrowTerm<'x> = Self where Self: 'x; fn kind(&self) -> TermKind { - use GraphName::*; + use GraphName::{BlankNode, NamedNode}; match self.0 { NamedNode(_) => TermKind::Iri, BlankNode(_) => TermKind::BlankNode, @@ -238,7 +238,7 @@ impl<'a> Term for Trusted> { type BorrowTerm<'x> = Self where Self: 'x; fn kind(&self) -> TermKind { - use RioTerm::*; + use RioTerm::{BlankNode, Literal, NamedNode, Triple}; match self.0 { NamedNode(_) => TermKind::Iri, BlankNode(_) => TermKind::BlankNode, @@ -314,7 +314,7 @@ impl<'a> Term for Trusted> { type BorrowTerm<'x> = Self where Self: 'x; fn kind(&self) -> TermKind { - use GeneralizedTerm::*; + use GeneralizedTerm::{BlankNode, Literal, NamedNode, Triple, Variable}; match self.0 { NamedNode(_) => TermKind::Iri, BlankNode(_) => TermKind::BlankNode, @@ -526,7 +526,7 @@ mod test { #[test] fn blank_node() { - assert_consistent_term_impl(&Trusted(BlankNode { id: "foo" })) + assert_consistent_term_impl(&Trusted(BlankNode { id: "foo" })); } #[test] @@ -595,7 +595,7 @@ mod test { #[test] fn graph_name_blank_node() { let t: GraphName = BlankNode { id: "foo" }.into(); - assert_consistent_term_impl(&Trusted(t)) + assert_consistent_term_impl(&Trusted(t)); } #[test] @@ -607,7 +607,7 @@ mod test { #[test] fn term_blank_node() { let t: RioTerm = BlankNode { id: "foo" }.into(); - assert_consistent_term_impl(&Trusted(t)) + assert_consistent_term_impl(&Trusted(t)); } #[test] @@ -661,7 +661,7 @@ mod test { #[test] fn gterm_blank_node() { let t: RioTerm = BlankNode { id: "foo" }.into(); - assert_consistent_term_impl(&Trusted(t)) + assert_consistent_term_impl(&Trusted(t)); } #[test] @@ -715,6 +715,6 @@ mod test { #[test] fn gterm_variable() { let t: GeneralizedTerm = Variable { name: "foo" }.into(); - assert_consistent_term_impl(&Trusted(t)) + assert_consistent_term_impl(&Trusted(t)); } } diff --git a/rio/src/parser.rs b/rio/src/parser.rs index 7c23ccf4..2aaa3a74 100644 --- a/rio/src/parser.rs +++ b/rio/src/parser.rs @@ -9,7 +9,7 @@ use std::error::Error; use crate::model::Trusted; -use sophia_api::source::{StreamError, StreamError::*, StreamResult}; +use sophia_api::source::{StreamError, StreamError::{SinkError, SourceError}, StreamResult}; /// Wrap a Rio [`TriplesParser`](rio_api::parser::TriplesParser) /// into a Sophia [`TripleSource`](sophia_api::source::TripleSource). @@ -111,12 +111,12 @@ where } /// This intermediate type is required, -/// because Rio requires that the error type of triple_handler/quad_handler +/// because Rio requires that the error type of `triple_handler/quad_handler` /// implement From (or whatever Rio-specific error returned by the parser). /// /// This is costless, though, -/// because RioStreamError's internal representation is identical to StreamError, -/// so the final type conversion performed by into_stream_error is actually +/// because `RioStreamError`'s internal representation is identical to `StreamError`, +/// so the final type conversion performed by `into_stream_error` is actually /// just for pleasing the compiler. 
enum RioStreamError { /// Equivalent to [`StreamError::SourceError`] @@ -130,7 +130,7 @@ where E2: Error + Send + Sync + 'static, { fn from(other: E1) -> Self { - RioStreamError::Source(other) + Self::Source(other) } } impl From> for StreamError diff --git a/rio/src/serializer.rs b/rio/src/serializer.rs index e4b41f1b..b1403969 100644 --- a/rio/src/serializer.rs +++ b/rio/src/serializer.rs @@ -76,7 +76,7 @@ where }) } -/// Convert this triple of SimpleTerms to a RioTriple if possible +/// Convert this triple of `SimpleTerms` to a `RioTriple` if possible /// (i.e. if it is a strict RDF triple) fn convert_triple<'a>( t: &'a [SimpleTerm<'a>; 3], @@ -147,10 +147,10 @@ enum Stack { Empty, Node(Box<(T, Stack)>), } -use Stack::*; +use Stack::{Empty, Node}; impl Stack { /// Get the triple at the head of the stack. - fn head(&self) -> Option<&T> { + const fn head(&self) -> Option<&T> { match self { Empty => None, Node(b) => Some(&b.0), diff --git a/sophia/examples/canonicalize.rs b/sophia/examples/canonicalize.rs index 5885d1dc..beee5f48 100644 --- a/sophia/examples/canonicalize.rs +++ b/sophia/examples/canonicalize.rs @@ -3,13 +3,13 @@ //! using the [RDFC-1.0] canonicalization algorithm. //! //! Parameters of the RDFC-1.0 can be provided via the following environment variables: -//! * SOPHIA_RDFC10_DEPTH_FACTOR -//! * SOPHIA_RDFC10_PERMUTATION_LIMIT +//! * `SOPHIA_RDFC10_DEPTH_FACTOR` +//! * `SOPHIA_RDFC10_PERMUTATION_LIMIT` //! //! [N-Quads]: https://www.w3.org/TR/n-quads/ //! [RDFC-1.0]: https://www.w3.org/TR/rdf-canon/ -use std::env::{var, VarError::*}; +use std::env::{var, VarError::NotPresent}; use std::io::{stdin, stdout, BufReader, BufWriter}; use sophia::api::prelude::*; diff --git a/sophia/examples/jsonld-context.rs b/sophia/examples/jsonld-context.rs index a01f5aa8..6187012f 100644 --- a/sophia/examples/jsonld-context.rs +++ b/sophia/examples/jsonld-context.rs @@ -1,4 +1,4 @@ -//! Convert a JSON-LD file to TriG, using an optional external context. +//! Convert a JSON-LD file to `TriG`, using an optional external context. //! //! usage: cargo run --example jsonld-context [context file] //! @@ -20,14 +20,13 @@ fn main() -> Result<(), Box> { let context_path = args.next(); if let Some(context_path) = &context_path { eprintln!( - "Loading {} with @context from {}", - json_ld_path, context_path + "Loading {json_ld_path} with @context from {context_path}" ); } else { - eprintln!("Loading {}", json_ld_path); + eprintln!("Loading {json_ld_path}"); } let json_str = std::fs::read_to_string(&json_ld_path) - .unwrap_or_else(|e| panic!("Could not read file {}: {}", json_ld_path, e)); + .unwrap_or_else(|e| panic!("Could not read file {json_ld_path}: {e}")); let mut options = JsonLdOptions::new().with_expansion_policy(Policy::Standard); if let Some(context_path) = context_path { diff --git a/sophia/examples/parse.rs b/sophia/examples/parse.rs index d3b98985..7a7a4dc9 100644 --- a/sophia/examples/parse.rs +++ b/sophia/examples/parse.rs @@ -5,7 +5,7 @@ //! Alternatively, the input file name can be provided as a second argument, //! which will also set the base IRI to the corresponding file: URL. //! -//! The base IRI can be overridden via the environment variable SOPHIA_BASE. +//! The base IRI can be overridden via the environment variable `SOPHIA_BASE`. //! //! Recognized formats are: //! 
- [`ntriples`](https://www.w3.org/TR/n-triples/) (alias `nt`) @@ -46,13 +46,13 @@ fn main() { eprintln!("Cannot guess format of stdin"); std::process::exit(-2); }; - format = match filename.rsplit(".").next() { + format = match filename.rsplit('.').next() { Some("nt") => "ntriples", Some("nq") => "nquads", Some("ttl") => "turtle", Some("trig") => "trig", - Some("jsonld") | Some("json") => "jsonld", - Some("rdf") | Some("xml") => "rdfxml", + Some("jsonld" | "json") => "jsonld", + Some("rdf" | "xml") => "rdfxml", _ => { eprintln!("Cannot guess format of {filename}"); std::process::exit(-3); @@ -84,7 +84,7 @@ fn main() { #[cfg(feature = "jsonld")] "json-ld" | "jsonld" => { let options = JsonLdOptions::new() - .with_base(base.clone().unwrap().map_unchecked(std::sync::Arc::from)); + .with_base(base.unwrap().map_unchecked(std::sync::Arc::from)); let loader_factory = sophia::jsonld::loader::FileUrlLoader::default; #[cfg(feature = "http_client")] let loader_factory = || { @@ -99,12 +99,12 @@ fn main() { #[cfg(feature = "xml")] "rdfxml" | "rdf" => dump_triples(input, RdfXmlParser { base }), _ => { - eprintln!("Unrecognized format: {}", format); + eprintln!("Unrecognized format: {format}"); std::process::exit(-1); } }; if let Err(msg) = res { - eprintln!("{}", msg); + eprintln!("{msg}"); std::process::exit(1); } } @@ -116,8 +116,8 @@ fn dump_triples>(input: Input, p: P) -> Result<(), String let mut ser = NtSerializer::new(output); match ser.serialize_triples(triple_source) { Ok(_) => Ok(()), - Err(SourceError(e)) => Err(format!("Error while parsing input: {}", e)), - Err(SinkError(e)) => Err(format!("Error while writing quads: {}", e)), + Err(SourceError(e)) => Err(format!("Error while parsing input: {e}")), + Err(SinkError(e)) => Err(format!("Error while writing quads: {e}")), } } @@ -128,8 +128,8 @@ fn dump_quads>(input: Input, p: P) -> Result<(), String> { let mut ser = NqSerializer::new(output); match ser.serialize_quads(quad_source) { Ok(_) => Ok(()), - Err(SourceError(e)) => Err(format!("Error while parsing input: {}", e)), - Err(SinkError(e)) => Err(format!("Error while writing quads: {}", e)), + Err(SourceError(e)) => Err(format!("Error while parsing input: {e}")), + Err(SinkError(e)) => Err(format!("Error while writing quads: {e}")), } } @@ -150,8 +150,8 @@ impl Input { impl Read for Input { fn read(&mut self, buf: &mut [u8]) -> std::io::Result { match self { - Input::Stdin(b) => b.read(buf), - Input::File(b) => b.read(buf), + Self::Stdin(b) => b.read(buf), + Self::File(b) => b.read(buf), } } } @@ -159,15 +159,15 @@ impl Read for Input { impl BufRead for Input { fn fill_buf(&mut self) -> std::io::Result<&[u8]> { match self { - Input::Stdin(b) => b.fill_buf(), - Input::File(b) => b.fill_buf(), + Self::Stdin(b) => b.fill_buf(), + Self::File(b) => b.fill_buf(), } } fn consume(&mut self, amt: usize) { match self { - Input::Stdin(b) => b.consume(amt), - Input::File(b) => b.consume(amt), + Self::Stdin(b) => b.consume(amt), + Self::File(b) => b.consume(amt), } } } diff --git a/sophia/examples/serialize.rs b/sophia/examples/serialize.rs index 67dbd8fe..254c7b03 100644 --- a/sophia/examples/serialize.rs +++ b/sophia/examples/serialize.rs @@ -70,12 +70,12 @@ fn main() { serialize_triples(quad_source, ser) } _ => { - eprintln!("Unrecognized format: {}", format); + eprintln!("Unrecognized format: {format}"); std::process::exit(-1); } }; if let Err(msg) = res { - eprintln!("{}", msg); + eprintln!("{msg}"); std::process::exit(1); } } @@ -87,8 +87,8 @@ fn serialize_triples( let triple_source = 
quad_source.filter_quads(|q| q.g().is_none()).to_triples(); match ser.serialize_triples(triple_source) { Ok(_) => Ok(()), - Err(SourceError(e)) => Err(format!("Error while parsing input: {}", e)), - Err(SinkError(e)) => Err(format!("Error while serializing triples: {}", e)), + Err(SourceError(e)) => Err(format!("Error while parsing input: {e}")), + Err(SinkError(e)) => Err(format!("Error while serializing triples: {e}")), } } @@ -98,7 +98,7 @@ fn serialize_quads( ) -> Result<(), String> { match ser.serialize_quads(quad_source) { Ok(_) => Ok(()), - Err(SourceError(e)) => Err(format!("Error while parsing input: {}", e)), - Err(SinkError(e)) => Err(format!("Error while serializing quads: {}", e)), + Err(SourceError(e)) => Err(format!("Error while parsing input: {e}")), + Err(SinkError(e)) => Err(format!("Error while serializing quads: {e}")), } } diff --git a/term/src/_generic.rs b/term/src/_generic.rs index b400b0ce..aea46d98 100644 --- a/term/src/_generic.rs +++ b/term/src/_generic.rs @@ -24,8 +24,8 @@ impl> GenericLiteral { /// The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form) of this literal pub fn get_lexical_form(&self) -> &str { match self { - GenericLiteral::Typed(lex, ..) => lex, - GenericLiteral::LanguageString(lex, ..) => lex, + Self::Typed(lex, ..) => lex, + Self::LanguageString(lex, ..) => lex, } .borrow() } @@ -33,16 +33,16 @@ impl> GenericLiteral { /// The [datatype](https://www.w3.org/TR/rdf11-concepts/#dfn-datatype-iri) of this literal pub fn get_datatype(&self) -> IriRef<&str> { match self { - GenericLiteral::Typed(_, dt) => dt.as_ref(), - GenericLiteral::LanguageString(..) => RDF_LANG_STRING.as_ref(), + Self::Typed(_, dt) => dt.as_ref(), + Self::LanguageString(..) => RDF_LANG_STRING.as_ref(), } } /// The [language tag](https://www.w3.org/TR/rdf11-concepts/#dfn-language-tag) of this literal, if any pub fn get_language_tag(&self) -> Option> { match self { - GenericLiteral::Typed(..) => None, - GenericLiteral::LanguageString(_, tag) => Some(tag.as_ref()), + Self::Typed(..) => None, + Self::LanguageString(_, tag) => Some(tag.as_ref()), } } } @@ -81,14 +81,14 @@ impl + for<'x> From<&'x str>> TryFromTerm for GenericLiteral { let lex = unsafe { term.lexical_form().unwrap_unchecked() }; let lex = T::from(&lex); if let Some(tag) = term.language_tag() { - Ok(GenericLiteral::LanguageString( + Ok(Self::LanguageString( lex, tag.map_unchecked(|txt| T::from(&txt)), )) } else { // the following is safe because we checked term.kind() let dt = unsafe { term.datatype().unwrap_unchecked() }; - Ok(GenericLiteral::Typed( + Ok(Self::Typed( lex, dt.map_unchecked(|txt| T::from(&txt)), )) @@ -109,7 +109,7 @@ impl + Debug> Eq for GenericLiteral {} impl + Debug> std::hash::Hash for GenericLiteral { fn hash(&self, state: &mut H) { - Term::hash(self, state) + Term::hash(self, state); } } @@ -170,6 +170,6 @@ mod test { #[test] fn generic_literal_from_iri_errs() { - assert!(GenericLiteral::::try_from_term(rdf::type_).is_err()) + assert!(GenericLiteral::::try_from_term(rdf::type_).is_err()); } } diff --git a/term/src/_macro.rs b/term/src/_macro.rs index 4c54b471..b9d4ce81 100644 --- a/term/src/_macro.rs +++ b/term/src/_macro.rs @@ -330,12 +330,12 @@ macro_rules! 
gen_stash { impl $type_name { /// Create a new empty stash - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Default::default() } /// Retrieve a value from the stash, if present - pub fn get(&self, probe: &str) -> Option<&W> { + #[must_use] pub fn get(&self, probe: &str) -> Option<&W> { self.store.get(probe) } @@ -352,12 +352,12 @@ macro_rules! gen_stash { } /// How many values are stored in this stash - pub fn len(&self) -> usize { + #[must_use] pub fn len(&self) -> usize { self.store.len() } /// Is this stash empty? - pub fn is_empty(&self) -> bool { + #[must_use] pub fn is_empty(&self) -> bool { self.store.is_empty() } } diff --git a/turtle/src/parser/gnq.rs b/turtle/src/parser/gnq.rs index 442d2f77..8e9b2912 100644 --- a/turtle/src/parser/gnq.rs +++ b/turtle/src/parser/gnq.rs @@ -3,7 +3,7 @@ //! [N-Quads]: https://www.w3.org/TR/n-quads/ use rio_turtle::GeneralizedNQuadsParser as RioGNQParser; use sophia_api::parser::QuadParser; -use sophia_rio::parser::*; +use sophia_rio::parser::GeneralizedRioSource; use std::io::BufRead; /// N-Quads parser based on RIO. diff --git a/turtle/src/parser/gtrig.rs b/turtle/src/parser/gtrig.rs index 5fce276c..9f0be935 100644 --- a/turtle/src/parser/gtrig.rs +++ b/turtle/src/parser/gtrig.rs @@ -1,12 +1,12 @@ -//! Adapter for the Generalized TriG parser from [RIO](https://github.com/Tpt/rio/blob/master/turtle/src/gtrig.rs) +//! Adapter for the Generalized `TriG` parser from [RIO](https://github.com/Tpt/rio/blob/master/turtle/src/gtrig.rs) use rio_turtle::GTriGParser as RioGTriGParser; use sophia_api::parser::QuadParser; use sophia_iri::Iri; -use sophia_rio::parser::*; +use sophia_rio::parser::GeneralizedRioSource; use std::io::BufRead; -/// TriG parser based on RIO. +/// `TriG` parser based on RIO. #[derive(Clone, Debug, Default)] pub struct GTriGParser { /// The base IRI used by this parser to resolve relative IRI-references. @@ -47,7 +47,7 @@ mod test { #[test] fn test_simple_gtrig_string() -> std::result::Result<(), Box> { - let gtrig = r#" + let gtrig = r" @prefix : . <#me> :knows _:alice {| @@ -56,7 +56,7 @@ mod test { { _:alice a :Person ; :name ?name. } - "#; + "; let mut d = MyDataset::new(); let p = GTriGParser { diff --git a/turtle/src/parser/nq.rs b/turtle/src/parser/nq.rs index adac0bf3..0b0bef43 100644 --- a/turtle/src/parser/nq.rs +++ b/turtle/src/parser/nq.rs @@ -3,7 +3,7 @@ //! [N-Quads]: https://www.w3.org/TR/n-quads/ use rio_turtle::NQuadsParser as RioNQParser; use sophia_api::parser::QuadParser; -use sophia_rio::parser::*; +use sophia_rio::parser::StrictRioQuadSource; use std::io::BufRead; /// N-Quads parser based on RIO. diff --git a/turtle/src/parser/nt.rs b/turtle/src/parser/nt.rs index 825f26af..ec3b1756 100644 --- a/turtle/src/parser/nt.rs +++ b/turtle/src/parser/nt.rs @@ -3,7 +3,7 @@ //! [N-Triples]: https://www.w3.org/TR/n-triples/ use rio_turtle::NTriplesParser as RioNTParser; use sophia_api::parser::TripleParser; -use sophia_rio::parser::*; +use sophia_rio::parser::StrictRioTripleSource; use std::io::BufRead; /// N-Triples parser based on RIO. diff --git a/turtle/src/parser/trig.rs b/turtle/src/parser/trig.rs index 6ef9ace5..40d9f14e 100644 --- a/turtle/src/parser/trig.rs +++ b/turtle/src/parser/trig.rs @@ -1,12 +1,12 @@ -//! Adapter for the TriG parser from [RIO](https://github.com/Tpt/rio/blob/master/turtle/src/turtle.rs) +//! 
Adapter for the `TriG` parser from [RIO](https://github.com/Tpt/rio/blob/master/turtle/src/turtle.rs) use rio_turtle::TriGParser as RioTriGParser; use sophia_api::parser::QuadParser; use sophia_iri::Iri; -use sophia_rio::parser::*; +use sophia_rio::parser::StrictRioQuadSource; use std::io::BufRead; -/// TriG parser based on RIO. +/// `TriG` parser based on RIO. #[derive(Clone, Debug, Default)] pub struct TriGParser { /// The base IRI used by this parser to resolve relative IRI-references. diff --git a/turtle/src/parser/turtle.rs b/turtle/src/parser/turtle.rs index a5531be2..1c1c8f80 100644 --- a/turtle/src/parser/turtle.rs +++ b/turtle/src/parser/turtle.rs @@ -2,7 +2,7 @@ use rio_turtle::TurtleParser as RioTurtleParser; use sophia_api::parser::TripleParser; use sophia_iri::Iri; -use sophia_rio::parser::*; +use sophia_rio::parser::StrictRioTripleSource; use std::io::BufRead; /// Turtle parser based on RIO. diff --git a/turtle/src/serializer/_pretty.rs b/turtle/src/serializer/_pretty.rs index 62c34570..79e5ed98 100644 --- a/turtle/src/serializer/_pretty.rs +++ b/turtle/src/serializer/_pretty.rs @@ -1,9 +1,9 @@ -//! Utility code for pretty-printing Turtle and TriG. +//! Utility code for pretty-printing Turtle and `TriG`. //! //! Possible improvements: -//! 1. PrettifiableDataset should encapsulate some of the "indexes" built by Prettifier -//! (labelled, subject_types, named_graphs) -//! and build directly in CollectibleDataset::from_quad_source(). +//! 1. `PrettifiableDataset` should encapsulate some of the "indexes" built by Prettifier +//! (labelled, `subject_types`, `named_graphs`) +//! and build directly in `CollectibleDataset::from_quad_source()`. //! //! 2. Instead of writing directly to the output, //! generate a hierarchical structure, @@ -29,10 +29,10 @@ use std::ops::Range; pub type PrettifiableDataset<'a> = BTreeSet>>; -/// Serialize `dataset` in pretty TriG on `write`, using the given `config`. +/// Serialize `dataset` in pretty `TriG` on `write`, using the given `config`. /// /// NB: if dataset only contains a default graph, -/// the resulting TriG will be valid Turtle. +/// the resulting `TriG` will be valid Turtle. pub fn prettify( dataset: PrettifiableDataset<'_>, mut write: W, @@ -51,7 +51,7 @@ where Ok(()) } -/// write the prefix declarations of the given prefix_map, using SPARQL style. +/// write the prefix declarations of the given `prefix_map`, using SPARQL style. fn write_prefixes(mut write: W, prefix_map: &P) -> io::Result<()> where W: io::Write, @@ -271,7 +271,7 @@ impl<'a, W: Write> Prettifier<'a, W> { } fn write_term(&mut self, term: &'a SimpleTerm<'a>) -> io::Result<()> { - use TermKind::*; + use TermKind::{BlankNode, Iri, Literal, Triple, Variable}; match term.kind() { Iri => self.write_iri(&term.iri().unwrap()), BlankNode => self.write_bnode(term), @@ -414,7 +414,7 @@ impl<'a, W: Write> Prettifier<'a, W> { /// blank nodes MUST be labelled (as opposed to described with square brackets) if /// - they are used in several named graphs, or /// - they are used several times as object, or -/// - they are used as predicate or graph_name, or +/// - they are used as predicate or `graph_name`, or /// - they are used in a quoted triple, or /// - they are involved in a blank node cycle. 
/// @@ -474,7 +474,7 @@ fn build_labelled<'a>(d: &'a PrettifiableDataset) -> BTreeSet<&'a SimpleTerm<'a> } } // detect blank node cycles - let keys: Vec<_> = profiles.keys().cloned().collect(); + let keys: Vec<_> = profiles.keys().copied().collect(); for key in keys { let profile = profiles.get_mut(&key).unwrap(); if profile.bad || profile.visited { @@ -544,7 +544,7 @@ fn build_subject_types<'a>( .map(|q| (q.g(), q.s())) .dedup() .map(|(g, s)| { - use TermKind::*; + use TermKind::{BlankNode, Triple}; let st = match s.kind() { BlankNode => { if !labelled.contains(&s) @@ -746,7 +746,7 @@ where // --------------------------------------------------------------------------------- #[cfg(test)] -pub(crate) mod test { +pub mod test { use super::*; #[test] diff --git a/turtle/src/serializer/nq.rs b/turtle/src/serializer/nq.rs index 63c3eccb..395f9a02 100644 --- a/turtle/src/serializer/nq.rs +++ b/turtle/src/serializer/nq.rs @@ -11,7 +11,7 @@ use super::nt::{write_term, write_triple}; use sophia_api::quad::Quad; -use sophia_api::serializer::*; +use sophia_api::serializer::{QuadSerializer, Stringifier}; use sophia_api::source::{QuadSource, StreamResult}; use std::io; @@ -30,17 +30,17 @@ where { /// Build a new N-Quads serializer writing to `write`, with the default config. #[inline] - pub fn new(write: W) -> NqSerializer { + pub fn new(write: W) -> Self { Self::new_with_config(write, NqConfig::default()) } /// Build a new N-Quads serializer writing to `write`, with the given config. - pub fn new_with_config(write: W, config: NqConfig) -> NqSerializer { - NqSerializer { config, write } + pub const fn new_with_config(write: W, config: NqConfig) -> Self { + Self { config, write } } /// Borrow this serializer's configuration. - pub fn config(&self) -> &NqConfig { + pub const fn config(&self) -> &NqConfig { &self.config } } @@ -78,20 +78,20 @@ where } .map_err(|e| io::Error::new(io::ErrorKind::Other, e)) }) - .map(|_| self) + .map(|()| self) } } impl NqSerializer> { /// Create a new serializer which targets a `String`. #[inline] - pub fn new_stringifier() -> Self { - NqSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - pub fn new_stringifier_with_config(config: NqConfig) -> Self { - NqSerializer::new_with_config(Vec::new(), config) + #[must_use] pub const fn new_stringifier_with_config(config: NqConfig) -> Self { + Self::new_with_config(Vec::new(), config) } } diff --git a/turtle/src/serializer/nt.rs b/turtle/src/serializer/nt.rs index 763b6b14..d9269721 100644 --- a/turtle/src/serializer/nt.rs +++ b/turtle/src/serializer/nt.rs @@ -10,7 +10,7 @@ //! [`BufWriter`]: https://doc.rust-lang.org/std/io/struct.BufWriter.html use sophia_api::ns::xsd; -use sophia_api::serializer::*; +use sophia_api::serializer::{Stringifier, TripleSerializer}; use sophia_api::source::{StreamResult, TripleSource}; use sophia_api::term::{Term, TermKind}; use sophia_api::triple::Triple; @@ -42,17 +42,17 @@ where { /// Build a new N-Triples serializer writing to `write`, with the default config. #[inline] - pub fn new(write: W) -> NtSerializer { + pub fn new(write: W) -> Self { Self::new_with_config(write, NtConfig::default()) } /// Build a new N-Triples serializer writing to `write`, with the given config. 
- pub fn new_with_config(write: W, config: NtConfig) -> NtSerializer { - NtSerializer { config, write } + pub const fn new_with_config(write: W, config: NtConfig) -> Self { + Self { config, write } } /// Borrow this serializer's configuration. - pub fn config(&self) -> &NtConfig { + pub const fn config(&self) -> &NtConfig { &self.config } } @@ -82,20 +82,20 @@ where } .map_err(|e| io::Error::new(io::ErrorKind::Other, e)) }) - .map(|_| self) + .map(|()| self) } } impl NtSerializer> { /// Create a new serializer which targets a `String`. #[inline] - pub fn new_stringifier() -> Self { - NtSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - pub fn new_stringifier_with_config(config: NtConfig) -> Self { - NtSerializer::new_with_config(Vec::new(), config) + #[must_use] pub const fn new_stringifier_with_config(config: NtConfig) -> Self { + Self::new_with_config(Vec::new(), config) } } @@ -125,7 +125,7 @@ where W: io::Write, T: Term, { - use TermKind::*; + use TermKind::{BlankNode, Iri, Literal, Triple, Variable}; match t.kind() { Iri => { w.write_all(b"<")?; @@ -139,20 +139,17 @@ where Literal => { w.write_all(b"\"")?; quoted_string(w, t.lexical_form().unwrap().as_bytes())?; - match t.language_tag() { - Some(tag) => { - w.write_all(b"\"@")?; - w.write_all(tag.as_bytes())?; - } - None => { - let dt = t.datatype().unwrap(); - if xsd::string != dt { - w.write_all(b"\"^^<")?; - w.write_all(dt.as_bytes())?; - w.write_all(b">")?; - } else { - w.write_all(b"\"")?; - } + if let Some(tag) = t.language_tag() { + w.write_all(b"\"@")?; + w.write_all(tag.as_bytes())?; + } else { + let dt = t.datatype().unwrap(); + if xsd::string != dt { + w.write_all(b"\"^^<")?; + w.write_all(dt.as_bytes())?; + w.write_all(b">")?; + } else { + w.write_all(b"\"")?; } } } diff --git a/turtle/src/serializer/trig.rs b/turtle/src/serializer/trig.rs index 2219f545..822cec48 100644 --- a/turtle/src/serializer/trig.rs +++ b/turtle/src/serializer/trig.rs @@ -13,7 +13,6 @@ use rio_turtle::TriGFormatter; use sophia_api::quad::Quad; use sophia_api::serializer::{QuadSerializer, Stringifier}; use sophia_api::source::{QuadSource, SinkError, SourceError, StreamResult}; -use sophia_api::term::Term; use sophia_rio::serializer::rio_format_quads; use std::io; @@ -34,17 +33,17 @@ where { /// Build a new Trig serializer writing to `write`, with the default config. #[inline] - pub fn new(write: W) -> TrigSerializer { + pub fn new(write: W) -> Self { Self::new_with_config(write, TrigConfig::default()) } /// Build a new Trig serializer writing to `write`, with the given config. - pub fn new_with_config(write: W, config: TrigConfig) -> TrigSerializer { - TrigSerializer { config, write } + pub const fn new_with_config(write: W, config: TrigConfig) -> Self { + Self { config, write } } /// Borrow this serializer's configuration. - pub fn config(&self) -> &TrigConfig { + pub const fn config(&self) -> &TrigConfig { &self.config } } @@ -67,8 +66,8 @@ where source .for_each_quad(|t| { let (spo, g) = t.spog(); - let spo = spo.map(|t| t.into_term()); - let g = g.map(|t| t.into_term()); + let spo = spo.map(sophia_api::prelude::Term::into_term); + let g = g.map(sophia_api::prelude::Term::into_term); dataset.insert((g, spo)); }) .map_err(SourceError)?; @@ -85,13 +84,13 @@ where impl TrigSerializer> { /// Create a new serializer which targets a `String`. 
#[inline] - pub fn new_stringifier() -> Self { - TrigSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - pub fn new_stringifier_with_config(config: TrigConfig) -> Self { - TrigSerializer::new_with_config(Vec::new(), config) + #[must_use] pub const fn new_stringifier_with_config(config: TrigConfig) -> Self { + Self::new_with_config(Vec::new(), config) } } @@ -128,16 +127,16 @@ pub(crate) mod test { r#"# lists GRAPH { ( 1 2 ( 3 4 ) 5 6 ), ("a" "b"). } "#, - r#"# subject lists + r"# subject lists GRAPH { (1 2 3) a . } - "#, - r#"# malformed list + ", + r"# malformed list PREFIX rdf: GRAPH { _:a rdf:first 42, 43; rdf:rest (44 45). _:b rdf:first 42; rdf:rest (43), (44). } - "#, + ", r#"# bnode cycles PREFIX : GRAPH { @@ -146,42 +145,42 @@ pub(crate) mod test { _:c :b "c"; :t _:c. } "#, - r#"# quoted triples + r"# quoted triples PREFIX : GRAPH { << :s :p :o1 >> :a :b. :s :p :o2 {| :c :d |}. } - "#, - r#"# blank node graph name + ", + r"# blank node graph name PREFIX : :lois :believes _:b. GRAPH _:b1 { :clark a :Human } - "#, + ", r#"# blank node sharred across graphs PREFIX : _:a :name "alice". GRAPH { _:a a :Person } "#, - r#"# list split over different graphs + r"# list split over different graphs PREFIX rdf: _:a rdf:first 42; rdf:rest _:b. GRAPH [] { _:b rdf:first 43; rdf:rest (). } - "#, - r#"# issue 149 + ", + r"# issue 149 PREFIX : :s :p :o . GRAPH :g { _:b :p2 :o2 } - "#, + ", ]; #[test] fn roundtrip_not_pretty() -> Result<(), Box> { for ttl in TESTS { - println!("==========\n{}\n----------", ttl); + println!("==========\n{ttl}\n----------"); let g1: Vec> = crate::parser::trig::parse_str(ttl).collect_quads()?; let out = TrigSerializer::new_stringifier() @@ -199,7 +198,7 @@ pub(crate) mod test { #[test] fn roundtrip_pretty() -> Result<(), Box> { for ttl in TESTS { - println!("==========\n{}\n----------", ttl); + println!("==========\n{ttl}\n----------"); let g1: Vec> = crate::parser::trig::parse_str(ttl).collect_quads()?; let config = TrigConfig::new().with_pretty(true); diff --git a/turtle/src/serializer/turtle.rs b/turtle/src/serializer/turtle.rs index 514aa58b..150986b7 100644 --- a/turtle/src/serializer/turtle.rs +++ b/turtle/src/serializer/turtle.rs @@ -39,7 +39,7 @@ impl TurtleConfig { /// If true, extra effort will be made to group related triples together, /// and to use the collection syntax whenever possible. /// This requires storing the whole graph in memory. - pub fn pretty(&self) -> bool { + #[must_use] pub const fn pretty(&self) -> bool { self.pretty } @@ -47,7 +47,7 @@ impl TurtleConfig { /// (defaults to a map containing rdf:, rdfs: and xsd:) /// /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`. - pub fn prefix_map(&self) -> &[PrefixMapPair] { + #[must_use] pub fn prefix_map(&self) -> &[PrefixMapPair] { &self.prefix_map } @@ -55,16 +55,16 @@ impl TurtleConfig { /// (defaults to `" "`, can only contain ASCII whitespaces) /// /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`. - pub fn indentation(&self) -> &str { + #[must_use] pub fn indentation(&self) -> &str { &self.indentation } /// Build a new default [`TurtleConfig`]. 
- pub fn new() -> Self { + #[must_use] pub fn new() -> Self { let pretty = false; let prefix_map = Self::default_prefix_map(); let indentation = " ".to_string(); - TurtleConfig { + Self { pretty, prefix_map, indentation, @@ -72,7 +72,7 @@ impl TurtleConfig { } /// Transform a [`TurtleConfig`] by setting the [`pretty`][`TurtleConfig::pretty`] flag. - pub fn with_pretty(mut self, b: bool) -> Self { + #[must_use] pub const fn with_pretty(mut self, b: bool) -> Self { self.pretty = b; self } @@ -84,7 +84,7 @@ impl TurtleConfig { } /// Transform a [`TurtleConfig`] by setting the [`prefix_map`][`TurtleConfig::prefix_map`] flag. - pub fn with_own_prefix_map(mut self, pm: Vec) -> Self { + #[must_use] pub fn with_own_prefix_map(mut self, pm: Vec) -> Self { self.prefix_map = pm; self } @@ -101,7 +101,7 @@ impl TurtleConfig { } /// Return the prefix map that is used when none is provided - pub fn default_prefix_map() -> Vec { + #[must_use] pub fn default_prefix_map() -> Vec { vec![ ( Prefix::new_unchecked("rdf".into()), @@ -121,7 +121,7 @@ impl TurtleConfig { impl Default for TurtleConfig { fn default() -> Self { - TurtleConfig::new() + Self::new() } } @@ -137,17 +137,17 @@ where { /// Build a new Turtle serializer writing to `write`, with the default config. #[inline] - pub fn new(write: W) -> TurtleSerializer { + pub fn new(write: W) -> Self { Self::new_with_config(write, TurtleConfig::default()) } /// Build a new Turtle serializer writing to `write`, with the given config. - pub fn new_with_config(write: W, config: TurtleConfig) -> TurtleSerializer { - TurtleSerializer { config, write } + pub const fn new_with_config(write: W, config: TurtleConfig) -> Self { + Self { config, write } } /// Borrow this serializer's configuration. - pub fn config(&self) -> &TurtleConfig { + pub const fn config(&self) -> &TurtleConfig { &self.config } } @@ -187,13 +187,13 @@ where impl TurtleSerializer> { /// Create a new serializer which targets a `String`. #[inline] - pub fn new_stringifier() -> Self { - TurtleSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - pub fn new_stringifier_with_config(config: TurtleConfig) -> Self { - TurtleSerializer::new_with_config(Vec::new(), config) + #[must_use] pub const fn new_stringifier_with_config(config: TurtleConfig) -> Self { + Self::new_with_config(Vec::new(), config) } } @@ -225,31 +225,31 @@ pub(crate) mod test { r#"# lists ( 1 2 ( 3 4 ) 5 6 ), ("a" "b"). "#, - r#"# subject lists + r"# subject lists (1 2 3) a . - "#, - r#"# malformed list + ", + r"# malformed list PREFIX rdf: _:a rdf:first 42, 43; rdf:rest (44 45). _:b rdf:first 42; rdf:rest (43), (44). - "#, + ", r#"# bnode cycles PREFIX : _:a :n "a"; :p [ :q [ :r _:a ]]. _:b :n "b"; :s [ :s _:b ]. _:c :b "c"; :t _:c. "#, - r#"# quoted triples + r"# quoted triples PREFIX : << :s :p :o1 >> :a :b. :s :p :o2 {| :c :d |}. 
- "#, + ", ]; #[test] fn roundtrip_not_pretty() -> Result<(), Box> { for ttl in TESTS { - println!("==========\n{}\n----------", ttl); + println!("==========\n{ttl}\n----------"); let g1: Vec<[SimpleTerm; 3]> = crate::parser::turtle::parse_str(ttl).collect_triples()?; @@ -269,7 +269,7 @@ pub(crate) mod test { #[test] fn roundtrip_pretty() -> Result<(), Box> { for ttl in TESTS { - println!("==========\n{}\n----------", ttl); + println!("==========\n{ttl}\n----------"); let g1: Vec<[SimpleTerm; 3]> = crate::parser::turtle::parse_str(ttl).collect_triples()?; let ugly = TurtleSerializer::new_stringifier() diff --git a/xml/src/parser.rs b/xml/src/parser.rs index e550bd1e..7902addf 100644 --- a/xml/src/parser.rs +++ b/xml/src/parser.rs @@ -6,7 +6,7 @@ use rio_xml::RdfXmlParser as RioRdfXmlParser; use sophia_api::parser::TripleParser; use sophia_iri::Iri; -use sophia_rio::parser::*; +use sophia_rio::parser::StrictRioTripleSource; use std::io::BufRead; /// N-Triples parser based on RIO. diff --git a/xml/src/serializer.rs b/xml/src/serializer.rs index ec4f42f1..c08df63f 100644 --- a/xml/src/serializer.rs +++ b/xml/src/serializer.rs @@ -25,17 +25,17 @@ pub struct RdfXmlConfig { impl RdfXmlConfig { /// Size of the indentation to use in the serialization. /// (defaults to 0, meaning no indentation nor linebreaks) - pub fn indentation(&self) -> usize { + #[must_use] pub const fn indentation(&self) -> usize { self.indentation } /// Build a new default [`RdfXmlConfig`] - pub fn new() -> Self { + #[must_use] pub fn new() -> Self { Default::default() } /// Transform an [`RdfXmlConfig`] by setting the [`indentation`](RdfXmlConfig::indentation). - pub fn with_indentation(mut self, i: usize) -> Self { + #[must_use] pub const fn with_indentation(mut self, i: usize) -> Self { self.indentation = i; self } @@ -53,17 +53,17 @@ where { /// Build a new N-Triples serializer writing to `write`, with the default config. #[inline] - pub fn new(write: W) -> RdfXmlSerializer { + pub fn new(write: W) -> Self { Self::new_with_config(write, RdfXmlConfig::default()) } /// Build a new N-Triples serializer writing to `write`, with the given config. - pub fn new_with_config(write: W, config: RdfXmlConfig) -> RdfXmlSerializer { - RdfXmlSerializer { config, write } + pub const fn new_with_config(write: W, config: RdfXmlConfig) -> Self { + Self { config, write } } /// Borrow this serializer's configuration. - pub fn config(&self) -> &RdfXmlConfig { + pub const fn config(&self) -> &RdfXmlConfig { &self.config } } @@ -97,13 +97,13 @@ where impl RdfXmlSerializer> { /// Create a new serializer which targets a `String`. #[inline] - pub fn new_stringifier() -> Self { - RdfXmlSerializer::new(Vec::new()) + #[must_use] pub fn new_stringifier() -> Self { + Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. 
#[inline] - pub fn new_stringifier_with_config(config: RdfXmlConfig) -> Self { - RdfXmlSerializer::new_with_config(Vec::new(), config) + #[must_use] pub const fn new_stringifier_with_config(config: RdfXmlConfig) -> Self { + Self::new_with_config(Vec::new(), config) } } @@ -140,7 +140,7 @@ pub(crate) mod test { #[test] fn roundtrip() -> Result<(), Box> { for rdfxml in TESTS { - println!("==========\n{}\n----------", rdfxml); + println!("==========\n{rdfxml}\n----------"); let g1: Vec<[SimpleTerm; 3]> = crate::parser::parse_str(rdfxml).collect_triples()?; let out = RdfXmlSerializer::new_stringifier() @@ -159,7 +159,7 @@ pub(crate) mod test { fn roundtrip_with_ident() -> Result<(), Box> { let config = RdfXmlConfig::new().with_indentation(4); for rdfxml in TESTS { - println!("==========\n{}\n----------", rdfxml); + println!("==========\n{rdfxml}\n----------"); let g1: Vec<[SimpleTerm; 3]> = crate::parser::parse_str(rdfxml).collect_triples()?; let out = RdfXmlSerializer::new_stringifier_with_config(config.clone()) From 848806128611e90992876fa950651c1edc7aca64 Mon Sep 17 00:00:00 2001 From: Mikhail Katychev Date: Tue, 8 Oct 2024 10:29:43 -0500 Subject: [PATCH 2/5] cargo fmt --- iri/src/_regex.rs | 12 ++++++++---- iri/src/_wrapper.rs | 2 +- isomorphism/src/dataset.rs | 2 +- isomorphism/src/test.rs | 12 +++--------- jsonld/src/context.rs | 4 ++-- jsonld/src/loader/file_url_loader.rs | 3 ++- jsonld/src/loader/static_loader.rs | 3 ++- jsonld/src/loader_factory.rs | 3 ++- jsonld/src/options.rs | 3 ++- jsonld/src/parser.rs | 3 ++- jsonld/src/serializer.rs | 6 ++++-- jsonld/src/serializer/engine.rs | 10 ++++++++-- jsonld/src/serializer/rdf_object.rs | 5 +---- jsonld/src/util_traits.rs | 5 ++++- resource/src/loader/_error.rs | 3 ++- resource/src/loader/_local.rs | 8 ++++++-- resource/src/resource/_iter.rs | 2 +- resource/src/resource/_struct.rs | 20 +++++++++++++++----- rio/src/model.rs | 5 ++++- rio/src/parser.rs | 6 +++++- sophia/examples/jsonld-context.rs | 4 +--- sophia/examples/parse.rs | 4 ++-- term/src/_generic.rs | 5 +---- term/src/_macro.rs | 12 ++++++++---- turtle/src/serializer/nq.rs | 6 ++++-- turtle/src/serializer/nt.rs | 6 ++++-- turtle/src/serializer/trig.rs | 6 ++++-- turtle/src/serializer/turtle.rs | 27 ++++++++++++++++++--------- xml/src/serializer.rs | 15 ++++++++++----- 29 files changed, 127 insertions(+), 75 deletions(-) diff --git a/iri/src/_regex.rs b/iri/src/_regex.rs index 09a9db1d..e46cbc68 100644 --- a/iri/src/_regex.rs +++ b/iri/src/_regex.rs @@ -8,7 +8,8 @@ use regex::Regex; /// is not `None`. /// Future implementations may be smarter about this. #[inline] -#[must_use] pub fn is_valid_suffixed_iri_ref(ns: &str, suffix: Option<&str>) -> bool { +#[must_use] +pub fn is_valid_suffixed_iri_ref(ns: &str, suffix: Option<&str>) -> bool { match suffix { None => is_valid_iri_ref(ns), Some(suffix) => { @@ -22,19 +23,22 @@ use regex::Regex; /// Check whether `txt` is a valid (absolute or relative) IRI reference. #[inline] -#[must_use] pub fn is_valid_iri_ref(txt: &str) -> bool { +#[must_use] +pub fn is_valid_iri_ref(txt: &str) -> bool { IRI_REGEX.is_match(txt) || IRELATIVE_REF_REGEX.is_match(txt) } /// Check whether `txt` is an absolute IRI reference. #[inline] -#[must_use] pub fn is_absolute_iri_ref(txt: &str) -> bool { +#[must_use] +pub fn is_absolute_iri_ref(txt: &str) -> bool { IRI_REGEX.is_match(txt) } /// Check whether `txt` is a relative IRI reference. 
#[inline] -#[must_use] pub fn is_relative_iri_ref(txt: &str) -> bool { +#[must_use] +pub fn is_relative_iri_ref(txt: &str) -> bool { IRELATIVE_REF_REGEX.is_match(txt) } diff --git a/iri/src/_wrapper.rs b/iri/src/_wrapper.rs index 10f6e22f..2c3f7ed6 100644 --- a/iri/src/_wrapper.rs +++ b/iri/src/_wrapper.rs @@ -1,7 +1,7 @@ //! I provide generic wrappers around `Borrow` types, //! guaranteeing that their underlying string is a valid IRI or IRI reference. use super::resolve::{BaseIri, BaseIriRef}; -use super::{InvalidIri, IsIri, IsIriRef, Result, is_absolute_iri_ref, is_valid_iri_ref, wrap}; +use super::{is_absolute_iri_ref, is_valid_iri_ref, wrap, InvalidIri, IsIri, IsIriRef, Result}; use std::borrow::Borrow; use std::fmt::Display; diff --git a/isomorphism/src/dataset.rs b/isomorphism/src/dataset.rs index 2edf9988..7e9260ab 100644 --- a/isomorphism/src/dataset.rs +++ b/isomorphism/src/dataset.rs @@ -1,5 +1,5 @@ use super::hash::hash_quad_with; -use super::iso_term::{IsoTerm, cmp_quads}; +use super::iso_term::{cmp_quads, IsoTerm}; use sophia_api::quad::{iter_spog, Quad}; use sophia_api::{ dataset::{DTerm, Dataset}, diff --git a/isomorphism/src/test.rs b/isomorphism/src/test.rs index 3d6e9493..b7dacbf8 100644 --- a/isomorphism/src/test.rs +++ b/isomorphism/src/test.rs @@ -149,9 +149,7 @@ fn quoted_triple() -> Result<(), Box> { fn make_chain(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); - let nodes: Vec<_> = (0..ids.len()) - .map(|i| MyTerm::Bnode(&ids[i..=i])) - .collect(); + let nodes: Vec<_> = (0..ids.len()).map(|i| MyTerm::Bnode(&ids[i..=i])).collect(); let mut dataset = Vec::with_capacity(ids.len() - 1); for i in 1..nodes.len() { dataset.push([nodes[i - 1], rel, nodes[i], nodes[i - 1]]); @@ -236,9 +234,7 @@ fn cycle_almost_pathological() -> Result<(), Box> { fn make_clique(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); - let nodes: Vec<_> = (0..ids.len()) - .map(|i| MyTerm::Bnode(&ids[i..=i])) - .collect(); + let nodes: Vec<_> = (0..ids.len()).map(|i| MyTerm::Bnode(&ids[i..=i])).collect(); let mut dataset = Vec::with_capacity(ids.len() * ids.len()); for n1 in &nodes { for n2 in &nodes { @@ -264,9 +260,7 @@ fn clique() -> Result<(), Box> { fn make_tree(ids: &'static str) -> Vec<[MyTerm; 4]> { let rel = MyTerm::Iri("tag:rel"); - let nodes: Vec<_> = (0..ids.len()) - .map(|i| MyTerm::Bnode(&ids[i..=i])) - .collect(); + let nodes: Vec<_> = (0..ids.len()).map(|i| MyTerm::Bnode(&ids[i..=i])).collect(); let mut dataset = Vec::with_capacity(ids.len() * ids.len()); let mut i = 0; while 2 * i < nodes.len() { diff --git a/jsonld/src/context.rs b/jsonld/src/context.rs index 04c65443..36d620dc 100644 --- a/jsonld/src/context.rs +++ b/jsonld/src/context.rs @@ -48,8 +48,8 @@ impl TryIntoContextRef for &str { fn try_into_context_ref(self) -> Result { let iri = ArcIri::new_unchecked("x-string://".into()); let doc = Value::parse_str(self, |span| locspan::Location::new(iri.clone(), span))?; - let context = Value::extract_context(doc) - .map_err(|e| format!("Could not extract @context: {e}"))?; + let context = + Value::extract_context(doc).map_err(|e| format!("Could not extract @context: {e}"))?; let rdoc = RemoteDocument::new(None, None, context); Ok(RemoteDocumentReference::Loaded(rdoc)) } diff --git a/jsonld/src/loader/file_url_loader.rs b/jsonld/src/loader/file_url_loader.rs index b0e158ae..aea2f903 100644 --- a/jsonld/src/loader/file_url_loader.rs +++ b/jsonld/src/loader/file_url_loader.rs @@ -68,7 +68,8 @@ impl Loader>, Location>>> for 
FileUrlLoader { impl FileUrlLoader { /// Creates a new file system loader with the given content `parser`. - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Self::default() } } diff --git a/jsonld/src/loader/static_loader.rs b/jsonld/src/loader/static_loader.rs index f6dda524..5d57b41d 100644 --- a/jsonld/src/loader/static_loader.rs +++ b/jsonld/src/loader/static_loader.rs @@ -33,7 +33,8 @@ impl Default for StaticLoader { impl StaticLoader { /// Creates a new [`StaticLoader`] - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Self::default() } diff --git a/jsonld/src/loader_factory.rs b/jsonld/src/loader_factory.rs index 1e34a85d..40c36e92 100644 --- a/jsonld/src/loader_factory.rs +++ b/jsonld/src/loader_factory.rs @@ -45,7 +45,8 @@ where { /// Create a new [`DefaultLoaderFactory`]. #[inline] - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Self::default() } } diff --git a/jsonld/src/options.rs b/jsonld/src/options.rs index 340942e0..4e035649 100644 --- a/jsonld/src/options.rs +++ b/jsonld/src/options.rs @@ -43,7 +43,8 @@ pub struct JsonLdOptions { impl JsonLdOptions> { /// Build a new JSON-LD options. - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Self::default() } } diff --git a/jsonld/src/parser.rs b/jsonld/src/parser.rs index 4f569cf9..055313db 100644 --- a/jsonld/src/parser.rs +++ b/jsonld/src/parser.rs @@ -51,7 +51,8 @@ impl Default for JsonLdParser> { impl JsonLdParser> { /// Make a new [`JsonLdParser`] with the default options - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Self { options: JsonLdOptions::default(), } diff --git a/jsonld/src/serializer.rs b/jsonld/src/serializer.rs index 2dcfd5ec..b0c61d37 100644 --- a/jsonld/src/serializer.rs +++ b/jsonld/src/serializer.rs @@ -115,7 +115,8 @@ pub struct JsonTarget(JsonValue<()>); impl Jsonifier { /// Create a new serializer which targets a [`JsonValue`]. #[inline] - #[must_use] pub fn new_jsonifier() -> Self { + #[must_use] + pub fn new_jsonifier() -> Self { Self::new(JsonTarget(JsonValue::Null)) } } @@ -165,7 +166,8 @@ pub type JsonLdStringifier = JsonLdSerializer, L>; impl JsonLdStringifier { /// Create a new serializer which targets a string. 
#[inline] - #[must_use] pub fn new_stringifier() -> Self { + #[must_use] + pub fn new_stringifier() -> Self { Self::new(Vec::new()) } } diff --git a/jsonld/src/serializer/engine.rs b/jsonld/src/serializer/engine.rs index 75582de2..2b55b921 100644 --- a/jsonld/src/serializer/engine.rs +++ b/jsonld/src/serializer/engine.rs @@ -1,6 +1,10 @@ use super::rdf_object::RdfObject; use crate::error::JsonLdError; -use crate::options::{ProcessingMode::{JsonLd1_0, JsonLd1_1}, JsonLdOptions, RdfDirection}; +use crate::options::{ + JsonLdOptions, + ProcessingMode::{JsonLd1_0, JsonLd1_1}, + RdfDirection, +}; use crate::util_traits::{HashMapUtil, QuadJsonLdUtil, TermJsonLdUtil, VecUtil}; use json_syntax::object::Object; use json_syntax::{Parse, Value as JsonValue}; @@ -122,7 +126,9 @@ impl<'a, L> Engine<'a, L> { where T: Term, { - if o.kind() == TermKind::Literal { RdfObject::try_from_term(o).unwrap() } else { + if o.kind() == TermKind::Literal { + RdfObject::try_from_term(o).unwrap() + } else { let o_id = o.as_id(); RdfObject::Node(self.index(g_id.to_string(), o_id.clone()), o_id) } diff --git a/jsonld/src/serializer/rdf_object.rs b/jsonld/src/serializer/rdf_object.rs index 5f053eb9..3edc93f0 100644 --- a/jsonld/src/serializer/rdf_object.rs +++ b/jsonld/src/serializer/rdf_object.rs @@ -10,10 +10,7 @@ pub enum RdfObject { impl RdfObject { pub const fn is_literal(&self) -> bool { - matches!( - self, - Self::LangString(..) | Self::TypedLiteral(..) - ) + matches!(self, Self::LangString(..) | Self::TypedLiteral(..)) } pub const fn is_node(&self) -> bool { matches!(self, Self::Node(..)) diff --git a/jsonld/src/util_traits.rs b/jsonld/src/util_traits.rs index 6eeaf7bb..e14573e2 100644 --- a/jsonld/src/util_traits.rs +++ b/jsonld/src/util_traits.rs @@ -1,6 +1,9 @@ //! Utility traits used internally by `JsonLdSerializer` use sophia_api::quad::Quad; -use sophia_api::term::{Term, TermKind::{BlankNode, Iri, Literal}}; +use sophia_api::term::{ + Term, + TermKind::{BlankNode, Iri, Literal}, +}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::HashMap; diff --git a/resource/src/loader/_error.rs b/resource/src/loader/_error.rs index 91aa2e68..9e5e6da4 100644 --- a/resource/src/loader/_error.rs +++ b/resource/src/loader/_error.rs @@ -25,7 +25,8 @@ pub enum LoaderError { impl LoaderError { /// Return the IRI that caused this error - #[must_use] pub fn iri(&self) -> IriBuf { + #[must_use] + pub fn iri(&self) -> IriBuf { let iri = match self { Self::UnsupportedIri(iri, _) => iri, Self::NotFound(iri) => iri, diff --git a/resource/src/loader/_local.rs b/resource/src/loader/_local.rs index fdb9a7f2..94d8bcc4 100644 --- a/resource/src/loader/_local.rs +++ b/resource/src/loader/_local.rs @@ -1,4 +1,7 @@ -use super::{util::{IriBuf, iri_buf}, Loader, LoaderError}; +use super::{ + util::{iri_buf, IriBuf}, + Loader, LoaderError, +}; use sophia_iri::Iri; use std::borrow::Borrow; use std::fmt::Debug; @@ -41,7 +44,8 @@ impl LocalLoader { } /// Wrap this loader into an `Arc`. 
- #[must_use] pub fn arced(self) -> Arc { + #[must_use] + pub fn arced(self) -> Arc { Arc::new(self) } diff --git a/resource/src/resource/_iter.rs b/resource/src/resource/_iter.rs index a991b875..dc6d0f58 100644 --- a/resource/src/resource/_iter.rs +++ b/resource/src/resource/_iter.rs @@ -1,6 +1,6 @@ use std::marker::PhantomData; -use super::{Resource, ResourceError::NoValueFor, ResourceError, ResourceResult, TypedResource}; +use super::{Resource, ResourceError, ResourceError::NoValueFor, ResourceResult, TypedResource}; use crate::Loader; use sophia_api::{graph::CollectibleGraph, prelude::*, term::SimpleTerm}; diff --git a/resource/src/resource/_struct.rs b/resource/src/resource/_struct.rs index d915397f..e7b255ca 100644 --- a/resource/src/resource/_struct.rs +++ b/resource/src/resource/_struct.rs @@ -8,7 +8,13 @@ use sophia_iri::is_absolute_iri_ref; use std::borrow::Borrow; use std::sync::Arc; -use super::{ResourceError::{GraphError, IriNotAbsolute, LoaderError, NoValueFor, UnexpectedMultipleValueFor}, LadderResourceIterator, LadderTermIterator, LadderTypedIterator, ResourceError, ResourceResult, TypedResource}; +use super::{ + LadderResourceIterator, LadderTermIterator, LadderTypedIterator, ResourceError, + ResourceError::{ + GraphError, IriNotAbsolute, LoaderError, NoValueFor, UnexpectedMultipleValueFor, + }, + ResourceResult, TypedResource, +}; /// A [`Resource`] represents a specific node in a given graph. #[derive(Debug)] @@ -36,22 +42,26 @@ where } /// The identifying term of this resource - #[must_use] pub const fn id(&self) -> &SimpleTerm<'static> { + #[must_use] + pub const fn id(&self) -> &SimpleTerm<'static> { &self.id } /// The URL of the underlying graph of this resource - #[must_use] pub const fn base(&self) -> Option<&Iri> { + #[must_use] + pub const fn base(&self) -> Option<&Iri> { self.base.as_ref() } /// The underlying graph of this resource - #[must_use] pub const fn graph(&self) -> &Arc { + #[must_use] + pub const fn graph(&self) -> &Arc { &self.graph } /// The loader used to load neighbouring resources - #[must_use] pub const fn loader(&self) -> &Arc { + #[must_use] + pub const fn loader(&self) -> &Arc { &self.loader } diff --git a/rio/src/model.rs b/rio/src/model.rs index 887d9583..8554182a 100644 --- a/rio/src/model.rs +++ b/rio/src/model.rs @@ -8,7 +8,10 @@ //! which ensures the validity of the underlying data. //! //! The [`Trusted`] wrapper is used to materialize the fact that we trust the underlying data of Rio types. -use rio_api::model::{Quad as RioQuad, Term as RioTerm, Triple as RioTriple, BlankNode, GeneralizedQuad, GeneralizedTerm, GraphName, Literal, NamedNode, Variable}; +use rio_api::model::{ + BlankNode, GeneralizedQuad, GeneralizedTerm, GraphName, Literal, NamedNode, Quad as RioQuad, + Term as RioTerm, Triple as RioTriple, Variable, +}; use sophia_api::ns::{rdf, xsd}; use sophia_api::quad::{QBorrowTerm, Quad, Spog}; use sophia_api::term::{BnodeId, LanguageTag, Term, TermKind, VarName}; diff --git a/rio/src/parser.rs b/rio/src/parser.rs index 2aaa3a74..243e1832 100644 --- a/rio/src/parser.rs +++ b/rio/src/parser.rs @@ -9,7 +9,11 @@ use std::error::Error; use crate::model::Trusted; -use sophia_api::source::{StreamError, StreamError::{SinkError, SourceError}, StreamResult}; +use sophia_api::source::{ + StreamError, + StreamError::{SinkError, SourceError}, + StreamResult, +}; /// Wrap a Rio [`TriplesParser`](rio_api::parser::TriplesParser) /// into a Sophia [`TripleSource`](sophia_api::source::TripleSource). 
diff --git a/sophia/examples/jsonld-context.rs b/sophia/examples/jsonld-context.rs index 6187012f..778943c4 100644 --- a/sophia/examples/jsonld-context.rs +++ b/sophia/examples/jsonld-context.rs @@ -19,9 +19,7 @@ fn main() -> Result<(), Box> { let json_ld_path = args.nth(1).expect("Missing jsonld file."); let context_path = args.next(); if let Some(context_path) = &context_path { - eprintln!( - "Loading {json_ld_path} with @context from {context_path}" - ); + eprintln!("Loading {json_ld_path} with @context from {context_path}"); } else { eprintln!("Loading {json_ld_path}"); } diff --git a/sophia/examples/parse.rs b/sophia/examples/parse.rs index 7a7a4dc9..bf283a06 100644 --- a/sophia/examples/parse.rs +++ b/sophia/examples/parse.rs @@ -83,8 +83,8 @@ fn main() { "gtrig" => dump_quads(input, GTriGParser { base }), #[cfg(feature = "jsonld")] "json-ld" | "jsonld" => { - let options = JsonLdOptions::new() - .with_base(base.unwrap().map_unchecked(std::sync::Arc::from)); + let options = + JsonLdOptions::new().with_base(base.unwrap().map_unchecked(std::sync::Arc::from)); let loader_factory = sophia::jsonld::loader::FileUrlLoader::default; #[cfg(feature = "http_client")] let loader_factory = || { diff --git a/term/src/_generic.rs b/term/src/_generic.rs index aea46d98..903424ee 100644 --- a/term/src/_generic.rs +++ b/term/src/_generic.rs @@ -88,10 +88,7 @@ impl + for<'x> From<&'x str>> TryFromTerm for GenericLiteral { } else { // the following is safe because we checked term.kind() let dt = unsafe { term.datatype().unwrap_unchecked() }; - Ok(Self::Typed( - lex, - dt.map_unchecked(|txt| T::from(&txt)), - )) + Ok(Self::Typed(lex, dt.map_unchecked(|txt| T::from(&txt)))) } } else { Err(GenericLiteralError(term.kind())) diff --git a/term/src/_macro.rs b/term/src/_macro.rs index b9d4ce81..42dce4fd 100644 --- a/term/src/_macro.rs +++ b/term/src/_macro.rs @@ -330,12 +330,14 @@ macro_rules! gen_stash { impl $type_name { /// Create a new empty stash - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { Default::default() } /// Retrieve a value from the stash, if present - #[must_use] pub fn get(&self, probe: &str) -> Option<&W> { + #[must_use] + pub fn get(&self, probe: &str) -> Option<&W> { self.store.get(probe) } @@ -352,12 +354,14 @@ macro_rules! gen_stash { } /// How many values are stored in this stash - #[must_use] pub fn len(&self) -> usize { + #[must_use] + pub fn len(&self) -> usize { self.store.len() } /// Is this stash empty? - #[must_use] pub fn is_empty(&self) -> bool { + #[must_use] + pub fn is_empty(&self) -> bool { self.store.is_empty() } } diff --git a/turtle/src/serializer/nq.rs b/turtle/src/serializer/nq.rs index 395f9a02..1bc56b38 100644 --- a/turtle/src/serializer/nq.rs +++ b/turtle/src/serializer/nq.rs @@ -85,12 +85,14 @@ where impl NqSerializer> { /// Create a new serializer which targets a `String`. #[inline] - #[must_use] pub fn new_stringifier() -> Self { + #[must_use] + pub fn new_stringifier() -> Self { Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. 
#[inline] - #[must_use] pub const fn new_stringifier_with_config(config: NqConfig) -> Self { + #[must_use] + pub const fn new_stringifier_with_config(config: NqConfig) -> Self { Self::new_with_config(Vec::new(), config) } } diff --git a/turtle/src/serializer/nt.rs b/turtle/src/serializer/nt.rs index d9269721..c53ff271 100644 --- a/turtle/src/serializer/nt.rs +++ b/turtle/src/serializer/nt.rs @@ -89,12 +89,14 @@ where impl NtSerializer> { /// Create a new serializer which targets a `String`. #[inline] - #[must_use] pub fn new_stringifier() -> Self { + #[must_use] + pub fn new_stringifier() -> Self { Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - #[must_use] pub const fn new_stringifier_with_config(config: NtConfig) -> Self { + #[must_use] + pub const fn new_stringifier_with_config(config: NtConfig) -> Self { Self::new_with_config(Vec::new(), config) } } diff --git a/turtle/src/serializer/trig.rs b/turtle/src/serializer/trig.rs index 822cec48..219d35b5 100644 --- a/turtle/src/serializer/trig.rs +++ b/turtle/src/serializer/trig.rs @@ -84,12 +84,14 @@ where impl TrigSerializer> { /// Create a new serializer which targets a `String`. #[inline] - #[must_use] pub fn new_stringifier() -> Self { + #[must_use] + pub fn new_stringifier() -> Self { Self::new(Vec::new()) } /// Create a new serializer which targets a `String` with a custom config. #[inline] - #[must_use] pub const fn new_stringifier_with_config(config: TrigConfig) -> Self { + #[must_use] + pub const fn new_stringifier_with_config(config: TrigConfig) -> Self { Self::new_with_config(Vec::new(), config) } } diff --git a/turtle/src/serializer/turtle.rs b/turtle/src/serializer/turtle.rs index 150986b7..7369f86a 100644 --- a/turtle/src/serializer/turtle.rs +++ b/turtle/src/serializer/turtle.rs @@ -39,7 +39,8 @@ impl TurtleConfig { /// If true, extra effort will be made to group related triples together, /// and to use the collection syntax whenever possible. /// This requires storing the whole graph in memory. - #[must_use] pub const fn pretty(&self) -> bool { + #[must_use] + pub const fn pretty(&self) -> bool { self.pretty } @@ -47,7 +48,8 @@ impl TurtleConfig { /// (defaults to a map containing rdf:, rdfs: and xsd:) /// /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`. - #[must_use] pub fn prefix_map(&self) -> &[PrefixMapPair] { + #[must_use] + pub fn prefix_map(&self) -> &[PrefixMapPair] { &self.prefix_map } @@ -55,12 +57,14 @@ impl TurtleConfig { /// (defaults to `" "`, can only contain ASCII whitespaces) /// /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`. - #[must_use] pub fn indentation(&self) -> &str { + #[must_use] + pub fn indentation(&self) -> &str { &self.indentation } /// Build a new default [`TurtleConfig`]. - #[must_use] pub fn new() -> Self { + #[must_use] + pub fn new() -> Self { let pretty = false; let prefix_map = Self::default_prefix_map(); let indentation = " ".to_string(); @@ -72,7 +76,8 @@ impl TurtleConfig { } /// Transform a [`TurtleConfig`] by setting the [`pretty`][`TurtleConfig::pretty`] flag. - #[must_use] pub const fn with_pretty(mut self, b: bool) -> Self { + #[must_use] + pub const fn with_pretty(mut self, b: bool) -> Self { self.pretty = b; self } @@ -84,7 +89,8 @@ impl TurtleConfig { } /// Transform a [`TurtleConfig`] by setting the [`prefix_map`][`TurtleConfig::prefix_map`] flag. 
diff --git a/turtle/src/serializer/turtle.rs b/turtle/src/serializer/turtle.rs
index 150986b7..7369f86a 100644
--- a/turtle/src/serializer/turtle.rs
+++ b/turtle/src/serializer/turtle.rs
@@ -39,7 +39,8 @@ impl TurtleConfig {
     /// If true, extra effort will be made to group related triples together,
     /// and to use the collection syntax whenever possible.
     /// This requires storing the whole graph in memory.
-    #[must_use] pub const fn pretty(&self) -> bool {
+    #[must_use]
+    pub const fn pretty(&self) -> bool {
         self.pretty
     }

@@ -47,7 +48,8 @@ impl TurtleConfig {
     /// (defaults to a map containing rdf:, rdfs: and xsd:)
     ///
     /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`.
-    #[must_use] pub fn prefix_map(&self) -> &[PrefixMapPair] {
+    #[must_use]
+    pub fn prefix_map(&self) -> &[PrefixMapPair] {
         &self.prefix_map
     }

@@ -55,12 +57,14 @@ impl TurtleConfig {
     /// (defaults to `" "`, can only contain ASCII whitespaces)
     ///
     /// NB: currently, only used if [`pretty`][`TurtleConfig::pretty`] is `true`.
-    #[must_use] pub fn indentation(&self) -> &str {
+    #[must_use]
+    pub fn indentation(&self) -> &str {
         &self.indentation
     }

     /// Build a new default [`TurtleConfig`].
-    #[must_use] pub fn new() -> Self {
+    #[must_use]
+    pub fn new() -> Self {
         let pretty = false;
         let prefix_map = Self::default_prefix_map();
         let indentation = " ".to_string();
@@ -72,7 +76,8 @@ impl TurtleConfig {
     }

     /// Transform a [`TurtleConfig`] by setting the [`pretty`][`TurtleConfig::pretty`] flag.
-    #[must_use] pub const fn with_pretty(mut self, b: bool) -> Self {
+    #[must_use]
+    pub const fn with_pretty(mut self, b: bool) -> Self {
         self.pretty = b;
         self
     }
@@ -84,7 +89,8 @@ impl TurtleConfig {
     }

     /// Transform a [`TurtleConfig`] by setting the [`prefix_map`][`TurtleConfig::prefix_map`] flag.
-    #[must_use] pub fn with_own_prefix_map(mut self, pm: Vec<PrefixMapPair>) -> Self {
+    #[must_use]
+    pub fn with_own_prefix_map(mut self, pm: Vec<PrefixMapPair>) -> Self {
         self.prefix_map = pm;
         self
     }
@@ -101,7 +107,8 @@ impl TurtleConfig {
     }

     /// Return the prefix map that is used when none is provided
-    #[must_use] pub fn default_prefix_map() -> Vec<PrefixMapPair> {
+    #[must_use]
+    pub fn default_prefix_map() -> Vec<PrefixMapPair> {
         vec![
             (
                 Prefix::new_unchecked("rdf".into()),
@@ -187,12 +194,14 @@ where
 impl TurtleSerializer<Vec<u8>> {
     /// Create a new serializer which targets a `String`.
     #[inline]
-    #[must_use] pub fn new_stringifier() -> Self {
+    #[must_use]
+    pub fn new_stringifier() -> Self {
         Self::new(Vec::new())
     }
     /// Create a new serializer which targets a `String` with a custom config.
     #[inline]
-    #[must_use] pub const fn new_stringifier_with_config(config: TurtleConfig) -> Self {
+    #[must_use]
+    pub const fn new_stringifier_with_config(config: TurtleConfig) -> Self {
         Self::new_with_config(Vec::new(), config)
     }
 }
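The `TurtleConfig` builders above chain in the usual way. A sketch using only the constructors visible in this diff (the module path is assumed from the crate layout; not part of the patch):

use sophia::turtle::serializer::turtle::{TurtleConfig, TurtleSerializer};

fn main() {
    // `pretty` trades memory for nicer output: the whole graph is buffered,
    // as the doc comment above notes.
    let config = TurtleConfig::new()
        .with_pretty(true)
        .with_own_prefix_map(TurtleConfig::default_prefix_map());
    let _serializer = TurtleSerializer::new_stringifier_with_config(config);
}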
diff --git a/xml/src/serializer.rs b/xml/src/serializer.rs
index c08df63f..84bca08a 100644
--- a/xml/src/serializer.rs
+++ b/xml/src/serializer.rs
@@ -25,17 +25,20 @@ pub struct RdfXmlConfig {
 impl RdfXmlConfig {
     /// Size of the indentation to use in the serialization.
     /// (defaults to 0, meaning no indentation nor linebreaks)
-    #[must_use] pub const fn indentation(&self) -> usize {
+    #[must_use]
+    pub const fn indentation(&self) -> usize {
         self.indentation
     }

     /// Build a new default [`RdfXmlConfig`]
-    #[must_use] pub fn new() -> Self {
+    #[must_use]
+    pub fn new() -> Self {
         Default::default()
     }

     /// Transform an [`RdfXmlConfig`] by setting the [`indentation`](RdfXmlConfig::indentation).
-    #[must_use] pub const fn with_indentation(mut self, i: usize) -> Self {
+    #[must_use]
+    pub const fn with_indentation(mut self, i: usize) -> Self {
         self.indentation = i;
         self
     }
@@ -97,12 +100,14 @@ where
 impl RdfXmlSerializer<Vec<u8>> {
     /// Create a new serializer which targets a `String`.
     #[inline]
-    #[must_use] pub fn new_stringifier() -> Self {
+    #[must_use]
+    pub fn new_stringifier() -> Self {
         Self::new(Vec::new())
     }
     /// Create a new serializer which targets a `String` with a custom config.
     #[inline]
-    #[must_use] pub const fn new_stringifier_with_config(config: RdfXmlConfig) -> Self {
+    #[must_use]
+    pub const fn new_stringifier_with_config(config: RdfXmlConfig) -> Self {
         Self::new_with_config(Vec::new(), config)
     }
 }

From da098d5252ade2ad812583ba426e2bcbac5373a9 Mon Sep 17 00:00:00 2001
From: Mikhail Katychev
Date: Tue, 8 Oct 2024 10:33:49 -0500
Subject: [PATCH 3/5] clippy config

---
 .clippy.toml | 1 +
 Cargo.toml   | 4 ++++
 2 files changed, 5 insertions(+)
 create mode 100644 .clippy.toml

diff --git a/.clippy.toml b/.clippy.toml
new file mode 100644
index 00000000..c31f0e0a
--- /dev/null
+++ b/.clippy.toml
@@ -0,0 +1 @@
+allowed-wildcard-imports = [ "super" ]
diff --git a/Cargo.toml b/Cargo.toml
index d7753fb4..059009e8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -58,3 +58,7 @@ url = "2.4.1"

 [profile.release]
 lto = true
+
+[workspace.lints.clippy]
+enum_glob_use = "allow"
+

From 0e58ee5f0d9e02ddc555d6e36176d0c1af0281b9 Mon Sep 17 00:00:00 2001
From: Mikhail Katychev
Date: Tue, 8 Oct 2024 10:43:15 -0500
Subject: [PATCH 4/5] partial clippy fix

---
 rio/src/model.rs      | 4 +---
 term/src/_generic.rs  | 3 +--
 xml/src/serializer.rs | 2 +-
 3 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/rio/src/model.rs b/rio/src/model.rs
index 8554182a..64439fd3 100644
--- a/rio/src/model.rs
+++ b/rio/src/model.rs
@@ -109,9 +109,7 @@ impl<'a> Term for Trusted<Literal<'a>> {
 fn lexical_form(l: Literal) -> MownStr {
     use Literal::{LanguageTaggedString, Simple, Typed};
     let value = match l {
-        Simple { value } => value,
-        LanguageTaggedString { value, .. } => value,
-        Typed { value, .. } => value,
+        LanguageTaggedString { value, .. } | Typed { value, .. } | Simple { value } => value,
     };
     value.into()
 }
diff --git a/term/src/_generic.rs b/term/src/_generic.rs
index 903424ee..21d0ccc4 100644
--- a/term/src/_generic.rs
+++ b/term/src/_generic.rs
@@ -24,8 +24,7 @@ impl<T: Borrow<str>> GenericLiteral<T> {
     /// The [lexical form](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-form) of this literal
     pub fn get_lexical_form(&self) -> &str {
         match self {
-            Self::Typed(lex, ..) => lex,
-            Self::LanguageString(lex, ..) => lex,
+            Self::LanguageString(lex, ..) | Self::Typed(lex, ..) => lex,
         }
         .borrow()
     }
diff --git a/xml/src/serializer.rs b/xml/src/serializer.rs
index 84bca08a..ac7a433c 100644
--- a/xml/src/serializer.rs
+++ b/xml/src/serializer.rs
@@ -33,7 +33,7 @@ impl RdfXmlConfig {
     /// Build a new default [`RdfXmlConfig`]
     #[must_use]
     pub fn new() -> Self {
-        Default::default()
+        Self::default()
     }

     /// Transform an [`RdfXmlConfig`] by setting the [`indentation`](RdfXmlConfig::indentation).

From b8fcaa9ee8e8db7f646b80c7c243d824d305ebed Mon Sep 17 00:00:00 2001
From: Mikhail Katychev
Date: Tue, 8 Oct 2024 10:45:26 -0500
Subject: [PATCH 5/5] crate revert

---
 resource/src/resource/_struct.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/resource/src/resource/_struct.rs b/resource/src/resource/_struct.rs
index e7b255ca..7ec254c7 100644
--- a/resource/src/resource/_struct.rs
+++ b/resource/src/resource/_struct.rs
@@ -601,7 +601,7 @@ impl Clone for Resource {
     }
 }

-pub fn to_iri<T: Borrow<str>>(iri_ref: IriRef<T>) -> Result<Iri<T>, IriRef<T>> {
+pub(crate) fn to_iri<T: Borrow<str>>(iri_ref: IriRef<T>) -> Result<Iri<T>, IriRef<T>> {
     if is_absolute_iri_ref(iri_ref.as_str()) {
         Ok(Iri::new_unchecked(iri_ref.unwrap()))
     } else {
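For reference, the or-pattern merge applied in rio/src/model.rs and term/src/_generic.rs in PATCH 4/5 follows this shape, reduced here to a standalone sketch (the enum is a stand-in, not sophia's actual types):

// Three match arms with identical bodies collapse into one `|` pattern.
#[allow(dead_code)]
enum Literal<'a> {
    Simple { value: &'a str },
    LanguageTaggedString { value: &'a str, language: &'a str },
    Typed { value: &'a str, datatype: &'a str },
}

fn lexical_form<'a>(l: Literal<'a>) -> &'a str {
    use Literal::{LanguageTaggedString, Simple, Typed};
    match l {
        LanguageTaggedString { value, .. } | Typed { value, .. } | Simple { value } => value,
    }
}

fn main() {
    let lit = Literal::Typed {
        value: "42",
        datatype: "http://www.w3.org/2001/XMLSchema#integer",
    };
    assert_eq!(lexical_form(lit), "42");
}

Note also that the `[workspace.lints.clippy]` table added in PATCH 3/5 only takes effect in member crates that opt in with `[lints] workspace = true` in their own Cargo.toml.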