diff --git a/asn-compiler/Cargo.toml b/asn-compiler/Cargo.toml index f4f4ad8..c927c3e 100644 --- a/asn-compiler/Cargo.toml +++ b/asn-compiler/Cargo.toml @@ -25,6 +25,7 @@ docx-rs = { version = "0.4.5" , optional = true } regex = { version = "1.6.0" } log = { version = "0.4" } env_logger = { version = "0.10" } +anyhow = "1.0.86" [features] rs-specs-gen = [ "docx-rs"] diff --git a/asn-compiler/src/bin/extract-asn-spec.rs b/asn-compiler/src/bin/extract-asn-spec.rs index fe0d3b4..a1160e3 100644 --- a/asn-compiler/src/bin/extract-asn-spec.rs +++ b/asn-compiler/src/bin/extract-asn-spec.rs @@ -2,6 +2,7 @@ use std::io::{Read, Write}; +use anyhow::Result; use clap::Parser; #[derive(Parser, Debug)] @@ -14,7 +15,7 @@ struct Cli { output_file: String, } -fn main() -> std::io::Result<()> { +fn main() -> Result<()> { let cli = Cli::parse(); let asn_start = regex::Regex::new("^-- ASN1START$").unwrap(); diff --git a/asn-compiler/src/bin/hampi-rs-asn1c.rs b/asn-compiler/src/bin/hampi-rs-asn1c.rs index 3eec43b..e3378cc 100644 --- a/asn-compiler/src/bin/hampi-rs-asn1c.rs +++ b/asn-compiler/src/bin/hampi-rs-asn1c.rs @@ -1,7 +1,6 @@ //! A simple utility to tokenize ASN files. 
-use std::io; - +use anyhow::Result; use clap::Parser; use asn1_compiler::{ @@ -36,14 +35,15 @@ struct Cli { derive: Vec, } -fn main() -> io::Result<()> { +fn main() -> Result<()> { let mut cli = Cli::parse(); if cli.files.is_empty() { return Err(std::io::Error::new( std::io::ErrorKind::InvalidInput, "No Input files Specified", - )); + ) + .into()); } let derives = if cli.derive.contains(&Derive::All) { diff --git a/asn-compiler/src/compiler.rs b/asn-compiler/src/compiler.rs index daafd2b..15e46f2 100644 --- a/asn-compiler/src/compiler.rs +++ b/asn-compiler/src/compiler.rs @@ -11,6 +11,7 @@ use std::process::{Command, Stdio}; use topological_sort::TopologicalSort; use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::module::Asn1Module; @@ -79,7 +80,7 @@ impl Asn1Compiler { /// modules are topologically sorted as well. This makes Error handling for undefined /// definitions much easier. // FIXME: Support the case where module is imported by a name different from it's actual name. - pub fn resolve_modules(&mut self) -> Result<(), Error> { + pub fn resolve_modules(&mut self) -> Result<()> { log::info!("Resolving imports from all modules."); self.resolve_imports()?; @@ -88,7 +89,7 @@ impl Asn1Compiler { } /// Generate the code - pub fn generate(&mut self) -> Result<(), Error> { + pub fn generate(&mut self) -> Result<()> { log::info!("Generating code, writing to file: {}", self.output_filename); let input_text = self.generator.generate(&self.resolver)?; @@ -109,7 +110,7 @@ impl Asn1Compiler { } /// Compilation Driver for a String as module(s). 
- pub fn compile_string(&mut self, modules_string: &str, parse_only: bool) -> Result<(), Error> { + pub fn compile_string(&mut self, modules_string: &str, parse_only: bool) -> Result<()> { let mut tokens = crate::tokenizer::tokenize_string(modules_string)?; self.parse_tokens_into_modules(&mut tokens)?; if !parse_only { @@ -121,10 +122,7 @@ impl Asn1Compiler { } /// The Actual compilation driver - pub fn compile_files + std::fmt::Debug>( - &mut self, - files: &[T], - ) -> Result<(), Error> { + pub fn compile_files + std::fmt::Debug>(&mut self, files: &[T]) -> Result<()> { for file in files { log::info!("Processing file: {:?}", file); let file = File::open(file).map_err(|e| io_error!("{:#?}", e))?; @@ -136,7 +134,7 @@ impl Asn1Compiler { self.generate() } - fn parse_tokens_into_modules(&mut self, tokens: &mut Vec) -> Result<(), Error> { + fn parse_tokens_into_modules(&mut self, tokens: &mut Vec) -> Result<()> { log::debug!("Parsing {} tokens.", tokens.len()); let mut modules = crate::parser::parse(tokens)?; loop { @@ -151,7 +149,7 @@ impl Asn1Compiler { Ok(()) } - fn rustfmt_generated_code(&self, code: &str) -> Result { + fn rustfmt_generated_code(&self, code: &str) -> Result { log::debug!("Runing `rustfmt` on the generated code."); let rustfmt_binary = "rustfmt"; // TODO: Get from `env` , 'custom path' etc. 
let mut cmd = Command::new(rustfmt_binary); @@ -177,13 +175,13 @@ impl Asn1Compiler { match String::from_utf8(output) { Ok(formatted_output) => match status.code() { Some(0) => Ok(formatted_output), - _ => Err(resolve_error!("`rustfmt` failed to write some bindings.")), + _ => Err(resolve_error!("`rustfmt` failed to write some bindings.").into()), }, _ => Ok(stdin_handle.join().unwrap()), } } - fn resolve_imports(&self) -> Result<(), Error> { + fn resolve_imports(&self) -> Result<()> { log::debug!("Resolving imports."); for (_, module) in self.modules.iter() { for (import, module_name) in module.get_imported_defs() { @@ -193,7 +191,8 @@ impl Asn1Compiler { "Module '{}', corresponding to definition '{}' not found!", module_name.name_as_str(), import - )); + ) + .into()); } } } @@ -226,7 +225,7 @@ impl Asn1Compiler { out_vec } - fn resolve_definitions(&mut self) -> Result<(), Error> { + fn resolve_definitions(&mut self) -> Result<()> { let module_names = self.sorted_modules(); for name in module_names { let module = self.modules.get_mut(&name).unwrap(); diff --git a/asn-compiler/src/generator/asn/types/base/bitstring.rs b/asn-compiler/src/generator/asn/types/base/bitstring.rs index d63245a..4363c7c 100644 --- a/asn-compiler/src/generator/asn/types/base/bitstring.rs +++ b/asn-compiler/src/generator/asn/types/base/bitstring.rs @@ -3,17 +3,13 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; +use anyhow::Result; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedBitString; impl Asn1ResolvedBitString { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let struct_name = generator.to_type_ident(name); let mut ty_attributes = quote! 
{ type = "BITSTRING" }; @@ -39,7 +35,7 @@ impl Asn1ResolvedBitString { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/boolean.rs b/asn-compiler/src/generator/asn/types/base/boolean.rs index 478174d..7174638 100644 --- a/asn-compiler/src/generator/asn/types/base/boolean.rs +++ b/asn-compiler/src/generator/asn/types/base/boolean.rs @@ -3,16 +3,12 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedBoolean; +use anyhow::Result; impl Asn1ResolvedBoolean { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let type_name = generator.to_type_ident(name); let vis = generator.get_visibility_tokens(); @@ -29,7 +25,7 @@ impl Asn1ResolvedBoolean { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/charstring.rs b/asn-compiler/src/generator/asn/types/base/charstring.rs index ab684b3..07cd740 100644 --- a/asn-compiler/src/generator/asn/types/base/charstring.rs +++ b/asn-compiler/src/generator/asn/types/base/charstring.rs @@ -3,17 +3,13 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; +use anyhow::Result; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedCharacterString; impl Asn1ResolvedCharacterString { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let struct_name = 
generator.to_type_ident(name); let char_str_type: proc_macro2::TokenStream = format!("\"{}\"", self.str_type).parse().unwrap(); @@ -41,7 +37,7 @@ impl Asn1ResolvedCharacterString { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/enumerated.rs b/asn-compiler/src/generator/asn/types/base/enumerated.rs index fa58692..fa73fc1 100644 --- a/asn-compiler/src/generator/asn/types/base/enumerated.rs +++ b/asn-compiler/src/generator/asn/types/base/enumerated.rs @@ -3,17 +3,13 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; +use anyhow::Result; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedEnumerated; impl Asn1ResolvedEnumerated { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let struct_name = generator.to_type_ident(name); let inner_type = generator.to_inner_type(self.bits, self.signed); @@ -43,7 +39,7 @@ impl Asn1ResolvedEnumerated { Ok(struct_tokens) } - fn generate_named_values(&self, generator: &Generator) -> Result { + fn generate_named_values(&self, generator: &Generator) -> Result { let mut tokens = TokenStream::new(); for (name, value) in &self.named_root_values { let const_name = generator.to_const_ident(name); @@ -65,7 +61,7 @@ impl Asn1ResolvedEnumerated { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/integer.rs b/asn-compiler/src/generator/asn/types/base/integer.rs index 73519f6..cac0467 100644 --- a/asn-compiler/src/generator/asn/types/base/integer.rs +++ 
b/asn-compiler/src/generator/asn/types/base/integer.rs @@ -3,17 +3,13 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; +use anyhow::Result; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedInteger; impl Asn1ResolvedInteger { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let struct_name = generator.to_type_ident(name); let inner_type = generator.to_inner_type(self.bits, self.signed); let (min, max) = self.get_min_max_constraints(); @@ -68,7 +64,7 @@ impl Asn1ResolvedInteger { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/mod.rs b/asn-compiler/src/generator/asn/types/base/mod.rs index c93e429..85293e3 100644 --- a/asn-compiler/src/generator/asn/types/base/mod.rs +++ b/asn-compiler/src/generator/asn/types/base/mod.rs @@ -20,16 +20,16 @@ mod real; use proc_macro2::{Ident, TokenStream}; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::ResolvedBaseType; +use anyhow::Result; impl ResolvedBaseType { pub(crate) fn generate_for_base_type( &self, name: &str, generator: &mut Generator, - ) -> Result { + ) -> Result { match self { ResolvedBaseType::Integer(ref i) => i.generate(name, generator), ResolvedBaseType::Enum(ref e) => e.generate(name, generator), @@ -47,7 +47,7 @@ impl ResolvedBaseType { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { match self { ResolvedBaseType::Integer(ref i) => i.generate_ident_and_aux_type(generator, input), ResolvedBaseType::Enum(ref e) => e.generate_ident_and_aux_type(generator, input), diff --git a/asn-compiler/src/generator/asn/types/base/null.rs 
b/asn-compiler/src/generator/asn/types/base/null.rs index 0f48c3b..b0a3d79 100644 --- a/asn-compiler/src/generator/asn/types/base/null.rs +++ b/asn-compiler/src/generator/asn/types/base/null.rs @@ -3,16 +3,12 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedNull; +use anyhow::Result; impl Asn1ResolvedNull { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let type_name = generator.to_type_ident(name); let vis = generator.get_visibility_tokens(); @@ -29,7 +25,7 @@ impl Asn1ResolvedNull { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/octetstring.rs b/asn-compiler/src/generator/asn/types/base/octetstring.rs index c311bd3..e2d0576 100644 --- a/asn-compiler/src/generator/asn/types/base/octetstring.rs +++ b/asn-compiler/src/generator/asn/types/base/octetstring.rs @@ -3,17 +3,13 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; +use anyhow::Result; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedOctetString; impl Asn1ResolvedOctetString { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let struct_name = generator.to_type_ident(name); let mut ty_attributes = quote! 
{ type = "OCTET-STRING" }; @@ -39,7 +35,7 @@ impl Asn1ResolvedOctetString { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/oid.rs b/asn-compiler/src/generator/asn/types/base/oid.rs index 7ca3419..163263b 100644 --- a/asn-compiler/src/generator/asn/types/base/oid.rs +++ b/asn-compiler/src/generator/asn/types/base/oid.rs @@ -3,16 +3,12 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedObjectIdentifier; +use anyhow::Result; impl Asn1ResolvedObjectIdentifier { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let type_name = generator.to_type_ident(name); let vis = generator.get_visibility_tokens(); @@ -29,7 +25,7 @@ impl Asn1ResolvedObjectIdentifier { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/base/real.rs b/asn-compiler/src/generator/asn/types/base/real.rs index b86ea77..8becf16 100644 --- a/asn-compiler/src/generator/asn/types/base/real.rs +++ b/asn-compiler/src/generator/asn/types/base/real.rs @@ -3,16 +3,12 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::base::Asn1ResolvedReal; +use anyhow::Result; impl Asn1ResolvedReal { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let type_name = generator.to_type_ident(name); 
let vis = generator.get_visibility_tokens(); @@ -29,7 +25,7 @@ impl Asn1ResolvedReal { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = if let Some(unique_name) = input { unique_name.to_string() } else { diff --git a/asn-compiler/src/generator/asn/types/constructed/choice.rs b/asn-compiler/src/generator/asn/types/constructed/choice.rs index 63ab701..fa5e617 100644 --- a/asn-compiler/src/generator/asn/types/constructed/choice.rs +++ b/asn-compiler/src/generator/asn/types/constructed/choice.rs @@ -3,12 +3,12 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::{ constructed::{ResolvedComponent, ResolvedConstructedType}, Asn1ResolvedType, }; +use anyhow::Result; // Following is a Private structure only used in this module. struct ChoiceComponentToken { @@ -22,7 +22,7 @@ impl ResolvedConstructedType { &self, name: &str, generator: &mut Generator, - ) -> Result { + ) -> Result { if let ResolvedConstructedType::Choice { ref root_components, ref additions, @@ -63,7 +63,7 @@ impl ResolvedConstructedType { Ok(choice_tokens) } else { - Err(code_generate_error!("Constructed Type is not a `CHOICE`")) + Err(code_generate_error!("Constructed Type is not a `CHOICE`").into()) } } @@ -73,7 +73,7 @@ impl ResolvedConstructedType { addition_tokens: &Option>, vis: TokenStream, dir: TokenStream, - ) -> Result { + ) -> Result { let mut root_comp_tokens = TokenStream::new(); for token in root_tokens { let variant_ident = token.variant.clone(); @@ -137,7 +137,7 @@ impl ResolvedConstructedType { components: &[ResolvedComponent], name: &str, generator: &mut Generator, - ) -> Result, Error> { + ) -> Result> { let mut out_components = vec![]; for (i, c) in components.iter().enumerate() { let comp_variant_ident = generator.to_type_ident(&c.id); diff --git a/asn-compiler/src/generator/asn/types/constructed/mod.rs 
b/asn-compiler/src/generator/asn/types/constructed/mod.rs index 28c1960..2f2139a 100644 --- a/asn-compiler/src/generator/asn/types/constructed/mod.rs +++ b/asn-compiler/src/generator/asn/types/constructed/mod.rs @@ -9,20 +9,16 @@ mod seq; use proc_macro2::{Ident, TokenStream}; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::constructed::ResolvedConstructedType; +use anyhow::Result; impl ResolvedConstructedType { // The main `generate` function for the constucted types // // This function simply calls the appropriate `generate_{sequence|choice|sequence_of}` function // for therespective types. - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { match self { ResolvedConstructedType::Sequence { .. } => self.generate_sequence(name, generator), ResolvedConstructedType::Choice { .. } => self.generate_choice(name, generator), @@ -56,7 +52,7 @@ impl ResolvedConstructedType { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { let unique_name = match self { ResolvedConstructedType::Sequence { name, .. } => match input { Some(ref inp) => inp.to_string(), diff --git a/asn-compiler/src/generator/asn/types/constructed/seq.rs b/asn-compiler/src/generator/asn/types/constructed/seq.rs index 20b3694..fb3aa91 100644 --- a/asn-compiler/src/generator/asn/types/constructed/seq.rs +++ b/asn-compiler/src/generator/asn/types/constructed/seq.rs @@ -1,20 +1,19 @@ //! Implementation of Code Generation for ASN.1 `SEQUENCE` Type. 
-use proc_macro2::TokenStream; -use quote::quote; - -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::{ constructed::ResolvedConstructedType, Asn1ResolvedType, }; +use anyhow::Result; +use proc_macro2::TokenStream; +use quote::quote; impl ResolvedConstructedType { pub(crate) fn generate_sequence( &self, name: &str, generator: &mut Generator, - ) -> Result { + ) -> Result { if let ResolvedConstructedType::Sequence { ref components, ref extensible, diff --git a/asn-compiler/src/generator/asn/types/constructed/seqof.rs b/asn-compiler/src/generator/asn/types/constructed/seqof.rs index 212d73f..7e55cce 100644 --- a/asn-compiler/src/generator/asn/types/constructed/seqof.rs +++ b/asn-compiler/src/generator/asn/types/constructed/seqof.rs @@ -3,18 +3,18 @@ use proc_macro2::TokenStream; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::{ constructed::ResolvedConstructedType, Asn1ResolvedType, }; +use anyhow::Result; impl ResolvedConstructedType { pub(crate) fn generate_sequence_of( &self, name: &str, generator: &mut Generator, - ) -> Result { + ) -> Result { if let ResolvedConstructedType::SequenceOf { ref ty, ref size_values, diff --git a/asn-compiler/src/generator/asn/types/int.rs b/asn-compiler/src/generator/asn/types/int.rs index bb815d5..9c5f6c7 100644 --- a/asn-compiler/src/generator/asn/types/int.rs +++ b/asn-compiler/src/generator/asn/types/int.rs @@ -4,16 +4,16 @@ use proc_macro2::{Ident, TokenStream}; use quote::quote; -use crate::error::Error; use crate::generator::Generator; use crate::resolver::asn::structs::types::{Asn1ResolvedType, ResolvedSetType}; +use anyhow::Result; impl Asn1ResolvedType { pub(crate) fn generate_for_type( name: &str, ty: &Asn1ResolvedType, gen: &mut Generator, - ) -> Result, Error> { + ) -> Result> { match ty { Asn1ResolvedType::Base(ref b) => Ok(Some(b.generate_for_base_type(name, gen)?)), 
Asn1ResolvedType::Constructed(ref c) => Ok(Some(c.generate(name, gen)?)), @@ -28,7 +28,7 @@ impl Asn1ResolvedType { ty: &Asn1ResolvedType, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { match ty { Asn1ResolvedType::Base(ref b) => { b.generate_ident_and_aux_type_for_base(generator, input) @@ -48,7 +48,7 @@ impl Asn1ResolvedType { pub(crate) fn generate_ident_for_reference( reference: &str, gen: &mut Generator, - ) -> Result { + ) -> Result { Ok(gen.to_type_ident(reference)) } @@ -56,7 +56,7 @@ impl Asn1ResolvedType { name: &str, gen: &mut Generator, reference: &str, - ) -> Result { + ) -> Result { let referring = gen.to_type_ident(name); let reference = gen.to_type_ident(reference); @@ -69,11 +69,7 @@ impl Asn1ResolvedType { } impl ResolvedSetType { - pub(crate) fn generate( - &self, - name: &str, - generator: &mut Generator, - ) -> Result { + pub(crate) fn generate(&self, name: &str, generator: &mut Generator) -> Result { let ty_ident = generator.to_type_ident(name); let ty_elements = self.generate_aux_types(generator)?; @@ -92,7 +88,7 @@ impl ResolvedSetType { &self, generator: &mut Generator, input: Option<&String>, - ) -> Result { + ) -> Result { // FIXME: This is perhaps not right let ty_ident = match input { None => generator.to_type_ident(&self.setref), @@ -116,7 +112,7 @@ impl ResolvedSetType { Ok(ty_ident) } - fn generate_aux_types(&self, generator: &mut Generator) -> Result { + fn generate_aux_types(&self, generator: &mut Generator) -> Result { let mut variant_tokens = TokenStream::new(); for (name, ty) in &self.types { let variant_ident = generator.to_type_ident(&name.0); diff --git a/asn-compiler/src/generator/asn/values.rs b/asn-compiler/src/generator/asn/values.rs index fb86f1b..3b113c9 100644 --- a/asn-compiler/src/generator/asn/values.rs +++ b/asn-compiler/src/generator/asn/values.rs @@ -3,12 +3,12 @@ use proc_macro2::{Literal, TokenStream}; use quote::quote; -use crate::error::Error; use 
crate::generator::Generator; use crate::resolver::asn::structs::{ types::{base::ResolvedBaseType, Asn1ResolvedType}, values::{Asn1ResolvedValue, ResolvedBaseValue}, }; +use anyhow::Result; impl Asn1ResolvedValue { // This function generates constants for values that are of base types or references to base @@ -28,7 +28,7 @@ impl Asn1ResolvedValue { name: &str, value: &Asn1ResolvedValue, gen: &mut Generator, - ) -> Result, Error> { + ) -> Result> { match value { Asn1ResolvedValue::Base(v) => Self::tokens_from_resolved_base_value(name, v, gen), Asn1ResolvedValue::ReferencedType { value, .. } => { @@ -42,7 +42,7 @@ impl Asn1ResolvedValue { name: &str, base: &ResolvedBaseValue, gen: &mut Generator, - ) -> Result, Error> { + ) -> Result> { if let ResolvedBaseValue::Integer(i) = base { if let Asn1ResolvedType::Base(ResolvedBaseType::Integer(ref typ)) = i.typeref { let const_type = gen.to_inner_type(typ.bits, typ.signed); diff --git a/asn-compiler/src/generator/int.rs b/asn-compiler/src/generator/int.rs index 67f8ecf..21b094a 100644 --- a/asn-compiler/src/generator/int.rs +++ b/asn-compiler/src/generator/int.rs @@ -2,14 +2,13 @@ use std::collections::HashMap; +use anyhow::Result; use heck::{ToShoutySnakeCase, ToSnakeCase}; use proc_macro2::{Ident, Literal, Span, TokenStream}; - use quote::quote; use lazy_static::lazy_static; -use crate::error::Error; use crate::resolver::Resolver; use crate::resolver::asn::structs::{types::Asn1ResolvedType, values::Asn1ResolvedValue}; @@ -115,7 +114,7 @@ impl Generator { // Generates the code using the information from the `Resolver`. Returns a String // containing all the code (which is basically a `format!` of the `TokenStream`. - pub(crate) fn generate(&mut self, resolver: &Resolver) -> Result { + pub(crate) fn generate(&mut self, resolver: &Resolver) -> Result { // FIXME: Not sure how to make sure the crates defined here are a dependency. // May be can just do with documenting it. 
diff --git a/asn-compiler/src/parser/asn/defs.rs b/asn-compiler/src/parser/asn/defs.rs index 81fab55..eff3455 100644 --- a/asn-compiler/src/parser/asn/defs.rs +++ b/asn-compiler/src/parser/asn/defs.rs @@ -1,7 +1,7 @@ //! Top level handling of definitions -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::{ asn::structs::{ @@ -42,7 +42,7 @@ impl Asn1Definition { } } - pub fn resolve_object_class(&mut self, class: &Asn1Definition) -> Result<(), Error> { + pub fn resolve_object_class(&mut self, class: &Asn1Definition) -> Result<()> { if let Asn1AssignmentKind::Class(ref c) = class.kind { match self.kind { Asn1AssignmentKind::Object(ref mut o) => { @@ -60,16 +60,16 @@ impl Asn1Definition { Ok(()) } - pub fn apply_params(&self, actual_params: &[ActualParam]) -> Result { + pub fn apply_params(&self, actual_params: &[ActualParam]) -> Result { if self.params.is_none() { - return Err(parse_error!("apply_params: No Params for the definition!")); + return Err(parse_error!("apply_params: No Params for the definition!").into()); } let mut params = self.params.as_ref().unwrap().clone(); if params.ordered.len() != actual_params.len() { - return Err(parse_error!( - "Actual Params and Definition Params Lengths mismatch!" - )); + return Err( + parse_error!("Actual Params and Definition Params Lengths mismatch!").into(), + ); } for (idx, actual) in actual_params.iter().enumerate() { @@ -93,7 +93,7 @@ impl Asn1Definition { // or `ObjectAssigmnet` or `ObjectSetAssignment` // // `ParameterizedAssignment` is supported for only `TypeAssignment`. 
-pub(super) fn parse_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +pub(super) fn parse_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let consumed = 0; if expect_one_of_tokens( @@ -115,10 +115,7 @@ pub(super) fn parse_definition(tokens: &[Token]) -> Result<(Asn1Definition, usiz parse_typeish_definition(&tokens[consumed..]) } } else { - Err(parse_error!( - "Not Implemented @token: {:#?}", - tokens[consumed] - )) + Err(parse_error!("Not Implemented @token: {:#?}", tokens[consumed]).into()) } } @@ -127,7 +124,7 @@ pub(super) fn parse_definition(tokens: &[Token]) -> Result<(Asn1Definition, usiz // All the above assignments start with a lowe-case letter and will have to be parsed into their // respective 'values'. Returns the corresponding variant of the `Asn1Definition` and the number // of tokens consumed or error. -fn parse_valueish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_valueish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { if let Ok(x) = parse_object_assignment(tokens) { return Ok(x); } @@ -136,16 +133,13 @@ fn parse_valueish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize) return Ok(x); } - Err(parse_error_log!( - "Failed to parse a definition at Token: {:?}", - tokens[0] - )) + Err(parse_error_log!("Failed to parse a definition at Token: {:?}", tokens[0]).into()) } // Parse object Assginemtnt // // object CLASS ::= { ... -- Object Defined Syntax -- } -fn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let mut consumed = 0; if !expect_tokens( @@ -155,10 +149,7 @@ fn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), &[Token::is_object_class_reference], ], )? 
{ - return Err(unexpected_token!( - "'object', 'CLASS' Reference", - tokens[consumed] - )); + return Err(unexpected_token!("'object', 'CLASS' Reference", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; @@ -167,7 +158,7 @@ fn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), consumed += 1; if !expect_token(&tokens[consumed..], Token::is_assignment)? { - return Err(unexpected_token!("::=", tokens[consumed])); + return Err(unexpected_token!("::=", tokens[consumed]).into()); } consumed += 1; @@ -193,11 +184,11 @@ fn parse_object_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), // Parse object Assginemtnt // // value Type ::= ValueDefinition -- Value Definition can be anything for now. -fn parse_value_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_value_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_value_reference)? { - return Err(unexpected_token!("Value Reference", tokens[consumed])); + return Err(unexpected_token!("Value Reference", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; @@ -206,7 +197,7 @@ fn parse_value_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), E consumed += typeref_consumed; if !expect_token(&tokens[consumed..], Token::is_assignment)? { - return Err(unexpected_token!("::=", tokens[consumed])); + return Err(unexpected_token!("::=", tokens[consumed]).into()); } consumed += 1; @@ -228,7 +219,7 @@ fn parse_value_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), E // All the above assignments start with a lowe-case letter and will have to be parsed into their // respective 'values'. Returns the corresponding variant of the `Asn1Definition` and the number // of tokens consumed or error. 
-fn parse_typeish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_typeish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { // Try to parse a type_definition if let Ok(x) = parse_type_assignment(tokens) { log::trace!("Parsed Type Assignment."); @@ -245,20 +236,17 @@ fn parse_typeish_definition(tokens: &[Token]) -> Result<(Asn1Definition, usize), return Ok(x); } - Err(parse_error_log!( - "Failed to parse a definition at Token: {:?}", - tokens[0] - )) + Err(parse_error_log!("Failed to parse a definition at Token: {:?}", tokens[0]).into()) } // Parse a Type Assignment // // Identifier [{Params}] :== {Type|Refere} [(Constraints)] -fn parse_type_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_type_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_type_reference)? { - return Err(unexpected_token!("Type Reference", tokens[consumed])); + return Err(unexpected_token!("Type Reference", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; @@ -271,7 +259,7 @@ fn parse_type_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Er consumed += params_consumed; if !expect_token(&tokens[consumed..], Token::is_assignment)? { - return Err(unexpected_token!("::=", tokens[consumed])); + return Err(unexpected_token!("::=", tokens[consumed]).into()); } consumed += 1; @@ -302,21 +290,21 @@ fn parse_type_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Er // // CLASS-NAME :== CLASS { .... -- CLASS DEFINITION -- } // Parameterized Class assignment not supported. -fn parse_class_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_class_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_object_class_reference)? 
{ - return Err(unexpected_token!("CLASS Reference", tokens[consumed])); + return Err(unexpected_token!("CLASS Reference", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; if !expect_token(&tokens[consumed..], Token::is_assignment)? { - return Err(unexpected_token!("::=", tokens[consumed])); + return Err(unexpected_token!("::=", tokens[consumed]).into()); } consumed += 1; if !expect_keyword(&tokens[consumed..], "CLASS")? { - return Err(unexpected_token!("'CLASS'", tokens[consumed])); + return Err(unexpected_token!("'CLASS'", tokens[consumed]).into()); } let (classref, classref_consumed) = parse_class(&tokens[consumed..])?; @@ -335,23 +323,23 @@ fn parse_class_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), E // Parse an Object Set Assignment // // ObjectSetName CLASS ::= { Objects } -- Where Objects can be an Object/ObjectSet/Reference -fn parse_object_set_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize), Error> { +fn parse_object_set_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_type_reference)? { - return Err(unexpected_token!("'Type Reference'", tokens[consumed])); + return Err(unexpected_token!("'Type Reference'", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; if !expect_token(&tokens[consumed..], Token::is_object_class_reference)? { - return Err(unexpected_token!("'CLASS Reference'", tokens[consumed])); + return Err(unexpected_token!("'CLASS Reference'", tokens[consumed]).into()); } let class = tokens[consumed].text.clone(); consumed += 1; if !expect_token(&tokens[consumed..], Token::is_assignment)? 
{ - return Err(unexpected_token!("'::='", tokens[consumed])); + return Err(unexpected_token!("'::='", tokens[consumed]).into()); } consumed += 1; @@ -371,11 +359,11 @@ fn parse_object_set_assignment(tokens: &[Token]) -> Result<(Asn1Definition, usiz )) } -fn parse_params(tokens: &[Token]) -> Result<(DefinitionParams, usize), Error> { +fn parse_params(tokens: &[Token]) -> Result<(DefinitionParams, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -455,10 +443,9 @@ fn parse_params(tokens: &[Token]) -> Result<(DefinitionParams, usize), Error> { ) } } else { - return Err(parse_error!( - "Error in Parsing params at Token: {:#?}", - tokens[consumed] - )); + return Err( + parse_error!("Error in Parsing params at Token: {:#?}", tokens[consumed]).into(), + ); }; let param = DefinitionParam { _governer, diff --git a/asn-compiler/src/parser/asn/module.rs b/asn-compiler/src/parser/asn/module.rs index 09a2a67..3a784f0 100644 --- a/asn-compiler/src/parser/asn/module.rs +++ b/asn-compiler/src/parser/asn/module.rs @@ -1,8 +1,8 @@ //! 
ASN.1 Module Parsing functionality use std::collections::HashMap; -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keyword, expect_one_of_keywords, expect_token}; @@ -20,7 +20,7 @@ impl Asn1Module { pub(crate) fn resolve_object_classes( &mut self, object_classes: &HashMap, - ) -> Result<(), Error> { + ) -> Result<()> { for def in self.definitions.values_mut() { if !def.is_object_or_object_set() { continue; @@ -32,7 +32,8 @@ impl Asn1Module { "Error Resolving Class '{}' for Definition '{}'", class, def.id() - )); + ) + .into()); } let class = classdef.unwrap(); def.resolve_object_class(class)?; @@ -42,7 +43,7 @@ impl Asn1Module { } } -pub(in crate::parser) fn parse_module(tokens: &[Token]) -> Result<(Asn1Module, usize), Error> +pub(in crate::parser) fn parse_module(tokens: &[Token]) -> Result<(Asn1Module, usize)> where { let mut consumed = 0; @@ -60,7 +61,7 @@ where if expect_keyword(&tokens[consumed..], "DEFINITIONS")? { consumed += 1; } else { - return Err(unexpected_token!("DEFINITIONS", tokens[consumed])); + return Err(unexpected_token!("DEFINITIONS", tokens[consumed]).into()); } let (tags, tags_consumed) = maybe_parse_header_tags(&tokens[consumed..])?; @@ -76,12 +77,12 @@ where if expect_token(&tokens[consumed..], Token::is_assignment)? { consumed += 1; } else { - return Err(unexpected_token!("::=", tokens[consumed])); + return Err(unexpected_token!("::=", tokens[consumed]).into()); } if expect_keyword(&tokens[consumed..], "BEGIN")? { consumed += 1; } else { - return Err(unexpected_token!("BEGIN", tokens[consumed])); + return Err(unexpected_token!("BEGIN", tokens[consumed]).into()); } // Parse but ignore exports if any (by default everything is exported). 
@@ -120,7 +121,7 @@ where Ok((module, consumed)) } -fn parse_module_maybe_exports(tokens: &[Token]) -> Result<((), usize), Error> { +fn parse_module_maybe_exports(tokens: &[Token]) -> Result<((), usize)> { let mut consumed = 0; if expect_keyword(&tokens[consumed..], "EXPORTS")? { consumed += 1; @@ -142,9 +143,7 @@ fn parse_module_maybe_exports(tokens: &[Token]) -> Result<((), usize), Error> { Ok(((), consumed)) } -fn parse_module_imports( - tokens: &[Token], -) -> Result<(HashMap, usize), Error> { +fn parse_module_imports(tokens: &[Token]) -> Result<(HashMap, usize)> { let mut consumed = 0; let mut imports = HashMap::new(); @@ -169,7 +168,7 @@ fn parse_module_imports( for d in imported_defs { if imports.contains_key(&d) { - return Err(parse_error!("Definition '{}' is imported twice", d)); + return Err(parse_error!("Definition '{}' is imported twice", d).into()); } let _ = imports.insert(d, module_name.clone()); } @@ -184,7 +183,7 @@ fn parse_module_imports( Ok((imports, consumed)) } -fn maybe_parse_header_tags(tokens: &[Token]) -> Result<(Asn1ModuleTag, usize), Error> { +fn maybe_parse_header_tags(tokens: &[Token]) -> Result<(Asn1ModuleTag, usize)> { let mut consumed = 0; let tag = @@ -195,14 +194,14 @@ fn maybe_parse_header_tags(tokens: &[Token]) -> Result<(Asn1ModuleTag, usize), E "AUTOMATIC" => Asn1ModuleTag::Automatic, _ => { // Will never reach - return Err(parse_error!("Should Never Reach")); + return Err(parse_error!("Should Never Reach").into()); } }; consumed += 1; if expect_keyword(&tokens[consumed..], "TAGS")? 
{ consumed += 1 } else { - return Err(unexpected_token!("TAGS", tokens[consumed])); + return Err(unexpected_token!("TAGS", tokens[consumed]).into()); } tag } else { @@ -211,7 +210,7 @@ fn maybe_parse_header_tags(tokens: &[Token]) -> Result<(Asn1ModuleTag, usize), E Ok((tag, consumed)) } -fn parse_module_name(tokens: &[Token]) -> Result<(Asn1ModuleName, usize), Error> { +fn parse_module_name(tokens: &[Token]) -> Result<(Asn1ModuleName, usize)> { let mut consumed = 0; // First Name @@ -221,7 +220,8 @@ fn parse_module_name(tokens: &[Token]) -> Result<(Asn1ModuleName, usize), Error> return Err(parse_error!( "Module Name '{}' is not a valid Module Reference", tokens[consumed].text - )); + ) + .into()); }; consumed += 1; @@ -233,9 +233,7 @@ fn parse_module_name(tokens: &[Token]) -> Result<(Asn1ModuleName, usize), Error> Ok((Asn1ModuleName::new(name, oid), consumed)) } -fn maybe_parse_object_identifer( - tokens: &[Token], -) -> Result<(Option, usize), Error> { +fn maybe_parse_object_identifer(tokens: &[Token]) -> Result<(Option, usize)> { match expect_token(tokens, Token::is_curly_begin) { Ok(success) => { if success { diff --git a/asn-compiler/src/parser/asn/oid.rs b/asn-compiler/src/parser/asn/oid.rs index 2a9d0e6..4477c85 100644 --- a/asn-compiler/src/parser/asn/oid.rs +++ b/asn-compiler/src/parser/asn/oid.rs @@ -4,8 +4,8 @@ use std::collections::HashMap; use lazy_static::lazy_static; -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_one_of_tokens, expect_token, expect_tokens}; @@ -31,9 +31,9 @@ lazy_static! { // Parses a named OID component // // Parses named OID components of the form `iso` or `iso(1)` -fn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Error> { +fn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize)> { if !expect_token(tokens, Token::is_value_reference)? 
{ - return Err(unexpected_token!("'IDENTIFIER'", tokens[0])); + return Err(unexpected_token!("'IDENTIFIER'", tokens[0]).into()); } let name_token = &tokens[0]; let name = &name_token.text; @@ -56,7 +56,7 @@ fn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), } else { let number = WELL_KNOWN_OID_NAMES.get(name.as_str()); if number.is_none() { - return Err(unknown_oid_name!(name_token)); + return Err(unknown_oid_name!(name_token).into()); } (*number.unwrap(), 1) } @@ -64,7 +64,7 @@ fn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Err(_) => { let number = WELL_KNOWN_OID_NAMES.get(name.as_str()); if number.is_none() { - return Err(unknown_oid_name!(name_token)); + return Err(unknown_oid_name!(name_token).into()); } (*number.unwrap(), 1) } @@ -76,7 +76,7 @@ fn parse_named_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), // Wrapper for Parsing an OID Component // // Parses Either Numbered or Named/Numbered OID components -fn parse_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Error> { +fn parse_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize)> { let consumed = 0; if expect_one_of_tokens( @@ -94,23 +94,18 @@ fn parse_oid_component(tokens: &[Token]) -> Result<(OIDComponent, usize), Error> Ok((OIDComponent::new(None, number), 1)) } } else { - Err(unexpected_token!( - "Expected 'identifier' or 'number'", - tokens[0] - )) + Err(unexpected_token!("Expected 'identifier' or 'number'", tokens[0]).into()) } } // This is required by 'resolver' to resolve object identifier values (which requires the value // which is basically just a string of the form '{ iso ... }' to be parsed (and resolved) there. // Hence this module is `pub(crate)`. 
-pub(crate) fn parse_object_identifier( - tokens: &[Token], -) -> Result<(ObjectIdentifier, usize), Error> { +pub(crate) fn parse_object_identifier(tokens: &[Token]) -> Result<(ObjectIdentifier, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("{", tokens[consumed])); + return Err(unexpected_token!("{", tokens[consumed]).into()); } consumed += 1; diff --git a/asn-compiler/src/parser/asn/types/base/bitstring.rs b/asn-compiler/src/parser/asn/types/base/bitstring.rs index 72dc620..226258b 100644 --- a/asn-compiler/src/parser/asn/types/base/bitstring.rs +++ b/asn-compiler/src/parser/asn/types/base/bitstring.rs @@ -1,7 +1,7 @@ //! Parser for bitstring type. -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keywords, expect_token}; @@ -9,11 +9,11 @@ use crate::parser::asn::structs::types::base::Asn1TypeBitString; use super::utils::parse_named_values; -pub(crate) fn parse_bitstring_type(tokens: &[Token]) -> Result<(Asn1TypeBitString, usize), Error> { +pub(crate) fn parse_bitstring_type(tokens: &[Token]) -> Result<(Asn1TypeBitString, usize)> { let mut consumed = 0; if !expect_keywords(&tokens[consumed..], &["BIT", "STRING"])? { - return Err(unexpected_token!("'BIT STRING'", tokens[consumed])); + return Err(unexpected_token!("'BIT STRING'", tokens[consumed]).into()); } consumed += 2; diff --git a/asn-compiler/src/parser/asn/types/base/enumerated.rs b/asn-compiler/src/parser/asn/types/base/enumerated.rs index 91c476d..df57397 100644 --- a/asn-compiler/src/parser/asn/types/base/enumerated.rs +++ b/asn-compiler/src/parser/asn/types/base/enumerated.rs @@ -1,7 +1,7 @@ //! 
Functionality related to parsing 'ENUMERATED' ASN.1 Type -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keyword, expect_one_of_tokens, expect_token}; @@ -10,7 +10,7 @@ use crate::parser::asn::structs::types::base::{Asn1TypeEnumerated, EnumValue}; use super::utils::parse_named_maybe_value; // Parses values in an Enum. Used for parsing values either in the root or extension. -fn parse_enum_values(tokens: &[Token]) -> Result<(Vec, usize), Error> { +fn parse_enum_values(tokens: &[Token]) -> Result<(Vec, usize)> { let mut consumed = 0; let mut values = vec![]; @@ -40,18 +40,16 @@ fn parse_enum_values(tokens: &[Token]) -> Result<(Vec, usize), Error> } // Parse an enumerated type -pub(crate) fn parse_enumerated_type( - tokens: &[Token], -) -> Result<(Asn1TypeEnumerated, usize), Error> { +pub(crate) fn parse_enumerated_type(tokens: &[Token]) -> Result<(Asn1TypeEnumerated, usize)> { let mut consumed = 0; if !expect_keyword(tokens, "ENUMERATED")? { - return Err(unexpected_token!("ENUMERATED", tokens[0])); + return Err(unexpected_token!("ENUMERATED", tokens[0]).into()); } consumed += 1; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -77,7 +75,7 @@ pub(crate) fn parse_enumerated_type( consumed += ext_values_consumed; if !expect_token(&tokens[consumed..], Token::is_curly_end)? { - return Err(unexpected_token!("'}'", tokens[consumed])); + return Err(unexpected_token!("'}'", tokens[consumed]).into()); } consumed += 1; diff --git a/asn-compiler/src/parser/asn/types/base/integer.rs b/asn-compiler/src/parser/asn/types/base/integer.rs index 15b7193..5143d16 100644 --- a/asn-compiler/src/parser/asn/types/base/integer.rs +++ b/asn-compiler/src/parser/asn/types/base/integer.rs @@ -1,7 +1,7 @@ //! 
Parsing "INTEGER" ASN.1 Type -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keyword, expect_token}; @@ -9,11 +9,11 @@ use crate::parser::asn::structs::types::base::Asn1TypeInteger; use super::utils::parse_named_values; -pub(crate) fn parse_integer_type(tokens: &[Token]) -> Result<(Asn1TypeInteger, usize), Error> { +pub(crate) fn parse_integer_type(tokens: &[Token]) -> Result<(Asn1TypeInteger, usize)> { let mut consumed = 0; if !expect_keyword(&tokens[consumed..], "INTEGER")? { - return Err(unexpected_token!("'INTEGER'", tokens[consumed])); + return Err(unexpected_token!("'INTEGER'", tokens[consumed]).into()); } consumed += 1; diff --git a/asn-compiler/src/parser/asn/types/base/utils.rs b/asn-compiler/src/parser/asn/types/base/utils.rs index a217ed6..73a5a11 100644 --- a/asn-compiler/src/parser/asn/types/base/utils.rs +++ b/asn-compiler/src/parser/asn/types/base/utils.rs @@ -1,7 +1,7 @@ //! Utility functions used by base types -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::expect_token; @@ -10,11 +10,11 @@ use crate::parser::asn::structs::types::base::NamedValue; // Parse a name(value). `(value)` component is optional pub(crate) fn parse_named_maybe_value( tokens: &[Token], -) -> Result<((String, Option), usize), Error> { +) -> Result<((String, Option), usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_value_reference)? 
{ - return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed])); + return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed]).into()); } let identifier = tokens[consumed].text.clone(); consumed += 1; @@ -33,13 +33,12 @@ pub(crate) fn parse_named_maybe_value( consumed += 1; NamedValue::ValueRef(valueref) } else { - return Err(unexpected_token!( - "'Reference' or 'Number'", - tokens[consumed] - )); + return Err( + unexpected_token!("'Reference' or 'Number'", tokens[consumed]).into(), + ); }; if !expect_token(&tokens[consumed..], Token::is_round_end)? { - return Err(unexpected_token!("')'", tokens[consumed])); + return Err(unexpected_token!("')'", tokens[consumed]).into()); } consumed += 1; @@ -54,13 +53,11 @@ pub(crate) fn parse_named_maybe_value( Ok(((identifier, named_value), consumed)) } -pub(crate) fn parse_named_values( - tokens: &[Token], -) -> Result<(Vec<(String, NamedValue)>, usize), Error> { +pub(crate) fn parse_named_values(tokens: &[Token]) -> Result<(Vec<(String, NamedValue)>, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? 
{ - return Err(unexpected_token!("'{", tokens[consumed])); + return Err(unexpected_token!("'{", tokens[consumed]).into()); } consumed += 1; let mut values = vec![]; @@ -69,7 +66,7 @@ pub(crate) fn parse_named_values( parse_named_maybe_value(&tokens[consumed..])?; if named_value.is_none() { - return Err(parse_error!("Name(Value) expected, Value missing!")); + return Err(parse_error!("Name(Value) expected, Value missing!").into()); } let named_value = named_value.unwrap(); @@ -82,10 +79,7 @@ pub(crate) fn parse_named_values( consumed += 1; break; } else { - return Err(unexpected_token!( - "'Reference' or 'Number'", - tokens[consumed] - )); + return Err(unexpected_token!("'Reference' or 'Number'", tokens[consumed]).into()); } } diff --git a/asn-compiler/src/parser/asn/types/constraints.rs b/asn-compiler/src/parser/asn/types/constraints.rs index 9d62492..8816e08 100644 --- a/asn-compiler/src/parser/asn/types/constraints.rs +++ b/asn-compiler/src/parser/asn/types/constraints.rs @@ -1,7 +1,7 @@ //! 
Parser for ASN.1 SubType Constraints -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::{ asn::values::parse_value, @@ -15,7 +15,7 @@ use crate::parser::asn::structs::types::constraints::*; use super::parse_type; -pub(super) fn parse_constraints(tokens: &[Token]) -> Result<(Vec, usize), Error> { +pub(super) fn parse_constraints(tokens: &[Token]) -> Result<(Vec, usize)> { let mut consumed = 0; let mut constraints = vec![]; @@ -26,7 +26,7 @@ pub(super) fn parse_constraints(tokens: &[Token]) -> Result<(Vec Ok((constraints, consumed)) } -pub(crate) fn parse_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> { +pub(crate) fn parse_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize)> { if let Ok(subtype) = parse_subtype_constraint(tokens) { Ok(subtype) } else if let Ok(table) = parse_table_constraint(tokens) { @@ -36,17 +36,15 @@ pub(crate) fn parse_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usiz } else if let Ok(with_components) = parse_with_components_constraint(tokens) { Ok(with_components) } else { - Err(parse_error!( - "Parsing of this constraint not yet supported!" - )) + Err(parse_error!("Parsing of this constraint not yet supported!").into()) } } -fn parse_table_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> { +fn parse_table_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_round_begin)? { - return Err(unexpected_token!("'('", tokens[0])); + return Err(unexpected_token!("'('", tokens[0]).into()); } consumed += 1; @@ -61,7 +59,7 @@ fn parse_table_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), E )? 
{ tokens[consumed + 1].text.clone() } else { - return Err(parse_error!("Failed to parse Simple Table Constraint.")); + return Err(parse_error!("Failed to parse Simple Table Constraint.").into()); }; consumed += 3; @@ -93,19 +91,19 @@ fn parse_table_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), E }; if !expect_token(&tokens[consumed..], Token::is_round_end)? { - return Err(unexpected_token!("')'", tokens[consumed])); + return Err(unexpected_token!("')'", tokens[consumed]).into()); } consumed += 1; Ok((constraint, consumed)) } -fn parse_subtype_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> { +fn parse_subtype_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize)> { let (element_set, element_set_consumed, _all_except) = parse_element_set(tokens)?; Ok((Asn1Constraint::Subtype(element_set), element_set_consumed)) } -fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Error> { +fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool)> { let mut consumed = 0; // It is allowed to have constraints on SEQUENCE OF and SIZE OF to be defined without the '(' @@ -119,7 +117,7 @@ fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Erro let all_except = if expect_keyword(&tokens[consumed..], "ALL")? { consumed += 1; if !expect_keyword(&tokens[consumed..], "EXCEPT")? 
{ - return Err(unexpected_token!("EXCEPT", tokens[consumed])); + return Err(unexpected_token!("EXCEPT", tokens[consumed]).into()); } consumed += 1; true @@ -131,7 +129,7 @@ fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Erro consumed += root_consumed; if root_elements.elements.is_empty() { - return Err(parse_error!("Empty Set in a Constraint!")); + return Err(parse_error!("Empty Set in a Constraint!").into()); } let mut additional_elements = None; @@ -140,7 +138,7 @@ fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Erro // Extension Marker if !expect_token(&tokens[consumed..], Token::is_extension)? { - return Err(unexpected_token!("'...'", tokens[consumed])); + return Err(unexpected_token!("'...'", tokens[consumed]).into()); } consumed += 1; @@ -157,13 +155,13 @@ fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Erro if round_begin { if !expect_token(&tokens[consumed..], Token::is_round_end)? { - return Err(unexpected_token!("')'", tokens[consumed])); + return Err(unexpected_token!("')'", tokens[consumed]).into()); } consumed += 1; } else { // For #47 if !expect_keyword(&tokens[consumed..], "OF")? 
{ - return Err(unexpected_token!("'OF'", tokens[consumed])); + return Err(unexpected_token!("'OF'", tokens[consumed]).into()); } } @@ -177,7 +175,7 @@ fn parse_element_set(tokens: &[Token]) -> Result<(ElementSet, usize, bool), Erro )) } -fn parse_union_set(tokens: &[Token]) -> Result<(UnionSet, usize), Error> { +fn parse_union_set(tokens: &[Token]) -> Result<(UnionSet, usize)> { let mut consumed = 0; let mut elements = vec![]; @@ -195,7 +193,9 @@ fn parse_union_set(tokens: &[Token]) -> Result<(UnionSet, usize), Error> { } Err(_) => { if expecting_iset { - return Err(parse_error!("Expecting Interesection Set in a Constraint.")); + return Err( + parse_error!("Expecting Interesection Set in a Constraint.").into() + ); } } } @@ -227,7 +227,7 @@ fn parse_union_set(tokens: &[Token]) -> Result<(UnionSet, usize), Error> { // // This avoid having to write a lot of boiler-plate code to check for `(` or `)` in a few // functions (typically inside `parse_intersection_set`.) -fn parse_intersection_set(tokens: &[Token]) -> Result<(Elements, usize), Error> { +fn parse_intersection_set(tokens: &[Token]) -> Result<(Elements, usize)> { let mut consumed = 0; // First try to Parse a Size @@ -292,7 +292,7 @@ fn parse_intersection_set(tokens: &[Token]) -> Result<(Elements, usize), Error> )); } - Err(parse_error!("parse_intersection_set: Not Implmented")) + Err(parse_error!("parse_intersection_set: Not Implmented").into()) } // Parses a Range Value, supports all possible formats. @@ -300,7 +300,7 @@ fn parse_intersection_set(tokens: &[Token]) -> Result<(Elements, usize), Error> // If parsing fails (tokens of not adequate length or tokens don't match) returns an Error. The // caller should do the error handling. Note: Typically caller will simply say Oh it didn't match, // let's try next. 
-fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Error> { +fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize)> { let mut consumed = 0; fn is_min_max_keyword(token: &Token) -> bool { @@ -315,10 +315,7 @@ fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Er if expect_token(&tokens[consumed..], is_min_max_keyword)? { (tokens[consumed].text.clone(), 1) } else { - return Err(unexpected_token!( - "'MIN', 'MAX' or 'Value'", - tokens[consumed] - )); + return Err(unexpected_token!("'MIN', 'MAX' or 'Value'", tokens[consumed]).into()); } } }; @@ -328,7 +325,7 @@ fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Er &tokens[consumed..], &[Token::is_less_than, Token::is_range_separator], )? { - return Err(unexpected_token!("'<' or '..'", tokens[consumed])); + return Err(unexpected_token!("'<' or '..'", tokens[consumed]).into()); } let lower_inclusive = if expect_token(&tokens[consumed..], Token::is_less_than)? { @@ -352,10 +349,7 @@ fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Er if expect_token(&tokens[consumed..], is_min_max_keyword)? { (tokens[consumed].text.clone(), 1) } else { - return Err(unexpected_token!( - "'MIN', 'MAX' or 'Value'", - tokens[consumed] - )); + return Err(unexpected_token!("'MIN', 'MAX' or 'Value'", tokens[consumed]).into()); } } }; @@ -372,28 +366,28 @@ fn parse_range_elements(tokens: &[Token]) -> Result<(SubtypeElements, usize), Er )) } -fn parse_contents_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> { +fn parse_contents_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_round_begin)? { - return Err(unexpected_token!("'('", tokens[consumed])); + return Err(unexpected_token!("'('", tokens[consumed]).into()); } consumed += 1; if !expect_keyword(&tokens[consumed..], "CONTAINING")? 
{ - return Err(unexpected_token!("'CONTAINING'", tokens[consumed])); + return Err(unexpected_token!("'CONTAINING'", tokens[consumed]).into()); } consumed += 1; let _containing = if expect_token(&tokens[consumed..], Token::is_type_reference)? { tokens[consumed].text.clone() } else { - return Err(unexpected_token!("'TYPE Reference'", tokens[consumed])); + return Err(unexpected_token!("'TYPE Reference'", tokens[consumed]).into()); }; consumed += 1; if !expect_token(&tokens[consumed..], Token::is_round_end)? { - return Err(unexpected_token!("')'", tokens[consumed])); + return Err(unexpected_token!("')'", tokens[consumed]).into()); } consumed += 1; @@ -407,15 +401,15 @@ fn parse_contents_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize) )) } -fn parse_with_components_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize), Error> { +fn parse_with_components_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_round_begin)? { - return Err(unexpected_token!("'('", tokens[consumed])); + return Err(unexpected_token!("'('", tokens[consumed]).into()); } consumed += 1; if !expect_keywords(&tokens[consumed..], &["WITH", "COMPONENTS"])? { - return Err(unexpected_token!("'WITH COMPONENTS'", tokens[consumed])); + return Err(unexpected_token!("'WITH COMPONENTS'", tokens[consumed]).into()); } consumed += 2; @@ -423,7 +417,7 @@ fn parse_with_components_constraint(tokens: &[Token]) -> Result<(Asn1Constraint, consumed += value_consumed; if !expect_token(&tokens[consumed..], Token::is_round_end)? 
{ - return Err(unexpected_token!("')'", tokens[consumed])); + return Err(unexpected_token!("')'", tokens[consumed]).into()); } consumed += 1; diff --git a/asn-compiler/src/parser/asn/types/constructed/choice.rs b/asn-compiler/src/parser/asn/types/constructed/choice.rs index 9ed7e44..361a1fa 100644 --- a/asn-compiler/src/parser/asn/types/constructed/choice.rs +++ b/asn-compiler/src/parser/asn/types/constructed/choice.rs @@ -1,7 +1,7 @@ //! Parsing related to "CHOICE" Type -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keyword, expect_token}; @@ -14,16 +14,16 @@ use super::utils::parse_component; // The current implementation supports a very simple choice definition, where, everything is dumped // into 'root' components. Additional extension components or version groups etc. are not supported // for now. May be supported later if needed. -pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usize), Error> { +pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usize)> { let mut consumed = 0; if !expect_keyword(&tokens[consumed..], "CHOICE")? { - return Err(unexpected_token!("'CHOICE'", tokens[consumed])); + return Err(unexpected_token!("'CHOICE'", tokens[consumed]).into()); } consumed += 1; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'CHOICE'", tokens[consumed])); + return Err(unexpected_token!("'CHOICE'", tokens[consumed]).into()); } consumed += 1; @@ -55,7 +55,8 @@ pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usi if extension_markers != 1 { return Err(parse_error!( "Addition Component can only be present after an Extension Marker!" 
- )); + ) + .into()); } else { let (addition_group, addition_group_consumed) = parse_choice_addition_group(&tokens[consumed..])?; @@ -72,9 +73,9 @@ pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usi if expect_token(&tokens[consumed..], Token::is_extension)? { extension_markers += 1; if extension_markers > 1 { - return Err(parse_error!( - "Only one Extension Marker is allowed in a 'CHOICE')" - )); + return Err( + parse_error!("Only one Extension Marker is allowed in a 'CHOICE')").into(), + ); } consumed += 1; if expect_token(&tokens[consumed..], Token::is_comma)? { @@ -95,7 +96,7 @@ pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usi loop_count += 1; if loop_count == 3 { - return Err(parse_error!("Parser stuck at Token {:?}", tokens[consumed])); + return Err(parse_error!("Parser stuck at Token {:?}", tokens[consumed]).into()); } } } @@ -122,11 +123,11 @@ pub(crate) fn parse_choice_type(tokens: &[Token]) -> Result<(Asn1TypeChoice, usi )) } -fn parse_choice_addition_group(tokens: &[Token]) -> Result<(ChoiceAdditionGroup, usize), Error> { +fn parse_choice_addition_group(tokens: &[Token]) -> Result<(ChoiceAdditionGroup, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_addition_groups_begin)? { - return Err(unexpected_token!("'[['", tokens[consumed])); + return Err(unexpected_token!("'[['", tokens[consumed]).into()); } consumed += 1; @@ -134,7 +135,7 @@ fn parse_choice_addition_group(tokens: &[Token]) -> Result<(ChoiceAdditionGroup, let version = tokens[consumed].text.clone(); consumed += 1; if !expect_token(&tokens[consumed..], Token::is_colon)? 
{ - return Err(unexpected_token!("':'", tokens[consumed])); + return Err(unexpected_token!("':'", tokens[consumed]).into()); } consumed += 1; Some(version) @@ -173,12 +174,12 @@ fn parse_choice_addition_group(tokens: &[Token]) -> Result<(ChoiceAdditionGroup, loop_count += 1; if loop_count == 3 { - return Err(parse_error!("Parser stuck at Token {:?}", tokens[consumed])); + return Err(parse_error!("Parser stuck at Token {:?}", tokens[consumed]).into()); } } } if components.is_empty() { - Err(parse_error!("Additional Components cannot be empty!")) + Err(parse_error!("Additional Components cannot be empty!").into()) } else { Ok(( ChoiceAdditionGroup { diff --git a/asn-compiler/src/parser/asn/types/constructed/seq.rs b/asn-compiler/src/parser/asn/types/constructed/seq.rs index c03d495..63164ab 100644 --- a/asn-compiler/src/parser/asn/types/constructed/seq.rs +++ b/asn-compiler/src/parser/asn/types/constructed/seq.rs @@ -1,7 +1,7 @@ //! Handling of Sequence and Sequence Of Type -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::utils::{expect_keyword, expect_one_of_keywords, expect_token}; @@ -19,9 +19,9 @@ use crate::parser::asn::{ use super::utils::parse_component; -pub(crate) fn parse_seq_or_seq_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> { +pub(crate) fn parse_seq_or_seq_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize)> { if !expect_one_of_keywords(tokens, &["SEQUENCE", "SET"])? { - return Err(unexpected_token!("'SEQUENCE'", tokens[0])); + return Err(unexpected_token!("'SEQUENCE'", tokens[0]).into()); } if expect_token(&tokens[1..], Token::is_curly_begin)? { @@ -31,14 +31,14 @@ pub(crate) fn parse_seq_or_seq_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind } } -fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> { +fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize)> { let mut consumed = 0; // Initial 'SEQUENCE' is consumed by the caller. 
We start with '{' consumed += 1; // For the SEQUENCE if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -74,7 +74,8 @@ fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> } else { return Err(parse_error!( "Addition groups can only be added between first and second extension markers!" - )); + ) + .into()); } } @@ -103,10 +104,7 @@ fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> loop_count += 1; if loop_count == 3 { - return Err(parse_error!( - "Parser Stuck at Token: {:?}", - tokens[consumed] - )); + return Err(parse_error!("Parser Stuck at Token: {:?}", tokens[consumed]).into()); } } } @@ -122,7 +120,7 @@ fn parse_sequence_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> )) } -fn parse_sequence_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> { +fn parse_sequence_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize)> { let mut consumed = 0; // Initial SEQUENCE is already consumed. @@ -135,7 +133,7 @@ fn parse_sequence_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Err consumed += size_consumed; if !expect_keyword(&tokens[consumed..], "OF")? 
{ - return Err(unexpected_token!("'OF'", tokens[consumed])); + return Err(unexpected_token!("'OF'", tokens[consumed]).into()); } consumed += 1; @@ -152,7 +150,7 @@ fn parse_sequence_of_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Err )) } -fn parse_seq_component(tokens: &[Token]) -> Result<(Option, usize), Error> { +fn parse_seq_component(tokens: &[Token]) -> Result<(Option, usize)> { let mut consumed = 0; let (component, component_consumed) = match parse_component(&tokens[consumed..]) { @@ -179,9 +177,7 @@ fn parse_seq_component(tokens: &[Token]) -> Result<(Option, usize) }; if default.is_some() && optional { - return Err(parse_error!( - "Both OPTIONAL and DEFAULT not allowed for a value!" - )); + return Err(parse_error!("Both OPTIONAL and DEFAULT not allowed for a value!").into()); } Ok(( @@ -197,11 +193,11 @@ fn parse_seq_component(tokens: &[Token]) -> Result<(Option, usize) } } -fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize), Error> { +fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_addition_groups_begin)? { - return Err(unexpected_token!("'[['", tokens[consumed])); + return Err(unexpected_token!("'[['", tokens[consumed]).into()); } consumed += 1; @@ -211,7 +207,7 @@ fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize let version = tokens[consumed].text.clone(); consumed += 1; if !expect_token(&tokens[consumed..], Token::is_colon)? 
{ - return Err(unexpected_token!("'[['", tokens[consumed])); + return Err(unexpected_token!("':'", tokens[consumed]).into()); } consumed += 1; Some(version) @@ -237,7 +233,7 @@ fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize } if components.is_empty() { - Err(parse_error!("Empty Addition Groups not allowed!")) + Err(parse_error!("Empty Addition Groups not allowed!").into()) } else if expect_token(&tokens[consumed..], Token::is_addition_groups_end)? { consumed += 1; Ok(( @@ -248,7 +244,7 @@ fn parse_seq_addition_group(tokens: &[Token]) -> Result<(SeqAdditionGroup, usize consumed, )) } else { - Err(unexpected_token!("']]'", tokens[consumed])) + Err(unexpected_token!("']]'", tokens[consumed]).into()) } } diff --git a/asn-compiler/src/parser/asn/types/constructed/utils.rs b/asn-compiler/src/parser/asn/types/constructed/utils.rs index 1fe5c78..b955c12 100644 --- a/asn-compiler/src/parser/asn/types/constructed/utils.rs +++ b/asn-compiler/src/parser/asn/types/constructed/utils.rs @@ -1,16 +1,16 @@ //! Utility functions for Parsing Constructed types -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::{asn::types::parse_type, utils::expect_token}; use crate::parser::asn::structs::types::constructed::Component; -pub(crate) fn parse_component(tokens: &[Token]) -> Result<(Component, usize), Error> { +pub(crate) fn parse_component(tokens: &[Token]) -> Result<(Component, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_value_reference)? 
{ - return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed])); + return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); consumed += 1; diff --git a/asn-compiler/src/parser/asn/types/int.rs b/asn-compiler/src/parser/asn/types/int.rs index 2fcd422..1cf17da 100644 --- a/asn-compiler/src/parser/asn/types/int.rs +++ b/asn-compiler/src/parser/asn/types/int.rs @@ -1,5 +1,5 @@ -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::asn::structs::types::{ ActualParam, Asn1BuiltinType, Asn1ConstructedType, Asn1Tag, Asn1TagClass, Asn1TagMode, @@ -17,7 +17,7 @@ use super::{ }; // Parses the `Type` Expansion in the ASN.1 Grammar. -pub(crate) fn parse_type(tokens: &[Token]) -> Result<(Asn1Type, usize), Error> { +pub(crate) fn parse_type(tokens: &[Token]) -> Result<(Asn1Type, usize)> { let mut consumed = 0; // Optional Tag @@ -32,10 +32,9 @@ pub(crate) fn parse_type(tokens: &[Token]) -> Result<(Asn1Type, usize), Error> { Token::is_object_class_reference, ], )? { - return Err(unexpected_token!( - "'Type Reference' or 'Builtin Type'", - tokens[consumed] - )); + return Err( + unexpected_token!("'Type Reference' or 'Builtin Type'", tokens[consumed]).into(), + ); } // Now: Parse The Type definition. @@ -87,7 +86,7 @@ pub(crate) fn parse_type(tokens: &[Token]) -> Result<(Asn1Type, usize), Error> { "OBJECT" => { log::trace!("Parsing `OBJECT IDENTIFIER` type."); if !expect_keywords(&tokens[consumed..], &["OBJECT", "IDENTIFIER"])? 
{ - return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed + 1])); + return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed + 1]).into()); } (Asn1TypeKind::Builtin(Asn1BuiltinType::ObjectIdentifier), 2) @@ -165,7 +164,7 @@ pub(crate) fn parse_type(tokens: &[Token]) -> Result<(Asn1Type, usize), Error> { )) } -fn parse_referenced_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Error> { +fn parse_referenced_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize)> { let mut consumed = 0; if let Ok(success) = expect_tokens( @@ -241,10 +240,10 @@ fn parse_referenced_type(tokens: &[Token]) -> Result<(Asn1TypeKind, usize), Erro } } -fn parse_actual_params(tokens: &[Token]) -> Result<(Vec, usize), Error> { +fn parse_actual_params(tokens: &[Token]) -> Result<(Vec, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -260,11 +259,11 @@ fn parse_actual_params(tokens: &[Token]) -> Result<(Vec, usize), Er consumed += 1; param } else { - return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed])); + return Err(unexpected_token!("'IDENTIFIER'", tokens[consumed]).into()); }; if !expect_token(&tokens[consumed..], Token::is_curly_end)? 
{ - return Err(unexpected_token!("'}'", tokens[consumed])); + return Err(unexpected_token!("'}'", tokens[consumed]).into()); }; consumed += 1; params.push(ActualParam::Set(param)); @@ -289,7 +288,7 @@ fn parse_actual_params(tokens: &[Token]) -> Result<(Vec, usize), Er Ok((params, consumed)) } -fn maybe_parse_tag(tokens: &[Token]) -> Result<(Option, usize), Error> { +fn maybe_parse_tag(tokens: &[Token]) -> Result<(Option, usize)> { match expect_token(tokens, Token::is_square_begin) { Ok(success) => { if success { @@ -305,10 +304,10 @@ fn maybe_parse_tag(tokens: &[Token]) -> Result<(Option, usize), Error> } } -fn parse_tag(tokens: &[Token]) -> Result<(Asn1Tag, usize), Error> { +fn parse_tag(tokens: &[Token]) -> Result<(Asn1Tag, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_square_begin)? { - return Err(unexpected_token!("[", tokens[consumed])); + return Err(unexpected_token!("[", tokens[consumed]).into()); } consumed += 1; @@ -323,14 +322,14 @@ fn parse_tag(tokens: &[Token]) -> Result<(Asn1Tag, usize), Error> { "UNIVERSAL" => Asn1TagClass::Universal, "APPLICATION" => Asn1TagClass::Application, "PRIVATE" => Asn1TagClass::Private, - _ => return Err(unexpected_token!("", token)), + _ => return Err(unexpected_token!("", token).into()), } } else { Asn1TagClass::ContextSpecific }; if !expect_token(&tokens[consumed..], Token::is_numeric)? { - return Err(unexpected_token!("", tokens[consumed])); + return Err(unexpected_token!("", tokens[consumed]).into()); } let number_token = &tokens[consumed]; let number = number_token @@ -340,7 +339,7 @@ fn parse_tag(tokens: &[Token]) -> Result<(Asn1Tag, usize), Error> { consumed += 1; if !expect_token(&tokens[consumed..], Token::is_square_end)? 
{ - return Err(unexpected_token!("]", tokens[consumed])); + return Err(unexpected_token!("]", tokens[consumed]).into()); } consumed += 1; @@ -351,7 +350,7 @@ fn parse_tag(tokens: &[Token]) -> Result<(Asn1Tag, usize), Error> { match typestr { "IMPLICIT" => Some(Asn1TagMode::Implicit), "EXPLICIT" => Some(Asn1TagMode::Explicit), - _ => return Err(unexpected_token!("", token)), + _ => return Err(unexpected_token!("", token).into()), } } else { None diff --git a/asn-compiler/src/parser/asn/types/ioc.rs b/asn-compiler/src/parser/asn/types/ioc.rs index 26d14e3..24ad366 100644 --- a/asn-compiler/src/parser/asn/types/ioc.rs +++ b/asn-compiler/src/parser/asn/types/ioc.rs @@ -1,8 +1,8 @@ //! Parsing of Information Object Class, Objects, Object Sets etc. use std::collections::HashMap; -use crate::error::Error; use crate::tokenizer::{tokenize, Token}; +use anyhow::Result; use crate::parser::{ asn::values::parse_value, @@ -16,16 +16,16 @@ use crate::parser::asn::structs::types::ioc::*; use super::parse_type; -pub(crate) fn parse_class(tokens: &[Token]) -> Result<(Asn1ObjectClass, usize), Error> { +pub(crate) fn parse_class(tokens: &[Token]) -> Result<(Asn1ObjectClass, usize)> { let mut consumed = 0; if !expect_keyword(&tokens[consumed..], "CLASS")? { - return Err(unexpected_token!("'CLASS'", tokens[consumed])); + return Err(unexpected_token!("'CLASS'", tokens[consumed]).into()); } consumed += 1; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? 
{ - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -43,7 +43,7 @@ pub(crate) fn parse_class(tokens: &[Token]) -> Result<(Asn1ObjectClass, usize), consumed += 1; break; } else { - return Err(unexpected_token!("',' or '}'", tokens[consumed])); + return Err(unexpected_token!("',' or '}'", tokens[consumed]).into()); } } @@ -53,23 +53,21 @@ pub(crate) fn parse_class(tokens: &[Token]) -> Result<(Asn1ObjectClass, usize), Ok((Asn1ObjectClass { fields }, consumed)) } -fn parse_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize), Error> { +fn parse_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize)> { if expect_token(tokens, Token::is_value_field_reference)? { parse_fixed_type_value_field_spec(tokens) } else if expect_token(tokens, Token::is_type_field_reference)? { parse_type_field_spec(tokens) } else { - Err(parse_error!("Unsupported Field Spec in CLASS Definition")) + Err(parse_error!("Unsupported Field Spec in CLASS Definition").into()) } } -fn parse_fixed_type_value_field_spec( - tokens: &[Token], -) -> Result<(ObjectClassFieldSpec, usize), Error> { +fn parse_fixed_type_value_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_value_field_reference)? { - return Err(unexpected_token!("'VALUE FIELD REF'", tokens[consumed])); + return Err(unexpected_token!("'VALUE FIELD REF'", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); @@ -122,9 +120,9 @@ fn parse_fixed_type_value_field_spec( } if default.is_some() && unique { - return Err(parse_error!( - "Both 'UNIQUE' and 'DEFAULT' cannot be specified together!" 
- )); + return Err( + parse_error!("Both 'UNIQUE' and 'DEFAULT' cannot be specified together!").into(), + ); } } @@ -144,11 +142,11 @@ fn parse_fixed_type_value_field_spec( )) } -fn parse_type_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize), Error> { +fn parse_type_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_type_field_reference)? { - return Err(unexpected_token!("'TYPE FIELD REF'", tokens[consumed])); + return Err(unexpected_token!("'TYPE FIELD REF'", tokens[consumed]).into()); } let id = tokens[consumed].text.clone(); @@ -203,7 +201,7 @@ fn parse_type_field_spec(tokens: &[Token]) -> Result<(ObjectClassFieldSpec, usiz fn parse_with_syntax_for_fields( tokens: &[Token], fields: &mut HashMap, -) -> Result { +) -> Result { let mut consumed = 0; if !expect_keywords(&tokens[consumed..], &["WITH", "SYNTAX"])? { return Ok(consumed); @@ -211,7 +209,7 @@ fn parse_with_syntax_for_fields( consumed += 2; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -228,7 +226,7 @@ fn parse_with_syntax_for_fields( let words = words.unwrap(); consumed += words.len(); if words.iter().any(Token::is_with_syntax_reserved_word) { - return Err(parse_error!("Found a WITH SYNTAX RESERVED Word!")); + return Err(parse_error!("Found a WITH SYNTAX RESERVED Word!").into()); } let words = words .iter() @@ -237,7 +235,7 @@ fn parse_with_syntax_for_fields( .join(" "); if !expect_token(&tokens[consumed..], Token::is_and_identifier)? 
{ - return Err(unexpected_token!("'CLASS field'", tokens[consumed])); + return Err(unexpected_token!("'CLASS field'", tokens[consumed]).into()); } let field = fields.get_mut(&tokens[consumed].text); @@ -245,7 +243,8 @@ fn parse_with_syntax_for_fields( return Err(parse_error!( "Field {} Not found in Class but found in WITH SYNTAX", tokens[consumed].text - )); + ) + .into()); } consumed += 1; @@ -270,7 +269,7 @@ fn parse_with_syntax_for_fields( return Err(parse_error!( "Optional Group for a field that is not Optional and No default : '{:#?}'", field - )); + ).into()); } *with_syntax = Some(words); } @@ -284,7 +283,7 @@ fn parse_with_syntax_for_fields( if expect_token(&tokens[consumed..], Token::is_square_end)? { if !in_optional_group { - return Err(unexpected_token!("',' or '}' or 'WORD'", tokens[consumed])); + return Err(unexpected_token!("',' or '}' or 'WORD'", tokens[consumed]).into()); } in_optional_group = false; consumed += 1; @@ -292,7 +291,7 @@ fn parse_with_syntax_for_fields( if expect_token(&tokens[consumed..], Token::is_curly_end)? { if in_optional_group { - return Err(parse_error!("Unmatched ']' for Optional Group",)); + return Err(parse_error!("Unmatched ']' for Optional Group",).into()); } consumed += 1; break; @@ -302,11 +301,11 @@ fn parse_with_syntax_for_fields( Ok(consumed) } -pub(crate) fn parse_object_set(tokens: &[Token]) -> Result<(ObjectSet, usize), Error> { +pub(crate) fn parse_object_set(tokens: &[Token]) -> Result<(ObjectSet, usize)> { let mut consumed = 0; if !expect_token(&tokens[consumed..], Token::is_curly_begin)? { - return Err(unexpected_token!("'{'", tokens[consumed])); + return Err(unexpected_token!("'{'", tokens[consumed]).into()); } consumed += 1; @@ -317,7 +316,7 @@ pub(crate) fn parse_object_set(tokens: &[Token]) -> Result<(ObjectSet, usize), E if expect_token(&tokens[consumed..], Token::is_extension)? 
{ extension_token_count += 1; if extension_token_count > 1 { - return Err(parse_error!("More than one extension markers found!")); + return Err(parse_error!("More than one extension markers found!").into()); } consumed += 1; if expect_token(&tokens[consumed..], Token::is_comma)? { @@ -384,7 +383,7 @@ pub(crate) fn parse_object_set(tokens: &[Token]) -> Result<(ObjectSet, usize), E pub(crate) fn parse_object_set_from_class( set: &mut Asn1ObjectSet, class: &Asn1ObjectClass, -) -> Result<(), Error> { +) -> Result<()> { let objectset = &mut set.objects; let mut root_elements = vec![]; @@ -435,7 +434,7 @@ pub(crate) fn parse_object_set_from_class( pub(crate) fn parse_object_from_class( value: &str, class: &Asn1ObjectClass, -) -> Result { +) -> Result { let reader = std::io::BufReader::new(std::io::Cursor::new(value)); let tokens = tokenize(reader)?; let mut consumed = 0; @@ -533,7 +532,7 @@ fn class_fieldspec_from_words<'c>( fn value_from_field_spec( spec: &ObjectClassFieldSpec, tokens: &[Token], -) -> Result<(Asn1ObjectFieldSpec, usize), Error> { +) -> Result<(Asn1ObjectFieldSpec, usize)> { match spec { ObjectClassFieldSpec::Type { .. } => { let (ty, ty_consumed) = parse_type(tokens)?; diff --git a/asn-compiler/src/parser/asn/values.rs b/asn-compiler/src/parser/asn/values.rs index 1563c5b..10fea54 100644 --- a/asn-compiler/src/parser/asn/values.rs +++ b/asn-compiler/src/parser/asn/values.rs @@ -1,14 +1,14 @@ //! Functions related to parsing of various Values -use crate::error::Error; use crate::tokenizer::{types::TokenType, Token}; +use anyhow::Result; use crate::parser::utils::{expect_one_of_tokens, parse_set_ish_value}; // Parses a given set of 'tokens' as a value and returns a string corresponding to one that would // be generated by concatenating those tokens. Note: It should be possible to regenerate, original // tokens, by 'tokenize'ing the string. 
-pub(crate) fn parse_value(tokens: &[Token]) -> Result<(String, usize), Error> { +pub(crate) fn parse_value(tokens: &[Token]) -> Result<(String, usize)> { if !expect_one_of_tokens( tokens, &[ @@ -24,7 +24,8 @@ pub(crate) fn parse_value(tokens: &[Token]) -> Result<(String, usize), Error> { Err(unexpected_token!( "'IDENTIFIER', 'NUMBER', 'Bit String', 'Hex String', 'String', '{', '('", tokens[0] - )) + ) + .into()) } else { let token = &tokens[0]; match token.r#type { diff --git a/asn-compiler/src/parser/int.rs b/asn-compiler/src/parser/int.rs index 93d9c26..fc236c4 100644 --- a/asn-compiler/src/parser/int.rs +++ b/asn-compiler/src/parser/int.rs @@ -1,7 +1,7 @@ //! 'parser' Inernal module, API functions from this module are exported. -use crate::error::Error; use crate::tokenizer::Token; +use anyhow::Result; use crate::parser::asn::structs::module::Asn1Module; @@ -12,7 +12,7 @@ use super::asn::parse_module; /// Token obtained from running [`tokenize`][`crate::tokenizer::tokenize] on an ANS file are parsed /// into an internal representation of [`Asn1Module`][`crate::structs::Asn1Module`]. Semantic /// errors during parsing the tokens are returned as appropriate variant of `Error`. -pub fn parse(tokens: &mut Vec) -> Result, Error> { +pub fn parse(tokens: &mut Vec) -> Result> { // Get rid of the comments, it complicates things tokens.retain(|x| !x.is_comment()); diff --git a/asn-compiler/src/parser/utils.rs b/asn-compiler/src/parser/utils.rs index 28ff0d8..eea80de 100644 --- a/asn-compiler/src/parser/utils.rs +++ b/asn-compiler/src/parser/utils.rs @@ -1,21 +1,21 @@ //! 
Utility functions for the parser module -use crate::error::Error; use crate::tokenizer::{types::TokenChecker, Token}; +use anyhow::Result; // Returns true if the first `token`'s text is same as the passed `keyword` -pub(super) fn expect_keyword(tokens: &[Token], keyword: &str) -> Result { +pub(super) fn expect_keyword(tokens: &[Token], keyword: &str) -> Result { if tokens.is_empty() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(tokens[0].is_given_keyword(keyword)) } } // Returns true if all the given keywords are matched. -pub(super) fn expect_keywords(tokens: &[Token], keywords: &[&str]) -> Result { +pub(super) fn expect_keywords(tokens: &[Token], keywords: &[&str]) -> Result { if tokens.len() < keywords.len() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(keywords .iter() @@ -25,39 +25,36 @@ pub(super) fn expect_keywords(tokens: &[Token], keywords: &[&str]) -> Result Result { +pub(super) fn expect_one_of_keywords(tokens: &[Token], keywords: &[&str]) -> Result { if tokens.is_empty() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(keywords.iter().any(|&k| expect_keyword(tokens, k).unwrap())) } } // Returns `checker(token)` for the first `token`. 
-pub(super) fn expect_token(tokens: &[Token], checker: TokenChecker) -> Result { +pub(super) fn expect_token(tokens: &[Token], checker: TokenChecker) -> Result { if tokens.is_empty() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(checker(&tokens[0])) } } // Returns if any of the `checker(token)` returns true (short-circuiting) -pub(super) fn expect_one_of_tokens( - tokens: &[Token], - checkers: &[TokenChecker], -) -> Result { +pub(super) fn expect_one_of_tokens(tokens: &[Token], checkers: &[TokenChecker]) -> Result { if tokens.is_empty() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(checkers.iter().any(|&c| expect_token(tokens, c).unwrap())) } } // Returns success if 'all' the 'tokens' return 'true' for `checker(token)`. -pub(super) fn expect_tokens(tokens: &[Token], checkers: &[&[TokenChecker]]) -> Result { +pub(super) fn expect_tokens(tokens: &[Token], checkers: &[&[TokenChecker]]) -> Result { if tokens.len() < checkers.len() { - Err(unexpected_end!()) + Err(unexpected_end!().into()) } else { Ok(checkers .iter() @@ -69,14 +66,14 @@ pub(super) fn expect_tokens(tokens: &[Token], checkers: &[&[TokenChecker]]) -> R // Consumes every thing between balanced "{" and "}" or "(" and ")". // // Unbalanced paranthesis is an error. -pub(super) fn parse_set_ish_value(tokens: &[Token]) -> Result<(String, usize), Error> { +pub(super) fn parse_set_ish_value(tokens: &[Token]) -> Result<(String, usize)> { let (begin_token, end_token): (TokenChecker, TokenChecker) = if expect_token(tokens, Token::is_curly_begin)? { (Token::is_curly_begin, Token::is_curly_end) } else if expect_token(tokens, Token::is_round_begin)? 
{ (Token::is_round_begin, Token::is_round_end) } else { - return Err(unexpected_token!("'(' or '}'", tokens[0])); + return Err(unexpected_token!("'{' or '('", tokens[0]).into()); }; let mut level = 0; diff --git a/asn-compiler/src/resolver/asn/defs.rs b/asn-compiler/src/resolver/asn/defs.rs index 2b43290..0389e47 100644 --- a/asn-compiler/src/resolver/asn/defs.rs +++ b/asn-compiler/src/resolver/asn/defs.rs @@ -1,6 +1,6 @@ //! 'defs' Resolver -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::defs::{ Asn1AssignmentKind, Asn1Definition, Asn1ObjectAssignment, Asn1ObjectSetAssignment, @@ -18,7 +18,7 @@ use super::values::resolve_value; pub(crate) fn resolve_definition( definition: &Asn1Definition, resolver: &mut Resolver, -) -> Result { +) -> Result { match definition.kind { Asn1AssignmentKind::Value(ref v) => resolve_value_definition(v, resolver), Asn1AssignmentKind::Type(ref t) => resolve_type_definition(t, resolver), @@ -26,17 +26,14 @@ pub(crate) fn resolve_definition( resolve_object_set_definition(objset, resolver) } Asn1AssignmentKind::Object(ref object) => resolve_object_definition(object, resolver), - _ => Err(resolve_error!( - "asn_resolve_def: Not Implemented! {:#?}", - definition - )), + _ => Err(resolve_error!("asn_resolve_def: Not Implemented! 
{:#?}", definition).into()), } } pub(crate) fn resolve_type_definition( def: &Asn1TypeAssignment, resolver: &mut Resolver, -) -> Result { +) -> Result { let typeref = resolve_type(&def.typeref, resolver)?; Ok(Asn1ResolvedDefinition::Type(typeref)) } @@ -44,7 +41,7 @@ pub(crate) fn resolve_type_definition( fn resolve_value_definition( value: &Asn1ValueAssignment, resolver: &mut Resolver, -) -> Result { +) -> Result { let typeref = resolve_type(&value.typeref, resolver)?; let value = resolve_value(&value.value, &typeref, resolver)?; Ok(Asn1ResolvedDefinition::Value(value)) @@ -53,7 +50,7 @@ fn resolve_value_definition( fn resolve_object_set_definition( objectset: &Asn1ObjectSetAssignment, resolver: &mut Resolver, -) -> Result { +) -> Result { let objectset = resolve_object_set(&objectset.set, &objectset.id, resolver)?; Ok(Asn1ResolvedDefinition::ObjectSet(objectset)) } @@ -61,7 +58,7 @@ fn resolve_object_set_definition( fn resolve_object_definition( object: &Asn1ObjectAssignment, resolver: &mut Resolver, -) -> Result { +) -> Result { let object = resolve_object(&object.id, &object.object.value, resolver)?; Ok(Asn1ResolvedDefinition::Object(object)) } diff --git a/asn-compiler/src/resolver/asn/types/base/bitstring.rs b/asn-compiler/src/resolver/asn/types/base/bitstring.rs index cb0f510..486db94 100644 --- a/asn-compiler/src/resolver/asn/types/base/bitstring.rs +++ b/asn-compiler/src/resolver/asn/types/base/bitstring.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{ base::{Asn1TypeBitString, NamedValue}, @@ -16,7 +16,7 @@ impl Asn1ResolvedBitString { ty: &Asn1Type, b: &Asn1TypeBitString, resolver: &Resolver, - ) -> Result { + ) -> Result { let mut base = Asn1ResolvedBitString::default(); if ty.constraints.is_some() { diff --git a/asn-compiler/src/resolver/asn/types/base/charstring.rs b/asn-compiler/src/resolver/asn/types/base/charstring.rs index 627c12b..62569c1 100644 --- 
a/asn-compiler/src/resolver/asn/types/base/charstring.rs +++ b/asn-compiler/src/resolver/asn/types/base/charstring.rs @@ -1,7 +1,7 @@ //! Functionality for handling Resolved ASN.1 CharacterString Types. -use crate::error::Error; use crate::resolver::Resolver; +use anyhow::Result; use crate::parser::asn::structs::types::{Asn1BuiltinType, Asn1Type, Asn1TypeKind}; use crate::resolver::asn::structs::types::base::Asn1ResolvedCharacterString; @@ -10,7 +10,7 @@ impl Asn1ResolvedCharacterString { pub(crate) fn resolve_character_string( ty: &Asn1Type, resolver: &Resolver, - ) -> Result { + ) -> Result { let mut base = Asn1ResolvedCharacterString::default(); if let Asn1TypeKind::Builtin(Asn1BuiltinType::CharacterString { str_type }) = &ty.kind { diff --git a/asn-compiler/src/resolver/asn/types/base/enumerated.rs b/asn-compiler/src/resolver/asn/types/base/enumerated.rs index 9be63f4..e2c3041 100644 --- a/asn-compiler/src/resolver/asn/types/base/enumerated.rs +++ b/asn-compiler/src/resolver/asn/types/base/enumerated.rs @@ -3,7 +3,7 @@ use std::collections::BTreeSet; use std::convert::TryInto; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{ base::{Asn1TypeEnumerated, NamedValue}, @@ -17,7 +17,7 @@ impl Asn1ResolvedEnumerated { _ty: &Asn1Type, e: &Asn1TypeEnumerated, _resolver: &Resolver, - ) -> Result { + ) -> Result { let mut base = Asn1ResolvedEnumerated { extensible: e.ext_marker_index.is_some(), ..Default::default() diff --git a/asn-compiler/src/resolver/asn/types/base/integer.rs b/asn-compiler/src/resolver/asn/types/base/integer.rs index ed4235a..a6eb69f 100644 --- a/asn-compiler/src/resolver/asn/types/base/integer.rs +++ b/asn-compiler/src/resolver/asn/types/base/integer.rs @@ -1,6 +1,6 @@ //! 
Functionality for handling Resolved ASN.1 INTEGER Types -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{base::Asn1TypeInteger, Asn1Type}; use crate::resolver::asn::structs::types::base::Asn1ResolvedInteger; @@ -12,7 +12,7 @@ impl Asn1ResolvedInteger { ty: &Asn1Type, _i: &Asn1TypeInteger, resolver: &mut Resolver, - ) -> Result { + ) -> Result { let mut base = Asn1ResolvedInteger::default(); if ty.constraints.is_none() { diff --git a/asn-compiler/src/resolver/asn/types/base/mod.rs b/asn-compiler/src/resolver/asn/types/base/mod.rs index 5b359e4..c5a19d4 100644 --- a/asn-compiler/src/resolver/asn/types/base/mod.rs +++ b/asn-compiler/src/resolver/asn/types/base/mod.rs @@ -10,7 +10,7 @@ mod octetstring; mod charstring; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{Asn1BuiltinType, Asn1Type, Asn1TypeKind}; @@ -26,7 +26,7 @@ use crate::resolver::{ pub(crate) fn resolve_base_type( ty: &Asn1Type, resolver: &mut Resolver, -) -> Result { +) -> Result { if let Asn1TypeKind::Builtin(ref kind) = ty.kind { match kind { Asn1BuiltinType::Integer(ref i) => Ok(ResolvedBaseType::Integer( @@ -50,12 +50,9 @@ pub(crate) fn resolve_base_type( )), Asn1BuiltinType::Null => Ok(ResolvedBaseType::Null(Asn1ResolvedNull)), Asn1BuiltinType::Real => Ok(ResolvedBaseType::Real(Asn1ResolvedReal)), - _ => Err(resolve_error!( - "parse_base_type: Not Implemented! {:#?}", - ty - )), + _ => Err(resolve_error!("parse_base_type: Not Implemented! {:#?}", ty).into()), } } else { - Err(resolve_error!("Expected Base Type. Found '{:#?}'", ty)) + Err(resolve_error!("Expected Base Type. Found '{:#?}'", ty).into()) } } diff --git a/asn-compiler/src/resolver/asn/types/base/octetstring.rs b/asn-compiler/src/resolver/asn/types/base/octetstring.rs index 0ed52b6..d5f3a39 100644 --- a/asn-compiler/src/resolver/asn/types/base/octetstring.rs +++ b/asn-compiler/src/resolver/asn/types/base/octetstring.rs @@ -1,7 +1,7 @@ //! 
Functionality for handling Resolved ASN.1 OCTET STRING -use crate::error::Error; use crate::resolver::Resolver; +use anyhow::Result; use crate::parser::asn::structs::types::Asn1Type; use crate::resolver::asn::structs::types::base::Asn1ResolvedOctetString; @@ -10,7 +10,7 @@ impl Asn1ResolvedOctetString { pub(crate) fn resolve_octet_string( ty: &Asn1Type, resolver: &Resolver, - ) -> Result { + ) -> Result { let mut base = Asn1ResolvedOctetString::default(); if ty.constraints.is_some() { diff --git a/asn-compiler/src/resolver/asn/types/constraints.rs b/asn-compiler/src/resolver/asn/types/constraints.rs index 0d15df8..c5a3b5b 100644 --- a/asn-compiler/src/resolver/asn/types/constraints.rs +++ b/asn-compiler/src/resolver/asn/types/constraints.rs @@ -1,7 +1,7 @@ //! Constraint Resolution Implementation use std::ops::Range; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::constraints::*; @@ -17,26 +17,24 @@ impl Asn1Constraint { /// /// This function should be called for Single Value Subtype Constraints or Simple Table /// constraints only. - pub(crate) fn get_single_string_value(&self) -> Result { + pub(crate) fn get_single_string_value(&self) -> Result { if !self.is_subtype() || !self.is_single_value() || !self.is_simple_table_constraint() { Err(constraint_error!( "Require a Single Value Subtype or Table Constraint. 
Found '{:#?}'.", self - )) + ) + .into()) } else if let Asn1Constraint::Subtype(ref e) = self { e.get_single_string_value() } else if let Asn1Constraint::Table(ref t) = self { t.get_single_string_value() } else { - Err(constraint_error!( - "Single Value not supported for '{:#?}'", - self - )) + Err(constraint_error!("Single Value not supported for '{:#?}'", self).into()) } } /// Returns the 'Set Reference' trimming the leading '{' and trailing '}' - pub(crate) fn get_set_reference(&self) -> Result { + pub(crate) fn get_set_reference(&self) -> Result { match self.get_single_string_value() { Ok(v) => Ok(v .trim_matches(|c| matches!(c, '{' | '}')) @@ -52,7 +50,8 @@ impl Asn1Constraint { Err(resolve_error!( "Constraint is not a component Relation Constraint! '{:#?}'", self - )) + ) + .into()) } } } @@ -84,13 +83,14 @@ impl Asn1Constraint { pub(crate) fn get_integer_valueset( &self, resolver: &Resolver, - ) -> Result { + ) -> Result { match self { Self::Subtype(ref e) => e.get_integer_valueset(resolver), _ => Err(constraint_error!( "Integer Values not supported for the constraint '{:#?}'", self - )), + ) + .into()), } } // Returns whether this constraint is a Subtype Constraint @@ -125,10 +125,7 @@ impl Asn1Constraint { } } - pub(crate) fn get_size_valueset( - &self, - resolver: &Resolver, - ) -> Result { + pub(crate) fn get_size_valueset(&self, resolver: &Resolver) -> Result { if let Self::Subtype(ref e) = self { let iset = e.get_inner_elements(); if iset.len() == 1 { @@ -139,21 +136,22 @@ impl Asn1Constraint { } _ => Err(constraint_error!( "The Constraint for the Type is not a Size Constraint." - )), + ) + .into()), }, _ => Err(constraint_error!( "The Constraint for the type is not a size Constraint." - )), + ) + .into()), } } else { - Err(constraint_error!( - "The Constraint for the type is not a size Constraint." 
- )) + Err( + constraint_error!("The Constraint for the type is not a size Constraint.") + .into(), + ) } } else { - Err(constraint_error!( - "The Constraint for the type is not a size Constraint." - )) + Err(constraint_error!("The Constraint for the type is not a size Constraint.").into()) } } } @@ -163,7 +161,7 @@ impl ElementSet { &self.root_elements.elements } - fn get_single_string_value(&self) -> Result { + fn get_single_string_value(&self) -> Result { let inner_elements = self.get_inner_elements(); if inner_elements.len() == 1 { let e = &inner_elements[0]; @@ -173,11 +171,12 @@ impl ElementSet { Err(constraint_error!( "The Length of the element set is {}, while expected length is 1.", inner_elements.len() - )) + ) + .into()) } } - fn get_integer_valueset(&self, resolver: &Resolver) -> Result { + fn get_integer_valueset(&self, resolver: &Resolver) -> Result { let mut root_values = ConstraintValues::new(); for element in self.get_inner_elements() { let element_values = element.get_integer_valueset(resolver)?; @@ -219,25 +218,24 @@ impl ElementSet { } impl IntersectionSet { - fn get_single_string_value(&self) -> Result { + fn get_single_string_value(&self) -> Result { if self.elements.len() == 1 { let element = &self.elements[0]; if let Elements::Subtype(SubtypeElements::SingleValue { value }) = element { Ok(value.clone()) } else { - Err(constraint_error!( - "The Element is not a SingleValue Subtype Element!" 
- )) + Err(constraint_error!("The Element is not a SingleValue Subtype Element!").into()) } } else { Err(constraint_error!( "The Length of the element set is {}, while expected length is 1.", self.elements.len() - )) + ) + .into()) } } - fn get_integer_valueset(&self, resolver: &Resolver) -> Result { + fn get_integer_valueset(&self, resolver: &Resolver) -> Result { let mut value_set = ConstraintValues::new(); for element in &self.elements { let element_set = element.get_integer_valueset(resolver)?; @@ -270,22 +268,22 @@ impl IntersectionSet { } impl TableConstraint { - fn get_single_string_value(&self) -> Result { + fn get_single_string_value(&self) -> Result { if let TableConstraint::Simple(ObjectSet::DefinedObjectSet(ref s)) = self { Ok(s.clone()) } else { - Err(constraint_error!("Shouldn't Reach here!")) + Err(constraint_error!("Shouldn't Reach here!").into()) } } } impl Elements { - fn get_integer_valueset(&self, resolver: &Resolver) -> Result { + fn get_integer_valueset(&self, resolver: &Resolver) -> Result { match self { Self::Subtype(ref s) => s.get_integer_valueset(resolver), - Self::Set(ref _s) => Err(constraint_error!( - "get_integer_valueset: Set Variant: Not Supported!" 
- )), + Self::Set(ref _s) => { + Err(constraint_error!("get_integer_valueset: Set Variant: Not Supported!").into()) + } } } @@ -298,7 +296,7 @@ impl Elements { } impl SubtypeElements { - fn get_integer_valueset(&self, resolver: &Resolver) -> Result { + fn get_integer_valueset(&self, resolver: &Resolver) -> Result { let mut value_set = ConstraintValues::new(); match self { Self::SingleValue { value } => { @@ -337,16 +335,13 @@ impl SubtypeElements { } } _ => { - return Err(constraint_error!( - "Unexpected Constraint Type '{:#?}'", - self - )); + return Err(constraint_error!("Unexpected Constraint Type '{:#?}'", self).into()); } } Ok(value_set) } - fn parse_or_resolve_value(value: &str, resolver: &Resolver) -> Result { + fn parse_or_resolve_value(value: &str, resolver: &Resolver) -> Result { // FIXME : do the 'resolve part' let parsed = value.parse::(); match parsed { @@ -354,10 +349,9 @@ impl SubtypeElements { Err(_) => { let resolved = resolver.resolved_defs.get(value); match resolved { - None => Err(constraint_error!( - "Unable To Resolve '{}'. Not Found!", - value - )), + None => { + Err(constraint_error!("Unable To Resolve '{}'. 
Not Found!", value).into()) + } Some(res) => { if let Asn1ResolvedDefinition::Value(Asn1ResolvedValue::Base( ResolvedBaseValue::Integer(ref i), @@ -365,10 +359,10 @@ impl SubtypeElements { { Ok(i.value) } else { - Err(constraint_error!( - "Resolved Value {:#?} of different type!", - res - )) + Err( + constraint_error!("Resolved Value {:#?} of different type!", res) + .into(), + ) } } } diff --git a/asn-compiler/src/resolver/asn/types/constructed.rs b/asn-compiler/src/resolver/asn/types/constructed.rs index cee3d33..5fbdba1 100644 --- a/asn-compiler/src/resolver/asn/types/constructed.rs +++ b/asn-compiler/src/resolver/asn/types/constructed.rs @@ -1,6 +1,6 @@ use std::collections::BTreeMap; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{ constructed::{ @@ -30,7 +30,7 @@ use crate::resolver::{ pub(crate) fn resolve_constructed_type( ty: &Asn1Type, resolver: &mut Resolver, -) -> Result { +) -> Result { if let Asn1TypeKind::Constructed(ref kind) = ty.kind { match kind { Asn1ConstructedType::Choice(ref c) => resolve_choice_type(c, resolver), @@ -38,21 +38,18 @@ pub(crate) fn resolve_constructed_type( Asn1ConstructedType::SequenceOf(ref so) => resolve_sequence_of_type(so, resolver), _ => { eprintln!("ConstructedType: {:#?}", ty); - Err(resolve_error!("resolve_constructed_Type: Not Implemented!")) + Err(resolve_error!("resolve_constructed_Type: Not Implemented!").into()) } } } else { - Err(resolve_error!( - "Expected Constructed Type. Found '{:#?}'", - ty - )) + Err(resolve_error!("Expected Constructed Type. 
Found '{:#?}'", ty).into()) } } fn resolve_choice_type( choice: &Asn1TypeChoice, resolver: &mut Resolver, -) -> Result { +) -> Result { let mut root_components = vec![]; for c in &choice.root_components { let ty = resolve_type(&c.ty, resolver)?; @@ -92,7 +89,7 @@ fn resolve_choice_type( fn resolve_sequence_type( sequence: &Asn1TypeSequence, resolver: &mut Resolver, -) -> Result { +) -> Result { let mut components = vec![]; eprintln!("sequence: {:#?}", sequence); @@ -185,7 +182,7 @@ fn resolve_sequence_type( fn resolve_sequence_of_type( sequence_of: &Asn1TypeSequenceOf, resolver: &mut Resolver, -) -> Result { +) -> Result { let resolved = resolve_type(&sequence_of.ty, resolver)?; let size_values = if sequence_of.size.is_some() { let size = sequence_of.size.as_ref().unwrap(); @@ -206,7 +203,7 @@ fn resolve_sequence_of_type( fn resolve_sequence_classfield_components( seq: &Asn1TypeSequence, resolver: &mut Resolver, -) -> Result { +) -> Result { let mut all_components = vec![]; all_components.extend(seq.root_components.clone()); @@ -219,33 +216,35 @@ fn resolve_sequence_classfield_components( if all_components.is_empty() { // It's an Error to try to resolve Empty components with Class Field Ref - return Err(resolve_error!("Expected Sequence with at-least one ClassField Reference Component!. Found Empty Sequences")); + return Err(resolve_error!("Expected Sequence with at-least one ClassField Reference Component!. Found Empty Sequences").into()); } // Get the Object Set first It's the Same Object Set for all components. So if we get the first // one that's good enough! - let ty = match all_components.get(0) { + let ty = match all_components.first() { Some(c) => &c.ty, None => { return Err(resolve_error!( "all_components is empty. Expected at-least one component!" 
- )); + ) + .into()); } }; eprintln!("all_components: {:#?}", all_components); let constraint = match ty.constraints.as_ref() { - Some(c) => match c.get(0) { + Some(c) => match c.first() { Some(c) => c, None => { return Err(resolve_error!( "constraints is empty. Expected at-least one constraint!" - )); + ) + .into()); } }, None => { return Err(resolve_error!( "Expected Sequence with at-least one ClassField Reference Component!. Found Empty Sequences" - )); + ).into()); } }; @@ -253,10 +252,7 @@ fn resolve_sequence_classfield_components( let objects = resolver.resolved_defs.get(&set_reference); if objects.is_none() { - return Err(resolve_error!( - "Object Set '{}' not resolved yet!", - set_reference - )); + return Err(resolve_error!("Object Set '{}' not resolved yet!", set_reference).into()); } let objects = objects.unwrap().clone(); @@ -273,10 +269,7 @@ fn resolve_sequence_classfield_components( }, )) } else { - Err(resolve_error!( - "Object Set '{}' not resolved yet!", - set_reference - )) + Err(resolve_error!("Object Set '{}' not resolved yet!", set_reference).into()) } } @@ -285,7 +278,7 @@ fn resolve_seq_components_for_objects( set_reference: &str, objects: &ResolvedObjectSet, resolver: &mut Resolver, -) -> Result, Error> { +) -> Result> { if objects.elements.is_empty() { return Ok(vec![]); } @@ -360,7 +353,7 @@ fn resolve_seq_components_for_objects( fn get_seq_component_for_object_set( fieldref: &str, objects: &ResolvedObjectSet, -) -> Result { +) -> Result { let mut types = BTreeMap::new(); for (key, object) in &objects.lookup_table { if let ResolvedObjectSetElement::Object(ref o) = object { diff --git a/asn-compiler/src/resolver/asn/types/int.rs b/asn-compiler/src/resolver/asn/types/int.rs index 5195053..77792c8 100644 --- a/asn-compiler/src/resolver/asn/types/int.rs +++ b/asn-compiler/src/resolver/asn/types/int.rs @@ -1,4 +1,4 @@ -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::{ Asn1BuiltinType, Asn1Type, Asn1TypeKind, 
Asn1TypeReference, @@ -25,7 +25,7 @@ impl Asn1Type { pub(crate) fn get_integer_valueset_from_constraint( &self, resolver: &Resolver, - ) -> Result { + ) -> Result { let kind = &self.kind; match kind { Asn1TypeKind::Builtin(Asn1BuiltinType::Integer(..)) => { @@ -33,20 +33,18 @@ impl Asn1Type { constraint.get_integer_valueset(resolver) } Asn1TypeKind::Reference(Asn1TypeReference::Reference(ref _r)) => { - Err(constraint_error!("Not Implemented!")) + Err(constraint_error!("Not Implemented!").into()) } _ => Err(constraint_error!( "The Type '{:#?}' is not of a BuiltIn Or a Referenced Kind!", self, - )), + ) + .into()), } } } -pub(crate) fn resolve_type( - ty: &Asn1Type, - resolver: &mut Resolver, -) -> Result { +pub(crate) fn resolve_type(ty: &Asn1Type, resolver: &mut Resolver) -> Result { match ty.kind { Asn1TypeKind::Builtin(..) => Ok(Asn1ResolvedType::Base(resolve_base_type(ty, resolver)?)), Asn1TypeKind::Constructed(..) => resolve_constructed_type(ty, resolver), @@ -55,10 +53,7 @@ pub(crate) fn resolve_type( } } -fn resolve_reference_type( - ty: &Asn1Type, - resolver: &mut Resolver, -) -> Result { +fn resolve_reference_type(ty: &Asn1Type, resolver: &mut Resolver) -> Result { if let Asn1TypeKind::Reference(ref reference) = ty.kind { match reference { Asn1TypeReference::Reference(ref r) => { @@ -68,15 +63,14 @@ fn resolve_reference_type( Asn1ResolvedDefinition::Type(..) 
=> { Ok(Asn1ResolvedType::Reference(r.to_string())) } - _ => Err(resolve_error!( - "Expected a Resolved Type, found {:#?}", - resolved - )), + _ => Err( + resolve_error!("Expected a Resolved Type, found {:#?}", resolved) + .into(), + ), }, - None => Err(resolve_error!( - "Referenced Type for '{}' Not resolved yet!", - r - )), + None => { + Err(resolve_error!("Referenced Type for '{}' Not resolved yet!", r).into()) + } } } Asn1TypeReference::Parameterized { typeref, params } => { @@ -89,14 +83,15 @@ fn resolve_reference_type( None => Err(resolve_error!( "Parameterized Type for '{:#?}' Not found!", reference - )), + ) + .into()), } } - Asn1TypeReference::ClassField { .. } => Err(resolve_error!( - "Supported Inside Constructed Sequence Type." - )), + Asn1TypeReference::ClassField { .. } => { + Err(resolve_error!("Supported Inside Constructed Sequence Type.").into()) + } } } else { - Err(resolve_error!("Expected Reference Type. Found '{:#?}'", ty)) + Err(resolve_error!("Expected Reference Type. Found '{:#?}'", ty).into()) } } diff --git a/asn-compiler/src/resolver/asn/types/ioc.rs b/asn-compiler/src/resolver/asn/types/ioc.rs index 8ba23c6..7123f20 100644 --- a/asn-compiler/src/resolver/asn/types/ioc.rs +++ b/asn-compiler/src/resolver/asn/types/ioc.rs @@ -1,7 +1,7 @@ //! Handling of Information Object Classes, ObjectSets, Objects etc use std::collections::HashMap; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::types::ioc::{ Asn1ObjectFieldSpec, Asn1ObjectSet, Asn1ObjectValue, ObjectSetElement, @@ -27,13 +27,14 @@ pub(crate) fn resolve_object_set( objectset: &Asn1ObjectSet, name: &str, resolver: &mut Resolver, -) -> Result { +) -> Result { let class = resolver.classes.get(&objectset.class); match class { None => Err(resolve_error!( "Class '{}' definition not found to resolve object set!", objectset.class - )), + ) + .into()), Some(cls) => { // if the class has UNIQUE field, create a Map for the values from that field to the // Object. 
Note: it's indeed possible to have more than one unique field, but we are not @@ -58,10 +59,10 @@ pub(crate) fn resolve_object_set( match resolved { None => { return Err(resolve_error!( - "Unable to find the Referencing '{}' Object Set while resolving {:#?}", - r, - objectset - )); + "Unable to find the Referencing '{}' Object Set while resolving {:#?}", + r, + objectset + ).into()); } Some(res) => { if let Asn1ResolvedDefinition::ObjectSet(ref o) = res { @@ -72,7 +73,8 @@ pub(crate) fn resolve_object_set( return Err(resolve_error!( "Resolved '{}' is not an Object Set!", r, - )); + ) + .into()); } } } @@ -82,10 +84,10 @@ pub(crate) fn resolve_object_set( match resolved { None => { return Err(resolve_error!( - "Unable to find the Referencing '{}' Object Set while resolving {:#?}", - r, - objectset - )); + "Unable to find the Referencing '{}' Object Set while resolving {:#?}", + r, + objectset + ).into()); } Some(res) => { if let Asn1ResolvedDefinition::Object(ref o) = res { @@ -129,7 +131,8 @@ pub(crate) fn resolve_object_set( return Err(resolve_error!( "Resolved '{}' is not an Object!", r, - )); + ) + .into()); } } } @@ -184,7 +187,7 @@ pub(crate) fn resolve_object( object_name: &str, object_value: &Asn1ObjectValue, resolver: &mut Resolver, -) -> Result { +) -> Result { let mut resolved_fields = HashMap::new(); if let Asn1ObjectValue::Asn1ObjectFromClass { fields } = object_value { for (k, field) in fields { @@ -222,9 +225,6 @@ pub(crate) fn resolve_object( fields: resolved_fields, }) } else { - Err(resolve_error!( - "Unsupported Variant while Resolving {:#?}", - object_value - )) + Err(resolve_error!("Unsupported Variant while Resolving {:#?}", object_value).into()) } } diff --git a/asn-compiler/src/resolver/asn/values.rs b/asn-compiler/src/resolver/asn/values.rs index a2ce17f..a4d83ee 100644 --- a/asn-compiler/src/resolver/asn/values.rs +++ b/asn-compiler/src/resolver/asn/values.rs @@ -1,6 +1,6 @@ //! 
Resolved 'values' implementation -use crate::error::Error; +use anyhow::Result; use crate::resolver::{ asn::structs::{ @@ -18,7 +18,7 @@ pub(crate) fn resolve_value( value: &str, typeref: &Asn1ResolvedType, resolver: &mut Resolver, -) -> Result { +) -> Result { let referenced_value = resolver.resolved_defs.get(value); match referenced_value { None => { @@ -32,7 +32,8 @@ pub(crate) fn resolve_value( "resolve_value: Failed to parse Integer value: {} from {:?}", e, value - )) + ) + .into()) } }; Ok(Asn1ResolvedValue::Base(ResolvedBaseValue::Integer( @@ -63,7 +64,8 @@ pub(crate) fn resolve_value( "resolve_value: Not Supported Yet! value: {:#?}: {:#?}", value, typeref - )), + ) + .into()), }, Asn1ResolvedType::Reference(ref r) => { let typedef = resolver.resolved_defs.get(r); @@ -71,7 +73,8 @@ pub(crate) fn resolve_value( None => Err(resolve_error!( "Definition for Reference '{}' not found or not Resolved yet!", r - )), + ) + .into()), Some(def) => match def { Asn1ResolvedDefinition::Type(ref t) => { let v = resolve_value(value, &t.clone(), resolver)?; @@ -83,18 +86,19 @@ pub(crate) fn resolve_value( _ => Err(resolve_error!( "Resolved Definition '{:#?}' is not a Type definition!", typedef - )), + ) + .into()), }, } } - _ => Err(resolve_error!("resolve_value: Not Implemented!")), + _ => Err(resolve_error!("resolve_value: Not Implemented!").into()), } } Some(ref_value) => match ref_value { Asn1ResolvedDefinition::Value(ref _v) => { Ok(Asn1ResolvedValue::Reference(value.to_string())) } - _ => Err(resolve_error!("{} Not a Referenved Value!", value)), + _ => Err(resolve_error!("{} Not a Referenved Value!", value).into()), }, } } @@ -103,7 +107,7 @@ pub(crate) fn resolve_value( // present as named component in the 'value' string. 
// // -fn resolve_object_identifier_value(value: &str, _resolver: &Resolver) -> Result, Error> { +fn resolve_object_identifier_value(value: &str, _resolver: &Resolver) -> Result> { use crate::parser::asn::{parse_object_identifier, WELL_KNOWN_OID_NAMES}; use crate::tokenizer::tokenize; @@ -118,7 +122,7 @@ fn resolve_object_identifier_value(value: &str, _resolver: &Resolver) -> Result< let num = WELL_KNOWN_OID_NAMES.get(name.as_str()); if num.is_none() { return Err(resolve_error!( - "Name '{}' not found. Named component other than Well known named components not supported yet!", name)); + "Name '{}' not found. Named component other than Well known named components not supported yet!", name).into()); } values.push(*num.unwrap()); } else { diff --git a/asn-compiler/src/resolver/int.rs b/asn-compiler/src/resolver/int.rs index 57a46f2..9b5f4d6 100644 --- a/asn-compiler/src/resolver/int.rs +++ b/asn-compiler/src/resolver/int.rs @@ -2,7 +2,7 @@ use std::collections::{BTreeMap, HashMap}; -use crate::error::Error; +use anyhow::Result; use crate::parser::asn::structs::{defs::Asn1Definition, module::Asn1Module}; @@ -47,7 +47,7 @@ impl Resolver { // // After that we resolve definitions in a Topologically sorted order. Fairly straight forward. // We do not need to do any `Pending` definitions, as we were doing before. 
- pub(crate) fn resolve_definitions(&mut self, module: &mut Asn1Module) -> Result<(), Error> { + pub(crate) fn resolve_definitions(&mut self, module: &mut Asn1Module) -> Result<()> { log::debug!( "Resolving Definitions in module: {}", module.get_module_name() diff --git a/asn-compiler/src/tokenizer/mod.rs b/asn-compiler/src/tokenizer/mod.rs index 057253f..776c266 100644 --- a/asn-compiler/src/tokenizer/mod.rs +++ b/asn-compiler/src/tokenizer/mod.rs @@ -5,6 +5,7 @@ pub(crate) mod types; use crate::error::Error; +use anyhow::Result; use types::TokenType; @@ -339,11 +340,11 @@ fn get_string_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize, usize, usize), Error> { +) -> Result<(Token, usize, usize, usize)> { let mut last: Option = None; if chars.len() == 1 { - return Err(Error::TokenizeError(0, line, begin)); + return Err(Error::TokenizeError(0, line, begin).into()); } let mut i = 1; @@ -382,7 +383,7 @@ fn get_string_token( // If we didn't find the last '"' if last.is_none() { - return Err(Error::TokenizeError(5, line, begin)); + return Err(Error::TokenizeError(5, line, begin).into()); } let consumed = last.unwrap() + 1; @@ -420,20 +421,20 @@ fn get_bit_or_hex_string_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize, usize, usize), Error> { +) -> Result<(Token, usize, usize, usize)> { if chars.len() == 1 { - return Err(Error::TokenizeError(6, line, begin)); + return Err(Error::TokenizeError(6, line, begin).into()); } let last = chars[1..].iter().position(|&c| c == '\''); if last.is_none() { // No matching '\'' found till the end of the string. Clearly an error. - return Err(Error::TokenizeError(7, line, begin)); + return Err(Error::TokenizeError(7, line, begin).into()); } let mut consumed = last.unwrap() + 1 + 1; if consumed == chars.len() { // Matching'\'' found, but the string ends, Error. 
- return Err(Error::TokenizeError(8, line, begin)); + return Err(Error::TokenizeError(8, line, begin).into()); } let c = chars[consumed]; @@ -441,7 +442,7 @@ fn get_bit_or_hex_string_token( "h" => TokenType::HexString, "b" => TokenType::BitString, _ => { - return Err(Error::TokenizeError(9, line, begin)); + return Err(Error::TokenizeError(9, line, begin).into()); } }; @@ -456,13 +457,13 @@ fn get_bit_or_hex_string_token( text = text.replace(char::is_whitespace, ""); if token_type == TokenType::BitString && !text.replace(&['0', '1', '\''][..], "").is_empty() { - return Err(Error::TokenizeError(10, line, begin)); + return Err(Error::TokenizeError(10, line, begin).into()); } if token_type == TokenType::HexString && !text.chars().all(|c| c.is_ascii_hexdigit() || c == '\'') { - return Err(Error::TokenizeError(11, line, begin)); + return Err(Error::TokenizeError(11, line, begin).into()); } consumed += 1; // last 'h' or 'b' @@ -483,13 +484,9 @@ fn get_bit_or_hex_string_token( } // Get at and component ID list something like @.id or @component.id -fn get_at_component_id_list( - chars: &[char], - line: usize, - begin: usize, -) -> Result<(Token, usize), Error> { +fn get_at_component_id_list(chars: &[char], line: usize, begin: usize) -> Result<(Token, usize)> { if chars.len() == 1 { - return Err(Error::TokenizeError(12, line, begin)); + return Err(Error::TokenizeError(12, line, begin).into()); } let mut consumed = 1; @@ -504,7 +501,7 @@ fn get_at_component_id_list( // Identifier should not end with a '-' if ['.', '-'].iter().any(|&c| c == chars[consumed - 1]) { - return Err(Error::TokenizeError(13, line, begin)); + return Err(Error::TokenizeError(13, line, begin).into()); } Ok(( Token { @@ -523,11 +520,11 @@ fn get_at_component_id_list( // 1..2 => Will return 1 as a number // 1.2 => Will return 1.2 as a number // -1.2.3 => Will return Error -fn get_number_token(chars: &[char], line: usize, begin: usize) -> Result<(Token, usize), Error> { +fn get_number_token(chars: &[char], 
line: usize, begin: usize) -> Result<(Token, usize)> { let neg = (chars[0] == '-') as usize; if neg > 0 && chars.len() == 1 { - return Err(Error::TokenizeError(14, line, begin)); + return Err(Error::TokenizeError(14, line, begin).into()); } let mut consumed = neg; @@ -545,7 +542,7 @@ fn get_number_token(chars: &[char], line: usize, begin: usize) -> Result<(Token, let dot_index = chars[neg..].iter().position(|&x| x == '.'); if let Some(index) = dot_index { if index == chars.len() { - Err(Error::TokenizeError(14, line, begin)) + Err(Error::TokenizeError(14, line, begin).into()) // Error (Last .) } else if chars[index + 1] == '.' { // Atleast two .. Return this number, this becomes a parse error later on @@ -562,7 +559,7 @@ fn get_number_token(chars: &[char], line: usize, begin: usize) -> Result<(Token, )) } else { // Error something in weird form like 3.14.159 - Err(Error::TokenizeError(14, line, begin)) + Err(Error::TokenizeError(14, line, begin).into()) } } else { unreachable!(); @@ -591,11 +588,11 @@ fn get_identifier_or_keyword_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize), Error> { +) -> Result<(Token, usize)> { let and = (chars[0] == '&') as usize; if and > 0 && chars.len() == 1 { - return Err(Error::TokenizeError(15, line, begin)); + return Err(Error::TokenizeError(15, line, begin).into()); } let mut consumed = and; @@ -611,17 +608,17 @@ fn get_identifier_or_keyword_token( // Identifier should not end with a '-' if chars[consumed - 1] == '-' { - return Err(Error::TokenizeError(16, line, begin)); + return Err(Error::TokenizeError(16, line, begin).into()); } // Free standing '&' this is an error. 
if and > 0 && consumed == 1 { - return Err(Error::TokenizeError(17, line, begin)); + return Err(Error::TokenizeError(17, line, begin).into()); } let text = chars[..consumed].iter().collect::(); if text.contains("--") { - return Err(Error::TokenizeError(18, line, begin)); + return Err(Error::TokenizeError(18, line, begin).into()); } let token_type = if and > 0 { @@ -650,7 +647,7 @@ fn get_range_or_extension_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize), Error> { +) -> Result<(Token, usize)> { let (token_type, consumed) = if chars.len() == 1 { (TokenType::Dot, 1) } else if chars.len() == 2 { @@ -687,12 +684,12 @@ fn get_assignment_or_colon_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize), Error> { +) -> Result<(Token, usize)> { let (token_type, consumed) = if chars.len() == 1 { (TokenType::Colon, 1) } else if chars.len() == 2 { if chars[1] == ':' { - return Err(Error::TokenizeError(19, line, begin)); + return Err(Error::TokenizeError(19, line, begin).into()); } else { (TokenType::Colon, 1) } @@ -700,7 +697,7 @@ fn get_assignment_or_colon_token( if chars[2] == '=' { (TokenType::Assignment, 3) } else { - return Err(Error::TokenizeError(20, line, begin)); + return Err(Error::TokenizeError(20, line, begin).into()); } } else { (TokenType::Colon, 1) @@ -726,7 +723,7 @@ fn get_seq_extension_or_square_brackets_token( chars: &[char], line: usize, begin: usize, -) -> Result<(Token, usize), Error> { +) -> Result<(Token, usize)> { let (token_type, consumed) = if chars[0] == '[' { if chars[1] == '[' { (TokenType::AdditionGroupsBegin, 2) @@ -754,7 +751,7 @@ fn get_seq_extension_or_square_brackets_token( // Gets Begin/End of round/curly brackets. 
// // Note: square brackets need a special treatment due to "[[" and "]]" -fn get_single_char_token(token: char, line: usize, begin: usize) -> Result { +fn get_single_char_token(token: char, line: usize, begin: usize) -> Result { let token_type: TokenType = match token { '{' => TokenType::CurlyBegin, '}' => TokenType::CurlyEnd, @@ -766,7 +763,7 @@ fn get_single_char_token(token: char, line: usize, begin: usize) -> Result TokenType::SetUnionToken, '^' => TokenType::SetIntersectionToken, '<' => TokenType::LessThan, - _ => return Err(Error::TokenizeError(21, line, begin)), + _ => return Err(Error::TokenizeError(21, line, begin).into()), }; Ok(Token { r#type: token_type, @@ -789,7 +786,7 @@ fn get_maybe_comment_token( chars: &[char], // From the first "--" line: usize, begin: usize, -) -> Result<(Option, usize), Error> { +) -> Result<(Option, usize)> { if chars[1] != '-' { return Ok((None, 0)); } @@ -838,7 +835,7 @@ fn get_maybe_comment_token( /// This function would work on any input that implements `std::io::Read` trait, but would work /// mostly with files because this 'reads the input to end'. We look at the first character of a /// non-whitespace sequence and then tokenize that into appropriate tokens. -pub fn tokenize(mut input: T) -> Result, Error> +pub fn tokenize(mut input: T) -> Result> where T: std::io::Read, { @@ -853,7 +850,7 @@ where /// /// Tokenize a given 'String' to ASN.1 Tokens. This API Can be used to write simple test cases for /// ASN.1 files say. 
-pub fn tokenize_string(buffer: &str) -> Result, Error> { +pub fn tokenize_string(buffer: &str) -> Result> { let chars: Vec = buffer.chars().collect(); let mut column = 0_usize; diff --git a/examples/build.rs b/examples/build.rs index 07dbf2f..cf5c6e2 100644 --- a/examples/build.rs +++ b/examples/build.rs @@ -76,7 +76,13 @@ fn main() -> std::io::Result<()> { Derive::Deserialize, ], ); - compiler.compile_files(&specs_files)?; + if let Err(_e) = compiler.compile_files(&specs_files) { + eprintln!("Error compiling ASN.1 specs: {:#?}", specs_files); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Error compiling ASN.1 specs", + )); + } } Ok(())