diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs
index 68d18302f34..c4c31a2dd8a 100644
--- a/compiler/noirc_frontend/src/lexer/token.rs
+++ b/compiler/noirc_frontend/src/lexer/token.rs
@@ -378,8 +378,16 @@ impl fmt::Display for Token {
             Token::Keyword(k) => write!(f, "{k}"),
             Token::Attribute(ref a) => write!(f, "{a}"),
             Token::InnerAttribute(ref a) => write!(f, "#![{a}]"),
-            Token::LineComment(ref s, _style) => write!(f, "//{s}"),
-            Token::BlockComment(ref s, _style) => write!(f, "/*{s}*/"),
+            Token::LineComment(ref s, style) => match style {
+                Some(DocStyle::Inner) => write!(f, "//!{s}"),
+                Some(DocStyle::Outer) => write!(f, "///{s}"),
+                None => write!(f, "//{s}"),
+            },
+            Token::BlockComment(ref s, style) => match style {
+                Some(DocStyle::Inner) => write!(f, "/*!{s}*/"),
+                Some(DocStyle::Outer) => write!(f, "/**{s}*/"),
+                None => write!(f, "/*{s}*/"),
+            },
             Token::Quote(ref stream) => {
                 write!(f, "quote {{")?;
                 for token in stream.0.iter() {
@@ -456,6 +464,7 @@ pub enum TokenKind {
     InternedUnresolvedTypeData,
     InternedPattern,
     UnquoteMarker,
+    Comment,
     OuterDocComment,
     InnerDocComment,
 }
@@ -477,6 +486,7 @@ impl fmt::Display for TokenKind {
             TokenKind::InternedUnresolvedTypeData => write!(f, "interned unresolved type"),
             TokenKind::InternedPattern => write!(f, "interned pattern"),
             TokenKind::UnquoteMarker => write!(f, "macro result"),
+            TokenKind::Comment => write!(f, "comment"),
             TokenKind::OuterDocComment => write!(f, "outer doc comment"),
             TokenKind::InnerDocComment => write!(f, "inner doc comment"),
         }
@@ -503,6 +513,7 @@ impl Token {
             Token::InternedLValue(_) => TokenKind::InternedLValue,
             Token::InternedUnresolvedTypeData(_) => TokenKind::InternedUnresolvedTypeData,
             Token::InternedPattern(_) => TokenKind::InternedPattern,
+            Token::LineComment(_, None) | Token::BlockComment(_, None) => TokenKind::Comment,
             Token::LineComment(_, Some(DocStyle::Outer))
             | Token::BlockComment(_, Some(DocStyle::Outer)) => TokenKind::OuterDocComment,
             Token::LineComment(_, Some(DocStyle::Inner))
@@ -635,8 +646,8 @@ impl fmt::Display for TestScope {
         match self {
             TestScope::None => write!(f, ""),
             TestScope::ShouldFailWith { reason } => match reason {
-                Some(failure_reason) => write!(f, "(should_fail_with = ({failure_reason}))"),
-                None => write!(f, "should_fail"),
+                Some(failure_reason) => write!(f, "(should_fail_with = {failure_reason:?})"),
+                None => write!(f, "(should_fail)"),
             },
         }
     }
@@ -991,7 +1002,7 @@ impl fmt::Display for SecondaryAttribute {
         match self {
             SecondaryAttribute::Deprecated(None) => write!(f, "#[deprecated]"),
             SecondaryAttribute::Deprecated(Some(ref note)) => {
-                write!(f, r#"#[deprecated("{note}")]"#)
+                write!(f, r#"#[deprecated({note:?})]"#)
             }
             SecondaryAttribute::Tag(ref attribute) => write!(f, "#['{}]", attribute.contents),
             SecondaryAttribute::Meta(ref attribute) => write!(f, "#[{}]", attribute.contents),
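For reference, the new comment arms above round-trip the doc-comment markers when a token is re-emitted as text. A minimal sketch of the intended rendering (an illustration, not part of the change; it assumes `LineComment`/`BlockComment` store a `String` plus the `Option<DocStyle>` matched above):

    assert_eq!(Token::LineComment(" hi".to_string(), None).to_string(), "// hi");
    assert_eq!(Token::LineComment(" hi".to_string(), Some(DocStyle::Outer)).to_string(), "/// hi");
    assert_eq!(Token::BlockComment(" hi ".to_string(), Some(DocStyle::Inner)).to_string(), "/*! hi */");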
diff --git a/compiler/noirc_frontend/src/parser/parser/function.rs b/compiler/noirc_frontend/src/parser/parser/function.rs
index 6d1fc611767..acec7942e24 100644
--- a/compiler/noirc_frontend/src/parser/parser/function.rs
+++ b/compiler/noirc_frontend/src/parser/parser/function.rs
@@ -285,7 +285,7 @@ fn empty_body() -> BlockExpression {
 #[cfg(test)]
 mod tests {
     use crate::{
-        ast::{NoirFunction, UnresolvedTypeData, Visibility},
+        ast::{ItemVisibility, NoirFunction, UnresolvedTypeData, Visibility},
         parser::{
             parser::{
                 parse_program,
@@ -480,4 +480,13 @@ mod tests {
         let error = get_single_error(&errors, span);
         assert_eq!(error.to_string(), "Expected a type but found ,");
     }
+
+    #[test]
+    fn parse_function_with_unconstrained_after_visibility() {
+        let src = "pub unconstrained fn foo() {}";
+        let noir_function = parse_function_no_error(src);
+        assert_eq!("foo", noir_function.def.name.to_string());
+        assert!(noir_function.def.is_unconstrained);
+        assert_eq!(noir_function.def.visibility, ItemVisibility::Public);
+    }
 }
diff --git a/compiler/noirc_frontend/src/parser/parser/modifiers.rs b/compiler/noirc_frontend/src/parser/parser/modifiers.rs
index d3bf692ee53..a668d3bae6a 100644
--- a/compiler/noirc_frontend/src/parser/parser/modifiers.rs
+++ b/compiler/noirc_frontend/src/parser/parser/modifiers.rs
@@ -14,7 +14,10 @@ pub(crate) struct Modifiers {
 }
 
 impl<'a> Parser<'a> {
-    /// Modifiers = 'unconstrained'? ItemVisibility 'comptime'? 'mut'?
+    /// Modifiers = ItemVisibility 'unconstrained'? 'comptime'? 'mut'?
+    ///
+    /// NOTE: we also allow `unconstrained` before the visibility for backwards compatibility.
+    /// The formatter will put it after the visibility.
     pub(crate) fn parse_modifiers(&mut self, allow_mutable: bool) -> Modifiers {
         let unconstrained = if self.eat_keyword(Keyword::Unconstrained) {
             Some(self.previous_token_span)
@@ -26,6 +29,16 @@ impl<'a> Parser<'a> {
         let visibility = self.parse_item_visibility();
         let visibility_span = self.span_since(start_span);
 
+        let unconstrained = if unconstrained.is_none() {
+            if self.eat_keyword(Keyword::Unconstrained) {
+                Some(self.previous_token_span)
+            } else {
+                None
+            }
+        } else {
+            unconstrained
+        };
+
         let comptime =
             if self.eat_keyword(Keyword::Comptime) { Some(self.previous_token_span) } else { None };
         let mutable = if allow_mutable && self.eat_keyword(Keyword::Mut) {
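Since `parse_modifiers` above still accepts the legacy `unconstrained pub` ordering, a companion test could mirror the one added in function.rs. A sketch, reusing the same helpers (`parse_function_no_error`, `ItemVisibility`) under the assumption that the legacy ordering parses to the same definition:

    #[test]
    fn parse_function_with_unconstrained_before_visibility() {
        // Legacy order: `unconstrained` before the visibility still parses;
        // the formatter is what later rewrites it to `pub unconstrained`.
        let src = "unconstrained pub fn foo() {}";
        let noir_function = parse_function_no_error(src);
        assert_eq!("foo", noir_function.def.name.to_string());
        assert!(noir_function.def.is_unconstrained);
        assert_eq!(noir_function.def.visibility, ItemVisibility::Public);
    }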
diff --git a/noir_stdlib/src/array/quicksort.nr b/noir_stdlib/src/array/quicksort.nr
index 6a54ed246f5..8563a5d75bd 100644
--- a/noir_stdlib/src/array/quicksort.nr
+++ b/noir_stdlib/src/array/quicksort.nr
@@ -30,7 +30,7 @@ unconstrained fn quicksort_recursive(arr: &mut [T; N], low:
     }
 }
 
-unconstrained pub(crate) fn quicksort(_arr: [T; N], sortfn: fn[Env](T, T) -> bool) -> [T; N] {
+pub(crate) unconstrained fn quicksort(_arr: [T; N], sortfn: fn[Env](T, T) -> bool) -> [T; N] {
     let mut arr: [T; N] = _arr;
     if arr.len() <= 1 {} else {
         quicksort_recursive(&mut arr, 0, arr.len() - 1, sortfn);
diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr
index 7cd9942e02a..c2a3ff9b7ca 100644
--- a/noir_stdlib/src/collections/bounded_vec.nr
+++ b/noir_stdlib/src/collections/bounded_vec.nr
@@ -554,7 +554,7 @@ mod bounded_vec_tests {
         assert_eq(bounded_vec.storage()[2], 3);
     }
 
-    #[test(should_fail_with="from array out of bounds")]
+    #[test(should_fail_with = "from array out of bounds")]
     fn max_len_lower_then_array_len() {
        let _: BoundedVec = BoundedVec::from_array([0; 3]);
    }
diff --git a/noir_stdlib/src/collections/umap.nr b/noir_stdlib/src/collections/umap.nr
index 9d23e216731..8a7e6f81103 100644
--- a/noir_stdlib/src/collections/umap.nr
+++ b/noir_stdlib/src/collections/umap.nr
@@ -188,7 +188,7 @@ impl UHashMap {
 
     // For each key-value entry applies mutator function.
     // docs:start:iter_mut
-    unconstrained pub fn iter_mut(
+    pub unconstrained fn iter_mut(
         &mut self,
         f: fn(K, V) -> (K, V)
     )
@@ -210,7 +210,7 @@ impl UHashMap {
 
     // For each key applies mutator function.
     // docs:start:iter_keys_mut
-    unconstrained pub fn iter_keys_mut(
+    pub unconstrained fn iter_keys_mut(
         &mut self,
         f: fn(K) -> K
     )
@@ -277,7 +277,7 @@ impl UHashMap {
 
     // Get the value by key. If it does not exist, returns none().
     // docs:start:get
-    unconstrained pub fn get(
+    pub unconstrained fn get(
         self,
         key: K
     ) -> Option
@@ -309,7 +309,7 @@ impl UHashMap {
 
     // Insert key-value entry. In case key was already present, value is overridden.
     // docs:start:insert
-    unconstrained pub fn insert(
+    pub unconstrained fn insert(
         &mut self,
         key: K,
         value: V
@@ -362,7 +362,7 @@ impl UHashMap {
 
     // Removes a key-value entry. If key is not present, UHashMap remains unchanged.
     // docs:start:remove
-    unconstrained pub fn remove(
+    pub unconstrained fn remove(
         &mut self,
         key: K
     )
diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr
index 8ff62062d5c..d93b1128a60 100644
--- a/noir_stdlib/src/field/bn254.nr
+++ b/noir_stdlib/src/field/bn254.nr
@@ -23,7 +23,7 @@ fn compute_decomposition(x: Field) -> (Field, Field) {
     (low, high)
 }
 
-unconstrained pub(crate) fn decompose_hint(x: Field) -> (Field, Field) {
+pub(crate) unconstrained fn decompose_hint(x: Field) -> (Field, Field) {
     compute_decomposition(x)
 }
 
diff --git a/noir_stdlib/src/field/mod.nr b/noir_stdlib/src/field/mod.nr
index d5a6193db3b..35f95541354 100644
--- a/noir_stdlib/src/field/mod.nr
+++ b/noir_stdlib/src/field/mod.nr
@@ -46,7 +46,7 @@ impl Field {
     /// (e.g. 254 for the BN254 field) allow for multiple bit decompositions. This is due to how the `Field` will
     /// wrap around due to overflow when verifying the decomposition.
     #[builtin(to_be_bits)]
-    // docs:start:to_be_bits 
+    // docs:start:to_be_bits
     pub fn to_be_bits(self: Self) -> [u1; N] {}
     // docs:end:to_be_bits
 
diff --git a/noir_stdlib/src/meta/trait_def.nr b/noir_stdlib/src/meta/trait_def.nr
index 0c2e7cfa5c1..9bf0132f79e 100644
--- a/noir_stdlib/src/meta/trait_def.nr
+++ b/noir_stdlib/src/meta/trait_def.nr
@@ -3,7 +3,7 @@ use crate::cmp::Eq;
 
 impl TraitDefinition {
     #[builtin(trait_def_as_trait_constraint)]
-// docs:start:as_trait_constraint
+    // docs:start:as_trait_constraint
     pub comptime fn as_trait_constraint(_self: Self) -> TraitConstraint {}
     // docs:end:as_trait_constraint
 }
diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr
index e906ad54d55..4d6954913d1 100644
--- a/noir_stdlib/src/test.nr
+++ b/noir_stdlib/src/test.nr
@@ -21,30 +21,30 @@ pub struct OracleMock {
 }
 
 impl OracleMock {
-    unconstrained pub fn mock(name: str) -> Self {
+    pub unconstrained fn mock(name: str) -> Self {
         Self { id: create_mock_oracle(name) }
     }
 
-    unconstrained pub fn with_params(self, params: P) -> Self {
+    pub unconstrained fn with_params(self, params: P) -> Self {
         set_mock_params_oracle(self.id, params);
         self
     }
 
-    unconstrained pub fn get_last_params(self) -> P {
+    pub unconstrained fn get_last_params(self) -> P {
         get_mock_last_params_oracle(self.id)
     }
 
-    unconstrained pub fn returns(self, returns: R) -> Self {
+    pub unconstrained fn returns(self, returns: R) -> Self {
         set_mock_returns_oracle(self.id, returns);
         self
     }
 
-    unconstrained pub fn times(self, times: u64) -> Self {
+    pub unconstrained fn times(self, times: u64) -> Self {
         set_mock_times_oracle(self.id, times);
         self
     }
 
-    unconstrained pub fn clear(self) {
+    pub unconstrained fn clear(self) {
         clear_mock_oracle(self.id);
     }
 }
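The `should_fail_with = "..."` spelling used in the stdlib tests above is also what the fixed `TestScope` Display impl in token.rs now emits, which matters because the formatter below re-prints attributes via `to_string()`. A quick sketch of the expected rendering (assuming `reason` is an `Option<String>`):

    assert_eq!(
        TestScope::ShouldFailWith { reason: Some("oops".to_string()) }.to_string(),
        r#"(should_fail_with = "oops")"#
    );
    assert_eq!(TestScope::ShouldFailWith { reason: None }.to_string(), "(should_fail)");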

diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs
index f8432ab94c6..4724c8d957d 100644
--- a/tooling/nargo_fmt/src/visitor/item.rs
+++ b/tooling/nargo_fmt/src/visitor/item.rs
@@ -8,7 +8,8 @@ use crate::{
 };
 use noirc_frontend::{
     ast::{ItemVisibility, NoirFunction, TraitImplItemKind, UnresolvedTypeData, Visibility},
-    token::SecondaryAttribute,
+    lexer::Lexer,
+    token::{SecondaryAttribute, TokenKind},
 };
 use noirc_frontend::{
     hir::resolution::errors::Span,
@@ -27,7 +28,8 @@ impl super::FmtVisitor<'_> {
         let name_span = func.name_ident().span();
         let func_span = func.span();
 
-        let mut result = self.slice(start..name_span.end()).to_owned();
+        let fn_header = self.slice(start..name_span.end());
+        let mut result = self.format_fn_header(fn_header, &func);
 
         let params_open =
             self.span_before(name_span.end()..func_span.start(), Token::LeftParen).start();
@@ -109,6 +111,134 @@ impl super::FmtVisitor<'_> {
         (result.trim_end().to_string(), last_line_contains_single_line_comment(maybe_comment))
     }
 
+    // This formats the function outer doc comments, attributes, modifiers, and `fn name`.
+    fn format_fn_header(&self, src: &str, func: &NoirFunction) -> String {
+        let mut result = String::new();
+        let mut lexer = Lexer::new(src).skip_comments(false).peekable();
+
+        // First there might be outer doc comments
+        while let Some(Ok(token)) = lexer.peek() {
+            if token.kind() == TokenKind::OuterDocComment {
+                result.push_str(&token.to_string());
+                result.push('\n');
+                result.push_str(&self.indent.to_string());
+                lexer.next();
+
+                self.append_comments_if_any(&mut lexer, &mut result);
+            } else {
+                break;
+            }
+        }
+
+        // Then, optionally, attributes
+        while let Some(Ok(token)) = lexer.peek() {
+            if token.kind() == TokenKind::Attribute {
+                result.push_str(&token.to_string());
+                result.push('\n');
+                result.push_str(&self.indent.to_string());
+                lexer.next();
+
+                self.append_comments_if_any(&mut lexer, &mut result);
+            } else {
+                break;
+            }
+        }
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then, optionally, the `unconstrained` keyword
+        // (eventually we'll stop accepting this, but we keep it for backwards compatibility)
+        if let Some(Ok(token)) = lexer.peek() {
+            if let Token::Keyword(Keyword::Unconstrained) = token.token() {
+                lexer.next();
+            }
+        }
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then the visibility
+        let mut has_visibility = false;
+        if let Some(Ok(token)) = lexer.peek() {
+            if let Token::Keyword(Keyword::Pub) = token.token() {
+                has_visibility = true;
+                lexer.next();
+                if let Some(Ok(token)) = lexer.peek() {
+                    if let Token::LeftParen = token.token() {
+                        lexer.next(); // Skip '('
+                        lexer.next(); // Skip 'crate'
+                        lexer.next(); // Skip ')'
+                    }
+                }
+            }
+        }
+
+        if has_visibility {
+            result.push_str(&func.def.visibility.to_string());
+            result.push(' ');
+        }
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then, optionally, and again, the `unconstrained` keyword
+        if let Some(Ok(token)) = lexer.peek() {
+            if let Token::Keyword(Keyword::Unconstrained) = token.token() {
+                lexer.next();
+            }
+        }
+
+        if func.def.is_unconstrained {
+            result.push_str("unconstrained ");
+        }
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then, optionally, the `comptime` keyword
+        if let Some(Ok(token)) = lexer.peek() {
+            if let Token::Keyword(Keyword::Comptime) = token.token() {
+                lexer.next();
+            }
+        }
+
+        if func.def.is_comptime {
+            result.push_str("comptime ");
+        }
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then the `fn` keyword
+        lexer.next(); // Skip fn
+        result.push_str("fn ");
+
+        self.append_comments_if_any(&mut lexer, &mut result);
+
+        // Then the function name
+        result.push_str(&func.def.name.0.contents);
+
+        result
+    }
+
+    fn append_comments_if_any(
+        &self,
+        lexer: &mut std::iter::Peekable<Lexer<'_>>,
+        result: &mut String,
+    ) {
+        while let Some(Ok(token)) = lexer.peek() {
+            match token.token() {
+                Token::LineComment(..) => {
+                    result.push_str(&token.to_string());
+                    result.push('\n');
+                    result.push_str(&self.indent.to_string());
+                    lexer.next();
+                }
+                Token::BlockComment(..) => {
+                    result.push_str(&token.to_string());
+                    lexer.next();
+                }
+                _ => break,
+            }
+        }
+    }
+
     fn format_return_type(
         &self,
         span: Span,
diff --git a/tooling/nargo_fmt/tests/expected/fn.nr b/tooling/nargo_fmt/tests/expected/fn.nr
index 73248d0c04f..19f022751c4 100644
--- a/tooling/nargo_fmt/tests/expected/fn.nr
+++ b/tooling/nargo_fmt/tests/expected/fn.nr
@@ -71,3 +71,21 @@ fn whitespace_before_generics(foo: T) {}
 fn more_whitespace_before_generics(foo: T) {}
 
 fn with_unconstrained(x: unconstrained fn() -> ()) {}
+
+pub(crate) fn one() {}
+
+pub unconstrained fn two() {}
+
+pub unconstrained comptime fn two() {}
+
+/// Documented
+#[test]
+fn three() {}
+
+#[test(should_fail)]
+// comment
+fn four() {}
+
+#[test(should_fail_with = "oops")]
+fn five() {}
+
diff --git a/tooling/nargo_fmt/tests/input/fn.nr b/tooling/nargo_fmt/tests/input/fn.nr
index 8db6022808f..8ed2c565bc4 100644
--- a/tooling/nargo_fmt/tests/input/fn.nr
+++ b/tooling/nargo_fmt/tests/input/fn.nr
@@ -56,3 +56,20 @@ fn more_whitespace_before_generics < T > (foo: T) {}
 
 fn with_unconstrained(x: unconstrained fn() -> ()) {}
 
+
+pub ( crate ) fn one() {}
+
+unconstrained pub fn two() {}
+
+unconstrained pub comptime fn two() {}
+
+/// Documented
+#[test] fn three() {}
+
+#[test(should_fail)]
+// comment
+fn four() {}
+
+#[test(should_fail_with="oops")]
+fn five() {}
+
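One consequence of the new `TokenKind::Comment` is that plain (non-doc) comments can now be selected by kind when lexing with comments enabled, the same way `format_fn_header` filters doc comments and attributes. A minimal sketch (assuming the `Lexer` iterator yields `Result`-wrapped spanned tokens, as used in item.rs above):

    fn collect_plain_comments(src: &str) -> Vec<String> {
        // Lex with comments preserved and keep every non-doc comment;
        // doc comments map to OuterDocComment/InnerDocComment instead.
        let mut comments = Vec::new();
        for token in Lexer::new(src).skip_comments(false) {
            if let Ok(token) = token {
                if token.kind() == TokenKind::Comment {
                    comments.push(token.to_string());
                }
            }
        }
        comments
    }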