From d07d50ca9b3e5076c847b7f06f3ffce5244486da Mon Sep 17 00:00:00 2001
From: Pascal Seitz
Date: Wed, 2 Oct 2024 07:16:12 +0800
Subject: [PATCH] fix cargo doc

---
 src/tokenize.rs | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/tokenize.rs b/src/tokenize.rs
index 9240bd6..9c7ebc7 100644
--- a/src/tokenize.rs
+++ b/src/tokenize.rs
@@ -1,7 +1,7 @@
 //! Splits input into array of strings separated by opinionated
-//! [`TokenType`](crate::tokenize::TokenType).
+//! [`TokenType`].
 //!
-//! [`tokenize_detailed`](crate::tokenize::tokenize_detailed) returns an
+//! [`tokenize_detailed`] returns an
 //! array containing `{ TokenType, String }` instead of `String`
 //!
 //! # Example
@@ -91,7 +91,7 @@ fn get_type(input: char, compact: bool) -> TokenType {
 }
 
 /// Tokenizes the text. Splits input into array of strings separated by opinionated
-/// [`TokenType`](crate::tokenize::TokenType).
+/// [`TokenType`].
 ///
 /// # Example
 /// ```
@@ -107,7 +107,7 @@ pub fn tokenize(input: &str) -> Vec<String> {
 }
 
 /// Tokenizes the text. Splits input into array of strings separated by opinionated
-/// [`TokenType`](crate::tokenize::TokenType).
+/// [`TokenType`].
 ///
 /// If `compact` is set, many same-language tokens are combined (spaces + text, kanji + kana,
 /// numeral + punctuation).
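
For orientation, here is a minimal usage sketch of the API these doc comments describe. The crate name (`wana_kana`) and the exact `tokenize_detailed` signature are assumptions; the patch itself only confirms `tokenize(input: &str) -> Vec<String>`, that `tokenize_detailed` yields `{ TokenType, String }` pairs, and that a `compact` flag merges many same-language tokens.

```rust
// Sketch only: the crate name and the `tokenize_detailed` signature are
// assumptions, not confirmed by the patch.
use wana_kana::tokenize::{tokenize, tokenize_detailed};

fn main() {
    // Confirmed by the hunk context: `tokenize` splits the input into a
    // Vec<String>, breaking wherever the opinionated TokenType changes.
    let tokens: Vec<String> = tokenize("hello 123 こんにちは");
    println!("{:?}", tokens);

    // Per the module docs, `tokenize_detailed` returns the TokenType along
    // with each string slice. The `compact: bool` parameter is inferred from
    // the doc comment in the last hunk ("If `compact` is set, many
    // same-language tokens are combined").
    let detailed = tokenize_detailed("hello 123 こんにちは", true);
    println!("{:?}", detailed);
}
```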