Skip to content

Commit

Permalink
stricter clippy (#97)
Browse files Browse the repository at this point in the history
  • Loading branch information
samuelcolvin committed May 20, 2024
1 parent 06bd0bb commit d89c3a8
Show file tree
Hide file tree
Showing 10 changed files with 41 additions and 21 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ repos:
pass_filenames: false
- id: clippy
name: Clippy
entry: cargo clippy -F python -- -D warnings -A incomplete_features -W clippy::dbg_macro -W clippy::print_stdout
entry: cargo clippy -F python -- -D warnings
types: [rust]
language: system
pass_filenames: false
Expand Down
22 changes: 22 additions & 0 deletions crates/jiter/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -49,3 +49,25 @@ harness = false
# get docs.rs to include python docs
[package.metadata.docs.rs]
all-features = true

[lints.clippy]
dbg_macro = "deny"
print_stdout = "deny"
print_stderr = "deny"
# in general we lint against the pedantic group, but we will whitelist
# certain lints which we don't want to enforce (for now)
pedantic = { level = "deny", priority = -1 }
missing_errors_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
if_not_else = "allow"
cast_lossless = "allow"
cast_possible_wrap = "allow"
cast_possible_truncation = "allow"
cast_precision_loss = "allow"
match_bool = "allow"
doc_markdown = "allow"
implicit_clone = "allow"
iter_without_into_iter = "allow"
inline_always = "allow" # TODO remove?
match_same_arms = "allow" # TODO remove?
10 changes: 4 additions & 6 deletions crates/jiter/src/errors.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
use std::fmt;

/// Enum representing all possible errors in JSON syntax.
///
/// Almost all of `JsonErrorType` is copied from [serde_json](https://github.com/serde-rs/json) so errors match
Expand Down Expand Up @@ -196,9 +194,9 @@ pub enum JiterErrorType {
impl std::fmt::Display for JiterErrorType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::JsonError(error_type) => write!(f, "{}", error_type),
Self::JsonError(error_type) => write!(f, "{error_type}"),
Self::WrongType { expected, actual } => {
write!(f, "expected {} but found {}", expected, actual)
write!(f, "expected {expected} but found {actual}")
}
}
}
Expand Down Expand Up @@ -254,8 +252,8 @@ pub struct LinePosition {
pub column: usize,
}

impl fmt::Display for LinePosition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl std::fmt::Display for LinePosition {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "line {} column {}", self.line, self.column)
}
}
Expand Down
9 changes: 5 additions & 4 deletions crates/jiter/src/number_decoder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ impl IntParse {
index += 1;
return match data.get(index) {
Some(b'.') => Ok((Self::Float, index)),
Some(b'e') | Some(b'E') => Ok((Self::Float, index)),
Some(b'e' | b'E') => Ok((Self::Float, index)),
Some(digit) if digit.is_ascii_digit() => json_err!(InvalidNumber, index),
_ => Ok((Self::Int(NumberInt::Int(0)), index)),
};
Expand All @@ -213,7 +213,7 @@ impl IntParse {
IntChunk::Done(value) => {
let mut value_i64 = value as i64;
if !positive {
value_i64 = -value_i64
value_i64 = -value_i64;
}
return Ok((Self::Int(NumberInt::Int(value_i64)), new_index));
}
Expand Down Expand Up @@ -379,7 +379,7 @@ impl AbstractNumberDecoder for NumberRange {
let end = consume_decimal(data, index)?;
Ok((start..end, end))
}
Some(b'e') | Some(b'E') => {
Some(b'e' | b'E') => {
index += 1;
let end = consume_exponential(data, index)?;
Ok((start..end, end))
Expand Down Expand Up @@ -420,6 +420,7 @@ impl AbstractNumberDecoder for NumberRange {
if (new_index - start) > 4300 {
return json_err!(NumberOutOfRange, start + 4301);
}
#[allow(clippy::single_match_else)]
match chunk {
IntChunk::Ongoing(_) => {
index = new_index;
Expand All @@ -446,7 +447,7 @@ impl AbstractNumberDecoder for NumberRange {

fn consume_exponential(data: &[u8], mut index: usize) -> JsonResult<usize> {
match data.get(index) {
Some(b'-') | Some(b'+') => {
Some(b'-' | b'+') => {
index += 1;
}
Some(v) if v.is_ascii_digit() => (),
Expand Down
4 changes: 2 additions & 2 deletions crates/jiter/src/parse.rs
Original file line number Diff line number Diff line change
Expand Up @@ -266,13 +266,13 @@ pub(crate) fn consume_nan(data: &[u8], index: usize) -> JsonResult<usize> {
}

fn consume_ident<const SIZE: usize>(data: &[u8], mut index: usize, expected: [u8; SIZE]) -> JsonResult<usize> {
match data.get(index + 1..index + SIZE + 1) {
match data.get(index + 1..=index + SIZE) {
Some(s) if s == expected => Ok(index + SIZE + 1),
// TODO very sadly iterating over expected cause extra branches in the generated assembly
// and is significantly slower than just returning an error
_ => {
index += 1;
for c in expected.iter() {
for c in &expected {
match data.get(index) {
Some(v) if v == c => index += 1,
Some(_) => return json_err!(ExpectedSomeIdent, index),
Expand Down
2 changes: 1 addition & 1 deletion crates/jiter/src/py_string_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ pub fn cache_usage(py: Python) -> usize {
}

pub fn cache_clear(py: Python) {
get_string_cache!(py).borrow_mut().clear()
get_string_cache!(py).borrow_mut().clear();
}

pub fn cached_py_string<'py>(py: Python<'py>, s: &str, ascii_only: bool) -> Bound<'py, PyString> {
Expand Down
4 changes: 1 addition & 3 deletions crates/jiter/src/python.rs
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,7 @@ impl<'j, StringCache: StringMaybeCache, KeyCheck: MaybeKeyCheck> PythonParser<'j
// AFAIK this shouldn't happen since the key will always be a string which is hashable
// we panic here rather than returning a result and using `?` below as it's up to 14% faster
// presumably because there are fewer branches
if r == -1 {
panic!("PyDict_SetItem failed")
}
assert_ne!(r, -1, "PyDict_SetItem failed");
};
let mut check_keys = KeyCheck::default();
if let Some(first_key) = self.parser.object_first::<StringDecoder>(&mut self.tape)? {
Expand Down
3 changes: 2 additions & 1 deletion crates/jiter/src/simd_aarch64.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ unsafe fn full_calc(byte_vec: SimdVecu8_16, last_digit: u32) -> u64 {
// transmute the 2x64-bit lane into an array;
let t: [u64; 2] = transmute(x);
// since the data started out as digits, it's safe to assume the result fits in a u64
t[0].wrapping_mul(100000000).wrapping_add(t[1])
t[0].wrapping_mul(100_000_000).wrapping_add(t[1])
}

fn next_is_float(data: &[u8], index: usize) -> bool {
Expand Down Expand Up @@ -207,6 +207,7 @@ pub fn decode_string_chunk(
} else {
// this chunk contains either a stop character or a non-ascii character
let a: [u8; 16] = unsafe { transmute(byte_vec) };
#[allow(clippy::redundant_else)]
if let Some(r) = StringChunk::decode_array(a, &mut index, ascii_only) {
return r;
} else {
Expand Down
2 changes: 1 addition & 1 deletion crates/jiter/src/string_decoder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,7 @@ fn parse_u4(data: &[u8], mut index: usize) -> JsonResult<(u16, usize)> {
.get(index + 1..index + 5)
.ok_or_else(|| json_error!(EofWhileParsingString, data.len()))?;

for c in u4.iter() {
for c in u4 {
index += 1;
let hex = match c {
b'0'..=b'9' => (c & 0x0f) as u16,
Expand Down
4 changes: 2 additions & 2 deletions crates/jiter/tests/python.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use pyo3::prelude::*;
use pyo3::types::{PyDict, PyList, PyString};
use pyo3::types::PyString;

use jiter::{cache_clear, cache_usage, map_json_error, pystring_fast_new, python_parse, JsonValue, StringCacheMode};
use jiter::{pystring_fast_new, JsonValue, StringCacheMode};

#[test]
fn test_to_py_object_numeric() {
Expand Down

0 comments on commit d89c3a8

Please sign in to comment.