Merge pull request #358 from epage/elide
fix(filter): Take redactions into account for Array elides
epage authored Aug 14, 2024
2 parents 4492c30 + 9344f20 commit e230a1d
Showing 1 changed file with 66 additions and 50 deletions.
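
To illustrate the fix, here is a minimal sketch of the matching problem it addresses. This is not snapbox's public API; the [HASH] placeholder and the sample values are invented for illustration.

use serde_json::json;

fn main() {
    // Actual output captured by a test.
    let actual = json!(["setup", "step one", "step two", "e230a1d", "done"]);
    // Pattern: elide the middle, then expect a redacted hash before "done".
    let pattern = json!(["setup", "{...}", "[HASH]", "done"]);

    // Previously, the array elide looked for the literal value "[HASH]" in
    // `actual` to decide where "{...}" stops, found nothing, and gave up.
    // With this change, each candidate element is normalized against the next
    // pattern element first (applying redactions), so "e230a1d" -> "[HASH]"
    // matches and the elide ends at the right place.
    println!("actual:  {actual}");
    println!("pattern: {pattern}");
}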
116 changes: 66 additions & 50 deletions crates/snapbox/src/filter/pattern.rs
@@ -431,38 +431,7 @@ fn normalize_value_to_redactions(
            *act = normalize_str_to_redactions(act, exp, substitutions);
        }
        (Array(act), Array(exp)) => {
            let mut sections = exp.split(|e| e == VALUE_WILDCARD).peekable();
            let mut processed = 0;
            while let Some(expected_subset) = sections.next() {
                // Process all values in the current section
                if !expected_subset.is_empty() {
                    let actual_subset = &mut act[processed..processed + expected_subset.len()];
                    for (a, e) in actual_subset.iter_mut().zip(expected_subset) {
                        normalize_value_to_redactions(a, e, substitutions);
                    }
                    processed += expected_subset.len();
                }

                if let Some(next_section) = sections.peek() {
                    // If the next section has nothing in it, replace from processed to end with
                    // a single "{...}"
                    if next_section.is_empty() {
                        act.splice(processed.., vec![String(VALUE_WILDCARD.to_owned())]);
                        processed += 1;
                    } else {
                        let first = next_section.first().unwrap();
                        // Replace everything up until the value we are looking for with
                        // a single "{...}".
                        if let Some(index) = act.iter().position(|v| v == first) {
                            act.splice(processed..index, vec![String(VALUE_WILDCARD.to_owned())]);
                            processed += 1;
                        } else {
                            // If we cannot find the value we are looking for return early
                            break;
                        }
                    }
                }
            }
            *act = normalize_array_to_redactions(act, exp, substitutions);
        }
        (Object(act), Object(exp)) => {
            let has_key_wildcard =
@@ -483,6 +452,54 @@ fn normalize_value_to_redactions(
    }
}

#[cfg(feature = "structured-data")]
fn normalize_array_to_redactions(
    input: &[serde_json::Value],
    pattern: &[serde_json::Value],
    redactions: &Redactions,
) -> Vec<serde_json::Value> {
    if input == pattern {
        return input.to_vec();
    }

    let mut normalized: Vec<serde_json::Value> = Vec::new();
    let mut input_index = 0;
    let mut pattern = pattern.iter().peekable();
    while let Some(pattern_elem) = pattern.next() {
        if pattern_elem == VALUE_WILDCARD {
            let Some(next_pattern_elem) = pattern.peek() else {
                // Stop as elide consumes to end
                normalized.push(pattern_elem.clone());
                input_index = input.len();
                break;
            };
            let Some(index_offset) = input[input_index..].iter().position(|next_input_elem| {
                let mut next_input_elem = next_input_elem.clone();
                normalize_value_to_redactions(&mut next_input_elem, next_pattern_elem, redactions);
                next_input_elem == **next_pattern_elem
            }) else {
                // Give up as we can't find where the elide ends
                break;
            };
            normalized.push(pattern_elem.clone());
            input_index += index_offset;
        } else {
            let Some(input_elem) = input.get(input_index) else {
                // Give up as we have no more content to check
                break;
            };

            input_index += 1;
            let mut normalized_elem = input_elem.clone();
            normalize_value_to_redactions(&mut normalized_elem, pattern_elem, redactions);
            normalized.push(normalized_elem);
        }
    }

    normalized.extend(input[input_index..].iter().cloned());
    normalized
}

fn normalize_str_to_redactions(input: &str, pattern: &str, redactions: &Redactions) -> String {
    if input == pattern {
        return input.to_owned();
@@ -492,30 +509,29 @@ fn normalize_str_to_redactions(
    let mut input_index = 0;
    let input_lines: Vec<_> = crate::utils::LinesWithTerminator::new(input).collect();
    let mut pattern_lines = crate::utils::LinesWithTerminator::new(pattern).peekable();
    'outer: while let Some(pattern_line) = pattern_lines.next() {
    while let Some(pattern_line) = pattern_lines.next() {
        if is_line_elide(pattern_line) {
            if let Some(next_pattern_line) = pattern_lines.peek() {
                for (index_offset, next_input_line) in
                    input_lines[input_index..].iter().copied().enumerate()
                {
                    if line_matches(next_input_line, next_pattern_line, redactions) {
                        normalized.push(pattern_line);
                        input_index += index_offset;
                        continue 'outer;
                    }
                }
                // Give up doing further normalization
                break;
            } else {
                // Give up doing further normalization
            let Some(next_pattern_line) = pattern_lines.peek() else {
                // Stop as elide consumes to end
                normalized.push(pattern_line);
                // captured rest so don't copy remaining lines over
                input_index = input_lines.len();
                break;
            }
            };
            let Some(index_offset) =
                input_lines[input_index..]
                    .iter()
                    .position(|next_input_line| {
                        line_matches(next_input_line, next_pattern_line, redactions)
                    })
            else {
                // Give up as we can't find where the elide ends
                break;
            };
            normalized.push(pattern_line);
            input_index += index_offset;
        } else {
            let Some(input_line) = input_lines.get(input_index) else {
                // Give up doing further normalization
                // Give up as we have no more content to check
                break;
            };

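As a rough trace of the new normalize_array_to_redactions helper (the values and the [ID] redaction below are hypothetical), consider an actual array matched against a pattern that mixes an elide with a redacted placeholder:

use serde_json::json;

fn main() {
    let input = json!(["alpha", "2", "3", "abc-123", "omega"]);
    let pattern = json!(["alpha", "{...}", "[ID]", "omega"]);

    // Walking the pattern:
    //   "alpha"  -> input[0] already matches and is kept as-is.
    //   "{...}"  -> peek at "[ID]" and scan forward: "2" and "3" do not redact
    //               to "[ID]", but "abc-123" does, so the elide stops there.
    //   "[ID]"   -> "abc-123" is normalized to its redacted form "[ID]".
    //   "omega"  -> the final element matches literally.
    // The normalized array equals the pattern, so the comparison succeeds.
    println!("{input} normalizes to {pattern} under an [ID] redaction");
}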
