Skip to content

Commit

Permalink
typed: Allow duplicates when we merge
Browse files Browse the repository at this point in the history
The goal is to ignore duplicates if they are not included in the partial
object, and to replace all of the occurrences if they are in the partial
object.
  • Loading branch information
apelisse committed Oct 19, 2023
1 parent f8c7b27 commit c9d20e7
Show file tree
Hide file tree
Showing 2 changed files with 76 additions and 11 deletions.
23 changes: 17 additions & 6 deletions typed/merge.go
Original file line number Diff line number Diff line change
Expand Up @@ -180,11 +180,15 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
}
out := make([]interface{}, 0, outLen)

rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs, false)
errs = append(errs, rhsErrs...)
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs, true)
errs = append(errs, lhsErrs...)

if len(errs) != 0 {
return errs
}

sharedOrder := make([]*fieldpath.PathElement, 0, rLen)
for i := range rhsPEs {
pe := &rhsPEs[i]
Expand All @@ -199,12 +203,14 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
sharedOrder = sharedOrder[1:]
}

mergedRHS := fieldpath.MakePathElementMap(len(rhsPEs))
lLen, rLen = len(lhsPEs), len(rhsPEs)
for lI, rI := 0, 0; lI < lLen || rI < rLen; {
if lI < lLen && rI < rLen {
pe := lhsPEs[lI]
if pe.Equals(rhsPEs[rI]) {
// merge LHS & RHS items
mergedRHS.Insert(pe, struct{}{})
lChild, _ := observedLHS.Get(pe)
rChild, _ := observedRHS.Get(pe)
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
Expand Down Expand Up @@ -232,19 +238,23 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
pe := lhsPEs[lI]
if _, ok := observedRHS.Get(pe); !ok {
// take LHS item
lChild, _ := observedLHS.Get(pe)
lChild := lhs.AtUsing(w.allocator, lI)
mergeOut, errs := w.mergeListItem(t, pe, lChild, nil)
errs = append(errs, errs...)
if mergeOut != nil {
out = append(out, *mergeOut)
}
lI++
continue
} else if _, ok := mergedRHS.Get(pe); ok {
// we've already merged it with RHS, we don't want to duplicate it, skip it.
lI++
}
}
if rI < rLen {
// Take the RHS item, merge with matching LHS item if possible
pe := rhsPEs[rI]
mergedRHS.Insert(pe, struct{}{})
lChild, _ := observedLHS.Get(pe) // may be nil
rChild, _ := observedRHS.Get(pe)
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
Expand Down Expand Up @@ -272,7 +282,7 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
return errs
}

func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List, allowDuplicates bool) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
var errs ValidationErrors
length := 0
if list != nil {
Expand All @@ -290,11 +300,12 @@ func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) (
// this element.
continue
}
if _, found := observed.Get(pe); found {
if _, found := observed.Get(pe); found && !allowDuplicates {
errs = append(errs, errorf("duplicate entries for key %v", pe.String())...)
continue
} else if !found {
observed.Insert(pe, child)
}
observed.Insert(pe, child)
pes = append(pes, pe)
}
return pes, observed, errs
Expand Down
64 changes: 59 additions & 5 deletions typed/merge_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,34 @@ var mergeCases = []mergeTestCase{{
`{"setStr":["a","b","c","d","e","f","g","h","i","j"]}`,
`{"setStr":["1","b","2","h","3","e","4","k","l"]}`,
`{"setStr":["a","1","b","c","d","f","g","2","h","i","j","3","e","4","k","l"]}`,
}, { // We have a duplicate in LHS
`{"setStr":["a","b","b"]}`,
`{"setStr":["c"]}`,
`{"setStr":["a","b","b","c"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","b"]}`,
`{"setStr":["b"]}`,
`{"setStr":["a","b"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","b"]}`,
`{"setStr":["a"]}`,
`{"setStr":["a","b","b"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","c","d","e","c"]}`,
`{"setStr":["1","b","2","e","d"]}`,
`{"setStr":["a","1","b","c","2","e","c","d"]}`,
}, { // We have a duplicate in LHS, also present in RHS, keep only one.
`{"setStr":["a","b","c","d","e","c"]}`,
`{"setStr":["1","b","2","c","e","d"]}`,
`{"setStr":["a","1","b","2","c","e","d"]}`,
}, { // We have 2 duplicates in LHS, one is replaced.
`{"setStr":["a","a","b","b"]}`,
`{"setStr":["b","c","d"]}`,
`{"setStr":["a","a","b","c","d"]}`,
}, { // We have 2 duplicates in LHS, and nothing on the right
`{"setStr":["a","a","b","b"]}`,
`{"setStr":[]}`,
`{"setStr":["a","a","b","b"]}`,
}, {
`{"setBool":[true]}`,
`{"setBool":[false]}`,
Expand All @@ -336,6 +364,22 @@ var mergeCases = []mergeTestCase{{
`{"setNumeric":[1,2,3.14159]}`,
`{"setNumeric":[1,2,3]}`,
`{"setNumeric":[1,2,3.14159,3]}`,
}, {
`{"setStr":["c","a","g","f","c","a"]}`,
`{"setStr":["c","f","a","g"]}`,
`{"setStr":["c","f","a","g"]}`,
}, {
`{"setNumeric":[1,2,3.14159,1,2]}`,
`{"setNumeric":[1,2,3]}`,
`{"setNumeric":[1,2,3.14159,3]}`,
}, {
`{"setBool":[true,false,true]}`,
`{"setBool":[false]}`,
`{"setBool":[true,false,true]}`,
}, {
`{"setBool":[true,false,true]}`,
`{"setBool":[true]}`,
`{"setBool":[true, false]}`,
},
},
}, {
Expand Down Expand Up @@ -423,6 +467,18 @@ var mergeCases = []mergeTestCase{{
`{"atomicList":["a","a","a"]}`,
`{"atomicList":["a","a"]}`,
`{"atomicList":["a","a"]}`,
}, {
`{"list":[{"key":"a","id":1,"bv":true},{"key":"b","id":2},{"key":"a","id":1,"bv":false,"nv":2}]}`,
`{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
`{"list":[{"key":"a","id":1,"bv":true,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
}, {
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
`{"list":[]}`,
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
}, {
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
`{}`,
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
}},
}}

Expand All @@ -437,18 +493,16 @@ func (tt mergeTestCase) test(t *testing.T) {
t.Run(fmt.Sprintf("%v-valid-%v", tt.name, i), func(t *testing.T) {
t.Parallel()
pt := parser.Type(tt.rootTypeName)

lhs, err := pt.FromYAML(triplet.lhs)
// Former object can have duplicates in sets.
lhs, err := pt.FromYAML(triplet.lhs, typed.AllowDuplicates)
if err != nil {
t.Fatalf("unable to parser/validate lhs yaml: %v\n%v", err, triplet.lhs)
}

rhs, err := pt.FromYAML(triplet.rhs)
if err != nil {
t.Fatalf("unable to parser/validate rhs yaml: %v\n%v", err, triplet.rhs)
}

out, err := pt.FromYAML(triplet.out)
out, err := pt.FromYAML(triplet.out, typed.AllowDuplicates)
if err != nil {
t.Fatalf("unable to parser/validate out yaml: %v\n%v", err, triplet.out)
}
Expand Down

0 comments on commit c9d20e7

Please sign in to comment.