Skip to content

Commit

Permalink
typed: Allow duplicates when we merge
Browse files Browse the repository at this point in the history
The goal is to ignore duplicates if they are not included in the partial
object, and to replace all of the occurrences if they are in the partial
object.
  • Loading branch information
apelisse committed Oct 13, 2023
1 parent 1233172 commit c2d986f
Show file tree
Hide file tree
Showing 2 changed files with 83 additions and 21 deletions.
40 changes: 24 additions & 16 deletions typed/merge.go
Original file line number Diff line number Diff line change
Expand Up @@ -180,14 +180,18 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
}
out := make([]interface{}, 0, outLen)

rhsOrder, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
errs = append(errs, rhsErrs...)
lhsOrder, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
errs = append(errs, lhsErrs...)

if len(errs) != 0 {
return errs
}

sharedOrder := make([]*fieldpath.PathElement, 0, rLen)
for i := range rhsOrder {
pe := &rhsOrder[i]
for i := range rhsPEs {
pe := &rhsPEs[i]
if _, ok := observedLHS.Get(*pe); ok {
sharedOrder = append(sharedOrder, pe)
}
Expand All @@ -199,13 +203,15 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
sharedOrder = sharedOrder[1:]
}

lLen, rLen = len(lhsOrder), len(rhsOrder)
mergedRHS := fieldpath.MakePathElementMap(len(rhsPEs))
lLen, rLen = len(lhsPEs), len(rhsPEs)
for lI, rI := 0, 0; lI < lLen || rI < rLen; {
if lI < lLen && rI < rLen {
pe := lhsOrder[lI]
if pe.Equals(rhsOrder[rI]) {
pe := lhsPEs[lI]
if pe.Equals(rhsPEs[rI]) {
// merge LHS & RHS items
lChild, _ := observedLHS.Get(pe)
mergedRHS.Insert(pe, struct{}{})
lChild := lhs.AtUsing(w.allocator, lI)
rChild, _ := observedRHS.Get(pe)
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
errs = append(errs, errs...)
Expand All @@ -222,29 +228,33 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
}
continue
}
if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsOrder[lI]) {
if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsPEs[lI]) {
// shared item, but not the one we want in this round
lI++
continue
}
}
if lI < lLen {
pe := lhsOrder[lI]
pe := lhsPEs[lI]
if _, ok := observedRHS.Get(pe); !ok {
// take LHS item
lChild, _ := observedLHS.Get(pe)
lChild := lhs.AtUsing(w.allocator, lI)
mergeOut, errs := w.mergeListItem(t, pe, lChild, nil)
errs = append(errs, errs...)
if mergeOut != nil {
out = append(out, *mergeOut)
}
lI++
continue
} else if _, ok := mergedRHS.Get(pe); ok {
// we've already merged it with RHS, we don't want to duplicate it, skip it.
lI++
}
}
if rI < rLen {
// Take the RHS item, merge with matching LHS item if possible
pe := rhsOrder[rI]
pe := rhsPEs[rI]
mergedRHS.Insert(pe, struct{}{})
lChild, _ := observedLHS.Get(pe) // may be nil
rChild, _ := observedRHS.Get(pe)
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
Expand Down Expand Up @@ -290,11 +300,9 @@ func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) (
// this element.
continue
}
if _, found := observed.Get(pe); found {
errs = append(errs, errorf("duplicate entries for key %v", pe.String())...)
continue
if _, found := observed.Get(pe); !found {
observed.Insert(pe, child)
}
observed.Insert(pe, child)
pes = append(pes, pe)
}
return pes, observed, errs
Expand Down
64 changes: 59 additions & 5 deletions typed/merge_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,34 @@ var mergeCases = []mergeTestCase{{
`{"setStr":["a","b","c","d","e","f","g","h","i","j"]}`,
`{"setStr":["1","b","2","h","3","e","4","k","l"]}`,
`{"setStr":["a","1","b","c","d","f","g","2","h","i","j","3","e","4","k","l"]}`,
}, { // We have a duplicate in LHS
`{"setStr":["a","b","b"]}`,
`{"setStr":["c"]}`,
`{"setStr":["a","b","b","c"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","b"]}`,
`{"setStr":["b"]}`,
`{"setStr":["a","b"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","b"]}`,
`{"setStr":["a"]}`,
`{"setStr":["a","b","b"]}`,
}, { // We have a duplicate in LHS.
`{"setStr":["a","b","c","d","e","c"]}`,
`{"setStr":["1","b","2","e","d"]}`,
`{"setStr":["a","1","b","c","2","e","c","d"]}`,
}, { // We have a duplicate in LHS, also present in RHS, keep only one.
`{"setStr":["a","b","c","d","e","c"]}`,
`{"setStr":["1","b","2","c","e","d"]}`,
`{"setStr":["a","1","b","2","c","e","d"]}`,
}, { // We have 2 duplicates in LHS, one is replaced.
`{"setStr":["a","a","b","b"]}`,
`{"setStr":["b","c","d"]}`,
`{"setStr":["a","a","b","c","d"]}`,
}, { // We have 2 duplicates in LHS, and nothing on the right
`{"setStr":["a","a","b","b"]}`,
`{"setStr":[]}`,
`{"setStr":["a","a","b","b"]}`,
}, {
`{"setBool":[true]}`,
`{"setBool":[false]}`,
Expand All @@ -336,6 +364,22 @@ var mergeCases = []mergeTestCase{{
`{"setNumeric":[1,2,3.14159]}`,
`{"setNumeric":[1,2,3]}`,
`{"setNumeric":[1,2,3.14159,3]}`,
}, {
`{"setStr":["c","a","g","f","c","a"]}`,
`{"setStr":["c","f","a","g"]}`,
`{"setStr":["c","f","a","g"]}`,
}, {
`{"setNumeric":[1,2,3.14159, 1, 2]}`,
`{"setNumeric":[1,2,3]}`,
`{"setNumeric":[1,2,3.14159,3]}`,
}, {
`{"setBool":[true,false,true]}`,
`{"setBool":[false]}`,
`{"setBool":[true, false,true]}`,
}, {
`{"setBool":[true,false,true]}`,
`{"setBool":[true]}`,
`{"setBool":[true, false]}`,
},
},
}, {
Expand Down Expand Up @@ -423,6 +467,18 @@ var mergeCases = []mergeTestCase{{
`{"atomicList":["a","a","a"]}`,
`{"atomicList":["a","a"]}`,
`{"atomicList":["a","a"]}`,
}, {
`{"list":[{"key":"a","id":1,"bv":true},{"key":"b","id":2},{"key":"a","id":1,"bv":false,"nv":2}]}`,
`{"list":[{"key":"a","id":1,"nv":3}, {"key":"c","id":3},{"key":"b","id":2}]}`,
`{"list":[{"key":"a","id":1,"bv":true,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
}, {
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
`{"list":[]}`,
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
}, {
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
`{}`,
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
}},
}}

Expand All @@ -437,18 +493,16 @@ func (tt mergeTestCase) test(t *testing.T) {
t.Run(fmt.Sprintf("%v-valid-%v", tt.name, i), func(t *testing.T) {
t.Parallel()
pt := parser.Type(tt.rootTypeName)

lhs, err := pt.FromYAML(triplet.lhs)
// Former object can have duplicates in sets.
lhs, err := pt.FromYAML(triplet.lhs, typed.AllowDuplicates)
if err != nil {
t.Fatalf("unable to parser/validate lhs yaml: %v\n%v", err, triplet.lhs)
}

rhs, err := pt.FromYAML(triplet.rhs)
if err != nil {
t.Fatalf("unable to parser/validate rhs yaml: %v\n%v", err, triplet.rhs)
}

out, err := pt.FromYAML(triplet.out)
out, err := pt.FromYAML(triplet.out, typed.AllowDuplicates)
if err != nil {
t.Fatalf("unable to parser/validate out yaml: %v\n%v", err, triplet.out)
}
Expand Down

0 comments on commit c2d986f

Please sign in to comment.