scheduler: count implicit spread targets as a single target
When calculating the score in the `SpreadIterator`, the score boost is
proportional to the difference between the current and desired count. But when
there are implicit spread targets, the desired count is computed for all
possible implicit targets combined, while the current count is tracked per
individual value, which results in incorrect scoring unless there's only one
implicit target.

This changeset updates the `propertySet` struct to accept a set of explicit
target values so it can detect when a property value falls into the implicit set
and should be combined with other implicit values.

Fixes: #11823
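
For illustration only, here is a minimal standalone Go sketch (not part of this commit; `foldImplicit` is a hypothetical name) of the counting rule the change introduces: any attribute value that is not an explicit spread target is folded into a single wildcard bucket, so all implicit values are scored as one target rather than many.

```go
package main

import (
	"fmt"
	"slices"
)

// foldImplicit collapses counts for values that are not explicit spread
// targets into a single "*" bucket, mirroring the intent of the fix.
func foldImplicit(counts map[string]uint64, explicit []string) map[string]uint64 {
	folded := make(map[string]uint64, len(counts))
	for value, count := range counts {
		if len(explicit) == 0 || slices.Contains(explicit, value) {
			folded[value] += count // explicit values keep their own bucket
			continue
		}
		folded["*"] += count // implicit values share one bucket
	}
	return folded
}

func main() {
	// Twelve allocations across three datacenters, with only dc1 named
	// as an explicit spread target.
	counts := map[string]uint64{"dc1": 6, "dc2": 3, "dc3": 3}
	fmt.Println(foldImplicit(counts, []string{"dc1"}))
	// Output: map[*:6 dc1:6]
}
```

With the counts combined this way, the comparison against the implicit targets' desired count uses a single current count instead of several smaller per-value counts.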
tgross committed May 15, 2023
1 parent d706b8f commit 9cd0490
Showing 4 changed files with 177 additions and 9 deletions.
3 changes: 3 additions & 0 deletions .changelog/17195.txt
@@ -0,0 +1,3 @@
```release-note:bug
scheduler: Fixed a bug where implicit `spread` targets were treated as separate targets for scoring
```
42 changes: 34 additions & 8 deletions scheduler/propertyset.go
@@ -9,6 +9,8 @@ import (

log "github.com/hashicorp/go-hclog"
memdb "github.com/hashicorp/go-memdb"
"golang.org/x/exp/slices"

"github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/nomad/structs"
)
@@ -33,6 +35,10 @@ type propertySet struct {
// targetAttribute is the attribute this property set is checking
targetAttribute string

// targetValues are the set of attribute values that are explicitly expected,
// so we can combine the count of values that belong to any implicit targets.
targetValues []string

// allowedCount is the allowed number of allocations that can have the
// distinct property
allowedCount uint64
@@ -62,6 +68,7 @@ func NewPropertySet(ctx Context, job *structs.Job) *propertySet {
jobID: job.ID,
namespace: job.Namespace,
existingValues: make(map[string]uint64),
targetValues: []string{},
logger: ctx.Logger().Named("property_set"),
}

@@ -130,6 +137,10 @@ func (p *propertySet) setTargetAttributeWithCount(targetAttribute string, allowe
p.PopulateProposed()
}

func (p *propertySet) SetTargetValues(values []string) {
p.targetValues = values
}

// populateExisting is a helper shared when setting the constraint to populate
// the existing values.
func (p *propertySet) populateExisting() {
@@ -231,20 +242,21 @@ func (p *propertySet) SatisfiesDistinctProperties(option *structs.Node, tg strin
// UsedCount returns the number of times the value of the attribute being tracked by this
// property set is used across current and proposed allocations. It also returns the resolved
// attribute value for the node, and an error message if it couldn't be resolved correctly
func (p *propertySet) UsedCount(option *structs.Node, tg string) (string, string, uint64) {
func (p *propertySet) UsedCount(option *structs.Node, _ string) (string, string, uint64) {
// Check if there was an error building
if p.errorBuilding != nil {
return "", p.errorBuilding.Error(), 0
}

// Get the nodes property value
nValue, ok := getProperty(option, p.targetAttribute)
targetPropertyValue := p.targetedPropertyValue(nValue)
if !ok {
return nValue, fmt.Sprintf("missing property %q", p.targetAttribute), 0
}
combinedUse := p.GetCombinedUseMap()
usedCount := combinedUse[nValue]
return nValue, "", usedCount
usedCount := combinedUse[targetPropertyValue]
return targetPropertyValue, "", usedCount
}

// GetCombinedUseMap counts how many times the property has been used by
@@ -254,23 +266,25 @@ func (p *propertySet) GetCombinedUseMap() map[string]uint64 {
combinedUse := make(map[string]uint64, helper.Max(len(p.existingValues), len(p.proposedValues)))
for _, usedValues := range []map[string]uint64{p.existingValues, p.proposedValues} {
for propertyValue, usedCount := range usedValues {
combinedUse[propertyValue] += usedCount
targetPropertyValue := p.targetedPropertyValue(propertyValue)
combinedUse[targetPropertyValue] += usedCount
}
}

// Go through and discount the combined count when the value has been
// cleared by a proposed stop.
for propertyValue, clearedCount := range p.clearedValues {
combined, ok := combinedUse[propertyValue]
targetPropertyValue := p.targetedPropertyValue(propertyValue)
combined, ok := combinedUse[targetPropertyValue]
if !ok {
continue
}

// Don't clear below 0.
if combined >= clearedCount {
combinedUse[propertyValue] = combined - clearedCount
combinedUse[targetPropertyValue] = combined - clearedCount
} else {
combinedUse[propertyValue] = 0
combinedUse[targetPropertyValue] = 0
}
}
return combinedUse
@@ -335,7 +349,8 @@ func (p *propertySet) populateProperties(allocs []*structs.Allocation, nodes map
continue
}

properties[nProperty]++
targetPropertyValue := p.targetedPropertyValue(nProperty)
properties[targetPropertyValue]++
}
}

@@ -347,3 +362,14 @@ func getProperty(n *structs.Node, property string) (string, bool) {

return resolveTarget(property, n)
}

// targetedPropertyValue transforms the property value to combine all implicit
// target values into a single wildcard placeholder so that we get accurate
// counts when we compare an explicitly-defined target against multiple implicit
// targets.
func (p *propertySet) targetedPropertyValue(propertyValue string) string {
if len(p.targetValues) == 0 || slices.Contains(p.targetValues, propertyValue) {
return propertyValue
}
return "*"
}
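
To make the combining behavior above concrete, here is a hedged sketch of a table-style check one might write in the `scheduler` package's tests (the test name is hypothetical and not part of this commit; it assumes access to the unexported `propertySet` type and the `must` helpers already imported by `spread_test.go`):

```go
func TestPropertySet_targetedPropertyValue(t *testing.T) {
	ps := &propertySet{targetValues: []string{"dc1", "dc2"}}

	// Explicit values pass through unchanged.
	must.Eq(t, "dc1", ps.targetedPropertyValue("dc1"))
	must.Eq(t, "dc2", ps.targetedPropertyValue("dc2"))

	// Any other value collapses into the wildcard bucket, so all
	// implicit values accumulate under a single count.
	must.Eq(t, "*", ps.targetedPropertyValue("dc3"))
	must.Eq(t, "*", ps.targetedPropertyValue("dc4"))

	// With no explicit targets configured, values are left as-is.
	ps.targetValues = nil
	must.Eq(t, "dc3", ps.targetedPropertyValue("dc3"))
}
```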
5 changes: 5 additions & 0 deletions scheduler/spread.go
@@ -4,6 +4,7 @@
package scheduler

import (
"github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/nomad/structs"
)

@@ -91,13 +92,17 @@ func (iter *SpreadIterator) SetTaskGroup(tg *structs.TaskGroup) {
for _, spread := range iter.jobSpreads {
pset := NewPropertySet(iter.ctx, iter.job)
pset.SetTargetAttribute(spread.Attribute, tg.Name)
pset.SetTargetValues(helper.ConvertSlice(spread.SpreadTarget,
func(t *structs.SpreadTarget) string { return t.Value }))
iter.groupPropertySets[tg.Name] = append(iter.groupPropertySets[tg.Name], pset)
}

// Include property sets at the task group level
for _, spread := range tg.Spreads {
pset := NewPropertySet(iter.ctx, iter.job)
pset.SetTargetAttribute(spread.Attribute, tg.Name)
pset.SetTargetValues(helper.ConvertSlice(spread.SpreadTarget,
func(t *structs.SpreadTarget) string { return t.Value }))
iter.groupPropertySets[tg.Name] = append(iter.groupPropertySets[tg.Name], pset)
}
}
136 changes: 135 additions & 1 deletion scheduler/spread_test.go
@@ -11,12 +11,15 @@ import (
"testing"
"time"

"github.com/shoenig/test"
"github.com/shoenig/test/must"
"github.com/stretchr/testify/require"

"github.com/hashicorp/nomad/ci"
"github.com/hashicorp/nomad/helper/pointer"
"github.com/hashicorp/nomad/helper/uuid"
"github.com/hashicorp/nomad/nomad/mock"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/stretchr/testify/require"
)

func TestSpreadIterator_SingleAttribute(t *testing.T) {
@@ -921,3 +924,134 @@ func TestSpreadPanicDowngrade(t *testing.T) {
require.NoError(t, processErr, "failed to process eval")
require.Len(t, h.Plans, 1)
}

func TestSpread_ImplicitTargets(t *testing.T) {

dcs := []string{"dc1", "dc2", "dc3"}

setupNodes := func(h *Harness) map[string]string {
nodesToDcs := map[string]string{}
var nodes []*RankedNode

for i, dc := range dcs {
for n := 0; n < 4; n++ {
node := mock.Node()
node.Datacenter = dc
must.NoError(t, h.State.UpsertNode(
structs.MsgTypeTestSetup, uint64(100+i), node))
nodes = append(nodes, &RankedNode{Node: node})
nodesToDcs[node.ID] = node.Datacenter
}
}
return nodesToDcs
}

setupJob := func(h *Harness, testCaseSpread *structs.Spread) *structs.Evaluation {
job := mock.MinJob()
job.Datacenters = dcs
job.TaskGroups[0].Count = 12

job.TaskGroups[0].Spreads = []*structs.Spread{testCaseSpread}
must.NoError(t, h.State.UpsertJob(
structs.MsgTypeTestSetup, h.NextIndex(), nil, job))

eval := &structs.Evaluation{
Namespace: structs.DefaultNamespace,
ID: uuid.Generate(),
Priority: job.Priority,
TriggeredBy: structs.EvalTriggerJobRegister,
JobID: job.ID,
Status: structs.EvalStatusPending,
}
must.NoError(t, h.State.UpsertEvals(structs.MsgTypeTestSetup,
h.NextIndex(), []*structs.Evaluation{eval}))

return eval
}

testCases := []struct {
name string
spread *structs.Spread
expect map[string]int
}{
{

name: "empty implicit target",
spread: &structs.Spread{
Weight: 100,
Attribute: "${node.datacenter}",
SpreadTarget: []*structs.SpreadTarget{
{
Value: "dc1",
Percent: 50,
},
},
},
expect: map[string]int{"dc1": 6},
},
{
name: "wildcard implicit target",
spread: &structs.Spread{
Weight: 100,
Attribute: "${node.datacenter}",
SpreadTarget: []*structs.SpreadTarget{
{
Value: "dc1",
Percent: 50,
},
{
Value: "*",
Percent: 50,
},
},
},
expect: map[string]int{"dc1": 6},
},
{
name: "explicit targets",
spread: &structs.Spread{
Weight: 100,
Attribute: "${node.datacenter}",
SpreadTarget: []*structs.SpreadTarget{
{
Value: "dc1",
Percent: 50,
},
{
Value: "dc2",
Percent: 25,
},
{
Value: "dc3",
Percent: 25,
},
},
},
expect: map[string]int{"dc1": 6, "dc2": 3, "dc3": 3},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
h := NewHarness(t)
nodesToDcs := setupNodes(h)
eval := setupJob(h, tc.spread)
must.NoError(t, h.Process(NewServiceScheduler, eval))
must.Len(t, 1, h.Plans)

plan := h.Plans[0]
must.False(t, plan.IsNoOp())

dcCounts := map[string]int{}
for node, allocs := range plan.NodeAllocation {
dcCounts[nodesToDcs[node]] += len(allocs)
}
for dc, expectVal := range tc.expect {
// not using test.MapEqual here because we have incomplete
// expectations for the implicit DCs on some tests.
test.Eq(t, expectVal, dcCounts[dc],
test.Sprintf("expected %d in %q", expectVal, dc))
}

})
}
}
