diff --git a/CHANGELOG.md b/CHANGELOG.md index b819742925f6..6725f6cfded9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ FEATURES: * **Readiness Checks**: Adds `service` and `check` `on_update` configuration to support liveness and readiness checks. [[GH-9955](https://github.com/hashicorp/nomad/issues/9955)] IMPROVEMENTS: + * api: Added an API endpoint for fuzzy search queries [[GH-10184](https://github.com/hashicorp/nomad/pull/10184)] * api: Removed unimplemented `CSIVolumes.PluginList` API. [[GH-10158](https://github.com/hashicorp/nomad/issues/10158)] * cli: Update defaults for `nomad operator debug` flags `-interval` and `-server-id` to match common usage. [[GH-10121](https://github.com/hashicorp/nomad/issues/10121)] * consul/connect: Enable setting `local_bind_address` field on connect upstreams [[GH-6248](https://github.com/hashicorp/nomad/issues/6248)] diff --git a/api/contexts/contexts.go b/api/contexts/contexts.go index 399424df5bba..b973f733bd02 100644 --- a/api/contexts/contexts.go +++ b/api/contexts/contexts.go @@ -1,9 +1,12 @@ +// Package contexts provides constants used with the Nomad Search API. package contexts -// Context defines the scope in which a search for Nomad object operates +// Context defines the scope in which a search for a Nomad object operates. type Context string const ( + // These Context types are used to reference the high level Nomad object + // types that can be searched. Allocs Context = "allocs" Deployments Context = "deployment" Evals Context = "evals" @@ -15,5 +18,16 @@ const ( ScalingPolicies Context = "scaling_policy" Plugins Context = "plugins" Volumes Context = "volumes" - All Context = "all" + + // These Context types are used to associate a search result from a lower + // level Nomad object with one of the higher level Context types above. + Groups Context = "groups" + Services Context = "services" + Tasks Context = "tasks" + Images Context = "images" + Commands Context = "commands" + Classes Context = "classes" + + // Context used to represent the set of all the higher level Context types. + All Context = "all" ) diff --git a/api/search.go b/api/search.go index 6a6cb9b59e17..3b020827a495 100644 --- a/api/search.go +++ b/api/search.go @@ -13,7 +13,7 @@ func (c *Client) Search() *Search { return &Search{client: c} } -// PrefixSearch returns a list of matches for a particular context and prefix. +// PrefixSearch returns a set of matches for a particular context and prefix. func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryOptions) (*SearchResponse, *QueryMeta, error) { var resp SearchResponse req := &SearchRequest{Prefix: prefix, Context: context} @@ -26,14 +26,72 @@ func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryO return &resp, qm, nil } +type SearchResponse struct { + Matches map[contexts.Context][]string + Truncations map[contexts.Context]bool + QueryMeta +} + type SearchRequest struct { Prefix string Context contexts.Context QueryOptions } -type SearchResponse struct { - Matches map[contexts.Context][]string +// FuzzySearch returns a set of matches for a given context and string.
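//
// A minimal client-side usage sketch, assuming an already-configured
// *api.Client named client:
//
//	resp, _, err := client.Search().FuzzySearch("web", contexts.All, nil)
//	if err != nil {
//		return err
//	}
//	// Each FuzzyMatch carries the matched name and the IDs of its parent objects.
//	for _, match := range resp.Matches[contexts.Tasks] {
//		fmt.Println(match.ID, match.Scope)
//	}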
+func (s *Search) FuzzySearch(text string, context contexts.Context, q *QueryOptions) (*FuzzySearchResponse, *QueryMeta, error) { + var resp FuzzySearchResponse + + req := &FuzzySearchRequest{ + Context: context, + Text: text, + } + + qm, err := s.client.putQuery("/v1/search/fuzzy", req, &resp, q) + if err != nil { + return nil, nil, err + } + + return &resp, qm, nil +} + +// FuzzyMatch is used to describe the ID of an object which may be a machine +// readable UUID or a human readable Name. If the object is a component of a Job, +// the Scope is a list of IDs starting from Namespace down to the parent object of +// ID. +// +// e.g. A Task-level service would have scope like, +// ["<namespace>", "<job>", "<group>", "<task>"] +type FuzzyMatch struct { + ID string // ID is UUID or Name of object + Scope []string `json:",omitempty"` // IDs of parent objects +} + +// FuzzySearchResponse is used to return fuzzy matches and information about +// whether the match list is truncated specific to each type of searchable Context. +type FuzzySearchResponse struct { + // Matches is a map of Context types to IDs which fuzzy match a specified query. + Matches map[contexts.Context][]FuzzyMatch + + // Truncations indicates whether the matches for a particular Context have + // been truncated. Truncations map[contexts.Context]bool + QueryMeta +} + +// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns +// a list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type FuzzySearchRequest struct { + // Text is what names are fuzzy-matched to. E.g. if the given text were + // "py", potential matches might be "python", "mypy", etc. among the names of jobs, nodes, + // allocs, groups, services, commands, images, classes. + Text string + + // Context is the type that can be matched against. A Context of "all" indicates + // all Context types are queried for matching.
+ Context contexts.Context + + QueryOptions +} diff --git a/api/search_test.go b/api/search_test.go index 892394c39ebb..26aa39c2f7d6 100644 --- a/api/search_test.go +++ b/api/search_test.go @@ -7,8 +7,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestSearch_List(t *testing.T) { - require := require.New(t) +func TestSearch_PrefixSearch(t *testing.T) { t.Parallel() c, s := makeClient(t, nil, nil) @@ -16,17 +15,38 @@ func TestSearch_List(t *testing.T) { job := testJob() _, _, err := c.Jobs().Register(job, nil) - require.Nil(err) + require.NoError(t, err) id := *job.ID prefix := id[:len(id)-2] resp, qm, err := c.Search().PrefixSearch(prefix, contexts.Jobs, nil) - - require.Nil(err) - require.NotNil(qm) - require.NotNil(qm) + require.NoError(t, err) + require.NotNil(t, qm) + require.NotNil(t, resp) jobMatches := resp.Matches[contexts.Jobs] - require.Equal(1, len(jobMatches)) - require.Equal(id, jobMatches[0]) + require.Len(t, jobMatches, 1) + require.Equal(t, id, jobMatches[0]) +} + +func TestSearch_FuzzySearch(t *testing.T) { + t.Parallel() + c, s := makeClient(t, nil, nil) + defer s.Stop() + + job := testJob() + _, _, err := c.Jobs().Register(job, nil) + require.NoError(t, err) + + resp, qm, err := c.Search().FuzzySearch("bin", contexts.All, nil) + require.NoError(t, err) + require.NotNil(t, qm) + require.NotNil(t, resp) + + commandMatches := resp.Matches[contexts.Commands] + require.Len(t, commandMatches, 1) + require.Equal(t, "/bin/sleep", commandMatches[0].ID) + require.Equal(t, []string{ + "default", "redis", "group1", "task1", + }, commandMatches[0].Scope) } diff --git a/command/agent/agent.go b/command/agent/agent.go index 0f5b415a9d08..286b87519b10 100644 --- a/command/agent/agent.go +++ b/command/agent/agent.go @@ -422,6 +422,16 @@ func convertServerConfig(agentConfig *Config) (*nomad.Config, error) { conf.LicenseEnv = agentConfig.Server.LicenseEnv conf.LicensePath = agentConfig.Server.LicensePath + // Add the search configuration + if search := agentConfig.Server.Search; search != nil { + conf.SearchConfig = &structs.SearchConfig{ + FuzzyEnabled: search.FuzzyEnabled, + LimitQuery: search.LimitQuery, + LimitResults: search.LimitResults, + MinTermLength: search.MinTermLength, + } + } + return conf, nil } diff --git a/command/agent/config.go b/command/agent/config.go index 5f3311f7ffbc..ccb5fec7c6aa 100644 --- a/command/agent/config.go +++ b/command/agent/config.go @@ -503,6 +503,44 @@ type ServerConfig struct { // ExtraKeysHCL is used by hcl to surface unexpected keys ExtraKeysHCL []string `hcl:",unusedKeys" json:"-"` + + Search *Search `hcl:"search"` +} + +// Search is used in servers to configure search API options. +type Search struct { + // FuzzyEnabled toggles whether the FuzzySearch API is enabled. If not + // enabled, requests to /v1/search/fuzzy will reply with a 404 response code. + // + // Default: enabled. + FuzzyEnabled bool `hcl:"fuzzy_enabled"` + + // LimitQuery limits the number of objects searched in the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of Nomad server when + // the FuzzySearch API is enabled. + // + // Default value: 20. + LimitQuery int `hcl:"limit_query"` + + // LimitResults limits the number of results provided by the FuzzySearch API. + // The results are indicated as truncate if the limit is reached. 
+ // + // Lowering this value can reduce resource consumption of Nomad server per + // fuzzy search request when the FuzzySearch API is enabled. + // + // Default value: 100. + LimitResults int `hcl:"limit_results"` + + // MinTermLength is the minimum length of Text required before the FuzzySearch + // API will return results. + // + // Increasing this value can avoid resource consumption on Nomad server by + // reducing searches with less meaningful results. + // + // Default value: 2. + MinTermLength int `hcl:"min_term_length"` } // ServerJoin is used in both clients and servers to bootstrap connections to @@ -900,6 +938,12 @@ func DefaultConfig() *Config { RetryInterval: 30 * time.Second, RetryMaxAttempts: 0, }, + Search: &Search{ + FuzzyEnabled: true, + LimitQuery: 20, + LimitResults: 100, + MinTermLength: 2, + }, }, ACL: &ACLConfig{ Enabled: false, @@ -1434,6 +1478,19 @@ func (a *ServerConfig) Merge(b *ServerConfig) *ServerConfig { result.DefaultSchedulerConfig = &c } + if b.Search != nil { + result.Search = &Search{FuzzyEnabled: b.Search.FuzzyEnabled} + if b.Search.LimitQuery > 0 { + result.Search.LimitQuery = b.Search.LimitQuery + } + if b.Search.LimitResults > 0 { + result.Search.LimitResults = b.Search.LimitResults + } + if b.Search.MinTermLength > 0 { + result.Search.MinTermLength = b.Search.MinTermLength + } + } + // Add the schedulers result.EnabledSchedulers = append(result.EnabledSchedulers, b.EnabledSchedulers...) diff --git a/command/agent/http.go b/command/agent/http.go index 188f11bc68b0..a8ddc4396cd9 100644 --- a/command/agent/http.go +++ b/command/agent/http.go @@ -316,6 +316,7 @@ func (s *HTTPServer) registerHandlers(enableDebug bool) { s.mux.HandleFunc("/v1/status/leader", s.wrap(s.StatusLeaderRequest)) s.mux.HandleFunc("/v1/status/peers", s.wrap(s.StatusPeersRequest)) + s.mux.HandleFunc("/v1/search/fuzzy", s.wrap(s.FuzzySearchRequest)) s.mux.HandleFunc("/v1/search", s.wrap(s.SearchRequest)) s.mux.HandleFunc("/v1/operator/license", s.wrap(s.LicenseRequest)) diff --git a/command/agent/search_endpoint.go b/command/agent/search_endpoint.go index 58ee65f4577c..95e115834821 100644 --- a/command/agent/search_endpoint.go +++ b/command/agent/search_endpoint.go @@ -12,14 +12,14 @@ func (s *HTTPServer) SearchRequest(resp http.ResponseWriter, req *http.Request) if req.Method == "POST" || req.Method == "PUT" { return s.newSearchRequest(resp, req) } - return nil, CodedError(405, ErrInvalidMethod) + return nil, CodedError(http.StatusMethodNotAllowed, ErrInvalidMethod) } func (s *HTTPServer) newSearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { args := structs.SearchRequest{} if err := decodeBody(req, &args); err != nil { - return nil, CodedError(400, err.Error()) + return nil, CodedError(http.StatusBadRequest, err.Error()) } if s.parse(resp, req, &args.Region, &args.QueryOptions) { @@ -34,3 +34,30 @@ func (s *HTTPServer) newSearchRequest(resp http.ResponseWriter, req *http.Reques setMeta(resp, &out.QueryMeta) return out, nil } + +func (s *HTTPServer) FuzzySearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { + if req.Method == "POST" || req.Method == "PUT" { + return s.newFuzzySearchRequest(resp, req) + } + return nil, CodedError(http.StatusMethodNotAllowed, ErrInvalidMethod) +} + +func (s *HTTPServer) newFuzzySearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { + var args structs.FuzzySearchRequest + + if err := decodeBody(req, &args); err != nil { + return nil, 
CodedError(http.StatusBadRequest, err.Error()) + } + + if s.parse(resp, req, &args.Region, &args.QueryOptions) { + return nil, nil + } + + var out structs.FuzzySearchResponse + if err := s.agent.RPC("Search.FuzzySearch", &args, &out); err != nil { + return nil, err + } + + setMeta(resp, &out.QueryMeta) + return out, nil +} diff --git a/command/agent/search_endpoint_test.go b/command/agent/search_endpoint_test.go index 4614a63def0e..e8e2f9ddd8e8 100644 --- a/command/agent/search_endpoint_test.go +++ b/command/agent/search_endpoint_test.go @@ -5,110 +5,189 @@ import ( "net/http/httptest" "testing" + "github.com/hashicorp/nomad/helper/uuid" "github.com/hashicorp/nomad/nomad/mock" "github.com/hashicorp/nomad/nomad/structs" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestHTTP_SearchWithIllegalMethod(t *testing.T) { - assert := assert.New(t) +func header(recorder *httptest.ResponseRecorder, name string) string { + return recorder.Result().Header.Get(name) +} + +func createJobForTest(jobID string, s *TestAgent, t *testing.T) { + job := mock.Job() + job.ID = jobID + job.TaskGroups[0].Count = 1 + state := s.Agent.server.State() + err := state.UpsertJob(structs.MsgTypeTestSetup, 1000, job) + require.NoError(t, err) +} + +func TestHTTP_PrefixSearchWithIllegalMethod(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { req, err := http.NewRequest("DELETE", "/v1/search", nil) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() _, err = s.Server.SearchRequest(respW, req) - assert.NotNil(err, "HTTP DELETE should not be accepted for this endpoint") + require.EqualError(t, err, "Invalid method") }) } -func createJobForTest(jobID string, s *TestAgent, t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearchWithIllegalMethod(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + req, err := http.NewRequest("DELETE", "/v1/search/fuzzy", nil) + require.NoError(t, err) + respW := httptest.NewRecorder() + _, err = s.Server.SearchRequest(respW, req) + require.EqualError(t, err, "Invalid method") + }) +} + +func createCmdJobForTest(name, cmd string, s *TestAgent, t *testing.T) { job := mock.Job() - job.ID = jobID + job.Name = name + job.TaskGroups[0].Tasks[0].Config["command"] = cmd job.TaskGroups[0].Count = 1 - state := s.Agent.server.State() err := state.UpsertJob(structs.MsgTypeTestSetup, 1000, job) - assert.Nil(err) + require.NoError(t, err) } -func TestHTTP_Search_POST(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_POST(t *testing.T) { + t.Parallel() testJob := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJob, s, t) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 1) + require.Equal(t, testJob, j[0]) + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_POST(t *testing.T) { + t.Parallel() + + testJobID := uuid.Generate() + + httpTest(t, nil, func(s *TestAgent) { + 
createJobForTest(testJobID, s, t) + data := structs.FuzzySearchRequest{Text: "fau", Context: structs.Namespaces} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) // searched one context: namespaces - assert.Equal(1, len(j)) - assert.Equal(j[0], testJob) + ns := res.Matches[structs.Namespaces] + require.Len(t, ns, 1) - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + require.Equal(t, "default", ns[0].ID) + require.Nil(t, ns[0].Scope) // only job types have scope + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_PUT(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_PUT(t *testing.T) { + t.Parallel() testJob := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJob, s, t) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, err := http.NewRequest("PUT", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 1) + require.Equal(t, testJob, j[0]) - assert.Equal(1, len(j)) - assert.Equal(j[0], testJob) + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_PUT(t *testing.T) { + t.Parallel() + + testJobID := uuid.Generate() + + httpTest(t, nil, func(s *TestAgent) { + createJobForTest(testJobID, s, t) + data := structs.FuzzySearchRequest{Text: "fau", Context: structs.Namespaces} + req, err := http.NewRequest("PUT", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) // searched one context: namespaces + + ns := res.Matches[structs.Namespaces] + require.Len(t, ns, 1) + + require.Equal(t, "default", ns[0].ID) + require.Nil(t, ns[0].Scope) // only job types have scope + + require.False(t, res.Truncations[structs.Namespaces]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_MultipleJobs(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_MultipleJobs(t *testing.T) { + t.Parallel() testJobA := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobB := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89707" testJobC := "bbbbbbbb-e8f7-fd38-c855-ab94ceb89707" - testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJobA, s, t) createJobForTest(testJobB, s, t) @@ -116,190 +195,361 @@ func TestHTTP_Search_MultipleJobs(t *testing.T) { data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - 
assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 2) + require.Contains(t, j, testJobA) + require.Contains(t, j, testJobB) + require.NotContains(t, j, testJobC) - assert.Equal(2, len(j)) - assert.Contains(j, testJobA) - assert.Contains(j, testJobB) - assert.NotContains(j, testJobC) - - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Evaluation(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_MultipleJobs(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + createCmdJobForTest("job1", "/bin/yes", s, t) + createCmdJobForTest("job2", "/bin/no", s, t) + createCmdJobForTest("job3", "/opt/java", s, t) // no match + createCmdJobForTest("job4", "/sbin/ping", s, t) + + data := structs.FuzzySearchRequest{Text: "bin", Context: structs.Jobs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + // in example job, only the commands match the "bin" query + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + commands := res.Matches[structs.Commands] + require.Len(t, commands, 3) + + exp := []structs.FuzzyMatch{{ + ID: "/bin/no", + Scope: []string{"default", "job2", "web", "web"}, + }, { + ID: "/bin/yes", + Scope: []string{"default", "job1", "web", "web"}, + }, { + ID: "/sbin/ping", + Scope: []string{"default", "job4", "web", "web"}, + }} + require.Equal(t, exp, commands) + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} +func TestHTTP_PrefixSearch_Evaluation(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() eval1 := mock.Eval() eval2 := mock.Eval() err := state.UpsertEvals(structs.MsgTypeTestSetup, 9000, []*structs.Evaluation{eval1, eval2}) - assert.Nil(err) + require.NoError(t, err) prefix := eval1.ID[:len(eval1.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Evals} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Evals] - assert.Equal(1, len(j)) - assert.Contains(j, eval1.ID) - assert.NotContains(j, eval2.ID) - - assert.Equal(res.Truncations[structs.Evals], false) - assert.Equal("9000", respW.HeaderMap.Get("X-Nomad-Index")) + require.Len(t, j, 1) + require.Contains(t, j, eval1.ID) + require.NotContains(t, j, eval2.ID) + require.False(t, res.Truncations[structs.Evals]) + require.Equal(t, "9000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Allocations(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Evaluation(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() 
+ eval1 := mock.Eval() + eval2 := mock.Eval() + err := state.UpsertEvals(structs.MsgTypeTestSetup, 9000, []*structs.Evaluation{eval1, eval2}) + require.NoError(t, err) + + // fuzzy search does prefix search for evaluations + prefix := eval1.ID[:len(eval1.ID)-2] + data := structs.FuzzySearchRequest{Text: prefix, Context: structs.Evals} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + matches := res.Matches[structs.Evals] + require.Len(t, matches, 1) + require.Equal(t, structs.FuzzyMatch{ + ID: eval1.ID, + }, matches[0]) + require.False(t, res.Truncations[structs.Evals]) + require.Equal(t, "9000", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_PrefixSearch_Allocations(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() alloc := mock.Alloc() err := state.UpsertAllocs(structs.MsgTypeTestSetup, 7000, []*structs.Allocation{alloc}) - assert.Nil(err) + require.NoError(t, err) prefix := alloc.ID[:len(alloc.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Allocs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) a := res.Matches[structs.Allocs] - assert.Equal(1, len(a)) - assert.Contains(a, alloc.ID) + require.Len(t, a, 1) + require.Contains(t, a, alloc.ID) - assert.Equal(res.Truncations[structs.Allocs], false) - assert.Equal("7000", respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, res.Truncations[structs.Allocs]) + require.Equal(t, "7000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Nodes(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Allocations(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + alloc := mock.Alloc() + err := state.UpsertAllocs(structs.MsgTypeTestSetup, 7000, []*structs.Allocation{alloc}) + require.NoError(t, err) + + data := structs.FuzzySearchRequest{Text: "-job", Context: structs.Allocs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + a := res.Matches[structs.Allocs] + require.Len(t, a, 1) + require.Equal(t, "my-job.web[0]", a[0].ID) + + require.False(t, res.Truncations[structs.Allocs]) + require.Equal(t, "7000", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_PrefixSearch_Nodes(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() node := mock.Node() err := state.UpsertNode(structs.MsgTypeTestSetup, 6000, node) - assert.Nil(err) + require.NoError(t, err) prefix := node.ID[:len(node.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Nodes} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - 
assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) n := res.Matches[structs.Nodes] - assert.Equal(1, len(n)) - assert.Contains(n, node.ID) + require.Len(t, n, 1) + require.Contains(t, n, node.ID) - assert.Equal(res.Truncations[structs.Nodes], false) - assert.Equal("6000", respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, res.Truncations[structs.Nodes]) + require.Equal(t, "6000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Deployments(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Nodes(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + node := mock.Node() // foobar + err := state.UpsertNode(structs.MsgTypeTestSetup, 6000, node) + require.NoError(t, err) + + data := structs.FuzzySearchRequest{Text: "oo", Context: structs.Nodes} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + n := res.Matches[structs.Nodes] + require.Len(t, n, 1) + require.Equal(t, "foobar", n[0].ID) + + require.False(t, res.Truncations[structs.Nodes]) + require.Equal(t, "6000", header(respW, "X-Nomad-Index")) + }) +} +func TestHTTP_PrefixSearch_Deployments(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() deployment := mock.Deployment() - assert.Nil(state.UpsertDeployment(999, deployment), "UpsertDeployment") + require.NoError(t, state.UpsertDeployment(999, deployment), "UpsertDeployment") prefix := deployment.ID[:len(deployment.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Deployments} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) n := res.Matches[structs.Deployments] - assert.Equal(1, len(n)) - assert.Contains(n, deployment.ID) - - assert.Equal("999", respW.HeaderMap.Get("X-Nomad-Index")) + require.Len(t, n, 1) + require.Contains(t, n, deployment.ID) + require.Equal(t, "999", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_NoJob(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Deployments(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + deployment := mock.Deployment() + require.NoError(t, state.UpsertDeployment(999, deployment), "UpsertDeployment") + + // fuzzy search of deployments are prefix searches + prefix := deployment.ID[:len(deployment.ID)-2] + data := structs.FuzzySearchRequest{Text: prefix, Context: structs.Deployments} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + n := res.Matches[structs.Deployments] + require.Len(t, n, 1) + require.Equal(t, deployment.ID, n[0].ID) + require.Equal(t, "999", header(respW, "X-Nomad-Index")) + }) +} + +func 
TestHTTP_PrefixSearch_NoJob(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { data := structs.SearchRequest{Prefix: "12345", Context: structs.Jobs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) + require.Len(t, res.Matches, 1) + require.Len(t, res.Matches[structs.Jobs], 0) + require.Equal(t, "0", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_NoJob(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + data := structs.FuzzySearchRequest{Text: "12345", Context: structs.Jobs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) - assert.Equal(1, len(res.Matches)) - assert.Equal(0, len(res.Matches[structs.Jobs])) + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) - assert.Equal("0", respW.HeaderMap.Get("X-Nomad-Index")) + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 0) + require.Equal(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_AllContext(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_AllContext(t *testing.T) { + t.Parallel() testJobID := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJobID, s, t) @@ -307,28 +557,59 @@ func TestHTTP_Search_AllContext(t *testing.T) { eval1 := mock.Eval() eval1.ID = testJobID err := state.UpsertEvals(structs.MsgTypeTestSetup, 8000, []*structs.Evaluation{eval1}) - assert.Nil(err) + require.NoError(t, err) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.All} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - matchedJobs := res.Matches[structs.Jobs] matchedEvals := res.Matches[structs.Evals] + require.Len(t, matchedJobs, 1) + require.Len(t, matchedEvals, 1) + require.Equal(t, testJobID, matchedJobs[0]) + require.Equal(t, eval1.ID, matchedEvals[0]) + require.Equal(t, "8000", header(respW, "X-Nomad-Index")) + }) +} - assert.Equal(1, len(matchedJobs)) - assert.Equal(1, len(matchedEvals)) +func TestHTTP_FuzzySearch_AllContext(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + createCmdJobForTest("job1", "/bin/aardvark", s, t) - assert.Equal(matchedJobs[0], testJobID) - assert.Equal(matchedEvals[0], eval1.ID) + state := s.Agent.server.State() + eval1 := mock.Eval() + eval1.ID = "aaaa6573-04cb-61b4-04cb-865aaaf5d400" + err := state.UpsertEvals(structs.MsgTypeTestSetup, 8000, []*structs.Evaluation{eval1}) + require.NoError(t, err) - assert.Equal("8000", respW.HeaderMap.Get("X-Nomad-Index")) + data := structs.FuzzySearchRequest{Text: "aa", Context: structs.All} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + matchedCommands := res.Matches[structs.Commands] + matchedEvals := res.Matches[structs.Evals] + require.Len(t, 
matchedCommands, 1) + require.Len(t, matchedEvals, 1) + require.Equal(t, eval1.ID, matchedEvals[0].ID) + require.Equal(t, "/bin/aardvark", matchedCommands[0].ID) + require.Equal(t, []string{ + "default", "job1", "web", "web", + }, matchedCommands[0].Scope) + require.Equal(t, "8000", header(respW, "X-Nomad-Index")) }) } diff --git a/nomad/config.go b/nomad/config.go index 0494768279d5..05db46fb383f 100644 --- a/nomad/config.go +++ b/nomad/config.go @@ -354,9 +354,11 @@ type Config struct { // LicenseConfig is a tunable knob for enterprise license testing. LicenseConfig *LicenseConfig + LicenseEnv string + LicensePath string - LicenseEnv string - LicensePath string + // SearchConfig provides knobs for Search API. + SearchConfig *structs.SearchConfig // AgentShutdown is used to call agent.Shutdown from the context of a Server // It is used primarily for licensing diff --git a/nomad/mock/mock.go b/nomad/mock/mock.go index bf80e154e8fb..d9d619f12057 100644 --- a/nomad/mock/mock.go +++ b/nomad/mock/mock.go @@ -1107,8 +1107,10 @@ func JobSummary(jobID string) *structs.JobSummary { } func Alloc() *structs.Allocation { + job := Job() alloc := &structs.Allocation{ ID: uuid.Generate(), + Name: fmt.Sprintf("%s.%s[%d]", job.Name, "web", 0), EvalID: uuid.Generate(), NodeID: "12345678-abcd-efab-cdef-123456789abc", Namespace: structs.DefaultNamespace, @@ -1172,7 +1174,7 @@ func Alloc() *structs.Allocation { DiskMB: 150, }, }, - Job: Job(), + Job: job, DesiredStatus: structs.AllocDesiredStatusRun, ClientStatus: structs.AllocClientStatusPending, } diff --git a/nomad/search_endpoint.go b/nomad/search_endpoint.go index 382d109f80e5..108b787f98d3 100644 --- a/nomad/search_endpoint.go +++ b/nomad/search_endpoint.go @@ -2,6 +2,7 @@ package nomad import ( "fmt" + "sort" "strings" "time" @@ -42,9 +43,9 @@ type Search struct { logger log.Logger } -// getMatches extracts matches for an iterator, and returns a list of ids for +// getPrefixMatches extracts matches for an iterator, and returns a list of ids for // these matches. 
-func (s *Search) getMatches(iter memdb.ResultIterator, prefix string) ([]string, bool) { +func (s *Search) getPrefixMatches(iter memdb.ResultIterator, prefix string) ([]string, bool) { var matches []string for i := 0; i < truncateLimit; i++ { @@ -93,6 +94,252 @@ func (s *Search) getMatches(iter memdb.ResultIterator, prefix string) ([]string, return matches, iter.Next() != nil } +func (s *Search) getFuzzyMatches(iter memdb.ResultIterator, text string) (map[structs.Context][]structs.FuzzyMatch, map[structs.Context]bool) { + limitQuery := s.srv.config.SearchConfig.LimitQuery + limitResults := s.srv.config.SearchConfig.LimitResults + + unsorted := make(map[structs.Context][]fuzzyMatch) + truncations := make(map[structs.Context]bool) + + accumulateSet := func(limited bool, set map[structs.Context][]fuzzyMatch) { + for ctx, matches := range set { + for _, match := range matches { + if len(unsorted[ctx]) < limitResults { + unsorted[ctx] = append(unsorted[ctx], match) + } else { + // truncated by results limit + truncations[ctx] = true + return + } + if limited { + // truncated by query limit + truncations[ctx] = true + return + } + } + } + } + + accumulateSingle := func(limited bool, ctx structs.Context, match *fuzzyMatch) { + if match != nil { + if len(unsorted[ctx]) < limitResults { + unsorted[ctx] = append(unsorted[ctx], *match) + } else { + // truncated by results limit + truncations[ctx] = true + return + } + if limited { + // truncated by query limit + truncations[ctx] = true + return + } + } + } + + limited := func(i int, iter memdb.ResultIterator) bool { + if i == limitQuery-1 { + return iter.Next() != nil + } + return false + } + + for i := 0; i < limitQuery; i++ { + raw := iter.Next() + if raw == nil { + break + } + + switch t := raw.(type) { + case *structs.Job: + set := s.fuzzyMatchesJob(t, text) + accumulateSet(limited(i, iter), set) + default: + ctx, match := s.fuzzyMatchSingle(raw, text) + accumulateSingle(limited(i, iter), ctx, match) + } + } + + // sort the set of match results + for ctx := range unsorted { + sortSet(text, unsorted[ctx]) + } + + // create the result out of exported types + m := make(map[structs.Context][]structs.FuzzyMatch, len(unsorted)) + for ctx, matches := range unsorted { + m[ctx] = make([]structs.FuzzyMatch, 0, len(matches)) + for _, match := range matches { + m[ctx] = append(m[ctx], structs.FuzzyMatch{ + ID: match.id, + Scope: match.scope, + }) + } + } + + return m, truncations +} + +// fuzzySingleMatch determines if the ID of raw is a fuzzy match with text. +// Returns the context and score or nil if there is no match. +func (s *Search) fuzzyMatchSingle(raw interface{}, text string) (structs.Context, *fuzzyMatch) { + var ( + name string + ctx structs.Context + ) + + switch t := raw.(type) { + case *structs.Node: + name = t.Name + ctx = structs.Nodes + case *structs.Namespace: + name = t.Name + ctx = structs.Namespaces + case *structs.Allocation: + name = t.Name + ctx = structs.Allocs + case *structs.CSIPlugin: + name = t.ID + ctx = structs.Plugins + } + + if idx := strings.Index(name, text); idx >= 0 { + return ctx, &fuzzyMatch{ + id: name, + score: idx, + scope: nil, // currently no non-job sub-types need scoping + } + } + + return "", nil +} + +// getFuzzyMatchesJob digs through j and extracts matches against several types +// of matchable Context. Results are categorized by Context and paired with their +// score, but are unsorted. 
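//
// For example, the text "bin" matched against a task whose exec "command" is
// "/bin/yes" yields a Commands match with score 1, the index at which "bin"
// occurs within the matched ID. sortSet below then orders each Context's
// matches by ascending score, then by shorter ID, then alphabetically. The
// names considered for matching are: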
+// +// job.name +// job|group.name +// job|group|service.name +// job|group|task.name +// job|group|task|service.name +// job|group|task|driver.{image,command,class} +func (*Search) fuzzyMatchesJob(j *structs.Job, text string) map[structs.Context][]fuzzyMatch { + sm := make(map[structs.Context][]fuzzyMatch) + ns := j.Namespace + job := j.Name + + // job.name + if idx := strings.Index(j.Name, text); idx >= 0 { + sm[structs.Jobs] = append(sm[structs.Jobs], score(job, ns, idx)) + } + + // job|group.name + for _, group := range j.TaskGroups { + if idx := strings.Index(group.Name, text); idx >= 0 { + sm[structs.Groups] = append(sm[structs.Groups], score(group.Name, ns, idx, job)) + } + + // job|group|service.name + for _, service := range group.Services { + if idx := strings.Index(service.Name, text); idx >= 0 { + sm[structs.Services] = append(sm[structs.Services], score(service.Name, ns, idx, job, group.Name)) + } + } + + // job|group|task.name + for _, task := range group.Tasks { + if idx := strings.Index(task.Name, text); idx >= 0 { + sm[structs.Tasks] = append(sm[structs.Tasks], score(task.Name, ns, idx, job, group.Name)) + } + + // job|group|task|service.name + for _, service := range task.Services { + if idx := strings.Index(service.Name, text); idx >= 0 { + sm[structs.Services] = append(sm[structs.Services], score(service.Name, ns, idx, job, group.Name, task.Name)) + } + } + + // job|group|task|config.{image,driver,class} + switch task.Driver { + case "docker": + image := getConfigParam(task.Config, "image") + if idx := strings.Index(image, text); idx >= 0 { + sm[structs.Images] = append(sm[structs.Images], score(image, ns, idx, job, group.Name, task.Name)) + } + case "exec", "raw_exec": + command := getConfigParam(task.Config, "command") + if idx := strings.Index(command, text); idx >= 0 { + sm[structs.Commands] = append(sm[structs.Commands], score(command, ns, idx, job, group.Name, task.Name)) + } + case "java": + class := getConfigParam(task.Config, "class") + if idx := strings.Index(class, text); idx >= 0 { + sm[structs.Classes] = append(sm[structs.Classes], score(class, ns, idx, job, group.Name, task.Name)) + } + } + } + } + + return sm +} + +func getConfigParam(config map[string]interface{}, param string) string { + if config == nil || config[param] == nil { + return "" + } + + s, ok := config[param].(string) + if !ok { + return "" + } + + return s +} + +type fuzzyMatch struct { + id string + scope []string + score int +} + +func score(id, namespace string, score int, scope ...string) fuzzyMatch { + return fuzzyMatch{ + id: id, + score: score, + scope: append([]string{namespace}, scope...), + } +} + +func sortSet(text string, matches []fuzzyMatch) { + sort.Slice(matches, func(a, b int) bool { + A, B := matches[a], matches[b] + + // sort by index + switch { + case A.score < B.score: + return true + case B.score < A.score: + return false + } + + // shorter length matched text is more likely to be the thing being + // searched for (in theory) + // + // this also causes exact matches to score best, which is desirable + idA, idB := A.id, B.id + switch { + case len(idA) < len(idB): + return true + case len(idB) < len(idA): + return false + } + + // same index and same length, break ties alphabetically + return idA < idB + }) +} + // getResourceIter takes a context and returns a memdb iterator specific to // that context func getResourceIter(context structs.Context, aclObj *acl.ACL, namespace, prefix string, ws memdb.WatchSet, state *state.StateStore) (memdb.ResultIterator, error) { @@ 
-152,6 +399,30 @@ func roundUUIDDownIfOdd(prefix string, context structs.Context) string { return prefix[:len(prefix)-1] } +// silenceError determines whether err is an error we care about when getting an +// iterator from the state store - we ignore errors about invalid UUIDs, since +// we sometimes try to lookup by Name and not UUID. +func (*Search) silenceError(err error) bool { + if err == nil { + return true + } + + e := err.Error() + switch { + // Searching other contexts with job names raises an error, which in + // this case we want to ignore. + case strings.Contains(e, "Invalid UUID: encoding/hex"): + case strings.Contains(e, "UUID have 36 characters"): + case strings.Contains(e, "must be even length"): + case strings.Contains(e, "UUID should have maximum of 4"): + default: + // err was not nil and not about UUID prefix, something bad happened + return false + } + + return true +} + // PrefixSearch is used to list matches for a given prefix, and returns // matching jobs, evaluations, allocations, and/or nodes. func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.SearchResponse) error { @@ -168,7 +439,7 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search namespace := args.RequestNamespace() // Require either node:read or namespace:read-job - if !anySearchPerms(aclObj, namespace, args.Context) { + if !sufficientSearchPerms(aclObj, namespace, args.Context) { return structs.ErrPermissionDenied } @@ -182,21 +453,12 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search run: func(ws memdb.WatchSet, state *state.StateStore) error { iters := make(map[structs.Context]memdb.ResultIterator) - - contexts := searchContexts(aclObj, namespace, args.Context) + contexts := expandSearchContexts(aclObj, namespace, args.Context) for _, ctx := range contexts { iter, err := getResourceIter(ctx, aclObj, namespace, roundUUIDDownIfOdd(args.Prefix, args.Context), ws, state) if err != nil { - e := err.Error() - switch { - // Searching other contexts with job names raises an error, which in - // this case we want to ignore. - case strings.Contains(e, "Invalid UUID: encoding/hex"): - case strings.Contains(e, "UUID have 36 characters"): - case strings.Contains(e, "must be even length"): - case strings.Contains(e, "UUID should have maximum of 4"): - default: + if !s.silenceError(err) { return err } } else { @@ -206,7 +468,7 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search // Return matches for the given prefix for k, v := range iters { - res, isTrunc := s.getMatches(v, args.Prefix) + res, isTrunc := s.getPrefixMatches(v, args.Prefix) reply.Matches[k] = res reply.Truncations[k] = isTrunc } @@ -229,3 +491,140 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search }} return s.srv.blockingRPC(&opts) } + +// FuzzySearch is used to list fuzzy or prefix matches for a given text argument and Context. +// If the Context is "all", all searchable contexts are searched. If ACLs are enabled, +// results are limited to policies of the provided ACL token. +// +// These types are limited to prefix UUID searching: +// Evals, Deployments, ScalingPolicies, Volumes +// +// These types are available for fuzzy searching: +// Nodes, Namespaces, Jobs, Allocs, Plugins +// +// Jobs are a special case that expand into multiple types, and whose return +// values include Scope which is a descending list of IDs of parent objects, +// starting with the Namespace. 
The subtypes of jobs are fuzzy searchable. +// +// The Jobs type expands into these sub types: +// Jobs, Groups, Services, Tasks, Images, Commands, Classes +// +// The results are in descending order starting with strongest match, per Context type. +func (s *Search) FuzzySearch(args *structs.FuzzySearchRequest, reply *structs.FuzzySearchResponse) error { + if done, err := s.srv.forward("Search.FuzzySearch", args, args, reply); done { + return err + } + defer metrics.MeasureSince([]string{"nomad", "search", "fuzzy_search"}, time.Now()) + + // check that fuzzy search API is enabled + if !s.srv.config.SearchConfig.FuzzyEnabled { + return fmt.Errorf("fuzzy search is not enabled") + } + + // check the query term meets minimum length + min := s.srv.config.SearchConfig.MinTermLength + if n := len(args.Text); n < min { + return fmt.Errorf("fuzzy search query must be at least %d characters, got %d", min, n) + } + + aclObj, err := s.srv.ResolveToken(args.AuthToken) + if err != nil { + return err + } + + namespace := args.RequestNamespace() + context := args.Context + + if !sufficientSearchPerms(aclObj, namespace, context) { + return structs.ErrPermissionDenied + } + + reply.Matches = make(map[structs.Context][]structs.FuzzyMatch) + reply.Truncations = make(map[structs.Context]bool) + + // Setup the blocking query + opts := blockingOptions{ + queryMeta: &reply.QueryMeta, + queryOpts: new(structs.QueryOptions), + run: func(ws memdb.WatchSet, state *state.StateStore) error { + + fuzzyIters := make(map[structs.Context]memdb.ResultIterator) + prefixIters := make(map[structs.Context]memdb.ResultIterator) + contexts := expandSearchContexts(aclObj, namespace, context) + + for _, ctx := range contexts { + switch ctx { + // types that use UUID prefix searching + case structs.Evals, structs.Deployments, structs.ScalingPolicies, structs.Volumes: + iter, err := getResourceIter(ctx, aclObj, namespace, roundUUIDDownIfOdd(args.Prefix, args.Context), ws, state) + if err != nil { + if !s.silenceError(err) { + return err + } + } else { + prefixIters[ctx] = iter + } + + // types that use fuzzy searching + default: + iter, err := getResourceIter(ctx, aclObj, namespace, "", ws, state) + if err != nil { + return err + } + fuzzyIters[ctx] = iter + } + } + + // Set prefix matches of the given text + for ctx, iter := range prefixIters { + res, isTrunc := s.getPrefixMatches(iter, args.Text) + matches := make([]structs.FuzzyMatch, 0, len(res)) + for _, result := range res { + matches = append(matches, structs.FuzzyMatch{ID: result}) + } + reply.Matches[ctx] = matches + reply.Truncations[ctx] = isTrunc + } + + // Set fuzzy matches of the given text + for _, iter := range fuzzyIters { + matches, truncations := s.getFuzzyMatches(iter, args.Text) + for ctx := range matches { + reply.Matches[ctx] = matches[ctx] + } + for ctx := range truncations { + reply.Truncations[ctx] = truncations[ctx] + } + } + + // Set the index for the context. If the context has been specified, + // it will be used as the index of the response. Otherwise, the maximum + // index from all the resources will be used. 
+ for _, ctx := range contexts { + index, err := state.Index(contextToIndex(ctx)) + if err != nil { + return err + } + if index > reply.Index { + reply.Index = index + } + } + + s.srv.setQueryMeta(&reply.QueryMeta) + return nil + }, + } + + return s.srv.blockingRPC(&opts) +} + +func expandContext(context structs.Context) []structs.Context { + switch context { + case structs.All: + c := make([]structs.Context, len(allContexts)) + copy(c, allContexts) + return c + default: + return []structs.Context{context} + } +} diff --git a/nomad/search_endpoint_oss.go b/nomad/search_endpoint_oss.go index 7c1b3b7a856f..0a229574f096 100644 --- a/nomad/search_endpoint_oss.go +++ b/nomad/search_endpoint_oss.go @@ -35,9 +35,11 @@ func getEnterpriseResourceIter(context structs.Context, _ *acl.ACL, namespace, p return nil, fmt.Errorf("context must be one of %v or 'all' for all contexts; got %q", allContexts, context) } -// anySearchPerms returns true if the provided ACL has access to any -// capabilities required for prefix searching. Returns true if aclObj is nil. -func anySearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) bool { +// sufficientSearchPerms returns true if the provided ACL has access to each +// capability required for prefix searching for the given context. +// +// Returns true if aclObj is nil. +func sufficientSearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) bool { if aclObj == nil { return true } @@ -78,22 +80,16 @@ func anySearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) return true } -// searchContexts returns the contexts the aclObj is valid for. If aclObj is -// nil all contexts are returned. -func searchContexts(aclObj *acl.ACL, namespace string, context structs.Context) []structs.Context { - var all []structs.Context - - switch context { - case structs.All: - all = make([]structs.Context, len(allContexts)) - copy(all, allContexts) - default: - all = []structs.Context{context} - } +// expandSearchContexts returns the expanded set of contexts of context, filtered down +// to the subset of contexts the aclObj is valid for. +// +// If aclObj is nil, no contexts are filtered out. 
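//
// For example, with a token granting only node:read, a Context of "all" first
// expands to every searchable Context via expandContext, and Jobs, Allocs,
// Evals, and Deployments are then filtered out while Nodes is kept, as
// exercised by the node:read cases in TestSearch_PrefixSearch_ACL.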
+func expandSearchContexts(aclObj *acl.ACL, namespace string, context structs.Context) []structs.Context { + desired := expandContext(context) // If ACLs aren't enabled return all contexts if aclObj == nil { - return all + return desired } jobRead := aclObj.AllowNsOp(namespace, acl.NamespaceCapabilityReadJob) @@ -105,8 +101,8 @@ func searchContexts(aclObj *acl.ACL, namespace string, context structs.Context) policyRead := aclObj.AllowNsOp(namespace, acl.NamespaceCapabilityListScalingPolicies) // Filter contexts down to those the ACL grants access to - available := make([]structs.Context, 0, len(all)) - for _, c := range all { + available := make([]structs.Context, 0, len(desired)) + for _, c := range desired { switch c { case structs.Allocs, structs.Jobs, structs.Evals, structs.Deployments: if jobRead { diff --git a/nomad/search_endpoint_test.go b/nomad/search_endpoint_test.go index 94ecfe0a08eb..56a4ae46c89d 100644 --- a/nomad/search_endpoint_test.go +++ b/nomad/search_endpoint_test.go @@ -1,6 +1,7 @@ package nomad import ( + "fmt" "strconv" "strings" "testing" @@ -12,26 +13,26 @@ import ( "github.com/hashicorp/nomad/nomad/state" "github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/testutil" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) const jobIndex = 1000 -func registerAndVerifyJob(s *Server, t *testing.T, prefix string, counter int) *structs.Job { +func registerMockJob(s *Server, t *testing.T, prefix string, counter int) *structs.Job { job := mock.Job() job.ID = prefix + strconv.Itoa(counter) - state := s.fsm.State() - if err := state.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job); err != nil { - t.Fatalf("err: %v", err) - } - + registerJob(s, t, job) return job } +func registerJob(s *Server, t *testing.T, job *structs.Job) { + fsmState := s.fsm.State() + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) +} + func TestSearch_PrefixSearch_Job(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -41,7 +42,7 @@ func TestSearch_PrefixSearch_Job(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) req := &structs.SearchRequest{ Prefix: prefix, @@ -57,14 +58,14 @@ func TestSearch_PrefixSearch_Job(t *testing.T) { t.Fatalf("err: %v", err) } - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_ACL(t *testing.T) { t.Parallel() - assert := assert.New(t) + jobID := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, root, cleanupS := TestACLServer(t, func(c *Config) { @@ -73,10 +74,10 @@ func TestSearch_PrefixSearch_ACL(t *testing.T) { defer cleanupS() codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() - job := registerAndVerifyJob(s, t, jobID, 0) - assert.Nil(state.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + job := registerMockJob(s, t, jobID, 0) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) req := &structs.SearchRequest{ Prefix: "", @@ -91,92 +92,89 @@ func TestSearch_PrefixSearch_ACL(t *testing.T) { { var resp 
structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with an invalid token and expect failure { - invalidToken := mock.CreatePolicyAndToken(t, state, 1003, "test-invalid", + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) req.AuthToken = invalidToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect failure due to Jobs being the context { - validToken := mock.CreatePolicyAndToken(t, state, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) req.AuthToken = validToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect success due to All context { - validToken := mock.CreatePolicyAndToken(t, state, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.All req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) // Jobs filtered out since token only has access to node:read - assert.Len(resp.Matches[structs.Jobs], 0) + require.Len(t, resp.Matches[structs.Jobs], 0) } // Try with a valid token for namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1009, "test-valid2", + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob})) req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) // Index of job - not node - because node context is filtered out - assert.Equal(uint64(1000), resp.Index) + require.Equal(t, uint64(1000), resp.Index) // Nodes filtered out since token only has access to namespace:read-job - assert.Len(resp.Matches[structs.Nodes], 0) + require.Len(t, resp.Matches[structs.Nodes], 0) } // Try with a valid token for node:read and namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1011, "test-valid3", strings.Join([]string{ + 
validToken := mock.CreatePolicyAndToken(t, fsmState, 1011, "test-valid3", strings.Join([]string{ mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob}), mock.NodePolicy(acl.PolicyRead), }, "\n")) req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Equal(uint64(1001), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, uint64(1001), resp.Index) } // Try with a management token { req.AuthToken = root.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) } } func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "example-test-------" // Assert that a job with more than 4 hyphens works s, cleanupS := TestServer(t, func(c *Config) { @@ -187,19 +185,15 @@ func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { testutil.WaitForLeader(t, s.RPC) // Register a job and an allocation - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) alloc := mock.Alloc() alloc.JobID = job.ID alloc.Namespace = job.Namespace summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) req := &structs.SearchRequest{ Context: structs.All, @@ -213,16 +207,16 @@ func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { for i := 1; i < len(prefix); i++ { req.Prefix = prefix[:i] var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.EqualValues(jobIndex, resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, 1, len(resp.Matches[structs.Jobs])) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.EqualValues(t, jobIndex, resp.Index) } } func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := strings.Repeat("a", 100) s, cleanupS := TestServer(t, func(c *Config) { @@ -233,18 +227,14 @@ func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { testutil.WaitForLeader(t, 
s.RPC) // Register a job and an allocation - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) alloc := mock.Alloc() alloc.JobID = job.ID summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) req := &structs.SearchRequest{ Prefix: prefix, @@ -256,19 +246,17 @@ func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.EqualValues(jobIndex, resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.EqualValues(t, jobIndex, resp.Index) } // truncate should limit results to 20 func TestSearch_PrefixSearch_Truncate(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -278,9 +266,8 @@ func TestSearch_PrefixSearch_Truncate(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - var job *structs.Job for counter := 0; counter < 25; counter++ { - job = registerAndVerifyJob(s, t, prefix, counter) + registerMockJob(s, t, prefix, counter) } req := &structs.SearchRequest{ @@ -288,23 +275,21 @@ func TestSearch_PrefixSearch_Truncate(t *testing.T) { Context: structs.Jobs, QueryOptions: structs.QueryOptions{ Region: "global", - Namespace: job.Namespace, + Namespace: "default", }, } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(20, len(resp.Matches[structs.Jobs])) - assert.Equal(resp.Truncations[structs.Jobs], true) - assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 20) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -315,11 +300,10 @@ func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) - + job := registerMockJob(s, t, prefix, 0) eval1 := mock.Eval() eval1.ID = job.ID - s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1}) + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) req := &structs.SearchRequest{ Prefix: prefix, @@ -331,20 +315,16 @@ func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err 
!= nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - - assert.Equal(1, len(resp.Matches[structs.Evals])) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) } func TestSearch_PrefixSearch_Evals(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -354,7 +334,7 @@ func TestSearch_PrefixSearch_Evals(t *testing.T) { testutil.WaitForLeader(t, s.RPC) eval1 := mock.Eval() - s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1}) + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) prefix := eval1.ID[:len(eval1.ID)-2] @@ -368,20 +348,16 @@ func TestSearch_PrefixSearch_Evals(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Evals])) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) - assert.Equal(resp.Truncations[structs.Evals], false) - - assert.Equal(uint64(2000), resp.Index) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) + require.False(t, resp.Truncations[structs.Evals]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_Allocation(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -392,14 +368,10 @@ func TestSearch_PrefixSearch_Allocation(t *testing.T) { alloc := mock.Alloc() summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc})) prefix := alloc.ID[:len(alloc.ID)-2] @@ -413,20 +385,16 @@ func TestSearch_PrefixSearch_Allocation(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Allocs])) - assert.Equal(alloc.ID, resp.Matches[structs.Allocs][0]) - assert.Equal(resp.Truncations[structs.Allocs], false) - - assert.Equal(uint64(90), resp.Index) + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.ID, resp.Matches[structs.Allocs][0]) + require.False(t, resp.Truncations[structs.Allocs]) + require.Equal(t, uint64(90), resp.Index) } func TestSearch_PrefixSearch_All_UUID(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -437,25 +405,17 @@ func 
TestSearch_PrefixSearch_All_UUID(t *testing.T) { alloc := mock.Alloc() summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 1001, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, node)) eval1 := mock.Eval() eval1.ID = node.ID - if err := state.UpsertEvals(structs.MsgTypeTestSetup, 1002, []*structs.Evaluation{eval1}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertEvals(structs.MsgTypeTestSetup, 1002, []*structs.Evaluation{eval1})) req := &structs.SearchRequest{ Context: structs.All, @@ -468,17 +428,16 @@ func TestSearch_PrefixSearch_All_UUID(t *testing.T) { for i := 1; i < len(alloc.ID); i++ { req.Prefix = alloc.ID[:i] var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Allocs])) - assert.Equal(alloc.ID, resp.Matches[structs.Allocs][0]) - assert.Equal(resp.Truncations[structs.Allocs], false) - assert.EqualValues(1002, resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.ID, resp.Matches[structs.Allocs][0]) + require.False(t, resp.Truncations[structs.Allocs]) + require.EqualValues(t, 1002, resp.Index) } } func TestSearch_PrefixSearch_Node(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -487,12 +446,10 @@ func TestSearch_PrefixSearch_Node(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 100, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) prefix := node.ID[:len(node.ID)-2] @@ -510,16 +467,14 @@ func TestSearch_PrefixSearch_Node(t *testing.T) { t.Fatalf("err: %v", err) } - assert.Equal(1, len(resp.Matches[structs.Nodes])) - assert.Equal(node.ID, resp.Matches[structs.Nodes][0]) - assert.Equal(false, resp.Truncations[structs.Nodes]) - - assert.Equal(uint64(100), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, node.ID, resp.Matches[structs.Nodes][0]) + require.False(t, resp.Truncations[structs.Nodes]) + require.Equal(t, uint64(100), resp.Index) } func TestSearch_PrefixSearch_Deployment(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -529,7 +484,7 @@ func TestSearch_PrefixSearch_Deployment(t *testing.T) { testutil.WaitForLeader(t, s.RPC) deployment := mock.Deployment() - s.fsm.State().UpsertDeployment(2000, deployment) + require.NoError(t, s.fsm.State().UpsertDeployment(2000, deployment)) prefix := deployment.ID[:len(deployment.ID)-2] @@ -543,20 +498,15 @@ func TestSearch_PrefixSearch_Deployment(t *testing.T) { } var resp structs.SearchResponse - if 
err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Deployments])) - assert.Equal(deployment.ID, resp.Matches[structs.Deployments][0]) - assert.Equal(resp.Truncations[structs.Deployments], false) - - assert.Equal(uint64(2000), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Deployments], 1) + require.Equal(t, deployment.ID, resp.Matches[structs.Deployments][0]) + require.False(t, resp.Truncations[structs.Deployments]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_AllContext(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -566,18 +516,14 @@ func TestSearch_PrefixSearch_AllContext(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 100, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) eval1 := mock.Eval() eval1.ID = node.ID - if err := state.UpsertEvals(structs.MsgTypeTestSetup, 1000, []*structs.Evaluation{eval1}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertEvals(structs.MsgTypeTestSetup, 1000, []*structs.Evaluation{eval1})) prefix := node.ID[:len(node.ID)-2] @@ -591,23 +537,19 @@ func TestSearch_PrefixSearch_AllContext(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Nodes])) - assert.Equal(1, len(resp.Matches[structs.Evals])) - - assert.Equal(node.ID, resp.Matches[structs.Nodes][0]) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1000), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, node.ID, resp.Matches[structs.Nodes][0]) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) + require.Equal(t, uint64(1000), resp.Index) } // Tests that the top 20 matches are returned when no prefix is set func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -617,7 +559,7 @@ func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) req := &structs.SearchRequest{ Prefix: "", @@ -629,20 +571,17 @@ func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) 
} // Tests that the zero matches are returned when a prefix has no matching // results func TestSearch_PrefixSearch_NoMatches(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -662,19 +601,16 @@ func TestSearch_PrefixSearch_NoMatches(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(0, len(resp.Matches[structs.Jobs])) - assert.Equal(uint64(0), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Empty(t, resp.Matches[structs.Jobs]) + require.Equal(t, uint64(0), resp.Index) } // Prefixes can only be looked up if their length is a power of two. For // prefixes which are an odd length, use the length-1 characters. func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { t.Parallel() - assert := assert.New(t) + id1 := "aaafaaaa-e8f7-fd38-c855-ab94ceb89" id2 := "aaafeaaa-e8f7-fd38-c855-ab94ceb89" prefix := "aaafa" @@ -686,8 +622,8 @@ func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, id1, 0) - registerAndVerifyJob(s, t, id2, 50) + job := registerMockJob(s, t, id1, 0) + registerMockJob(s, t, id2, 50) req := &structs.SearchRequest{ Prefix: prefix, @@ -699,17 +635,14 @@ func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) } func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { t.Parallel() - assert := assert.New(t) + jobName := "exampleexample" s1, cleanupS1 := TestServer(t, func(c *Config) { @@ -727,7 +660,7 @@ func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { TestJoin(t, s1, s2) testutil.WaitForLeader(t, s1.RPC) - job := registerAndVerifyJob(s1, t, jobName, 0) + job := registerMockJob(s1, t, jobName, 0) req := &structs.SearchRequest{ Prefix: "", @@ -741,18 +674,15 @@ func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { codec := rpcClient(t, s2) var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_CSIPlugin(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -775,18 +705,15 @@ func TestSearch_PrefixSearch_CSIPlugin(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, 
msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Plugins])) - assert.Equal(id, resp.Matches[structs.Plugins][0]) - assert.Equal(resp.Truncations[structs.Plugins], false) + require.Len(t, resp.Matches[structs.Plugins], 1) + require.Equal(t, id, resp.Matches[structs.Plugins][0]) + require.False(t, resp.Truncations[structs.Plugins]) } func TestSearch_PrefixSearch_CSIVolume(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -815,18 +742,16 @@ func TestSearch_PrefixSearch_CSIVolume(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Volumes])) - assert.Equal(id, resp.Matches[structs.Volumes][0]) - assert.Equal(resp.Truncations[structs.Volumes], false) + require.Len(t, resp.Matches[structs.Volumes], 1) + require.Equal(t, id, resp.Matches[structs.Volumes][0]) + require.False(t, resp.Truncations[structs.Volumes]) } func TestSearch_PrefixSearch_Namespace(t *testing.T) { - assert := assert.New(t) t.Parallel() + s, cleanup := TestServer(t, func(c *Config) { c.NumSchedulers = 0 }) @@ -836,7 +761,7 @@ func TestSearch_PrefixSearch_Namespace(t *testing.T) { testutil.WaitForLeader(t, s.RPC) ns := mock.Namespace() - assert.Nil(s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) + require.NoError(t, s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) prefix := ns.Name[:len(ns.Name)-2] @@ -849,20 +774,17 @@ func TestSearch_PrefixSearch_Namespace(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Namespaces])) - assert.Equal(ns.Name, resp.Matches[structs.Namespaces][0]) - assert.Equal(resp.Truncations[structs.Namespaces], false) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(2000), resp.Index) + require.Len(t, resp.Matches[structs.Namespaces], 1) + require.Equal(t, ns.Name, resp.Matches[structs.Namespaces][0]) + require.False(t, resp.Truncations[structs.Namespaces]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { t.Parallel() - assert := assert.New(t) + s, root, cleanup := TestACLServer(t, func(c *Config) { c.NumSchedulers = 0 }) @@ -870,19 +792,19 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() ns := mock.Namespace() - assert.Nil(state.UpsertNamespaces(500, []*structs.Namespace{ns})) + require.NoError(t, fsmState.UpsertNamespaces(500, []*structs.Namespace{ns})) job1 := mock.Job() - assert.Nil(state.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) job2 := mock.Job() job2.Namespace = ns.Name - assert.Nil(state.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) - assert.Nil(state.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) req := 
&structs.SearchRequest{ Prefix: "", @@ -897,69 +819,66 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { { var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with an invalid token and expect failure { - invalidToken := mock.CreatePolicyAndToken(t, state, 1003, "test-invalid", + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) req.AuthToken = invalidToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect failure due to Namespaces being the context { - validToken := mock.CreatePolicyAndToken(t, state, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.Namespaces req.AuthToken = validToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect success due to All context { - validToken := mock.CreatePolicyAndToken(t, state, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.All req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) // Jobs filtered out since token only has access to node:read - assert.Len(resp.Matches[structs.Jobs], 0) + require.Len(t, resp.Matches[structs.Jobs], 0) } // Try with a valid token for non-default namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1009, "test-valid2", + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob})) req.Context = structs.All req.AuthToken = validToken.SecretID req.Namespace = job2.Namespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job2.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Namespaces], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job2.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Namespaces], 1) // Index of job - not node - because node context is filtered out - assert.Equal(uint64(504), resp.Index) + require.Equal(t, uint64(504), resp.Index) // Nodes 
filtered out since token only has access to namespace:read-job - assert.Len(resp.Matches[structs.Nodes], 0) + require.Len(t, resp.Matches[structs.Nodes], 0) } // Try with a valid token for node:read and default namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1011, "test-valid3", strings.Join([]string{ + validToken := mock.CreatePolicyAndToken(t, fsmState, 1011, "test-valid3", strings.Join([]string{ mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob}), mock.NodePolicy(acl.PolicyRead), }, "\n")) @@ -967,12 +886,12 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { req.AuthToken = validToken.SecretID req.Namespace = structs.DefaultNamespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job1.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Namespaces], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Namespaces], 1) } // Try with a management token @@ -981,18 +900,17 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { req.AuthToken = root.SecretID req.Namespace = structs.DefaultNamespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job1.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Len(resp.Matches[structs.Namespaces], 2) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, resp.Matches[structs.Namespaces], 2) } } func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { t.Parallel() - require := require.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -1003,9 +921,9 @@ func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { job, policy := mock.JobWithScalingPolicy() prefix := policy.ID - state := s.fsm.State() + fsmState := s.fsm.State() - require.NoError(state.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) req := &structs.SearchRequest{ Prefix: prefix, @@ -1017,14 +935,757 @@ func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { } var resp structs.SearchResponse - require.NoError(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - require.Equal(1, len(resp.Matches[structs.ScalingPolicies])) - require.Equal(policy.ID, resp.Matches[structs.ScalingPolicies][0]) - require.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0]) + require.Equal(t, uint64(jobIndex), resp.Index) + + req.Context = structs.All + 
require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_ACL(t *testing.T) { + t.Parallel() + + s, root, cleanupS := TestACLServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.MinTermLength = 1 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + registerJob(s, t, job) + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "set-this-in-test", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace}, + } + + // Try without a token and expect failure + { + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with an invalid token and expect failure + { + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) + req.AuthToken = invalidToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect failure due to Jobs being the context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect success due to All context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + req.Context = structs.All + req.AuthToken = validToken.SecretID + req.Text = "oo" // mock node ID is foobar + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + + // Jobs filtered out since token only has access to node:read + require.Len(t, resp.Matches[structs.Jobs], 0) + } + + // Try with a valid token for namespace:read-job + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob})) + req.AuthToken = validToken.SecretID + req.Text = "jo" // mock job Name is my-job + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: "my-job", + Scope: []string{"default"}, + }, resp.Matches[structs.Jobs][0]) + + // Index of job - not node - because node context is filtered out + require.Equal(t, uint64(1000), resp.Index) + + // Nodes filtered out since token only has access to namespace:read-job + require.Len(t, resp.Matches[structs.Nodes], 0) + } + + // Try with a management token + { + req.AuthToken = root.SecretID + var 
resp structs.FuzzySearchResponse + req.Text = "o" // matches Job:my-job and Node:foobar + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: "my-job", Scope: []string{"default"}, + }, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: "foobar", + }, resp.Matches[structs.Nodes][0]) + } +} + +func TestSearch_FuzzySearch_NotEnabled(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.FuzzyEnabled = false + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + registerJob(s, t, job) + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "foo", // min set to 5 + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace}, + } + + var resp structs.FuzzySearchResponse + require.EqualError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp), + "fuzzy search is not enabled") +} + +func TestSearch_FuzzySearch_ShortText(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.MinTermLength = 5 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + registerJob(s, t, job) + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "foo", // min set to 5 + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace}, + } + + var resp structs.FuzzySearchResponse + require.EqualError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp), + "fuzzy search query must be at least 5 characters, got 3") +} + +func TestSearch_FuzzySearch_TruncateLimitQuery(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "job", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: "default"}, + } + + for i := 0; i < 25; i++ { + job := mock.Job() + job.Name = fmt.Sprintf("my-job-%d", i) + registerJob(s, t, job) + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Jobs], 20) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_TruncateLimitResults(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.LimitQuery = 10000 + c.SearchConfig.LimitResults = 5 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "job", + Context: 
structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: "default"}, + } + + for i := 0; i < 25; i++ { + job := mock.Job() + job.Name = fmt.Sprintf("my-job-%d", i) + registerJob(s, t, job) + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Jobs], 5) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_Evals(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + eval1 := mock.Eval() + eval1.ID = "f7dee5a1-d2b0-2f6a-2e75-6c8e467a4b99" + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) + + req := &structs.FuzzySearchRequest{ + Text: "f7dee", // evals are prefix searched + Context: structs.Evals, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: eval1.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0].ID) + require.False(t, resp.Truncations[structs.Evals]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_Allocation(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + alloc := mock.Alloc() + summary := mock.JobSummary(alloc.JobID) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc})) + + req := &structs.FuzzySearchRequest{ + Text: "web", + Context: structs.Allocs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: alloc.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.Name, resp.Matches[structs.Allocs][0].ID) + require.False(t, resp.Truncations[structs.Allocs]) + require.Equal(t, uint64(90), resp.Index) +} + +func TestSearch_FuzzySearch_Node(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + fsmState := s.fsm.State() + node := mock.Node() + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) + + req := &structs.FuzzySearchRequest{ + Text: "oo", + Context: structs.Nodes, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: structs.DefaultNamespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, node.Name, resp.Matches[structs.Nodes][0].ID) + require.False(t, resp.Truncations[structs.Nodes]) + require.Equal(t, uint64(100), resp.Index) +} + +func TestSearch_FuzzySearch_Deployment(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := 
rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + deployment := mock.Deployment() + require.NoError(t, s.fsm.State().UpsertDeployment(2000, deployment)) + + req := &structs.FuzzySearchRequest{ + Text: deployment.ID[0:3], // deployments are prefix searched + Context: structs.Deployments, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: deployment.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Deployments], 1) + require.Equal(t, deployment.ID, resp.Matches[structs.Deployments][0].ID) + require.False(t, resp.Truncations[structs.Deployments]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_CSIPlugin(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + state.CreateTestCSIPlugin(s.fsm.State(), "my-plugin") + + req := &structs.FuzzySearchRequest{ + Text: "lug", + Context: structs.Plugins, + QueryOptions: structs.QueryOptions{ + Region: "global", + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Plugins], 1) + require.Equal(t, "my-plugin", resp.Matches[structs.Plugins][0].ID) + require.False(t, resp.Truncations[structs.Plugins]) +} + +func TestSearch_FuzzySearch_CSIVolume(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + id := uuid.Generate() + err := s.fsm.State().CSIVolumeRegister(1000, []*structs.CSIVolume{{ + ID: id, + Namespace: structs.DefaultNamespace, + PluginID: "glade", + }}) + require.NoError(t, err) + + req := &structs.FuzzySearchRequest{ + Text: id[0:3], // volumes are prefix searched + Context: structs.Volumes, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: structs.DefaultNamespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Volumes], 1) + require.Equal(t, id, resp.Matches[structs.Volumes][0].ID) + require.False(t, resp.Truncations[structs.Volumes]) +} + +func TestSearch_FuzzySearch_Namespace(t *testing.T) { + t.Parallel() + + s, cleanup := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + + defer cleanup() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + ns := mock.Namespace() + require.NoError(t, s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) + + req := &structs.FuzzySearchRequest{ + Text: "am", // mock is team- + Context: structs.Namespaces, + QueryOptions: structs.QueryOptions{ + Region: "global", + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Namespaces], 1) + require.Equal(t, ns.Name, resp.Matches[structs.Namespaces][0].ID) + require.False(t, resp.Truncations[structs.Namespaces]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_ScalingPolicy(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, 
s.RPC) + + job, policy := mock.JobWithScalingPolicy() + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) + + req := &structs.FuzzySearchRequest{ + Text: policy.ID[0:3], // scaling policies are prefix searched + Context: structs.ScalingPolicies, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0].ID) + require.Equal(t, uint64(jobIndex), resp.Index) req.Context = structs.All - require.NoError(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - require.Equal(1, len(resp.Matches[structs.ScalingPolicies])) - require.Equal(policy.ID, resp.Matches[structs.ScalingPolicies][0]) - require.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0].ID) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_Namespace_ACL(t *testing.T) { + t.Parallel() + + s, root, cleanup := TestACLServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanup() + + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + ns := mock.Namespace() + require.NoError(t, fsmState.UpsertNamespaces(500, []*structs.Namespace{ns})) + + job1 := mock.Job() + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) + + job2 := mock.Job() + job2.Namespace = ns.Name + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "set-text-in-test", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job1.Namespace, + }, + } + + // Try without a token and expect failure + { + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with an invalid token and expect failure + { + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) + req.AuthToken = invalidToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect failure due to Namespaces being the context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + req.Context = structs.Namespaces + req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect success due to All context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + req.Text = "foo" + req.Context = structs.All + 
req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + + // Jobs filtered out since token only has access to node:read + require.Len(t, resp.Matches[structs.Jobs], 0) + } + + // Try with a valid token for non-default namespace:read-job + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", + mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob})) + req.Text = "job" + req.Context = structs.All + req.AuthToken = validToken.SecretID + req.Namespace = job2.Namespace + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job2.Name, resp.Matches[structs.Jobs][0].ID) + + // Index of job - not node - because node context is filtered out + require.Equal(t, uint64(504), resp.Index) + + // Nodes filtered out since token only has access to namespace:read-job + require.Len(t, resp.Matches[structs.Nodes], 0) + } + + // Try with a management token + { + req.Context = structs.All + req.AuthToken = root.SecretID + req.Namespace = structs.DefaultNamespace + var resp structs.SearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, resp.Matches[structs.Namespaces], 2) + } +} + +func TestSearch_FuzzySearch_Job(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + job.Name = "demo-sleep" + job.Namespace = "team-sleepy" + job.TaskGroups = []*structs.TaskGroup{{ + Name: "qa-sleeper-group-one", + Services: []*structs.Service{{ + Name: "qa-group-sleep-svc-one", + }}, + Tasks: []*structs.Task{{ + Name: "qa-sleep-task-one", + Services: []*structs.Service{{ + Name: "some-sleepy-task-svc-one", + }}, + Driver: "docker", + Config: map[string]interface{}{ + "image": "sleeper:latest", + }, + }}, + }, { + Name: "prod-sleeper-group-one", + Tasks: []*structs.Task{{ + Name: "prod-sleep-task-one", + Driver: "exec", + Config: map[string]interface{}{ + "command": "/bin/sleep", + }, + }, { + Name: "prod-task-two", + Driver: "raw_exec", + Config: map[string]interface{}{ + "command": "/usr/sbin/sleep", + }, + Services: []*structs.Service{{ + Name: "some-sleepy-task-svc-two", + }}, + }}, + }, { + Name: "sleep-in-java", + Tasks: []*structs.Task{{ + Name: "prod-java-sleep", + Driver: "java", + Config: map[string]interface{}{ + "class": "sleep.class", + }, + }}, + }} + + ns := mock.Namespace() + ns.Name = job.Namespace + require.NoError(t, fsmState.UpsertNamespaces(2000, []*structs.Namespace{ns})) + registerJob(s, t, job) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1003, mock.Node())) + + t.Run("sleep", func(t *testing.T) { + req := &structs.FuzzySearchRequest{ + Text: "sleep", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job.Namespace, + }, + } + var resp structs.FuzzySearchResponse + require.NoError(t, 
msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + m := resp.Matches + require.Equal(t, uint64(1000), resp.Index) // job is explicit search context, has id=1000 + + // just the one job + require.Len(t, m[structs.Jobs], 1) + + // 3 services (1 group, 2 task) + require.Len(t, m[structs.Services], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "some-sleepy-task-svc-one", + Scope: []string{"team-sleepy", "demo-sleep", "qa-sleeper-group-one", "qa-sleep-task-one"}, + }, { + ID: "some-sleepy-task-svc-two", + Scope: []string{"team-sleepy", "demo-sleep", "prod-sleeper-group-one", "prod-task-two"}, + }, { + ID: "qa-group-sleep-svc-one", + Scope: []string{"team-sleepy", "demo-sleep", "qa-sleeper-group-one"}, + }}, m[structs.Services]) + + // 3 groups + require.Len(t, m[structs.Groups], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleep-in-java", + Scope: []string{"team-sleepy", "demo-sleep"}, + }, { + ID: "qa-sleeper-group-one", + Scope: []string{"team-sleepy", "demo-sleep"}, + }, { + ID: "prod-sleeper-group-one", + Scope: []string{"team-sleepy", "demo-sleep"}, + }}, m[structs.Groups]) + + // 3 tasks (1 does not match) + require.Len(t, m[structs.Tasks], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "qa-sleep-task-one", + Scope: []string{"team-sleepy", "demo-sleep", "qa-sleeper-group-one"}, + }, { + ID: "prod-sleep-task-one", + Scope: []string{"team-sleepy", "demo-sleep", "prod-sleeper-group-one"}, + }, { + ID: "prod-java-sleep", + Scope: []string{"team-sleepy", "demo-sleep", "sleep-in-java"}, + }}, m[structs.Tasks]) + + // 2 tasks with command + require.Len(t, m[structs.Commands], 2) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "/bin/sleep", + Scope: []string{"team-sleepy", "demo-sleep", "prod-sleeper-group-one", "prod-sleep-task-one"}, + }, { + ID: "/usr/sbin/sleep", + Scope: []string{"team-sleepy", "demo-sleep", "prod-sleeper-group-one", "prod-task-two"}, + }}, m[structs.Commands]) + + // 1 task with image + require.Len(t, m[structs.Images], 1) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleeper:latest", + Scope: []string{"team-sleepy", "demo-sleep", "qa-sleeper-group-one", "qa-sleep-task-one"}, + }}, m[structs.Images]) + + // 1 task with class + require.Len(t, m[structs.Classes], 1) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleep.class", + Scope: []string{"team-sleepy", "demo-sleep", "sleep-in-java", "prod-java-sleep"}, + }}, m[structs.Classes]) + }) } diff --git a/nomad/structs/search.go b/nomad/structs/search.go new file mode 100644 index 000000000000..3003f4dce00a --- /dev/null +++ b/nomad/structs/search.go @@ -0,0 +1,129 @@ +package structs + +// Context defines the scope in which a search for Nomad object operates, and +// is also used to query the matching index value for this context. +type Context string + +const ( + // Individual context types. + Allocs Context = "allocs" + Deployments Context = "deployment" + Evals Context = "evals" + Jobs Context = "jobs" + Nodes Context = "nodes" + Namespaces Context = "namespaces" + Quotas Context = "quotas" + Recommendations Context = "recommendations" + ScalingPolicies Context = "scaling_policy" + Plugins Context = "plugins" + Volumes Context = "volumes" + + // Subtypes used in fuzzy matching. + Groups Context = "groups" + Services Context = "services" + Tasks Context = "tasks" + Images Context = "images" + Commands Context = "commands" + Classes Context = "classes" + + // Union context types. + All Context = "all" +) + +// SearchConfig is used in servers to configure search API options. 
+type SearchConfig struct { + // FuzzyEnabled toggles whether the FuzzySearch API is enabled. If not + // enabled, requests to /v1/search/fuzzy will reply with a 404 response code. + FuzzyEnabled bool `hcl:"fuzzy_enabled"` + + // LimitQuery limits the number of objects searched in the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of the Nomad server when + // the FuzzySearch API is enabled. + LimitQuery int `hcl:"limit_query"` + + // LimitResults limits the number of results provided by the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of the Nomad server per + // fuzzy search request when the FuzzySearch API is enabled. + LimitResults int `hcl:"limit_results"` + + // MinTermLength is the minimum length of Text required before the FuzzySearch + // API will return results. + // + // Increasing this value can avoid resource consumption on the Nomad server by + // reducing searches with less meaningful results. + MinTermLength int `hcl:"min_term_length"` +} + +// SearchResponse is used to return matches and information about whether +// the match list is truncated specific to each type of Context. +type SearchResponse struct { + // Map of Context types to IDs which match a specified prefix + Matches map[Context][]string + + // Truncations indicates whether the matches for a particular Context have + // been truncated + Truncations map[Context]bool + + QueryMeta +} + +// SearchRequest is used to parameterize a request, and returns a +// list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type SearchRequest struct { + // Prefix is what IDs are matched to. I.e., if the given prefix were + // "a", potential matches might be "abcd" or "aabb" + Prefix string + + // Context is the type that can be matched against. A context can be a job, + // node, evaluation, allocation, or empty (indicating every context should be + // matched) + Context Context + + QueryOptions +} + +// FuzzyMatch is used to describe the ID of an object which may be a machine +// readable UUID or a human readable Name. If the object is a component of a Job, +// the Scope is a list of IDs starting from Namespace down to the parent object of +// ID. +// +// e.g. A Task-level service would have scope like, +// ["<namespace>", "<job>", "<group>", "<task>"] +type FuzzyMatch struct { + ID string // ID is UUID or Name of object + Scope []string `json:",omitempty"` // IDs of parent objects +} + +// FuzzySearchResponse is used to return fuzzy matches and information about +// whether the match list is truncated specific to each type of searchable Context. +type FuzzySearchResponse struct { + // Matches is a map of Context types to IDs which fuzzy match a specified query. + Matches map[Context][]FuzzyMatch + + // Truncations indicates whether the matches for a particular Context have + // been truncated. + Truncations map[Context]bool + + QueryMeta +} + +// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns +// a list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type FuzzySearchRequest struct { + // Text is what names are fuzzy-matched to. E.g. if the given text were + // "py", potential matches might be "python", "mypy", etc.
of jobs, nodes, + // allocs, groups, services, commands, images, classes. + Text string + + // Context is the type that can be matched against. A Context of "all" indicates + // all Contexts types are queried for matching. + Context Context + + QueryOptions +} diff --git a/nomad/structs/structs.go b/nomad/structs/structs.go index e92d42981100..3dfbb48184d6 100644 --- a/nomad/structs/structs.go +++ b/nomad/structs/structs.go @@ -193,25 +193,6 @@ var ( validNamespaceName = regexp.MustCompile("^[a-zA-Z0-9-]{1,128}$") ) -// Context defines the scope in which a search for Nomad object operates, and -// is also used to query the matching index value for this context -type Context string - -const ( - Allocs Context = "allocs" - Deployments Context = "deployment" - Evals Context = "evals" - Jobs Context = "jobs" - Nodes Context = "nodes" - Namespaces Context = "namespaces" - Quotas Context = "quotas" - Recommendations Context = "recommendations" - ScalingPolicies Context = "scaling_policy" - All Context = "all" - Plugins Context = "plugins" - Volumes Context = "volumes" -) - // NamespacedID is a tuple of an ID and a namespace type NamespacedID struct { ID string @@ -581,35 +562,6 @@ type NodeSpecificRequest struct { QueryOptions } -// SearchResponse is used to return matches and information about whether -// the match list is truncated specific to each type of context. -type SearchResponse struct { - // Map of context types to ids which match a specified prefix - Matches map[Context][]string - - // Truncations indicates whether the matches for a particular context have - // been truncated - Truncations map[Context]bool - - QueryMeta -} - -// SearchRequest is used to parameterize a request, and returns a -// list of matches made up of jobs, allocations, evaluations, and/or nodes, -// along with whether or not the information returned is truncated. -type SearchRequest struct { - // Prefix is what ids are matched to. I.e, if the given prefix were - // "a", potential matches might be "abcd" or "aabb" - Prefix string - - // Context is the type that can be matched against. A context can be a job, - // node, evaluation, allocation, or empty (indicated every context should be - // matched) - Context Context - - QueryOptions -} - // JobRegisterRequest is used for Job.Register endpoint // to register a job as being a schedulable entity. type JobRegisterRequest struct { diff --git a/nomad/testing.go b/nomad/testing.go index a70f43fd02bb..a3bc9b2d1a39 100644 --- a/nomad/testing.go +++ b/nomad/testing.go @@ -95,6 +95,14 @@ func TestServer(t testing.T, cb func(*Config)) (*Server, func()) { // Disable consul autojoining: tests typically join servers directly config.ConsulConfig.ServerAutoJoin = &f + // Enable fuzzy search API + config.SearchConfig = &structs.SearchConfig{ + FuzzyEnabled: true, + LimitQuery: 20, + LimitResults: 100, + MinTermLength: 2, + } + // Invoke the callback if any if cb != nil { cb(config) diff --git a/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go b/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go index 399424df5bba..b973f733bd02 100644 --- a/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go +++ b/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go @@ -1,9 +1,12 @@ +// Package contexts provides constants used with the Nomad Search API. package contexts -// Context defines the scope in which a search for Nomad object operates +// Context defines the scope in which a search for Nomad object operates. 
type Context string

const (
+	// These Context types are used to reference the high level Nomad object
+	// types that can be searched.
	Allocs Context = "allocs"
	Deployments Context = "deployment"
	Evals Context = "evals"
@@ -15,5 +18,16 @@ const (
	ScalingPolicies Context = "scaling_policy"
	Plugins Context = "plugins"
	Volumes Context = "volumes"
-	All Context = "all"
+
+	// These Context types are used to associate a search result from a lower
+	// level Nomad object with one of the higher level Context types above.
+	Groups Context = "groups"
+	Services Context = "services"
+	Tasks Context = "tasks"
+	Images Context = "images"
+	Commands Context = "commands"
+	Classes Context = "classes"
+
+	// Context used to represent the set of all the higher level Context types.
+	All Context = "all"
)
diff --git a/vendor/github.com/hashicorp/nomad/api/search.go b/vendor/github.com/hashicorp/nomad/api/search.go
index 6a6cb9b59e17..3b020827a495 100644
--- a/vendor/github.com/hashicorp/nomad/api/search.go
+++ b/vendor/github.com/hashicorp/nomad/api/search.go
@@ -13,7 +13,7 @@ func (c *Client) Search() *Search {
	return &Search{client: c}
}

-// PrefixSearch returns a list of matches for a particular context and prefix.
+// PrefixSearch returns a set of matches for a particular context and prefix.
func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryOptions) (*SearchResponse, *QueryMeta, error) {
	var resp SearchResponse
	req := &SearchRequest{Prefix: prefix, Context: context}
@@ -26,14 +26,72 @@ func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryO
	return &resp, qm, nil
}

+type SearchResponse struct {
+	Matches map[contexts.Context][]string
+	Truncations map[contexts.Context]bool
+	QueryMeta
+}
+
type SearchRequest struct {
	Prefix string
	Context contexts.Context
	QueryOptions
}

-type SearchResponse struct {
-	Matches map[contexts.Context][]string
+// FuzzySearch returns a set of matches for a given context and string.
+func (s *Search) FuzzySearch(text string, context contexts.Context, q *QueryOptions) (*FuzzySearchResponse, *QueryMeta, error) {
+	var resp FuzzySearchResponse
+
+	req := &FuzzySearchRequest{
+		Context: context,
+		Text: text,
+	}
+
+	qm, err := s.client.putQuery("/v1/search/fuzzy", req, &resp, q)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return &resp, qm, nil
+}
+
+// FuzzyMatch is used to describe the ID of an object which may be a machine
+// readable UUID or a human readable Name. If the object is a component of a Job,
+// the Scope is a list of IDs starting from Namespace down to the parent object of
+// ID.
+//
+// e.g. A Task-level service would have scope like,
+//   ["<namespace>", "<job>", "<group>", "<task>"]
+type FuzzyMatch struct {
+	ID string // ID is UUID or Name of object
+	Scope []string `json:",omitempty"` // IDs of parent objects
+}
+
+// FuzzySearchResponse is used to return fuzzy matches and information about
+// whether the match list is truncated specific to each type of searchable Context.
+type FuzzySearchResponse struct {
+	// Matches is a map of Context types to IDs which fuzzy match a specified query.
+	Matches map[contexts.Context][]FuzzyMatch
+
+	// Truncations indicates whether the matches for a particular Context have
+	// been truncated.
	Truncations map[contexts.Context]bool
+
	QueryMeta
}
+
+// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns
+// a list of matches made up of jobs, allocations, evaluations, and/or nodes,
+// along with whether or not the information returned is truncated.
+type FuzzySearchRequest struct {
+	// Text is what names are fuzzy-matched to. E.g. if the given text were
+	// "py", potential matches might be "python", "mypy", etc. of jobs, nodes,
+	// allocs, groups, services, commands, images, classes.
+	Text string
+
+	// Context is the type that can be matched against. A Context of "all" indicates
+	// all Context types are queried for matching.
+	Context contexts.Context
+
+	QueryOptions
+}
diff --git a/website/content/docs/configuration/search.mdx b/website/content/docs/configuration/search.mdx
new file mode 100644
index 000000000000..78c9252eb282
--- /dev/null
+++ b/website/content/docs/configuration/search.mdx
@@ -0,0 +1,48 @@
+---
+layout: docs
+page_title: search Stanza - Agent Configuration
+sidebar_title: search
+description: >-
+  The "search" stanza specifies configuration for the search API provided
+  by the Nomad servers.
+---
+
+# `search` Stanza
+
+
+
+The `search` stanza specifies configuration for the search API provided by the
+Nomad servers.
+
+```hcl
+server {
+  search {
+    fuzzy_enabled = true
+    limit_query = 200
+    limit_results = 1000
+    min_term_length = 5
+  }
+}
+```
+
+## `search` Parameters
+
+- `fuzzy_enabled` `(bool: true)` - Specifies whether the fuzzy search API is
+  enabled. If not enabled, requests to the fuzzy search API endpoint will return
+  an error response.
+
+- `limit_query` `(int: 20)` - Specifies the maximum number of Nomad objects to
+  search through in the Nomad server before truncating results. Setting this parameter
+  to a high value may degrade Nomad server performance.
+
+- `limit_results` `(int: 100)` - Specifies the maximum number of matching results
+  to accumulate in the API response before truncating results. Setting this parameter
+  to a high value may cause excessively large API response sizes.
+
+- `min_term_length` `(int: 2)` - Specifies the minimum size of the search term
+  allowed for matching with the fuzzy search API. Setting this value higher can
+  prevent unnecessary load on the Nomad server from broad queries.
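
As a quick illustration of the client-side API introduced in this change, the following is a minimal sketch of how a Go program might call the new fuzzy search endpoint through the `api` package. The agent address (whatever `api.DefaultConfig()` resolves to) and the query text `"sleep"` are assumptions for the example, not part of the patch.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/nomad/api"
	"github.com/hashicorp/nomad/api/contexts"
)

func main() {
	// Connect to a Nomad agent using the default configuration
	// (NOMAD_ADDR, falling back to http://127.0.0.1:4646). Assumed for
	// illustration; any reachable agent works.
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Fuzzy-search every context for names containing "sleep". The server
	// must have the fuzzy search API enabled (the default), and the text
	// must satisfy min_term_length.
	resp, _, err := client.Search().FuzzySearch("sleep", contexts.All, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Each match carries the ID that matched plus the scope of parent IDs,
	// e.g. a task-level service is scoped by namespace, job, group, and task.
	for ctx, matches := range resp.Matches {
		for _, match := range matches {
			fmt.Printf("%s: %s (scope: %v, truncated: %v)\n",
				ctx, match.ID, match.Scope, resp.Truncations[ctx])
		}
	}
}
```

Run against a server with `search { fuzzy_enabled = true }` (the default), a query like this returns matches grouped by context, each carrying the parent-ID scope described in the `FuzzyMatch` comments above.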