diff --git a/CHANGELOG.md b/CHANGELOG.md index 05c06f80f6b1..6b1879351d7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ __BACKWARDS INCOMPATIBILITIES:__ * csi: The `attachment_mode` and `access_mode` field are required for `volume` blocks in job specifications. Registering a volume requires at least one `capability` block with the `attachment_mode` and `access_mode` fields set. [[GH-10330](https://github.com/hashicorp/nomad/issues/10330)] IMPROVEMENTS: + * api: Added an API endpoint for fuzzy search queries [[GH-10184](https://github.com/hashicorp/nomad/pull/10184)] * api: Removed unimplemented `CSIVolumes.PluginList` API. [[GH-10158](https://github.com/hashicorp/nomad/issues/10158)] * cli: Update defaults for `nomad operator debug` flags `-interval` and `-server-id` to match common usage. [[GH-10121](https://github.com/hashicorp/nomad/issues/10121)] * cli: Added `nomad ui -authenticate` flag to generate a one-time token for authenticating to the web UI when ACLs are enabled. [[GH-10097](https://github.com/hashicorp/nomad/issues/10097)] diff --git a/api/contexts/contexts.go b/api/contexts/contexts.go index 399424df5bba..b973f733bd02 100644 --- a/api/contexts/contexts.go +++ b/api/contexts/contexts.go @@ -1,9 +1,12 @@ +// Package contexts provides constants used with the Nomad Search API. package contexts -// Context defines the scope in which a search for Nomad object operates +// Context defines the scope in which a search for Nomad object operates. type Context string const ( + // These Context types are used to reference the high level Nomad object + // types that can be searched. 
Allocs Context = "allocs" Deployments Context = "deployment" Evals Context = "evals" @@ -15,5 +18,16 @@ const ( ScalingPolicies Context = "scaling_policy" Plugins Context = "plugins" Volumes Context = "volumes" - All Context = "all" + + // These Context types are used to associate a search result from a lower + // level Nomad object with one of the higher level Context types above. + Groups Context = "groups" + Services Context = "services" + Tasks Context = "tasks" + Images Context = "images" + Commands Context = "commands" + Classes Context = "classes" + + // Context used to represent the set of all the higher level Context types. + All Context = "all" ) diff --git a/api/search.go b/api/search.go index 6a6cb9b59e17..3b020827a495 100644 --- a/api/search.go +++ b/api/search.go @@ -13,7 +13,7 @@ func (c *Client) Search() *Search { return &Search{client: c} } -// PrefixSearch returns a list of matches for a particular context and prefix. +// PrefixSearch returns a set of matches for a particular context and prefix. func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryOptions) (*SearchResponse, *QueryMeta, error) { var resp SearchResponse req := &SearchRequest{Prefix: prefix, Context: context} @@ -26,14 +26,72 @@ func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryO return &resp, qm, nil } +type SearchResponse struct { + Matches map[contexts.Context][]string + Truncations map[contexts.Context]bool + QueryMeta +} + type SearchRequest struct { Prefix string Context contexts.Context QueryOptions } -type SearchResponse struct { - Matches map[contexts.Context][]string +// FuzzySearch returns a set of matches for a given context and string. 
+func (s *Search) FuzzySearch(text string, context contexts.Context, q *QueryOptions) (*FuzzySearchResponse, *QueryMeta, error) { + var resp FuzzySearchResponse + + req := &FuzzySearchRequest{ + Context: context, + Text: text, + } + + qm, err := s.client.putQuery("/v1/search/fuzzy", req, &resp, q) + if err != nil { + return nil, nil, err + } + + return &resp, qm, nil +} + +// FuzzyMatch is used to describe the ID of an object which may be a machine +// readable UUID or a human readable Name. If the object is a component of a Job, +// the Scope is a list of IDs starting from Namespace down to the parent object of +// ID. +// +// e.g. A Task-level service would have scope like, +// ["", "", "", ""] +type FuzzyMatch struct { + ID string // ID is UUID or Name of object + Scope []string `json:",omitempty"` // IDs of parent objects +} + +// FuzzySearchResponse is used to return fuzzy matches and information about +// whether the match list is truncated specific to each type of searchable Context. +type FuzzySearchResponse struct { + // Matches is a map of Context types to IDs which fuzzy match a specified query. + Matches map[contexts.Context][]FuzzyMatch + + // Truncations indicates whether the matches for a particular Context have + // been truncated. + Truncations map[contexts.Context]bool + QueryMeta +} + +// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns +// a list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type FuzzySearchRequest struct { + // Text is what names are fuzzy-matched to. E.g. if the given text were + // "py", potential matches might be "python", "mypy", etc. of jobs, nodes, + // allocs, groups, services, commands, images, classes. + Text string + + // Context is the type that can be matched against. A Context of "all" indicates + // all Context types are queried for matching. 
+ Context contexts.Context + + QueryOptions +} diff --git a/api/search_test.go b/api/search_test.go index 892394c39ebb..0b24e6c30185 100644 --- a/api/search_test.go +++ b/api/search_test.go @@ -7,8 +7,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestSearch_List(t *testing.T) { - require := require.New(t) +func TestSearch_PrefixSearch(t *testing.T) { t.Parallel() c, s := makeClient(t, nil, nil) @@ -16,17 +15,38 @@ func TestSearch_List(t *testing.T) { job := testJob() _, _, err := c.Jobs().Register(job, nil) - require.Nil(err) + require.NoError(t, err) id := *job.ID prefix := id[:len(id)-2] resp, qm, err := c.Search().PrefixSearch(prefix, contexts.Jobs, nil) - - require.Nil(err) - require.NotNil(qm) - require.NotNil(qm) + require.NoError(t, err) + require.NotNil(t, qm) + require.NotNil(t, resp) jobMatches := resp.Matches[contexts.Jobs] - require.Equal(1, len(jobMatches)) - require.Equal(id, jobMatches[0]) + require.Len(t, jobMatches, 1) + require.Equal(t, id, jobMatches[0]) +} + +func TestSearch_FuzzySearch(t *testing.T) { + t.Parallel() + c, s := makeClient(t, nil, nil) + defer s.Stop() + + job := testJob() + _, _, err := c.Jobs().Register(job, nil) + require.NoError(t, err) + + resp, qm, err := c.Search().FuzzySearch("bin", contexts.All, nil) + require.NoError(t, err) + require.NotNil(t, qm) + require.NotNil(t, resp) + + commandMatches := resp.Matches[contexts.Commands] + require.Len(t, commandMatches, 1) + require.Equal(t, "/bin/sleep", commandMatches[0].ID) + require.Equal(t, []string{ + "default", *job.ID, "group1", "task1", + }, commandMatches[0].Scope) } diff --git a/client/taskenv/env_test.go b/client/taskenv/env_test.go index da9edc18e7c5..1ddd38300876 100644 --- a/client/taskenv/env_test.go +++ b/client/taskenv/env_test.go @@ -217,123 +217,6 @@ func TestEnvironment_AsList(t *testing.T) { require.Equal(t, exp, act) } -// COMPAT(0.11): Remove in 0.11 -func TestEnvironment_AsList_Old(t *testing.T) { - n := mock.Node() - n.Meta = 
map[string]string{ - "metaKey": "metaVal", - } - a := mock.Alloc() - a.AllocatedResources = nil - a.Resources = &structs.Resources{ - CPU: 500, - MemoryMB: 256, - DiskMB: 150, - Networks: []*structs.NetworkResource{ - { - Device: "eth0", - IP: "192.168.0.100", - ReservedPorts: []structs.Port{ - {Label: "ssh", Value: 22}, - {Label: "other", Value: 1234}, - }, - MBits: 50, - DynamicPorts: []structs.Port{{Label: "http", Value: 2000}}, - }, - }, - } - a.TaskResources = map[string]*structs.Resources{ - "web": { - CPU: 500, - MemoryMB: 256, - Networks: []*structs.NetworkResource{ - { - Device: "eth0", - IP: "127.0.0.1", - ReservedPorts: []structs.Port{{Label: "https", Value: 8080}}, - MBits: 50, - DynamicPorts: []structs.Port{{Label: "http", Value: 80}}, - }, - }, - }, - } - a.TaskResources["ssh"] = &structs.Resources{ - Networks: []*structs.NetworkResource{ - { - Device: "eth0", - IP: "192.168.0.100", - MBits: 50, - ReservedPorts: []structs.Port{ - {Label: "ssh", Value: 22}, - {Label: "other", Value: 1234}, - }, - }, - }, - } - - // simulate canonicalization on restore or fetch - a.Canonicalize() - - task := a.Job.TaskGroups[0].Tasks[0] - task.Env = map[string]string{ - "taskEnvKey": "taskEnvVal", - } - task.Resources.Networks = []*structs.NetworkResource{ - // Nomad 0.8 didn't fully populate the fields in task Resource Networks - { - IP: "", - ReservedPorts: []structs.Port{{Label: "https"}}, - DynamicPorts: []structs.Port{{Label: "http"}}, - }, - } - env := NewBuilder(n, a, task, "global").SetDriverNetwork( - &drivers.DriverNetwork{PortMap: map[string]int{"https": 443}}, - ) - - act := env.Build().List() - exp := []string{ - "taskEnvKey=taskEnvVal", - "NOMAD_ADDR_http=127.0.0.1:80", - "NOMAD_PORT_http=80", - "NOMAD_IP_http=127.0.0.1", - "NOMAD_ADDR_https=127.0.0.1:8080", - "NOMAD_PORT_https=443", - "NOMAD_IP_https=127.0.0.1", - "NOMAD_HOST_PORT_http=80", - "NOMAD_HOST_PORT_https=8080", - "NOMAD_TASK_NAME=web", - "NOMAD_GROUP_NAME=web", - 
"NOMAD_ADDR_ssh_other=192.168.0.100:1234", - "NOMAD_ADDR_ssh_ssh=192.168.0.100:22", - "NOMAD_IP_ssh_other=192.168.0.100", - "NOMAD_IP_ssh_ssh=192.168.0.100", - "NOMAD_PORT_ssh_other=1234", - "NOMAD_PORT_ssh_ssh=22", - "NOMAD_CPU_LIMIT=500", - "NOMAD_DC=dc1", - "NOMAD_NAMESPACE=default", - "NOMAD_REGION=global", - "NOMAD_MEMORY_LIMIT=256", - "NOMAD_META_ELB_CHECK_INTERVAL=30s", - "NOMAD_META_ELB_CHECK_MIN=3", - "NOMAD_META_ELB_CHECK_TYPE=http", - "NOMAD_META_FOO=bar", - "NOMAD_META_OWNER=armon", - "NOMAD_META_elb_check_interval=30s", - "NOMAD_META_elb_check_min=3", - "NOMAD_META_elb_check_type=http", - "NOMAD_META_foo=bar", - "NOMAD_META_owner=armon", - fmt.Sprintf("NOMAD_JOB_ID=%s", a.Job.ID), - "NOMAD_JOB_NAME=my-job", - fmt.Sprintf("NOMAD_ALLOC_ID=%s", a.ID), - "NOMAD_ALLOC_INDEX=0", - } - sort.Strings(act) - sort.Strings(exp) - require.Equal(t, exp, act) -} - func TestEnvironment_AllValues(t *testing.T) { t.Parallel() diff --git a/command/agent/agent.go b/command/agent/agent.go index b63bff9bb063..4100d4bfb529 100644 --- a/command/agent/agent.go +++ b/command/agent/agent.go @@ -424,6 +424,16 @@ func convertServerConfig(agentConfig *Config) (*nomad.Config, error) { conf.LicenseEnv = agentConfig.Server.LicenseEnv conf.LicensePath = agentConfig.Server.LicensePath + // Add the search configuration + if search := agentConfig.Server.Search; search != nil { + conf.SearchConfig = &structs.SearchConfig{ + FuzzyEnabled: search.FuzzyEnabled, + LimitQuery: search.LimitQuery, + LimitResults: search.LimitResults, + MinTermLength: search.MinTermLength, + } + } + return conf, nil } diff --git a/command/agent/config.go b/command/agent/config.go index 2474bd8e5e0c..5a720a77eb04 100644 --- a/command/agent/config.go +++ b/command/agent/config.go @@ -510,6 +510,44 @@ type ServerConfig struct { // ExtraKeysHCL is used by hcl to surface unexpected keys ExtraKeysHCL []string `hcl:",unusedKeys" json:"-"` + + Search *Search `hcl:"search"` +} + +// Search is used in servers to configure 
search API options. +type Search struct { + // FuzzyEnabled toggles whether the FuzzySearch API is enabled. If not + // enabled, requests to /v1/search/fuzzy will reply with a 404 response code. + // + // Default: enabled. + FuzzyEnabled bool `hcl:"fuzzy_enabled"` + + // LimitQuery limits the number of objects searched in the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of Nomad server when + // the FuzzySearch API is enabled. + // + // Default value: 20. + LimitQuery int `hcl:"limit_query"` + + // LimitResults limits the number of results provided by the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of Nomad server per + // fuzzy search request when the FuzzySearch API is enabled. + // + // Default value: 100. + LimitResults int `hcl:"limit_results"` + + // MinTermLength is the minimum length of Text required before the FuzzySearch + // API will return results. + // + // Increasing this value can avoid resource consumption on Nomad server by + // reducing searches with less meaningful results. + // + // Default value: 2. 
+ MinTermLength int `hcl:"min_term_length"` } // ServerJoin is used in both clients and servers to bootstrap connections to @@ -900,6 +938,12 @@ func DefaultConfig() *Config { RetryInterval: 30 * time.Second, RetryMaxAttempts: 0, }, + Search: &Search{ + FuzzyEnabled: true, + LimitQuery: 20, + LimitResults: 100, + MinTermLength: 2, + }, }, ACL: &ACLConfig{ Enabled: false, @@ -1434,6 +1478,19 @@ func (a *ServerConfig) Merge(b *ServerConfig) *ServerConfig { result.DefaultSchedulerConfig = &c } + if b.Search != nil { + result.Search = &Search{FuzzyEnabled: b.Search.FuzzyEnabled} + if b.Search.LimitQuery > 0 { + result.Search.LimitQuery = b.Search.LimitQuery + } + if b.Search.LimitResults > 0 { + result.Search.LimitResults = b.Search.LimitResults + } + if b.Search.MinTermLength > 0 { + result.Search.MinTermLength = b.Search.MinTermLength + } + } + // Add the schedulers result.EnabledSchedulers = append(result.EnabledSchedulers, b.EnabledSchedulers...) diff --git a/command/agent/http.go b/command/agent/http.go index 23934f923d2b..1f4e48da4332 100644 --- a/command/agent/http.go +++ b/command/agent/http.go @@ -317,6 +317,7 @@ func (s *HTTPServer) registerHandlers(enableDebug bool) { s.mux.HandleFunc("/v1/status/leader", s.wrap(s.StatusLeaderRequest)) s.mux.HandleFunc("/v1/status/peers", s.wrap(s.StatusPeersRequest)) + s.mux.HandleFunc("/v1/search/fuzzy", s.wrap(s.FuzzySearchRequest)) s.mux.HandleFunc("/v1/search", s.wrap(s.SearchRequest)) s.mux.HandleFunc("/v1/operator/license", s.wrap(s.LicenseRequest)) diff --git a/command/agent/search_endpoint.go b/command/agent/search_endpoint.go index 58ee65f4577c..95e115834821 100644 --- a/command/agent/search_endpoint.go +++ b/command/agent/search_endpoint.go @@ -12,14 +12,14 @@ func (s *HTTPServer) SearchRequest(resp http.ResponseWriter, req *http.Request) if req.Method == "POST" || req.Method == "PUT" { return s.newSearchRequest(resp, req) } - return nil, CodedError(405, ErrInvalidMethod) + return nil, 
CodedError(http.StatusMethodNotAllowed, ErrInvalidMethod) } func (s *HTTPServer) newSearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { args := structs.SearchRequest{} if err := decodeBody(req, &args); err != nil { - return nil, CodedError(400, err.Error()) + return nil, CodedError(http.StatusBadRequest, err.Error()) } if s.parse(resp, req, &args.Region, &args.QueryOptions) { @@ -34,3 +34,30 @@ func (s *HTTPServer) newSearchRequest(resp http.ResponseWriter, req *http.Reques setMeta(resp, &out.QueryMeta) return out, nil } + +func (s *HTTPServer) FuzzySearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { + if req.Method == "POST" || req.Method == "PUT" { + return s.newFuzzySearchRequest(resp, req) + } + return nil, CodedError(http.StatusMethodNotAllowed, ErrInvalidMethod) +} + +func (s *HTTPServer) newFuzzySearchRequest(resp http.ResponseWriter, req *http.Request) (interface{}, error) { + var args structs.FuzzySearchRequest + + if err := decodeBody(req, &args); err != nil { + return nil, CodedError(http.StatusBadRequest, err.Error()) + } + + if s.parse(resp, req, &args.Region, &args.QueryOptions) { + return nil, nil + } + + var out structs.FuzzySearchResponse + if err := s.agent.RPC("Search.FuzzySearch", &args, &out); err != nil { + return nil, err + } + + setMeta(resp, &out.QueryMeta) + return out, nil +} diff --git a/command/agent/search_endpoint_test.go b/command/agent/search_endpoint_test.go index 4614a63def0e..68ca16bb9ea4 100644 --- a/command/agent/search_endpoint_test.go +++ b/command/agent/search_endpoint_test.go @@ -1,114 +1,195 @@ package agent import ( + "fmt" "net/http" "net/http/httptest" "testing" + "github.com/hashicorp/nomad/helper/uuid" "github.com/hashicorp/nomad/nomad/mock" "github.com/hashicorp/nomad/nomad/structs" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestHTTP_SearchWithIllegalMethod(t *testing.T) { - assert := assert.New(t) +func 
header(recorder *httptest.ResponseRecorder, name string) string { + return recorder.Result().Header.Get(name) +} + +func createJobForTest(jobID string, s *TestAgent, t *testing.T) { + job := mock.Job() + job.ID = jobID + job.TaskGroups[0].Count = 1 + state := s.Agent.server.State() + err := state.UpsertJob(structs.MsgTypeTestSetup, 1000, job) + require.NoError(t, err) +} + +func TestHTTP_PrefixSearchWithIllegalMethod(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { req, err := http.NewRequest("DELETE", "/v1/search", nil) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() _, err = s.Server.SearchRequest(respW, req) - assert.NotNil(err, "HTTP DELETE should not be accepted for this endpoint") + require.EqualError(t, err, "Invalid method") }) } -func createJobForTest(jobID string, s *TestAgent, t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearchWithIllegalMethod(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + req, err := http.NewRequest("DELETE", "/v1/search/fuzzy", nil) + require.NoError(t, err) + respW := httptest.NewRecorder() + _, err = s.Server.FuzzySearchRequest(respW, req) + require.EqualError(t, err, "Invalid method") + }) +} + +func createCmdJobForTest(name, cmd string, s *TestAgent, t *testing.T) *structs.Job { job := mock.Job() - job.ID = jobID + job.Name = name + job.TaskGroups[0].Tasks[0].Config["command"] = cmd job.TaskGroups[0].Count = 1 - state := s.Agent.server.State() err := state.UpsertJob(structs.MsgTypeTestSetup, 1000, job) - assert.Nil(err) + require.NoError(t, err) + return job } -func TestHTTP_Search_POST(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_POST(t *testing.T) { + t.Parallel() testJob := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJob, s, t) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, 
err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 1) + require.Equal(t, testJob, j[0]) + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_POST(t *testing.T) { + t.Parallel() + + testJobID := uuid.Generate() + + httpTest(t, nil, func(s *TestAgent) { + createJobForTest(testJobID, s, t) + data := structs.FuzzySearchRequest{Text: "fau", Context: structs.Namespaces} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) // searched one context: namespaces - assert.Equal(1, len(j)) - assert.Equal(j[0], testJob) + ns := res.Matches[structs.Namespaces] + require.Len(t, ns, 1) - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + require.Equal(t, "default", ns[0].ID) + require.Nil(t, ns[0].Scope) // only job types have scope + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_PUT(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_PUT(t *testing.T) { + t.Parallel() testJob := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJob, s, t) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, err := 
http.NewRequest("PUT", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 1) + require.Equal(t, testJob, j[0]) - assert.Equal(1, len(j)) - assert.Equal(j[0], testJob) + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_PUT(t *testing.T) { + t.Parallel() + + testJobID := uuid.Generate() + + httpTest(t, nil, func(s *TestAgent) { + createJobForTest(testJobID, s, t) + data := structs.FuzzySearchRequest{Text: "fau", Context: structs.Namespaces} + req, err := http.NewRequest("PUT", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) // searched one context: namespaces + + ns := res.Matches[structs.Namespaces] + require.Len(t, ns, 1) + + require.Equal(t, "default", ns[0].ID) + require.Nil(t, ns[0].Scope) // only job types have scope + + require.False(t, res.Truncations[structs.Namespaces]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_MultipleJobs(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_MultipleJobs(t *testing.T) { + t.Parallel() testJobA := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobB := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89707" testJobC := "bbbbbbbb-e8f7-fd38-c855-ab94ceb89707" - testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() httpTest(t, nil, func(s *TestAgent) { 
createJobForTest(testJobA, s, t) createJobForTest(testJobB, s, t) @@ -116,190 +197,367 @@ func TestHTTP_Search_MultipleJobs(t *testing.T) { data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.Jobs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Jobs] + require.Len(t, j, 2) + require.Contains(t, j, testJobA) + require.Contains(t, j, testJobB) + require.NotContains(t, j, testJobC) - assert.Equal(2, len(j)) - assert.Contains(j, testJobA) - assert.Contains(j, testJobB) - assert.NotContains(j, testJobC) - - assert.Equal(res.Truncations[structs.Jobs], false) - assert.NotEqual("0", respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Evaluation(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_MultipleJobs(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + job1ID := createCmdJobForTest("job1", "/bin/yes", s, t).ID + job2ID := createCmdJobForTest("job2", "/bin/no", s, t).ID + _ = createCmdJobForTest("job3", "/opt/java", s, t).ID // no match + job4ID := createCmdJobForTest("job4", "/sbin/ping", s, t).ID + + data := structs.FuzzySearchRequest{Text: "bin", Context: structs.Jobs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + // in example job, only the commands match the "bin" query + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + commands := res.Matches[structs.Commands] + 
require.Len(t, commands, 3) + + exp := []structs.FuzzyMatch{{ + ID: "/bin/no", + Scope: []string{"default", job2ID, "web", "web"}, + }, { + ID: "/bin/yes", + Scope: []string{"default", job1ID, "web", "web"}, + }, { + ID: "/sbin/ping", + Scope: []string{"default", job4ID, "web", "web"}, + }} + require.Equal(t, exp, commands) + + require.False(t, res.Truncations[structs.Jobs]) + require.NotEqual(t, "0", header(respW, "X-Nomad-Index")) + }) +} +func TestHTTP_PrefixSearch_Evaluation(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() eval1 := mock.Eval() eval2 := mock.Eval() err := state.UpsertEvals(structs.MsgTypeTestSetup, 9000, []*structs.Evaluation{eval1, eval2}) - assert.Nil(err) + require.NoError(t, err) prefix := eval1.ID[:len(eval1.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Evals} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) j := res.Matches[structs.Evals] - assert.Equal(1, len(j)) - assert.Contains(j, eval1.ID) - assert.NotContains(j, eval2.ID) + require.Len(t, j, 1) + require.Contains(t, j, eval1.ID) + require.NotContains(t, j, eval2.ID) + require.False(t, res.Truncations[structs.Evals]) + require.Equal(t, "9000", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_FuzzySearch_Evaluation(t *testing.T) { + t.Parallel() - assert.Equal(res.Truncations[structs.Evals], false) - assert.Equal("9000", respW.HeaderMap.Get("X-Nomad-Index")) + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + eval1 := mock.Eval() + eval2 := mock.Eval() + err := state.UpsertEvals(structs.MsgTypeTestSetup, 9000, []*structs.Evaluation{eval1, eval2}) + require.NoError(t, err) + + // fuzzy search 
does prefix search for evaluations + prefix := eval1.ID[:len(eval1.ID)-2] + data := structs.FuzzySearchRequest{Text: prefix, Context: structs.Evals} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + matches := res.Matches[structs.Evals] + require.Len(t, matches, 1) + + require.Equal(t, structs.FuzzyMatch{ + ID: eval1.ID, + }, matches[0]) + require.False(t, res.Truncations[structs.Evals]) + require.Equal(t, "9000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Allocations(t *testing.T) { - assert := assert.New(t) +func mockAlloc() *structs.Allocation { + a := mock.Alloc() + a.Name = fmt.Sprintf("%s.%s[%d]", a.Job.Name, "web", 0) + return a +} +func TestHTTP_PrefixSearch_Allocations(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() - alloc := mock.Alloc() + alloc := mockAlloc() err := state.UpsertAllocs(structs.MsgTypeTestSetup, 7000, []*structs.Allocation{alloc}) - assert.Nil(err) + require.NoError(t, err) prefix := alloc.ID[:len(alloc.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Allocs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) a := res.Matches[structs.Allocs] - assert.Equal(1, len(a)) - assert.Contains(a, alloc.ID) + require.Len(t, a, 1) + require.Contains(t, a, alloc.ID) - assert.Equal(res.Truncations[structs.Allocs], false) - assert.Equal("7000", respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, 
res.Truncations[structs.Allocs]) + require.Equal(t, "7000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Nodes(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Allocations(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + alloc := mockAlloc() + err := state.UpsertAllocs(structs.MsgTypeTestSetup, 7000, []*structs.Allocation{alloc}) + require.NoError(t, err) + + data := structs.FuzzySearchRequest{Text: "-job", Context: structs.Allocs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + a := res.Matches[structs.Allocs] + require.Len(t, a, 1) + require.Equal(t, "my-job.web[0]", a[0].ID) + + require.False(t, res.Truncations[structs.Allocs]) + require.Equal(t, "7000", header(respW, "X-Nomad-Index")) + }) +} +func TestHTTP_PrefixSearch_Nodes(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() node := mock.Node() err := state.UpsertNode(structs.MsgTypeTestSetup, 6000, node) - assert.Nil(err) + require.NoError(t, err) prefix := node.ID[:len(node.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Nodes} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) n := res.Matches[structs.Nodes] - assert.Equal(1, len(n)) - assert.Contains(n, node.ID) + require.Len(t, n, 1) + require.Contains(t, n, node.ID) - assert.Equal(res.Truncations[structs.Nodes], false) - assert.Equal("6000", 
respW.HeaderMap.Get("X-Nomad-Index")) + require.False(t, res.Truncations[structs.Nodes]) + require.Equal(t, "6000", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_Deployments(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Nodes(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + node := mock.Node() // foobar + err := state.UpsertNode(structs.MsgTypeTestSetup, 6000, node) + require.NoError(t, err) + + data := structs.FuzzySearchRequest{Text: "oo", Context: structs.Nodes} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + n := res.Matches[structs.Nodes] + require.Len(t, n, 1) + require.Equal(t, "foobar", n[0].ID) + + require.False(t, res.Truncations[structs.Nodes]) + require.Equal(t, "6000", header(respW, "X-Nomad-Index")) + }) +} + +func TestHTTP_PrefixSearch_Deployments(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { state := s.Agent.server.State() deployment := mock.Deployment() - assert.Nil(state.UpsertDeployment(999, deployment), "UpsertDeployment") + require.NoError(t, state.UpsertDeployment(999, deployment), "UpsertDeployment") prefix := deployment.ID[:len(deployment.ID)-2] data := structs.SearchRequest{Prefix: prefix, Context: structs.Deployments} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - - assert.Equal(1, len(res.Matches)) + require.Len(t, res.Matches, 1) n := res.Matches[structs.Deployments] - assert.Equal(1, len(n)) - assert.Contains(n, deployment.ID) - - 
assert.Equal("999", respW.HeaderMap.Get("X-Nomad-Index")) + require.Len(t, n, 1) + require.Contains(t, n, deployment.ID) + require.Equal(t, "999", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_NoJob(t *testing.T) { - assert := assert.New(t) +func TestHTTP_FuzzySearch_Deployments(t *testing.T) { + t.Parallel() + + httpTest(t, nil, func(s *TestAgent) { + state := s.Agent.server.State() + deployment := mock.Deployment() + require.NoError(t, state.UpsertDeployment(999, deployment), "UpsertDeployment") + + // fuzzy search of deployments are prefix searches + prefix := deployment.ID[:len(deployment.ID)-2] + data := structs.FuzzySearchRequest{Text: prefix, Context: structs.Deployments} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 1) + + n := res.Matches[structs.Deployments] + require.Len(t, n, 1) + require.Equal(t, deployment.ID, n[0].ID) + require.Equal(t, "999", header(respW, "X-Nomad-Index")) + }) +} +func TestHTTP_PrefixSearch_NoJob(t *testing.T) { t.Parallel() + httpTest(t, nil, func(s *TestAgent) { data := structs.SearchRequest{Prefix: "12345", Context: structs.Jobs} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) + require.Len(t, res.Matches, 1) + require.Len(t, res.Matches[structs.Jobs], 0) + require.Equal(t, "0", header(respW, "X-Nomad-Index")) + }) +} - assert.Equal(1, len(res.Matches)) - assert.Equal(0, len(res.Matches[structs.Jobs])) +func TestHTTP_FuzzySearch_NoJob(t *testing.T) { + t.Parallel() - assert.Equal("0", respW.HeaderMap.Get("X-Nomad-Index")) + httpTest(t, nil, func(s 
*TestAgent) { + data := structs.FuzzySearchRequest{Text: "12345", Context: structs.Jobs} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + require.Len(t, res.Matches, 0) + require.Equal(t, "0", header(respW, "X-Nomad-Index")) }) } -func TestHTTP_Search_AllContext(t *testing.T) { - assert := assert.New(t) +func TestHTTP_PrefixSearch_AllContext(t *testing.T) { + t.Parallel() testJobID := "aaaaaaaa-e8f7-fd38-c855-ab94ceb89706" testJobPrefix := "aaaaaaaa-e8f7-fd38" - t.Parallel() + httpTest(t, nil, func(s *TestAgent) { createJobForTest(testJobID, s, t) @@ -307,28 +565,59 @@ func TestHTTP_Search_AllContext(t *testing.T) { eval1 := mock.Eval() eval1.ID = testJobID err := state.UpsertEvals(structs.MsgTypeTestSetup, 8000, []*structs.Evaluation{eval1}) - assert.Nil(err) + require.NoError(t, err) data := structs.SearchRequest{Prefix: testJobPrefix, Context: structs.All} req, err := http.NewRequest("POST", "/v1/search", encodeReq(data)) - assert.Nil(err) + require.NoError(t, err) respW := httptest.NewRecorder() resp, err := s.Server.SearchRequest(respW, req) - assert.Nil(err) + require.NoError(t, err) res := resp.(structs.SearchResponse) - matchedJobs := res.Matches[structs.Jobs] matchedEvals := res.Matches[structs.Evals] + require.Len(t, matchedJobs, 1) + require.Len(t, matchedEvals, 1) + require.Equal(t, testJobID, matchedJobs[0]) + require.Equal(t, eval1.ID, matchedEvals[0]) + require.Equal(t, "8000", header(respW, "X-Nomad-Index")) + }) +} - assert.Equal(1, len(matchedJobs)) - assert.Equal(1, len(matchedEvals)) +func TestHTTP_FuzzySearch_AllContext(t *testing.T) { + t.Parallel() - assert.Equal(matchedJobs[0], testJobID) - assert.Equal(matchedEvals[0], eval1.ID) + httpTest(t, nil, func(s *TestAgent) { + jobID := createCmdJobForTest("job1", "/bin/aardvark", s, 
t).ID + + state := s.Agent.server.State() + eval1 := mock.Eval() + eval1.ID = "aaaa6573-04cb-61b4-04cb-865aaaf5d400" + err := state.UpsertEvals(structs.MsgTypeTestSetup, 8000, []*structs.Evaluation{eval1}) + require.NoError(t, err) - assert.Equal("8000", respW.HeaderMap.Get("X-Nomad-Index")) + data := structs.FuzzySearchRequest{Text: "aa", Context: structs.All} + req, err := http.NewRequest("POST", "/v1/search/fuzzy", encodeReq(data)) + require.NoError(t, err) + + respW := httptest.NewRecorder() + + resp, err := s.Server.FuzzySearchRequest(respW, req) + require.NoError(t, err) + + res := resp.(structs.FuzzySearchResponse) + matchedCommands := res.Matches[structs.Commands] + matchedEvals := res.Matches[structs.Evals] + require.Len(t, matchedCommands, 1) + require.Len(t, matchedEvals, 1) + require.Equal(t, eval1.ID, matchedEvals[0].ID) + require.Equal(t, "/bin/aardvark", matchedCommands[0].ID) + require.Equal(t, []string{ + "default", jobID, "web", "web", + }, matchedCommands[0].Scope) + require.Equal(t, "8000", header(respW, "X-Nomad-Index")) }) } diff --git a/nomad/config.go b/nomad/config.go index 0494768279d5..05db46fb383f 100644 --- a/nomad/config.go +++ b/nomad/config.go @@ -354,9 +354,11 @@ type Config struct { // LicenseConfig is a tunable knob for enterprise license testing. LicenseConfig *LicenseConfig + LicenseEnv string + LicensePath string - LicenseEnv string - LicensePath string + // SearchConfig provides knobs for Search API. 
+ SearchConfig *structs.SearchConfig // AgentShutdown is used to call agent.Shutdown from the context of a Server // It is used primarily for licensing diff --git a/nomad/mock/mock.go b/nomad/mock/mock.go index bf80e154e8fb..0c91bc5e06ee 100644 --- a/nomad/mock/mock.go +++ b/nomad/mock/mock.go @@ -1107,6 +1107,7 @@ func JobSummary(jobID string) *structs.JobSummary { } func Alloc() *structs.Allocation { + job := Job() alloc := &structs.Allocation{ ID: uuid.Generate(), EvalID: uuid.Generate(), @@ -1172,7 +1173,7 @@ func Alloc() *structs.Allocation { DiskMB: 150, }, }, - Job: Job(), + Job: job, DesiredStatus: structs.AllocDesiredStatusRun, ClientStatus: structs.AllocClientStatusPending, } diff --git a/nomad/plan_normalization_test.go b/nomad/plan_normalization_test.go index 9f3df7d5e8cb..1082c44e50f9 100644 --- a/nomad/plan_normalization_test.go +++ b/nomad/plan_normalization_test.go @@ -62,5 +62,5 @@ func TestPlanNormalize(t *testing.T) { } optimizedLogSize := buf.Len() - assert.Less(t, float64(optimizedLogSize)/float64(unoptimizedLogSize), 0.65) + assert.Less(t, float64(optimizedLogSize)/float64(unoptimizedLogSize), 0.66) } diff --git a/nomad/search_endpoint.go b/nomad/search_endpoint.go index 382d109f80e5..098a846a3330 100644 --- a/nomad/search_endpoint.go +++ b/nomad/search_endpoint.go @@ -2,12 +2,13 @@ package nomad import ( "fmt" + "sort" "strings" "time" - metrics "github.com/armon/go-metrics" - log "github.com/hashicorp/go-hclog" - memdb "github.com/hashicorp/go-memdb" + "github.com/armon/go-metrics" + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-memdb" "github.com/hashicorp/nomad/acl" "github.com/hashicorp/nomad/nomad/state" @@ -16,7 +17,9 @@ import ( const ( // truncateLimit is the maximum number of matches that will be returned for a - // prefix for a specific context + // prefix for a specific context. + // + // Does not apply to fuzzy searching. 
truncateLimit = 20 ) @@ -39,12 +42,12 @@ var ( // Search endpoint is used to look up matches for a given prefix and context type Search struct { srv *Server - logger log.Logger + logger hclog.Logger } -// getMatches extracts matches for an iterator, and returns a list of ids for +// getPrefixMatches extracts matches for an iterator, and returns a list of ids for // these matches. -func (s *Search) getMatches(iter memdb.ResultIterator, prefix string) ([]string, bool) { +func (s *Search) getPrefixMatches(iter memdb.ResultIterator, prefix string) ([]string, bool) { var matches []string for i := 0; i < truncateLimit; i++ { @@ -93,6 +96,263 @@ func (s *Search) getMatches(iter memdb.ResultIterator, prefix string) ([]string, return matches, iter.Next() != nil } +func (s *Search) getFuzzyMatches(iter memdb.ResultIterator, text string) (map[structs.Context][]structs.FuzzyMatch, map[structs.Context]bool) { + limitQuery := s.srv.config.SearchConfig.LimitQuery + limitResults := s.srv.config.SearchConfig.LimitResults + + unsorted := make(map[structs.Context][]fuzzyMatch) + truncations := make(map[structs.Context]bool) + + accumulateSet := func(limited bool, set map[structs.Context][]fuzzyMatch) { + for ctx, matches := range set { + for _, match := range matches { + if len(unsorted[ctx]) < limitResults { + unsorted[ctx] = append(unsorted[ctx], match) + } else { + // truncated by results limit + truncations[ctx] = true + return + } + if limited { + // truncated by query limit + truncations[ctx] = true + return + } + } + } + } + + accumulateSingle := func(limited bool, ctx structs.Context, match *fuzzyMatch) { + if match != nil { + if len(unsorted[ctx]) < limitResults { + unsorted[ctx] = append(unsorted[ctx], *match) + } else { + // truncated by results limit + truncations[ctx] = true + return + } + if limited { + // truncated by query limit + truncations[ctx] = true + return + } + } + } + + limited := func(i int, iter memdb.ResultIterator) bool { + if i == limitQuery-1 { + return 
iter.Next() != nil + } + return false + } + + for i := 0; i < limitQuery; i++ { + raw := iter.Next() + if raw == nil { + break + } + + switch t := raw.(type) { + case *structs.Job: + set := s.fuzzyMatchesJob(t, text) + accumulateSet(limited(i, iter), set) + default: + ctx, match := s.fuzzyMatchSingle(raw, text) + accumulateSingle(limited(i, iter), ctx, match) + } + } + + // sort the set of match results + for ctx := range unsorted { + sortSet(unsorted[ctx]) + } + + // create the result out of exported types + m := make(map[structs.Context][]structs.FuzzyMatch, len(unsorted)) + for ctx, matches := range unsorted { + m[ctx] = make([]structs.FuzzyMatch, 0, len(matches)) + for _, match := range matches { + m[ctx] = append(m[ctx], structs.FuzzyMatch{ + ID: match.id, + Scope: match.scope, + }) + } + } + + return m, truncations +} + +// fuzzyIndex returns the index of text in name, ignoring case. +// text is assumed to be lower case. +// -1 is returned if name does not contain text. +func fuzzyIndex(name, text string) int { + lower := strings.ToLower(name) + return strings.Index(lower, text) +} + +// fuzzyMatchSingle determines if the ID of raw is a fuzzy match with text. +// Returns the context and score or nil if there is no match. 
+func (s *Search) fuzzyMatchSingle(raw interface{}, text string) (structs.Context, *fuzzyMatch) { + var ( + name string // fuzzy searchable name + scope []string + ctx structs.Context + ) + + switch t := raw.(type) { + case *structs.Node: + name = t.Name + scope = []string{t.ID} + ctx = structs.Nodes + case *structs.Namespace: + name = t.Name + ctx = structs.Namespaces + case *structs.Allocation: + name = t.Name + scope = []string{t.Namespace, t.ID} + ctx = structs.Allocs + case *structs.CSIPlugin: + name = t.ID + ctx = structs.Plugins + } + + if idx := fuzzyIndex(name, text); idx >= 0 { + return ctx, &fuzzyMatch{ + id: name, + score: idx, + scope: scope, + } + } + + return "", nil +} + +// fuzzyMatchesJob digs through j and extracts matches against several types +// of matchable Context. Results are categorized by Context and paired with their +// score, but are unsorted. +// +// job.name +// job|group.name +// job|group|service.name +// job|group|task.name +// job|group|task|service.name +// job|group|task|driver.{image,command,class} +func (*Search) fuzzyMatchesJob(j *structs.Job, text string) map[structs.Context][]fuzzyMatch { + sm := make(map[structs.Context][]fuzzyMatch) + ns := j.Namespace + job := j.ID + + // job.name + if idx := fuzzyIndex(j.Name, text); idx >= 0 { + sm[structs.Jobs] = append(sm[structs.Jobs], score(j.Name, ns, idx, job)) + } + + // job|group.name + for _, group := range j.TaskGroups { + if idx := fuzzyIndex(group.Name, text); idx >= 0 { + sm[structs.Groups] = append(sm[structs.Groups], score(group.Name, ns, idx, job)) + } + + // job|group|service.name + for _, service := range group.Services { + if idx := fuzzyIndex(service.Name, text); idx >= 0 { + sm[structs.Services] = append(sm[structs.Services], score(service.Name, ns, idx, job, group.Name)) + } + } + + // job|group|task.name + for _, task := range group.Tasks { + if idx := fuzzyIndex(task.Name, text); idx >= 0 { + sm[structs.Tasks] = append(sm[structs.Tasks], score(task.Name, ns, 
idx, job, group.Name)) + } + + // job|group|task|service.name + for _, service := range task.Services { + if idx := fuzzyIndex(service.Name, text); idx >= 0 { + sm[structs.Services] = append(sm[structs.Services], score(service.Name, ns, idx, job, group.Name, task.Name)) + } + } + + // job|group|task|config.{image,command,class} + switch task.Driver { + case "docker": + image := getConfigParam(task.Config, "image") + if idx := fuzzyIndex(image, text); idx >= 0 { + sm[structs.Images] = append(sm[structs.Images], score(image, ns, idx, job, group.Name, task.Name)) + } + case "exec", "raw_exec": + command := getConfigParam(task.Config, "command") + if idx := fuzzyIndex(command, text); idx >= 0 { + sm[structs.Commands] = append(sm[structs.Commands], score(command, ns, idx, job, group.Name, task.Name)) + } + case "java": + class := getConfigParam(task.Config, "class") + if idx := fuzzyIndex(class, text); idx >= 0 { + sm[structs.Classes] = append(sm[structs.Classes], score(class, ns, idx, job, group.Name, task.Name)) + } + } + } + } + + return sm +} + +func getConfigParam(config map[string]interface{}, param string) string { + if config == nil || config[param] == nil { + return "" + } + + s, ok := config[param].(string) + if !ok { + return "" + } + + return s +} + +type fuzzyMatch struct { + id string + scope []string + score int +} + +func score(id, namespace string, score int, scope ...string) fuzzyMatch { + return fuzzyMatch{ + id: id, + score: score, + scope: append([]string{namespace}, scope...), + } +} + +func sortSet(matches []fuzzyMatch) { + sort.Slice(matches, func(a, b int) bool { + A, B := matches[a], matches[b] + + // sort by index + switch { + case A.score < B.score: + return true + case B.score < A.score: + return false + } + + // shorter length matched text is more likely to be the thing being + // searched for (in theory) + // + // this also causes exact matches to score best, which is desirable + idA, idB := A.id, B.id + switch { + case len(idA) < 
len(idB): + return true + case len(idB) < len(idA): + return false + } + + // same index and same length, break ties alphabetically + return idA < idB + }) +} + // getResourceIter takes a context and returns a memdb iterator specific to // that context func getResourceIter(context structs.Context, aclObj *acl.ACL, namespace, prefix string, ws memdb.WatchSet, state *state.StateStore) (memdb.ResultIterator, error) { @@ -121,17 +381,94 @@ func getResourceIter(context structs.Context, aclObj *acl.ACL, namespace, prefix if aclObj == nil { return iter, nil } - return memdb.NewFilterIterator(iter, namespaceFilter(aclObj)), nil + return memdb.NewFilterIterator(iter, nsCapFilter(aclObj)), nil default: return getEnterpriseResourceIter(context, aclObj, namespace, prefix, ws, state) } } -// namespaceFilter wraps a namespace iterator with a filter for removing -// namespaces the ACL can't access. -func namespaceFilter(aclObj *acl.ACL) memdb.FilterFunc { +// wildcard is a helper for determining if namespace is '*', used to determine +// if objects from every namespace should be considered when iterating, and that +// additional ACL checks will be necessary. 
+func wildcard(namespace string) bool { + return namespace == structs.AllNamespacesSentinel +} + +func getFuzzyResourceIterator(context structs.Context, aclObj *acl.ACL, namespace string, ws memdb.WatchSet, state *state.StateStore) (memdb.ResultIterator, error) { + switch context { + case structs.Jobs: + if wildcard(namespace) { + iter, err := state.Jobs(ws) + return nsCapIterFilter(iter, err, aclObj) + } + return state.JobsByNamespace(ws, namespace) + + case structs.Allocs: + if wildcard(namespace) { + iter, err := state.Allocs(ws) + return nsCapIterFilter(iter, err, aclObj) + } + return state.AllocsByNamespace(ws, namespace) + + case structs.Nodes: + if wildcard(namespace) { + iter, err := state.Nodes(ws) + return nsCapIterFilter(iter, err, aclObj) + } + return state.Nodes(ws) + + case structs.Plugins: + if wildcard(namespace) { + iter, err := state.CSIPlugins(ws) + return nsCapIterFilter(iter, err, aclObj) + } + return state.CSIPlugins(ws) + + case structs.Namespaces: + iter, err := state.Namespaces(ws) + return nsCapIterFilter(iter, err, aclObj) + + default: + return getEnterpriseFuzzyResourceIter(context, aclObj, namespace, ws, state) + } +} + +// nsCapIterFilter wraps an iterator with a filter for removing items that the token +// does not have permission to read (whether missing the capability or in the +// wrong namespace). +func nsCapIterFilter(iter memdb.ResultIterator, err error, aclObj *acl.ACL) (memdb.ResultIterator, error) { + if err != nil { + return nil, err + } + if aclObj == nil { + return iter, nil + } + return memdb.NewFilterIterator(iter, nsCapFilter(aclObj)), nil +} + +// nsCapFilter produces a memdb.FilterFunc for removing objects not accessible +// by aclObj during a table scan. 
+func nsCapFilter(aclObj *acl.ACL) memdb.FilterFunc { return func(v interface{}) bool { - return !aclObj.AllowNamespace(v.(*structs.Namespace).Name) + switch t := v.(type) { + case *structs.Job: + return !aclObj.AllowNsOp(t.Namespace, acl.NamespaceCapabilityReadJob) + + case *structs.Allocation: + return !aclObj.AllowNsOp(t.Namespace, acl.NamespaceCapabilityReadJob) + + case *structs.Namespace: + return !aclObj.AllowNamespace(t.Name) + + case *structs.Node: + return !aclObj.AllowNodeRead() + + case *structs.CSIPlugin: + return !aclObj.AllowPluginRead() + + default: + return false + } } } @@ -152,6 +489,30 @@ func roundUUIDDownIfOdd(prefix string, context structs.Context) string { return prefix[:len(prefix)-1] } +// silenceError determines whether err is an error we care about when getting an +// iterator from the state store - we ignore errors about invalid UUIDs, since +// we sometimes try to lookup by Name and not UUID. +func (*Search) silenceError(err error) bool { + if err == nil { + return true + } + + e := err.Error() + switch { + // Searching other contexts with job names raises an error, which in + // this case we want to ignore. + case strings.Contains(e, "Invalid UUID: encoding/hex"): + case strings.Contains(e, "UUID have 36 characters"): + case strings.Contains(e, "must be even length"): + case strings.Contains(e, "UUID should have maximum of 4"): + default: + // err was not nil and not about UUID prefix, something bad happened + return false + } + + return true +} + // PrefixSearch is used to list matches for a given prefix, and returns // matching jobs, evaluations, allocations, and/or nodes. 
func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.SearchResponse) error { @@ -168,7 +529,7 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search namespace := args.RequestNamespace() // Require either node:read or namespace:read-job - if !anySearchPerms(aclObj, namespace, args.Context) { + if !sufficientSearchPerms(aclObj, namespace, args.Context) { return structs.ErrPermissionDenied } @@ -182,21 +543,12 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search run: func(ws memdb.WatchSet, state *state.StateStore) error { iters := make(map[structs.Context]memdb.ResultIterator) - - contexts := searchContexts(aclObj, namespace, args.Context) + contexts := filteredSearchContexts(aclObj, namespace, args.Context) for _, ctx := range contexts { iter, err := getResourceIter(ctx, aclObj, namespace, roundUUIDDownIfOdd(args.Prefix, args.Context), ws, state) if err != nil { - e := err.Error() - switch { - // Searching other contexts with job names raises an error, which in - // this case we want to ignore. - case strings.Contains(e, "Invalid UUID: encoding/hex"): - case strings.Contains(e, "UUID have 36 characters"): - case strings.Contains(e, "must be even length"): - case strings.Contains(e, "UUID should have maximum of 4"): - default: + if !s.silenceError(err) { return err } } else { @@ -206,7 +558,7 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search // Return matches for the given prefix for k, v := range iters { - res, isTrunc := s.getMatches(v, args.Prefix) + res, isTrunc := s.getPrefixMatches(v, args.Prefix) reply.Matches[k] = res reply.Truncations[k] = isTrunc } @@ -229,3 +581,188 @@ func (s *Search) PrefixSearch(args *structs.SearchRequest, reply *structs.Search }} return s.srv.blockingRPC(&opts) } + +// FuzzySearch is used to list fuzzy or prefix matches for a given text argument and Context. 
+// If the Context is "all", all searchable contexts are searched. If ACLs are enabled, +// results are limited to policies of the provided ACL token. +// +// These types are limited to prefix UUID searching: +// Evals, Deployments, ScalingPolicies, Volumes +// +// These types are available for fuzzy searching: +// Nodes, Namespaces, Jobs, Allocs, Plugins +// +// Jobs are a special case that expand into multiple types, and whose return +// values include Scope which is a descending list of IDs of parent objects, +// starting with the Namespace. The subtypes of jobs are fuzzy searchable. +// +// The Jobs type expands into these sub types: +// Jobs, Groups, Services, Tasks, Images, Commands, Classes +// +// The results are in descending order starting with strongest match, per Context type. +func (s *Search) FuzzySearch(args *structs.FuzzySearchRequest, reply *structs.FuzzySearchResponse) error { + if done, err := s.srv.forward("Search.FuzzySearch", args, args, reply); done { + return err + } + defer metrics.MeasureSince([]string{"nomad", "search", "fuzzy_search"}, time.Now()) + + aclObj, err := s.srv.ResolveToken(args.AuthToken) + if err != nil { + return err + } + + namespace := args.RequestNamespace() + context := args.Context + + if !sufficientFuzzySearchPerms(aclObj, namespace, context) { + return structs.ErrPermissionDenied + } + + // check that fuzzy search API is enabled + if !s.srv.config.SearchConfig.FuzzyEnabled { + return fmt.Errorf("fuzzy search is not enabled") + } + + // check the query term meets minimum length + min := s.srv.config.SearchConfig.MinTermLength + if n := len(args.Text); n < min { + return fmt.Errorf("fuzzy search query must be at least %d characters, got %d", min, n) + } + + // for case-insensitive searching, lower-case the search term once and reuse + text := strings.ToLower(args.Text) + + // accumulate fuzzy search results and any truncations + reply.Matches = make(map[structs.Context][]structs.FuzzyMatch) + reply.Truncations = 
make(map[structs.Context]bool) + + // Setup the blocking query + opts := blockingOptions{ + queryMeta: &reply.QueryMeta, + queryOpts: new(structs.QueryOptions), + run: func(ws memdb.WatchSet, state *state.StateStore) error { + + fuzzyIters := make(map[structs.Context]memdb.ResultIterator) + prefixIters := make(map[structs.Context]memdb.ResultIterator) + + prefixContexts := filteredSearchContexts(aclObj, namespace, context) + fuzzyContexts := filteredFuzzySearchContexts(aclObj, namespace, context) + + // Gather the iterators used for prefix searching from those allowable contexts + for _, ctx := range prefixContexts { + switch ctx { + // only apply on the types that use UUID prefix searching + case structs.Evals, structs.Deployments, structs.ScalingPolicies, structs.Volumes: + iter, err := getResourceIter(ctx, aclObj, namespace, roundUUIDDownIfOdd(args.Text, args.Context), ws, state) + if err != nil { + if !s.silenceError(err) { + return err + } + } else { + prefixIters[ctx] = iter + } + } + } + + // Gather the iterators used for fuzzy searching from those allowable contexts + for _, ctx := range fuzzyContexts { + switch ctx { + // skip the types that use UUID prefix searching + case structs.Evals, structs.Deployments, structs.ScalingPolicies, structs.Volumes: + continue + default: + iter, err := getFuzzyResourceIterator(ctx, aclObj, namespace, ws, state) + if err != nil { + return err + } + fuzzyIters[ctx] = iter + } + } + + // Set prefix matches of the given text + for ctx, iter := range prefixIters { + res, isTrunc := s.getPrefixMatches(iter, args.Text) + matches := make([]structs.FuzzyMatch, 0, len(res)) + for _, result := range res { + matches = append(matches, structs.FuzzyMatch{ID: result}) + } + reply.Matches[ctx] = matches + reply.Truncations[ctx] = isTrunc + } + + // Set fuzzy matches of the given text + for iterCtx, iter := range fuzzyIters { + + // prefill truncations of iterable types so keys will exist in + // the response for negative results + 
reply.Truncations[iterCtx] = false + + matches, truncations := s.getFuzzyMatches(iter, text) + for ctx := range matches { + reply.Matches[ctx] = matches[ctx] + } + + for ctx := range truncations { + // only contains positive results + reply.Truncations[ctx] = truncations[ctx] + } + } + + // Set the index for the context. If the context has been specified, + // it will be used as the index of the response. Otherwise, the maximum + // index from all the resources will be used. + for _, ctx := range fuzzyContexts { + index, err := state.Index(contextToIndex(ctx)) + if err != nil { + return err + } + if index > reply.Index { + reply.Index = index + } + } + + s.srv.setQueryMeta(&reply.QueryMeta) + return nil + }, + } + + return s.srv.blockingRPC(&opts) +} + +// expandContext returns either allContexts if context is 'all', or a one +// element slice with context by itself. +func expandContext(context structs.Context) []structs.Context { + switch context { + case structs.All: + c := make([]structs.Context, len(allContexts)) + copy(c, allContexts) + return c + default: + return []structs.Context{context} + } +} + +// sufficientFuzzySearchPerms returns true if the searched namespace is the wildcard +// namespace, indicating we should bypass the preflight ACL checks otherwise performed +// by sufficientSearchPerms. This is to support fuzzy searching multiple namespaces +// with tokens that have permission for more than one namespace. The actual ACL +// validation will be performed while scanning objects instead, where we finally +// have a concrete namespace to work with. 
+func sufficientFuzzySearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) bool { + if wildcard(namespace) { + return true + } + return sufficientSearchPerms(aclObj, namespace, context) +} + +// filteredFuzzySearchContexts returns every context asked for if the searched namespace +// is the wildcard namespace, indicating we should bypass ACL checks otherwise +// performed by filteredSearchContexts. Instead we will rely on iterator filters to +// perform the ACL validation while scanning objects, where we have a concrete +// namespace to work with. +func filteredFuzzySearchContexts(aclObj *acl.ACL, namespace string, context structs.Context) []structs.Context { + if wildcard(namespace) { + return expandContext(context) + } + return filteredSearchContexts(aclObj, namespace, context) +} diff --git a/nomad/search_endpoint_oss.go b/nomad/search_endpoint_oss.go index 7c1b3b7a856f..a724518c296c 100644 --- a/nomad/search_endpoint_oss.go +++ b/nomad/search_endpoint_oss.go @@ -35,9 +35,17 @@ func getEnterpriseResourceIter(context structs.Context, _ *acl.ACL, namespace, p return nil, fmt.Errorf("context must be one of %v or 'all' for all contexts; got %q", allContexts, context) } -// anySearchPerms returns true if the provided ACL has access to any -// capabilities required for prefix searching. Returns true if aclObj is nil. -func anySearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) bool { +// getEnterpriseFuzzyResourceIter is used to retrieve an iterator over an enterprise +// only table. +func getEnterpriseFuzzyResourceIter(context structs.Context, _ *acl.ACL, _ string, _ memdb.WatchSet, _ *state.StateStore) (memdb.ResultIterator, error) { + return nil, fmt.Errorf("context must be one of %v or 'all' for all contexts; got %q", allContexts, context) +} + +// sufficientSearchPerms returns true if the provided ACL has access to each +// capability required for prefix searching for the given context. +// +// Returns true if aclObj is nil. 
+func sufficientSearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) bool { if aclObj == nil { return true } @@ -78,22 +86,16 @@ func anySearchPerms(aclObj *acl.ACL, namespace string, context structs.Context) return true } -// searchContexts returns the contexts the aclObj is valid for. If aclObj is -// nil all contexts are returned. -func searchContexts(aclObj *acl.ACL, namespace string, context structs.Context) []structs.Context { - var all []structs.Context - - switch context { - case structs.All: - all = make([]structs.Context, len(allContexts)) - copy(all, allContexts) - default: - all = []structs.Context{context} - } +// filteredSearchContexts returns the expanded set of contexts, filtered down +// to the subset of contexts the aclObj is valid for. +// +// If aclObj is nil, no contexts are filtered out. +func filteredSearchContexts(aclObj *acl.ACL, namespace string, context structs.Context) []structs.Context { + desired := expandContext(context) // If ACLs aren't enabled return all contexts if aclObj == nil { - return all + return desired } jobRead := aclObj.AllowNsOp(namespace, acl.NamespaceCapabilityReadJob) @@ -105,8 +107,8 @@ func searchContexts(aclObj *acl.ACL, namespace string, context structs.Context) policyRead := aclObj.AllowNsOp(namespace, acl.NamespaceCapabilityListScalingPolicies) // Filter contexts down to those the ACL grants access to - available := make([]structs.Context, 0, len(all)) - for _, c := range all { + available := make([]structs.Context, 0, len(desired)) + for _, c := range desired { switch c { case structs.Allocs, structs.Jobs, structs.Evals, structs.Deployments: if jobRead { diff --git a/nomad/search_endpoint_test.go b/nomad/search_endpoint_test.go index 94ecfe0a08eb..f4a4fa8ef1ec 100644 --- a/nomad/search_endpoint_test.go +++ b/nomad/search_endpoint_test.go @@ -1,6 +1,7 @@ package nomad import ( + "fmt" "strconv" "strings" "testing" @@ -12,26 +13,32 @@ import ( "github.com/hashicorp/nomad/nomad/state" 
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/testutil" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) const jobIndex = 1000 -func registerAndVerifyJob(s *Server, t *testing.T, prefix string, counter int) *structs.Job { +func registerMockJob(s *Server, t *testing.T, prefix string, counter int) *structs.Job { job := mock.Job() job.ID = prefix + strconv.Itoa(counter) - state := s.fsm.State() - if err := state.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job); err != nil { - t.Fatalf("err: %v", err) - } - + registerJob(s, t, job) return job } +func registerJob(s *Server, t *testing.T, job *structs.Job) { + fsmState := s.fsm.State() + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) +} + +func mockAlloc() *structs.Allocation { + a := mock.Alloc() + a.Name = fmt.Sprintf("%s.%s[%d]", a.Job.Name, "web", 0) + return a +} + func TestSearch_PrefixSearch_Job(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -41,7 +48,7 @@ func TestSearch_PrefixSearch_Job(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) req := &structs.SearchRequest{ Prefix: prefix, @@ -57,14 +64,14 @@ func TestSearch_PrefixSearch_Job(t *testing.T) { t.Fatalf("err: %v", err) } - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_ACL(t *testing.T) { t.Parallel() - assert := assert.New(t) + jobID := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, root, cleanupS := TestACLServer(t, func(c *Config) { @@ -73,10 +80,10 @@ func 
TestSearch_PrefixSearch_ACL(t *testing.T) { defer cleanupS() codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() - job := registerAndVerifyJob(s, t, jobID, 0) - assert.Nil(state.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + job := registerMockJob(s, t, jobID, 0) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) req := &structs.SearchRequest{ Prefix: "", @@ -91,92 +98,89 @@ func TestSearch_PrefixSearch_ACL(t *testing.T) { { var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with an invalid token and expect failure { - invalidToken := mock.CreatePolicyAndToken(t, state, 1003, "test-invalid", + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) req.AuthToken = invalidToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect failure due to Jobs being the context { - validToken := mock.CreatePolicyAndToken(t, state, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) req.AuthToken = validToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try 
with a node:read token and expect success due to All context { - validToken := mock.CreatePolicyAndToken(t, state, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.All req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) // Jobs filtered out since token only has access to node:read - assert.Len(resp.Matches[structs.Jobs], 0) + require.Len(t, resp.Matches[structs.Jobs], 0) } // Try with a valid token for namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1009, "test-valid2", + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob})) req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) // Index of job - not node - because node context is filtered out - assert.Equal(uint64(1000), resp.Index) + require.Equal(t, uint64(1000), resp.Index) // Nodes filtered out since token only has access to namespace:read-job - assert.Len(resp.Matches[structs.Nodes], 0) + require.Len(t, resp.Matches[structs.Nodes], 0) } // Try with a valid token for node:read and namespace:read-job { - 
validToken := mock.CreatePolicyAndToken(t, state, 1011, "test-valid3", strings.Join([]string{ + validToken := mock.CreatePolicyAndToken(t, fsmState, 1011, "test-valid3", strings.Join([]string{ mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob}), mock.NodePolicy(acl.PolicyRead), }, "\n")) req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Equal(uint64(1001), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, uint64(1001), resp.Index) } // Try with a management token { req.AuthToken = root.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) } } func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "example-test-------" // Assert that a job with more than 4 hyphens works s, cleanupS := TestServer(t, func(c *Config) { @@ -187,19 +191,15 @@ func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { testutil.WaitForLeader(t, s.RPC) // Register a job and an allocation - 
job := registerAndVerifyJob(s, t, prefix, 0) - alloc := mock.Alloc() + job := registerMockJob(s, t, prefix, 0) + alloc := mockAlloc() alloc.JobID = job.ID alloc.Namespace = job.Namespace summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) req := &structs.SearchRequest{ Context: structs.All, @@ -213,16 +213,16 @@ func TestSearch_PrefixSearch_All_JobWithHyphen(t *testing.T) { for i := 1; i < len(prefix); i++ { req.Prefix = prefix[:i] var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.EqualValues(jobIndex, resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, 1, len(resp.Matches[structs.Jobs])) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.EqualValues(t, jobIndex, resp.Index) } } func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := strings.Repeat("a", 100) s, cleanupS := TestServer(t, func(c *Config) { @@ -233,18 +233,14 @@ func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { testutil.WaitForLeader(t, s.RPC) // Register a job and an allocation - job := registerAndVerifyJob(s, t, prefix, 0) - alloc := mock.Alloc() + job := registerMockJob(s, t, prefix, 0) + alloc := mockAlloc() alloc.JobID = job.ID summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := 
state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) req := &structs.SearchRequest{ Prefix: prefix, @@ -256,19 +252,17 @@ func TestSearch_PrefixSearch_All_LongJob(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.EqualValues(jobIndex, resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.EqualValues(t, jobIndex, resp.Index) } // truncate should limit results to 20 func TestSearch_PrefixSearch_Truncate(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -278,9 +272,8 @@ func TestSearch_PrefixSearch_Truncate(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - var job *structs.Job for counter := 0; counter < 25; counter++ { - job = registerAndVerifyJob(s, t, prefix, counter) + registerMockJob(s, t, prefix, counter) } req := &structs.SearchRequest{ @@ -288,23 +281,21 @@ func TestSearch_PrefixSearch_Truncate(t *testing.T) { Context: structs.Jobs, QueryOptions: structs.QueryOptions{ Region: "global", - Namespace: job.Namespace, + Namespace: "default", }, } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + 
require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(20, len(resp.Matches[structs.Jobs])) - assert.Equal(resp.Truncations[structs.Jobs], true) - assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 20) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -315,11 +306,10 @@ func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) - + job := registerMockJob(s, t, prefix, 0) eval1 := mock.Eval() eval1.ID = job.ID - s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1}) + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) req := &structs.SearchRequest{ Prefix: prefix, @@ -331,20 +321,16 @@ func TestSearch_PrefixSearch_AllWithJob(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - - assert.Equal(1, len(resp.Matches[structs.Evals])) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) } func TestSearch_PrefixSearch_Evals(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { 
c.NumSchedulers = 0 @@ -354,7 +340,7 @@ func TestSearch_PrefixSearch_Evals(t *testing.T) { testutil.WaitForLeader(t, s.RPC) eval1 := mock.Eval() - s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1}) + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) prefix := eval1.ID[:len(eval1.ID)-2] @@ -368,20 +354,16 @@ func TestSearch_PrefixSearch_Evals(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Evals])) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) - assert.Equal(resp.Truncations[structs.Evals], false) - - assert.Equal(uint64(2000), resp.Index) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) + require.False(t, resp.Truncations[structs.Evals]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_Allocation(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -390,16 +372,12 @@ func TestSearch_PrefixSearch_Allocation(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - alloc := mock.Alloc() + alloc := mockAlloc() summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc})) prefix := alloc.ID[:len(alloc.ID)-2] @@ -413,20 
+391,16 @@ func TestSearch_PrefixSearch_Allocation(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Allocs])) - assert.Equal(alloc.ID, resp.Matches[structs.Allocs][0]) - assert.Equal(resp.Truncations[structs.Allocs], false) - - assert.Equal(uint64(90), resp.Index) + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.ID, resp.Matches[structs.Allocs][0]) + require.False(t, resp.Truncations[structs.Allocs]) + require.Equal(t, uint64(90), resp.Index) } func TestSearch_PrefixSearch_All_UUID(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -435,27 +409,19 @@ func TestSearch_PrefixSearch_All_UUID(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - alloc := mock.Alloc() + alloc := mockAlloc() summary := mock.JobSummary(alloc.JobID) - state := s.fsm.State() + fsmState := s.fsm.State() - if err := state.UpsertJobSummary(999, summary); err != nil { - t.Fatalf("err: %v", err) - } - if err := state.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 1000, []*structs.Allocation{alloc})) node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 1001, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, node)) eval1 := mock.Eval() eval1.ID = node.ID - if err := state.UpsertEvals(structs.MsgTypeTestSetup, 1002, []*structs.Evaluation{eval1}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, 
fsmState.UpsertEvals(structs.MsgTypeTestSetup, 1002, []*structs.Evaluation{eval1})) req := &structs.SearchRequest{ Context: structs.All, @@ -468,17 +434,16 @@ func TestSearch_PrefixSearch_All_UUID(t *testing.T) { for i := 1; i < len(alloc.ID); i++ { req.Prefix = alloc.ID[:i] var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Allocs])) - assert.Equal(alloc.ID, resp.Matches[structs.Allocs][0]) - assert.Equal(resp.Truncations[structs.Allocs], false) - assert.EqualValues(1002, resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.ID, resp.Matches[structs.Allocs][0]) + require.False(t, resp.Truncations[structs.Allocs]) + require.EqualValues(t, 1002, resp.Index) } } func TestSearch_PrefixSearch_Node(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -487,12 +452,10 @@ func TestSearch_PrefixSearch_Node(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 100, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) prefix := node.ID[:len(node.ID)-2] @@ -510,16 +473,14 @@ func TestSearch_PrefixSearch_Node(t *testing.T) { t.Fatalf("err: %v", err) } - assert.Equal(1, len(resp.Matches[structs.Nodes])) - assert.Equal(node.ID, resp.Matches[structs.Nodes][0]) - assert.Equal(false, resp.Truncations[structs.Nodes]) - - assert.Equal(uint64(100), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, node.ID, resp.Matches[structs.Nodes][0]) + require.False(t, resp.Truncations[structs.Nodes]) + require.Equal(t, uint64(100), resp.Index) } func 
TestSearch_PrefixSearch_Deployment(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -529,7 +490,7 @@ func TestSearch_PrefixSearch_Deployment(t *testing.T) { testutil.WaitForLeader(t, s.RPC) deployment := mock.Deployment() - s.fsm.State().UpsertDeployment(2000, deployment) + require.NoError(t, s.fsm.State().UpsertDeployment(2000, deployment)) prefix := deployment.ID[:len(deployment.ID)-2] @@ -543,41 +504,31 @@ func TestSearch_PrefixSearch_Deployment(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Deployments])) - assert.Equal(deployment.ID, resp.Matches[structs.Deployments][0]) - assert.Equal(resp.Truncations[structs.Deployments], false) - - assert.Equal(uint64(2000), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Deployments], 1) + require.Equal(t, deployment.ID, resp.Matches[structs.Deployments][0]) + require.False(t, resp.Truncations[structs.Deployments]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_AllContext(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 }) - defer cleanupS() codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() node := mock.Node() - if err := state.UpsertNode(structs.MsgTypeTestSetup, 100, node); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) eval1 := mock.Eval() eval1.ID = node.ID - if err := state.UpsertEvals(structs.MsgTypeTestSetup, 1000, []*structs.Evaluation{eval1}); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, 
fsmState.UpsertEvals(structs.MsgTypeTestSetup, 1000, []*structs.Evaluation{eval1})) prefix := node.ID[:len(node.ID)-2] @@ -591,23 +542,19 @@ func TestSearch_PrefixSearch_AllContext(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Nodes])) - assert.Equal(1, len(resp.Matches[structs.Evals])) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(node.ID, resp.Matches[structs.Nodes][0]) - assert.Equal(eval1.ID, resp.Matches[structs.Evals][0]) - - assert.Equal(uint64(1000), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, node.ID, resp.Matches[structs.Nodes][0]) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0]) + require.Equal(t, uint64(1000), resp.Index) } // Tests that the top 20 matches are returned when no prefix is set func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -617,7 +564,7 @@ func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, prefix, 0) + job := registerMockJob(s, t, prefix, 0) req := &structs.SearchRequest{ Prefix: "", @@ -629,20 +576,17 @@ func TestSearch_PrefixSearch_NoPrefix(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - assert.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, 
resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) } // Tests that the zero matches are returned when a prefix has no matching // results func TestSearch_PrefixSearch_NoMatches(t *testing.T) { t.Parallel() - assert := assert.New(t) + prefix := "aaaaaaaa-e8f7-fd38-c855-ab94ceb8970" s, cleanupS := TestServer(t, func(c *Config) { @@ -662,19 +606,16 @@ func TestSearch_PrefixSearch_NoMatches(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(0, len(resp.Matches[structs.Jobs])) - assert.Equal(uint64(0), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Empty(t, resp.Matches[structs.Jobs]) + require.Equal(t, uint64(0), resp.Index) } // Prefixes can only be looked up if their length is a power of two. For // prefixes which are an odd length, use the length-1 characters. 
func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { t.Parallel() - assert := assert.New(t) + id1 := "aaafaaaa-e8f7-fd38-c855-ab94ceb89" id2 := "aaafeaaa-e8f7-fd38-c855-ab94ceb89" prefix := "aaafa" @@ -686,8 +627,8 @@ func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - job := registerAndVerifyJob(s, t, id1, 0) - registerAndVerifyJob(s, t, id2, 50) + job := registerMockJob(s, t, id1, 0) + registerMockJob(s, t, id2, 50) req := &structs.SearchRequest{ Prefix: prefix, @@ -699,17 +640,14 @@ func TestSearch_PrefixSearch_RoundDownToEven(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) } func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { t.Parallel() - assert := assert.New(t) + jobName := "exampleexample" s1, cleanupS1 := TestServer(t, func(c *Config) { @@ -727,7 +665,7 @@ func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { TestJoin(t, s1, s2) testutil.WaitForLeader(t, s1.RPC) - job := registerAndVerifyJob(s1, t, jobName, 0) + job := registerMockJob(s1, t, jobName, 0) req := &structs.SearchRequest{ Prefix: "", @@ -741,18 +679,15 @@ func TestSearch_PrefixSearch_MultiRegion(t *testing.T) { codec := rpcClient(t, s2) var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Jobs])) - assert.Equal(job.ID, resp.Matches[structs.Jobs][0]) - 
assert.Equal(uint64(jobIndex), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job.ID, resp.Matches[structs.Jobs][0]) + require.Equal(t, uint64(jobIndex), resp.Index) } func TestSearch_PrefixSearch_CSIPlugin(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -775,18 +710,15 @@ func TestSearch_PrefixSearch_CSIPlugin(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Plugins])) - assert.Equal(id, resp.Matches[structs.Plugins][0]) - assert.Equal(resp.Truncations[structs.Plugins], false) + require.Len(t, resp.Matches[structs.Plugins], 1) + require.Equal(t, id, resp.Matches[structs.Plugins][0]) + require.False(t, resp.Truncations[structs.Plugins]) } func TestSearch_PrefixSearch_CSIVolume(t *testing.T) { t.Parallel() - assert := assert.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -815,28 +747,25 @@ func TestSearch_PrefixSearch_CSIVolume(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(1, len(resp.Matches[structs.Volumes])) - assert.Equal(id, resp.Matches[structs.Volumes][0]) - assert.Equal(resp.Truncations[structs.Volumes], false) + require.Len(t, resp.Matches[structs.Volumes], 1) + require.Equal(t, id, resp.Matches[structs.Volumes][0]) + require.False(t, resp.Truncations[structs.Volumes]) } func TestSearch_PrefixSearch_Namespace(t *testing.T) { - assert := assert.New(t) t.Parallel() + s, cleanup := TestServer(t, func(c *Config) { c.NumSchedulers = 0 }) - defer 
cleanup() codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) ns := mock.Namespace() - assert.Nil(s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) + require.NoError(t, s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) prefix := ns.Name[:len(ns.Name)-2] @@ -849,20 +778,17 @@ func TestSearch_PrefixSearch_Namespace(t *testing.T) { } var resp structs.SearchResponse - if err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp); err != nil { - t.Fatalf("err: %v", err) - } - - assert.Equal(1, len(resp.Matches[structs.Namespaces])) - assert.Equal(ns.Name, resp.Matches[structs.Namespaces][0]) - assert.Equal(resp.Truncations[structs.Namespaces], false) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(2000), resp.Index) + require.Len(t, resp.Matches[structs.Namespaces], 1) + require.Equal(t, ns.Name, resp.Matches[structs.Namespaces][0]) + require.False(t, resp.Truncations[structs.Namespaces]) + require.Equal(t, uint64(2000), resp.Index) } func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { t.Parallel() - assert := assert.New(t) + s, root, cleanup := TestACLServer(t, func(c *Config) { c.NumSchedulers = 0 }) @@ -870,19 +796,19 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { codec := rpcClient(t, s) testutil.WaitForLeader(t, s.RPC) - state := s.fsm.State() + fsmState := s.fsm.State() ns := mock.Namespace() - assert.Nil(state.UpsertNamespaces(500, []*structs.Namespace{ns})) + require.NoError(t, fsmState.UpsertNamespaces(500, []*structs.Namespace{ns})) job1 := mock.Job() - assert.Nil(state.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) job2 := mock.Job() job2.Namespace = ns.Name - assert.Nil(state.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) - 
assert.Nil(state.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) req := &structs.SearchRequest{ Prefix: "", @@ -897,69 +823,66 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { { var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with an invalid token and expect failure { - invalidToken := mock.CreatePolicyAndToken(t, state, 1003, "test-invalid", + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) req.AuthToken = invalidToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect failure due to Namespaces being the context { - validToken := mock.CreatePolicyAndToken(t, state, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.Namespaces req.AuthToken = validToken.SecretID var resp structs.SearchResponse err := msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp) - assert.NotNil(err) - assert.Equal(err.Error(), structs.ErrPermissionDenied.Error()) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) } // Try with a node:read token and expect success due to All context { - validToken := mock.CreatePolicyAndToken(t, state, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + validToken := mock.CreatePolicyAndToken(t, 
fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) req.Context = structs.All req.AuthToken = validToken.SecretID var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Nodes], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) // Jobs filtered out since token only has access to node:read - assert.Len(resp.Matches[structs.Jobs], 0) + require.Len(t, resp.Matches[structs.Jobs], 0) } // Try with a valid token for non-default namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, state, 1009, "test-valid2", + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob})) req.Context = structs.All req.AuthToken = validToken.SecretID req.Namespace = job2.Namespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job2.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Namespaces], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job2.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Namespaces], 1) // Index of job - not node - because node context is filtered out - assert.Equal(uint64(504), resp.Index) + require.Equal(t, uint64(504), resp.Index) // Nodes filtered out since token only has access to namespace:read-job - assert.Len(resp.Matches[structs.Nodes], 0) + require.Len(t, resp.Matches[structs.Nodes], 0) } // Try with a valid token for node:read and default namespace:read-job { - validToken := mock.CreatePolicyAndToken(t, 
state, 1011, "test-valid3", strings.Join([]string{ + validToken := mock.CreatePolicyAndToken(t, fsmState, 1011, "test-valid3", strings.Join([]string{ mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob}), mock.NodePolicy(acl.PolicyRead), }, "\n")) @@ -967,12 +890,12 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { req.AuthToken = validToken.SecretID req.Namespace = structs.DefaultNamespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job1.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Namespaces], 1) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Namespaces], 1) } // Try with a management token @@ -981,18 +904,17 @@ func TestSearch_PrefixSearch_Namespace_ACL(t *testing.T) { req.AuthToken = root.SecretID req.Namespace = structs.DefaultNamespace var resp structs.SearchResponse - assert.Nil(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - assert.Equal(uint64(1001), resp.Index) - assert.Len(resp.Matches[structs.Jobs], 1) - assert.Equal(job1.ID, resp.Matches[structs.Jobs][0]) - assert.Len(resp.Matches[structs.Nodes], 1) - assert.Len(resp.Matches[structs.Namespaces], 2) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.ID, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, 
resp.Matches[structs.Namespaces], 2) } } func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { t.Parallel() - require := require.New(t) s, cleanupS := TestServer(t, func(c *Config) { c.NumSchedulers = 0 @@ -1003,9 +925,9 @@ func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { job, policy := mock.JobWithScalingPolicy() prefix := policy.ID - state := s.fsm.State() + fsmState := s.fsm.State() - require.NoError(state.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) req := &structs.SearchRequest{ Prefix: prefix, @@ -1017,14 +939,1101 @@ func TestSearch_PrefixSearch_ScalingPolicy(t *testing.T) { } var resp structs.SearchResponse - require.NoError(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - require.Equal(1, len(resp.Matches[structs.ScalingPolicies])) - require.Equal(policy.ID, resp.Matches[structs.ScalingPolicies][0]) - require.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0]) + require.Equal(t, uint64(jobIndex), resp.Index) req.Context = structs.All - require.NoError(msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) - require.Equal(1, len(resp.Matches[structs.ScalingPolicies])) - require.Equal(policy.ID, resp.Matches[structs.ScalingPolicies][0]) - require.Equal(uint64(jobIndex), resp.Index) + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.PrefixSearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_ACL(t *testing.T) { + t.Parallel() + + s, root, cleanupS := TestACLServer(t, func(c *Config) { + c.NumSchedulers = 0 + 
c.SearchConfig.MinTermLength = 1 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + registerJob(s, t, job) + + node := mock.Node() + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, node)) + + req := &structs.FuzzySearchRequest{ + Text: "set-this-in-test", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace}, + } + + // Try without a token and expect failure + { + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with an invalid token and expect failure + { + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) + req.AuthToken = invalidToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect failure due to Jobs being the context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect success due to All context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + req.Context = structs.All + req.AuthToken = validToken.SecretID + req.Text = "oo" // mock node ID is foobar + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, 
&resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + + // Jobs filtered out since token only has access to node:read + require.Len(t, resp.Matches[structs.Jobs], 0) + } + + // Try with a valid token for namespace:read-job + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityReadJob})) + req.AuthToken = validToken.SecretID + req.Text = "jo" // mock job Name is my-job + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: "my-job", + Scope: []string{"default", job.ID}, + }, resp.Matches[structs.Jobs][0]) + + // Index of job - not node - because node context is filtered out + require.Equal(t, uint64(1000), resp.Index) + + // Nodes filtered out since token only has access to namespace:read-job + require.Len(t, resp.Matches[structs.Nodes], 0) + } + + // Try with a management token + { + req.AuthToken = root.SecretID + var resp structs.FuzzySearchResponse + req.Text = "o" // matches Job:my-job and Node:foobar + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: job.Name, Scope: []string{"default", job.ID}, + }, resp.Matches[structs.Jobs][0]) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, structs.FuzzyMatch{ + ID: "foobar", + Scope: []string{node.ID}, + }, resp.Matches[structs.Nodes][0]) + } +} + +func TestSearch_FuzzySearch_NotEnabled(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.FuzzyEnabled = false + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) 
+ fsmState := s.fsm.State()
+
+ job := mock.Job()
+ registerJob(s, t, job)
+
+ require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node()))
+
+ req := &structs.FuzzySearchRequest{
+ Text: "foo", // text is irrelevant here; fuzzy search is disabled (FuzzyEnabled = false)
+ Context: structs.Jobs,
+ QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace},
+ }
+
+ var resp structs.FuzzySearchResponse
+ require.EqualError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp),
+ "fuzzy search is not enabled")
+}
+
+func TestSearch_FuzzySearch_ShortText(t *testing.T) {
+ t.Parallel()
+
+ s, cleanupS := TestServer(t, func(c *Config) {
+ c.NumSchedulers = 0
+ c.SearchConfig.MinTermLength = 5
+ })
+ defer cleanupS()
+ codec := rpcClient(t, s)
+ testutil.WaitForLeader(t, s.RPC)
+ fsmState := s.fsm.State()
+
+ job := mock.Job()
+ registerJob(s, t, job)
+
+ require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node()))
+
+ req := &structs.FuzzySearchRequest{
+ Text: "foo", // min set to 5
+ Context: structs.Jobs,
+ QueryOptions: structs.QueryOptions{Region: "global", Namespace: job.Namespace},
+ }
+
+ var resp structs.FuzzySearchResponse
+ require.EqualError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp),
+ "fuzzy search query must be at least 5 characters, got 3")
+}
+
+func TestSearch_FuzzySearch_TruncateLimitQuery(t *testing.T) {
+ t.Parallel()
+
+ s, cleanupS := TestServer(t, func(c *Config) {
+ c.NumSchedulers = 0
+ })
+ defer cleanupS()
+ codec := rpcClient(t, s)
+ testutil.WaitForLeader(t, s.RPC)
+ fsmState := s.fsm.State()
+
+ require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node()))
+
+ req := &structs.FuzzySearchRequest{
+ Text: "job",
+ Context: structs.Jobs,
+ QueryOptions: structs.QueryOptions{Region: "global", Namespace: "default"},
+ }
+
+ for i := 0; i < 25; i++ {
+ job := mock.Job()
+ job.Name = fmt.Sprintf("my-job-%d", i)
+ registerJob(s, t, job)
+ }
+
+ var resp 
structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Jobs], 20) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_TruncateLimitResults(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + c.SearchConfig.LimitQuery = 10000 + c.SearchConfig.LimitResults = 5 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, mock.Node())) + + req := &structs.FuzzySearchRequest{ + Text: "job", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{Region: "global", Namespace: "default"}, + } + + for i := 0; i < 25; i++ { + job := mock.Job() + job.Name = fmt.Sprintf("my-job-%d", i) + registerJob(s, t, job) + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Jobs], 5) + require.True(t, resp.Truncations[structs.Jobs]) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_Evals(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + eval1 := mock.Eval() + eval1.ID = "f7dee5a1-d2b0-2f6a-2e75-6c8e467a4b99" + require.NoError(t, s.fsm.State().UpsertEvals(structs.MsgTypeTestSetup, 2000, []*structs.Evaluation{eval1})) + + req := &structs.FuzzySearchRequest{ + Text: "f7dee", // evals are prefix searched + Context: structs.Evals, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: eval1.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", 
req, &resp)) + + require.Len(t, resp.Matches[structs.Evals], 1) + require.Equal(t, eval1.ID, resp.Matches[structs.Evals][0].ID) + require.False(t, resp.Truncations[structs.Evals]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_Allocation(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + alloc := mockAlloc() + summary := mock.JobSummary(alloc.JobID) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertJobSummary(999, summary)) + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, 90, []*structs.Allocation{alloc})) + + req := &structs.FuzzySearchRequest{ + Text: "web", + Context: structs.Allocs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: alloc.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, alloc.Name, resp.Matches[structs.Allocs][0].ID) + require.False(t, resp.Truncations[structs.Allocs]) + require.Equal(t, uint64(90), resp.Index) +} + +func TestSearch_FuzzySearch_Node(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + fsmState := s.fsm.State() + node := mock.Node() + + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 100, node)) + + req := &structs.FuzzySearchRequest{ + Text: "oo", + Context: structs.Nodes, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: structs.DefaultNamespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, 
node.Name, resp.Matches[structs.Nodes][0].ID) + require.False(t, resp.Truncations[structs.Nodes]) + require.Equal(t, uint64(100), resp.Index) +} + +func TestSearch_FuzzySearch_Deployment(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + deployment := mock.Deployment() + require.NoError(t, s.fsm.State().UpsertDeployment(2000, deployment)) + + req := &structs.FuzzySearchRequest{ + Text: deployment.ID[0:3], // deployments are prefix searched + Context: structs.Deployments, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: deployment.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Deployments], 1) + require.Equal(t, deployment.ID, resp.Matches[structs.Deployments][0].ID) + require.False(t, resp.Truncations[structs.Deployments]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_CSIPlugin(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + state.CreateTestCSIPlugin(s.fsm.State(), "my-plugin") + + req := &structs.FuzzySearchRequest{ + Text: "lug", + Context: structs.Plugins, + QueryOptions: structs.QueryOptions{ + Region: "global", + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Plugins], 1) + require.Equal(t, "my-plugin", resp.Matches[structs.Plugins][0].ID) + require.False(t, resp.Truncations[structs.Plugins]) +} + +func TestSearch_FuzzySearch_CSIVolume(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer 
cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + id := uuid.Generate() + err := s.fsm.State().CSIVolumeRegister(1000, []*structs.CSIVolume{{ + ID: id, + Namespace: structs.DefaultNamespace, + PluginID: "glade", + }}) + require.NoError(t, err) + + req := &structs.FuzzySearchRequest{ + Text: id[0:3], // volumes are prefix searched + Context: structs.Volumes, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: structs.DefaultNamespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Volumes], 1) + require.Equal(t, id, resp.Matches[structs.Volumes][0].ID) + require.False(t, resp.Truncations[structs.Volumes]) +} + +func TestSearch_FuzzySearch_Namespace(t *testing.T) { + t.Parallel() + + s, cleanup := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanup() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + ns := mock.Namespace() + require.NoError(t, s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) + + req := &structs.FuzzySearchRequest{ + Text: "am", // mock is team- + Context: structs.Namespaces, + QueryOptions: structs.QueryOptions{ + Region: "global", + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Namespaces], 1) + require.Equal(t, ns.Name, resp.Matches[structs.Namespaces][0].ID) + require.False(t, resp.Truncations[structs.Namespaces]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_Namespace_caseInsensitive(t *testing.T) { + t.Parallel() + + s, cleanup := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanup() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + ns := mock.Namespace() + ns.Name = "TheFooNamespace" + require.NoError(t, 
s.fsm.State().UpsertNamespaces(2000, []*structs.Namespace{ns})) + + req := &structs.FuzzySearchRequest{ + Text: "foon", + Context: structs.Namespaces, + QueryOptions: structs.QueryOptions{ + Region: "global", + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Namespaces], 1) + require.Equal(t, ns.Name, resp.Matches[structs.Namespaces][0].ID) + require.False(t, resp.Truncations[structs.Namespaces]) + require.Equal(t, uint64(2000), resp.Index) +} + +func TestSearch_FuzzySearch_ScalingPolicy(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + + job, policy := mock.JobWithScalingPolicy() + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, jobIndex, job)) + + req := &structs.FuzzySearchRequest{ + Text: policy.ID[0:3], // scaling policies are prefix searched + Context: structs.ScalingPolicies, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job.Namespace, + }, + } + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0].ID) + require.Equal(t, uint64(jobIndex), resp.Index) + + req.Context = structs.All + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.ScalingPolicies], 1) + require.Equal(t, policy.ID, resp.Matches[structs.ScalingPolicies][0].ID) + require.Equal(t, uint64(jobIndex), resp.Index) +} + +func TestSearch_FuzzySearch_Namespace_ACL(t *testing.T) { + t.Parallel() + + s, root, cleanup := TestACLServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer 
cleanup() + + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + ns := mock.Namespace() + ns.Name = "team-job-app" + require.NoError(t, fsmState.UpsertNamespaces(500, []*structs.Namespace{ns})) + + job1 := mock.Job() + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 502, job1)) + + job2 := mock.Job() + job2.Namespace = ns.Name + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, 504, job2)) + + node := mock.Node() + node.Name = "run-jobs" + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1001, node)) + + req := &structs.FuzzySearchRequest{ + Text: "set-text-in-test", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job1.Namespace, + }, + } + + // Try without a token and expect failure + { + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with an invalid token and expect failure + { + invalidToken := mock.CreatePolicyAndToken(t, fsmState, 1003, "test-invalid", + mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs})) + req.AuthToken = invalidToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect failure due to Namespaces being the context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1005, "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + req.Context = structs.Namespaces + req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + } + + // Try with a node:read token and expect success due to All 
context + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1007, "test-valid", mock.NodePolicy(acl.PolicyRead)) + req.Text = "job" + req.Context = structs.All + req.AuthToken = validToken.SecretID + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Nodes], 1) + + // Jobs filtered out since token only has access to node:read + require.Len(t, resp.Matches[structs.Jobs], 0) + } + + // Try with a valid token for non-default namespace:read-job + { + validToken := mock.CreatePolicyAndToken(t, fsmState, 1009, "test-valid2", + mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob})) + req.Text = "job" + req.Context = structs.All + req.AuthToken = validToken.SecretID + req.Namespace = job2.Namespace + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job2.Name, resp.Matches[structs.Jobs][0].ID) + + // Index of job - not node - because node context is filtered out + require.Equal(t, uint64(504), resp.Index) + + // Nodes filtered out since token only has access to namespace:read-job + require.Len(t, resp.Matches[structs.Nodes], 0) + } + + // Try with a management token + { + req.Text = "job" + req.Context = structs.All + req.AuthToken = root.SecretID + req.Namespace = job1.Namespace + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Equal(t, uint64(1001), resp.Index) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job1.Name, resp.Matches[structs.Jobs][0].ID) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Len(t, resp.Matches[structs.Namespaces], 1) // matches "team-job-app" + } +} + +func TestSearch_FuzzySearch_MultiNamespace_ACL(t 
*testing.T) { + t.Parallel() + + s, root, cleanupS := TestACLServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + require.NoError(t, fsmState.UpsertNamespaces(500, []*structs.Namespace{{ + Name: "teamA", + Description: "first namespace", + CreateIndex: 100, + ModifyIndex: 200, + }, { + Name: "teamB", + Description: "second namespace", + CreateIndex: 101, + ModifyIndex: 201, + }, { + Name: "teamC", + Description: "third namespace", + CreateIndex: 102, + ModifyIndex: 202, + }})) + + // Closure to simplify fsm indexing + index := uint64(1000) + inc := func() uint64 { + index++ + return index + } + + // Upsert 3 jobs each in separate namespace + job1 := mock.Job() + job1.Name = "teamA-job1" + job1.ID = "job1" + job1.Namespace = "teamA" + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, inc(), job1)) + + job2 := mock.Job() + job2.Name = "teamB-job2" + job2.ID = "job2" + job2.Namespace = "teamB" + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, inc(), job2)) + + job3 := mock.Job() + job3.Name = "teamC-job3" + job3.ID = "job3" + job3.Namespace = "teamC" + require.NoError(t, fsmState.UpsertJob(structs.MsgTypeTestSetup, inc(), job3)) + + // Upsert a node + node := mock.Node() + node.Name = "node-for-teams" + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, inc(), node)) + + // Upsert a node that will not be matched + node2 := mock.Node() + node2.Name = "node-for-ops" + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, inc(), node2)) + + // Create parameterized requests + request := func(text, namespace, token string, context structs.Context) *structs.FuzzySearchRequest { + return &structs.FuzzySearchRequest{ + Text: text, + Context: context, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: namespace, + AuthToken: token, + }, + } + } + + t.Run("without a token expect failure", 
func(t *testing.T) { + var resp structs.FuzzySearchResponse + req := request("anything", job1.Namespace, "", structs.Jobs) + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + }) + + t.Run("with an invalid token expect failure", func(t *testing.T) { + policy := mock.NamespacePolicy(structs.DefaultNamespace, "", []string{acl.NamespaceCapabilityListJobs}) + invalidToken := mock.CreatePolicyAndToken(t, fsmState, inc(), "test-invalid", policy) + req := request("anything", job1.Namespace, invalidToken.SecretID, structs.Jobs) + + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + }) + + t.Run("with node:read token search namespaces expect failure", func(t *testing.T) { + validToken := mock.CreatePolicyAndToken(t, fsmState, inc(), "test-invalid2", mock.NodePolicy(acl.PolicyRead)) + req := request("team", job1.Namespace, validToken.SecretID, structs.Namespaces) + + var resp structs.FuzzySearchResponse + err := msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp) + require.EqualError(t, err, structs.ErrPermissionDenied.Error()) + }) + + t.Run("with node:read token search all expect success", func(t *testing.T) { + validToken := mock.CreatePolicyAndToken(t, fsmState, inc(), "test-valid", mock.NodePolicy(acl.PolicyRead)) + req := request("team", job1.Namespace, validToken.SecretID, structs.All) + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + // One matching node + require.Len(t, resp.Matches[structs.Nodes], 1) + + // Jobs filtered out since token only has access to node:read + require.Len(t, resp.Matches[structs.Jobs], 0) + }) + + t.Run("with a teamB/job:read token search all expect 1 job", func(t *testing.T) { + policy := mock.NamespacePolicy(job2.Namespace, "", 
[]string{acl.NamespaceCapabilityReadJob}) + token := mock.CreatePolicyAndToken(t, fsmState, inc(), "test-valid2", policy) + req := request("team", job2.Namespace, token.SecretID, structs.All) + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 1) + require.Equal(t, job2.Name, resp.Matches[structs.Jobs][0].ID) + + // Nodes filtered out since token only has access to namespace:read-job + require.Len(t, resp.Matches[structs.Nodes], 0) + }) + + // Using a token that can read jobs in 2 namespaces, we should get job results from + // both those namespaces (using wildcard namespace in the query) but not the + // third (and from no other contexts). + t.Run("with a multi-ns job:read token search all expect 2 jobs", func(t *testing.T) { + policyB := mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob}) + mock.CreatePolicy(t, fsmState, inc(), "policyB", policyB) + + policyC := mock.NamespacePolicy(job3.Namespace, "", []string{acl.NamespaceCapabilityReadJob}) + mock.CreatePolicy(t, fsmState, inc(), "policyC", policyC) + + token := mock.CreateToken(t, fsmState, inc(), []string{"policyB", "policyC"}) + req := request("team", structs.AllNamespacesSentinel, token.SecretID, structs.Jobs) + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 2) + require.Equal(t, job2.Name, resp.Matches[structs.Jobs][0].ID) + require.Equal(t, job3.Name, resp.Matches[structs.Jobs][1].ID) + }) + + // Using a management token, we should get job results from all three namespaces + // (using wildcard namespace in the query). 
+ t.Run("with a management token search all expect 3 jobs", func(t *testing.T) { + req := request("team", structs.AllNamespacesSentinel, root.SecretID, structs.Jobs) + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Jobs], 3) + require.Equal(t, job1.Name, resp.Matches[structs.Jobs][0].ID) + require.Equal(t, job2.Name, resp.Matches[structs.Jobs][1].ID) + require.Equal(t, job3.Name, resp.Matches[structs.Jobs][2].ID) + }) + + // Using a token that can read nodes, we should get our 1 matching node when + // searching the nodes context. + t.Run("with node:read token read nodes", func(t *testing.T) { + policy := mock.NodePolicy("read") + mock.CreatePolicy(t, fsmState, inc(), "node-read-policy", policy) + + token := mock.CreateToken(t, fsmState, inc(), []string{"node-read-policy"}) + req := request("team", structs.AllNamespacesSentinel, token.SecretID, structs.Nodes) + + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Nodes], 1) + require.Equal(t, "node-for-teams", resp.Matches[structs.Nodes][0].ID) + }) + + // Using a token that cannot read nodes, we should get no matching nodes when + // searching the nodes context. 
+ t.Run("with an agent:read token read nodes", func(t *testing.T) {
+ policy := mock.AgentPolicy("read")
+ mock.CreatePolicy(t, fsmState, inc(), "agent-read-policy", policy)
+
+ token := mock.CreateToken(t, fsmState, inc(), []string{"agent-read-policy"})
+ req := request("team", structs.AllNamespacesSentinel, token.SecretID, structs.Nodes)
+ var resp structs.FuzzySearchResponse
+ require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp))
+ require.Empty(t, resp.Matches[structs.Nodes])
+ })
+
+ // Using a token that has job:read in only one namespace, a wildcard-namespace
+ // query should return only the one alloc the token can access.
+ t.Run("with job:read token read allocs", func(t *testing.T) {
+ policyD := mock.NamespacePolicy(job2.Namespace, "", []string{acl.NamespaceCapabilityReadJob})
+ mock.CreatePolicy(t, fsmState, inc(), "policyD", policyD)
+
+ // Create an alloc for each of the 3 jobs
+ alloc1 := mockAlloc()
+ alloc1.JobID = job1.ID
+ alloc1.Name = job1.Name + ".task[0]"
+ alloc1.Namespace = job1.Namespace
+ summary1 := mock.JobSummary(alloc1.JobID)
+ require.NoError(t, fsmState.UpsertJobSummary(inc(), summary1))
+
+ alloc2 := mockAlloc()
+ alloc2.JobID = job2.ID
+ alloc2.Name = job2.Name + ".task[0]"
+ alloc2.Namespace = job2.Namespace
+ summary2 := mock.JobSummary(alloc2.JobID)
+ require.NoError(t, fsmState.UpsertJobSummary(inc(), summary2))
+
+ alloc3 := mockAlloc()
+ alloc3.JobID = job3.ID
+ alloc3.Name = job3.Name + ".task[0]"
+ alloc3.Namespace = job3.Namespace
+ summary3 := mock.JobSummary(alloc3.JobID)
+ require.NoError(t, fsmState.UpsertJobSummary(inc(), summary3))
+
+ // Upsert the allocs
+ require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, inc(), []*structs.Allocation{alloc1, alloc2, alloc3}))
+
+ token := mock.CreateToken(t, fsmState, inc(), []string{"policyD"})
+ req := request("team", structs.AllNamespacesSentinel, token.SecretID, structs.Allocs)
+ var resp structs.FuzzySearchResponse
+ 
require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Allocs], 1) + require.Equal(t, "teamB-job2.task[0]", resp.Matches[structs.Allocs][0].ID) + }) + + // Using a management token should return allocs from all the jobs. + t.Run("with job:read token read allocs", func(t *testing.T) { + // Create an alloc for each of the 3 jobs + alloc1 := mockAlloc() + alloc1.ID = uuid.Generate() + alloc1.JobID = job1.ID + alloc1.Name = "test-alloc.one[0]" + alloc1.Namespace = job1.Namespace + summary1 := mock.JobSummary(alloc1.JobID) + require.NoError(t, fsmState.UpsertJobSummary(inc(), summary1)) + + alloc2 := mockAlloc() + alloc2.ID = uuid.Generate() + alloc2.JobID = job2.ID + alloc2.Name = "test-alloc.two[0]" + alloc2.Namespace = job2.Namespace + summary2 := mock.JobSummary(alloc2.JobID) + require.NoError(t, fsmState.UpsertJobSummary(inc(), summary2)) + + alloc3 := mockAlloc() + alloc3.ID = uuid.Generate() + alloc3.JobID = job3.ID + alloc3.Name = "test-alloc.three[0]" + alloc3.Namespace = job3.Namespace + summary3 := mock.JobSummary(alloc3.JobID) + require.NoError(t, fsmState.UpsertJobSummary(inc(), summary3)) + + // Upsert the allocs + require.NoError(t, fsmState.UpsertAllocs(structs.MsgTypeTestSetup, inc(), []*structs.Allocation{alloc1, alloc2, alloc3})) + + req := request("alloc", structs.AllNamespacesSentinel, root.SecretID, structs.Allocs) + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + require.Len(t, resp.Matches[structs.Allocs], 3) + require.Equal(t, alloc1.Name, resp.Matches[structs.Allocs][0].ID) + require.Equal(t, []string{"teamA", alloc1.ID}, resp.Matches[structs.Allocs][0].Scope) + require.Equal(t, alloc2.Name, resp.Matches[structs.Allocs][1].ID) + require.Equal(t, []string{"teamB", alloc2.ID}, resp.Matches[structs.Allocs][1].Scope) + require.Equal(t, alloc3.Name, resp.Matches[structs.Allocs][2].ID) + require.Equal(t, 
[]string{"teamC", alloc3.ID}, resp.Matches[structs.Allocs][2].Scope) + }) + + // Allow plugin read and wildcard namespace + t.Run("with plugin:read", func(t *testing.T) { + policy := mock.PluginPolicy("read") + mock.CreatePolicy(t, fsmState, inc(), "plugin-read-policy", policy) + + token := mock.CreateToken(t, fsmState, inc(), []string{"plugin-read-policy"}) + + // Create CSI plugins + state.CreateTestCSIPlugin(s.fsm.State(), "plugin-for-teams") + state.CreateTestCSIPlugin(s.fsm.State(), "plugin-for-ops") + + req := request("teams", structs.AllNamespacesSentinel, token.SecretID, structs.Plugins) + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + + require.Len(t, resp.Matches[structs.Plugins], 1) + require.Empty(t, resp.Matches[structs.Plugins][0].Scope) // no scope + }) +} + +func TestSearch_FuzzySearch_Job(t *testing.T) { + t.Parallel() + + s, cleanupS := TestServer(t, func(c *Config) { + c.NumSchedulers = 0 + }) + defer cleanupS() + codec := rpcClient(t, s) + testutil.WaitForLeader(t, s.RPC) + fsmState := s.fsm.State() + + job := mock.Job() + job.Name = "demo-sleep" + job.Namespace = "team-sleepy" + job.TaskGroups = []*structs.TaskGroup{{ + Name: "qa-sleeper-group-one", + Services: []*structs.Service{{ + Name: "qa-group-sleep-svc-one", + }}, + Tasks: []*structs.Task{{ + Name: "qa-sleep-task-one", + Services: []*structs.Service{{ + Name: "some-sleepy-task-svc-one", + }}, + Driver: "docker", + Config: map[string]interface{}{ + "image": "sleeper:latest", + }, + }}, + }, { + Name: "prod-sleeper-group-one", + Tasks: []*structs.Task{{ + Name: "prod-sleep-task-one", + Driver: "exec", + Config: map[string]interface{}{ + "command": "/bin/sleep", + }, + }, { + Name: "prod-task-two", + Driver: "raw_exec", + Config: map[string]interface{}{ + "command": "/usr/sbin/sleep", + }, + Services: []*structs.Service{{ + Name: "some-sleepy-task-svc-two", + }}, + }}, + }, { + Name: "sleep-in-java", + Tasks: 
[]*structs.Task{{ + Name: "prod-java-sleep", + Driver: "java", + Config: map[string]interface{}{ + "class": "sleep.class", + }, + }}, + }} + + ns := mock.Namespace() + ns.Name = job.Namespace + require.NoError(t, fsmState.UpsertNamespaces(2000, []*structs.Namespace{ns})) + registerJob(s, t, job) + require.NoError(t, fsmState.UpsertNode(structs.MsgTypeTestSetup, 1003, mock.Node())) + + t.Run("sleep", func(t *testing.T) { + req := &structs.FuzzySearchRequest{ + Text: "sleep", + Context: structs.Jobs, + QueryOptions: structs.QueryOptions{ + Region: "global", + Namespace: job.Namespace, + }, + } + var resp structs.FuzzySearchResponse + require.NoError(t, msgpackrpc.CallWithCodec(codec, "Search.FuzzySearch", req, &resp)) + m := resp.Matches + require.Equal(t, uint64(1000), resp.Index) // job is explicit search context, has id=1000 + + // just the one job + require.Len(t, m[structs.Jobs], 1) + + // 3 services (1 group, 2 task) + require.Len(t, m[structs.Services], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "some-sleepy-task-svc-one", + Scope: []string{"team-sleepy", job.ID, "qa-sleeper-group-one", "qa-sleep-task-one"}, + }, { + ID: "some-sleepy-task-svc-two", + Scope: []string{"team-sleepy", job.ID, "prod-sleeper-group-one", "prod-task-two"}, + }, { + ID: "qa-group-sleep-svc-one", + Scope: []string{"team-sleepy", job.ID, "qa-sleeper-group-one"}, + }}, m[structs.Services]) + + // 3 groups + require.Len(t, m[structs.Groups], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleep-in-java", + Scope: []string{"team-sleepy", job.ID}, + }, { + ID: "qa-sleeper-group-one", + Scope: []string{"team-sleepy", job.ID}, + }, { + ID: "prod-sleeper-group-one", + Scope: []string{"team-sleepy", job.ID}, + }}, m[structs.Groups]) + + // 3 tasks (1 does not match) + require.Len(t, m[structs.Tasks], 3) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "qa-sleep-task-one", + Scope: []string{"team-sleepy", job.ID, "qa-sleeper-group-one"}, + }, { + ID: "prod-sleep-task-one", + Scope: 
[]string{"team-sleepy", job.ID, "prod-sleeper-group-one"}, + }, { + ID: "prod-java-sleep", + Scope: []string{"team-sleepy", job.ID, "sleep-in-java"}, + }}, m[structs.Tasks]) + + // 2 tasks with command + require.Len(t, m[structs.Commands], 2) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "/bin/sleep", + Scope: []string{"team-sleepy", job.ID, "prod-sleeper-group-one", "prod-sleep-task-one"}, + }, { + ID: "/usr/sbin/sleep", + Scope: []string{"team-sleepy", job.ID, "prod-sleeper-group-one", "prod-task-two"}, + }}, m[structs.Commands]) + + // 1 task with image + require.Len(t, m[structs.Images], 1) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleeper:latest", + Scope: []string{"team-sleepy", job.ID, "qa-sleeper-group-one", "qa-sleep-task-one"}, + }}, m[structs.Images]) + + // 1 task with class + require.Len(t, m[structs.Classes], 1) + require.Equal(t, []structs.FuzzyMatch{{ + ID: "sleep.class", + Scope: []string{"team-sleepy", job.ID, "sleep-in-java", "prod-java-sleep"}, + }}, m[structs.Classes]) + }) +} + +func TestSearch_FuzzySearch_fuzzyIndex(t *testing.T) { + for _, tc := range []struct { + name, text string + exp int + }{ + {name: "foo-bar-baz", text: "bar", exp: 4}, + {name: "Foo-Bar-Baz", text: "bar", exp: 4}, + {name: "foo-bar-baz", text: "zap", exp: -1}, + } { + result := fuzzyIndex(tc.name, tc.text) + require.Equal(t, tc.exp, result, "name: %s, text: %s, exp: %d, got: %d", tc.name, tc.text, tc.exp, result) + } } diff --git a/nomad/structs/search.go b/nomad/structs/search.go new file mode 100644 index 000000000000..3003f4dce00a --- /dev/null +++ b/nomad/structs/search.go @@ -0,0 +1,129 @@ +package structs + +// Context defines the scope in which a search for Nomad object operates, and +// is also used to query the matching index value for this context. +type Context string + +const ( + // Individual context types. 
+ Allocs Context = "allocs" + Deployments Context = "deployment" + Evals Context = "evals" + Jobs Context = "jobs" + Nodes Context = "nodes" + Namespaces Context = "namespaces" + Quotas Context = "quotas" + Recommendations Context = "recommendations" + ScalingPolicies Context = "scaling_policy" + Plugins Context = "plugins" + Volumes Context = "volumes" + + // Subtypes used in fuzzy matching. + Groups Context = "groups" + Services Context = "services" + Tasks Context = "tasks" + Images Context = "images" + Commands Context = "commands" + Classes Context = "classes" + + // Union context types. + All Context = "all" +) + +// SearchConfig is used in servers to configure search API options. +type SearchConfig struct { + // FuzzyEnabled toggles whether the FuzzySearch API is enabled. If not + // enabled, requests to /v1/search/fuzzy will reply with a 404 response code. + FuzzyEnabled bool `hcl:"fuzzy_enabled"` + + // LimitQuery limits the number of objects searched in the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of Nomad server when + // the FuzzySearch API is enabled. + LimitQuery int `hcl:"limit_query"` + + // LimitResults limits the number of results provided by the FuzzySearch API. + // The results are indicated as truncated if the limit is reached. + // + // Lowering this value can reduce resource consumption of Nomad server per + // fuzzy search request when the FuzzySearch API is enabled. + LimitResults int `hcl:"limit_results"` + + // MinTermLength is the minimum length of Text required before the FuzzySearch + // API will return results. + // + // Increasing this value can avoid resource consumption on Nomad server by + // reducing searches with less meaningful results. + MinTermLength int `hcl:"min_term_length"` +} + +// SearchResponse is used to return matches and information about whether +// the match list is truncated specific to each type of Context. 
+type SearchResponse struct { + // Map of Context types to ids which match a specified prefix + Matches map[Context][]string + + // Truncations indicates whether the matches for a particular Context have + // been truncated + Truncations map[Context]bool + + QueryMeta +} + +// SearchRequest is used to parameterize a request, and returns a +// list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type SearchRequest struct { + // Prefix is what ids are matched to. I.e, if the given prefix were + // "a", potential matches might be "abcd" or "aabb" + Prefix string + + // Context is the type that can be matched against. A context can be a job, + // node, evaluation, allocation, or empty (indicated every context should be + // matched) + Context Context + + QueryOptions +} + +// FuzzyMatch is used to describe the ID of an object which may be a machine +// readable UUID or a human readable Name. If the object is a component of a Job, +// the Scope is a list of IDs starting from Namespace down to the parent object of +// ID. +// +// e.g. A Task-level service would have scope like, +// ["<namespace>", "<job>", "<group>", "<task>"] +type FuzzyMatch struct { + ID string // ID is UUID or Name of object + Scope []string `json:",omitempty"` // IDs of parent objects +} + +// FuzzySearchResponse is used to return fuzzy matches and information about +// whether the match list is truncated specific to each type of searchable Context. +type FuzzySearchResponse struct { + // Matches is a map of Context types to IDs which fuzzy match a specified query. + Matches map[Context][]FuzzyMatch + + // Truncations indicates whether the matches for a particular Context have + // been truncated. 
+ Truncations map[Context]bool + + QueryMeta +} + +// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns +// a list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type FuzzySearchRequest struct { + // Text is what names are fuzzy-matched to. E.g. if the given text were + // "py", potential matches might be "python", "mypy", etc. of jobs, nodes, + // allocs, groups, services, commands, images, classes. + Text string + + // Context is the type that can be matched against. A Context of "all" indicates + // all Context types are queried for matching. + Context Context + + QueryOptions +} diff --git a/nomad/structs/structs.go b/nomad/structs/structs.go index 6f1b110401a6..65b874ad6d42 100644 --- a/nomad/structs/structs.go +++ b/nomad/structs/structs.go @@ -193,25 +193,6 @@ var ( validNamespaceName = regexp.MustCompile("^[a-zA-Z0-9-]{1,128}$") ) -// Context defines the scope in which a search for Nomad object operates, and -// is also used to query the matching index value for this context -type Context string - -const ( - Allocs Context = "allocs" - Deployments Context = "deployment" - Evals Context = "evals" - Jobs Context = "jobs" - Nodes Context = "nodes" - Namespaces Context = "namespaces" - Quotas Context = "quotas" - Recommendations Context = "recommendations" - ScalingPolicies Context = "scaling_policy" - All Context = "all" - Plugins Context = "plugins" - Volumes Context = "volumes" -) - // NamespacedID is a tuple of an ID and a namespace type NamespacedID struct { ID string @@ -581,35 +562,6 @@ type NodeSpecificRequest struct { QueryOptions } -// SearchResponse is used to return matches and information about whether -// the match list is truncated specific to each type of context. 
-type SearchResponse struct { - // Map of context types to ids which match a specified prefix - Matches map[Context][]string - - // Truncations indicates whether the matches for a particular context have - // been truncated - Truncations map[Context]bool - - QueryMeta -} - -// SearchRequest is used to parameterize a request, and returns a -// list of matches made up of jobs, allocations, evaluations, and/or nodes, -// along with whether or not the information returned is truncated. -type SearchRequest struct { - // Prefix is what ids are matched to. I.e, if the given prefix were - // "a", potential matches might be "abcd" or "aabb" - Prefix string - - // Context is the type that can be matched against. A context can be a job, - // node, evaluation, allocation, or empty (indicated every context should be - // matched) - Context Context - - QueryOptions -} - // JobRegisterRequest is used for Job.Register endpoint // to register a job as being a schedulable entity. type JobRegisterRequest struct { diff --git a/nomad/testing.go b/nomad/testing.go index a70f43fd02bb..a3bc9b2d1a39 100644 --- a/nomad/testing.go +++ b/nomad/testing.go @@ -95,6 +95,14 @@ func TestServer(t testing.T, cb func(*Config)) (*Server, func()) { // Disable consul autojoining: tests typically join servers directly config.ConsulConfig.ServerAutoJoin = &f + // Enable fuzzy search API + config.SearchConfig = &structs.SearchConfig{ + FuzzyEnabled: true, + LimitQuery: 20, + LimitResults: 100, + MinTermLength: 2, + } + // Invoke the callback if any if cb != nil { cb(config) diff --git a/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go b/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go index 399424df5bba..b973f733bd02 100644 --- a/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go +++ b/vendor/github.com/hashicorp/nomad/api/contexts/contexts.go @@ -1,9 +1,12 @@ +// Package contexts provides constants used with the Nomad Search API. 
package contexts -// Context defines the scope in which a search for Nomad object operates +// Context defines the scope in which a search for Nomad object operates. type Context string const ( + // These Context types are used to reference the high level Nomad object + // types than can be searched. Allocs Context = "allocs" Deployments Context = "deployment" Evals Context = "evals" @@ -15,5 +18,16 @@ const ( ScalingPolicies Context = "scaling_policy" Plugins Context = "plugins" Volumes Context = "volumes" - All Context = "all" + + // These Context types are used to associate a search result from a lower + // level Nomad object with one of the higher level Context types above. + Groups Context = "groups" + Services Context = "services" + Tasks Context = "tasks" + Images Context = "images" + Commands Context = "commands" + Classes Context = "classes" + + // Context used to represent the set of all the higher level Context types. + All Context = "all" ) diff --git a/vendor/github.com/hashicorp/nomad/api/search.go b/vendor/github.com/hashicorp/nomad/api/search.go index 6a6cb9b59e17..3b020827a495 100644 --- a/vendor/github.com/hashicorp/nomad/api/search.go +++ b/vendor/github.com/hashicorp/nomad/api/search.go @@ -13,7 +13,7 @@ func (c *Client) Search() *Search { return &Search{client: c} } -// PrefixSearch returns a list of matches for a particular context and prefix. +// PrefixSearch returns a set of matches for a particular context and prefix. 
func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryOptions) (*SearchResponse, *QueryMeta, error) { var resp SearchResponse req := &SearchRequest{Prefix: prefix, Context: context} @@ -26,14 +26,72 @@ func (s *Search) PrefixSearch(prefix string, context contexts.Context, q *QueryO return &resp, qm, nil } +type SearchResponse struct { + Matches map[contexts.Context][]string + Truncations map[contexts.Context]bool + QueryMeta +} + type SearchRequest struct { Prefix string Context contexts.Context QueryOptions } -type SearchResponse struct { - Matches map[contexts.Context][]string +// FuzzySearch returns a set of matches for a given context and string. +func (s *Search) FuzzySearch(text string, context contexts.Context, q *QueryOptions) (*FuzzySearchResponse, *QueryMeta, error) { + var resp FuzzySearchResponse + + req := &FuzzySearchRequest{ + Context: context, + Text: text, + } + + qm, err := s.client.putQuery("/v1/search/fuzzy", req, &resp, q) + if err != nil { + return nil, nil, err + } + + return &resp, qm, nil +} + +// FuzzyMatch is used to describe the ID of an object which may be a machine +// readable UUID or a human readable Name. If the object is a component of a Job, +// the Scope is a list of IDs starting from Namespace down to the parent object of +// ID. +// +// e.g. A Task-level service would have scope like, +// ["", "", "", ""] +type FuzzyMatch struct { + ID string // ID is UUID or Name of object + Scope []string `json:",omitempty"` // IDs of parent objects +} + +// FuzzySearchResponse is used to return fuzzy matches and information about +// whether the match list is truncated specific to each type of searchable Context. +type FuzzySearchResponse struct { + // Matches is a map of Context types to IDs which fuzzy match a specified query. + Matches map[contexts.Context][]FuzzyMatch + + // Truncations indicates whether the matches for a particular Context have + // been truncated. 
Truncations map[contexts.Context]bool + QueryMeta } + +// FuzzySearchRequest is used to parameterize a fuzzy search request, and returns +// a list of matches made up of jobs, allocations, evaluations, and/or nodes, +// along with whether or not the information returned is truncated. +type FuzzySearchRequest struct { + // Text is what names are fuzzy-matched to. E.g. if the given text were + // "py", potential matches might be "python", "mypy", etc. of jobs, nodes, + // allocs, groups, services, commands, images, classes. + Text string + + // Context is the type that can be matched against. A Context of "all" indicates + // all Contexts types are queried for matching. + Context contexts.Context + + QueryOptions +} diff --git a/website/content/api-docs/search.mdx b/website/content/api-docs/search.mdx index f2d67ac6de94..fa3b95d35132 100644 --- a/website/content/api-docs/search.mdx +++ b/website/content/api-docs/search.mdx @@ -6,6 +6,8 @@ description: The /search endpoint is used to search for Nomad objects # Search HTTP API +## Prefix Searching + The `/search` endpoint returns matches for a given prefix and context, where a context can be jobs, allocations, evaluations, nodes, deployments, plugins, namespaces, or volumes. When using Nomad Enterprise, the allowed contexts @@ -41,7 +43,7 @@ job related results will not be returned. If the token is only valid for ### Sample Payload (for all contexts) -```javascript +```json { "Prefix": "abc", "Context": "all" @@ -90,7 +92,7 @@ $ curl \ ### Sample Payload (for a specific context) -```javascript +```json { "Prefix": "abc", "Context": "evals" @@ -118,3 +120,470 @@ $ curl \ } } ``` + +## Fuzzy Searching + +The `/search/fuzzy` endpoint returns partial substring matches for a given search +term and context, where a context can be jobs, allocations, nodes, plugins, or namespaces. +Additionally, fuzzy searching can be done across all contexts. 
For better control +over the performance implications of fuzzy searching on Nomad servers, aspects of +fuzzy searching can be tuned through the [search] stanza in Nomad agent config. + +Fuzzy search results are ordered starting with closest matching terms. Items of +a name that exactly matches the search term are listed first. + +| Method | Path | Produces | +| ------ | ------------------ | ------------------ | +| `POST` | `/v1/search/fuzzy` | `application/json` | + +The table below shows this endpoint's support for +[blocking queries](/api-docs#blocking-queries) and +[required ACLs](/api-docs#acls). + +| Blocking Queries | ACL Required | +| ---------------- | ----------------------------------------------------------- | +| `NO` | `node:read, namespace:read-jobs, namespace:csi-list-plugin` | + +When ACLs are enabled, requests must have a token valid for `node:read`, `plugin:read` or +`namespace:read-jobs` roles. If the token is only valid for a portion of these +capabilities, then the results will only include data readable with +the given token. + +### Parameters + +- `Text` `(string: <required>)` - Specifies the identifier against which + matches will be found. For example, if the given text were "py", potential + fuzzy matches might be "python", "spying", or "happy". +- `Context` `(string: <required>)` - Defines the scope in which a search for a + prefix operates. Contexts can be: "jobs", "allocs", "nodes", "plugins", or + "all", where "all" means every context will be searched. When "all" is selected, + additional prefix matches will be included for the "deployments", "evals", and + "volumes" types. When searching in the "jobs" context, results that fuzzy match + "groups", "services", "tasks", "images", "commands", and "classes" are also + included in the results. + +### Scope + +Fuzzy match results are accompanied with a `Scope` field which is used to uniquely +identify the matched object, in a way that the Nomad API can be queried again for +additional information. 
The data provided by scope varies depending on the type +of matched object, described below. + +### Sample Payload (for jobs) + +```json +{ + "Text": "py", + "Context": "jobs" +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Sample Response + +```json +{ + "Index": 90, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "services": [ + { + "ID": "python-logger", + "Scope": [ + "default", + "example-python", + "my-spy-app", + "my-python-task" + ] + }, + { + "ID": "super-spy-service", + "Scope": [ + "default", + "example-python", + "my-spy-app" + ] + } + ], + "tasks": [ + { + "ID": "my-python-task", + "Scope": [ + "default", + "example-python", + "my-spy-app" + ] + } + ], + "images": [ + { + "ID": "python:3", + "Scope": [ + "default", + "example-python", + "my-spy-app", + "my-python-task" + ] + } + ], + "jobs": [ + { + "ID": "example-python", + "Scope": [ + "default" + ] + } + ], + "groups": [ + { + "ID": "my-spy-app", + "Scope": [ + "default", + "example-python" + ] + } + ] + }, + "Truncations": { + "jobs": false + } +} +``` + +##### Scope (jobs) + +- `Scope[0]` : Namespace +- `Scope[1]` : Job ID + +##### Scope (groups) + +- `Scope[0]` : Namespace +- `Scope[1]` : Job ID + +##### Scope (tasks) + +- `Scope[0]` : Namespace +- `Scope[1]` : Job ID +- `Scope[2]` : Group + +##### Scope (group services) + +- `Scope[0]` : Namespace +- `Scope[1]` : Group + +#### Scope (task services) + +- `Scope[0]` : Namespace +- `Scope[1]` : Job ID +- `Scope[2]` : Group +- `Scope[3]` : Task + +#### Scope (commands/images/classes) + +- `Scope[0]` : Namespace +- `Scope[1]` : Job ID +- `Scope[2]` : Group +- `Scope[3]` : Task + +### Sample Payload (for nodes) + +```json +{ + "Text": "lab", + "Context": "nodes" +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Sample Response 
+ +```json +{ + "Index": 9, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "nodes": [ + { + "ID": "nomad-lab1", + "Scope": [ + "c48cd39f-dfe1-9cc0-9c62-617d199854be" + ] + } + ] + }, + "Truncations": { + "nodes": false + } +} +``` + +##### Scope (nodes) + +- `Scope[0]` : Node ID + +### Sample Payload (for allocs) + +```json +{ + "Text":"py", + "Context":"allocs" +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Sample Response + +```json +{ + "Index": 136, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "allocs": [ + { + "ID": "example-python.my-spy-app[0]", + "Scope": [ + "default", + "0fb703d1-ba4d-116f-13aa-27f31f046858" + ] + } + ] + }, + "Truncations": { + "allocs": false + } +} +``` + +#### Scope (allocs) + +- `Scope[0]` : Namespace +- `Scope[1]` : Alloc ID + + +### Sample Payload (for plugins) + +```json +{ + "Text": "aws", + "Context": "plugins" +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Sample Response + +```json +{ + "Index": 0, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "plugins": [ + { + "ID": "aws-efs0" + } + ] + }, + "Truncations": { + "plugins": false + } +} +``` + +### Sample Payload (for all) + +```json +{ + "Index": 260, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "services": [ + { + "ID": "python-logger", + "Scope": [ + "default", + "example-python", + "my-spy-app", + "my-python-task" + ] + }, + { + "ID": "super-spy-service", + "Scope": [ + "default", + "example-python", + "my-spy-app" + ] + } + ], + "tasks": [ + { + "ID": "my-python-task", + "Scope": [ + "default", + "example-python", + "my-spy-app" + ] + } + ], + "jobs": [ + { + "ID": "example-python", + "Scope": [ + "default" + ] + } + ], + "evals": [], + "scaling_policy": [], + "groups": [ + { + "ID": "my-spy-app", + 
"Scope": [ + "default", + "example-python" + ] + } + ], + "images": [ + { + "ID": "python:3", + "Scope": [ + "default", + "example-python", + "my-spy-app", + "my-python-task" + ] + } + ], + "plugins": [ + { + "ID": "aws-spy-plugin" + } + ], + "deployment": [], + "volumes": [], + "allocs": [ + { + "ID": "example-python.my-spy-app[0]", + "Scope": [ + "default", + "48608246-4c28-0446-f3d1-c67e3bc650ad" + ] + } + ] + }, + "Truncations": { + "deployment": false, + "volumes": false, + "plugins": false, + "namespaces": false, + "scaling_policy": false, + "evals": false, + "allocs": false, + "jobs": false, + "nodes": false + } +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Prefix matching when fuzzy searching + +If the search Context is `all` when fuzzy searching, the object types that are +identified only with UUIDs are also concurrently prefix-searched. Those types include +deployments, evals, volumes, and quotas (Enterprise). 
+ +### Sample Payload (prefix match) + +```json +{ + "Text":"cc", + "Context":"all" +} +``` + +### Sample Request + +```shell-session +$ curl \ + --request POST \ + --data @payload.json \ + https://localhost:4646/v1/search/fuzzy +``` + +### Sample Result +```json +{ + "Index": 267, + "KnownLeader": true, + "LastContact": 0, + "Matches": { + "scaling_policy": [], + "evals": [], + "deployment": [ + { + "ID": "cc786388-e071-31ec-5821-b829839f9681" + } + ], + "volumes": [] + }, + "Truncations": { + "deployment": false, + "volumes": false, + "plugins": false, + "namespaces": false, + "scaling_policy": false, + "evals": false, + "allocs": false, + "jobs": false, + "nodes": false + } +} +``` + +[search]: /docs/configuration/search diff --git a/website/content/docs/configuration/search.mdx b/website/content/docs/configuration/search.mdx new file mode 100644 index 000000000000..edeea5130b40 --- /dev/null +++ b/website/content/docs/configuration/search.mdx @@ -0,0 +1,50 @@ +--- +layout: docs +page_title: search Stanza - Agent Configuration +sidebar_title: search +description: >- + The "search" stanza specifies configuration for the search API provided + by the Nomad servers. +--- + +# `search` Stanza + + + +The `search` stanza specifies configuration for the search API provided by the +Nomad servers. + +```hcl +server { + search { + fuzzy_enabled = true + limit_query = 200 + limit_results = 1000 + min_term_length = 5 + } +} +``` + +## `search` Parameters + +- `fuzzy_enabled` `(bool: true)` - Specifies whether the [fuzzy search API][fuzzy] + is enabled. If not enabled, requests to the fuzzy search API endpoint will return + an error response. + +- `limit_query` `(int: 20)` - Specifies the maximum number of Nomad objects to + search through per context type in the Nomad server before truncating results. + Setting this parameter to high value may degrade Nomad server performance. 
+ +- `limit_results` `(int: 100)` - Specifies the maximum number of matching results + to accumulate per context type in the API response before truncating results. + Setting this parameter to a high value may cause excessively large API response sizes. + +- `min_term_length` `(int: 2)` - Specifies the minimum size of the search term + allowed for matching with the fuzzy search API. Setting this value higher can + prevent unnecessary load on the Nomad server from broad queries. + +[fuzzy]: /api-docs/search#fuzzy-searching diff --git a/website/content/docs/configuration/server.mdx b/website/content/docs/configuration/server.mdx index 781d763575c6..0154ac88cf53 100644 --- a/website/content/docs/configuration/server.mdx +++ b/website/content/docs/configuration/server.mdx @@ -194,6 +194,9 @@ server { in place of the Nomad version when custom upgrades are enabled in Autopilot. For more information, see the [Autopilot Guide](https://learn.hashicorp.com/tutorials/nomad/autopilot). +- `search` ([search][search]: nil) - Specifies configuration parameters + for the Nomad search API. + ### Deprecated Parameters - `retry_join` `(array: [])` - Specifies a list of server addresses to @@ -312,3 +315,4 @@ server { [bootstrapping a cluster]: /docs/faq#bootstrapping [rfc4648]: https://tools.ietf.org/html/rfc4648#section-5 [`nomad operator keygen`]: /docs/commands/operator/keygen +[search]: /docs/configuration/search diff --git a/website/data/docs-nav-data.json b/website/data/docs-nav-data.json index 14446d6bb0a2..526695ee9f11 100644 --- a/website/data/docs-nav-data.json +++ b/website/data/docs-nav-data.json @@ -180,6 +180,10 @@ "title": "sentinel", "path": "configuration/sentinel" }, + { + "title": "search", + "path": "configuration/search" + }, { "title": "server", "path": "configuration/server"