Skip to content

Commit

Permalink
Make default search limit configurable (#1022)
Browse files Browse the repository at this point in the history
* Make default search limit configurable

* Add check user doesn't pass 0 or negative limit

* Add check user doesn't pass 0 or negative limit - but properly lol

* Drop redundant limit check

* Access searchDefaultLimit directly from config

* Rename search_default_limit -> search_default_result_limit

* Add maxResults check again, if limit is not set the caller probably wants some default limit
  • Loading branch information
kvrhdn authored Oct 12, 2021
1 parent 2ff99b6 commit ddbfd51
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 13 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,13 @@
* [ENHANCEMENT] Add search block headers for wal blocks [#963](https://github.com/grafana/tempo/pull/963) (@mdisibio)
* [ENHANCEMENT] Add support for vulture sending long running traces [#951](https://github.com/grafana/tempo/pull/951) (@zalegrala)
* [ENHANCEMENT] Support global denylist and per-tenant allowlist of tags for search data. [#960](https://github.com/grafana/tempo/pull/960) (@annanay25)
* [ENHANCEMENT] Add `search_query_timeout` to Querier config. [#984](https://github.com/grafana/tempo/pull/984) (@kvrhdn)
* [ENHANCEMENT] Add `search_query_timeout` to querier config. [#984](https://github.com/grafana/tempo/pull/984) (@kvrhdn)
* [ENHANCEMENT] Jsonnet: add `$._config.memcached.memory_limit_mb` [#987](https://github.com/grafana/tempo/pull/987) (@kvrhdn)
* [ENHANCEMENT] Upgrade jsonnet-libs to 1.19 and update tk examples [#1001](https://github.com/grafana/tempo/pull/1001) (@mapno)
* [ENHANCEMENT] Shard tenant index creation by tenant and add functionality to handle stale indexes. [#1005](https://github.com/grafana/tempo/pull/1005) (@joe-elliott)
* [ENHANCEMENT] Added a configurable buffer for WAL writes. [#1018](https://github.com/grafana/tempo/pull/1018) (@joe-elliott)
* [ENHANCEMENT] **BREAKING CHANGE** Support partial results from failed block queries [#1007](https://github.com/grafana/tempo/pull/1007) (@mapno)
* [ENHANCEMENT] Add `search_default_result_limit` to querier config. [#1022](https://github.com/grafana/tempo/pull/1022) (@kvrhdn)
Querier [`GET /querier/api/traces/<traceid>`](https://grafana.com/docs/tempo/latest/api_docs/#query) response's body has been modified
to return `tempopb.TraceByIDResponse` instead of simply `tempopb.Trace`. This will cause a disruption of the read path during rollout of the change.
* [BUGFIX] Update port spec for GCS docker-compose example [#869](https://github.com/grafana/tempo/pull/869) (@zalegrala)
Expand Down
9 changes: 9 additions & 0 deletions docs/tempo/website/configuration/_index.md
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,15 @@ The Querier is responsible for querying the backends/cache for the traceID.
# querier config block
querier:
# Timeout for trace lookup requests
[query_timeout: <duration> | default = 10s]
# Timeout for search requests
[search_query_timeout: <duration> | default = 30s]
# Limit used for search requests if none is set by the caller
[search_default_result_limit: <int> | default = 20]
# config of the worker that connects to the query frontend
frontend_worker:
Expand Down
7 changes: 4 additions & 3 deletions modules/ingester/instance_search.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,10 @@ import (

func (i *instance) Search(ctx context.Context, req *tempopb.SearchRequest) (*tempopb.SearchResponse, error) {

maxResults := 20
if req.Limit != 0 {
maxResults = int(req.Limit)
maxResults := int(req.Limit)
// if limit is not set, use a safe default
if maxResults == 0 {
maxResults = 20
}

p := search.NewSearchPipeline(req)
Expand Down
12 changes: 7 additions & 5 deletions modules/querier/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,17 +11,19 @@ import (

// Config for a querier.
type Config struct {
TraceLookupQueryTimeout time.Duration `yaml:"query_timeout"`
SearchQueryTimeout time.Duration `yaml:"search_query_timeout"`
ExtraQueryDelay time.Duration `yaml:"extra_query_delay,omitempty"`
MaxConcurrentQueries int `yaml:"max_concurrent_queries"`
Worker cortex_worker.Config `yaml:"frontend_worker"`
TraceLookupQueryTimeout time.Duration `yaml:"query_timeout"`
SearchQueryTimeout time.Duration `yaml:"search_query_timeout"`
SearchDefaultResultLimit uint32 `yaml:"search_default_result_limit"`
ExtraQueryDelay time.Duration `yaml:"extra_query_delay,omitempty"`
MaxConcurrentQueries int `yaml:"max_concurrent_queries"`
Worker cortex_worker.Config `yaml:"frontend_worker"`
}

// RegisterFlagsAndApplyDefaults register flags.
func (cfg *Config) RegisterFlagsAndApplyDefaults(prefix string, f *flag.FlagSet) {
cfg.TraceLookupQueryTimeout = 10 * time.Second
cfg.SearchQueryTimeout = 30 * time.Second
cfg.SearchDefaultResultLimit = 20
cfg.ExtraQueryDelay = 0
cfg.MaxConcurrentQueries = 5
cfg.Worker = cortex_worker.Config{
Expand Down
7 changes: 5 additions & 2 deletions modules/querier/http.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,8 @@ func (q *Querier) SearchHandler(w http.ResponseWriter, r *http.Request) {
defer span.Finish()

req := &tempopb.SearchRequest{
Tags: map[string]string{},
Tags: map[string]string{},
Limit: q.cfg.SearchDefaultResultLimit,
}

for k, v := range r.URL.Query() {
Expand Down Expand Up @@ -193,7 +194,9 @@ func (q *Querier) SearchHandler(w http.ResponseWriter, r *http.Request) {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
req.Limit = uint32(limit)
if limit > 0 {
req.Limit = uint32(limit)
}
}

resp, err := q.Search(ctx, req)
Expand Down
4 changes: 2 additions & 2 deletions modules/querier/querier.go
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,11 @@ func New(cfg Config, clientCfg ingester_client.Config, ring ring.ReadRing, store
return q, nil
}

func (q *Querier) CreateAndRegisterWorker(tracesHandler http.Handler) error {
func (q *Querier) CreateAndRegisterWorker(handler http.Handler) error {
q.cfg.Worker.MaxConcurrentRequests = q.cfg.MaxConcurrentQueries
worker, err := cortex_worker.NewQuerierWorker(
q.cfg.Worker,
httpgrpc_server.NewServer(tracesHandler),
httpgrpc_server.NewServer(handler),
log.Logger,
nil,
)
Expand Down

0 comments on commit ddbfd51

Please sign in to comment.