Split up QueryTimeout for trace and search queries
kvrhdn committed Sep 23, 2021
1 parent 050e6d4 commit 38a2aef
Showing 3 changed files with 9 additions and 6 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -48,6 +48,7 @@
* [ENHANCEMENT] Add search block headers for wal blocks [#963](https://github.com/grafana/tempo/pull/963) (@mdisibio)
* [ENHANCEMENT] Add support for vulture sending long running traces [#951](https://github.com/grafana/tempo/pull/951) (@zalegrala)
* [ENHANCEMENT] Support global denylist and per-tenant allowlist of tags for search data. [#960](https://github.com/grafana/tempo/pull/960) (@annanay25)
+* [ENHANCEMENT] Add `search_query_timeout` to Querier config. [#984](https://github.com/grafana/tempo/pull/984) (@kvrhdn)
* [BUGFIX] Update port spec for GCS docker-compose example [#869](https://github.com/grafana/tempo/pull/869) (@zalegrala)
* [BUGFIX] Fix "magic number" errors and other block mishandling when an ingester forcefully shuts down [#937](https://github.com/grafana/tempo/issues/937) (@mdisibio)
* [BUGFIX] Fix compactor memory leak [#806](https://github.com/grafana/tempo/pull/806) (@mdisibio)
6 changes: 4 additions & 2 deletions modules/querier/config.go
@@ -11,15 +11,17 @@ import (

// Config for a querier.
type Config struct {
-QueryTimeout time.Duration `yaml:"query_timeout"`
+TraceIDQueryTimeout time.Duration `yaml:"query_timeout"`
+SearchQueryTimeout time.Duration `yaml:"search_query_timeout"`
ExtraQueryDelay time.Duration `yaml:"extra_query_delay,omitempty"`
MaxConcurrentQueries int `yaml:"max_concurrent_queries"`
Worker cortex_worker.Config `yaml:"frontend_worker"`
}

// RegisterFlagsAndApplyDefaults register flags.
func (cfg *Config) RegisterFlagsAndApplyDefaults(prefix string, f *flag.FlagSet) {
-cfg.QueryTimeout = 10 * time.Second
+cfg.TraceIDQueryTimeout = 10 * time.Second
+cfg.SearchQueryTimeout = 30 * time.Second
cfg.ExtraQueryDelay = 0
cfg.MaxConcurrentQueries = 5
cfg.Worker = cortex_worker.Config{
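The struct tags above map these fields onto the querier's YAML configuration: the trace-by-ID path keeps the existing `query_timeout` key, while search gets a new `search_query_timeout` key. As a rough sketch of how the new defaults might be overridden (key names are taken straight from the yaml tags in the diff; the `querier:` block placement and duration syntax are assumptions):

```yaml
# Sketch of a querier config overriding the split timeouts.
querier:
  # applies to trace-by-ID lookups (TraceIDQueryTimeout, default 10s)
  query_timeout: 15s
  # applies to search, tag, and tag-value queries (SearchQueryTimeout, default 30s)
  search_query_timeout: 45s
```

Because the trace-by-ID field reuses the old `query_timeout` yaml key, existing configs keep their current behaviour; only search gains a separately tunable timeout.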
8 changes: 4 additions & 4 deletions modules/querier/http.go
@@ -37,7 +37,7 @@ const (
// TraceByIDHandler is a http.HandlerFunc to retrieve traces
func (q *Querier) TraceByIDHandler(w http.ResponseWriter, r *http.Request) {
// Enforce the query timeout while querying backends
-ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.QueryTimeout))
+ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.TraceIDQueryTimeout))
defer cancel()

span, ctx := opentracing.StartSpanFromContext(ctx, "Querier.TraceByIDHandler")
@@ -150,7 +150,7 @@ func validateAndSanitizeRequest(r *http.Request) (string, string, string, error)

func (q *Querier) SearchHandler(w http.ResponseWriter, r *http.Request) {
// Enforce the query timeout while querying backends
-ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.QueryTimeout))
+ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.SearchQueryTimeout))
defer cancel()

span, ctx := opentracing.StartSpanFromContext(ctx, "Querier.SearchHandler")
@@ -212,7 +212,7 @@ func (q *Querier) SearchHandler(w http.ResponseWriter, r *http.Request) {

func (q *Querier) SearchTagsHandler(w http.ResponseWriter, r *http.Request) {
// Enforce the query timeout while querying backends
-ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.QueryTimeout))
+ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.SearchQueryTimeout))
defer cancel()

span, ctx := opentracing.StartSpanFromContext(ctx, "Querier.SearchTagsHandler")
@@ -236,7 +236,7 @@ func (q *Querier) SearchTagsHandler(w http.ResponseWriter, r *http.Request) {

func (q *Querier) SearchTagValuesHandler(w http.ResponseWriter, r *http.Request) {
// Enforce the query timeout while querying backends
-ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.QueryTimeout))
+ctx, cancel := context.WithDeadline(r.Context(), time.Now().Add(q.cfg.SearchQueryTimeout))
defer cancel()

span, ctx := opentracing.StartSpanFromContext(ctx, "Querier.SearchTagValuesHandler")
