diff --git a/pkg/querier/bucket_cache.go b/pkg/querier/bucket_cache.go
index 5f37ec6a08e2..da6493a4206d 100644
--- a/pkg/querier/bucket_cache.go
+++ b/pkg/querier/bucket_cache.go
@@ -490,7 +490,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
 		key string
 	}
 	seriesMap := make(map[seriesKey]*qbtypes.TimeSeries, estimatedSeries)
-	var queryName string
 
 	for _, bucket := range buckets {
 		var tsData *qbtypes.TimeSeriesData
@@ -499,11 +498,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
 			continue
 		}
 
-		// Preserve the query name from the first bucket
-		if queryName == "" && tsData.QueryName != "" {
-			queryName = tsData.QueryName
-		}
-
 		for _, aggBucket := range tsData.Aggregations {
 			for _, series := range aggBucket.Series {
 				// Create series key from labels
@@ -549,7 +543,6 @@ func (bc *bucketCache) mergeTimeSeriesValues(ctx context.Context, buckets []*cac
 
 	// Convert map back to slice
 	result := &qbtypes.TimeSeriesData{
-		QueryName:    queryName,
 		Aggregations: make([]*qbtypes.AggregationBucket, 0, len(aggMap)),
 	}
 
@@ -738,9 +731,7 @@ func (bc *bucketCache) trimResultToFluxBoundary(result *qbtypes.Result, fluxBoun
 	case qbtypes.RequestTypeTimeSeries:
 		// Trim time series data
 		if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok && tsData != nil {
-			trimmedData := &qbtypes.TimeSeriesData{
-				QueryName: tsData.QueryName,
-			}
+			trimmedData := &qbtypes.TimeSeriesData{}
 
 			for _, aggBucket := range tsData.Aggregations {
 				trimmedBucket := &qbtypes.AggregationBucket{
@@ -807,7 +798,6 @@ func (bc *bucketCache) filterResultToTimeRange(result *qbtypes.Result, startMs,
 	case qbtypes.RequestTypeTimeSeries:
 		if tsData, ok := result.Value.(*qbtypes.TimeSeriesData); ok {
 			filteredData := &qbtypes.TimeSeriesData{
-				QueryName:    tsData.QueryName,
 				Aggregations: make([]*qbtypes.AggregationBucket, 0, len(tsData.Aggregations)),
 			}
 
diff --git a/pkg/querier/bucket_cache_test.go b/pkg/querier/bucket_cache_test.go
index 57d9a53aca2f..c52fdfb2e474 100644
--- a/pkg/querier/bucket_cache_test.go
+++ b/pkg/querier/bucket_cache_test.go
@@ -169,9 +169,8 @@ func TestBucketCache_Put_And_Get(t *testing.T) {
 	assert.Equal(t, []string{"test warning"}, cached.Warnings)
 
 	// Verify the time series data
-	tsData, ok := cached.Value.(*qbtypes.TimeSeriesData)
+	_, ok := cached.Value.(*qbtypes.TimeSeriesData)
 	require.True(t, ok)
-	assert.Equal(t, "A", tsData.QueryName)
 }
 
 func TestBucketCache_PartialHit(t *testing.T) {
@@ -1077,7 +1076,6 @@ func TestBucketCache_FilteredCachedResults(t *testing.T) {
 	// Verify the cached result only contains values within the requested range
 	tsData, ok := cached.Value.(*qbtypes.TimeSeriesData)
 	require.True(t, ok)
-	assert.Equal(t, "A", tsData.QueryName)
 	require.Len(t, tsData.Aggregations, 1)
 	require.Len(t, tsData.Aggregations[0].Series, 1)
 
diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go
index 4e9de50b1c16..cedef6b5d715 100644
--- a/pkg/querier/querier.go
+++ b/pkg/querier/querier.go
@@ -385,6 +385,15 @@ func (q *querier) run(
 		if err != nil {
 			return nil, err
 		}
+		switch v := result.Value.(type) {
+		case *qbtypes.TimeSeriesData:
+			v.QueryName = name
+		case *qbtypes.ScalarData:
+			v.QueryName = name
+		case *qbtypes.RawData:
+			v.QueryName = name
+		}
+
 		results[name] = result.Value
 		warnings = append(warnings, result.Warnings...)
 		warningsDocURL = result.WarningsDocURL
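
Note on the querier.go hunk: QueryName is no longer threaded through the bucket cache and re-derived during merge; instead querier.run stamps the query name onto whatever payload the query produced, using a type switch over the result value. The standalone sketch below illustrates that pattern only; the stand-in payload types and the stampQueryName helper are hypothetical (the real types live in the qbtypes package, and in the diff the assignment happens inline in run).

package main

import "fmt"

// Stand-ins for the qbtypes result payloads; the field sets here are illustrative.
type TimeSeriesData struct{ QueryName string }
type ScalarData struct{ QueryName string }
type RawData struct{ QueryName string }

// stampQueryName mirrors the type switch added in querier.run: set the query
// name once on the result payload, whatever its concrete type, instead of
// carrying it through (and merging it out of) the cached buckets.
func stampQueryName(value any, name string) {
	switch v := value.(type) {
	case *TimeSeriesData:
		v.QueryName = name
	case *ScalarData:
		v.QueryName = name
	case *RawData:
		v.QueryName = name
	}
}

func main() {
	ts := &TimeSeriesData{}
	stampQueryName(ts, "A")
	fmt.Println(ts.QueryName) // prints "A"
}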