diff --git a/pkg/querier/builder_query.go b/pkg/querier/builder_query.go index 7afef0ea545e..5123b2e25bab 100644 --- a/pkg/querier/builder_query.go +++ b/pkg/querier/builder_query.go @@ -9,6 +9,7 @@ import ( "time" "github.com/ClickHouse/clickhouse-go/v2" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/telemetrystore" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" @@ -205,6 +206,10 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string, rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, args...) if err != nil { + if errors.Is(err, context.DeadlineExceeded) { + return nil, errors.Newf(errors.TypeTimeout, errors.CodeTimeout, "Query timed out"). + WithAdditional("Try refining your search by adding relevant resource attribute filters") + } return nil, err } defer rows.Close() diff --git a/pkg/querier/signozquerier/provider.go b/pkg/querier/signozquerier/provider.go index 9c7408250abc..c9801880c40d 100644 --- a/pkg/querier/signozquerier/provider.go +++ b/pkg/querier/signozquerier/provider.go @@ -89,6 +89,9 @@ func newProvider( resourceFilterFieldMapper, resourceFilterConditionBuilder, telemetryMetadataStore, + telemetrylogs.DefaultFullTextColumn, + telemetrylogs.BodyJSONStringSearchPrefix, + telemetrylogs.GetBodyJSONKey, ) logAggExprRewriter := querybuilder.NewAggExprRewriter( telemetrylogs.DefaultFullTextColumn, diff --git a/pkg/querybuilder/fallback_expr.go b/pkg/querybuilder/fallback_expr.go index 3a7367156f8b..ca69bf16a39f 100644 --- a/pkg/querybuilder/fallback_expr.go +++ b/pkg/querybuilder/fallback_expr.go @@ -94,3 +94,11 @@ func CollisionHandledFinalExpr( return multiIfStmt, allArgs, nil } + +func GroupByKeys(keys []qbtypes.GroupByKey) []string { + k := []string{} + for _, key := range keys { + k = append(k, "`"+key.Name+"`") + } + return k +} diff --git a/pkg/querybuilder/resourcefilter/statement_builder.go b/pkg/querybuilder/resourcefilter/statement_builder.go index edb4621f29a6..1deb32eee3d8 100644 --- a/pkg/querybuilder/resourcefilter/statement_builder.go +++ b/pkg/querybuilder/resourcefilter/statement_builder.go @@ -38,6 +38,10 @@ type resourceFilterStatementBuilder[T any] struct { conditionBuilder qbtypes.ConditionBuilder metadataStore telemetrytypes.MetadataStore signal telemetrytypes.Signal + + fullTextColumn *telemetrytypes.TelemetryFieldKey + jsonBodyPrefix string + jsonKeyToKey qbtypes.JsonKeyToFieldFunc } // Ensure interface compliance at compile time @@ -64,12 +68,18 @@ func NewLogResourceFilterStatementBuilder( fieldMapper qbtypes.FieldMapper, conditionBuilder qbtypes.ConditionBuilder, metadataStore telemetrytypes.MetadataStore, + fullTextColumn *telemetrytypes.TelemetryFieldKey, + jsonBodyPrefix string, + jsonKeyToKey qbtypes.JsonKeyToFieldFunc, ) *resourceFilterStatementBuilder[qbtypes.LogAggregation] { return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{ fieldMapper: fieldMapper, conditionBuilder: conditionBuilder, metadataStore: metadataStore, signal: telemetrytypes.SignalLogs, + fullTextColumn: fullTextColumn, + jsonBodyPrefix: jsonBodyPrefix, + jsonKeyToKey: jsonKeyToKey, } } @@ -140,7 +150,11 @@ func (b *resourceFilterStatementBuilder[T]) addConditions( FieldMapper: b.fieldMapper, ConditionBuilder: b.conditionBuilder, FieldKeys: keys, + FullTextColumn: b.fullTextColumn, + JsonBodyPrefix: b.jsonBodyPrefix, + JsonKeyToKey: b.jsonKeyToKey, SkipFullTextFilter: true, + SkipFunctionCalls: true, Variables: 
variables, }) diff --git a/pkg/querybuilder/where_clause_visitor.go b/pkg/querybuilder/where_clause_visitor.go index c5a73fba9ba1..2f05ec3b93ec 100644 --- a/pkg/querybuilder/where_clause_visitor.go +++ b/pkg/querybuilder/where_clause_visitor.go @@ -29,7 +29,10 @@ type filterExpressionVisitor struct { jsonKeyToKey qbtypes.JsonKeyToFieldFunc skipResourceFilter bool skipFullTextFilter bool + skipFunctionCalls bool variables map[string]qbtypes.VariableItem + + keysWithWarnings map[string]bool } type FilterExprVisitorOpts struct { @@ -42,6 +45,7 @@ type FilterExprVisitorOpts struct { JsonKeyToKey qbtypes.JsonKeyToFieldFunc SkipResourceFilter bool SkipFullTextFilter bool + SkipFunctionCalls bool Variables map[string]qbtypes.VariableItem } @@ -57,7 +61,9 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis jsonKeyToKey: opts.JsonKeyToKey, skipResourceFilter: opts.SkipResourceFilter, skipFullTextFilter: opts.SkipFullTextFilter, + skipFunctionCalls: opts.SkipFunctionCalls, variables: opts.Variables, + keysWithWarnings: make(map[string]bool), } } @@ -547,6 +553,10 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an // VisitFunctionCall handles function calls like has(), hasAny(), etc. func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any { + if v.skipFunctionCalls { + return "true" + } + // Get function name based on which token is present var functionName string if ctx.HAS() != nil { @@ -690,7 +700,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any { } } - if len(fieldKeysForName) > 1 { + if len(fieldKeysForName) > 1 && !v.keysWithWarnings[keyName] { // this is warning state, we must have a unambiguous key v.warnings = append(v.warnings, fmt.Sprintf( "key `%s` is ambiguous, found %d different combinations of field context and data type: %v", @@ -698,6 +708,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any { len(fieldKeysForName), fieldKeysForName, )) + v.keysWithWarnings[keyName] = true } return fieldKeysForName diff --git a/pkg/telemetrylogs/condition_builder.go b/pkg/telemetrylogs/condition_builder.go index d3329f81dfb3..30555084ceab 100644 --- a/pkg/telemetrylogs/condition_builder.go +++ b/pkg/telemetrylogs/condition_builder.go @@ -9,6 +9,7 @@ import ( schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "golang.org/x/exp/maps" "github.com/huandu/go-sqlbuilder" ) @@ -148,7 +149,7 @@ func (c *conditionBuilder) conditionFor( } // if the field is intrinsic, it always exists - if slices.Contains(IntrinsicFields, key.Name) { + if slices.Contains(maps.Keys(IntrinsicFields), key.Name) { return "true", nil } @@ -210,7 +211,7 @@ func (c *conditionBuilder) ConditionFor( // skip adding exists filter for intrinsic fields // with an exception for body json search field, _ := c.fm.FieldFor(ctx, key) - if slices.Contains(IntrinsicFields, field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) { + if slices.Contains(maps.Keys(IntrinsicFields), field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) { return condition, nil } diff --git a/pkg/telemetrylogs/condition_builder_test.go b/pkg/telemetrylogs/condition_builder_test.go index efbb45af0d71..3ad02863abac 100644 --- a/pkg/telemetrylogs/condition_builder_test.go +++ b/pkg/telemetrylogs/condition_builder_test.go @@ 
-337,7 +337,7 @@ func TestConditionForJSONBodySearch(t *testing.T) { }, operator: qbtypes.FilterOperatorEqual, value: "GET", - expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."method"'), 'String') = ?`, + expectedSQL: `JSON_VALUE(body, '$."http"."method"') = ?`, expectedError: nil, }, { @@ -417,7 +417,7 @@ func TestConditionForJSONBodySearch(t *testing.T) { }, operator: qbtypes.FilterOperatorContains, value: "200", - expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) LIKE LOWER(?)`, + expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) LIKE LOWER(?)`, expectedError: nil, }, { @@ -427,7 +427,7 @@ func TestConditionForJSONBodySearch(t *testing.T) { }, operator: qbtypes.FilterOperatorNotContains, value: "200", - expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) NOT LIKE LOWER(?)`, + expectedSQL: `LOWER(JSON_VALUE(body, '$."http"."status_code"')) NOT LIKE LOWER(?)`, expectedError: nil, }, { diff --git a/pkg/telemetrylogs/const.go b/pkg/telemetrylogs/const.go index d1834c67b814..9b0c38a45978 100644 --- a/pkg/telemetrylogs/const.go +++ b/pkg/telemetrylogs/const.go @@ -10,7 +10,57 @@ var ( FieldDataType: telemetrytypes.FieldDataTypeString, } BodyJSONStringSearchPrefix = `body.` - IntrinsicFields = []string{ - "body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version", + IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{ + "body": { + Name: "body", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "trace_id": { + Name: "trace_id", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "span_id": { + Name: "span_id", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "trace_flags": { + Name: "trace_flags", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "severity_text": { + Name: "severity_text", + Description: "Log level. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitytext)", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "severity_number": { + Name: "severity_number", + Description: "Numerical value of the severity. Learn more [here](https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber)", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "scope_name": { + Name: "scope_name", + Description: "Logger name. 
Learn more about instrumentation scope [here](https://opentelemetry.io/docs/concepts/instrumentation-scope/)", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextScope, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "scope_version": { + Name: "scope_version", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextScope, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, } ) diff --git a/pkg/telemetrylogs/filter_expr_logs_body_json_test.go b/pkg/telemetrylogs/filter_expr_logs_body_json_test.go index e9ede23b7687..a57371cb5558 100644 --- a/pkg/telemetrylogs/filter_expr_logs_body_json_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_body_json_test.go @@ -73,7 +73,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) { category: "json", query: "body.message = hello", shouldPass: true, - expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."message"'), 'String') = ? AND JSON_EXISTS(body, '$."message"'))`, + expectedQuery: `WHERE (JSON_VALUE(body, '$."message"') = ? AND JSON_EXISTS(body, '$."message"'))`, expectedArgs: []any{"hello"}, expectedErrorContains: "", }, @@ -113,7 +113,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) { category: "json", query: "body.message REGEXP 'a*'", shouldPass: true, - expectedQuery: `WHERE (match(JSONExtract(JSON_VALUE(body, '$."message"'), 'String'), ?) AND JSON_EXISTS(body, '$."message"'))`, + expectedQuery: `WHERE (match(JSON_VALUE(body, '$."message"'), ?) AND JSON_EXISTS(body, '$."message"'))`, expectedArgs: []any{"a*"}, expectedErrorContains: "", }, @@ -121,7 +121,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) { category: "json", query: `body.message CONTAINS "hello 'world'"`, shouldPass: true, - expectedQuery: `WHERE (LOWER(JSONExtract(JSON_VALUE(body, '$."message"'), 'String')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`, + expectedQuery: `WHERE (LOWER(JSON_VALUE(body, '$."message"')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`, expectedArgs: []any{"%hello 'world'%"}, expectedErrorContains: "", }, @@ -136,7 +136,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) { category: "json", query: `body.name IN ('hello', 'world')`, shouldPass: true, - expectedQuery: `WHERE ((JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ? OR JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ?) AND JSON_EXISTS(body, '$."name"'))`, + expectedQuery: `WHERE ((JSON_VALUE(body, '$."name"') = ? OR JSON_VALUE(body, '$."name"') = ?) 
AND JSON_EXISTS(body, '$."name"'))`, expectedArgs: []any{"hello", "world"}, expectedErrorContains: "", }, diff --git a/pkg/telemetrylogs/json.go b/pkg/telemetrylogs/json.go index 89b2f27626b6..86a7408c040a 100644 --- a/pkg/telemetrylogs/json.go +++ b/pkg/telemetrylogs/json.go @@ -61,7 +61,7 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp } // check if it is array - if strings.HasSuffix(key.Name, "[*]") { + if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") { valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))} } @@ -74,6 +74,8 @@ func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string { for _, part := range parts { if strings.HasSuffix(part, "[*]") { newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[*]"))) + } else if strings.HasSuffix(part, "[]") { + newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[]"))) } else { newParts = append(newParts, fmt.Sprintf(`"%s"`, part)) } @@ -94,8 +96,12 @@ func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, op return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value } - // for all other types, we need to extract the value from the JSON_VALUE - return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value + if dataType != telemetrytypes.FieldDataTypeString { + // for all types except strings, we need to extract the value from the JSON_VALUE + return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value + } + // for string types, we should compare with the JSON_VALUE + return fmt.Sprintf("JSON_VALUE(body, '$.%s')", getBodyJSONPath(key)), value } func GetBodyJSONKeyForExists(_ context.Context, key *telemetrytypes.TelemetryFieldKey, _ qbtypes.FilterOperator, _ any) string { diff --git a/pkg/telemetrylogs/statement_builder.go b/pkg/telemetrylogs/statement_builder.go index 503815339fbe..0ea14b154060 100644 --- a/pkg/telemetrylogs/statement_builder.go +++ b/pkg/telemetrylogs/statement_builder.go @@ -270,10 +270,11 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery( // Constrain the main query to the rows that appear in the CTE. tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", ")) - sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) + sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) // Group by all dimensions - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) if query.Having != nil && query.Having.Expression != "" { // Rewrite having expression to use SQL column names rewriter := querybuilder.NewHavingExpressionRewriter() @@ -290,7 +291,8 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery( finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) } else { - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) if query.Having != nil && query.Having.Expression != "" { rewriter := querybuilder.NewHavingExpressionRewriter() rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations) @@ -380,7 +382,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery( } // Group by dimensions - sb.GroupBy("ALL") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) 
// Add having clause if needed if query.Having != nil && query.Having.Expression != "" { @@ -492,7 +494,7 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter( return "", nil, err } - sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)") + sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)") return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil } diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go index 812e2d78ec4a..26e38dac7469 100644 --- a/pkg/telemetrylogs/stmt_builder_test.go +++ b/pkg/telemetrylogs/stmt_builder_test.go @@ -30,6 +30,9 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation fm, cb, mockMetadataStore, + DefaultFullTextColumn, + BodyJSONStringSearchPrefix, + GetBodyJSONKey, ) } @@ -65,7 +68,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, @@ -104,7 +107,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, diff --git a/pkg/telemetrymetadata/metadata.go b/pkg/telemetrymetadata/metadata.go index 5198a3d98bc2..e9ec26920354 100644 --- a/pkg/telemetrymetadata/metadata.go +++ b/pkg/telemetrymetadata/metadata.go @@ -16,6 +16,7 @@ import ( qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" + "golang.org/x/exp/maps" ) var ( @@ -208,6 +209,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector } keys = append(keys, key) + mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key } if rows.Err() != nil { @@ -215,8 +217,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector } staticKeys := []string{"isRoot", "isEntrypoint"} - staticKeys = append(staticKeys, telemetrytraces.IntrinsicFields...) - staticKeys = append(staticKeys, telemetrytraces.CalculatedFields...) + staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...) + staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...) // add matching intrinsic and matching calculated fields for _, key := range staticKeys { @@ -228,6 +230,19 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector } } if found { + if field, exists := telemetrytraces.IntrinsicFields[key]; exists { + if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added { + keys = append(keys, &field) + } + continue + } + + if field, exists := telemetrytraces.CalculatedFields[key]; exists { + if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added { + keys = append(keys, &field) + } + continue + } keys = append(keys, &telemetrytypes.TelemetryFieldKey{ Name: key, FieldContext: telemetrytypes.FieldContextSpan, @@ -361,6 +376,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors } keys = append(keys, key) + mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key } if rows.Err() != nil { @@ -368,7 +384,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors } staticKeys := []string{} - staticKeys = append(staticKeys, telemetrylogs.IntrinsicFields...) + staticKeys = append(staticKeys, maps.Keys(telemetrylogs.IntrinsicFields)...) 
// add matching intrinsic and matching calculated fields for _, key := range staticKeys { @@ -380,6 +396,13 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors } } if found { + if field, exists := telemetrylogs.IntrinsicFields[key]; exists { + if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added { + keys = append(keys, &field) + } + continue + } + keys = append(keys, &telemetrytypes.TelemetryFieldKey{ Name: key, FieldContext: telemetrytypes.FieldContextLog, diff --git a/pkg/telemetrymetrics/statement_builder.go b/pkg/telemetrymetrics/statement_builder.go index abf050467992..b1228160e49c 100644 --- a/pkg/telemetrymetrics/statement_builder.go +++ b/pkg/telemetrymetrics/statement_builder.go @@ -258,7 +258,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDeltaFastPath( sb.GTE("unix_milli", start), sb.LT("unix_milli", end), ) - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...) return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args @@ -320,7 +321,8 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE( sb.AddWhereClause(filterWhere) } - sb.GroupBy("ALL") + sb.GroupBy("fingerprint") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse) return fmt.Sprintf("(%s) AS filtered_time_series", q), args, nil @@ -375,7 +377,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggDelta( sb.GTE("unix_milli", start), sb.LT("unix_milli", end), ) - sb.GroupBy("ALL") + sb.GroupBy("fingerprint", "ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) sb.OrderBy("fingerprint", "ts") q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...) @@ -412,7 +415,8 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified( baseSb.GTE("unix_milli", start), baseSb.LT("unix_milli", end), ) - baseSb.GroupBy("ALL") + baseSb.GroupBy("fingerprint", "ts") + baseSb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) baseSb.OrderBy("fingerprint", "ts") innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...) @@ -438,7 +442,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified( wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name)) } wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr)) - wrapped.From(fmt.Sprintf("(%s) WINDOW increase_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery)) + wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery)) q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...) return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil default: @@ -465,7 +469,8 @@ func (b *metricQueryStatementBuilder) buildSpatialAggregationCTE( if query.Aggregations[0].ValueFilter != nil { sb.Where(sb.EQ("per_series_value", query.Aggregations[0].ValueFilter.Value)) } - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) 
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse) return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args diff --git a/pkg/telemetrymetrics/stmt_builder_test.go b/pkg/telemetrymetrics/stmt_builder_test.go index 651eb57688ce..669eea7002c7 100644 --- a/pkg/telemetrymetrics/stmt_builder_test.go +++ b/pkg/telemetrymetrics/stmt_builder_test.go @@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte", + Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte", Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0}, }, expectedErr: nil, @@ -82,7 +82,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte", + Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte", Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000)}, }, expectedErr: nil, @@ -114,7 +114,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? 
GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", + Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_latency", uint64(1747947419000), uint64(1747983448000)}, }, expectedErr: nil, @@ -147,7 +147,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte", + Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte", Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983448000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947419000), uint64(1747983448000), 0}, }, expectedErr: nil, @@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", + Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0}, }, expectedErr: nil, diff --git a/pkg/telemetrytraces/condition_builder.go b/pkg/telemetrytraces/condition_builder.go index 08a68fa1259e..c45329397ea5 100644 --- a/pkg/telemetrytraces/condition_builder.go +++ b/pkg/telemetrytraces/condition_builder.go @@ -11,6 +11,7 @@ import ( qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" + "golang.org/x/exp/maps" ) type conditionBuilder struct { @@ -129,10 +130,10 @@ func (c *conditionBuilder) conditionFor( // key membership checks, so depending on the column type, the condition changes case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists: // if the field is intrinsic, it always exists - if slices.Contains(IntrinsicFields, tblFieldName) || - slices.Contains(CalculatedFields, tblFieldName) || - slices.Contains(IntrinsicFieldsDeprecated, tblFieldName) || - slices.Contains(CalculatedFieldsDeprecated, tblFieldName) { + if slices.Contains(maps.Keys(IntrinsicFields), tblFieldName) || + slices.Contains(maps.Keys(CalculatedFields), tblFieldName) || + slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), tblFieldName) || + slices.Contains(maps.Keys(CalculatedFieldsDeprecated), tblFieldName) { return "true", nil } @@ -205,10 +206,10 @@ func (c *conditionBuilder) ConditionFor( if operator.AddDefaultExistsFilter() { // skip adding exists filter for intrinsic fields field, _ := c.fm.FieldFor(ctx, key) - if slices.Contains(IntrinsicFields, field) || - slices.Contains(IntrinsicFieldsDeprecated, field) || - slices.Contains(CalculatedFields, field) || - slices.Contains(CalculatedFieldsDeprecated, field) { + if slices.Contains(maps.Keys(IntrinsicFields), field) || + slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), field) || + slices.Contains(maps.Keys(CalculatedFields), field) || + slices.Contains(maps.Keys(CalculatedFieldsDeprecated), field) { return condition, nil } diff --git a/pkg/telemetrytraces/const.go b/pkg/telemetrytraces/const.go index d961016a88a5..bddd5f7063ab 100644 --- a/pkg/telemetrytraces/const.go +++ b/pkg/telemetrytraces/const.go @@ -3,89 +3,320 @@ package telemetrytraces import "github.com/SigNoz/signoz/pkg/types/telemetrytypes" var ( - IntrinsicFields = []string{ - "trace_id", - "span_id", - "trace_state", - "parent_span_id", - "flags", - "name", - "kind", - "kind_string", - "duration_nano", - "status_code", - "status_message", - "status_code_string", + IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{ + "trace_id": { + Name: "trace_id", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "span_id": { + Name: "span_id", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "trace_state": { + Name: "trace_state", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "parent_span_id": { + Name: "parent_span_id", + 
Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "flags": { + Name: "flags", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "name": { + Name: "name", + Description: "Name of the span", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "kind": { + Name: "kind", + Description: "Span kind enum (number). Use `kind_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "kind_string": { + Name: "kind_string", + Description: "Span kind enum (string). Known values are ['Client', 'Server', 'Internal', 'Producer', 'Consumer']. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-kind)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "duration_nano": { + Name: "duration_nano", + Description: "Span duration", + Unit: "ns", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "status_code": { + Name: "status_code", + Description: "Span status code enum (number). Use `status_code_string` instead. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "status_message": { + Name: "status_message", + Description: "Span status message. Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "status_code_string": { + Name: "status_code_string", + Description: "Span status code enum (string). 
Learn more [here](https://opentelemetry.io/docs/concepts/signals/traces/#span-status)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, } - IntrinsicFieldsDeprecated = []string{ - "traceID", - "spanID", - "parentSpanID", - "spanKind", - "durationNano", - "statusCode", - "statusMessage", - "statusCodeString", + IntrinsicFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{ + "traceID": { + Name: "traceID", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "spanID": { + Name: "spanID", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "parentSpanID": { + Name: "parentSpanID", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "spanKind": { + Name: "spanKind", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "durationNano": { + Name: "durationNano", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "statusCode": { + Name: "statusCode", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "statusMessage": { + Name: "statusMessage", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "statusCodeString": { + Name: "statusCodeString", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, } - CalculatedFields = []string{ - "response_status_code", - "external_http_url", - "http_url", - "external_http_method", - "http_method", - "http_host", - "db_name", - "db_operation", - "has_error", - "is_remote", + CalculatedFields = map[string]telemetrytypes.TelemetryFieldKey{ + "response_status_code": { + Name: "response_status_code", + Description: "Derived response status code from the HTTP/RPC status code attributes. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#response_status_code)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "external_http_url": { + Name: "external_http_url", + Description: "The hostname of the external HTTP URL. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_url)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "http_url": { + Name: "http_url", + Description: "HTTP URL of the request. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_url)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "external_http_method": { + Name: "external_http_method", + Description: "HTTP request method of client spans. 
Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#external_http_method)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "http_method": { + Name: "http_method", + Description: "The HTTP request method. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_method)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "http_host": { + Name: "http_host", + Description: "The HTTP host or server address. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#http_host)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "db_name": { + Name: "db_name", + Description: "The database name or namespace. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_name)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "db_operation": { + Name: "db_operation", + Description: "The database operation being performed. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#db_operation)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "has_error": { + Name: "has_error", + Description: "Whether the span has an error. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#has_error)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeBool, + }, + "is_remote": { + Name: "is_remote", + Description: "Whether the span is remote. 
Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#is_remote)", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeBool, + }, } - CalculatedFieldsDeprecated = []string{ - "responseStatusCode", - "externalHttpUrl", - "httpUrl", - "externalHttpMethod", - "httpMethod", - "httpHost", - "dbName", - "dbOperation", - "hasError", - "isRemote", + CalculatedFieldsDeprecated = map[string]telemetrytypes.TelemetryFieldKey{ + "responseStatusCode": { + Name: "responseStatusCode", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + "externalHttpUrl": { + Name: "externalHttpUrl", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "httpUrl": { + Name: "httpUrl", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "externalHttpMethod": { + Name: "externalHttpMethod", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "httpMethod": { + Name: "httpMethod", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "httpHost": { + Name: "httpHost", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "dbName": { + Name: "dbName", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "dbOperation": { + Name: "dbOperation", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "hasError": { + Name: "hasError", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeBool, + }, + "isRemote": { + Name: "isRemote", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeBool, + }, } SpanSearchScopeRoot = "isroot" SpanSearchScopeEntryPoint = "isentrypoint" DefaultFields = []telemetrytypes.TelemetryFieldKey{ { - Name: "timestamp", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "timestamp", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, }, { - Name: "span_id", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "span_id", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, }, { - Name: "trace_id", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "trace_id", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, }, { - Name: "name", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "name", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, }, { Name: "service.name", + Signal: telemetrytypes.SignalTraces, FieldContext: 
telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString, Materialized: true, }, { - Name: "duration_nano", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "duration_nano", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, }, { - Name: "response_status_code", - FieldContext: telemetrytypes.FieldContextSpan, + Name: "response_status_code", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, }, } ) diff --git a/pkg/telemetrytraces/statement_builder.go b/pkg/telemetrytraces/statement_builder.go index 9cb15d0ed89a..20df9162f170 100644 --- a/pkg/telemetrytraces/statement_builder.go +++ b/pkg/telemetrytraces/statement_builder.go @@ -305,10 +305,11 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery( // Constrain the main query to the rows that appear in the CTE. tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", ")) - sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) + sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) // Group by all dimensions - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) if query.Having != nil && query.Having.Expression != "" { rewriter := querybuilder.NewHavingExpressionRewriter() rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations) @@ -323,7 +324,8 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery( finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) } else { - sb.GroupBy("ALL") + sb.GroupBy("ts") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) if query.Having != nil && query.Having.Expression != "" { rewriter := querybuilder.NewHavingExpressionRewriter() rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations) @@ -412,7 +414,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery( } // Group by dimensions - sb.GroupBy("ALL") + sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...) // Add having clause if needed if query.Having != nil && query.Having.Expression != "" && !skipHaving { @@ -521,7 +523,7 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter( return "", nil, err } - sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)") + sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)") return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil } diff --git a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go index 07ca898be6e1..c89c14c93d45 100644 --- a/pkg/telemetrytraces/stmt_builder_test.go +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -59,7 +59,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil,