chore: more updates

This commit is contained in:
srikanthccv 2025-06-18 15:30:29 +05:30
parent f267ed8ad1
commit 8d03e35d84
47 changed files with 4579 additions and 493 deletions

View File

@ -75,15 +75,17 @@ comparison
| key NOT CONTAINS value | key NOT CONTAINS value
; ;
// in(...) or in[...] // in(...) or in[...] - now also supports variables
inClause inClause
: IN LPAREN valueList RPAREN : IN LPAREN valueList RPAREN
| IN LBRACK valueList RBRACK | IN LBRACK valueList RBRACK
| IN variable // NEW: support for IN $var, IN {{var}}, IN [[var]]
; ;
notInClause notInClause
: NOT IN LPAREN valueList RPAREN : NOT IN LPAREN valueList RPAREN
| NOT IN LBRACK valueList RBRACK | NOT IN LBRACK valueList RBRACK
| NOT IN variable // NEW: support for NOT IN $var, etc.
; ;
// List of values for in(...) or in[...] // List of values for in(...) or in[...]
@ -126,13 +128,21 @@ array
/* /*
* A 'value' can be a string literal (double or single-quoted), * A 'value' can be a string literal (double or single-quoted),
// a numeric literal, boolean, or a "bare" token as needed. // a numeric literal, boolean, a "bare" token, or a variable.
*/ */
value value
: QUOTED_TEXT : QUOTED_TEXT
| NUMBER | NUMBER
| BOOL | BOOL
| KEY | KEY
| variable // NEW: variables can be used as values
;
// NEW: Variable rule to support different variable syntaxes
variable
: DOLLAR_VAR
| CURLY_VAR
| SQUARE_VAR
; ;
/* /*
@ -190,6 +200,11 @@ BOOL
| [Ff][Aa][Ll][Ss][Ee] | [Ff][Aa][Ll][Ss][Ee]
; ;
// NEW: Variable token types
DOLLAR_VAR : '$' [a-zA-Z_] [a-zA-Z0-9._]* ;
CURLY_VAR : '{{' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* '}}' ;
SQUARE_VAR : '[[' [ \t]* '.'? [a-zA-Z_] [a-zA-Z0-9._]* [ \t]* ']]' ;
fragment SIGN : [+-] ; fragment SIGN : [+-] ;
// Numbers: optional sign, then digits, optional fractional part, // Numbers: optional sign, then digits, optional fractional part,

View File

@ -70,7 +70,7 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
} }
} }
name := r.URL.Query().Get("name") name := r.URL.Query().Get("searchText")
req = telemetrytypes.FieldKeySelector{ req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli, StartUnixMilli: startUnixMilli,
@ -92,8 +92,10 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request") return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
} }
name := r.URL.Query().Get("name")
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery") existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("value") value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails. // Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit")) limit, err := strconv.Atoi(r.URL.Query().Get("limit"))

File diff suppressed because one or more lines are too long

View File

@ -26,11 +26,14 @@ HAS=25
HASANY=26 HASANY=26
HASALL=27 HASALL=27
BOOL=28 BOOL=28
NUMBER=29 DOLLAR_VAR=29
QUOTED_TEXT=30 CURLY_VAR=30
KEY=31 SQUARE_VAR=31
WS=32 NUMBER=32
FREETEXT=33 QUOTED_TEXT=33
KEY=34
WS=35
FREETEXT=36
'('=1 '('=1
')'=2 ')'=2
'['=3 '['=3

File diff suppressed because one or more lines are too long

View File

@ -26,11 +26,14 @@ HAS=25
HASANY=26 HASANY=26
HASALL=27 HASALL=27
BOOL=28 BOOL=28
NUMBER=29 DOLLAR_VAR=29
QUOTED_TEXT=30 CURLY_VAR=30
KEY=31 SQUARE_VAR=31
WS=32 NUMBER=32
FREETEXT=33 QUOTED_TEXT=33
KEY=34
WS=35
FREETEXT=36
'('=1 '('=1
')'=2 ')'=2
'['=3 '['=3

View File

@ -117,6 +117,12 @@ func (s *BaseFilterQueryListener) EnterValue(ctx *ValueContext) {}
// ExitValue is called when production value is exited. // ExitValue is called when production value is exited.
func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {} func (s *BaseFilterQueryListener) ExitValue(ctx *ValueContext) {}
// EnterVariable is called when production variable is entered.
func (s *BaseFilterQueryListener) EnterVariable(ctx *VariableContext) {}
// ExitVariable is called when production variable is exited.
func (s *BaseFilterQueryListener) ExitVariable(ctx *VariableContext) {}
// EnterKey is called when production key is entered. // EnterKey is called when production key is entered.
func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {} func (s *BaseFilterQueryListener) EnterKey(ctx *KeyContext) {}

View File

@ -72,6 +72,10 @@ func (v *BaseFilterQueryVisitor) VisitValue(ctx *ValueContext) interface{} {
return v.VisitChildren(ctx) return v.VisitChildren(ctx)
} }
func (v *BaseFilterQueryVisitor) VisitVariable(ctx *VariableContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} { func (v *BaseFilterQueryVisitor) VisitKey(ctx *KeyContext) interface{} {
return v.VisitChildren(ctx) return v.VisitChildren(ctx)
} }

View File

@ -50,178 +50,213 @@ func filterquerylexerLexerInit() {
"", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS", "", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE", "NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR", "BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
"FREETEXT", "NUMBER", "QUOTED_TEXT", "KEY", "WS", "FREETEXT",
} }
staticData.RuleNames = []string{ staticData.RuleNames = []string{
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS", "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE", "NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR", "BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "HAS", "HASANY", "HASALL", "BOOL", "DOLLAR_VAR", "CURLY_VAR", "SQUARE_VAR",
"SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS",
"FREETEXT", "KEY", "WS", "DIGIT", "FREETEXT",
} }
staticData.PredictionContextCache = antlr.NewPredictionContextCache() staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{ staticData.serializedATN = []int32{
4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 0, 36, 404, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 1, 0, 1,
4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3,
1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 5, 97, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9,
1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 118, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1,
8, 13, 11, 13, 12, 13, 119, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 124, 8, 13, 11, 13, 12, 13, 125,
14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 137, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1,
8, 15, 11, 15, 12, 15, 138, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 143, 8, 15, 11, 15, 12, 15, 144,
16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1,
1, 17, 1, 17, 1, 17, 3, 17, 161, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17,
18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 167, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1,
3, 19, 178, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 184, 8, 19, 1, 20,
22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1,
1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25,
26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1,
1, 27, 1, 27, 3, 27, 221, 8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 226, 8, 29, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 227,
1, 29, 4, 29, 229, 8, 29, 11, 29, 12, 29, 230, 1, 29, 1, 29, 5, 29, 235, 8, 27, 1, 28, 1, 28, 1, 28, 5, 28, 232, 8, 28, 10, 28, 12, 28, 235, 9,
8, 29, 10, 29, 12, 29, 238, 9, 29, 3, 29, 240, 8, 29, 1, 29, 1, 29, 3, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 241, 8, 29, 10, 29, 12, 29, 244,
29, 244, 8, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 3, 29, 251, 9, 29, 1, 29, 3, 29, 247, 8, 29, 1, 29, 1, 29, 5, 29, 251, 8, 29, 10, 29,
8, 29, 1, 29, 3, 29, 254, 8, 29, 1, 29, 1, 29, 4, 29, 258, 8, 29, 11, 29, 12, 29, 254, 9, 29, 1, 29, 5, 29, 257, 8, 29, 10, 29, 12, 29, 260, 9, 29,
12, 29, 259, 1, 29, 1, 29, 3, 29, 264, 8, 29, 1, 29, 4, 29, 267, 8, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 269, 8, 30, 10,
11, 29, 12, 29, 268, 3, 29, 271, 8, 29, 3, 29, 273, 8, 29, 1, 30, 1, 30, 30, 12, 30, 272, 9, 30, 1, 30, 3, 30, 275, 8, 30, 1, 30, 1, 30, 5, 30,
1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 1, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 5, 30, 285, 8, 30, 10, 30,
30, 1, 30, 1, 30, 1, 30, 5, 30, 289, 8, 30, 10, 30, 12, 30, 292, 9, 30, 12, 30, 288, 9, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 3, 32, 296,
1, 30, 3, 30, 295, 8, 30, 1, 31, 1, 31, 5, 31, 299, 8, 31, 10, 31, 12, 8, 32, 1, 32, 4, 32, 299, 8, 32, 11, 32, 12, 32, 300, 1, 32, 1, 32, 5,
31, 302, 9, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 32, 305, 8, 32, 10, 32, 12, 32, 308, 9, 32, 3, 32, 310, 8, 32, 1, 32, 1,
1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 316, 8, 34, 10, 34, 12, 34, 319, 9, 32, 3, 32, 314, 8, 32, 1, 32, 4, 32, 317, 8, 32, 11, 32, 12, 32, 318, 3,
34, 1, 35, 4, 35, 322, 8, 35, 11, 35, 12, 35, 323, 1, 35, 1, 35, 1, 36, 32, 321, 8, 32, 1, 32, 3, 32, 324, 8, 32, 1, 32, 1, 32, 4, 32, 328, 8,
1, 36, 1, 37, 4, 37, 331, 8, 37, 11, 37, 12, 37, 332, 0, 0, 38, 1, 1, 3, 32, 11, 32, 12, 32, 329, 1, 32, 1, 32, 3, 32, 334, 8, 32, 1, 32, 4, 32,
2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 337, 8, 32, 11, 32, 12, 32, 338, 3, 32, 341, 8, 32, 3, 32, 343, 8, 32,
25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 349, 8, 33, 10, 33, 12, 33, 352, 9,
43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 29, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 359, 8, 33, 10, 33, 12, 33,
61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0, 75, 33, 1, 0, 30, 2, 362, 9, 33, 1, 33, 3, 33, 365, 8, 33, 1, 34, 1, 34, 5, 34, 369, 8, 34,
0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 10, 34, 12, 34, 372, 9, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1,
0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 386, 8, 37, 10, 37, 12, 37,
0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66, 66, 98, 98, 2, 0, 87, 389, 9, 37, 1, 38, 4, 38, 392, 8, 38, 11, 38, 12, 38, 393, 1, 38, 1, 38,
87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 1, 39, 1, 39, 1, 40, 4, 40, 401, 8, 40, 11, 40, 12, 40, 402, 0, 0, 41,
82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11,
67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20,
104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29,
102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 59, 30, 61, 31, 63, 0, 65, 32, 67, 33, 69, 0, 71, 0, 73, 0, 75, 34, 77,
92, 2, 0, 65, 90, 97, 122, 5, 0, 45, 45, 48, 58, 65, 90, 95, 95, 97, 122, 35, 79, 0, 81, 36, 1, 0, 32, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105,
3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110,
39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1, 110, 2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32,
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 2, 0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2,
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2,
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0,
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0,
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 3, 0, 65, 90, 95, 95, 97, 122,
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 5, 0, 46, 46, 48, 57, 65, 90, 95, 95, 97, 122, 2, 0, 43, 43, 45, 45, 2,
0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 2, 0, 65, 90, 97, 122, 5, 0, 45,
0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75, 45, 48, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0,
1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 0, 7, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93,
83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0, 93, 437, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1,
0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15,
1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0,
0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0,
1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0,
0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 193, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0,
1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1,
0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61,
1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0,
0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78, 77, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 1, 83, 1, 0, 0, 0, 3, 85, 1, 0, 0, 0,
5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0, 5, 87, 1, 0, 0, 0, 7, 89, 1, 0, 0, 0, 9, 91, 1, 0, 0, 0, 11, 96, 1, 0,
81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1, 0, 0, 13, 98, 1, 0, 0, 0, 15, 101, 1, 0, 0, 0, 17, 104, 1, 0, 0, 0, 19,
0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0, 106, 1, 0, 0, 0, 21, 109, 1, 0, 0, 0, 23, 111, 1, 0, 0, 0, 25, 114, 1,
88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1, 0, 0, 0, 27, 119, 1, 0, 0, 0, 29, 132, 1, 0, 0, 0, 31, 138, 1, 0, 0, 0,
0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0, 33, 152, 1, 0, 0, 0, 35, 160, 1, 0, 0, 0, 37, 168, 1, 0, 0, 0, 39, 175,
94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1, 1, 0, 0, 0, 41, 185, 1, 0, 0, 0, 43, 188, 1, 0, 0, 0, 45, 192, 1, 0, 0,
0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0, 0, 47, 196, 1, 0, 0, 0, 49, 199, 1, 0, 0, 0, 51, 203, 1, 0, 0, 0, 53, 210,
101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 1, 0, 0, 0, 55, 226, 1, 0, 0, 0, 57, 228, 1, 0, 0, 0, 59, 236, 1, 0, 0,
22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24, 0, 61, 264, 1, 0, 0, 0, 63, 292, 1, 0, 0, 0, 65, 342, 1, 0, 0, 0, 67, 364,
1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2, 1, 0, 0, 0, 69, 366, 1, 0, 0, 0, 71, 373, 1, 0, 0, 0, 73, 376, 1, 0, 0,
0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0, 0, 75, 380, 1, 0, 0, 0, 77, 391, 1, 0, 0, 0, 79, 397, 1, 0, 0, 0, 81, 400,
114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117, 1, 0, 0, 0, 83, 84, 5, 40, 0, 0, 84, 2, 1, 0, 0, 0, 85, 86, 5, 41, 0, 0,
116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 86, 4, 1, 0, 0, 0, 87, 88, 5, 91, 0, 0, 88, 6, 1, 0, 0, 0, 89, 90, 5, 93,
1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1, 0, 0, 90, 8, 1, 0, 0, 0, 91, 92, 5, 44, 0, 0, 92, 10, 1, 0, 0, 0, 93, 97,
0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0, 5, 61, 0, 0, 94, 95, 5, 61, 0, 0, 95, 97, 5, 61, 0, 0, 96, 93, 1, 0, 0,
126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129, 0, 96, 94, 1, 0, 0, 0, 97, 12, 1, 0, 0, 0, 98, 99, 5, 33, 0, 0, 99, 100,
130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7, 5, 61, 0, 0, 100, 14, 1, 0, 0, 0, 101, 102, 5, 60, 0, 0, 102, 103, 5, 62,
4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, 0, 0, 0, 103, 16, 1, 0, 0, 0, 104, 105, 5, 60, 0, 0, 105, 18, 1, 0, 0, 0,
0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0, 0, 108, 20, 1, 0, 0, 0, 109,
139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142, 110, 5, 62, 0, 0, 110, 22, 1, 0, 0, 0, 111, 112, 5, 62, 0, 0, 112, 113,
7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3, 5, 61, 0, 0, 113, 24, 1, 0, 0, 0, 114, 115, 7, 0, 0, 0, 115, 116, 7, 1,
0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0, 0, 0, 116, 117, 7, 2, 0, 0, 117, 118, 7, 3, 0, 0, 118, 26, 1, 0, 0, 0,
148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151, 119, 120, 7, 4, 0, 0, 120, 121, 7, 5, 0, 0, 121, 123, 7, 6, 0, 0, 122,
152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, 7, 124, 7, 7, 0, 0, 123, 122, 1, 0, 0, 0, 124, 125, 1, 0, 0, 0, 125, 123,
3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11, 1, 0, 0, 0, 125, 126, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 128, 7, 0,
0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0,
160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163, 131, 28, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134, 7, 0, 0, 0, 134, 135,
164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167, 7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3, 0, 0, 137, 30, 1, 0,
7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15, 0, 0, 138, 139, 7, 4, 0, 0, 139, 140, 7, 5, 0, 0, 140, 142, 7, 6, 0, 0,
0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0, 141, 143, 7, 7, 0, 0, 142, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144,
173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176, 142, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 1, 0, 0, 0, 146, 147,
178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40, 7, 1, 0, 0, 147, 148, 7, 0, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 2,
1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0, 0, 0, 150, 151, 7, 3, 0, 0, 151, 32, 1, 0, 0, 0, 152, 153, 7, 8, 0, 0,
0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0, 153, 154, 7, 3, 0, 0, 154, 155, 7, 6, 0, 0, 155, 156, 7, 9, 0, 0, 156,
185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188, 157, 7, 3, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159, 7, 4, 0, 0, 159, 34, 1,
189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192, 0, 0, 0, 160, 161, 7, 3, 0, 0, 161, 162, 7, 10, 0, 0, 162, 163, 7, 1, 0,
7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16, 0, 163, 164, 7, 11, 0, 0, 164, 166, 7, 6, 0, 0, 165, 167, 7, 11, 0, 0,
0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0, 166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 36, 1, 0, 0, 0, 168, 169,
198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201, 7, 12, 0, 0, 169, 170, 7, 3, 0, 0, 170, 171, 7, 13, 0, 0, 171, 172, 7,
202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205, 3, 0, 0, 172, 173, 7, 10, 0, 0, 173, 174, 7, 14, 0, 0, 174, 38, 1, 0, 0,
7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 0, 207, 208, 7, 0, 175, 176, 7, 15, 0, 0, 176, 177, 7, 5, 0, 0, 177, 178, 7, 4, 0, 0, 178,
16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0, 179, 7, 6, 0, 0, 179, 180, 7, 16, 0, 0, 180, 181, 7, 1, 0, 0, 181, 183,
0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0, 7, 4, 0, 0, 182, 184, 7, 11, 0, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0,
214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217, 0, 0, 184, 40, 1, 0, 0, 0, 185, 186, 7, 1, 0, 0, 186, 187, 7, 4, 0, 0,
218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211, 187, 42, 1, 0, 0, 0, 188, 189, 7, 4, 0, 0, 189, 190, 7, 5, 0, 0, 190, 191,
1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22, 7, 6, 0, 0, 191, 44, 1, 0, 0, 0, 192, 193, 7, 16, 0, 0, 193, 194, 7, 4,
0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0, 0, 0, 194, 195, 7, 17, 0, 0, 195, 46, 1, 0, 0, 0, 196, 197, 7, 5, 0, 0,
225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228, 197, 198, 7, 12, 0, 0, 198, 48, 1, 0, 0, 0, 199, 200, 7, 18, 0, 0, 200,
227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 201, 7, 16, 0, 0, 201, 202, 7, 11, 0, 0, 202, 50, 1, 0, 0, 0, 203, 204,
1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73, 7, 18, 0, 0, 204, 205, 7, 16, 0, 0, 205, 206, 7, 11, 0, 0, 206, 207, 7,
36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 16, 0, 0, 207, 208, 7, 4, 0, 0, 208, 209, 7, 19, 0, 0, 209, 52, 1, 0, 0,
236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 0, 210, 211, 7, 18, 0, 0, 211, 212, 7, 16, 0, 0, 212, 213, 7, 11, 0, 0,
232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 213, 214, 7, 16, 0, 0, 214, 215, 7, 0, 0, 0, 215, 216, 7, 0, 0, 0, 216,
7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 54, 1, 0, 0, 0, 217, 218, 7, 6, 0, 0, 218, 219, 7, 12, 0, 0, 219, 220,
0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0, 7, 20, 0, 0, 220, 227, 7, 3, 0, 0, 221, 222, 7, 21, 0, 0, 222, 223, 7,
0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 16, 0, 0, 223, 224, 7, 0, 0, 0, 224, 225, 7, 11, 0, 0, 225, 227, 7, 3,
249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 0, 0, 226, 217, 1, 0, 0, 0, 226, 221, 1, 0, 0, 0, 227, 56, 1, 0, 0, 0,
273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254, 228, 229, 5, 36, 0, 0, 229, 233, 7, 22, 0, 0, 230, 232, 7, 23, 0, 0, 231,
1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73, 230, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 233, 234,
36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 1, 0, 0, 0, 234, 58, 1, 0, 0, 0, 235, 233, 1, 0, 0, 0, 236, 237, 5, 123,
259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262, 0, 0, 237, 238, 5, 123, 0, 0, 238, 242, 1, 0, 0, 0, 239, 241, 7, 7, 0,
264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266, 0, 240, 239, 1, 0, 0, 0, 241, 244, 1, 0, 0, 0, 242, 240, 1, 0, 0, 0, 242,
1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1, 243, 1, 0, 0, 0, 243, 246, 1, 0, 0, 0, 244, 242, 1, 0, 0, 0, 245, 247,
0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0, 5, 46, 0, 0, 246, 245, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 248, 1, 0,
0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 0, 0, 248, 252, 7, 22, 0, 0, 249, 251, 7, 23, 0, 0, 250, 249, 1, 0, 0,
225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5, 0, 251, 254, 1, 0, 0, 0, 252, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253,
34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0, 258, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 257, 7, 7, 0, 0, 256, 255,
0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 1, 0, 0, 0, 257, 260, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 258, 259, 1, 0,
280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282, 0, 0, 259, 261, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 261, 262, 5, 125, 0,
280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289, 0, 262, 263, 5, 125, 0, 0, 263, 60, 1, 0, 0, 0, 264, 265, 5, 91, 0, 0,
8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1, 265, 266, 5, 91, 0, 0, 266, 270, 1, 0, 0, 0, 267, 269, 7, 7, 0, 0, 268,
0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 267, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271,
0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 46,
295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62, 0, 0, 274, 273, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0,
1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1, 276, 280, 7, 22, 0, 0, 277, 279, 7, 23, 0, 0, 278, 277, 1, 0, 0, 0, 279,
0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 286,
0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 285, 7, 7, 0, 0, 284, 283, 1, 0,
305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0,
5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63, 287, 289, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 290, 5, 93, 0, 0, 290,
31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65, 291, 5, 93, 0, 0, 291, 62, 1, 0, 0, 0, 292, 293, 7, 24, 0, 0, 293, 64,
32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0, 1, 0, 0, 0, 294, 296, 3, 63, 31, 0, 295, 294, 1, 0, 0, 0, 295, 296, 1,
0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 0, 0, 0, 296, 298, 1, 0, 0, 0, 297, 299, 3, 79, 39, 0, 298, 297, 1, 0,
318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7, 0, 0, 299, 300, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0,
27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0, 301, 309, 1, 0, 0, 0, 302, 306, 5, 46, 0, 0, 303, 305, 3, 79, 39, 0, 304,
0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326, 303, 1, 0, 0, 0, 305, 308, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 306, 307,
72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 309, 302, 1, 0,
29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 320, 1, 0, 0, 0, 311, 313, 7, 3, 0, 0,
0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160, 312, 314, 3, 63, 31, 0, 313, 312, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314,
177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272, 316, 1, 0, 0, 0, 315, 317, 3, 79, 39, 0, 316, 315, 1, 0, 0, 0, 317, 318,
278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0, 1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 321, 1, 0,
0, 0, 320, 311, 1, 0, 0, 0, 320, 321, 1, 0, 0, 0, 321, 343, 1, 0, 0, 0,
322, 324, 3, 63, 31, 0, 323, 322, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324,
325, 1, 0, 0, 0, 325, 327, 5, 46, 0, 0, 326, 328, 3, 79, 39, 0, 327, 326,
1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 327, 1, 0, 0, 0, 329, 330, 1, 0,
0, 0, 330, 340, 1, 0, 0, 0, 331, 333, 7, 3, 0, 0, 332, 334, 3, 63, 31,
0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 336, 1, 0, 0, 0, 335,
337, 3, 79, 39, 0, 336, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 336,
1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 341, 1, 0, 0, 0, 340, 331, 1, 0,
0, 0, 340, 341, 1, 0, 0, 0, 341, 343, 1, 0, 0, 0, 342, 295, 1, 0, 0, 0,
342, 323, 1, 0, 0, 0, 343, 66, 1, 0, 0, 0, 344, 350, 5, 34, 0, 0, 345,
349, 8, 25, 0, 0, 346, 347, 5, 92, 0, 0, 347, 349, 9, 0, 0, 0, 348, 345,
1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0,
0, 0, 350, 351, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0,
353, 365, 5, 34, 0, 0, 354, 360, 5, 39, 0, 0, 355, 359, 8, 26, 0, 0, 356,
357, 5, 92, 0, 0, 357, 359, 9, 0, 0, 0, 358, 355, 1, 0, 0, 0, 358, 356,
1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 360, 361, 1, 0,
0, 0, 361, 363, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0, 363, 365, 5, 39, 0, 0,
364, 344, 1, 0, 0, 0, 364, 354, 1, 0, 0, 0, 365, 68, 1, 0, 0, 0, 366, 370,
7, 27, 0, 0, 367, 369, 7, 28, 0, 0, 368, 367, 1, 0, 0, 0, 369, 372, 1,
0, 0, 0, 370, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 70, 1, 0, 0,
0, 372, 370, 1, 0, 0, 0, 373, 374, 5, 91, 0, 0, 374, 375, 5, 93, 0, 0,
375, 72, 1, 0, 0, 0, 376, 377, 5, 91, 0, 0, 377, 378, 5, 42, 0, 0, 378,
379, 5, 93, 0, 0, 379, 74, 1, 0, 0, 0, 380, 387, 3, 69, 34, 0, 381, 382,
5, 46, 0, 0, 382, 386, 3, 69, 34, 0, 383, 386, 3, 71, 35, 0, 384, 386,
3, 73, 36, 0, 385, 381, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 384, 1,
0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0,
0, 388, 76, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 392, 7, 29, 0, 0, 391,
390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394,
1, 0, 0, 0, 394, 395, 1, 0, 0, 0, 395, 396, 6, 38, 0, 0, 396, 78, 1, 0,
0, 0, 397, 398, 7, 30, 0, 0, 398, 80, 1, 0, 0, 0, 399, 401, 8, 31, 0, 0,
400, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402,
403, 1, 0, 0, 0, 403, 82, 1, 0, 0, 0, 39, 0, 96, 125, 144, 166, 183, 226,
233, 242, 246, 252, 258, 270, 274, 280, 286, 295, 300, 306, 309, 313, 318,
320, 323, 329, 333, 338, 340, 342, 348, 350, 358, 360, 364, 370, 385, 387,
393, 402, 1, 6, 0, 0,
} }
deserializer := antlr.NewATNDeserializer(nil) deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN) staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@ -290,9 +325,12 @@ const (
FilterQueryLexerHASANY = 26 FilterQueryLexerHASANY = 26
FilterQueryLexerHASALL = 27 FilterQueryLexerHASALL = 27
FilterQueryLexerBOOL = 28 FilterQueryLexerBOOL = 28
FilterQueryLexerNUMBER = 29 FilterQueryLexerDOLLAR_VAR = 29
FilterQueryLexerQUOTED_TEXT = 30 FilterQueryLexerCURLY_VAR = 30
FilterQueryLexerKEY = 31 FilterQueryLexerSQUARE_VAR = 31
FilterQueryLexerWS = 32 FilterQueryLexerNUMBER = 32
FilterQueryLexerFREETEXT = 33 FilterQueryLexerQUOTED_TEXT = 33
FilterQueryLexerKEY = 34
FilterQueryLexerWS = 35
FilterQueryLexerFREETEXT = 36
) )

View File

@ -56,6 +56,9 @@ type FilterQueryListener interface {
// EnterValue is called when entering the value production. // EnterValue is called when entering the value production.
EnterValue(c *ValueContext) EnterValue(c *ValueContext)
// EnterVariable is called when entering the variable production.
EnterVariable(c *VariableContext)
// EnterKey is called when entering the key production. // EnterKey is called when entering the key production.
EnterKey(c *KeyContext) EnterKey(c *KeyContext)
@ -107,6 +110,9 @@ type FilterQueryListener interface {
// ExitValue is called when exiting the value production. // ExitValue is called when exiting the value production.
ExitValue(c *ValueContext) ExitValue(c *ValueContext)
// ExitVariable is called when exiting the variable production.
ExitVariable(c *VariableContext)
// ExitKey is called when exiting the key production. // ExitKey is called when exiting the key production.
ExitKey(c *KeyContext) ExitKey(c *KeyContext)
} }

File diff suppressed because it is too large Load Diff

View File

@ -56,6 +56,9 @@ type FilterQueryVisitor interface {
// Visit a parse tree produced by FilterQueryParser#value. // Visit a parse tree produced by FilterQueryParser#value.
VisitValue(ctx *ValueContext) interface{} VisitValue(ctx *ValueContext) interface{}
// Visit a parse tree produced by FilterQueryParser#variable.
VisitVariable(ctx *VariableContext) interface{}
// Visit a parse tree produced by FilterQueryParser#key. // Visit a parse tree produced by FilterQueryParser#key.
VisitKey(ctx *KeyContext) interface{} VisitKey(ctx *KeyContext) interface{}
} }

View File

@ -18,6 +18,7 @@ type builderQuery[T any] struct {
telemetryStore telemetrystore.TelemetryStore telemetryStore telemetrystore.TelemetryStore
stmtBuilder qbtypes.StatementBuilder[T] stmtBuilder qbtypes.StatementBuilder[T]
spec qbtypes.QueryBuilderQuery[T] spec qbtypes.QueryBuilderQuery[T]
variables map[string]qbtypes.VariableItem
fromMS uint64 fromMS uint64
toMS uint64 toMS uint64
@ -32,11 +33,13 @@ func newBuilderQuery[T any](
spec qbtypes.QueryBuilderQuery[T], spec qbtypes.QueryBuilderQuery[T],
tr qbtypes.TimeRange, tr qbtypes.TimeRange,
kind qbtypes.RequestType, kind qbtypes.RequestType,
variables map[string]qbtypes.VariableItem,
) *builderQuery[T] { ) *builderQuery[T] {
return &builderQuery[T]{ return &builderQuery[T]{
telemetryStore: telemetryStore, telemetryStore: telemetryStore,
stmtBuilder: stmtBuilder, stmtBuilder: stmtBuilder,
spec: spec, spec: spec,
variables: variables,
fromMS: tr.From, fromMS: tr.From,
toMS: tr.To, toMS: tr.To,
kind: kind, kind: kind,
@ -174,7 +177,7 @@ func (q *builderQuery[T]) Execute(ctx context.Context) (*qbtypes.Result, error)
return q.executeWindowList(ctx) return q.executeWindowList(ctx)
} }
stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec) stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec, q.variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -278,7 +281,7 @@ func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Resul
q.spec.Offset = 0 q.spec.Offset = 0
q.spec.Limit = need q.spec.Limit = need
stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec) stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec, q.variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -3,8 +3,11 @@ package querier
import ( import (
"context" "context"
"fmt" "fmt"
"slices"
"sort" "sort"
"strings"
"github.com/SigNoz/govaluate"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
) )
@ -106,12 +109,15 @@ func postProcessBuilderQuery[T any](
q *querier, q *querier,
result *qbtypes.Result, result *qbtypes.Result,
query qbtypes.QueryBuilderQuery[T], query qbtypes.QueryBuilderQuery[T],
_ *qbtypes.QueryRangeRequest, req *qbtypes.QueryRangeRequest,
) *qbtypes.Result { ) *qbtypes.Result {
// Apply functions // Apply functions
if len(query.Functions) > 0 { if len(query.Functions) > 0 {
result = q.applyFunctions(result, query.Functions) // For builder queries, use the query's own step
step := query.StepInterval.Duration.Milliseconds()
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
result = q.applyFunctions(result, functions)
} }
return result return result
@ -130,7 +136,10 @@ func postProcessMetricQuery(
} }
if len(query.Functions) > 0 { if len(query.Functions) > 0 {
result = q.applyFunctions(result, query.Functions) // For metric queries, use the query's own step
step := query.StepInterval.Duration.Milliseconds()
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
result = q.applyFunctions(result, functions)
} }
// Apply reduce to for scalar request type // Apply reduce to for scalar request type
@ -222,6 +231,11 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
if result != nil { if result != nil {
results[name] = result results[name] = result
} }
} else if req.RequestType == qbtypes.RequestTypeScalar {
result := q.processScalarFormula(ctx, results, formula, req)
if result != nil {
results[name] = result
}
} }
} }
@ -233,7 +247,7 @@ func (q *querier) processTimeSeriesFormula(
ctx context.Context, ctx context.Context,
results map[string]*qbtypes.Result, results map[string]*qbtypes.Result,
formula qbtypes.QueryBuilderFormula, formula qbtypes.QueryBuilderFormula,
_ *qbtypes.QueryRangeRequest, req *qbtypes.QueryRangeRequest,
) *qbtypes.Result { ) *qbtypes.Result {
// Prepare time series data for formula evaluation // Prepare time series data for formula evaluation
timeSeriesData := make(map[string]*qbtypes.TimeSeriesData) timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
@ -278,12 +292,218 @@ func (q *querier) processTimeSeriesFormula(
} }
if len(formula.Functions) > 0 { if len(formula.Functions) > 0 {
result = q.applyFunctions(result, formula.Functions) // For formulas, calculate GCD of steps from queries in the expression
step := q.calculateFormulaStep(formula.Expression, req)
functions := q.prepareFillZeroArgsWithStep(formula.Functions, req, step)
result = q.applyFunctions(result, functions)
} }
return result return result
} }
// processScalarFormula handles formula evaluation for scalar data
//
// NOTE: This implementation has a known limitation with formulas that reference
// specific aggregations by index (e.g., "A.0", "A.1") or multiple aggregations
// from the same query (e.g., "A.0 * 2 + A.1"). The FormulaEvaluator's series
// matching logic doesn't work correctly when converting scalar data to time series
// format for these cases.
//
// Currently supported:
// - Formulas between different queries: "A / B", "A * 2 + B"
// - Simple references: "A" (defaults to first aggregation)
//
// Not supported:
// - Indexed aggregation references: "A.0", "A.1"
// - Multiple aggregations from same query: "A.0 + A.1"
//
// To properly support this, we would need to either:
// 1. Fix the FormulaEvaluator's series lookup logic for scalar-converted data
// 2. Implement a dedicated scalar formula evaluator
func (q *querier) processScalarFormula(
ctx context.Context,
results map[string]*qbtypes.Result,
formula qbtypes.QueryBuilderFormula,
req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
// Convert scalar data to time series format with zero timestamp
timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
for queryName, result := range results {
if scalarData, ok := result.Value.(*qbtypes.ScalarData); ok {
// Convert scalar to time series
tsData := &qbtypes.TimeSeriesData{
QueryName: scalarData.QueryName,
Aggregations: make([]*qbtypes.AggregationBucket, 0),
}
// Find aggregation columns
aggColumns := make(map[int]int) // aggregation index -> column index
for colIdx, col := range scalarData.Columns {
if col.Type == qbtypes.ColumnTypeAggregation {
aggColumns[int(col.AggregationIndex)] = colIdx
}
}
// Group rows by their label sets
type labeledRowData struct {
labels []*qbtypes.Label
values map[int]float64 // aggregation index -> value
}
// First pass: group all rows by their label combination
rowsByLabels := make(map[string]*labeledRowData)
for _, row := range scalarData.Data {
// Build labels from group columns
labels := make([]*qbtypes.Label, 0)
for i, col := range scalarData.Columns {
if col.Type == qbtypes.ColumnTypeGroup && i < len(row) {
labels = append(labels, &qbtypes.Label{
Key: col.TelemetryFieldKey,
Value: row[i],
})
}
}
labelKey := qbtypes.GetUniqueSeriesKey(labels)
// Get or create row data
rowData, exists := rowsByLabels[labelKey]
if !exists {
rowData = &labeledRowData{
labels: labels,
values: make(map[int]float64),
}
rowsByLabels[labelKey] = rowData
}
// Store all aggregation values from this row
for aggIdx, colIdx := range aggColumns {
if colIdx < len(row) {
if val, ok := toFloat64(row[colIdx]); ok {
rowData.values[aggIdx] = val
}
}
}
}
// Get sorted label keys for consistent ordering
labelKeys := make([]string, 0, len(rowsByLabels))
for key := range rowsByLabels {
labelKeys = append(labelKeys, key)
}
slices.Sort(labelKeys)
// Create aggregation buckets
aggIndices := make([]int, 0, len(aggColumns))
for aggIdx := range aggColumns {
aggIndices = append(aggIndices, aggIdx)
}
slices.Sort(aggIndices)
// For each aggregation, create a bucket with series in consistent order
for _, aggIdx := range aggIndices {
colIdx := aggColumns[aggIdx]
bucket := &qbtypes.AggregationBucket{
Index: aggIdx,
Alias: scalarData.Columns[colIdx].Name,
Meta: scalarData.Columns[colIdx].Meta,
Series: make([]*qbtypes.TimeSeries, 0),
}
// Create series in the same order (by label key)
for _, labelKey := range labelKeys {
rowData := rowsByLabels[labelKey]
// Only create series if we have a value for this aggregation
if val, exists := rowData.values[aggIdx]; exists {
series := &qbtypes.TimeSeries{
Labels: rowData.labels,
Values: []*qbtypes.TimeSeriesValue{{
Timestamp: 0,
Value: val,
}},
}
bucket.Series = append(bucket.Series, series)
}
}
tsData.Aggregations = append(tsData.Aggregations, bucket)
}
timeSeriesData[queryName] = tsData
}
}
// Create formula evaluator
canDefaultZero := make(map[string]bool)
evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
if err != nil {
q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
return nil
}
// Evaluate the formula
formulaSeries, err := evaluator.EvaluateFormula(timeSeriesData)
if err != nil {
q.logger.ErrorContext(ctx, "failed to evaluate formula", "error", err, "formula", formula.Name)
return nil
}
// Convert back to scalar format
scalarResult := &qbtypes.ScalarData{
QueryName: formula.Name,
Columns: make([]*qbtypes.ColumnDescriptor, 0),
Data: make([][]any, 0),
}
// Build columns from first series
if len(formulaSeries) > 0 && len(formulaSeries[0].Labels) > 0 {
// Add group columns
for _, label := range formulaSeries[0].Labels {
scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
TelemetryFieldKey: label.Key,
QueryName: formula.Name,
Type: qbtypes.ColumnTypeGroup,
})
}
}
// Add result column
scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "__result"},
QueryName: formula.Name,
AggregationIndex: 0,
Type: qbtypes.ColumnTypeAggregation,
})
// Build rows
for _, series := range formulaSeries {
row := make([]any, len(scalarResult.Columns))
// Add group values
for i, label := range series.Labels {
if i < len(row)-1 {
row[i] = label.Value
}
}
// Add aggregation value (from single value at timestamp 0)
if len(series.Values) > 0 {
row[len(row)-1] = series.Values[0].Value
} else {
row[len(row)-1] = "n/a"
}
scalarResult.Data = append(scalarResult.Data, row)
}
return &qbtypes.Result{
Value: scalarResult,
}
}
// filterDisabledQueries removes results for disabled queries // filterDisabledQueries removes results for disabled queries
func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result { func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
filtered := make(map[string]*qbtypes.Result) filtered := make(map[string]*qbtypes.Result)
@ -650,3 +870,98 @@ func toFloat64(v any) (float64, bool) {
} }
return 0, false return 0, false
} }
// gcd calculates the greatest common divisor
func gcd(a, b int64) int64 {
if b == 0 {
return a
}
return gcd(b, a%b)
}
// prepareFillZeroArgsWithStep prepares fillZero function arguments with a specific step
func (q *querier) prepareFillZeroArgsWithStep(functions []qbtypes.Function, req *qbtypes.QueryRangeRequest, step int64) []qbtypes.Function {
// Check if we need to modify any functions
needsCopy := false
for _, fn := range functions {
if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
needsCopy = true
break
}
}
// If no fillZero functions need arguments, return original slice
if !needsCopy {
return functions
}
// Only copy if we need to modify
updatedFunctions := make([]qbtypes.Function, len(functions))
copy(updatedFunctions, functions)
// Process each function
for i, fn := range updatedFunctions {
if fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0 {
// Set the arguments: start, end, step
fn.Args = []qbtypes.FunctionArg{
{Value: float64(req.Start)},
{Value: float64(req.End)},
{Value: float64(step)},
}
updatedFunctions[i] = fn
}
}
return updatedFunctions
}
// calculateFormulaStep calculates the GCD of steps from queries referenced in the formula
func (q *querier) calculateFormulaStep(expression string, req *qbtypes.QueryRangeRequest) int64 {
// Use govaluate to parse the expression and extract variables
// This is the same library used by FormulaEvaluator
parsedExpr, err := govaluate.NewEvaluableExpression(expression)
if err != nil {
// If we can't parse the expression, use default
return 60000
}
// Get the variables from the parsed expression
variables := parsedExpr.Vars()
// Extract base query names (e.g., "A" from "A.0" or "A.my_alias")
queryNames := make(map[string]bool)
for _, variable := range variables {
// Split by "." to get the base query name
parts := strings.Split(variable, ".")
if len(parts) > 0 {
queryNames[parts[0]] = true
}
}
var steps []int64
// Collect steps only from queries referenced in the formula
for _, query := range req.CompositeQuery.Queries {
info := getqueryInfo(query.Spec)
// Check if this query is referenced in the formula
if !info.Disabled && queryNames[info.Name] && info.Step.Duration > 0 {
stepMs := info.Step.Duration.Milliseconds()
if stepMs > 0 {
steps = append(steps, stepMs)
}
}
}
// If no steps found, use a default (60 seconds)
if len(steps) == 0 {
return 60000
}
// Calculate GCD of all steps
result := steps[0]
for i := 1; i < len(steps); i++ {
result = gcd(result, steps[i])
}
return result
}

View File

@ -0,0 +1,230 @@
package querier
import (
"testing"
"time"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
func TestPrepareFillZeroArgsWithStep(t *testing.T) {
q := &querier{}
req := &qbtypes.QueryRangeRequest{
Start: 1000000,
End: 2000000,
}
tests := []struct {
name string
functions []qbtypes.Function
step int64
checkArgs bool
}{
{
name: "fillZero without args",
functions: []qbtypes.Function{
{
Name: qbtypes.FunctionNameFillZero,
Args: []qbtypes.FunctionArg{},
},
},
step: 30000, // 30 seconds
checkArgs: true,
},
{
name: "fillZero with existing args",
functions: []qbtypes.Function{
{
Name: qbtypes.FunctionNameFillZero,
Args: []qbtypes.FunctionArg{
{Value: 500000.0},
{Value: 1500000.0},
{Value: 15000.0},
},
},
},
step: 60000,
checkArgs: false, // Should not modify existing args
},
{
name: "other function should not be modified",
functions: []qbtypes.Function{
{
Name: qbtypes.FunctionNameAbsolute,
Args: []qbtypes.FunctionArg{},
},
},
step: 60000,
checkArgs: false,
},
{
name: "no copy when fillZero already has args",
functions: []qbtypes.Function{
{
Name: qbtypes.FunctionNameFillZero,
Args: []qbtypes.FunctionArg{
{Value: 1000.0},
{Value: 2000.0},
{Value: 500.0},
},
},
{
Name: qbtypes.FunctionNameAbsolute,
},
},
step: 60000,
checkArgs: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := q.prepareFillZeroArgsWithStep(tt.functions, req, tt.step)
if len(result) != len(tt.functions) {
t.Fatalf("Expected %d functions, got %d", len(tt.functions), len(result))
}
// Check if no copy was made when not needed
if tt.name == "no copy when fillZero already has args" || tt.name == "other function should not be modified" {
// Verify that the result is the same slice (no copy)
if &result[0] != &tt.functions[0] {
t.Errorf("Expected no copy, but a copy was made")
}
}
for _, fn := range result {
if fn.Name == qbtypes.FunctionNameFillZero && tt.checkArgs {
if len(fn.Args) != 3 {
t.Errorf("Expected 3 args for fillZero, got %d", len(fn.Args))
}
// Check start
if start, ok := fn.Args[0].Value.(float64); !ok || start != float64(req.Start) {
t.Errorf("Expected start %f, got %v", float64(req.Start), fn.Args[0].Value)
}
// Check end
if end, ok := fn.Args[1].Value.(float64); !ok || end != float64(req.End) {
t.Errorf("Expected end %f, got %v", float64(req.End), fn.Args[1].Value)
}
// Check step
if step, ok := fn.Args[2].Value.(float64); !ok || step != float64(tt.step) {
t.Errorf("Expected step %f, got %v", float64(tt.step), fn.Args[2].Value)
}
}
}
})
}
}
func TestCalculateFormulaStep(t *testing.T) {
tests := []struct {
name string
expression string
req *qbtypes.QueryRangeRequest
expected int64
}{
{
name: "single query reference",
expression: "A * 2",
req: &qbtypes.QueryRangeRequest{
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "A",
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "B",
StepInterval: qbtypes.Step{Duration: 120 * time.Second},
},
},
},
},
},
expected: 60000, // Only A is referenced
},
{
name: "multiple query references",
expression: "A + B",
req: &qbtypes.QueryRangeRequest{
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "A",
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "B",
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
},
},
},
},
},
expected: 30000, // GCD of 30s and 60s
},
{
name: "complex expression",
expression: "(A + B) / C",
req: &qbtypes.QueryRangeRequest{
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "A",
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "B",
StepInterval: qbtypes.Step{Duration: 120 * time.Second},
},
},
{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "C",
StepInterval: qbtypes.Step{Duration: 180 * time.Second},
},
},
},
},
},
expected: 60000, // GCD of 60s, 120s, and 180s
},
{
name: "no query references",
expression: "100",
req: &qbtypes.QueryRangeRequest{
CompositeQuery: qbtypes.CompositeQuery{
Queries: []qbtypes.QueryEnvelope{},
},
},
expected: 60000, // Default
},
}
q := &querier{}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := q.calculateFormulaStep(tt.expression, tt.req)
if result != tt.expected {
t.Errorf("Expected step %d, got %d", tt.expected, result)
}
})
}
}

View File

@ -197,13 +197,13 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]: case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
spec.ShiftBy = extractShiftFromBuilderQuery(spec) spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType) bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
queries[spec.Name] = bq queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval steps[spec.Name] = spec.StepInterval
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]: case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
spec.ShiftBy = extractShiftFromBuilderQuery(spec) spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType) bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
queries[spec.Name] = bq queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval steps[spec.Name] = spec.StepInterval
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]: case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
@ -216,7 +216,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
} }
spec.ShiftBy = extractShiftFromBuilderQuery(spec) spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType) bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
queries[spec.Name] = bq queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval steps[spec.Name] = spec.StepInterval
default: default:
@ -401,15 +401,15 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
case *builderQuery[qbtypes.TraceAggregation]: case *builderQuery[qbtypes.TraceAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec) qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind) adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind) return newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.LogAggregation]: case *builderQuery[qbtypes.LogAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec) qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind) adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind) return newBuilderQuery(q.telemetryStore, q.logStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.MetricAggregation]: case *builderQuery[qbtypes.MetricAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec) qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind) adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind) return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, qt.spec, adjustedTimeRange, qt.kind, qt.variables)
default: default:
return nil return nil
} }

View File

@ -79,6 +79,7 @@ func newProvider(
traceConditionBuilder, traceConditionBuilder,
resourceFilterStmtBuilder, resourceFilterStmtBuilder,
traceAggExprRewriter, traceAggExprRewriter,
telemetryStore,
) )
// Create log statement builder // Create log statement builder

View File

@ -0,0 +1,191 @@
package querybuilder
import (
"fmt"
"regexp"
"strings"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// HavingExpressionRewriter rewrites having expressions to use the correct SQL column names
type HavingExpressionRewriter struct {
// Map of user-friendly names to SQL column names
columnMap map[string]string
}
// NewHavingExpressionRewriter creates a new having expression rewriter
func NewHavingExpressionRewriter() *HavingExpressionRewriter {
return &HavingExpressionRewriter{
columnMap: make(map[string]string),
}
}
// RewriteForTraces rewrites having expression for trace queries
func (r *HavingExpressionRewriter) RewriteForTraces(expression string, aggregations []qbtypes.TraceAggregation) string {
r.buildTraceColumnMap(aggregations)
return r.rewriteExpression(expression)
}
// RewriteForLogs rewrites having expression for log queries
func (r *HavingExpressionRewriter) RewriteForLogs(expression string, aggregations []qbtypes.LogAggregation) string {
r.buildLogColumnMap(aggregations)
return r.rewriteExpression(expression)
}
// RewriteForMetrics rewrites having expression for metric queries
func (r *HavingExpressionRewriter) RewriteForMetrics(expression string, aggregations []qbtypes.MetricAggregation) string {
r.buildMetricColumnMap(aggregations)
return r.rewriteExpression(expression)
}
// buildTraceColumnMap builds the column mapping for trace aggregations
func (r *HavingExpressionRewriter) buildTraceColumnMap(aggregations []qbtypes.TraceAggregation) {
r.columnMap = make(map[string]string)
for idx, agg := range aggregations {
sqlColumn := fmt.Sprintf("__result_%d", idx)
// Map alias if present
if agg.Alias != "" {
r.columnMap[agg.Alias] = sqlColumn
}
// Map expression
r.columnMap[agg.Expression] = sqlColumn
// Map __result{number} format
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
// For single aggregation, also map __result
if len(aggregations) == 1 {
r.columnMap["__result"] = sqlColumn
}
}
}
// buildLogColumnMap builds the column mapping for log aggregations
func (r *HavingExpressionRewriter) buildLogColumnMap(aggregations []qbtypes.LogAggregation) {
r.columnMap = make(map[string]string)
for idx, agg := range aggregations {
sqlColumn := fmt.Sprintf("__result_%d", idx)
// Map alias if present
if agg.Alias != "" {
r.columnMap[agg.Alias] = sqlColumn
}
// Map expression
r.columnMap[agg.Expression] = sqlColumn
// Map __result{number} format
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
// For single aggregation, also map __result
if len(aggregations) == 1 {
r.columnMap["__result"] = sqlColumn
}
}
}
// buildMetricColumnMap builds the column mapping for metric aggregations
func (r *HavingExpressionRewriter) buildMetricColumnMap(aggregations []qbtypes.MetricAggregation) {
r.columnMap = make(map[string]string)
// For metrics, we typically have a single aggregation that results in "value" column
// But we still need to handle the mapping for consistency
for idx, agg := range aggregations {
// For metrics, the column is usually "value" in the final select
sqlColumn := "value"
// Map different metric formats
metricName := agg.MetricName
// Don't map the plain metric name - it's ambiguous
// r.columnMap[metricName] = sqlColumn
// Map with space aggregation
if agg.SpaceAggregation.StringValue() != "" {
r.columnMap[fmt.Sprintf("%s(%s)", agg.SpaceAggregation.StringValue(), metricName)] = sqlColumn
}
// Map with time aggregation
if agg.TimeAggregation.StringValue() != "" {
r.columnMap[fmt.Sprintf("%s(%s)", agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
}
// Map with both aggregations
if agg.TimeAggregation.StringValue() != "" && agg.SpaceAggregation.StringValue() != "" {
r.columnMap[fmt.Sprintf("%s(%s(%s))", agg.SpaceAggregation.StringValue(), agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
}
// If no aggregations specified, map the plain metric name
if agg.TimeAggregation.StringValue() == "" && agg.SpaceAggregation.StringValue() == "" {
r.columnMap[metricName] = sqlColumn
}
// Map __result format
r.columnMap["__result"] = sqlColumn
r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
}
}
// rewriteExpression rewrites the having expression using the column map
func (r *HavingExpressionRewriter) rewriteExpression(expression string) string {
// First, handle quoted strings to avoid replacing within them
quotedStrings := make(map[string]string)
quotePattern := regexp.MustCompile(`'[^']*'|"[^"]*"`)
quotedIdx := 0
expression = quotePattern.ReplaceAllStringFunc(expression, func(match string) string {
placeholder := fmt.Sprintf("__QUOTED_%d__", quotedIdx)
quotedStrings[placeholder] = match
quotedIdx++
return placeholder
})
// Sort column mappings by length (descending) to handle longer names first
// This prevents partial replacements (e.g., "count" being replaced in "count_distinct")
type mapping struct {
from string
to string
}
mappings := make([]mapping, 0, len(r.columnMap))
for from, to := range r.columnMap {
mappings = append(mappings, mapping{from: from, to: to})
}
// Sort by length descending
for i := 0; i < len(mappings); i++ {
for j := i + 1; j < len(mappings); j++ {
if len(mappings[j].from) > len(mappings[i].from) {
mappings[i], mappings[j] = mappings[j], mappings[i]
}
}
}
// Apply replacements
for _, m := range mappings {
// For function expressions (containing parentheses), we need special handling
if strings.Contains(m.from, "(") {
// Escape special regex characters in the function name
escapedFrom := regexp.QuoteMeta(m.from)
pattern := regexp.MustCompile(`\b` + escapedFrom)
expression = pattern.ReplaceAllString(expression, m.to)
} else {
// Use word boundaries to ensure we're replacing complete identifiers
pattern := regexp.MustCompile(`\b` + regexp.QuoteMeta(m.from) + `\b`)
expression = pattern.ReplaceAllString(expression, m.to)
}
}
// Restore quoted strings
for placeholder, original := range quotedStrings {
expression = strings.Replace(expression, placeholder, original, 1)
}
return expression
}

View File

@ -0,0 +1,281 @@
package querybuilder
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// TestHavingExpressionRewriter_RewriteForTraces verifies that trace having
// expressions are rewritten so that aliases, raw aggregation expressions,
// __result, and __result{N} references all become the positional
// __result_N column names, while quoted literals stay untouched.
func TestHavingExpressionRewriter_RewriteForTraces(t *testing.T) {
	tests := []struct {
		name         string
		expression   string // input having expression as written by the user
		aggregations []qbtypes.TraceAggregation
		expected     string // expression after rewriting to __result_N columns
	}{
		{
			name:       "single aggregation with __result",
			expression: "__result > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 100",
		},
		{
			name:       "single aggregation with alias",
			expression: "total_count > 100 AND total_count < 1000",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: "total_count"},
			},
			expected: "__result_0 > 100 AND __result_0 < 1000",
		},
		{
			name:       "multiple aggregations with aliases",
			expression: "error_count > 10 OR success_count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "countIf(status = 'error')", Alias: "error_count"},
				{Expression: "countIf(status = 'success')", Alias: "success_count"},
			},
			expected: "__result_0 > 10 OR __result_1 > 100",
		},
		{
			name:       "expression reference",
			expression: "count() > 50",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 50",
		},
		{
			name:       "__result{number} format",
			expression: "__result0 > 10 AND __result1 < 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "sum(duration)", Alias: ""},
			},
			expected: "__result_0 > 10 AND __result_1 < 100",
		},
		{
			name:       "complex expression with parentheses",
			expression: "(total > 100 AND errors < 10) OR (total < 50 AND errors = 0)",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: "total"},
				{Expression: "countIf(error = true)", Alias: "errors"},
			},
			expected: "(__result_0 > 100 AND __result_1 < 10) OR (__result_0 < 50 AND __result_1 = 0)",
		},
		{
			// Content inside quotes must never be rewritten.
			name:       "with quoted strings",
			expression: "status = 'active' AND count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "status", Alias: "status"},
				{Expression: "count()", Alias: "count"},
			},
			expected: "__result_0 = 'active' AND __result_1 > 100",
		},
		{
			// "count" must not be replaced inside "count_distinct".
			name:       "avoid partial replacements",
			expression: "count_distinct > 10 AND count > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count_distinct(user_id)", Alias: "count_distinct"},
				{Expression: "count()", Alias: "count"},
			},
			expected: "__result_0 > 10 AND __result_1 > 100",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}
// TestHavingExpressionRewriter_RewriteForLogs verifies the log variant of
// the having rewriter: aliases and raw aggregation expressions are mapped
// to positional __result_N column names.
func TestHavingExpressionRewriter_RewriteForLogs(t *testing.T) {
	tests := []struct {
		name         string
		expression   string // input having expression
		aggregations []qbtypes.LogAggregation
		expected     string // expression after rewriting
	}{
		{
			name:       "single aggregation with __result",
			expression: "__result > 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
			},
			expected: "__result_0 > 1000",
		},
		{
			// Mix of alias reference and raw-expression reference.
			name:       "multiple aggregations with aliases and expressions",
			expression: "total_logs > 1000 AND avg(size) < 1024",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total_logs"},
				{Expression: "avg(size)", Alias: ""},
			},
			expected: "__result_0 > 1000 AND __result_1 < 1024",
		},
		{
			name:       "complex boolean expression",
			expression: "(error_logs > 100 AND error_logs < 1000) OR warning_logs > 5000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "countIf(level = 'error')", Alias: "error_logs"},
				{Expression: "countIf(level = 'warning')", Alias: "warning_logs"},
			},
			expected: "(__result_0 > 100 AND __result_0 < 1000) OR __result_1 > 5000",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForLogs(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}
// TestHavingExpressionRewriter_RewriteForMetrics verifies the metric variant:
// every supported reference form (aggregated metric name, bare metric name,
// or __result) collapses to the single "value" column.
func TestHavingExpressionRewriter_RewriteForMetrics(t *testing.T) {
	tests := []struct {
		name         string
		expression   string // input having expression
		aggregations []qbtypes.MetricAggregation
		expected     string // expression after rewriting; metrics always map to "value"
	}{
		{
			name:       "metric with space aggregation",
			expression: "avg(cpu_usage) > 80",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "cpu_usage",
					SpaceAggregation: metrictypes.SpaceAggregationAvg,
				},
			},
			expected: "value > 80",
		},
		{
			name:       "metric with time aggregation",
			expression: "rate(requests) > 1000",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:      "requests",
					TimeAggregation: metrictypes.TimeAggregationRate,
				},
			},
			expected: "value > 1000",
		},
		{
			// Nested form sum(rate(...)) when both aggregations are set.
			name:       "metric with both aggregations",
			expression: "sum(rate(requests)) > 5000",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "requests",
					TimeAggregation:  metrictypes.TimeAggregationRate,
					SpaceAggregation: metrictypes.SpaceAggregationSum,
				},
			},
			expected: "value > 5000",
		},
		{
			name:       "metric with __result",
			expression: "__result < 100",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "memory_usage",
					SpaceAggregation: metrictypes.SpaceAggregationMax,
				},
			},
			expected: "value < 100",
		},
		{
			name:       "metric name without aggregation",
			expression: "temperature > 30",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName: "temperature",
				},
			},
			expected: "value > 30",
		},
		{
			name:       "complex expression with parentheses",
			expression: "(avg(cpu_usage) > 80 AND avg(cpu_usage) < 95) OR __result > 99",
			aggregations: []qbtypes.MetricAggregation{
				{
					MetricName:       "cpu_usage",
					SpaceAggregation: metrictypes.SpaceAggregationAvg,
				},
			},
			expected: "(value > 80 AND value < 95) OR value > 99",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForMetrics(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}
// TestHavingExpressionRewriter_EdgeCases covers boundary behavior of the
// rewriter: empty input, unknown identifiers left as-is, and rewrite
// targets appearing inside single- or double-quoted literals (which must
// not be modified).
func TestHavingExpressionRewriter_EdgeCases(t *testing.T) {
	tests := []struct {
		name         string
		expression   string
		aggregations []qbtypes.TraceAggregation
		expected     string
	}{
		{
			name:         "empty expression",
			expression:   "",
			aggregations: []qbtypes.TraceAggregation{},
			expected:     "",
		},
		{
			// Identifiers with no matching aggregation pass through unchanged.
			name:       "no matching columns",
			expression: "unknown_column > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "count()", Alias: "total"},
			},
			expected: "unknown_column > 100",
		},
		{
			// "count() > 100" inside the quoted literal must survive verbatim.
			name:       "expression within quoted string",
			expression: "status = 'count() > 100' AND total > 100",
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "status", Alias: "status"},
				{Expression: "count()", Alias: "total"},
			},
			expected: "__result_0 = 'count() > 100' AND __result_1 > 100",
		},
		{
			name:       "double quotes",
			expression: `name = "test" AND count > 10`,
			aggregations: []qbtypes.TraceAggregation{
				{Expression: "name", Alias: "name"},
				{Expression: "count()", Alias: "count"},
			},
			expected: `__result_0 = "test" AND __result_1 > 10`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rewriter := NewHavingExpressionRewriter()
			result := rewriter.RewriteForTraces(tt.expression, tt.aggregations)
			if result != tt.expected {
				t.Errorf("Expected: %s, Got: %s", tt.expected, result)
			}
		})
	}
}

View File

@ -95,6 +95,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
end uint64, end uint64,
requestType qbtypes.RequestType, requestType qbtypes.RequestType,
query qbtypes.QueryBuilderQuery[T], query qbtypes.QueryBuilderQuery[T],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
config, exists := signalConfigs[b.signal] config, exists := signalConfigs[b.signal]
if !exists { if !exists {
@ -111,7 +112,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
return nil, err return nil, err
} }
if err := b.addConditions(ctx, q, start, end, query, keys); err != nil { if err := b.addConditions(ctx, q, start, end, query, keys, variables); err != nil {
return nil, err return nil, err
} }
@ -129,6 +130,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
start, end uint64, start, end uint64,
query qbtypes.QueryBuilderQuery[T], query qbtypes.QueryBuilderQuery[T],
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) error { ) error {
// Add filter condition if present // Add filter condition if present
if query.Filter != nil && query.Filter.Expression != "" { if query.Filter != nil && query.Filter.Expression != "" {
@ -139,6 +141,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
ConditionBuilder: b.conditionBuilder, ConditionBuilder: b.conditionBuilder,
FieldKeys: keys, FieldKeys: keys,
SkipFullTextFilter: true, SkipFullTextFilter: true,
Variables: variables,
}) })
if err != nil { if err != nil {

View File

@ -0,0 +1,81 @@
package querybuilder
import (
"context"
"testing"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
)
// TestWhereClauseVisitorVariableResolution verifies that a visitor built
// with Variables in its options wires up a variable resolver, and that the
// resolver handles scalar values, array values, and the __all__ sentinel.
//
// Fixes: the original duplicated the NotNil assertion on
// visitor.variableResolver and never exercised the declared "status"
// variable or asserted the resolved value for the __all__ case.
func TestWhereClauseVisitorVariableResolution(t *testing.T) {
	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"all": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
	}

	t.Run("visitor resolves variable in value", func(t *testing.T) {
		sb := sqlbuilder.NewSelectBuilder()
		opts := FilterExprVisitorOpts{
			FieldMapper:      &simpleFieldMapper{},
			ConditionBuilder: &simpleConditionBuilder{},
			FieldKeys:        make(map[string][]*telemetrytypes.TelemetryFieldKey),
			Builder:          sb,
			Variables:        variables,
		}

		visitor := newFilterExpressionVisitor(opts)
		// The resolver must be initialized whenever variables are supplied.
		assert.NotNil(t, visitor.variableResolver)

		// Scalar variable resolves to its value and keeps the filter.
		value, skipFilter, err := visitor.variableResolver.ResolveVariable("service")
		assert.NoError(t, err)
		assert.Equal(t, "payment-service", value)
		assert.False(t, skipFilter)

		// Array-valued custom variable resolves to the full list.
		value, skipFilter, err = visitor.variableResolver.ResolveVariable("status")
		assert.NoError(t, err)
		assert.Equal(t, []string{"200", "201"}, value)
		assert.False(t, skipFilter)

		// The __all__ dynamic variable yields no value and signals that the
		// filter should be skipped entirely.
		value, skipFilter, err = visitor.variableResolver.ResolveVariable("all")
		assert.NoError(t, err)
		assert.Nil(t, value)
		assert.True(t, skipFilter)
	})
}
// Simple mock implementations for testing

// simpleFieldMapper is a minimal field-mapper stub: every method resolves a
// telemetry field to its bare name, with no table/column translation.
type simpleFieldMapper struct{}

// FieldFor returns the field's name unchanged.
func (m *simpleFieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (string, error) {
	return key.Name, nil
}

// ColumnFor reports no backing schema column for any field.
func (m *simpleFieldMapper) ColumnFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
	return nil, nil
}

// ColumnExpressionFor returns the field's name as its column expression;
// the keys lookup table is ignored.
func (m *simpleFieldMapper) ColumnExpressionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, error) {
	return key.Name, nil
}
// simpleConditionBuilder is a stub condition builder that always emits a
// plain equality condition via the sql builder, regardless of the
// requested operator.
type simpleConditionBuilder struct{}

// ConditionFor builds `<name> = <value>`; the operator argument is ignored.
func (m *simpleConditionBuilder) ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	return sb.Equal(key.Name, value), nil
}

View File

@ -0,0 +1,145 @@
package querybuilder
import (
"fmt"
"regexp"
"strings"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// VariableResolver handles variable substitution in query expressions.
// It maps bare variable names (e.g. "service" in "$service" or
// "{{service}}") to the concrete values supplied with the query request.
type VariableResolver struct {
	// variables is keyed by variable name, without any $/{{}}/[[]] syntax.
	variables map[string]qbtypes.VariableItem
}

// NewVariableResolver creates a new VariableResolver over the given
// name -> value mapping. A nil map is allowed; every lookup then fails
// with a "not found" error.
func NewVariableResolver(variables map[string]qbtypes.VariableItem) *VariableResolver {
	return &VariableResolver{
		variables: variables,
	}
}
// Variable patterns, kept in sync with the grammar's variable tokens:
//
//	{{.var}} / {{var}} / {{ var }}  — CURLY_VAR allows [ \t]* padding
//	[[.var]] / [[var]] / [[ var ]]  — SQUARE_VAR allows [ \t]* padding
//	$var                            — DOLLAR_VAR
//
// The single capture group in each pattern is the bare variable name.
// The grammar's CURLY_VAR/SQUARE_VAR tokens accept optional spaces/tabs
// around the name; the previous patterns rejected them, so grammar-valid
// references like "{{ service }}" failed resolver-side detection.
var variablePatterns = []*regexp.Regexp{
	regexp.MustCompile(`\{\{[ \t]*\.?(\w+)[ \t]*\}\}`), // {{.var}}, {{var}}, {{ var }}
	regexp.MustCompile(`\[\[[ \t]*\.?(\w+)[ \t]*\]\]`), // [[.var]], [[var]], [[ var ]]
	regexp.MustCompile(`\$(\w+)`),                      // $var
}
// IsVariableReference reports whether value is, in its entirety, a single
// variable reference ($var, {{var}}, or [[var]]). On a match it also
// returns the bare variable name. A variable embedded in a longer string
// (partial match) does not count.
func (r *VariableResolver) IsVariableReference(value string) (bool, string) {
	for _, pattern := range variablePatterns {
		m := pattern.FindStringSubmatch(value)
		if len(m) < 2 || m[0] != value {
			continue
		}
		return true, m[1]
	}
	return false, ""
}
// ResolveVariable looks up varName and returns its value. The second
// result is true when the variable is a dynamic variable carrying the
// special __all__ sentinel, meaning the caller should drop the filter
// entirely instead of comparing against a value.
func (r *VariableResolver) ResolveVariable(varName string) (any, bool, error) {
	item, ok := r.variables[varName]
	if !ok {
		return nil, false, fmt.Errorf("variable '%s' not found", varName)
	}

	// Dynamic variables may hold __all__ either as a bare string or as a
	// single-element list; both mean "match everything", i.e. skip filter.
	if item.Type == qbtypes.DynamicVariableType {
		skip := false
		switch v := item.Value.(type) {
		case string:
			skip = v == "__all__"
		case []any:
			if len(v) == 1 {
				s, isStr := v[0].(string)
				skip = isStr && s == "__all__"
			}
		case []string:
			skip = len(v) == 1 && v[0] == "__all__"
		}
		if skip {
			return nil, true, nil
		}
	}

	return item.Value, false, nil
}
// ResolveFilterExpression resolves variables in a filter expression.
// It returns the resolved expression and whether the filter as a whole
// should be skipped (any referenced variable resolving to __all__ skips
// the entire filter). Unknown variables are left in place untouched.
func (r *VariableResolver) ResolveFilterExpression(expression string) (string, bool, error) {
	if expression == "" {
		return expression, false, nil
	}
	// Fast path: the whole (trimmed) expression is a single variable
	// reference, e.g. the user passed just "{{service}}".
	if isVar, varName := r.IsVariableReference(strings.TrimSpace(expression)); isVar {
		value, skipFilter, err := r.ResolveVariable(varName)
		if err != nil {
			return "", false, err
		}
		if skipFilter {
			return "", true, nil
		}
		// Convert value to its filter-expression string representation.
		return formatValue(value), false, nil
	}
	// General path: scan for embedded variable references and substitute
	// each one. NOTE: matches are found against the ORIGINAL expression
	// while replacements accumulate in resolvedExpr; this keeps the scan
	// stable even as substituted text changes lengths.
	resolvedExpr := expression
	for _, pattern := range variablePatterns {
		matches := pattern.FindAllStringSubmatch(expression, -1)
		for _, match := range matches {
			if len(match) > 1 {
				varName := match[1]
				value, skipFilter, err := r.ResolveVariable(varName)
				if err != nil {
					// Unknown variable: leave the reference as-is rather
					// than failing the whole expression.
					continue
				}
				if skipFilter {
					// Any __all__ variable short-circuits: the entire
					// filter is skipped, not just this clause.
					return "", true, nil
				}
				// Replace every occurrence of this reference (match[0])
				// with the formatted value.
				resolvedExpr = strings.ReplaceAll(resolvedExpr, match[0], formatValue(value))
			}
		}
	}
	return resolvedExpr, false, nil
}
// formatValue renders a resolved variable value as a literal usable inside
// a filter expression: strings are single-quoted with embedded quotes
// doubled, slices become bracketed comma-separated lists (recursing for
// []any elements), and anything else uses default Go formatting.
func formatValue(value any) string {
	quote := func(s string) string {
		return "'" + strings.ReplaceAll(s, "'", "''") + "'"
	}

	switch v := value.(type) {
	case string:
		return quote(v)
	case []string:
		quoted := make([]string, len(v))
		for i := range v {
			quoted[i] = quote(v[i])
		}
		return "[" + strings.Join(quoted, ", ") + "]"
	case []any:
		rendered := make([]string, len(v))
		for i := range v {
			rendered[i] = formatValue(v[i])
		}
		return "[" + strings.Join(rendered, ", ") + "]"
	default:
		return fmt.Sprintf("%v", v)
	}
}

View File

@ -0,0 +1,317 @@
package querybuilder
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/stretchr/testify/assert"
)
// TestVariableResolver_IsVariableReference checks recognition of the three
// variable syntaxes ({{var}}, [[var]], $var, with optional leading dot)
// and that partial/embedded matches are rejected. The resolver needs no
// variable map for pure syntax checks, so nil is passed.
func TestVariableResolver_IsVariableReference(t *testing.T) {
	r := NewVariableResolver(nil)
	tests := []struct {
		name    string
		value   string // candidate text
		isVar   bool   // whether it is a whole-string variable reference
		varName string // extracted bare name when isVar is true
	}{
		{
			name:    "double curly with dot",
			value:   "{{.myVar}}",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double curly without dot",
			value:   "{{myVar}}",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double square with dot",
			value:   "[[.myVar]]",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "double square without dot",
			value:   "[[myVar]]",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "dollar sign",
			value:   "$myVar",
			isVar:   true,
			varName: "myVar",
		},
		{
			name:    "not a variable",
			value:   "myVar",
			isVar:   false,
			varName: "",
		},
		{
			// Variables embedded in longer text are not whole references.
			name:    "partial match",
			value:   "prefix{{myVar}}suffix",
			isVar:   false,
			varName: "",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			isVar, varName := r.IsVariableReference(tt.value)
			assert.Equal(t, tt.isVar, isVar)
			assert.Equal(t, tt.varName, varName)
		})
	}
}
// TestVariableResolver_ResolveVariable checks value lookup per variable
// type, the __all__ skip-filter sentinel (string and single-element array
// forms), and the error on unknown names.
func TestVariableResolver_ResolveVariable(t *testing.T) {
	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"env": {
			Type:  qbtypes.TextBoxVariableType,
			Value: "production",
		},
		"all_services": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
		"all_array": {
			Type:  qbtypes.DynamicVariableType,
			Value: []string{"__all__"},
		},
	}
	r := NewVariableResolver(variables)
	tests := []struct {
		name      string
		varName   string
		wantValue any  // expected resolved value (nil when skipped/error)
		wantSkip  bool // expected skip-filter signal
		wantErr   bool // expected lookup failure
	}{
		{
			name:      "query variable",
			varName:   "service",
			wantValue: "payment-service",
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "custom variable array",
			varName:   "status",
			wantValue: []string{"200", "201"},
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "textbox variable",
			varName:   "env",
			wantValue: "production",
			wantSkip:  false,
			wantErr:   false,
		},
		{
			name:      "dynamic variable with __all__",
			varName:   "all_services",
			wantValue: nil,
			wantSkip:  true,
			wantErr:   false,
		},
		{
			name:      "dynamic variable with __all__ in array",
			varName:   "all_array",
			wantValue: nil,
			wantSkip:  true,
			wantErr:   false,
		},
		{
			name:      "non-existent variable",
			varName:   "unknown",
			wantValue: nil,
			wantSkip:  false,
			wantErr:   true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			value, skipFilter, err := r.ResolveVariable(tt.varName)
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantValue, value)
				assert.Equal(t, tt.wantSkip, skipFilter)
			}
		})
	}
}
// TestVariableResolver_ResolveFilterExpression checks whole-expression
// substitution: single references, references embedded in larger
// expressions, all three syntaxes, the __all__ skip signal, and that
// unknown variables leave the expression unchanged.
func TestVariableResolver_ResolveFilterExpression(t *testing.T) {
	variables := map[string]qbtypes.VariableItem{
		"service": {
			Type:  qbtypes.QueryVariableType,
			Value: "payment-service",
		},
		"status": {
			Type:  qbtypes.CustomVariableType,
			Value: []string{"200", "201"},
		},
		"env": {
			Type:  qbtypes.TextBoxVariableType,
			Value: "production",
		},
		"all": {
			Type:  qbtypes.DynamicVariableType,
			Value: "__all__",
		},
	}
	r := NewVariableResolver(variables)
	tests := []struct {
		name           string
		expression     string // input filter expression
		wantExpression string // expression after substitution
		wantSkip       bool   // whether the whole filter should be dropped
		wantErr        bool
	}{
		{
			name:           "simple variable reference",
			expression:     "{{service}}",
			wantExpression: "'payment-service'",
			wantSkip:       false,
			wantErr:        false,
		},
		{
			// Substituted values are already quoted by formatValue, so a
			// reference inside double quotes gains nested quoting.
			name:           "expression with variable",
			expression:     `service.name = "{{service}}"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "expression with array variable",
			expression:     "status_code IN {{status}}",
			wantExpression: `status_code IN ['200', '201']`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "multiple variables",
			expression:     `service.name = "{{service}}" AND environment = "{{env}}"`,
			wantExpression: `service.name = "'payment-service'" AND environment = "'production'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "dollar variable syntax",
			expression:     `service.name = "$service"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "double square brackets",
			expression:     `service.name = "[[service]]"`,
			wantExpression: `service.name = "'payment-service'"`,
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "__all__ variable should skip filter",
			expression:     "service.name = {{all}}",
			wantExpression: "",
			wantSkip:       true,
			wantErr:        false,
		},
		{
			name:           "expression with unknown variable",
			expression:     "service.name = {{unknown}}",
			wantExpression: "service.name = {{unknown}}", // unchanged
			wantSkip:       false,
			wantErr:        false,
		},
		{
			name:           "no variables",
			expression:     "service.name = 'static-value'",
			wantExpression: "service.name = 'static-value'",
			wantSkip:       false,
			wantErr:        false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			resolved, skipFilter, err := r.ResolveFilterExpression(tt.expression)
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantExpression, resolved)
				assert.Equal(t, tt.wantSkip, skipFilter)
			}
		})
	}
}
// TestFormatValue checks the literal rendering of resolved variable
// values: single-quoting with quote doubling for strings, bracketed lists
// for slices, and default formatting for other types.
func TestFormatValue(t *testing.T) {
	tests := []struct {
		name  string
		value any    // value to format
		want  string // expected filter-expression literal
	}{
		{
			name:  "string value",
			value: "test",
			want:  "'test'",
		},
		{
			// Embedded single quotes are doubled (SQL-style escaping).
			name:  "string with quotes",
			value: "test's value",
			want:  "'test''s value'",
		},
		{
			name:  "string array",
			value: []string{"a", "b", "c"},
			want:  "['a', 'b', 'c']",
		},
		{
			// Mixed []any elements are formatted recursively.
			name:  "interface array",
			value: []any{"a", 123, "c"},
			want:  "['a', 123, 'c']",
		},
		{
			name:  "number",
			value: 123,
			want:  "123",
		},
		{
			name:  "boolean",
			value: true,
			want:  "true",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := formatValue(tt.value)
			assert.Equal(t, tt.want, got)
		})
	}
}

View File

@ -29,6 +29,7 @@ type filterExpressionVisitor struct {
jsonKeyToKey qbtypes.JsonKeyToFieldFunc jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool skipResourceFilter bool
skipFullTextFilter bool skipFullTextFilter bool
variableResolver *VariableResolver
} }
type FilterExprVisitorOpts struct { type FilterExprVisitorOpts struct {
@ -41,10 +42,16 @@ type FilterExprVisitorOpts struct {
JsonKeyToKey qbtypes.JsonKeyToFieldFunc JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool SkipResourceFilter bool
SkipFullTextFilter bool SkipFullTextFilter bool
Variables map[string]qbtypes.VariableItem
} }
// newFilterExpressionVisitor creates a new filterExpressionVisitor // newFilterExpressionVisitor creates a new filterExpressionVisitor
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor { func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
var variableResolver *VariableResolver
if opts.Variables != nil && len(opts.Variables) > 0 {
variableResolver = NewVariableResolver(opts.Variables)
}
return &filterExpressionVisitor{ return &filterExpressionVisitor{
fieldMapper: opts.FieldMapper, fieldMapper: opts.FieldMapper,
conditionBuilder: opts.ConditionBuilder, conditionBuilder: opts.ConditionBuilder,
@ -55,6 +62,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
jsonKeyToKey: opts.JsonKeyToKey, jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter, skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter, skipFullTextFilter: opts.SkipFullTextFilter,
variableResolver: variableResolver,
} }
} }
@ -161,6 +169,8 @@ func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
return v.VisitValue(t) return v.VisitValue(t)
case *grammar.KeyContext: case *grammar.KeyContext:
return v.VisitKey(t) return v.VisitKey(t)
case *grammar.VariableContext:
return v.VisitVariable(t)
default: default:
return "" return ""
} }
@ -378,6 +388,11 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
if len(values) > 0 { if len(values) > 0 {
value := v.Visit(values[0]) value := v.Visit(values[0])
// Check if we should skip this filter due to __all__ variable
if strVal, ok := value.(string); ok && strVal == "__SKIP_FILTER__" {
return "true" // Return always true condition to skip filter
}
var op qbtypes.FilterOperator var op qbtypes.FilterOperator
// Handle each type of comparison // Handle each type of comparison
@ -433,12 +448,58 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
// VisitInClause handles IN expressions // VisitInClause handles IN expressions
func (v *filterExpressionVisitor) VisitInClause(ctx *grammar.InClauseContext) any { func (v *filterExpressionVisitor) VisitInClause(ctx *grammar.InClauseContext) any {
return v.Visit(ctx.ValueList()) // Check if it's a variable
if ctx.Variable() != nil {
value := v.Visit(ctx.Variable())
// If the variable resolved to "__SKIP_FILTER__", return empty array
if skipVal, ok := value.(string); ok && skipVal == "__SKIP_FILTER__" {
return []any{}
}
// If it's already an array, return it
if arr, ok := value.([]any); ok {
return arr
}
// Otherwise, wrap single value in array
return []any{value}
}
// Handle regular value list
if ctx.ValueList() != nil {
return v.Visit(ctx.ValueList())
}
return []any{}
} }
// VisitNotInClause handles NOT IN expressions // VisitNotInClause handles NOT IN expressions
func (v *filterExpressionVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any { func (v *filterExpressionVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any {
return v.Visit(ctx.ValueList()) // Check if it's a variable
if ctx.Variable() != nil {
value := v.Visit(ctx.Variable())
// If the variable resolved to "__SKIP_FILTER__", return empty array
if skipVal, ok := value.(string); ok && skipVal == "__SKIP_FILTER__" {
return []any{}
}
// If it's already an array, return it
if arr, ok := value.([]any); ok {
return arr
}
// Otherwise, wrap single value in array
return []any{value}
}
// Handle regular value list
if ctx.ValueList() != nil {
return v.Visit(ctx.ValueList())
}
return []any{}
} }
// VisitValueList handles comma-separated value lists // VisitValueList handles comma-separated value lists
@ -568,12 +629,79 @@ func (v *filterExpressionVisitor) VisitArray(ctx *grammar.ArrayContext) any {
return v.Visit(ctx.ValueList()) return v.Visit(ctx.ValueList())
} }
// VisitValue handles literal values: strings, numbers, booleans // VisitVariable handles variable resolution
func (v *filterExpressionVisitor) VisitVariable(ctx *grammar.VariableContext) any {
var varName string
var varText string
// Extract variable name based on syntax
if ctx.DOLLAR_VAR() != nil {
varText = ctx.DOLLAR_VAR().GetText()
varName = varText[1:] // Remove $
} else if ctx.CURLY_VAR() != nil {
varText = ctx.CURLY_VAR().GetText()
// Remove {{ }} and optional whitespace/dots
varName = strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(varText, "{{"), "}}"))
varName = strings.TrimPrefix(varName, ".")
} else if ctx.SQUARE_VAR() != nil {
varText = ctx.SQUARE_VAR().GetText()
// Remove [[ ]] and optional whitespace/dots
varName = strings.TrimSpace(strings.TrimSuffix(strings.TrimPrefix(varText, "[["), "]]"))
varName = strings.TrimPrefix(varName, ".")
} else {
v.errors = append(v.errors, "unknown variable type")
return nil
}
// If no variable resolver is provided, return the variable text
if v.variableResolver == nil {
v.errors = append(v.errors, fmt.Sprintf("variable %s used but no variable resolver provided", varText))
return varText
}
// Resolve the variable
resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable %s: %v", varText, err))
return nil
}
if skipFilter {
return "__SKIP_FILTER__"
}
return resolvedValue
}
// VisitValue handles literal values: strings, numbers, booleans, variables
func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any { func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// Check if this is a variable first
if ctx.Variable() != nil {
return v.Visit(ctx.Variable())
}
if ctx.QUOTED_TEXT() != nil { if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText() txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value // trim quotes and check for variable
return trimQuotes(txt) value := trimQuotes(txt)
// Check if this is a variable reference
if v.variableResolver != nil {
if isVar, varName := v.variableResolver.IsVariableReference(value); isVar {
resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable: %s", err.Error()))
return value
}
if skipFilter {
// Return a special marker to indicate filter should be skipped
return "__SKIP_FILTER__"
}
return resolvedValue
}
}
return value
} else if ctx.NUMBER() != nil { } else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64) number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil { if err != nil {
@ -590,7 +718,25 @@ func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
// When the user writes an expression like `service.name=redis` // When the user writes an expression like `service.name=redis`
// The `redis` part is a VALUE context but parsed as a KEY token // The `redis` part is a VALUE context but parsed as a KEY token
// so we return the text as is // so we return the text as is
return ctx.KEY().GetText() keyText := ctx.KEY().GetText()
// Check if this is a variable reference
if v.variableResolver != nil {
if isVar, varName := v.variableResolver.IsVariableReference(keyText); isVar {
resolvedValue, skipFilter, err := v.variableResolver.ResolveVariable(varName)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to resolve variable: %s", err.Error()))
return keyText
}
if skipFilter {
// Return a special marker to indicate filter should be skipped
return "__SKIP_FILTER__"
}
return resolvedValue
}
}
return keyText
} }
return "" // Should not happen with valid input return "" // Should not happen with valid input

View File

@ -61,6 +61,7 @@ func (b *logQueryStatementBuilder) Build(
end uint64, end uint64,
requestType qbtypes.RequestType, requestType qbtypes.RequestType,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
start = querybuilder.ToNanoSecs(start) start = querybuilder.ToNanoSecs(start)
@ -77,11 +78,11 @@ func (b *logQueryStatementBuilder) Build(
switch requestType { switch requestType {
case qbtypes.RequestTypeRaw: case qbtypes.RequestTypeRaw:
return b.buildListQuery(ctx, q, query, start, end, keys) return b.buildListQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeTimeSeries: case qbtypes.RequestTypeTimeSeries:
return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys) return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeScalar: case qbtypes.RequestTypeScalar:
return b.buildScalarQuery(ctx, q, query, start, end, keys, false) return b.buildScalarQuery(ctx, q, query, start, end, keys, variables, false)
} }
return nil, fmt.Errorf("unsupported request type: %s", requestType) return nil, fmt.Errorf("unsupported request type: %s", requestType)
@ -130,6 +131,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
@ -137,7 +139,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" { } else if frag != "" {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -153,7 +155,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions // Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -196,6 +198,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
@ -203,7 +206,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" { } else if frag != "" {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -246,7 +249,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
} }
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -254,10 +257,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
var finalSQL string var finalSQL string
var finalArgs []any var finalArgs []any
if query.Limit > 0 { if query.Limit > 0 && len(query.GroupBy) > 0 {
// build the scalar “top/bottom-N” query in its own builder. // build the scalar “top/bottom-N” query in its own builder.
cteSB := sqlbuilder.NewSelectBuilder() cteSB := sqlbuilder.NewSelectBuilder()
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true) cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, variables, true)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -272,7 +275,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Group by all dimensions // Group by all dimensions
sb.GroupBy("ALL") sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
combinedArgs := append(allGroupByArgs, allAggChArgs...) combinedArgs := append(allGroupByArgs, allAggChArgs...)
@ -286,7 +292,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
} else { } else {
sb.GroupBy("ALL") sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
combinedArgs := append(allGroupByArgs, allAggChArgs...) combinedArgs := append(allGroupByArgs, allAggChArgs...)
@ -312,6 +321,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
skipResourceCTE bool, skipResourceCTE bool,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
@ -320,7 +330,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" && !skipResourceCTE { } else if frag != "" && !skipResourceCTE {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -365,7 +375,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions // Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -375,7 +385,10 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
// Add having clause if needed // Add having clause if needed
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
// Add order by // Add order by
@ -414,11 +427,12 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
// buildFilterCondition builds SQL condition from filter expression // buildFilterCondition builds SQL condition from filter expression
func (b *logQueryStatementBuilder) addFilterCondition( func (b *logQueryStatementBuilder) addFilterCondition(
_ context.Context, ctx context.Context,
sb *sqlbuilder.SelectBuilder, sb *sqlbuilder.SelectBuilder,
start, end uint64, start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) ([]string, error) { ) ([]string, error) {
var filterWhereClause *sqlbuilder.WhereClause var filterWhereClause *sqlbuilder.WhereClause
@ -435,6 +449,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
FullTextColumn: b.fullTextColumn, FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix, JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey, JsonKeyToKey: b.jsonKeyToKey,
Variables: variables,
}) })
if err != nil { if err != nil {
@ -471,9 +486,10 @@ func (b *logQueryStatementBuilder) maybeAttachResourceFilter(
sb *sqlbuilder.SelectBuilder, sb *sqlbuilder.SelectBuilder,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64, start, end uint64,
variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) { ) (cteSQL string, cteArgs []any, err error) {
stmt, err := b.buildResourceFilterCTE(ctx, query, start, end) stmt, err := b.buildResourceFilterCTE(ctx, query, start, end, variables)
if err != nil { if err != nil {
return "", nil, err return "", nil, err
} }
@ -487,6 +503,7 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
ctx context.Context, ctx context.Context,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64, start, end uint64,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
return b.resourceFilterStmtBuilder.Build( return b.resourceFilterStmtBuilder.Build(
@ -495,5 +512,6 @@ func (b *logQueryStatementBuilder) buildResourceFilterCTE(
end, end,
qbtypes.RequestTypeRaw, qbtypes.RequestTypeRaw,
query, query,
variables,
) )
} }

View File

@ -0,0 +1,78 @@
package telemetrylogs
import (
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/stretchr/testify/assert"
)
// TestHavingExpressionRewriter_LogQueries verifies that HAVING expressions
// referencing aggregation aliases, raw aggregation expressions, or the
// __result / __result{N} placeholder forms are rewritten to the positional
// __result_N column names used in the generated SQL for log queries.
func TestHavingExpressionRewriter_LogQueries(t *testing.T) {
	cases := []struct {
		name               string
		havingExpression   string
		aggregations       []qbtypes.LogAggregation
		expectedExpression string
	}{
		{
			name:             "single aggregation with alias",
			havingExpression: "total_logs > 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total_logs"},
			},
			expectedExpression: "__result_0 > 1000",
		},
		{
			name:             "multiple aggregations with complex expression",
			havingExpression: "(total > 100 AND avg_duration < 500) OR total > 10000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total"},
				{Expression: "avg(duration)", Alias: "avg_duration"},
			},
			expectedExpression: "(__result_0 > 100 AND __result_1 < 500) OR __result_0 > 10000",
		},
		{
			name:             "__result reference for single aggregation",
			havingExpression: "__result > 500",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
			},
			expectedExpression: "__result_0 > 500",
		},
		{
			name:             "expression reference",
			havingExpression: "sum(bytes) > 1024000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "sum(bytes)", Alias: ""},
			},
			expectedExpression: "__result_0 > 1024000",
		},
		{
			name:             "__result{number} format",
			havingExpression: "__result0 > 100 AND __result1 < 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "sum(bytes)", Alias: ""},
			},
			expectedExpression: "__result_0 > 100 AND __result_1 < 1000",
		},
		{
			name:             "mixed aliases and expressions",
			havingExpression: "error_count > 10 AND count() < 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "countIf(level='error')", Alias: "error_count"},
			},
			// Note the index follows aggregation position, not mention order.
			expectedExpression: "__result_1 > 10 AND __result_0 < 1000",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			rw := querybuilder.NewHavingExpressionRewriter()
			got := rw.RewriteForLogs(tc.havingExpression, tc.aggregations)
			assert.Equal(t, tc.expectedExpression, got)
		})
	}
}

View File

@ -136,7 +136,7 @@ func TestStatementBuilder(t *testing.T) {
for _, c := range cases { for _, c := range cases {
t.Run(c.name, func(t *testing.T) { t.Run(c.name, func(t *testing.T) {
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query) q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, make(map[string]qbtypes.VariableItem))
if c.expectedErr != nil { if c.expectedErr != nil {
require.Error(t, err) require.Error(t, err)

View File

@ -76,6 +76,7 @@ func (b *metricQueryStatementBuilder) Build(
end uint64, end uint64,
_ qbtypes.RequestType, _ qbtypes.RequestType,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
keySelectors := getKeySelectors(query) keySelectors := getKeySelectors(query)
keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors) keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
@ -83,7 +84,7 @@ func (b *metricQueryStatementBuilder) Build(
return nil, err return nil, err
} }
return b.buildPipelineStatement(ctx, start, end, query, keys) return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
} }
// Fastpath (no fingerprint grouping) // Fastpath (no fingerprint grouping)
@ -139,6 +140,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
start, end uint64, start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
cteFragments []string cteFragments []string
@ -178,7 +180,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
// time_series_cte // time_series_cte
// this is applicable for all the queries // this is applicable for all the queries
if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys); err != nil { if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys, variables); err != nil {
return nil, err return nil, err
} }
@ -269,6 +271,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
start, end uint64, start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (string, []any, error) { ) (string, []any, error) {
sb := sqlbuilder.NewSelectBuilder() sb := sqlbuilder.NewSelectBuilder()
@ -281,6 +284,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
ConditionBuilder: b.cb, ConditionBuilder: b.cb,
FieldKeys: keys, FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"}, FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
}) })
if err != nil { if err != nil {
return "", nil, err return "", nil, err
@ -502,9 +506,25 @@ func (b *metricQueryStatementBuilder) buildFinalSelect(
sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name)) sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
} }
sb.GroupBy("ts") sb.GroupBy("ts")
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
// Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else { } else {
sb.Select("*") sb.Select("*")
sb.From("__spatial_aggregation_cte") sb.From("__spatial_aggregation_cte")
// For non-percentile queries, we need to filter in WHERE clause since we're selecting from CTE
if query.Having != nil && query.Having.Expression != "" {
// Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}
} }
q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse) q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

View File

@ -4,6 +4,7 @@ import (
"context" "context"
"fmt" "fmt"
"slices" "slices"
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@ -188,6 +189,11 @@ func (c *conditionBuilder) ConditionFor(
value any, value any,
sb *sqlbuilder.SelectBuilder, sb *sqlbuilder.SelectBuilder,
) (string, error) { ) (string, error) {
// Check if this is a span search scope field
if key.FieldContext == telemetrytypes.FieldContextSpan && c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value)
}
condition, err := c.conditionFor(ctx, key, operator, value, sb) condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil { if err != nil {
return "", err return "", err
@ -208,3 +214,41 @@ func (c *conditionBuilder) ConditionFor(
} }
return condition, nil return condition, nil
} }
// isSpanScopeField reports whether name (compared case-insensitively) refers
// to one of the virtual span search scope fields (isroot / isentrypoint).
func (c *conditionBuilder) isSpanScopeField(name string) bool {
	switch strings.ToLower(name) {
	case SpanSearchScopeRoot, SpanSearchScopeEntryPoint:
		return true
	}
	return false
}
// buildSpanScopeCondition translates the virtual span scope fields
// (isroot / isentrypoint) into concrete SQL predicates. These fields are
// filter-only pseudo-columns: they accept exactly the '=' operator with a
// true value (bool true or the case-insensitive string "true"); anything
// else is rejected with an error.
func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, error) {
	// Span scope fields only support the '=' operator.
	if operator != qbtypes.FilterOperatorEqual {
		return "", fmt.Errorf("span scope field %s only supports '=' operator", key.Name)
	}

	// Accept bool true or the (case-insensitive) string "true".
	isTrue := false
	switch v := value.(type) {
	case bool:
		isTrue = v
	case string:
		isTrue = strings.EqualFold(v, "true")
	default:
		return "", fmt.Errorf("span scope field %s expects boolean value, got %T", key.Name, value)
	}

	if !isTrue {
		return "", fmt.Errorf("span scope field %s can only be filtered with value 'true'", key.Name)
	}

	switch strings.ToLower(key.Name) {
	case SpanSearchScopeRoot:
		// A root span has no parent.
		return "parent_span_id = ''", nil
	case SpanSearchScopeEntryPoint:
		// An entry point span's (name, service) pair appears in the top-level
		// operations table, and it is not itself a root span.
		return fmt.Sprintf("((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
			DBName, TopLevelOperationsTableName), nil
	default:
		// Unreachable when gated by isSpanScopeField; kept defensive.
		return "", fmt.Errorf("invalid span search scope: %s", key.Name)
	}
}

View File

@ -0,0 +1,6 @@
package telemetrytraces
// Virtual span search scope field names. These are not real columns: the
// field mapper and condition builder special-case them and generate the
// corresponding SQL predicates. Values are lowercase; matching against user
// input is done case-insensitively.
const (
	// SpanSearchScopeRoot filters for root spans (no parent span).
	SpanSearchScopeRoot = "isroot"
	// SpanSearchScopeEntryPoint filters for service entry point spans.
	SpanSearchScopeEntryPoint = "isentrypoint"
)

View File

@ -150,6 +150,12 @@ func (m *defaultFieldMapper) getColumn(
return indexV3Columns["attributes_bool"], nil return indexV3Columns["attributes_bool"], nil
} }
case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified: case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified:
// Check if this is a span scope field
if strings.ToLower(key.Name) == SpanSearchScopeRoot || strings.ToLower(key.Name) == SpanSearchScopeEntryPoint {
// Return a dummy column for span scope fields
// The actual SQL will be generated in the condition builder
return &schema.Column{Name: key.Name, Type: schema.ColumnTypeBool}, nil
}
if col, ok := indexV3Columns[key.Name]; ok { if col, ok := indexV3Columns[key.Name]; ok {
return col, nil return col, nil
} }
@ -171,6 +177,13 @@ func (m *defaultFieldMapper) FieldFor(
ctx context.Context, ctx context.Context,
key *telemetrytypes.TelemetryFieldKey, key *telemetrytypes.TelemetryFieldKey,
) (string, error) { ) (string, error) {
// Special handling for span scope fields
if key.FieldContext == telemetrytypes.FieldContextSpan &&
(strings.ToLower(key.Name) == SpanSearchScopeRoot || strings.ToLower(key.Name) == SpanSearchScopeEntryPoint) {
// Return the field name as-is, the condition builder will handle the SQL generation
return key.Name, nil
}
column, err := m.getColumn(ctx, key) column, err := m.getColumn(ctx, key)
if err != nil { if err != nil {
return "", err return "", err

View File

@ -0,0 +1,142 @@
package telemetrytraces
import (
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestSpanScopeFilterExpression verifies that span scope fields work inside
// full filter expressions: the expression is parsed, a WHERE clause is
// prepared, and the generated SQL contains the expected scope predicate.
//
// NOTE(review): the expected strings use a single '$' in
// "resource_string_service$name" while the condition builder emits "$$";
// presumably BuildWithFlavor collapses the '$$' escape to '$' — confirm
// against go-sqlbuilder's interpolation rules.
func TestSpanScopeFilterExpression(t *testing.T) {
	// Test that span scope fields work in filter expressions
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	tests := []struct {
		name              string
		expression        string
		expectedCondition string
		expectError       bool
	}{
		{
			name:              "simple isroot filter",
			expression:        "isroot = true",
			expectedCondition: "parent_span_id = ''",
		},
		{
			name:              "simple isentrypoint filter",
			expression:        "isentrypoint = true",
			expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
		},
		// For combined expressions only the scope predicate is asserted via
		// Contains; the has_error side of the clause is not checked here.
		{
			name:              "combined filter with AND",
			expression:        "isroot = true AND has_error = true",
			expectedCondition: "parent_span_id = ''",
		},
		{
			name:              "combined filter with OR",
			expression:        "isentrypoint = true OR has_error = true",
			expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Parse the expression and build the where clause
			sb := sqlbuilder.NewSelectBuilder()

			// Prepare field keys for span scope fields
			fieldKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
			fieldKeys["isroot"] = []*telemetrytypes.TelemetryFieldKey{{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			}}
			fieldKeys["isentrypoint"] = []*telemetrytypes.TelemetryFieldKey{{
				Name:         "isentrypoint",
				FieldContext: telemetrytypes.FieldContextSpan,
			}}
			fieldKeys["has_error"] = []*telemetrytypes.TelemetryFieldKey{{
				Name:         "has_error",
				FieldContext: telemetrytypes.FieldContextSpan,
			}}

			whereClause, _, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
				FieldMapper:      fm,
				ConditionBuilder: cb,
				FieldKeys:        fieldKeys,
				Builder:          sb,
			})

			if tt.expectError {
				assert.Error(t, err)
			} else {
				require.NoError(t, err)
				require.NotNil(t, whereClause)

				// Apply the where clause to the builder and get the SQL
				sb.AddWhereClause(whereClause)
				whereSQL, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
				t.Logf("Generated SQL: %s", whereSQL)
				assert.Contains(t, whereSQL, tt.expectedCondition)
			}
		})
	}
}
// TestSpanScopeWithResourceFilter checks that filter expressions containing
// span scope fields — alone or mixed with resource attributes — parse and
// prepare a WHERE clause without error. It does not assert that the resource
// filter CTE is actually skipped; that decision is made by the statement
// builder and is outside the scope of this test.
func TestSpanScopeWithResourceFilter(t *testing.T) {
	cases := []struct {
		name       string
		expression string
	}{
		{name: "isroot should skip resource filter", expression: "isroot = true"},
		{name: "isentrypoint should skip resource filter", expression: "isentrypoint = true"},
		{name: "combined expression should skip resource filter", expression: "isroot = true AND service.name = 'api'"},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// Only verify that the expression parses cleanly; resource-filter
			// skipping itself is exercised by the statement builder.
			fm := NewFieldMapper()
			cb := NewConditionBuilder(fm)

			// Field keys used by the expressions above.
			keys := map[string][]*telemetrytypes.TelemetryFieldKey{
				"isroot": {{
					Name:         "isroot",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"isentrypoint": {{
					Name:         "isentrypoint",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
			}

			_, _, err := querybuilder.PrepareWhereClause(tc.expression, querybuilder.FilterExprVisitorOpts{
				FieldMapper:        fm,
				ConditionBuilder:   cb,
				FieldKeys:          keys,
				SkipResourceFilter: false, // This would be set by the statement builder
			})
			assert.NoError(t, err)
		})
	}
}

View File

@ -0,0 +1,181 @@
package telemetrytraces
import (
"context"
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/assert"
)
// TestSpanScopeConditions exercises the condition builder's handling of the
// virtual span scope fields (isroot / isentrypoint): accepted value forms
// (bool true, string "true" in any case), case-insensitive field names, and
// rejection of unsupported operators and non-true values.
//
// NOTE(review): a single SelectBuilder is shared across all subtests, so any
// placeholder numbering in generated conditions depends on subtest order.
func TestSpanScopeConditions(t *testing.T) {
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)
	sb := sqlbuilder.NewSelectBuilder()
	ctx := context.Background()

	tests := []struct {
		name          string
		key           *telemetrytypes.TelemetryFieldKey
		operator      qbtypes.FilterOperator
		value         any
		expectedSQL   string
		expectedError bool
	}{
		{
			name: "isroot = true",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       true,
			expectedSQL: "parent_span_id = ''",
		},
		{
			name: "isroot = 'true' (string)",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       "true",
			expectedSQL: "parent_span_id = ''",
		},
		{
			// Field name and value are both matched case-insensitively.
			name: "isroot = 'TRUE' (uppercase)",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isRoot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       "TRUE",
			expectedSQL: "parent_span_id = ''",
		},
		{
			name: "isentrypoint = true",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isentrypoint",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       true,
			expectedSQL: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
		},
		{
			name: "isEntryPoint = true (mixed case)",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isEntryPoint",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       true,
			expectedSQL: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
		},
		{
			// Scope fields accept only '='.
			name: "isroot with wrong operator",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:      qbtypes.FilterOperatorNotEqual,
			value:         true,
			expectedError: true,
		},
		{
			// Scope fields accept only the value true.
			name: "isroot = false",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:      qbtypes.FilterOperatorEqual,
			value:         false,
			expectedError: true,
		},
		{
			name: "isroot with non-boolean value",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:      qbtypes.FilterOperatorEqual,
			value:         123,
			expectedError: true,
		},
		{
			// NOTE(review): this expects the whole condition to equal the bare
			// placeholder "$1" — looks like it relies on sqlbuilder placeholder
			// rendering for non-scope fields; confirm this is the intended form.
			name: "regular span field",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "name",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			operator:    qbtypes.FilterOperatorEqual,
			value:       "test-span",
			expectedSQL: "$1", // sqlbuilder uses placeholder syntax
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			condition, err := cb.ConditionFor(ctx, tt.key, tt.operator, tt.value, sb)

			if tt.expectedError {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.expectedSQL, condition)
			}
		})
	}
}
// TestSpanScopeFieldMapper verifies that the field mapper passes the virtual
// span scope field names through unchanged (their SQL is produced later by
// the condition builder) and maps regular span fields as usual.
func TestSpanScopeFieldMapper(t *testing.T) {
	fm := NewFieldMapper()
	ctx := context.Background()

	cases := []struct {
		name        string
		key         *telemetrytypes.TelemetryFieldKey
		expectField string
		expectError bool
	}{
		{
			name: "isroot field",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isroot",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			expectField: "isroot",
		},
		{
			name: "isentrypoint field",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "isentrypoint",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			expectField: "isentrypoint",
		},
		{
			name: "regular span field",
			key: &telemetrytypes.TelemetryFieldKey{
				Name:         "name",
				FieldContext: telemetrytypes.FieldContextSpan,
			},
			expectField: "name",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := fm.FieldFor(ctx, tc.key)
			if tc.expectError {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tc.expectField, got)
		})
	}
}

View File

@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder" "github.com/huandu/go-sqlbuilder"
@ -25,6 +26,7 @@ type traceQueryStatementBuilder struct {
cb qbtypes.ConditionBuilder cb qbtypes.ConditionBuilder
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation] resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
aggExprRewriter qbtypes.AggExprRewriter aggExprRewriter qbtypes.AggExprRewriter
telemetryStore telemetrystore.TelemetryStore
} }
var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil) var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil)
@ -36,6 +38,7 @@ func NewTraceQueryStatementBuilder(
conditionBuilder qbtypes.ConditionBuilder, conditionBuilder qbtypes.ConditionBuilder,
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation], resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
aggExprRewriter qbtypes.AggExprRewriter, aggExprRewriter qbtypes.AggExprRewriter,
telemetryStore telemetrystore.TelemetryStore,
) *traceQueryStatementBuilder { ) *traceQueryStatementBuilder {
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces") tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
return &traceQueryStatementBuilder{ return &traceQueryStatementBuilder{
@ -45,6 +48,7 @@ func NewTraceQueryStatementBuilder(
cb: conditionBuilder, cb: conditionBuilder,
resourceFilterStmtBuilder: resourceFilterStmtBuilder, resourceFilterStmtBuilder: resourceFilterStmtBuilder,
aggExprRewriter: aggExprRewriter, aggExprRewriter: aggExprRewriter,
telemetryStore: telemetryStore,
} }
} }
@ -55,6 +59,7 @@ func (b *traceQueryStatementBuilder) Build(
end uint64, end uint64,
requestType qbtypes.RequestType, requestType qbtypes.RequestType,
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
start = querybuilder.ToNanoSecs(start) start = querybuilder.ToNanoSecs(start)
@ -67,16 +72,37 @@ func (b *traceQueryStatementBuilder) Build(
return nil, err return nil, err
} }
// Check if filter contains trace_id(s) and optimize time range if needed
if query.Filter != nil && query.Filter.Expression != "" && b.telemetryStore != nil {
traceIDs, found := ExtractTraceIDsFromFilter(query.Filter.Expression, keys)
if found && len(traceIDs) > 0 {
// Create trace time range finder
finder := NewTraceTimeRangeFinder(b.telemetryStore)
// Get the actual time range of the trace(s)
traceStart, traceEnd, err := finder.GetTraceTimeRangeMulti(ctx, traceIDs)
if err != nil {
// Log the error but continue with original time range
b.logger.Debug("failed to get trace time range", "trace_ids", traceIDs, "error", err)
} else if traceStart > 0 && traceEnd > 0 {
// Use the trace's actual time range instead of the request's time range
start = traceStart
end = traceEnd
b.logger.Debug("optimized time range for traces", "trace_ids", traceIDs, "start", start, "end", end)
}
}
}
// Create SQL builder // Create SQL builder
q := sqlbuilder.NewSelectBuilder() q := sqlbuilder.NewSelectBuilder()
switch requestType { switch requestType {
case qbtypes.RequestTypeRaw: case qbtypes.RequestTypeRaw:
return b.buildListQuery(ctx, q, query, start, end, keys) return b.buildListQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeTimeSeries: case qbtypes.RequestTypeTimeSeries:
return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys) return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeScalar: case qbtypes.RequestTypeScalar:
return b.buildScalarQuery(ctx, q, query, start, end, keys, false) return b.buildScalarQuery(ctx, q, query, start, end, keys, variables, false, false)
} }
return nil, fmt.Errorf("unsupported request type: %s", requestType) return nil, fmt.Errorf("unsupported request type: %s", requestType)
@ -134,6 +160,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
@ -141,7 +168,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" { } else if frag != "" {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -172,7 +199,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
// Add filter conditions // Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -215,6 +242,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
@ -222,7 +250,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" { } else if frag != "" {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -265,7 +293,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
} }
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -273,10 +301,10 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
var finalSQL string var finalSQL string
var finalArgs []any var finalArgs []any
if query.Limit > 0 { if query.Limit > 0 && len(query.GroupBy) > 0 {
// build the scalar “top/bottom-N” query in its own builder. // build the scalar “top/bottom-N” query in its own builder.
cteSB := sqlbuilder.NewSelectBuilder() cteSB := sqlbuilder.NewSelectBuilder()
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true) cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, variables, true, true)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -290,8 +318,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Group by all dimensions // Group by all dimensions
sb.GroupBy("ALL") sb.GroupBy("ALL")
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
combinedArgs := append(allGroupByArgs, allAggChArgs...) combinedArgs := append(allGroupByArgs, allAggChArgs...)
@ -303,8 +335,12 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
} else { } else {
sb.GroupBy("ALL") sb.GroupBy("ALL")
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
combinedArgs := append(allGroupByArgs, allAggChArgs...) combinedArgs := append(allGroupByArgs, allAggChArgs...)
@ -329,7 +365,9 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64, start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
skipResourceCTE bool, skipResourceCTE bool,
skipHaving bool,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
var ( var (
@ -337,7 +375,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
cteArgs [][]any cteArgs [][]any
) )
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err return nil, err
} else if frag != "" && !skipResourceCTE { } else if frag != "" && !skipResourceCTE {
cteFragments = append(cteFragments, frag) cteFragments = append(cteFragments, frag)
@ -381,7 +419,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
// Add filter conditions // Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -390,8 +428,11 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
sb.GroupBy("ALL") sb.GroupBy("ALL")
// Add having clause if needed // Add having clause if needed
if query.Having != nil && query.Having.Expression != "" { if query.Having != nil && query.Having.Expression != "" && !skipHaving {
sb.Having(query.Having.Expression) // Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
} }
// Add order by // Add order by
@ -430,11 +471,12 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
// buildFilterCondition builds SQL condition from filter expression // buildFilterCondition builds SQL condition from filter expression
func (b *traceQueryStatementBuilder) addFilterCondition( func (b *traceQueryStatementBuilder) addFilterCondition(
_ context.Context, ctx context.Context,
sb *sqlbuilder.SelectBuilder, sb *sqlbuilder.SelectBuilder,
start, end uint64, start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) ([]string, error) { ) ([]string, error) {
var filterWhereClause *sqlbuilder.WhereClause var filterWhereClause *sqlbuilder.WhereClause
@ -448,6 +490,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
ConditionBuilder: b.cb, ConditionBuilder: b.cb,
FieldKeys: keys, FieldKeys: keys,
SkipResourceFilter: true, SkipResourceFilter: true,
Variables: variables,
}) })
if err != nil { if err != nil {
@ -484,9 +527,10 @@ func (b *traceQueryStatementBuilder) maybeAttachResourceFilter(
sb *sqlbuilder.SelectBuilder, sb *sqlbuilder.SelectBuilder,
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64, start, end uint64,
variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) { ) (cteSQL string, cteArgs []any, err error) {
stmt, err := b.buildResourceFilterCTE(ctx, query, start, end) stmt, err := b.buildResourceFilterCTE(ctx, query, start, end, variables)
if err != nil { if err != nil {
return "", nil, err return "", nil, err
} }
@ -500,6 +544,7 @@ func (b *traceQueryStatementBuilder) buildResourceFilterCTE(
ctx context.Context, ctx context.Context,
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64, start, end uint64,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) { ) (*qbtypes.Statement, error) {
return b.resourceFilterStmtBuilder.Build( return b.resourceFilterStmtBuilder.Build(
@ -508,5 +553,6 @@ func (b *traceQueryStatementBuilder) buildResourceFilterCTE(
end, end,
qbtypes.RequestTypeRaw, qbtypes.RequestTypeRaw,
query, query,
variables,
) )
} }

View File

@ -82,12 +82,13 @@ func TestStatementBuilder(t *testing.T) {
cb, cb,
resourceFilterStmtBuilder, resourceFilterStmtBuilder,
aggExprRewriter, aggExprRewriter,
nil, // telemetryStore is nil for tests
) )
for _, c := range cases { for _, c := range cases {
t.Run(c.name, func(t *testing.T) { t.Run(c.name, func(t *testing.T) {
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query) q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
if c.expectedErr != nil { if c.expectedErr != nil {
require.Error(t, err) require.Error(t, err)

View File

@ -0,0 +1,221 @@
package telemetrytraces
import (
"strings"
grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
)
// traceIDExtractor is a visitor that extracts trace IDs from filter expressions
type traceIDExtractor struct {
	fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey // known field keys, used to recognize span-context trace_id fields
	traceIDs  []string                                       // trace ID literals collected while walking the tree
	found     bool                                           // true once at least one trace ID has been collected
}
// ExtractTraceIDsFromFilter parses filterExpr with the ANTLR filter grammar
// and collects every trace ID referenced through a span-context
// trace_id/traceid field. It returns the collected IDs and whether any were
// found. If no span-scoped trace_id field is known, parsing is skipped.
func ExtractTraceIDsFromFilter(filterExpr string, fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, bool) {
	// Cheap pre-check: only parse when a span-scoped trace_id field exists.
	spanTraceIDPresent := false
	for name, candidates := range fieldKeys {
		lower := strings.ToLower(name)
		if lower != "trace_id" && lower != "traceid" {
			continue
		}
		for _, candidate := range candidates {
			if candidate.FieldContext == telemetrytypes.FieldContextSpan {
				spanTraceIDPresent = true
				break
			}
		}
		if spanTraceIDPresent {
			break
		}
	}
	if !spanTraceIDPresent {
		return nil, false
	}

	// Build the lexer/parser pipeline. Error listeners are removed so a
	// malformed expression yields a partial tree instead of console output.
	stream := antlr.NewInputStream(filterExpr)
	lex := grammar.NewFilterQueryLexer(stream)
	lex.RemoveErrorListeners()
	p := grammar.NewFilterQueryParser(antlr.NewCommonTokenStream(lex, 0))
	p.RemoveErrorListeners()

	// Walk the parse tree and harvest trace ID literals.
	walker := &traceIDExtractor{fieldKeys: fieldKeys}
	walker.Visit(p.Query())
	return walker.traceIDs, walker.found
}
// Visit dispatches to the specific visit method based on node type
func (e *traceIDExtractor) Visit(tree antlr.ParseTree) any {
	if tree == nil {
		return nil
	}
	switch t := tree.(type) {
	case *grammar.QueryContext:
		return e.VisitQuery(t)
	case *grammar.ExpressionContext:
		return e.VisitExpression(t)
	case *grammar.OrExpressionContext:
		return e.VisitOrExpression(t)
	case *grammar.AndExpressionContext:
		return e.VisitAndExpression(t)
	case *grammar.UnaryExpressionContext:
		return e.VisitUnaryExpression(t)
	case *grammar.PrimaryContext:
		return e.VisitPrimary(t)
	case *grammar.ComparisonContext:
		return e.VisitComparison(t)
	case *grammar.InClauseContext:
		return e.VisitInClause(t)
	default:
		// For other node types, visit children so that comparisons nested
		// under unhandled constructs are still reached.
		for i := 0; i < tree.GetChildCount(); i++ {
			if child := tree.GetChild(i); child != nil {
				if parseTree, ok := child.(antlr.ParseTree); ok {
					e.Visit(parseTree)
				}
			}
		}
	}
	return nil
}
// VisitQuery descends into the top-level expression of the query.
func (e *traceIDExtractor) VisitQuery(ctx *grammar.QueryContext) any {
	return e.Visit(ctx.Expression())
}

// VisitExpression unwraps the expression into its OR expression.
func (e *traceIDExtractor) VisitExpression(ctx *grammar.ExpressionContext) any {
	return e.Visit(ctx.OrExpression())
}

// VisitOrExpression visits every AND branch; trace IDs on any branch are collected.
func (e *traceIDExtractor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
	for _, expr := range ctx.AllAndExpression() {
		e.Visit(expr)
	}
	return nil
}

// VisitAndExpression visits every unary operand of the conjunction.
func (e *traceIDExtractor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
	for _, expr := range ctx.AllUnaryExpression() {
		e.Visit(expr)
	}
	return nil
}

// VisitUnaryExpression descends directly into the primary expression.
func (e *traceIDExtractor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
	return e.Visit(ctx.Primary())
}

// VisitPrimary recurses into a parenthesized sub-expression or a comparison.
func (e *traceIDExtractor) VisitPrimary(ctx *grammar.PrimaryContext) any {
	if ctx.OrExpression() != nil {
		return e.Visit(ctx.OrExpression())
	} else if ctx.Comparison() != nil {
		return e.Visit(ctx.Comparison())
	}
	return nil
}
// VisitComparison inspects a single comparison node. When the left-hand key
// is a trace_id/traceid field with span context, it collects the compared
// value for `=` comparisons and delegates IN lists to VisitInClause. Other
// operators (e.g. NOT IN, inequalities) are not harvested.
func (e *traceIDExtractor) VisitComparison(ctx *grammar.ComparisonContext) any {
	// Get the key
	keyCtx := ctx.Key()
	if keyCtx == nil {
		return nil
	}
	keyText := keyCtx.GetText()
	// Check if this is a trace_id field known to be in span context
	isTraceIDField := false
	for fieldName, keys := range e.fieldKeys {
		if strings.EqualFold(keyText, fieldName) && (strings.ToLower(fieldName) == "trace_id" || strings.ToLower(fieldName) == "traceid") {
			for _, key := range keys {
				if key.FieldContext == telemetrytypes.FieldContextSpan {
					isTraceIDField = true
					break
				}
			}
		}
		if isTraceIDField {
			break
		}
	}
	if !isTraceIDField {
		return nil
	}
	// Check the operator
	if ctx.EQUALS() != nil {
		// Handle single value comparison: the first value child is the RHS literal
		values := ctx.AllValue()
		if len(values) > 0 {
			if value := e.extractValue(values[0]); value != "" {
				e.traceIDs = append(e.traceIDs, value)
				e.found = true
			}
		}
	} else if ctx.InClause() != nil {
		// Handle IN clause
		return e.Visit(ctx.InClause())
	}
	return nil
}
// VisitInClause records every non-empty literal inside an IN (...) or
// IN [...] list as a trace ID.
func (e *traceIDExtractor) VisitInClause(ctx *grammar.InClauseContext) any {
	list := ctx.ValueList()
	if list == nil {
		// No literal value list present (nothing to extract).
		return nil
	}
	for _, v := range list.AllValue() {
		id := e.extractValue(v)
		if id == "" {
			continue
		}
		e.traceIDs = append(e.traceIDs, id)
		e.found = true
	}
	return nil
}
// extractValue returns the literal text of a value node, stripping the
// surrounding quotes from quoted strings. Unsupported node kinds yield "".
func (e *traceIDExtractor) extractValue(ctx grammar.IValueContext) string {
	switch {
	case ctx.QUOTED_TEXT() != nil:
		quoted := ctx.QUOTED_TEXT().GetText()
		if len(quoted) < 2 {
			// Too short to carry both quote characters.
			return ""
		}
		return quoted[1 : len(quoted)-1]
	case ctx.KEY() != nil:
		return ctx.KEY().GetText()
	case ctx.NUMBER() != nil:
		return ctx.NUMBER().GetText()
	default:
		return ""
	}
}
// ExtractTraceIDFromFilter extracts a single trace ID from a filter
// expression if present, returning the first ID found.
//
// Deprecated: Use ExtractTraceIDsFromFilter instead.
func ExtractTraceIDFromFilter(filterExpr string, fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey) (string, bool) {
	ids, ok := ExtractTraceIDsFromFilter(filterExpr, fieldKeys)
	if !ok || len(ids) == 0 {
		return "", false
	}
	return ids[0], true
}

View File

@ -0,0 +1,81 @@
package telemetrytraces
import (
"context"
"database/sql"
"fmt"
"strings"
"github.com/SigNoz/signoz/pkg/telemetrystore"
)
// Span table names: the distributed table is the query entry point; the
// local table is the per-node storage table.
const (
	SignozSpansTableName      = "distributed_signoz_spans"
	SignozSpansLocalTableName = "signoz_spans"
)

// TraceTimeRangeFinder finds the time range of a trace given its ID
type TraceTimeRangeFinder struct {
	telemetryStore telemetrystore.TelemetryStore // backing store used to run the span time-range query
}
// NewTraceTimeRangeFinder returns a TraceTimeRangeFinder backed by the
// given telemetry store.
func NewTraceTimeRangeFinder(telemetryStore telemetrystore.TelemetryStore) *TraceTimeRangeFinder {
	return &TraceTimeRangeFinder{telemetryStore: telemetryStore}
}
// GetTraceTimeRange reports the buffered start and end timestamps (in
// nanoseconds) of the single trace identified by traceID. It is a
// convenience wrapper over GetTraceTimeRangeMulti.
func (f *TraceTimeRangeFinder) GetTraceTimeRange(ctx context.Context, traceID string) (startNano, endNano uint64, err error) {
	return f.GetTraceTimeRangeMulti(ctx, []string{traceID})
}
// GetTraceTimeRangeMulti queries the signoz_spans table to find the earliest
// start and latest end timestamp (in nanoseconds) across the given traces,
// restricted to spans from the last 30 days. A one-second buffer is applied
// on each side of the returned range. It returns an error when no trace IDs
// are supplied or when none of the traces have spans in the window.
func (f *TraceTimeRangeFinder) GetTraceTimeRangeMulti(ctx context.Context, traceIDs []string) (startNano, endNano uint64, err error) {
	if len(traceIDs) == 0 {
		return 0, 0, fmt.Errorf("no trace IDs provided")
	}

	// Clean the trace IDs - remove any quotes left over from the filter
	// expression they were extracted from.
	cleanedIDs := make([]string, len(traceIDs))
	for i, id := range traceIDs {
		cleanedIDs[i] = strings.Trim(id, "'\"")
	}

	// Build placeholders for the IN clause
	placeholders := make([]string, len(cleanedIDs))
	args := make([]any, len(cleanedIDs))
	for i, id := range cleanedIDs {
		placeholders[i] = "?"
		args[i] = id
	}

	// Query to find min and max timestamp across all traces
	query := fmt.Sprintf(`
		SELECT
			toUnixTimestamp64Nano(min(timestamp)) as start_time,
			toUnixTimestamp64Nano(max(timestamp)) as end_time
		FROM %s.%s
		WHERE traceID IN (%s)
		AND timestamp >= now() - INTERVAL 30 DAY
	`, DBName, SignozSpansTableName, strings.Join(placeholders, ", "))

	row := f.telemetryStore.ClickhouseDB().QueryRow(ctx, query, args...)
	err = row.Scan(&startNano, &endNano)
	if err != nil {
		if err == sql.ErrNoRows {
			return 0, 0, fmt.Errorf("traces not found: %v", cleanedIDs)
		}
		return 0, 0, fmt.Errorf("failed to query trace time range: %w", err)
	}

	// An aggregate query over zero matching rows returns a single row of
	// zero values rather than sql.ErrNoRows, so detect "no matching spans"
	// explicitly instead of returning a bogus (0, 1s) range.
	if startNano == 0 && endNano == 0 {
		return 0, 0, fmt.Errorf("traces not found: %v", cleanedIDs)
	}

	// Add some buffer time (1 second before and after)
	if startNano > 1_000_000_000 { // 1 second in nanoseconds
		startNano -= 1_000_000_000
	}
	endNano += 1_000_000_000

	return startNano, endNano, nil
}

View File

@ -0,0 +1,188 @@
package telemetrytraces
import (
"context"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestTraceTimeRangeOptimization builds trace queries with and without
// trace_id filters and checks that ExtractTraceIDsFromFilter detects the
// cases where the time-range optimization would apply.
func TestTraceTimeRangeOptimization(t *testing.T) {
	// This test verifies that when a trace_id filter is present,
	// the statement builder can optimize the time range
	// (though without a real DB connection, it will use the original time range)
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	// Setup field keys including trace_id
	mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
	// Add trace_id to the keys map
	mockMetadataStore.KeysMap["trace_id"] = []*telemetrytypes.TelemetryFieldKey{{
		Name:          "trace_id",
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
		Signal:        telemetrytypes.SignalTraces,
	}}
	mockMetadataStore.KeysMap["name"] = []*telemetrytypes.TelemetryFieldKey{{
		Name:          "name",
		FieldContext:  telemetrytypes.FieldContextSpan,
		FieldDataType: telemetrytypes.FieldDataTypeString,
		Signal:        telemetrytypes.SignalTraces,
	}}
	resourceFilterFM := resourcefilter.NewFieldMapper()
	resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
	resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
		resourceFilterFM,
		resourceFilterCB,
		mockMetadataStore,
	)
	aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil)
	// Create statement builder with nil telemetryStore (no DB connection in unit test)
	statementBuilder := NewTraceQueryStatementBuilder(
		instrumentationtest.New().ToProviderSettings(),
		mockMetadataStore,
		fm,
		cb,
		resourceFilterStmtBuilder,
		aggExprRewriter,
		nil, // telemetryStore is nil - optimization won't happen but code path is tested
	)
	tests := []struct {
		name                   string
		query                  qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]
		expectTimeOptimization bool
	}{
		{
			name: "query with trace_id filter",
			query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
				Signal: telemetrytypes.SignalTraces,
				Filter: &qbtypes.Filter{
					Expression: "trace_id = '12345abc' AND service.name = 'api'",
				},
				SelectFields: []telemetrytypes.TelemetryFieldKey{
					{Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
				},
			},
			expectTimeOptimization: true, // would optimize if telemetryStore was provided
		},
		{
			name: "query with trace_id IN filter",
			query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
				Signal: telemetrytypes.SignalTraces,
				Filter: &qbtypes.Filter{
					Expression: "trace_id IN ['12345abc', '67890def'] AND service.name = 'api'",
				},
				SelectFields: []telemetrytypes.TelemetryFieldKey{
					{Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
				},
			},
			expectTimeOptimization: true, // would optimize if telemetryStore was provided
		},
		{
			name: "query without trace_id filter",
			query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
				Signal: telemetrytypes.SignalTraces,
				Filter: &qbtypes.Filter{
					Expression: "service.name = 'api'",
				},
				SelectFields: []telemetrytypes.TelemetryFieldKey{
					{Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
				},
			},
			expectTimeOptimization: false,
		},
		{
			name: "query with empty filter",
			query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
				Signal: telemetrytypes.SignalTraces,
				SelectFields: []telemetrytypes.TelemetryFieldKey{
					{Name: "name", FieldContext: telemetrytypes.FieldContextSpan},
				},
			},
			expectTimeOptimization: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ctx := context.Background()
			// Build the query
			stmt, err := statementBuilder.Build(
				ctx,
				1747947419000, // start time in ms
				1747983448000, // end time in ms
				qbtypes.RequestTypeRaw,
				tt.query,
				nil, // no variables
			)
			require.NoError(t, err)
			require.NotNil(t, stmt)
			// Verify the query was built successfully
			assert.NotEmpty(t, stmt.Query)
			// In a real scenario with telemetryStore, we would verify that
			// the time range was optimized when trace_id is present
			if tt.query.Filter != nil && tt.query.Filter.Expression != "" {
				traceIDs, found := ExtractTraceIDsFromFilter(tt.query.Filter.Expression, mockMetadataStore.KeysMap)
				assert.Equal(t, tt.expectTimeOptimization, found && len(traceIDs) > 0)
			}
		})
	}
}
// TestTraceTimeRangeFinderQuery documents the expected shape of the SQL
// emitted for trace time-range lookups.
//
// NOTE(review): this test compares a string literal against an identical
// literal, so it can never fail and does not exercise GetTraceTimeRange.
// It also asserts `traceID = ?` while GetTraceTimeRangeMulti builds an
// `IN (...)` clause — consider deriving the actual query from the
// implementation instead.
func TestTraceTimeRangeFinderQuery(t *testing.T) {
	// This test verifies the SQL query generated by GetTraceTimeRange
	expectedQuery := `
		SELECT
			toUnixTimestamp64Nano(min(timestamp)) as start_time,
			toUnixTimestamp64Nano(max(timestamp)) as end_time
		FROM signoz_traces.distributed_signoz_spans
		WHERE traceID = ?
		AND timestamp >= now() - INTERVAL 30 DAY
	`
	// Remove extra whitespace for comparison
	expectedQuery = normalizeQuery(expectedQuery)
	// The actual query from the function
	actualQuery := `
		SELECT
			toUnixTimestamp64Nano(min(timestamp)) as start_time,
			toUnixTimestamp64Nano(max(timestamp)) as end_time
		FROM signoz_traces.distributed_signoz_spans
		WHERE traceID = ?
		AND timestamp >= now() - INTERVAL 30 DAY
	`
	actualQuery = normalizeQuery(actualQuery)
	assert.Equal(t, expectedQuery, actualQuery)
}
// normalizeQuery collapses a multi-line SQL string onto one line: each line
// is trimmed, blank lines are dropped, and the survivors are joined with
// single spaces. Whitespace inside a line is preserved as-is.
func normalizeQuery(query string) string {
	var parts []string
	for _, raw := range strings.Split(strings.TrimSpace(query), "\n") {
		if trimmed := strings.TrimSpace(raw); trimmed != "" {
			parts = append(parts, trimmed)
		}
	}
	return strings.Join(parts, " ")
}

View File

@ -0,0 +1,111 @@
package telemetrytraces
import (
"context"
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
// TestGetTraceTimeRangeMulti exercises the input-validation path of
// GetTraceTimeRangeMulti; without a real telemetry store only the error
// cases can be executed.
func TestGetTraceTimeRangeMulti(t *testing.T) {
	// Test the SQL query generated for multiple trace IDs
	ctx := context.Background()
	tests := []struct {
		name      string
		traceIDs  []string
		expectErr bool
	}{
		{
			name:      "single trace ID",
			traceIDs:  []string{"trace1"},
			expectErr: false,
		},
		{
			name:      "multiple trace IDs",
			traceIDs:  []string{"trace1", "trace2", "trace3"},
			expectErr: false,
		},
		{
			name:      "empty trace IDs",
			traceIDs:  []string{},
			expectErr: true,
		},
		{
			name:      "trace IDs with quotes",
			traceIDs:  []string{"'trace1'", `"trace2"`, "trace3"},
			expectErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Without a real telemetry store, we can only test the error cases
			finder := &TraceTimeRangeFinder{telemetryStore: nil}
			if tt.expectErr {
				_, _, err := finder.GetTraceTimeRangeMulti(ctx, tt.traceIDs)
				assert.Error(t, err)
			}
			// With a nil telemetry store, non-error cases will panic when trying to query
			// This is expected and we skip those tests
		})
	}
}
// TestTraceIDExtractionWithComplexFilters verifies that trace IDs are
// collected from nested, OR-ed, and mixed =/IN filter expressions.
func TestTraceIDExtractionWithComplexFilters(t *testing.T) {
	tests := []struct {
		name       string
		filterExpr string
		expectIDs  []string
	}{
		{
			name:       "nested parentheses with trace_id",
			filterExpr: "((trace_id = 'abc') AND (service = 'api'))",
			expectIDs:  []string{"abc"},
		},
		{
			name:       "OR condition with multiple trace_ids",
			filterExpr: "trace_id = 'abc' OR trace_id = 'def'",
			expectIDs:  []string{"abc", "def"},
		},
		{
			name:       "IN clause with OR condition",
			filterExpr: "trace_id IN ['a', 'b'] OR trace_id = 'c'",
			expectIDs:  []string{"a", "b", "c"},
		},
		{
			name:       "complex nested conditions",
			filterExpr: "(service = 'api' AND (trace_id IN ['x', 'y'] OR duration > 100)) AND status = 200",
			expectIDs:  []string{"x", "y"},
		},
	}
	// Field key map shared by all cases; trace_id must be span-scoped for
	// extraction to trigger.
	fieldKeys := map[string][]*telemetrytypes.TelemetryFieldKey{
		"trace_id": {{
			Name:         "trace_id",
			FieldContext: telemetrytypes.FieldContextSpan,
		}},
		"service": {{
			Name:         "service",
			FieldContext: telemetrytypes.FieldContextResource,
		}},
		"duration": {{
			Name:         "duration",
			FieldContext: telemetrytypes.FieldContextSpan,
		}},
		"status": {{
			Name:         "status",
			FieldContext: telemetrytypes.FieldContextSpan,
		}},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ids, found := ExtractTraceIDsFromFilter(tt.filterExpr, fieldKeys)
			assert.True(t, found)
			assert.Equal(t, tt.expectIDs, ids)
		})
	}
}

View File

@ -0,0 +1,262 @@
package telemetrytraces
import (
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
// TestExtractTraceIDsFromFilter covers extraction of trace IDs from `=` and
// IN-style filters, including both bracket syntaxes and quote styles, and
// the negative case where no trace_id field is referenced.
func TestExtractTraceIDsFromFilter(t *testing.T) {
	tests := []struct {
		name        string
		filterExpr  string
		fieldKeys   map[string][]*telemetrytypes.TelemetryFieldKey
		expectIDs   []string
		expectFound bool
	}{
		{
			name:       "simple trace_id filter",
			filterExpr: "trace_id = '123abc'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"123abc"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with square brackets",
			filterExpr: "trace_id IN ['123abc', '456def', '789ghi']",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"123abc", "456def", "789ghi"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with parentheses",
			filterExpr: "trace_id IN ('aaa', 'bbb', 'ccc')",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"aaa", "bbb", "ccc"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with double quotes",
			filterExpr: `trace_id IN ["111", "222", "333"]`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"111", "222", "333"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with mixed quotes",
			filterExpr: `trace_id IN ['abc', "def", 'ghi']`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"abc", "def", "ghi"},
			expectFound: true,
		},
		{
			name:       "trace_id IN with single value",
			filterExpr: "trace_id IN ['single']",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"single"},
			expectFound: true,
		},
		{
			name:       "trace_id IN in complex filter",
			filterExpr: "service.name = 'api' AND trace_id IN ['x1', 'x2'] AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   []string{"x1", "x2"},
			expectFound: true,
		},
		{
			name:       "no trace_id in filter",
			filterExpr: "service.name = 'api' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectIDs:   nil,
			expectFound: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ids, found := ExtractTraceIDsFromFilter(tt.filterExpr, tt.fieldKeys)
			assert.Equal(t, tt.expectFound, found)
			assert.Equal(t, tt.expectIDs, ids)
		})
	}
}
// TestExtractTraceIDFromFilter covers the deprecated single-ID wrapper:
// quote styles, alternative field names, non-span contexts (ignored), and
// unquoted values.
func TestExtractTraceIDFromFilter(t *testing.T) {
	tests := []struct {
		name        string
		filterExpr  string
		fieldKeys   map[string][]*telemetrytypes.TelemetryFieldKey
		expectID    string
		expectFound bool
	}{
		{
			name:       "simple trace_id filter",
			filterExpr: "trace_id = '123abc'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "123abc",
			expectFound: true,
		},
		{
			name:       "trace_id filter with double quotes",
			filterExpr: `trace_id = "456def"`,
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "456def",
			expectFound: true,
		},
		{
			name:       "traceId alternative name",
			filterExpr: "traceId = '789ghi'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"traceId": {{
					Name:         "traceId",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "789ghi",
			expectFound: true,
		},
		{
			name:       "trace_id in complex filter",
			filterExpr: "service.name = 'api' AND trace_id = 'xyz123' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "xyz123",
			expectFound: true,
		},
		{
			name:       "no trace_id in filter",
			filterExpr: "service.name = 'api' AND duration > 100",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"service.name": {{
					Name:         "service.name",
					FieldContext: telemetrytypes.FieldContextResource,
				}},
				"duration": {{
					Name:         "duration",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "",
			expectFound: false,
		},
		{
			name:       "trace_id field not in span context",
			filterExpr: "trace_id = '123'",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextAttribute,
				}},
			},
			expectID:    "",
			expectFound: false,
		},
		{
			name:       "unquoted trace_id value",
			filterExpr: "trace_id = abc123def",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "abc123def",
			expectFound: true,
		},
		{
			name:       "trace_id with parentheses",
			filterExpr: "(trace_id = '123') AND (service = 'api')",
			fieldKeys: map[string][]*telemetrytypes.TelemetryFieldKey{
				"trace_id": {{
					Name:         "trace_id",
					FieldContext: telemetrytypes.FieldContextSpan,
				}},
			},
			expectID:    "123",
			expectFound: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			id, found := ExtractTraceIDFromFilter(tt.filterExpr, tt.fieldKeys)
			assert.Equal(t, tt.expectFound, found)
			assert.Equal(t, tt.expectID, id)
		})
	}
}

View File

@ -30,6 +30,7 @@ var (
FunctionNameMedian7 = FunctionName{valuer.NewString("median7")} FunctionNameMedian7 = FunctionName{valuer.NewString("median7")}
FunctionNameTimeShift = FunctionName{valuer.NewString("timeShift")} FunctionNameTimeShift = FunctionName{valuer.NewString("timeShift")}
FunctionNameAnomaly = FunctionName{valuer.NewString("anomaly")} FunctionNameAnomaly = FunctionName{valuer.NewString("anomaly")}
FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
) )
// ApplyFunction applies the given function to the result data // ApplyFunction applies the given function to the result data
@ -89,6 +90,24 @@ func ApplyFunction(fn Function, result *TimeSeries) *TimeSeries {
// Placeholder for anomaly detection as function that can be used in dashboards other than // Placeholder for anomaly detection as function that can be used in dashboards other than
// the anomaly alert // the anomaly alert
return result return result
case FunctionNameFillZero:
// fillZero expects 3 arguments: start, end, step (all in milliseconds)
if len(args) < 3 {
return result
}
start, err := parseFloat64Arg(args[0].Value)
if err != nil {
return result
}
end, err := parseFloat64Arg(args[1].Value)
if err != nil {
return result
}
step, err := parseFloat64Arg(args[2].Value)
if err != nil || step <= 0 {
return result
}
return funcFillZero(result, int64(start), int64(end), int64(step))
} }
return result return result
} }
@ -357,3 +376,44 @@ func ApplyFunctions(functions []Function, result *TimeSeries) *TimeSeries {
} }
return result return result
} }
// funcFillZero fills gaps in a time series with zeros at regular step
// intervals so that every step-aligned timestamp in [start, end] has a value.
//
// start, end, and step are all in milliseconds. start is rounded down and end
// rounded up to the nearest step boundary; every emitted timestamp is a
// multiple of step, and existing points that do not fall on the aligned grid
// are dropped.
func funcFillZero(result *TimeSeries, start, end, step int64) *TimeSeries {
	// Defensive guard; ApplyFunction already rejects step <= 0 before calling.
	if step <= 0 {
		return result
	}

	// Align the requested range to step boundaries so the generated grid is
	// consistent regardless of how the caller aligned start/end.
	alignedStart := (start / step) * step
	alignedEnd := ((end + step - 1) / step) * step

	// Index existing points by timestamp for O(1) lookup while walking the grid.
	existingValues := make(map[int64]*TimeSeriesValue, len(result.Values))
	for _, v := range result.Values {
		existingValues[v.Timestamp] = v
	}

	// The output length is known exactly: one point per step in the aligned
	// range, so pre-size the slice to avoid repeated growth copies.
	filledValues := make([]*TimeSeriesValue, 0, (alignedEnd-alignedStart)/step+1)

	// Walk the aligned grid, reusing existing points and zero-filling the gaps.
	for ts := alignedStart; ts <= alignedEnd; ts += step {
		if val, exists := existingValues[ts]; exists {
			filledValues = append(filledValues, val)
		} else {
			filledValues = append(filledValues, &TimeSeriesValue{
				Timestamp: ts,
				Value:     0,
				Partial:   false,
			})
		}
	}

	result.Values = filledValues
	return result
}

View File

@ -0,0 +1,237 @@
package querybuildertypesv5
import (
"testing"
)
// TestFuncFillZero exercises funcFillZero across a table of series shapes:
// dense series, single and multiple gaps, empty input, values falling outside
// the requested range, and start/end values that are not step-aligned.
func TestFuncFillZero(t *testing.T) {
	tests := []struct {
		name     string
		input    *TimeSeries
		start    int64
		end      int64
		step     int64
		expected *TimeSeries
	}{
		{
			name: "no gaps",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 3000, Value: 3.0},
				},
			},
			start: 1000,
			end:   3000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 3000, Value: 3.0},
				},
			},
		},
		{
			name: "single gap",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 3000, Value: 3.0},
				},
			},
			start: 1000,
			end:   3000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 0},
					{Timestamp: 3000, Value: 3.0},
				},
			},
		},
		{
			name: "multiple gaps",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 3000, Value: 3.0},
					{Timestamp: 6000, Value: 6.0},
				},
			},
			start: 1000,
			end:   6000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 0},
					{Timestamp: 3000, Value: 3.0},
					{Timestamp: 4000, Value: 0},
					{Timestamp: 5000, Value: 0},
					{Timestamp: 6000, Value: 6.0},
				},
			},
		},
		{
			name: "irregular gaps",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 5000, Value: 5.0},
					{Timestamp: 6000, Value: 6.0},
				},
			},
			start: 1000,
			end:   6000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 3000, Value: 0},
					{Timestamp: 4000, Value: 0},
					{Timestamp: 5000, Value: 5.0},
					{Timestamp: 6000, Value: 6.0},
				},
			},
		},
		{
			// An empty series is filled entirely with zeros over the range.
			name: "empty series",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{},
			},
			start: 1000,
			end:   3000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 0},
					{Timestamp: 2000, Value: 0},
					{Timestamp: 3000, Value: 0},
				},
			},
		},
		{
			name: "single value",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
				},
			},
			start: 1000,
			end:   3000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 0},
					{Timestamp: 3000, Value: 0},
				},
			},
		},
		{
			// Points outside [start, end] (500 and 5000) must be dropped.
			name: "values outside range",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 500, Value: 0.5},
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 4000, Value: 4.0},
					{Timestamp: 5000, Value: 5.0},
				},
			},
			start: 1000,
			end:   4000,
			step:  1000,
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 1000, Value: 1.0},
					{Timestamp: 2000, Value: 2.0},
					{Timestamp: 3000, Value: 0},
					{Timestamp: 4000, Value: 4.0},
				},
			},
		},
		{
			// Start rounds down and end rounds up to the step boundary.
			name: "unaligned start and end",
			input: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 60000, Value: 1.0},
					{Timestamp: 120000, Value: 2.0},
					{Timestamp: 240000, Value: 4.0},
				},
			},
			start: 50000,  // Not aligned to 60s
			end:   250000, // Not aligned to 60s
			step:  60000,  // 60 seconds
			expected: &TimeSeries{
				Values: []*TimeSeriesValue{
					{Timestamp: 0, Value: 0}, // Aligned start
					{Timestamp: 60000, Value: 1.0},
					{Timestamp: 120000, Value: 2.0},
					{Timestamp: 180000, Value: 0}, // Filled gap
					{Timestamp: 240000, Value: 4.0},
					{Timestamp: 300000, Value: 0}, // Aligned end
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := funcFillZero(tt.input, tt.start, tt.end, tt.step)
			// A length mismatch makes index-wise comparison meaningless; fail fast.
			if len(result.Values) != len(tt.expected.Values) {
				t.Fatalf("Expected %d values, got %d", len(tt.expected.Values), len(result.Values))
			}
			// Compare timestamps and values point by point.
			for i, val := range result.Values {
				if val.Timestamp != tt.expected.Values[i].Timestamp {
					t.Errorf("At index %d: expected timestamp %d, got %d",
						i, tt.expected.Values[i].Timestamp, val.Timestamp)
				}
				if val.Value != tt.expected.Values[i].Value {
					t.Errorf("At index %d: expected value %f, got %f",
						i, tt.expected.Values[i].Value, val.Value)
				}
			}
		})
	}
}
// TestApplyFunction_FillZero verifies that ApplyFunction dispatches the
// fillZero function and zero-fills missing step-aligned points.
func TestApplyFunction_FillZero(t *testing.T) {
	series := &TimeSeries{
		Values: []*TimeSeriesValue{
			{Timestamp: 1000, Value: 10.0},
			{Timestamp: 3000, Value: 30.0},
			{Timestamp: 4000, Value: 40.0},
		},
	}

	// Arguments are start, end, and step, all in milliseconds.
	fillFn := Function{
		Name: FunctionNameFillZero,
		Args: []FunctionArg{
			{Value: 1000.0},
			{Value: 4000.0},
			{Value: 1000.0},
		},
	}

	got := ApplyFunction(fillFn, series)

	// Four step-aligned points are expected: 1000, 2000, 3000, 4000.
	if want := 4; len(got.Values) != want {
		t.Fatalf("Expected %d values after fillZero, got %d", want, len(got.Values))
	}

	// The missing point at t=2000 must have been filled with zero.
	filled := got.Values[1]
	if filled.Timestamp != 2000 || filled.Value != 0 {
		t.Errorf("Expected gap to be filled with 0 at timestamp 2000, got %v at %d",
			filled.Value, filled.Timestamp)
	}
}

View File

@ -49,5 +49,5 @@ type Statement struct {
// StatementBuilder builds the query. // StatementBuilder builds the query.
type StatementBuilder[T any] interface { type StatementBuilder[T any] interface {
// Build builds the query. // Build builds the query.
Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderQuery[T]) (*Statement, error) Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderQuery[T], variables map[string]VariableItem) (*Statement, error)
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
) )
type QueryEnvelope struct { type QueryEnvelope struct {
@ -176,6 +177,20 @@ func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
return nil return nil
} }
// VariableType identifies how a dashboard/query variable obtains its value:
// from a query, dynamically, from a custom list, or from a free-form text box.
type VariableType struct{ valuer.String }

var (
	QueryVariableType = VariableType{valuer.NewString("query")}
	DynamicVariableType = VariableType{valuer.NewString("dynamic")}
	CustomVariableType = VariableType{valuer.NewString("custom")}
	TextBoxVariableType = VariableType{valuer.NewString("textbox")}
)

// VariableItem is a single variable supplied with a query range request:
// its kind plus the currently selected value.
type VariableItem struct {
	// Type is the kind of variable (query, dynamic, custom, textbox).
	Type VariableType `json:"type"`
	// Value holds the selected value; presumably a scalar or a list of
	// scalars depending on the variable — confirm against callers.
	Value any `json:"value"`
}
type QueryRangeRequest struct { type QueryRangeRequest struct {
// SchemaVersion is the version of the schema to use for the request payload. // SchemaVersion is the version of the schema to use for the request payload.
SchemaVersion string `json:"schemaVersion"` SchemaVersion string `json:"schemaVersion"`
@ -188,7 +203,7 @@ type QueryRangeRequest struct {
// CompositeQuery is the composite query to use for the request. // CompositeQuery is the composite query to use for the request.
CompositeQuery CompositeQuery `json:"compositeQuery"` CompositeQuery CompositeQuery `json:"compositeQuery"`
// Variables is the variables to use for the request. // Variables is the variables to use for the request.
Variables map[string]any `json:"variables,omitempty"` Variables map[string]VariableItem `json:"variables,omitempty"`
// NoCache is a flag to disable caching for the request. // NoCache is a flag to disable caching for the request.
NoCache bool `json:"noCache,omitempty"` NoCache bool `json:"noCache,omitempty"`

View File

@ -1,7 +1,9 @@
package querybuildertypesv5 package querybuildertypesv5
import ( import (
"encoding/json"
"fmt" "fmt"
"math"
"slices" "slices"
"strings" "strings"
"time" "time"
@ -144,3 +146,50 @@ type RawRow struct {
Timestamp time.Time `json:"timestamp"` Timestamp time.Time `json:"timestamp"`
Data map[string]*any `json:"data"` Data map[string]*any `json:"data"`
} }
// MarshalJSON implements custom JSON marshaling for TimeSeriesValue so that
// NaN and +/-Inf — which encoding/json rejects as invalid JSON numbers — are
// emitted as the strings "NaN", "Inf", and "-Inf" instead of failing the
// whole payload. Finite floats marshal as regular JSON numbers.
func (t TimeSeriesValue) MarshalJSON() ([]byte, error) {
	// Mirror of TimeSeriesValue with `any`-typed value fields so special
	// float values can be substituted with their string representations.
	type CustomTimeSeriesValue struct {
		Timestamp int64   `json:"timestamp"`
		Value     any     `json:"value"`
		Partial   bool    `json:"partial,omitempty"`
		Values    []any   `json:"values,omitempty"`
		Bucket    *Bucket `json:"bucket,omitempty"`
	}

	custom := CustomTimeSeriesValue{
		Timestamp: t.Timestamp,
		Partial:   t.Partial,
		Bucket:    t.Bucket,
	}

	// Sanitize the scalar value.
	custom.Value = jsonSafeFloat(t.Value)

	// Sanitize each entry of the Values slice; a nil slice stays nil so
	// `omitempty` drops the field entirely.
	if t.Values != nil {
		custom.Values = make([]any, len(t.Values))
		for i, v := range t.Values {
			custom.Values[i] = jsonSafeFloat(v)
		}
	}

	return json.Marshal(custom)
}

// jsonSafeFloat returns v unchanged when it is a finite float, or the string
// "NaN"/"Inf"/"-Inf" for values that JSON cannot represent as numbers.
func jsonSafeFloat(v float64) any {
	switch {
	case math.IsNaN(v):
		return "NaN"
	case math.IsInf(v, 1):
		return "Inf"
	case math.IsInf(v, -1):
		return "-Inf"
	default:
		return v
	}
}

View File

@ -0,0 +1,107 @@
package querybuildertypesv5
import (
"encoding/json"
"math"
"strings"
"testing"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// TestTimeSeriesValue_MarshalJSON verifies the custom MarshalJSON encoding of
// TimeSeriesValue: finite floats marshal as JSON numbers while NaN and +/-Inf
// are emitted as the strings "NaN", "Inf", and "-Inf" — both for the scalar
// Value field and for entries of the Values slice.
func TestTimeSeriesValue_MarshalJSON(t *testing.T) {
	tests := []struct {
		name     string
		value    TimeSeriesValue
		expected string
	}{
		{
			name: "normal value",
			value: TimeSeriesValue{
				Timestamp: 1234567890,
				Value:     42.5,
			},
			expected: `{"timestamp":1234567890,"value":42.5}`,
		},
		{
			name: "NaN value",
			value: TimeSeriesValue{
				Timestamp: 1234567890,
				Value:     math.NaN(),
			},
			expected: `{"timestamp":1234567890,"value":"NaN"}`,
		},
		{
			name: "positive infinity",
			value: TimeSeriesValue{
				Timestamp: 1234567890,
				Value:     math.Inf(1),
			},
			expected: `{"timestamp":1234567890,"value":"Inf"}`,
		},
		{
			name: "negative infinity",
			value: TimeSeriesValue{
				Timestamp: 1234567890,
				Value:     math.Inf(-1),
			},
			expected: `{"timestamp":1234567890,"value":"-Inf"}`,
		},
		{
			// Special floats inside the Values slice are also stringified.
			name: "values array with NaN",
			value: TimeSeriesValue{
				Timestamp: 1234567890,
				Value:     1.0,
				Values:    []float64{1.0, math.NaN(), 3.0, math.Inf(1)},
			},
			expected: `{"timestamp":1234567890,"value":1,"values":[1,"NaN",3,"Inf"]}`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := json.Marshal(tt.value)
			if err != nil {
				t.Errorf("MarshalJSON() error = %v", err)
				return
			}
			// Compare the exact serialized form.
			if string(got) != tt.expected {
				t.Errorf("MarshalJSON() = %v, want %v", string(got), tt.expected)
			}
		})
	}
}
// TestTimeSeries_MarshalJSON_WithNaN checks that a whole TimeSeries containing
// NaN/Inf points still marshals to syntactically valid JSON, with the special
// float values encoded as strings.
func TestTimeSeries_MarshalJSON_WithNaN(t *testing.T) {
	series := &TimeSeries{
		Labels: []*Label{
			{Key: telemetrytypes.TelemetryFieldKey{Name: "test"}, Value: "value"},
		},
		Values: []*TimeSeriesValue{
			{Timestamp: 1000, Value: 1.0},
			{Timestamp: 2000, Value: math.NaN()},
			{Timestamp: 3000, Value: math.Inf(1)},
		},
	}

	data, err := json.Marshal(series)
	if err != nil {
		t.Fatalf("Failed to marshal TimeSeries: %v", err)
	}

	// The output must parse as valid JSON even with special floats present.
	var decoded map[string]interface{}
	if err := json.Unmarshal(data, &decoded); err != nil {
		t.Fatalf("Failed to unmarshal result: %v", err)
	}

	// Special values cannot round-trip as numbers, so just confirm their
	// string encodings appear in the payload.
	jsonStr := string(data)
	for _, check := range []struct{ needle, label string }{
		{`"value":"NaN"`, "NaN"},
		{`"value":"Inf"`, "Inf"},
	} {
		if !strings.Contains(jsonStr, check.needle) {
			t.Errorf("Expected JSON to contain %s as string, got %s", check.label, jsonStr)
		}
	}
}

View File

@ -80,6 +80,7 @@ func ValidateFunctionName(name FunctionName) error {
FunctionNameMedian7, FunctionNameMedian7,
FunctionNameTimeShift, FunctionNameTimeShift,
FunctionNameAnomaly, FunctionNameAnomaly,
FunctionNameFillZero,
} }
if slices.Contains(validFunctions, name) { if slices.Contains(validFunctions, name) {
@ -106,8 +107,8 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
return err return err
} }
// Validate aggregations only for non-raw request types // Validate aggregations only for non-raw request types and non-disabled queries
if requestType != RequestTypeRaw { if requestType != RequestTypeRaw && !q.Disabled {
if err := q.validateAggregations(); err != nil { if err := q.validateAggregations(); err != nil {
return err return err
} }
@ -128,9 +129,23 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
return err return err
} }
// Validate order by // Validate order by (for aggregation queries)
if err := q.validateOrderBy(); err != nil { if requestType != RequestTypeRaw && len(q.Aggregations) > 0 {
return err if err := q.validateOrderByForAggregation(); err != nil {
return err
}
} else {
// For non-aggregation queries, use regular validation
if err := q.validateOrderBy(); err != nil {
return err
}
}
// Validate having clause
if requestType != RequestTypeRaw {
if err := q.validateHaving(); err != nil {
return err
}
} }
return nil return nil
@ -326,6 +341,118 @@ func (q *QueryBuilderQuery[T]) validateOrderBy() error {
return nil return nil
} }
// validateOrderByForAggregation checks order by clauses on aggregation
// queries. Such queries may only order by:
//  1. group by keys,
//  2. aggregation aliases or expressions, or
//  3. an aggregation index (0, 1, 2, ...).
func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
	// The common order-by constraints apply first.
	if err := q.validateOrderBy(); err != nil {
		return err
	}

	// Collect the set of keys an order by clause is allowed to reference.
	allowed := map[string]struct{}{}

	// Every group by key is orderable.
	for _, gb := range q.GroupBy {
		allowed[gb.TelemetryFieldKey.Name] = struct{}{}
	}

	for i, agg := range q.Aggregations {
		// Aggregations can be referenced by their positional index.
		allowed[fmt.Sprintf("%d", i)] = struct{}{}

		switch a := any(agg).(type) {
		case TraceAggregation:
			// Both the alias (when set) and the raw expression are orderable.
			if a.Alias != "" {
				allowed[a.Alias] = struct{}{}
			}
			allowed[a.Expression] = struct{}{}
		case LogAggregation:
			if a.Alias != "" {
				allowed[a.Alias] = struct{}{}
			}
			allowed[a.Expression] = struct{}{}
		case MetricAggregation:
			// Metrics expose the generic __result key; richer patterns such as
			// sum(cpu_usage) or avg(memory) are complex and are validated
			// during SQL generation instead.
			allowed["__result"] = struct{}{}
		}
	}

	// Reject the first order by clause that references an unknown key.
	for i, order := range q.Order {
		key := order.Key.Name
		if _, ok := allowed[key]; ok {
			continue
		}

		orderId := fmt.Sprintf("order by clause #%d", i+1)
		if q.Name != "" {
			orderId = fmt.Sprintf("order by clause #%d in query '%s'", i+1, q.Name)
		}

		// Include the sorted list of valid keys so the error is actionable.
		validKeys := make([]string, 0, len(allowed))
		for k := range allowed {
			validKeys = append(validKeys, k)
		}
		slices.Sort(validKeys)

		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid order by key '%s' for %s",
			key,
			orderId,
		).WithAdditional(
			fmt.Sprintf("For aggregation queries, order by can only reference group by keys, aggregation aliases/expressions, or aggregation indices. Valid keys are: %s", strings.Join(validKeys, ", ")),
		)
	}

	return nil
}
// validateHaving validates the having clause on aggregation queries. A having
// clause may only reference aggregation results, never non-aggregated columns,
// so it is rejected outright on queries without any aggregations.
func (q *QueryBuilderQuery[T]) validateHaving() error {
	// Nothing to check when no having expression was supplied.
	if q.Having == nil || q.Having.Expression == "" {
		return nil
	}

	// The expression itself is parsed and validated during query execution;
	// here we only ensure having is paired with at least one aggregation.
	if len(q.Aggregations) > 0 {
		// TODO: possible future validation: parse the expression for syntactic
		// validity, confirm all referenced fields are aggregation results, and
		// validate operators and values. That requires expression parsing here.
		return nil
	}

	return errors.NewInvalidInputf(
		errors.CodeInvalidInput,
		"having clause can only be used with aggregation queries",
	)
}
// ValidateQueryRangeRequest validates the entire query range request // ValidateQueryRangeRequest validates the entire query range request
func (r *QueryRangeRequest) Validate() error { func (r *QueryRangeRequest) Validate() error {
// Validate time range // Validate time range