2025-04-11 19:41:02 +05:30
|
|
|
package telemetrymetadata
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
"context"
|
|
|
|
|
"fmt"
|
2025-05-25 11:40:39 +05:30
|
|
|
"log/slog"
|
2025-07-12 16:47:59 +05:30
|
|
|
"slices"
|
2025-06-23 14:00:50 +05:30
|
|
|
"strings"
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
"github.com/SigNoz/signoz/pkg/errors"
|
2025-06-10 18:26:28 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/factory"
|
2025-05-27 20:54:48 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
2025-06-23 14:00:50 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
2025-04-11 19:41:02 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
2025-06-23 14:00:50 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/telemetrytraces"
|
2025-06-16 23:11:28 +05:30
|
|
|
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
2025-04-11 19:41:02 +05:30
|
|
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
|
|
|
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
|
|
|
|
"github.com/huandu/go-sqlbuilder"
|
2025-07-02 10:39:16 +05:30
|
|
|
"golang.org/x/exp/maps"
|
2025-04-11 19:41:02 +05:30
|
|
|
)
|
|
|
|
|
|
|
|
|
|
// Sentinel errors for metadata lookups. Each one names the signal (or
// statement fetch) whose underlying ClickHouse query failed; they are used
// as wrap messages so callers can tell which lookup path broke.
var (
	ErrFailedToGetTracesKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get traces keys")
	ErrFailedToGetLogsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get logs keys")
	ErrFailedToGetTblStatement = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get tbl statement")
	ErrFailedToGetMetricsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get metrics keys")
	ErrFailedToGetMeterKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get meter keys")
	ErrFailedToGetMeterValues = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get meter values")
	ErrFailedToGetRelatedValues = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get related values")
)
|
|
|
|
|
|
|
|
|
|
// telemetryMetaStore implements telemetrytypes.MetadataStore on top of
// ClickHouse. It resolves telemetry field keys (and related metadata) for
// traces, logs and metrics by querying the database/table names configured
// below.
type telemetryMetaStore struct {
	logger *slog.Logger
	telemetrystore telemetrystore.TelemetryStore
	// traces: database plus the fields, span-attribute-keys and main index tables
	tracesDBName string
	tracesFieldsTblName string
	spanAttributesKeysTblName string
	indexV3TblName string
	// metrics and meter: database plus attribute-fields tables
	metricsDBName string
	metricsFieldsTblName string
	meterDBName string
	meterFieldsTblName string
	// logs: database plus the fields, attribute/resource key and main v2 tables
	logsDBName string
	logsFieldsTblName string
	logAttributeKeysTblName string
	logResourceKeysTblName string
	logsV2TblName string
	// cross-signal related-metadata table
	relatedMetadataDBName string
	relatedMetadataTblName string

	// fm maps telemetry field keys to concrete columns; conditionBuilder
	// turns field selectors into SQL conditions using that mapping.
	fm qbtypes.FieldMapper
	conditionBuilder qbtypes.ConditionBuilder
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// escapeForLike escapes the SQL LIKE/ILIKE wildcard characters ('_' and '%')
// in s with a backslash so the string matches literally when embedded in a
// LIKE pattern.
func escapeForLike(s string) string {
	escaper := strings.NewReplacer(`_`, `\_`, `%`, `\%`)
	return escaper.Replace(s)
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
func NewTelemetryMetaStore(
|
2025-06-10 18:26:28 +05:30
|
|
|
settings factory.ProviderSettings,
|
2025-04-11 19:41:02 +05:30
|
|
|
telemetrystore telemetrystore.TelemetryStore,
|
|
|
|
|
tracesDBName string,
|
|
|
|
|
tracesFieldsTblName string,
|
2025-08-12 18:10:35 +05:30
|
|
|
spanAttributesKeysTblName string,
|
2025-04-11 19:41:02 +05:30
|
|
|
indexV3TblName string,
|
|
|
|
|
metricsDBName string,
|
|
|
|
|
metricsFieldsTblName string,
|
2025-08-07 16:50:37 +05:30
|
|
|
meterDBName string,
|
|
|
|
|
meterFieldsTblName string,
|
2025-04-11 19:41:02 +05:30
|
|
|
logsDBName string,
|
|
|
|
|
logsV2TblName string,
|
|
|
|
|
logsFieldsTblName string,
|
2025-08-12 18:10:35 +05:30
|
|
|
logAttributeKeysTblName string,
|
|
|
|
|
logResourceKeysTblName string,
|
2025-04-11 19:41:02 +05:30
|
|
|
relatedMetadataDBName string,
|
|
|
|
|
relatedMetadataTblName string,
|
2025-04-18 19:03:17 +05:30
|
|
|
) telemetrytypes.MetadataStore {
|
2025-06-16 23:11:28 +05:30
|
|
|
metadataSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrymetadata")
|
2025-04-23 14:35:56 +05:30
|
|
|
|
2025-05-16 20:09:57 +05:30
|
|
|
t := &telemetryMetaStore{
|
2025-08-12 18:10:35 +05:30
|
|
|
logger: metadataSettings.Logger(),
|
|
|
|
|
telemetrystore: telemetrystore,
|
|
|
|
|
tracesDBName: tracesDBName,
|
|
|
|
|
tracesFieldsTblName: tracesFieldsTblName,
|
|
|
|
|
spanAttributesKeysTblName: spanAttributesKeysTblName,
|
|
|
|
|
indexV3TblName: indexV3TblName,
|
|
|
|
|
metricsDBName: metricsDBName,
|
|
|
|
|
metricsFieldsTblName: metricsFieldsTblName,
|
|
|
|
|
meterDBName: meterDBName,
|
|
|
|
|
meterFieldsTblName: meterFieldsTblName,
|
|
|
|
|
logsDBName: logsDBName,
|
|
|
|
|
logsV2TblName: logsV2TblName,
|
|
|
|
|
logsFieldsTblName: logsFieldsTblName,
|
|
|
|
|
logAttributeKeysTblName: logAttributeKeysTblName,
|
|
|
|
|
logResourceKeysTblName: logResourceKeysTblName,
|
|
|
|
|
relatedMetadataDBName: relatedMetadataDBName,
|
|
|
|
|
relatedMetadataTblName: relatedMetadataTblName,
|
2025-04-18 19:03:17 +05:30
|
|
|
}
|
2025-05-16 20:09:57 +05:30
|
|
|
|
|
|
|
|
fm := NewFieldMapper()
|
|
|
|
|
conditionBuilder := NewConditionBuilder(fm)
|
|
|
|
|
|
|
|
|
|
t.fm = fm
|
|
|
|
|
t.conditionBuilder = conditionBuilder
|
|
|
|
|
|
|
|
|
|
return t
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// tracesTblStatementToFieldKeys returns materialised attribute/resource/scope keys from the traces table
|
|
|
|
|
func (t *telemetryMetaStore) tracesTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
|
|
|
|
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.tracesDBName, t.indexV3TblName)
|
|
|
|
|
statements := []telemetrytypes.ShowCreateTableStatement{}
|
|
|
|
|
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for idx := range materialisedKeys {
|
|
|
|
|
materialisedKeys[idx].Signal = telemetrytypes.SignalTraces
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return materialisedKeys, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// getTracesKeys returns the keys from the spans that match the field selection criteria
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
if len(fieldKeySelectors) == 0 {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, true, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// pre-fetch the materialised keys from the traces table
|
|
|
|
|
matKeys, err := t.tracesTblStatementToFieldKeys(ctx)
|
|
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
|
|
|
|
|
for _, key := range matKeys {
|
|
|
|
|
mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
sb := sqlbuilder.Select(
|
|
|
|
|
"tagKey AS tag_key",
|
|
|
|
|
"tagType AS tag_type",
|
|
|
|
|
"dataType AS tag_data_type",
|
|
|
|
|
`CASE
|
|
|
|
|
// WHEN tagType = 'spanfield' THEN 1
|
|
|
|
|
WHEN tagType = 'resource' THEN 2
|
|
|
|
|
// WHEN tagType = 'scope' THEN 3
|
|
|
|
|
WHEN tagType = 'tag' THEN 4
|
|
|
|
|
ELSE 5
|
|
|
|
|
END as priority`,
|
|
|
|
|
).From(t.tracesDBName + "." + t.spanAttributesKeysTblName)
|
2025-04-11 19:41:02 +05:30
|
|
|
var limit int
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
searchTexts := []string{}
|
2025-07-12 16:47:59 +05:30
|
|
|
dataTypes := []telemetrytypes.FieldDataType{}
|
2025-06-23 14:00:50 +05:30
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
conds := []string{}
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
// TODO(srikanthccv): support time filtering for span attribute keys
|
|
|
|
|
// if fieldKeySelector.StartUnixMilli != 0 {
|
|
|
|
|
// conds = append(conds, sb.GE("unix_milli", fieldKeySelector.StartUnixMilli))
|
|
|
|
|
// }
|
|
|
|
|
// if fieldKeySelector.EndUnixMilli != 0 {
|
|
|
|
|
// conds = append(conds, sb.LE("unix_milli", fieldKeySelector.EndUnixMilli))
|
|
|
|
|
// }
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
// key part of the selector
|
|
|
|
|
fieldKeyConds := []string{}
|
|
|
|
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
2025-08-12 18:10:35 +05:30
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.E("tagKey", fieldKeySelector.Name))
|
2025-04-11 19:41:02 +05:30
|
|
|
} else {
|
2025-08-12 18:10:35 +05:30
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.ILike("tagKey", "%"+escapeForLike(fieldKeySelector.Name)+"%"))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
searchTexts = append(searchTexts, fieldKeySelector.Name)
|
2025-07-12 16:47:59 +05:30
|
|
|
if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
dataTypes = append(dataTypes, fieldKeySelector.FieldDataType)
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
// now look at the field context
|
2025-08-12 18:10:35 +05:30
|
|
|
// we don't write most of intrinsic fields to keys table
|
|
|
|
|
// for this reason we don't want to apply tagType if the field context
|
|
|
|
|
// is not attribute or resource attribute
|
2025-07-18 18:37:57 +05:30
|
|
|
if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified &&
|
|
|
|
|
(fieldKeySelector.FieldContext == telemetrytypes.FieldContextAttribute ||
|
|
|
|
|
fieldKeySelector.FieldContext == telemetrytypes.FieldContextResource) {
|
2025-08-12 18:10:35 +05:30
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.E("tagType", fieldKeySelector.FieldContext.TagType()))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// now look at the field data type
|
|
|
|
|
if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
2025-08-12 18:10:35 +05:30
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.E("dataType", fieldKeySelector.FieldDataType.TagDataType()))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
conds = append(conds, sb.And(fieldKeyConds...))
|
|
|
|
|
limit += fieldKeySelector.Limit
|
|
|
|
|
}
|
2025-08-18 15:11:53 +05:30
|
|
|
// the span_attribute_keys has historically pushed the top level column as attributes
|
|
|
|
|
sb.Where(sb.Or(conds...)).Where("isColumn = false")
|
2025-08-12 18:10:35 +05:30
|
|
|
sb.GroupBy("tagKey", "tagType", "dataType")
|
2025-04-11 19:41:02 +05:30
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 1000
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
mainSb := sqlbuilder.Select("tag_key", "tag_type", "tag_data_type", "max(priority) as priority")
|
|
|
|
|
mainSb.From(mainSb.BuilderAs(sb, "sub_query"))
|
|
|
|
|
mainSb.GroupBy("tag_key", "tag_type", "tag_data_type")
|
|
|
|
|
mainSb.OrderBy("priority")
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
mainSb.Limit(limit + 1)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
query, args := mainSb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var name string
|
|
|
|
|
var fieldContext telemetrytypes.FieldContext
|
|
|
|
|
var fieldDataType telemetrytypes.FieldDataType
|
|
|
|
|
var priority uint8
|
|
|
|
|
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
|
|
|
|
|
|
|
|
|
|
// if there is no materialised column, create a key with the field context and data type
|
|
|
|
|
if !ok {
|
|
|
|
|
key = &telemetrytypes.TelemetryFieldKey{
|
|
|
|
|
Name: name,
|
2025-04-23 14:35:56 +05:30
|
|
|
Signal: telemetrytypes.SignalTraces,
|
2025-04-11 19:41:02 +05:30
|
|
|
FieldContext: fieldContext,
|
|
|
|
|
FieldDataType: fieldDataType,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
keys = append(keys, key)
|
2025-07-02 10:39:16 +05:30
|
|
|
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if rows.Err() != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// hit the limit? (only counting DB results)
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
2025-07-12 16:47:59 +05:30
|
|
|
staticKeys := []string{"isRoot", "isEntryPoint"}
|
2025-07-02 10:39:16 +05:30
|
|
|
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...)
|
|
|
|
|
staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...)
|
2025-06-23 14:00:50 +05:30
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// Add matching intrinsic and matching calculated fields
|
|
|
|
|
// These don't count towards the limit
|
2025-06-23 14:00:50 +05:30
|
|
|
for _, key := range staticKeys {
|
|
|
|
|
found := false
|
|
|
|
|
for _, v := range searchTexts {
|
|
|
|
|
if v == "" || strings.Contains(key, v) {
|
|
|
|
|
found = true
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-12 16:47:59 +05:30
|
|
|
|
|
|
|
|
// skip the keys that don't match data type
|
|
|
|
|
if field, exists := telemetrytraces.IntrinsicFields[key]; exists {
|
|
|
|
|
if len(dataTypes) > 0 &&
|
|
|
|
|
slices.Index(dataTypes, field.FieldDataType) == -1 &&
|
|
|
|
|
field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if field, exists := telemetrytraces.CalculatedFields[key]; exists {
|
|
|
|
|
if len(dataTypes) > 0 &&
|
|
|
|
|
slices.Index(dataTypes, field.FieldDataType) == -1 &&
|
|
|
|
|
field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
if found {
|
2025-07-02 10:39:16 +05:30
|
|
|
if field, exists := telemetrytraces.IntrinsicFields[key]; exists {
|
|
|
|
|
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
|
|
|
|
|
keys = append(keys, &field)
|
|
|
|
|
}
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if field, exists := telemetrytraces.CalculatedFields[key]; exists {
|
|
|
|
|
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
|
|
|
|
|
keys = append(keys, &field)
|
|
|
|
|
}
|
|
|
|
|
continue
|
|
|
|
|
}
|
2025-06-23 14:00:50 +05:30
|
|
|
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
|
|
|
|
|
Name: key,
|
|
|
|
|
FieldContext: telemetrytypes.FieldContextSpan,
|
|
|
|
|
Signal: telemetrytypes.SignalTraces,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
return keys, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// logsTblStatementToFieldKeys returns materialised attribute/resource/scope keys from the logs table
|
|
|
|
|
func (t *telemetryMetaStore) logsTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
|
|
|
|
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.logsDBName, t.logsV2TblName)
|
|
|
|
|
statements := []telemetrytypes.ShowCreateTableStatement{}
|
|
|
|
|
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for idx := range materialisedKeys {
|
|
|
|
|
materialisedKeys[idx].Signal = telemetrytypes.SignalLogs
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return materialisedKeys, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// getLogsKeys returns the keys from the spans that match the field selection criteria
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
if len(fieldKeySelectors) == 0 {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, true, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// pre-fetch the materialised keys from the logs table
|
|
|
|
|
matKeys, err := t.logsTblStatementToFieldKeys(ctx)
|
|
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
|
|
|
|
|
for _, key := range matKeys {
|
|
|
|
|
mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
// queries for both attribute and resource keys tables
|
|
|
|
|
var queries []string
|
|
|
|
|
var allArgs []any
|
2025-08-08 17:04:09 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
// tables to query based on field selectors
|
|
|
|
|
queryAttributeTable := false
|
|
|
|
|
queryResourceTable := false
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
for _, selector := range fieldKeySelectors {
|
|
|
|
|
if selector.FieldContext == telemetrytypes.FieldContextUnspecified {
|
|
|
|
|
// unspecified context, query both tables
|
|
|
|
|
queryAttributeTable = true
|
|
|
|
|
queryResourceTable = true
|
|
|
|
|
break
|
|
|
|
|
} else if selector.FieldContext == telemetrytypes.FieldContextAttribute {
|
|
|
|
|
queryAttributeTable = true
|
|
|
|
|
} else if selector.FieldContext == telemetrytypes.FieldContextResource {
|
|
|
|
|
queryResourceTable = true
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-06-23 14:00:50 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
tablesToQuery := []struct {
|
|
|
|
|
fieldContext telemetrytypes.FieldContext
|
|
|
|
|
shouldQuery bool
|
|
|
|
|
}{
|
|
|
|
|
{telemetrytypes.FieldContextAttribute, queryAttributeTable},
|
|
|
|
|
{telemetrytypes.FieldContextResource, queryResourceTable},
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
for _, table := range tablesToQuery {
|
|
|
|
|
if !table.shouldQuery {
|
|
|
|
|
continue
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
fieldContext := table.fieldContext
|
|
|
|
|
|
|
|
|
|
// table name based on field context
|
|
|
|
|
var tblName string
|
|
|
|
|
if fieldContext == telemetrytypes.FieldContextAttribute {
|
|
|
|
|
tblName = t.logsDBName + "." + t.logAttributeKeysTblName
|
2025-04-11 19:41:02 +05:30
|
|
|
} else {
|
2025-08-12 18:10:35 +05:30
|
|
|
tblName = t.logsDBName + "." + t.logResourceKeysTblName
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-12 18:10:35 +05:30
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select(
|
|
|
|
|
"name AS tag_key",
|
|
|
|
|
fmt.Sprintf("'%s' AS tag_type", fieldContext.TagType()),
|
2025-08-18 15:11:53 +05:30
|
|
|
"lower(datatype) AS tag_data_type", // in logs, we had some historical data with capital and small case
|
2025-08-12 18:10:35 +05:30
|
|
|
fmt.Sprintf(`%d AS priority`, getPriorityForContext(fieldContext)),
|
|
|
|
|
).From(tblName)
|
|
|
|
|
|
|
|
|
|
var limit int
|
|
|
|
|
conds := []string{}
|
|
|
|
|
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
|
|
|
|
// Include this selector if:
|
|
|
|
|
// 1. It has unspecified context (matches all tables)
|
|
|
|
|
// 2. Its context matches the current table's context
|
|
|
|
|
if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified &&
|
|
|
|
|
fieldKeySelector.FieldContext != fieldContext {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// key part of the selector
|
|
|
|
|
fieldKeyConds := []string{}
|
|
|
|
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
|
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.E("name", fieldKeySelector.Name))
|
|
|
|
|
} else {
|
|
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.ILike("name", "%"+escapeForLike(fieldKeySelector.Name)+"%"))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// now look at the field data type
|
|
|
|
|
if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
fieldKeyConds = append(fieldKeyConds, sb.E("datatype", fieldKeySelector.FieldDataType.TagDataType()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if len(fieldKeyConds) > 0 {
|
|
|
|
|
conds = append(conds, sb.And(fieldKeyConds...))
|
|
|
|
|
}
|
|
|
|
|
limit += fieldKeySelector.Limit
|
2025-07-12 16:47:59 +05:30
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
if len(conds) > 0 {
|
|
|
|
|
sb.Where(sb.Or(conds...))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
sb.GroupBy("name", "datatype")
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 1000
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
queries = append(queries, query)
|
|
|
|
|
allArgs = append(allArgs, args...)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if len(queries) == 0 {
|
|
|
|
|
// No matching contexts, return empty result
|
|
|
|
|
return []*telemetrytypes.TelemetryFieldKey{}, true, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Combine queries with UNION ALL
|
|
|
|
|
var limit int
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
2025-04-11 19:41:02 +05:30
|
|
|
limit += fieldKeySelector.Limit
|
|
|
|
|
}
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 1000
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
mainQuery := fmt.Sprintf(`
|
|
|
|
|
SELECT tag_key, tag_type, tag_data_type, max(priority) as priority
|
|
|
|
|
FROM (
|
|
|
|
|
%s
|
|
|
|
|
) AS combined_results
|
|
|
|
|
GROUP BY tag_key, tag_type, tag_data_type
|
|
|
|
|
ORDER BY priority
|
|
|
|
|
LIMIT %d
|
|
|
|
|
`, strings.Join(queries, " UNION ALL "), limit+1)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, mainQuery, allArgs...)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
2025-08-12 18:10:35 +05:30
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
2025-08-12 18:10:35 +05:30
|
|
|
searchTexts := []string{}
|
|
|
|
|
dataTypes := []telemetrytypes.FieldDataType{}
|
|
|
|
|
|
|
|
|
|
// Collect search texts and data types for static field matching
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
|
|
|
|
searchTexts = append(searchTexts, fieldKeySelector.Name)
|
|
|
|
|
if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
dataTypes = append(dataTypes, fieldKeySelector.FieldDataType)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var name string
|
|
|
|
|
var fieldContext telemetrytypes.FieldContext
|
|
|
|
|
var fieldDataType telemetrytypes.FieldDataType
|
|
|
|
|
var priority uint8
|
|
|
|
|
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
|
|
|
|
|
|
|
|
|
|
// if there is no materialised column, create a key with the field context and data type
|
|
|
|
|
if !ok {
|
|
|
|
|
key = &telemetrytypes.TelemetryFieldKey{
|
|
|
|
|
Name: name,
|
2025-04-23 14:35:56 +05:30
|
|
|
Signal: telemetrytypes.SignalLogs,
|
2025-04-11 19:41:02 +05:30
|
|
|
FieldContext: fieldContext,
|
|
|
|
|
FieldDataType: fieldDataType,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
keys = append(keys, key)
|
2025-07-02 10:39:16 +05:30
|
|
|
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if rows.Err() != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// hit the limit? (only counting DB results)
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
staticKeys := []string{}
|
2025-07-02 10:39:16 +05:30
|
|
|
staticKeys = append(staticKeys, maps.Keys(telemetrylogs.IntrinsicFields)...)
|
2025-06-23 14:00:50 +05:30
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// Add matching intrinsic and matching calculated fields
|
|
|
|
|
// These don't count towards the limit
|
2025-06-23 14:00:50 +05:30
|
|
|
for _, key := range staticKeys {
|
|
|
|
|
found := false
|
|
|
|
|
for _, v := range searchTexts {
|
|
|
|
|
if v == "" || strings.Contains(key, v) {
|
|
|
|
|
found = true
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-07-12 16:47:59 +05:30
|
|
|
|
|
|
|
|
// skip the keys that don't match data type
|
|
|
|
|
if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
|
|
|
|
|
if len(dataTypes) > 0 &&
|
|
|
|
|
slices.Index(dataTypes, field.FieldDataType) == -1 &&
|
|
|
|
|
field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
if found {
|
2025-07-02 10:39:16 +05:30
|
|
|
if field, exists := telemetrylogs.IntrinsicFields[key]; exists {
|
|
|
|
|
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
|
|
|
|
|
keys = append(keys, &field)
|
|
|
|
|
}
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
|
2025-06-23 14:00:50 +05:30
|
|
|
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
|
|
|
|
|
Name: key,
|
|
|
|
|
FieldContext: telemetrytypes.FieldContextLog,
|
|
|
|
|
Signal: telemetrytypes.SignalLogs,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
return keys, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-12 18:10:35 +05:30
|
|
|
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
|
|
|
|
|
switch ctx {
|
|
|
|
|
case telemetrytypes.FieldContextLog:
|
|
|
|
|
return 1
|
|
|
|
|
case telemetrytypes.FieldContextResource:
|
|
|
|
|
return 2
|
|
|
|
|
case telemetrytypes.FieldContextScope:
|
|
|
|
|
return 3
|
|
|
|
|
case telemetrytypes.FieldContextAttribute:
|
|
|
|
|
return 4
|
|
|
|
|
default:
|
|
|
|
|
return 5
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
// getMetricsKeys returns the keys from the metrics that match the field selection criteria
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getMetricsKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
if len(fieldKeySelectors) == 0 {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, true, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
sb := sqlbuilder.
|
|
|
|
|
Select("attr_name as name", "attr_type as field_context", "attr_datatype as field_data_type", `
|
|
|
|
|
CASE
|
|
|
|
|
WHEN attr_type = 'resource' THEN 1
|
|
|
|
|
WHEN attr_type = 'scope' THEN 2
|
|
|
|
|
WHEN attr_type = 'point' THEN 3
|
|
|
|
|
ELSE 4
|
|
|
|
|
END as priority`).
|
|
|
|
|
From(t.metricsDBName + "." + t.metricsFieldsTblName)
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var limit int
|
|
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
conds := []string{}
|
2025-04-11 19:41:02 +05:30
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
2025-04-23 14:35:56 +05:30
|
|
|
fieldConds := []string{}
|
2025-04-11 19:41:02 +05:30
|
|
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
2025-04-23 14:35:56 +05:30
|
|
|
fieldConds = append(fieldConds, sb.E("attr_name", fieldKeySelector.Name))
|
2025-04-11 19:41:02 +05:30
|
|
|
} else {
|
2025-08-06 23:05:39 +05:30
|
|
|
fieldConds = append(fieldConds, sb.ILike("attr_name", "%"+escapeForLike(fieldKeySelector.Name)+"%"))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
2025-06-26 15:10:31 +05:30
|
|
|
fieldConds = append(fieldConds, sb.NotLike("attr_name", "\\_\\_%"))
|
2025-04-23 14:35:56 +05:30
|
|
|
|
2025-06-26 15:10:31 +05:30
|
|
|
// note: type and datatype do not have much significance in metrics
|
2025-04-23 14:35:56 +05:30
|
|
|
|
2025-06-26 15:10:31 +05:30
|
|
|
// if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
|
|
|
|
// fieldConds = append(fieldConds, sb.E("attr_type", fieldKeySelector.FieldContext.TagType()))
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
// if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
// fieldConds = append(fieldConds, sb.E("attr_datatype", fieldKeySelector.FieldDataType.TagDataType()))
|
|
|
|
|
// }
|
2025-04-23 14:35:56 +05:30
|
|
|
|
|
|
|
|
if fieldKeySelector.MetricContext != nil {
|
|
|
|
|
fieldConds = append(fieldConds, sb.E("metric_name", fieldKeySelector.MetricContext.MetricName))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
conds = append(conds, sb.And(fieldConds...))
|
2025-04-11 19:41:02 +05:30
|
|
|
limit += fieldKeySelector.Limit
|
|
|
|
|
}
|
2025-04-23 14:35:56 +05:30
|
|
|
sb.Where(sb.Or(conds...))
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.GroupBy("name", "field_context", "field_data_type")
|
2025-04-23 14:35:56 +05:30
|
|
|
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 1000
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
mainSb := sqlbuilder.Select("name", "field_context", "field_data_type", "max(priority) as priority")
|
|
|
|
|
mainSb.From(mainSb.BuilderAs(sb, "sub_query"))
|
|
|
|
|
mainSb.GroupBy("name", "field_context", "field_data_type")
|
|
|
|
|
mainSb.OrderBy("priority")
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
mainSb.Limit(limit + 1)
|
2025-04-23 14:35:56 +05:30
|
|
|
|
|
|
|
|
query, args := mainSb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var name string
|
2025-04-23 14:35:56 +05:30
|
|
|
var fieldContext telemetrytypes.FieldContext
|
|
|
|
|
var fieldDataType telemetrytypes.FieldDataType
|
|
|
|
|
var priority uint8
|
|
|
|
|
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-04-23 14:35:56 +05:30
|
|
|
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
|
2025-04-11 19:41:02 +05:30
|
|
|
Name: name,
|
2025-04-23 14:35:56 +05:30
|
|
|
Signal: telemetrytypes.SignalMetrics,
|
|
|
|
|
FieldContext: fieldContext,
|
|
|
|
|
FieldDataType: fieldDataType,
|
|
|
|
|
})
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if rows.Err() != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return keys, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-07 16:50:37 +05:30
|
|
|
// getMeterKeys returns the keys from the meter metrics that match the field selection criteria
|
|
|
|
|
func (t *telemetryMetaStore) getMeterSourceMetricKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
|
|
|
|
if len(fieldKeySelectors) == 0 {
|
|
|
|
|
return nil, true, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select("DISTINCT arrayJoin(JSONExtractKeys(labels)) as attr_name").From(t.meterDBName + "." + t.meterFieldsTblName)
|
|
|
|
|
conds := []string{}
|
|
|
|
|
var limit int
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
|
|
|
|
fieldConds := []string{}
|
|
|
|
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
|
|
|
|
fieldConds = append(fieldConds, sb.E("attr_name", fieldKeySelector.Name))
|
|
|
|
|
} else {
|
|
|
|
|
fieldConds = append(fieldConds, sb.Like("attr_name", "%"+fieldKeySelector.Name+"%"))
|
|
|
|
|
}
|
|
|
|
|
fieldConds = append(fieldConds, sb.NotLike("attr_name", "\\_\\_%"))
|
|
|
|
|
|
|
|
|
|
if fieldKeySelector.MetricContext != nil {
|
|
|
|
|
fieldConds = append(fieldConds, sb.E("metric_name", fieldKeySelector.MetricContext.MetricName))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
conds = append(conds, sb.And(fieldConds...))
|
|
|
|
|
limit += fieldKeySelector.Limit
|
|
|
|
|
}
|
|
|
|
|
sb.Where(sb.Or(conds...))
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 1000
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
sb.Limit(limit)
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
|
2025-08-07 16:50:37 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
|
|
|
|
rowCount := 0
|
|
|
|
|
for rows.Next() {
|
|
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var name string
|
|
|
|
|
err = rows.Scan(&name)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
|
2025-08-07 16:50:37 +05:30
|
|
|
}
|
|
|
|
|
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
|
|
|
|
|
Name: name,
|
|
|
|
|
Signal: telemetrytypes.SignalMetrics,
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if rows.Err() != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterKeys.Error())
|
2025-08-07 16:50:37 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return keys, complete, nil
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
var keys []*telemetrytypes.TelemetryFieldKey
|
2025-08-06 23:05:39 +05:30
|
|
|
var complete bool = true
|
2025-04-11 19:41:02 +05:30
|
|
|
var err error
|
2025-05-16 20:09:57 +05:30
|
|
|
selectors := []*telemetrytypes.FieldKeySelector{}
|
|
|
|
|
|
|
|
|
|
if fieldKeySelector != nil {
|
|
|
|
|
selectors = []*telemetrytypes.FieldKeySelector{fieldKeySelector}
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
switch fieldKeySelector.Signal {
|
|
|
|
|
case telemetrytypes.SignalTraces:
|
2025-08-06 23:05:39 +05:30
|
|
|
keys, complete, err = t.getTracesKeys(ctx, selectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalLogs:
|
2025-08-06 23:05:39 +05:30
|
|
|
keys, complete, err = t.getLogsKeys(ctx, selectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalMetrics:
|
2025-08-07 16:50:37 +05:30
|
|
|
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
|
|
|
|
|
keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
|
|
|
|
|
} else {
|
|
|
|
|
keys, complete, err = t.getMetricsKeys(ctx, selectors)
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalUnspecified:
|
|
|
|
|
// get traces keys
|
2025-08-06 23:05:39 +05:30
|
|
|
tracesKeys, tracesComplete, err := t.getTracesKeys(ctx, selectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
keys = append(keys, tracesKeys...)
|
|
|
|
|
|
|
|
|
|
// get logs keys
|
2025-08-06 23:05:39 +05:30
|
|
|
logsKeys, logsComplete, err := t.getLogsKeys(ctx, selectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
keys = append(keys, logsKeys...)
|
|
|
|
|
|
|
|
|
|
// get metrics keys
|
2025-08-06 23:05:39 +05:30
|
|
|
metricsKeys, metricsComplete, err := t.getMetricsKeys(ctx, selectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
keys = append(keys, metricsKeys...)
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
complete = tracesComplete && logsComplete && metricsComplete
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
|
|
|
|
for _, key := range keys {
|
|
|
|
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
return mapOfKeys, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
logsSelectors := []*telemetrytypes.FieldKeySelector{}
|
|
|
|
|
tracesSelectors := []*telemetrytypes.FieldKeySelector{}
|
|
|
|
|
metricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
2025-08-07 16:50:37 +05:30
|
|
|
meterSourceMetricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
for _, fieldKeySelector := range fieldKeySelectors {
|
|
|
|
|
switch fieldKeySelector.Signal {
|
|
|
|
|
case telemetrytypes.SignalLogs:
|
|
|
|
|
logsSelectors = append(logsSelectors, fieldKeySelector)
|
|
|
|
|
case telemetrytypes.SignalTraces:
|
|
|
|
|
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
|
|
|
|
case telemetrytypes.SignalMetrics:
|
2025-08-07 16:50:37 +05:30
|
|
|
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
|
|
|
|
|
meterSourceMetricsSelectors = append(meterSourceMetricsSelectors, fieldKeySelector)
|
|
|
|
|
} else {
|
|
|
|
|
metricsSelectors = append(metricsSelectors, fieldKeySelector)
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalUnspecified:
|
|
|
|
|
logsSelectors = append(logsSelectors, fieldKeySelector)
|
|
|
|
|
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
|
|
|
|
metricsSelectors = append(metricsSelectors, fieldKeySelector)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
logsKeys, logsComplete, err := t.getLogsKeys(ctx, logsSelectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
tracesKeys, tracesComplete, err := t.getTracesKeys(ctx, tracesSelectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
metricsKeys, metricsComplete, err := t.getMetricsKeys(ctx, metricsSelectors)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-07 16:50:37 +05:30
|
|
|
meterSourceMetricsKeys, _, err := t.getMeterSourceMetricKeys(ctx, meterSourceMetricsSelectors)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, false, err
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
// Complete only if all queries are complete
|
|
|
|
|
complete := logsComplete && tracesComplete && metricsComplete
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
|
|
|
|
for _, key := range logsKeys {
|
|
|
|
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
|
|
|
|
}
|
|
|
|
|
for _, key := range tracesKeys {
|
|
|
|
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
|
|
|
|
}
|
|
|
|
|
for _, key := range metricsKeys {
|
|
|
|
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
|
|
|
|
}
|
2025-08-07 16:50:37 +05:30
|
|
|
for _, key := range meterSourceMetricsKeys {
|
|
|
|
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
return mapOfKeys, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (t *telemetryMetaStore) GetKey(ctx context.Context, fieldKeySelector *telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
2025-08-06 23:05:39 +05:30
|
|
|
keys, _, err := t.GetKeys(ctx, fieldKeySelector)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
|
|
|
|
return nil, err
|
|
|
|
|
}
|
|
|
|
|
return keys[fieldKeySelector.Name], nil
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) ([]string, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
// nothing to return as "related" value if there is nothing to filter on
|
|
|
|
|
if fieldValueSelector.ExistingQuery == "" {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, true, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-05-16 20:09:57 +05:30
|
|
|
key := &telemetrytypes.TelemetryFieldKey{
|
2025-04-11 19:41:02 +05:30
|
|
|
Name: fieldValueSelector.Name,
|
|
|
|
|
Signal: fieldValueSelector.Signal,
|
|
|
|
|
FieldContext: fieldValueSelector.FieldContext,
|
|
|
|
|
FieldDataType: fieldValueSelector.FieldDataType,
|
|
|
|
|
}
|
|
|
|
|
|
2025-05-16 20:09:57 +05:30
|
|
|
selectColumn, err := t.fm.FieldFor(ctx, key)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
if err != nil {
|
|
|
|
|
// we don't have a explicit column to select from the related metadata table
|
|
|
|
|
// so we will select either from resource_attributes or attributes table
|
|
|
|
|
// in that order
|
2025-05-16 20:09:57 +05:30
|
|
|
resourceColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
|
2025-04-23 14:35:56 +05:30
|
|
|
Name: key.Name,
|
|
|
|
|
FieldContext: telemetrytypes.FieldContextResource,
|
|
|
|
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
|
|
|
|
})
|
2025-05-16 20:09:57 +05:30
|
|
|
attributeColumn, _ := t.fm.FieldFor(ctx, &telemetrytypes.TelemetryFieldKey{
|
2025-04-23 14:35:56 +05:30
|
|
|
Name: key.Name,
|
|
|
|
|
FieldContext: telemetrytypes.FieldContextAttribute,
|
|
|
|
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
|
|
|
|
})
|
|
|
|
|
selectColumn = fmt.Sprintf("if(notEmpty(%s), %s, %s)", resourceColumn, resourceColumn, attributeColumn)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select("DISTINCT " + selectColumn).From(t.relatedMetadataDBName + "." + t.relatedMetadataTblName)
|
|
|
|
|
|
|
|
|
|
if len(fieldValueSelector.ExistingQuery) != 0 {
|
2025-05-27 20:54:48 +05:30
|
|
|
keySelectors := querybuilder.QueryStringToKeysSelectors(fieldValueSelector.ExistingQuery)
|
|
|
|
|
for _, keySelector := range keySelectors {
|
|
|
|
|
keySelector.Signal = fieldValueSelector.Signal
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
keys, _, err := t.GetKeysMulti(ctx, keySelectors)
|
2025-05-27 20:54:48 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-05-27 20:54:48 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-04 21:02:54 +05:30
|
|
|
whereClause, err := querybuilder.PrepareWhereClause(fieldValueSelector.ExistingQuery, querybuilder.FilterExprVisitorOpts{
|
2025-08-12 18:10:35 +05:30
|
|
|
Logger: t.logger,
|
2025-05-27 20:54:48 +05:30
|
|
|
FieldMapper: t.fm,
|
|
|
|
|
ConditionBuilder: t.conditionBuilder,
|
|
|
|
|
FieldKeys: keys,
|
|
|
|
|
})
|
2025-04-23 14:35:56 +05:30
|
|
|
if err == nil {
|
2025-08-04 21:02:54 +05:30
|
|
|
sb.AddWhereClause(whereClause.WhereClause)
|
2025-05-16 20:09:57 +05:30
|
|
|
} else {
|
2025-05-25 11:40:39 +05:30
|
|
|
t.logger.WarnContext(ctx, "error parsing existing query for related values", "error", err)
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.StartUnixMilli != 0 {
|
|
|
|
|
sb.Where(sb.GE("unix_milli", fieldValueSelector.StartUnixMilli))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.EndUnixMilli != 0 {
|
|
|
|
|
sb.Where(sb.LE("unix_milli", fieldValueSelector.EndUnixMilli))
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-26 19:37:25 +05:30
|
|
|
if fieldValueSelector.Value != "" {
|
|
|
|
|
var conds []string
|
|
|
|
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextAttribute &&
|
|
|
|
|
fieldValueSelector.FieldContext != telemetrytypes.FieldContextResource {
|
|
|
|
|
origContext := key.FieldContext
|
|
|
|
|
|
|
|
|
|
// search on attributes
|
|
|
|
|
key.FieldContext = telemetrytypes.FieldContextAttribute
|
|
|
|
|
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
|
|
|
|
|
if err == nil {
|
|
|
|
|
conds = append(conds, cond)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// search on resource
|
|
|
|
|
key.FieldContext = telemetrytypes.FieldContextResource
|
|
|
|
|
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
|
|
|
|
|
if err == nil {
|
|
|
|
|
conds = append(conds, cond)
|
|
|
|
|
}
|
|
|
|
|
key.FieldContext = origContext
|
|
|
|
|
} else {
|
|
|
|
|
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
|
|
|
|
|
if err == nil {
|
|
|
|
|
conds = append(conds, cond)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if len(conds) != 0 {
|
|
|
|
|
// see `expr` in condition_builder.go, if key doesn't exist we don't check for value
|
|
|
|
|
// hence, this is join of conditions on resource and attributes
|
|
|
|
|
sb.Where(sb.And(conds...))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
sb.Limit(limit + 1)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
2025-05-25 11:40:39 +05:30
|
|
|
t.logger.DebugContext(ctx, "query for related values", "query", query, "args", args)
|
2025-04-23 14:35:56 +05:30
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, ErrFailedToGetRelatedValues
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
var attributeValues []string
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var value string
|
|
|
|
|
if err := rows.Scan(&value); err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, ErrFailedToGetRelatedValues
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
if value != "" {
|
|
|
|
|
attributeValues = append(attributeValues, value)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return attributeValues, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) GetRelatedValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) ([]string, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
return t.getRelatedValues(ctx, fieldValueSelector)
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getSpanFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
// build the query to get the keys from the spans that match the field selection criteria
|
2025-08-06 23:05:39 +05:30
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.tracesDBName + "." + t.tracesFieldsTblName)
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Name != "" {
|
|
|
|
|
sb.Where(sb.E("tag_key", fieldValueSelector.Name))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// now look at the field context
|
|
|
|
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
|
|
|
|
sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// now look at the field data type
|
|
|
|
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Value != "" {
|
|
|
|
|
if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeString {
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.Where(sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
2025-04-11 19:41:02 +05:30
|
|
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeNumber {
|
|
|
|
|
sb.Where(sb.IsNotNull("number_value"))
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.Where(sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
2025-05-15 19:59:40 +05:30
|
|
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
// or b/w string and number
|
|
|
|
|
sb.Where(sb.Or(
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
|
|
|
|
sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
2025-05-15 19:59:40 +05:30
|
|
|
))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
sb.Limit(limit + 1)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
values := &telemetrytypes.TelemetryFieldValues{}
|
|
|
|
|
seen := make(map[string]bool)
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
|
|
|
|
totalCount := 0 // Track total unique values
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var stringValue string
|
|
|
|
|
var numberValue float64
|
|
|
|
|
if err := rows.Scan(&stringValue, &numberValue); err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
// Only add values if we haven't hit the limit yet
|
|
|
|
|
if totalCount < limit {
|
|
|
|
|
if _, ok := seen[stringValue]; !ok && stringValue != "" {
|
|
|
|
|
values.StringValues = append(values.StringValues, stringValue)
|
|
|
|
|
seen[stringValue] = true
|
|
|
|
|
totalCount++
|
|
|
|
|
}
|
|
|
|
|
if _, ok := seen[fmt.Sprintf("%f", numberValue)]; !ok && numberValue != 0 && totalCount < limit {
|
|
|
|
|
values.NumberValues = append(values.NumberValues, numberValue)
|
|
|
|
|
seen[fmt.Sprintf("%f", numberValue)] = true
|
|
|
|
|
totalCount++
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return values, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
2025-04-11 19:41:02 +05:30
|
|
|
// build the query to get the keys from the spans that match the field selection criteria
|
2025-08-06 23:05:39 +05:30
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.logsDBName + "." + t.logsFieldsTblName)
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Name != "" {
|
|
|
|
|
sb.Where(sb.E("tag_key", fieldValueSelector.Name))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
|
|
|
|
sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Value != "" {
|
|
|
|
|
if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeString {
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.Where(sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
2025-04-11 19:41:02 +05:30
|
|
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeNumber {
|
|
|
|
|
sb.Where(sb.IsNotNull("number_value"))
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.Where(sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
2025-05-15 19:59:40 +05:30
|
|
|
} else if fieldValueSelector.FieldDataType == telemetrytypes.FieldDataTypeUnspecified {
|
|
|
|
|
// or b/w string and number
|
|
|
|
|
sb.Where(sb.Or(
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
|
|
|
|
sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
2025-05-15 19:59:40 +05:30
|
|
|
))
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
sb.Limit(limit + 1)
|
2025-04-11 19:41:02 +05:30
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
values := &telemetrytypes.TelemetryFieldValues{}
|
|
|
|
|
seen := make(map[string]bool)
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
|
|
|
|
totalCount := 0 // Track total unique values
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
var stringValue string
|
|
|
|
|
var numberValue float64
|
|
|
|
|
if err := rows.Scan(&stringValue, &numberValue); err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
// Only add values if we haven't hit the limit yet
|
|
|
|
|
if totalCount < limit {
|
|
|
|
|
if _, ok := seen[stringValue]; !ok && stringValue != "" {
|
|
|
|
|
values.StringValues = append(values.StringValues, stringValue)
|
|
|
|
|
seen[stringValue] = true
|
|
|
|
|
totalCount++
|
|
|
|
|
}
|
|
|
|
|
if _, ok := seen[fmt.Sprintf("%f", numberValue)]; !ok && numberValue != 0 && totalCount < limit {
|
|
|
|
|
values.NumberValues = append(values.NumberValues, numberValue)
|
|
|
|
|
seen[fmt.Sprintf("%f", numberValue)] = true
|
|
|
|
|
totalCount++
|
|
|
|
|
}
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return values, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// getMetricFieldValues returns field values and whether the result is complete
|
|
|
|
|
func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
2025-04-23 14:35:56 +05:30
|
|
|
sb := sqlbuilder.
|
|
|
|
|
Select("DISTINCT attr_string_value").
|
|
|
|
|
From(t.metricsDBName + "." + t.metricsFieldsTblName)
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Name != "" {
|
|
|
|
|
sb.Where(sb.E("attr_name", fieldValueSelector.Name))
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.E("attr_type", fieldValueSelector.FieldContext.TagType()))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.E("attr_datatype", fieldValueSelector.FieldDataType.TagDataType()))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.MetricContext != nil {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.E("metric_name", fieldValueSelector.MetricContext.MetricName))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.StartUnixMilli > 0 {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.GE("last_reported_unix_milli", fieldValueSelector.StartUnixMilli))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.EndUnixMilli > 0 {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.LE("first_reported_unix_milli", fieldValueSelector.EndUnixMilli))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Value != "" {
|
|
|
|
|
if fieldValueSelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
2025-05-15 19:59:40 +05:30
|
|
|
sb.Where(sb.E("attr_string_value", fieldValueSelector.Value))
|
2025-04-23 14:35:56 +05:30
|
|
|
} else {
|
2025-08-06 23:05:39 +05:30
|
|
|
sb.Where(sb.ILike("attr_string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
sb.Limit(limit + 1)
|
2025-04-23 14:35:56 +05:30
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
values := &telemetrytypes.TelemetryFieldValues{}
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount := 0
|
2025-04-23 14:35:56 +05:30
|
|
|
for rows.Next() {
|
2025-08-06 23:05:39 +05:30
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-23 14:35:56 +05:30
|
|
|
var stringValue string
|
|
|
|
|
if err := rows.Scan(&stringValue); err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMetricsKeys.Error())
|
2025-04-23 14:35:56 +05:30
|
|
|
}
|
|
|
|
|
values.StringValues = append(values.StringValues, stringValue)
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
|
|
|
|
|
return values, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-07 16:50:37 +05:30
|
|
|
func (t *telemetryMetaStore) getMeterSourceMetricFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
|
|
|
|
sb := sqlbuilder.Select("DISTINCT arrayJoin(JSONExtractKeysAndValues(labels, 'String')) AS attr").
|
|
|
|
|
From(t.meterDBName + "." + t.meterFieldsTblName)
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Name != "" {
|
|
|
|
|
sb.Where(sb.E("attr.1", fieldValueSelector.Name))
|
|
|
|
|
}
|
|
|
|
|
sb.Where(sb.NotLike("attr.1", "\\_\\_%"))
|
|
|
|
|
|
|
|
|
|
if fieldValueSelector.Value != "" {
|
|
|
|
|
if fieldValueSelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
|
|
|
|
sb.Where(sb.E("attr.2", fieldValueSelector.Value))
|
|
|
|
|
} else {
|
|
|
|
|
sb.Where(sb.Like("attr.2", "%"+fieldValueSelector.Value+"%"))
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
sb.Where(sb.NE("attr.2", ""))
|
|
|
|
|
|
|
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
|
|
|
|
}
|
|
|
|
|
// query one extra to check if we hit the limit
|
|
|
|
|
sb.Limit(limit + 1)
|
|
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
|
2025-08-07 16:50:37 +05:30
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
values := &telemetrytypes.TelemetryFieldValues{}
|
|
|
|
|
rowCount := 0
|
|
|
|
|
for rows.Next() {
|
|
|
|
|
rowCount++
|
|
|
|
|
// reached the limit, we know there are more results
|
|
|
|
|
if rowCount > limit {
|
|
|
|
|
break
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
var attribute []string
|
|
|
|
|
if err := rows.Scan(&attribute); err != nil {
|
2025-09-01 17:10:13 +05:30
|
|
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetMeterValues.Error())
|
2025-08-07 16:50:37 +05:30
|
|
|
}
|
|
|
|
|
if len(attribute) > 1 {
|
|
|
|
|
values.StringValues = append(values.StringValues, attribute[1])
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// hit the limit?
|
|
|
|
|
complete := rowCount <= limit
|
|
|
|
|
return values, complete, nil
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
func populateAllUnspecifiedValues(allUnspecifiedValues *telemetrytypes.TelemetryFieldValues, mapOfValues map[any]bool, mapOfRelatedValues map[any]bool, values *telemetrytypes.TelemetryFieldValues, limit int) bool {
|
|
|
|
|
complete := true
|
|
|
|
|
totalCount := len(mapOfValues) + len(mapOfRelatedValues)
|
|
|
|
|
|
2025-05-15 19:59:40 +05:30
|
|
|
for _, value := range values.StringValues {
|
2025-08-06 23:05:39 +05:30
|
|
|
if totalCount >= limit {
|
|
|
|
|
complete = false
|
|
|
|
|
break
|
|
|
|
|
}
|
2025-05-15 19:59:40 +05:30
|
|
|
if _, ok := mapOfValues[value]; !ok {
|
|
|
|
|
mapOfValues[value] = true
|
|
|
|
|
allUnspecifiedValues.StringValues = append(allUnspecifiedValues.StringValues, value)
|
2025-08-06 23:05:39 +05:30
|
|
|
totalCount++
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
2025-05-15 19:59:40 +05:30
|
|
|
for _, value := range values.NumberValues {
|
2025-08-06 23:05:39 +05:30
|
|
|
if totalCount >= limit {
|
|
|
|
|
complete = false
|
|
|
|
|
break
|
|
|
|
|
}
|
2025-05-15 19:59:40 +05:30
|
|
|
if _, ok := mapOfValues[value]; !ok {
|
|
|
|
|
mapOfValues[value] = true
|
|
|
|
|
allUnspecifiedValues.NumberValues = append(allUnspecifiedValues.NumberValues, value)
|
2025-08-06 23:05:39 +05:30
|
|
|
totalCount++
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for _, value := range values.RelatedValues {
|
2025-08-06 23:05:39 +05:30
|
|
|
if totalCount >= limit {
|
|
|
|
|
complete = false
|
|
|
|
|
break
|
|
|
|
|
}
|
2025-05-15 19:59:40 +05:30
|
|
|
if _, ok := mapOfRelatedValues[value]; !ok {
|
|
|
|
|
mapOfRelatedValues[value] = true
|
|
|
|
|
allUnspecifiedValues.RelatedValues = append(allUnspecifiedValues.RelatedValues, value)
|
2025-08-06 23:05:39 +05:30
|
|
|
totalCount++
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
return complete
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
|
|
|
|
|
2025-08-06 23:05:39 +05:30
|
|
|
// GetAllValues returns all values and whether the result is complete
|
|
|
|
|
func (t *telemetryMetaStore) GetAllValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
2025-05-15 19:59:40 +05:30
|
|
|
values := &telemetrytypes.TelemetryFieldValues{}
|
2025-08-06 23:05:39 +05:30
|
|
|
var complete bool = true
|
2025-04-11 19:41:02 +05:30
|
|
|
var err error
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
limit := fieldValueSelector.Limit
|
|
|
|
|
if limit == 0 {
|
|
|
|
|
limit = 50
|
|
|
|
|
}
|
|
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
switch fieldValueSelector.Signal {
|
|
|
|
|
case telemetrytypes.SignalTraces:
|
2025-08-06 23:05:39 +05:30
|
|
|
values, complete, err = t.getSpanFieldValues(ctx, fieldValueSelector)
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalLogs:
|
2025-08-06 23:05:39 +05:30
|
|
|
values, complete, err = t.getLogFieldValues(ctx, fieldValueSelector)
|
2025-04-11 19:41:02 +05:30
|
|
|
case telemetrytypes.SignalMetrics:
|
2025-08-07 16:50:37 +05:30
|
|
|
if fieldValueSelector.Source == telemetrytypes.SourceMeter {
|
|
|
|
|
values, complete, err = t.getMeterSourceMetricFieldValues(ctx, fieldValueSelector)
|
|
|
|
|
} else {
|
|
|
|
|
values, complete, err = t.getMetricFieldValues(ctx, fieldValueSelector)
|
|
|
|
|
}
|
2025-05-15 19:59:40 +05:30
|
|
|
case telemetrytypes.SignalUnspecified:
|
|
|
|
|
mapOfValues := make(map[any]bool)
|
|
|
|
|
mapOfRelatedValues := make(map[any]bool)
|
|
|
|
|
allUnspecifiedValues := &telemetrytypes.TelemetryFieldValues{}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
tracesValues, tracesComplete, err := t.getSpanFieldValues(ctx, fieldValueSelector)
|
2025-05-15 19:59:40 +05:30
|
|
|
if err == nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
populateComplete := populateAllUnspecifiedValues(allUnspecifiedValues, mapOfValues, mapOfRelatedValues, tracesValues, limit)
|
|
|
|
|
complete = complete && tracesComplete && populateComplete
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
logsValues, logsComplete, err := t.getLogFieldValues(ctx, fieldValueSelector)
|
2025-05-15 19:59:40 +05:30
|
|
|
if err == nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
populateComplete := populateAllUnspecifiedValues(allUnspecifiedValues, mapOfValues, mapOfRelatedValues, logsValues, limit)
|
|
|
|
|
complete = complete && logsComplete && populateComplete
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
|
|
|
|
metricsValues, metricsComplete, err := t.getMetricFieldValues(ctx, fieldValueSelector)
|
2025-05-15 19:59:40 +05:30
|
|
|
if err == nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
populateComplete := populateAllUnspecifiedValues(allUnspecifiedValues, mapOfValues, mapOfRelatedValues, metricsValues, limit)
|
|
|
|
|
complete = complete && metricsComplete && populateComplete
|
2025-05-15 19:59:40 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
2025-05-15 19:59:40 +05:30
|
|
|
values = allUnspecifiedValues
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
|
2025-04-11 19:41:02 +05:30
|
|
|
if err != nil {
|
2025-08-06 23:05:39 +05:30
|
|
|
return nil, false, err
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-08-06 23:05:39 +05:30
|
|
|
return values, complete, nil
|
2025-04-11 19:41:02 +05:30
|
|
|
}
|
2025-06-16 23:11:28 +05:30
|
|
|
|
|
|
|
|
func (t *telemetryMetaStore) FetchTemporality(ctx context.Context, metricName string) (metrictypes.Temporality, error) {
|
|
|
|
|
if metricName == "" {
|
|
|
|
|
return metrictypes.Unknown, errors.Newf(errors.TypeInternal, errors.CodeInternal, "metric name cannot be empty")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
temporalityMap, err := t.FetchTemporalityMulti(ctx, metricName)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return metrictypes.Unknown, err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
temporality, ok := temporalityMap[metricName]
|
|
|
|
|
if !ok {
|
|
|
|
|
return metrictypes.Unknown, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return temporality, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (t *telemetryMetaStore) FetchTemporalityMulti(ctx context.Context, metricNames ...string) (map[string]metrictypes.Temporality, error) {
|
|
|
|
|
if len(metricNames) == 0 {
|
|
|
|
|
return make(map[string]metrictypes.Temporality), nil
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-07 16:50:37 +05:30
|
|
|
result := make(map[string]metrictypes.Temporality)
|
|
|
|
|
metricsTemporality, err := t.fetchMetricsTemporality(ctx, metricNames...)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, err
|
|
|
|
|
}
|
2025-08-08 17:39:06 +05:30
|
|
|
// TODO: return error after table migration are run
|
|
|
|
|
meterMetricsTemporality, _ := t.fetchMeterSourceMetricsTemporality(ctx, metricNames...)
|
2025-08-07 16:50:37 +05:30
|
|
|
|
|
|
|
|
// For metrics not found in the database, set to Unknown
|
|
|
|
|
for _, metricName := range metricNames {
|
|
|
|
|
if temporality, exists := metricsTemporality[metricName]; exists {
|
|
|
|
|
result[metricName] = temporality
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
if temporality, exists := meterMetricsTemporality[metricName]; exists {
|
|
|
|
|
result[metricName] = temporality
|
|
|
|
|
continue
|
|
|
|
|
}
|
|
|
|
|
result[metricName] = metrictypes.Unknown
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return result, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (t *telemetryMetaStore) fetchMetricsTemporality(ctx context.Context, metricNames ...string) (map[string]metrictypes.Temporality, error) {
|
2025-06-16 23:11:28 +05:30
|
|
|
result := make(map[string]metrictypes.Temporality)
|
|
|
|
|
|
|
|
|
|
// Build query to fetch temporality for all metrics
|
|
|
|
|
// We use attr_string_value where attr_name = '__temporality__'
|
|
|
|
|
// Note: The columns are mixed in the current data - temporality column contains metric_name
|
|
|
|
|
// and metric_name column contains temporality value, so we use the correct mapping
|
|
|
|
|
sb := sqlbuilder.Select(
|
2025-06-26 15:10:31 +05:30
|
|
|
"metric_name",
|
|
|
|
|
"argMax(temporality, last_reported_unix_milli) as temporality",
|
2025-06-16 23:11:28 +05:30
|
|
|
).From(t.metricsDBName + "." + t.metricsFieldsTblName)
|
|
|
|
|
|
|
|
|
|
// Filter by metric names (in the temporality column due to data mix-up)
|
2025-06-26 15:10:31 +05:30
|
|
|
sb.Where(sb.In("metric_name", metricNames))
|
2025-06-16 23:11:28 +05:30
|
|
|
|
|
|
|
|
// Group by metric name to get one temporality per metric
|
2025-06-26 15:10:31 +05:30
|
|
|
sb.GroupBy("metric_name")
|
2025-06-16 23:11:28 +05:30
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
t.logger.DebugContext(ctx, "fetching metric temporality", "query", query, "args", args)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to fetch metric temporality")
|
|
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
// Process results
|
|
|
|
|
for rows.Next() {
|
|
|
|
|
var metricName, temporalityStr string
|
|
|
|
|
if err := rows.Scan(&metricName, &temporalityStr); err != nil {
|
|
|
|
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to scan temporality result")
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Convert string to Temporality type
|
|
|
|
|
var temporality metrictypes.Temporality
|
|
|
|
|
switch temporalityStr {
|
|
|
|
|
case "Delta":
|
|
|
|
|
temporality = metrictypes.Delta
|
|
|
|
|
case "Cumulative":
|
|
|
|
|
temporality = metrictypes.Cumulative
|
|
|
|
|
case "Unspecified":
|
|
|
|
|
temporality = metrictypes.Unspecified
|
|
|
|
|
default:
|
|
|
|
|
// Unknown or empty temporality
|
|
|
|
|
temporality = metrictypes.Unknown
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
result[metricName] = temporality
|
|
|
|
|
}
|
|
|
|
|
|
2025-08-07 16:50:37 +05:30
|
|
|
return result, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (t *telemetryMetaStore) fetchMeterSourceMetricsTemporality(ctx context.Context, metricNames ...string) (map[string]metrictypes.Temporality, error) {
|
|
|
|
|
result := make(map[string]metrictypes.Temporality)
|
|
|
|
|
|
|
|
|
|
sb := sqlbuilder.Select(
|
|
|
|
|
"metric_name",
|
|
|
|
|
"argMax(temporality, unix_milli) as temporality",
|
|
|
|
|
).From(t.meterDBName + "." + t.meterFieldsTblName)
|
|
|
|
|
|
|
|
|
|
// Filter by metric names (in the temporality column due to data mix-up)
|
|
|
|
|
sb.Where(sb.In("metric_name", metricNames))
|
|
|
|
|
|
|
|
|
|
// Group by metric name to get one temporality per metric
|
|
|
|
|
sb.GroupBy("metric_name")
|
|
|
|
|
|
|
|
|
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
|
|
|
|
|
|
|
|
|
t.logger.DebugContext(ctx, "fetching meter metrics temporality", "query", query, "args", args)
|
|
|
|
|
|
|
|
|
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to fetch meter metric temporality")
|
|
|
|
|
}
|
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
|
|
// Process results
|
|
|
|
|
for rows.Next() {
|
|
|
|
|
var metricName, temporalityStr string
|
|
|
|
|
if err := rows.Scan(&metricName, &temporalityStr); err != nil {
|
|
|
|
|
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to scan temporality result")
|
2025-06-16 23:11:28 +05:30
|
|
|
}
|
2025-08-07 16:50:37 +05:30
|
|
|
|
|
|
|
|
// Convert string to Temporality type
|
|
|
|
|
var temporality metrictypes.Temporality
|
|
|
|
|
switch temporalityStr {
|
|
|
|
|
case "Delta":
|
|
|
|
|
temporality = metrictypes.Delta
|
|
|
|
|
case "Cumulative":
|
|
|
|
|
temporality = metrictypes.Cumulative
|
|
|
|
|
case "Unspecified":
|
|
|
|
|
temporality = metrictypes.Unspecified
|
|
|
|
|
default:
|
|
|
|
|
// Unknown or empty temporality
|
|
|
|
|
temporality = metrictypes.Unknown
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
result[metricName] = temporality
|
2025-06-16 23:11:28 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return result, nil
|
|
|
|
|
}
|