mirror of https://github.com/SigNoz/signoz.git
synced 2025-12-22 09:56:57 +00:00

Merge branch 'main' of github.com:SigNoz/signoz into fix/prom-aggr

This commit is contained in:
commit 7f19df18c3

.github/CODEOWNERS (vendored)
@@ -12,4 +12,9 @@
 /pkg/factory/ @grandwizard28
 /pkg/types/ @grandwizard28
 .golangci.yml @grandwizard28
-**/(zeus|licensing|sqlmigration)/ @vikrantgupta25
+/pkg/zeus/ @vikrantgupta25
+/pkg/licensing/ @vikrantgupta25
+/pkg/sqlmigration/ @vikrantgupta25
+/ee/zeus/ @vikrantgupta25
+/ee/licensing/ @vikrantgupta25
+/ee/sqlmigration/ @vikrantgupta25
@@ -100,12 +100,18 @@ services:
       # - "9000:9000"
       # - "8123:8123"
       # - "9181:9181"
+
+    configs:
+      - source: clickhouse-config
+        target: /etc/clickhouse-server/config.xml
+      - source: clickhouse-users
+        target: /etc/clickhouse-server/users.xml
+      - source: clickhouse-custom-function
+        target: /etc/clickhouse-server/custom-function.xml
+      - source: clickhouse-cluster
+        target: /etc/clickhouse-server/config.d/cluster.xml
+
     volumes:
-      - ../common/clickhouse/config.xml:/etc/clickhouse-server/config.xml
-      - ../common/clickhouse/users.xml:/etc/clickhouse-server/users.xml
-      - ../common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
-      - ../common/clickhouse/user_scripts:/var/lib/clickhouse/user_scripts/
-      - ../common/clickhouse/cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       - clickhouse:/var/lib/clickhouse/
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
@@ -117,9 +123,10 @@ services:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port
     volumes:
-      - ../common/signoz/prometheus.yml:/root/config/prometheus.yml
-      - ../common/dashboards:/root/config/dashboards
       - sqlite:/var/lib/signoz/
+    configs:
+      - source: signoz-prometheus-config
+        target: /root/config/prometheus.yml
     environment:
       - SIGNOZ_ALERTMANAGER_PROVIDER=signoz
       - SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://clickhouse:9000
@@ -147,9 +154,11 @@ services:
       - --manager-config=/etc/manager-config.yaml
       - --copy-path=/var/tmp/collector-config.yaml
       - --feature-gates=-pkg.translator.prometheus.NormalizeName
-    volumes:
-      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
-      - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
+    configs:
+      - source: otel-collector-config
+        target: /etc/otel-collector-config.yaml
+      - source: otel-manager-config
+        target: /etc/manager-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
       - LOW_CARDINAL_EXCEPTION_GROUPING=false
@@ -186,3 +195,26 @@ volumes:
     name: signoz-sqlite
   zookeeper-1:
     name: signoz-zookeeper-1
+
+configs:
+  clickhouse-config:
+    file: ../common/clickhouse/config.xml
+  clickhouse-users:
+    file: ../common/clickhouse/users.xml
+  clickhouse-custom-function:
+    file: ../common/clickhouse/custom-function.xml
+  clickhouse-cluster:
+    file: ../common/clickhouse/cluster.xml
+
+  signoz-prometheus-config:
+    file: ../common/signoz/prometheus.yml
+  # If you have multiple dashboard files, you can list them individually:
+  # dashboard-foo:
+  #   file: ../common/dashboards/foo.json
+  # dashboard-bar:
+  #   file: ../common/dashboards/bar.json
+
+  otel-collector-config:
+    file: ./otel-collector-config.yaml
+  otel-manager-config:
+    file: ../common/signoz/otel-collector-opamp-config.yaml
@@ -6,11 +6,13 @@ import (
 	"time"

 	"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
+	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
 	"github.com/SigNoz/signoz/pkg/licensing"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types/analyticstypes"
 	"github.com/SigNoz/signoz/pkg/types/licensetypes"
 	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/SigNoz/signoz/pkg/zeus"
@@ -23,16 +25,17 @@ type provider struct {
 	config    licensing.Config
 	settings  factory.ScopedProviderSettings
 	orgGetter organization.Getter
+	analytics analytics.Analytics
 	stopChan  chan struct{}
 }

-func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
+func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
 	return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) {
-		return New(ctx, providerSettings, config, store, zeus, orgGetter)
+		return New(ctx, providerSettings, config, store, zeus, orgGetter, analytics)
 	})
 }

-func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter) (licensing.Licensing, error) {
+func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) (licensing.Licensing, error) {
 	settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing")
 	licensestore := sqllicensingstore.New(sqlstore)
 	return &provider{
@@ -42,6 +45,7 @@ func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Conf
 		settings:  settings,
 		orgGetter: orgGetter,
 		stopChan:  make(chan struct{}),
+		analytics: analytics,
 	}, nil
 }

@@ -159,6 +163,25 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
 		return err
 	}

+	stats := licensetypes.NewStatsFromLicense(activeLicense)
+	provider.analytics.Send(ctx,
+		analyticstypes.Track{
+			UserId:     "stats_" + organizationID.String(),
+			Event:      "License Updated",
+			Properties: analyticstypes.NewPropertiesFromMap(stats),
+			Context: &analyticstypes.Context{
+				Extra: map[string]interface{}{
+					analyticstypes.KeyGroupID: organizationID.String(),
+				},
+			},
+		},
+		analyticstypes.Group{
+			UserId:  "stats_" + organizationID.String(),
+			GroupId: organizationID.String(),
+			Traits:  analyticstypes.NewTraitsFromMap(stats),
+		},
+	)
+
 	return nil
 }

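Aside: the Track/Group pair emitted above follows the common Segment-style convention of recording an event and refreshing group-level traits from the same stats map. A minimal, self-contained Go sketch of that pattern — Track, Group, and send here are hypothetical stand-ins, not the actual analyticstypes API:

package main

import "fmt"

// Track and Group mirror the shape used above in spirit only.
type Track struct {
	UserID     string
	Event      string
	Properties map[string]any
}

type Group struct {
	UserID  string
	GroupID string
	Traits  map[string]any
}

// send stands in for the analytics provider; a real sender would enqueue these.
func send(msgs ...any) {
	for _, m := range msgs {
		fmt.Printf("%+v\n", m)
	}
}

func main() {
	orgID := "0196f794-ff30-7bee-a5f4-ef5ad315715e"
	stats := map[string]any{"license.plan.name": "ENTERPRISE"}
	// One Track records the moment; one Group keeps the org's traits current.
	send(
		Track{UserID: "stats_" + orgID, Event: "License Updated", Properties: stats},
		Group{UserID: "stats_" + orgID, GroupID: orgID, Traits: stats},
	)
}
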
@@ -12,6 +12,7 @@ import (
 	"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
 	"github.com/SigNoz/signoz/ee/zeus"
 	"github.com/SigNoz/signoz/ee/zeus/httpzeus"
+	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/config"
 	"github.com/SigNoz/signoz/pkg/config/envprovider"
 	"github.com/SigNoz/signoz/pkg/config/fileprovider"
@@ -134,8 +135,8 @@ func main() {
 		zeus.Config(),
 		httpzeus.NewProviderFactory(),
 		licensing.Config(24*time.Hour, 3),
-		func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
-			return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter)
+		func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] {
+			return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics)
 		},
 		signoz.NewEmailingProviderFactories(),
 		signoz.NewCacheProviderFactories(),
@@ -78,7 +78,7 @@
     "fontfaceobserver": "2.3.0",
     "history": "4.10.1",
     "html-webpack-plugin": "5.5.0",
-    "http-proxy-middleware": "3.0.3",
+    "http-proxy-middleware": "3.0.5",
     "http-status-codes": "2.3.0",
     "i18next": "^21.6.12",
     "i18next-browser-languagedetector": "^6.1.3",

@@ -250,7 +250,7 @@
     "xml2js": "0.5.0",
     "phin": "^3.7.1",
     "body-parser": "1.20.3",
-    "http-proxy-middleware": "3.0.3",
+    "http-proxy-middleware": "3.0.5",
     "cross-spawn": "7.0.5",
     "cookie": "^0.7.1",
     "serialize-javascript": "6.0.2",
@@ -15,6 +15,7 @@ import { matchPath, useLocation } from 'react-router-dom';
 import { SuccessResponseV2 } from 'types/api';
 import APIError from 'types/api/error';
 import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
+import { OrgPreference } from 'types/api/preferences/preference';
 import { Organization } from 'types/api/user/getOrganization';
 import { UserResponse } from 'types/api/user/getUser';
 import { USER_ROLES } from 'types/roles';

@@ -96,8 +97,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
 			usersData.data
 		) {
 			const isOnboardingComplete = orgPreferences?.find(
-				(preference: Record<string, any>) =>
-					preference.key === ORG_PREFERENCES.ORG_ONBOARDING,
+				(preference: OrgPreference) =>
+					preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
 			)?.value;

 			const isFirstUser = checkFirstTimeUser();
@@ -85,7 +85,13 @@ function LabelSelect({
 	}, [handleBlur]);

 	const handleLabelChange = (event: ChangeEvent<HTMLInputElement>): void => {
-		setCurrentVal(event.target?.value.replace(':', ''));
+		// Remove the colon only if it is the last character,
+		// as the colon separates the key and value in the query.
+		setCurrentVal(
+			event.target?.value.endsWith(':')
+				? event.target?.value.slice(0, -1)
+				: event.target?.value,
+		);
 	};

 	const handleClose = (key: string): void => {
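The change above trims the colon only when it is the trailing character, since a colon elsewhere legitimately separates key and value. The same guard in a minimal Go sketch (strings.TrimSuffix removes only a trailing occurrence):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// TrimSuffix strips the separator only at the end,
	// leaving "key:value" separators untouched.
	fmt.Println(strings.TrimSuffix("env:", ":"))     // env
	fmt.Println(strings.TrimSuffix("env:prod", ":")) // env:prod (unchanged)
}
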
@@ -18,6 +18,27 @@
 		background-color: yellow;
 		border-radius: 6px;
 		cursor: pointer;
+
+		.event-dot {
+			position: absolute;
+			top: 50%;
+			transform: translate(-50%, -50%) rotate(45deg);
+			width: 6px;
+			height: 6px;
+			background-color: var(--bg-robin-500);
+			border: 1px solid var(--bg-robin-600);
+			cursor: pointer;
+			z-index: 1;
+
+			&.error {
+				background-color: var(--bg-cherry-500);
+				border-color: var(--bg-cherry-600);
+			}
+
+			&:hover {
+				transform: translate(-50%, -50%) rotate(45deg) scale(1.5);
+			}
+		}
 	}
 }

@@ -6,6 +6,7 @@ import { Tooltip } from 'antd';
 import Color from 'color';
 import TimelineV2 from 'components/TimelineV2/TimelineV2';
 import { themeColors } from 'constants/theme';
+import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { generateColor } from 'lib/uPlotLib/utils/generateColor';
 import {

@@ -20,6 +21,7 @@ import { useHistory, useLocation } from 'react-router-dom';
 import { ListRange, Virtuoso, VirtuosoHandle } from 'react-virtuoso';
 import { FlamegraphSpan } from 'types/api/trace/getTraceFlamegraph';
 import { Span } from 'types/api/trace/getTraceV2';
+import { toFixed } from 'utils/toFixed';

 interface ITraceMetadata {
 	startTime: number;
@@ -91,7 +93,31 @@ function Success(props: ISuccessProps): JSX.Element {
 						searchParams.set('spanId', span.spanId);
 						history.replace({ search: searchParams.toString() });
 					}}
-				/>
+				>
+					{span.event?.map((event) => {
+						const eventTimeMs = event.timeUnixNano / 1e6;
+						const eventOffsetPercent =
+							((eventTimeMs - span.timestamp) / (span.durationNano / 1e6)) * 100;
+						const clampedOffset = Math.max(1, Math.min(eventOffsetPercent, 99));
+						const { isError } = event;
+						const { time, timeUnitName } = convertTimeToRelevantUnit(
+							eventTimeMs - span.timestamp,
+						);
+						return (
+							<Tooltip
+								key={`${span.spanId}-event-${event.name}-${event.timeUnixNano}`}
+								title={`${event.name} @ ${toFixed(time, 2)} ${timeUnitName}`}
+							>
+								<div
+									className={`event-dot ${isError ? 'error' : ''}`}
+									style={{
+										left: `${clampedOffset}%`,
+									}}
+								/>
+							</Tooltip>
+						);
+					})}
+				</div>
 			</Tooltip>
 		);
 	})}

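The dot placement above converts the event time from nanoseconds to milliseconds, expresses its distance from the span start as a percentage of the span duration, and clamps to 1–99% so the marker never renders outside the bar. A minimal Go sketch of the same arithmetic (unit conventions assumed from the fields above: timeUnixNano in ns, span timestamp in ms, duration in ns):

package main

import "fmt"

// offsetPercent mirrors the JSX math: both operands are normalized to
// milliseconds before the ratio is taken.
func offsetPercent(timeUnixNano, spanStartMs, durationNano float64) float64 {
	eventTimeMs := timeUnixNano / 1e6
	percent := (eventTimeMs - spanStartMs) / (durationNano / 1e6) * 100
	// Clamp to [1, 99] so the marker stays inside the span bar.
	if percent < 1 {
		return 1
	}
	if percent > 99 {
		return 99
	}
	return percent
}

func main() {
	// An event 150ms into a 200ms span lands at 75% of the bar.
	fmt.Println(offsetPercent(1_000_150*1e6, 1_000_000, 200*1e6))
}
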
@@ -56,7 +56,7 @@
 		gap: 6px;

 		.diamond {
-			fill: var(--bg-cherry-500);
+			fill: var(--bg-robin-500);
 		}
 	}
 }

@@ -3,8 +3,8 @@ import './Events.styles.scss';
 import { Collapse, Input, Tooltip, Typography } from 'antd';
 import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
 import { Diamond } from 'lucide-react';
-import { useMemo, useState } from 'react';
-import { Event, Span } from 'types/api/trace/getTraceV2';
+import { useState } from 'react';
+import { Span } from 'types/api/trace/getTraceV2';

 import NoData from '../NoData/NoData';

@@ -17,14 +17,7 @@ interface IEventsTableProps {
 function EventsTable(props: IEventsTableProps): JSX.Element {
 	const { span, startTime, isSearchVisible } = props;
 	const [fieldSearchInput, setFieldSearchInput] = useState<string>('');
-	const events: Event[] = useMemo(() => {
-		const tempEvents = [];
-		for (let i = 0; i < span.event?.length; i++) {
-			const parsedEvent = JSON.parse(span.event[i]);
-			tempEvents.push(parsedEvent);
-		}
-		return tempEvents;
-	}, [span.event]);
+	const events = span.event;

 	return (
 		<div className="events-table">

@@ -81,7 +74,18 @@ function EventsTable(props: IEventsTableProps): JSX.Element {
 							)}
 						</Typography.Text>
 						<Typography.Text className="timestamp-text">
-							after the start
+							since trace start
+						</Typography.Text>
+					</div>
+					<div className="timestamp-container">
+						<Typography.Text className="attribute-value">
+							{getYAxisFormattedValue(
+								`${(event.timeUnixNano || 0) / 1e6 - span.timestamp}`,
+								'ms',
+							)}
+						</Typography.Text>
+						<Typography.Text className="timestamp-text">
+							since span start
 						</Typography.Text>
 					</div>
 				</div>

@@ -0,0 +1,71 @@
+.no-linked-spans {
+	display: flex;
+	justify-content: center;
+	align-items: center;
+	height: 400px;
+}
+
+.linked-spans-container {
+	display: flex;
+	flex-direction: column;
+	gap: 12px;
+	padding: 12px;
+
+	.item {
+		display: flex;
+		flex-direction: column;
+		gap: 8px;
+		justify-content: flex-start;
+
+		.item-key {
+			color: var(--bg-vanilla-100);
+			font-family: Inter;
+			font-size: 14px;
+			font-style: normal;
+			font-weight: 400;
+			line-height: 20px; /* 142.857% */
+			letter-spacing: -0.07px;
+		}
+
+		.value-wrapper {
+			display: flex;
+			padding: 2px 8px;
+			align-items: center;
+			width: fit-content;
+			max-width: 100%;
+			gap: 8px;
+			border-radius: 50px;
+			border: 1px solid var(--bg-slate-400);
+			background: var(--bg-slate-500);
+
+			.item-value {
+				color: var(--bg-vanilla-400);
+				font-family: Inter;
+				font-size: 14px;
+				font-style: normal;
+				font-weight: 400;
+				line-height: 20px; /* 142.857% */
+				letter-spacing: 0.56px;
+			}
+		}
+	}
+}
+
+.lightMode {
+	.linked-spans-container {
+		.item {
+			.item-key {
+				color: var(--bg-ink-100);
+			}
+
+			.value-wrapper {
+				border: 1px solid var(--bg-vanilla-300);
+				background: var(--bg-vanilla-300);
+
+				.item-value {
+					color: var(--bg-ink-400);
+				}
+			}
+		}
+	}
+}
@@ -0,0 +1,77 @@
+import './LinkedSpans.styles.scss';
+
+import { Button, Tooltip, Typography } from 'antd';
+import ROUTES from 'constants/routes';
+import { formUrlParams } from 'container/TraceDetail/utils';
+import { useCallback } from 'react';
+import { Span } from 'types/api/trace/getTraceV2';
+
+import NoData from '../NoData/NoData';
+
+interface LinkedSpansProps {
+	span: Span;
+}
+
+interface SpanReference {
+	traceId: string;
+	spanId: string;
+	refType: string;
+}
+
+function LinkedSpans(props: LinkedSpansProps): JSX.Element {
+	const { span } = props;
+
+	const getLink = useCallback((item: SpanReference): string | null => {
+		if (!item.traceId || !item.spanId) {
+			return null;
+		}
+		return `${ROUTES.TRACE}/${item.traceId}${formUrlParams({
+			spanId: item.spanId,
+			levelUp: 0,
+			levelDown: 0,
+		})}`;
+	}, []);
+
+	// Filter out CHILD_OF references as they are parent-child relationships
+	const linkedSpans =
+		span.references?.filter((ref: SpanReference) => ref.refType !== 'CHILD_OF') ||
+		[];
+
+	if (linkedSpans.length === 0) {
+		return (
+			<div className="no-linked-spans">
+				<NoData name="linked spans" />
+			</div>
+		);
+	}
+
+	return (
+		<div className="linked-spans-container">
+			{linkedSpans.map((item: SpanReference) => {
+				const link = getLink(item);
+				return (
+					<div className="item" key={item.spanId}>
+						<Typography.Text className="item-key" ellipsis>
+							Linked Span ID
+						</Typography.Text>
+						<div className="value-wrapper">
+							<Tooltip title={item.spanId}>
+								{link ? (
+									<Typography.Link href={link} className="item-value" ellipsis>
+										{item.spanId}
+									</Typography.Link>
+								) : (
+									<Button type="link" className="item-value" disabled>
+										{item.spanId}
+									</Button>
+								)}
+							</Tooltip>
+						</div>
+					</div>
+				);
+			})}
+		</div>
+	);
+}
+
+export default LinkedSpans;
@@ -158,20 +158,29 @@
 		border-bottom: 1px solid var(--bg-slate-400) !important;
 	}

-	.attributes-tab-btn {
+	.ant-tabs-tab {
+		margin: 0 !important;
+		padding: 0 2px !important;
+		min-width: 36px;
+		height: 36px;
 		display: flex;
 		align-items: center;
-	}
-	.attributes-tab-btn:hover {
-		background: unset;
+		justify-content: center;
 	}

-	.events-tab-btn {
+	.attributes-tab-btn,
+	.events-tab-btn,
+	.linked-spans-tab-btn {
 		display: flex;
 		align-items: center;
+		justify-content: center;
+		width: 100%;
+		padding: 4px 8px;
 	}

-	.events-tab-btn:hover {
+	.attributes-tab-btn:hover,
+	.events-tab-btn:hover,
+	.linked-spans-tab-btn:hover {
 		background: unset;
 	}
 }

@@ -261,3 +270,9 @@
 		}
 	}
 }
+
+.linked-spans-tab-btn {
+	display: flex;
+	align-items: center;
+	gap: 0.5rem;
+}
@@ -10,13 +10,14 @@ import { getTraceToLogsQuery } from 'container/TraceDetail/SelectedSpanDetails/c
 import createQueryParams from 'lib/createQueryParams';
 import history from 'lib/history';
 import { generateColor } from 'lib/uPlotLib/utils/generateColor';
-import { Anvil, Bookmark, PanelRight, Search } from 'lucide-react';
+import { Anvil, Bookmark, Link2, PanelRight, Search } from 'lucide-react';
 import { Dispatch, SetStateAction, useState } from 'react';
 import { Span } from 'types/api/trace/getTraceV2';
 import { formatEpochTimestamp } from 'utils/timeUtils';

 import Attributes from './Attributes/Attributes';
 import Events from './Events/Events';
+import LinkedSpans from './LinkedSpans/LinkedSpans';

 const FIVE_MINUTES_IN_MS = 5 * 60 * 1000;
 interface ISpanDetailsDrawerProps {

@@ -74,6 +75,19 @@ function SpanDetailsDrawer(props: ISpanDetailsDrawerProps): JSX.Element {
 				/>
 			),
 		},
+		{
+			label: (
+				<Button
+					type="text"
+					icon={<Link2 size="14" />}
+					className="linked-spans-tab-btn"
+				>
+					Links
+				</Button>
+			),
+			key: 'linked-spans',
+			children: <LinkedSpans span={span} />,
+		},
 	];
 }
 const onLogsHandler = (): void => {

@@ -273,6 +273,27 @@
 		border-radius: 6px;
 	}

+	.event-dot {
+		position: absolute;
+		top: 50%;
+		transform: translate(-50%, -50%) rotate(45deg);
+		width: 6px;
+		height: 6px;
+		background-color: var(--bg-robin-500);
+		border: 1px solid var(--bg-robin-600);
+		cursor: pointer;
+		z-index: 1;
+
+		&.error {
+			background-color: var(--bg-cherry-500);
+			border-color: var(--bg-cherry-600);
+		}
+
+		&:hover {
+			transform: translate(-50%, -50%) rotate(45deg) scale(1.5);
+		}
+	}
+
 	.span-line-text {
 		position: relative;
 		top: 40%;

@@ -240,8 +240,33 @@ export function SpanDuration({
 					left: `${leftOffset}%`,
 					width: `${width}%`,
 					backgroundColor: color,
+					position: 'relative',
 				}}
-			/>
+			>
+				{span.event?.map((event) => {
+					const eventTimeMs = event.timeUnixNano / 1e6;
+					const eventOffsetPercent =
+						((eventTimeMs - span.timestamp) / (span.durationNano / 1e6)) * 100;
+					const clampedOffset = Math.max(1, Math.min(eventOffsetPercent, 99));
+					const { isError } = event;
+					const { time, timeUnitName } = convertTimeToRelevantUnit(
+						eventTimeMs - span.timestamp,
+					);
+					return (
+						<Tooltip
+							key={`${span.spanId}-event-${event.name}-${event.timeUnixNano}`}
+							title={`${event.name} @ ${toFixed(time, 2)} ${timeUnitName}`}
+						>
+							<div
+								className={`event-dot ${isError ? 'error' : ''}`}
+								style={{
+									left: `${clampedOffset}%`,
+								}}
+							/>
+						</Tooltip>
+					);
+				})}
+			</div>
 			{hasActionButtons && <SpanLineActionButtons span={span} />}
 			<Tooltip title={`${toFixed(time, 2)} ${timeUnitName}`}>
 				<Typography.Text

@@ -7,6 +7,13 @@ export interface GetTraceFlamegraphPayloadProps {
 	selectedSpanId: string;
 }

+export interface Event {
+	name: string;
+	timeUnixNano: number;
+	attributeMap: Record<string, string>;
+	isError: boolean;
+}
+
 export interface FlamegraphSpan {
 	timestamp: number;
 	durationNano: number;

@@ -17,6 +24,7 @@ export interface FlamegraphSpan {
 	serviceName: string;
 	name: string;
 	level: number;
+	event: Event[];
 }

 export interface GetTraceFlamegraphSuccessResponse {

@@ -13,6 +13,7 @@ export interface Event {
 	name: string;
 	timeUnixNano: number;
 	attributeMap: Record<string, string>;
+	isError: boolean;
 }
 export interface Span {
 	timestamp: number;

@@ -27,7 +28,7 @@ export interface Span {
 	name: string;
 	references: any;
 	tagMap: Record<string, string>;
-	event: string[];
+	event: Event[];
 	rootName: string;
 	statusMessage: string;
 	statusCodeString: string;

@@ -9966,10 +9966,10 @@ http-proxy-agent@^4.0.1:
     agent-base "6"
     debug "4"

-http-proxy-middleware@3.0.3, http-proxy-middleware@^2.0.7:
-  version "3.0.3"
-  resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-3.0.3.tgz#dc1313c75bd00d81e103823802551ee30130ebd1"
-  integrity sha512-usY0HG5nyDUwtqpiZdETNbmKtw3QQ1jwYFZ9wi5iHzX2BcILwQKtYDJPo7XHTsu5Z0B2Hj3W9NNnbd+AjFWjqg==
+http-proxy-middleware@3.0.5, http-proxy-middleware@^2.0.7:
+  version "3.0.5"
+  resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-3.0.5.tgz#9dcde663edc44079bc5a9c63e03fe5e5d6037fab"
+  integrity sha512-GLZZm1X38BPY4lkXA01jhwxvDoOkkXqjgVyUzVxiEK4iuRu03PZoYHhHRwxnfhQMDuaxi3vVri0YgSro/1oWqg==
   dependencies:
     "@types/http-proxy" "^1.17.15"
     debug "^4.3.6"

@@ -867,7 +867,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadataCache(ctx contex
 	return cachedTraceData, nil
 }

-func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError) {
+func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, error) {
 	response := new(model.GetWaterfallSpansForTraceWithMetadataResponse)
 	var startTime, endTime, durationNano, totalErrorSpans, totalSpans uint64
 	var spanIdToSpanNodeMap = map[string]*model.Span{}

@@ -916,7 +916,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
 		err := json.Unmarshal([]byte(item.References), &ref)
 		if err != nil {
 			zap.L().Error("getWaterfallSpansForTraceWithMetadata: error unmarshalling references", zap.Error(err), zap.String("traceID", traceID))
-			return nil, model.BadRequest(fmt.Errorf("getWaterfallSpansForTraceWithMetadata: error unmarshalling references %w", err))
+			return nil, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, "getWaterfallSpansForTraceWithMetadata: error unmarshalling references %s", err.Error())
 		}

 		// merge attributes_number and attributes_bool to attributes_string

@@ -930,6 +930,17 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
 			item.Attributes_string[k] = v
 		}

+		events := make([]model.Event, 0)
+		for _, event := range item.Events {
+			var eventMap model.Event
+			err = json.Unmarshal([]byte(event), &eventMap)
+			if err != nil {
+				zap.L().Error("Error unmarshalling events", zap.Error(err))
+				return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "getWaterfallSpansForTraceWithMetadata: error in unmarshalling events %s", err.Error())
+			}
+			events = append(events, eventMap)
+		}
+
 		jsonItem := model.Span{
 			SpanID:  item.SpanID,
 			TraceID: item.TraceID,

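The loop above moves event parsing server-side: each span row carries its events as JSON strings, and these are decoded into typed structs before the response is built (the frontend previously did this with JSON.parse). A self-contained Go sketch of that decode step — the Event type here is a local stand-in whose JSON keys mirror the model fields visible in this diff:

package main

import (
	"encoding/json"
	"fmt"
)

// Event mirrors the model.Event shape used in this diff (sketch assumption).
type Event struct {
	Name         string            `json:"name"`
	TimeUnixNano uint64            `json:"timeUnixNano"`
	AttributeMap map[string]string `json:"attributeMap"`
	IsError      bool              `json:"isError"`
}

func main() {
	raw := []string{
		`{"name":"exception","timeUnixNano":1700000000000000000,"attributeMap":{"exception.type":"EOF"},"isError":true}`,
	}
	events := make([]Event, 0, len(raw))
	for _, r := range raw {
		var ev Event
		// In the reader a decode failure aborts the request; here we just report it.
		if err := json.Unmarshal([]byte(r), &ev); err != nil {
			fmt.Println("unmarshal error:", err)
			return
		}
		events = append(events, ev)
	}
	fmt.Printf("%+v\n", events)
}
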
@@ -942,7 +953,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
 			StatusCodeString: item.StatusCodeString,
 			SpanKind:         item.SpanKind,
 			References:       ref,
-			Events:           item.Events,
+			Events:           events,
 			TagMap:           item.Attributes_string,
 			Children:         make([]*model.Span, 0),
 		}

@@ -998,6 +1009,7 @@ func (r *ClickHouseReader) GetWaterfallSpansForTraceWithMetadata(ctx context.Con
 			StatusMessage:    "",
 			StatusCodeString: "",
 			SpanKind:         "",
+			Events:           make([]model.Event, 0),
 			Children:         make([]*model.Span, 0),
 		}
 		missingSpan.Children = append(missingSpan.Children, spanNode)

@@ -1075,7 +1087,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTraceCache(ctx context.Context,
 	return cachedTraceData, nil
 }

-func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError) {
+func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, error) {
 	trace := new(model.GetFlamegraphSpansForTraceResponse)
 	var startTime, endTime, durationNano uint64
 	var spanIdToSpanNodeMap = map[string]*model.FlamegraphSpan{}

@@ -1097,7 +1109,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID
 	if err != nil {
 		zap.L().Info("cache miss for getFlamegraphSpansForTrace", zap.String("traceID", traceID))

-		searchScanResponses, err := r.GetSpansForTrace(ctx, traceID, fmt.Sprintf("SELECT timestamp, duration_nano, span_id, trace_id, has_error,references, resource_string_service$$name, name FROM %s.%s WHERE trace_id=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3 ORDER BY timestamp ASC, name ASC", r.TraceDB, r.traceTableName))
+		searchScanResponses, err := r.GetSpansForTrace(ctx, traceID, fmt.Sprintf("SELECT timestamp, duration_nano, span_id, trace_id, has_error,references, resource_string_service$$name, name, events FROM %s.%s WHERE trace_id=$1 and ts_bucket_start>=$2 and ts_bucket_start<=$3 ORDER BY timestamp ASC, name ASC", r.TraceDB, r.traceTableName))
 		if err != nil {
 			return nil, err
 		}

@@ -1111,7 +1123,18 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID
 		err := json.Unmarshal([]byte(item.References), &ref)
 		if err != nil {
 			zap.L().Error("Error unmarshalling references", zap.Error(err))
-			return nil, &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("error in unmarshalling references: %w", err)}
+			return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "getFlamegraphSpansForTrace: error in unmarshalling references %s", err.Error())
+		}
+
+		events := make([]model.Event, 0)
+		for _, event := range item.Events {
+			var eventMap model.Event
+			err = json.Unmarshal([]byte(event), &eventMap)
+			if err != nil {
+				zap.L().Error("Error unmarshalling events", zap.Error(err))
+				return nil, errorsV2.Newf(errorsV2.TypeInternal, errorsV2.CodeInternal, "getFlamegraphSpansForTrace: error in unmarshalling events %s", err.Error())
+			}
+			events = append(events, eventMap)
 		}

 		jsonItem := model.FlamegraphSpan{

@@ -1122,6 +1145,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID
 			DurationNano: item.DurationNano,
 			HasError:     item.HasError,
 			References:   ref,
+			Events:       events,
 			Children:     make([]*model.FlamegraphSpan, 0),
 		}

@@ -1160,6 +1184,7 @@ func (r *ClickHouseReader) GetFlamegraphSpansForTrace(ctx context.Context, orgID
 			TimeUnixNano: spanNode.TimeUnixNano,
 			DurationNano: spanNode.DurationNano,
 			HasError:     false,
+			Events:       make([]model.Event, 0),
 			Children:     make([]*model.FlamegraphSpan, 0),
 		}
 		missingSpan.Children = append(missingSpan.Children, spanNode)

@@ -1787,7 +1787,7 @@ func (aH *APIHandler) GetWaterfallSpansForTraceWithMetadata(w http.ResponseWrite

 	result, apiErr := aH.reader.GetWaterfallSpansForTraceWithMetadata(r.Context(), orgID, traceID, req)
 	if apiErr != nil {
-		RespondError(w, apiErr, nil)
+		render.Error(w, apiErr)
 		return
 	}

@@ -1821,7 +1821,7 @@ func (aH *APIHandler) GetFlamegraphSpansForTrace(w http.ResponseWriter, r *http.

 	result, apiErr := aH.reader.GetFlamegraphSpansForTrace(r.Context(), orgID, traceID, req)
 	if apiErr != nil {
-		RespondError(w, apiErr, nil)
+		render.Error(w, apiErr)
 		return
 	}

@@ -42,8 +42,8 @@ type Reader interface {

 	// Search Interfaces
 	SearchTraces(ctx context.Context, params *model.SearchTracesParams) (*[]model.SearchSpansResult, error)
-	GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, *model.ApiError)
-	GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError)
+	GetWaterfallSpansForTraceWithMetadata(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetWaterfallSpansForTraceWithMetadataParams) (*model.GetWaterfallSpansForTraceWithMetadataResponse, error)
+	GetFlamegraphSpansForTrace(ctx context.Context, orgID valuer.UUID, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, error)

 	// Setter Interfaces
 	SetTTL(ctx context.Context, orgID string, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)

@@ -6,6 +6,7 @@ import (
 	"os"
 	"time"

+	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/config"
 	"github.com/SigNoz/signoz/pkg/config/envprovider"
 	"github.com/SigNoz/signoz/pkg/config/fileprovider"

@@ -122,7 +123,7 @@ func main() {
 		zeus.Config{},
 		noopzeus.NewProviderFactory(),
 		licensing.Config{},
-		func(_ sqlstore.SQLStore, _ zeus.Zeus, _ organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
+		func(_ sqlstore.SQLStore, _ zeus.Zeus, _ organization.Getter, _ analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] {
 			return nooplicensing.NewFactory()
 		},
 		signoz.NewEmailingProviderFactories(),

@@ -288,7 +288,7 @@ type Span struct {
 	Name             string            `json:"name"`
 	References       []OtelSpanRef     `json:"references,omitempty"`
 	TagMap           map[string]string `json:"tagMap"`
-	Events           []string          `json:"event"`
+	Events           []Event           `json:"event"`
 	RootName         string            `json:"rootName"`
 	StatusMessage    string            `json:"statusMessage"`
 	StatusCodeString string            `json:"statusCodeString"`

@@ -311,6 +311,7 @@ type FlamegraphSpan struct {
 	ServiceName string            `json:"serviceName"`
 	Name        string            `json:"name"`
 	Level       int64             `json:"level"`
+	Events      []Event           `json:"event"`
 	References  []OtelSpanRef     `json:"references,omitempty"`
 	Children    []*FlamegraphSpan `json:"children"`
 }

@@ -4,6 +4,7 @@ import (
 	"context"

 	"github.com/SigNoz/signoz/pkg/alertmanager"
+	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/cache"
 	"github.com/SigNoz/signoz/pkg/emailing"
 	"github.com/SigNoz/signoz/pkg/factory"

@@ -54,7 +55,7 @@ func New(
 	zeusConfig zeus.Config,
 	zeusProviderFactory factory.ProviderFactory[zeus.Zeus, zeus.Config],
 	licenseConfig licensing.Config,
-	licenseProviderFactory func(sqlstore.SQLStore, zeus.Zeus, organization.Getter) factory.ProviderFactory[licensing.Licensing, licensing.Config],
+	licenseProviderFactory func(sqlstore.SQLStore, zeus.Zeus, organization.Getter, analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config],
 	emailingProviderFactories factory.NamedMap[factory.ProviderFactory[emailing.Emailing, emailing.Config]],
 	cacheProviderFactories factory.NamedMap[factory.ProviderFactory[cache.Cache, cache.Config]],
 	webProviderFactories factory.NamedMap[factory.ProviderFactory[web.Web, web.Config]],

@@ -235,7 +236,7 @@ func New(
 		return nil, err
 	}

-	licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter)
+	licensingProviderFactory := licenseProviderFactory(sqlstore, zeus, orgGetter, analytics)
 	licensing, err := licensingProviderFactory.New(
 		ctx,
 		providerSettings,

@@ -216,5 +216,20 @@ func (provider *provider) collectOrg(ctx context.Context, orgID valuer.UUID) map
 		stats["telemetry.metrics.count"] = metrics
 	}

+	var tracesLastSeenAt time.Time
+	if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT max(timestamp) FROM signoz_traces.distributed_signoz_index_v3").Scan(&tracesLastSeenAt); err == nil {
+		stats["telemetry.traces.last_observed.time"] = tracesLastSeenAt.UTC()
+	}
+
+	var logsLastSeenAt time.Time
+	if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT fromUnixTimestamp64Nano(max(timestamp)) FROM signoz_logs.distributed_logs_v2").Scan(&logsLastSeenAt); err == nil {
+		stats["telemetry.logs.last_observed.time"] = logsLastSeenAt.UTC()
+	}
+
+	var metricsLastSeenAt time.Time
+	if err := provider.telemetryStore.ClickhouseDB().QueryRow(ctx, "SELECT toDateTime(max(unix_milli) / 1000) FROM signoz_metrics.distributed_samples_v4").Scan(&metricsLastSeenAt); err == nil {
+		stats["telemetry.metrics.last_observed.time"] = metricsLastSeenAt.UTC()
+	}
+
 	return stats
 }

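Each of the three probes above is deliberately best-effort: the scanned value is only recorded when the query succeeds (err == nil), so an empty or unreachable table degrades to an absent key rather than a failed stats report. A minimal sketch of that pattern, assuming the clickhouse-go v2 driver interface (the concrete connection type behind the telemetry store is an assumption here):

package stats

import (
	"context"
	"time"

	"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
)

// addLastObserved mirrors the traces probe above: on any query or scan
// failure the key is simply omitted from stats, never surfaced as an error.
func addLastObserved(ctx context.Context, conn driver.Conn, stats map[string]any) {
	var lastSeen time.Time
	if err := conn.QueryRow(ctx,
		"SELECT max(timestamp) FROM signoz_traces.distributed_signoz_index_v3",
	).Scan(&lastSeen); err == nil {
		stats["telemetry.traces.last_observed.time"] = lastSeen.UTC()
	}
}
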
@@ -32,6 +32,8 @@ type License struct {
 	PlanName   valuer.String
 	Features   []*Feature
 	Status     valuer.String
+	State      string
+	FreeUntil  time.Time
 	ValidFrom  int64
 	ValidUntil int64
 	CreatedAt  time.Time

@@ -165,6 +167,21 @@ func NewLicense(data []byte, organizationID valuer.UUID) (*License, error) {
 		planName = PlanNameBasic
 	}

+	state, err := extractKeyFromMapStringInterface[string](licenseData, "state")
+	if err != nil {
+		state = ""
+	}
+
+	freeUntilStr, err := extractKeyFromMapStringInterface[string](licenseData, "free_until")
+	if err != nil {
+		freeUntilStr = ""
+	}
+
+	freeUntil, err := time.Parse(time.RFC3339, freeUntilStr)
+	if err != nil {
+		freeUntil = time.Time{}
+	}
+
 	featuresFromZeus := make([]*Feature, 0)
 	if _features, ok := licenseData["features"]; ok {
 		featuresData, err := json.Marshal(_features)

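Both new fields are parsed tolerantly: a missing "state" falls back to the empty string, and a missing or malformed "free_until" falls back to the zero time.Time, so older license payloads keep working unchanged. A self-contained Go sketch of that fallback chain (the helper below is illustrative, not the extractKeyFromMapStringInterface generic used above):

package main

import (
	"fmt"
	"time"
)

// parseFreeUntil mirrors the tolerant parse above: any missing key,
// wrong type, or parse error yields the zero time instead of an error.
func parseFreeUntil(raw map[string]any) time.Time {
	s, ok := raw["free_until"].(string)
	if !ok {
		return time.Time{}
	}
	t, err := time.Parse(time.RFC3339, s)
	if err != nil {
		return time.Time{}
	}
	return t
}

func main() {
	fmt.Println(parseFreeUntil(map[string]any{"free_until": "2025-05-16T11:17:48.124202Z"}))
	fmt.Println(parseFreeUntil(map[string]any{})) // 0001-01-01 00:00:00 +0000 UTC
}
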
@@ -224,6 +241,8 @@ func NewLicense(data []byte, organizationID valuer.UUID) (*License, error) {
 		ValidFrom:       validFrom,
 		ValidUntil:      validUntil,
 		Status:          status,
+		State:           state,
+		FreeUntil:       freeUntil,
 		CreatedAt:       time.Now(),
 		UpdatedAt:       time.Now(),
 		LastValidatedAt: time.Now(),

@@ -306,6 +325,21 @@ func NewLicenseFromStorableLicense(storableLicense *StorableLicense) (*License,
 	}
 	validUntil := int64(_validUntil)

+	state, err := extractKeyFromMapStringInterface[string](storableLicense.Data, "state")
+	if err != nil {
+		state = ""
+	}
+
+	freeUntilStr, err := extractKeyFromMapStringInterface[string](storableLicense.Data, "free_until")
+	if err != nil {
+		freeUntilStr = ""
+	}
+
+	freeUntil, err := time.Parse(time.RFC3339, freeUntilStr)
+	if err != nil {
+		freeUntil = time.Time{}
+	}
+
 	return &License{
 		ID:  storableLicense.ID,
 		Key: storableLicense.Key,

@@ -315,6 +349,8 @@ func NewLicenseFromStorableLicense(storableLicense *StorableLicense) (*License,
 		ValidFrom:       validFrom,
 		ValidUntil:      validUntil,
 		Status:          status,
+		State:           state,
+		FreeUntil:       freeUntil,
 		CreatedAt:       storableLicense.CreatedAt,
 		UpdatedAt:       storableLicense.UpdatedAt,
 		LastValidatedAt: storableLicense.LastValidatedAt,

@@ -325,8 +361,10 @@ func NewLicenseFromStorableLicense(storableLicense *StorableLicense) (*License,

 func NewStatsFromLicense(license *License) map[string]any {
 	return map[string]any{
-		"license.plan": license.PlanName.StringValue(),
-		"license.id":   license.ID.StringValue(),
+		"license.id":              license.ID.StringValue(),
+		"license.plan.name":       license.PlanName.StringValue(),
+		"license.state.name":      license.State,
+		"license.free_until.time": license.FreeUntil.UTC(),
 	}
 }

@@ -74,7 +74,7 @@ func TestNewLicenseV3(t *testing.T) {
 		},
 		{
 			name: "Parse the entire license properly",
-			data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`),
+			data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1,"state":"test","free_until":"2025-05-16T11:17:48.124202Z"}`),
 			pass: true,
 			expected: &License{
 				ID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"),

@@ -87,11 +87,15 @@ func TestNewLicenseV3(t *testing.T) {
 					"status":      "ACTIVE",
 					"valid_from":  float64(1730899309),
 					"valid_until": float64(-1),
+					"state":       "test",
+					"free_until":  "2025-05-16T11:17:48.124202Z",
 				},
 				PlanName:   PlanNameEnterprise,
 				ValidFrom:  1730899309,
 				ValidUntil: -1,
 				Status:     valuer.NewString("ACTIVE"),
+				State:      "test",
+				FreeUntil:  time.Date(2025, 5, 16, 11, 17, 48, 124202000, time.UTC),
 				Features:   make([]*Feature, 0),
 				OrganizationID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"),
 			},

@@ -12,6 +12,7 @@ var (
 	QueryTypeFormula       = QueryType{valuer.NewString("builder_formula")}
 	QueryTypeSubQuery      = QueryType{valuer.NewString("builder_sub_query")}
 	QueryTypeJoin          = QueryType{valuer.NewString("builder_join")}
+	QueryTypeTraceOperator = QueryType{valuer.NewString("builder_trace_operator")}
 	QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
 	QueryTypePromQL        = QueryType{valuer.NewString("promql")}
 )

@ -74,6 +74,13 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
         }
         q.Spec = spec

+    case QueryTypeTraceOperator:
+        var spec QueryBuilderTraceOperator
+        if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
+            return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid trace operator spec")
+        }
+        q.Spec = spec
+
     case QueryTypePromQL:
         var spec PromQuery
         if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
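The new `case QueryTypeTraceOperator` above follows the same envelope pattern as the other query types: the `type` field selects a concrete spec type and the raw `spec` payload is unmarshaled into it. A minimal caller-side sketch of that dispatch (the payload is illustrative only, and the import path is an assumption based on the package name):

package main

import (
    "encoding/json"
    "fmt"

    // Import path assumed from the package name in this diff.
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypesv5"
)

func main() {
    // Hypothetical envelope payload: "type" picks the concrete spec type
    // inside QueryEnvelope.UnmarshalJSON, so Spec comes back as a
    // QueryBuilderTraceOperator value rather than raw JSON.
    payload := []byte(`{
        "type": "builder_trace_operator",
        "spec": {"name": "T1", "expression": "A => B", "limit": 10}
    }`)

    var env qbtypes.QueryEnvelope
    if err := json.Unmarshal(payload, &env); err != nil {
        fmt.Println("unmarshal failed:", err)
        return
    }
    op, ok := env.Spec.(qbtypes.QueryBuilderTraceOperator)
    fmt.Println(ok, op.Expression) // true "A => B"
}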
@ -102,6 +102,341 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
             },
             wantErr: false,
         },
+        {
+            name: "valid trace operator query with simple expression",
+            jsonData: `{
+                "schemaVersion": "v1",
+                "start": 1640995200000,
+                "end": 1640998800000,
+                "requestType": "time_series",
+                "compositeQuery": {
+                    "queries": [
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "A",
+                                "signal": "traces",
+                                "filter": {
+                                    "expression": "service.name = 'checkoutservice'"
+                                }
+                            }
+                        },
+                        {
+                            "type": "builder_trace_operator",
+                            "spec": {
+                                "name": "trace_flow_analysis",
+                                "expression": "A => B",
+                                "filter": {
+                                    "expression": "trace_duration > 200ms AND span_count >= 5"
+                                },
+                                "orderBy": [{
+                                    "key": {
+                                        "name": "trace_duration"
+                                    },
+                                    "direction": "desc"
+                                }],
+                                "limit": 100,
+                                "cursor": "eyJsYXN0X3RyYWNlX2lkIjoiYWJjZGVmIn0="
+                            }
+                        }
+                    ]
+                },
+                "variables": {
+                    "service": "frontend"
+                }
+            }`,
+            expected: QueryRangeRequest{
+                SchemaVersion: "v1",
+                Start:         1640995200000,
+                End:           1640998800000,
+                RequestType:   RequestTypeTimeSeries,
+                CompositeQuery: CompositeQuery{
+                    Queries: []QueryEnvelope{
+                        {
+                            Type: QueryTypeBuilder,
+                            Spec: QueryBuilderQuery[TraceAggregation]{
+                                Name:   "A",
+                                Signal: telemetrytypes.SignalTraces,
+                                Filter: &Filter{
+                                    Expression: "service.name = 'checkoutservice'",
+                                },
+                            },
+                        },
+                        {
+                            Type: QueryTypeTraceOperator,
+                            Spec: QueryBuilderTraceOperator{
+                                Name:       "trace_flow_analysis",
+                                Expression: "A => B",
+                                Filter: &Filter{
+                                    Expression: "trace_duration > 200ms AND span_count >= 5",
+                                },
+                                Order: []OrderBy{{
+                                    Key:       OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "trace_duration"}},
+                                    Direction: OrderDirectionDesc,
+                                }},
+                                Limit:  100,
+                                Cursor: "eyJsYXN0X3RyYWNlX2lkIjoiYWJjZGVmIn0=",
+                            },
+                        },
+                    },
+                },
+                Variables: map[string]any{
+                    "service": "frontend",
+                },
+            },
+            wantErr: false,
+        },
+
+        {
+            name: "valid trace operator with complex expression and span_count ordering",
+            jsonData: `{
+                "schemaVersion": "v1",
+                "start": 1640995200000,
+                "end": 1640998800000,
+                "requestType": "time_series",
+                "compositeQuery": {
+                    "queries": [
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "A",
+                                "signal": "traces",
+                                "filter": { "expression": "service.name = 'frontend'" }
+                            }
+                        },
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "B",
+                                "signal": "traces",
+                                "filter": { "expression": "hasError = true" }
+                            }
+                        },
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "C",
+                                "signal": "traces",
+                                "filter": { "expression": "response_status_code = '200'" }
+                            }
+                        },
+                        {
+                            "type": "builder_trace_operator",
+                            "spec": {
+                                "name": "complex_trace_analysis",
+                                "expression": "A => (B && NOT C)",
+                                "filter": { "expression": "trace_duration BETWEEN 100ms AND 5s AND span_count IN (5, 10, 15)" },
+                                "orderBy": [{
+                                    "key": { "name": "span_count" },
+                                    "direction": "asc"
+                                }],
+                                "limit": 50,
+                                "functions": [{ "name": "absolute", "args": [] }]
+                            }
+                        }
+                    ]
+                }
+            }`,
+            expected: QueryRangeRequest{
+                SchemaVersion: "v1",
+                Start:         1640995200000,
+                End:           1640998800000,
+                RequestType:   RequestTypeTimeSeries,
+                CompositeQuery: CompositeQuery{Queries: []QueryEnvelope{
+                    {
+                        Type: QueryTypeBuilder,
+                        Spec: QueryBuilderQuery[TraceAggregation]{
+                            Name:   "A",
+                            Signal: telemetrytypes.SignalTraces,
+                            Filter: &Filter{Expression: "service.name = 'frontend'"},
+                        },
+                    },
+                    {
+                        Type: QueryTypeBuilder,
+                        Spec: QueryBuilderQuery[TraceAggregation]{
+                            Name:   "B",
+                            Signal: telemetrytypes.SignalTraces,
+                            Filter: &Filter{Expression: "hasError = true"},
+                        },
+                    },
+                    {
+                        Type: QueryTypeBuilder,
+                        Spec: QueryBuilderQuery[TraceAggregation]{
+                            Name:   "C",
+                            Signal: telemetrytypes.SignalTraces,
+                            Filter: &Filter{Expression: "response_status_code = '200'"},
+                        },
+                    },
+                    {
+                        Type: QueryTypeTraceOperator,
+                        Spec: QueryBuilderTraceOperator{
+                            Name:       "complex_trace_analysis",
+                            Expression: "A => (B && NOT C)",
+                            Filter:     &Filter{Expression: "trace_duration BETWEEN 100ms AND 5s AND span_count IN (5, 10, 15)"},
+                            Order: []OrderBy{{
+                                Key:       OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: OrderBySpanCount.StringValue()}},
+                                Direction: OrderDirectionAsc,
+                            }},
+                            Limit:     50,
+                            Functions: []Function{{Name: FunctionNameAbsolute, Args: []FunctionArg{}}},
+                        },
+                    },
+                }},
+            },
+            wantErr: false,
+        },
+        {
+            name: "valid trace operator with NOT expression",
+            jsonData: `{
+                "schemaVersion": "v1",
+                "start": 1640995200000,
+                "end": 1640998800000,
+                "requestType": "time_series",
+                "compositeQuery": {
+                    "queries": [
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "A",
+                                "signal": "traces",
+                                "filter": {
+                                    "expression": "service.name = 'frontend'"
+                                }
+                            }
+                        },
+                        {
+                            "type": "builder_trace_operator",
+                            "spec": {
+                                "name": "not_trace_analysis",
+                                "expression": "NOT A",
+                                "filter": {
+                                    "expression": "trace_duration < 1s"
+                                },
+                                "disabled": false
+                            }
+                        }
+                    ]
+                }
+            }`,
+            expected: QueryRangeRequest{
+                SchemaVersion: "v1",
+                Start:         1640995200000,
+                End:           1640998800000,
+                RequestType:   RequestTypeTimeSeries,
+                CompositeQuery: CompositeQuery{
+                    Queries: []QueryEnvelope{
+                        {
+                            Type: QueryTypeBuilder,
+                            Spec: QueryBuilderQuery[TraceAggregation]{
+                                Name:   "A",
+                                Signal: telemetrytypes.SignalTraces,
+                                Filter: &Filter{
+                                    Expression: "service.name = 'frontend'",
+                                },
+                            },
+                        },
+                        {
+                            Type: QueryTypeTraceOperator,
+                            Spec: QueryBuilderTraceOperator{
+                                Name:       "not_trace_analysis",
+                                Expression: "NOT A",
+                                Filter: &Filter{
+                                    Expression: "trace_duration < 1s",
+                                },
+                                Disabled: false,
+                            },
+                        },
+                    },
+                },
+            },
+            wantErr: false,
+        },
+        {
+            name: "trace operator with binary NOT (exclusion)",
+            jsonData: `{
+                "schemaVersion": "v1",
+                "start": 1640995200000,
+                "end": 1640998800000,
+                "requestType": "time_series",
+                "compositeQuery": {
+                    "queries": [
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "A",
+                                "signal": "traces",
+                                "filter": {
+                                    "expression": "service.name = 'frontend'"
+                                }
+                            }
+                        },
+                        {
+                            "type": "builder_query",
+                            "spec": {
+                                "name": "B",
+                                "signal": "traces",
+                                "filter": {
+                                    "expression": "hasError = true"
+                                }
+                            }
+                        },
+                        {
+                            "type": "builder_trace_operator",
+                            "spec": {
+                                "name": "exclusion_analysis",
+                                "expression": "A NOT B",
+                                "filter": {
+                                    "expression": "span_count > 3"
+                                },
+                                "limit": 75
+                            }
+                        }
+                    ]
+                }
+            }`,
+            expected: QueryRangeRequest{
+                SchemaVersion: "v1",
+                Start:         1640995200000,
+                End:           1640998800000,
+                RequestType:   RequestTypeTimeSeries,
+                CompositeQuery: CompositeQuery{
+                    Queries: []QueryEnvelope{
+                        {
+                            Type: QueryTypeBuilder,
+                            Spec: QueryBuilderQuery[TraceAggregation]{
+                                Name:   "A",
+                                Signal: telemetrytypes.SignalTraces,
+                                Filter: &Filter{
+                                    Expression: "service.name = 'frontend'",
+                                },
+                            },
+                        },
+                        {
+                            Type: QueryTypeBuilder,
+                            Spec: QueryBuilderQuery[TraceAggregation]{
+                                Name:   "B",
+                                Signal: telemetrytypes.SignalTraces,
+                                Filter: &Filter{
+                                    Expression: "hasError = true",
+                                },
+                            },
+                        },
+                        {
+                            Type: QueryTypeTraceOperator,
+                            Spec: QueryBuilderTraceOperator{
+                                Name:       "exclusion_analysis",
+                                Expression: "A NOT B",
+                                Filter: &Filter{
+                                    Expression: "span_count > 3",
+                                },
+                                Limit: 75,
+                            },
+                        },
+                    },
+                },
+            },
+            wantErr: false,
+        },
         {
             name: "valid log builder query",
             jsonData: `{
@ -120,8 +455,8 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
                     "expression": "severity_text = 'ERROR'"
                 },
                 "selectFields": [{
-                    "key": "body",
-                    "type": "log"
+                    "name": "body",
+                    "fieldContext": "log"
                 }],
                 "limit": 50,
                 "offset": 10
@ -177,8 +512,8 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
                 }],
                 "stepInterval": 120,
                 "groupBy": [{
-                    "key": "method",
-                    "type": "tag"
+                    "name": "method",
+                    "fieldContext": "attribute"
                 }]
             }
         }]
@ -270,7 +605,7 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
                 "name": "error_rate",
                 "expression": "A / B * 100",
                 "functions": [{
-                    "name": "cut_off_min",
+                    "name": "cutOffMin",
                     "args": [{
                         "value": "0.3"
                     }]
@ -436,7 +771,6 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
             }
         },
         {
-            "name": "B",
             "type": "builder_formula",
             "spec": {
                 "name": "rate",
@ -526,7 +860,6 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
         "requestType": "time_series",
         "compositeQuery": {
             "queries": [{
-                "name": "A",
                 "type": "unknown_type",
                 "spec": {}
             }]
@ -543,9 +876,9 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
         "requestType": "time_series",
         "compositeQuery": {
             "queries": [{
-                "name": "A",
                 "type": "builder_query",
                 "spec": {
+                    "name": "A",
                     "signal": "unknown_signal",
                     "aggregations": []
                 }
@ -563,9 +896,9 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
         "requestType": "time_series",
         "compositeQuery": {
             "queries": [{
-                "name": "A",
                 "type": "builder_query",
                 "spec": {
+                    "name": "A",
                     "signal": "traces",
                     "aggregations": [],
                     "stepInterval": "invalid_duration"
@ -650,6 +983,21 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
                 assert.Equal(t, expectedSpec.Right.Name, actualSpec.Right.Name)
                 assert.Equal(t, expectedSpec.Type, actualSpec.Type)
                 assert.Equal(t, expectedSpec.On, actualSpec.On)
+            case QueryTypeTraceOperator:
+                expectedSpec := expectedQuery.Spec.(QueryBuilderTraceOperator)
+                actualSpec, ok := actualQuery.Spec.(QueryBuilderTraceOperator)
+                require.True(t, ok, "Expected QueryBuilderTraceOperator but got %T", actualQuery.Spec)
+                assert.Equal(t, expectedSpec.Name, actualSpec.Name)
+                assert.Equal(t, expectedSpec.Expression, actualSpec.Expression)
+                assert.Equal(t, expectedSpec.Limit, actualSpec.Limit)
+                assert.Equal(t, expectedSpec.Cursor, actualSpec.Cursor)
+                assert.Equal(t, len(expectedSpec.Order), len(actualSpec.Order))
+                for i, expectedOrder := range expectedSpec.Order {
+                    if i < len(actualSpec.Order) {
+                        assert.Equal(t, expectedOrder.Key.Name, actualSpec.Order[i].Key.Name)
+                        assert.Equal(t, expectedOrder.Direction, actualSpec.Order[i].Direction)
+                    }
+                }
             case QueryTypePromQL:
                 expectedSpec := expectedQuery.Spec.(PromQuery)
                 actualSpec, ok := actualQuery.Spec.(PromQuery)
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func TestParseTraceExpression(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
expression string
|
||||||
|
expectError bool
|
||||||
|
checkResult func(t *testing.T, result *TraceOperand)
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "simple query reference",
|
||||||
|
expression: "A",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.QueryRef)
|
||||||
|
assert.Equal(t, "A", result.QueryRef.Name)
|
||||||
|
assert.Nil(t, result.Operator)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "simple implication",
|
||||||
|
expression: "A => B",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||||
|
assert.NotNil(t, result.Left)
|
||||||
|
assert.NotNil(t, result.Right)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "B", result.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "and operation",
|
||||||
|
expression: "A && B",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorAnd, *result.Operator)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "B", result.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "or operation",
|
||||||
|
expression: "A || B",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorOr, *result.Operator)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "B", result.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "unary NOT operation",
|
||||||
|
expression: "NOT A",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorNot, *result.Operator)
|
||||||
|
assert.NotNil(t, result.Left)
|
||||||
|
assert.Nil(t, result.Right)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "binary NOT operation",
|
||||||
|
expression: "A NOT B",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorExclude, *result.Operator)
|
||||||
|
assert.NotNil(t, result.Left)
|
||||||
|
assert.NotNil(t, result.Right)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "B", result.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "complex expression with precedence",
|
||||||
|
expression: "A => B && C || D",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
// Should parse as: A => (B && (C || D)) due to precedence: NOT > || > && > =>
|
||||||
|
// The parsing finds operators from lowest precedence first
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
|
||||||
|
// Right side should be an AND operation (next lowest precedence after =>)
|
||||||
|
assert.NotNil(t, result.Right.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorAnd, *result.Right.Operator)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "simple parentheses",
|
||||||
|
expression: "(A)",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.QueryRef)
|
||||||
|
assert.Equal(t, "A", result.QueryRef.Name)
|
||||||
|
assert.Nil(t, result.Operator)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "parentheses expression",
|
||||||
|
expression: "A => (B || C)",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
|
||||||
|
// Right side should be an OR operation
|
||||||
|
assert.NotNil(t, result.Right.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorOr, *result.Right.Operator)
|
||||||
|
assert.Equal(t, "B", result.Right.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "C", result.Right.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "nested NOT with parentheses",
|
||||||
|
expression: "NOT (A && B)",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorNot, *result.Operator)
|
||||||
|
assert.Nil(t, result.Right) // Unary operator
|
||||||
|
|
||||||
|
// Left side should be an AND operation
|
||||||
|
assert.NotNil(t, result.Left.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorAnd, *result.Left.Operator)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid query reference with numbers",
|
||||||
|
expression: "123",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid query reference with special chars",
|
||||||
|
expression: "A-B",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty expression",
|
||||||
|
expression: "",
|
||||||
|
expectError: true,
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
name: "expression with extra whitespace",
|
||||||
|
expression: " A => B ",
|
||||||
|
expectError: false,
|
||||||
|
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||||
|
assert.NotNil(t, result.Operator)
|
||||||
|
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||||
|
assert.Equal(t, "A", result.Left.QueryRef.Name)
|
||||||
|
assert.Equal(t, "B", result.Right.QueryRef.Name)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
result, err := parseTraceExpression(tt.expression)
|
||||||
|
|
||||||
|
if tt.expectError {
|
||||||
|
assert.Error(t, err)
|
||||||
|
assert.Nil(t, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.NotNil(t, result)
|
||||||
|
if tt.checkResult != nil {
|
||||||
|
tt.checkResult(t, result)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestQueryBuilderTraceOperator_ValidateTraceOperator(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
traceOperator QueryBuilderTraceOperator
|
||||||
|
queries []QueryEnvelope
|
||||||
|
expectError bool
|
||||||
|
errorContains string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "valid trace operator with trace queries",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
Filter: &Filter{
|
||||||
|
Expression: "trace_duration > 200ms",
|
||||||
|
},
|
||||||
|
Order: []OrderBy{{
|
||||||
|
Key: OrderByKey{
|
||||||
|
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: OrderByTraceDuration.StringValue(),
|
||||||
|
FieldContext: telemetrytypes.FieldContextSpan,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Direction: OrderDirectionDesc,
|
||||||
|
}},
|
||||||
|
Limit: 100,
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "B",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "empty expression",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "expression cannot be empty",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "referenced query does not exist",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "query 'B' referenced in trace operator expression does not exist or is not a trace query",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "referenced query is not trace signal",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[LogAggregation]{
|
||||||
|
Name: "B",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "query 'B' referenced in trace operator expression does not exist or is not a trace query",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid orderBy field",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A",
|
||||||
|
Order: []OrderBy{{
|
||||||
|
Key: OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "invalid_string"}},
|
||||||
|
Direction: OrderDirectionDesc,
|
||||||
|
}},
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{Name: "A", Signal: telemetrytypes.SignalTraces},
|
||||||
|
}},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "orderBy[0] field must be either 'span_count' or 'trace_duration'",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "invalid pagination limit",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A",
|
||||||
|
Limit: -1,
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "limit must be non-negative",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "limit exceeds maximum",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A",
|
||||||
|
Limit: 15000,
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "limit cannot exceed 10000",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "valid returnSpansFrom",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
ReturnSpansFrom: "A",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "B",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "returnSpansFrom references non-existent query",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
ReturnSpansFrom: "C",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "B",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "returnSpansFrom references query 'C' which does not exist or is not a trace query",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "returnSpansFrom references query not in expression",
|
||||||
|
traceOperator: QueryBuilderTraceOperator{
|
||||||
|
Name: "test_operator",
|
||||||
|
Expression: "A => B",
|
||||||
|
ReturnSpansFrom: "C",
|
||||||
|
},
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "A",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "B",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeBuilder,
|
||||||
|
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||||
|
Name: "C",
|
||||||
|
Signal: telemetrytypes.SignalTraces,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "returnSpansFrom references query 'C' which is not used in the expression",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := tt.traceOperator.ValidateTraceOperator(tt.queries)
|
||||||
|
|
||||||
|
if tt.expectError {
|
||||||
|
assert.Error(t, err)
|
||||||
|
if tt.errorContains != "" {
|
||||||
|
assert.Contains(t, err.Error(), tt.errorContains)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestValidateUniqueTraceOperator(t *testing.T) {
|
||||||
|
tests := []struct {
|
||||||
|
name string
|
||||||
|
queries []QueryEnvelope
|
||||||
|
expectError bool
|
||||||
|
errorContains string
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "no trace operators",
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{Type: QueryTypeBuilder},
|
||||||
|
{Type: QueryTypeFormula},
|
||||||
|
},
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "single trace operator",
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{Type: QueryTypeBuilder},
|
||||||
|
{
|
||||||
|
Type: QueryTypeTraceOperator,
|
||||||
|
Spec: QueryBuilderTraceOperator{
|
||||||
|
Name: "T1",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{Type: QueryTypeFormula},
|
||||||
|
},
|
||||||
|
expectError: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multiple trace operators",
|
||||||
|
queries: []QueryEnvelope{
|
||||||
|
{Type: QueryTypeBuilder},
|
||||||
|
{
|
||||||
|
Type: QueryTypeTraceOperator,
|
||||||
|
Spec: QueryBuilderTraceOperator{
|
||||||
|
Name: "T1",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: QueryTypeTraceOperator,
|
||||||
|
Spec: QueryBuilderTraceOperator{
|
||||||
|
Name: "T2",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{Type: QueryTypeFormula},
|
||||||
|
},
|
||||||
|
expectError: true,
|
||||||
|
errorContains: "only one trace operator is allowed per request, found 2 trace operators: [T1 T2]",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
err := ValidateUniqueTraceOperator(tt.queries)
|
||||||
|
|
||||||
|
if tt.expectError {
|
||||||
|
assert.Error(t, err)
|
||||||
|
if tt.errorContains != "" {
|
||||||
|
assert.Contains(t, err.Error(), tt.errorContains)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@ -0,0 +1,438 @@
+package querybuildertypesv5
+
+import (
+    "regexp"
+    "strings"
+
+    "github.com/SigNoz/signoz/pkg/errors"
+    "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+    "github.com/SigNoz/signoz/pkg/valuer"
+)
+
+type TraceOperatorType struct{ valuer.String }
+
+var (
+    TraceOperatorDirectDescendant   = TraceOperatorType{valuer.NewString("=>")}
+    TraceOperatorIndirectDescendant = TraceOperatorType{valuer.NewString("->")}
+    TraceOperatorAnd                = TraceOperatorType{valuer.NewString("&&")}
+    TraceOperatorOr                 = TraceOperatorType{valuer.NewString("||")}
+    TraceOperatorNot                = TraceOperatorType{valuer.NewString("NOT")}
+    TraceOperatorExclude            = TraceOperatorType{valuer.NewString("NOT")}
+)
+
+type TraceOrderBy struct {
+    valuer.String
+}
+
+var (
+    OrderBySpanCount     = TraceOrderBy{valuer.NewString("span_count")}
+    OrderByTraceDuration = TraceOrderBy{valuer.NewString("trace_duration")}
+)
+
+type QueryBuilderTraceOperator struct {
+    Name     string `json:"name"`
+    Disabled bool   `json:"disabled,omitempty"`
+
+    Expression string `json:"expression"`
+
+    Filter *Filter `json:"filter,omitempty"`
+
+    // User-configurable span return strategy - which query's spans to return
+    ReturnSpansFrom string `json:"returnSpansFrom,omitempty"`
+
+    // Trace-specific ordering (only span_count and trace_duration allowed)
+    Order []OrderBy `json:"orderBy,omitempty"`
+
+    Aggregations []TraceAggregation `json:"aggregations,omitempty"`
+    StepInterval Step               `json:"stepInterval,omitempty"`
+    GroupBy      []GroupByKey       `json:"groupBy,omitempty"`
+
+    Limit  int    `json:"limit,omitempty"`
+    Cursor string `json:"cursor,omitempty"`
+
+    // Other post-processing options
+    SelectFields []telemetrytypes.TelemetryFieldKey `json:"selectFields,omitempty"`
+    Functions    []Function                         `json:"functions,omitempty"`
+
+    // Internal parsed representation (not exposed in JSON)
+    ParsedExpression *TraceOperand `json:"-"`
+}
+
+// TraceOperand represents the internal parsed tree structure
+type TraceOperand struct {
+    // For leaf nodes - reference to a query
+    QueryRef *TraceOperatorQueryRef `json:"-"`
+
+    // For nested operations
+    Operator *TraceOperatorType `json:"-"`
+    Left     *TraceOperand      `json:"-"`
+    Right    *TraceOperand      `json:"-"`
+}
+
+// TraceOperatorQueryRef represents a reference to another query
+type TraceOperatorQueryRef struct {
+    Name string `json:"name"`
+}
+
// ParseExpression parses the expression string into a tree structure
|
||||||
|
func (q *QueryBuilderTraceOperator) ParseExpression() error {
|
||||||
|
if q.Expression == "" {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"expression cannot be empty",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
parsed, err := parseTraceExpression(q.Expression)
|
||||||
|
if err != nil {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
err,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"failed to parse expression '%s'",
|
||||||
|
q.Expression,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
q.ParsedExpression = parsed
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateTraceOperator validates that all referenced queries exist and are trace queries
|
||||||
|
func (q *QueryBuilderTraceOperator) ValidateTraceOperator(queries []QueryEnvelope) error {
|
||||||
|
// Parse the expression
|
||||||
|
if err := q.ParseExpression(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate orderBy field if present
|
||||||
|
if err := q.ValidateOrderBy(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate pagination parameters
|
||||||
|
if err := q.ValidatePagination(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a map of query names to track if they exist and their signal type
|
||||||
|
availableQueries := make(map[string]telemetrytypes.Signal)
|
||||||
|
|
||||||
|
// Only collect trace queries
|
||||||
|
for _, query := range queries {
|
||||||
|
if query.Type == QueryTypeBuilder {
|
||||||
|
switch spec := query.Spec.(type) {
|
||||||
|
case QueryBuilderQuery[TraceAggregation]:
|
||||||
|
if spec.Signal == telemetrytypes.SignalTraces {
|
||||||
|
availableQueries[spec.Name] = spec.Signal
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all query names referenced in the expression
|
||||||
|
referencedQueries := q.collectReferencedQueries(q.ParsedExpression)
|
||||||
|
|
||||||
|
// Validate that all referenced queries exist and are trace queries
|
||||||
|
for _, queryName := range referencedQueries {
|
||||||
|
signal, exists := availableQueries[queryName]
|
||||||
|
if !exists {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"query '%s' referenced in trace operator expression does not exist or is not a trace query",
|
||||||
|
queryName,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// This check is redundant since we only add trace queries to availableQueries, but keeping for clarity
|
||||||
|
if signal != telemetrytypes.SignalTraces {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"query '%s' must be a trace query, but found signal '%s'",
|
||||||
|
queryName,
|
||||||
|
signal,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate ReturnSpansFrom if specified
|
||||||
|
if q.ReturnSpansFrom != "" {
|
||||||
|
if _, exists := availableQueries[q.ReturnSpansFrom]; !exists {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"returnSpansFrom references query '%s' which does not exist or is not a trace query",
|
||||||
|
q.ReturnSpansFrom,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure the query is referenced in the expression
|
||||||
|
found := false
|
||||||
|
for _, queryName := range referencedQueries {
|
||||||
|
if queryName == q.ReturnSpansFrom {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !found {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"returnSpansFrom references query '%s' which is not used in the expression '%s'",
|
||||||
|
q.ReturnSpansFrom,
|
||||||
|
q.Expression,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateOrderBy validates the orderBy field
|
||||||
|
func (q *QueryBuilderTraceOperator) ValidateOrderBy() error {
|
||||||
|
if len(q.Order) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, orderBy := range q.Order {
|
||||||
|
// Validate field is one of the allowed values
|
||||||
|
fieldName := orderBy.Key.Name
|
||||||
|
if fieldName != OrderBySpanCount.StringValue() && fieldName != OrderByTraceDuration.StringValue() {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"orderBy[%d] field must be either '%s' or '%s', got '%s'",
|
||||||
|
i, OrderBySpanCount.StringValue(), OrderByTraceDuration.StringValue(), fieldName,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate direction
|
||||||
|
if orderBy.Direction != OrderDirectionAsc && orderBy.Direction != OrderDirectionDesc {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"orderBy[%d] direction must be either 'asc' or 'desc', got '%s'",
|
||||||
|
i, orderBy.Direction,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidatePagination validates pagination parameters (AIP-158 compliance)
|
||||||
|
func (q *QueryBuilderTraceOperator) ValidatePagination() error {
|
||||||
|
if q.Limit < 0 {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"limit must be non-negative, got %d",
|
||||||
|
q.Limit,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// For production use, you might want to enforce maximum limits
|
||||||
|
if q.Limit > 10000 {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"limit cannot exceed 10000, got %d",
|
||||||
|
q.Limit,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// collectReferencedQueries collects all query names referenced in the expression tree
|
||||||
|
func (q *QueryBuilderTraceOperator) collectReferencedQueries(operand *TraceOperand) []string {
|
||||||
|
if operand == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var queries []string
|
||||||
|
|
||||||
|
if operand.QueryRef != nil {
|
||||||
|
queries = append(queries, operand.QueryRef.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Recursively collect from children
|
||||||
|
queries = append(queries, q.collectReferencedQueries(operand.Left)...)
|
||||||
|
queries = append(queries, q.collectReferencedQueries(operand.Right)...)
|
||||||
|
|
||||||
|
// Remove duplicates
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
unique := []string{}
|
||||||
|
for _, q := range queries {
|
||||||
|
if !seen[q] {
|
||||||
|
seen[q] = true
|
||||||
|
unique = append(unique, q)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return unique
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateUniqueTraceOperator ensures only one trace operator exists in queries
|
||||||
|
func ValidateUniqueTraceOperator(queries []QueryEnvelope) error {
|
||||||
|
traceOperatorCount := 0
|
||||||
|
var traceOperatorNames []string
|
||||||
|
|
||||||
|
for _, query := range queries {
|
||||||
|
if query.Type == QueryTypeTraceOperator {
|
||||||
|
// Extract the name from the trace operator spec
|
||||||
|
if spec, ok := query.Spec.(QueryBuilderTraceOperator); ok {
|
||||||
|
traceOperatorCount++
|
||||||
|
traceOperatorNames = append(traceOperatorNames, spec.Name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if traceOperatorCount > 1 {
|
||||||
|
return errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"only one trace operator is allowed per request, found %d trace operators: %v",
|
||||||
|
traceOperatorCount,
|
||||||
|
traceOperatorNames,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseTraceExpression parses an expression string into a tree structure
|
||||||
|
// Handles precedence: NOT (highest) > || > && > => (lowest)
|
||||||
|
func parseTraceExpression(expr string) (*TraceOperand, error) {
|
||||||
|
expr = strings.TrimSpace(expr)
|
||||||
|
|
||||||
|
// Handle parentheses
|
||||||
|
if strings.HasPrefix(expr, "(") && strings.HasSuffix(expr, ")") {
|
||||||
|
// Check if parentheses are balanced
|
||||||
|
if isBalancedParentheses(expr[1 : len(expr)-1]) {
|
||||||
|
return parseTraceExpression(expr[1 : len(expr)-1])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle unary NOT operator (prefix)
|
||||||
|
if strings.HasPrefix(expr, "NOT ") {
|
||||||
|
operand, err := parseTraceExpression(expr[4:])
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
notOp := TraceOperatorNot
|
||||||
|
return &TraceOperand{
|
||||||
|
Operator: ¬Op,
|
||||||
|
Left: operand,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find binary operators with lowest precedence first (=> has lowest precedence)
|
||||||
|
// Order: => (lowest) < && < || < NOT (highest)
|
||||||
|
operators := []string{"=>", "&&", "||", " NOT "}
|
||||||
|
|
||||||
|
for _, op := range operators {
|
||||||
|
if pos := findOperatorPosition(expr, op); pos != -1 {
|
||||||
|
leftExpr := strings.TrimSpace(expr[:pos])
|
||||||
|
rightExpr := strings.TrimSpace(expr[pos+len(op):])
|
||||||
|
|
||||||
|
left, err := parseTraceExpression(leftExpr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
right, err := parseTraceExpression(rightExpr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var opType TraceOperatorType
|
||||||
|
switch strings.TrimSpace(op) {
|
||||||
|
case "=>":
|
||||||
|
opType = TraceOperatorDirectDescendant
|
||||||
|
case "&&":
|
||||||
|
opType = TraceOperatorAnd
|
||||||
|
case "||":
|
||||||
|
opType = TraceOperatorOr
|
||||||
|
case "NOT":
|
||||||
|
opType = TraceOperatorExclude // Binary NOT (A NOT B)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &TraceOperand{
|
||||||
|
Operator: &opType,
|
||||||
|
Left: left,
|
||||||
|
Right: right,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no operators found, this should be a query reference
|
||||||
|
if matched, _ := regexp.MatchString(`^[A-Za-z][A-Za-z0-9_]*$`, expr); !matched {
|
||||||
|
return nil, errors.WrapInvalidInputf(
|
||||||
|
nil,
|
||||||
|
errors.CodeInvalidInput,
|
||||||
|
"invalid query reference '%s'",
|
||||||
|
expr,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &TraceOperand{
|
||||||
|
QueryRef: &TraceOperatorQueryRef{Name: expr},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
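To make the precedence concrete: the operator list is scanned from lowest precedence (`=>`) upward, and findOperatorPosition (below) returns the rightmost match at parenthesis depth 0, so an expression splits at its loosest-binding operator first and nests to the right. A sketch of the resulting tree for `A => B && C || D`, written as a test since parseTraceExpression is unexported (an illustrative sketch, not part of this diff):

func TestPrecedenceSketch(t *testing.T) {
    root, err := parseTraceExpression("A => B && C || D")
    require.NoError(t, err)

    // => is found first (lowest precedence), so the tree is right-nested:
    //   A => (B && (C || D))
    assert.Equal(t, TraceOperatorDirectDescendant, *root.Operator)
    assert.Equal(t, "A", root.Left.QueryRef.Name)
    assert.Equal(t, TraceOperatorAnd, *root.Right.Operator)
    assert.Equal(t, "B", root.Right.Left.QueryRef.Name)
    assert.Equal(t, TraceOperatorOr, *root.Right.Right.Operator)
    assert.Equal(t, "C", root.Right.Right.Left.QueryRef.Name)
    assert.Equal(t, "D", root.Right.Right.Right.QueryRef.Name)
}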
+// isBalancedParentheses checks if parentheses are balanced in the expression
+func isBalancedParentheses(expr string) bool {
+    depth := 0
+    for _, char := range expr {
+        if char == '(' {
+            depth++
+        } else if char == ')' {
+            depth--
+            if depth < 0 {
+                return false
+            }
+        }
+    }
+    return depth == 0
+}
+
+// findOperatorPosition finds the position of an operator, respecting parentheses
+func findOperatorPosition(expr, op string) int {
+    depth := 0
+    opLen := len(op)
+
+    // Scan from right to left to find the rightmost operator at depth 0
+    for i := len(expr) - 1; i >= 0; i-- {
+        char := expr[i]
+
+        // Update depth based on parentheses (scanning right to left)
+        if char == ')' {
+            depth++
+        } else if char == '(' {
+            depth--
+        }
+
+        // Only check for operators when we're at depth 0 (outside parentheses)
+        // and make sure we have enough characters for the operator
+        if depth == 0 && i+opLen <= len(expr) {
+            // Check if the substring matches our operator
+            if expr[i:i+opLen] == op {
+                // For " NOT " (binary), ensure proper spacing
+                if op == " NOT " {
+                    // Make sure it's properly space-padded
+                    if i > 0 && i+opLen < len(expr) {
+                        return i
+                    }
+                } else {
+                    // For other operators (=>, &&, ||), return immediately
+                    return i
+                }
+            }
+        }
+    }
+    return -1
+}
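Taken together, the intended call sequence for a composite query appears to be: enforce the single-operator rule across the envelope list first, then validate the lone operator against its sibling queries. A minimal sketch of such a caller (validateComposite is a hypothetical helper, not part of this diff):

// validateComposite is a hypothetical caller-side helper showing how the
// validation entry points above are meant to compose.
func validateComposite(queries []QueryEnvelope) error {
    // At most one trace operator per request.
    if err := ValidateUniqueTraceOperator(queries); err != nil {
        return err
    }
    for _, q := range queries {
        if q.Type != QueryTypeTraceOperator {
            continue
        }
        if op, ok := q.Spec.(QueryBuilderTraceOperator); ok {
            // Parses the expression, validates orderBy and limit, and checks
            // that every referenced query exists and is a trace query.
            if err := op.ValidateTraceOperator(queries); err != nil {
                return err
            }
        }
    }
    return nil
}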