diff --git a/.devenv/docker/clickhouse/compose.yaml b/.devenv/docker/clickhouse/compose.yaml index 6ab934337d05..e8c72d679ef7 100644 --- a/.devenv/docker/clickhouse/compose.yaml +++ b/.devenv/docker/clickhouse/compose.yaml @@ -40,7 +40,7 @@ services: timeout: 5s retries: 3 schema-migrator-sync: - image: signoz/signoz-schema-migrator:v0.128.0 + image: signoz/signoz-schema-migrator:v0.128.2 container_name: schema-migrator-sync command: - sync @@ -53,7 +53,7 @@ services: condition: service_healthy restart: on-failure schema-migrator-async: - image: signoz/signoz-schema-migrator:v0.128.0 + image: signoz/signoz-schema-migrator:v0.128.2 container_name: schema-migrator-async command: - async diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9758323f7d9e..099383f29faf 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -7,14 +7,38 @@ /frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv /deploy/ @SigNoz/devops .github @SigNoz/devops + +# Scaffold Owners /pkg/config/ @grandwizard28 /pkg/errors/ @grandwizard28 /pkg/factory/ @grandwizard28 /pkg/types/ @grandwizard28 +/pkg/valuer/ @grandwizard28 +/cmd/ @grandwizard28 .golangci.yml @grandwizard28 + +# Zeus Owners /pkg/zeus/ @vikrantgupta25 -/pkg/licensing/ @vikrantgupta25 -/pkg/sqlmigration/ @vikrantgupta25 /ee/zeus/ @vikrantgupta25 +/pkg/licensing/ @vikrantgupta25 /ee/licensing/ @vikrantgupta25 -/ee/sqlmigration/ @vikrantgupta25 \ No newline at end of file + +# SQL Owners +/pkg/sqlmigration/ @vikrantgupta25 +/ee/sqlmigration/ @vikrantgupta25 +/pkg/sqlschema/ @vikrantgupta25 +/ee/sqlschema/ @vikrantgupta25 + +# Analytics Owners +/pkg/analytics/ @vikrantgupta25 +/pkg/statsreporter/ @vikrantgupta25 + +# Querier Owners +/pkg/querier/ @srikanthccv +/pkg/variables/ @srikanthccv +/pkg/types/querybuildertypes/ @srikanthccv +/pkg/querybuilder/ @srikanthccv +/pkg/telemetrylogs/ @srikanthccv +/pkg/telemetrymetadata/ @srikanthccv +/pkg/telemetrymetrics/ @srikanthccv +/pkg/telemetrytraces/ @srikanthccv 
diff --git a/.github/workflows/build-community.yaml b/.github/workflows/build-community.yaml index b0d0ec70b326..1a80393c7ba0 100644 --- a/.github/workflows/build-community.yaml +++ b/.github/workflows/build-community.yaml @@ -66,7 +66,7 @@ jobs: GO_NAME: signoz-community GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }} GO_INPUT_ARTIFACT_PATH: frontend/build - GO_BUILD_CONTEXT: ./pkg/query-service + GO_BUILD_CONTEXT: ./cmd/community GO_BUILD_FLAGS: >- -tags timetzdata -ldflags='-linkmode external -extldflags \"-static\" -s -w @@ -78,6 +78,6 @@ jobs: -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr' GO_CGO_ENABLED: 1 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}' - DOCKER_DOCKERFILE_PATH: ./pkg/query-service/Dockerfile.multi-arch + DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch DOCKER_MANIFEST: true DOCKER_PROVIDERS: dockerhub diff --git a/.github/workflows/build-enterprise.yaml b/.github/workflows/build-enterprise.yaml index 36aba8533f98..abc8ddaf0c76 100644 --- a/.github/workflows/build-enterprise.yaml +++ b/.github/workflows/build-enterprise.yaml @@ -96,7 +96,7 @@ jobs: GO_VERSION: 1.23 GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }} GO_INPUT_ARTIFACT_PATH: frontend/build - GO_BUILD_CONTEXT: ./ee/query-service + GO_BUILD_CONTEXT: ./cmd/enterprise GO_BUILD_FLAGS: >- -tags timetzdata -ldflags='-linkmode external -extldflags \"-static\" -s -w @@ -112,6 +112,6 @@ jobs: -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr' GO_CGO_ENABLED: 1 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}' - DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch + DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch DOCKER_MANIFEST: true DOCKER_PROVIDERS: ${{ needs.prepare.outputs.docker_providers }} diff --git a/.github/workflows/build-staging.yaml b/.github/workflows/build-staging.yaml index dec3ba121772..89d829c67076 100644 --- 
a/.github/workflows/build-staging.yaml +++ b/.github/workflows/build-staging.yaml @@ -95,7 +95,7 @@ jobs: GO_VERSION: 1.23 GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }} GO_INPUT_ARTIFACT_PATH: frontend/build - GO_BUILD_CONTEXT: ./ee/query-service + GO_BUILD_CONTEXT: ./cmd/enterprise GO_BUILD_FLAGS: >- -tags timetzdata -ldflags='-linkmode external -extldflags \"-static\" -s -w @@ -111,7 +111,7 @@ jobs: -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr' GO_CGO_ENABLED: 1 DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}' - DOCKER_DOCKERFILE_PATH: ./ee/query-service/Dockerfile.multi-arch + DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch DOCKER_MANIFEST: true DOCKER_PROVIDERS: gcp staging: diff --git a/.github/workflows/gor-signoz-community.yaml b/.github/workflows/gor-signoz-community.yaml index 5fb3ff1cb55d..5e3b51c92228 100644 --- a/.github/workflows/gor-signoz-community.yaml +++ b/.github/workflows/gor-signoz-community.yaml @@ -36,7 +36,7 @@ jobs: - ubuntu-latest - macos-latest env: - CONFIG_PATH: pkg/query-service/.goreleaser.yaml + CONFIG_PATH: cmd/community/.goreleaser.yaml runs-on: ${{ matrix.os }} steps: - name: checkout @@ -100,7 +100,7 @@ jobs: needs: build env: DOCKER_CLI_EXPERIMENTAL: "enabled" - WORKDIR: pkg/query-service + WORKDIR: cmd/community steps: - name: checkout uses: actions/checkout@v4 diff --git a/.github/workflows/gor-signoz.yaml b/.github/workflows/gor-signoz.yaml index 4f8f923fe834..dc70413aff01 100644 --- a/.github/workflows/gor-signoz.yaml +++ b/.github/workflows/gor-signoz.yaml @@ -50,7 +50,7 @@ jobs: - ubuntu-latest - macos-latest env: - CONFIG_PATH: ee/query-service/.goreleaser.yaml + CONFIG_PATH: cmd/enterprise/.goreleaser.yaml runs-on: ${{ matrix.os }} steps: - name: checkout diff --git a/.github/workflows/integrationci.yaml b/.github/workflows/integrationci.yaml index f756f3865585..d1d39e2eb08e 100644 --- a/.github/workflows/integrationci.yaml +++ 
b/.github/workflows/integrationci.yaml @@ -20,9 +20,9 @@ jobs: - sqlite clickhouse-version: - 24.1.2-alpine - - 24.12-alpine + - 25.5.6 schema-migrator-version: - - v0.128.0 + - v0.128.1 postgres-version: - 15 if: | diff --git a/LICENSE b/LICENSE index 2fef891b370a..7e1ae4f6bad7 100644 --- a/LICENSE +++ b/LICENSE @@ -2,7 +2,7 @@ Copyright (c) 2020-present SigNoz Inc. Portions of this software are licensed as follows: -* All content that resides under the "ee/" directory of this repository, if that directory exists, is licensed under the license defined in "ee/LICENSE". +* All content that resides under the "ee/" and the "cmd/enterprise/" directories of this repository, if those directories exist, is licensed under the license defined in "ee/LICENSE". * All third party components incorporated into the SigNoz Software are licensed under the original license provided by the owner of the applicable component. * Content outside of the above mentioned directories or restrictions above is available under the "MIT Expat" license as defined below. 
diff --git a/Makefile b/Makefile index 31b1764b206a..061bffc64ee4 100644 --- a/Makefile +++ b/Makefile @@ -20,18 +20,18 @@ GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO = -X github.com/SigNoz/signoz/ee/zeus.depreca GO_BUILD_VERSION_LDFLAGS = -X github.com/SigNoz/signoz/pkg/version.version=$(VERSION) -X github.com/SigNoz/signoz/pkg/version.hash=$(COMMIT_SHORT_SHA) -X github.com/SigNoz/signoz/pkg/version.time=$(TIMESTAMP) -X github.com/SigNoz/signoz/pkg/version.branch=$(BRANCH_NAME) GO_BUILD_ARCHS_COMMUNITY = $(addprefix go-build-community-,$(ARCHS)) -GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/pkg/query-service +GO_BUILD_CONTEXT_COMMUNITY = $(SRC)/cmd/community GO_BUILD_LDFLAGS_COMMUNITY = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=community GO_BUILD_ARCHS_ENTERPRISE = $(addprefix go-build-enterprise-,$(ARCHS)) GO_BUILD_ARCHS_ENTERPRISE_RACE = $(addprefix go-build-enterprise-race-,$(ARCHS)) -GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/ee/query-service +GO_BUILD_CONTEXT_ENTERPRISE = $(SRC)/cmd/enterprise GO_BUILD_LDFLAGS_ENTERPRISE = $(GO_BUILD_VERSION_LDFLAGS) -X github.com/SigNoz/signoz/pkg/version.variant=enterprise $(GO_BUILD_LDFLAG_ZEUS_URL) $(GO_BUILD_LDFLAG_LICENSE_SIGNOZ_IO) DOCKER_BUILD_ARCHS_COMMUNITY = $(addprefix docker-build-community-,$(ARCHS)) -DOCKERFILE_COMMUNITY = $(SRC)/pkg/query-service/Dockerfile +DOCKERFILE_COMMUNITY = $(SRC)/cmd/community/Dockerfile DOCKER_REGISTRY_COMMUNITY ?= docker.io/signoz/signoz-community DOCKER_BUILD_ARCHS_ENTERPRISE = $(addprefix docker-build-enterprise-,$(ARCHS)) -DOCKERFILE_ENTERPRISE = $(SRC)/ee/query-service/Dockerfile +DOCKERFILE_ENTERPRISE = $(SRC)/cmd/enterprise/Dockerfile DOCKER_REGISTRY_ENTERPRISE ?= docker.io/signoz/signoz JS_BUILD_CONTEXT = $(SRC)/frontend @@ -74,7 +74,7 @@ go-run-enterprise: ## Runs the enterprise go backend server SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \ SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \ go run -race \ - $(GO_BUILD_CONTEXT_ENTERPRISE)/main.go \ + 
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \ --config ./conf/prometheus.yml \ --cluster cluster @@ -92,7 +92,7 @@ go-run-community: ## Runs the community go backend server SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \ SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \ go run -race \ - $(GO_BUILD_CONTEXT_COMMUNITY)/main.go \ + $(GO_BUILD_CONTEXT_COMMUNITY)/*.go \ --config ./conf/prometheus.yml \ --cluster cluster diff --git a/README.md b/README.md index cc0925878a77..b3c4c83d8f07 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,6 @@

All your logs, metrics, and traces in one place. Monitor your application, spot issues before they occur and troubleshoot downtime quickly with rich context. SigNoz is a cost-effective open-source alternative to Datadog and New Relic. Visit signoz.io for the full documentation, tutorials, and guide.

- Downloads GitHub issues tweet @@ -231,6 +230,8 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu - [Shaheer Kochai](https://github.com/ahmadshaheer) - [Amlan Kumar Nandy](https://github.com/amlannandy) - [Sahil Khan](https://github.com/sawhil) +- [Aditya Singh](https://github.com/aks07) +- [Abhi Kumar](https://github.com/ahrefabhi) #### DevOps diff --git a/pkg/query-service/.goreleaser.yaml b/cmd/community/.goreleaser.yaml similarity index 97% rename from pkg/query-service/.goreleaser.yaml rename to cmd/community/.goreleaser.yaml index 3ffd74d07f76..16a7fe7d95e4 100644 --- a/pkg/query-service/.goreleaser.yaml +++ b/cmd/community/.goreleaser.yaml @@ -11,7 +11,7 @@ before: builds: - id: signoz binary: bin/signoz - main: pkg/query-service/main.go + main: cmd/community env: - CGO_ENABLED=1 - >- diff --git a/pkg/query-service/Dockerfile b/cmd/community/Dockerfile similarity index 91% rename from pkg/query-service/Dockerfile rename to cmd/community/Dockerfile index 38609cc182fa..22433506f6a2 100644 --- a/pkg/query-service/Dockerfile +++ b/cmd/community/Dockerfile @@ -16,4 +16,4 @@ COPY frontend/build/ /etc/signoz/web/ RUN chmod 755 /root /root/signoz -ENTRYPOINT ["./signoz"] +ENTRYPOINT ["./signoz", "server"] \ No newline at end of file diff --git a/pkg/query-service/Dockerfile.multi-arch b/cmd/community/Dockerfile.multi-arch similarity index 90% rename from pkg/query-service/Dockerfile.multi-arch rename to cmd/community/Dockerfile.multi-arch index 229b50d84349..3a6c479a604e 100644 --- a/pkg/query-service/Dockerfile.multi-arch +++ b/cmd/community/Dockerfile.multi-arch @@ -17,4 +17,4 @@ COPY frontend/build/ /etc/signoz/web/ RUN chmod 755 /root /root/signoz-community -ENTRYPOINT ["./signoz-community"] +ENTRYPOINT ["./signoz-community", "server"] diff --git a/cmd/community/main.go b/cmd/community/main.go new file mode 100644 index 000000000000..e188635734d3 --- /dev/null +++ b/cmd/community/main.go @@ -0,0 +1,18 @@ +package main + +import 
( + "log/slog" + + "github.com/SigNoz/signoz/cmd" + "github.com/SigNoz/signoz/pkg/instrumentation" +) + +func main() { + // initialize logger for logging in the cmd/ package. This logger is different from the logger used in the application. + logger := instrumentation.NewLogger(instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}}) + + // register a list of commands to the root command + registerServer(cmd.RootCmd, logger) + + cmd.Execute(logger) +} diff --git a/cmd/community/server.go b/cmd/community/server.go new file mode 100644 index 000000000000..9def8a147ace --- /dev/null +++ b/cmd/community/server.go @@ -0,0 +1,116 @@ +package main + +import ( + "context" + "log/slog" + "time" + + "github.com/SigNoz/signoz/cmd" + "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" + "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/licensing/nooplicensing" + "github.com/SigNoz/signoz/pkg/modules/organization" + "github.com/SigNoz/signoz/pkg/query-service/app" + "github.com/SigNoz/signoz/pkg/signoz" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/version" + "github.com/SigNoz/signoz/pkg/zeus" + "github.com/SigNoz/signoz/pkg/zeus/noopzeus" + "github.com/spf13/cobra" +) + +func registerServer(parentCmd *cobra.Command, logger *slog.Logger) { + var flags signoz.DeprecatedFlags + + serverCmd := &cobra.Command{ + Use: "server", + Short: "Run the SigNoz server", + FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true}, + RunE: func(currCmd *cobra.Command, args []string) error { + config, err := cmd.NewSigNozConfig(currCmd.Context(), flags) + if err != nil { + return err + } + + return runServer(currCmd.Context(), config, logger) + }, + } + + 
flags.RegisterFlags(serverCmd) + parentCmd.AddCommand(serverCmd) +} + +func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) error { + // print the version + version.Info.PrettyPrint(config.Version) + + // add enterprise sqlstore factories to the community sqlstore factories + sqlstoreFactories := signoz.NewSQLStoreProviderFactories() + if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil { + logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err) + return err + } + + jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour) + + signoz, err := signoz.New( + ctx, + config, + jwt, + zeus.Config{}, + noopzeus.NewProviderFactory(), + licensing.Config{}, + func(_ sqlstore.SQLStore, _ zeus.Zeus, _ organization.Getter, _ analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] { + return nooplicensing.NewFactory() + }, + signoz.NewEmailingProviderFactories(), + signoz.NewCacheProviderFactories(), + signoz.NewWebProviderFactories(), + func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { + return signoz.NewSQLSchemaProviderFactories(sqlstore) + }, + signoz.NewSQLStoreProviderFactories(), + signoz.NewTelemetryStoreProviderFactories(), + ) + if err != nil { + logger.ErrorContext(ctx, "failed to create signoz", "error", err) + return err + } + + server, err := app.NewServer(config, signoz, jwt) + if err != nil { + logger.ErrorContext(ctx, "failed to create server", "error", err) + return err + } + + if err := server.Start(ctx); err != nil { + logger.ErrorContext(ctx, "failed to start server", "error", err) + return err + } + + signoz.Start(ctx) + + if err := signoz.Wait(ctx); err != nil { + logger.ErrorContext(ctx, "failed to start signoz", "error", err) + return err + } + + err = server.Stop(ctx) + if err != nil { + logger.ErrorContext(ctx, "failed to stop 
server", "error", err) + return err + } + + err = signoz.Stop(ctx) + if err != nil { + logger.ErrorContext(ctx, "failed to stop signoz", "error", err) + return err + } + + return nil +} diff --git a/cmd/config.go b/cmd/config.go new file mode 100644 index 000000000000..4e627c4f91d1 --- /dev/null +++ b/cmd/config.go @@ -0,0 +1,45 @@ +package cmd + +import ( + "context" + "fmt" + "log/slog" + "os" + + "github.com/SigNoz/signoz/pkg/config" + "github.com/SigNoz/signoz/pkg/config/envprovider" + "github.com/SigNoz/signoz/pkg/config/fileprovider" + "github.com/SigNoz/signoz/pkg/signoz" +) + +func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) { + config, err := signoz.NewConfig( + ctx, + config.ResolverConfig{ + Uris: []string{"env:"}, + ProviderFactories: []config.ProviderFactory{ + envprovider.NewFactory(), + fileprovider.NewFactory(), + }, + }, + flags, + ) + if err != nil { + return signoz.Config{}, err + } + + return config, nil +} + +func NewJWTSecret(_ context.Context, _ *slog.Logger) string { + jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") + if len(jwtSecret) == 0 { + fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!") + fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. 
This has dire consequences for the security of the application.") + fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.") + fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.") + fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.") + } + + return jwtSecret +} diff --git a/ee/query-service/.goreleaser.yaml b/cmd/enterprise/.goreleaser.yaml similarity index 98% rename from ee/query-service/.goreleaser.yaml rename to cmd/enterprise/.goreleaser.yaml index e3982597d11c..43d3e9d34025 100644 --- a/ee/query-service/.goreleaser.yaml +++ b/cmd/enterprise/.goreleaser.yaml @@ -11,7 +11,7 @@ before: builds: - id: signoz binary: bin/signoz - main: ee/query-service/main.go + main: cmd/enterprise env: - CGO_ENABLED=1 - >- diff --git a/ee/query-service/Dockerfile b/cmd/enterprise/Dockerfile similarity index 90% rename from ee/query-service/Dockerfile rename to cmd/enterprise/Dockerfile index 2c8a7cd809fe..798055afb1fa 100644 --- a/ee/query-service/Dockerfile +++ b/cmd/enterprise/Dockerfile @@ -16,4 +16,4 @@ COPY frontend/build/ /etc/signoz/web/ RUN chmod 755 /root /root/signoz -ENTRYPOINT ["./signoz"] \ No newline at end of file +ENTRYPOINT ["./signoz", "server"] diff --git a/ee/query-service/Dockerfile.integration b/cmd/enterprise/Dockerfile.integration similarity index 92% rename from ee/query-service/Dockerfile.integration rename to cmd/enterprise/Dockerfile.integration index 40a76a5bbca6..fe3eb583f7c3 100644 --- a/ee/query-service/Dockerfile.integration +++ b/cmd/enterprise/Dockerfile.integration @@ -23,6 +23,7 @@ COPY go.mod go.sum ./ RUN go mod download +COPY ./cmd/ ./cmd/ COPY ./ee/ ./ee/ COPY ./pkg/ ./pkg/ COPY ./templates/email /root/templates @@ -33,4 +34,4 @@ RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz RUN chmod 755 /root /root/signoz -ENTRYPOINT ["/root/signoz"] +ENTRYPOINT ["/root/signoz", "server"] diff --git 
a/ee/query-service/Dockerfile.multi-arch b/cmd/enterprise/Dockerfile.multi-arch similarity index 92% rename from ee/query-service/Dockerfile.multi-arch rename to cmd/enterprise/Dockerfile.multi-arch index 1bf8b8b43d08..776548fcb9da 100644 --- a/ee/query-service/Dockerfile.multi-arch +++ b/cmd/enterprise/Dockerfile.multi-arch @@ -17,4 +17,4 @@ COPY frontend/build/ /etc/signoz/web/ RUN chmod 755 /root /root/signoz -ENTRYPOINT ["./signoz"] +ENTRYPOINT ["./signoz", "server"] diff --git a/cmd/enterprise/main.go b/cmd/enterprise/main.go new file mode 100644 index 000000000000..e188635734d3 --- /dev/null +++ b/cmd/enterprise/main.go @@ -0,0 +1,18 @@ +package main + +import ( + "log/slog" + + "github.com/SigNoz/signoz/cmd" + "github.com/SigNoz/signoz/pkg/instrumentation" +) + +func main() { + // initialize logger for logging in the cmd/ package. This logger is different from the logger used in the application. + logger := instrumentation.NewLogger(instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}}) + + // register a list of commands to the root command + registerServer(cmd.RootCmd, logger) + + cmd.Execute(logger) +} diff --git a/cmd/enterprise/server.go b/cmd/enterprise/server.go new file mode 100644 index 000000000000..56344ea8b2eb --- /dev/null +++ b/cmd/enterprise/server.go @@ -0,0 +1,124 @@ +package main + +import ( + "context" + "log/slog" + "time" + + "github.com/SigNoz/signoz/cmd" + enterpriselicensing "github.com/SigNoz/signoz/ee/licensing" + "github.com/SigNoz/signoz/ee/licensing/httplicensing" + enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app" + "github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema" + "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" + enterprisezeus "github.com/SigNoz/signoz/ee/zeus" + "github.com/SigNoz/signoz/ee/zeus/httpzeus" + "github.com/SigNoz/signoz/pkg/analytics" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/licensing" + 
"github.com/SigNoz/signoz/pkg/modules/organization" + "github.com/SigNoz/signoz/pkg/signoz" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/version" + "github.com/SigNoz/signoz/pkg/zeus" + "github.com/spf13/cobra" +) + +func registerServer(parentCmd *cobra.Command, logger *slog.Logger) { + var flags signoz.DeprecatedFlags + + serverCmd := &cobra.Command{ + Use: "server", + Short: "Run the SigNoz server", + FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true}, + RunE: func(currCmd *cobra.Command, args []string) error { + config, err := cmd.NewSigNozConfig(currCmd.Context(), flags) + if err != nil { + return err + } + + return runServer(currCmd.Context(), config, logger) + }, + } + + flags.RegisterFlags(serverCmd) + parentCmd.AddCommand(serverCmd) +} + +func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) error { + // print the version + version.Info.PrettyPrint(config.Version) + + // add enterprise sqlstore factories to the community sqlstore factories + sqlstoreFactories := signoz.NewSQLStoreProviderFactories() + if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil { + logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err) + return err + } + + jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour) + + signoz, err := signoz.New( + ctx, + config, + jwt, + enterprisezeus.Config(), + httpzeus.NewProviderFactory(), + enterpriselicensing.Config(24*time.Hour, 3), + func(sqlstore sqlstore.SQLStore, zeus zeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] { + return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics) + }, + 
signoz.NewEmailingProviderFactories(), + signoz.NewCacheProviderFactories(), + signoz.NewWebProviderFactories(), + func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { + existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore) + if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil { + panic(err) + } + + return existingFactories + }, + sqlstoreFactories, + signoz.NewTelemetryStoreProviderFactories(), + ) + if err != nil { + logger.ErrorContext(ctx, "failed to create signoz", "error", err) + return err + } + + server, err := enterpriseapp.NewServer(config, signoz, jwt) + if err != nil { + logger.ErrorContext(ctx, "failed to create server", "error", err) + return err + } + + if err := server.Start(ctx); err != nil { + logger.ErrorContext(ctx, "failed to start server", "error", err) + return err + } + + signoz.Start(ctx) + + if err := signoz.Wait(ctx); err != nil { + logger.ErrorContext(ctx, "failed to start signoz", "error", err) + return err + } + + err = server.Stop(ctx) + if err != nil { + logger.ErrorContext(ctx, "failed to stop server", "error", err) + return err + } + + err = signoz.Stop(ctx) + if err != nil { + logger.ErrorContext(ctx, "failed to stop signoz", "error", err) + return err + } + + return nil +} diff --git a/cmd/root.go b/cmd/root.go new file mode 100644 index 000000000000..a080b3764505 --- /dev/null +++ b/cmd/root.go @@ -0,0 +1,33 @@ +package cmd + +import ( + "log/slog" + "os" + + "github.com/SigNoz/signoz/pkg/version" + "github.com/spf13/cobra" + "go.uber.org/zap" //nolint:depguard +) + +var RootCmd = &cobra.Command{ + Use: "signoz", + Short: "OpenTelemetry-Native Logs, Metrics and Traces in a single pane", + Version: version.Info.Version(), + SilenceUsage: true, + SilenceErrors: true, + CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true}, +} + +func Execute(logger *slog.Logger) { + zapLogger := newZapLogger() + 
zap.ReplaceGlobals(zapLogger) + defer func() { + _ = zapLogger.Sync() + }() + + err := RootCmd.Execute() + if err != nil { + logger.ErrorContext(RootCmd.Context(), "error running command", "error", err) + os.Exit(1) + } +} diff --git a/cmd/zap.go b/cmd/zap.go new file mode 100644 index 000000000000..4f043eaf6bcf --- /dev/null +++ b/cmd/zap.go @@ -0,0 +1,15 @@ +package cmd + +import ( + "go.uber.org/zap" //nolint:depguard + "go.uber.org/zap/zapcore" //nolint:depguard +) + +// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead. +func newZapLogger() *zap.Logger { + config := zap.NewProductionConfig() + config.EncoderConfig.TimeKey = "timestamp" + config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder + logger, _ := config.Build() + return logger +} diff --git a/deploy/docker-swarm/docker-compose.ha.yaml b/deploy/docker-swarm/docker-compose.ha.yaml index e9731b79b91f..9ad2cd558772 100644 --- a/deploy/docker-swarm/docker-compose.ha.yaml +++ b/deploy/docker-swarm/docker-compose.ha.yaml @@ -174,7 +174,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:v0.88.1 + image: signoz/signoz:v0.90.1 command: - --config=/root/config/prometheus.yml ports: @@ -194,6 +194,7 @@ services: - TELEMETRY_ENABLED=true - DEPLOYMENT_TYPE=docker-swarm - SIGNOZ_JWT_SECRET=secret + - DOT_METRICS_ENABLED=true healthcheck: test: - CMD @@ -206,7 +207,7 @@ services: retries: 3 otel-collector: !!merge <<: *db-depend - image: signoz/signoz-otel-collector:v0.128.0 + image: signoz/signoz-otel-collector:v0.128.2 command: - --config=/etc/otel-collector-config.yaml - --manager-config=/etc/manager-config.yaml @@ -230,7 +231,7 @@ services: - signoz schema-migrator: !!merge <<: *common - image: signoz/signoz-schema-migrator:v0.128.0 + image: signoz/signoz-schema-migrator:v0.128.2 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker-swarm/docker-compose.yaml 
b/deploy/docker-swarm/docker-compose.yaml index a3b7e678a2c3..1d9f518d8178 100644 --- a/deploy/docker-swarm/docker-compose.yaml +++ b/deploy/docker-swarm/docker-compose.yaml @@ -115,7 +115,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:v0.88.1 + image: signoz/signoz:v0.90.1 command: - --config=/root/config/prometheus.yml ports: @@ -135,6 +135,7 @@ services: - GODEBUG=netdns=go - TELEMETRY_ENABLED=true - DEPLOYMENT_TYPE=docker-swarm + - DOT_METRICS_ENABLED=true healthcheck: test: - CMD @@ -147,7 +148,7 @@ services: retries: 3 otel-collector: !!merge <<: *db-depend - image: signoz/signoz-otel-collector:v0.128.0 + image: signoz/signoz-otel-collector:v0.128.2 command: - --config=/etc/otel-collector-config.yaml - --manager-config=/etc/manager-config.yaml @@ -173,7 +174,7 @@ services: - signoz schema-migrator: !!merge <<: *common - image: signoz/signoz-schema-migrator:v0.128.0 + image: signoz/signoz-schema-migrator:v0.128.2 deploy: restart_policy: condition: on-failure diff --git a/deploy/docker-swarm/otel-collector-config.yaml b/deploy/docker-swarm/otel-collector-config.yaml index 93e07f9bd10d..48baa16b34c1 100644 --- a/deploy/docker-swarm/otel-collector-config.yaml +++ b/deploy/docker-swarm/otel-collector-config.yaml @@ -26,7 +26,7 @@ processors: detectors: [env, system] timeout: 2s signozspanmetrics/delta: - metrics_exporter: clickhousemetricswrite, signozclickhousemetrics + metrics_exporter: signozclickhousemetrics metrics_flush_interval: 60s latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] dimensions_cache_size: 100000 @@ -60,14 +60,6 @@ exporters: datasource: tcp://clickhouse:9000/signoz_traces low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING} use_new_schema: true - clickhousemetricswrite: - endpoint: tcp://clickhouse:9000/signoz_metrics - 
resource_to_telemetry_conversion: - enabled: true - disable_v2: true - clickhousemetricswrite/prometheus: - endpoint: tcp://clickhouse:9000/signoz_metrics - disable_v2: true signozclickhousemetrics: dsn: tcp://clickhouse:9000/signoz_metrics clickhouselogsexporter: @@ -89,11 +81,11 @@ service: metrics: receivers: [otlp] processors: [batch] - exporters: [clickhousemetricswrite, signozclickhousemetrics] + exporters: [signozclickhousemetrics] metrics/prometheus: receivers: [prometheus] processors: [batch] - exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics] + exporters: [signozclickhousemetrics] logs: receivers: [otlp] processors: [batch] diff --git a/deploy/docker/docker-compose.ha.yaml b/deploy/docker/docker-compose.ha.yaml index 4e660b7aa1c8..215b0eed073d 100644 --- a/deploy/docker/docker-compose.ha.yaml +++ b/deploy/docker/docker-compose.ha.yaml @@ -177,7 +177,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:${VERSION:-v0.88.1} + image: signoz/signoz:${VERSION:-v0.90.1} container_name: signoz command: - --config=/root/config/prometheus.yml @@ -197,6 +197,7 @@ services: - GODEBUG=netdns=go - TELEMETRY_ENABLED=true - DEPLOYMENT_TYPE=docker-standalone-amd + - DOT_METRICS_ENABLED=true healthcheck: test: - CMD @@ -210,7 +211,7 @@ services: # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing? 
otel-collector: !!merge <<: *db-depend - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2} container_name: signoz-otel-collector command: - --config=/etc/otel-collector-config.yaml @@ -236,7 +237,7 @@ services: condition: service_healthy schema-migrator-sync: !!merge <<: *common - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2} container_name: schema-migrator-sync command: - sync @@ -247,7 +248,7 @@ services: condition: service_healthy schema-migrator-async: !!merge <<: *db-depend - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2} container_name: schema-migrator-async command: - async diff --git a/deploy/docker/docker-compose.yaml b/deploy/docker/docker-compose.yaml index e4a8abb29d52..7bc052176f5c 100644 --- a/deploy/docker/docker-compose.yaml +++ b/deploy/docker/docker-compose.yaml @@ -110,7 +110,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:${VERSION:-v0.88.1} + image: signoz/signoz:${VERSION:-v0.90.1} container_name: signoz command: - --config=/root/config/prometheus.yml @@ -130,6 +130,7 @@ services: - GODEBUG=netdns=go - TELEMETRY_ENABLED=true - DEPLOYMENT_TYPE=docker-standalone-amd + - DOT_METRICS_ENABLED=true healthcheck: test: - CMD @@ -142,7 +143,7 @@ services: retries: 3 otel-collector: !!merge <<: *db-depend - image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.0} + image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.128.2} container_name: signoz-otel-collector command: - --config=/etc/otel-collector-config.yaml @@ -164,7 +165,7 @@ services: condition: service_healthy schema-migrator-sync: !!merge <<: *common - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0} + image: 
signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2} container_name: schema-migrator-sync command: - sync @@ -176,7 +177,7 @@ services: restart: on-failure schema-migrator-async: !!merge <<: *db-depend - image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.0} + image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.128.2} container_name: schema-migrator-async command: - async diff --git a/deploy/docker/otel-collector-config.yaml b/deploy/docker/otel-collector-config.yaml index a9e6db66044f..48baa16b34c1 100644 --- a/deploy/docker/otel-collector-config.yaml +++ b/deploy/docker/otel-collector-config.yaml @@ -26,7 +26,7 @@ processors: detectors: [env, system] timeout: 2s signozspanmetrics/delta: - metrics_exporter: clickhousemetricswrite, signozclickhousemetrics + metrics_exporter: signozclickhousemetrics metrics_flush_interval: 60s latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ] dimensions_cache_size: 100000 @@ -60,14 +60,6 @@ exporters: datasource: tcp://clickhouse:9000/signoz_traces low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING} use_new_schema: true - clickhousemetricswrite: - endpoint: tcp://clickhouse:9000/signoz_metrics - disable_v2: true - resource_to_telemetry_conversion: - enabled: true - clickhousemetricswrite/prometheus: - endpoint: tcp://clickhouse:9000/signoz_metrics - disable_v2: true signozclickhousemetrics: dsn: tcp://clickhouse:9000/signoz_metrics clickhouselogsexporter: @@ -89,11 +81,11 @@ service: metrics: receivers: [otlp] processors: [batch] - exporters: [clickhousemetricswrite, signozclickhousemetrics] + exporters: [signozclickhousemetrics] metrics/prometheus: receivers: [prometheus] processors: [batch] - exporters: [clickhousemetricswrite/prometheus, signozclickhousemetrics] + exporters: [signozclickhousemetrics] logs: receivers: [otlp] processors: [batch] diff --git a/docs/otel-demo-docs.md b/docs/otel-demo-docs.md index 
af989efbfdc5..2c0fc92ec522 100644 --- a/docs/otel-demo-docs.md +++ b/docs/otel-demo-docs.md @@ -16,7 +16,7 @@ __Table of Contents__ - [Prerequisites](#prerequisites-1) - [Install Helm Repo and Charts](#install-helm-repo-and-charts) - [Start the OpenTelemetry Demo App](#start-the-opentelemetry-demo-app-1) - - [Moniitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes) + - [Monitor with SigNoz (Kubernetes)](#monitor-with-signoz-kubernetes) - [What's next](#whats-next) diff --git a/ee/query-service/.dockerignore b/ee/query-service/.dockerignore deleted file mode 100644 index 9521c5060b39..000000000000 --- a/ee/query-service/.dockerignore +++ /dev/null @@ -1,4 +0,0 @@ -.vscode -README.md -signoz.db -bin \ No newline at end of file diff --git a/ee/query-service/main.go b/ee/query-service/main.go deleted file mode 100644 index c7b6b2d23e6d..000000000000 --- a/ee/query-service/main.go +++ /dev/null @@ -1,189 +0,0 @@ -package main - -import ( - "context" - "flag" - "os" - "time" - - "github.com/SigNoz/signoz/ee/licensing" - "github.com/SigNoz/signoz/ee/licensing/httplicensing" - "github.com/SigNoz/signoz/ee/query-service/app" - "github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema" - "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" - "github.com/SigNoz/signoz/ee/zeus" - "github.com/SigNoz/signoz/ee/zeus/httpzeus" - "github.com/SigNoz/signoz/pkg/analytics" - "github.com/SigNoz/signoz/pkg/config" - "github.com/SigNoz/signoz/pkg/config/envprovider" - "github.com/SigNoz/signoz/pkg/config/fileprovider" - "github.com/SigNoz/signoz/pkg/factory" - pkglicensing "github.com/SigNoz/signoz/pkg/licensing" - "github.com/SigNoz/signoz/pkg/modules/organization" - baseconst "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/sqlschema" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" - "github.com/SigNoz/signoz/pkg/types/authtypes" - 
"github.com/SigNoz/signoz/pkg/version" - pkgzeus "github.com/SigNoz/signoz/pkg/zeus" - - "go.uber.org/zap" - "go.uber.org/zap/zapcore" -) - -// Deprecated: Please use the logger from pkg/instrumentation. -func initZapLog() *zap.Logger { - config := zap.NewProductionConfig() - config.EncoderConfig.TimeKey = "timestamp" - config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder - logger, _ := config.Build() - return logger -} - -func main() { - var promConfigPath, skipTopLvlOpsPath string - - // disables rule execution but allows change to the rule definition - var disableRules bool - - // the url used to build link in the alert messages in slack and other systems - var ruleRepoURL string - var cluster string - - var useLogsNewSchema bool - var useTraceNewSchema bool - var cacheConfigPath, fluxInterval, fluxIntervalForTraceDetail string - var preferSpanMetrics bool - - var maxIdleConns int - var maxOpenConns int - var dialTimeout time.Duration - var gatewayUrl string - var useLicensesV3 bool - - // Deprecated - flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs") - // Deprecated - flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces") - // Deprecated - flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") - // Deprecated - flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") - // Deprecated - flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") - flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)") - // Deprecated - flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool.)") - // Deprecated - flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time.)") - // Deprecated - flag.DurationVar(&dialTimeout, "dial-timeout", 
5*time.Second, "(the maximum time to establish a connection.)") - // Deprecated - flag.StringVar(&ruleRepoURL, "rules.repo-url", baseconst.AlertHelpPage, "(host address used to build rule link in alert messages)") - // Deprecated - flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)") - flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)") - flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)") - flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')") - flag.StringVar(&gatewayUrl, "gateway-url", "", "(url to the gateway)") - // Deprecated - flag.BoolVar(&useLicensesV3, "use-licenses-v3", false, "use licenses_v3 schema for licenses") - flag.Parse() - - loggerMgr := initZapLog() - zap.ReplaceGlobals(loggerMgr) - defer loggerMgr.Sync() // flushes buffer, if any - ctx := context.Background() - - config, err := signoz.NewConfig(ctx, config.ResolverConfig{ - Uris: []string{"env:"}, - ProviderFactories: []config.ProviderFactory{ - envprovider.NewFactory(), - fileprovider.NewFactory(), - }, - }, signoz.DeprecatedFlags{ - MaxIdleConns: maxIdleConns, - MaxOpenConns: maxOpenConns, - DialTimeout: dialTimeout, - Config: promConfigPath, - FluxInterval: fluxInterval, - FluxIntervalForTraceDetail: fluxIntervalForTraceDetail, - Cluster: cluster, - GatewayUrl: gatewayUrl, - }) - if err != nil { - zap.L().Fatal("Failed to create config", zap.Error(err)) - } - - version.Info.PrettyPrint(config.Version) - - sqlStoreFactories := signoz.NewSQLStoreProviderFactories() - if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil { - zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err)) - } - - jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") 
- - if len(jwtSecret) == 0 { - zap.L().Warn("No JWT secret key is specified.") - } else { - zap.L().Info("JWT secret key set successfully.") - } - - jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour) - - signoz, err := signoz.New( - context.Background(), - config, - jwt, - zeus.Config(), - httpzeus.NewProviderFactory(), - licensing.Config(24*time.Hour, 3), - func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus, orgGetter organization.Getter, analytics analytics.Analytics) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] { - return httplicensing.NewProviderFactory(sqlstore, zeus, orgGetter, analytics) - }, - signoz.NewEmailingProviderFactories(), - signoz.NewCacheProviderFactories(), - signoz.NewWebProviderFactories(), - func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { - existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore) - if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil { - zap.L().Fatal("Failed to add postgressqlschema factory", zap.Error(err)) - } - - return existingFactories - }, - sqlStoreFactories, - signoz.NewTelemetryStoreProviderFactories(), - ) - if err != nil { - zap.L().Fatal("Failed to create signoz", zap.Error(err)) - } - - server, err := app.NewServer(config, signoz, jwt) - if err != nil { - zap.L().Fatal("Failed to create server", zap.Error(err)) - } - - if err := server.Start(ctx); err != nil { - zap.L().Fatal("Could not start server", zap.Error(err)) - } - - signoz.Start(ctx) - - if err := signoz.Wait(ctx); err != nil { - zap.L().Fatal("Failed to start signoz", zap.Error(err)) - } - - err = server.Stop(ctx) - if err != nil { - zap.L().Fatal("Failed to stop server", zap.Error(err)) - } - - err = signoz.Stop(ctx) - if err != nil { - zap.L().Fatal("Failed to stop signoz", zap.Error(err)) - } -} diff --git a/frontend/package.json b/frontend/package.json index f4707155990d..1dd5ea7c94f0 100644 --- 
a/frontend/package.json +++ b/frontend/package.json @@ -213,7 +213,9 @@ "eslint-plugin-simple-import-sort": "^7.0.0", "eslint-plugin-sonarjs": "^0.12.0", "husky": "^7.0.4", - "image-webpack-loader": "8.1.0", + "image-minimizer-webpack-plugin": "^4.0.0", + "imagemin": "^8.0.1", + "imagemin-svgo": "^10.0.1", "is-ci": "^3.0.1", "jest-styled-components": "^7.0.8", "lint-staged": "^12.5.0", @@ -230,6 +232,7 @@ "redux-mock-store": "1.5.4", "sass": "1.66.1", "sass-loader": "13.3.2", + "sharp": "^0.33.4", "ts-jest": "^27.1.5", "ts-node": "^10.2.1", "typescript-plugin-css-modules": "5.0.1", @@ -254,6 +257,7 @@ "cross-spawn": "7.0.5", "cookie": "^0.7.1", "serialize-javascript": "6.0.2", - "prismjs": "1.30.0" + "prismjs": "1.30.0", + "got": "11.8.5" } } diff --git a/frontend/public/Logos/argocd.svg b/frontend/public/Logos/argocd.svg new file mode 100644 index 000000000000..ef6eff54507a --- /dev/null +++ b/frontend/public/Logos/argocd.svg @@ -0,0 +1 @@ + diff --git a/frontend/public/Logos/azure-mysql.svg b/frontend/public/Logos/azure-mysql.svg new file mode 100644 index 000000000000..d1504cc763fa --- /dev/null +++ b/frontend/public/Logos/azure-mysql.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/cloudflare.svg b/frontend/public/Logos/cloudflare.svg new file mode 100644 index 000000000000..84ce0dc01650 --- /dev/null +++ b/frontend/public/Logos/cloudflare.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/dynamodb.svg b/frontend/public/Logos/dynamodb.svg new file mode 100644 index 000000000000..bd4f2c30f503 --- /dev/null +++ b/frontend/public/Logos/dynamodb.svg @@ -0,0 +1,18 @@ + + + + Icon-Architecture/64/Arch_Amazon-DynamoDB_64 + Created with Sketch. 
+ + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/public/Logos/elk.svg b/frontend/public/Logos/elk.svg new file mode 100644 index 000000000000..d240ad568c88 --- /dev/null +++ b/frontend/public/Logos/elk.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/frontend/public/Logos/external-api-monitoring.svg b/frontend/public/Logos/external-api-monitoring.svg new file mode 100644 index 000000000000..327eaa950649 --- /dev/null +++ b/frontend/public/Logos/external-api-monitoring.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/public/Logos/github-actions.svg b/frontend/public/Logos/github-actions.svg new file mode 100644 index 000000000000..2929e024f398 --- /dev/null +++ b/frontend/public/Logos/github-actions.svg @@ -0,0 +1 @@ + diff --git a/frontend/public/Logos/github.svg b/frontend/public/Logos/github.svg new file mode 100644 index 000000000000..3ff1cee7c9b8 --- /dev/null +++ b/frontend/public/Logos/github.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/frontend/public/Logos/http-monitoring.svg b/frontend/public/Logos/http-monitoring.svg new file mode 100644 index 000000000000..b495ddf19894 --- /dev/null +++ b/frontend/public/Logos/http-monitoring.svg @@ -0,0 +1 @@ +IETF-Badge-HTTP \ No newline at end of file diff --git a/frontend/public/Logos/jenkins.svg b/frontend/public/Logos/jenkins.svg new file mode 100644 index 000000000000..c4e7b880d36b --- /dev/null +++ b/frontend/public/Logos/jenkins.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/newrelic.svg b/frontend/public/Logos/newrelic.svg new file mode 100644 index 000000000000..e2d586bf7184 --- /dev/null +++ b/frontend/public/Logos/newrelic.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/openai.svg b/frontend/public/Logos/openai.svg new file mode 100644 index 000000000000..94eb50385dc9 --- /dev/null +++ b/frontend/public/Logos/openai.svg @@ -0,0 +1 @@ + \ No 
newline at end of file diff --git a/frontend/public/Logos/s3.svg b/frontend/public/Logos/s3.svg new file mode 100644 index 000000000000..cd203eaad6e8 --- /dev/null +++ b/frontend/public/Logos/s3.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/snowflake.svg b/frontend/public/Logos/snowflake.svg new file mode 100644 index 000000000000..f491c273133e --- /dev/null +++ b/frontend/public/Logos/snowflake.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/Logos/sns.svg b/frontend/public/Logos/sns.svg new file mode 100644 index 000000000000..6cb54adab20c --- /dev/null +++ b/frontend/public/Logos/sns.svg @@ -0,0 +1,18 @@ + + + + Icon-Architecture/64/Arch_AWS-Simple-Notification-Service_64 + Created with Sketch. + + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/public/Logos/sqs.svg b/frontend/public/Logos/sqs.svg new file mode 100644 index 000000000000..b19102943cd7 --- /dev/null +++ b/frontend/public/Logos/sqs.svg @@ -0,0 +1 @@ +AWS Simple Queue Service (SQS) \ No newline at end of file diff --git a/frontend/public/Logos/systemd.svg b/frontend/public/Logos/systemd.svg new file mode 100644 index 000000000000..4a9d9492ff72 --- /dev/null +++ b/frontend/public/Logos/systemd.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/frontend/public/Logos/wordpress.svg b/frontend/public/Logos/wordpress.svg new file mode 100644 index 000000000000..916903f23b17 --- /dev/null +++ b/frontend/public/Logos/wordpress.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/public/locales/en-GB/failedPayment.json b/frontend/public/locales/en-GB/failedPayment.json index a624e47c7db0..05ec0cc68d47 100644 --- a/frontend/public/locales/en-GB/failedPayment.json +++ b/frontend/public/locales/en-GB/failedPayment.json @@ -8,5 +8,6 @@ "actNow": "Act now to avoid any disruptions and continue where you left off.", "contactAdmin": "Contact your admin to proceed with the upgrade.", "continueMyJourney": "Settle 
your bill to continue", - "somethingWentWrong": "Something went wrong" + "somethingWentWrong": "Something went wrong", + "refreshPaymentStatus": "Refresh Status" } diff --git a/frontend/public/locales/en/failedPayment.json b/frontend/public/locales/en/failedPayment.json index a624e47c7db0..05ec0cc68d47 100644 --- a/frontend/public/locales/en/failedPayment.json +++ b/frontend/public/locales/en/failedPayment.json @@ -8,5 +8,6 @@ "actNow": "Act now to avoid any disruptions and continue where you left off.", "contactAdmin": "Contact your admin to proceed with the upgrade.", "continueMyJourney": "Settle your bill to continue", - "somethingWentWrong": "Something went wrong" + "somethingWentWrong": "Something went wrong", + "refreshPaymentStatus": "Refresh Status" } diff --git a/frontend/src/api/changelog/getChangelogByVersion.ts b/frontend/src/api/changelog/getChangelogByVersion.ts index 849c3a9f15ed..b9b3f1f9c935 100644 --- a/frontend/src/api/changelog/getChangelogByVersion.ts +++ b/frontend/src/api/changelog/getChangelogByVersion.ts @@ -1,14 +1,32 @@ import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import axios, { AxiosError } from 'axios'; import { ErrorResponse, SuccessResponse } from 'types/api'; -import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion'; +import { + ChangelogSchema, + DeploymentType, +} from 'types/api/changelog/getChangelogByVersion'; const getChangelogByVersion = async ( versionId: string, + deployment_type?: DeploymentType, ): Promise | ErrorResponse> => { try { + let queryParams = `filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText`; + + if ( + deployment_type && + Object.values(DeploymentType).includes(deployment_type) + ) { + const excludedDeploymentType = + deployment_type === DeploymentType.CLOUD_ONLY + ? 
DeploymentType.OSS_ONLY + : DeploymentType.CLOUD_ONLY; + + queryParams = `${queryParams}&populate[features][filters][deployment_type][$notIn]=${excludedDeploymentType}`; + } + const response = await axios.get(` - https://cms.signoz.cloud/api/release-changelogs?filters[version][$eq]=${versionId}&populate[features][sort]=sort_order:asc&populate[features][populate][media][fields]=id,ext,url,mime,alternativeText + https://cms.signoz.cloud/api/release-changelogs?${queryParams} `); if (!Array.isArray(response.data.data) || response.data.data.length === 0) { diff --git a/frontend/src/api/pipeline/post.ts b/frontend/src/api/pipeline/post.ts index c2e7ca275706..9c774481f6d1 100644 --- a/frontend/src/api/pipeline/post.ts +++ b/frontend/src/api/pipeline/post.ts @@ -1,24 +1,20 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; import { Pipeline } from 'types/api/pipeline/def'; import { Props } from 'types/api/pipeline/post'; -const post = async ( - props: Props, -): Promise | ErrorResponse> => { +const post = async (props: Props): Promise> => { try { const response = await axios.post('/logs/pipelines', props.data); return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, + httpStatusCode: response.status, + data: response.data.data, }; } catch (error) { - return ErrorResponseHandler(error as AxiosError); + ErrorResponseHandlerV2(error as AxiosError); } }; diff --git a/frontend/src/api/v3/licenses/post.ts b/frontend/src/api/v3/licenses/post.ts new file mode 100644 index 000000000000..4cd971acc0e8 --- /dev/null +++ b/frontend/src/api/v3/licenses/post.ts @@ -0,0 +1,24 @@ +import { ApiV3Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 
'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { PayloadProps, Props } from 'types/api/licenses/apply'; + +const apply = async ( + props: Props, +): Promise> => { + try { + const response = await axios.post('/licenses', { + key: props.key, + }); + + return { + httpStatusCode: response.status, + data: response.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default apply; diff --git a/frontend/src/api/v3/licenses/put.ts b/frontend/src/api/v3/licenses/put.ts index 4cd971acc0e8..d07ad428de78 100644 --- a/frontend/src/api/v3/licenses/put.ts +++ b/frontend/src/api/v3/licenses/put.ts @@ -2,15 +2,11 @@ import { ApiV3Instance as axios } from 'api'; import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; -import { PayloadProps, Props } from 'types/api/licenses/apply'; +import { PayloadProps } from 'types/api/licenses/apply'; -const apply = async ( - props: Props, -): Promise> => { +const apply = async (): Promise> => { try { - const response = await axios.post('/licenses', { - key: props.key, - }); + const response = await axios.put('/licenses'); return { httpStatusCode: response.status, diff --git a/frontend/src/components/ChangelogModal/ChangelogModal.styles.scss b/frontend/src/components/ChangelogModal/ChangelogModal.styles.scss index 8f6e99208048..3772a8717cac 100644 --- a/frontend/src/components/ChangelogModal/ChangelogModal.styles.scss +++ b/frontend/src/components/ChangelogModal/ChangelogModal.styles.scss @@ -60,6 +60,7 @@ &-ctas { display: flex; + margin-left: auto; & svg { font-size: 14px; @@ -110,7 +111,7 @@ &-content { max-height: calc(100vh - 300px); overflow-y: auto; - padding: 16px; + padding: 16px 16px 18px 16px; border: 1px solid var(--bg-slate-500, #161922); border-top-width: 0; border-bottom-width: 0; diff --git 
a/frontend/src/components/ChangelogModal/ChangelogModal.tsx b/frontend/src/components/ChangelogModal/ChangelogModal.tsx index 90757cd2c7b8..db755eb2598b 100644 --- a/frontend/src/components/ChangelogModal/ChangelogModal.tsx +++ b/frontend/src/components/ChangelogModal/ChangelogModal.tsx @@ -2,27 +2,62 @@ import './ChangelogModal.styles.scss'; import { CheckOutlined, CloseOutlined } from '@ant-design/icons'; import { Button, Modal } from 'antd'; +import updateUserPreference from 'api/v1/user/preferences/name/update'; import cx from 'classnames'; +import { USER_PREFERENCES } from 'constants/userPreferences'; import dayjs from 'dayjs'; +import { useGetTenantLicense } from 'hooks/useGetTenantLicense'; import { ChevronsDown, ScrollText } from 'lucide-react'; import { useAppContext } from 'providers/App/App'; import { useCallback, useEffect, useRef, useState } from 'react'; +import { useMutation } from 'react-query'; +import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion'; +import { UserPreference } from 'types/api/preferences/preference'; import ChangelogRenderer from './components/ChangelogRenderer'; interface Props { + changelog: ChangelogSchema; onClose: () => void; } -function ChangelogModal({ onClose }: Props): JSX.Element { +function ChangelogModal({ changelog, onClose }: Props): JSX.Element { const [hasScroll, setHasScroll] = useState(false); const changelogContentSectionRef = useRef(null); - const { changelog } = useAppContext(); + const { userPreferences, updateUserPreferenceInContext } = useAppContext(); const formattedReleaseDate = dayjs(changelog?.release_date).format( 'MMMM D, YYYY', ); + const { isCloudUser } = useGetTenantLicense(); + + const seenChangelogVersion = userPreferences?.find( + (preference) => + preference.name === USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION, + )?.value as string; + + const { mutate: updateUserPreferenceMutation } = useMutation( + updateUserPreference, + ); + + useEffect(() => { + // Update the seen 
version + if (seenChangelogVersion !== changelog.version) { + const version = { + name: USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION, + value: changelog.version, + }; + updateUserPreferenceInContext(version as UserPreference); + updateUserPreferenceMutation(version); + } + }, [ + seenChangelogVersion, + changelog.version, + updateUserPreferenceMutation, + updateUserPreferenceInContext, + ]); + const checkScroll = useCallback((): void => { if (changelogContentSectionRef.current) { const { @@ -89,18 +124,20 @@ function ChangelogModal({ onClose }: Props): JSX.Element { {changelog.features.length > 1 ? 'features' : 'feature'} )} -

- - -
+ {!isCloudUser && ( +
+ + +
+ )} {changelog && (
; }, ); - // mock useAppContext jest.mock('providers/App/App', () => ({ - useAppContext: jest.fn(() => ({ changelog: mockChangelog })), + useAppContext: jest.fn(() => ({ + updateUserPreferenceInContext: jest.fn(), + userPreferences: [ + { + name: USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION, + value: 'v1.0.0', + }, + ], + })), })); +function renderChangelog(onClose: () => void = jest.fn()): void { + render( + + + , + ); +} + describe('ChangelogModal', () => { it('renders modal with changelog data', () => { - render(); + renderChangelog(); expect( screen.getByText('What’s New ⎯ Changelog : June 10, 2025'), ).toBeInTheDocument(); @@ -48,14 +81,14 @@ describe('ChangelogModal', () => { it('calls onClose when Skip for now is clicked', () => { const onClose = jest.fn(); - render(); + renderChangelog(onClose); fireEvent.click(screen.getByText('Skip for now')); expect(onClose).toHaveBeenCalled(); }); it('opens migration docs when Update my workspace is clicked', () => { window.open = jest.fn(); - render(); + renderChangelog(); fireEvent.click(screen.getByText('Update my workspace')); expect(window.open).toHaveBeenCalledWith( 'https://github.com/SigNoz/signoz/releases', @@ -65,7 +98,7 @@ describe('ChangelogModal', () => { }); it('scrolls for more when Scroll for more is clicked', () => { - render(); + renderChangelog(); const scrollBtn = screen.getByTestId('scroll-more-btn'); const contentDiv = screen.getByTestId('changelog-content'); if (contentDiv) { diff --git a/frontend/src/components/ChangelogModal/__test__/ChangelogRenderer.test.tsx b/frontend/src/components/ChangelogModal/__test__/ChangelogRenderer.test.tsx index 4667c4116260..ed844f89b4b0 100644 --- a/frontend/src/components/ChangelogModal/__test__/ChangelogRenderer.test.tsx +++ b/frontend/src/components/ChangelogModal/__test__/ChangelogRenderer.test.tsx @@ -3,6 +3,10 @@ /* eslint-disable @typescript-eslint/explicit-function-return-type */ import { render, screen } from '@testing-library/react'; +import { + 
ChangelogSchema, + DeploymentType, +} from 'types/api/changelog/getChangelogByVersion'; import ChangelogRenderer from '../components/ChangelogRenderer'; @@ -15,23 +19,19 @@ jest.mock( }, ); -const mockChangelog = { +const mockChangelog: ChangelogSchema = { id: 1, - documentId: 'changelog-doc-1', - version: '1.0.0', + documentId: 'doc-1', + version: 'v1.0.0', createdAt: '2025-06-09T12:00:00Z', + updatedAt: '2025-06-09T13:00:00Z', + publishedAt: '2025-06-09T14:00:00Z', release_date: '2025-06-10', features: [ { id: 1, - documentId: '1', title: 'Feature 1', description: 'Description for feature 1', - sort_order: 1, - createdAt: '', - updatedAt: '', - publishedAt: '', - deployment_type: 'All', media: { id: 1, documentId: 'doc1', @@ -40,11 +40,15 @@ const mockChangelog = { mime: 'image/webp', alternativeText: null, }, + documentId: 'feature-1', + sort_order: 1, + createdAt: '2025-06-09T12:00:00Z', + updatedAt: '2025-06-09T13:00:00Z', + publishedAt: '2025-06-09T14:00:00Z', + deployment_type: DeploymentType.ALL, }, ], bug_fixes: 'Bug fix details', - updatedAt: '2025-06-09T12:00:00Z', - publishedAt: '2025-06-09T12:00:00Z', maintenance: 'Maintenance details', }; diff --git a/frontend/src/components/Logs/TableView/config.ts b/frontend/src/components/Logs/TableView/config.ts index e5571828e1d1..6723a8c95919 100644 --- a/frontend/src/components/Logs/TableView/config.ts +++ b/frontend/src/components/Logs/TableView/config.ts @@ -15,6 +15,7 @@ export function getDefaultCellStyle(isDarkMode?: boolean): CSSProperties { letterSpacing: '-0.07px', marginBottom: '0px', minWidth: '10rem', + width: '10rem', }; } diff --git a/frontend/src/components/Logs/TableView/useTableView.tsx b/frontend/src/components/Logs/TableView/useTableView.tsx index 9971f6d775ae..8b97f6cfd248 100644 --- a/frontend/src/components/Logs/TableView/useTableView.tsx +++ b/frontend/src/components/Logs/TableView/useTableView.tsx @@ -47,6 +47,14 @@ export const useTableView = (props: UseTableViewProps): 
UseTableViewResult => { const { formatTimezoneAdjustedTimestamp } = useTimezone(); + const bodyColumnStyle = useMemo( + () => ({ + ...defaultTableStyle, + ...(fields.length > 2 ? { width: '50rem' } : {}), + }), + [fields.length], + ); + const columns: ColumnsType> = useMemo(() => { const fieldColumns: ColumnsType> = fields .filter((e) => !['id', 'body', 'timestamp'].includes(e.name)) @@ -136,7 +144,7 @@ export const useTableView = (props: UseTableViewProps): UseTableViewResult => { field: string | number, ): ColumnTypeRender> => ({ props: { - style: defaultTableStyle, + style: bodyColumnStyle, }, children: ( { linesPerRow, fontSize, formatTimezoneAdjustedTimestamp, + bodyColumnStyle, ]); return { columns, dataSource: flattenLogData }; diff --git a/frontend/src/components/RefreshPaymentStatus/RefreshPaymentStatus.tsx b/frontend/src/components/RefreshPaymentStatus/RefreshPaymentStatus.tsx new file mode 100644 index 000000000000..96073e2fdfab --- /dev/null +++ b/frontend/src/components/RefreshPaymentStatus/RefreshPaymentStatus.tsx @@ -0,0 +1,56 @@ +import { Button, Tooltip } from 'antd'; +import refreshPaymentStatus from 'api/v3/licenses/put'; +import cx from 'classnames'; +import { RefreshCcw } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; +import { useState } from 'react'; +import { useTranslation } from 'react-i18next'; + +function RefreshPaymentStatus({ + btnShape, + type, +}: { + btnShape?: 'default' | 'round' | 'circle'; + type?: 'button' | 'text' | 'tooltip'; +}): JSX.Element { + const { t } = useTranslation(['failedPayment']); + const { activeLicenseRefetch } = useAppContext(); + + const [isLoading, setIsLoading] = useState(false); + + const handleRefreshPaymentStatus = async (): Promise => { + setIsLoading(true); + + try { + await refreshPaymentStatus(); + + await Promise.all([activeLicenseRefetch()]); + } catch (e) { + console.error(e); + } + setIsLoading(false); + }; + + return ( + + + + + + ); +} 
+RefreshPaymentStatus.defaultProps = { + btnShape: 'default', + type: 'button', +}; + +export default RefreshPaymentStatus; diff --git a/frontend/src/components/SignozRadioGroup/SignozRadioGroup.styles.scss b/frontend/src/components/SignozRadioGroup/SignozRadioGroup.styles.scss index 583ebec2346a..5c2a7b56f93e 100644 --- a/frontend/src/components/SignozRadioGroup/SignozRadioGroup.styles.scss +++ b/frontend/src/components/SignozRadioGroup/SignozRadioGroup.styles.scss @@ -1,6 +1,12 @@ .signoz-radio-group.ant-radio-group { color: var(--text-vanilla-400); + &.ant-radio-group-disabled { + opacity: 0.5; + pointer-events: none; + cursor: not-allowed; + } + .view-title { display: flex; gap: var(--margin-2); @@ -37,6 +43,22 @@ // Light mode styles .lightMode { .signoz-radio-group { + &.ant-radio-group-disabled { + .tab, + .selected_view { + background: var(--bg-vanilla-200) !important; + border-color: var(--bg-vanilla-400) !important; + color: var(--text-ink-400) !important; + } + + .tab:hover, + .selected_view:hover { + background: var(--bg-vanilla-200) !important; + border-color: var(--bg-vanilla-400) !important; + color: var(--text-ink-400) !important; + } + } + .tab { background: var(--bg-vanilla-100); } diff --git a/frontend/src/components/SignozRadioGroup/SignozRadioGroup.tsx b/frontend/src/components/SignozRadioGroup/SignozRadioGroup.tsx index 71a1f255e283..3bb789e7749c 100644 --- a/frontend/src/components/SignozRadioGroup/SignozRadioGroup.tsx +++ b/frontend/src/components/SignozRadioGroup/SignozRadioGroup.tsx @@ -13,6 +13,7 @@ interface SignozRadioGroupProps { options: Option[]; onChange: (e: RadioChangeEvent) => void; className?: string; + disabled?: boolean; } function SignozRadioGroup({ @@ -20,6 +21,7 @@ function SignozRadioGroup({ options, onChange, className = '', + disabled = false, }: SignozRadioGroupProps): JSX.Element { return ( {options.map((option) => ( ({ updatedOrder: getOrder(params.get(urlKey.order)), @@ -89,6 +91,7 @@ function AllErrors(): 
JSX.Element { getUpdatedPageSize: getUpdatePageSize(params.get(urlKey.pageSize)), getUpdatedExceptionType: getFilterString(params.get(urlKey.exceptionType)), getUpdatedServiceName: getFilterString(params.get(urlKey.serviceName)), + getUpdatedCompositeQuery: getFilterString(params.get(urlKey.compositeQuery)), }), [params], ); @@ -203,6 +206,7 @@ function AllErrors(): JSX.Element { offset: getUpdatedOffset, orderParam: getUpdatedParams, pageSize: getUpdatedPageSize, + compositeQuery: getUpdatedCompositeQuery, }; if (exceptionFilterValue && exceptionFilterValue !== 'undefined') { @@ -222,6 +226,7 @@ function AllErrors(): JSX.Element { getUpdatedPageSize, getUpdatedParams, getUpdatedServiceName, + getUpdatedCompositeQuery, pathname, updatedOrder, ], @@ -430,6 +435,7 @@ function AllErrors(): JSX.Element { serviceName: getFilterString(params.get(urlKey.serviceName)), exceptionType: getFilterString(params.get(urlKey.exceptionType)), }); + const compositeQuery = params.get(urlKey.compositeQuery) || ''; history.replace( `${pathname}?${createQueryParams({ order: updatedOrder, @@ -438,6 +444,7 @@ function AllErrors(): JSX.Element { pageSize, exceptionType, serviceName, + compositeQuery, })}`, ); } diff --git a/frontend/src/container/AllError/utils.ts b/frontend/src/container/AllError/utils.ts index c13cd8fd52d6..c3266d0031d3 100644 --- a/frontend/src/container/AllError/utils.ts +++ b/frontend/src/container/AllError/utils.ts @@ -18,6 +18,7 @@ export const urlKey = { pageSize: 'pageSize', exceptionType: 'exceptionType', serviceName: 'serviceName', + compositeQuery: 'compositeQuery', }; export const isOrderParams = (orderBy: string | null): orderBy is OrderBy => diff --git a/frontend/src/container/AppLayout/AppLayout.styles.scss b/frontend/src/container/AppLayout/AppLayout.styles.scss index b314ab1a973a..a34929618bf1 100644 --- a/frontend/src/container/AppLayout/AppLayout.styles.scss +++ b/frontend/src/container/AppLayout/AppLayout.styles.scss @@ -4,6 +4,21 @@ 
.app-banner-wrapper { position: relative; width: 100%; + + .refresh-payment-status { + display: inline-flex; + align-items: center; + gap: 4px; + margin-left: 4px; + + .refresh-payment-status-btn-wrapper { + display: inline-block; + + &:hover { + text-decoration: underline; + } + } + } } .app-layout { @@ -12,24 +27,24 @@ width: 100%; &.isWorkspaceRestricted { - height: calc(100% - 32px); + height: calc(100% - 48px); // same styles as its either trial expired or payment failed &.isTrialExpired { - height: calc(100% - 64px); + height: calc(100% - 96px); } &.isPaymentFailed { - height: calc(100% - 64px); + height: calc(100% - 96px); } } &.isTrialExpired { - height: calc(100% - 32px); + height: calc(100% - 48px); } &.isPaymentFailed { - height: calc(100% - 32px); + height: calc(100% - 48px); } .app-content { @@ -196,5 +211,5 @@ .workspace-restricted-banner, .trial-expiry-banner, .payment-failed-banner { - height: 32px; + height: 48px; } diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx index 9938c4800004..b0022d032141 100644 --- a/frontend/src/container/AppLayout/index.tsx +++ b/frontend/src/container/AppLayout/index.tsx @@ -13,8 +13,10 @@ import manageCreditCardApi from 'api/v1/portal/create'; import getUserLatestVersion from 'api/v1/version/getLatestVersion'; import getUserVersion from 'api/v1/version/getVersion'; import cx from 'classnames'; +import ChangelogModal from 'components/ChangelogModal/ChangelogModal'; import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway'; import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; +import RefreshPaymentStatus from 'components/RefreshPaymentStatus/RefreshPaymentStatus'; import { Events } from 'constants/events'; import { FeatureKeys } from 'constants/features'; import { LOCALSTORAGE } from 'constants/localStorage'; @@ -26,6 +28,7 @@ import dayjs from 'dayjs'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { 
useGetTenantLicense } from 'hooks/useGetTenantLicense'; import { useNotifications } from 'hooks/useNotifications'; +import useTabVisibility from 'hooks/useTabFocus'; import history from 'lib/history'; import { isNull } from 'lodash-es'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; @@ -54,7 +57,10 @@ import { } from 'types/actions/app'; import { ErrorResponse, SuccessResponse, SuccessResponseV2 } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; -import { ChangelogSchema } from 'types/api/changelog/getChangelogByVersion'; +import { + ChangelogSchema, + DeploymentType, +} from 'types/api/changelog/getChangelogByVersion'; import APIError from 'types/api/error'; import { LicenseEvent, @@ -86,6 +92,9 @@ function AppLayout(props: AppLayoutProps): JSX.Element { featureFlagsFetchError, userPreferences, updateChangelog, + toggleChangelogModal, + showChangelogModal, + changelog, } = useAppContext(); const { notifications } = useNotifications(); @@ -138,6 +147,17 @@ function AppLayout(props: AppLayoutProps): JSX.Element { const { isCloudUser: isCloudUserVal } = useGetTenantLicense(); + const changelogForTenant = isCloudUserVal + ? 
DeploymentType.CLOUD_ONLY + : DeploymentType.OSS_ONLY; + + const seenChangelogVersion = userPreferences?.find( + (preference) => + preference.name === USER_PREFERENCES.LAST_SEEN_CHANGELOG_VERSION, + )?.value as string; + + const isVisible = useTabVisibility(); + const [ getUserVersionResponse, getUserLatestVersionResponse, @@ -155,12 +175,43 @@ function AppLayout(props: AppLayoutProps): JSX.Element { }, { queryFn: (): Promise | ErrorResponse> => - getChangelogByVersion(latestVersion), - queryKey: ['getChangelogByVersion', latestVersion], - enabled: isLoggedIn && !isCloudUserVal && Boolean(latestVersion), + getChangelogByVersion(latestVersion, changelogForTenant), + queryKey: ['getChangelogByVersion', latestVersion, changelogForTenant], + enabled: isLoggedIn && Boolean(latestVersion), }, ]); + useEffect(() => { + // refetch the changelog only when the current tab becomes active + there isn't an active request + if (!getChangelogByVersionResponse.isLoading && isVisible) { + getChangelogByVersionResponse.refetch(); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isVisible]); + + useEffect(() => { + let timer: ReturnType; + if ( + isCloudUserVal && + Boolean(latestVersion) && + latestVersion !== seenChangelogVersion + ) { + // Automatically open the changelog modal for cloud users after 1s, if they've not seen this version before. + timer = setTimeout(() => { + toggleChangelogModal(); + }, 1000); + } + + return (): void => { + clearInterval(timer); + }; + }, [ + isCloudUserVal, + latestVersion, + seenChangelogVersion, + toggleChangelogModal, + ]); + useEffect(() => { if (getUserLatestVersionResponse.status === 'idle' && isLoggedIn) { getUserLatestVersionResponse.refetch(); @@ -615,6 +666,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element { upgrade to continue using SigNoz features. + + {' '} + | Already upgraded? + ) : ( 'Please contact your administrator for upgrading to a paid plan.' 
@@ -641,6 +696,10 @@ function AppLayout(props: AppLayoutProps): JSX.Element { pay the bill to continue using SigNoz features. + + {' '} + | Already paid? + ) : ( ' Please contact your administrator to pay the bill.' @@ -683,6 +742,9 @@ function AppLayout(props: AppLayoutProps): JSX.Element { {showAddCreditCardModal && } + {showChangelogModal && changelog && ( + + )} ); } diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index e2f4bc847fc0..04118a9accdc 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -20,6 +20,7 @@ import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage'; import logEvent from 'api/common/logEvent'; import updateCreditCardApi from 'api/v1/checkout/create'; import manageCreditCardApi from 'api/v1/portal/create'; +import RefreshPaymentStatus from 'components/RefreshPaymentStatus/RefreshPaymentStatus'; import Spinner from 'components/Spinner'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; @@ -440,14 +441,15 @@ export default function BillingContainer(): JSX.Element { ) : null} - + @@ -463,6 +465,8 @@ export default function BillingContainer(): JSX.Element { ? 
t('manage_billing') : t('upgrade_plan')} + + diff --git a/frontend/src/container/CreateAlertChannels/CreateAlertChannels.styles.scss b/frontend/src/container/CreateAlertChannels/CreateAlertChannels.styles.scss index 07fab33150b0..669df6535faa 100644 --- a/frontend/src/container/CreateAlertChannels/CreateAlertChannels.styles.scss +++ b/frontend/src/container/CreateAlertChannels/CreateAlertChannels.styles.scss @@ -13,3 +13,14 @@ margin-bottom: 16px; } } + +.lightMode { + .create-alert-channels-container { + background: var(--bg-vanilla-100); + border-color: var(--bg-vanilla-300); + + .form-alert-channels-title { + color: var(--bg-ink-100); + } + } +} diff --git a/frontend/src/container/Home/Home.tsx b/frontend/src/container/Home/Home.tsx index cf133ec84cd3..0870766e3979 100644 --- a/frontend/src/container/Home/Home.tsx +++ b/frontend/src/container/Home/Home.tsx @@ -2,7 +2,7 @@ import './Home.styles.scss'; import { Color } from '@signozhq/design-tokens'; -import { Alert, Button, Popover } from 'antd'; +import { Button, Popover } from 'antd'; import logEvent from 'api/common/logEvent'; import { HostListPayload } from 'api/infraMonitoring/getHostLists'; import { K8sPodsListPayload } from 'api/infraMonitoring/getK8sPodsList'; @@ -320,8 +320,6 @@ export default function Home(): JSX.Element { } }, [hostData, k8sPodsData, handleUpdateChecklistDoneItem]); - const { isCloudUser, isEnterpriseSelfHostedUser } = useGetTenantLicense(); - useEffect(() => { logEvent('Homepage: Visited', {}); }, []); @@ -706,33 +704,6 @@ export default function Home(): JSX.Element { )}
- {(isCloudUser || isEnterpriseSelfHostedUser) && ( -
-
- - We're updating our metric ingestion processing pipeline. - Currently, metric names and labels are normalized to replace dots and - other special characters with underscores (_). This restriction will - soon be removed. Learn more{' '} - - here - - . - - } - type="warning" - showIcon - /> -
-
- )} - {!isWelcomeChecklistSkipped && !loadingUserPreferences && ( diff --git a/frontend/src/container/IngestionSettings/MultiIngestionSettings.tsx b/frontend/src/container/IngestionSettings/MultiIngestionSettings.tsx index ef95b76afbbc..f82a8e5a1d72 100644 --- a/frontend/src/container/IngestionSettings/MultiIngestionSettings.tsx +++ b/frontend/src/container/IngestionSettings/MultiIngestionSettings.tsx @@ -38,6 +38,7 @@ import dayjs from 'dayjs'; import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData'; import { useGetAllIngestionsKeys } from 'hooks/IngestionKeys/useGetAllIngestionKeys'; import useDebouncedFn from 'hooks/useDebouncedFunction'; +import { useGetTenantLicense } from 'hooks/useGetTenantLicense'; import { useNotifications } from 'hooks/useNotifications'; import { isNil, isUndefined } from 'lodash-es'; import { @@ -167,6 +168,8 @@ function MultiIngestionSettings(): JSX.Element { const [totalIngestionKeys, setTotalIngestionKeys] = useState(0); + const { isEnterpriseSelfHostedUser } = useGetTenantLicense(); + const [ hasCreateLimitForIngestionKeyError, setHasCreateLimitForIngestionKeyError, @@ -293,7 +296,7 @@ function MultiIngestionSettings(): JSX.Element { isLoading: isLoadingDeploymentsData, isFetching: isFetchingDeploymentsData, isError: isErrorDeploymentsData, - } = useGetDeploymentsData(true); + } = useGetDeploymentsData(!isEnterpriseSelfHostedUser); const { mutate: createIngestionKey, @@ -1308,7 +1311,8 @@ function MultiIngestionSettings(): JSX.Element { {!isErrorDeploymentsData && !isLoadingDeploymentsData && - !isFetchingDeploymentsData && ( + !isFetchingDeploymentsData && + deploymentsData && (
Ingestion URL
diff --git a/frontend/src/container/Licenses/ApplyLicenseForm.tsx b/frontend/src/container/Licenses/ApplyLicenseForm.tsx index 38d774f549f6..48e2b454fb7e 100644 --- a/frontend/src/container/Licenses/ApplyLicenseForm.tsx +++ b/frontend/src/container/Licenses/ApplyLicenseForm.tsx @@ -1,5 +1,5 @@ import { Button, Form, Input } from 'antd'; -import apply from 'api/v3/licenses/put'; +import apply from 'api/v3/licenses/post'; import { useNotifications } from 'hooks/useNotifications'; import { useState } from 'react'; import { useTranslation } from 'react-i18next'; diff --git a/frontend/src/container/LogDetailedView/BodyTitleRenderer.styles.ts b/frontend/src/container/LogDetailedView/BodyTitleRenderer.styles.ts index f5d157ee2e54..1f7502b92555 100644 --- a/frontend/src/container/LogDetailedView/BodyTitleRenderer.styles.ts +++ b/frontend/src/container/LogDetailedView/BodyTitleRenderer.styles.ts @@ -1,6 +1,9 @@ import styled from 'styled-components'; export const TitleWrapper = styled.span` + user-select: text !important; + cursor: text; + .hover-reveal { visibility: hidden; } diff --git a/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx b/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx index 856c2b01dc28..8eb1b9aa8c61 100644 --- a/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx +++ b/frontend/src/container/LogDetailedView/BodyTitleRenderer.tsx @@ -71,8 +71,13 @@ function BodyTitleRenderer({ onClick: onClickHandler, }; + const handleTextSelection = (e: React.MouseEvent): void => { + // Prevent tree node click when user is trying to select text + e.stopPropagation(); + }; + return ( - + diff --git a/frontend/src/container/LogDetailedView/TableView/TableViewActions.styles.scss b/frontend/src/container/LogDetailedView/TableView/TableViewActions.styles.scss index f5a45ef41655..5f5e94c74bb3 100644 --- a/frontend/src/container/LogDetailedView/TableView/TableViewActions.styles.scss +++ 
b/frontend/src/container/LogDetailedView/TableView/TableViewActions.styles.scss @@ -11,6 +11,18 @@ } } +.selectable-tree { + .ant-tree-node-content-wrapper { + user-select: text !important; + cursor: text !important; + } + + .ant-tree-title { + user-select: text !important; + cursor: text !important; + } +} + .table-view-actions-content { .ant-popover-inner { border-radius: 4px; diff --git a/frontend/src/container/LogDetailedView/TableView/TableViewActions.tsx b/frontend/src/container/LogDetailedView/TableView/TableViewActions.tsx index f09e9a5c5771..af0a75e78517 100644 --- a/frontend/src/container/LogDetailedView/TableView/TableViewActions.tsx +++ b/frontend/src/container/LogDetailedView/TableView/TableViewActions.tsx @@ -53,7 +53,12 @@ const convert = new Convert(); // Memoized Tree Component const MemoizedTree = React.memo<{ treeData: any[] }>(({ treeData }) => ( - + )); MemoizedTree.displayName = 'MemoizedTree'; diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx index 6712c609c2ee..1c591c1ef06b 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/TableRow.tsx @@ -57,6 +57,10 @@ export default function TableRow({ [currentLog, handleSetActiveContextLog], ); + const hasSingleColumn = + tableColumns.filter((column) => column.key !== 'state-indicator').length === + 1; + return ( <> {tableColumns.map((column) => { @@ -80,9 +84,11 @@ export default function TableRow({ {cloneElement(children, props)} diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx index 9aa982abe700..339bdf135b12 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/index.tsx @@ -135,6 +135,7 @@ const InfinityTable 
= forwardRef( fontSize={tableViewProps?.fontSize} // eslint-disable-next-line react/jsx-props-no-spreading {...(isDragColumn && { className: 'dragHandler' })} + columnKey={column.key as string} > {(column.title as string).replace(/^\w/, (c) => c.toUpperCase())} diff --git a/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts b/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts index a22e7a4cc0c6..5d5d7477339d 100644 --- a/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts +++ b/frontend/src/container/LogsExplorerList/InfinityTableView/styles.ts @@ -8,13 +8,25 @@ interface TableHeaderCellStyledProps { $isDragColumn: boolean; $isDarkMode: boolean; $isLogIndicator?: boolean; + $hasSingleColumn?: boolean; fontSize?: FontSize; + columnKey?: string; } export const TableStyled = styled.table` width: 100%; `; +const getTimestampColumnWidth = ( + columnKey?: string, + $hasSingleColumn?: boolean, +): string => + columnKey === 'timestamp' + ? $hasSingleColumn + ? 'width: 100%;' + : 'width: 10%;' + : ''; + export const TableCellStyled = styled.td` padding: 0.5rem; ${({ fontSize }): string => @@ -29,9 +41,12 @@ export const TableCellStyled = styled.td` props.$isDarkMode ? 'inherit' : themeColors.whiteCream}; ${({ $isLogIndicator }): string => - $isLogIndicator ? 'padding: 0 0 0 8px;width: 15px;' : ''} + $isLogIndicator ? 'padding: 0 0 0 8px;width: 1%;' : ''} color: ${(props): string => props.$isDarkMode ? themeColors.white : themeColors.bckgGrey}; + + ${({ columnKey, $hasSingleColumn }): string => + getTimestampColumnWidth(columnKey, $hasSingleColumn)} `; export const TableRowStyled = styled.tr<{ @@ -86,7 +101,11 @@ export const TableHeaderCellStyled = styled.th` : fontSize === FontSize.LARGE ? `font-size:14px; line-height:24px; padding: 0.5rem;` : ``}; - ${({ $isLogIndicator }): string => ($isLogIndicator ? 'padding: 0px; ' : '')} + ${({ $isLogIndicator }): string => + $isLogIndicator ? 
'padding: 0px; width: 1%;' : ''} color: ${(props): string => props.$isDarkMode ? 'var(--bg-vanilla-100, #fff)' : themeColors.bckgGrey}; + + ${({ columnKey, $hasSingleColumn }): string => + getTimestampColumnWidth(columnKey, $hasSingleColumn)} `; diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx index 9241f8293829..548b59848bbf 100644 --- a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx +++ b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/ApDexMetricsApplication.tsx @@ -3,9 +3,11 @@ import { useGetMetricMeta } from 'hooks/apDex/useGetMetricMeta'; import useErrorNotification from 'hooks/useErrorNotification'; import { useParams } from 'react-router-dom'; +import { FeatureKeys } from '../../../../../constants/features'; +import { useAppContext } from '../../../../../providers/App/App'; +import { WidgetKeys } from '../../../constant'; import { IServiceName } from '../../types'; import ApDexMetrics from './ApDexMetrics'; -import { metricMeta } from './constants'; import { ApDexDataSwitcherProps } from './types'; function ApDexMetricsApplication({ @@ -18,7 +20,19 @@ function ApDexMetricsApplication({ const { servicename: encodedServiceName } = useParams(); const servicename = decodeURIComponent(encodedServiceName); - const { data, isLoading, error } = useGetMetricMeta(metricMeta, servicename); + const { featureFlags } = useAppContext(); + const dotMetricsEnabled = + featureFlags?.find((flag) => flag.name === FeatureKeys.DOT_METRICS_ENABLED) + ?.active || false; + + const signozLatencyBucketMetrics = dotMetricsEnabled + ? 
WidgetKeys.Signoz_latency_bucket + : WidgetKeys.Signoz_latency_bucket_norm; + + const { data, isLoading, error } = useGetMetricMeta( + signozLatencyBucketMetrics, + servicename, + ); useErrorNotification(error); if (isLoading) { diff --git a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/constants.ts b/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/constants.ts deleted file mode 100644 index 91467b372fcb..000000000000 --- a/frontend/src/container/MetricsApplication/Tabs/Overview/ApDex/constants.ts +++ /dev/null @@ -1 +0,0 @@ -export const metricMeta = 'signoz_latency_bucket'; diff --git a/frontend/src/container/OnboardingQuestionaire/index.tsx b/frontend/src/container/OnboardingQuestionaire/index.tsx index 905132b4bfdb..57832e28c2c5 100644 --- a/frontend/src/container/OnboardingQuestionaire/index.tsx +++ b/frontend/src/container/OnboardingQuestionaire/index.tsx @@ -254,7 +254,7 @@ function OnboardingQuestionaire(): JSX.Element { setCurrentStep(2); }} onNext={handleUpdateProfile} - onWillDoLater={(): void => setCurrentStep(4)} + onWillDoLater={handleUpdateProfile} /> )} diff --git a/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx b/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx index 55f368f977ed..f7ed864621c3 100644 --- a/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx +++ b/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @@ -434,6 +434,9 @@ function OnboardingAddDataSource(): JSX.Element { history.push(ROUTES.LOGS); break; case 'metrics': + history.push(ROUTES.METRICS_EXPLORER); + break; + case 'dashboards': history.push(ROUTES.ALL_DASHBOARD); break; case 'infra-monitoring-hosts': @@ -454,6 +457,9 @@ function OnboardingAddDataSource(): JSX.Element { case 'home': history.push(ROUTES.HOME); break; + case 'api-monitoring': + history.push(ROUTES.API_MONITORING); + break; default: history.push(ROUTES.APPLICATION); } diff 
--git a/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json b/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json index af4c43c0d8a7..59facfca98a7 100644 --- a/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json +++ b/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @@ -33,6 +33,60 @@ "imgUrl": "/Logos/grafana.svg", "link": "https://signoz.io/docs/migration/migrate-from-grafana/" }, + { + "dataSource": "migrate-from-elk", + "label": "From ELK", + "tags": ["migrate to SigNoz"], + "module": "home", + "relatedSearchKeywords": [ + "elk", + "elasticsearch", + "logstash", + "kibana", + "elastic stack", + "migration", + "elastic", + "opentelemetry" + ], + "imgUrl": "/Logos/elk.svg", + "link": "https://signoz.io/docs/migration/migrate-from-elk-to-signoz/" + }, + { + "dataSource": "migrate-from-newrelic", + "label": "From New Relic", + "tags": ["migrate to SigNoz"], + "module": "home", + "relatedSearchKeywords": [ + "new relic", + "newrelic", + "apm migration", + "opentelemetry", + "migration guide", + "migrate", + "migration" + ], + "imgUrl": "/Logos/newrelic.svg", + "link": "https://signoz.io/docs/migration/migrate-from-newrelic-to-signoz/" + }, + { + "dataSource": "migrate-signoz-self-host-to-cloud", + "label": "From SigNoz Self-Host", + "tags": ["migrate to SigNoz"], + "module": "home", + "relatedSearchKeywords": [ + "signoz self-hosted", + "signoz cloud", + "migration", + "self-host to cloud", + "data migration", + "migrate", + "migration", + "selfhosted signoz", + "self-host" + ], + "imgUrl": "/Logos/signoz-brand-logo.svg", + "link": "https://signoz.io/docs/migration/migrate-from-signoz-self-host-to-signoz-cloud/" + }, { "dataSource": "java", "entityID": "dataSource", @@ -1139,6 +1193,57 @@ "relatedSearchKeywords": ["tracing", "nginx server", "nginx proxy", "nginx"], "id": 
"nginx-tracing", "link": "https://signoz.io/docs/instrumentation/opentelemetry-nginx/" + }, + { + "dataSource": "opentelemetry-wordpress", + "label": "WordPress", + "imgUrl": "/Logos/wordpress.svg", + "tags": ["apm"], + "module": "apm", + "relatedSearchKeywords": [ + "apm", + "wordpress", + "wordpress monitoring", + "wordpress tracing", + "wordpress performance", + "wordpress observability", + "opentelemetry wordpress", + "otel wordpress", + "wordpress instrumentation", + "monitor wordpress site", + "wordpress apm", + "wordpress metrics", + "wordpress php monitoring", + "wordpress plugin monitoring", + "wordpress to signoz" + ], + "id": "opentelemetry-wordpress", + "link": "https://signoz.io/docs/instrumentation/opentelemetry-wordpress/" + }, + { + "dataSource": "opentelemetry-cloudflare", + "label": "Cloudflare", + "imgUrl": "/Logos/cloudflare.svg", + "tags": ["apm"], + "module": "apm", + "relatedSearchKeywords": [ + "apm", + "cloudflare", + "cloudflare workers", + "cloudflare monitoring", + "cloudflare tracing", + "cloudflare observability", + "opentelemetry cloudflare", + "otel cloudflare", + "cloudflare instrumentation", + "monitor cloudflare workers", + "cloudflare apm", + "cloudflare metrics", + "edge computing monitoring", + "cloudflare to signoz" + ], + "id": "opentelemetry-cloudflare", + "link": "https://signoz.io/docs/instrumentation/opentelemetry-cloudflare/" }, { "dataSource": "kubernetes-pod-logs", @@ -1266,6 +1371,29 @@ "id": "syslogs", "link": "https://signoz.io/docs/userguide/collecting_syslogs/" }, + { + "dataSource": "systemd-logs", + "label": "Systemd Logs", + "imgUrl": "/Logos/systemd.svg", + "tags": ["logs"], + "module": "logs", + "relatedSearchKeywords": [ + "systemd logs", + "journalctl logs", + "collect systemd logs", + "systemd log monitoring", + "systemd log collection", + "systemd opentelemetry", + "systemd to otel", + "linux systemd monitoring", + "journald logs", + "systemd logs to signoz", + "systemctl", + "journald" + ], + "id": 
"systemd-logs", + "link": "https://signoz.io/docs/logs-management/send-logs/collect-systemd-logs/" + }, { "dataSource": "fluentd", "label": "FluentD", @@ -1617,7 +1745,7 @@ "dataSource": "docker-container-metrics", "label": "Docker Container Metrics", "tags": ["metrics"], - "module": "metrics", + "module": "dashboards", "relatedSearchKeywords": [ "docker container metrics", "monitor docker containers", @@ -1657,7 +1785,7 @@ "dataSource": "ec2-infrastructure-metrics", "label": "EC2 Infra Metrics", "tags": ["AWS"], - "module": "metrics", + "module": "infra-monitoring-hosts", "relatedSearchKeywords": [ "ec2 infrastructure metrics", "monitor aws ec2", @@ -1677,7 +1805,7 @@ "dataSource": "ecs-ec2", "label": "ECS EC2", "tags": ["AWS"], - "module": "metrics", + "module": "dashboards", "relatedSearchKeywords": [ "ecs ec2 monitoring", "ecs ec2 logs and metrics", @@ -1697,7 +1825,7 @@ "dataSource": "ecs-external", "label": "ECS External", "tags": ["AWS"], - "module": "metrics", + "module": "dashboards", "relatedSearchKeywords": [ "ecs external monitoring", "external ecs observability", @@ -1717,7 +1845,7 @@ "dataSource": "ecs-fargate", "label": "ECS Fargate", "tags": ["AWS"], - "module": "metrics", + "module": "dashboards", "relatedSearchKeywords": [ "ecs fargate monitoring", "fargate logs and metrics", @@ -2065,6 +2193,26 @@ ] } }, + { + "dataSource": "azure-mysql-flexible-server", + "label": "Azure MySQL Flexible Server", + "tags": ["Azure"], + "module": "metrics", + "relatedSearchKeywords": [ + "azure mysql flexible server", + "mysql flexible server monitoring", + "azure mysql metrics", + "mysql database monitoring azure", + "opentelemetry mysql azure", + "azure mysql observability", + "mysql flexible server logs", + "azure mysql telemetry", + "mysql azure performance monitoring", + "azure mysql to signoz" + ], + "imgUrl": "/Logos/azure-mysql.svg", + "link": "https://signoz.io/docs/azure-monitoring/mysql-flexible-server/" + }, { "dataSource": "cloud-functions", "label": 
"Cloud functions", @@ -2465,6 +2613,29 @@ ] } }, + { + "dataSource": "openai-monitoring", + "label": "OpenAI Monitoring", + "imgUrl": "/Logos/openai.svg", + "tags": ["LLM Monitoring"], + "module": "apm", + "relatedSearchKeywords": [ + "openai monitoring", + "openai tracing", + "openai observability", + "monitor openai api", + "openai performance monitoring", + "openai instrumentation", + "opentelemetry openai", + "otel openai", + "openai metrics", + "openai to signoz", + "openai logs", + "open ai", + "llm" + ], + "link": "https://signoz.io/docs/llm/opentelemetry-openai-monitoring/" + }, { "dataSource": "llm-monitoring", "label": "LLM Monitoring", @@ -2485,6 +2656,226 @@ ], "link": "https://signoz.io/docs/community/llm-monitoring/" }, + { + "dataSource": "http-endpoints-monitoring", + "label": "HTTP Endpoints Monitoring", + "imgUrl": "/Logos/http-monitoring.svg", + "tags": ["Synthetic Monitoring"], + "module": "metrics", + "relatedSearchKeywords": [ + "http endpoints monitoring", + "synthetic monitoring", + "uptime monitoring", + "endpoint health checks", + "api monitoring", + "website monitoring", + "http response monitoring", + "endpoint performance monitoring", + "synthetic tests", + "monitor http endpoints" + ], + "link": "https://signoz.io/docs/monitor-http-endpoints/" + }, + { + "dataSource": "external-api-monitoring-setup", + "label": "External API Monitoring Setup", + "imgUrl": "/Logos/external-api-monitoring.svg", + "tags": ["api-monitoring"], + "module": "api-monitoring", + "relatedSearchKeywords": [ + "external api monitoring", + "api monitoring setup", + "monitor external apis", + "api observability", + "api performance monitoring", + "api health monitoring", + "third party api monitoring", + "api endpoint monitoring", + "api latency monitoring", + "api uptime monitoring", + "rest api monitoring", + "api metrics", + "api telemetry", + "monitor api calls", + "api monitoring configuration" + ], + "link": 
"https://signoz.io/docs/external-api-monitoring/setup/" + }, + { + "dataSource": "github-metrics", + "label": "GitHub Metrics", + "imgUrl": "/Logos/github.svg", + "tags": ["CICD"], + "module": "metrics", + "relatedSearchKeywords": [ + "github metrics", + "github monitoring", + "github observability", + "monitor github repos", + "github telemetry", + "github api metrics", + "github repository monitoring", + "github to signoz", + "github cicd monitoring", + "github actions metrics" + ], + "link": "https://signoz.io/docs/cicd/github/github-metrics/" + }, + { + "dataSource": "github-actions-traces", + "label": "GitHub Actions Traces", + "imgUrl": "/Logos/github.svg", + "tags": ["CICD"], + "module": "apm", + "relatedSearchKeywords": [ + "github actions traces", + "github actions monitoring", + "github actions observability", + "github actions tracing", + "monitor github actions", + "github workflow monitoring", + "github cicd tracing", + "github actions to signoz", + "github actions telemetry", + "github workflow observability" + ], + "link": "https://signoz.io/docs/cicd/github/github-actions-traces/" + }, + { + "dataSource": "jenkins-agent-node-monitoring", + "label": "Jenkins Agent Node Monitoring", + "imgUrl": "/Logos/jenkins.svg", + "tags": ["CICD"], + "module": "metrics", + "relatedSearchKeywords": [ + "jenkins agent monitoring", + "jenkins node monitoring", + "jenkins observability", + "monitor jenkins agents", + "jenkins telemetry", + "jenkins infrastructure monitoring", + "jenkins agent metrics", + "jenkins to signoz", + "jenkins cicd monitoring", + "jenkins build agents" + ], + "link": "https://signoz.io/docs/cicd/jenkins/agent-node-monitoring/" + }, + { + "dataSource": "jenkins-tracing", + "label": "Jenkins Tracing", + "imgUrl": "/Logos/jenkins.svg", + "tags": ["CICD"], + "module": "apm", + "relatedSearchKeywords": [ + "jenkins tracing", + "jenkins monitoring", + "jenkins observability", + "jenkins pipeline tracing", + "monitor jenkins builds", + "jenkins 
workflow monitoring", + "jenkins cicd tracing", + "jenkins to signoz", + "jenkins telemetry", + "jenkins pipeline observability" + ], + "link": "https://signoz.io/docs/cicd/jenkins/jenkins-tracing/" + }, + { + "dataSource": "argocd-metrics", + "label": "ArgoCD Metrics", + "imgUrl": "/Logos/argocd.svg", + "tags": ["CICD"], + "module": "dashboards", + "relatedSearchKeywords": [ + "argocd metrics", + "argocd monitoring", + "argocd observability", + "monitor argocd", + "argocd telemetry", + "argocd gitops monitoring", + "argocd deployment monitoring", + "argocd to signoz", + "argocd cicd monitoring", + "gitops monitoring" + ], + "link": "https://signoz.io/docs/cicd/argocd/argocd-metrics/" + }, + { + "dataSource": "self-hosted-kafka", + "label": "Self-Hosted Kafka", + "imgUrl": "/Logos/kafka.svg", + "tags": ["Messaging Queues"], + "module": "messaging-queues-kafka", + "relatedSearchKeywords": [ + "self hosted kafka", + "kafka setup", + "kafka open source", + "kafka observability", + "kafka integration" + ], + "link": "https://signoz.io/docs/messaging-queues/kafka/" + }, + { + "dataSource": "amazon-msk", + "label": "Amazon MSK", + "imgUrl": "/Logos/amazon-msk.svg", + "tags": ["Messaging Queues"], + "module": "messaging-queues-kafka", + "relatedSearchKeywords": [ + "amazon msk", + "msk kafka", + "aws kafka", + "msk tracing", + "msk monitoring" + ], + "link": "https://signoz.io/docs/messaging-queues/msk/" + }, + { + "dataSource": "confluent-kafka", + "label": "Confluent Kafka", + "imgUrl": "/Logos/confluent-kafka.svg", + "tags": ["Messaging Queues"], + "module": "messaging-queues-kafka", + "relatedSearchKeywords": [ + "confluent kafka", + "confluent cloud", + "kafka tracing", + "kafka cloud", + "kafka monitoring" + ], + "link": "https://signoz.io/docs/messaging-queues/confluent-kafka/" + }, + { + "dataSource": "strimzi-kafka", + "label": "Strimzi Kafka", + "imgUrl": "/Logos/strimzi.svg", + "tags": ["Messaging Queues"], + "module": "messaging-queues-kafka", + 
"relatedSearchKeywords": [ + "strimzi kafka", + "kafka on kubernetes", + "strimzi operator", + "kafka helm chart", + "monitor kafka strimzi" + ], + "link": "https://signoz.io/docs/messaging-queues/strimzi/" + }, + { + "dataSource": "celery", + "label": "Celery", + "imgUrl": "/Logos/celery.svg", + "tags": ["Messaging Queues"], + "module": "messaging-queues-celery", + "relatedSearchKeywords": [ + "celery python", + "celery tracing", + "celery monitoring", + "task queue tracing", + "celery opentelemetry" + ], + "link": "https://signoz.io/docs/messaging-queues/celery-setup/" + }, { "dataSource": "android-java", "label": "Android Java", @@ -2605,87 +2996,17 @@ ], "link": "https://signoz.io/docs/frontend-monitoring/document-load/" }, - { - "dataSource": "self-hosted-kafka", - "label": "Self-Hosted Kafka", - "imgUrl": "/Logos/kafka.svg", - "tags": ["Messaging Queues"], - "module": "messaging-queues-kafka", - "relatedSearchKeywords": [ - "self hosted kafka", - "kafka setup", - "kafka open source", - "kafka observability", - "kafka integration" - ], - "link": "https://signoz.io/docs/messaging-queues/kafka/" - }, - { - "dataSource": "amazon-msk", - "label": "Amazon MSK", - "imgUrl": "/Logos/amazon-msk.svg", - "tags": ["Messaging Queues"], - "module": "messaging-queues-kafka", - "relatedSearchKeywords": [ - "amazon msk", - "msk kafka", - "aws kafka", - "msk tracing", - "msk monitoring" - ], - "link": "https://signoz.io/docs/messaging-queues/msk/" - }, - { - "dataSource": "confluent-kafka", - "label": "Confluent Kafka", - "imgUrl": "/Logos/confluent-kafka.svg", - "tags": ["Messaging Queues"], - "module": "messaging-queues-kafka", - "relatedSearchKeywords": [ - "confluent kafka", - "confluent cloud", - "kafka tracing", - "kafka cloud", - "kafka monitoring" - ], - "link": "https://signoz.io/docs/messaging-queues/confluent-kafka/" - }, - { - "dataSource": "strimzi-kafka", - "label": "Strimzi Kafka", - "imgUrl": "/Logos/strimzi.svg", - "tags": ["Messaging Queues"], - "module": 
"messaging-queues-kafka", - "relatedSearchKeywords": [ - "strimzi kafka", - "kafka on kubernetes", - "strimzi operator", - "kafka helm chart", - "monitor kafka strimzi" - ], - "link": "https://signoz.io/docs/messaging-queues/strimzi/" - }, - { - "dataSource": "celery", - "label": "Celery", - "imgUrl": "/Logos/celery.svg", - "tags": ["Messaging Queues"], - "module": "messaging-queues-celery", - "relatedSearchKeywords": [ - "celery python", - "celery tracing", - "celery monitoring", - "task queue tracing", - "celery opentelemetry" - ], - "link": "https://signoz.io/docs/messaging-queues/celery-setup/" - }, { "dataSource": "redis", "label": "Redis", "tags": ["integrations", "database"], "module": "integrations", - "relatedSearchKeywords": ["redis", "redis logs", "redis metrics", "database"], + "relatedSearchKeywords": [ + "redis", + "redis logs", + "redis metrics", + "database" + ], "imgUrl": "/Logos/redis.svg", "link": "/integrations?integration=builtin-redis", "internalRedirect": true @@ -2748,6 +3069,24 @@ "link": "/integrations?integration=builtin-clickhouse", "internalRedirect": true }, + { + "dataSource": "snowflake", + "label": "Snowflake", + "tags": ["integrations", "metrics"], + "module": "metrics", + "relatedSearchKeywords": [ + "snowflake", + "snowflake metrics", + "snowflake monitoring", + "snowflake observability", + "data warehouse monitoring", + "snowflake telemetry", + "snowflake performance monitoring", + "snowflake to signoz" + ], + "imgUrl": "/Logos/snowflake.svg", + "link": "https://signoz.io/docs/integrations/snowflake/" + }, { "dataSource": "aws-rds-postgresql", "label": "AWS RDS (PostgreSQL)", @@ -2802,7 +3141,7 @@ { "dataSource": "aws-alb", "label": "AWS ALB - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], "module": "integrations", "relatedSearchKeywords": [ "alb", @@ -2819,7 +3158,7 @@ { "dataSource": "api-gateway", "label": "AWS API Gateway - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], 
"module": "integrations", "relatedSearchKeywords": [ "api gateway", @@ -2832,10 +3171,27 @@ "link": "/integrations?integration=aws-integration&service=api-gateway", "internalRedirect": true }, + { + "dataSource": "aws-dynamodb", + "label": "DynamoDB - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "dynamodb", + "aws dynamodb", + "dynamodb logs", + "dynamodb metrics", + "nosql database", + "dynamodb monitoring" + ], + "imgUrl": "/Logos/dynamodb.svg", + "link": "/integrations?integration=aws-integration&service=dynamodb", + "internalRedirect": true + }, { "dataSource": "ec2", "label": "EC2 - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], "module": "integrations", "relatedSearchKeywords": [ "ec2", @@ -2848,10 +3204,61 @@ "link": "/integrations?integration=aws-integration&service=ec2", "internalRedirect": true }, + { + "dataSource": "aws-ecs-one-click", + "label": "ECS - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "ecs", + "aws ecs", + "ecs logs", + "ecs metrics", + "container service", + "ecs monitoring" + ], + "imgUrl": "/Logos/ecs.svg", + "link": "/integrations?integration=aws-integration&service=ecs", + "internalRedirect": true + }, + { + "dataSource": "aws-eks-one-click", + "label": "EKS - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "eks", + "aws eks", + "eks logs", + "eks metrics", + "kubernetes service", + "eks monitoring" + ], + "imgUrl": "/Logos/eks.svg", + "link": "/integrations?integration=aws-integration&service=eks", + "internalRedirect": true + }, + { + "dataSource": "aws-elasticache-one-click", + "label": "ElastiCache - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "elasticache", + "aws elasticache", + "elasticache logs", + "elasticache metrics", + "cache service", + "elasticache 
monitoring" + ], + "imgUrl": "/Logos/elasticache.svg", + "link": "/integrations?integration=aws-integration&service=elasticache", + "internalRedirect": true + }, { "dataSource": "aws-lambda", "label": "AWS Lambda - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], "module": "integrations", "relatedSearchKeywords": [ "aws lambda", @@ -2867,7 +3274,7 @@ { "dataSource": "amazon-msk", "label": "Amazon MSK - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], "module": "integrations", "relatedSearchKeywords": [ "amazon msk", @@ -2883,7 +3290,7 @@ { "dataSource": "amazon-rds", "label": "Amazon RDS - One Click", - "tags": ["integrations"], + "tags": ["integrations", "AWS"], "module": "integrations", "relatedSearchKeywords": [ "amazon rds", @@ -2896,6 +3303,57 @@ "link": "/integrations?integration=aws-integration&service=rds", "internalRedirect": true }, + { + "dataSource": "aws-s3-sync", + "label": "S3 Sync - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "s3 sync", + "aws s3", + "s3 logs", + "s3 metrics", + "object storage", + "s3 monitoring" + ], + "imgUrl": "/Logos/s3.svg", + "link": "/integrations?integration=aws-integration&service=s3sync", + "internalRedirect": true + }, + { + "dataSource": "aws-sns", + "label": "SNS - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "sns", + "aws sns", + "sns logs", + "sns metrics", + "notification service", + "sns monitoring" + ], + "imgUrl": "/Logos/sns.svg", + "link": "/integrations?integration=aws-integration&service=sns", + "internalRedirect": true + }, + { + "dataSource": "aws-sqs", + "label": "SQS - One Click", + "tags": ["integrations", "AWS"], + "module": "integrations", + "relatedSearchKeywords": [ + "sqs", + "aws sqs", + "sqs logs", + "sqs metrics", + "queue service", + "sqs monitoring" + ], + "imgUrl": "/Logos/sqs.svg", + "link": 
"/integrations?integration=aws-integration&service=sqs", + "internalRedirect": true + }, { "dataSource": "temporal", "label": "Temporal", diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.styles.scss b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.styles.scss new file mode 100644 index 000000000000..546b6d95eab6 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.styles.scss @@ -0,0 +1,6 @@ +.json-flattening-form { + margin-top: 16px; + &__item { + margin-bottom: 12px; + } +} diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.tsx new file mode 100644 index 000000000000..cfdc16fe4362 --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/JsonFlattening.tsx @@ -0,0 +1,110 @@ +import './JsonFlattening.styles.scss'; + +import { InfoCircleOutlined } from '@ant-design/icons'; +import { Form, Input, Space, Switch, Tooltip } from 'antd'; +import { useEffect, useState } from 'react'; +import { ProcessorData } from 'types/api/pipeline/def'; + +import { PREDEFINED_MAPPING } from '../config'; +import KeyValueList from './KeyValueList'; + +interface JsonFlatteningProps { + selectedProcessorData?: ProcessorData; + isAdd: boolean; +} + +function JsonFlattening({ + selectedProcessorData, + isAdd, +}: JsonFlatteningProps): JSX.Element | null { + const form = Form.useFormInstance(); + const mappingValue = selectedProcessorData?.mapping || {}; + const enableFlattening = Form.useWatch('enable_flattening', form); + const enablePaths = Form.useWatch('enable_paths', form); + + const [enableMapping, setEnableMapping] = useState( + !!mappingValue && Object.keys(mappingValue).length > 0, + ); + + const selectedMapping 
= selectedProcessorData?.mapping; + useEffect(() => { + if (!enableMapping) { + form.setFieldsValue({ mapping: undefined }); + } else if (form.getFieldValue('mapping') === undefined) { + form.setFieldsValue({ + mapping: selectedMapping || PREDEFINED_MAPPING, + }); + } + }, [enableMapping, form, selectedMapping]); + + const handleEnableMappingChange = (checked: boolean): void => { + setEnableMapping(checked); + }; + + const handleEnablePathsChange = (checked: boolean): void => { + form.setFieldValue('enable_paths', checked); + }; + + if (!enableFlattening) { + return null; + } + + return ( +
+ + + + Enable Paths + + + + {enablePaths && ( + + + + )} + + + + + Enable Mapping + + + + + + + {enableMapping && ( + + + + )} +
+ ); +} + +JsonFlattening.defaultProps = { + selectedProcessorData: undefined, +}; + +export default JsonFlattening; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/KeyValueList.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/KeyValueList.tsx new file mode 100644 index 000000000000..b8a4c952ed1c --- /dev/null +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/FormFields/KeyValueList.tsx @@ -0,0 +1,47 @@ +import { Form, Select } from 'antd'; + +import { PREDEFINED_MAPPING } from '../config'; + +interface KeyValueListProps { + value?: Record; + onChange?: (value: Record) => void; +} + +function KeyValueList({ + value = PREDEFINED_MAPPING, + onChange, +}: KeyValueListProps): JSX.Element { + const handleValueChange = (key: string, newValue: string[]): void => { + const newMapping = { + ...value, + [key]: newValue, + }; + if (onChange) { + onChange(newMapping); + } + }; + + return ( +
+ {Object.keys(value).map((key) => ( + + ; } @@ -68,40 +80,82 @@ function ProcessorFieldInput({ )} - {fieldData.fieldName}} - name={fieldData.name} - initialValue={fieldData.initialValue} - rules={fieldData.rules ? fieldData.rules : formValidationRules} - dependencies={fieldData.dependencies || []} - > - {inputField} - + {fieldData.name === 'enable_flattening' ? ( + + + { + form.setFieldValue('enable_flattening', checked); + }} + /> + {fieldData.fieldName} + + + ) : ( + {fieldData.fieldName}} + name={fieldData.name} + initialValue={fieldData.initialValue} + rules={fieldData.rules ? fieldData.rules : formValidationRules} + dependencies={fieldData.dependencies || []} + > + {inputField} + + )} + {fieldData.name === 'enable_flattening' && inputField}
); } +ProcessorFieldInput.defaultProps = { + selectedProcessorData: undefined, +}; + interface ProcessorFieldInputProps { fieldData: ProcessorFormField; + selectedProcessorData?: ProcessorData; + isAdd: boolean; } -function ProcessorForm({ processorType }: ProcessorFormProps): JSX.Element { +function ProcessorForm({ + processorType, + selectedProcessorData, + isAdd, +}: ProcessorFormProps): JSX.Element { return (
{processorFields[processorType]?.map((fieldData: ProcessorFormField) => ( ))}
); } +ProcessorForm.defaultProps = { + selectedProcessorData: undefined, +}; + interface ProcessorFormProps { processorType: string; + selectedProcessorData?: ProcessorData; + isAdd: boolean; } export default ProcessorForm; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts index 2c9a67689849..4205de722e4f 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/config.ts @@ -136,6 +136,13 @@ export const processorFields: { [key: string]: Array } = { name: 'parse_to', initialValue: 'attributes', }, + { + id: 4, + fieldName: 'Enable Flattening', + placeholder: '', + name: 'enable_flattening', + initialValue: false, + }, ], regex_parser: [ { @@ -458,3 +465,14 @@ export const processorFields: { [key: string]: Array } = { }, ], }; + +export const PREDEFINED_MAPPING = { + environment: ['service.env', 'environment', 'env'], + host: ['host', 'hostname', 'host.name'], + message: ['message', 'msg', 'log'], + service: ['service', 'appname'], + severity: ['status', 'severity', 'level'], + span_id: ['span_id', 'span.id'], + trace_flags: ['flags'], + trace_id: ['trace_id', 'trace.id'], +}; diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx index 661fc4043ae9..7d9edbc48bec 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/index.tsx @@ -160,6 +160,7 @@ function AddNewProcessor({ width={800} footer={null} onCancel={onCancelModal} + destroyOnClose >
- + diff --git a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.scss b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.scss index ef6acfe83813..20af7b763a0d 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.scss +++ b/frontend/src/container/PipelinePage/PipelineListsView/AddNewProcessor/styles.scss @@ -24,3 +24,7 @@ flex-grow: 1; margin-left: 2.5rem; } + +.enable-flattening-switch .ant-form-item-control-input { + min-height: unset !important; +} diff --git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx index 17761ad99f9f..d00f3fad830a 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineExpandView.tsx @@ -219,21 +219,6 @@ function PipelineExpandView({ moveRow: moveProcessorRow, } as React.HTMLAttributes); - const processorData = useMemo( - () => - expandedPipelineData?.config && - expandedPipelineData?.config.map( - (item: ProcessorData): ProcessorData => ({ - id: item.id, - orderId: item.orderId, - type: item.type, - name: item.name, - enabled: item.enabled, - }), - ), - [expandedPipelineData], - ); - const getLocales = (): TableLocale => ({ emptyText: , }); @@ -248,7 +233,7 @@ function PipelineExpandView({ rowKey="id" size="small" components={tableComponents} - dataSource={processorData} + dataSource={expandedPipelineData?.config} pagination={false} onRow={onRowHandler} footer={footer} diff --git a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx index 71b353defec6..9b261f0403ef 100644 --- a/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx +++ b/frontend/src/container/PipelinePage/PipelineListsView/PipelineListsView.tsx @@ 
-6,8 +6,9 @@ import { ExpandableConfig } from 'antd/es/table/interface'; import logEvent from 'api/common/logEvent'; import savePipeline from 'api/pipeline/post'; import { useNotifications } from 'hooks/useNotifications'; -import { isUndefined } from 'lodash-es'; +import { isEqual, isUndefined } from 'lodash-es'; import cloneDeep from 'lodash-es/cloneDeep'; +import { useErrorModal } from 'providers/ErrorModalProvider'; import React, { useCallback, useEffect, @@ -18,6 +19,7 @@ import React, { import { DndProvider } from 'react-dnd'; import { HTML5Backend } from 'react-dnd-html5-backend'; import { useTranslation } from 'react-i18next'; +import APIError from 'types/api/error'; import { ActionMode, ActionType, @@ -75,7 +77,7 @@ function PipelinesListEmptyState(): JSX.Element { here @@ -95,6 +97,7 @@ function PipelineListsView({ pipelineData, refetchPipelineLists, }: PipelineListsViewProps): JSX.Element { + const { showErrorModal } = useErrorModal(); const [pipelineForm] = Form.useForm(); const { t } = useTranslation(['pipeline', 'common']); const [modal, contextHolder] = Modal.useModal(); @@ -407,30 +410,68 @@ function PipelineListsView({ return undefined; }, [isEditingActionMode, addNewPipelineHandler, t]); + const getModifiedJsonFlatteningConfigs = useCallback( + () => + currPipelineData.flatMap((pipeline) => { + const prevPipeline = prevPipelineData.find((p) => p.name === pipeline.name); + + return (pipeline.config || []) + .filter((processor) => { + const prevProcessor = prevPipeline?.config?.find( + (p) => p.name === processor.name, + ); + return ( + processor.type === 'json_parser' && + (!prevProcessor || + prevProcessor.enable_flattening !== processor.enable_flattening || + prevProcessor.enable_paths !== processor.enable_paths || + prevProcessor.path_prefix !== processor.path_prefix || + !isEqual(prevProcessor.mapping, processor.mapping)) + ); + }) + .map((processor) => ({ + enableFlattening: !!processor.enable_flattening, + enablePaths: 
!!processor.enable_paths, + pathPrefix: processor.path_prefix || '', + mapping: processor.mapping || {}, + })); + }), + [currPipelineData, prevPipelineData], + ); + const onSaveConfigurationHandler = useCallback(async () => { const modifiedPipelineData = currPipelineData.map((item: PipelineData) => { const pipelineData = { ...item }; delete pipelineData?.id; return pipelineData; }); - const response = await savePipeline({ - data: { pipelines: modifiedPipelineData }, - }); - if (response.statusCode === 200) { + try { + const response = await savePipeline({ + data: { pipelines: modifiedPipelineData }, + }); refetchPipelineLists(); setActionMode(ActionMode.Viewing); setShowSaveButton(undefined); - const pipelinesInDB = response.payload?.pipelines || []; + const pipelinesInDB = response.data?.pipelines || []; setCurrPipelineData(pipelinesInDB); setPrevPipelineData(pipelinesInDB); + // Log modified JSON flattening configurations + const modifiedConfigs = getModifiedJsonFlatteningConfigs(); + if (modifiedConfigs.length > 0) { + logEvent('Logs pipeline: Saved JSON Flattening Configuration', { + count: modifiedConfigs.length, + configurations: modifiedConfigs, + }); + } + logEvent('Logs: Pipelines: Saved Pipelines', { count: pipelinesInDB.length, enabled: pipelinesInDB.filter((p) => p.enabled).length, source: 'signoz-ui', }); - } else { + } catch (error) { modifiedPipelineData.forEach((item: PipelineData) => { const pipelineData = item; pipelineData.id = v4(); @@ -438,15 +479,19 @@ function PipelineListsView({ }); setActionMode(ActionMode.Editing); setShowSaveButton(ActionMode.Editing); - notifications.error({ - message: 'Error', - description: response.error || t('something_went_wrong'), - }); + showErrorModal(error as APIError); setCurrPipelineData(modifiedPipelineData); setPrevPipelineData(modifiedPipelineData); } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [currPipelineData, notifications, refetchPipelineLists, setActionMode, t]); + }, [ + 
currPipelineData, + notifications, + refetchPipelineLists, + setActionMode, + t, + getModifiedJsonFlatteningConfigs, + ]); const onCancelConfigurationHandler = useCallback((): void => { setActionMode(ActionMode.Viewing); diff --git a/frontend/src/container/PipelinePage/mocks/pipeline.ts b/frontend/src/container/PipelinePage/mocks/pipeline.ts index db309b0e50f4..5dd7b37a8489 100644 --- a/frontend/src/container/PipelinePage/mocks/pipeline.ts +++ b/frontend/src/container/PipelinePage/mocks/pipeline.ts @@ -58,6 +58,15 @@ export const pipelineMockData: Array = [ from: 'attributes.auth', to: 'attributes.username', }, + { + orderId: 3, + enabled: true, + type: 'json_parser', + id: 'jsonparser', + name: 'json parser', + from: 'attributes.auth', + to: 'attributes.username', + }, ], createdBy: 'nityananda@signoz.io', createdAt: '2023-03-07T16:56:53.36071141Z', diff --git a/frontend/src/container/PipelinePage/tests/AddNewProcessor.test.tsx b/frontend/src/container/PipelinePage/tests/AddNewProcessor.test.tsx index 8a226861bf83..e1e961e125aa 100644 --- a/frontend/src/container/PipelinePage/tests/AddNewProcessor.test.tsx +++ b/frontend/src/container/PipelinePage/tests/AddNewProcessor.test.tsx @@ -1,8 +1,16 @@ -import { render } from 'tests/test-utils'; +import { fireEvent, screen, waitFor } from '@testing-library/react'; +import { render as customRender } from 'tests/test-utils'; +import { ProcessorData } from 'types/api/pipeline/def'; import { pipelineMockData } from '../mocks/pipeline'; import AddNewProcessor from '../PipelineListsView/AddNewProcessor'; +// Mock the config module to set JSON parser as default +jest.mock('../PipelineListsView/AddNewProcessor/config', () => ({ + ...jest.requireActual('../PipelineListsView/AddNewProcessor/config'), + DEFAULT_PROCESSOR_TYPE: 'json_parser', +})); + jest.mock('uplot', () => { const paths = { spline: jest.fn(), @@ -17,44 +25,233 @@ jest.mock('uplot', () => { }; }); -beforeAll(() => { - Object.defineProperty(window, 'matchMedia', { - 
writable: true, - value: jest.fn().mockImplementation((query) => ({ - matches: false, - media: query, - onchange: null, - addListener: jest.fn(), - removeListener: jest.fn(), - addEventListener: jest.fn(), - removeEventListener: jest.fn(), - dispatchEvent: jest.fn(), - })), - }); -}); - const selectedProcessorData = { id: '1', orderId: 1, - type: 'grok_parser', - name: 'grok use common', - output: 'grokusecommon', + type: 'json_parser', + name: 'json parser', + output: 'jsonparser', }; -describe('PipelinePage container test', () => { - it('should render AddNewProcessor section', () => { - const setActionType = jest.fn(); - const isActionType = 'add-processor'; - const { asFragment } = render( - , - ); - expect(asFragment()).toMatchSnapshot(); +// Constants for repeated text +const ENABLE_PATHS_TEXT = 'Enable Paths'; +const ENABLE_MAPPING_TEXT = 'Enable Mapping'; +const PATH_PREFIX_LABEL = 'Path Prefix'; + +// Helper function to render AddNewProcessor with JSON parser type +const renderJsonProcessor = ({ + selectedProcessorData: processorData = selectedProcessorData, + isActionType = 'add-processor', +}: { + selectedProcessorData?: ProcessorData; + isActionType?: 'add-processor' | 'edit-processor'; +}): ReturnType => { + const defaultProps = { + isActionType, + setActionType: jest.fn(), + selectedProcessorData: processorData, + setShowSaveButton: jest.fn(), + expandedPipelineData: pipelineMockData[2], + setExpandedPipelineData: jest.fn(), + }; + + // eslint-disable-next-line react/jsx-props-no-spreading + return customRender(); +}; + +describe('JSON Flattening Processor Tests', () => { + describe('Enable/Disable Flattening', () => { + it('should display the form when enable flattening is turned on', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: true, + }, + }); + + // Verify the JSON flattening form is displayed + expect(screen.queryByText(ENABLE_PATHS_TEXT)).toBeInTheDocument(); + 
expect(screen.queryByText(ENABLE_MAPPING_TEXT)).toBeInTheDocument(); + }); + it('should not display the form when enable flattening is turned off', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: false, + }, + }); + + // Verify the JSON flattening form is not displayed + expect(screen.queryByText(ENABLE_PATHS_TEXT)).not.toBeInTheDocument(); + expect(screen.queryByText(ENABLE_MAPPING_TEXT)).not.toBeInTheDocument(); + }); + it('should display the form when enable flattening switch is toggled on', async () => { + renderJsonProcessor({}); + + // Wait for the component to render and find the enable flattening switch + await waitFor(() => { + expect(screen.getByRole('switch')).toBeInTheDocument(); + }); + + // Find the enable flattening switch + const enableFlatteningSwitch = screen.getByRole('switch'); + // Turn on the switch + fireEvent.click(enableFlatteningSwitch); + + // Verify the JSON flattening form is displayed + await waitFor(() => { + expect(screen.getByText(ENABLE_PATHS_TEXT)).toBeInTheDocument(); + expect(screen.getByText(ENABLE_MAPPING_TEXT)).toBeInTheDocument(); + }); + }); + it('should hide the form when enable flattening switch is toggled off', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: true, + }, + }); + + // Wait for the component to render and find the switches + await waitFor(() => { + expect(screen.getAllByRole('switch')[0]).toBeInTheDocument(); + }); + + // Find the enable flattening switch + const enableFlatteningSwitch = screen.getAllByRole('switch')[0]; + // Turn off the switch + fireEvent.click(enableFlatteningSwitch); + await waitFor(() => { + expect(screen.queryByText(ENABLE_PATHS_TEXT)).not.toBeInTheDocument(); + expect(screen.queryByText(ENABLE_MAPPING_TEXT)).not.toBeInTheDocument(); + }); + }); + }); + + describe('Enable/Disable Paths', () => { + it('should toggle path prefix visibility when enable paths 
switch is toggled', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: true, + enable_paths: false, + }, + }); + + // Wait for the component to render and find the switches + await waitFor(() => { + expect(screen.getAllByRole('switch')[1]).toBeInTheDocument(); + }); + + // In add mode, enable_paths is always true initially, so the path prefix should be visible + await waitFor(() => { + expect(screen.getByLabelText(PATH_PREFIX_LABEL)).toBeInTheDocument(); + }); + + // Find the enable paths switch (second switch in the form) and turn it off + const enablePathsSwitch = screen.getAllByRole('switch')[1]; + fireEvent.click(enablePathsSwitch); + + // Verify the path prefix field is now hidden + await waitFor(() => { + expect(screen.queryByLabelText(PATH_PREFIX_LABEL)).not.toBeInTheDocument(); + }); + + // Turn the paths switch back on + fireEvent.click(enablePathsSwitch); + + // Verify the path prefix field is displayed again + await waitFor(() => { + expect(screen.getByLabelText(PATH_PREFIX_LABEL)).toBeInTheDocument(); + }); + }); + it('should hide path prefix when enable paths switch is turned off', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: true, + enable_paths: true, + }, + }); + + // Wait for the component to render and find the switches + await waitFor(() => { + expect(screen.getAllByRole('switch')[1]).toBeInTheDocument(); + }); + + // Verify the path prefix is initially visible + await waitFor(() => { + expect(screen.getByLabelText(PATH_PREFIX_LABEL)).toBeInTheDocument(); + }); + + // Find the enable paths switch and turn it off + const enablePathsSwitch = screen.getAllByRole('switch')[1]; + fireEvent.click(enablePathsSwitch); + + // Verify the path prefix field is now hidden + await waitFor(() => { + expect(screen.queryByLabelText(PATH_PREFIX_LABEL)).not.toBeInTheDocument(); + }); + }); + }); + + describe('Enable/Disable Mapping', 
() => { + it('should display the mapping fields when enable mapping is turned on', async () => { + renderJsonProcessor({ + selectedProcessorData: { + ...selectedProcessorData, + enable_flattening: true, + enable_paths: true, + mapping: { + environment: ['existing.env'], + host: ['existing.host'], + }, + }, + }); + + // Verify the mapping fields are displayed + await waitFor(() => { + expect(screen.getByText('environment')).toBeInTheDocument(); + expect(screen.getByText('host')).toBeInTheDocument(); + }); + }); + }); + + describe('Edit Processor Flow', () => { + it('should load existing processor data correctly when editing', async () => { + const existingProcessorData = { + id: '1', + orderId: 1, + type: 'json_parser', + name: 'test json parser', + output: 'testoutput', + enable_flattening: true, + enable_paths: true, + path_prefix: 'existing.prefix', + enable_mapping: true, + mapping: { + environment: ['existing.env'], + host: ['existing.host'], + }, + }; + + renderJsonProcessor({ + selectedProcessorData: existingProcessorData, + isActionType: 'edit-processor', + }); + + // Verify the form is displayed with existing data + await waitFor(() => { + expect(screen.getByDisplayValue('existing.prefix')).toBeInTheDocument(); + }); + + // Verify flattening is enabled + const enableFlatteningSwitch = screen.getAllByRole('switch')[0]; + expect(enableFlatteningSwitch).toBeChecked(); + + // Verify paths is enabled + const enablePathsSwitch = screen.getAllByRole('switch')[1]; + expect(enablePathsSwitch).toBeChecked(); + }); }); }); diff --git a/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx b/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx index 003e0ac63e69..3542e0b3165c 100644 --- a/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx +++ b/frontend/src/container/PipelinePage/tests/PipelineListsView.test.tsx @@ -192,7 +192,7 @@ describe('PipelinePage container test', () => { '.ant-table-expanded-row 
[data-icon="delete"]', ); - expect(deleteBtns.length).toBe(2); + expect(deleteBtns.length).toBe(3); // delete pipeline await fireEvent.click(deleteBtns[0] as HTMLElement); @@ -213,7 +213,7 @@ describe('PipelinePage container test', () => { expect( document.querySelectorAll('.ant-table-expanded-row [data-icon="delete"]') .length, - ).toBe(1); + ).toBe(2); }); it('should be able to toggle and delete pipeline', async () => { diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/AddNewProcessor.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/AddNewProcessor.test.tsx.snap deleted file mode 100644 index 1a91dcb17c9e..000000000000 --- a/frontend/src/container/PipelinePage/tests/__snapshots__/AddNewProcessor.test.tsx.snap +++ /dev/null @@ -1,3 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`PipelinePage container test should render AddNewProcessor section 1`] = ``; diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap index 6e27fbea9759..13b89bec4355 100644 --- a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelineExpandView.test.tsx.snap @@ -124,6 +124,37 @@ exports[`PipelinePage should render PipelineExpandView section 1`] = `
+ + + + + 3 + + + + +
+ json parser +
+ +
diff --git a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap index 1261f7282c83..de2cf63c5f4a 100644 --- a/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap +++ b/frontend/src/container/PipelinePage/tests/__snapshots__/PipelinePageLayout.test.tsx.snap @@ -118,7 +118,7 @@ exports[`PipelinePage container test should render PipelinePageLayout section 1` learn_more  here diff --git a/frontend/src/container/SideNav/SideNav.styles.scss b/frontend/src/container/SideNav/SideNav.styles.scss index aad45823f9db..6a1ad75ff6a0 100644 --- a/frontend/src/container/SideNav/SideNav.styles.scss +++ b/frontend/src/container/SideNav/SideNav.styles.scss @@ -133,12 +133,6 @@ margin-right: 4px; } - &.cloud-user { - .version { - cursor: default; - } - } - &.version-update-notification { .license-type { background: var(--bg-robin-500); @@ -829,6 +823,20 @@ } } +.nav-item-label-container { + display: flex; + align-items: center; + justify-content: space-between; + + & span { + display: block; + max-width: 170px; + white-space: nowrap; /* Prevents line breaks */ + overflow: hidden; /* Hides overflowing content */ + text-overflow: ellipsis; + } +} + .nav-dropdown-overlay { .ant-dropdown-menu { margin-left: 8px !important; diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index 27f653d59cdd..5fcbe51e6be5 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -23,7 +23,6 @@ import logEvent from 'api/common/logEvent'; import { Logout } from 'api/utils'; import updateUserPreference from 'api/v1/user/preferences/name/update'; import cx from 'classnames'; -import ChangelogModal from 'components/ChangelogModal/ChangelogModal'; import { FeatureKeys } from 'constants/features'; import ROUTES from 'constants/routes'; import { 
GlobalShortcuts } from 'constants/shortcuts/globalShortcuts'; @@ -35,16 +34,20 @@ import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; import { isArray } from 'lodash-es'; import { + ArrowUpRight, Check, ChevronDown, ChevronsDown, ChevronUp, Cog, Ellipsis, + GitCommitVertical, GripVertical, + LampDesk, Logs, MousePointerClick, PackagePlus, + ScrollText, X, } from 'lucide-react'; import { useAppContext } from 'providers/App/App'; @@ -74,7 +77,11 @@ import { primaryMenuItems, } from './menuItems'; import NavItem from './NavItem/NavItem'; -import { SidebarItem } from './sideNav.types'; +import { + CHANGELOG_LABEL, + DropdownSeparator, + SidebarItem, +} from './sideNav.types'; import { getActiveMenuKeyFromPath } from './sideNav.utils'; function SortableFilter({ item }: { item: SidebarItem }): JSX.Element { @@ -126,6 +133,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { isLoggedIn, userPreferences, changelog, + toggleChangelogModal, updateUserPreferenceInContext, } = useAppContext(); @@ -143,7 +151,9 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { const [ helpSupportDropdownMenuItems, setHelpSupportDropdownMenuItems, - ] = useState(DefaultHelpSupportDropdownMenuItems); + ] = useState<(SidebarItem | DropdownSeparator)[]>( + DefaultHelpSupportDropdownMenuItems, + ); const [pinnedMenuItems, setPinnedMenuItems] = useState([]); @@ -157,7 +167,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { const [hasScroll, setHasScroll] = useState(false); const navTopSectionRef = useRef(null); - const [showChangelogModal, setShowChangelogModal] = useState(false); const checkScroll = useCallback((): void => { if (navTopSectionRef.current) { @@ -511,7 +520,9 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { useEffect(() => { if (!isAdmin) { setHelpSupportDropdownMenuItems((prevState) => - prevState.filter((item) => item.key !== 'invite-collaborators'), + 
prevState.filter( + (item) => !('key' in item) || item.key !== 'invite-collaborators', + ), ); } @@ -527,9 +538,64 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { ) ) { setHelpSupportDropdownMenuItems((prevState) => - prevState.filter((item) => item.key !== 'chat-support'), + prevState.filter((item) => !('key' in item) || item.key !== 'chat-support'), ); } + + if (changelog) { + const firstTwoFeatures = changelog.features.slice(0, 2); + const dropdownItems: SidebarItem[] = firstTwoFeatures.map( + (feature, idx) => ({ + key: `changelog-${idx + 1}`, + label: ( +
+ {feature.title} +
+ ), + icon: idx === 0 ? : , + itemKey: `changelog-${idx + 1}`, + }), + ); + const changelogKey = CHANGELOG_LABEL.toLowerCase().replace(' ', '-'); + setHelpSupportDropdownMenuItems((prevState) => { + if (dropdownItems.length === 0) { + return [ + ...prevState, + { + key: changelogKey, + label: ( +
+ {CHANGELOG_LABEL} + +
+ ), + icon: , + itemKey: changelogKey, + }, + ]; + } + + return [ + ...prevState, + { + type: 'group', + label: "WHAT's NEW", + }, + ...dropdownItems, + { + key: changelogKey, + label: ( +
+ {CHANGELOG_LABEL} + +
+ ), + icon: , + itemKey: changelogKey, + }, + ]; + }); + } // eslint-disable-next-line react-hooks/exhaustive-deps }, [ isAdmin, @@ -537,6 +603,7 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { isPremiumSupportEnabled, isCloudUser, trialInfo, + changelog, ]); const [isCurrentOrgSettings] = useComponentPermission( @@ -668,34 +735,41 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element { const handleHelpSupportMenuItemClick = (info: SidebarItem): void => { const item = helpSupportDropdownMenuItems.find( - (item) => item.key === info.key, + (item) => !('type' in item) && item.key === info.key, ); - if (item?.isExternal && item?.url) { + if (item && !('type' in item) && item.isExternal && item.url) { window.open(item.url, '_blank'); } - logEvent('Help Popover: Item clicked', { - menuRoute: item?.key, - menuLabel: item?.label, - }); + if (item && !('type' in item)) { + logEvent('Help Popover: Item clicked', { + menuRoute: item.key, + menuLabel: String(item.label), + }); - switch (item?.key) { - case ROUTES.SHORTCUTS: - history.push(ROUTES.SHORTCUTS); - break; - case 'invite-collaborators': - history.push(`${ROUTES.ORG_SETTINGS}#invite-team-members`); - break; - case 'chat-support': - if (window.pylon) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - window.Pylon('show'); - } - break; - default: - break; + switch (item.key) { + case ROUTES.SHORTCUTS: + history.push(ROUTES.SHORTCUTS); + break; + case 'invite-collaborators': + history.push(`${ROUTES.ORG_SETTINGS}#invite-team-members`); + break; + case 'chat-support': + if (window.pylon) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + window.Pylon('show'); + } + break; + case 'changelog-1': + case 'changelog-2': + case CHANGELOG_LABEL.toLowerCase().replace(' ', '-'): + toggleChangelogModal(); + break; + default: + break; + } } }; @@ -734,12 +808,12 @@ function SideNav({ isPinned }: { isPinned: boolean }): 
JSX.Element { }; const onClickVersionHandler = useCallback((): void => { - if (isCloudUser || !changelog) { + if (!changelog) { return; } - setShowChangelogModal(true); - }, [isCloudUser, changelog]); + toggleChangelogModal(); + }, [changelog, toggleChangelogModal]); useEffect(() => { if (!isLatestVersion && !isCloudUser) { @@ -1045,9 +1119,6 @@ function SideNav({ isPinned }: { isPinned: boolean }): JSX.Element {
- {showChangelogModal && ( - setShowChangelogModal(false)} /> - )} ); } diff --git a/frontend/src/container/SideNav/sideNav.types.ts b/frontend/src/container/SideNav/sideNav.types.ts index 4783ece9d8a1..cfd61b499847 100644 --- a/frontend/src/container/SideNav/sideNav.types.ts +++ b/frontend/src/container/SideNav/sideNav.types.ts @@ -22,6 +22,13 @@ export interface SidebarItem { itemKey?: string; } +export const CHANGELOG_LABEL = 'Full Changelog'; + +export interface DropdownSeparator { + type: 'divider' | 'group'; + label?: ReactNode; +} + export enum SecondaryMenuItemKey { Slack = 'slack', Version = 'version', diff --git a/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx b/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx index abe5f8610071..5e8d536d62d4 100644 --- a/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx +++ b/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx @@ -22,6 +22,7 @@ import { ChevronRight, Leaf, } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; import { Dispatch, SetStateAction, @@ -70,10 +71,10 @@ function SpanOverview({ handleCollapseUncollapse: (id: string, collapse: boolean) => void; selectedSpan: Span | undefined; setSelectedSpan: Dispatch>; - handleAddSpanToFunnel: (span: Span) => void; }): JSX.Element { const isRootSpan = span.level === 0; + const { hasEditPermission } = useAppContext(); let color = generateColor(span.serviceName, themeColors.traceDetailColors); if (span.hasError) { @@ -152,23 +153,32 @@ function SpanOverview({ {!!span.serviceName && !!span.name && (
· -
)} @@ -215,6 +225,27 @@ export function SpanDuration({ setHasActionButtons(false); }; + // Calculate text positioning to handle overflow cases + const textStyle = useMemo(() => { + const spanRightEdge = leftOffset + width; + const textWidthApprox = 8; // Approximate text width in percentage + + // If span would cause text overflow, right-align text to span end + if (leftOffset > 100 - textWidthApprox) { + return { + right: `${100 - spanRightEdge}%`, + color, + textAlign: 'right' as const, + }; + } + + // Default: left-align text to span start + return { + left: `${leftOffset}%`, + color, + }; + }, [leftOffset, width, color]); + return (
{`${toFixed(time, 2)} ${timeUnitName}`}
@@ -311,6 +342,16 @@ function getWaterfallColumns({ /> ), size: 450, + /** + * Note: The TanStack table currently does not support percentage-based column sizing. + * Therefore, we specify both `minSize` and `maxSize` for the "span-name" column to ensure + * that its width remains between 240px and 900px. Setting a `maxSize` here is important + * because the "span-duration" column has column resizing disabled, making it difficult + * to enforce a minimum width for that column. By constraining the "span-name" column, + * we indirectly control the minimum width available for the "span-duration" column. + */ + minSize: 240, + maxSize: 900, }), columnDefHelper.display({ id: 'span-duration', diff --git a/frontend/src/pages/ChannelsEdit/ChannelsEdit.styles.scss b/frontend/src/pages/ChannelsEdit/ChannelsEdit.styles.scss index 25ed5659e7e0..c16f11449008 100644 --- a/frontend/src/pages/ChannelsEdit/ChannelsEdit.styles.scss +++ b/frontend/src/pages/ChannelsEdit/ChannelsEdit.styles.scss @@ -12,3 +12,14 @@ margin-bottom: 16px; } } + +.lightMode { + .edit-alert-channels-container { + background: var(--bg-vanilla-100); + border-color: var(--bg-vanilla-300); + + .form-alert-channels-title { + color: var(--bg-ink-100); + } + } +} diff --git a/frontend/src/pages/Settings/Settings.tsx b/frontend/src/pages/Settings/Settings.tsx index 4117e995545c..0b5252e519fc 100644 --- a/frontend/src/pages/Settings/Settings.tsx +++ b/frontend/src/pages/Settings/Settings.tsx @@ -110,7 +110,7 @@ function SettingsPage(): JSX.Element { item.key === ROUTES.INTEGRATIONS || item.key === ROUTES.API_KEYS || item.key === ROUTES.ORG_SETTINGS || - item.key === ROUTES.SHORTCUTS + item.key === ROUTES.INGESTION_SETTINGS ? true : item.isEnabled, })); @@ -120,7 +120,11 @@ function SettingsPage(): JSX.Element { // eslint-disable-next-line sonarjs/no-identical-functions updatedItems = updatedItems.map((item) => ({ ...item, - isEnabled: item.key === ROUTES.INTEGRATIONS ? 
true : item.isEnabled, + isEnabled: + item.key === ROUTES.INTEGRATIONS || + item.key === ROUTES.INGESTION_SETTINGS + ? true + : item.isEnabled, })); } } @@ -130,9 +134,7 @@ function SettingsPage(): JSX.Element { updatedItems = updatedItems.map((item) => ({ ...item, isEnabled: - item.key === ROUTES.API_KEYS || - item.key === ROUTES.ORG_SETTINGS || - item.key === ROUTES.SHORTCUTS + item.key === ROUTES.API_KEYS || item.key === ROUTES.ORG_SETTINGS ? true : item.isEnabled, })); diff --git a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelConfiguration.tsx b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelConfiguration.tsx index 799e53dda786..824de35b03ae 100644 --- a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelConfiguration.tsx +++ b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelConfiguration.tsx @@ -8,6 +8,7 @@ import { PencilLine } from 'lucide-react'; import FunnelItemPopover from 'pages/TracesFunnels/components/FunnelsList/FunnelItemPopover'; import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext'; import CopyToClipboard from 'periscope/components/CopyToClipboard'; +import { useAppContext } from 'providers/App/App'; import { memo, useState } from 'react'; import { Span } from 'types/api/trace/getTraceV2'; import { FunnelData } from 'types/api/traceFunnels'; @@ -33,6 +34,7 @@ function FunnelConfiguration({ triggerAutoSave, showNotifications, }: FunnelConfigurationProps): JSX.Element { + const { hasEditPermission } = useAppContext(); const { triggerSave } = useFunnelContext(); const { isPopoverOpen, @@ -62,7 +64,10 @@ function FunnelConfiguration({
} onClick={(): void => setIsDescriptionModalOpen(true)} aria-label="Edit Funnel Description" + disabled={!hasEditPermission} /> diff --git a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.styles.scss b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.styles.scss index fe54b9ca80c6..2e5315a6b33d 100644 --- a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.styles.scss +++ b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.styles.scss @@ -10,6 +10,37 @@ border: 1px solid var(--bg-slate-500); border-radius: 6px; width: 100%; + + &--readonly { + opacity: 0.7; + + .filters { + pointer-events: none; + .ant-select-selector { + cursor: not-allowed; + } + + .ant-select { + cursor: not-allowed; + } + + .query-builder-search-v2 { + .ant-select-selector { + cursor: not-allowed; + } + + .ant-select { + cursor: not-allowed; + } + } + } + + .error__switch { + opacity: 0.5; + cursor: not-allowed; + } + } + .step-popover { opacity: 0; width: 22px; diff --git a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.tsx b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.tsx index f7eb60ae0891..9788baf52768 100644 --- a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.tsx +++ b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStep.tsx @@ -1,12 +1,14 @@ import './FunnelStep.styles.scss'; import { Button, Divider, Form, Switch, Tooltip } from 'antd'; +import cx from 'classnames'; import { FilterSelect } from 'components/CeleryOverview/CeleryOverviewConfigOptions/CeleryOverviewConfigOptions'; import { QueryParams } from 'constants/query'; import { initialQueriesMap } from 'constants/queryBuilder'; import QueryBuilderSearchV2 from 'container/QueryBuilder/filters/QueryBuilderSearchV2/QueryBuilderSearchV2'; import { HardHat, PencilLine } from 
'lucide-react'; import { useFunnelContext } from 'pages/TracesFunnels/FunnelContext'; +import { useAppContext } from 'providers/App/App'; import { useMemo, useState } from 'react'; import { FunnelStepData } from 'types/api/traceFunnels'; import { DataSource } from 'types/common/queryBuilder'; @@ -69,8 +71,14 @@ function FunnelStep({ const query = updatedCurrentQuery?.builder?.queryData[0] || null; + const { hasEditPermission } = useAppContext(); + return ( -
+
@@ -92,12 +100,19 @@ function FunnelStep({ )}
- +
@@ -144,8 +163,11 @@ function FunnelStep({ shouldSetQueryParams={false} values={stepData.span_name} isMultiple={false} - onChange={(v): void => - onStepChange(index, { span_name: (v ?? '') as string }) + onChange={ + hasEditPermission + ? (v): void => + onStepChange(index, { span_name: (v ?? '') as string }) + : undefined } /> @@ -156,7 +178,11 @@ function FunnelStep({ onStepChange(index, { filters: query })} + onChange={ + hasEditPermission + ? (query): void => onStepChange(index, { filters: query }) + : (): void => {} + } hasPopupContainer={false} placeholder="Search for filters..." suffixIcon={} @@ -172,6 +198,7 @@ function FunnelStep({ className="error__switch" size="small" checked={stepData.has_errors} + disabled={!hasEditPermission} onChange={(): void => onStepChange(index, { has_errors: !stepData.has_errors }) } diff --git a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStepPopover.tsx b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStepPopover.tsx index 7ddbd17569e5..6684a58064ab 100644 --- a/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStepPopover.tsx +++ b/frontend/src/pages/TracesFunnelDetails/components/FunnelConfiguration/FunnelStepPopover.tsx @@ -1,6 +1,7 @@ import { Button, Popover, Tooltip } from 'antd'; import cx from 'classnames'; import { Ellipsis, PencilLine, Trash2 } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; import { useState } from 'react'; import { FunnelStepData } from 'types/api/traceFunnels'; @@ -27,6 +28,7 @@ interface FunnelStepActionsProps { setIsAddDetailsModalOpen: (isOpen: boolean) => void; setIsDeleteModalOpen: (isOpen: boolean) => void; stepsCount: number; + hasEditPermission: boolean; } function FunnelStepActions({ @@ -34,6 +36,7 @@ function FunnelStepActions({ setIsAddDetailsModalOpen, setIsDeleteModalOpen, stepsCount, + hasEditPermission, }: FunnelStepActionsProps): JSX.Element { return (
@@ -41,6 +44,7 @@ function FunnelStepActions({ type="text" className="funnel-item__action-btn" icon={} + disabled={!hasEditPermission} onClick={(): void => { setIsPopoverOpen(false); setIsAddDetailsModalOpen(true); @@ -49,12 +53,21 @@ function FunnelStepActions({ Add details - + + + )}
{/* Display InterStepConfig only between steps */} @@ -76,23 +87,41 @@ function StepsContent({ className="steps-content__add-step" description={ !isTraceDetailsPage ? ( - + + ) : ( - + + ) } /> diff --git a/frontend/src/pages/TracesFunnels/components/FunnelsEmptyState/FunnelsEmptyState.tsx b/frontend/src/pages/TracesFunnels/components/FunnelsEmptyState/FunnelsEmptyState.tsx index f6a50ab892d0..78bf63f80216 100644 --- a/frontend/src/pages/TracesFunnels/components/FunnelsEmptyState/FunnelsEmptyState.tsx +++ b/frontend/src/pages/TracesFunnels/components/FunnelsEmptyState/FunnelsEmptyState.tsx @@ -3,6 +3,7 @@ import './FunnelsEmptyState.styles.scss'; import { Button } from 'antd'; import LearnMore from 'components/LearnMore/LearnMore'; import { Plus } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; interface FunnelsEmptyStateProps { onCreateFunnel?: () => void; @@ -11,6 +12,8 @@ interface FunnelsEmptyStateProps { function FunnelsEmptyState({ onCreateFunnel, }: FunnelsEmptyStateProps): JSX.Element { + const { hasEditPermission } = useAppContext(); + return (
@@ -29,14 +32,16 @@ function FunnelsEmptyState({
- + {hasEditPermission && ( + + )}
diff --git a/frontend/src/pages/TracesFunnels/components/FunnelsList/FunnelItemPopover.tsx b/frontend/src/pages/TracesFunnels/components/FunnelsList/FunnelItemPopover.tsx index 2fbc21b172bf..2da9a99fcd0b 100644 --- a/frontend/src/pages/TracesFunnels/components/FunnelsList/FunnelItemPopover.tsx +++ b/frontend/src/pages/TracesFunnels/components/FunnelsList/FunnelItemPopover.tsx @@ -1,6 +1,7 @@ -import { Button, Popover } from 'antd'; +import { Button, Popover, Tooltip } from 'antd'; import cx from 'classnames'; import { Ellipsis, PencilLine, Trash2 } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; import { useState } from 'react'; import { FunnelData } from 'types/api/traceFunnels'; @@ -61,6 +62,7 @@ function FunnelItemPopover({ }: FunnelItemPopoverProps): JSX.Element { const [isRenameModalOpen, setIsRenameModalOpen] = useState(false); const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false); + const { hasEditPermission } = useAppContext(); const handleRenameCancel = (): void => { setIsRenameModalOpen(false); @@ -71,6 +73,19 @@ function FunnelItemPopover({ e.stopPropagation(); }; + if (!hasEditPermission) { + return ( + + + +
); } diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss index 56b07ff41393..53667471b885 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss @@ -48,7 +48,7 @@ $dark-theme: 'darkMode'; &__actions { display: flex; align-items: center; - gap: 16px; + gap: 8px; .ant-btn-link { color: var(--text-vanilla-400); diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx index e45900366598..fd279af05cfc 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.test.tsx @@ -24,9 +24,6 @@ describe('WorkspaceLocked', () => { }); expect(workspaceLocked).toBeInTheDocument(); - const gotQuestionText = await screen.findByText(/got question?/i); - expect(gotQuestionText).toBeInTheDocument(); - const contactUsBtn = await screen.findByRole('button', { name: /Contact Us/i, }); diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx index dc680983ceb8..dd5db716b90c 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx @@ -18,6 +18,7 @@ import { } from 'antd'; import logEvent from 'api/common/logEvent'; import updateCreditCardApi from 'api/v1/checkout/create'; +import RefreshPaymentStatus from 'components/RefreshPaymentStatus/RefreshPaymentStatus'; import ROUTES from 'constants/routes'; import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; @@ -289,26 +290,28 @@ export default function WorkspaceBlocked(): JSX.Element { {isAdmin && ( - + + + + + )} - - Got Questions? - - - - - - + + + + + + + + + + )}
diff --git a/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx b/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx index 3633eb7135d5..a58f0c8d6a01 100644 --- a/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx +++ b/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx @@ -4,6 +4,7 @@ import { Alert, Button, Col, + Flex, Modal, Row, Skeleton, @@ -11,6 +12,7 @@ import { Typography, } from 'antd'; import manageCreditCardApi from 'api/v1/portal/create'; +import RefreshPaymentStatus from 'components/RefreshPaymentStatus/RefreshPaymentStatus'; import ROUTES from 'constants/routes'; import dayjs from 'dayjs'; import { useNotifications } from 'hooks/useNotifications'; @@ -146,9 +148,9 @@ function WorkspaceSuspended(): JSX.Element { justify="center" align="middle" className="workspace-suspended__modal__cta" - gutter={[16, 16]} + gutter={[8, 8]} > - + - + + )}
diff --git a/frontend/src/periscope.scss b/frontend/src/periscope.scss index a5f0f598f9d9..71a23dc14f89 100644 --- a/frontend/src/periscope.scss +++ b/frontend/src/periscope.scss @@ -54,6 +54,20 @@ } } + &.text { + color: var(--bg-vanilla-100) !important; + background-color: transparent !important; + border: none; + box-shadow: none; + box-shadow: none; + padding: 4px 4px; + + &:hover { + color: var(--bg-vanilla-300) !important; + background-color: transparent !important; + } + } + &.success { color: var(--bg-forest-400) !important; border-radius: 2px; diff --git a/frontend/src/providers/App/App.tsx b/frontend/src/providers/App/App.tsx index e7b9ab9aa95a..9b01f1a5bf9f 100644 --- a/frontend/src/providers/App/App.tsx +++ b/frontend/src/providers/App/App.tsx @@ -61,6 +61,8 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { const [org, setOrg] = useState(null); const [changelog, setChangelog] = useState(null); + const [showChangelogModal, setShowChangelogModal] = useState(false); + // if the user.id is not present, for migration older cases then we need to logout only for current logged in users! 
useEffect(() => { if (!user.id && isLoggedIn) { @@ -262,6 +264,10 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { [setChangelog], ); + const toggleChangelogModal = useCallback(() => { + setShowChangelogModal((prev) => !prev); + }, []); + // global event listener for AFTER_LOGIN event to start the user fetch post all actions are complete useGlobalEventListener('AFTER_LOGIN', (event) => { if (event.detail) { @@ -306,13 +312,17 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { orgPreferencesFetchError, activeLicense, changelog, + showChangelogModal, activeLicenseRefetch, updateUser, updateOrgPreferences, updateUserPreferenceInContext, updateOrg, updateChangelog, + toggleChangelogModal, versionData: versionData?.payload || null, + hasEditPermission: + user?.role === USER_ROLES.ADMIN || user?.role === USER_ROLES.EDITOR, }), [ trialInfo, @@ -331,9 +341,11 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { activeLicenseRefetch, orgPreferencesFetchError, changelog, + showChangelogModal, updateUserPreferenceInContext, updateOrg, updateChangelog, + toggleChangelogModal, user, userFetchError, versionData, diff --git a/frontend/src/providers/App/types.ts b/frontend/src/providers/App/types.ts index 1fc32028def4..40fdd1ac5099 100644 --- a/frontend/src/providers/App/types.ts +++ b/frontend/src/providers/App/types.ts @@ -28,13 +28,16 @@ export interface IAppContext { featureFlagsFetchError: unknown; orgPreferencesFetchError: unknown; changelog: ChangelogSchema | null; + showChangelogModal: boolean; activeLicenseRefetch: () => void; updateUser: (user: IUser) => void; updateOrgPreferences: (orgPreferences: OrgPreference[]) => void; updateUserPreferenceInContext: (userPreference: UserPreference) => void; updateOrg(orgId: string, updatedOrgName: string): void; updateChangelog(payload: ChangelogSchema): void; + toggleChangelogModal(): void; versionData: PayloadProps | null; + hasEditPermission: 
boolean; } // User diff --git a/frontend/src/tests/test-utils.tsx b/frontend/src/tests/test-utils.tsx index ba4257f93cd2..687c6b7e1b5f 100644 --- a/frontend/src/tests/test-utils.tsx +++ b/frontend/src/tests/test-utils.tsx @@ -22,7 +22,7 @@ import { LicenseState, LicenseStatus, } from 'types/api/licensesV3/getActive'; -import { ROLES } from 'types/roles'; +import { ROLES, USER_ROLES } from 'types/roles'; const queryClient = new QueryClient({ defaultOptions: { @@ -162,6 +162,7 @@ export function getAppContextMock( displayName: 'Pentagon', }, ], + hasEditPermission: role === USER_ROLES.ADMIN || role === USER_ROLES.EDITOR, isFetchingUser: false, userFetchError: null, featureFlags: [ @@ -233,11 +234,13 @@ export function getAppContextMock( isFetchingOrgPreferences: false, orgPreferencesFetchError: null, isLoggedIn: true, + showChangelogModal: false, updateUser: jest.fn(), updateOrg: jest.fn(), updateOrgPreferences: jest.fn(), activeLicenseRefetch: jest.fn(), updateChangelog: jest.fn(), + toggleChangelogModal: jest.fn(), versionData: { version: '1.0.0', ee: 'Y', diff --git a/frontend/src/types/api/changelog/getChangelogByVersion.ts b/frontend/src/types/api/changelog/getChangelogByVersion.ts index 735d8c4b02b5..cca55906a87f 100644 --- a/frontend/src/types/api/changelog/getChangelogByVersion.ts +++ b/frontend/src/types/api/changelog/getChangelogByVersion.ts @@ -8,6 +8,12 @@ export type Media = { [key: string]: any; // Allow other fields (e.g., mime, size) to be flexible }; +export enum DeploymentType { + ALL = 'All', + CLOUD_ONLY = 'Cloud only', + OSS_ONLY = 'OSS Only', +} + type Feature = { id: number; documentId: string; @@ -17,7 +23,7 @@ type Feature = { updatedAt: string; publishedAt: string; description: string; - deployment_type: string | null; + deployment_type: DeploymentType; media: Media | null; }; diff --git a/frontend/src/types/api/pipeline/def.ts b/frontend/src/types/api/pipeline/def.ts index 3aef70f1fdbc..956954cf3212 100644 --- 
a/frontend/src/types/api/pipeline/def.ts +++ b/frontend/src/types/api/pipeline/def.ts @@ -31,6 +31,13 @@ export interface ProcessorData { // time parser fields layout_type?: string; layout?: string; + + // json flattening fields + enable_flattening?: boolean; + enable_paths?: boolean; + path_prefix?: string; + enable_mapping?: boolean; + mapping?: Record; } export interface PipelineData { diff --git a/frontend/webpack.config.js b/frontend/webpack.config.js index d9f277d9c33d..1fab72625ac1 100644 --- a/frontend/webpack.config.js +++ b/frontend/webpack.config.js @@ -119,24 +119,8 @@ const config = { ], }, { - test: /\.(png|jpe?g|gif|svg)$/i, - use: [ - { - loader: 'file-loader', - }, - { - loader: 'image-webpack-loader', - options: { - bypassOnDebug: true, - optipng: { - optimizationLevel: 7, - }, - gifsicle: { - interlaced: false, - }, - }, - }, - ], + test: /\.(jpe?g|png|gif|svg)$/i, + type: 'asset', }, { test: /\.(ttf|eot|woff|woff2)$/, diff --git a/frontend/webpack.config.prod.js b/frontend/webpack.config.prod.js index b6ef58f3cdad..9cfe202cfa08 100644 --- a/frontend/webpack.config.prod.js +++ b/frontend/webpack.config.prod.js @@ -14,6 +14,7 @@ const CssMinimizerPlugin = require('css-minimizer-webpack-plugin'); const MiniCssExtractPlugin = require('mini-css-extract-plugin'); const { BundleAnalyzerPlugin } = require('webpack-bundle-analyzer'); const { RetryChunkLoadPlugin } = require('webpack-retry-chunk-load-plugin'); +const ImageMinimizerPlugin = require('image-minimizer-webpack-plugin'); dotenv.config(); @@ -135,24 +136,8 @@ const config = { ], }, { - test: /\.(png|jpe?g|gif|svg)$/i, - use: [ - { - loader: 'file-loader', - }, - { - loader: 'image-webpack-loader', - options: { - bypassOnDebug: true, - optipng: { - optimizationLevel: 7, - }, - gifsicle: { - interlaced: false, - }, - }, - }, - ], + test: /\.(jpe?g|png|gif|svg)$/i, + type: 'asset', }, { @@ -212,6 +197,55 @@ const config = { }, }), new CssMinimizerPlugin(), + new ImageMinimizerPlugin({ + minimizer: 
[ + { + implementation: ImageMinimizerPlugin.sharpMinify, + options: { + encodeOptions: { + jpeg: { + quality: 80, + }, + webp: { + lossless: true, + }, + avif: { + lossless: true, + }, + png: {}, + gif: {}, + }, + }, + }, + { + implementation: ImageMinimizerPlugin.imageminMinify, + options: { + plugins: [ + [ + 'svgo', + { + plugins: [ + { + name: 'preset-default', + params: { + overrides: { + removeViewBox: false, + addAttributesToSVGElement: { + params: { + attributes: [{ xmlns: 'http://www.w3.org/2000/svg' }], + }, + }, + }, + }, + }, + ], + }, + ], + ], + }, + }, + ], + }), ], }, performance: { diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 282bfef489be..400d7f9426db 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -2530,6 +2530,13 @@ dependencies: tslib "^2.0.0" +"@emnapi/runtime@^1.2.0": + version "1.4.4" + resolved "https://registry.yarnpkg.com/@emnapi/runtime/-/runtime-1.4.4.tgz#19a8f00719c51124e2d0fbf4aaad3fa7b0c92524" + integrity sha512-hHyapA4A3gPaDCNfiqyZUStTMqIkKRshqPIuDOXv1hcBnD4U3l8cP0T1HMCfGRxQ6V64TGCcoswChANyOAwbQg== + dependencies: + tslib "^2.4.0" + "@emotion/hash@^0.8.0": version "0.8.0" resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz" @@ -2662,6 +2669,119 @@ resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== +"@img/sharp-darwin-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.33.5.tgz#ef5b5a07862805f1e8145a377c8ba6e98813ca08" + integrity sha512-UT4p+iz/2H4twwAoLCqfA9UH5pI6DggwKEGuaPy7nCVQ8ZsiY5PIcrRvD1DzuY3qYL07NtIQcWnBSY/heikIFQ== + optionalDependencies: + "@img/sharp-libvips-darwin-arm64" "1.0.4" + +"@img/sharp-darwin-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.33.5.tgz#e03d3451cd9e664faa72948cc70a403ea4063d61" 
+ integrity sha512-fyHac4jIc1ANYGRDxtiqelIbdWkIuQaI84Mv45KvGRRxSAa7o7d1ZKAOBaYbnepLC1WqxfpimdeWfvqqSGwR2Q== + optionalDependencies: + "@img/sharp-libvips-darwin-x64" "1.0.4" + +"@img/sharp-libvips-darwin-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.0.4.tgz#447c5026700c01a993c7804eb8af5f6e9868c07f" + integrity sha512-XblONe153h0O2zuFfTAbQYAX2JhYmDHeWikp1LM9Hul9gVPjFY427k6dFEcOL72O01QxQsWi761svJ/ev9xEDg== + +"@img/sharp-libvips-darwin-x64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.0.4.tgz#e0456f8f7c623f9dbfbdc77383caa72281d86062" + integrity sha512-xnGR8YuZYfJGmWPvmlunFaWJsb9T/AO2ykoP3Fz/0X5XV2aoYBPkX6xqCQvUTKKiLddarLaxpzNe+b1hjeWHAQ== + +"@img/sharp-libvips-linux-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.0.4.tgz#979b1c66c9a91f7ff2893556ef267f90ebe51704" + integrity sha512-9B+taZ8DlyyqzZQnoeIvDVR/2F4EbMepXMc/NdVbkzsJbzkUjhXv/70GQJ7tdLA4YJgNP25zukcxpX2/SueNrA== + +"@img/sharp-libvips-linux-arm@1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.0.5.tgz#99f922d4e15216ec205dcb6891b721bfd2884197" + integrity sha512-gvcC4ACAOPRNATg/ov8/MnbxFDJqf/pDePbBnuBDcjsI8PssmjoKMAz4LtLaVi+OnSb5FK/yIOamqDwGmXW32g== + +"@img/sharp-libvips-linux-s390x@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.0.4.tgz#f8a5eb1f374a082f72b3f45e2fb25b8118a8a5ce" + integrity sha512-u7Wz6ntiSSgGSGcjZ55im6uvTrOxSIS8/dgoVMoiGE9I6JAfU50yH5BoDlYA1tcuGS7g/QNtetJnxA6QEsCVTA== + +"@img/sharp-libvips-linux-x64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.0.4.tgz#d4c4619cdd157774906e15770ee119931c7ef5e0" + integrity 
sha512-MmWmQ3iPFZr0Iev+BAgVMb3ZyC4KeFc3jFxnNbEPas60e1cIfevbtuyf9nDGIzOaW9PdnDciJm+wFFaTlj5xYw== + +"@img/sharp-libvips-linuxmusl-arm64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.0.4.tgz#166778da0f48dd2bded1fa3033cee6b588f0d5d5" + integrity sha512-9Ti+BbTYDcsbp4wfYib8Ctm1ilkugkA/uscUn6UXK1ldpC1JjiXbLfFZtRlBhjPZ5o1NCLiDbg8fhUPKStHoTA== + +"@img/sharp-libvips-linuxmusl-x64@1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.0.4.tgz#93794e4d7720b077fcad3e02982f2f1c246751ff" + integrity sha512-viYN1KX9m+/hGkJtvYYp+CCLgnJXwiQB39damAO7WMdKWlIhmYTfHjwSbQeUK/20vY154mwezd9HflVFM1wVSw== + +"@img/sharp-linux-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.33.5.tgz#edb0697e7a8279c9fc829a60fc35644c4839bb22" + integrity sha512-JMVv+AMRyGOHtO1RFBiJy/MBsgz0x4AWrT6QoEVVTyh1E39TrCUpTRI7mx9VksGX4awWASxqCYLCV4wBZHAYxA== + optionalDependencies: + "@img/sharp-libvips-linux-arm64" "1.0.4" + +"@img/sharp-linux-arm@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-arm/-/sharp-linux-arm-0.33.5.tgz#422c1a352e7b5832842577dc51602bcd5b6f5eff" + integrity sha512-JTS1eldqZbJxjvKaAkxhZmBqPRGmxgu+qFKSInv8moZ2AmT5Yib3EQ1c6gp493HvrvV8QgdOXdyaIBrhvFhBMQ== + optionalDependencies: + "@img/sharp-libvips-linux-arm" "1.0.5" + +"@img/sharp-linux-s390x@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.33.5.tgz#f5c077926b48e97e4a04d004dfaf175972059667" + integrity sha512-y/5PCd+mP4CA/sPDKl2961b+C9d+vPAveS33s6Z3zfASk2j5upL6fXVPZi7ztePZ5CuH+1kW8JtvxgbuXHRa4Q== + optionalDependencies: + "@img/sharp-libvips-linux-s390x" "1.0.4" + +"@img/sharp-linux-x64@0.33.5": + version "0.33.5" + resolved 
"https://registry.yarnpkg.com/@img/sharp-linux-x64/-/sharp-linux-x64-0.33.5.tgz#d806e0afd71ae6775cc87f0da8f2d03a7c2209cb" + integrity sha512-opC+Ok5pRNAzuvq1AG0ar+1owsu842/Ab+4qvU879ippJBHvyY5n2mxF1izXqkPYlGuP/M556uh53jRLJmzTWA== + optionalDependencies: + "@img/sharp-libvips-linux-x64" "1.0.4" + +"@img/sharp-linuxmusl-arm64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.33.5.tgz#252975b915894fb315af5deea174651e208d3d6b" + integrity sha512-XrHMZwGQGvJg2V/oRSUfSAfjfPxO+4DkiRh6p2AFjLQztWUuY/o8Mq0eMQVIY7HJ1CDQUJlxGGZRw1a5bqmd1g== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-arm64" "1.0.4" + +"@img/sharp-linuxmusl-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.33.5.tgz#3f4609ac5d8ef8ec7dadee80b560961a60fd4f48" + integrity sha512-WT+d/cgqKkkKySYmqoZ8y3pxx7lx9vVejxW/W4DOFMYVSkErR+w7mf2u8m/y4+xHe7yY9DAXQMWQhpnMuFfScw== + optionalDependencies: + "@img/sharp-libvips-linuxmusl-x64" "1.0.4" + +"@img/sharp-wasm32@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-wasm32/-/sharp-wasm32-0.33.5.tgz#6f44f3283069d935bb5ca5813153572f3e6f61a1" + integrity sha512-ykUW4LVGaMcU9lu9thv85CbRMAwfeadCJHRsg2GmeRa/cJxsVY9Rbd57JcMxBkKHag5U/x7TSBpScF4U8ElVzg== + dependencies: + "@emnapi/runtime" "^1.2.0" + +"@img/sharp-win32-ia32@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.33.5.tgz#1a0c839a40c5351e9885628c85f2e5dfd02b52a9" + integrity sha512-T36PblLaTwuVJ/zw/LaH0PdZkRz5rd3SmMHX8GSmR7vtNSP5Z6bQkExdSK7xGWyxLw4sUknBuugTelgw2faBbQ== + +"@img/sharp-win32-x64@0.33.5": + version "0.33.5" + resolved "https://registry.yarnpkg.com/@img/sharp-win32-x64/-/sharp-win32-x64-0.33.5.tgz#56f00962ff0c4e0eb93d34a047d29fa995e3e342" + integrity sha512-MpY/o8/8kj+EcnxwvrP4aTJSWw/aZ7JIGR4aBeZkZw5B7/Jn+tY9/VNwtcoGmdT7GfggGIU4kygOMSbYnOrAbg== + 
"@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" @@ -3683,10 +3803,10 @@ resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== -"@sindresorhus/is@^0.7.0": - version "0.7.0" - resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.7.0.tgz#9a06f4f137ee84d7df0460c1fdb1135ffa6c50fd" - integrity sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow== +"@sindresorhus/is@^4.0.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" + integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== "@sinonjs/commons@^1.7.0": version "1.8.6" @@ -3702,6 +3822,13 @@ dependencies: "@sinonjs/commons" "^1.7.0" +"@szmarczak/http-timer@^4.0.5": + version "4.0.6" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" + integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== + dependencies: + defer-to-connect "^2.0.0" + "@tanstack/react-table@8.20.6": version "8.20.6" resolved "https://registry.yarnpkg.com/@tanstack/react-table/-/react-table-8.20.6.tgz#a1f3103327aa59aa621931f4087a7604a21054d0" @@ -3778,6 +3905,11 @@ resolved "https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.4.3.tgz" integrity sha512-kCUc5MEwaEMakkO5x7aoD+DLi02ehmEM2QCGWvNqAS1dV/fAvORWEjnjsEIvml59M7Y5kCkWN6fCCyPOe8OL6Q== +"@tokenizer/token@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@tokenizer/token/-/token-0.3.0.tgz#fe98a93fe789247e998c75e74e9c7c63217aa276" + integrity 
sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A== + "@tootallnate/once@1": version "1.1.2" resolved "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz" @@ -3873,6 +4005,16 @@ dependencies: "@types/node" "*" +"@types/cacheable-request@^6.0.1": + version "6.0.3" + resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183" + integrity sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw== + dependencies: + "@types/http-cache-semantics" "*" + "@types/keyv" "^3.1.4" + "@types/node" "*" + "@types/responselike" "^1.0.0" + "@types/color-convert@*": version "2.0.0" resolved "https://registry.npmjs.org/@types/color-convert/-/color-convert-2.0.0.tgz" @@ -4074,14 +4216,6 @@ resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.14.tgz#319b63ad6df705ee2a65a73ef042c8271e696613" integrity sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg== -"@types/glob@^7.1.1": - version "7.2.0" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" - integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== - dependencies: - "@types/minimatch" "*" - "@types/node" "*" - "@types/graceful-fs@^4.1.2", "@types/graceful-fs@^4.1.3": version "4.1.6" resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.6.tgz#e14b2576a1c25026b7f02ede1de3b84c3a1efeae" @@ -4121,6 +4255,11 @@ resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz#4fc33a00c1d0c16987b1a20cf92d20614c55ac35" integrity sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg== +"@types/http-cache-semantics@*": + version "4.0.4" + resolved 
"https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz#b979ebad3919799c979b17c72621c0bc0a31c6c4" + integrity sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA== + "@types/http-errors@*": version "2.0.4" resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.4.tgz#7eb47726c391b7345a6ec35ad7f4de469cf5ba4f" @@ -4188,6 +4327,13 @@ resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== +"@types/keyv@^3.1.4": + version "3.1.4" + resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.4.tgz#3ccdb1c6751b0c7e52300bcdacd5bcbf8faa75b6" + integrity sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg== + dependencies: + "@types/node" "*" + "@types/lodash-es@^4.17.4": version "4.17.7" resolved "https://registry.npmjs.org/@types/lodash-es/-/lodash-es-4.17.7.tgz" @@ -4236,11 +4382,6 @@ dependencies: mini-css-extract-plugin "*" -"@types/minimatch@*": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" - integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== - "@types/minimist@^1.2.0": version "1.2.2" resolved "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.2.tgz" @@ -4435,6 +4576,13 @@ dependencies: redux "^4.0.5" +"@types/responselike@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.3.tgz#cc29706f0a397cfe6df89debfe4bf5cea159db50" + integrity sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw== + dependencies: + "@types/node" "*" + "@types/retry@0.12.2": version "0.12.2" resolved 
"https://registry.yarnpkg.com/@types/retry/-/retry-0.12.2.tgz#ed279a64fa438bb69f2480eda44937912bb7480a" @@ -5064,6 +5212,13 @@ abab@^2.0.3, abab@^2.0.5: resolved "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz" integrity sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA== +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: version "1.3.8" resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" @@ -5337,18 +5492,6 @@ anymatch@^3.0.3, anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" -arch@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/arch/-/arch-2.2.0.tgz#1bc47818f305764f23ab3306b0bfc086c5a29d11" - integrity sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ== - -archive-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/archive-type/-/archive-type-4.0.0.tgz#f92e72233056dfc6969472749c267bdb046b1d70" - integrity sha512-zV4Ky0v1F8dBrdYElwTvQhweQ0P7Kwc1aluqJsYtOBP01jXcWCyW2IEfI1YiqsG+Iy7ZR+o5LF1N+PGECBxHWA== - dependencies: - file-type "^4.2.0" - arg@^4.1.0: version "4.1.3" resolved "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz" @@ -5412,6 +5555,11 @@ array-union@^2.1.0: resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== +array-union@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-3.0.1.tgz#da52630d327f8b88cfbfb57728e2af5cd9b6b975" + integrity 
sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw== + array.prototype.findlastindex@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz#b37598438f97b579166940814e2c0493a4f50207" @@ -6029,67 +6177,11 @@ big.js@^5.2.2: resolved "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -bin-build@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bin-build/-/bin-build-3.0.0.tgz#c5780a25a8a9f966d8244217e6c1f5082a143861" - integrity sha512-jcUOof71/TNAI2uM5uoUaDq2ePcVBQ3R/qhxAz1rX7UfvduAL/RXD3jXzvn8cVcDJdGVkiR1shal3OH0ImpuhA== - dependencies: - decompress "^4.0.0" - download "^6.2.2" - execa "^0.7.0" - p-map-series "^1.0.0" - tempfile "^2.0.0" - -bin-check@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/bin-check/-/bin-check-4.1.0.tgz#fc495970bdc88bb1d5a35fc17e65c4a149fc4a49" - integrity sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA== - dependencies: - execa "^0.7.0" - executable "^4.1.0" - -bin-version-check@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/bin-version-check/-/bin-version-check-4.0.0.tgz#7d819c62496991f80d893e6e02a3032361608f71" - integrity sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ== - dependencies: - bin-version "^3.0.0" - semver "^5.6.0" - semver-truncate "^1.1.2" - -bin-version@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/bin-version/-/bin-version-3.1.0.tgz#5b09eb280752b1bd28f0c9db3f96f2f43b6c0839" - integrity sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ== - dependencies: - execa "^1.0.0" - find-versions "^3.0.0" - -bin-wrapper@^4.0.0, bin-wrapper@^4.0.1: - version "4.1.0" - resolved 
"https://registry.yarnpkg.com/bin-wrapper/-/bin-wrapper-4.1.0.tgz#99348f2cf85031e3ef7efce7e5300aeaae960605" - integrity sha512-hfRmo7hWIXPkbpi0ZltboCMVrU+0ClXR/JgbCKKjlDjQf6igXa7OwdqNcFWQZPZTgiY7ZpzE3+LjjkLiTN2T7Q== - dependencies: - bin-check "^4.1.0" - bin-version-check "^4.0.0" - download "^7.1.0" - import-lazy "^3.1.0" - os-filter-obj "^2.0.0" - pify "^4.0.1" - binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bl@^1.0.0: - version "1.2.3" - resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" - integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== - dependencies: - readable-stream "^2.3.5" - safe-buffer "^5.1.1" - bl@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" @@ -6195,34 +6287,11 @@ bser@2.1.1: dependencies: node-int64 "^0.4.0" -buffer-alloc-unsafe@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" - integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== - -buffer-alloc@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" - integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== - dependencies: - buffer-alloc-unsafe "^1.1.0" - buffer-fill "^1.0.0" - -buffer-crc32@~0.2.3: - version "0.2.13" - resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242" - integrity 
sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ== - buffer-equal@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz" integrity sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA== -buffer-fill@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" - integrity sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ== - buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" @@ -6233,7 +6302,7 @@ buffer-to-arraybuffer@^0.0.5: resolved "https://registry.npmjs.org/buffer-to-arraybuffer/-/buffer-to-arraybuffer-0.0.5.tgz" integrity sha512-3dthu5CYiVB1DEJp61FtApNnNndTckcqe4pFcLdvHtrpG+kcyekCJKg4MRiDcFW7A6AODnXB9U4dwQiCW5kzJQ== -buffer@^5.2.1, buffer@^5.5.0: +buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -6266,18 +6335,23 @@ bytes@3.1.2: resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== -cacheable-request@^2.1.1: - version "2.1.4" - resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d" - integrity sha512-vag0O2LKZ/najSoUwDbVlnlCFvhBE/7mGTY2B5FgCBDcRD+oVV1HYTOwM6JZfMg/hIcM6IwnTZ1uQQL5/X3xIQ== +cacheable-lookup@^5.0.3: + version "5.0.4" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" + integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== + 
+cacheable-request@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.4.tgz#7a33ebf08613178b403635be7b899d3e69bbe817" + integrity sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg== dependencies: - clone-response "1.0.2" - get-stream "3.0.0" - http-cache-semantics "3.8.1" - keyv "3.0.0" - lowercase-keys "1.0.0" - normalize-url "2.0.1" - responselike "1.0.2" + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^4.0.0" + lowercase-keys "^2.0.0" + normalize-url "^6.0.1" + responselike "^2.0.0" call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" @@ -6371,16 +6445,6 @@ cardboard-vr-display@^1.0.19: nosleep.js "^0.7.0" webvr-polyfill-dpdb "^1.0.17" -caw@^2.0.0, caw@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/caw/-/caw-2.0.1.tgz#6c3ca071fc194720883c2dc5da9b074bfc7e9e95" - integrity sha512-Cg8/ZSBEa8ZVY9HspcGUYaK63d/bN7rqS3CYCzEGUxuYv6UlmcjzDUz2fCFFHyTvUW5Pk0I+3hkA3iXlIj6guA== - dependencies: - get-proxy "^2.0.0" - isurl "^1.0.0-alpha5" - tunnel-agent "^0.6.0" - url-to-options "^1.0.1" - ccount@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/ccount/-/ccount-2.0.1.tgz#17a3bf82302e0870d6da43a01311a8bc02a3ecf5" @@ -6585,10 +6649,10 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== +clone-response@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" @@ -6658,7 +6722,7 @@ 
color-string@^1.9.0: color-name "^1.0.0" simple-swizzle "^0.2.2" -color@^4.2.1: +color@^4.2.1, color@^4.2.3: version "4.2.3" resolved "https://registry.npmjs.org/color/-/color-4.2.3.tgz" integrity sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A== @@ -6693,7 +6757,7 @@ comma-separated-tokens@^2.0.0: resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz#4e89c9458acb61bc8fef19f4529973b2392839ee" integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== -commander@2, commander@^2.20.0, commander@^2.20.3, commander@^2.8.1: +commander@2, commander@^2.20.0, commander@^2.20.3: version "2.20.3" resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -6782,14 +6846,6 @@ concat-map@0.0.1: resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== -config-chain@^1.1.11: - version "1.1.13" - resolved "https://registry.yarnpkg.com/config-chain/-/config-chain-1.1.13.tgz#fad0795aa6a6cdaff9ed1b68e9dff94372c232f4" - integrity sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ== - dependencies: - ini "^1.3.4" - proto-list "~1.2.1" - confusing-browser-globals@^1.0.10: version "1.0.11" resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz" @@ -6800,7 +6856,7 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -content-disposition@0.5.4, content-disposition@^0.5.2: +content-disposition@0.5.4: version "0.5.4" resolved 
"https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== @@ -6954,7 +7010,7 @@ cross-fetch@3.1.5: dependencies: node-fetch "2.6.7" -cross-spawn@7.0.5, cross-spawn@^5.0.1, cross-spawn@^6.0.0, cross-spawn@^6.0.5, cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@7.0.5, cross-spawn@^6.0.5, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.5.tgz#910aac880ff5243da96b728bc6521a5f6c2f2f82" integrity sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug== @@ -7196,14 +7252,6 @@ custom-event-polyfill@^1.0.6: resolved "https://registry.npmjs.org/custom-event-polyfill/-/custom-event-polyfill-1.0.7.tgz" integrity sha512-TDDkd5DkaZxZFM8p+1I3yAlvM3rSr1wbrOliG4yJiwinMZN8z/iGL7BTlDkrJcYTmgUSb4ywVCc3ZaUtOtC76w== -cwebp-bin@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/cwebp-bin/-/cwebp-bin-7.0.1.tgz#cb1303bf43f645ba5b2ece342773c4a93574d4f4" - integrity sha512-Ko5ADY74/dbfd8xG0+f+MUP9UKjCe1TG4ehpW0E5y4YlPdwDJlGrSzSR4/Yonxpm9QmZE1RratkIxFlKeyo3FA== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.1" - "d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3": version "3.2.3" resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.3.tgz" @@ -7475,65 +7523,19 @@ decode-uri-component@^0.2.0: resolved "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz" integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== -decompress-response@^3.2.0, decompress-response@^3.3.0: +decompress-response@^3.3.0: version "3.3.0" resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz" integrity 
sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== dependencies: mimic-response "^1.0.0" -decompress-tar@^4.0.0, decompress-tar@^4.1.0, decompress-tar@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-tar/-/decompress-tar-4.1.1.tgz#718cbd3fcb16209716e70a26b84e7ba4592e5af1" - integrity sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ== +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== dependencies: - file-type "^5.2.0" - is-stream "^1.1.0" - tar-stream "^1.5.2" - -decompress-tarbz2@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz#3082a5b880ea4043816349f378b56c516be1a39b" - integrity sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A== - dependencies: - decompress-tar "^4.1.0" - file-type "^6.1.0" - is-stream "^1.1.0" - seek-bzip "^1.0.5" - unbzip2-stream "^1.0.9" - -decompress-targz@^4.0.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/decompress-targz/-/decompress-targz-4.1.1.tgz#c09bc35c4d11f3de09f2d2da53e9de23e7ce1eee" - integrity sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w== - dependencies: - decompress-tar "^4.1.1" - file-type "^5.2.0" - is-stream "^1.1.0" - -decompress-unzip@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/decompress-unzip/-/decompress-unzip-4.0.1.tgz#deaaccdfd14aeaf85578f733ae8210f9b4848f69" - integrity sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw== - dependencies: - file-type "^3.8.0" - get-stream "^2.2.0" - pify "^2.3.0" - yauzl "^2.4.2" - 
-decompress@^4.0.0, decompress@^4.2.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/decompress/-/decompress-4.2.1.tgz#007f55cc6a62c055afa37c07eb6a4ee1b773f118" - integrity sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ== - dependencies: - decompress-tar "^4.0.0" - decompress-tarbz2 "^4.0.0" - decompress-targz "^4.0.0" - decompress-unzip "^4.0.1" - graceful-fs "^4.1.10" - make-dir "^1.0.0" - pify "^2.3.0" - strip-dirs "^2.0.0" + mimic-response "^3.1.0" dedent@^0.7.0: version "0.7.0" @@ -7601,6 +7603,11 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" +defer-to-connect@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" + integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== + define-data-property@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" @@ -7655,6 +7662,11 @@ destroy@1.2.0: resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== +detect-libc@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.4.tgz#f04715b8ba815e53b4d8109655b6508a6865a7e8" + integrity sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA== + detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" @@ -7850,51 +7862,11 @@ dotenv@^16.3.1: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.5.tgz#cdd3b3b604cb327e286b4762e13502f717cb099f" integrity sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg== -download@^6.2.2: - version "6.2.5" - resolved 
"https://registry.yarnpkg.com/download/-/download-6.2.5.tgz#acd6a542e4cd0bb42ca70cfc98c9e43b07039714" - integrity sha512-DpO9K1sXAST8Cpzb7kmEhogJxymyVUd5qz/vCOSyvwtp2Klj2XcDt5YUuasgxka44SxF0q5RriKIwJmQHG2AuA== - dependencies: - caw "^2.0.0" - content-disposition "^0.5.2" - decompress "^4.0.0" - ext-name "^5.0.0" - file-type "5.2.0" - filenamify "^2.0.0" - get-stream "^3.0.0" - got "^7.0.0" - make-dir "^1.0.0" - p-event "^1.0.0" - pify "^3.0.0" - -download@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/download/-/download-7.1.0.tgz#9059aa9d70b503ee76a132897be6dec8e5587233" - integrity sha512-xqnBTVd/E+GxJVrX5/eUJiLYjCGPwMpdL+jGhGU57BvtcA7wwhtHVbXBeUk51kOpW3S7Jn3BQbN9Q1R1Km2qDQ== - dependencies: - archive-type "^4.0.0" - caw "^2.0.1" - content-disposition "^0.5.2" - decompress "^4.2.0" - ext-name "^5.0.0" - file-type "^8.1.0" - filenamify "^2.0.0" - get-stream "^3.0.0" - got "^8.3.1" - make-dir "^1.2.0" - p-event "^2.1.0" - pify "^3.0.0" - dtype@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/dtype/-/dtype-2.0.0.tgz" integrity sha512-s2YVcLKdFGS0hpFqJaTwscsyt0E8nNFdmo73Ocd81xNPj4URI4rj6D60A+vFMIw7BXWlb4yRkEwfBqcZzPGiZg== -duplexer3@^0.1.4: - version "0.1.5" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" - integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== - duplexer@^0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -7955,7 +7927,7 @@ encodeurl@~2.0.0: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== -end-of-stream@^1.0.0, end-of-stream@^1.1.0: +end-of-stream@^1.1.0: version "1.4.4" resolved 
"https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== @@ -8165,7 +8137,7 @@ escape-html@~1.0.3: resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== @@ -8550,6 +8522,11 @@ event-source-polyfill@1.0.31: resolved "https://registry.npmjs.org/event-source-polyfill/-/event-source-polyfill-1.0.31.tgz" integrity sha512-4IJSItgS/41IxN5UVAVuAyczwZF7ZIEsM1XAoUzIHA6A+xzusEZUutdXz2Nr+MQPLxfTiCvqE79/C8HT8fKFvA== +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + eventemitter3@5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-5.0.1.tgz#53f5ffd0a492ac800721bb42c66b841de96423c4" @@ -8565,58 +8542,6 @@ events@^3.2.0, events@^3.3.0: resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -exec-buffer@^3.0.0, exec-buffer@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/exec-buffer/-/exec-buffer-3.2.0.tgz#b1686dbd904c7cf982e652c1f5a79b1e5573082b" - integrity sha512-wsiD+2Tp6BWHoVv3B+5Dcx6E7u5zky+hUwOHjuH2hKSLR3dvRmX8fk8UD8uqQixHs4Wk6eDmiegVrMPjKj7wpA== - dependencies: - execa "^0.7.0" - p-finally 
"^1.0.0" - pify "^3.0.0" - rimraf "^2.5.4" - tempfile "^2.0.0" - -execa@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" - integrity sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw== - dependencies: - cross-spawn "^5.0.1" - get-stream "^3.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-4.1.0.tgz#4e5491ad1572f2f17a77d388c6c857135b22847a" - integrity sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA== - dependencies: - cross-spawn "^7.0.0" - get-stream "^5.0.0" - human-signals "^1.1.1" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.0" - onetime "^5.1.0" - signal-exit "^3.0.2" - strip-final-newline "^2.0.0" - execa@^5.0.0, execa@^5.1.1: version "5.1.1" resolved "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz" @@ -8632,13 +8557,6 @@ execa@^5.0.0, execa@^5.1.1: signal-exit "^3.0.3" strip-final-newline "^2.0.0" -executable@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/executable/-/executable-4.1.1.tgz#41532bff361d3e57af4d763b70582db18f5d133c" - integrity sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg== - dependencies: - pify "^2.2.0" - exit@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz" @@ -8702,21 +8620,6 
@@ express@^4.21.2: utils-merge "1.0.1" vary "~1.1.2" -ext-list@^2.0.0: - version "2.2.2" - resolved "https://registry.yarnpkg.com/ext-list/-/ext-list-2.2.2.tgz#0b98e64ed82f5acf0f2931babf69212ef52ddd37" - integrity sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA== - dependencies: - mime-db "^1.28.0" - -ext-name@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ext-name/-/ext-name-5.0.0.tgz#70781981d183ee15d13993c8822045c506c8f0a6" - integrity sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ== - dependencies: - ext-list "^2.0.0" - sort-keys-length "^1.0.0" - extend@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" @@ -8741,7 +8644,7 @@ fast-diff@^1.1.2: resolved "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz" integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== -fast-glob@^3.0.3, fast-glob@^3.2.11, fast-glob@^3.3.0: +fast-glob@^3.2.11, fast-glob@^3.2.7, fast-glob@^3.3.0: version "3.3.3" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.3.tgz#d06d585ce8dba90a16b0505c543c3ccfb3aeb818" integrity sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg== @@ -8833,13 +8736,6 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" -fd-slicer@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fd-slicer/-/fd-slicer-1.1.0.tgz#25c7c89cb1f9077f8891bbe61d8f390eae256f1e" - integrity sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g== - dependencies: - pend "~1.2.0" - fflate@^0.4.8: version "0.4.8" resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae" @@ -8872,54 +8768,14 @@ file-saver@^2.0.2: resolved 
"https://registry.yarnpkg.com/file-saver/-/file-saver-2.0.5.tgz#d61cfe2ce059f414d899e9dd6d4107ee25670c38" integrity sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA== -file-type@5.2.0, file-type@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-5.2.0.tgz#2ddbea7c73ffe36368dfae49dc338c058c2b8ad6" - integrity sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ== - -file-type@^10.4.0, file-type@^10.5.0: - version "10.11.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-10.11.0.tgz#2961d09e4675b9fb9a3ee6b69e9cd23f43fd1890" - integrity sha512-uzk64HRpUZyTGZtVuvrjP0FYxzQrBf4rojot6J65YMEbwBLB0CWm0CLojVpwpmFmxcE/lkvYICgfcGozbBq6rw== - -file-type@^12.0.0: - version "12.4.2" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-12.4.2.tgz#a344ea5664a1d01447ee7fb1b635f72feb6169d9" - integrity sha512-UssQP5ZgIOKelfsaB5CuGAL+Y+q7EmONuiwF3N5HAH0t27rvrttgi6Ra9k/+DVaY9UF6+ybxu5pOXLUdA8N7Vg== - -file-type@^3.8.0: - version "3.9.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-3.9.0.tgz#257a078384d1db8087bc449d107d52a52672b9e9" - integrity sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA== - -file-type@^4.2.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-4.4.0.tgz#1b600e5fca1fbdc6e80c0a70c71c8dba5f7906c5" - integrity sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ== - -file-type@^6.1.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-6.2.0.tgz#e50cd75d356ffed4e306dc4f5bcf52a79903a919" - integrity sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg== - -file-type@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/file-type/-/file-type-8.1.0.tgz#244f3b7ef641bbe0cca196c7276e4b332399f68c" - integrity 
sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ== - -filename-reserved-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz#abf73dfab735d045440abfea2d91f389ebbfa229" - integrity sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ== - -filenamify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/filenamify/-/filenamify-2.1.0.tgz#88faf495fb1b47abfd612300002a16228c677ee9" - integrity sha512-ICw7NTT6RsDp2rnYKVd8Fu4cr6ITzGy3+u4vUujPkabyaz+03F24NWEX7fs5fp+kBonlaqPH8fAO2NM+SXt/JA== +file-type@^16.5.3: + version "16.5.4" + resolved "https://registry.yarnpkg.com/file-type/-/file-type-16.5.4.tgz#474fb4f704bee427681f98dd390058a172a6c2fd" + integrity sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw== dependencies: - filename-reserved-regex "^2.0.0" - strip-outer "^1.0.0" - trim-repeated "^1.0.0" + readable-web-to-node-stream "^3.0.0" + strtok3 "^6.2.4" + token-types "^4.1.1" fill-range@^7.1.1: version "7.1.1" @@ -8973,13 +8829,6 @@ find-up@^6.3.0: locate-path "^7.1.0" path-exists "^5.0.0" -find-versions@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/find-versions/-/find-versions-3.2.0.tgz#10297f98030a786829681690545ef659ed1d254e" - integrity sha512-P8WRou2S+oe222TOCHitLy8zj+SIsVJh52VP4lvXkaFVnOFFdoWv1H1Jjvel1aI6NCFOAaeAVm8qrI0odiLcww== - dependencies: - semver-regex "^2.0.0" - flat-cache@^3.0.4: version "3.0.4" resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" @@ -9096,24 +8945,11 @@ fresh@0.5.2: resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== -from2@^2.1.1: - version "2.3.0" - resolved "https://registry.yarnpkg.com/from2/-/from2-2.3.0.tgz#8bfb5502bde4a4d36cfdeea007fcca21d7e382af" - integrity 
sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g== - dependencies: - inherits "^2.0.1" - readable-stream "^2.0.0" - fromentries@^1.3.2: version "1.3.2" resolved "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz" integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg== -fs-constants@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" - integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== - fs-extra@^10.0.0: version "10.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" @@ -9226,34 +9062,7 @@ get-package-type@^0.1.0: resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== -get-proxy@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/get-proxy/-/get-proxy-2.1.0.tgz#349f2b4d91d44c4d4d4e9cba2ad90143fac5ef93" - integrity sha512-zmZIaQTWnNQb4R4fJUEp/FC51eZsc6EkErspy3xtIYStaq8EB/hDIWipxsal+E8rz0qD7f2sL/NA9Xee4RInJw== - dependencies: - npm-conf "^1.1.0" - -get-stream@3.0.0, get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== - -get-stream@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-2.3.1.tgz#5f38f93f346009666ee0150a054167f91bdd95de" - integrity sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA== - dependencies: - object-assign "^4.0.1" - pinkie-promise "^2.0.0" - -get-stream@^4.0.0: - version "4.1.0" - resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.0.0: +get-stream@^5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== @@ -9273,15 +9082,6 @@ get-symbol-description@^1.0.0: call-bind "^1.0.2" get-intrinsic "^1.1.1" -gifsicle@^5.0.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/gifsicle/-/gifsicle-5.3.0.tgz#499713c6f1e89ebbc3630da3a74fdb4697913b4e" - integrity sha512-FJTpgdj1Ow/FITB7SVza5HlzXa+/lqEY0tHQazAJbuAdvyJtkH4wIdsR2K414oaTwRXHFLLF+tYbipj+OpYg+Q== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - execa "^5.0.0" - git-raw-commits@^2.0.0: version "2.0.11" resolved "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.11.tgz" @@ -9378,20 +9178,6 @@ globalthis@^1.0.3: dependencies: define-properties "^1.1.3" -globby@^10.0.0: - version "10.0.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-10.0.2.tgz#277593e745acaa4646c3ab411289ec47a0392543" - integrity sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg== - dependencies: - "@types/glob" "^7.1.1" - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.0.3" - glob "^7.1.3" - ignore "^5.1.1" - merge2 "^1.2.3" - slash "^3.0.0" - globby@^11.0.3, globby@^11.1.0: version "11.1.0" resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" @@ -9404,6 +9190,18 @@ globby@^11.0.3, globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" +globby@^12.0.0: + version "12.2.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-12.2.0.tgz#2ab8046b4fba4ff6eede835b29f678f90e3d3c22" + integrity 
sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA== + dependencies: + array-union "^3.0.1" + dir-glob "^3.0.1" + fast-glob "^3.2.7" + ignore "^5.1.9" + merge2 "^1.4.1" + slash "^4.0.0" + globby@^13.1.1: version "13.2.2" resolved "https://registry.yarnpkg.com/globby/-/globby-13.2.2.tgz#63b90b1bf68619c2135475cbd4e71e66aa090592" @@ -9422,50 +9220,24 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -got@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a" - integrity sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw== +got@11.8.5: + version "11.8.5" + resolved "https://registry.yarnpkg.com/got/-/got-11.8.5.tgz#ce77d045136de56e8f024bebb82ea349bc730046" + integrity sha512-o0Je4NvQObAuZPHLFoRSkdG2lTgtcynqymzg2Vupdx6PorhaT5MCbIyXG6d4D94kk8ZG57QeosgdiqfJWhEhlQ== dependencies: - decompress-response "^3.2.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-plain-obj "^1.1.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - p-cancelable "^0.3.0" - p-timeout "^1.1.1" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - url-parse-lax "^1.0.0" - url-to-options "^1.0.1" + "@sindresorhus/is" "^4.0.0" + "@szmarczak/http-timer" "^4.0.5" + "@types/cacheable-request" "^6.0.1" + "@types/responselike" "^1.0.0" + cacheable-lookup "^5.0.3" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + http2-wrapper "^1.0.0-beta.5.2" + lowercase-keys "^2.0.0" + p-cancelable "^2.0.0" + responselike "^2.0.0" -got@^8.3.1: - version "8.3.2" - resolved "https://registry.yarnpkg.com/got/-/got-8.3.2.tgz#1d23f64390e97f776cac52e5b936e5f514d2e937" - integrity sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw== - dependencies: - "@sindresorhus/is" "^0.7.0" - cacheable-request "^2.1.1" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream 
"^3.0.0" - into-stream "^3.1.0" - is-retry-allowed "^1.1.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - mimic-response "^1.0.0" - p-cancelable "^0.4.0" - p-timeout "^2.0.1" - pify "^3.0.0" - safe-buffer "^5.1.1" - timed-out "^4.0.1" - url-parse-lax "^3.0.0" - url-to-options "^1.0.1" - -graceful-fs@^4.1.10, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.2, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.11, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.8, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -9526,23 +9298,11 @@ has-proto@^1.0.1: resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz" integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== -has-symbol-support-x@^1.4.1: - version "1.4.2" - resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" - integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== - has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has-to-string-tag-x@^1.2.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" - integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== - dependencies: - has-symbol-support-x "^1.4.1" - has-tostringtag@^1.0.0: version "1.0.0" resolved 
"https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" @@ -9921,10 +9681,10 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== +http-cache-semantics@^4.0.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz#205f4db64f8562b76a4ff9235aa5279839a09dd5" + integrity sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ== http-deceiver@^1.2.7: version "1.2.7" @@ -9992,6 +9752,14 @@ http-status-codes@2.3.0: resolved "https://registry.yarnpkg.com/http-status-codes/-/http-status-codes-2.3.0.tgz#987fefb28c69f92a43aecc77feec2866349a8bfc" integrity sha512-RJ8XvFvpPM/Dmc5SV+dC4y5PCeOhT3x1Hq0NU3rjGeg5a/CqlhZ7uudknPwZFz4aeAXDcbAyaeP7GAo9lvngtA== +http2-wrapper@^1.0.0-beta.5.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" + integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.0.0" + https-proxy-agent@^5.0.0: version "5.0.1" resolved "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz" @@ -10000,11 +9768,6 @@ https-proxy-agent@^5.0.0: agent-base "6" debug "4" -human-signals@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" - integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== - human-signals@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz" @@ 
-10080,100 +9843,44 @@ ignore@^5.1.1, ignore@^5.1.8, ignore@^5.2.0: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== -ignore@^5.2.4: +ignore@^5.1.9, ignore@^5.2.4: version "5.3.2" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.2.tgz#3cd40e729f3643fd87cb04e50bf0eb722bc596f5" integrity sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g== +image-minimizer-webpack-plugin@^4.0.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/image-minimizer-webpack-plugin/-/image-minimizer-webpack-plugin-4.1.3.tgz#728e89d153978f49396fe1881aa11fe6cef57f83" + integrity sha512-yJvYlLAZosu2iqlGF81BEUHfUiWRPD05krtoax9Ffst3Yzbn3X7p04VXambwlx3uhbSwH/BeyM5+bJHQksnuyw== + dependencies: + schema-utils "^4.2.0" + serialize-javascript "^6.0.2" + image-size@~0.5.0: version "0.5.5" resolved "https://registry.npmjs.org/image-size/-/image-size-0.5.5.tgz" integrity sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ== -image-webpack-loader@8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/image-webpack-loader/-/image-webpack-loader-8.1.0.tgz#cd97172e1e7304ef5eb898344fc25bbb650fc7d7" - integrity sha512-bxzMIBNu42KGo6Bc9YMB0QEUt+XuVTl2ZSX3oGAlbsqYOkxkT4TEWvVsnwUkCRCYISJrMCEc/s0y8OYrmEfUOg== +imagemin-svgo@^10.0.1: + version "10.0.1" + resolved "https://registry.yarnpkg.com/imagemin-svgo/-/imagemin-svgo-10.0.1.tgz#bc592950831c13998a40cb248f6e82e0b0b5c3dd" + integrity sha512-v27/UTGkb3vrm5jvjsMGQ2oxaDfSOTBfJOgmFO2fYepx05bY1IqWCK13aDytVR+l9w9eOlq0NMCLbxJlghYb2g== dependencies: - imagemin "^7.0.1" - loader-utils "^2.0.0" - object-assign "^4.1.1" - schema-utils "^2.7.1" - optionalDependencies: - imagemin-gifsicle "^7.0.0" - imagemin-mozjpeg "^9.0.0" - imagemin-optipng "^8.0.0" - imagemin-pngquant "^9.0.2" - imagemin-svgo 
"^9.0.0" - imagemin-webp "^7.0.0" + is-svg "^4.3.1" + svgo "^2.5.0" -imagemin-gifsicle@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/imagemin-gifsicle/-/imagemin-gifsicle-7.0.0.tgz#1a7ab136a144c4678657ba3b6c412f80805d26b0" - integrity sha512-LaP38xhxAwS3W8PFh4y5iQ6feoTSF+dTAXFRUEYQWYst6Xd+9L/iPk34QGgK/VO/objmIlmq9TStGfVY2IcHIA== +imagemin@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/imagemin/-/imagemin-8.0.1.tgz#8b29ecb78197d8f0eac6a782f2e6b38fb3780d9e" + integrity sha512-Q/QaPi+5HuwbZNtQRqUVk6hKacI6z9iWiCSQBisAv7uBynZwO7t1svkryKl7+iSQbkU/6t9DWnHz04cFs2WY7w== dependencies: - execa "^1.0.0" - gifsicle "^5.0.0" - is-gif "^3.0.0" - -imagemin-mozjpeg@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/imagemin-mozjpeg/-/imagemin-mozjpeg-9.0.0.tgz#d1af26d0b43d75a41c211051c1910da59d9d2324" - integrity sha512-TwOjTzYqCFRgROTWpVSt5UTT0JeCuzF1jswPLKALDd89+PmrJ2PdMMYeDLYZ1fs9cTovI9GJd68mRSnuVt691w== - dependencies: - execa "^4.0.0" - is-jpg "^2.0.0" - mozjpeg "^7.0.0" - -imagemin-optipng@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/imagemin-optipng/-/imagemin-optipng-8.0.0.tgz#b88e5cf6da25cc8479e07cdf38c3ae0479df7ef2" - integrity sha512-CUGfhfwqlPjAC0rm8Fy+R2DJDBGjzy2SkfyT09L8rasnF9jSoHFqJ1xxSZWK6HVPZBMhGPMxCTL70OgTHlLF5A== - dependencies: - exec-buffer "^3.0.0" - is-png "^2.0.0" - optipng-bin "^7.0.0" - -imagemin-pngquant@^9.0.2: - version "9.0.2" - resolved "https://registry.yarnpkg.com/imagemin-pngquant/-/imagemin-pngquant-9.0.2.tgz#38155702b0cc4f60f671ba7c2b086ea3805d9567" - integrity sha512-cj//bKo8+Frd/DM8l6Pg9pws1pnDUjgb7ae++sUX1kUVdv2nrngPykhiUOgFeE0LGY/LmUbCf4egCHC4YUcZSg== - dependencies: - execa "^4.0.0" - is-png "^2.0.0" - is-stream "^2.0.0" - ow "^0.17.0" - pngquant-bin "^6.0.0" - -imagemin-svgo@^9.0.0: - version "9.0.0" - resolved "https://registry.yarnpkg.com/imagemin-svgo/-/imagemin-svgo-9.0.0.tgz#749370804608917a67d4ff590f07a87756aec006" - integrity 
sha512-uNgXpKHd99C0WODkrJ8OO/3zW3qjgS4pW7hcuII0RcHN3tnKxDjJWcitdVC/TZyfIqSricU8WfrHn26bdSW62g== - dependencies: - is-svg "^4.2.1" - svgo "^2.1.0" - -imagemin-webp@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/imagemin-webp/-/imagemin-webp-7.0.0.tgz#df000ec927855d74d4cfafec8558ac418c88d2a9" - integrity sha512-JoYjvHNgBLgrQAkeCO7T5iNc8XVpiBmMPZmiXMhalC7K6gwY/3DCEUfNxVPOmNJ+NIJlJFvzcMR9RBxIE74Xxw== - dependencies: - cwebp-bin "^7.0.1" - exec-buffer "^3.2.0" - is-cwebp-readable "^3.0.0" - -imagemin@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/imagemin/-/imagemin-7.0.1.tgz#f6441ca647197632e23db7d971fffbd530c87dbf" - integrity sha512-33AmZ+xjZhg2JMCe+vDf6a9mzWukE7l+wAtesjE7KyteqqKjzxv7aVQeWnul1Ve26mWvEQqyPwl0OctNBfSR9w== - dependencies: - file-type "^12.0.0" - globby "^10.0.0" - graceful-fs "^4.2.2" + file-type "^16.5.3" + globby "^12.0.0" + graceful-fs "^4.2.8" junk "^3.1.0" - make-dir "^3.0.0" - p-pipe "^3.0.0" - replace-ext "^1.0.0" + p-pipe "^4.0.0" + replace-ext "^2.0.0" + slash "^3.0.0" immediate@~3.0.5: version "3.0.6" @@ -10193,11 +9900,6 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-lazy@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-3.1.0.tgz#891279202c8a2280fdbd6674dbd8da1a1dfc67cc" - integrity sha512-8/gvXvX2JMn0F+CDlSC4l6kOmVaLOO3XLkksI7CI3Ud95KDYJuYur2b9P/PUt/i/pDAMd/DulQsNbbbmRRsDIQ== - import-local@^3.0.2: version "3.1.0" resolved "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz" @@ -10304,14 +10006,6 @@ interpret@^3.1.1: resolved "https://registry.yarnpkg.com/interpret/-/interpret-3.1.1.tgz#5be0ceed67ca79c6c4bc5cf0d7ee843dcea110c4" integrity sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ== -into-stream@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-3.1.0.tgz#96fb0a936c12babd6ff1752a17d05616abd094c6" - 
integrity sha512-TcdjPibTksa1NQximqep2r17ISRiNE9fwlfbg3F8ANdvP5/yrFTew86VcO//jk4QTaMlbjypPBq76HN2zaKfZQ== - dependencies: - from2 "^2.1.1" - p-is-promise "^1.1.0" - invariant@^2.2.4: version "2.2.4" resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" @@ -10440,13 +10134,6 @@ is-core-module@^2.13.0: dependencies: has "^1.0.3" -is-cwebp-readable@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-cwebp-readable/-/is-cwebp-readable-3.0.0.tgz#0554aaa400977a2fc4de366d8c0244f13cde58cb" - integrity sha512-bpELc7/Q1/U5MWHn4NdHI44R3jxk0h9ew9ljzabiRl70/UIjL/ZAqRMb52F5+eke/VC8yTiv4Ewryo1fPWidvA== - dependencies: - file-type "^10.5.0" - is-date-object@^1.0.1, is-date-object@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz" @@ -10501,13 +10188,6 @@ is-generator-function@^1.0.7: dependencies: has-tostringtag "^1.0.0" -is-gif@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-gif/-/is-gif-3.0.0.tgz#c4be60b26a301d695bb833b20d9b5d66c6cf83b1" - integrity sha512-IqJ/jlbw5WJSNfwQ/lHEDXF8rxhRgF6ythk2oiEvhpG29F704eX9NO6TvPfMiq9DrbwgcEDnETYNcZDPewQoVw== - dependencies: - file-type "^10.4.0" - is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" @@ -10537,21 +10217,11 @@ is-interactive@^1.0.0: resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== -is-jpg@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-jpg/-/is-jpg-2.0.0.tgz#2e1997fa6e9166eaac0242daae443403e4ef1d97" - integrity sha512-ODlO0ruzhkzD3sdynIainVP5eoOFNN85rxA1+cwwnPe4dKyX0r5+hxNO5XpCrxlHcmb9vkOit9mhRD2JVuimHg== - is-map@^2.0.1, is-map@^2.0.2: version "2.0.2" resolved 
"https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz" integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== -is-natural-number@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-natural-number/-/is-natural-number-4.0.1.tgz#ab9d76e1db4ced51e35de0c72ebecf09f734cde8" - integrity sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ== - is-negative-zero@^2.0.2: version "2.0.2" resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz" @@ -10589,12 +10259,7 @@ is-obj@^2.0.0: resolved "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" - integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== - -is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: +is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz" integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== @@ -10616,11 +10281,6 @@ is-plain-object@^5.0.0: resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== -is-png@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-png/-/is-png-2.0.0.tgz#ee8cbc9e9b050425cedeeb4a6fb74a649b0a4a8d" - integrity sha512-4KPGizaVGj2LK7xwJIz8o5B2ubu1D/vcQsgOGFEDlpcvgZHto4gBnyd0ig7Ws+67ixmwKoNmu0hYnpo6AaKb5g== - is-potential-custom-element-name@^1.0.1: version "1.0.1" resolved 
"https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz" @@ -10641,11 +10301,6 @@ is-regex@^1.1.4: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-retry-allowed@^1.0.0, is-retry-allowed@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" - integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== - is-set@^2.0.1, is-set@^2.0.2: version "2.0.2" resolved "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz" @@ -10658,11 +10313,6 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.0.0, is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== - is-stream@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" @@ -10675,7 +10325,7 @@ is-string@^1.0.5, is-string@^1.0.7: dependencies: has-tostringtag "^1.0.0" -is-svg@^4.2.1: +is-svg@^4.3.1: version "4.4.0" resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-4.4.0.tgz#34db20a38146be5f2b3060154da33d11e6f74b7c" integrity sha512-v+AgVwiK5DsGtT9ng+m4mClp6zDAmwrW8nZi6Gg15qzvBnRWWdfWA1TGaXyCDnWq5g5asofIgMVl3PjKxvk1ug== @@ -10828,14 +10478,6 @@ istanbul-reports@^3.1.3: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" -isurl@^1.0.0-alpha5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" - integrity sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== - dependencies: - has-to-string-tag-x "^1.2.0" - is-object "^1.0.1" - jerrypick@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/jerrypick/-/jerrypick-1.1.1.tgz" @@ -11427,10 +11069,10 @@ 
jsesc@~0.5.0: resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" - integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== json-parse-better-errors@^1.0.1: version "1.0.2" @@ -11525,12 +11167,12 @@ kapsule@1, kapsule@^1.14: dependencies: lodash-es "4" -keyv@3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.0.0.tgz#44923ba39e68b12a7cec7df6c3268c031f2ef373" - integrity sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA== +keyv@^4.0.0: + version "4.5.4" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" + integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== dependencies: - json-buffer "3.0.0" + json-buffer "3.0.1" kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" @@ -11855,15 +11497,10 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" -lowercase-keys@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.0.tgz#4e3366b39e7f5457e35f1324bdf6f88d0bfc7306" - integrity sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A== - -lowercase-keys@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" - integrity 
sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== lowlight@^1.17.0: version "1.20.0" @@ -11909,13 +11546,6 @@ magic-string@0.30.8: dependencies: "@jridgewell/sourcemap-codec" "^1.4.15" -make-dir@^1.0.0, make-dir@^1.2.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" - integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== - dependencies: - pify "^3.0.0" - make-dir@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz" @@ -12264,7 +11894,7 @@ merge-stream@^2.0.0: resolved "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -merge2@^1.2.3, merge2@^1.3.0, merge2@^1.4.1: +merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== @@ -12691,11 +12321,6 @@ mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== -mime-db@^1.28.0: - version "1.53.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.53.0.tgz#3cb63cd820fc29896d9d4e8c32ab4fcd74ccb447" - integrity sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg== - mime-types@^2.1.12, mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.17, 
mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" @@ -12718,6 +12343,11 @@ mimic-response@^1.0.0: resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + min-document@^2.19.0: version "2.19.0" resolved "https://registry.npmjs.org/min-document/-/min-document-2.19.0.tgz" @@ -12831,14 +12461,6 @@ motion@12.4.13: framer-motion "^12.4.13" tslib "^2.4.0" -mozjpeg@^7.0.0: - version "7.1.1" - resolved "https://registry.yarnpkg.com/mozjpeg/-/mozjpeg-7.1.1.tgz#dfb61953536e66fcabd4ae795e7a312d42a51f18" - integrity sha512-iIDxWvzhWvLC9mcRJ1uSkiKaj4drF58oCqK2bITm5c2Jt6cJ8qQjSSru2PCaysG+hLIinryj8mgz5ZJzOYTv1A== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - mri@^1.1.0: version "1.2.0" resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" @@ -13074,14 +12696,10 @@ normalize-range@^0.1.2: resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== -normalize-url@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-2.0.1.tgz#835a9da1551fa26f70e92329069a23aa6574d7e6" - integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== - dependencies: - prepend-http "^2.0.0" - query-string "^5.0.1" - sort-keys "^2.0.0" +normalize-url@^6.0.1: + version "6.1.0" + resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== nosleep.js@^0.7.0: version "0.7.0" @@ -13093,14 +12711,6 @@ not@^0.1.0: resolved "https://registry.yarnpkg.com/not/-/not-0.1.0.tgz#c9691c1746c55dcfbe54cbd8bd4ff041bc2b519d" integrity sha512-5PDmaAsVfnWUgTUbJ3ERwn7u79Z0dYxN9ErxCpVJJqe2RK0PJ3z+iFUxuqjwtlDDegXvtWoxD/3Fzxox7tFGWA== -npm-conf@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/npm-conf/-/npm-conf-1.1.3.tgz#256cc47bd0e218c259c4e9550bf413bc2192aff9" - integrity sha512-Yic4bZHJOt9RCFbRP3GgpqhScOY4HH3V2P8yBj6CeYq118Qr+BLXqT2JvpJ00mryLESpgOxf5XlFv4ZjXxLScw== - dependencies: - config-chain "^1.1.11" - pify "^3.0.0" - npm-run-all@latest: version "4.1.5" resolved "https://registry.yarnpkg.com/npm-run-all/-/npm-run-all-4.1.5.tgz#04476202a15ee0e2e214080861bff12a51d98fba" @@ -13116,14 +12726,7 @@ npm-run-all@latest: shell-quote "^1.6.1" string.prototype.padend "^3.0.0" -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw== - dependencies: - path-key "^2.0.0" - -npm-run-path@^4.0.0, npm-run-path@^4.0.1: +npm-run-path@^4.0.1: version "4.0.1" resolved "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz" integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== @@ -13343,14 +12946,6 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" -optipng-bin@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/optipng-bin/-/optipng-bin-7.0.1.tgz#beb8e55a52f8a26f885ee57ab44fcf62397d6972" - integrity sha512-W99mpdW7Nt2PpFiaO+74pkht7KEqkXkeRomdWXfEz3SALZ6hns81y/pm1dsGZ6ItUIfchiNIP6ORDr1zETU1jA== - 
dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.0" - ora@^5.4.1: version "5.4.1" resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" @@ -13366,13 +12961,6 @@ ora@^5.4.1: strip-ansi "^6.0.0" wcwidth "^1.0.1" -os-filter-obj@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/os-filter-obj/-/os-filter-obj-2.0.0.tgz#1c0b62d5f3a2442749a2d139e6dddee6e81d8d16" - integrity sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg== - dependencies: - arch "^2.1.0" - os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" @@ -13393,46 +12981,10 @@ overlayscrollbars@^2.8.1: resolved "https://registry.yarnpkg.com/overlayscrollbars/-/overlayscrollbars-2.9.2.tgz#056020a3811742b58b754fab6f775d49bd109be9" integrity sha512-iDT84r39i7oWP72diZN2mbJUsn/taCq568aQaIrc84S87PunBT7qtsVltAF2esk7ORTRjQDnfjVYoqqTzgs8QA== -ow@^0.17.0: - version "0.17.0" - resolved "https://registry.yarnpkg.com/ow/-/ow-0.17.0.tgz#4f938999fed6264c9048cd6254356e0f1e7f688c" - integrity sha512-i3keDzDQP5lWIe4oODyDFey1qVrq2hXKTuTH2VpqwpYtzPiKZt2ziRI4NBQmgW40AnV5Euz17OyWweCb+bNEQA== - dependencies: - type-fest "^0.11.0" - -p-cancelable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" - integrity sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw== - -p-cancelable@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.4.1.tgz#35f363d67d52081c8d9585e37bcceb7e0bbcb2a0" - integrity sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ== - -p-event@^1.0.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/p-event/-/p-event-1.3.0.tgz#8e6b4f4f65c72bc5b6fe28b75eda874f96a4a085" - integrity 
sha512-hV1zbA7gwqPVFcapfeATaNjQ3J0NuzorHPyG8GPL9g/Y/TplWVBVoCKCXL6Ej2zscrCEv195QNWJXuBH6XZuzA== - dependencies: - p-timeout "^1.1.1" - -p-event@^2.1.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/p-event/-/p-event-2.3.1.tgz#596279ef169ab2c3e0cae88c1cfbb08079993ef6" - integrity sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA== - dependencies: - p-timeout "^2.0.1" - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow== - -p-is-promise@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-1.1.0.tgz#9c9456989e9f6588017b0434d56097675c3da05e" - integrity sha512-zL7VE4JVS2IFSkR2GQKDSPEVxkoH43/p7oEnwpdCndKYJO0HVeRB7fA8TJwuLOTBREtK0ea8eHaxdwcpob5dmg== +p-cancelable@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" + integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== p-limit@^2.2.0: version "2.3.0" @@ -13476,13 +13028,6 @@ p-locate@^6.0.0: dependencies: p-limit "^4.0.0" -p-map-series@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-map-series/-/p-map-series-1.0.0.tgz#bf98fe575705658a9e1351befb85ae4c1f07bdca" - integrity sha512-4k9LlvY6Bo/1FcIdV33wqZQES0Py+iKISU9Uc8p8AjWoZPnFKMpVIVD3s0EYn4jzLh1I+WeUZkJ0Yoa4Qfw3Kg== - dependencies: - p-reduce "^1.0.0" - p-map@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz" @@ -13490,15 +13035,10 @@ p-map@^4.0.0: dependencies: aggregate-error "^3.0.0" -p-pipe@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/p-pipe/-/p-pipe-3.1.0.tgz#48b57c922aa2e1af6a6404cb7c6bf0eb9cc8e60e" - integrity 
sha512-08pj8ATpzMR0Y80x50yJHn37NF6vjrqHutASaX5LiH5npS9XPvrUmscd9MF5R4fuYRHOxQR1FfMIlF7AzwoPqw== - -p-reduce@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-reduce/-/p-reduce-1.0.0.tgz#18c2b0dd936a4690a529f8231f58a0fdb6a47dfa" - integrity sha512-3Tx1T3oM1xO/Y8Gj0sWyE78EIJZ+t+aEmXUdvQgvGmSMri7aPTHoovbXEreWKkL5j21Er60XAWLTzKbAKYOujQ== +p-pipe@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-pipe/-/p-pipe-4.0.0.tgz#7e5424569351b2ab452a47826acb93ce09ad6a2c" + integrity sha512-HkPfFklpZQPUKBFXzKFB6ihLriIHxnmuQdK9WmLDwe4hf2PdhhfWT/FJa+pc3bA1ywvKXtedxIRmd4Y7BTXE4w== p-retry@^6.2.0: version "6.2.1" @@ -13509,20 +13049,6 @@ p-retry@^6.2.0: is-network-error "^1.0.0" retry "^0.13.1" -p-timeout@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386" - integrity sha512-gb0ryzr+K2qFqFv6qi3khoeqMZF/+ajxQipEF6NteZVnvz9tzdsfAVj3lYtn1gAXvH5lfLwfxEII799gt/mRIA== - dependencies: - p-finally "^1.0.0" - -p-timeout@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-2.0.1.tgz#d8dd1979595d2dc0139e1fe46b8b646cb3cdf038" - integrity sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA== - dependencies: - p-finally "^1.0.0" - p-try@^2.0.0: version "2.2.0" resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" @@ -13675,11 +13201,6 @@ path-is-absolute@^1.0.0: resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== -path-key@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== - path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved 
"https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" @@ -13735,10 +13256,10 @@ pbf@3.2.1: ieee754 "^1.1.12" resolve-protobuf-schema "^2.1.0" -pend@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" - integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== +peek-readable@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/peek-readable/-/peek-readable-4.1.0.tgz#4ece1111bf5c2ad8867c314c81356847e8a62e72" + integrity sha512-ZI3LnwUv5nOGbQzD9c2iDG6toheuXSZP5esSHBjopsXH4dg19soufvpUGA3uohi5anFtGb2lhAVdHzH6R/Evvg== performance-now@^2.1.0: version "2.1.0" @@ -13781,11 +13302,6 @@ pidtree@^0.5.0: resolved "https://registry.npmjs.org/pidtree/-/pidtree-0.5.0.tgz" integrity sha512-9nxspIM7OpZuhBxPg73Zvyq7j1QMPMPsGKTqRc2XOaFQauDvoNz9fM1Wdkjmeo7l9GXOZiRs97sPkuayl39wjA== -pify@^2.2.0, pify@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== - pify@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" @@ -13796,18 +13312,6 @@ pify@^4.0.1: resolved "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw== - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity 
sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== - pirates@^4.0.4: version "4.0.5" resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz" @@ -13827,15 +13331,6 @@ pkg-dir@^7.0.0: dependencies: find-up "^6.3.0" -pngquant-bin@^6.0.0: - version "6.0.1" - resolved "https://registry.yarnpkg.com/pngquant-bin/-/pngquant-bin-6.0.1.tgz#2b5789ca219eeb4d8509ab1ae082092801b7f07e" - integrity sha512-Q3PUyolfktf+hYio6wsg3SanQzEU/v8aICg/WpzxXcuCMRb7H2Q81okfpcEztbMvw25ILjd3a87doj2N9kvbpQ== - dependencies: - bin-build "^3.0.0" - bin-wrapper "^4.0.1" - execa "^4.0.0" - polished@4: version "4.2.2" resolved "https://registry.npmjs.org/polished/-/polished-4.2.2.tgz" @@ -14147,16 +13642,6 @@ prelude-ls@~1.1.2: resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz" integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg== - -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" - integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== - present@0.0.6: version "0.0.6" resolved "https://registry.npmjs.org/present/-/present-0.0.6.tgz" @@ -14259,11 +13744,6 @@ property-information@^6.0.0: resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.3.0.tgz#ba4a06ec6b4e1e90577df9931286953cdf4282c3" integrity sha512-gVNZ74nqhRMiIUYWGQdosYetaKc83x8oT41a0LlV3AAFCAZwCpg4vmGkq8t34+cUhp3cnM4XDiU/7xlgK7HGrg== -proto-list@~1.2.1: - version "1.2.4" - resolved 
"https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" - integrity sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA== - protocol-buffers-schema@^3.3.1: version "3.6.0" resolved "https://registry.npmjs.org/protocol-buffers-schema/-/protocol-buffers-schema-3.6.0.tgz" @@ -14355,6 +13835,11 @@ quick-lru@^4.0.1: resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz" integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + quick-lru@^6.1.1: version "6.1.1" resolved "https://registry.npmjs.org/quick-lru/-/quick-lru-6.1.1.tgz" @@ -15194,7 +14679,7 @@ readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.6, readable-stre string_decoder "^1.1.1" util-deprecate "^1.0.1" -readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.3.0, readable-stream@^2.3.5, readable-stream@~2.3.6: +readable-stream@^2.0.1, readable-stream@~2.3.6: version "2.3.8" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -15207,6 +14692,24 @@ readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.3.0, readable string_decoder "~1.1.1" util-deprecate "~1.0.1" +readable-stream@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.7.0.tgz#cedbd8a1146c13dfff8dab14068028d58c15ac91" + integrity sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg== + dependencies: + abort-controller "^3.0.0" + buffer "^6.0.3" + events "^3.3.0" + process 
"^0.11.10" + string_decoder "^1.3.0" + +readable-web-to-node-stream@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.4.tgz#392ba37707af5bf62d725c36c1b5d6ef4119eefc" + integrity sha512-9nX56alTf5bwXQ3ZDipHJhusu9NTQJ/CVPtb/XHAJCXihZeitfJvIRS4GqQ/mfIoOE3IelHMrpayVrosdHBuLw== + dependencies: + readable-stream "^4.7.0" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" @@ -15517,10 +15020,10 @@ renderkid@^3.0.0: lodash "^4.17.21" strip-ansi "^6.0.1" -replace-ext@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.1.tgz#2d6d996d04a15855d967443631dd5f77825b016a" - integrity sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw== +replace-ext@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-2.0.0.tgz#9471c213d22e1bcc26717cd6e50881d88f812b06" + integrity sha512-UszKE5KVK6JvyD92nzMn9cDapSk6w/CaFZ96CnmDMUqH9oowfxF/ZjRITD25H4DnOQClLA4/j7jLGXXLVKxAug== require-directory@^2.1.1: version "2.1.1" @@ -15547,6 +15050,11 @@ resize-observer-polyfill@^1.5.1: resolved "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz" integrity sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg== +resolve-alpn@^1.0.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" + integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== + resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz" @@ -15606,12 +15114,12 @@ resolve@^2.0.0-next.4: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@1.0.2: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" - integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== +responselike@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" + integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== dependencies: - lowercase-keys "^1.0.0" + lowercase-keys "^2.0.0" restore-cursor@^3.1.0: version "3.1.0" @@ -15643,13 +15151,6 @@ rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: dependencies: glob "^7.1.3" -rimraf@^2.5.4: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - robust-predicates@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771" @@ -15713,7 +15214,7 @@ safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@~5.2.0: +safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -15767,7 +15268,7 @@ scheduler@^0.23.0: dependencies: loose-envify "^1.1.0" -schema-utils@^2.7.0, schema-utils@^2.7.1: +schema-utils@^2.7.0: version "2.7.1" resolved 
"https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== @@ -15807,13 +15308,6 @@ scroll-into-view-if-needed@^3.1.0: dependencies: compute-scroll-into-view "^3.0.2" -seek-bzip@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/seek-bzip/-/seek-bzip-1.0.6.tgz#35c4171f55a680916b52a07859ecf3b5857f21c4" - integrity sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ== - dependencies: - commander "^2.8.1" - select-hose@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz" @@ -15827,19 +15321,7 @@ selfsigned@^2.4.1: "@types/node-forge" "^1.3.0" node-forge "^1" -semver-regex@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/semver-regex/-/semver-regex-2.0.0.tgz#a93c2c5844539a770233379107b38c7b4ac9d338" - integrity sha512-mUdIBBvdn0PLOeP3TEkMH7HHeUP3GjsXCwKarjv/kGmUFOYg1VqEemKhoQpWMu6X2I8kHeuVdGibLGkVK+/5Qw== - -semver-truncate@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/semver-truncate/-/semver-truncate-1.1.2.tgz#57f41de69707a62709a7e0104ba2117109ea47e8" - integrity sha512-V1fGg9i4CL3qesB6U0L6XAm4xOJiHmt4QAacazumuasc03BvtFGIMCduv01JWQ69Nv+JST9TqhSCiJoxoY031w== - dependencies: - semver "^5.3.0" - -"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.3.0, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: +"semver@2 || 3 || 4 || 5", semver@7.3.7, semver@7.5.4, semver@7.x, semver@^5.6.0, semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1, semver@^7.2.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.6.3: version "7.5.4" resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== @@ -15865,7 +15347,7 @@ send@0.19.0: range-parser "~1.2.1" statuses "2.0.1" -serialize-javascript@6.0.2, serialize-javascript@^6.0.0, serialize-javascript@^6.0.1: +serialize-javascript@6.0.2, serialize-javascript@^6.0.0, serialize-javascript@^6.0.1, serialize-javascript@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.2.tgz#defa1e055c83bf6d59ea805d8da862254eb6a6c2" integrity sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g== @@ -15944,6 +15426,35 @@ shallowequal@^1.1.0: resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz" integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== +sharp@^0.33.4: + version "0.33.5" + resolved "https://registry.yarnpkg.com/sharp/-/sharp-0.33.5.tgz#13e0e4130cc309d6a9497596715240b2ec0c594e" + integrity sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw== + dependencies: + color "^4.2.3" + detect-libc "^2.0.3" + semver "^7.6.3" + optionalDependencies: + "@img/sharp-darwin-arm64" "0.33.5" + "@img/sharp-darwin-x64" "0.33.5" + "@img/sharp-libvips-darwin-arm64" "1.0.4" + "@img/sharp-libvips-darwin-x64" "1.0.4" + "@img/sharp-libvips-linux-arm" "1.0.5" + "@img/sharp-libvips-linux-arm64" "1.0.4" + "@img/sharp-libvips-linux-s390x" "1.0.4" + "@img/sharp-libvips-linux-x64" "1.0.4" + "@img/sharp-libvips-linuxmusl-arm64" "1.0.4" + "@img/sharp-libvips-linuxmusl-x64" "1.0.4" + "@img/sharp-linux-arm" "0.33.5" + "@img/sharp-linux-arm64" "0.33.5" + "@img/sharp-linux-s390x" "0.33.5" + "@img/sharp-linux-x64" "0.33.5" + "@img/sharp-linuxmusl-arm64" "0.33.5" + "@img/sharp-linuxmusl-x64" "0.33.5" + "@img/sharp-wasm32" "0.33.5" + 
"@img/sharp-win32-ia32" "0.33.5" + "@img/sharp-win32-x64" "0.33.5" + shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" @@ -15985,7 +15496,7 @@ side-channel@^1.0.6: get-intrinsic "^1.2.4" object-inspect "^1.13.1" -signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: +signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: version "3.0.7" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== @@ -16080,27 +15591,6 @@ sort-desc@^0.1.1: resolved "https://registry.npmjs.org/sort-desc/-/sort-desc-0.1.1.tgz" integrity sha512-jfZacW5SKOP97BF5rX5kQfJmRVZP5/adDUTY8fCSPvNcXDVpUEe2pr/iKGlcyZzchRJZrswnp68fgk3qBXgkJw== -sort-keys-length@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/sort-keys-length/-/sort-keys-length-1.0.1.tgz#9cb6f4f4e9e48155a6aa0671edd336ff1479a188" - integrity sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw== - dependencies: - sort-keys "^1.0.0" - -sort-keys@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-1.1.2.tgz#441b6d4d346798f1b4e49e8920adfba0e543f9ad" - integrity sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg== - dependencies: - is-plain-obj "^1.0.0" - -sort-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" - integrity sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg== - dependencies: - is-plain-obj "^1.0.0" - sort-object@^0.3.2: version "0.3.2" resolved "https://registry.npmjs.org/sort-object/-/sort-object-0.3.2.tgz" @@ -16396,7 +15886,7 @@ string.prototype.trimstart@^1.0.6: define-properties 
"^1.1.4" es-abstract "^1.20.4" -string_decoder@^1.1.1: +string_decoder@^1.1.1, string_decoder@^1.3.0: version "1.3.0" resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== @@ -16442,18 +15932,6 @@ strip-bom@^4.0.0: resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== -strip-dirs@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/strip-dirs/-/strip-dirs-2.1.0.tgz#4987736264fc344cf20f6c34aca9d13d1d4ed6c5" - integrity sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g== - dependencies: - is-natural-number "^4.0.1" - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q== - strip-final-newline@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz" @@ -16471,18 +15949,19 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== -strip-outer@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/strip-outer/-/strip-outer-1.0.1.tgz#b2fd2abf6604b9d1e6013057195df836b8a9d631" - integrity sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg== - dependencies: - escape-string-regexp "^1.0.2" - strnum@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.1.2.tgz#57bca4fbaa6f271081715dbc9ed7cee5493e28e4" integrity 
sha512-vrN+B7DBIoTTZjnPNewwhx6cBA/H+IS7rfW68n7XxC1y7uoiGQBxaKzqucGUgavX15dJgiGztLJ8vxuEzwqBdA== +strtok3@^6.2.4: + version "6.3.0" + resolved "https://registry.yarnpkg.com/strtok3/-/strtok3-6.3.0.tgz#358b80ffe6d5d5620e19a073aa78ce947a90f9a0" + integrity sha512-fZtbhtvI9I48xDSywd/somNqgUHl2L2cstmXCCif0itOf96jeW18MBSyrLuNicYQVkvpOxkZtkzujiTJ9LW5Jw== + dependencies: + "@tokenizer/token" "^0.3.0" + peek-readable "^4.1.0" + style-loader@1.3.0: version "1.3.0" resolved "https://registry.npmjs.org/style-loader/-/style-loader-1.3.0.tgz" @@ -16607,7 +16086,7 @@ svg-path-properties@^1.0.4: resolved "https://registry.yarnpkg.com/svg-path-properties/-/svg-path-properties-1.3.0.tgz#7f47e61dcac380c9f4d04f642df7e69b127274fa" integrity sha512-R1+z37FrqyS3UXDhajNfvMxKI0smuVdedqOo4YbAQUfGqA86B9mGvr2IEXrwjjvGzCtdIKy/ad9N8m6YclaKAw== -svgo@^2.1.0: +svgo@^2.5.0: version "2.8.0" resolved "https://registry.yarnpkg.com/svgo/-/svgo-2.8.0.tgz#4ff80cce6710dc2795f0c7c74101e6764cfccd24" integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== @@ -16658,32 +16137,6 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== -tar-stream@^1.5.2: - version "1.6.2" - resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" - integrity sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A== - dependencies: - bl "^1.0.0" - buffer-alloc "^1.2.0" - end-of-stream "^1.0.0" - fs-constants "^1.0.0" - readable-stream "^2.3.0" - to-buffer "^1.1.1" - xtend "^4.0.0" - -temp-dir@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" - integrity 
sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ== - -tempfile@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/tempfile/-/tempfile-2.0.0.tgz#6b0446856a9b1114d1856ffcbe509cccb0977265" - integrity sha512-ZOn6nJUgvgC09+doCEF3oB+r3ag7kUvlsXEGX069QRD60p+P3uP7XG9N2/at+EyIRGSN//ZY3LyEotA1YpmjuA== - dependencies: - temp-dir "^1.0.0" - uuid "^3.0.1" - terminal-link@^2.0.0: version "2.1.1" resolved "https://registry.npmjs.org/terminal-link/-/terminal-link-2.1.1.tgz" @@ -16833,7 +16286,7 @@ thunky@^1.0.2: resolved "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -timed-out@^4.0.0, timed-out@^4.0.1: +timed-out@^4.0.1: version "4.0.1" resolved "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz" integrity sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA== @@ -16870,11 +16323,6 @@ tmpl@1.0.5: resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== -to-buffer@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80" - integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg== - to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" @@ -16897,6 +16345,14 @@ toidentifier@1.0.1: resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== +token-types@^4.1.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/token-types/-/token-types-4.2.1.tgz#0f897f03665846982806e138977dbe72d44df753" + integrity 
sha512-6udB24Q737UD/SDsKAHI9FCRP7Bqc9D/MQUV02ORQg5iskjtLJlZJNdN4kKtcdtwCeWIwIHDGaUsTsCCAa8sFQ== + dependencies: + "@tokenizer/token" "^0.3.0" + ieee754 "^1.2.1" + topojson-client@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/topojson-client/-/topojson-client-3.1.0.tgz#22e8b1ed08a2b922feeb4af6f53b6ef09a467b99" @@ -16946,13 +16402,6 @@ trim-newlines@^3.0.0: resolved "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz" integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw== -trim-repeated@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/trim-repeated/-/trim-repeated-1.0.0.tgz#e3646a2ea4e891312bf7eace6cfb05380bc01c21" - integrity sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg== - dependencies: - escape-string-regexp "^1.0.2" - trough@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/trough/-/trough-2.1.0.tgz#0f7b511a4fde65a46f18477ab38849b22c554876" @@ -17056,13 +16505,6 @@ tsutils@^3.21.0: dependencies: tslib "^1.8.1" -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - tween-functions@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/tween-functions/-/tween-functions-1.2.0.tgz#1ae3a50e7c60bb3def774eac707acbca73bbc3ff" @@ -17099,11 +16541,6 @@ type-detect@4.0.8: resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== -type-fest@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1" - integrity 
sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== - type-fest@^0.18.0: version "0.18.1" resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz" @@ -17225,14 +16662,6 @@ unbox-primitive@^1.0.2: has-symbols "^1.0.3" which-boxed-primitive "^1.0.2" -unbzip2-stream@^1.0.9: - version "1.4.3" - resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" - integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== - dependencies: - buffer "^5.2.1" - through "^2.3.8" - unicode-canonical-property-names-ecmascript@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" @@ -17427,20 +16856,6 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - integrity sha512-BVA4lR5PIviy2PMseNd2jbFQ+jwSwQGdJejf5ctd1rEXt0Ypd7yanUK9+lYechVlN5VaTJGsu2U/3MDDu6KgBA== - dependencies: - prepend-http "^1.0.1" - -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" - integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== - dependencies: - prepend-http "^2.0.0" - url-parse@^1.5.3: version "1.5.10" resolved "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz" @@ -17454,11 +16869,6 @@ url-set-query@^1.0.0: resolved "https://registry.npmjs.org/url-set-query/-/url-set-query-1.0.0.tgz" integrity sha512-3AChu4NiXquPfeckE5R5cGdiHCMWJx1dwCWOmWIL4KHAziJNOFIYJlpGFeKDvwLPHovZRCxK3cYlwzqI9Vp+Gg== -url-to-options@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" - integrity sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A== - use-isomorphic-layout-effect@^1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz" @@ -17511,11 +16921,6 @@ utils-merge@1.0.1: resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== -uuid@^3.0.1: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - uuid@^8.3.2: version "8.3.2" resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" @@ -18153,14 +17558,6 @@ yargs@^17.0.0, yargs@^17.3.1: y18n "^5.0.5" yargs-parser "^21.1.1" -yauzl@^2.4.2: - version "2.10.0" - resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.10.0.tgz#c7eb17c93e112cb1086fa6d8e51fb0667b79a5f9" - integrity sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g== - dependencies: - buffer-crc32 "~0.2.3" - fd-slicer "~1.1.0" - yn@3.1.1: version "3.1.1" resolved "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz" diff --git a/go.mod b/go.mod index 91a48e27154b..672d210c90dd 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/ClickHouse/clickhouse-go/v2 v2.36.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.111.43-aded056 + github.com/SigNoz/signoz-otel-collector v0.128.1 github.com/antlr4-go/antlr/v4 v4.13.1 github.com/antonmedv/expr v1.15.3 github.com/cespare/xxhash/v2 v2.3.0 @@ -50,6 +50,7 @@ require ( github.com/sethvargo/go-password v0.2.0 
github.com/smartystreets/goconvey v1.8.1 github.com/soheilhy/cmux v0.1.5 + github.com/spf13/cobra v1.9.1 github.com/srikanthccv/ClickHouse-go-mock v0.12.0 github.com/stretchr/testify v1.10.0 github.com/tidwall/gjson v1.18.0 @@ -206,7 +207,6 @@ require ( github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect github.com/smarty/assertions v1.15.0 // indirect - github.com/spf13/cobra v1.9.1 // indirect github.com/spf13/pflag v1.0.6 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/tidwall/match v1.1.1 // indirect diff --git a/go.sum b/go.sum index e48fcf1320c0..e48341f9919e 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= -github.com/SigNoz/signoz-otel-collector v0.111.43-aded056 h1:lJ7262JHZlHX7KuUlQa8vpWCdgZKwlZ2P6sUmZEqNLE= -github.com/SigNoz/signoz-otel-collector v0.111.43-aded056/go.mod h1:AHfJ2N/74IXsrbYEPAlqfJeKg006VTt63vBZglUK3jY= +github.com/SigNoz/signoz-otel-collector v0.128.1 h1:D0bKMrRNgcKreKKYoakCr5jTWj1srupbNwGIvpHMihw= +github.com/SigNoz/signoz-otel-collector v0.128.1/go.mod h1:vFQLsJFzQwVkO1ltIMH+z9KKuTZTn/P0lKu2mNYDBpE= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= diff --git a/grammar/FilterQuery.g4 b/grammar/FilterQuery.g4 index 
9b84cbd0e20d..204c5fdd3275 100644 --- a/grammar/FilterQuery.g4 +++ b/grammar/FilterQuery.g4 @@ -208,7 +208,7 @@ QUOTED_TEXT ) ; -fragment SEGMENT : [a-zA-Z$] [a-zA-Z0-9$_:\-]* ; +fragment SEGMENT : [a-zA-Z$_] [a-zA-Z0-9$_:\-/]* ; fragment EMPTY_BRACKS : '[' ']' ; fragment OLD_JSON_BRACKS: '[' '*' ']'; diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go index 764b1632c39c..8d8cd867d7d0 100644 --- a/pkg/errors/errors.go +++ b/pkg/errors/errors.go @@ -79,6 +79,22 @@ func Wrapf(cause error, t typ, code Code, format string, args ...interface{}) *b } } +// WithAdditional wraps an existing base error with a new formatted message. +// It is used when the original error already contains type and code. +func WithAdditional(cause error, format string, args ...interface{}) *base { + t, c, m, e, u, a := Unwrapb(cause) + b := &base{ + t: t, + c: c, + m: m, + e: e, + u: u, + a: a, + } + + return b.WithAdditional(append(a, fmt.Sprintf(format, args...))...) +} + // WithUrl adds a url to the base error and returns a new base error. func (b *base) WithUrl(u string) *base { return &base{ @@ -169,3 +185,11 @@ func WrapInvalidInputf(cause error, code Code, format string, args ...interface{ func NewInvalidInputf(code Code, format string, args ...interface{}) *base { return Newf(TypeInvalidInput, code, format, args...) } + +func WrapUnexpectedf(cause error, code Code, format string, args ...interface{}) *base { + return Wrapf(cause, TypeInvalidInput, code, format, args...) +} + +func NewUnexpectedf(code Code, format string, args ...interface{}) *base { + return Newf(TypeInvalidInput, code, format, args...) 
+} diff --git a/pkg/errors/type.go b/pkg/errors/type.go index 3663f9df667c..80d0dbbefa01 100644 --- a/pkg/errors/type.go +++ b/pkg/errors/type.go @@ -11,6 +11,7 @@ var ( TypeForbidden = typ{"forbidden"} TypeCanceled = typ{"canceled"} TypeTimeout = typ{"timeout"} + TypeUnexpected = typ{"unexpected"} // Generic mismatch of expectations ) // Defines custom error types diff --git a/pkg/modules/user/impluser/handler.go b/pkg/modules/user/impluser/handler.go index cbd8bfa9b534..c3d7751c81da 100644 --- a/pkg/modules/user/impluser/handler.go +++ b/pkg/modules/user/impluser/handler.go @@ -289,43 +289,6 @@ func (h *handler) UpdateUser(w http.ResponseWriter, r *http.Request) { return } - existingUser, err := h.module.GetUserByID(ctx, claims.OrgID, id) - if err != nil { - render.Error(w, err) - return - } - - // only displayName, role can be updated - if user.DisplayName == "" { - user.DisplayName = existingUser.DisplayName - } - - if user.Role == "" { - user.Role = existingUser.Role - } - - if user.Role != existingUser.Role && claims.Role != types.RoleAdmin { - render.Error(w, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")) - return - } - - // Make sure that the request is not demoting the last admin user. 
- // also an admin user can only change role of their own or other user - if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin.String() { - adminUsers, err := h.module.GetUsersByRoleInOrg(ctx, claims.OrgID, types.RoleAdmin) - if err != nil { - render.Error(w, err) - return - } - - if len(adminUsers) == 1 { - render.Error(w, errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot demote the last admin")) - return - } - } - - user.UpdatedAt = time.Now() - updatedUser, err := h.module.UpdateUser(ctx, claims.OrgID, id, &user, claims.UserID) if err != nil { render.Error(w, err) diff --git a/pkg/modules/user/impluser/module.go b/pkg/modules/user/impluser/module.go index e22cb1c0ca1b..2a0c80e32556 100644 --- a/pkg/modules/user/impluser/module.go +++ b/pkg/modules/user/impluser/module.go @@ -176,18 +176,69 @@ func (m *Module) ListUsers(ctx context.Context, orgID string) ([]*types.Gettable } func (m *Module) UpdateUser(ctx context.Context, orgID string, id string, user *types.User, updatedBy string) (*types.User, error) { - user, err := m.store.UpdateUser(ctx, orgID, id, user) + + existingUser, err := m.GetUserByID(ctx, orgID, id) if err != nil { return nil, err } - traits := types.NewTraitsFromUser(user) + requestor, err := m.GetUserByID(ctx, orgID, updatedBy) + if err != nil { + return nil, err + } + + // only displayName, role can be updated + if user.DisplayName == "" { + user.DisplayName = existingUser.DisplayName + } + + if user.Role == "" { + user.Role = existingUser.Role + } + + if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin.String() { + return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles") + } + + // Make sure that the request is not demoting the last admin user. 
+ // also an admin user can only change role of their own or other user + if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin.String() { + adminUsers, err := m.GetUsersByRoleInOrg(ctx, orgID, types.RoleAdmin) + if err != nil { + return nil, err + } + + if len(adminUsers) == 1 { + return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot demote the last admin") + } + } + + user.UpdatedAt = time.Now() + + updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user) + if err != nil { + return nil, err + } + + traits := types.NewTraitsFromUser(updatedUser) m.analytics.IdentifyUser(ctx, user.OrgID, user.ID.String(), traits) traits["updated_by"] = updatedBy m.analytics.TrackUser(ctx, user.OrgID, user.ID.String(), "User Updated", traits) - return user, nil + // if the role is updated then send an email + if existingUser.Role != updatedUser.Role { + if err := m.emailing.SendHTML(ctx, existingUser.Email, "Your Role is updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{ + "CustomerName": existingUser.DisplayName, + "UpdatedByEmail": requestor.Email, + "OldRole": existingUser.Role, + "NewRole": updatedUser.Role, + }); err != nil { + m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err) + } + } + + return updatedUser, nil } func (m *Module) DeleteUser(ctx context.Context, orgID string, id string, deletedBy string) error { diff --git a/pkg/parser/grammar/FilterQueryLexer.interp b/pkg/parser/grammar/FilterQueryLexer.interp index 7c22ebda90b0..a5c3dfe3fd5e 100644 --- a/pkg/parser/grammar/FilterQueryLexer.interp +++ b/pkg/parser/grammar/FilterQueryLexer.interp @@ -118,4 +118,4 @@ mode names: DEFAULT_MODE atn: -[4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 
2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 118, 8, 13, 11, 13, 12, 13, 119, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 137, 8, 15, 11, 15, 12, 15, 138, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 161, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 178, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 221, 8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 226, 8, 29, 1, 29, 4, 29, 229, 8, 29, 11, 29, 12, 29, 230, 1, 29, 1, 29, 5, 29, 235, 8, 29, 10, 29, 12, 29, 238, 9, 29, 3, 29, 240, 8, 29, 1, 29, 1, 29, 3, 29, 244, 8, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 3, 29, 251, 8, 29, 1, 29, 3, 29, 254, 8, 29, 1, 29, 1, 29, 4, 29, 258, 8, 29, 11, 29, 12, 29, 259, 1, 29, 1, 29, 3, 29, 264, 8, 29, 1, 29, 4, 29, 267, 8, 29, 11, 29, 12, 29, 268, 3, 29, 271, 8, 29, 3, 29, 273, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 289, 8, 30, 10, 30, 12, 30, 292, 9, 30, 1, 30, 3, 30, 295, 8, 30, 1, 31, 1, 31, 5, 31, 299, 8, 31, 10, 31, 12, 31, 302, 9, 31, 1, 32, 1, 32, 1, 32, 
1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 316, 8, 34, 10, 34, 12, 34, 319, 9, 34, 1, 35, 4, 35, 322, 8, 35, 11, 35, 12, 35, 323, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 4, 37, 331, 8, 37, 11, 37, 12, 37, 332, 0, 0, 38, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 29, 61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0, 75, 33, 1, 0, 30, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 3, 0, 36, 36, 65, 90, 97, 122, 6, 0, 36, 36, 45, 45, 48, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 
0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0, 0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154, 1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0, 0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 193, 1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296, 1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0, 0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24, 1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0, 114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117, 116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0, 126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7, 4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 
7, 6, 0, 0, 135, 137, 7, 7, 0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142, 7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3, 0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0, 148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151, 152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, 7, 3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11, 0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163, 164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167, 7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15, 0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0, 173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176, 178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40, 1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0, 0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0, 185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188, 189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192, 7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16, 0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0, 198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201, 202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205, 7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 0, 207, 208, 7, 16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0, 0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0, 214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217, 218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211, 1, 0, 0, 0, 220, 215, 
1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228, 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73, 36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73, 36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262, 264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266, 1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0, 0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5, 34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0, 0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289, 8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 
295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62, 1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304, 305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308, 5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63, 31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65, 32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7, 27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326, 72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8, 29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160, 177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272, 278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0] \ No newline at end of file +[4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 
1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 118, 8, 13, 11, 13, 12, 13, 119, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 137, 8, 15, 11, 15, 12, 15, 138, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 161, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 3, 19, 178, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 221, 8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 226, 8, 29, 1, 29, 4, 29, 229, 8, 29, 11, 29, 12, 29, 230, 1, 29, 1, 29, 5, 29, 235, 8, 29, 10, 29, 12, 29, 238, 9, 29, 3, 29, 240, 8, 29, 1, 29, 1, 29, 3, 29, 244, 8, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 3, 29, 251, 8, 29, 1, 29, 3, 29, 254, 8, 29, 1, 29, 1, 29, 4, 29, 258, 8, 29, 11, 29, 12, 29, 259, 1, 29, 1, 29, 3, 29, 264, 8, 29, 1, 29, 4, 29, 267, 8, 29, 11, 29, 12, 29, 268, 3, 29, 271, 8, 29, 3, 29, 273, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 289, 8, 30, 10, 30, 12, 30, 292, 9, 30, 1, 30, 3, 30, 295, 8, 30, 1, 31, 1, 31, 5, 31, 299, 8, 31, 10, 31, 12, 31, 302, 9, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 316, 8, 34, 10, 34, 12, 34, 319, 9, 34, 1, 35, 4, 35, 322, 8, 35, 11, 35, 12, 35, 323, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 4, 37, 331, 8, 37, 11, 37, 12, 37, 332, 0, 0, 38, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 
28, 57, 0, 59, 29, 61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0, 75, 33, 1, 0, 30, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66, 66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0, 0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154, 1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0, 0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 
193, 1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296, 1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0, 0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24, 1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0, 114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117, 116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0, 126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7, 4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, 0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142, 7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3, 0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0, 148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151, 152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 
0, 0, 0, 154, 155, 7, 3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11, 0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163, 164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167, 7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15, 0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0, 173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176, 178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40, 1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0, 0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0, 185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188, 189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192, 7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16, 0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0, 198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201, 202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205, 7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 0, 207, 208, 7, 16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0, 0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0, 214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217, 218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228, 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73, 36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 
240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, 232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73, 36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262, 264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266, 1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0, 0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5, 34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0, 0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289, 8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62, 1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304, 305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308, 5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63, 31, 0, 
311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65, 32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7, 27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326, 72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8, 29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160, 177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272, 278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0] \ No newline at end of file diff --git a/pkg/parser/grammar/filterquery_lexer.go b/pkg/parser/grammar/filterquery_lexer.go index c43f13c81c4f..79ae7c5a7f61 100644 --- a/pkg/parser/grammar/filterquery_lexer.go +++ b/pkg/parser/grammar/filterquery_lexer.go @@ -110,118 +110,119 @@ func filterquerylexerLexerInit() { 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, - 92, 3, 0, 36, 36, 65, 90, 97, 122, 6, 0, 36, 36, 45, 45, 48, 58, 65, 90, - 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, - 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, - 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, - 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, - 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, - 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, - 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, - 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, - 0, 0, 
49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, - 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, - 0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, - 1, 0, 0, 0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, - 92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, - 0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, - 113, 1, 0, 0, 0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, - 0, 0, 0, 35, 154, 1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, - 41, 179, 1, 0, 0, 0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, - 1, 0, 0, 0, 49, 193, 1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, - 0, 55, 220, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, - 1, 0, 0, 0, 63, 296, 1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, - 0, 69, 310, 1, 0, 0, 0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, - 1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, - 80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, - 0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, - 5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, - 0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, - 5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, - 0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, - 5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, - 0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, - 107, 24, 1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, - 7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, - 0, 0, 114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, - 117, 116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, - 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 
122, 123, - 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, - 0, 0, 126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, - 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, - 7, 4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, - 0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, - 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, - 142, 7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, - 7, 3, 0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, - 0, 0, 148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, - 151, 152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, - 7, 3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11, - 0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0, - 160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163, - 164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167, - 7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15, - 0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0, - 173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176, - 178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40, - 1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0, - 0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0, - 185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188, - 189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192, - 7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16, - 0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0, - 198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201, - 202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205, - 7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 
0, 207, 208, 7, - 16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0, - 0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0, - 214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217, - 218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211, - 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22, - 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0, - 225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228, - 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, - 1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73, - 36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, - 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239, - 232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, - 7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, - 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0, - 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, - 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, - 273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254, - 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73, - 36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, - 259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262, - 264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266, - 1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1, - 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0, - 0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, - 225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5, - 34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0, - 0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 
279, 282, 1, 0, 0, 0, - 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282, - 280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289, - 8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1, - 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, - 0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, - 295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62, - 1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1, - 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, - 0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304, - 305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308, - 5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63, - 31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65, - 32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0, - 0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317, - 318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7, - 27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0, - 0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326, - 72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8, - 29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0, - 0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160, - 177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272, - 278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0, + 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, + 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, + 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, + 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, + 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 
0, 0, 0, 0, 15, 1, 0, 0, + 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, + 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, + 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, + 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, + 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, + 0, 55, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, + 0, 0, 71, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, + 0, 0, 5, 81, 1, 0, 0, 0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, + 1, 0, 0, 0, 13, 92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, + 19, 100, 1, 0, 0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, + 1, 0, 0, 0, 27, 113, 1, 0, 0, 0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, + 0, 33, 146, 1, 0, 0, 0, 35, 154, 1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, + 1, 0, 0, 0, 41, 179, 1, 0, 0, 0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, + 0, 47, 190, 1, 0, 0, 0, 49, 193, 1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, + 1, 0, 0, 0, 55, 220, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, + 0, 61, 294, 1, 0, 0, 0, 63, 296, 1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, + 1, 0, 0, 0, 69, 310, 1, 0, 0, 0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, + 0, 75, 330, 1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, + 5, 41, 0, 0, 80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, + 83, 84, 5, 93, 0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, + 0, 0, 0, 87, 91, 5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, + 90, 87, 1, 0, 0, 0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, + 33, 0, 0, 93, 94, 5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, + 96, 97, 5, 62, 0, 0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, + 0, 0, 0, 100, 101, 5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, + 0, 103, 104, 5, 62, 0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, + 107, 5, 61, 0, 0, 107, 24, 1, 
0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, + 7, 1, 0, 0, 110, 111, 7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, + 0, 0, 113, 114, 7, 4, 0, 0, 114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, + 116, 118, 7, 7, 0, 0, 117, 116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, + 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, + 7, 0, 0, 0, 122, 123, 7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, + 0, 0, 125, 28, 1, 0, 0, 0, 126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, + 128, 129, 7, 1, 0, 0, 129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, + 30, 1, 0, 0, 0, 132, 133, 7, 4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, + 6, 0, 0, 135, 137, 7, 7, 0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, + 0, 138, 136, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, + 141, 7, 1, 0, 0, 141, 142, 7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, + 7, 2, 0, 0, 144, 145, 7, 3, 0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, + 0, 0, 147, 148, 7, 3, 0, 0, 148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, + 150, 151, 7, 3, 0, 0, 151, 152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, + 34, 1, 0, 0, 0, 154, 155, 7, 3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, + 7, 1, 0, 0, 157, 158, 7, 11, 0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, + 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, + 162, 163, 7, 12, 0, 0, 163, 164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, + 166, 7, 3, 0, 0, 166, 167, 7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, + 1, 0, 0, 0, 169, 170, 7, 15, 0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, + 0, 0, 172, 173, 7, 6, 0, 0, 173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, + 175, 177, 7, 4, 0, 0, 176, 178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, + 178, 1, 0, 0, 0, 178, 40, 1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, + 4, 0, 0, 181, 42, 1, 0, 0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, + 0, 184, 185, 7, 6, 0, 0, 185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, + 188, 7, 4, 0, 0, 188, 189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, + 7, 5, 0, 0, 191, 192, 7, 12, 0, 
0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, + 0, 0, 194, 195, 7, 16, 0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, + 197, 198, 7, 18, 0, 0, 198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, + 201, 7, 16, 0, 0, 201, 202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, + 1, 0, 0, 0, 204, 205, 7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, + 11, 0, 0, 207, 208, 7, 16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, + 0, 0, 210, 54, 1, 0, 0, 0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, + 213, 214, 7, 20, 0, 0, 214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, + 217, 7, 16, 0, 0, 217, 218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, + 7, 3, 0, 0, 220, 211, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, + 0, 0, 222, 223, 7, 22, 0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, + 0, 225, 224, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, + 229, 3, 73, 36, 0, 228, 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, + 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, + 0, 0, 233, 235, 3, 73, 36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, + 0, 236, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, + 236, 1, 0, 0, 0, 239, 232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, + 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, + 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, + 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, + 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, + 1, 0, 0, 0, 251, 273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, + 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, + 0, 256, 258, 3, 73, 36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, + 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, + 263, 7, 3, 0, 0, 262, 264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, + 1, 0, 0, 0, 264, 266, 1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, + 0, 0, 0, 267, 268, 1, 0, 0, 
0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, + 0, 269, 271, 1, 0, 0, 0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, + 273, 1, 0, 0, 0, 272, 225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, + 0, 0, 0, 274, 280, 5, 34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, + 0, 0, 277, 279, 9, 0, 0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, + 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, + 283, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, + 5, 39, 0, 0, 285, 289, 8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, + 0, 0, 0, 288, 285, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, + 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, + 290, 1, 0, 0, 0, 293, 295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, + 1, 0, 0, 0, 295, 62, 1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, + 0, 0, 298, 297, 1, 0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, + 300, 301, 1, 0, 0, 0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, + 5, 91, 0, 0, 304, 305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, + 0, 0, 307, 308, 5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, + 310, 317, 3, 63, 31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, + 313, 316, 3, 65, 32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, + 313, 1, 0, 0, 0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, + 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, + 0, 0, 320, 322, 7, 27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, + 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, + 326, 6, 35, 0, 0, 326, 72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, + 1, 0, 0, 0, 329, 331, 8, 29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, + 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, + 30, 0, 90, 119, 138, 160, 177, 220, 225, 230, 236, 239, 243, 248, 250, + 253, 259, 263, 268, 270, 272, 278, 280, 288, 290, 294, 300, 315, 317, 323, + 332, 1, 6, 0, 0, 
} deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) diff --git a/pkg/prometheus/clickhouseprometheus/client.go b/pkg/prometheus/clickhouseprometheus/client.go index d033286b4cf9..092f304319b0 100644 --- a/pkg/prometheus/clickhouseprometheus/client.go +++ b/pkg/prometheus/clickhouseprometheus/client.go @@ -188,10 +188,12 @@ func (client *client) querySamples(ctx context.Context, start int64, end int64, var res []*prompb.TimeSeries var ts *prompb.TimeSeries var fingerprint, prevFingerprint uint64 - var timestampMs int64 + var timestampMs, prevTimestamp int64 var value float64 var flags uint32 + prevTimestamp = math.MinInt64 + for rows.Next() { if err := rows.Scan(&metricName, &fingerprint, ×tampMs, &value, &flags); err != nil { return nil, err @@ -209,12 +211,18 @@ func (client *client) querySamples(ctx context.Context, start int64, end int64, ts = &prompb.TimeSeries{ Labels: labels, } + prevTimestamp = math.MinInt64 } if flags&1 == 1 { value = math.Float64frombits(promValue.StaleNaN) } + if timestampMs == prevTimestamp { + continue + } + prevTimestamp = timestampMs + // add samples to current time series ts.Samples = append(ts.Samples, prompb.Sample{ Timestamp: timestampMs, diff --git a/pkg/prometheus/clickhouseprometheus/client_query_test.go b/pkg/prometheus/clickhouseprometheus/client_query_test.go new file mode 100644 index 000000000000..18f42cfe8aa4 --- /dev/null +++ b/pkg/prometheus/clickhouseprometheus/client_query_test.go @@ -0,0 +1,113 @@ +package clickhouseprometheus + +import ( + "context" + "github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest" + cmock "github.com/srikanthccv/ClickHouse-go-mock" + "testing" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/SigNoz/signoz/pkg/telemetrystore" + "github.com/prometheus/prometheus/prompb" + "github.com/stretchr/testify/assert" +) + +// Test for querySamples method +func TestClient_QuerySamples(t *testing.T) { + ctx := 
context.Background() + cols := make([]cmock.ColumnType, 0) + cols = append(cols, cmock.ColumnType{Name: "metric_name", Type: "String"}) + cols = append(cols, cmock.ColumnType{Name: "fingerprint", Type: "UInt64"}) + cols = append(cols, cmock.ColumnType{Name: "unix_milli", Type: "Int64"}) + cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"}) + cols = append(cols, cmock.ColumnType{Name: "flags", Type: "UInt32"}) + tests := []struct { + name string + start int64 + end int64 + fingerprints map[uint64][]prompb.Label + metricName string + subQuery string + args []any + setupMock func(mock cmock.ClickConnMockCommon, args ...any) + expectedTimeSeries int + expectError bool + description string + result []*prompb.TimeSeries + }{ + { + name: "successful samples retrieval", + start: int64(1000), + end: int64(2000), + fingerprints: map[uint64][]prompb.Label{ + 123: { + {Name: "__name__", Value: "cpu_usage"}, + {Name: "instance", Value: "localhost:9090"}, + }, + 456: { + {Name: "__name__", Value: "cpu_usage"}, + {Name: "instance", Value: "localhost:9091"}, + }, + }, + metricName: "cpu_usage", + subQuery: "SELECT metric_name, fingerprint, unix_milli, value, flags", + expectedTimeSeries: 2, + expectError: false, + description: "Should successfully retrieve samples for multiple time series", + setupMock: func(mock cmock.ClickConnMockCommon, args ...any) { + values := [][]interface{}{ + {"cpu_usage", uint64(123), int64(1001), float64(1.1), uint32(0)}, + {"cpu_usage", uint64(123), int64(1001), float64(1.1), uint32(0)}, + {"cpu_usage", uint64(456), int64(1001), float64(1.2), uint32(0)}, + {"cpu_usage", uint64(456), int64(1001), float64(1.2), uint32(0)}, + {"cpu_usage", uint64(456), int64(1001), float64(1.2), uint32(0)}, + } + mock.ExpectQuery("SELECT metric_name, fingerprint, unix_milli, value, flags").WithArgs(args...).WillReturnRows( + cmock.NewRows(cols, values), + ) + }, + result: []*prompb.TimeSeries{ + { + Labels: []prompb.Label{ + {Name: "__name__", Value: 
"cpu_usage"}, + {Name: "instance", Value: "localhost:9090"}, + }, + Samples: []prompb.Sample{ + {Timestamp: 1001, Value: 1.1}, + }, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: "cpu_usage"}, + {Name: "instance", Value: "localhost:9091"}, + }, + Samples: []prompb.Sample{ + {Timestamp: 1001, Value: 1.2}, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherRegexp) + readClient := client{telemetryStore: telemetryStore} + if tt.setupMock != nil { + tt.setupMock(telemetryStore.Mock(), tt.metricName, tt.start, tt.end) + + } + result, err := readClient.querySamples(ctx, tt.start, tt.end, tt.fingerprints, tt.metricName, tt.subQuery, tt.args) + + if tt.expectError { + assert.Error(t, err) + assert.Nil(t, result) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expectedTimeSeries, len(result)) + assert.Equal(t, result, tt.result) + } + + }) + } +} diff --git a/pkg/querier/builder_query.go b/pkg/querier/builder_query.go index 5123b2e25bab..756dbd318b7b 100644 --- a/pkg/querier/builder_query.go +++ b/pkg/querier/builder_query.go @@ -89,6 +89,12 @@ func (q *builderQuery[T]) Fingerprint() string { // Add filter if present if q.spec.Filter != nil && q.spec.Filter.Expression != "" { parts = append(parts, fmt.Sprintf("filter=%s", q.spec.Filter.Expression)) + + for name, item := range q.variables { + if strings.Contains(q.spec.Filter.Expression, "$"+name) { + parts = append(parts, fmt.Sprintf("%s=%s", name, fmt.Sprint(item.Value))) + } + } } // Add group by keys @@ -210,6 +216,15 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string, return nil, errors.Newf(errors.TypeTimeout, errors.CodeTimeout, "Query timed out"). 
WithAdditional("Try refining your search by adding relevant resource attributes filtering") } + + if !errors.Is(err, context.Canceled) { + return nil, errors.Newf( + errors.TypeInternal, + errors.CodeInternal, + "Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.", + ) + } + return nil, err } defer rows.Close() diff --git a/pkg/querier/clickhouse_query.go b/pkg/querier/clickhouse_query.go index 0df12da8f9b8..86f744d0814b 100644 --- a/pkg/querier/clickhouse_query.go +++ b/pkg/querier/clickhouse_query.go @@ -63,7 +63,7 @@ func (q *chSQLQuery) Window() (uint64, uint64) { return q.fromMS, q.toMS } func (q *chSQLQuery) renderVars(query string, vars map[string]qbtypes.VariableItem, start, end uint64) (string, error) { varsData := map[string]any{} for k, v := range vars { - varsData[k] = formatValueForCH(v) + varsData[k] = formatValueForCH(v.Value) } querybuilder.AssignReservedVars(varsData, start, end) diff --git a/pkg/querier/consume.go b/pkg/querier/consume.go index 06d8f4d0e3da..3b1ab29efcd6 100644 --- a/pkg/querier/consume.go +++ b/pkg/querier/consume.go @@ -5,6 +5,7 @@ import ( "math" "reflect" "regexp" + "slices" "sort" "strconv" "strings" @@ -17,6 +18,10 @@ import ( var ( aggRe = regexp.MustCompile(`^__result_(\d+)$`) + // legacyReservedColumnTargetAliases identifies result value from a user + // written clickhouse query. 
The column alias indcate which value is + // to be considered as final result (or target) + legacyReservedColumnTargetAliases = []string{"__result", "__value", "result", "res", "value"} ) // consume reads every row and shapes it into the payload expected for the @@ -131,6 +136,9 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt } else if numericColsCount == 1 { // classic single-value query fallbackValue = val fallbackSeen = true + } else if slices.Contains(legacyReservedColumnTargetAliases, name) { + fallbackValue = val + fallbackSeen = true } else { // numeric label lblVals = append(lblVals, fmt.Sprint(val)) @@ -150,6 +158,9 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt } else if numericColsCount == 1 { // classic single-value query fallbackValue = val fallbackSeen = true + } else if slices.Contains(legacyReservedColumnTargetAliases, name) { + fallbackValue = val + fallbackSeen = true } else { // numeric label lblVals = append(lblVals, fmt.Sprint(val)) @@ -306,12 +317,7 @@ func readAsScalar(rows driver.Rows, queryName string) (*qbtypes.ScalarData, erro // 2. 
deref each slot into the output row row := make([]any, len(scan)) for i, cell := range scan { - valPtr := reflect.ValueOf(cell) - if valPtr.Kind() == reflect.Pointer && !valPtr.IsNil() { - row[i] = valPtr.Elem().Interface() - } else { - row[i] = nil // Nullable columns come back as nil pointers - } + row[i] = derefValue(cell) } data = append(data, row) } @@ -326,6 +332,23 @@ func readAsScalar(rows driver.Rows, queryName string) (*qbtypes.ScalarData, erro }, nil } +func derefValue(v interface{}) interface{} { + if v == nil { + return nil + } + + val := reflect.ValueOf(v) + + for val.Kind() == reflect.Ptr { + if val.IsNil() { + return nil + } + val = val.Elem() + } + + return val.Interface() +} + func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) { colNames := rows.Columns() diff --git a/pkg/querier/postprocess.go b/pkg/querier/postprocess.go index d001d96ea87d..924e64173c9e 100644 --- a/pkg/querier/postprocess.go +++ b/pkg/querier/postprocess.go @@ -9,6 +9,7 @@ import ( "strings" "github.com/SigNoz/govaluate" + "github.com/SigNoz/signoz/pkg/querybuilder" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" ) @@ -44,6 +45,73 @@ func getQueryName(spec any) string { return getqueryInfo(spec).Name } +func StepIntervalForQuery(req *qbtypes.QueryRangeRequest, name string) int64 { + stepsMap := make(map[string]int64) + for _, query := range req.CompositeQuery.Queries { + switch spec := query.Spec.(type) { + case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]: + stepsMap[spec.Name] = int64(spec.StepInterval.Seconds()) + case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]: + stepsMap[spec.Name] = int64(spec.StepInterval.Seconds()) + case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]: + stepsMap[spec.Name] = int64(spec.StepInterval.Seconds()) + case qbtypes.PromQuery: + stepsMap[spec.Name] = int64(spec.Step.Seconds()) + } + } + + if step, ok := 
stepsMap[name]; ok { + return step + } + + exprStr := "" + + for _, query := range req.CompositeQuery.Queries { + switch spec := query.Spec.(type) { + case qbtypes.QueryBuilderFormula: + if spec.Name == name { + exprStr = spec.Expression + } + } + } + + expression, _ := govaluate.NewEvaluableExpressionWithFunctions(exprStr, qbtypes.EvalFuncs()) + + steps := []int64{} + + for _, v := range expression.Vars() { + steps = append(steps, stepsMap[v]) + } + + return querybuilder.LCMList(steps) +} + +func NumAggregationForQuery(req *qbtypes.QueryRangeRequest, name string) int64 { + numAgg := 0 + for _, query := range req.CompositeQuery.Queries { + switch spec := query.Spec.(type) { + case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]: + if spec.Name == name { + numAgg += 1 + } + case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]: + if spec.Name == name { + numAgg += 1 + } + case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]: + if spec.Name == name { + numAgg += 1 + } + case qbtypes.QueryBuilderFormula: + if spec.Name == name { + numAgg += 1 + } + } + } + + return int64(numAgg) +} + func (q *querier) postProcessResults(ctx context.Context, results map[string]any, req *qbtypes.QueryRangeRequest) (map[string]any, error) { // Convert results to typed format for processing typedResults := make(map[string]*qbtypes.Result) @@ -81,6 +149,18 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any // Apply table formatting for UI if requested if req.FormatOptions != nil && req.FormatOptions.FormatTableResultForUI && req.RequestType == qbtypes.RequestTypeScalar { + + // merge result only needed for non-CH query + if len(req.CompositeQuery.Queries) == 1 { + if req.CompositeQuery.Queries[0].Type == qbtypes.QueryTypeClickHouseSQL { + retResult := map[string]any{} + for name, v := range typedResults { + retResult[name] = v.Value + } + return retResult, nil + } + } + // Format results as a table - this merges all queries into a single table 
tableResult := q.formatScalarResultsAsTable(typedResults, req) @@ -96,6 +176,36 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any return tableResult, nil } + if req.RequestType == qbtypes.RequestTypeTimeSeries && req.FormatOptions != nil && req.FormatOptions.FillGaps { + for name := range typedResults { + funcs := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}} + funcs = q.prepareFillZeroArgsWithStep(funcs, req, StepIntervalForQuery(req, name)) + // empty time series if it doesn't exist + tsData, ok := typedResults[name].Value.(*qbtypes.TimeSeriesData) + if !ok { + tsData = &qbtypes.TimeSeriesData{} + } + + if len(tsData.Aggregations) == 0 { + numAgg := NumAggregationForQuery(req, name) + tsData.Aggregations = make([]*qbtypes.AggregationBucket, numAgg) + for idx := range numAgg { + tsData.Aggregations[idx] = &qbtypes.AggregationBucket{ + Index: int(idx), + Series: []*qbtypes.TimeSeries{ + { + Labels: make([]*qbtypes.Label, 0), + Values: make([]*qbtypes.TimeSeriesValue, 0), + }, + }, + } + } + } + + typedResults[name] = q.applyFunctions(typedResults[name], funcs) + } + } + // Convert back to map[string]any finalResults := make(map[string]any) for name, result := range typedResults { @@ -131,6 +241,19 @@ func postProcessMetricQuery( req *qbtypes.QueryRangeRequest, ) *qbtypes.Result { + config := query.Aggregations[0] + spaceAggOrderBy := fmt.Sprintf("%s(%s)", config.SpaceAggregation.StringValue(), config.MetricName) + timeAggOrderBy := fmt.Sprintf("%s(%s)", config.TimeAggregation.StringValue(), config.MetricName) + timeSpaceAggOrderBy := fmt.Sprintf("%s(%s(%s))", config.SpaceAggregation.StringValue(), config.TimeAggregation.StringValue(), config.MetricName) + + for idx := range query.Order { + if query.Order[idx].Key.Name == spaceAggOrderBy || + query.Order[idx].Key.Name == timeAggOrderBy || + query.Order[idx].Key.Name == timeSpaceAggOrderBy { + query.Order[idx].Key.Name = qbtypes.DefaultOrderByKey + } + } + if query.Limit > 
0 { result = q.applySeriesLimit(result, query.Limit, query.Order) } @@ -224,6 +347,13 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes // Process each formula for name, formula := range formulaQueries { + + for idx := range formula.Order { + if formula.Order[idx].Key.Name == formula.Name || formula.Order[idx].Key.Name == formula.Expression { + formula.Order[idx].Key.Name = qbtypes.DefaultOrderByKey + } + } + // Check if we're dealing with time series or scalar data if req.RequestType == qbtypes.RequestTypeTimeSeries { result := q.processTimeSeriesFormula(ctx, results, formula, req) diff --git a/pkg/querier/promql_query.go b/pkg/querier/promql_query.go index 04f3c4c84bc0..e39ce65268d2 100644 --- a/pkg/querier/promql_query.go +++ b/pkg/querier/promql_query.go @@ -63,7 +63,7 @@ func (q *promqlQuery) Window() (uint64, uint64) { func (q *promqlQuery) renderVars(query string, vars map[string]qbv5.VariableItem, start, end uint64) (string, error) { varsData := map[string]any{} for k, v := range vars { - varsData[k] = formatValueForProm(v) + varsData[k] = formatValueForProm(v.Value) } querybuilder.AssignReservedVars(varsData, start, end) diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go index 6d6ea16fa62b..c9c5457a69d6 100644 --- a/pkg/querier/querier.go +++ b/pkg/querier/querier.go @@ -162,6 +162,17 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)), } } + + req.CompositeQuery.Queries[idx].Spec = spec + } + } else if query.Type == qbtypes.QueryTypePromQL { + switch spec := query.Spec.(type) { + case qbtypes.PromQuery: + if spec.Step.Seconds() == 0 { + spec.Step = qbtypes.Step{ + Duration: time.Second * time.Duration(querybuilder.RecommendedStepIntervalForMetric(req.Start, req.End)), + } + } req.CompositeQuery.Queries[idx].Spec = spec } } @@ -221,6 +232,10 @@ func (q *querier) QueryRange(ctx 
context.Context, orgID valuer.UUID, req *qbtype spec.Aggregations[i].Temporality = temp } } + // TODO(srikanthccv): warn when the metric is missing + if spec.Aggregations[i].Temporality == metrictypes.Unknown { + spec.Aggregations[i].Temporality = metrictypes.Unspecified + } } spec.ShiftBy = extractShiftFromBuilderQuery(spec) timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) diff --git a/pkg/query-service/.dockerignore b/pkg/query-service/.dockerignore deleted file mode 100644 index 01d98e1bd87e..000000000000 --- a/pkg/query-service/.dockerignore +++ /dev/null @@ -1,3 +0,0 @@ -.vscode -README.md -signoz.db \ No newline at end of file diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go index 4f5266c51975..7bc1d1dd2329 100644 --- a/pkg/query-service/app/clickhouseReader/reader.go +++ b/pkg/query-service/app/clickhouseReader/reader.go @@ -2858,11 +2858,11 @@ func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, orgID valuer.U WHERE metric_name = $1 AND unix_milli >= $2 AND type = 'Histogram' - AND JSONExtractString(labels, 'service_name') = $3 + AND (JSONExtractString(labels, 'service_name') = $3 OR JSONExtractString(labels, 'service.name') = $4) GROUP BY le ORDER BY le`, signozMetricDBName, signozTSTableNameV41Day) - rows, err := r.db.Query(ctx, query, metricName, unixMilli, serviceName) + rows, err := r.db.Query(ctx, query, metricName, unixMilli, serviceName, serviceName) if err != nil { zap.L().Error("Error while querying histogram buckets", zap.Error(err)) return nil, fmt.Errorf("error while querying histogram buckets: %s", err.Error()) diff --git a/pkg/query-service/app/cloudintegrations/controller.go b/pkg/query-service/app/cloudintegrations/controller.go index e617d6e7dbca..e996c7a26e21 100644 --- a/pkg/query-service/app/cloudintegrations/controller.go +++ b/pkg/query-service/app/cloudintegrations/controller.go @@ -115,7 +115,7 @@ 
func (c *Controller) GenerateConnectionUrl(ctx context.Context, orgId string, cl } // TODO(Raj): parameterized this in follow up changes - agentVersion := "v0.0.4" + agentVersion := "v0.0.5" connectionUrl := fmt.Sprintf( "https://%s.console.aws.amazon.com/cloudformation/home?region=%s#/stacks/quickcreate?", diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/elasticache/assets/dashboards/redis_overview_dot.json b/pkg/query-service/app/cloudintegrations/services/definitions/aws/elasticache/assets/dashboards/redis_overview_dot.json index 4cfbd83db283..330f3b2b51c5 100644 --- a/pkg/query-service/app/cloudintegrations/services/definitions/aws/elasticache/assets/dashboards/redis_overview_dot.json +++ b/pkg/query-service/app/cloudintegrations/services/definitions/aws/elasticache/assets/dashboards/redis_overview_dot.json @@ -175,7 +175,7 @@ "multiSelect": false, "name": "Account", "order": 0, - "queryValue": "SELECT DISTINCT JSONExtractString(labels, 'cloud.account.id') AS cloud.account.id FROM signoz_metrics.distributed_time_series_v4_1day WHERE metric_name = 'aws_ElastiCache_CPUUtilization_max' GROUP BY cloud.account.id", + "queryValue": "SELECT DISTINCT JSONExtractString(labels, 'cloud.account.id') AS `cloud.account.id` FROM signoz_metrics.distributed_time_series_v4_1day WHERE metric_name = 'aws_ElastiCache_CPUUtilization_max' GROUP BY `cloud.account.id`", "showALLOption": false, "sort": "DISABLED", "textboxValue": "", diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index ff5144250ea3..0e0c9e036946 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -4527,6 +4527,56 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result } properties := queryInfoResult.ToMap() + referrer := r.Header.Get("Referer") + + if referrer == "" { + return + } + + properties["referrer"] = referrer + + logsExplorerMatched, _ := 
regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer) + traceExplorerMatched, _ := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer) + metricsExplorerMatched, _ := regexp.MatchString(`/metrics-explorer/explorer(?:\?.*)?$`, referrer) + dashboardMatched, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) + alertMatched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) + + switch { + case dashboardMatched: + properties["module_name"] = "dashboard" + case alertMatched: + properties["module_name"] = "rule" + case metricsExplorerMatched: + properties["module_name"] = "metrics-explorer" + case logsExplorerMatched: + properties["module_name"] = "logs-explorer" + case traceExplorerMatched: + properties["module_name"] = "traces-explorer" + default: + return + } + + if dashboardMatched { + if dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`); err == nil { + if matches := dashboardIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { + properties["dashboard_id"] = matches[1] + } + } + + if widgetIDRegex, err := regexp.Compile(`widgetId=([a-f0-9\-]+)`); err == nil { + if matches := widgetIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { + properties["widget_id"] = matches[1] + } + } + } + + if alertMatched { + if alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`); err == nil { + if matches := alertIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { + properties["rule_id"] = matches[1] + } + } + } // Check if result is empty or has no data if len(result) == 0 { @@ -4551,47 +4601,6 @@ func (aH *APIHandler) sendQueryResultEvents(r *http.Request, result []*v3.Result } } - referrer := r.Header.Get("Referer") - - if referrer == "" { - aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties) - return - } - - properties["referrer"] = referrer - - if matched, _ := 
regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer); matched { - - if dashboardIDRegex, err := regexp.Compile(`/dashboard/([a-f0-9\-]+)/`); err == nil { - if matches := dashboardIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { - properties["dashboard_id"] = matches[1] - } - } - - if widgetIDRegex, err := regexp.Compile(`widgetId=([a-f0-9\-]+)`); err == nil { - if matches := widgetIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { - properties["widget_id"] = matches[1] - } - } - - properties["module_name"] = "dashboard" - aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties) - return - } - - if matched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer); matched { - - if alertIDRegex, err := regexp.Compile(`ruleId=(\d+)`); err == nil { - if matches := alertIDRegex.FindStringSubmatch(referrer); len(matches) > 1 { - properties["alert_id"] = matches[1] - } - } - - properties["module_name"] = "rule" - aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties) - return - } - aH.Signoz.Analytics.TrackUser(r.Context(), claims.OrgID, claims.UserID, "Telemetry Query Returned Results", properties) } diff --git a/pkg/query-service/app/logparsingpipeline/error.go b/pkg/query-service/app/logparsingpipeline/error.go new file mode 100644 index 000000000000..bd127a69cc3e --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/error.go @@ -0,0 +1,8 @@ +package logparsingpipeline + +import "github.com/SigNoz/signoz/pkg/errors" + +var ( + CodeInvalidOperatorType = errors.MustNewCode("operator_type_mismatch") + CodeFieldNilCheckType = errors.MustNewCode("operator_field_nil_check") +) diff --git a/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go index e8b4bf1fb980..913678d0cb9d 100644 --- 
a/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go +++ b/pkg/query-service/app/logparsingpipeline/pipelineBuilder.go @@ -6,13 +6,15 @@ import ( "slices" "strings" + signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/query-service/queryBuilderToExpr" "github.com/SigNoz/signoz/pkg/types/pipelinetypes" "github.com/antonmedv/expr" "github.com/antonmedv/expr/ast" "github.com/antonmedv/expr/parser" - "github.com/pkg/errors" + "github.com/google/uuid" ) const ( @@ -38,7 +40,7 @@ func PreparePipelineProcessor(gettablePipelines []pipelinetypes.GettablePipeline operators, err := getOperators(v.Config) if err != nil { - return nil, nil, errors.Wrap(err, "failed to prepare operators") + return nil, nil, err } if len(operators) == 0 { @@ -47,7 +49,7 @@ func PreparePipelineProcessor(gettablePipelines []pipelinetypes.GettablePipeline filterExpr, err := queryBuilderToExpr.Parse(v.Filter) if err != nil { - return nil, nil, errors.Wrap(err, "failed to parse pipeline filter") + return nil, nil, err } router := []pipelinetypes.PipelineOperator{ @@ -93,10 +95,6 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin filteredOp := []pipelinetypes.PipelineOperator{} for i, operator := range ops { if operator.Enabled { - if len(filteredOp) > 0 { - filteredOp[len(filteredOp)-1].Output = operator.ID - } - if operator.Type == "regex_parser" { parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom) if err != nil { @@ -124,16 +122,13 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin operator.If = parseFromNotNilCheck } else if operator.Type == "json_parser" { - parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom) + operators, err := processJSONParser(&operator) if err != nil { - return nil, fmt.Errorf( - 
"couldn't generate nil check for parseFrom of json parser op %s: %w", operator.Name, err, - ) + return nil, fmt.Errorf("couldn't process json_parser op %s: %s", operator.Name, err) } - operator.If = fmt.Sprintf( - `%s && ((type(%s) == "string" && %s matches "^\\s*{.*}\\s*$" ) || type(%s) == "map")`, - parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, - ) + + filteredOp = append(filteredOp, operators...) + continue // Continue here to skip deduplication of json_parser operator } else if operator.Type == "add" { if strings.HasPrefix(operator.Value, "EXPR(") && strings.HasSuffix(operator.Value, ")") { expression := strings.TrimSuffix(strings.TrimPrefix(operator.Value, "EXPR("), ")") @@ -148,7 +143,6 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin operator.If = fieldsNotNilCheck } } - } else if operator.Type == "move" || operator.Type == "copy" { fromNotNilCheck, err := fieldNotNilCheck(operator.From) if err != nil { @@ -157,7 +151,6 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin ) } operator.If = fromNotNilCheck - } else if operator.Type == "remove" { fieldNotNilCheck, err := fieldNotNilCheck(operator.Field) if err != nil { @@ -166,10 +159,8 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin ) } operator.If = fieldNotNilCheck - } else if operator.Type == "trace_parser" { cleanTraceParser(&operator) - } else if operator.Type == "time_parser" { parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom) if err != nil { @@ -202,19 +193,11 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin } // TODO(Raj): Maybe add support for gotime too eventually - } else if operator.Type == "severity_parser" { - parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom) + err := processSeverityParser(&operator) if err != nil { - return nil, fmt.Errorf( - "couldn't generate nil check for parseFrom of 
severity parser %s: %w", operator.Name, err, - ) + return nil, err } - operator.If = fmt.Sprintf( - `%s && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`, - parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, - ) - } filteredOp = append(filteredOp, operator) @@ -222,9 +205,193 @@ func getOperators(ops []pipelinetypes.PipelineOperator) ([]pipelinetypes.Pipelin filteredOp[len(filteredOp)-1].Output = "" } } + + for idx := range filteredOp { + if idx > 0 { + filteredOp[idx-1].Output = filteredOp[idx].ID + } + } return filteredOp, nil } +func processSeverityParser(operator *pipelinetypes.PipelineOperator) error { + if operator.Type != "severity_parser" { + return errors.NewUnexpectedf(CodeInvalidOperatorType, "operator type received %s", operator.Type) + } + + parseFromNotNilCheck, err := fieldNotNilCheck(operator.ParseFrom) + if err != nil { + return errors.WrapInvalidInputf(err, CodeFieldNilCheckType, + "couldn't generate nil check for parseFrom of severity parser %s", operator.Name, + ) + } + operator.If = fmt.Sprintf( + `%s && ( type(%s) == "string" || ( type(%s) in ["int", "float"] && %s == float(int(%s)) ) )`, + parseFromNotNilCheck, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, operator.ParseFrom, + ) + + return nil +} + +// processJSONParser converts simple JSON parser operator into multiple operators for JSONMapping of default variables +func processJSONParser(parent *pipelinetypes.PipelineOperator) ([]pipelinetypes.PipelineOperator, error) { + if parent.Type != "json_parser" { + return nil, errors.NewUnexpectedf(CodeInvalidOperatorType, "operator type received %s", parent.Type) + } + + parseFromNotNilCheck, err := fieldNotNilCheck(parent.ParseFrom) + if err != nil { + return nil, errors.WrapInvalidInputf(err, CodeFieldNilCheckType, + "couldn't generate nil check for parseFrom of json parser op %s: %s", parent.Name, err, + ) + } + parent.If = fmt.Sprintf( + 
`%s && ((type(%s) == "string" && isJSON(%s) && type(fromJSON(unquote(%s))) == "map" ) || type(%s) == "map")`, + parseFromNotNilCheck, parent.ParseFrom, parent.ParseFrom, parent.ParseFrom, parent.ParseFrom, + ) + if parent.EnableFlattening { + parent.MaxFlatteningDepth = constants.MaxJSONFlatteningDepth + } + + // return if no mapping available + if parent.Mapping == nil { + return []pipelinetypes.PipelineOperator{*parent}, nil + } + + mapping := parent.Mapping + children := []pipelinetypes.PipelineOperator{} + + // cloning since the same function is used when saving pipelines (POST request) hence reversing + // the same array inplace ends up with saving mapping in a reversed order in database + cloneAndReverse := func(input []string) []string { + cloned := slices.Clone(input) + slices.Reverse(cloned) + + return cloned + } + + generateCustomID := func() string { + return fmt.Sprintf("%s-json-parser", uuid.NewString()) // json-parser helps in identifying processors part of JSON Parser + } + + // reusable move operator function + generateMoveOperators := func(keywords []string, to string) error { + for _, keyword := range cloneAndReverse(keywords) { + operator := pipelinetypes.PipelineOperator{ + Type: "move", + ID: generateCustomID(), + OnError: signozstanzahelper.SendOnErrorQuiet, + From: fmt.Sprintf(`%s["%s"]`, parent.ParseTo, keyword), + To: to, + } + + fromNotNilCheck, err := fieldNotNilCheck(operator.From) + if err != nil { + return err + } + + operator.If = fromNotNilCheck + children = append(children, operator) + } + + return nil + } + + // JSONMapping: host + err = generateMoveOperators(mapping[pipelinetypes.Host], `resource["host.name"]`) + if err != nil { + return nil, err + } + + // JSONMapping: service + err = generateMoveOperators(mapping[pipelinetypes.Service], `resource["service.name"]`) + if err != nil { + return nil, err + } + + // JSONMapping: trace_id + for _, keyword := range cloneAndReverse(mapping[pipelinetypes.TraceID]) { + operator := 
pipelinetypes.PipelineOperator{ + Type: "trace_parser", + ID: generateCustomID(), + OnError: signozstanzahelper.SendOnErrorQuiet, + TraceParser: &pipelinetypes.TraceParser{ + TraceId: &pipelinetypes.ParseFrom{ + ParseFrom: fmt.Sprintf(`%s["%s"]`, parent.ParseTo, keyword), + }, + }, + } + + children = append(children, operator) + } + + // JSONMapping: span_id + for _, keyword := range cloneAndReverse(mapping[pipelinetypes.SpanID]) { + operator := pipelinetypes.PipelineOperator{ + Type: "trace_parser", + ID: generateCustomID(), + OnError: signozstanzahelper.SendOnErrorQuiet, + TraceParser: &pipelinetypes.TraceParser{ + SpanId: &pipelinetypes.ParseFrom{ + ParseFrom: fmt.Sprintf(`%s["%s"]`, parent.ParseTo, keyword), + }, + }, + } + + children = append(children, operator) + } + + // JSONMapping: trace_flags + for _, keyword := range cloneAndReverse(mapping[pipelinetypes.TraceFlags]) { + operator := pipelinetypes.PipelineOperator{ + Type: "trace_parser", + ID: generateCustomID(), + OnError: signozstanzahelper.SendOnErrorQuiet, + TraceParser: &pipelinetypes.TraceParser{ + TraceFlags: &pipelinetypes.ParseFrom{ + ParseFrom: fmt.Sprintf(`%s["%s"]`, parent.ParseTo, keyword), + }, + }, + } + + children = append(children, operator) + } + + // JSONMapping: severity + for _, keyword := range cloneAndReverse(mapping[pipelinetypes.Severity]) { + operator := pipelinetypes.PipelineOperator{ + Type: "severity_parser", + ID: generateCustomID(), + OnError: signozstanzahelper.SendOnErrorQuiet, + ParseFrom: fmt.Sprintf(`%s["%s"]`, parent.ParseTo, keyword), + } + err := processSeverityParser(&operator) + if err != nil { + return nil, err + } + + operator.Mapping = pipelinetypes.DefaultSeverityMapping + children = append(children, operator) + } + + // JSONMapping: environment + err = generateMoveOperators(mapping[pipelinetypes.Environment], `resource["deployment.environment.name"]`) + if err != nil { + return nil, err + } + + // JSONMapping: body + err = 
generateMoveOperators(mapping[pipelinetypes.Message], `body`) + if err != nil { + return nil, err + } + + // removed mapping reference so it doesn't appear in Collector's config + parent.Mapping = nil + return append(append([]pipelinetypes.PipelineOperator{}, *parent), children...), nil +} + +// TODO: (Piyush) remove this in future func cleanTraceParser(operator *pipelinetypes.PipelineOperator) { if operator.TraceId != nil && len(operator.TraceId.ParseFrom) < 1 { operator.TraceId = nil @@ -241,7 +408,7 @@ func cleanTraceParser(operator *pipelinetypes.PipelineOperator) { func fieldNotNilCheck(fieldPath string) (string, error) { _, err := expr.Compile(fieldPath) if err != nil { - return "", fmt.Errorf("invalid fieldPath %s: %w", fieldPath, err) + return "", errors.WrapInvalidInputf(err, CodeFieldNilCheckType, "invalid fieldPath %s", fieldPath) } // helper for turning `.` into `?.` in field paths. @@ -270,7 +437,7 @@ func fieldNotNilCheck(fieldPath string) (string, error) { // should come out to be (attributes.test != nil && attributes.test["a.b"]?.value != nil) collectionNotNilCheck, err := fieldNotNilCheck(parts[0]) if err != nil { - return "", fmt.Errorf("couldn't generate nil check for %s: %w", parts[0], err) + return "", errors.WithAdditional(err, "couldn't generate nil check for %s", parts[0]) } // generate nil check for entire path. 
diff --git a/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go b/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go index 5791cf98d79e..0e60dd9e1b6a 100644 --- a/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go +++ b/pkg/query-service/app/logparsingpipeline/pipelineBuilder_test.go @@ -7,12 +7,14 @@ import ( "testing" "time" + signozstanzahelper "github.com/SigNoz/signoz-otel-collector/processor/signozlogspipelineprocessor/stanza/operator/helper" "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/pipelinetypes" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry" . "github.com/smartystreets/goconvey/convey" "github.com/stretchr/testify/require" @@ -841,3 +843,147 @@ func TestContainsFilterIsCaseInsensitive(t *testing.T) { _, test2Exists := result[0].Attributes_string["test2"] require.False(test2Exists) } + +func TestProcessJSONParser_WithFlatteningAndMapping(t *testing.T) { + parserID := uuid.NewString() + outputID := uuid.NewString() + + parent := &pipelinetypes.PipelineOperator{ + Type: "json_parser", + ID: parserID, + Name: "Parse JSON", + OrderId: 1, + Enabled: true, + ParseFrom: "body", + ParseTo: "attributes", + Output: outputID, + EnableFlattening: true, + EnablePaths: false, + PathPrefix: "", + Mapping: map[string][]string{ + pipelinetypes.Host: {"host", "hostname"}, + pipelinetypes.Service: {"service", "syslog.appname"}, + pipelinetypes.Severity: {"status", "severity", "level", "syslog.severity"}, + pipelinetypes.TraceID: {"trace_id"}, + pipelinetypes.SpanID: {"span_id"}, + pipelinetypes.Message: {"message", "msg", "log"}, + pipelinetypes.TraceFlags: {"flags"}, + pipelinetypes.Environment: {"service.env"}, + }, + } + + // Total children generated = 
sum(len(mapping values)) + severity_parser + trace_parser ops + expectedMoveOps := len(parent.Mapping[pipelinetypes.Host]) + + len(parent.Mapping[pipelinetypes.Service]) + + len(parent.Mapping[pipelinetypes.Message]) + + len(parent.Mapping[pipelinetypes.Environment]) + expectedTraceOps := len(parent.Mapping[pipelinetypes.TraceID]) + + len(parent.Mapping[pipelinetypes.SpanID]) + + len(parent.Mapping[pipelinetypes.TraceFlags]) + expectedSeverityOps := len(parent.Mapping[pipelinetypes.Severity]) // severity_parser + + totalOps := expectedMoveOps + expectedTraceOps + expectedSeverityOps + + ops, err := processJSONParser(parent) + require.NoError(t, err) + require.NotEmpty(t, ops) + + // Parent is always first + parentOp := ops[0] + require.Equal(t, "json_parser", parentOp.Type) + require.Equal(t, 1, parentOp.MaxFlatteningDepth) + require.Nil(t, parentOp.Mapping) // Mapping should be removed + require.Nil(t, parent.Mapping) // Mapping should be removed + require.Contains(t, parentOp.If, `isJSON(body)`) + require.Contains(t, parentOp.If, `type(body)`) + + require.Equal(t, 1+totalOps, len(ops)) + + var traceParserCount, moveCount, severityParserCount int + for _, op := range ops[1:] { + require.NotEmpty(t, op.ID) + require.Equal(t, op.OnError, signozstanzahelper.SendOnErrorQuiet) + + switch op.Type { + case "move": + require.NotEmpty(t, op.From) + require.NotEmpty(t, op.To) + moveCount++ + case "trace_parser": + require.NotNil(t, op.TraceParser) + traceParserCount++ + case "severity_parser": + require.NotEmpty(t, op.ParseFrom) + require.NotEmpty(t, op.If) + severityParserCount++ + default: + t.Errorf("unexpected operator type: %s", op.Type) + } + } + + require.Equal(t, expectedMoveOps, moveCount) + require.Equal(t, expectedTraceOps, traceParserCount) + require.Equal(t, expectedSeverityOps, severityParserCount) +} + +func TestProcessJSONParser_WithoutMapping(t *testing.T) { + parent := &pipelinetypes.PipelineOperator{ + Type: "json_parser", + ID: uuid.NewString(), + Name: 
"Parse JSON", + OrderId: 1, + Enabled: true, + ParseFrom: "body", + ParseTo: "attributes", + EnableFlattening: true, + EnablePaths: true, + PathPrefix: "parsed", + Mapping: nil, // No mapping + } + + ops, err := processJSONParser(parent) + require.NoError(t, err) + require.Len(t, ops, 1) // Only the parent operator should exist + + op := ops[0] + require.Equal(t, "json_parser", op.Type) + require.Equal(t, 1, op.MaxFlatteningDepth) + require.True(t, op.EnableFlattening) + require.True(t, op.EnablePaths) + require.Equal(t, "parsed", op.PathPrefix) + require.Contains(t, op.If, `isJSON(body)`) +} + +func TestProcessJSONParser_Simple(t *testing.T) { + parent := &pipelinetypes.PipelineOperator{ + Type: "json_parser", + ID: uuid.NewString(), + Name: "Parse JSON", + OrderId: 1, + Enabled: true, + ParseFrom: "body", + ParseTo: "attributes", + } + + ops, err := processJSONParser(parent) + require.NoError(t, err) + require.Len(t, ops, 1) // Only the parent operator should exist + + op := ops[0] + require.Equal(t, "json_parser", op.Type) + require.Equal(t, 0, op.MaxFlatteningDepth) + require.False(t, op.EnableFlattening) + require.False(t, op.EnablePaths) + require.Equal(t, "", op.PathPrefix) + require.Contains(t, op.If, `isJSON(body)`) +} + +func TestProcessJSONParser_InvalidType(t *testing.T) { + parent := &pipelinetypes.PipelineOperator{ + Type: "copy", // Invalid type + } + + _, err := processJSONParser(parent) + require.Error(t, err) + require.Contains(t, err.Error(), "operator type received copy") +} diff --git a/pkg/query-service/app/logparsingpipeline/severity_parser_test.go b/pkg/query-service/app/logparsingpipeline/severity_parser_test.go index ec707dfb95f9..4ff9b53de4c1 100644 --- a/pkg/query-service/app/logparsingpipeline/severity_parser_test.go +++ b/pkg/query-service/app/logparsingpipeline/severity_parser_test.go @@ -183,7 +183,7 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) { Enabled: true, Name: "severity parser", ParseFrom: 
"attributes.test_severity", - SeverityMapping: map[string][]string{ + Mapping: map[string][]string{ "debug": {"debug"}, }, OverwriteSeverityText: true, @@ -199,7 +199,7 @@ func TestNoCollectorErrorsFromSeverityParserForMismatchedLogs(t *testing.T) { Enabled: true, Name: "severity parser", ParseFrom: "attributes.test_severity", - SeverityMapping: map[string][]string{ + Mapping: map[string][]string{ "debug": {"debug"}, }, OverwriteSeverityText: true, diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 3be0bef5b90e..0b784192672a 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -604,6 +604,7 @@ var StaticFieldsTraces = map[string]v3.AttributeKey{} var IsDotMetricsEnabled = false var PreferSpanMetrics = false +var MaxJSONFlatteningDepth = 1 func init() { StaticFieldsTraces = maps.Clone(NewStaticFieldsTraces) @@ -614,6 +615,12 @@ func init() { if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" { PreferSpanMetrics = true } + + // set max flattening depth + depth, err := strconv.Atoi(GetOrDefaultEnv(maxJSONFlatteningDepth, "1")) + if err == nil { + MaxJSONFlatteningDepth = depth + } } const TRACE_V4_MAX_PAGINATION_LIMIT = 10000 @@ -641,3 +648,4 @@ func GetDefaultSiteURL() string { } const DotMetricsEnabled = "DOT_METRICS_ENABLED" +const maxJSONFlatteningDepth = "MAX_JSON_FLATTENING_DEPTH" diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go deleted file mode 100644 index 90cad098a876..000000000000 --- a/pkg/query-service/main.go +++ /dev/null @@ -1,176 +0,0 @@ -package main - -import ( - "context" - "flag" - "os" - "time" - - "github.com/SigNoz/signoz/pkg/analytics" - "github.com/SigNoz/signoz/pkg/config" - "github.com/SigNoz/signoz/pkg/config/envprovider" - "github.com/SigNoz/signoz/pkg/config/fileprovider" - "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/licensing" - "github.com/SigNoz/signoz/pkg/licensing/nooplicensing" - 
"github.com/SigNoz/signoz/pkg/modules/organization" - "github.com/SigNoz/signoz/pkg/query-service/app" - "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/sqlschema" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/version" - "github.com/SigNoz/signoz/pkg/zeus" - "github.com/SigNoz/signoz/pkg/zeus/noopzeus" - - "go.uber.org/zap" - "go.uber.org/zap/zapcore" -) - -func initZapLog() *zap.Logger { - config := zap.NewProductionConfig() - config.EncoderConfig.TimeKey = "timestamp" - config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder - logger, _ := config.Build() - return logger -} - -func main() { - var promConfigPath, skipTopLvlOpsPath string - - // disables rule execution but allows change to the rule definition - var disableRules bool - - var useLogsNewSchema bool - var useTraceNewSchema bool - // the url used to build link in the alert messages in slack and other systems - var ruleRepoURL, cacheConfigPath, fluxInterval, fluxIntervalForTraceDetail string - var cluster string - - var preferSpanMetrics bool - - var maxIdleConns int - var maxOpenConns int - var dialTimeout time.Duration - - // Deprecated - flag.BoolVar(&useLogsNewSchema, "use-logs-new-schema", false, "use logs_v2 schema for logs") - // Deprecated - flag.BoolVar(&useTraceNewSchema, "use-trace-new-schema", false, "use new schema for traces") - // Deprecated - flag.StringVar(&promConfigPath, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") - // Deprecated - flag.StringVar(&skipTopLvlOpsPath, "skip-top-level-ops", "", "(config file to skip top level operations)") - // Deprecated - flag.BoolVar(&disableRules, "rules.disable", false, "(disable rule evaluation)") - // Deprecated - flag.BoolVar(&preferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)") - // Deprecated - 
flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)") - // Deprecated - flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)") - // Deprecated - flag.StringVar(&fluxInterval, "flux-interval", "5m", "(the interval to exclude data from being cached to avoid incorrect cache for data in motion)") - // Deprecated - flag.StringVar(&fluxIntervalForTraceDetail, "flux-interval-trace-detail", "2m", "(the interval to exclude data from being cached to avoid incorrect cache for trace data in motion)") - // Deprecated - flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')") - // Deprecated - flag.StringVar(&cluster, "cluster-name", "cluster", "(cluster name - defaults to 'cluster')") - // Deprecated - flag.IntVar(&maxIdleConns, "max-idle-conns", 50, "(number of connections to maintain in the pool, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") - // Deprecated - flag.IntVar(&maxOpenConns, "max-open-conns", 100, "(max connections for use at any time, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") - // Deprecated - flag.DurationVar(&dialTimeout, "dial-timeout", 5*time.Second, "(the maximum time to establish a connection, only used with clickhouse if not set in ClickHouseUrl env var DSN.)") - flag.Parse() - - loggerMgr := initZapLog() - zap.ReplaceGlobals(loggerMgr) - defer loggerMgr.Sync() // flushes buffer, if any - - logger := loggerMgr.Sugar() - - config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{ - Uris: []string{"env:"}, - ProviderFactories: []config.ProviderFactory{ - envprovider.NewFactory(), - fileprovider.NewFactory(), - }, - }, signoz.DeprecatedFlags{ - MaxIdleConns: maxIdleConns, - MaxOpenConns: maxOpenConns, - DialTimeout: dialTimeout, - Config: promConfigPath, - FluxInterval: fluxInterval, - FluxIntervalForTraceDetail: fluxIntervalForTraceDetail, - 
PreferSpanMetrics: preferSpanMetrics, - Cluster: cluster, - }) - if err != nil { - zap.L().Fatal("Failed to create config", zap.Error(err)) - } - - version.Info.PrettyPrint(config.Version) - - // Read the jwt secret key - jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") - - if len(jwtSecret) == 0 { - zap.L().Warn("No JWT secret key is specified.") - } else { - zap.L().Info("JWT secret key set successfully.") - } - - jwt := authtypes.NewJWT(jwtSecret, 30*time.Minute, 30*24*time.Hour) - - signoz, err := signoz.New( - context.Background(), - config, - jwt, - zeus.Config{}, - noopzeus.NewProviderFactory(), - licensing.Config{}, - func(_ sqlstore.SQLStore, _ zeus.Zeus, _ organization.Getter, _ analytics.Analytics) factory.ProviderFactory[licensing.Licensing, licensing.Config] { - return nooplicensing.NewFactory() - }, - signoz.NewEmailingProviderFactories(), - signoz.NewCacheProviderFactories(), - signoz.NewWebProviderFactories(), - func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { - return signoz.NewSQLSchemaProviderFactories(sqlstore) - }, - signoz.NewSQLStoreProviderFactories(), - signoz.NewTelemetryStoreProviderFactories(), - ) - if err != nil { - zap.L().Fatal("Failed to create signoz", zap.Error(err)) - } - - server, err := app.NewServer(config, signoz, jwt) - if err != nil { - logger.Fatal("Failed to create server", zap.Error(err)) - } - - if err := server.Start(context.Background()); err != nil { - logger.Fatal("Could not start servers", zap.Error(err)) - } - - signoz.Start(context.Background()) - - if err := signoz.Wait(context.Background()); err != nil { - zap.L().Fatal("Failed to start signoz", zap.Error(err)) - } - - err = server.Stop(context.Background()) - if err != nil { - zap.L().Fatal("Failed to stop server", zap.Error(err)) - } - - err = signoz.Stop(context.Background()) - if err != nil { - zap.L().Fatal("Failed to stop signoz", zap.Error(err)) - } - -} diff --git 
a/pkg/query-service/model/v3/v3.go b/pkg/query-service/model/v3/v3.go index 239e9acd72f2..7939401f9852 100644 --- a/pkg/query-service/model/v3/v3.go +++ b/pkg/query-service/model/v3/v3.go @@ -1140,6 +1140,9 @@ func (b *BuilderQuery) Validate(panelType PanelType) error { _, ok := function.Args[0].(float64) if !ok { // if string, attempt to convert to float + if _, ok := function.Args[0].(string); !ok { + return fmt.Errorf("threshold param should be a float") + } threshold, err := strconv.ParseFloat(function.Args[0].(string), 64) if err != nil { return fmt.Errorf("threshold param should be a float") diff --git a/pkg/querybuilder/fallback_expr.go b/pkg/querybuilder/fallback_expr.go index ca69bf16a39f..425e305699fe 100644 --- a/pkg/querybuilder/fallback_expr.go +++ b/pkg/querybuilder/fallback_expr.go @@ -56,6 +56,16 @@ func CollisionHandledFinalExpr( // the key didn't have the right context to be added to the query // we try to use the context we know of keysForField := keys[field.Name] + + if len(keysForField) == 0 { + // check if the key exists with {fieldContext}.{key} + // because the context could be legitimate prefix in user data, example `metric.max` + keyWithContext := fmt.Sprintf("%s.%s", field.FieldContext.StringValue(), field.Name) + if len(keys[keyWithContext]) > 0 { + keysForField = keys[keyWithContext] + } + } + if len(keysForField) == 0 { // - the context is not provided // - there are not keys for the field @@ -68,7 +78,7 @@ func CollisionHandledFinalExpr( return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction) } else { // not even a close match, return an error - return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name) + return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name) } } else { for _, key := range keysForField { @@ -90,6 +100,10 @@ func CollisionHandledFinalExpr( stmts = 
append(stmts, colName) } + for idx := range stmts { + stmts[idx] = sqlbuilder.Escape(stmts[idx]) + } + multiIfStmt := fmt.Sprintf("multiIf(%s, NULL)", strings.Join(stmts, ", ")) return multiIfStmt, allArgs, nil diff --git a/pkg/querybuilder/resourcefilter/statement_builder.go b/pkg/querybuilder/resourcefilter/statement_builder.go index 1deb32eee3d8..3a3cb7fe283d 100644 --- a/pkg/querybuilder/resourcefilter/statement_builder.go +++ b/pkg/querybuilder/resourcefilter/statement_builder.go @@ -155,6 +155,8 @@ func (b *resourceFilterStatementBuilder[T]) addConditions( JsonKeyToKey: b.jsonKeyToKey, SkipFullTextFilter: true, SkipFunctionCalls: true, + // there is no need for "key" not found error for resource filtering + IgnoreNotFoundKeys: true, Variables: variables, }) diff --git a/pkg/querybuilder/time.go b/pkg/querybuilder/time.go index f3e50db150da..d18d2ac0d91c 100644 --- a/pkg/querybuilder/time.go +++ b/pkg/querybuilder/time.go @@ -3,6 +3,10 @@ package querybuilder import ( "fmt" "math" + "time" + + "github.com/SigNoz/signoz/pkg/types/metrictypes" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" ) const ( @@ -69,7 +73,16 @@ func RecommendedStepIntervalForMetric(start, end uint64) uint64 { } // return the nearest lower multiple of 60 - return step - step%60 + recommended := step - step%60 + + // if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes + // if the time range is greater than 1 week, set the step interval to be multiple of 30 mins + if end-start >= uint64(24*time.Hour.Nanoseconds()) && end-start < uint64(7*24*time.Hour.Nanoseconds()) { + recommended = uint64(math.Round(float64(recommended)/300)) * 300 + } else if end-start >= uint64(7*24*time.Hour.Nanoseconds()) { + recommended = uint64(math.Round(float64(recommended)/1800)) * 1800 + } + return recommended } func MinAllowedStepIntervalForMetric(start, end uint64) uint64 { @@ -84,7 +97,64 @@ func 
MinAllowedStepIntervalForMetric(start, end uint64) uint64 { } // return the nearest lower multiple of 60 - return step - step%60 + minAllowed := step - step%60 + + // if the time range is greater than 1 day, and less than 1 week set the step interval to be multiple of 5 minutes + // if the time range is greater than 1 week, set the step interval to be multiple of 30 mins + if end-start >= uint64(24*time.Hour.Nanoseconds()) && end-start < uint64(7*24*time.Hour.Nanoseconds()) { + minAllowed = uint64(math.Round(float64(minAllowed)/300)) * 300 + } else if end-start >= uint64(7*24*time.Hour.Nanoseconds()) { + minAllowed = uint64(math.Round(float64(minAllowed)/1800)) * 1800 + } + return minAllowed +} + +func AdjustedMetricTimeRange(start, end, step uint64, mq qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) (uint64, uint64) { + // align the start to the step interval + start = start - (start % (step * 1000)) + // if the query is a rate query, we adjust the start time by one more step + // so that we can calculate the rate for the first data point + hasRunningDiff := false + for _, fn := range mq.Functions { + if fn.Name == qbtypes.FunctionNameRunningDiff { + hasRunningDiff = true + break + } + } + if (mq.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate || mq.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationIncrease) && + mq.Aggregations[0].Temporality != metrictypes.Delta { + start -= step * 1000 + } + if hasRunningDiff { + start -= step * 1000 + } + // align the end to the nearest minute + adjustStep := uint64(math.Min(float64(step), 60)) + end = end - (end % (adjustStep * 1000)) + return start, end +} + +func GCD(a, b int64) int64 { + for b != 0 { + a, b = b, a%b + } + return a +} + +func LCM(a, b int64) int64 { + return (a * b) / GCD(a, b) +} + +// LCMList computes the LCM of a list of int64 numbers. 
+func LCMList(nums []int64) int64 { + if len(nums) == 0 { + return 1 + } + result := nums[0] + for _, num := range nums[1:] { + result = LCM(result, num) + } + return result } func AssignReservedVars(vars map[string]any, start, end uint64) { diff --git a/pkg/querybuilder/where_clause_visitor.go b/pkg/querybuilder/where_clause_visitor.go index 2f05ec3b93ec..936803cd6276 100644 --- a/pkg/querybuilder/where_clause_visitor.go +++ b/pkg/querybuilder/where_clause_visitor.go @@ -30,6 +30,7 @@ type filterExpressionVisitor struct { skipResourceFilter bool skipFullTextFilter bool skipFunctionCalls bool + ignoreNotFoundKeys bool variables map[string]qbtypes.VariableItem keysWithWarnings map[string]bool @@ -46,6 +47,7 @@ type FilterExprVisitorOpts struct { SkipResourceFilter bool SkipFullTextFilter bool SkipFunctionCalls bool + IgnoreNotFoundKeys bool Variables map[string]qbtypes.VariableItem } @@ -62,6 +64,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis skipResourceFilter: opts.SkipResourceFilter, skipFullTextFilter: opts.SkipFullTextFilter, skipFunctionCalls: opts.SkipFunctionCalls, + ignoreNotFoundKeys: opts.IgnoreNotFoundKeys, variables: opts.Variables, keysWithWarnings: make(map[string]bool), } @@ -292,6 +295,15 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext) any { keys := v.Visit(ctx.Key()).([]*telemetrytypes.TelemetryFieldKey) + // if key is missing and can be ignored, the condition is ignored + if len(keys) == 0 && v.ignoreNotFoundKeys { + // Why do we return "true"? 
to prevent creating an empty tuple + // example, if the condition is (x AND (y OR z)) + // if we find ourselves ignoring all, then it creates an invalid + // condition (()) which throws invalid tuples error + return "true" + } + + // this is used to skip the resource filtering on main table if // the query may use the resources table sub-query filter if v.skipResourceFilter { @@ -302,6 +314,13 @@ } } keys = filteredKeys + if len(keys) == 0 { + // Why do we return "true"? to prevent creating an empty tuple + // example, if the condition is (resource.service.name='api' AND (env='prod' OR env='production')) + // if we find ourselves skipping all, then it creates an invalid + // condition (()) which throws invalid tuples error + return "true" + } } // Handle EXISTS specially @@ -429,7 +448,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext var varItem qbtypes.VariableItem varItem, ok = v.variables[var_] // if not present, try without `$` prefix - if !ok { + if !ok && len(var_) > 0 { varItem, ok = v.variables[var_[1:]] } @@ -680,6 +699,19 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any { fieldKeysForName := v.fieldKeys[keyName] + // if the context is explicitly provided, filter out the remaining + // example, resource.attr = 'value', then we don't want to search on + // anything other than the resource attributes + if fieldKey.FieldContext != telemetrytypes.FieldContextUnspecified { + filteredKeys := []*telemetrytypes.TelemetryFieldKey{} + for _, item := range fieldKeysForName { + if item.FieldContext == fieldKey.FieldContext { + filteredKeys = append(filteredKeys, item) + } + } + fieldKeysForName = filteredKeys + } + // for the body json search, we need to add search on the body field even // if there is a field with the same name as attribute/resource attribute // Since it will ORed with the fieldKeysForName, it will not
result empty @@ -691,9 +723,16 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any { } if len(fieldKeysForName) == 0 { + // check if the key exists with {fieldContext}.{key} + // because the context could be legitimate prefix in user data, example `span.div_num = 20` + keyWithContext := fmt.Sprintf("%s.%s", fieldKey.FieldContext.StringValue(), fieldKey.Name) + if len(v.fieldKeys[keyWithContext]) > 0 { + return v.fieldKeys[keyWithContext] + } + if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" && keyName == "" { v.errors = append(v.errors, "missing key for body json search - expected key of the form `body.key` (ex: `body.status`)") - } else { + } else if !v.ignoreNotFoundKeys { // TODO(srikanthccv): do we want to return an error here? // should we infer the type and auto-magically build a key for expression? v.errors = append(v.errors, fmt.Sprintf("key `%s` not found", fieldKey.Name)) @@ -718,8 +757,13 @@ func trimQuotes(txt string) string { if len(txt) >= 2 { if (txt[0] == '"' && txt[len(txt)-1] == '"') || (txt[0] == '\'' && txt[len(txt)-1] == '\'') { - return txt[1 : len(txt)-1] + txt = txt[1 : len(txt)-1] } } + + // unescape so clickhouse-go can escape it + // https://github.com/ClickHouse/clickhouse-go/blob/6c5ddb38dd2edc841a3b927711b841014759bede/bind.go#L278 + txt = strings.ReplaceAll(txt, `\\`, `\`) + txt = strings.ReplaceAll(txt, `\'`, `'`) return txt } diff --git a/pkg/signoz/config.go b/pkg/signoz/config.go index 63222c5ef4c2..5eda9f98b0cb 100644 --- a/pkg/signoz/config.go +++ b/pkg/signoz/config.go @@ -30,6 +30,7 @@ import ( "github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/web" + "github.com/spf13/cobra" ) // Config defines the entire input configuration of signoz. 
@@ -106,6 +107,28 @@ type DeprecatedFlags struct { GatewayUrl string } +func (df *DeprecatedFlags) RegisterFlags(cmd *cobra.Command) { + cmd.Flags().IntVar(&df.MaxIdleConns, "max-idle-conns", 50, "max idle connections to the database") + cmd.Flags().IntVar(&df.MaxOpenConns, "max-open-conns", 100, "max open connections to the database") + cmd.Flags().DurationVar(&df.DialTimeout, "dial-timeout", 5*time.Second, "dial timeout for the database") + cmd.Flags().StringVar(&df.Config, "config", "./config/prometheus.yml", "(prometheus config to read metrics)") + cmd.Flags().StringVar(&df.FluxInterval, "flux-interval", "5m", "flux interval") + cmd.Flags().StringVar(&df.FluxIntervalForTraceDetail, "flux-interval-for-trace-detail", "2m", "flux interval for trace detail") + cmd.Flags().BoolVar(&df.PreferSpanMetrics, "prefer-span-metrics", false, "(prefer span metrics for service level metrics)") + cmd.Flags().StringVar(&df.Cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')") + cmd.Flags().StringVar(&df.GatewayUrl, "gateway-url", "", "(url to the gateway)") + + _ = cmd.Flags().MarkDeprecated("max-idle-conns", "use SIGNOZ_TELEMETRYSTORE_MAX__IDLE__CONNS instead") + _ = cmd.Flags().MarkDeprecated("max-open-conns", "use SIGNOZ_TELEMETRYSTORE_MAX__OPEN__CONNS instead") + _ = cmd.Flags().MarkDeprecated("dial-timeout", "use SIGNOZ_TELEMETRYSTORE_DIAL__TIMEOUT instead") + _ = cmd.Flags().MarkDeprecated("config", "use SIGNOZ_PROMETHEUS_CONFIG instead") + _ = cmd.Flags().MarkDeprecated("flux-interval", "use SIGNOZ_QUERIER_FLUX__INTERVAL instead") + _ = cmd.Flags().MarkDeprecated("flux-interval-for-trace-detail", "use SIGNOZ_QUERIER_FLUX__INTERVAL instead") + _ = cmd.Flags().MarkDeprecated("cluster", "use SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER instead") + _ = cmd.Flags().MarkDeprecated("prefer-span-metrics", "use USE_SPAN_METRICS instead") + _ = cmd.Flags().MarkDeprecated("gateway-url", "use SIGNOZ_GATEWAY_URL instead") +} + func NewConfig(ctx context.Context, 
resolverConfig config.ResolverConfig, deprecatedFlags DeprecatedFlags) (Config, error) { configFactories := []factory.ConfigFactory{ version.NewConfigFactory(), diff --git a/pkg/telemetrylogs/condition_builder.go b/pkg/telemetrylogs/condition_builder.go index 30555084ceab..74de1738785a 100644 --- a/pkg/telemetrylogs/condition_builder.go +++ b/pkg/telemetrylogs/condition_builder.go @@ -148,11 +148,6 @@ func (c *conditionBuilder) conditionFor( } } - // if the field is intrinsic, it always exists - if slices.Contains(maps.Keys(IntrinsicFields), key.Name) { - return "true", nil - } - var value any switch column.Type { case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}: diff --git a/pkg/telemetrylogs/condition_builder_test.go b/pkg/telemetrylogs/condition_builder_test.go index 3ad02863abac..5e8dceff4fd8 100644 --- a/pkg/telemetrylogs/condition_builder_test.go +++ b/pkg/telemetrylogs/condition_builder_test.go @@ -44,7 +44,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorGreaterThan, value: float64(100), - expectedSQL: "(attributes_number['request.duration'] > ? AND mapContains(attributes_number, 'request.duration') = ?)", + expectedSQL: "(toFloat64(attributes_number['request.duration']) > ? AND mapContains(attributes_number, 'request.duration') = ?)", expectedArgs: []any{float64(100), true}, expectedError: nil, }, @@ -57,7 +57,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorLessThan, value: float64(1024), - expectedSQL: "(attributes_number['request.size'] < ? AND mapContains(attributes_number, 'request.size') = ?)", + expectedSQL: "(toFloat64(attributes_number['request.size']) < ? 
AND mapContains(attributes_number, 'request.size') = ?)", expectedArgs: []any{float64(1024), true}, expectedError: nil, }, @@ -167,7 +167,8 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorExists, value: nil, - expectedSQL: "true", + expectedSQL: "WHERE body <> ?", + expectedArgs: []any{""}, expectedError: nil, }, { @@ -178,7 +179,8 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorNotExists, value: nil, - expectedSQL: "true", + expectedSQL: "WHERE body = ?", + expectedArgs: []any{""}, expectedError: nil, }, { diff --git a/pkg/telemetrylogs/field_mapper.go b/pkg/telemetrylogs/field_mapper.go index 78ff7d4cc36c..db76ecf43792 100644 --- a/pkg/telemetrylogs/field_mapper.go +++ b/pkg/telemetrylogs/field_mapper.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" "golang.org/x/exp/maps" ) @@ -159,7 +160,7 @@ func (m *fieldMapper) ColumnExpressionFor( // is it a static field? 
if _, ok := logsV2Columns[field.Name]; ok { // if it is, attach the column name directly - field.FieldContext = telemetrytypes.FieldContextSpan + field.FieldContext = telemetrytypes.FieldContextLog colName, _ = m.FieldFor(ctx, field) } else { // - the context is not provided @@ -173,7 +174,7 @@ func (m *fieldMapper) ColumnExpressionFor( return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction) } else { // not even a close match, return an error - return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name) + return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name) } } } else if len(keysForField) == 1 { @@ -190,5 +191,5 @@ func (m *fieldMapper) ColumnExpressionFor( } } - return fmt.Sprintf("%s AS `%s`", colName, field.Name), nil + return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(colName), field.Name), nil } diff --git a/pkg/telemetrylogs/filter_compiler.go b/pkg/telemetrylogs/filter_compiler.go deleted file mode 100644 index 69dc90bd5297..000000000000 --- a/pkg/telemetrylogs/filter_compiler.go +++ /dev/null @@ -1,55 +0,0 @@ -package telemetrylogs - -import ( - "context" - - "github.com/SigNoz/signoz/pkg/querybuilder" - qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" - "github.com/SigNoz/signoz/pkg/types/telemetrytypes" - "github.com/huandu/go-sqlbuilder" -) - -type FilterCompilerOpts struct { - FieldMapper qbtypes.FieldMapper - ConditionBuilder qbtypes.ConditionBuilder - MetadataStore telemetrytypes.MetadataStore - FullTextColumn *telemetrytypes.TelemetryFieldKey - JsonBodyPrefix string - JsonKeyToKey qbtypes.JsonKeyToFieldFunc - SkipResourceFilter bool -} - -type filterCompiler struct { - opts FilterCompilerOpts -} - -func NewFilterCompiler(opts FilterCompilerOpts) *filterCompiler { - return &filterCompiler{ - opts: opts, - } -} - -func (c *filterCompiler) Compile(ctx context.Context, 
expr string) (*sqlbuilder.WhereClause, []string, error) { - selectors := querybuilder.QueryStringToKeysSelectors(expr) - - keys, err := c.opts.MetadataStore.GetKeysMulti(ctx, selectors) - if err != nil { - return nil, nil, err - } - - filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(expr, querybuilder.FilterExprVisitorOpts{ - FieldMapper: c.opts.FieldMapper, - ConditionBuilder: c.opts.ConditionBuilder, - FieldKeys: keys, - FullTextColumn: c.opts.FullTextColumn, - JsonBodyPrefix: c.opts.JsonBodyPrefix, - JsonKeyToKey: c.opts.JsonKeyToKey, - SkipResourceFilter: c.opts.SkipResourceFilter, - }) - - if err != nil { - return nil, nil, err - } - - return filterWhereClause, warnings, nil -} diff --git a/pkg/telemetrylogs/filter_expr_logs_test.go b/pkg/telemetrylogs/filter_expr_logs_test.go index 2490ec09d7d9..6b9d5c64539d 100644 --- a/pkg/telemetrylogs/filter_expr_logs_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_test.go @@ -396,7 +396,7 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with conditions", query: "\"connection timeout\" duration>30", shouldPass: true, - expectedQuery: "WHERE (match(body, ?) AND (attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?))", + expectedQuery: "WHERE (match(body, ?) AND (toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?))", expectedArgs: []any{"connection timeout", float64(30), true}, expectedErrorContains: "", }, @@ -422,7 +422,7 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with parentheses", query: "error (status.code=500 OR status.code=503)", shouldPass: true, - expectedQuery: "WHERE (match(body, ?) AND (((attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?) OR (attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?))))", + expectedQuery: "WHERE (match(body, ?) AND (((toFloat64(attributes_number['status.code']) = ? 
AND mapContains(attributes_number, 'status.code') = ?) OR (toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?))))", expectedArgs: []any{"error", float64(500), true, float64(503), true}, expectedErrorContains: "", }, @@ -430,7 +430,7 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with parentheses", query: "(status.code=500 OR status.code=503) error", shouldPass: true, - expectedQuery: "WHERE ((((attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?) OR (attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?))) AND match(body, ?))", + expectedQuery: "WHERE ((((toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?) OR (toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?))) AND match(body, ?))", expectedArgs: []any{float64(500), true, float64(503), true, "error"}, expectedErrorContains: "", }, @@ -438,7 +438,7 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with parentheses", query: "error AND (status.code=500 OR status.code=503)", shouldPass: true, - expectedQuery: "WHERE (match(body, ?) AND (((attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?) OR (attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?))))", + expectedQuery: "WHERE (match(body, ?) AND (((toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?) OR (toFloat64(attributes_number['status.code']) = ? 
AND mapContains(attributes_number, 'status.code') = ?))))", expectedArgs: []any{"error", float64(500), true, float64(503), true}, expectedErrorContains: "", }, @@ -446,7 +446,7 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with parentheses", query: "(status.code=500 OR status.code=503) AND error", shouldPass: true, - expectedQuery: "WHERE ((((attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?) OR (attributes_number['status.code'] = ? AND mapContains(attributes_number, 'status.code') = ?))) AND match(body, ?))", + expectedQuery: "WHERE ((((toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?) OR (toFloat64(attributes_number['status.code']) = ? AND mapContains(attributes_number, 'status.code') = ?))) AND match(body, ?))", expectedArgs: []any{float64(500), true, float64(503), true, "error"}, expectedErrorContains: "", }, @@ -754,7 +754,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Basic equality", query: "status=200", shouldPass: true, - expectedQuery: "WHERE (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)", expectedArgs: []any{float64(200), true}, expectedErrorContains: "", }, @@ -762,7 +762,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Basic equality", query: "code=400", shouldPass: true, - expectedQuery: "WHERE (attributes_number['code'] = ? AND mapContains(attributes_number, 'code') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['code']) = ? AND mapContains(attributes_number, 'code') = ?)", expectedArgs: []any{float64(400), true}, expectedErrorContains: "", }, @@ -794,7 +794,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Basic equality", query: "count=0", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] = ? 
AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) = ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(0), true}, expectedErrorContains: "", }, @@ -812,7 +812,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Not equals", query: "status!=200", shouldPass: true, - expectedQuery: "WHERE attributes_number['status'] <> ?", + expectedQuery: "WHERE toFloat64(attributes_number['status']) <> ?", expectedArgs: []any{float64(200)}, expectedErrorContains: "", }, @@ -820,7 +820,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Not equals", query: "status<>200", shouldPass: true, - expectedQuery: "WHERE attributes_number['status'] <> ?", + expectedQuery: "WHERE toFloat64(attributes_number['status']) <> ?", expectedArgs: []any{float64(200)}, expectedErrorContains: "", }, @@ -828,7 +828,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Not equals", query: "code!=400", shouldPass: true, - expectedQuery: "WHERE attributes_number['code'] <> ?", + expectedQuery: "WHERE toFloat64(attributes_number['code']) <> ?", expectedArgs: []any{float64(400)}, expectedErrorContains: "", }, @@ -854,7 +854,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Less than", query: "count<10", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] < ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) < ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(10), true}, expectedErrorContains: "", }, @@ -862,7 +862,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Less than", query: "duration<1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) < ? 
AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(1000), true}, expectedErrorContains: "", }, @@ -872,7 +872,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Less than or equal", query: "count<=10", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] <= ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) <= ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(10), true}, expectedErrorContains: "", }, @@ -880,7 +880,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Less than or equal", query: "duration<=1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] <= ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) <= ? AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(1000), true}, expectedErrorContains: "", }, @@ -890,7 +890,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Greater than", query: "count>10", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(10), true}, expectedErrorContains: "", }, @@ -898,7 +898,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Greater than", query: "duration>1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) > ? 
AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(1000), true}, expectedErrorContains: "", }, @@ -908,7 +908,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Greater than or equal", query: "count>=10", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] >= ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) >= ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(10), true}, expectedErrorContains: "", }, @@ -916,7 +916,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Greater than or equal", query: "duration>=1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] >= ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) >= ? AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(1000), true}, expectedErrorContains: "", }, @@ -1062,7 +1062,7 @@ func TestFilterExprLogs(t *testing.T) { category: "BETWEEN operator", query: "count BETWEEN 1 AND 10", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] BETWEEN ? AND ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) BETWEEN ? AND ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(1), float64(10), true}, expectedErrorContains: "", }, @@ -1070,7 +1070,7 @@ func TestFilterExprLogs(t *testing.T) { category: "BETWEEN operator", query: "duration BETWEEN 100 AND 1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] BETWEEN ? AND ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) BETWEEN ? AND ? 
AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(100), float64(1000), true}, expectedErrorContains: "", }, @@ -1078,7 +1078,7 @@ func TestFilterExprLogs(t *testing.T) { category: "BETWEEN operator", query: "amount BETWEEN 0.1 AND 9.9", shouldPass: true, - expectedQuery: "WHERE (attributes_number['amount'] BETWEEN ? AND ? AND mapContains(attributes_number, 'amount') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['amount']) BETWEEN ? AND ? AND mapContains(attributes_number, 'amount') = ?)", expectedArgs: []any{0.1, 9.9, true}, expectedErrorContains: "", }, @@ -1088,7 +1088,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT BETWEEN operator", query: "count NOT BETWEEN 1 AND 10", shouldPass: true, - expectedQuery: "WHERE attributes_number['count'] NOT BETWEEN ? AND ?", + expectedQuery: "WHERE toFloat64(attributes_number['count']) NOT BETWEEN ? AND ?", expectedArgs: []any{float64(1), float64(10)}, expectedErrorContains: "", }, @@ -1096,7 +1096,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT BETWEEN operator", query: "duration NOT BETWEEN 100 AND 1000", shouldPass: true, - expectedQuery: "WHERE attributes_number['duration'] NOT BETWEEN ? AND ?", + expectedQuery: "WHERE toFloat64(attributes_number['duration']) NOT BETWEEN ? AND ?", expectedArgs: []any{float64(100), float64(1000)}, expectedErrorContains: "", }, @@ -1104,7 +1104,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT BETWEEN operator", query: "amount NOT BETWEEN 0.1 AND 9.9", shouldPass: true, - expectedQuery: "WHERE attributes_number['amount'] NOT BETWEEN ? AND ?", + expectedQuery: "WHERE toFloat64(attributes_number['amount']) NOT BETWEEN ? AND ?", expectedArgs: []any{0.1, 9.9}, expectedErrorContains: "", }, @@ -1114,7 +1114,7 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (parentheses)", query: "status IN (200, 201, 202)", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? 
OR attributes_number['status'] = ? OR attributes_number['status'] = ?) AND mapContains(attributes_number, 'status') = ?)", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? OR toFloat64(attributes_number['status']) = ? OR toFloat64(attributes_number['status']) = ?) AND mapContains(attributes_number, 'status') = ?)", expectedArgs: []any{float64(200), float64(201), float64(202), true}, expectedErrorContains: "", }, @@ -1122,7 +1122,7 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (parentheses)", query: "error.code IN (404, 500, 503)", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['error.code'] = ? OR attributes_number['error.code'] = ? OR attributes_number['error.code'] = ?) AND mapContains(attributes_number, 'error.code') = ?)", + expectedQuery: "WHERE ((toFloat64(attributes_number['error.code']) = ? OR toFloat64(attributes_number['error.code']) = ? OR toFloat64(attributes_number['error.code']) = ?) AND mapContains(attributes_number, 'error.code') = ?)", expectedArgs: []any{float64(404), float64(500), float64(503), true}, expectedErrorContains: "", }, @@ -1148,7 +1148,7 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (brackets)", query: "status IN [200, 201, 202]", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? OR attributes_number['status'] = ? OR attributes_number['status'] = ?) AND mapContains(attributes_number, 'status') = ?)", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? OR toFloat64(attributes_number['status']) = ? OR toFloat64(attributes_number['status']) = ?) AND mapContains(attributes_number, 'status') = ?)", expectedArgs: []any{float64(200), float64(201), float64(202), true}, expectedErrorContains: "", }, @@ -1156,7 +1156,7 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (brackets)", query: "error.code IN [404, 500, 503]", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['error.code'] = ? 
OR attributes_number['error.code'] = ? OR attributes_number['error.code'] = ?) AND mapContains(attributes_number, 'error.code') = ?)", + expectedQuery: "WHERE ((toFloat64(attributes_number['error.code']) = ? OR toFloat64(attributes_number['error.code']) = ? OR toFloat64(attributes_number['error.code']) = ?) AND mapContains(attributes_number, 'error.code') = ?)", expectedArgs: []any{float64(404), float64(500), float64(503), true}, expectedErrorContains: "", }, @@ -1182,7 +1182,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (parentheses)", query: "status NOT IN (400, 500)", shouldPass: true, - expectedQuery: "WHERE (attributes_number['status'] <> ? AND attributes_number['status'] <> ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['status']) <> ? AND toFloat64(attributes_number['status']) <> ?)", expectedArgs: []any{float64(400), float64(500)}, expectedErrorContains: "", }, @@ -1190,7 +1190,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (parentheses)", query: "error.code NOT IN (401, 403)", shouldPass: true, - expectedQuery: "WHERE (attributes_number['error.code'] <> ? AND attributes_number['error.code'] <> ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['error.code']) <> ? AND toFloat64(attributes_number['error.code']) <> ?)", expectedArgs: []any{float64(401), float64(403)}, expectedErrorContains: "", }, @@ -1216,7 +1216,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (brackets)", query: "status NOT IN [400, 500]", shouldPass: true, - expectedQuery: "WHERE (attributes_number['status'] <> ? AND attributes_number['status'] <> ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['status']) <> ? 
AND toFloat64(attributes_number['status']) <> ?)", expectedArgs: []any{float64(400), float64(500)}, expectedErrorContains: "", }, @@ -1224,7 +1224,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (brackets)", query: "error.code NOT IN [401, 403]", shouldPass: true, - expectedQuery: "WHERE (attributes_number['error.code'] <> ? AND attributes_number['error.code'] <> ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['error.code']) <> ? AND toFloat64(attributes_number['error.code']) <> ?)", expectedArgs: []any{float64(401), float64(403)}, expectedErrorContains: "", }, @@ -1335,7 +1335,7 @@ func TestFilterExprLogs(t *testing.T) { query: "email REGEXP \"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z]{2,}$\"", shouldPass: true, expectedQuery: "WHERE (match(attributes_string['email'], ?) AND mapContains(attributes_string, 'email') = ?)", - expectedArgs: []any{"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z]{2,}$", true}, + expectedArgs: []any{"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$", true}, expectedErrorContains: "", }, { @@ -1343,7 +1343,7 @@ func TestFilterExprLogs(t *testing.T) { query: "version REGEXP \"^v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$\"", shouldPass: true, expectedQuery: "WHERE (match(attributes_string['version'], ?) AND mapContains(attributes_string, 'version') = ?)", - expectedArgs: []any{"^v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$", true}, + expectedArgs: []any{"^v\\d+\\.\\d+\\.\\d+$", true}, expectedErrorContains: "", }, { @@ -1351,7 +1351,7 @@ func TestFilterExprLogs(t *testing.T) { query: "path REGEXP \"^/api/v\\\\d+/users/\\\\d+$\"", shouldPass: true, expectedQuery: "WHERE (match(attributes_string['path'], ?) 
AND mapContains(attributes_string, 'path') = ?)", - expectedArgs: []any{"^/api/v\\\\d+/users/\\\\d+$", true}, + expectedArgs: []any{"^/api/v\\d+/users/\\d+$", true}, expectedErrorContains: "", }, { @@ -1377,7 +1377,7 @@ func TestFilterExprLogs(t *testing.T) { query: "email NOT REGEXP \"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z]{2,}$\"", shouldPass: true, expectedQuery: "WHERE NOT match(attributes_string['email'], ?)", - expectedArgs: []any{"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\\\.[a-zA-Z]{2,}$"}, + expectedArgs: []any{"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$"}, expectedErrorContains: "", }, { @@ -1385,15 +1385,15 @@ func TestFilterExprLogs(t *testing.T) { query: "version NOT REGEXP \"^v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$\"", shouldPass: true, expectedQuery: "WHERE NOT match(attributes_string['version'], ?)", - expectedArgs: []any{"^v\\\\d+\\\\.\\\\d+\\\\.\\\\d+$"}, + expectedArgs: []any{"^v\\d+\\.\\d+\\.\\d+$"}, expectedErrorContains: "", }, { category: "NOT REGEXP operator", - query: "path NOT REGEXP \"^/api/v\\\\d+/users/\\\\d+$\"", + query: "path NOT REGEXP \"^/api/v\\d+/users/\\d+$\"", shouldPass: true, expectedQuery: "WHERE NOT match(attributes_string['path'], ?)", - expectedArgs: []any{"^/api/v\\\\d+/users/\\\\d+$"}, + expectedArgs: []any{"^/api/v\\d+/users/\\d+$"}, expectedErrorContains: "", }, @@ -1464,7 +1464,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit AND", query: "status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true}, expectedErrorContains: "", }, @@ -1472,7 +1472,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit AND", query: "count>0 AND duration<1000", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))", expectedArgs: []any{float64(0), true, float64(1000), true}, expectedErrorContains: "", }, @@ -1490,7 +1490,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit OR", query: "status=200 OR status=201", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) OR (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))", expectedArgs: []any{float64(200), true, float64(201), true}, expectedErrorContains: "", }, @@ -1506,7 +1506,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit OR", query: "count<10 OR count>100", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] < ? AND mapContains(attributes_number, 'count') = ?) OR (attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) < ? AND mapContains(attributes_number, 'count') = ?) OR (toFloat64(attributes_number['count']) > ? 
AND mapContains(attributes_number, 'count') = ?))", expectedArgs: []any{float64(10), true, float64(100), true}, expectedErrorContains: "", }, @@ -1516,7 +1516,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT with expressions", query: "NOT status=200", shouldPass: true, - expectedQuery: "WHERE NOT ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?))", + expectedQuery: "WHERE NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))", expectedArgs: []any{float64(200), true}, expectedErrorContains: "", }, @@ -1532,7 +1532,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT with expressions", query: "NOT count>10", shouldPass: true, - expectedQuery: "WHERE NOT ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?))", + expectedQuery: "WHERE NOT ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?))", expectedArgs: []any{float64(10), true}, expectedErrorContains: "", }, @@ -1542,7 +1542,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR combinations", query: "status=200 AND (service.name=\"api\" OR service.name=\"web\")", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?))))", expectedArgs: []any{float64(200), true, "api", true, "web", true}, expectedErrorContains: "", }, @@ -1550,7 +1550,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR combinations", query: "(count>10 AND count<100) OR (duration>1000 AND duration<5000)", shouldPass: true, - expectedQuery: "WHERE ((((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_number['count'] < ? AND mapContains(attributes_number, 'count') = ?))) OR (((attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?) AND (attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?))))", + expectedQuery: "WHERE ((((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (toFloat64(attributes_number['count']) < ? AND mapContains(attributes_number, 'count') = ?))) OR (((toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))))", expectedArgs: []any{float64(10), true, float64(100), true, float64(1000), true, float64(5000), true}, expectedErrorContains: "", }, @@ -1568,7 +1568,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + NOT combinations", query: "status=200 AND NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)))", expectedArgs: []any{float64(200), true, "api", true}, expectedErrorContains: "", }, @@ -1576,7 +1576,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + NOT combinations", query: "count>0 AND NOT error.code EXISTS", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND NOT (mapContains(attributes_number, 'error.code') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND NOT (mapContains(attributes_number, 'error.code') = ?))", expectedArgs: []any{float64(0), true, true}, expectedErrorContains: "", }, @@ -1586,7 +1586,7 @@ func TestFilterExprLogs(t *testing.T) { category: "OR + NOT combinations", query: "NOT status=200 OR NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", expectedArgs: []any{float64(200), true, "api", true}, expectedErrorContains: "", }, @@ -1594,7 +1594,7 @@ func TestFilterExprLogs(t *testing.T) { category: "OR + NOT combinations", query: "NOT count>0 OR NOT error.code EXISTS", shouldPass: true, - expectedQuery: "WHERE (NOT ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?)) OR NOT (mapContains(attributes_number, 'error.code') = ?))", + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['count']) > ? 
AND mapContains(attributes_number, 'count') = ?)) OR NOT (mapContains(attributes_number, 'error.code') = ?))", expectedArgs: []any{float64(0), true, true}, expectedErrorContains: "", }, @@ -1604,7 +1604,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR + NOT combinations", query: "status=200 AND (service.name=\"api\" OR NOT duration>1000)", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR NOT ((attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?)))))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR NOT ((toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?)))))", expectedArgs: []any{float64(200), true, "api", true, float64(1000), true}, expectedErrorContains: "", }, @@ -1620,7 +1620,7 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR + NOT combinations", query: "NOT (status=200 AND service.name=\"api\") OR count>0", shouldPass: true, - expectedQuery: "WHERE (NOT ((((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))) OR (attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?))", + expectedQuery: "WHERE (NOT ((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))) OR (toFloat64(attributes_number['count']) > ? 
AND mapContains(attributes_number, 'count') = ?))", expectedArgs: []any{float64(200), true, "api", true, float64(0), true}, expectedErrorContains: "", }, @@ -1630,7 +1630,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Implicit AND", query: "status=200 service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true}, expectedErrorContains: "", }, @@ -1638,7 +1638,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Implicit AND", query: "count>0 duration<1000", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))", expectedArgs: []any{float64(0), true, float64(1000), true}, expectedErrorContains: "", }, @@ -1656,7 +1656,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Mixed implicit/explicit AND", query: "status=200 AND service.name=\"api\" duration<1000", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_number['duration'] < ? 
AND mapContains(attributes_number, 'duration') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))", expectedArgs: []any{float64(200), true, "api", true, float64(1000), true}, expectedErrorContains: "", }, @@ -1664,7 +1664,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Mixed implicit/explicit AND", query: "count>0 level=\"ERROR\" AND message CONTAINS \"error\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_string['level'] = ? AND mapContains(attributes_string, 'level') = ?) AND (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_string['level'] = ? AND mapContains(attributes_string, 'level') = ?) AND (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?))", expectedArgs: []any{float64(0), true, "ERROR", true, "%error%", true}, expectedErrorContains: "", }, @@ -1674,7 +1674,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Simple grouping", query: "(status=200)", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? 
AND mapContains(attributes_number, 'status') = ?))", expectedArgs: []any{float64(200), true}, expectedErrorContains: "", }, @@ -1690,7 +1690,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Simple grouping", query: "(count>0)", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?))", expectedArgs: []any{float64(0), true}, expectedErrorContains: "", }, @@ -1700,7 +1700,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Nested grouping", query: "((status=200))", shouldPass: true, - expectedQuery: "WHERE (((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)))", + expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)))", expectedArgs: []any{float64(200), true}, expectedErrorContains: "", }, @@ -1716,7 +1716,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Nested grouping", query: "((count>0) AND (duration<1000))", shouldPass: true, - expectedQuery: "WHERE ((((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?)) AND ((attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?))))", + expectedQuery: "WHERE ((((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?)) AND ((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))))", expectedArgs: []any{float64(0), true, float64(1000), true}, expectedErrorContains: "", }, @@ -1726,7 +1726,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Complex nested grouping", query: "(status=200 AND (service.name=\"api\" OR service.name=\"web\"))", shouldPass: true, - expectedQuery: "WHERE (((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) 
AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))", + expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))", expectedArgs: []any{float64(200), true, "api", true, "web", true}, expectedErrorContains: "", }, @@ -1734,7 +1734,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Complex nested grouping", query: "((count>0 AND count<100) OR (duration>1000 AND duration<5000))", shouldPass: true, - expectedQuery: "WHERE (((((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (attributes_number['count'] < ? AND mapContains(attributes_number, 'count') = ?))) OR (((attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?) AND (attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?)))))", + expectedQuery: "WHERE (((((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (toFloat64(attributes_number['count']) < ? AND mapContains(attributes_number, 'count') = ?))) OR (((toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?) AND (toFloat64(attributes_number['duration']) < ? 
AND mapContains(attributes_number, 'duration') = ?)))))", expectedArgs: []any{float64(0), true, float64(100), true, float64(1000), true, float64(5000), true}, expectedErrorContains: "", }, @@ -1752,7 +1752,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Deep nesting", query: "(((status=200 OR status=201) AND service.name=\"api\") OR ((status=202 OR status=203) AND service.name=\"web\"))", shouldPass: true, - expectedQuery: "WHERE (((((((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) OR (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) OR (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))", + expectedQuery: "WHERE (((((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)))))", expectedArgs: []any{float64(200), true, float64(201), true, "api", true, float64(202), true, float64(203), true, "web", true}, expectedErrorContains: "", }, @@ -1760,7 +1760,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Deep nesting", query: "(count>0 AND ((duration<1000 AND service.name=\"api\") OR (duration<500 AND service.name=\"web\")))", shouldPass: true, - expectedQuery: "WHERE (((attributes_number['count'] > ? AND mapContains(attributes_number, 'count') = ?) AND (((((attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))))", + expectedQuery: "WHERE (((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))))", expectedArgs: []any{float64(0), true, float64(1000), true, "api", true, float64(500), true, "web", true}, expectedErrorContains: "", }, @@ -1795,7 +1795,7 @@ func TestFilterExprLogs(t *testing.T) { query: "message='This is a \\'quoted\\' message'", shouldPass: true, expectedQuery: "WHERE (attributes_string['message'] = ? 
AND mapContains(attributes_string, 'message') = ?)", - expectedArgs: []any{"This is a \\'quoted\\' message", true}, + expectedArgs: []any{"This is a 'quoted' message", true}, expectedErrorContains: "", }, @@ -1804,7 +1804,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Numeric values", query: "status=200", shouldPass: true, - expectedQuery: "WHERE (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)", expectedArgs: []any{float64(200), true}, expectedErrorContains: "", }, @@ -1812,7 +1812,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Numeric values", query: "count=0", shouldPass: true, - expectedQuery: "WHERE (attributes_number['count'] = ? AND mapContains(attributes_number, 'count') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['count']) = ? AND mapContains(attributes_number, 'count') = ?)", expectedArgs: []any{float64(0), true}, expectedErrorContains: "", }, @@ -1820,7 +1820,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Numeric values", query: "duration=1000.5", shouldPass: true, - expectedQuery: "WHERE (attributes_number['duration'] = ? AND mapContains(attributes_number, 'duration') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['duration']) = ? AND mapContains(attributes_number, 'duration') = ?)", expectedArgs: []any{float64(1000.5), true}, expectedErrorContains: "", }, @@ -1828,7 +1828,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Numeric values", query: "amount=-10.25", shouldPass: true, - expectedQuery: "WHERE (attributes_number['amount'] = ? AND mapContains(attributes_number, 'amount') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['amount']) = ? 
AND mapContains(attributes_number, 'amount') = ?)", expectedArgs: []any{float64(-10.25), true}, expectedErrorContains: "", }, @@ -1915,7 +1915,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Nested object paths", query: "metadata.dimensions.width>1000", shouldPass: true, - expectedQuery: "WHERE (attributes_number['metadata.dimensions.width'] > ? AND mapContains(attributes_number, 'metadata.dimensions.width') = ?)", + expectedQuery: "WHERE (toFloat64(attributes_number['metadata.dimensions.width']) > ? AND mapContains(attributes_number, 'metadata.dimensions.width') = ?)", expectedArgs: []any{float64(1000), true}, expectedErrorContains: "", }, @@ -1938,28 +1938,28 @@ func TestFilterExprLogs(t *testing.T) { category: "Operator precedence", query: "NOT status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true}, // Should be (NOT status=200) AND service.name="api" }, { category: "Operator precedence", query: "status=200 AND service.name=\"api\" OR service.name=\"web\"", shouldPass: true, - expectedQuery: "WHERE (((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true, "web", true}, // Should be (status=200 AND service.name="api") OR service.name="web" }, { category: "Operator precedence", query: "NOT status=200 OR NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", expectedArgs: []any{float64(200), true, "api", true}, // Should be (NOT status=200) OR (NOT service.name="api") }, { category: "Operator precedence", query: "status=200 OR service.name=\"api\" AND level=\"ERROR\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) OR ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_string['level'] = ? AND mapContains(attributes_string, 'level') = ?)))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_string['level'] = ? 
AND mapContains(attributes_string, 'level') = ?)))", expectedArgs: []any{float64(200), true, "api", true, "ERROR", true}, // Should be status=200 OR (service.name="api" AND level="ERROR") }, @@ -1984,7 +1984,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Whitespace patterns", query: "status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true}, // Multiple spaces }, @@ -2130,14 +2130,14 @@ func TestFilterExprLogs(t *testing.T) { category: "Common filters", query: "status IN (\"pending\", \"processing\", \"completed\") AND NOT is_deleted=true", shouldPass: true, - expectedQuery: "WHERE (((attributes_number['status'] = ? OR attributes_number['status'] = ? OR attributes_number['status'] = ?) AND mapContains(attributes_number, 'status') = ?) AND NOT ((attributes_bool['is_deleted'] = ? AND mapContains(attributes_bool, 'is_deleted') = ?)))", + expectedQuery: "WHERE (((toString(attributes_number['status']) = ? OR toString(attributes_number['status']) = ? OR toString(attributes_number['status']) = ?) AND mapContains(attributes_number, 'status') = ?) AND NOT ((attributes_bool['is_deleted'] = ? AND mapContains(attributes_bool, 'is_deleted') = ?)))", expectedArgs: []any{"pending", "processing", "completed", true, true, true}, }, { category: "Common filters", query: "(first_name LIKE \"John%\" OR last_name LIKE \"Smith%\") AND age>=18", shouldPass: true, - expectedQuery: "WHERE ((((attributes_string['first_name'] LIKE ? AND mapContains(attributes_string, 'first_name') = ?) 
OR (attributes_string['last_name'] LIKE ? AND mapContains(attributes_string, 'last_name') = ?))) AND (attributes_number['age'] >= ? AND mapContains(attributes_number, 'age') = ?))", + expectedQuery: "WHERE ((((attributes_string['first_name'] LIKE ? AND mapContains(attributes_string, 'first_name') = ?) OR (attributes_string['last_name'] LIKE ? AND mapContains(attributes_string, 'last_name') = ?))) AND (toFloat64(attributes_number['age']) >= ? AND mapContains(attributes_number, 'age') = ?))", expectedArgs: []any{"John%", true, "Smith%", true, float64(18), true}, }, { @@ -2154,7 +2154,7 @@ func TestFilterExprLogs(t *testing.T) { category: "More common filters", query: "service.name=\"api\" AND (status>=500 OR duration>1000) AND NOT message CONTAINS \"expected\"", shouldPass: true, - expectedQuery: "WHERE ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (((attributes_number['status'] >= ? AND mapContains(attributes_number, 'status') = ?) OR (attributes_number['duration'] > ? AND mapContains(attributes_number, 'duration') = ?))) AND NOT ((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)))", + expectedQuery: "WHERE ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?))) AND NOT ((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)))", expectedArgs: []any{"api", true, float64(500), true, float64(1000), true, "%expected%", true}, }, @@ -2205,14 +2205,14 @@ func TestFilterExprLogs(t *testing.T) { query: "path=\"C:\\\\Program Files\\\\Application\"", shouldPass: true, expectedQuery: "WHERE (attributes_string['path'] = ? 
AND mapContains(attributes_string, 'path') = ?)", - expectedArgs: []any{"C:\\\\Program Files\\\\Application", true}, + expectedArgs: []any{"C:\\Program Files\\Application", true}, }, { category: "Escaped values", query: "path=\"^prefix\\\\.suffix$\\\\d+\\\\w+\"", shouldPass: true, expectedQuery: "WHERE (attributes_string['path'] = ? AND mapContains(attributes_string, 'path') = ?)", - expectedArgs: []any{"^prefix\\\\.suffix$\\\\d+\\\\w+", true}, + expectedArgs: []any{"^prefix\\.suffix$\\d+\\w+", true}, }, // Inconsistent/unusual whitespace @@ -2220,7 +2220,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Unusual whitespace", query: "status = 200 AND service.name = \"api\"", shouldPass: true, - expectedQuery: "WHERE ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", expectedArgs: []any{float64(200), true, "api", true}, }, { @@ -2281,7 +2281,7 @@ func TestFilterExprLogs(t *testing.T) { ) `, shouldPass: true, - expectedQuery: "WHERE ((((((((attributes_number['status'] >= ? AND mapContains(attributes_number, 'status') = ?) AND (attributes_number['status'] < ? AND mapContains(attributes_number, 'status') = ?))) OR (((attributes_number['status'] >= ? AND mapContains(attributes_number, 'status') = ?) AND (attributes_number['status'] < ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)))))) AND ((((resources_string['service.name'] = ? OR resources_string['service.name'] = ? OR resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?) OR (((resources_string['service.type'] = ? 
AND mapContains(resources_string, 'service.type') = ?) AND NOT ((resources_string['service.deprecated'] = ? AND mapContains(resources_string, 'service.deprecated') = ?)))))))) AND (((((attributes_number['duration'] < ? AND mapContains(attributes_number, 'duration') = ?) OR ((attributes_number['duration'] BETWEEN ? AND ? AND mapContains(attributes_number, 'duration') = ?)))) AND ((resources_string['environment'] <> ? OR (((resources_string['environment'] = ? AND mapContains(resources_string, 'environment') = ?) AND (attributes_bool['is_automated_test'] = ? AND mapContains(attributes_bool, 'is_automated_test') = ?))))))) AND NOT ((((((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?) OR (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?))) AND (attributes_string['severity'] = ? AND mapContains(attributes_string, 'severity') = ?)))))", + expectedQuery: "WHERE ((((((((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?))) OR (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)))))) AND ((((resources_string['service.name'] = ? OR resources_string['service.name'] = ? OR resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?) OR (((resources_string['service.type'] = ? AND mapContains(resources_string, 'service.type') = ?) AND NOT ((resources_string['service.deprecated'] = ? AND mapContains(resources_string, 'service.deprecated') = ?)))))))) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) 
OR ((toFloat64(attributes_number['duration']) BETWEEN ? AND ? AND mapContains(attributes_number, 'duration') = ?)))) AND ((resources_string['environment'] <> ? OR (((resources_string['environment'] = ? AND mapContains(resources_string, 'environment') = ?) AND (attributes_bool['is_automated_test'] = ? AND mapContains(attributes_bool, 'is_automated_test') = ?))))))) AND NOT ((((((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?) OR (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?))) AND (attributes_string['severity'] = ? AND mapContains(attributes_string, 'severity') = ?)))))", expectedArgs: []any{ float64(200), true, float64(300), true, float64(400), true, float64(500), true, float64(404), true, "api", "web", "auth", true, diff --git a/pkg/telemetrylogs/statement_builder.go b/pkg/telemetrylogs/statement_builder.go index 0ea14b154060..a4a55649c3d2 100644 --- a/pkg/telemetrylogs/statement_builder.go +++ b/pkg/telemetrylogs/statement_builder.go @@ -73,6 +73,8 @@ func (b *logQueryStatementBuilder) Build( return nil, err } + b.adjustKeys(ctx, keys, query) + // Create SQL builder q := sqlbuilder.NewSelectBuilder() @@ -124,6 +126,77 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) [] return keySelectors } +func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) { + // for group by / order by, if there is a key + // that exactly matches the name of intrinsic field but has + // a field context or data type that doesn't match the field context or data type of the + // intrinsic field, + // and there is no additional key present in the data with the incoming key match, + // then override the given context with + // intrinsic field context and data type + // Why does that happen? 
Because we have a lot of dashboards created by users and shared over web + // that have incorrect context or data type populated, so we fix it + // note: this override happens only when there is no match; if there is a match, + // we can't make a decision on behalf of users so we let it be used unmodified + + // example: {"key": "severity_text","type": "tag","dataType": "string"} + // This is sent as "tag", when it's not, this was earlier managed with + // `isColumn`, which we don't have in v5 (because it's not a user concern whether it's mat col or not) + // Such requests as-is look for attributes, the following code exists to handle them + checkMatch := func(k *telemetrytypes.TelemetryFieldKey) { + var overallMatch bool + + findMatch := func(staticKeys map[string]telemetrytypes.TelemetryFieldKey) bool { + // for a given key `k`, iterate over the metadata keys `keys` + // and see if there is any exact match + match := false + for _, mapKey := range keys[k.Name] { + if mapKey.FieldContext == k.FieldContext && mapKey.FieldDataType == k.FieldDataType { + match = true + } + } + // if we don't have an exact match, then it doesn't exist in attribute or resource attribute + // use the intrinsic/calculated field + if !match { + b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name) + k.FieldContext = staticKeys[k.Name].FieldContext + k.FieldDataType = staticKeys[k.Name].FieldDataType + } + return match + } + + if _, ok := IntrinsicFields[k.Name]; ok { + overallMatch = overallMatch || findMatch(IntrinsicFields) + } + + if !overallMatch { + // check if all the keys for the given field have been materialized; if so + // set the key to materialized + materilized := true + for _, key := range keys[k.Name] { + materilized = materilized && key.Materialized + } + k.Materialized = materilized + } + } + + for idx := range query.GroupBy { + checkMatch(&query.GroupBy[idx].TelemetryFieldKey) + } + for idx := range query.Order { +
checkMatch(&query.Order[idx].Key.TelemetryFieldKey) + } + + keys["id"] = []*telemetrytypes.TelemetryFieldKey{ + { + Name: "id", + Signal: telemetrytypes.SignalLogs, + FieldContext: telemetrytypes.FieldContextLog, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + } +} + // buildListQuery builds a query for list panel type func (b *logQueryStatementBuilder) buildListQuery( ctx context.Context, @@ -229,7 +302,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery( } colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) allGroupByArgs = append(allGroupByArgs, args...) - sb.SelectMore(sqlbuilder.Escape(colExpr)) + sb.SelectMore(colExpr) fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)) } @@ -349,7 +422,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery( } colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) allGroupByArgs = append(allGroupByArgs, args...) - sb.SelectMore(sqlbuilder.Escape(colExpr)) + sb.SelectMore(colExpr) } // for scalar queries, the rate would be end-start diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go index 26e38dac7469..c12170f99c73 100644 --- a/pkg/telemetrylogs/stmt_builder_test.go +++ b/pkg/telemetrylogs/stmt_builder_test.go @@ -36,7 +36,7 @@ func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation ) } -func TestStatementBuilder(t *testing.T) { +func TestStatementBuilderTimeSeries(t *testing.T) { cases := []struct { name string requestType qbtypes.RequestType @@ -45,7 +45,7 @@ func TestStatementBuilder(t *testing.T) { expectedErr error }{ { - name: "test", + name: "Time series with limit", requestType: qbtypes.RequestTypeTimeSeries, query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ Signal: telemetrytypes.SignalLogs, @@ -68,13 +68,13 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint 
FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, { - name: "test", + name: "Time series with limit + custom order by", requestType: qbtypes.RequestTypeTimeSeries, query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ Signal: telemetrytypes.SignalLogs, @@ -107,7 +107,7 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, @@ -152,3 +152,96 @@ func TestStatementBuilder(t *testing.T) { }) } } + +func TestStatementBuilderListQuery(t *testing.T) { + cases := []struct { + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] + expected qbtypes.Statement + expectedErr error + }{ + { + name: "default list", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ + Signal: telemetrytypes.SignalLogs, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'cartservice'", + }, + Limit: 10, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
LIMIT ?", + Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, + }, + expectedErr: nil, + }, + { + name: "list query with mat col order by", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ + Signal: telemetrytypes.SignalLogs, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'cartservice'", + }, + Limit: 10, + Order: []qbtypes.OrderBy{ + { + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "materialized.key.name", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
ORDER BY `attribute_string_materialized$$key$$name` AS `materialized.key.name` desc LIMIT ?", + Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, + }, + expectedErr: nil, + }, + } + + fm := NewFieldMapper() + cb := NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() + + aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil) + + resourceFilterStmtBuilder := resourceFilterStmtBuilder() + + statementBuilder := NewLogQueryStatementBuilder( + instrumentationtest.New().ToProviderSettings(), + mockMetadataStore, + fm, + cb, + resourceFilterStmtBuilder, + aggExprRewriter, + DefaultFullTextColumn, + BodyJSONStringSearchPrefix, + GetBodyJSONKey, + ) + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil) + + if c.expectedErr != nil { + require.Error(t, err) + require.Contains(t, err.Error(), c.expectedErr.Error()) + } else { + require.NoError(t, err) + require.Equal(t, c.expected.Query, q.Query) + require.Equal(t, c.expected.Args, q.Args) + require.Equal(t, c.expected.Warnings, q.Warnings) + } + }) + } +} diff --git a/pkg/telemetrymetadata/field_mapper.go b/pkg/telemetrymetadata/field_mapper.go index b1eb5ad49830..a7564cbe7c58 100644 --- a/pkg/telemetrymetadata/field_mapper.go +++ b/pkg/telemetrymetadata/field_mapper.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" "golang.org/x/exp/maps" ) @@ -95,7 +96,7 @@ func (m *fieldMapper) ColumnExpressionFor( return "", errors.Wrapf(err, 
errors.TypeInvalidInput, errors.CodeInvalidInput, correction) } else { // not even a close match, return an error - return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name) + return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name) } } } else if len(keysForField) == 1 { @@ -112,5 +113,5 @@ func (m *fieldMapper) ColumnExpressionFor( } } - return fmt.Sprintf("%s AS `%s`", colName, field.Name), nil + return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(colName), field.Name), nil } diff --git a/pkg/telemetrymetadata/metadata.go b/pkg/telemetrymetadata/metadata.go index e9ec26920354..4fd86e29f146 100644 --- a/pkg/telemetrymetadata/metadata.go +++ b/pkg/telemetrymetadata/metadata.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "log/slog" + "slices" "strings" "github.com/SigNoz/signoz/pkg/errors" @@ -133,6 +134,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector var limit int searchTexts := []string{} + dataTypes := []telemetrytypes.FieldDataType{} conds := []string{} for _, fieldKeySelector := range fieldKeySelectors { @@ -153,9 +155,16 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector } searchTexts = append(searchTexts, fieldKeySelector.Name) - + if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified { + dataTypes = append(dataTypes, fieldKeySelector.FieldDataType) + } // now look at the field context - if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified { + // we don't write most of intrinsic fields to tag attributes table + // for this reason we don't want to apply tag_type if the field context + // if not attribute or resource attribute + if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified && + (fieldKeySelector.FieldContext == telemetrytypes.FieldContextAttribute || + fieldKeySelector.FieldContext == 
telemetrytypes.FieldContextResource) { fieldKeyConds = append(fieldKeyConds, sb.E("tag_type", fieldKeySelector.FieldContext.TagType())) } @@ -216,7 +225,7 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error()) } - staticKeys := []string{"isRoot", "isEntrypoint"} + staticKeys := []string{"isRoot", "isEntryPoint"} staticKeys = append(staticKeys, maps.Keys(telemetrytraces.IntrinsicFields)...) staticKeys = append(staticKeys, maps.Keys(telemetrytraces.CalculatedFields)...) @@ -229,6 +238,24 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector break } } + + // skip the keys that don't match data type + if field, exists := telemetrytraces.IntrinsicFields[key]; exists { + if len(dataTypes) > 0 && + slices.Index(dataTypes, field.FieldDataType) == -1 && + field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified { + continue + } + } + + if field, exists := telemetrytraces.CalculatedFields[key]; exists { + if len(dataTypes) > 0 && + slices.Index(dataTypes, field.FieldDataType) == -1 && + field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified { + continue + } + } + if found { if field, exists := telemetrytraces.IntrinsicFields[key]; exists { if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added { @@ -303,6 +330,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors conds := []string{} searchTexts := []string{} + dataTypes := []telemetrytypes.FieldDataType{} for _, fieldKeySelector := range fieldKeySelectors { @@ -321,9 +349,17 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors fieldKeyConds = append(fieldKeyConds, sb.Like("tag_key", "%"+fieldKeySelector.Name+"%")) } searchTexts = append(searchTexts, fieldKeySelector.Name) + if fieldKeySelector.FieldDataType != 
telemetrytypes.FieldDataTypeUnspecified { + dataTypes = append(dataTypes, fieldKeySelector.FieldDataType) + } // now look at the field context - if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified { + // we don't write most of intrinsic fields to tag attributes table + // for this reason we don't want to apply tag_type if the field context + // if not attribute or resource attribute + if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified && + (fieldKeySelector.FieldContext == telemetrytypes.FieldContextAttribute || + fieldKeySelector.FieldContext == telemetrytypes.FieldContextResource) { fieldKeyConds = append(fieldKeyConds, sb.E("tag_type", fieldKeySelector.FieldContext.TagType())) } @@ -395,6 +431,16 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors break } } + + // skip the keys that don't match data type + if field, exists := telemetrylogs.IntrinsicFields[key]; exists { + if len(dataTypes) > 0 && + slices.Index(dataTypes, field.FieldDataType) == -1 && + field.FieldDataType != telemetrytypes.FieldDataTypeUnspecified { + continue + } + } + if found { if field, exists := telemetrylogs.IntrinsicFields[key]; exists { if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added { diff --git a/pkg/telemetrymetadata/metadata_test.go b/pkg/telemetrymetadata/metadata_test.go index e02eb61fa4df..078f74891529 100644 --- a/pkg/telemetrymetadata/metadata_test.go +++ b/pkg/telemetrymetadata/metadata_test.go @@ -60,7 +60,7 @@ func TestGetKeys(t *testing.T) { query := `SELECT.*` mock.ExpectQuery(query). - WithArgs("%http.method%", telemetrytypes.FieldContextSpan.TagType(), telemetrytypes.FieldDataTypeString.TagDataType(), 10). + WithArgs("%http.method%", telemetrytypes.FieldDataTypeString.TagDataType(), 10). 
WillReturnRows(cmock.NewRows([]cmock.ColumnType{ {Name: "tag_key", Type: "String"}, {Name: "tag_type", Type: "String"}, diff --git a/pkg/telemetrymetrics/condition_builder.go b/pkg/telemetrymetrics/condition_builder.go index 28074c7c8c4a..bacb90705ff5 100644 --- a/pkg/telemetrymetrics/condition_builder.go +++ b/pkg/telemetrymetrics/condition_builder.go @@ -93,23 +93,13 @@ func (c *conditionBuilder) conditionFor( if !ok { return "", qbtypes.ErrInValues } - // instead of using IN, we use `=` + `OR` to make use of index - conditions := []string{} - for _, value := range values { - conditions = append(conditions, sb.E(tblFieldName, value)) - } - return sb.Or(conditions...), nil + return sb.In(tblFieldName, values), nil case qbtypes.FilterOperatorNotIn: values, ok := value.([]any) if !ok { return "", qbtypes.ErrInValues } - // instead of using NOT IN, we use `!=` + `AND` to make use of index - conditions := []string{} - for _, value := range values { - conditions = append(conditions, sb.NE(tblFieldName, value)) - } - return sb.And(conditions...), nil + return sb.NotIn(tblFieldName, values), nil // exists and not exists // in the UI based query builder, `exists` and `not exists` are used for diff --git a/pkg/telemetrymetrics/condition_builder_test.go b/pkg/telemetrymetrics/condition_builder_test.go index 9d6ae2e53eee..b15ebb28914e 100644 --- a/pkg/telemetrymetrics/condition_builder_test.go +++ b/pkg/telemetrymetrics/condition_builder_test.go @@ -118,8 +118,8 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorIn, value: []any{"http.server.duration", "http.server.request.duration", "http.server.response.duration"}, - expectedSQL: "(metric_name = ? OR metric_name = ? 
OR metric_name = ?)", - expectedArgs: []any{"http.server.duration", "http.server.request.duration", "http.server.response.duration"}, + expectedSQL: "metric_name IN (?)", + expectedArgs: []any{[]any{"http.server.duration", "http.server.request.duration", "http.server.response.duration"}}, expectedError: nil, }, { @@ -141,8 +141,8 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorNotIn, value: []any{"debug", "info", "trace"}, - expectedSQL: "(metric_name <> ? AND metric_name <> ? AND metric_name <> ?)", - expectedArgs: []any{"debug", "info", "trace"}, + expectedSQL: "metric_name NOT IN (?)", + expectedArgs: []any{[]any{"debug", "info", "trace"}}, expectedError: nil, }, { diff --git a/pkg/telemetrymetrics/field_mapper.go b/pkg/telemetrymetrics/field_mapper.go index b80706da4850..b13dbcd327dd 100644 --- a/pkg/telemetrymetrics/field_mapper.go +++ b/pkg/telemetrymetrics/field_mapper.go @@ -8,6 +8,7 @@ import ( schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" ) var ( @@ -100,5 +101,5 @@ func (m *fieldMapper) ColumnExpressionFor( return "", err } - return fmt.Sprintf("%s AS `%s`", colName, field.Name), nil + return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(colName), field.Name), nil } diff --git a/pkg/telemetrymetrics/statement_builder.go b/pkg/telemetrymetrics/statement_builder.go index b1228160e49c..e8ffeabb3a5c 100644 --- a/pkg/telemetrymetrics/statement_builder.go +++ b/pkg/telemetrymetrics/statement_builder.go @@ -11,6 +11,7 @@ import ( qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" + "golang.org/x/exp/slices" ) const ( @@ -84,6 +85,8 @@ func (b *metricQueryStatementBuilder) Build( return nil, err } + start, end 
= querybuilder.AdjustedMetricTimeRange(start, end, uint64(query.StepInterval.Seconds()), query) + return b.buildPipelineStatement(ctx, start, end, query, keys, variables) } @@ -149,7 +152,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement( origSpaceAgg := query.Aggregations[0].SpaceAggregation origTimeAgg := query.Aggregations[0].TimeAggregation - origGroupBy := query.GroupBy + origGroupBy := slices.Clone(query.GroupBy) if query.Aggregations[0].SpaceAggregation.IsPercentile() && query.Aggregations[0].Type != metrictypes.ExpHistogramType { @@ -162,8 +165,20 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement( } } - // we need to add le in the group by if it doesn't exist - if !leExists { + if leExists { + // if the user themselves adds `le`, then we remove it from the original group by + // this is to avoid preparing a query that returns `nan`s, see following query + // SELECT + // ts, + // le, + // histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.99) AS value + // FROM __spatial_aggregation_cte + // GROUP BY + // le, + // ts + + origGroupBy = slices.DeleteFunc(origGroupBy, func(k qbtypes.GroupByKey) bool { return k.Name == "le" }) + } else { query.GroupBy = append(query.GroupBy, qbtypes.GroupByKey{ TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "le"}, }) diff --git a/pkg/telemetrymetrics/stmt_builder_test.go b/pkg/telemetrymetrics/stmt_builder_test.go index 669eea7002c7..2a87facdedba 100644 --- a/pkg/telemetrymetrics/stmt_builder_test.go +++ b/pkg/telemetrymetrics/stmt_builder_test.go @@ -49,8 +49,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 
0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte", - Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0}, + Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte", + Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0}, }, expectedErr: nil, }, @@ -83,7 +83,7 @@ func TestStatementBuilder(t *testing.T) { }, expected: qbtypes.Statement{ Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? 
GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte", - Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000)}, + Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)}, }, expectedErr: nil, }, @@ -115,7 +115,7 @@ func TestStatementBuilder(t *testing.T) { }, expected: qbtypes.Statement{ Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? 
GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", - Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983448000), "delta", false, "cartservice", "signoz_latency", uint64(1747947419000), uint64(1747983448000)}, + Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)}, }, expectedErr: nil, }, @@ -148,7 +148,7 @@ func TestStatementBuilder(t *testing.T) { }, expected: qbtypes.Statement{ Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? 
GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte", - Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983448000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947419000), uint64(1747983448000), 0}, + Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0}, }, expectedErr: nil, }, @@ -176,8 +176,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? 
GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", - Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0}, + Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? 
GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts", + Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0}, }, expectedErr: nil, }, diff --git a/pkg/telemetrytraces/condition_builder.go b/pkg/telemetrytraces/condition_builder.go index c45329397ea5..1256a1d7ee30 100644 --- a/pkg/telemetrytraces/condition_builder.go +++ b/pkg/telemetrytraces/condition_builder.go @@ -4,7 +4,9 @@ import ( "context" "fmt" "slices" + "strconv" "strings" + "time" schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" "github.com/SigNoz/signoz/pkg/errors" @@ -43,7 +45,24 @@ func (c *conditionBuilder) conditionFor( return "", err } - tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName) + // TODO(srikanthccv): maybe extend this to every possible attribute + if key.Name == "duration_nano" || key.Name == "durationNano" { // QoL improvement + if strDuration, ok := value.(string); ok { + duration, err := time.ParseDuration(strDuration) + if err == nil { + value = duration.Nanoseconds() + } else { + duration, err := strconv.ParseFloat(strDuration, 64) + if err == nil { + value = duration + } else { + return "", errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid duration value: %s", strDuration) + } + } + } + } else { + tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, 
tblFieldName) + } // regular operators switch operator { @@ -129,13 +148,6 @@ func (c *conditionBuilder) conditionFor( // in the query builder, `exists` and `not exists` are used for // key membership checks, so depending on the column type, the condition changes case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists: - // if the field is intrinsic, it always exists - if slices.Contains(maps.Keys(IntrinsicFields), tblFieldName) || - slices.Contains(maps.Keys(CalculatedFields), tblFieldName) || - slices.Contains(maps.Keys(IntrinsicFieldsDeprecated), tblFieldName) || - slices.Contains(maps.Keys(CalculatedFieldsDeprecated), tblFieldName) { - return "true", nil - } var value any switch column.Type { @@ -251,7 +263,7 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry case SpanSearchScopeRoot: return "parent_span_id = ''", nil case SpanSearchScopeEntryPoint: - return fmt.Sprintf("((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''", + return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''", DBName, TopLevelOperationsTableName), nil default: return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid span search scope: %s", key.Name) diff --git a/pkg/telemetrytraces/condition_builder_test.go b/pkg/telemetrytraces/condition_builder_test.go index a990648639b7..b6b45b1a73ba 100644 --- a/pkg/telemetrytraces/condition_builder_test.go +++ b/pkg/telemetrytraces/condition_builder_test.go @@ -44,7 +44,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorGreaterThan, value: float64(100), - expectedSQL: "(attributes_number['request.duration'] > ? AND mapContains(attributes_number, 'request.duration') = ?)", + expectedSQL: "(toFloat64(attributes_number['request.duration']) > ? 
AND mapContains(attributes_number, 'request.duration') = ?)", expectedArgs: []any{float64(100), true}, expectedError: nil, }, @@ -57,7 +57,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorLessThan, value: float64(1024), - expectedSQL: "(attributes_number['request.size'] < ? AND mapContains(attributes_number, 'request.size') = ?)", + expectedSQL: "(toFloat64(attributes_number['request.size']) < ? AND mapContains(attributes_number, 'request.size') = ?)", expectedArgs: []any{float64(1024), true}, expectedError: nil, }, diff --git a/pkg/telemetrytraces/const.go b/pkg/telemetrytraces/const.go index bddd5f7063ab..473b255e1d70 100644 --- a/pkg/telemetrytraces/const.go +++ b/pkg/telemetrytraces/const.go @@ -108,7 +108,7 @@ var ( Name: "spanKind", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, - FieldDataType: telemetrytypes.FieldDataTypeNumber, + FieldDataType: telemetrytypes.FieldDataTypeString, }, "durationNano": { Name: "durationNano", @@ -142,7 +142,7 @@ var ( Description: "Derived response status code from the HTTP/RPC status code attributes. Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#response_status_code)", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, - FieldDataType: telemetrytypes.FieldDataTypeNumber, + FieldDataType: telemetrytypes.FieldDataTypeString, }, "external_http_url": { Name: "external_http_url", @@ -205,7 +205,7 @@ var ( Description: "Whether the span is remote. 
Learn more [here](https://signoz.io/docs/traces-management/guides/derived-fields-spans/#is_remote)", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, - FieldDataType: telemetrytypes.FieldDataTypeBool, + FieldDataType: telemetrytypes.FieldDataTypeString, }, } @@ -214,7 +214,7 @@ var ( Name: "responseStatusCode", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, - FieldDataType: telemetrytypes.FieldDataTypeNumber, + FieldDataType: telemetrytypes.FieldDataTypeString, }, "externalHttpUrl": { Name: "externalHttpUrl", @@ -268,51 +268,105 @@ var ( Name: "isRemote", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, - FieldDataType: telemetrytypes.FieldDataTypeBool, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "serviceName": { + Name: "serviceName", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "httpRoute": { + Name: "httpRoute", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "msgSystem": { + Name: "msgSystem", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "msgOperation": { + Name: "msgOperation", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "dbSystem": { + Name: "dbSystem", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "rpcSystem": { + Name: "rpcSystem", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "rpcService": { + Name: "rpcService", + Signal: 
telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "rpcMethod": { + Name: "rpcMethod", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + "peerService": { + Name: "peerService", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, }, } SpanSearchScopeRoot = "isroot" SpanSearchScopeEntryPoint = "isentrypoint" - DefaultFields = []telemetrytypes.TelemetryFieldKey{ - { + DefaultFields = map[string]telemetrytypes.TelemetryFieldKey{ + "timestamp": { Name: "timestamp", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, FieldDataType: telemetrytypes.FieldDataTypeNumber, }, - { + "span_id": { Name: "span_id", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, FieldDataType: telemetrytypes.FieldDataTypeString, }, - { + "trace_id": { Name: "trace_id", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, FieldDataType: telemetrytypes.FieldDataTypeString, }, - { + "name": { Name: "name", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, FieldDataType: telemetrytypes.FieldDataTypeString, }, - { + "service.name": { Name: "service.name", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString, Materialized: true, }, - { + "duration_nano": { Name: "duration_nano", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, FieldDataType: telemetrytypes.FieldDataTypeNumber, }, - { + "response_status_code": { Name: "response_status_code", Signal: telemetrytypes.SignalTraces, FieldContext: telemetrytypes.FieldContextSpan, diff --git a/pkg/telemetrytraces/field_mapper.go 
b/pkg/telemetrytraces/field_mapper.go index 90873c55e6ba..0d18bf2514b0 100644 --- a/pkg/telemetrytraces/field_mapper.go +++ b/pkg/telemetrytraces/field_mapper.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/errors" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" "golang.org/x/exp/maps" ) @@ -119,6 +120,41 @@ var ( "attribute_string_rpc$$method_exists": {Name: "attribute_string_rpc$$method_exists", Type: schema.ColumnTypeBool}, "attribute_string_peer$$service_exists": {Name: "attribute_string_peer$$service_exists", Type: schema.ColumnTypeBool}, } + + // TODO(srikanthccv): remove this mapping + oldToNew = map[string]string{ + // deprecated intrinsic -> new intrinsic + "traceID": "trace_id", + "spanID": "span_id", + "parentSpanID": "parent_span_id", + "spanKind": "kind_string", + "durationNano": "duration_nano", + "statusCode": "status_code", + "statusMessage": "status_message", + "statusCodeString": "status_code_string", + + // deprecated derived -> new derived / materialized + "references": "links", + "responseStatusCode": "response_status_code", + "externalHttpUrl": "external_http_url", + "httpUrl": "http_url", + "externalHttpMethod": "external_http_method", + "httpMethod": "http_method", + "httpHost": "http_host", + "dbName": "db_name", + "dbOperation": "db_operation", + "hasError": "has_error", + "isRemote": "is_remote", + "serviceName": "resource_string_service$$name", + "httpRoute": "attribute_string_http$$route", + "msgSystem": "attribute_string_messaging$$system", + "msgOperation": "attribute_string_messaging$$operation", + "dbSystem": "attribute_string_db$$system", + "rpcSystem": "attribute_string_rpc$$system", + "rpcService": "attribute_string_rpc$$service", + "rpcMethod": "attribute_string_rpc$$method", + "peerService": "attribute_string_peer$$service", + } ) type defaultFieldMapper struct{} @@ -155,6 +191,16 @@ func (m 
*defaultFieldMapper) getColumn( // The actual SQL will be generated in the condition builder return &schema.Column{Name: key.Name, Type: schema.ColumnTypeBool}, nil } + + // TODO(srikanthccv): remove this when it's safe to remove + // issue with CH aliasing + if _, ok := CalculatedFieldsDeprecated[key.Name]; ok { + return indexV3Columns[oldToNew[key.Name]], nil + } + if _, ok := IntrinsicFieldsDeprecated[key.Name]; ok { + return indexV3Columns[oldToNew[key.Name]], nil + } + if col, ok := indexV3Columns[key.Name]; ok { return col, nil } @@ -262,7 +308,7 @@ func (m *defaultFieldMapper) ColumnExpressionFor( return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "%s", correction) } else { // not even a close match, return an error - return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field %s not found", field.Name) + return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name) } } } else if len(keysForField) == 1 { @@ -279,5 +325,5 @@ func (m *defaultFieldMapper) ColumnExpressionFor( } } - return fmt.Sprintf("%s AS `%s`", colName, field.Name), nil + return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(colName), field.Name), nil } diff --git a/pkg/telemetrytraces/span_scope_simple_test.go b/pkg/telemetrytraces/span_scope_simple_test.go index 4120b6e38ddd..d9558d52d63c 100644 --- a/pkg/telemetrytraces/span_scope_simple_test.go +++ b/pkg/telemetrytraces/span_scope_simple_test.go @@ -29,7 +29,7 @@ func TestSpanScopeFilterExpression(t *testing.T) { { name: "simple isentrypoint filter", expression: "isentrypoint = true", - expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''", + expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND 
parent_span_id != ''", }, { name: "combined filter with AND", @@ -39,7 +39,7 @@ func TestSpanScopeFilterExpression(t *testing.T) { { name: "combined filter with OR", expression: "isentrypoint = true OR has_error = true", - expectedCondition: "((name, resource_string_service$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''", + expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''", }, } diff --git a/pkg/telemetrytraces/statement_builder.go b/pkg/telemetrytraces/statement_builder.go index 20df9162f170..d231bdfd1800 100644 --- a/pkg/telemetrytraces/statement_builder.go +++ b/pkg/telemetrytraces/statement_builder.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "log/slog" + "slices" "strings" "github.com/SigNoz/signoz/pkg/errors" @@ -13,6 +14,7 @@ import ( qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" + "golang.org/x/exp/maps" ) var ( @@ -72,6 +74,8 @@ func (b *traceQueryStatementBuilder) Build( return nil, err } + b.adjustKeys(ctx, keys, query) + // Check if filter contains trace_id(s) and optimize time range if needed if query.Filter != nil && query.Filter.Expression != "" && b.telemetryStore != nil { traceIDs, found := ExtractTraceIDsFromFilter(query.Filter.Expression) @@ -126,19 +130,17 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) for idx := range query.SelectFields { keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{ - Name: query.SelectFields[idx].Name, - Signal: telemetrytypes.SignalTraces, - FieldContext: query.SelectFields[idx].FieldContext, - FieldDataType: query.SelectFields[idx].FieldDataType, + Name: query.SelectFields[idx].Name, + Signal: telemetrytypes.SignalTraces, + 
FieldContext: query.SelectFields[idx].FieldContext, }) } for idx := range query.Order { keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{ - Name: query.Order[idx].Key.Name, - Signal: telemetrytypes.SignalTraces, - FieldContext: query.Order[idx].Key.FieldContext, - FieldDataType: query.Order[idx].Key.FieldDataType, + Name: query.Order[idx].Key.Name, + Signal: telemetrytypes.SignalTraces, + FieldContext: query.Order[idx].Key.FieldContext, }) } @@ -149,6 +151,100 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) return keySelectors } +func (b *traceQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) { + // for group by / order by / selected fields, if there is a key + // that exactly matches the name of intrinsic / calculated field but has + // a field context or data type that doesn't match the field context or data type of the + // intrinsic field, + // and there is no additional key present in the data with the incoming key match, + // then override the given context with + // intrinsic / calculated field context and data type + // Why does that happen? 
Because we have a lot of assets created by users and shared over web + // that has incorrect context or data type populated so we fix it + // note: this override happens only when there is no match; if there is a match, + // we can't make decision on behalf of users so we let it use unmodified + + // example: {"key": "httpRoute","type": "tag","dataType": "string"} + // This is sent as "tag", when it's not, this was earlier managed with + // `isColumn`, which we don't have in v5 (because it's not a user concern whether it's mat col or not) + // Such requests as-is look for attributes, the following code exists to handle them + checkMatch := func(k *telemetrytypes.TelemetryFieldKey) { + var overallMatch bool + + findMatch := func(staticKeys map[string]telemetrytypes.TelemetryFieldKey) bool { + // for a given key `k`, iterate over the metadata keys `keys` + // and see if there is any exact match + match := false + for _, mapKey := range keys[k.Name] { + if mapKey.FieldContext == k.FieldContext && mapKey.FieldDataType == k.FieldDataType { + match = true + } + } + // we don't have exact match, then it's doesn't exist in attribute or resource attribute + // use the intrinsic/calculated field + if !match { + b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name) + k.FieldContext = staticKeys[k.Name].FieldContext + k.FieldDataType = staticKeys[k.Name].FieldDataType + } + return match + } + + if _, ok := IntrinsicFields[k.Name]; ok { + overallMatch = overallMatch || findMatch(IntrinsicFields) + } + if _, ok := CalculatedFields[k.Name]; ok { + overallMatch = overallMatch || findMatch(CalculatedFields) + } + if _, ok := IntrinsicFieldsDeprecated[k.Name]; ok { + overallMatch = overallMatch || findMatch(IntrinsicFieldsDeprecated) + } + if _, ok := CalculatedFieldsDeprecated[k.Name]; ok { + overallMatch = overallMatch || findMatch(CalculatedFieldsDeprecated) + } + + if !overallMatch { + // check if all the key for the given field have been 
materialized, if so + // set the key to materialized + materilized := true + for _, key := range keys[k.Name] { + materilized = materilized && key.Materialized + } + k.Materialized = materilized + } + } + + for idx := range query.GroupBy { + checkMatch(&query.GroupBy[idx].TelemetryFieldKey) + } + for idx := range query.Order { + checkMatch(&query.Order[idx].Key.TelemetryFieldKey) + } + for idx := range query.SelectFields { + checkMatch(&query.SelectFields[idx]) + } + + // add deprecated fields only during statement building + // why? + // 1. to not fail filter expression that use deprecated cols + // 2. this could have been moved to metadata fetching itself, however, that + // would mean, they also show up in suggestions we we don't want to do + for fieldKeyName, fieldKey := range IntrinsicFieldsDeprecated { + if _, ok := keys[fieldKeyName]; !ok { + keys[fieldKeyName] = []*telemetrytypes.TelemetryFieldKey{&fieldKey} + } else { + keys[fieldKeyName] = append(keys[fieldKeyName], &fieldKey) + } + } + for fieldKeyName, fieldKey := range CalculatedFieldsDeprecated { + if _, ok := keys[fieldKeyName]; !ok { + keys[fieldKeyName] = []*telemetrytypes.TelemetryFieldKey{&fieldKey} + } else { + keys[fieldKeyName] = append(keys[fieldKeyName], &fieldKey) + } + } +} + // buildListQuery builds a query for list panel type func (b *traceQueryStatementBuilder) buildListQuery( ctx context.Context, @@ -174,7 +270,22 @@ func (b *traceQueryStatementBuilder) buildListQuery( selectedFields := query.SelectFields if len(selectedFields) == 0 { - selectedFields = DefaultFields + sortedKeys := maps.Keys(DefaultFields) + slices.Sort(sortedKeys) + for _, key := range sortedKeys { + selectedFields = append(selectedFields, DefaultFields[key]) + } + } + + selectFieldKeys := []string{} + for _, field := range selectedFields { + selectFieldKeys = append(selectFieldKeys, field.Name) + } + + for _, x := range []string{"timestamp", "span_id", "trace_id"} { + if !slices.Contains(selectFieldKeys, x) { + 
selectedFields = append(selectedFields, DefaultFields[x]) + } } // TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs? @@ -183,7 +294,7 @@ func (b *traceQueryStatementBuilder) buildListQuery( if err != nil { return nil, err } - sb.SelectMore(sqlbuilder.Escape(colExpr)) + sb.SelectMore(colExpr) } // From table @@ -264,7 +375,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery( } colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) allGroupByArgs = append(allGroupByArgs, args...) - sb.SelectMore(sqlbuilder.Escape(colExpr)) + sb.SelectMore(colExpr) fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)) } @@ -381,7 +492,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery( } colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) allGroupByArgs = append(allGroupByArgs, args...) - sb.SelectMore(sqlbuilder.Escape(colExpr)) + sb.SelectMore(colExpr) } // for scalar queries, the rate would be end-start diff --git a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go index c89c14c93d45..ef1a5cdb9e00 100644 --- a/pkg/telemetrytraces/stmt_builder_test.go +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -59,11 +59,368 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? 
AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, + { + name: "legacy httpRoute in group by", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "count()", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "httpRoute", + FieldDataType: telemetrytypes.FieldDataTypeString, + FieldContext: telemetrytypes.FieldContextAttribute, + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(attribute_string_http$$route <> ?, attribute_string_http$$route, NULL)) AS `httpRoute`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `httpRoute` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(attribute_string_http$$route <> ?, attribute_string_http$$route, NULL)) AS `httpRoute`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`httpRoute`) GLOBAL IN (SELECT `httpRoute` FROM __limit_cte) GROUP BY ts, `httpRoute`", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + { + name: "legacy fields in search and group by", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "count()", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "serviceName = $service.name AND httpMethod EXISTS AND spanKind = 'Server'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "httpRoute", + FieldDataType: telemetrytypes.FieldDataTypeString, + FieldContext: telemetrytypes.FieldContextAttribute, + }, + }, + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "httpMethod", + FieldDataType: telemetrytypes.FieldDataTypeString, + FieldContext: telemetrytypes.FieldContextAttribute, + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (true AND true AND true) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(attribute_string_http$$route <> ?, attribute_string_http$$route, NULL)) AS `httpRoute`, toString(multiIf(http_method <> ?, http_method, NULL)) AS `httpMethod`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resource_string_service$$name = ? AND resource_string_service$$name <> ?) AND http_method <> ? AND kind_string = ?) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `httpRoute`, `httpMethod` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(attribute_string_http$$route <> ?, attribute_string_http$$route, NULL)) AS `httpRoute`, toString(multiIf(http_method <> ?, http_method, NULL)) AS `httpMethod`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resource_string_service$$name = ? AND resource_string_service$$name <> ?) AND http_method <> ? AND kind_string = ?) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`httpRoute`, `httpMethod`) GLOBAL IN (SELECT `httpRoute`, `httpMethod` FROM __limit_cte) GROUP BY ts, `httpRoute`, `httpMethod`", + Args: []any{uint64(1747945619), uint64(1747983448), "", "", "redis-manual", "", "", "Server", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "", "", "redis-manual", "", "", "Server", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + { + name: "context as key prefix test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "sum(metric.max_count)", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + { + name: "mat number key in aggregation test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "sum(cart.items_count)", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + { + name: "Legacy column with incorrect field context test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "count()", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "responseStatusCode", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(response_status_code <> ?, response_status_code, NULL)) AS `responseStatusCode`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `responseStatusCode` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(response_status_code <> ?, response_status_code, NULL)) AS `responseStatusCode`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`responseStatusCode`) GLOBAL IN (SELECT `responseStatusCode` FROM __limit_cte) GROUP BY ts, `responseStatusCode`", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + { + name: "Legacy column in aggregation and incorrect field context test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "p90(durationNano)", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "responseStatusCode", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(response_status_code <> ?, response_status_code, NULL)) AS `responseStatusCode`, quantile(0.90)(multiIf(duration_nano <> ?, duration_nano, NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `responseStatusCode` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(response_status_code <> ?, response_status_code, NULL)) AS `responseStatusCode`, quantile(0.90)(multiIf(duration_nano <> ?, duration_nano, NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`responseStatusCode`) GLOBAL IN (SELECT `responseStatusCode` FROM __limit_cte) GROUP BY ts, `responseStatusCode`", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "", 0, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "", 0, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + } + + fm := NewFieldMapper() + cb := NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() + aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil) + + resourceFilterStmtBuilder := resourceFilterStmtBuilder() + + statementBuilder := NewTraceQueryStatementBuilder( + instrumentationtest.New().ToProviderSettings(), + mockMetadataStore, + fm, + cb, + resourceFilterStmtBuilder, + aggExprRewriter, + nil, + ) + + vars := 
map[string]qbtypes.VariableItem{ + "service.name": { + Value: "redis-manual", + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, vars) + + if c.expectedErr != nil { + require.Error(t, err) + require.Contains(t, err.Error(), c.expectedErr.Error()) + } else { + require.NoError(t, err) + require.Equal(t, c.expected.Query, q.Query) + require.Equal(t, c.expected.Args, q.Args) + require.Equal(t, c.expected.Warnings, q.Warnings) + } + }) + } +} + +func TestStatementBuilderListQuery(t *testing.T) { + cases := []struct { + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation] + expected qbtypes.Statement + expectedErr error + }{ + { + name: "List query with mat selected fields", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + SelectFields: []telemetrytypes.TelemetryFieldKey{ + { + Name: "name", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + { + Name: "service.name", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextResource, + FieldDataType: telemetrytypes.FieldDataTypeString, + Materialized: true, + }, + { + Name: "duration_nano", + Signal: telemetrytypes.SignalTraces, + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + { + Name: "cart.items_count", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeFloat64, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM 
signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT name AS `name`, resources_string['service.name'] AS `service.name`, duration_nano AS `duration_nano`, `attribute_number_cart$$items_count` AS `cart.items_count`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, + }, + expectedErr: nil, + }, + { + name: "List query with default fields and attribute order by", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Order: []qbtypes.OrderBy{ + { + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "user.id", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + }, + Limit: 10, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) 
SELECT duration_nano AS `duration_nano`, name AS `name`, response_status_code AS `response_status_code`, `resource_string_service$$name` AS `service.name`, span_id AS `span_id`, timestamp AS `timestamp`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY attributes_string['user.id'] AS `user.id` desc LIMIT ?", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, + }, + expectedErr: nil, + }, + { + name: "List query with legacy fields", + requestType: qbtypes.RequestTypeRaw, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + SelectFields: []telemetrytypes.TelemetryFieldKey{ + { + Name: "name", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + { + Name: "serviceName", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + { + Name: "durationNano", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeNumber, + }, + { + Name: "httpMethod", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + { + Name: "responseStatusCode", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + Limit: 10, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE 
(simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT name AS `name`, resource_string_service$$name AS `serviceName`, duration_nano AS `durationNano`, http_method AS `httpMethod`, response_status_code AS `responseStatusCode`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, + }, + expectedErr: nil, + }, } fm := NewFieldMapper() diff --git a/pkg/telemetrytraces/test_data.go b/pkg/telemetrytraces/test_data.go index 926fc61aae76..6c25ae8c83d6 100644 --- a/pkg/telemetrytraces/test_data.go +++ b/pkg/telemetrytraces/test_data.go @@ -34,6 +34,28 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { FieldDataType: telemetrytypes.FieldDataTypeString, }, }, + "metric.max_count": { + { + Name: "metric.max_count", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeFloat64, + }, + }, + "cart.items_count": { + { + Name: "cart.items_count", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeFloat64, + Materialized: true, + }, + }, + "user.id": { + { + Name: "user.id", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, } for _, keys := range keysMap { for _, key := range keys { diff --git a/pkg/types/emailtypes/template.go b/pkg/types/emailtypes/template.go index 68b76bd4ee1b..73f542a89ced 100644 --- 
a/pkg/types/emailtypes/template.go +++ b/pkg/types/emailtypes/template.go @@ -12,11 +12,12 @@ import ( var ( // Templates is a list of all the templates that are supported by the emailing service. // This list should be updated whenever a new template is added. - Templates = []TemplateName{TemplateNameInvitationEmail} + Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameUpdateRole} ) var ( TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation_email")} + TemplateNameUpdateRole = TemplateName{valuer.NewString("update_role")} ) type TemplateName struct{ valuer.String } @@ -25,6 +26,8 @@ func NewTemplateName(name string) (TemplateName, error) { switch name { case TemplateNameInvitationEmail.StringValue(): return TemplateNameInvitationEmail, nil + case TemplateNameUpdateRole.StringValue(): + return TemplateNameUpdateRole, nil default: return TemplateName{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid template name: %s", name) } diff --git a/pkg/types/pipelinetypes/pipeline.go b/pkg/types/pipelinetypes/pipeline.go index f441d9574feb..31fff6e7f4be 100644 --- a/pkg/types/pipelinetypes/pipeline.go +++ b/pkg/types/pipelinetypes/pipeline.go @@ -14,6 +14,32 @@ import ( "github.com/uptrace/bun" ) +type JSONMappingType = string + +const ( + Host JSONMappingType = "host" + Service JSONMappingType = "service" + Environment JSONMappingType = "environment" + Severity JSONMappingType = "severity" + TraceID JSONMappingType = "trace_id" + SpanID JSONMappingType = "span_id" + TraceFlags JSONMappingType = "trace_flags" + Message JSONMappingType = "message" +) + +var DefaultSeverityMapping = map[string][]string{ + "trace": {"TRACE", "Trace", "trace", "trc", "Trc"}, + "debug": {"DEBUG", "Debug", "debug", "dbg", "Dbg"}, + "info": {"INFO", "Info", "info"}, + "warn": {"WARN", "Warn", "warn", "warning", "Warning", "wrn", "Wrn"}, + "error": {"ERROR", "Error", "error", "err", "Err", "ERR", "fail", "Fail", "FAIL"}, + "fatal": {"FATAL", 
"Fatal", "fatal", "critical", "Critical", "CRITICAL", "crit", "Crit", "CRIT", + "panic", "Panic", "PANIC"}, +} + +var validMappingLevels = []string{"trace", "debug", "info", "warn", "error", "fatal"} +var validMappingVariableTypes = []string{Host, Service, Environment, Severity, TraceID, SpanID, TraceFlags, Message} + type StoreablePipeline struct { bun.BaseModel `bun:"table:pipelines,alias:p"` @@ -91,9 +117,54 @@ type PipelineOperator struct { Layout string `json:"layout,omitempty" yaml:"layout,omitempty"` LayoutType string `json:"layout_type,omitempty" yaml:"layout_type,omitempty"` + // json_parser fields + EnableFlattening bool `json:"enable_flattening,omitempty" yaml:"enable_flattening,omitempty"` + MaxFlatteningDepth int `json:"-" yaml:"max_flattening_depth,omitempty"` // MaxFlatteningDepth is not configurable from User's side + EnablePaths bool `json:"enable_paths,omitempty" yaml:"enable_paths,omitempty"` + PathPrefix string `json:"path_prefix,omitempty" yaml:"path_prefix,omitempty"` + + // Used in Severity Parsing and JSON Flattening mapping + Mapping map[string][]string `json:"mapping,omitempty" yaml:"mapping,omitempty"` // severity parser fields - SeverityMapping map[string][]string `json:"mapping,omitempty" yaml:"mapping,omitempty"` - OverwriteSeverityText bool `json:"overwrite_text,omitempty" yaml:"overwrite_text,omitempty"` + OverwriteSeverityText bool `json:"overwrite_text,omitempty" yaml:"overwrite_text,omitempty"` +} + +func (op PipelineOperator) MarshalJSON() ([]byte, error) { + type Alias PipelineOperator + + p := Alias(op) + if p.TraceParser != nil { + if p.TraceId != nil && len(p.TraceId.ParseFrom) < 1 { + p.TraceId = nil + } + if p.SpanId != nil && len(p.SpanId.ParseFrom) < 1 { + p.SpanId = nil + } + if p.TraceFlags != nil && len(p.TraceFlags.ParseFrom) < 1 { + p.TraceFlags = nil + } + } + + return json.Marshal(p) +} + +func (op PipelineOperator) MarshalYAML() (interface{}, error) { + type Alias PipelineOperator + alias := Alias(op) + + if 
alias.TraceParser != nil { + if alias.TraceParser.TraceId != nil && len(alias.TraceParser.TraceId.ParseFrom) < 1 { + alias.TraceParser.TraceId = nil + } + if alias.TraceParser.SpanId != nil && len(alias.TraceParser.SpanId.ParseFrom) < 1 { + alias.TraceParser.SpanId = nil + } + if alias.TraceParser.TraceFlags != nil && len(alias.TraceParser.TraceFlags.ParseFrom) < 1 { + alias.TraceParser.TraceFlags = nil + } + } + + return alias, nil } type TimestampParser struct { @@ -206,6 +277,12 @@ func isValidOperator(op PipelineOperator) error { if op.ParseFrom == "" && op.ParseTo == "" { return fmt.Errorf("parse from and parse to of %s json operator cannot be empty", op.ID) } + + for k := range op.Mapping { + if !slices.Contains(validMappingVariableTypes, strings.ToLower(k)) { + return fmt.Errorf("%s is not a valid mapping type in processor %s", k, op.ID) + } + } case "grok_parser": if op.Pattern == "" { return fmt.Errorf("pattern of %s grok operator cannot be empty", op.ID) @@ -306,8 +383,7 @@ func isValidOperator(op PipelineOperator) error { return fmt.Errorf("parse from of severity parsing processor %s cannot be empty", op.ID) } - validMappingLevels := []string{"trace", "debug", "info", "warn", "error", "fatal"} - for k := range op.SeverityMapping { + for k := range op.Mapping { if !slices.Contains(validMappingLevels, strings.ToLower(k)) { return fmt.Errorf("%s is not a valid severity in processor %s", k, op.ID) } diff --git a/pkg/types/pipelinetypes/postable_pipeline_test.go b/pkg/types/pipelinetypes/postable_pipeline_test.go index 832a7860084b..4713ecedb2a4 100644 --- a/pkg/types/pipelinetypes/postable_pipeline_test.go +++ b/pkg/types/pipelinetypes/postable_pipeline_test.go @@ -332,7 +332,7 @@ var operatorTest = []struct { ID: "severity", Type: "severity_parser", ParseFrom: "attributes.test_severity", - SeverityMapping: map[string][]string{ + Mapping: map[string][]string{ "trace": {"test_trace"}, "fatal": {"test_fatal"}, }, @@ -344,7 +344,7 @@ var operatorTest = []struct 
{ Operator: PipelineOperator{ ID: "severity", Type: "severity_parser", - SeverityMapping: map[string][]string{}, + Mapping: map[string][]string{}, OverwriteSeverityText: true, }, IsValid: false, @@ -354,7 +354,7 @@ var operatorTest = []struct { ID: "severity", Type: "severity_parser", ParseFrom: "attributes.test", - SeverityMapping: map[string][]string{ + Mapping: map[string][]string{ "not-a-level": {"bad-level"}, }, OverwriteSeverityText: true, diff --git a/pkg/types/preferencetypes/name.go b/pkg/types/preferencetypes/name.go index 2ff280e0fe92..00e1852afe71 100644 --- a/pkg/types/preferencetypes/name.go +++ b/pkg/types/preferencetypes/name.go @@ -18,6 +18,7 @@ var ( NameWelcomeChecklistSetupSavedViewSkipped = Name{valuer.NewString("welcome_checklist_setup_saved_view_skipped")} NameSidenavPinned = Name{valuer.NewString("sidenav_pinned")} NameNavShortcuts = Name{valuer.NewString("nav_shortcuts")} + NameLastSeenChangelogVersion = Name{valuer.NewString("last_seen_changelog_version")} ) type Name struct{ valuer.String } @@ -35,6 +36,7 @@ func NewName(name string) (Name, error) { NameWelcomeChecklistSetupSavedViewSkipped.StringValue(), NameSidenavPinned.StringValue(), NameNavShortcuts.StringValue(), + NameLastSeenChangelogVersion.StringValue(), }, name, ) diff --git a/pkg/types/preferencetypes/preference.go b/pkg/types/preferencetypes/preference.go index d1461797f26e..fafa5b6d8c74 100644 --- a/pkg/types/preferencetypes/preference.go +++ b/pkg/types/preferencetypes/preference.go @@ -145,6 +145,15 @@ func NewAvailablePreference() map[Name]Preference { AllowedValues: []string{}, Value: MustNewValue([]any{}, ValueTypeArray), }, + NameLastSeenChangelogVersion: { + Name: NameLastSeenChangelogVersion, + Description: "Changelog version seen by the user.", + ValueType: ValueTypeString, + DefaultValue: MustNewValue("", ValueTypeString), + AllowedScopes: []Scope{ScopeUser}, + AllowedValues: []string{}, + Value: MustNewValue("", ValueTypeString), + }, } } diff --git 
a/pkg/types/querybuildertypes/querybuildertypesv5/functions.go b/pkg/types/querybuildertypes/querybuildertypesv5/functions.go index c4592e351cd7..837c4a473cf2 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/functions.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/functions.go @@ -376,8 +376,8 @@ func funcFillZero(result *TimeSeries, start, end, step int64) *TimeSeries { return result } - alignedStart := (start / step) * step - alignedEnd := ((end + step - 1) / step) * step + alignedStart := start - (start % (step * 1000)) + alignedEnd := end existingValues := make(map[int64]*TimeSeriesValue) for _, v := range result.Values { @@ -386,7 +386,7 @@ func funcFillZero(result *TimeSeries, start, end, step int64) *TimeSeries { filledValues := make([]*TimeSeriesValue, 0) - for ts := alignedStart; ts <= alignedEnd; ts += step { + for ts := alignedStart; ts <= alignedEnd; ts += step * 1000 { if val, exists := existingValues[ts]; exists { filledValues = append(filledValues, val) } else { diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/functions_test.go b/pkg/types/querybuildertypes/querybuildertypesv5/functions_test.go index f16a30a595cc..3b62bdc0a729 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/functions_test.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/functions_test.go @@ -698,7 +698,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 3000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -717,7 +717,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 3000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -737,7 +737,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 6000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -761,7 +761,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 
6000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -780,7 +780,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 3000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 0}, @@ -798,7 +798,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 3000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -820,7 +820,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 1000, end: 4000, - step: 1000, + step: 1, expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 1000, Value: 1.0}, @@ -841,7 +841,7 @@ func TestFuncFillZero(t *testing.T) { }, start: 50000, // Not aligned to 60s end: 250000, // Not aligned to 60s - step: 60000, // 60 seconds + step: 60, // 60 seconds expected: &TimeSeries{ Values: []*TimeSeriesValue{ {Timestamp: 0, Value: 0}, // Aligned start @@ -849,7 +849,6 @@ func TestFuncFillZero(t *testing.T) { {Timestamp: 120000, Value: 2.0}, {Timestamp: 180000, Value: 0}, // Filled gap {Timestamp: 240000, Value: 4.0}, - {Timestamp: 300000, Value: 0}, // Aligned end }, }, }, @@ -891,7 +890,7 @@ func TestApplyFunction_FillZero(t *testing.T) { Args: []FunctionArg{ {Value: 1000.0}, // start {Value: 4000.0}, // end - {Value: 1000.0}, // step + {Value: 1.0}, // step }, } diff --git a/pkg/types/telemetrytypes/field.go b/pkg/types/telemetrytypes/field.go index cb8a08883ca9..38928adcc952 100644 --- a/pkg/types/telemetrytypes/field.go +++ b/pkg/types/telemetrytypes/field.go @@ -36,6 +36,9 @@ func (f TelemetryFieldKey) String() string { if f.FieldDataType != FieldDataTypeUnspecified { sb.WriteString(fmt.Sprintf(",type=%s", f.FieldDataType.StringValue())) } + if f.Materialized { + sb.WriteString(",materialized") + } return sb.String() } @@ -146,41 +149,86 @@ func DataTypeCollisionHandledFieldName(key *TelemetryFieldKey, value any, tblFie // So we handle the data type 
collisions here switch key.FieldDataType { case FieldDataTypeString: - switch value.(type) { + switch v := value.(type) { case float64: // try to convert the string value to to number - tblFieldName = fmt.Sprintf(`toFloat64OrNull(%s)`, tblFieldName) + tblFieldName = castFloat(tblFieldName) case []any: - areFloats := true - for _, v := range value.([]any) { - if _, ok := v.(float64); !ok { - areFloats = false - break - } - } - if areFloats { - tblFieldName = fmt.Sprintf(`toFloat64OrNull(%s)`, tblFieldName) + if allFloats(v) { + tblFieldName = castFloat(tblFieldName) + } else if hasString(v) { + _, value = castString(tblFieldName), toStrings(v) } case bool: // we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string - value = fmt.Sprintf("%t", value) - case string: - // nothing to do + value = fmt.Sprintf("%t", v) } + case FieldDataTypeFloat64, FieldDataTypeInt64, FieldDataTypeNumber: - switch value.(type) { + switch v := value.(type) { + // why? ; CH returns an error for a simple check + // attributes_number['http.status_code'] = 200 but not for attributes_number['http.status_code'] >= 200 + // DB::Exception: Bad get: has UInt64, requested Float64. + // How is it working in v4? 
v4 prepares the full query with values in query string + // When we format the float it becomes attributes_number['http.status_code'] = 200.000 + // Which CH gladly accepts and doesn't throw error + // However, when passed as query args, the default formatter + // https://github.com/ClickHouse/clickhouse-go/blob/757e102f6d8c6059d564ce98795b4ce2a101b1a5/bind.go#L393 + // is used which prepares the + // final query as attributes_number['http.status_code'] = 200 giving this error + // This following is one way to workaround it + case float32, float64: + tblFieldName = castFloatHack(tblFieldName) case string: - // try to convert the string value to to number - tblFieldName = fmt.Sprintf(`toString(%s)`, tblFieldName) - case float64: - // nothing to do + // try to convert the number attribute to string + tblFieldName = castString(tblFieldName) // numeric col vs string literal + case []any: + if allFloats(v) { + tblFieldName = castFloatHack(tblFieldName) + } else if hasString(v) { + tblFieldName, value = castString(tblFieldName), toStrings(v) + } } + case FieldDataTypeBool: - switch value.(type) { + switch v := value.(type) { case string: - // try to convert the string value to to number - tblFieldName = fmt.Sprintf(`toString(%s)`, tblFieldName) + tblFieldName = castString(tblFieldName) + case []any: + if hasString(v) { + tblFieldName, value = castString(tblFieldName), toStrings(v) + } } } return tblFieldName, value } + +func castFloat(col string) string { return fmt.Sprintf("toFloat64OrNull(%s)", col) } +func castFloatHack(col string) string { return fmt.Sprintf("toFloat64(%s)", col) } +func castString(col string) string { return fmt.Sprintf("toString(%s)", col) } + +func allFloats(in []any) bool { + for _, x := range in { + if _, ok := x.(float64); !ok { + return false + } + } + return true +} + +func hasString(in []any) bool { + for _, x := range in { + if _, ok := x.(string); ok { + return true + } + } + return false +} + +func toStrings(in []any) []any { + out := 
make([]any, len(in)) + for i, x := range in { + out[i] = fmt.Sprintf("%v", x) + } + return out +} diff --git a/pkg/types/telemetrytypes/field_test.go b/pkg/types/telemetrytypes/field_test.go index 179e6ee093b7..dc2494ab4127 100644 --- a/pkg/types/telemetrytypes/field_test.go +++ b/pkg/types/telemetrytypes/field_test.go @@ -2,6 +2,8 @@ package telemetrytypes import ( "testing" + + "github.com/stretchr/testify/assert" ) func TestGetFieldKeyFromKeyText(t *testing.T) { @@ -91,3 +93,115 @@ func TestGetFieldKeyFromKeyText(t *testing.T) { } } } + +func TestDataTypeCollisionHandledFieldName(t *testing.T) { + tests := []struct { + name string + key *TelemetryFieldKey + value any + tblFieldName string + expectedFieldName string + expectedValue any + }{ + { + name: "http_status_code_string_field_with_numeric_value", + key: &TelemetryFieldKey{ + Name: "http.status_code", + FieldDataType: FieldDataTypeString, + }, + value: float64(200), + tblFieldName: "attribute_string_http$$status_code", + expectedFieldName: "toFloat64OrNull(attribute_string_http$$status_code)", + expectedValue: float64(200), + }, + { + name: "service_enabled_string_field_with_bool_value", + key: &TelemetryFieldKey{ + Name: "service.enabled", + FieldDataType: FieldDataTypeString, + }, + value: true, + tblFieldName: "attribute_string_service$$enabled", + expectedFieldName: "attribute_string_service$$enabled", + expectedValue: "true", + }, + { + name: "http_method_string_field_with_string_value", + key: &TelemetryFieldKey{ + Name: "http.method", + FieldDataType: FieldDataTypeString, + }, + value: "GET", + tblFieldName: "attribute_string_http$$method", + expectedFieldName: "attribute_string_http$$method", + expectedValue: "GET", + }, + { + name: "response_times_string_field_with_numeric_array", + key: &TelemetryFieldKey{ + Name: "response.times", + FieldDataType: FieldDataTypeString, + }, + value: []any{float64(100.5), float64(200.3), float64(150.7)}, + tblFieldName: "attribute_string_response$$times", + 
expectedFieldName: "toFloat64OrNull(attribute_string_response$$times)", + expectedValue: []any{float64(100.5), float64(200.3), float64(150.7)}, + }, + { + name: "error_codes_string_field_with_mixed_array", + key: &TelemetryFieldKey{ + Name: "error.codes", + FieldDataType: FieldDataTypeString, + }, + value: []any{float64(500), "TIMEOUT", float64(503)}, + tblFieldName: "attribute_string_error$$codes", + expectedFieldName: "attribute_string_error$$codes", + expectedValue: []any{"500", "TIMEOUT", "503"}, + }, + + // numbers + { + name: "http_request_duration_float_field_with_string_value", + key: &TelemetryFieldKey{ + Name: "http.request.duration", + FieldDataType: FieldDataTypeFloat64, + }, + value: "1234.56", + tblFieldName: "attribute_float64_http$$request$$duration", + expectedFieldName: "toString(attribute_float64_http$$request$$duration)", + expectedValue: "1234.56", + }, + + // bools + { + name: "feature_enabled_bool_field_with_string_value", + key: &TelemetryFieldKey{ + Name: "feature.enabled", + FieldDataType: FieldDataTypeBool, + }, + value: "true", + tblFieldName: "attribute_bool_feature$$enabled", + expectedFieldName: "toString(attribute_bool_feature$$enabled)", + expectedValue: "true", + }, + { + name: "feature_flags_bool_field_with_mixed_array", + key: &TelemetryFieldKey{ + Name: "feature.flags", + FieldDataType: FieldDataTypeBool, + }, + value: []any{true, "enabled", false}, + tblFieldName: "attribute_bool_feature$$flags", + expectedFieldName: "toString(attribute_bool_feature$$flags)", + expectedValue: []any{"true", "enabled", "false"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resultFieldName, resultValue := DataTypeCollisionHandledFieldName(tt.key, tt.value, tt.tblFieldName) + assert.Equal(t, tt.expectedFieldName, resultFieldName) + assert.Equal(t, tt.expectedValue, resultValue) + }) + } +} diff --git a/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go 
b/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go index 41e7065689c8..63bc0b4e7bca 100644 --- a/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go +++ b/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go @@ -179,7 +179,10 @@ func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.T } // Check field context + // check for the context filter only for attribute and resource attribute if selector.FieldContext != telemetrytypes.FieldContextUnspecified && + (selector.FieldContext == telemetrytypes.FieldContextAttribute || + selector.FieldContext == telemetrytypes.FieldContextResource) && selector.FieldContext != key.FieldContext { return false } diff --git a/pkg/version/deployment.go b/pkg/version/deployment.go index e87834528f28..f1ba092998ce 100644 --- a/pkg/version/deployment.go +++ b/pkg/version/deployment.go @@ -106,6 +106,10 @@ func detectPlatform() string { return "render" case os.Getenv("COOLIFY_RESOURCE_UUID") != "": return "coolify" + case os.Getenv("RAILWAY_SERVICE_ID") != "": + return "railway" + case os.Getenv("ECS_CONTAINER_METADATA_URI_V4") != "": + return "ecs" } // Try to detect cloud provider through metadata endpoints @@ -163,6 +167,5 @@ func detectPlatform() string { } } - return "unknown" } diff --git a/templates/email/update_role.gotmpl b/templates/email/update_role.gotmpl new file mode 100644 index 000000000000..8815c6a2b181 --- /dev/null +++ b/templates/email/update_role.gotmpl @@ -0,0 +1,23 @@ + + + +

Hi {{.CustomerName}},

+ +

We wanted to inform you that your role in the SigNoz project has been updated by {{.UpdatedByEmail}}.

+ +

+ Previous Role: {{.OldRole}}
+ New Role: {{.NewRole}} +

+ +

+ Please note that you will need to log out and log back in for the changes to take effect. +

+ +

+ If you were not expecting this change or have any questions, please reach out to your project administrator or contact us at support@signoz.io. +

+ +

Thanks,
The SigNoz Team

+ + \ No newline at end of file diff --git a/tests/integration/fixtures/signoz.py b/tests/integration/fixtures/signoz.py index e5b02e85810e..393122c8663e 100644 --- a/tests/integration/fixtures/signoz.py +++ b/tests/integration/fixtures/signoz.py @@ -53,7 +53,7 @@ def signoz( # Build the image self = DockerImage( path="../../", - dockerfile_path="ee/query-service/Dockerfile.integration", + dockerfile_path="cmd/enterprise/Dockerfile.integration", tag="signoz:integration", )