diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 6260fd594062..0691744c3223 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -32,9 +32,7 @@ ex: > Tag the relevant teams for review: -- [ ] @SigNoz/frontend -- [ ] @SigNoz/backend -- [ ] @SigNoz/devops +- frontend / backend / devops --- diff --git a/.github/workflows/build-enterprise.yaml b/.github/workflows/build-enterprise.yaml index 4031abed106a..99b80bafa700 100644 --- a/.github/workflows/build-enterprise.yaml +++ b/.github/workflows/build-enterprise.yaml @@ -67,9 +67,8 @@ jobs: echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> frontend/.env echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> frontend/.env echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env - echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> frontend/.env - echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> frontend/.env - echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> frontend/.env + echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env + echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env - name: cache-dotenv uses: actions/cache@v4 with: diff --git a/.github/workflows/build-staging.yaml b/.github/workflows/build-staging.yaml index 0effc134928b..9006f0233955 100644 --- a/.github/workflows/build-staging.yaml +++ b/.github/workflows/build-staging.yaml @@ -66,7 +66,8 @@ jobs: echo 'CI=1' > frontend/.env echo 'TUNNEL_URL="${{ secrets.NP_TUNNEL_URL }}"' >> frontend/.env echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env - echo 'USERPILOT_KEY="${{ secrets.NP_USERPILOT_KEY }}"' >> frontend/.env + echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env + echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env - name: cache-dotenv uses: actions/cache@v4 with: diff --git a/.github/workflows/gor-signoz.yaml b/.github/workflows/gor-signoz.yaml index a74f5aa92dae..4f8f923fe834 100644 --- a/.github/workflows/gor-signoz.yaml +++ b/.github/workflows/gor-signoz.yaml @@ -33,9 +33,8 @@ jobs: echo 'TUNNEL_URL="${{ secrets.TUNNEL_URL }}"' >> .env echo 'TUNNEL_DOMAIN="${{ secrets.TUNNEL_DOMAIN }}"' >> .env echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env - echo 'CUSTOMERIO_ID="${{ secrets.CUSTOMERIO_ID }}"' >> .env - echo 'CUSTOMERIO_SITE_ID="${{ secrets.CUSTOMERIO_SITE_ID }}"' >> .env - echo 'USERPILOT_KEY="${{ secrets.USERPILOT_KEY }}"' >> .env + echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env + echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env - name: build-frontend run: make js-build - name: upload-frontend-artifact diff --git a/.golangci.yml b/.golangci.yml index bd48535d9425..fa16191b1727 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,4 +1,33 @@ +linters: + default: standard + enable: + - bodyclose + - misspell + - nilnil + - sloglint + - depguard + - iface + +linters-settings: + sloglint: + no-mixed-args: true + kv-only: true + no-global: all + context: all + static-msg: true + msg-style: lowercased + key-naming-case: snake + depguard: + rules: + nozap: + deny: + - pkg: "go.uber.org/zap" + desc: "Do not use zap logger. Use slog instead." + iface: + enable: + - identical issues: exclude-dirs: - "pkg/query-service" - "ee/query-service" + - "scripts/" diff --git a/ee/licensing/config.go b/ee/licensing/config.go new file mode 100644 index 000000000000..598724d8e363 --- /dev/null +++ b/ee/licensing/config.go @@ -0,0 +1,26 @@ +package licensing + +import ( + "fmt" + "sync" + "time" + + "github.com/SigNoz/signoz/pkg/licensing" +) + +var ( + config licensing.Config + once sync.Once +) + +// initializes the licensing configuration +func Config(pollInterval time.Duration, failureThreshold int) licensing.Config { + once.Do(func() { + config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold} + if err := config.Validate(); err != nil { + panic(fmt.Errorf("invalid licensing config: %w", err)) + } + }) + + return config +} diff --git a/ee/licensing/httplicensing/api.go b/ee/licensing/httplicensing/api.go new file mode 100644 index 000000000000..9f9bc1f5da98 --- /dev/null +++ b/ee/licensing/httplicensing/api.go @@ -0,0 +1,168 @@ +package httplicensing + +import ( + "context" + "encoding/json" + "net/http" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/types/licensetypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type licensingAPI struct { + licensing licensing.Licensing +} + +func NewLicensingAPI(licensing licensing.Licensing) licensing.API { + return &licensingAPI{licensing: licensing} +} + +func (api *licensingAPI) Activate(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + + req := new(licensetypes.PostableLicense) + err = json.NewDecoder(r.Body).Decode(&req) + if err != nil { + render.Error(rw, err) + return + } + + err = api.licensing.Activate(r.Context(), orgID, req.Key) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusAccepted, nil) +} + +func (api *licensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + + license, err := api.licensing.GetActive(r.Context(), orgID) + if err != nil { + render.Error(rw, err) + return + } + + gettableLicense := licensetypes.NewGettableLicense(license.Data, license.Key) + render.Success(rw, http.StatusOK, gettableLicense) +} + +func (api *licensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + + err = api.licensing.Refresh(r.Context(), orgID) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) +} + +func (api *licensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + + req := new(licensetypes.PostableSubscription) + if err := json.NewDecoder(r.Body).Decode(req); err != nil { + render.Error(rw, err) + return + } + + gettableSubscription, err := api.licensing.Checkout(ctx, orgID, req) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusCreated, gettableSubscription) +} + +func (api *licensingAPI) Portal(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + + req := new(licensetypes.PostableSubscription) + if err := json.NewDecoder(r.Body).Decode(req); err != nil { + render.Error(rw, err) + return + } + + gettableSubscription, err := api.licensing.Portal(ctx, orgID, req) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusCreated, gettableSubscription) +} diff --git a/ee/licensing/httplicensing/provider.go b/ee/licensing/httplicensing/provider.go new file mode 100644 index 000000000000..0c63ff295bdc --- /dev/null +++ b/ee/licensing/httplicensing/provider.go @@ -0,0 +1,280 @@ +package httplicensing + +import ( + "context" + "encoding/json" + "time" + + "github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/types/licensetypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/SigNoz/signoz/pkg/zeus" + "github.com/tidwall/gjson" +) + +type provider struct { + store licensetypes.Store + zeus zeus.Zeus + config licensing.Config + settings factory.ScopedProviderSettings + stopChan chan struct{} +} + +func NewProviderFactory(store sqlstore.SQLStore, zeus zeus.Zeus) factory.ProviderFactory[licensing.Licensing, licensing.Config] { + return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) { + return New(ctx, providerSettings, config, store, zeus) + }) +} + +func New(ctx context.Context, ps factory.ProviderSettings, config licensing.Config, sqlstore sqlstore.SQLStore, zeus zeus.Zeus) (licensing.Licensing, error) { + settings := factory.NewScopedProviderSettings(ps, "github.com/SigNoz/signoz/ee/licensing/httplicensing") + licensestore := sqllicensingstore.New(sqlstore) + return &provider{store: licensestore, zeus: zeus, config: config, settings: settings, stopChan: make(chan struct{})}, nil +} + +func (provider *provider) Start(ctx context.Context) error { + tick := time.NewTicker(provider.config.PollInterval) + defer tick.Stop() + + err := provider.Validate(ctx) + if err != nil { + provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err) + } + + for { + select { + case <-provider.stopChan: + return nil + case <-tick.C: + err := provider.Validate(ctx) + if err != nil { + provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err) + } + } + } +} + +func (provider *provider) Stop(ctx context.Context) error { + provider.settings.Logger().DebugContext(ctx, "license validation stopped") + close(provider.stopChan) + return nil +} + +func (provider *provider) Validate(ctx context.Context) error { + organizations, err := provider.store.ListOrganizations(ctx) + if err != nil { + return err + } + + for _, organizationID := range organizations { + err := provider.Refresh(ctx, organizationID) + if err != nil { + return err + } + } + + if len(organizations) == 0 { + err = provider.InitFeatures(ctx, licensetypes.BasicPlan) + if err != nil { + return err + } + } + + return nil +} + +func (provider *provider) Activate(ctx context.Context, organizationID valuer.UUID, key string) error { + data, err := provider.zeus.GetLicense(ctx, key) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch license data with upstream server") + } + + license, err := licensetypes.NewLicense(data, organizationID) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity") + } + + storableLicense := licensetypes.NewStorableLicenseFromLicense(license) + err = provider.store.Create(ctx, storableLicense) + if err != nil { + return err + } + + err = provider.InitFeatures(ctx, license.Features) + if err != nil { + return err + } + + return nil +} + +func (provider *provider) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) { + storableLicenses, err := provider.store.GetAll(ctx, organizationID) + if err != nil { + return nil, err + } + + activeLicense, err := licensetypes.GetActiveLicenseFromStorableLicenses(storableLicenses, organizationID) + if err != nil { + return nil, err + } + + return activeLicense, nil +} + +func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUID) error { + activeLicense, err := provider.GetActive(ctx, organizationID) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue()) + return err + } + + if err != nil && errors.Ast(err, errors.TypeNotFound) { + provider.settings.Logger().DebugContext(ctx, "no active license found, defaulting to basic plan", "org_id", organizationID.StringValue()) + err = provider.InitFeatures(ctx, licensetypes.BasicPlan) + if err != nil { + return err + } + return nil + } + + data, err := provider.zeus.GetLicense(ctx, activeLicense.Key) + if err != nil { + if time.Since(activeLicense.LastValidatedAt) > time.Duration(provider.config.FailureThreshold)*provider.config.PollInterval { + provider.settings.Logger().ErrorContext(ctx, "license validation failed for consecutive poll intervals, defaulting to basic plan", "failure_threshold", provider.config.FailureThreshold, "license_id", activeLicense.ID.StringValue(), "org_id", organizationID.StringValue()) + err = provider.InitFeatures(ctx, licensetypes.BasicPlan) + if err != nil { + return err + } + return nil + } + return err + } + + err = activeLicense.Update(data) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create license entity from license data") + } + + updatedStorableLicense := licensetypes.NewStorableLicenseFromLicense(activeLicense) + err = provider.store.Update(ctx, organizationID, updatedStorableLicense) + if err != nil { + return err + } + + return nil +} + +func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) { + activeLicense, err := provider.GetActive(ctx, organizationID) + if err != nil { + return nil, err + } + + body, err := json.Marshal(postableSubscription) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal checkout payload") + } + + response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session") + } + + return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil +} + +func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) { + activeLicense, err := provider.GetActive(ctx, organizationID) + if err != nil { + return nil, err + } + + body, err := json.Marshal(postableSubscription) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal portal payload") + } + + response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session") + } + + return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil +} + +// feature surrogate +func (provider *provider) CheckFeature(ctx context.Context, key string) error { + feature, err := provider.store.GetFeature(ctx, key) + if err != nil { + return err + } + if feature.Active { + return nil + } + return errors.Newf(errors.TypeUnsupported, licensing.ErrCodeFeatureUnavailable, "feature unavailable: %s", key) +} + +func (provider *provider) GetFeatureFlag(ctx context.Context, key string) (*featuretypes.GettableFeature, error) { + featureStatus, err := provider.store.GetFeature(ctx, key) + if err != nil { + return nil, err + } + return &featuretypes.GettableFeature{ + Name: featureStatus.Name, + Active: featureStatus.Active, + Usage: int64(featureStatus.Usage), + UsageLimit: int64(featureStatus.UsageLimit), + Route: featureStatus.Route, + }, nil +} + +func (provider *provider) GetFeatureFlags(ctx context.Context) ([]*featuretypes.GettableFeature, error) { + storableFeatures, err := provider.store.GetAllFeatures(ctx) + if err != nil { + return nil, err + } + + gettableFeatures := make([]*featuretypes.GettableFeature, len(storableFeatures)) + for idx, gettableFeature := range storableFeatures { + gettableFeatures[idx] = &featuretypes.GettableFeature{ + Name: gettableFeature.Name, + Active: gettableFeature.Active, + Usage: int64(gettableFeature.Usage), + UsageLimit: int64(gettableFeature.UsageLimit), + Route: gettableFeature.Route, + } + } + + return gettableFeatures, nil +} + +func (provider *provider) InitFeatures(ctx context.Context, features []*featuretypes.GettableFeature) error { + featureStatus := make([]*featuretypes.StorableFeature, len(features)) + for i, f := range features { + featureStatus[i] = &featuretypes.StorableFeature{ + Name: f.Name, + Active: f.Active, + Usage: int(f.Usage), + UsageLimit: int(f.UsageLimit), + Route: f.Route, + } + } + + return provider.store.InitFeatures(ctx, featureStatus) +} + +func (provider *provider) UpdateFeatureFlag(ctx context.Context, feature *featuretypes.GettableFeature) error { + return provider.store.UpdateFeature(ctx, &featuretypes.StorableFeature{ + Name: feature.Name, + Active: feature.Active, + Usage: int(feature.Usage), + UsageLimit: int(feature.UsageLimit), + Route: feature.Route, + }) +} diff --git a/ee/licensing/licensingstore/sqllicensingstore/store.go b/ee/licensing/licensingstore/sqllicensingstore/store.go new file mode 100644 index 000000000000..5167a5fc00d3 --- /dev/null +++ b/ee/licensing/licensingstore/sqllicensingstore/store.go @@ -0,0 +1,186 @@ +package sqllicensingstore + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/types/licensetypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type store struct { + sqlstore sqlstore.SQLStore +} + +func New(sqlstore sqlstore.SQLStore) licensetypes.Store { + return &store{sqlstore} +} + +func (store *store) Create(ctx context.Context, storableLicense *licensetypes.StorableLicense) error { + _, err := store. + sqlstore. + BunDB(). + NewInsert(). + Model(storableLicense). + Exec(ctx) + if err != nil { + return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "license with ID: %s already exists", storableLicense.ID) + } + + return nil +} + +func (store *store) Get(ctx context.Context, organizationID valuer.UUID, licenseID valuer.UUID) (*licensetypes.StorableLicense, error) { + storableLicense := new(licensetypes.StorableLicense) + err := store. + sqlstore. + BunDB(). + NewSelect(). + Model(storableLicense). + Where("org_id = ?", organizationID). + Where("id = ?", licenseID). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "license with ID: %s does not exist", licenseID) + } + + return storableLicense, nil +} + +func (store *store) GetAll(ctx context.Context, organizationID valuer.UUID) ([]*licensetypes.StorableLicense, error) { + storableLicenses := make([]*licensetypes.StorableLicense, 0) + err := store. + sqlstore. + BunDB(). + NewSelect(). + Model(&storableLicenses). + Where("org_id = ?", organizationID). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "licenses for organizationID: %s does not exists", organizationID) + } + + return storableLicenses, nil +} + +func (store *store) Update(ctx context.Context, organizationID valuer.UUID, storableLicense *licensetypes.StorableLicense) error { + _, err := store. + sqlstore. + BunDB(). + NewUpdate(). + Model(storableLicense). + WherePK(). + Where("org_id = ?", organizationID). + Exec(ctx) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update license with ID: %s", storableLicense.ID) + } + + return nil +} + +func (store *store) ListOrganizations(ctx context.Context) ([]valuer.UUID, error) { + orgIDStrs := make([]string, 0) + err := store.sqlstore. + BunDB(). + NewSelect(). + Model(new(types.Organization)). + Column("id"). + Scan(ctx, &orgIDStrs) + if err != nil { + return nil, err + } + + orgIDs := make([]valuer.UUID, len(orgIDStrs)) + for idx, orgIDStr := range orgIDStrs { + orgID, err := valuer.NewUUID(orgIDStr) + if err != nil { + return nil, err + } + orgIDs[idx] = orgID + } + + return orgIDs, nil + +} + +func (store *store) CreateFeature(ctx context.Context, storableFeature *featuretypes.StorableFeature) error { + _, err := store. + sqlstore. + BunDB(). + NewInsert(). + Model(storableFeature). + Exec(ctx) + if err != nil { + return store.sqlstore.WrapAlreadyExistsErrf(err, errors.CodeAlreadyExists, "feature with name:%s already exists", storableFeature.Name) + } + + return nil +} + +func (store *store) GetFeature(ctx context.Context, key string) (*featuretypes.StorableFeature, error) { + storableFeature := new(featuretypes.StorableFeature) + err := store. + sqlstore. + BunDB(). + NewSelect(). + Model(storableFeature). + Where("name = ?", key). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "feature with name:%s does not exist", key) + } + + return storableFeature, nil +} + +func (store *store) GetAllFeatures(ctx context.Context) ([]*featuretypes.StorableFeature, error) { + storableFeatures := make([]*featuretypes.StorableFeature, 0) + err := store. + sqlstore. + BunDB(). + NewSelect(). + Model(&storableFeatures). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "features do not exist") + } + + return storableFeatures, nil +} + +func (store *store) InitFeatures(ctx context.Context, storableFeatures []*featuretypes.StorableFeature) error { + _, err := store. + sqlstore. + BunDB(). + NewInsert(). + Model(&storableFeatures). + On("CONFLICT (name) DO UPDATE"). + Set("active = EXCLUDED.active"). + Set("usage = EXCLUDED.usage"). + Set("usage_limit = EXCLUDED.usage_limit"). + Set("route = EXCLUDED.route"). + Exec(ctx) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to initialise features") + } + + return nil +} + +func (store *store) UpdateFeature(ctx context.Context, storableFeature *featuretypes.StorableFeature) error { + _, err := store. + sqlstore. + BunDB(). + NewUpdate(). + Model(storableFeature). + Exec(ctx) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to update feature with key: %s", storableFeature.Name) + } + + return nil +} diff --git a/ee/modules/user/impluser/handler.go b/ee/modules/user/impluser/handler.go deleted file mode 100644 index 7d747cfebce9..000000000000 --- a/ee/modules/user/impluser/handler.go +++ /dev/null @@ -1,416 +0,0 @@ -package impluser - -import ( - "context" - "encoding/json" - "net/http" - "slices" - "time" - - "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/http/render" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/valuer" - "github.com/gorilla/mux" -) - -// EnterpriseHandler embeds the base handler implementation -type Handler struct { - user.Handler // Embed the base handler interface - module user.Module -} - -func NewHandler(module user.Module) user.Handler { - baseHandler := impluser.NewHandler(module) - return &Handler{ - Handler: baseHandler, - module: module, - } -} - -func (h *Handler) Login(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - var req types.PostableLoginRequest - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - render.Error(w, err) - return - } - - if req.RefreshToken == "" { - // the EE handler wrapper passes the feature flag value in context - ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool) - if !ok { - render.Error(w, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability")) - return - } - - if ssoAvailable { - _, err := h.module.CanUsePassword(ctx, req.Email) - if err != nil { - render.Error(w, err) - return - } - } - } - - user, err := h.module.GetAuthenticatedUser(ctx, req.OrgID, req.Email, req.Password, req.RefreshToken) - if err != nil { - render.Error(w, err) - return - } - - jwt, err := h.module.GetJWTForUser(ctx, user) - if err != nil { - render.Error(w, err) - return - } - - gettableLoginResponse := &types.GettableLoginResponse{ - GettableUserJwt: jwt, - UserID: user.ID.String(), - } - - render.Success(w, http.StatusOK, gettableLoginResponse) -} - -// Override only the methods you need with enterprise-specific implementations -func (h *Handler) LoginPrecheck(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - // assume user is valid unless proven otherwise and assign default values for rest of the fields - - email := r.URL.Query().Get("email") - sourceUrl := r.URL.Query().Get("ref") - orgID := r.URL.Query().Get("orgID") - - resp, err := h.module.LoginPrecheck(ctx, orgID, email, sourceUrl) - if err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusOK, resp) - -} - -func (h *Handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - req := new(types.PostableAcceptInvite) - if err := json.NewDecoder(r.Body).Decode(req); err != nil { - render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode user")) - return - } - - // get invite object - invite, err := h.module.GetInviteByToken(ctx, req.InviteToken) - if err != nil { - render.Error(w, err) - return - } - - orgDomain, err := h.module.GetAuthDomainByEmail(ctx, invite.Email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - render.Error(w, err) - return - } - - precheckResp := &types.GettableLoginPrecheck{ - SSO: false, - IsUser: false, - } - - if invite.Name == "" && req.DisplayName != "" { - invite.Name = req.DisplayName - } - - user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID) - if err != nil { - render.Error(w, err) - return - } - - if orgDomain != nil && orgDomain.SsoEnabled { - // sso is enabled, create user and respond precheck data - err = h.module.CreateUser(ctx, user) - if err != nil { - render.Error(w, err) - return - } - - // check if sso is enforced for the org - precheckResp, err = h.module.LoginPrecheck(ctx, invite.OrgID, user.Email, req.SourceURL) - if err != nil { - render.Error(w, err) - return - } - - } else { - password, err := types.NewFactorPassword(req.Password) - if err != nil { - render.Error(w, err) - return - } - - _, err = h.module.CreateUserWithPassword(ctx, user, password) - if err != nil { - render.Error(w, err) - return - } - - precheckResp.IsUser = true - } - - // delete the invite - if err := h.module.DeleteInvite(ctx, invite.OrgID, invite.ID); err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusOK, precheckResp) -} - -func (h *Handler) GetInvite(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - token := mux.Vars(r)["token"] - sourceUrl := r.URL.Query().Get("ref") - invite, err := h.module.GetInviteByToken(ctx, token) - if err != nil { - render.Error(w, err) - return - } - - // precheck the user - precheckResp, err := h.module.LoginPrecheck(ctx, invite.OrgID, invite.Email, sourceUrl) - if err != nil { - render.Error(w, err) - return - } - - gettableInvite := &types.GettableEEInvite{ - GettableInvite: *invite, - PreCheck: precheckResp, - } - - render.Success(w, http.StatusOK, gettableInvite) -} - -func (h *Handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(w, err) - return - } - - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - - req := new(types.PostableAPIKey) - if err := json.NewDecoder(r.Body).Decode(req); err != nil { - render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) - return - } - - apiKey, err := types.NewStorableAPIKey( - req.Name, - userID, - req.Role, - req.ExpiresInDays, - ) - if err != nil { - render.Error(w, err) - return - } - - err = h.module.CreateAPIKey(ctx, apiKey) - if err != nil { - render.Error(w, err) - return - } - - createdApiKey, err := h.module.GetAPIKey(ctx, orgID, apiKey.ID) - if err != nil { - render.Error(w, err) - return - } - - // just corrected the status code, response is same, - render.Success(w, http.StatusCreated, createdApiKey) -} - -func (h *Handler) ListAPIKeys(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(w, err) - return - } - - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - apiKeys, err := h.module.ListAPIKeys(ctx, orgID) - if err != nil { - render.Error(w, err) - return - } - - // for backward compatibility - if len(apiKeys) == 0 { - render.Success(w, http.StatusOK, []types.GettableAPIKey{}) - return - } - - result := make([]*types.GettableAPIKey, len(apiKeys)) - for i, apiKey := range apiKeys { - result[i] = types.NewGettableAPIKeyFromStorableAPIKey(apiKey) - } - - render.Success(w, http.StatusOK, result) - -} - -func (h *Handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(w, err) - return - } - - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - - req := types.StorableAPIKey{} - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) - return - } - - idStr := mux.Vars(r)["id"] - id, err := valuer.NewUUID(idStr) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7")) - return - } - - //get the API Key - existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) - if err != nil { - render.Error(w, err) - return - } - - // get the user - createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) - if err != nil { - render.Error(w, err) - return - } - - if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) - return - } - - err = h.module.UpdateAPIKey(ctx, id, &req, userID) - if err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusNoContent, nil) -} - -func (h *Handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) { - ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) - defer cancel() - - claims, err := authtypes.ClaimsFromContext(ctx) - if err != nil { - render.Error(w, err) - return - } - - idStr := mux.Vars(r)["id"] - id, err := valuer.NewUUID(idStr) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7")) - return - } - - orgID, err := valuer.NewUUID(claims.OrgID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) - return - } - - userID, err := valuer.NewUUID(claims.UserID) - if err != nil { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) - return - } - - //get the API Key - existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) - if err != nil { - render.Error(w, err) - return - } - - // get the user - createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) - if err != nil { - render.Error(w, err) - return - } - - if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { - render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) - return - } - - if err := h.module.RevokeAPIKey(ctx, id, userID); err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusNoContent, nil) -} diff --git a/ee/modules/user/impluser/module.go b/ee/modules/user/impluser/module.go deleted file mode 100644 index 92200f2aa310..000000000000 --- a/ee/modules/user/impluser/module.go +++ /dev/null @@ -1,252 +0,0 @@ -package impluser - -import ( - "context" - "fmt" - "net/url" - "strings" - - "github.com/SigNoz/signoz/ee/query-service/constants" - "github.com/SigNoz/signoz/pkg/emailing" - "github.com/SigNoz/signoz/pkg/errors" - "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/modules/user" - baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/types/authtypes" - "github.com/SigNoz/signoz/pkg/valuer" - "go.uber.org/zap" -) - -// EnterpriseModule embeds the base module implementation -type Module struct { - user.Module // Embed the base module implementation - store types.UserStore -} - -func NewModule(store types.UserStore, jwt *authtypes.JWT, emailing emailing.Emailing, providerSettings factory.ProviderSettings) user.Module { - baseModule := baseimpl.NewModule(store, jwt, emailing, providerSettings) - return &Module{ - Module: baseModule, - store: store, - } -} - -func (m *Module) createUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) { - // get auth domain from email domain - _, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return nil, err - } - - // get name from email - parts := strings.Split(email, "@") - if len(parts) < 2 { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") - } - name := parts[0] - - defaultOrgID, err := m.store.GetDefaultOrgID(ctx) - if err != nil { - return nil, err - } - - user, err := types.NewUser(name, email, types.RoleViewer.String(), defaultOrgID) - if err != nil { - return nil, err - } - - err = m.CreateUser(ctx, user) - if err != nil { - return nil, err - } - - return user, nil -} - -func (m *Module) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (string, error) { - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - zap.L().Error("failed to get user with email received from auth provider", zap.String("error", err.Error())) - return "", err - } - user := &types.User{} - - if len(users) == 0 { - newUser, err := m.createUserForSAMLRequest(ctx, email) - user = newUser - if err != nil { - zap.L().Error("failed to create user with email received from auth provider", zap.Error(err)) - return "", err - } - } else { - user = &users[0].User - } - - tokenStore, err := m.GetJWTForUser(ctx, user) - if err != nil { - zap.L().Error("failed to generate token for SSO login user", zap.Error(err)) - return "", err - } - - return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s", - redirectUri, - tokenStore.AccessJwt, - user.ID, - tokenStore.RefreshJwt), nil -} - -func (m *Module) CanUsePassword(ctx context.Context, email string) (bool, error) { - domain, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return false, err - } - - if domain != nil && domain.SsoEnabled { - // sso is enabled, check if the user has admin role - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - return false, err - } - - if len(users) == 0 { - return false, errors.New(errors.TypeNotFound, errors.CodeNotFound, "user not found") - } - - if users[0].Role != types.RoleAdmin.String() { - return false, errors.New(errors.TypeForbidden, errors.CodeForbidden, "auth method not supported") - } - - } - - return true, nil -} - -func (m *Module) LoginPrecheck(ctx context.Context, orgID, email, sourceUrl string) (*types.GettableLoginPrecheck, error) { - resp := &types.GettableLoginPrecheck{IsUser: true, CanSelfRegister: false} - - // check if email is a valid user - users, err := m.GetUsersByEmail(ctx, email) - if err != nil { - return nil, err - } - - if len(users) == 0 { - resp.IsUser = false - } - - // give them an option to select an org - if orgID == "" && len(users) > 1 { - resp.SelectOrg = true - resp.Orgs = make([]string, len(users)) - for i, user := range users { - resp.Orgs[i] = user.OrgID - } - return resp, nil - } - - // select the user with the corresponding orgID - if len(users) > 1 { - found := false - for _, tuser := range users { - if tuser.OrgID == orgID { - // user = tuser - found = true - break - } - } - if !found { - resp.IsUser = false - return resp, nil - } - } - - // the EE handler wrapper passes the feature flag value in context - ssoAvailable, ok := ctx.Value(types.SSOAvailable).(bool) - if !ok { - zap.L().Error("failed to retrieve ssoAvailable from context") - return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to retrieve SSO availability") - } - - if ssoAvailable { - - // TODO(Nitya): in multitenancy this should use orgId as well. - orgDomain, err := m.GetAuthDomainByEmail(ctx, email) - if err != nil && !errors.Ast(err, errors.TypeNotFound) { - return nil, err - } - - if orgDomain != nil && orgDomain.SsoEnabled { - // this is to allow self registration - resp.IsUser = true - - // saml is enabled for this domain, lets prepare sso url - if sourceUrl == "" { - sourceUrl = constants.GetDefaultSiteURL() - } - - // parse source url that generated the login request - var err error - escapedUrl, _ := url.QueryUnescape(sourceUrl) - siteUrl, err := url.Parse(escapedUrl) - if err != nil { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse referer") - } - - // build Idp URL that will authenticat the user - // the front-end will redirect user to this url - resp.SSOUrl, err = orgDomain.BuildSsoUrl(siteUrl) - if err != nil { - zap.L().Error("failed to prepare saml request for domain", zap.String("domain", orgDomain.Name), zap.Error(err)) - return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to prepare saml request for domain") - } - - // set SSO to true, as the url is generated correctly - resp.SSO = true - } - } - return resp, nil -} - -func (m *Module) GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) { - - if email == "" { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") - } - - components := strings.Split(email, "@") - if len(components) < 2 { - return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") - } - - domain, err := m.store.GetDomainByName(ctx, components[1]) - if err != nil { - return nil, err - } - - gettableDomain := &types.GettableOrgDomain{StorableOrgDomain: *domain} - if err := gettableDomain.LoadConfig(domain.Data); err != nil { - return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to load domain config") - } - return gettableDomain, nil -} - -func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { - return m.store.CreateAPIKey(ctx, apiKey) -} - -func (m *Module) UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *types.StorableAPIKey, updaterID valuer.UUID) error { - return m.store.UpdateAPIKey(ctx, id, apiKey, updaterID) -} - -func (m *Module) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.StorableAPIKeyUser, error) { - return m.store.ListAPIKeys(ctx, orgID) -} - -func (m *Module) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.StorableAPIKeyUser, error) { - return m.store.GetAPIKey(ctx, orgID, id) -} - -func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UUID) error { - return m.store.RevokeAPIKey(ctx, id, removedByUserID) -} diff --git a/ee/modules/user/impluser/store.go b/ee/modules/user/impluser/store.go deleted file mode 100644 index cbd23478d79d..000000000000 --- a/ee/modules/user/impluser/store.go +++ /dev/null @@ -1,37 +0,0 @@ -package impluser - -import ( - "context" - - "github.com/SigNoz/signoz/pkg/errors" - baseimpl "github.com/SigNoz/signoz/pkg/modules/user/impluser" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" -) - -type store struct { - *baseimpl.Store - sqlstore sqlstore.SQLStore -} - -func NewStore(sqlstore sqlstore.SQLStore) types.UserStore { - baseStore := baseimpl.NewStore(sqlstore).(*baseimpl.Store) - return &store{ - Store: baseStore, - sqlstore: sqlstore, - } -} - -func (s *store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) { - domain := new(types.StorableOrgDomain) - err := s.sqlstore.BunDB().NewSelect(). - Model(domain). - Where("name = ?", name). - Limit(1). - Scan(ctx) - - if err != nil { - return nil, errors.Wrapf(err, errors.TypeNotFound, errors.CodeNotFound, "failed to get domain from name") - } - return domain, nil -} diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index 812b83b73d4e..40a28944daa5 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -6,42 +6,30 @@ import ( "net/http/httputil" "time" - "github.com/SigNoz/signoz/ee/query-service/dao" + "github.com/SigNoz/signoz/ee/licensing/httplicensing" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway" "github.com/SigNoz/signoz/ee/query-service/interfaces" - "github.com/SigNoz/signoz/ee/query-service/license" - "github.com/SigNoz/signoz/ee/query-service/model" "github.com/SigNoz/signoz/ee/query-service/usage" "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/apis/fields" - "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/http/middleware" - "github.com/SigNoz/signoz/pkg/http/render" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" baseapp "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" "github.com/SigNoz/signoz/pkg/query-service/app/integrations" "github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline" - baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces" basemodel "github.com/SigNoz/signoz/pkg/query-service/model" rules "github.com/SigNoz/signoz/pkg/query-service/rules" "github.com/SigNoz/signoz/pkg/signoz" - "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" "github.com/gorilla/mux" - "go.uber.org/zap" ) type APIHandlerOptions struct { DataConnector interfaces.DataConnector PreferSpanMetrics bool - AppDao dao.ModelDao RulesManager *rules.Manager UsageManager *usage.Manager - FeatureFlags baseint.FeatureLookup - LicenseManager *license.Manager IntegrationsController *integrations.Controller CloudIntegrationsController *cloudintegrations.Controller LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController @@ -61,22 +49,18 @@ type APIHandler struct { // NewAPIHandler returns an APIHandler func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) { - quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(signoz.SQLStore)) - quickFilter := quickfilter.NewAPI(quickfiltermodule) baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{ Reader: opts.DataConnector, PreferSpanMetrics: opts.PreferSpanMetrics, RuleManager: opts.RulesManager, - FeatureFlags: opts.FeatureFlags, IntegrationsController: opts.IntegrationsController, CloudIntegrationsController: opts.CloudIntegrationsController, LogsParsingPipelineController: opts.LogsParsingPipelineController, FluxInterval: opts.FluxInterval, AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager), - FieldsAPI: fields.NewAPI(signoz.TelemetryStore), + LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing), + FieldsAPI: fields.NewAPI(signoz.TelemetryStore, signoz.Instrumentation.Logger()), Signoz: signoz, - QuickFilters: quickFilter, - QuickFilterModule: quickfiltermodule, }) if err != nil { @@ -90,32 +74,20 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, return ah, nil } -func (ah *APIHandler) FF() baseint.FeatureLookup { - return ah.opts.FeatureFlags -} - func (ah *APIHandler) RM() *rules.Manager { return ah.opts.RulesManager } -func (ah *APIHandler) LM() *license.Manager { - return ah.opts.LicenseManager -} - func (ah *APIHandler) UM() *usage.Manager { return ah.opts.UsageManager } -func (ah *APIHandler) AppDao() dao.ModelDao { - return ah.opts.AppDao -} - func (ah *APIHandler) Gateway() *httputil.ReverseProxy { return ah.opts.Gateway } -func (ah *APIHandler) CheckFeature(f string) bool { - err := ah.FF().CheckFeature(f) +func (ah *APIHandler) CheckFeature(ctx context.Context, key string) bool { + err := ah.Signoz.Licensing.CheckFeature(ctx, key) return err == nil } @@ -126,43 +98,29 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { // routes available only in ee version router.HandleFunc("/api/v1/featureFlags", am.OpenAccess(ah.getFeatureFlags)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(ah.loginPrecheck)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(ah.Signoz.Handlers.User.LoginPrecheck)).Methods(http.MethodGet) // invite - router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.getInvite)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(ah.acceptInvite)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/invite/{token}", am.OpenAccess(ah.Signoz.Handlers.User.GetInvite)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/invite/accept", am.OpenAccess(ah.Signoz.Handlers.User.AcceptInvite)).Methods(http.MethodPost) // paid plans specific routes router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/complete/google", am.OpenAccess(ah.receiveGoogleAuth)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/orgs/{orgId}/domains", am.AdminAccess(ah.listDomainsByOrg)).Methods(http.MethodGet) - - router.HandleFunc("/api/v1/domains", am.AdminAccess(ah.postDomain)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.putDomain)).Methods(http.MethodPut) - router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(ah.deleteDomain)).Methods(http.MethodDelete) // base overrides router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/login", am.OpenAccess(ah.loginUser)).Methods(http.MethodPost) - // PAT APIs - router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.Signoz.Handlers.User.CreateAPIKey)).Methods(http.MethodPost) - router.HandleFunc("/api/v1/pats", am.AdminAccess(ah.Signoz.Handlers.User.ListAPIKeys)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.Signoz.Handlers.User.UpdateAPIKey)).Methods(http.MethodPut) - router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(ah.Signoz.Handlers.User.RevokeAPIKey)).Methods(http.MethodDelete) - - router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.LicensingAPI.Checkout)).Methods(http.MethodPost) router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.LicensingAPI.Portal)).Methods(http.MethodPost) router.HandleFunc("/api/v1/dashboards/{uuid}/lock", am.EditAccess(ah.lockDashboard)).Methods(http.MethodPut) router.HandleFunc("/api/v1/dashboards/{uuid}/unlock", am.EditAccess(ah.unlockDashboard)).Methods(http.MethodPut) // v3 - router.HandleFunc("/api/v3/licenses", am.ViewAccess(ah.listLicensesV3)).Methods(http.MethodGet) - router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.applyLicenseV3)).Methods(http.MethodPost) - router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.refreshLicensesV3)).Methods(http.MethodPut) - router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.getActiveLicenseV3)).Methods(http.MethodGet) + router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Activate)).Methods(http.MethodPost) + router.HandleFunc("/api/v3/licenses", am.AdminAccess(ah.LicensingAPI.Refresh)).Methods(http.MethodPut) + router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(ah.LicensingAPI.GetActive)).Methods(http.MethodGet) // v4 router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost) @@ -174,54 +132,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { } -// TODO(nitya): remove this once we know how to get the FF's -func (ah *APIHandler) updateRequestContext(w http.ResponseWriter, r *http.Request) (*http.Request, error) { - ssoAvailable := true - err := ah.FF().CheckFeature(model.SSO) - if err != nil { - switch err.(type) { - case basemodel.ErrFeatureUnavailable: - // do nothing, just skip sso - ssoAvailable = false - default: - zap.L().Error("feature check failed", zap.String("featureKey", model.SSO), zap.Error(err)) - return r, errors.New(errors.TypeInternal, errors.CodeInternal, "error checking SSO feature") - } - } - ctx := context.WithValue(r.Context(), types.SSOAvailable, ssoAvailable) - return r.WithContext(ctx), nil -} - -func (ah *APIHandler) loginPrecheck(w http.ResponseWriter, r *http.Request) { - r, err := ah.updateRequestContext(w, r) - if err != nil { - render.Error(w, err) - return - } - ah.Signoz.Handlers.User.LoginPrecheck(w, r) - return -} - -func (ah *APIHandler) acceptInvite(w http.ResponseWriter, r *http.Request) { - r, err := ah.updateRequestContext(w, r) - if err != nil { - render.Error(w, err) - return - } - ah.Signoz.Handlers.User.AcceptInvite(w, r) - return -} - -func (ah *APIHandler) getInvite(w http.ResponseWriter, r *http.Request) { - r, err := ah.updateRequestContext(w, r) - if err != nil { - render.Error(w, err) - return - } - ah.Signoz.Handlers.User.GetInvite(w, r) - return -} - func (ah *APIHandler) RegisterCloudIntegrationsRoutes(router *mux.Router, am *middleware.AuthZ) { ah.APIHandler.RegisterCloudIntegrationsRoutes(router, am) diff --git a/ee/query-service/app/api/auth.go b/ee/query-service/app/api/auth.go index 1d90df80d5be..329d4bb79314 100644 --- a/ee/query-service/app/api/auth.go +++ b/ee/query-service/app/api/auth.go @@ -3,41 +3,18 @@ package api import ( "context" "encoding/base64" - "encoding/json" "fmt" - "io" "net/http" "net/url" "go.uber.org/zap" - "github.com/SigNoz/signoz/ee/query-service/constants" - "github.com/SigNoz/signoz/ee/query-service/model" "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/query-service/constants" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" ) -func parseRequest(r *http.Request, req interface{}) error { - defer r.Body.Close() - requestBody, err := io.ReadAll(r.Body) - if err != nil { - return err - } - - err = json.Unmarshal(requestBody, &req) - return err -} - -// loginUser overrides base handler and considers SSO case. -func (ah *APIHandler) loginUser(w http.ResponseWriter, r *http.Request) { - r, err := ah.updateRequestContext(w, r) - if err != nil { - render.Error(w, err) - return - } - ah.Signoz.Handlers.User.Login(w, r) - return -} - func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { ssoError := []byte("Login failed. Please contact your system administrator") dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) @@ -46,85 +23,31 @@ func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) } -// receiveGoogleAuth completes google OAuth response and forwards a request -// to front-end to sign user in -func (ah *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) { - redirectUri := constants.GetDefaultSiteURL() - ctx := context.Background() - - if !ah.CheckFeature(model.SSO) { - zap.L().Error("[receiveGoogleAuth] sso requested but feature unavailable in org domain") - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) - return - } - - q := r.URL.Query() - if errType := q.Get("error"); errType != "" { - zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description"))) - http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO "), http.StatusMovedPermanently) - return - } - - relayState := q.Get("state") - zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) - - parsedState, err := url.Parse(relayState) - if err != nil || relayState == "" { - zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) - handleSsoError(w, r, redirectUri) - return - } - - // upgrade redirect url from the relay state for better accuracy - redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") - - // fetch domain by parsing relay state. - domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState) - if err != nil { - handleSsoError(w, r, redirectUri) - return - } - - // now that we have domain, use domain to fetch sso settings. - // prepare google callback handler using parsedState - - // which contains redirect URL (front-end endpoint) - callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - identity, err := callbackHandler.HandleCallback(r) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback ", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, identity.Email, ah.opts.JWT) - if err != nil { - zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) - handleSsoError(w, r, redirectUri) - return - } - - http.Redirect(w, r, nextPage, http.StatusSeeOther) -} - // receiveSAML completes a SAML request and gets user logged in func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { + claims, err := authtypes.ClaimsFromContext(r.Context()) + if err != nil { + render.Error(w, err) + return + } + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, err) + return + } + // this is the source url that initiated the login request redirectUri := constants.GetDefaultSiteURL() ctx := context.Background() - if !ah.CheckFeature(model.SSO) { + _, err = ah.Signoz.Licensing.GetActive(ctx, orgID) + if err != nil { zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain") http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently) return } - err := r.ParseForm() + err = r.ParseForm() if err != nil { zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) handleSsoError(w, r, redirectUri) @@ -147,7 +70,7 @@ func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) { redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") // fetch domain by parsing relay state. - domain, err := ah.AppDao().GetDomainFromSsoResponse(ctx, parsedState) + domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState) if err != nil { handleSsoError(w, r, redirectUri) return diff --git a/ee/query-service/app/api/cloudIntegrations.go b/ee/query-service/app/api/cloudIntegrations.go index 1251a5240322..f73e488f281f 100644 --- a/ee/query-service/app/api/cloudIntegrations.go +++ b/ee/query-service/app/api/cloudIntegrations.go @@ -36,6 +36,12 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW return } + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + cloudProvider := mux.Vars(r)["cloudProvider"] if cloudProvider != "aws" { RespondError(w, basemodel.BadRequest(fmt.Errorf( @@ -56,11 +62,9 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW SigNozAPIKey: apiKey, } - license, apiErr := ah.LM().GetRepo().GetActiveLicense(r.Context()) - if apiErr != nil { - RespondError(w, basemodel.WrapApiError( - apiErr, "couldn't look for active license", - ), nil) + license, err := ah.Signoz.Licensing.GetActive(r.Context(), orgID) + if err != nil { + render.Error(w, err) return } diff --git a/ee/query-service/app/api/domains.go b/ee/query-service/app/api/domains.go deleted file mode 100644 index 770c2048f98f..000000000000 --- a/ee/query-service/app/api/domains.go +++ /dev/null @@ -1,91 +0,0 @@ -package api - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - - "github.com/SigNoz/signoz/ee/query-service/model" - "github.com/SigNoz/signoz/pkg/types" - "github.com/google/uuid" - "github.com/gorilla/mux" -) - -func (ah *APIHandler) listDomainsByOrg(w http.ResponseWriter, r *http.Request) { - orgId := mux.Vars(r)["orgId"] - domains, apierr := ah.AppDao().ListDomains(context.Background(), orgId) - if apierr != nil { - RespondError(w, apierr, domains) - return - } - ah.Respond(w, domains) -} - -func (ah *APIHandler) postDomain(w http.ResponseWriter, r *http.Request) { - ctx := context.Background() - - req := types.GettableOrgDomain{} - - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - if err := req.ValidNew(); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - if apierr := ah.AppDao().CreateDomain(ctx, &req); apierr != nil { - RespondError(w, apierr, nil) - return - } - - ah.Respond(w, &req) -} - -func (ah *APIHandler) putDomain(w http.ResponseWriter, r *http.Request) { - ctx := context.Background() - - domainIdStr := mux.Vars(r)["id"] - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}} - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - req.ID = domainId - if err := req.Valid(nil); err != nil { - RespondError(w, model.BadRequest(err), nil) - } - - if apierr := ah.AppDao().UpdateDomain(ctx, &req); apierr != nil { - RespondError(w, apierr, nil) - return - } - - ah.Respond(w, &req) -} - -func (ah *APIHandler) deleteDomain(w http.ResponseWriter, r *http.Request) { - domainIdStr := mux.Vars(r)["id"] - - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - RespondError(w, model.BadRequest(fmt.Errorf("invalid domain id")), nil) - return - } - - apierr := ah.AppDao().DeleteDomain(context.Background(), domainId) - if apierr != nil { - RespondError(w, apierr, nil) - return - } - ah.Respond(w, nil) -} diff --git a/ee/query-service/app/api/featureFlags.go b/ee/query-service/app/api/featureFlags.go index 1feca4b7645b..13572c2f5f20 100644 --- a/ee/query-service/app/api/featureFlags.go +++ b/ee/query-service/app/api/featureFlags.go @@ -9,13 +9,29 @@ import ( "time" "github.com/SigNoz/signoz/ee/query-service/constants" - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" + pkgError "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/valuer" "go.uber.org/zap" ) func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { ctx := r.Context() - featureSet, err := ah.FF().GetFeatureFlags() + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid")) + return + } + + featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context()) if err != nil { ah.HandleError(w, err, http.StatusInternalServerError) return @@ -23,7 +39,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { if constants.FetchFeatures == "true" { zap.L().Debug("fetching license") - license, err := ah.LM().GetRepo().GetActiveLicense(ctx) + license, err := ah.Signoz.Licensing.GetActive(ctx, orgID) if err != nil { zap.L().Error("failed to fetch license", zap.Error(err)) } else if license == nil { @@ -44,9 +60,8 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { } if ah.opts.PreferSpanMetrics { - for idx := range featureSet { - feature := &featureSet[idx] - if feature.Name == basemodel.UseSpanMetrics { + for idx, feature := range featureSet { + if feature.Name == featuretypes.UseSpanMetrics { featureSet[idx].Active = true } } @@ -57,7 +72,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { // fetchZeusFeatures makes an HTTP GET request to the /zeusFeatures endpoint // and returns the FeatureSet. -func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) { +func fetchZeusFeatures(url, licenseKey string) ([]*featuretypes.GettableFeature, error) { // Check if the URL is empty if url == "" { return nil, fmt.Errorf("url is empty") @@ -116,14 +131,14 @@ func fetchZeusFeatures(url, licenseKey string) (basemodel.FeatureSet, error) { } type ZeusFeaturesResponse struct { - Status string `json:"status"` - Data basemodel.FeatureSet `json:"data"` + Status string `json:"status"` + Data []*featuretypes.GettableFeature `json:"data"` } // MergeFeatureSets merges two FeatureSet arrays with precedence to zeusFeatures. -func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basemodel.FeatureSet { +func MergeFeatureSets(zeusFeatures, internalFeatures []*featuretypes.GettableFeature) []*featuretypes.GettableFeature { // Create a map to store the merged features - featureMap := make(map[string]basemodel.Feature) + featureMap := make(map[string]*featuretypes.GettableFeature) // Add all features from the otherFeatures set to the map for _, feature := range internalFeatures { @@ -137,7 +152,7 @@ func MergeFeatureSets(zeusFeatures, internalFeatures basemodel.FeatureSet) basem } // Convert the map back to a FeatureSet slice - var mergedFeatures basemodel.FeatureSet + var mergedFeatures []*featuretypes.GettableFeature for _, feature := range featureMap { mergedFeatures = append(mergedFeatures, feature) } diff --git a/ee/query-service/app/api/featureFlags_test.go b/ee/query-service/app/api/featureFlags_test.go index e64e2ea135c7..79032a43a5db 100644 --- a/ee/query-service/app/api/featureFlags_test.go +++ b/ee/query-service/app/api/featureFlags_test.go @@ -3,58 +3,58 @@ package api import ( "testing" - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" + "github.com/SigNoz/signoz/pkg/types/featuretypes" "github.com/stretchr/testify/assert" ) func TestMergeFeatureSets(t *testing.T) { tests := []struct { name string - zeusFeatures basemodel.FeatureSet - internalFeatures basemodel.FeatureSet - expected basemodel.FeatureSet + zeusFeatures []*featuretypes.GettableFeature + internalFeatures []*featuretypes.GettableFeature + expected []*featuretypes.GettableFeature }{ { name: "empty zeusFeatures and internalFeatures", - zeusFeatures: basemodel.FeatureSet{}, - internalFeatures: basemodel.FeatureSet{}, - expected: basemodel.FeatureSet{}, + zeusFeatures: []*featuretypes.GettableFeature{}, + internalFeatures: []*featuretypes.GettableFeature{}, + expected: []*featuretypes.GettableFeature{}, }, { name: "non-empty zeusFeatures and empty internalFeatures", - zeusFeatures: basemodel.FeatureSet{ + zeusFeatures: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, }, - internalFeatures: basemodel.FeatureSet{}, - expected: basemodel.FeatureSet{ + internalFeatures: []*featuretypes.GettableFeature{}, + expected: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, }, }, { name: "empty zeusFeatures and non-empty internalFeatures", - zeusFeatures: basemodel.FeatureSet{}, - internalFeatures: basemodel.FeatureSet{ + zeusFeatures: []*featuretypes.GettableFeature{}, + internalFeatures: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, }, - expected: basemodel.FeatureSet{ + expected: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, }, }, { name: "non-empty zeusFeatures and non-empty internalFeatures with no conflicts", - zeusFeatures: basemodel.FeatureSet{ + zeusFeatures: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature3", Active: false}, }, - internalFeatures: basemodel.FeatureSet{ + internalFeatures: []*featuretypes.GettableFeature{ {Name: "Feature2", Active: true}, {Name: "Feature4", Active: false}, }, - expected: basemodel.FeatureSet{ + expected: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: true}, {Name: "Feature3", Active: false}, @@ -63,15 +63,15 @@ func TestMergeFeatureSets(t *testing.T) { }, { name: "non-empty zeusFeatures and non-empty internalFeatures with conflicts", - zeusFeatures: basemodel.FeatureSet{ + zeusFeatures: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, }, - internalFeatures: basemodel.FeatureSet{ + internalFeatures: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: false}, {Name: "Feature3", Active: true}, }, - expected: basemodel.FeatureSet{ + expected: []*featuretypes.GettableFeature{ {Name: "Feature1", Active: true}, {Name: "Feature2", Active: false}, {Name: "Feature3", Active: true}, diff --git a/ee/query-service/app/api/gateway.go b/ee/query-service/app/api/gateway.go index 54fc1759ed1a..fa1d52153fee 100644 --- a/ee/query-service/app/api/gateway.go +++ b/ee/query-service/app/api/gateway.go @@ -5,10 +5,26 @@ import ( "strings" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/valuer" ) func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) { ctx := req.Context() + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid")) + return + } + validPath := false for _, allowedPrefix := range gateway.AllowedPrefix { if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) { @@ -22,9 +38,9 @@ func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request return } - license, err := ah.LM().GetRepo().GetActiveLicense(ctx) + license, err := ah.Signoz.Licensing.GetActive(ctx, orgID) if err != nil { - RespondError(rw, err, nil) + render.Error(rw, err) return } diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go index 8fbbb0cccc62..73b4ae56acef 100644 --- a/ee/query-service/app/api/license.go +++ b/ee/query-service/app/api/license.go @@ -6,11 +6,7 @@ import ( "net/http" "github.com/SigNoz/signoz/ee/query-service/constants" - "github.com/SigNoz/signoz/ee/query-service/integrations/signozio" "github.com/SigNoz/signoz/ee/query-service/model" - "github.com/SigNoz/signoz/pkg/http/render" - "github.com/SigNoz/signoz/pkg/query-service/telemetry" - "github.com/SigNoz/signoz/pkg/types/authtypes" ) type DayWiseBreakdown struct { @@ -49,10 +45,6 @@ type details struct { BillTotal float64 `json:"billTotal"` } -type Redirect struct { - RedirectURL string `json:"redirectURL"` -} - type billingDetails struct { Status string `json:"status"` Data struct { @@ -64,97 +56,6 @@ type billingDetails struct { } `json:"data"` } -type ApplyLicenseRequest struct { - LicenseKey string `json:"key"` -} - -func (ah *APIHandler) listLicensesV3(w http.ResponseWriter, r *http.Request) { - ah.listLicensesV2(w, r) -} - -func (ah *APIHandler) getActiveLicenseV3(w http.ResponseWriter, r *http.Request) { - activeLicense, err := ah.LM().GetRepo().GetActiveLicenseV3(r.Context()) - if err != nil { - RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) - return - } - - // return 404 not found if there is no active license - if activeLicense == nil { - RespondError(w, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no active license found")}, nil) - return - } - - // TODO deprecate this when we move away from key for stripe - activeLicense.Data["key"] = activeLicense.Key - render.Success(w, http.StatusOK, activeLicense.Data) -} - -// this function is called by zeus when inserting licenses in the query-service -func (ah *APIHandler) applyLicenseV3(w http.ResponseWriter, r *http.Request) { - claims, err := authtypes.ClaimsFromContext(r.Context()) - if err != nil { - render.Error(w, err) - return - } - - var licenseKey ApplyLicenseRequest - if err := json.NewDecoder(r.Body).Decode(&licenseKey); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - if licenseKey.LicenseKey == "" { - RespondError(w, model.BadRequest(fmt.Errorf("license key is required")), nil) - return - } - - _, err = ah.LM().ActivateV3(r.Context(), licenseKey.LicenseKey) - if err != nil { - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_ACT_FAILED, map[string]interface{}{"err": err.Error()}, claims.Email, true, false) - render.Error(w, err) - return - } - - render.Success(w, http.StatusAccepted, nil) -} - -func (ah *APIHandler) refreshLicensesV3(w http.ResponseWriter, r *http.Request) { - err := ah.LM().RefreshLicense(r.Context()) - if err != nil { - render.Error(w, err) - return - } - - render.Success(w, http.StatusNoContent, nil) -} - -func getCheckoutPortalResponse(redirectURL string) *Redirect { - return &Redirect{RedirectURL: redirectURL} -} - -func (ah *APIHandler) checkout(w http.ResponseWriter, r *http.Request) { - checkoutRequest := &model.CheckoutRequest{} - if err := json.NewDecoder(r.Body).Decode(checkoutRequest); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - license := ah.LM().GetActiveLicense() - if license == nil { - RespondError(w, model.BadRequestStr("cannot proceed with checkout without license key"), nil) - return - } - - redirectUrl, err := signozio.CheckoutSession(r.Context(), checkoutRequest, license.Key, ah.Signoz.Zeus) - if err != nil { - render.Error(w, err) - return - } - - ah.Respond(w, getCheckoutPortalResponse(redirectUrl)) -} - func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) { licenseKey := r.URL.Query().Get("licenseKey") @@ -188,71 +89,3 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) { // TODO(srikanthccv):Fetch the current day usage and add it to the response ah.Respond(w, billingResponse.Data) } - -func convertLicenseV3ToLicenseV2(licenses []*model.LicenseV3) []model.License { - licensesV2 := []model.License{} - for _, l := range licenses { - planKeyFromPlanName, ok := model.MapOldPlanKeyToNewPlanName[l.PlanName] - if !ok { - planKeyFromPlanName = model.Basic - } - licenseV2 := model.License{ - Key: l.Key, - ActivationId: "", - PlanDetails: "", - FeatureSet: l.Features, - ValidationMessage: "", - IsCurrent: l.IsCurrent, - LicensePlan: model.LicensePlan{ - PlanKey: planKeyFromPlanName, - ValidFrom: l.ValidFrom, - ValidUntil: l.ValidUntil, - Status: l.Status}, - } - licensesV2 = append(licensesV2, licenseV2) - } - return licensesV2 -} - -func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { - licensesV3, apierr := ah.LM().GetLicensesV3(r.Context()) - if apierr != nil { - RespondError(w, apierr, nil) - return - } - licenses := convertLicenseV3ToLicenseV2(licensesV3) - - resp := model.Licenses{ - TrialStart: -1, - TrialEnd: -1, - OnTrial: false, - WorkSpaceBlock: false, - TrialConvertedToSubscription: false, - GracePeriodEnd: -1, - Licenses: licenses, - } - - ah.Respond(w, resp) -} - -func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) { - portalRequest := &model.PortalRequest{} - if err := json.NewDecoder(r.Body).Decode(portalRequest); err != nil { - RespondError(w, model.BadRequest(err), nil) - return - } - - license := ah.LM().GetActiveLicense() - if license == nil { - RespondError(w, model.BadRequestStr("cannot request the portal session without license key"), nil) - return - } - - redirectUrl, err := signozio.PortalSession(r.Context(), portalRequest, license.Key, ah.Signoz.Zeus) - if err != nil { - render.Error(w, err) - return - } - - ah.Respond(w, getCheckoutPortalResponse(redirectUrl)) -} diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 3c249cb9389a..84f3f4a7f4ff 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -11,13 +11,12 @@ import ( "github.com/gorilla/handlers" "github.com/jmoiron/sqlx" - eemiddleware "github.com/SigNoz/signoz/ee/http/middleware" "github.com/SigNoz/signoz/ee/query-service/app/api" "github.com/SigNoz/signoz/ee/query-service/app/db" "github.com/SigNoz/signoz/ee/query-service/constants" - "github.com/SigNoz/signoz/ee/query-service/dao/sqlite" "github.com/SigNoz/signoz/ee/query-service/integrations/gateway" "github.com/SigNoz/signoz/ee/query-service/rules" + "github.com/SigNoz/signoz/ee/query-service/usage" "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/http/middleware" @@ -30,9 +29,6 @@ import ( "github.com/rs/cors" "github.com/soheilhy/cmux" - licensepkg "github.com/SigNoz/signoz/ee/query-service/license" - "github.com/SigNoz/signoz/ee/query-service/usage" - "github.com/SigNoz/signoz/pkg/query-service/agentConf" baseapp "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" @@ -90,18 +86,11 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status { // NewServer creates and initializes Server func NewServer(serverOptions *ServerOptions) (*Server, error) { - modelDao := sqlite.NewModelDao(serverOptions.SigNoz.SQLStore) gatewayProxy, err := gateway.NewProxy(serverOptions.GatewayUrl, gateway.RoutePrefix) if err != nil { return nil, err } - // initiate license manager - lm, err := licensepkg.StartManager(serverOptions.SigNoz.SQLStore.SQLxDB(), serverOptions.SigNoz.SQLStore, serverOptions.SigNoz.Zeus) - if err != nil { - return nil, err - } - fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail) if err != nil { return nil, err @@ -168,11 +157,11 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { } // start the usagemanager - usageManager, err := usage.New(modelDao, lm.GetRepo(), serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus) + usageManager, err := usage.New(serverOptions.SigNoz.Licensing, serverOptions.SigNoz.TelemetryStore.ClickhouseDB(), serverOptions.SigNoz.Zeus, serverOptions.SigNoz.Modules.Organization) if err != nil { return nil, err } - err = usageManager.Start() + err = usageManager.Start(context.Background()) if err != nil { return nil, err } @@ -194,11 +183,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { apiOpts := api.APIHandlerOptions{ DataConnector: reader, PreferSpanMetrics: serverOptions.PreferSpanMetrics, - AppDao: modelDao, RulesManager: rm, UsageManager: usageManager, - FeatureFlags: lm, - LicenseManager: lm, IntegrationsController: integrationsController, CloudIntegrationsController: cloudIntegrationsController, LogsParsingPipelineController: logParsingPipelineController, @@ -257,15 +243,15 @@ func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, r := baseapp.NewRouter() - r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) - r.Use(eemiddleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap) - r.Use(middleware.NewTimeout(zap.L(), + r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) + r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Max, ).Wrap) - r.Use(middleware.NewAnalytics(zap.L()).Wrap) - r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) + r.Use(middleware.NewAnalytics().Wrap) + r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) apiHandler.RegisterPrivateRoutes(r) @@ -289,15 +275,15 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h r := baseapp.NewRouter() am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger()) - r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) - r.Use(eemiddleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}).Wrap) - r.Use(middleware.NewTimeout(zap.L(), + r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) + r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Max, ).Wrap) - r.Use(middleware.NewAnalytics(zap.L()).Wrap) - r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) + r.Use(middleware.NewAnalytics().Wrap) + r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) apiHandler.RegisterRoutes(r, am) apiHandler.RegisterLogsRoutes(r, am) @@ -431,15 +417,15 @@ func (s *Server) Start(ctx context.Context) error { return nil } -func (s *Server) Stop() error { +func (s *Server) Stop(ctx context.Context) error { if s.httpServer != nil { - if err := s.httpServer.Shutdown(context.Background()); err != nil { + if err := s.httpServer.Shutdown(ctx); err != nil { return err } } if s.privateHTTP != nil { - if err := s.privateHTTP.Shutdown(context.Background()); err != nil { + if err := s.privateHTTP.Shutdown(ctx); err != nil { return err } } @@ -447,11 +433,11 @@ func (s *Server) Stop() error { s.opampServer.Stop() if s.ruleManager != nil { - s.ruleManager.Stop(context.Background()) + s.ruleManager.Stop(ctx) } // stop usage manager - s.usageManager.Stop() + s.usageManager.Stop(ctx) return nil } diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go index 701495c6eb0a..ff04ff0246cd 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -4,10 +4,6 @@ import ( "os" ) -const ( - DefaultSiteURL = "https://localhost:8080" -) - var LicenseSignozIo = "https://license.signoz.io/api/v1" var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") @@ -24,12 +20,3 @@ func GetOrDefaultEnv(key string, fallback string) string { } return v } - -// constant functions that override env vars - -// GetDefaultSiteURL returns default site url, primarily -// used to send saml request and allowing backend to -// handle http redirect -func GetDefaultSiteURL() string { - return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL) -} diff --git a/ee/query-service/dao/interface.go b/ee/query-service/dao/interface.go deleted file mode 100644 index 2e40abcf2164..000000000000 --- a/ee/query-service/dao/interface.go +++ /dev/null @@ -1,23 +0,0 @@ -package dao - -import ( - "context" - "net/url" - - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/types" - "github.com/google/uuid" -) - -type ModelDao interface { - // auth methods - GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) - - // org domain (auth domains) CRUD ops - ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) - GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) - CreateDomain(ctx context.Context, d *types.GettableOrgDomain) basemodel.BaseApiError - UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError - DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError - GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) -} diff --git a/ee/query-service/dao/sqlite/domain.go b/ee/query-service/dao/sqlite/domain.go deleted file mode 100644 index 7acd051777d8..000000000000 --- a/ee/query-service/dao/sqlite/domain.go +++ /dev/null @@ -1,271 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "net/url" - "strings" - "time" - - "github.com/SigNoz/signoz/ee/query-service/model" - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/types" - "github.com/google/uuid" - "go.uber.org/zap" -) - -// GetDomainFromSsoResponse uses relay state received from IdP to fetch -// user domain. The domain is further used to process validity of the response. -// when sending login request to IdP we send relay state as URL (site url) -// with domainId or domainName as query parameter. -func (m *modelDao) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) { - // derive domain id from relay state now - var domainIdStr string - var domainNameStr string - var domain *types.GettableOrgDomain - - for k, v := range relayState.Query() { - if k == "domainId" && len(v) > 0 { - domainIdStr = strings.Replace(v[0], ":", "-", -1) - } - if k == "domainName" && len(v) > 0 { - domainNameStr = v[0] - } - } - - if domainIdStr != "" { - domainId, err := uuid.Parse(domainIdStr) - if err != nil { - zap.L().Error("failed to parse domainId from relay state", zap.Error(err)) - return nil, fmt.Errorf("failed to parse domainId from IdP response") - } - - domain, err = m.GetDomain(ctx, domainId) - if err != nil { - zap.L().Error("failed to find domain from domainId received in IdP response", zap.Error(err)) - return nil, fmt.Errorf("invalid credentials") - } - } - - if domainNameStr != "" { - - domainFromDB, err := m.GetDomainByName(ctx, domainNameStr) - domain = domainFromDB - if err != nil { - zap.L().Error("failed to find domain from domainName received in IdP response", zap.Error(err)) - return nil, fmt.Errorf("invalid credentials") - } - } - if domain != nil { - return domain, nil - } - - return nil, fmt.Errorf("failed to find domain received in IdP response") -} - -// GetDomainByName returns org domain for a given domain name -func (m *modelDao) GetDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, basemodel.BaseApiError) { - - stored := types.StorableOrgDomain{} - err := m.sqlStore.BunDB().NewSelect(). - Model(&stored). - Where("name = ?", name). - Limit(1). - Scan(ctx) - - if err != nil { - if err == sql.ErrNoRows { - return nil, model.BadRequest(fmt.Errorf("invalid domain name")) - } - return nil, model.InternalError(err) - } - - domain := &types.GettableOrgDomain{StorableOrgDomain: stored} - if err := domain.LoadConfig(stored.Data); err != nil { - return nil, model.InternalError(err) - } - return domain, nil -} - -// GetDomain returns org domain for a given domain id -func (m *modelDao) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, basemodel.BaseApiError) { - - stored := types.StorableOrgDomain{} - err := m.sqlStore.BunDB().NewSelect(). - Model(&stored). - Where("id = ?", id). - Limit(1). - Scan(ctx) - - if err != nil { - if err == sql.ErrNoRows { - return nil, model.BadRequest(fmt.Errorf("invalid domain id")) - } - return nil, model.InternalError(err) - } - - domain := &types.GettableOrgDomain{StorableOrgDomain: stored} - if err := domain.LoadConfig(stored.Data); err != nil { - return nil, model.InternalError(err) - } - return domain, nil -} - -// ListDomains gets the list of auth domains by org id -func (m *modelDao) ListDomains(ctx context.Context, orgId string) ([]types.GettableOrgDomain, basemodel.BaseApiError) { - domains := []types.GettableOrgDomain{} - - stored := []types.StorableOrgDomain{} - err := m.sqlStore.BunDB().NewSelect(). - Model(&stored). - Where("org_id = ?", orgId). - Scan(ctx) - - if err != nil { - if err == sql.ErrNoRows { - return domains, nil - } - return nil, model.InternalError(err) - } - - for _, s := range stored { - domain := types.GettableOrgDomain{StorableOrgDomain: s} - if err := domain.LoadConfig(s.Data); err != nil { - zap.L().Error("ListDomains() failed", zap.Error(err)) - } - domains = append(domains, domain) - } - - return domains, nil -} - -// CreateDomain creates a new auth domain -func (m *modelDao) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError { - - if domain.ID == uuid.Nil { - domain.ID = uuid.New() - } - - if domain.OrgID == "" || domain.Name == "" { - return model.BadRequest(fmt.Errorf("domain creation failed, missing fields: OrgID, Name ")) - } - - configJson, err := json.Marshal(domain) - if err != nil { - zap.L().Error("failed to unmarshal domain config", zap.Error(err)) - return model.InternalError(fmt.Errorf("domain creation failed")) - } - - storableDomain := types.StorableOrgDomain{ - ID: domain.ID, - Name: domain.Name, - OrgID: domain.OrgID, - Data: string(configJson), - TimeAuditable: types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()}, - } - - _, err = m.sqlStore.BunDB().NewInsert(). - Model(&storableDomain). - Exec(ctx) - - if err != nil { - zap.L().Error("failed to insert domain in db", zap.Error(err)) - return model.InternalError(fmt.Errorf("domain creation failed")) - } - - return nil -} - -// UpdateDomain updates stored config params for a domain -func (m *modelDao) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) basemodel.BaseApiError { - - if domain.ID == uuid.Nil { - zap.L().Error("domain update failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) - return model.InternalError(fmt.Errorf("domain update failed")) - } - - configJson, err := json.Marshal(domain) - if err != nil { - zap.L().Error("domain update failed", zap.Error(err)) - return model.InternalError(fmt.Errorf("domain update failed")) - } - - storableDomain := &types.StorableOrgDomain{ - ID: domain.ID, - Name: domain.Name, - OrgID: domain.OrgID, - Data: string(configJson), - TimeAuditable: types.TimeAuditable{UpdatedAt: time.Now()}, - } - - _, err = m.sqlStore.BunDB().NewUpdate(). - Model(storableDomain). - Column("data", "updated_at"). - WherePK(). - Exec(ctx) - - if err != nil { - zap.L().Error("domain update failed", zap.Error(err)) - return model.InternalError(fmt.Errorf("domain update failed")) - } - - return nil -} - -// DeleteDomain deletes an org domain -func (m *modelDao) DeleteDomain(ctx context.Context, id uuid.UUID) basemodel.BaseApiError { - - if id == uuid.Nil { - zap.L().Error("domain delete failed", zap.Error(fmt.Errorf("OrgDomain.Id is null"))) - return model.InternalError(fmt.Errorf("domain delete failed")) - } - - storableDomain := &types.StorableOrgDomain{ID: id} - _, err := m.sqlStore.BunDB().NewDelete(). - Model(storableDomain). - WherePK(). - Exec(ctx) - - if err != nil { - zap.L().Error("domain delete failed", zap.Error(err)) - return model.InternalError(fmt.Errorf("domain delete failed")) - } - - return nil -} - -func (m *modelDao) GetDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, basemodel.BaseApiError) { - - if email == "" { - return nil, model.BadRequest(fmt.Errorf("could not find auth domain, missing fields: email ")) - } - - components := strings.Split(email, "@") - if len(components) < 2 { - return nil, model.BadRequest(fmt.Errorf("invalid email address")) - } - - parsedDomain := components[1] - - stored := types.StorableOrgDomain{} - err := m.sqlStore.BunDB().NewSelect(). - Model(&stored). - Where("name = ?", parsedDomain). - Limit(1). - Scan(ctx) - - if err != nil { - if err == sql.ErrNoRows { - return nil, nil - } - return nil, model.InternalError(err) - } - - domain := &types.GettableOrgDomain{StorableOrgDomain: stored} - if err := domain.LoadConfig(stored.Data); err != nil { - return nil, model.InternalError(err) - } - return domain, nil -} diff --git a/ee/query-service/dao/sqlite/modelDao.go b/ee/query-service/dao/sqlite/modelDao.go deleted file mode 100644 index fd934aec2f82..000000000000 --- a/ee/query-service/dao/sqlite/modelDao.go +++ /dev/null @@ -1,14 +0,0 @@ -package sqlite - -import ( - "github.com/SigNoz/signoz/pkg/sqlstore" -) - -type modelDao struct { - sqlStore sqlstore.SQLStore -} - -// InitDB creates and extends base model DB repository -func NewModelDao(sqlStore sqlstore.SQLStore) *modelDao { - return &modelDao{sqlStore: sqlStore} -} diff --git a/ee/query-service/integrations/signozio/signozio.go b/ee/query-service/integrations/signozio/signozio.go deleted file mode 100644 index d1bd76572844..000000000000 --- a/ee/query-service/integrations/signozio/signozio.go +++ /dev/null @@ -1,67 +0,0 @@ -package signozio - -import ( - "context" - "encoding/json" - - "github.com/SigNoz/signoz/ee/query-service/model" - "github.com/SigNoz/signoz/pkg/zeus" - "github.com/tidwall/gjson" -) - -func ValidateLicenseV3(ctx context.Context, licenseKey string, zeus zeus.Zeus) (*model.LicenseV3, error) { - data, err := zeus.GetLicense(ctx, licenseKey) - if err != nil { - return nil, err - } - - var m map[string]any - if err = json.Unmarshal(data, &m); err != nil { - return nil, err - } - - license, err := model.NewLicenseV3(m) - if err != nil { - return nil, err - } - - return license, nil -} - -// SendUsage reports the usage of signoz to license server -func SendUsage(ctx context.Context, usage model.UsagePayload, zeus zeus.Zeus) error { - body, err := json.Marshal(usage) - if err != nil { - return err - } - - return zeus.PutMeters(ctx, usage.LicenseKey.String(), body) -} - -func CheckoutSession(ctx context.Context, checkoutRequest *model.CheckoutRequest, licenseKey string, zeus zeus.Zeus) (string, error) { - body, err := json.Marshal(checkoutRequest) - if err != nil { - return "", err - } - - response, err := zeus.GetCheckoutURL(ctx, licenseKey, body) - if err != nil { - return "", err - } - - return gjson.GetBytes(response, "url").String(), nil -} - -func PortalSession(ctx context.Context, portalRequest *model.PortalRequest, licenseKey string, zeus zeus.Zeus) (string, error) { - body, err := json.Marshal(portalRequest) - if err != nil { - return "", err - } - - response, err := zeus.GetPortalURL(ctx, licenseKey, body) - if err != nil { - return "", err - } - - return gjson.GetBytes(response, "url").String(), nil -} diff --git a/ee/query-service/license/db.go b/ee/query-service/license/db.go deleted file mode 100644 index c241ad876622..000000000000 --- a/ee/query-service/license/db.go +++ /dev/null @@ -1,248 +0,0 @@ -package license - -import ( - "context" - "database/sql" - "encoding/json" - "fmt" - "time" - - "github.com/jmoiron/sqlx" - "github.com/mattn/go-sqlite3" - - "github.com/SigNoz/signoz/ee/query-service/model" - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "go.uber.org/zap" -) - -// Repo is license repo. stores license keys in a secured DB -type Repo struct { - db *sqlx.DB - store sqlstore.SQLStore -} - -// NewLicenseRepo initiates a new license repo -func NewLicenseRepo(db *sqlx.DB, store sqlstore.SQLStore) Repo { - return Repo{ - db: db, - store: store, - } -} - -func (r *Repo) GetLicensesV3(ctx context.Context) ([]*model.LicenseV3, error) { - licensesData := []model.LicenseDB{} - licenseV3Data := []*model.LicenseV3{} - - query := "SELECT id,key,data FROM licenses_v3" - - err := r.db.Select(&licensesData, query) - if err != nil { - return nil, fmt.Errorf("failed to get licenses from db: %v", err) - } - - for _, l := range licensesData { - var licenseData map[string]interface{} - err := json.Unmarshal([]byte(l.Data), &licenseData) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err) - } - - license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData) - if err != nil { - return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err) - } - licenseV3Data = append(licenseV3Data, license) - } - - return licenseV3Data, nil -} - -// GetActiveLicense fetches the latest active license from DB. -// If the license is not present, expect a nil license and a nil error in the output. -func (r *Repo) GetActiveLicense(ctx context.Context) (*model.License, *basemodel.ApiError) { - activeLicenseV3, err := r.GetActiveLicenseV3(ctx) - if err != nil { - return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err)) - } - - if activeLicenseV3 == nil { - return nil, nil - } - activeLicenseV2 := model.ConvertLicenseV3ToLicenseV2(activeLicenseV3) - return activeLicenseV2, nil -} - -func (r *Repo) GetActiveLicenseV3(ctx context.Context) (*model.LicenseV3, error) { - var err error - licenses := []model.LicenseDB{} - - query := "SELECT id,key,data FROM licenses_v3" - - err = r.db.Select(&licenses, query) - if err != nil { - return nil, basemodel.InternalError(fmt.Errorf("failed to get active licenses from db: %v", err)) - } - - var active *model.LicenseV3 - for _, l := range licenses { - var licenseData map[string]interface{} - err := json.Unmarshal([]byte(l.Data), &licenseData) - if err != nil { - return nil, fmt.Errorf("failed to unmarshal data into licenseData : %v", err) - } - - license, err := model.NewLicenseV3WithIDAndKey(l.ID, l.Key, licenseData) - if err != nil { - return nil, fmt.Errorf("failed to get licenses v3 schema : %v", err) - } - - if active == nil && - (license.ValidFrom != 0) && - (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { - active = license - } - if active != nil && - license.ValidFrom > active.ValidFrom && - (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { - active = license - } - } - - return active, nil -} - -// InsertLicenseV3 inserts a new license v3 in db -func (r *Repo) InsertLicenseV3(ctx context.Context, l *model.LicenseV3) *model.ApiError { - - query := `INSERT INTO licenses_v3 (id, key, data) VALUES ($1, $2, $3)` - - // licsense is the entity of zeus so putting the entire license here without defining schema - licenseData, err := json.Marshal(l.Data) - if err != nil { - return &model.ApiError{Typ: basemodel.ErrorBadData, Err: err} - } - - _, err = r.db.ExecContext(ctx, - query, - l.ID, - l.Key, - string(licenseData), - ) - - if err != nil { - if sqliteErr, ok := err.(sqlite3.Error); ok { - if sqliteErr.ExtendedCode == sqlite3.ErrConstraintUnique { - zap.L().Error("error in inserting license data: ", zap.Error(sqliteErr)) - return &model.ApiError{Typ: model.ErrorConflict, Err: sqliteErr} - } - } - zap.L().Error("error in inserting license data: ", zap.Error(err)) - return &model.ApiError{Typ: basemodel.ErrorExec, Err: err} - } - - return nil -} - -// UpdateLicenseV3 updates a new license v3 in db -func (r *Repo) UpdateLicenseV3(ctx context.Context, l *model.LicenseV3) error { - - // the key and id for the license can't change so only update the data here! - query := `UPDATE licenses_v3 SET data=$1 WHERE id=$2;` - - license, err := json.Marshal(l.Data) - if err != nil { - return fmt.Errorf("insert license failed: license marshal error") - } - _, err = r.db.ExecContext(ctx, - query, - license, - l.ID, - ) - - if err != nil { - zap.L().Error("error in updating license data: ", zap.Error(err)) - return fmt.Errorf("failed to update license in db: %v", err) - } - - return nil -} - -func (r *Repo) CreateFeature(req *types.FeatureStatus) *basemodel.ApiError { - - _, err := r.store.BunDB().NewInsert(). - Model(req). - Exec(context.Background()) - if err != nil { - return &basemodel.ApiError{Typ: basemodel.ErrorInternal, Err: err} - } - return nil -} - -func (r *Repo) GetFeature(featureName string) (types.FeatureStatus, error) { - var feature types.FeatureStatus - - err := r.store.BunDB().NewSelect(). - Model(&feature). - Where("name = ?", featureName). - Scan(context.Background()) - - if err != nil { - return feature, err - } - if feature.Name == "" { - return feature, basemodel.ErrFeatureUnavailable{Key: featureName} - } - return feature, nil -} - -func (r *Repo) GetAllFeatures() ([]basemodel.Feature, error) { - - var feature []basemodel.Feature - - err := r.db.Select(&feature, - `SELECT * FROM feature_status;`) - if err != nil { - return feature, err - } - - return feature, nil -} - -func (r *Repo) UpdateFeature(req types.FeatureStatus) error { - - _, err := r.store.BunDB().NewUpdate(). - Model(&req). - Where("name = ?", req.Name). - Exec(context.Background()) - if err != nil { - return err - } - return nil -} - -func (r *Repo) InitFeatures(req []types.FeatureStatus) error { - // get a feature by name, if it doesn't exist, create it. If it does exist, update it. - for _, feature := range req { - currentFeature, err := r.GetFeature(feature.Name) - if err != nil && err == sql.ErrNoRows { - err := r.CreateFeature(&feature) - if err != nil { - return err - } - continue - } else if err != nil { - return err - } - feature.Usage = int(currentFeature.Usage) - if feature.Usage >= feature.UsageLimit && feature.UsageLimit != -1 { - feature.Active = false - } - err = r.UpdateFeature(feature) - if err != nil { - return err - } - } - return nil -} diff --git a/ee/query-service/license/manager.go b/ee/query-service/license/manager.go deleted file mode 100644 index ae5b5c897905..000000000000 --- a/ee/query-service/license/manager.go +++ /dev/null @@ -1,318 +0,0 @@ -package license - -import ( - "context" - "sync/atomic" - "time" - - "github.com/jmoiron/sqlx" - - "sync" - - baseconstants "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types" - "github.com/SigNoz/signoz/pkg/zeus" - - validate "github.com/SigNoz/signoz/ee/query-service/integrations/signozio" - "github.com/SigNoz/signoz/ee/query-service/model" - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/SigNoz/signoz/pkg/query-service/telemetry" - "go.uber.org/zap" -) - -var LM *Manager - -// validate and update license every 24 hours -var validationFrequency = 24 * 60 * time.Minute - -type Manager struct { - repo *Repo - zeus zeus.Zeus - mutex sync.Mutex - validatorRunning bool - // end the license validation, this is important to gracefully - // stopping validation and protect in-consistent updates - done chan struct{} - // terminated waits for the validate go routine to end - terminated chan struct{} - // last time the license was validated - lastValidated int64 - // keep track of validation failure attempts - failedAttempts uint64 - // keep track of active license and features - activeLicenseV3 *model.LicenseV3 - activeFeatures basemodel.FeatureSet -} - -func StartManager(db *sqlx.DB, store sqlstore.SQLStore, zeus zeus.Zeus, features ...basemodel.Feature) (*Manager, error) { - if LM != nil { - return LM, nil - } - - repo := NewLicenseRepo(db, store) - m := &Manager{ - repo: &repo, - zeus: zeus, - } - if err := m.start(features...); err != nil { - return m, err - } - - LM = m - return m, nil -} - -// start loads active license in memory and initiates validator -func (lm *Manager) start(features ...basemodel.Feature) error { - return lm.LoadActiveLicenseV3(features...) -} - -func (lm *Manager) Stop() { - close(lm.done) - <-lm.terminated -} - -func (lm *Manager) SetActiveV3(l *model.LicenseV3, features ...basemodel.Feature) { - lm.mutex.Lock() - defer lm.mutex.Unlock() - - if l == nil { - return - } - - lm.activeLicenseV3 = l - lm.activeFeatures = append(l.Features, features...) - // set default features - setDefaultFeatures(lm) - - err := lm.InitFeatures(lm.activeFeatures) - if err != nil { - zap.L().Panic("Couldn't activate features", zap.Error(err)) - } - if !lm.validatorRunning { - // we want to make sure only one validator runs, - // we already have lock() so good to go - lm.validatorRunning = true - go lm.ValidatorV3(context.Background()) - } - -} - -func setDefaultFeatures(lm *Manager) { - lm.activeFeatures = append(lm.activeFeatures, baseconstants.DEFAULT_FEATURE_SET...) -} - -func (lm *Manager) LoadActiveLicenseV3(features ...basemodel.Feature) error { - active, err := lm.repo.GetActiveLicenseV3(context.Background()) - if err != nil { - return err - } - - if active != nil { - lm.SetActiveV3(active, features...) - } else { - zap.L().Info("No active license found, defaulting to basic plan") - // if no active license is found, we default to basic(free) plan with all default features - lm.activeFeatures = model.BasicPlan - setDefaultFeatures(lm) - err := lm.InitFeatures(lm.activeFeatures) - if err != nil { - zap.L().Error("Couldn't initialize features", zap.Error(err)) - return err - } - } - - return nil -} - -func (lm *Manager) GetLicensesV3(ctx context.Context) (response []*model.LicenseV3, apiError *model.ApiError) { - - licenses, err := lm.repo.GetLicensesV3(ctx) - if err != nil { - return nil, model.InternalError(err) - } - - for _, l := range licenses { - if lm.activeLicenseV3 != nil && l.Key == lm.activeLicenseV3.Key { - l.IsCurrent = true - } - if l.ValidUntil == -1 { - // for subscriptions, there is no end-date as such - // but for showing user some validity we default one year timespan - l.ValidUntil = l.ValidFrom + 31556926 - } - response = append(response, l) - } - - return response, nil -} - -// Validator validates license after an epoch of time -func (lm *Manager) ValidatorV3(ctx context.Context) { - zap.L().Info("ValidatorV3 started!") - defer close(lm.terminated) - - tick := time.NewTicker(validationFrequency) - defer tick.Stop() - - _ = lm.ValidateV3(ctx) - for { - select { - case <-lm.done: - return - default: - select { - case <-lm.done: - return - case <-tick.C: - _ = lm.ValidateV3(ctx) - } - } - - } -} - -func (lm *Manager) RefreshLicense(ctx context.Context) error { - license, err := validate.ValidateLicenseV3(ctx, lm.activeLicenseV3.Key, lm.zeus) - if err != nil { - return err - } - - err = lm.repo.UpdateLicenseV3(ctx, license) - if err != nil { - return err - } - lm.SetActiveV3(license) - - return nil -} - -func (lm *Manager) ValidateV3(ctx context.Context) (reterr error) { - if lm.activeLicenseV3 == nil { - return nil - } - - defer func() { - lm.mutex.Lock() - - lm.lastValidated = time.Now().Unix() - if reterr != nil { - zap.L().Error("License validation completed with error", zap.Error(reterr)) - - atomic.AddUint64(&lm.failedAttempts, 1) - // default to basic plan if validation fails for three consecutive times - if atomic.LoadUint64(&lm.failedAttempts) > 3 { - zap.L().Error("License validation completed with error for three consecutive times, defaulting to basic plan", zap.String("license_id", lm.activeLicenseV3.ID), zap.Bool("license_validation", false)) - lm.activeLicenseV3 = nil - lm.activeFeatures = model.BasicPlan - setDefaultFeatures(lm) - err := lm.InitFeatures(lm.activeFeatures) - if err != nil { - zap.L().Error("Couldn't initialize features", zap.Error(err)) - } - lm.done <- struct{}{} - lm.validatorRunning = false - } - - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_LICENSE_CHECK_FAILED, - map[string]interface{}{"err": reterr.Error()}, "", true, false) - } else { - // reset the failed attempts counter - atomic.StoreUint64(&lm.failedAttempts, 0) - zap.L().Info("License validation completed with no errors") - } - - lm.mutex.Unlock() - }() - - err := lm.RefreshLicense(ctx) - - if err != nil { - return err - } - return nil -} - -func (lm *Manager) ActivateV3(ctx context.Context, licenseKey string) (*model.LicenseV3, error) { - license, err := validate.ValidateLicenseV3(ctx, licenseKey, lm.zeus) - if err != nil { - return nil, err - } - - // insert the new license to the sqlite db - modelErr := lm.repo.InsertLicenseV3(ctx, license) - if modelErr != nil { - zap.L().Error("failed to activate license", zap.Error(modelErr)) - return nil, modelErr - } - - // license is valid, activate it - lm.SetActiveV3(license) - return license, nil -} - -func (lm *Manager) GetActiveLicense() *model.LicenseV3 { - return lm.activeLicenseV3 -} - -// CheckFeature will be internally used by backend routines -// for feature gating -func (lm *Manager) CheckFeature(featureKey string) error { - feature, err := lm.repo.GetFeature(featureKey) - if err != nil { - return err - } - if feature.Active { - return nil - } - return basemodel.ErrFeatureUnavailable{Key: featureKey} -} - -// GetFeatureFlags returns current active features -func (lm *Manager) GetFeatureFlags() (basemodel.FeatureSet, error) { - return lm.repo.GetAllFeatures() -} - -func (lm *Manager) InitFeatures(features basemodel.FeatureSet) error { - featureStatus := make([]types.FeatureStatus, len(features)) - for i, f := range features { - featureStatus[i] = types.FeatureStatus{ - Name: f.Name, - Active: f.Active, - Usage: int(f.Usage), - UsageLimit: int(f.UsageLimit), - Route: f.Route, - } - } - return lm.repo.InitFeatures(featureStatus) -} - -func (lm *Manager) UpdateFeatureFlag(feature basemodel.Feature) error { - return lm.repo.UpdateFeature(types.FeatureStatus{ - Name: feature.Name, - Active: feature.Active, - Usage: int(feature.Usage), - UsageLimit: int(feature.UsageLimit), - Route: feature.Route, - }) -} - -func (lm *Manager) GetFeatureFlag(key string) (basemodel.Feature, error) { - featureStatus, err := lm.repo.GetFeature(key) - if err != nil { - return basemodel.Feature{}, err - } - return basemodel.Feature{ - Name: featureStatus.Name, - Active: featureStatus.Active, - Usage: int64(featureStatus.Usage), - UsageLimit: int64(featureStatus.UsageLimit), - Route: featureStatus.Route, - }, nil -} - -// GetRepo return the license repo -func (lm *Manager) GetRepo() *Repo { - return lm.repo -} diff --git a/ee/query-service/main.go b/ee/query-service/main.go index bb8e0d7e176b..996b2ac0ebdd 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -6,7 +6,8 @@ import ( "os" "time" - eeuserimpl "github.com/SigNoz/signoz/ee/modules/user/impluser" + "github.com/SigNoz/signoz/ee/licensing" + "github.com/SigNoz/signoz/ee/licensing/httplicensing" "github.com/SigNoz/signoz/ee/query-service/app" "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" "github.com/SigNoz/signoz/ee/zeus" @@ -14,15 +15,15 @@ import ( "github.com/SigNoz/signoz/pkg/config" "github.com/SigNoz/signoz/pkg/config/envprovider" "github.com/SigNoz/signoz/pkg/config/fileprovider" - "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/modules/user" + pkglicensing "github.com/SigNoz/signoz/pkg/licensing" baseconst "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" + pkgzeus "github.com/SigNoz/signoz/pkg/zeus" "go.uber.org/zap" "go.uber.org/zap/zapcore" @@ -90,8 +91,9 @@ func main() { loggerMgr := initZapLog() zap.ReplaceGlobals(loggerMgr) defer loggerMgr.Sync() // flushes buffer, if any + ctx := context.Background() - config, err := signoz.NewConfig(context.Background(), config.ResolverConfig{ + config, err := signoz.NewConfig(ctx, config.ResolverConfig{ Uris: []string{"env:"}, ProviderFactories: []config.ProviderFactory{ envprovider.NewFactory(), @@ -127,19 +129,18 @@ func main() { signoz, err := signoz.New( context.Background(), config, + jwt, zeus.Config(), httpzeus.NewProviderFactory(), + licensing.Config(24*time.Hour, 3), + func(sqlstore sqlstore.SQLStore, zeus pkgzeus.Zeus) factory.ProviderFactory[pkglicensing.Licensing, pkglicensing.Config] { + return httplicensing.NewProviderFactory(sqlstore, zeus) + }, signoz.NewEmailingProviderFactories(), signoz.NewCacheProviderFactories(), signoz.NewWebProviderFactories(), sqlStoreFactories, signoz.NewTelemetryStoreProviderFactories(), - func(sqlstore sqlstore.SQLStore, emailing emailing.Emailing, providerSettings factory.ProviderSettings) user.Module { - return eeuserimpl.NewModule(eeuserimpl.NewStore(sqlstore), jwt, emailing, providerSettings) - }, - func(userModule user.Module) user.Handler { - return eeuserimpl.NewHandler(userModule) - }, ) if err != nil { zap.L().Fatal("Failed to create signoz", zap.Error(err)) @@ -163,22 +164,22 @@ func main() { zap.L().Fatal("Failed to create server", zap.Error(err)) } - if err := server.Start(context.Background()); err != nil { + if err := server.Start(ctx); err != nil { zap.L().Fatal("Could not start server", zap.Error(err)) } - signoz.Start(context.Background()) + signoz.Start(ctx) - if err := signoz.Wait(context.Background()); err != nil { + if err := signoz.Wait(ctx); err != nil { zap.L().Fatal("Failed to start signoz", zap.Error(err)) } - err = server.Stop() + err = server.Stop(ctx) if err != nil { zap.L().Fatal("Failed to stop server", zap.Error(err)) } - err = signoz.Stop(context.Background()) + err = signoz.Stop(ctx) if err != nil { zap.L().Fatal("Failed to stop signoz", zap.Error(err)) } diff --git a/ee/query-service/model/license.go b/ee/query-service/model/license.go deleted file mode 100644 index 513d080891ca..000000000000 --- a/ee/query-service/model/license.go +++ /dev/null @@ -1,244 +0,0 @@ -package model - -import ( - "encoding/json" - "fmt" - "reflect" - "time" - - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/pkg/errors" -) - -type License struct { - Key string `json:"key" db:"key"` - ActivationId string `json:"activationId" db:"activationId"` - CreatedAt time.Time `db:"created_at"` - - // PlanDetails contains the encrypted plan info - PlanDetails string `json:"planDetails" db:"planDetails"` - - // stores parsed license details - LicensePlan - - FeatureSet basemodel.FeatureSet - - // populated in case license has any errors - ValidationMessage string `db:"validationMessage"` - - // used only for sending details to front-end - IsCurrent bool `json:"isCurrent"` -} - -func (l *License) MarshalJSON() ([]byte, error) { - - return json.Marshal(&struct { - Key string `json:"key" db:"key"` - ActivationId string `json:"activationId" db:"activationId"` - ValidationMessage string `db:"validationMessage"` - IsCurrent bool `json:"isCurrent"` - PlanKey string `json:"planKey"` - ValidFrom time.Time `json:"ValidFrom"` - ValidUntil time.Time `json:"ValidUntil"` - Status string `json:"status"` - }{ - Key: l.Key, - ActivationId: l.ActivationId, - IsCurrent: l.IsCurrent, - PlanKey: l.PlanKey, - ValidFrom: time.Unix(l.ValidFrom, 0), - ValidUntil: time.Unix(l.ValidUntil, 0), - Status: l.Status, - ValidationMessage: l.ValidationMessage, - }) -} - -type LicensePlan struct { - PlanKey string `json:"planKey"` - ValidFrom int64 `json:"validFrom"` - ValidUntil int64 `json:"validUntil"` - Status string `json:"status"` -} - -type Licenses struct { - TrialStart int64 `json:"trialStart"` - TrialEnd int64 `json:"trialEnd"` - OnTrial bool `json:"onTrial"` - WorkSpaceBlock bool `json:"workSpaceBlock"` - TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"` - GracePeriodEnd int64 `json:"gracePeriodEnd"` - Licenses []License `json:"licenses"` -} - -type SubscriptionServerResp struct { - Status string `json:"status"` - Data Licenses `json:"data"` -} - -type Plan struct { - Name string `json:"name"` -} - -type LicenseDB struct { - ID string `json:"id"` - Key string `json:"key"` - Data string `json:"data"` -} -type LicenseV3 struct { - ID string - Key string - Data map[string]interface{} - PlanName string - Features basemodel.FeatureSet - Status string - IsCurrent bool - ValidFrom int64 - ValidUntil int64 -} - -func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) { - var zeroValue T - if val, ok := data[key]; ok { - if value, ok := val.(T); ok { - return value, nil - } - return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue)) - } - return zeroValue, fmt.Errorf("%s key is missing", key) -} - -func NewLicenseV3(data map[string]interface{}) (*LicenseV3, error) { - var features basemodel.FeatureSet - - // extract id from data - licenseID, err := extractKeyFromMapStringInterface[string](data, "id") - if err != nil { - return nil, err - } - delete(data, "id") - - // extract key from data - licenseKey, err := extractKeyFromMapStringInterface[string](data, "key") - if err != nil { - return nil, err - } - delete(data, "key") - - // extract status from data - status, err := extractKeyFromMapStringInterface[string](data, "status") - if err != nil { - return nil, err - } - - planMap, err := extractKeyFromMapStringInterface[map[string]any](data, "plan") - if err != nil { - return nil, err - } - - planName, err := extractKeyFromMapStringInterface[string](planMap, "name") - if err != nil { - return nil, err - } - // if license status is invalid then default it to basic - if status == LicenseStatusInvalid { - planName = PlanNameBasic - } - - featuresFromZeus := basemodel.FeatureSet{} - if _features, ok := data["features"]; ok { - featuresData, err := json.Marshal(_features) - if err != nil { - return nil, errors.Wrap(err, "failed to marshal features data") - } - - if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil { - return nil, errors.Wrap(err, "failed to unmarshal features data") - } - } - - switch planName { - case PlanNameEnterprise: - features = append(features, EnterprisePlan...) - case PlanNameBasic: - features = append(features, BasicPlan...) - default: - features = append(features, BasicPlan...) - } - - if len(featuresFromZeus) > 0 { - for _, feature := range featuresFromZeus { - exists := false - for i, existingFeature := range features { - if existingFeature.Name == feature.Name { - features[i] = feature // Replace existing feature - exists = true - break - } - } - if !exists { - features = append(features, feature) // Append if it doesn't exist - } - } - } - data["features"] = features - - _validFrom, err := extractKeyFromMapStringInterface[float64](data, "valid_from") - if err != nil { - _validFrom = 0 - } - validFrom := int64(_validFrom) - - _validUntil, err := extractKeyFromMapStringInterface[float64](data, "valid_until") - if err != nil { - _validUntil = 0 - } - validUntil := int64(_validUntil) - - return &LicenseV3{ - ID: licenseID, - Key: licenseKey, - Data: data, - PlanName: planName, - Features: features, - ValidFrom: validFrom, - ValidUntil: validUntil, - Status: status, - }, nil - -} - -func NewLicenseV3WithIDAndKey(id string, key string, data map[string]interface{}) (*LicenseV3, error) { - licenseDataWithIdAndKey := data - licenseDataWithIdAndKey["id"] = id - licenseDataWithIdAndKey["key"] = key - return NewLicenseV3(licenseDataWithIdAndKey) -} - -func ConvertLicenseV3ToLicenseV2(l *LicenseV3) *License { - planKeyFromPlanName, ok := MapOldPlanKeyToNewPlanName[l.PlanName] - if !ok { - planKeyFromPlanName = Basic - } - return &License{ - Key: l.Key, - ActivationId: "", - PlanDetails: "", - FeatureSet: l.Features, - ValidationMessage: "", - IsCurrent: l.IsCurrent, - LicensePlan: LicensePlan{ - PlanKey: planKeyFromPlanName, - ValidFrom: l.ValidFrom, - ValidUntil: l.ValidUntil, - Status: l.Status}, - } - -} - -type CheckoutRequest struct { - SuccessURL string `json:"url"` -} - -type PortalRequest struct { - SuccessURL string `json:"url"` -} diff --git a/ee/query-service/model/license_test.go b/ee/query-service/model/license_test.go deleted file mode 100644 index 710541eea3f5..000000000000 --- a/ee/query-service/model/license_test.go +++ /dev/null @@ -1,170 +0,0 @@ -package model - -import ( - "encoding/json" - "testing" - - "github.com/SigNoz/signoz/pkg/query-service/model" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewLicenseV3(t *testing.T) { - testCases := []struct { - name string - data []byte - pass bool - expected *LicenseV3 - error error - }{ - { - name: "Error for missing license id", - data: []byte(`{}`), - pass: false, - error: errors.New("id key is missing"), - }, - { - name: "Error for license id not being a valid string", - data: []byte(`{"id": 10}`), - pass: false, - error: errors.New("id key is not a valid string"), - }, - { - name: "Error for missing license key", - data: []byte(`{"id":"does-not-matter"}`), - pass: false, - error: errors.New("key key is missing"), - }, - { - name: "Error for invalid string license key", - data: []byte(`{"id":"does-not-matter","key":10}`), - pass: false, - error: errors.New("key key is not a valid string"), - }, - { - name: "Error for missing license status", - data: []byte(`{"id":"does-not-matter", "key": "does-not-matter","category":"FREE"}`), - pass: false, - error: errors.New("status key is missing"), - }, - { - name: "Error for invalid string license status", - data: []byte(`{"id":"does-not-matter","key": "does-not-matter", "category":"FREE", "status":10}`), - pass: false, - error: errors.New("status key is not a valid string"), - }, - { - name: "Error for missing license plan", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`), - pass: false, - error: errors.New("plan key is missing"), - }, - { - name: "Error for invalid json license plan", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`), - pass: false, - error: errors.New("plan key is not a valid map[string]interface {}"), - }, - { - name: "Error for invalid license plan", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`), - pass: false, - error: errors.New("name key is missing"), - }, - { - name: "Parse the entire license properly", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`), - pass: true, - expected: &LicenseV3{ - ID: "does-not-matter", - Key: "does-not-matter-key", - Data: map[string]interface{}{ - "plan": map[string]interface{}{ - "name": "ENTERPRISE", - }, - "category": "FREE", - "status": "ACTIVE", - "valid_from": float64(1730899309), - "valid_until": float64(-1), - }, - PlanName: PlanNameEnterprise, - ValidFrom: 1730899309, - ValidUntil: -1, - Status: "ACTIVE", - IsCurrent: false, - Features: model.FeatureSet{}, - }, - }, - { - name: "Fallback to basic plan if license status is invalid", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`), - pass: true, - expected: &LicenseV3{ - ID: "does-not-matter", - Key: "does-not-matter-key", - Data: map[string]interface{}{ - "plan": map[string]interface{}{ - "name": "ENTERPRISE", - }, - "category": "FREE", - "status": "INVALID", - "valid_from": float64(1730899309), - "valid_until": float64(-1), - }, - PlanName: PlanNameBasic, - ValidFrom: 1730899309, - ValidUntil: -1, - Status: "INVALID", - IsCurrent: false, - Features: model.FeatureSet{}, - }, - }, - { - name: "fallback states for validFrom and validUntil", - data: []byte(`{"id":"does-not-matter","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`), - pass: true, - expected: &LicenseV3{ - ID: "does-not-matter", - Key: "does-not-matter-key", - Data: map[string]interface{}{ - "plan": map[string]interface{}{ - "name": "ENTERPRISE", - }, - "valid_from": 1234.456, - "valid_until": 5678.567, - "category": "FREE", - "status": "ACTIVE", - }, - PlanName: PlanNameEnterprise, - ValidFrom: 1234, - ValidUntil: 5678, - Status: "ACTIVE", - IsCurrent: false, - Features: model.FeatureSet{}, - }, - }, - } - - for _, tc := range testCases { - var licensePayload map[string]interface{} - err := json.Unmarshal(tc.data, &licensePayload) - require.NoError(t, err) - license, err := NewLicenseV3(licensePayload) - if license != nil { - license.Features = make(model.FeatureSet, 0) - delete(license.Data, "features") - } - - if tc.pass { - require.NoError(t, err) - require.NotNil(t, license) - assert.Equal(t, tc.expected, license) - } else { - require.Error(t, err) - assert.EqualError(t, err, tc.error.Error()) - require.Nil(t, license) - } - - } -} diff --git a/ee/query-service/usage/manager.go b/ee/query-service/usage/manager.go index e42946cf9c0e..c7ab151f8069 100644 --- a/ee/query-service/usage/manager.go +++ b/ee/query-service/usage/manager.go @@ -14,9 +14,9 @@ import ( "go.uber.org/zap" - "github.com/SigNoz/signoz/ee/query-service/dao" - "github.com/SigNoz/signoz/ee/query-service/license" "github.com/SigNoz/signoz/ee/query-service/model" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/modules/organization" "github.com/SigNoz/signoz/pkg/query-service/utils/encryption" "github.com/SigNoz/signoz/pkg/zeus" ) @@ -35,64 +35,69 @@ var ( type Manager struct { clickhouseConn clickhouse.Conn - licenseRepo *license.Repo + licenseService licensing.Licensing scheduler *gocron.Scheduler - modelDao dao.ModelDao - zeus zeus.Zeus + + organizationModule organization.Module } -func New(modelDao dao.ModelDao, licenseRepo *license.Repo, clickhouseConn clickhouse.Conn, zeus zeus.Zeus) (*Manager, error) { +func New(licenseService licensing.Licensing, clickhouseConn clickhouse.Conn, zeus zeus.Zeus, organizationModule organization.Module) (*Manager, error) { m := &Manager{ - clickhouseConn: clickhouseConn, - licenseRepo: licenseRepo, - scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC - modelDao: modelDao, - zeus: zeus, + clickhouseConn: clickhouseConn, + licenseService: licenseService, + scheduler: gocron.NewScheduler(time.UTC).Every(1).Day().At("00:00"), // send usage every at 00:00 UTC + zeus: zeus, + organizationModule: organizationModule, } return m, nil } // start loads collects and exports any exported snapshot and starts the exporter -func (lm *Manager) Start() error { +func (lm *Manager) Start(ctx context.Context) error { // compares the locker and stateUnlocked if both are same lock is applied else returns error if !atomic.CompareAndSwapUint32(&locker, stateUnlocked, stateLocked) { return fmt.Errorf("usage exporter is locked") } - _, err := lm.scheduler.Do(func() { lm.UploadUsage() }) + // upload usage once when starting the service + + _, err := lm.scheduler.Do(func() { lm.UploadUsage(ctx) }) if err != nil { return err } - // upload usage once when starting the service - lm.UploadUsage() - + lm.UploadUsage(ctx) lm.scheduler.StartAsync() - return nil } -func (lm *Manager) UploadUsage() { - ctx := context.Background() - // check if license is present or not - license, err := lm.licenseRepo.GetActiveLicense(ctx) +func (lm *Manager) UploadUsage(ctx context.Context) { + + organizations, err := lm.organizationModule.GetAll(context.Background()) if err != nil { - zap.L().Error("failed to get active license", zap.Error(err)) - return - } - if license == nil { - // we will not start the usage reporting if license is not present. - zap.L().Info("no license present, skipping usage reporting") + zap.L().Error("failed to get organizations", zap.Error(err)) return } + for _, organization := range organizations { + // check if license is present or not + license, err := lm.licenseService.GetActive(ctx, organization.ID) + if err != nil { + zap.L().Error("failed to get active license", zap.Error(err)) + return + } + if license == nil { + // we will not start the usage reporting if license is not present. + zap.L().Info("no license present, skipping usage reporting") + return + } - usages := []model.UsageDB{} + usages := []model.UsageDB{} - // get usage from clickhouse - dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"} - query := ` + // get usage from clickhouse + dbs := []string{"signoz_logs", "signoz_traces", "signoz_metrics"} + query := ` SELECT tenant, collector_id, exporter_id, timestamp, data FROM %s.distributed_usage as u1 GLOBAL INNER JOIN @@ -107,76 +112,76 @@ func (lm *Manager) UploadUsage() { order by timestamp ` - for _, db := range dbs { - dbusages := []model.UsageDB{} - err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) - if err != nil && !strings.Contains(err.Error(), "doesn't exist") { - zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) - return + for _, db := range dbs { + dbusages := []model.UsageDB{} + err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour))) + if err != nil && !strings.Contains(err.Error(), "doesn't exist") { + zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err)) + return + } + for _, u := range dbusages { + u.Type = db + usages = append(usages, u) + } } - for _, u := range dbusages { - u.Type = db - usages = append(usages, u) - } - } - if len(usages) <= 0 { - zap.L().Info("no snapshots to upload, skipping.") - return - } - - zap.L().Info("uploading usage data") - - usagesPayload := []model.Usage{} - for _, usage := range usages { - usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data)) - if err != nil { - zap.L().Error("error while decrypting usage data: %v", zap.Error(err)) + if len(usages) <= 0 { + zap.L().Info("no snapshots to upload, skipping.") return } - usageData := model.Usage{} - err = json.Unmarshal(usageDataBytes, &usageData) - if err != nil { - zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err)) + zap.L().Info("uploading usage data") + + usagesPayload := []model.Usage{} + for _, usage := range usages { + usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data)) + if err != nil { + zap.L().Error("error while decrypting usage data: %v", zap.Error(err)) + return + } + + usageData := model.Usage{} + err = json.Unmarshal(usageDataBytes, &usageData) + if err != nil { + zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err)) + return + } + + usageData.CollectorID = usage.CollectorID + usageData.ExporterID = usage.ExporterID + usageData.Type = usage.Type + usageData.Tenant = "default" + usageData.OrgName = "default" + usageData.TenantId = "default" + usagesPayload = append(usagesPayload, usageData) + } + + key, _ := uuid.Parse(license.Key) + payload := model.UsagePayload{ + LicenseKey: key, + Usage: usagesPayload, + } + + body, errv2 := json.Marshal(payload) + if errv2 != nil { + zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2)) return } - usageData.CollectorID = usage.CollectorID - usageData.ExporterID = usage.ExporterID - usageData.Type = usage.Type - usageData.Tenant = "default" - usageData.OrgName = "default" - usageData.TenantId = "default" - usagesPayload = append(usagesPayload, usageData) - } - - key, _ := uuid.Parse(license.Key) - payload := model.UsagePayload{ - LicenseKey: key, - Usage: usagesPayload, - } - - body, errv2 := json.Marshal(payload) - if errv2 != nil { - zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2)) - return - } - - errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body) - if errv2 != nil { - zap.L().Error("failed to upload usage: %v", zap.Error(errv2)) - // not returning error here since it is captured in the failed count - return + errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body) + if errv2 != nil { + zap.L().Error("failed to upload usage: %v", zap.Error(errv2)) + // not returning error here since it is captured in the failed count + return + } } } -func (lm *Manager) Stop() { +func (lm *Manager) Stop(ctx context.Context) { lm.scheduler.Stop() zap.L().Info("sending usage data before shutting down") // send usage before shutting down - lm.UploadUsage() - + lm.UploadUsage(ctx) atomic.StoreUint32(&locker, stateUnlocked) } diff --git a/frontend/src/AppRoutes/Private.tsx b/frontend/src/AppRoutes/Private.tsx index 092fedaae6a9..9c9719694b69 100644 --- a/frontend/src/AppRoutes/Private.tsx +++ b/frontend/src/AppRoutes/Private.tsx @@ -36,8 +36,8 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { user, isLoggedIn: isLoggedInState, isFetchingOrgPreferences, - activeLicenseV3, - isFetchingActiveLicenseV3, + activeLicense, + isFetchingActiveLicense, trialInfo, featureFlags, } = useAppContext(); @@ -145,16 +145,16 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { }; useEffect(() => { - if (!isFetchingActiveLicenseV3 && activeLicenseV3) { + if (!isFetchingActiveLicense && activeLicense) { const currentRoute = mapRoutes.get('current'); - const isTerminated = activeLicenseV3.state === LicenseState.TERMINATED; - const isExpired = activeLicenseV3.state === LicenseState.EXPIRED; - const isCancelled = activeLicenseV3.state === LicenseState.CANCELLED; + const isTerminated = activeLicense.state === LicenseState.TERMINATED; + const isExpired = activeLicense.state === LicenseState.EXPIRED; + const isCancelled = activeLicense.state === LicenseState.CANCELLED; const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled; - const { platform } = activeLicenseV3; + const { platform } = activeLicense; if ( isWorkspaceAccessRestricted && @@ -164,26 +164,26 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { navigateToWorkSpaceAccessRestricted(currentRoute); } } - }, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]); + }, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]); useEffect(() => { - if (!isFetchingActiveLicenseV3) { + if (!isFetchingActiveLicense) { const currentRoute = mapRoutes.get('current'); const shouldBlockWorkspace = trialInfo?.workSpaceBlock; if ( shouldBlockWorkspace && currentRoute && - activeLicenseV3?.platform === LicensePlatform.CLOUD + activeLicense?.platform === LicensePlatform.CLOUD ) { navigateToWorkSpaceBlocked(currentRoute); } } // eslint-disable-next-line react-hooks/exhaustive-deps }, [ - isFetchingActiveLicenseV3, + isFetchingActiveLicense, trialInfo?.workSpaceBlock, - activeLicenseV3?.platform, + activeLicense?.platform, mapRoutes, pathname, ]); @@ -197,20 +197,20 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { }; useEffect(() => { - if (!isFetchingActiveLicenseV3 && activeLicenseV3) { + if (!isFetchingActiveLicense && activeLicense) { const currentRoute = mapRoutes.get('current'); const shouldSuspendWorkspace = - activeLicenseV3.state === LicenseState.DEFAULTED; + activeLicense.state === LicenseState.DEFAULTED; if ( shouldSuspendWorkspace && currentRoute && - activeLicenseV3.platform === LicensePlatform.CLOUD + activeLicense.platform === LicensePlatform.CLOUD ) { navigateToWorkSpaceSuspended(currentRoute); } } - }, [isFetchingActiveLicenseV3, activeLicenseV3, mapRoutes, pathname]); + }, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]); useEffect(() => { if (org && org.length > 0 && org[0].id !== undefined) { diff --git a/frontend/src/AppRoutes/index.tsx b/frontend/src/AppRoutes/index.tsx index e24704a22cb1..40ddd6eef7e6 100644 --- a/frontend/src/AppRoutes/index.tsx +++ b/frontend/src/AppRoutes/index.tsx @@ -13,9 +13,9 @@ import AppLayout from 'container/AppLayout'; import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys'; import { useThemeConfig } from 'hooks/useDarkMode'; import { useGetTenantLicense } from 'hooks/useGetTenantLicense'; -import { LICENSE_PLAN_KEY } from 'hooks/useLicense'; import { NotificationProvider } from 'hooks/useNotifications'; import { ResourceProvider } from 'hooks/useResourceAttribute'; +import { StatusCodes } from 'http-status-codes'; import history from 'lib/history'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; import posthog from 'posthog-js'; @@ -23,6 +23,7 @@ import AlertRuleProvider from 'providers/Alert'; import { useAppContext } from 'providers/App/App'; import { IUser } from 'providers/App/types'; import { DashboardProvider } from 'providers/Dashboard/Dashboard'; +import { ErrorModalProvider } from 'providers/ErrorModalProvider'; import { QueryBuilderProvider } from 'providers/QueryBuilder'; import { Suspense, useCallback, useEffect, useState } from 'react'; import { Route, Router, Switch } from 'react-router-dom'; @@ -41,14 +42,13 @@ import defaultRoutes, { function App(): JSX.Element { const themeConfig = useThemeConfig(); const { - licenses, user, isFetchingUser, - isFetchingLicenses, isFetchingFeatureFlags, trialInfo, - activeLicenseV3, - isFetchingActiveLicenseV3, + activeLicense, + isFetchingActiveLicense, + activeLicenseFetchError, userFetchError, featureFlagsFetchError, isLoggedIn: isLoggedInState, @@ -66,7 +66,7 @@ function App(): JSX.Element { const enableAnalytics = useCallback( (user: IUser): void => { // wait for the required data to be loaded before doing init for anything! - if (!isFetchingActiveLicenseV3 && activeLicenseV3 && org) { + if (!isFetchingActiveLicense && activeLicense && org) { const orgName = org && Array.isArray(org) && org.length > 0 ? org[0].displayName : ''; @@ -153,8 +153,8 @@ function App(): JSX.Element { }, [ hostname, - isFetchingActiveLicenseV3, - activeLicenseV3, + isFetchingActiveLicense, + activeLicense, org, trialInfo?.trialConvertedToSubscription, ], @@ -163,18 +163,17 @@ function App(): JSX.Element { // eslint-disable-next-line sonarjs/cognitive-complexity useEffect(() => { if ( - !isFetchingLicenses && - licenses && + !isFetchingActiveLicense && + (activeLicense || activeLicenseFetchError) && !isFetchingUser && user && !!user.email ) { const isOnBasicPlan = - licenses.licenses?.some( - (license) => - license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, - ) || licenses.licenses === null; - + activeLicenseFetchError && + [StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes( + activeLicenseFetchError?.getHttpStatusCode(), + ); const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER); if (isLoggedInState && user && user.id && user.email && !isIdentifiedUser) { @@ -204,11 +203,12 @@ function App(): JSX.Element { }, [ isLoggedInState, user, - licenses, isCloudUser, isEnterpriseSelfHostedUser, - isFetchingLicenses, + isFetchingActiveLicense, isFetchingUser, + activeLicense, + activeLicenseFetchError, ]); useEffect(() => { @@ -231,8 +231,7 @@ function App(): JSX.Element { if ( !isFetchingFeatureFlags && (featureFlags || featureFlagsFetchError) && - licenses && - activeLicenseV3 && + activeLicense && trialInfo ) { let isChatSupportEnabled = false; @@ -270,8 +269,7 @@ function App(): JSX.Element { featureFlags, isFetchingFeatureFlags, featureFlagsFetchError, - licenses, - activeLicenseV3, + activeLicense, trialInfo, isCloudUser, isEnterpriseSelfHostedUser, @@ -333,7 +331,7 @@ function App(): JSX.Element { // if the user is in logged in state if (isLoggedInState) { // if the setup calls are loading then return a spinner - if (isFetchingLicenses || isFetchingUser || isFetchingFeatureFlags) { + if (isFetchingActiveLicense || isFetchingUser || isFetchingFeatureFlags) { return ; } @@ -345,7 +343,11 @@ function App(): JSX.Element { } // if all of the data is not set then return a spinner, this is required because there is some gap between loading states and data setting - if ((!licenses || !user.email || !featureFlags) && !userFetchError) { + if ( + (!activeLicense || !user.email || !featureFlags) && + !userFetchError && + !activeLicenseFetchError + ) { return ; } } @@ -357,34 +359,36 @@ function App(): JSX.Element { - - - - - - - - }> - - {routes.map(({ path, component, exact }) => ( - - ))} - - - - - - - - - - - + + + + + + + + + }> + + {routes.map(({ path, component, exact }) => ( + + ))} + + + + + + + + + + + + diff --git a/frontend/src/api/SAML/deleteDomain.ts b/frontend/src/api/SAML/deleteDomain.ts deleted file mode 100644 index 50c2b51a8078..000000000000 --- a/frontend/src/api/SAML/deleteDomain.ts +++ /dev/null @@ -1,24 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/SAML/deleteDomain'; - -const deleteDomain = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.delete(`/domains/${props.id}`); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default deleteDomain; diff --git a/frontend/src/api/SAML/listAllDomain.ts b/frontend/src/api/SAML/listAllDomain.ts deleted file mode 100644 index 41620f7d3e50..000000000000 --- a/frontend/src/api/SAML/listAllDomain.ts +++ /dev/null @@ -1,24 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/SAML/listDomain'; - -const listAllDomain = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.get(`/orgs/${props.orgId}/domains`); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default listAllDomain; diff --git a/frontend/src/api/SAML/postDomain.ts b/frontend/src/api/SAML/postDomain.ts deleted file mode 100644 index 34a8ecd1f793..000000000000 --- a/frontend/src/api/SAML/postDomain.ts +++ /dev/null @@ -1,24 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/SAML/postDomain'; - -const postDomain = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.post(`/domains`, props); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default postDomain; diff --git a/frontend/src/api/SAML/updateDomain.ts b/frontend/src/api/SAML/updateDomain.ts deleted file mode 100644 index 0c4cce83af09..000000000000 --- a/frontend/src/api/SAML/updateDomain.ts +++ /dev/null @@ -1,24 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/SAML/updateDomain'; - -const updateDomain = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.put(`/domains/${props.id}`, props); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default updateDomain; diff --git a/frontend/src/api/billing/checkout.ts b/frontend/src/api/billing/checkout.ts deleted file mode 100644 index f8eaf397486c..000000000000 --- a/frontend/src/api/billing/checkout.ts +++ /dev/null @@ -1,29 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { - CheckoutRequestPayloadProps, - CheckoutSuccessPayloadProps, -} from 'types/api/billing/checkout'; - -const updateCreditCardApi = async ( - props: CheckoutRequestPayloadProps, -): Promise | ErrorResponse> => { - try { - const response = await axios.post('/checkout', { - url: props.url, - }); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default updateCreditCardApi; diff --git a/frontend/src/api/billing/manage.ts b/frontend/src/api/billing/manage.ts deleted file mode 100644 index 1ea8fa762d3e..000000000000 --- a/frontend/src/api/billing/manage.ts +++ /dev/null @@ -1,29 +0,0 @@ -import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { - CheckoutRequestPayloadProps, - CheckoutSuccessPayloadProps, -} from 'types/api/billing/checkout'; - -const manageCreditCardApi = async ( - props: CheckoutRequestPayloadProps, -): Promise | ErrorResponse> => { - try { - const response = await axios.post('/portal', { - url: props.url, - }); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default manageCreditCardApi; diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index f58e2405163e..a5e62ae78942 100644 --- a/frontend/src/api/index.ts +++ b/frontend/src/api/index.ts @@ -4,7 +4,11 @@ import getLocalStorageApi from 'api/browser/localstorage/get'; import loginApi from 'api/v1/login/login'; import afterLogin from 'AppRoutes/utils'; -import axios, { AxiosResponse, InternalAxiosRequestConfig } from 'axios'; +import axios, { + AxiosError, + AxiosResponse, + InternalAxiosRequestConfig, +} from 'axios'; import { ENVIRONMENT } from 'constants/env'; import { Events } from 'constants/events'; import { LOCALSTORAGE } from 'constants/localStorage'; @@ -83,24 +87,27 @@ const interceptorRejected = async ( true, ); - const reResponse = await axios( - `${value.config.baseURL}${value.config.url?.substring(1)}`, - { - method: value.config.method, - headers: { - ...value.config.headers, - Authorization: `Bearer ${response.data.accessJwt}`, + try { + const reResponse = await axios( + `${value.config.baseURL}${value.config.url?.substring(1)}`, + { + method: value.config.method, + headers: { + ...value.config.headers, + Authorization: `Bearer ${response.data.accessJwt}`, + }, + data: { + ...JSON.parse(value.config.data || '{}'), + }, }, - data: { - ...JSON.parse(value.config.data || '{}'), - }, - }, - ); - if (reResponse.status === 200) { + ); + return await Promise.resolve(reResponse); + } catch (error) { + if ((error as AxiosError)?.response?.status === 401) { + Logout(); + } } - Logout(); - return await Promise.reject(reResponse); } catch (error) { Logout(); } diff --git a/frontend/src/api/licenses/apply.ts b/frontend/src/api/licenses/apply.ts deleted file mode 100644 index c691ad836ff3..000000000000 --- a/frontend/src/api/licenses/apply.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { ApiV3Instance as axios } from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; -import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/licenses/apply'; - -const apply = async ( - props: Props, -): Promise | ErrorResponse> => { - try { - const response = await axios.post('/licenses', { - key: props.key, - }); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; - } catch (error) { - return ErrorResponseHandler(error as AxiosError); - } -}; - -export default apply; diff --git a/frontend/src/api/licenses/getAll.ts b/frontend/src/api/licenses/getAll.ts deleted file mode 100644 index b05cdcb9e2c0..000000000000 --- a/frontend/src/api/licenses/getAll.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { ApiV3Instance as axios } from 'api'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps } from 'types/api/licenses/getAll'; - -const getAll = async (): Promise< - SuccessResponse | ErrorResponse -> => { - const response = await axios.get('/licenses'); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; -}; - -export default getAll; diff --git a/frontend/src/api/licensesV3/getActive.ts b/frontend/src/api/licensesV3/getActive.ts deleted file mode 100644 index 48dd0a3a434f..000000000000 --- a/frontend/src/api/licensesV3/getActive.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { ApiV3Instance as axios } from 'api'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { LicenseV3EventQueueResModel } from 'types/api/licensesV3/getActive'; - -const getActive = async (): Promise< - SuccessResponse | ErrorResponse -> => { - const response = await axios.get('/licenses/active'); - - return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data.data, - }; -}; - -export default getActive; diff --git a/frontend/src/api/v1/checkout/create.ts b/frontend/src/api/v1/checkout/create.ts new file mode 100644 index 000000000000..2e71a647686b --- /dev/null +++ b/frontend/src/api/v1/checkout/create.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, + PayloadProps, +} from 'types/api/billing/checkout'; + +const updateCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise> => { + try { + const response = await axios.post('/checkout', { + url: props.url, + }); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default updateCreditCardApi; diff --git a/frontend/src/api/v1/domains/create.ts b/frontend/src/api/v1/domains/create.ts new file mode 100644 index 000000000000..18fbc21b2bd1 --- /dev/null +++ b/frontend/src/api/v1/domains/create.ts @@ -0,0 +1,21 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { AuthDomain } from 'types/api/SAML/listDomain'; +import { PayloadProps, Props } from 'types/api/SAML/postDomain'; + +const create = async (props: Props): Promise> => { + try { + const response = await axios.post(`/domains`, props); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default create; diff --git a/frontend/src/api/v1/domains/delete.ts b/frontend/src/api/v1/domains/delete.ts new file mode 100644 index 000000000000..0c1f452248fe --- /dev/null +++ b/frontend/src/api/v1/domains/delete.ts @@ -0,0 +1,20 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { PayloadProps, Props } from 'types/api/SAML/deleteDomain'; + +const deleteDomain = async (props: Props): Promise> => { + try { + const response = await axios.delete(`/domains/${props.id}`); + + return { + httpStatusCode: response.status, + data: null, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default deleteDomain; diff --git a/frontend/src/api/v1/domains/list.ts b/frontend/src/api/v1/domains/list.ts new file mode 100644 index 000000000000..fc056873a064 --- /dev/null +++ b/frontend/src/api/v1/domains/list.ts @@ -0,0 +1,20 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { AuthDomain, PayloadProps } from 'types/api/SAML/listDomain'; + +const listAllDomain = async (): Promise> => { + try { + const response = await axios.get(`/domains`); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default listAllDomain; diff --git a/frontend/src/api/v1/domains/update.ts b/frontend/src/api/v1/domains/update.ts new file mode 100644 index 000000000000..701555a39d17 --- /dev/null +++ b/frontend/src/api/v1/domains/update.ts @@ -0,0 +1,23 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { AuthDomain } from 'types/api/SAML/listDomain'; +import { PayloadProps, Props } from 'types/api/SAML/updateDomain'; + +const updateDomain = async ( + props: Props, +): Promise> => { + try { + const response = await axios.put(`/domains/${props.id}`, props); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default updateDomain; diff --git a/frontend/src/api/v1/portal/create.ts b/frontend/src/api/v1/portal/create.ts new file mode 100644 index 000000000000..1c6854ffe293 --- /dev/null +++ b/frontend/src/api/v1/portal/create.ts @@ -0,0 +1,28 @@ +import axios from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, + PayloadProps, +} from 'types/api/billing/checkout'; + +const manageCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise> => { + try { + const response = await axios.post('/portal', { + url: props.url, + }); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default manageCreditCardApi; diff --git a/frontend/src/api/v3/licenses/active/get.ts b/frontend/src/api/v3/licenses/active/get.ts new file mode 100644 index 000000000000..7bf73e95cad9 --- /dev/null +++ b/frontend/src/api/v3/licenses/active/get.ts @@ -0,0 +1,25 @@ +import { ApiV3Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { + LicenseEventQueueResModel, + PayloadProps, +} from 'types/api/licensesV3/getActive'; + +const getActive = async (): Promise< + SuccessResponseV2 +> => { + try { + const response = await axios.get('/licenses/active'); + + return { + httpStatusCode: response.status, + data: response.data.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default getActive; diff --git a/frontend/src/api/v3/licenses/put.ts b/frontend/src/api/v3/licenses/put.ts new file mode 100644 index 000000000000..4cd971acc0e8 --- /dev/null +++ b/frontend/src/api/v3/licenses/put.ts @@ -0,0 +1,24 @@ +import { ApiV3Instance as axios } from 'api'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; +import { AxiosError } from 'axios'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; +import { PayloadProps, Props } from 'types/api/licenses/apply'; + +const apply = async ( + props: Props, +): Promise> => { + try { + const response = await axios.post('/licenses', { + key: props.key, + }); + + return { + httpStatusCode: response.status, + data: response.data, + }; + } catch (error) { + ErrorResponseHandlerV2(error as AxiosError); + } +}; + +export default apply; diff --git a/frontend/src/assets/Error.tsx b/frontend/src/assets/Error.tsx new file mode 100644 index 000000000000..9b6924c4fcd9 --- /dev/null +++ b/frontend/src/assets/Error.tsx @@ -0,0 +1,191 @@ +import React from 'react'; + +type ErrorIconProps = React.SVGProps; + +function ErrorIcon({ ...props }: ErrorIconProps): JSX.Element { + return ( + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ); +} + +export default ErrorIcon; diff --git a/frontend/src/components/ChatSupportGateway/ChatSupportGateway.tsx b/frontend/src/components/ChatSupportGateway/ChatSupportGateway.tsx index 94e8de819448..64657c839894 100644 --- a/frontend/src/components/ChatSupportGateway/ChatSupportGateway.tsx +++ b/frontend/src/components/ChatSupportGateway/ChatSupportGateway.tsx @@ -1,14 +1,14 @@ import { Button, Modal, Typography } from 'antd'; -import updateCreditCardApi from 'api/billing/checkout'; import logEvent from 'api/common/logEvent'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; +import updateCreditCardApi from 'api/v1/checkout/create'; import { useNotifications } from 'hooks/useNotifications'; import { CreditCard, X } from 'lucide-react'; import { useState } from 'react'; import { useMutation } from 'react-query'; import { useLocation } from 'react-router-dom'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { SuccessResponseV2 } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; +import APIError from 'types/api/error'; export default function ChatSupportGateway(): JSX.Element { const { notifications } = useNotifications(); @@ -18,20 +18,21 @@ export default function ChatSupportGateway(): JSX.Element { ); const handleBillingOnSuccess = ( - data: ErrorResponse | SuccessResponse, + data: SuccessResponseV2, ): void => { - if (data?.payload?.redirectURL) { + if (data?.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }; - const handleBillingOnError = (): void => { + const handleBillingOnError = (error: APIError): void => { notifications.error({ - message: SOMETHING_WENT_WRONG, + message: error.getErrorCode(), + description: error.getErrorMessage(), }); }; diff --git a/frontend/src/components/ErrorModal/ErrorModal.styles.scss b/frontend/src/components/ErrorModal/ErrorModal.styles.scss new file mode 100644 index 000000000000..87c2ea6edd5c --- /dev/null +++ b/frontend/src/components/ErrorModal/ErrorModal.styles.scss @@ -0,0 +1,118 @@ +.error-modal { + &__trigger { + width: fit-content; + display: flex; + align-items: center; + gap: 4px; + border-radius: 20px; + background: rgba(229, 72, 77, 0.2); + padding-left: 3px; + padding-right: 8px; + cursor: pointer; + span { + color: var(--bg-cherry-500); + font-size: 10px; + font-weight: 500; + line-height: 20px; /* 200% */ + letter-spacing: 0.4px; + text-transform: uppercase; + } + } + &__wrap { + background: linear-gradient( + 180deg, + rgba(11, 12, 14, 0.12) 0.07%, + rgba(39, 8, 14, 0.24) 50.04%, + rgba(106, 29, 44, 0.36) 75.02%, + rgba(197, 57, 85, 0.48) 87.51%, + rgba(242, 71, 105, 0.6) 100% + ); + -webkit-backdrop-filter: blur(10px); + backdrop-filter: blur(10px); + + .ant-modal { + bottom: 40px; + top: unset; + position: absolute; + width: 520px; + left: 0px; + right: 0px; + margin: auto; + } + } + &__body { + padding: 0; + background: var(--bg-ink-400); + overflow: hidden; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + } + &__header { + background: none !important; + .ant-modal-title { + display: flex; + justify-content: space-between; + align-items: center; + } + .key-value-label { + padding: 0; + border: none; + border-radius: 4px; + overflow: hidden; + &__key, + &__value { + padding: 4px 8px; + font-size: 12px; + font-style: normal; + font-weight: 500; + line-height: 16px; + letter-spacing: 0.48px; + } + &__key { + text-transform: uppercase; + &, + &:hover { + color: var(--bg-vanilla-100); + } + } + &__value { + color: var(--bg-vanilla-400); + pointer-events: none; + } + } + .close-button { + padding: 3px 7px; + background: var(--bg-ink-400); + display: inline-flex; + align-items: center; + border-radius: 4px; + border: 1px solid var(--bg-slate-500); + box-shadow: none; + } + } + &__footer { + margin: 0 !important; + height: 6px; + background: var(--bg-sakura-500); + } + &__content { + padding: 0 !important; + border-radius: 4px; + overflow: hidden; + background: none !important; + } +} + +.lightMode { + .error-modal { + &__body, + &__header .close-button { + background: var(--bg-vanilla-100); + } + &__header .close-button { + svg { + fill: var(--bg-vanilla-100); + } + } + } +} diff --git a/frontend/src/components/ErrorModal/ErrorModal.test.tsx b/frontend/src/components/ErrorModal/ErrorModal.test.tsx new file mode 100644 index 000000000000..64f880e8cece --- /dev/null +++ b/frontend/src/components/ErrorModal/ErrorModal.test.tsx @@ -0,0 +1,195 @@ +import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils'; +import APIError from 'types/api/error'; + +import ErrorModal from './ErrorModal'; + +// Mock the query client to return version data +const mockVersionData = { + payload: { + ee: 'Y', + version: '1.0.0', + }, +}; +jest.mock('react-query', () => ({ + ...jest.requireActual('react-query'), + useQueryClient: (): { getQueryData: () => typeof mockVersionData } => ({ + getQueryData: jest.fn(() => mockVersionData), + }), +})); +const mockError: APIError = new APIError({ + httpStatusCode: 400, + error: { + // eslint-disable-next-line sonarjs/no-duplicate-string + message: 'Something went wrong while processing your request.', + // eslint-disable-next-line sonarjs/no-duplicate-string + code: 'An error occurred', + // eslint-disable-next-line sonarjs/no-duplicate-string + url: 'https://example.com/docs', + errors: [ + { message: 'First error detail' }, + { message: 'Second error detail' }, + { message: 'Third error detail' }, + ], + }, +}); +describe('ErrorModal Component', () => { + it('should render the modal when open is true', () => { + render(); + + // Check if the error message is displayed + expect(screen.getByText('An error occurred')).toBeInTheDocument(); + expect( + screen.getByText('Something went wrong while processing your request.'), + ).toBeInTheDocument(); + }); + + it('should not render the modal when open is false', () => { + render(); + + // Check that the modal content is not in the document + expect(screen.queryByText('An error occurred')).not.toBeInTheDocument(); + }); + + it('should call onClose when the close button is clicked', async () => { + const onCloseMock = jest.fn(); + render(); + + // Click the close button + const closeButton = screen.getByTestId('close-button'); + act(() => { + fireEvent.click(closeButton); + }); + + // Check if onClose was called + expect(onCloseMock).toHaveBeenCalledTimes(1); + }); + + it('should display version data if available', async () => { + render(); + + // Check if the version data is displayed + expect(screen.getByText('ENTERPRISE')).toBeInTheDocument(); + expect(screen.getByText('1.0.0')).toBeInTheDocument(); + }); + it('should render the messages count badge when there are multiple errors', () => { + render(); + + // Check if the messages count badge is displayed + expect(screen.getByText('MESSAGES')).toBeInTheDocument(); + + expect(screen.getByText('3')).toBeInTheDocument(); + + // Check if the individual error messages are displayed + expect(screen.getByText('First error detail')).toBeInTheDocument(); + expect(screen.getByText('Second error detail')).toBeInTheDocument(); + expect(screen.getByText('Third error detail')).toBeInTheDocument(); + }); + + it('should render the open docs button when URL is provided', async () => { + render(); + + // Check if the open docs button is displayed + const openDocsButton = screen.getByTestId('error-docs-button'); + + expect(openDocsButton).toBeInTheDocument(); + + expect(openDocsButton).toHaveAttribute('href', 'https://example.com/docs'); + + expect(openDocsButton).toHaveAttribute('target', '_blank'); + }); + + it('should not display scroll for more if there are less than 10 messages', () => { + render(); + + expect(screen.queryByText('Scroll for more')).not.toBeInTheDocument(); + }); + it('should display scroll for more if there are more than 10 messages', async () => { + const longError = new APIError({ + httpStatusCode: 400, + error: { + ...mockError.error, + code: 'An error occurred', + message: 'Something went wrong while processing your request.', + url: 'https://example.com/docs', + errors: Array.from({ length: 15 }, (_, i) => ({ + message: `Error detail ${i + 1}`, + })), + }, + }); + + render(); + + // Check if the scroll hint is displayed + expect(screen.getByText('Scroll for more')).toBeInTheDocument(); + }); +}); +it('should render the trigger component if provided', () => { + const mockTrigger = ; + render( + , + ); + + // Check if the trigger component is rendered + expect(screen.getByText('Open Error Modal')).toBeInTheDocument(); +}); + +it('should open the modal when the trigger component is clicked', async () => { + const mockTrigger = ; + render( + , + ); + + // Click the trigger component + const triggerButton = screen.getByText('Open Error Modal'); + act(() => { + fireEvent.click(triggerButton); + }); + + // Check if the modal is displayed + expect(screen.getByText('An error occurred')).toBeInTheDocument(); +}); + +it('should render the default trigger tag if no trigger component is provided', () => { + render(); + + // Check if the default trigger tag is rendered + expect(screen.getByText('error')).toBeInTheDocument(); +}); + +it('should close the modal when the onCancel event is triggered', async () => { + const onCloseMock = jest.fn(); + render(); + + // Click the trigger component + const triggerButton = screen.getByText('error'); + act(() => { + fireEvent.click(triggerButton); + }); + + await waitFor(() => { + expect(screen.getByText('An error occurred')).toBeInTheDocument(); + }); + + // Trigger the onCancel event + act(() => { + fireEvent.click(screen.getByTestId('close-button')); + }); + + // Check if the modal is closed + expect(onCloseMock).toHaveBeenCalledTimes(1); + + await waitFor(() => { + // check if the modal is not visible + const modal = document.getElementsByClassName('ant-modal'); + const style = window.getComputedStyle(modal[0]); + expect(style.display).toBe('none'); + }); +}); diff --git a/frontend/src/components/ErrorModal/ErrorModal.tsx b/frontend/src/components/ErrorModal/ErrorModal.tsx new file mode 100644 index 000000000000..3765345ba45b --- /dev/null +++ b/frontend/src/components/ErrorModal/ErrorModal.tsx @@ -0,0 +1,102 @@ +import './ErrorModal.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Button, Modal, Tag } from 'antd'; +import { CircleAlert, X } from 'lucide-react'; +import KeyValueLabel from 'periscope/components/KeyValueLabel'; +import { useAppContext } from 'providers/App/App'; +import React from 'react'; +import APIError from 'types/api/error'; + +import ErrorContent from './components/ErrorContent'; + +type Props = { + error: APIError; + triggerComponent?: React.ReactElement; + onClose?: () => void; + open?: boolean; +}; + +const classNames = { + body: 'error-modal__body', + mask: 'error-modal__mask', + header: 'error-modal__header', + footer: 'error-modal__footer', + content: 'error-modal__content', +}; + +function ErrorModal({ + open, + error, + triggerComponent, + onClose, +}: Props): JSX.Element { + const [visible, setVisible] = React.useState(open); + + const handleClose = (): void => { + setVisible(false); + onClose?.(); + }; + + const { versionData } = useAppContext(); + + const versionDataPayload = versionData; + + return ( + <> + {!triggerComponent ? ( + } + color="error" + onClick={(): void => setVisible(true)} + > + error + + ) : ( + React.cloneElement(triggerComponent, { + onClick: () => setVisible(true), + }) + )} + + } + title={ + <> + {versionDataPayload ? ( + + ) : ( +
+ )} + + + } + onCancel={handleClose} + closeIcon={false} + classNames={classNames} + wrapClassName="error-modal__wrap" + > + + + + ); +} + +ErrorModal.defaultProps = { + onClose: undefined, + triggerComponent: null, + open: false, +}; + +export default ErrorModal; diff --git a/frontend/src/components/ErrorModal/components/ErrorContent.styles.scss b/frontend/src/components/ErrorModal/components/ErrorContent.styles.scss new file mode 100644 index 000000000000..a00f2111f3cd --- /dev/null +++ b/frontend/src/components/ErrorModal/components/ErrorContent.styles.scss @@ -0,0 +1,208 @@ +.error-content { + display: flex; + flex-direction: column; + // === SECTION: Summary (Top) + &__summary-section { + display: flex; + flex-direction: column; + border-bottom: 1px solid var(--bg-slate-400); + } + + &__summary { + display: flex; + justify-content: space-between; + padding: 16px; + } + + &__summary-left { + display: flex; + align-items: baseline; + gap: 8px; + } + + &__summary-text { + display: flex; + flex-direction: column; + gap: 6px; + } + + &__error-code { + color: var(--bg-vanilla-100); + margin: 0; + font-size: 16px; + font-weight: 500; + line-height: 24px; /* 150% */ + letter-spacing: -0.08px; + } + + &__error-message { + margin: 0; + color: var(--bg-vanilla-400); + font-size: 14px; + font-weight: 400; + line-height: 20px; /* 142.857% */ + letter-spacing: -0.07px; + } + + &__docs-button { + display: flex; + align-items: center; + gap: 6px; + padding: 9px 12.5px; + color: var(--bg-vanilla-400); + font-size: 12px; + font-weight: 400; + line-height: 18px; /* 150% */ + letter-spacing: 0.12px; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + box-shadow: none; + } + + &__message-badge { + display: flex; + align-items: center; + gap: 12px; + padding: 0px 16px 16px; + + .key-value-label { + width: fit-content; + border-color: var(--bg-slate-400); + border-radius: 20px; + overflow: hidden; + &__key { + padding-left: 8px; + padding-right: 8px; + } + &__value { + padding-right: 10px; + color: var(--bg-vanilla-400); + font-size: 12px; + font-weight: 500; + line-height: 18px; /* 150% */ + letter-spacing: 0.48px; + pointer-events: none; + } + } + &-label { + display: flex; + align-items: center; + gap: 6px; + &-dot { + height: 6px; + width: 6px; + background: var(--bg-sakura-500); + border-radius: 50%; + } + &-text { + color: var(--bg-vanilla-100); + font-size: 10px; + font-weight: 500; + line-height: 18px; /* 180% */ + letter-spacing: 0.5px; + } + } + &-line { + flex: 1; + height: 8px; + background-image: radial-gradient(circle, #444c63 1px, transparent 2px); + background-size: 8px 11px; + background-position: top left; + padding: 6px; + } + } + + // === SECTION: Message List (Bottom) + + &__message-list-container { + position: relative; + } + + &__message-list { + margin: 0; + padding: 0; + list-style: none; + max-height: 275px; + } + + &__message-item { + position: relative; + margin-bottom: 4px; + color: var(--bg-vanilla-400); + font-family: Geist Mono; + font-size: 12px; + font-weight: 400; + line-height: 18px; + color: var(--bg-vanilla-400); + padding: 3px 12px; + padding-left: 26px; + } + + &__message-item::before { + font-family: unset; + content: ''; + position: absolute; + left: 12px; + top: 50%; + transform: translateY(-50%); + width: 2px; + height: 4px; + border-radius: 50px; + background: var(--bg-slate-400); + } + + &__scroll-hint { + position: absolute; + bottom: 10px; + left: 0px; + right: 0px; + margin: auto; + width: fit-content; + display: inline-flex; + padding: 4px 12px 4px 10px; + justify-content: center; + align-items: center; + gap: 3px; + background: var(--bg-slate-200); + border-radius: 20px; + box-shadow: 0px 103px 12px 0px rgba(0, 0, 0, 0.01), + 0px 66px 18px 0px rgba(0, 0, 0, 0.01), 0px 37px 22px 0px rgba(0, 0, 0, 0.03), + 0px 17px 17px 0px rgba(0, 0, 0, 0.04), 0px 4px 9px 0px rgba(0, 0, 0, 0.04); + } + + &__scroll-hint-text { + color: var(--bg-vanilla-100); + + font-size: 12px; + font-weight: 400; + line-height: 18px; + letter-spacing: -0.06px; + } +} + +.lightMode { + .error-content { + &__error-code { + color: var(--bg-ink-100); + } + &__error-message { + color: var(--bg-ink-400); + } + &__message-item { + color: var(--bg-ink-400); + } + &__message-badge { + &-label-text { + color: var(--bg-ink-400); + } + .key-value-label__value { + color: var(--bg-ink-400); + } + } + &__docs-button { + background: var(--bg-vanilla-100); + color: var(--bg-ink-100); + } + } +} diff --git a/frontend/src/components/ErrorModal/components/ErrorContent.tsx b/frontend/src/components/ErrorModal/components/ErrorContent.tsx new file mode 100644 index 000000000000..3817b0d82ce2 --- /dev/null +++ b/frontend/src/components/ErrorModal/components/ErrorContent.tsx @@ -0,0 +1,98 @@ +import './ErrorContent.styles.scss'; + +import { Color } from '@signozhq/design-tokens'; +import { Button } from 'antd'; +import ErrorIcon from 'assets/Error'; +import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; +import { BookOpenText, ChevronsDown } from 'lucide-react'; +import KeyValueLabel from 'periscope/components/KeyValueLabel'; +import APIError from 'types/api/error'; + +interface ErrorContentProps { + error: APIError; +} + +function ErrorContent({ error }: ErrorContentProps): JSX.Element { + const { + url: errorUrl, + errors: errorMessages, + code: errorCode, + message: errorMessage, + } = error.error.error; + return ( +
+ {/* Summary Header */} +
+
+
+
+ +
+ +
+

{errorCode}

+

{errorMessage}

+
+
+ + {errorUrl && ( +
+ +
+ )} +
+ + {errorMessages?.length > 0 && ( +
+ +
+
MESSAGES
+
+ } + badgeValue={errorMessages.length.toString()} + /> +
+
+ )} +
+ + {/* Detailed Messages */} +
+
+ +
    + {errorMessages?.map((error) => ( +
  • + {error.message} +
  • + ))} +
+
+ {errorMessages?.length > 10 && ( +
+ + Scroll for more +
+ )} +
+
+
+ ); +} + +export default ErrorContent; diff --git a/frontend/src/components/HostMetricsDetail/HostMetricsDetails.tsx b/frontend/src/components/HostMetricsDetail/HostMetricsDetails.tsx index 2af30b994396..fa21c837298d 100644 --- a/frontend/src/components/HostMetricsDetail/HostMetricsDetails.tsx +++ b/frontend/src/components/HostMetricsDetail/HostMetricsDetails.tsx @@ -37,6 +37,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -67,6 +68,7 @@ function HostMetricsDetails({ AppState, GlobalReducer >((state) => state.globalTime); + const [searchParams, setSearchParams] = useSearchParams(); const startMs = useMemo(() => Math.floor(Number(minTime) / 1000000000), [ minTime, @@ -86,7 +88,9 @@ function HostMetricsDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [selectedView, setSelectedView] = useState( + (searchParams.get('view') as VIEWS) || VIEWS.METRICS, + ); const isDarkMode = useIsDarkMode(); const initialFilters = useMemo( @@ -149,6 +153,9 @@ function HostMetricsDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + if (host?.hostName) { + setSearchParams({ hostName: host?.hostName, view: e.target.value }); + } logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.HostEntity, view: e.target.value, @@ -313,6 +320,7 @@ function HostMetricsDetails({ const handleClose = (): void => { setSelectedInterval(selectedTime as Time); + setSearchParams({}); if (selectedTime !== 'custom') { const { maxTime, minTime } = GetMinMax(selectedTime); diff --git a/frontend/src/components/HostMetricsDetail/HostMetricsLogs/HostMetricsLogs.tsx b/frontend/src/components/HostMetricsDetail/HostMetricsLogs/HostMetricsLogs.tsx index 8279336eea07..95e535dad77b 100644 --- a/frontend/src/components/HostMetricsDetail/HostMetricsLogs/HostMetricsLogs.tsx +++ b/frontend/src/components/HostMetricsDetail/HostMetricsLogs/HostMetricsLogs.tsx @@ -75,7 +75,6 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element { const getItemContent = useCallback( (_: number, logToRender: ILog): JSX.Element => ( { +describe.skip('HostMetricsLogs', () => { let capturedQueryRangePayloads: QueryRangePayload[] = []; const itemHeight = 100; beforeEach(() => { diff --git a/frontend/src/components/HostMetricsDetail/Metrics/Metrics.tsx b/frontend/src/components/HostMetricsDetail/Metrics/Metrics.tsx index a8ab1359a006..005047fc0afa 100644 --- a/frontend/src/components/HostMetricsDetail/Metrics/Metrics.tsx +++ b/frontend/src/components/HostMetricsDetail/Metrics/Metrics.tsx @@ -80,6 +80,7 @@ function Metrics({ softMin: null, minTimeScale: timeRange.startTime, maxTimeScale: timeRange.endTime, + enableZoom: true, }), ), [queries, isDarkMode, dimensions, timeRange.startTime, timeRange.endTime], @@ -115,7 +116,7 @@ function Metrics({
, + data: SuccessResponseV2, ): void => { - if (data?.payload?.redirectURL) { + if (data?.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }; - const handleBillingOnError = (): void => { + const handleBillingOnError = (error: APIError): void => { notifications.error({ - message: SOMETHING_WENT_WRONG, + message: error.getErrorCode(), + description: error.getErrorMessage(), }); }; diff --git a/frontend/src/components/LogDetail/LogDetails.styles.scss b/frontend/src/components/LogDetail/LogDetails.styles.scss index 5cd014b71b3b..458de97b3612 100644 --- a/frontend/src/components/LogDetail/LogDetails.styles.scss +++ b/frontend/src/components/LogDetail/LogDetails.styles.scss @@ -3,6 +3,25 @@ background: var(--bg-ink-400); box-shadow: -4px 10px 16px 2px rgba(0, 0, 0, 0.2); + .log-detail-drawer__title { + display: flex; + justify-content: space-between; + align-items: center; + + .log-detail-drawer__title-left { + display: flex; + align-items: center; + gap: 8px; + } + + .log-detail-drawer__title-right { + .ant-btn { + display: flex; + align-items: center; + } + } + } + .ant-drawer-header { padding: 8px 16px; border-bottom: none; diff --git a/frontend/src/components/LogDetail/index.tsx b/frontend/src/components/LogDetail/index.tsx index c216694c35b2..b818823b5a70 100644 --- a/frontend/src/components/LogDetail/index.tsx +++ b/frontend/src/components/LogDetail/index.tsx @@ -8,6 +8,8 @@ import { RadioChangeEvent } from 'antd/lib'; import cx from 'classnames'; import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator'; import { LOCALSTORAGE } from 'constants/localStorage'; +import { QueryParams } from 'constants/query'; +import ROUTES from 'constants/routes'; import ContextView from 'container/LogDetailedView/ContextView/ContextView'; import InfraMetrics from 'container/LogDetailedView/InfraMetrics/InfraMetrics'; import JSONView from 'container/LogDetailedView/JsonView'; @@ -22,9 +24,12 @@ import dompurify from 'dompurify'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useIsDarkMode } from 'hooks/useDarkMode'; import { useNotifications } from 'hooks/useNotifications'; +import { useSafeNavigate } from 'hooks/useSafeNavigate'; +import useUrlQuery from 'hooks/useUrlQuery'; import { BarChart2, Braces, + Compass, Copy, Filter, HardHat, @@ -33,9 +38,12 @@ import { X, } from 'lucide-react'; import { useMemo, useState } from 'react'; -import { useCopyToClipboard } from 'react-use'; +import { useSelector } from 'react-redux'; +import { useCopyToClipboard, useLocation } from 'react-use'; +import { AppState } from 'store/reducers'; import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData'; import { DataSource, StringOperators } from 'types/common/queryBuilder'; +import { GlobalReducer } from 'types/reducer/globalTime'; import { FORBID_DOM_PURIFY_TAGS } from 'utils/app'; import { RESOURCE_KEYS, VIEW_TYPES, VIEWS } from './constants'; @@ -77,6 +85,12 @@ function LogDetail({ }); const isDarkMode = useIsDarkMode(); + const location = useLocation(); + const { safeNavigate } = useSafeNavigate(); + const urlQuery = useUrlQuery(); + const { maxTime, minTime } = useSelector( + (state) => state.globalTime, + ); const { notifications } = useNotifications(); @@ -119,6 +133,21 @@ function LogDetail({ }); }; + // Go to logs explorer page with the log data + const handleOpenInExplorer = (): void => { + urlQuery.set(QueryParams.activeLogId, `"${log?.id}"`); + urlQuery.set(QueryParams.startTime, minTime?.toString() || ''); + urlQuery.set(QueryParams.endTime, maxTime?.toString() || ''); + safeNavigate(`${ROUTES.LOGS_EXPLORER}?${urlQuery.toString()}`); + }; + + // Only show when opened from infra monitoring page + const showOpenInExplorerBtn = useMemo( + () => location.pathname?.includes('/infrastructure-monitoring'), + // eslint-disable-next-line react-hooks/exhaustive-deps + [], + ); + if (!log) { // eslint-disable-next-line react/jsx-no-useless-fragment return <>; @@ -131,10 +160,23 @@ function LogDetail({ width="60%" maskStyle={{ background: 'none' }} title={ - <> - - Log details - +
+
+ + Log details +
+ {showOpenInExplorerBtn && ( +
+ +
+ )} +
} placement="right" // closable diff --git a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.styles.scss b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.styles.scss index 73849b010a48..6524f55b1662 100644 --- a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.styles.scss +++ b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.styles.scss @@ -30,6 +30,7 @@ .right-action { display: flex; align-items: center; + min-width: 48px; .clear-all { font-size: 12px; @@ -52,10 +53,14 @@ .checkbox-value-section { display: flex; align-items: center; - justify-content: space-between; + gap: 4px; width: calc(100% - 24px); cursor: pointer; + .value-string { + width: 100%; + } + &.filter-disabled { cursor: not-allowed; @@ -74,9 +79,6 @@ } } - .value-string { - } - .only-btn { display: none; } @@ -177,3 +179,17 @@ } } } + +.label-false { + width: 2px; + height: 11px; + border-radius: 2px; + background: var(--bg-cherry-500); +} + +.label-true { + width: 2px; + height: 11px; + border-radius: 2px; + background: var(--bg-forest-500); +} diff --git a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.tsx b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.tsx index a9d96adf324f..784c7cdd2487 100644 --- a/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.tsx +++ b/frontend/src/components/QuickFilters/FilterRenderers/Checkbox/Checkbox.tsx @@ -504,6 +504,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element { onChange(value, currentFilterState[value], true); }} > +
{filter.customRendererForValue ? ( filter.customRendererForValue(value) ) : ( @@ -511,7 +512,7 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element { className="value-string" ellipsis={{ tooltip: { placement: 'right' } }} > - {value} + {String(value)} )} + )} +
+ ); +} + +Duration.defaultProps = { + onFilterChange: (): void => {}, +}; + +export default Duration; diff --git a/frontend/src/components/QuickFilters/QuickFilters.tsx b/frontend/src/components/QuickFilters/QuickFilters.tsx index ed443e8e4f67..af989aea13c9 100644 --- a/frontend/src/components/QuickFilters/QuickFilters.tsx +++ b/frontend/src/components/QuickFilters/QuickFilters.tsx @@ -5,19 +5,24 @@ import { SyncOutlined, VerticalAlignTopOutlined, } from '@ant-design/icons'; -import { Skeleton, Tooltip, Typography } from 'antd'; +import { Skeleton, Switch, Tooltip, Typography } from 'antd'; import getLocalStorageKey from 'api/browser/localstorage/get'; import setLocalStorageKey from 'api/browser/localstorage/set'; +import logEvent from 'api/common/logEvent'; import classNames from 'classnames'; import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; import { LOCALSTORAGE } from 'constants/localStorage'; +import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { cloneDeep, isFunction, isNull } from 'lodash-es'; import { Settings2 as SettingsIcon } from 'lucide-react'; +import { useAppContext } from 'providers/App/App'; import { useMemo, useState } from 'react'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { USER_ROLES } from 'types/roles'; import Checkbox from './FilterRenderers/Checkbox/Checkbox'; +import Duration from './FilterRenderers/Duration/Duration'; import Slider from './FilterRenderers/Slider/Slider'; import useFilterConfig from './hooks/useFilterConfig'; import AnnouncementTooltip from './QuickFiltersSettings/AnnouncementTooltip'; @@ -32,8 +37,14 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element { source, onFilterChange, signal, + showFilterCollapse = true, + showQueryName = true, } = props; + const { user } = useAppContext(); const [isSettingsOpen, setIsSettingsOpen] = useState(false); + const isAdmin = user.role === USER_ROLES.ADMIN; + const [params, setParams] = useApiMonitoringParams(); + const showIP = params.showIP ?? true; const { filterConfig, @@ -95,36 +106,33 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element { }; const lastQueryName = + showQueryName && currentQuery.builder.queryData?.[lastUsedQuery || 0]?.queryName; return (
- {source !== QuickFiltersSource.INFRA_MONITORING && - source !== QuickFiltersSource.API_MONITORING && ( -
-
- - - {lastQueryName ? 'Filters for' : 'Filters'} - - {lastQueryName && ( - - - {lastQueryName} - - - )} -
- -
- -
- -
+ {source !== QuickFiltersSource.INFRA_MONITORING && ( +
+
+ + + {lastQueryName ? 'Filters for' : 'Filters'} + + {lastQueryName && ( + + {lastQueryName} + )} +
+ +
+ +
+ +
+
+ {showFilterCollapse && (
- {isDynamicFilters && ( - -
- setIsSettingsOpen(true)} - /> - { - setLocalStorageKey( - LOCALSTORAGE.QUICK_FILTERS_SETTINGS_ANNOUNCEMENT, - 'false', - ); - }} - /> -
-
- )} -
+ )} + {isDynamicFilters && isAdmin && ( + +
+ setIsSettingsOpen(true)} + /> + { + setLocalStorageKey( + LOCALSTORAGE.QUICK_FILTERS_SETTINGS_ANNOUNCEMENT, + 'false', + ); + }} + /> +
+
+ )}
- )} +
+ )} {isCustomFiltersLoading ? (
@@ -179,31 +188,51 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
) : ( -
- {filterConfig.map((filter) => { - switch (filter.type) { - case FiltersType.CHECKBOX: - return ( - - ); - case FiltersType.SLIDER: - return ; - // eslint-disable-next-line sonarjs/no-duplicated-branches - default: - return ( - - ); - } - })} -
+ <> + {source === QuickFiltersSource.API_MONITORING && ( +
+ Show IP addresses + { + logEvent('API Monitoring: Show IP addresses clicked', { + showIP: !(showIP ?? true), + }); + setParams({ showIP }); + }} + /> +
+ )} +
+ {filterConfig.map((filter) => { + switch (filter.type) { + case FiltersType.CHECKBOX: + return ( + + ); + case FiltersType.DURATION: + return ; + case FiltersType.SLIDER: + return ; + // eslint-disable-next-line sonarjs/no-duplicated-branches + default: + return ( + + ); + } + })} +
+
)}
@@ -235,4 +264,6 @@ QuickFilters.defaultProps = { onFilterChange: null, signal: '', config: [], + showFilterCollapse: true, + showQueryName: true, }; diff --git a/frontend/src/components/QuickFilters/QuickFiltersSettings/OtherFilters.tsx b/frontend/src/components/QuickFilters/QuickFiltersSettings/OtherFilters.tsx index 05c9954295e3..635338ef5e78 100644 --- a/frontend/src/components/QuickFilters/QuickFiltersSettings/OtherFilters.tsx +++ b/frontend/src/components/QuickFilters/QuickFiltersSettings/OtherFilters.tsx @@ -3,10 +3,12 @@ import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; import { SIGNAL_DATA_SOURCE_MAP } from 'components/QuickFilters/QuickFiltersSettings/constants'; import { SignalType } from 'components/QuickFilters/types'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys'; import { useGetAttributeSuggestions } from 'hooks/queryBuilder/useGetAttributeSuggestions'; import { useMemo } from 'react'; import { TagFilter } from 'types/api/queryBuilder/queryBuilderData'; import { Filter as FilterType } from 'types/api/quickFilters/getCustomFilters'; +import { DataSource } from 'types/common/queryBuilder'; function OtherFiltersSkeleton(): JSX.Element { return ( @@ -34,6 +36,11 @@ function OtherFilters({ addedFilters: FilterType[]; setAddedFilters: React.Dispatch>; }): JSX.Element { + const isLogDataSource = useMemo( + () => SIGNAL_DATA_SOURCE_MAP[signal as SignalType] === DataSource.LOGS, + [signal], + ); + const { data: suggestionsData, isFetching: isFetchingSuggestions, @@ -45,18 +52,39 @@ function OtherFilters({ }, { queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue], - enabled: !!signal, + enabled: !!signal && isLogDataSource, }, ); - const otherFilters = useMemo( - () => - suggestionsData?.payload?.attributes?.filter( - (attr) => !addedFilters.some((filter) => filter.key === attr.key), - ), - [suggestionsData, addedFilters], + const { + data: aggregateKeysData, + isFetching: isFetchingAggregateKeys, + } = useGetAggregateKeys( + { + searchText: inputValue, + dataSource: SIGNAL_DATA_SOURCE_MAP[signal as SignalType], + aggregateOperator: 'noop', + aggregateAttribute: '', + tagType: '', + }, + { + queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue], + enabled: !!signal && !isLogDataSource, + }, ); + const otherFilters = useMemo(() => { + let filterAttributes; + if (isLogDataSource) { + filterAttributes = suggestionsData?.payload?.attributes || []; + } else { + filterAttributes = aggregateKeysData?.payload?.attributeKeys || []; + } + return filterAttributes?.filter( + (attr) => !addedFilters.some((filter) => filter.key === attr.key), + ); + }, [suggestionsData, aggregateKeysData, addedFilters, isLogDataSource]); + const handleAddFilter = (filter: FilterType): void => { setAddedFilters((prev) => [ ...prev, @@ -71,7 +99,8 @@ function OtherFilters({ }; const renderFilters = (): React.ReactNode => { - if (isFetchingSuggestions) return ; + const isLoading = isFetchingSuggestions || isFetchingAggregateKeys; + if (isLoading) return ; if (!otherFilters?.length) return
No values found
; diff --git a/frontend/src/components/QuickFilters/QuickFiltersSettings/QuickFiltersSettings.styles.scss b/frontend/src/components/QuickFilters/QuickFiltersSettings/QuickFiltersSettings.styles.scss index 6fa705434427..e6cbfc642e8c 100644 --- a/frontend/src/components/QuickFilters/QuickFiltersSettings/QuickFiltersSettings.styles.scss +++ b/frontend/src/components/QuickFilters/QuickFiltersSettings/QuickFiltersSettings.styles.scss @@ -7,6 +7,7 @@ background: var(--bg-slate-500); transition: width 0.05s ease-in-out; overflow: hidden; + color: var(--bg-vanilla-100); &.qf-logs-explorer { height: calc(100vh - 45px); @@ -16,6 +17,14 @@ height: 100vh; } + &.qf-api-monitoring { + height: calc(100vh - 45px); + } + + &.qf-traces-explorer { + height: calc(100vh - 45px); + } + &.hidden { width: 0; } @@ -172,6 +181,7 @@ .lightMode { .quick-filters-settings { background: var(--bg-vanilla-100); + color: var(--bg-slate-500); .search { .ant-input { background-color: var(--bg-vanilla-100); diff --git a/frontend/src/components/QuickFilters/QuickFiltersSettings/hooks/useQuickFilterSettings.tsx b/frontend/src/components/QuickFilters/QuickFiltersSettings/hooks/useQuickFilterSettings.tsx index f365ddba0663..bf4406c3045a 100644 --- a/frontend/src/components/QuickFilters/QuickFiltersSettings/hooks/useQuickFilterSettings.tsx +++ b/frontend/src/components/QuickFilters/QuickFiltersSettings/hooks/useQuickFilterSettings.tsx @@ -1,3 +1,4 @@ +import logEvent from 'api/common/logEvent'; import updateCustomFiltersAPI from 'api/quickFilters/updateCustomFilters'; import axios, { AxiosError } from 'axios'; import { SignalType } from 'components/QuickFilters/types'; @@ -46,6 +47,9 @@ const useQuickFilterSettings = ({ onSuccess: () => { setIsSettingsOpen(false); setIsStale(true); + logEvent('Quick Filters Settings: changes saved', { + addedFilters, + }); notifications.success({ message: 'Quick filters updated successfully', placement: 'bottomRight', diff --git a/frontend/src/components/QuickFilters/hooks/useFilterConfig.tsx b/frontend/src/components/QuickFilters/hooks/useFilterConfig.tsx index 2a095d6685ea..fb2659a6817b 100644 --- a/frontend/src/components/QuickFilters/hooks/useFilterConfig.tsx +++ b/frontend/src/components/QuickFilters/hooks/useFilterConfig.tsx @@ -33,7 +33,7 @@ const useFilterConfig = ({ const isDynamicFilters = useMemo(() => customFilters.length > 0, [ customFilters, ]); - const { isLoading: isCustomFiltersLoading } = useQuery< + const { isFetching: isCustomFiltersLoading } = useQuery< SuccessResponse | ErrorResponse, Error >( @@ -49,10 +49,10 @@ const useFilterConfig = ({ enabled: !!signal && isStale, }, ); - const filterConfig = useMemo(() => getFilterConfig(customFilters, config), [ - config, - customFilters, - ]); + const filterConfig = useMemo( + () => getFilterConfig(signal, customFilters, config), + [config, customFilters, signal], + ); return { filterConfig, diff --git a/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx b/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx index 069d52d6ebe0..f998e587eeb0 100644 --- a/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx +++ b/frontend/src/components/QuickFilters/tests/QuickFilters.test.tsx @@ -1,6 +1,7 @@ import '@testing-library/jest-dom'; import { + act, cleanup, fireEvent, render, @@ -8,6 +9,7 @@ import { waitFor, } from '@testing-library/react'; import { ENVIRONMENT } from 'constants/env'; +import ROUTES from 'constants/routes'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { otherFiltersResponse, @@ -17,6 +19,7 @@ import { import { server } from 'mocks-server/server'; import { rest } from 'msw'; import MockQueryClientProvider from 'providers/test/MockQueryClientProvider'; +import { USER_ROLES } from 'types/roles'; import QuickFilters from '../QuickFilters'; import { IQuickFiltersConfig, QuickFiltersSource, SignalType } from '../types'; @@ -26,6 +29,21 @@ jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({ useQueryBuilder: jest.fn(), })); +// eslint-disable-next-line sonarjs/no-duplicate-string +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.TRACES_EXPLORER}/`, + }), +})); + +const userRole = USER_ROLES.ADMIN; + +// mock useAppContext +jest.mock('providers/App/App', () => ({ + useAppContext: jest.fn(() => ({ user: { role: userRole } })), +})); + const handleFilterVisibilityChange = jest.fn(); const redirectWithQueryBuilderData = jest.fn(); const putHandler = jest.fn(); @@ -163,7 +181,9 @@ describe('Quick Filters with custom filters', () => { expect(screen.getByText('Filters for')).toBeInTheDocument(); expect(screen.getByText(QUERY_NAME)).toBeInTheDocument(); await screen.findByText(FILTER_SERVICE_NAME); - await screen.findByText('otel-demo'); + const allByText = await screen.findAllByText('otel-demo'); + // since 2 filter collapse are open, there are 2 filter items visible + expect(allByText).toHaveLength(2); const icon = await screen.findByTestId(SETTINGS_ICON_TEST_ID); fireEvent.click(icon); @@ -285,4 +305,59 @@ describe('Quick Filters with custom filters', () => { ); expect(requestBody.signal).toBe(SIGNAL); }); + + // render duration filter + it('should render duration slider for duration_nono filter', async () => { + // Set up fake timers **before rendering** + jest.useFakeTimers(); + + const { getByTestId } = render(); + await screen.findByText(FILTER_SERVICE_NAME); + expect(screen.getByText('Duration')).toBeInTheDocument(); + + // click to open the duration filter + fireEvent.click(screen.getByText('Duration')); + + const minDuration = getByTestId('min-input') as HTMLInputElement; + const maxDuration = getByTestId('max-input') as HTMLInputElement; + expect(minDuration).toHaveValue(null); + expect(minDuration).toHaveProperty('placeholder', '0'); + expect(maxDuration).toHaveValue(null); + expect(maxDuration).toHaveProperty('placeholder', '100000000'); + + await act(async () => { + // set values + fireEvent.change(minDuration, { target: { value: '10000' } }); + fireEvent.change(maxDuration, { target: { value: '20000' } }); + jest.advanceTimersByTime(2000); + }); + await waitFor(() => { + expect(redirectWithQueryBuilderData).toHaveBeenCalledWith( + expect.objectContaining({ + builder: { + queryData: expect.arrayContaining([ + expect.objectContaining({ + filters: expect.objectContaining({ + items: expect.arrayContaining([ + expect.objectContaining({ + key: expect.objectContaining({ key: 'durationNano' }), + op: '>=', + value: 10000000000, + }), + expect.objectContaining({ + key: expect.objectContaining({ key: 'durationNano' }), + op: '<=', + value: 20000000000, + }), + ]), + }), + }), + ]), + }, + }), + ); + }); + + jest.useRealTimers(); // Clean up + }); }); diff --git a/frontend/src/components/QuickFilters/types.ts b/frontend/src/components/QuickFilters/types.ts index e39daf232d48..45c671e77e39 100644 --- a/frontend/src/components/QuickFilters/types.ts +++ b/frontend/src/components/QuickFilters/types.ts @@ -5,6 +5,7 @@ import { DataSource } from 'types/common/queryBuilder'; export enum FiltersType { SLIDER = 'SLIDER', CHECKBOX = 'CHECKBOX', + DURATION = 'DURATION', // ALIAS FOR DURATION_NANO } export enum MinMax { @@ -42,6 +43,8 @@ export interface IQuickFiltersProps { onFilterChange?: (query: Query) => void; signal?: SignalType; className?: string; + showFilterCollapse?: boolean; + showQueryName?: boolean; } export enum QuickFiltersSource { diff --git a/frontend/src/components/QuickFilters/utils.tsx b/frontend/src/components/QuickFilters/utils.tsx index 4d1ed5ffa52f..4df900e8354d 100644 --- a/frontend/src/components/QuickFilters/utils.tsx +++ b/frontend/src/components/QuickFilters/utils.tsx @@ -1,30 +1,53 @@ +import { SIGNAL_DATA_SOURCE_MAP } from 'components/QuickFilters/QuickFiltersSettings/constants'; import { Filter as FilterType } from 'types/api/quickFilters/getCustomFilters'; -import { FiltersType, IQuickFiltersConfig } from './types'; +import { FiltersType, IQuickFiltersConfig, SignalType } from './types'; -const getFilterName = (str: string): string => +const FILTER_TITLE_MAP: Record = { + duration_nano: 'Duration', + hasError: 'Has Error (Status)', +}; + +const FILTER_TYPE_MAP: Record = { + duration_nano: FiltersType.DURATION, +}; + +const getFilterName = (str: string): string => { + if (FILTER_TITLE_MAP[str]) { + return FILTER_TITLE_MAP[str]; + } // replace . and _ with space // capitalize the first letter of each word - str + return str .replace(/\./g, ' ') .replace(/_/g, ' ') .split(' ') .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) .join(' '); +}; + +const getFilterType = (att: FilterType): FiltersType => { + if (FILTER_TYPE_MAP[att.key]) { + return FILTER_TYPE_MAP[att.key]; + } + return FiltersType.CHECKBOX; +}; export const getFilterConfig = ( + signal?: SignalType, customFilters?: FilterType[], config?: IQuickFiltersConfig[], ): IQuickFiltersConfig[] => { - if (!customFilters?.length) { + if (!customFilters?.length || !signal) { return config || []; } return customFilters.map( (att, index) => ({ - type: FiltersType.CHECKBOX, + type: getFilterType(att), title: getFilterName(att.key), + dataSource: SIGNAL_DATA_SOURCE_MAP[signal], attributeKey: { id: att.key, key: att.key, @@ -33,7 +56,7 @@ export const getFilterConfig = ( isColumn: att.isColumn, isJSON: att.isJSON, }, - defaultOpen: index === 0, + defaultOpen: index < 2, } as IQuickFiltersConfig), ); }; diff --git a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx index 3735ed1ff601..13b12a317400 100644 --- a/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx +++ b/frontend/src/container/AllAlertChannels/__tests__/CreateAlertChannel.test.tsx @@ -15,7 +15,7 @@ import { } from 'mocks-server/__mockdata__/alerts'; import { server } from 'mocks-server/server'; import { rest } from 'msw'; -import { fireEvent, render, screen, waitFor } from 'tests/test-utils'; +import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils'; import { testLabelInputAndHelpValue } from './testUtils'; @@ -30,6 +30,14 @@ jest.mock('hooks/useNotifications', () => ({ }, })), })); +const showErrorModal = jest.fn(); +jest.mock('providers/ErrorModalProvider', () => ({ + __esModule: true, + ...jest.requireActual('providers/ErrorModalProvider'), + useErrorModal: jest.fn(() => ({ + showErrorModal, + })), +})); jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({ MarkdownRenderer: jest.fn(() =>
Mocked MarkdownRenderer
), @@ -119,7 +127,7 @@ describe('Create Alert Channel', () => { fireEvent.click(saveButton); - await waitFor(() => expect(errorNotification).toHaveBeenCalled()); + await waitFor(() => expect(showErrorModal).toHaveBeenCalled()); }); it('Should check if clicking on Test button shows "An alert has been sent to this channel" success message if testing passes', async () => { server.use( @@ -151,9 +159,11 @@ describe('Create Alert Channel', () => { name: 'button_test_channel', }); - fireEvent.click(testButton); + act(() => { + fireEvent.click(testButton); + }); - await waitFor(() => expect(errorNotification).toHaveBeenCalled()); + await waitFor(() => expect(showErrorModal).toHaveBeenCalled()); }); }); describe('New Alert Channel Cascading Fields Based on Channel Type', () => { diff --git a/frontend/src/container/ApiMonitoring/Explorer/Explorer.tsx b/frontend/src/container/ApiMonitoring/Explorer/Explorer.tsx index 14c57a67426a..c255cab7c02b 100644 --- a/frontend/src/container/ApiMonitoring/Explorer/Explorer.tsx +++ b/frontend/src/container/ApiMonitoring/Explorer/Explorer.tsx @@ -1,23 +1,16 @@ import './Explorer.styles.scss'; -import { FilterOutlined } from '@ant-design/icons'; import * as Sentry from '@sentry/react'; -import { Switch, Typography } from 'antd'; import logEvent from 'api/common/logEvent'; import cx from 'classnames'; import QuickFilters from 'components/QuickFilters/QuickFilters'; -import { QuickFiltersSource } from 'components/QuickFilters/types'; +import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; import { useEffect } from 'react'; -import { useApiMonitoringParams } from '../queryParams'; -import { ApiMonitoringQuickFiltersConfig } from '../utils'; import DomainList from './Domains/DomainList'; function Explorer(): JSX.Element { - const [params, setParams] = useApiMonitoringParams(); - const showIP = params.showIP ?? true; - useEffect(() => { logEvent('API Monitoring: Landing page visited', {}); }, []); @@ -26,29 +19,12 @@ function Explorer(): JSX.Element { }>
-
- - Filters -
- -
- Show IP addresses - { - logEvent('API Monitoring: Show IP addresses clicked', { - showIP: !(showIP ?? true), - }); - setParams({ showIP }); - }} - /> -
- {}} />
diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx index 03accc56f0b6..e481a4165cdb 100644 --- a/frontend/src/container/AppLayout/index.tsx +++ b/frontend/src/container/AppLayout/index.tsx @@ -5,16 +5,15 @@ import './AppLayout.styles.scss'; import * as Sentry from '@sentry/react'; import { Flex } from 'antd'; -import manageCreditCardApi from 'api/billing/manage'; import getLocalStorageApi from 'api/browser/localstorage/get'; import setLocalStorageApi from 'api/browser/localstorage/set'; import logEvent from 'api/common/logEvent'; +import manageCreditCardApi from 'api/v1/portal/create'; import getUserLatestVersion from 'api/v1/version/getLatestVersion'; import getUserVersion from 'api/v1/version/getVersion'; import cx from 'classnames'; import ChatSupportGateway from 'components/ChatSupportGateway/ChatSupportGateway'; import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; import { Events } from 'constants/events'; import { FeatureKeys } from 'constants/features'; import { LOCALSTORAGE } from 'constants/localStorage'; @@ -51,8 +50,9 @@ import { UPDATE_LATEST_VERSION, UPDATE_LATEST_VERSION_ERROR, } from 'types/actions/app'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { SuccessResponseV2 } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; +import APIError from 'types/api/error'; import { LicenseEvent, LicensePlatform, @@ -75,8 +75,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element { isLoggedIn, user, trialInfo, - activeLicenseV3, - isFetchingActiveLicenseV3, + activeLicense, + isFetchingActiveLicense, featureFlags, isFetchingFeatureFlags, featureFlagsFetchError, @@ -93,20 +93,21 @@ function AppLayout(props: AppLayoutProps): JSX.Element { const [slowApiWarningShown, setSlowApiWarningShown] = useState(false); const handleBillingOnSuccess = ( - data: ErrorResponse | SuccessResponse, + data: SuccessResponseV2, ): void => { - if (data?.payload?.redirectURL) { + if (data?.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }; - const handleBillingOnError = (): void => { + const handleBillingOnError = (error: APIError): void => { notifications.error({ - message: SOMETHING_WENT_WRONG, + message: error.getErrorCode(), + description: error.getErrorMessage(), }); }; @@ -260,8 +261,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element { useEffect(() => { if ( - !isFetchingActiveLicenseV3 && - activeLicenseV3 && + !isFetchingActiveLicense && + activeLicense && trialInfo?.onTrial && !trialInfo?.trialConvertedToSubscription && !trialInfo?.workSpaceBlock && @@ -269,16 +270,16 @@ function AppLayout(props: AppLayoutProps): JSX.Element { ) { setShowTrialExpiryBanner(true); } - }, [isFetchingActiveLicenseV3, activeLicenseV3, trialInfo]); + }, [isFetchingActiveLicense, activeLicense, trialInfo]); useEffect(() => { - if (!isFetchingActiveLicenseV3 && activeLicenseV3) { - const isTerminated = activeLicenseV3.state === LicenseState.TERMINATED; - const isExpired = activeLicenseV3.state === LicenseState.EXPIRED; - const isCancelled = activeLicenseV3.state === LicenseState.CANCELLED; - const isDefaulted = activeLicenseV3.state === LicenseState.DEFAULTED; + if (!isFetchingActiveLicense && activeLicense) { + const isTerminated = activeLicense.state === LicenseState.TERMINATED; + const isExpired = activeLicense.state === LicenseState.EXPIRED; + const isCancelled = activeLicense.state === LicenseState.CANCELLED; + const isDefaulted = activeLicense.state === LicenseState.DEFAULTED; const isEvaluationExpired = - activeLicenseV3.state === LicenseState.EVALUATION_EXPIRED; + activeLicense.state === LicenseState.EVALUATION_EXPIRED; const isWorkspaceAccessRestricted = isTerminated || @@ -287,7 +288,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { isDefaulted || isEvaluationExpired; - const { platform } = activeLicenseV3; + const { platform } = activeLicense; if ( isWorkspaceAccessRestricted && @@ -296,17 +297,17 @@ function AppLayout(props: AppLayoutProps): JSX.Element { setShowWorkspaceRestricted(true); } } - }, [isFetchingActiveLicenseV3, activeLicenseV3]); + }, [isFetchingActiveLicense, activeLicense]); useEffect(() => { if ( - !isFetchingActiveLicenseV3 && - !isNull(activeLicenseV3) && - activeLicenseV3?.event_queue?.event === LicenseEvent.DEFAULT + !isFetchingActiveLicense && + !isNull(activeLicense) && + activeLicense?.event_queue?.event === LicenseEvent.DEFAULT ) { setShowPaymentFailedWarning(true); } - }, [activeLicenseV3, isFetchingActiveLicenseV3]); + }, [activeLicense, isFetchingActiveLicense]); useEffect(() => { // after logging out hide the trial expiry banner @@ -392,7 +393,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { if ( !isFetchingFeatureFlags && (featureFlags || featureFlagsFetchError) && - activeLicenseV3 && + activeLicense && trialInfo ) { let isChatSupportEnabled = false; @@ -421,7 +422,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { isCloudUserVal, isFetchingFeatureFlags, isLoggedIn, - activeLicenseV3, + activeLicense, trialInfo, ]); @@ -523,14 +524,14 @@ function AppLayout(props: AppLayoutProps): JSX.Element { const renderWorkspaceRestrictedBanner = (): JSX.Element => (
- {activeLicenseV3?.state === LicenseState.TERMINATED && ( + {activeLicense?.state === LicenseState.TERMINATED && ( <> Your SigNoz license is terminated, enterprise features have been disabled. Please contact support at{' '} support@signoz.io for new license )} - {activeLicenseV3?.state === LicenseState.EXPIRED && ( + {activeLicense?.state === LicenseState.EXPIRED && ( <> Your SigNoz license has expired. Please contact support at{' '} support@signoz.io for renewal to @@ -544,7 +545,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { )} - {activeLicenseV3?.state === LicenseState.CANCELLED && ( + {activeLicense?.state === LicenseState.CANCELLED && ( <> Your SigNoz license is cancelled. Please contact support at{' '} support@signoz.io for reactivation @@ -559,7 +560,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { )} - {activeLicenseV3?.state === LicenseState.DEFAULTED && ( + {activeLicense?.state === LicenseState.DEFAULTED && ( <> Your SigNoz license is defaulted. Please clear the bill to continue using the enterprise features. Contact support at{' '} @@ -575,7 +576,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { )} - {activeLicenseV3?.state === LicenseState.EVALUATION_EXPIRED && ( + {activeLicense?.state === LicenseState.EVALUATION_EXPIRED && ( <> Your SigNoz trial has ended. Please contact support at{' '} support@signoz.io for next steps to @@ -624,7 +625,7 @@ function AppLayout(props: AppLayoutProps): JSX.Element { Your bill payment has failed. Your workspace will get suspended on{' '} {getFormattedDateWithMinutes( - dayjs(activeLicenseV3?.event_queue?.scheduled_at).unix() || Date.now(), + dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(), )} . diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index 9906cace1e2e..e2f4bc847fc0 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -16,10 +16,10 @@ import { Typography, } from 'antd'; import { ColumnsType } from 'antd/es/table'; -import updateCreditCardApi from 'api/billing/checkout'; import getUsage, { UsageResponsePayloadProps } from 'api/billing/getUsage'; -import manageCreditCardApi from 'api/billing/manage'; import logEvent from 'api/common/logEvent'; +import updateCreditCardApi from 'api/v1/checkout/create'; +import manageCreditCardApi from 'api/v1/portal/create'; import Spinner from 'components/Spinner'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; @@ -31,9 +31,8 @@ import { useAppContext } from 'providers/App/App'; import { useCallback, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useMutation, useQuery } from 'react-query'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { SuccessResponseV2 } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; -import { License } from 'types/api/licenses/def'; import { getFormattedDate, getRemainingDays } from 'utils/timeUtils'; import { BillingUsageGraph } from './BillingUsageGraph/BillingUsageGraph'; @@ -126,7 +125,6 @@ export default function BillingContainer(): JSX.Element { const daysRemainingStr = t('days_remaining'); const [headerText, setHeaderText] = useState(''); const [billAmount, setBillAmount] = useState(0); - const [activeLicense, setActiveLicense] = useState(null); const [daysRemaining, setDaysRemaining] = useState(0); const [isFreeTrial, setIsFreeTrial] = useState(false); const [data, setData] = useState([]); @@ -137,11 +135,10 @@ export default function BillingContainer(): JSX.Element { const { user, org, - licenses, trialInfo, - isFetchingActiveLicenseV3, - activeLicenseV3, - activeLicenseV3FetchError, + isFetchingActiveLicense, + activeLicense, + activeLicenseFetchError, } = useAppContext(); const { notifications } = useNotifications(); @@ -216,14 +213,9 @@ export default function BillingContainer(): JSX.Element { }); useEffect(() => { - const activeValidLicense = - licenses?.licenses?.find((license) => license.isCurrent === true) || null; - - setActiveLicense(activeValidLicense); - if ( - !isFetchingActiveLicenseV3 && - !activeLicenseV3FetchError && + !isFetchingActiveLicense && + !activeLicenseFetchError && trialInfo?.onTrial ) { const remainingDays = getRemainingDays(trialInfo?.trialEnd); @@ -238,12 +230,11 @@ export default function BillingContainer(): JSX.Element { ); } }, [ - licenses?.licenses, - activeLicenseV3, + activeLicense, trialInfo?.onTrial, trialInfo?.trialEnd, - isFetchingActiveLicenseV3, - activeLicenseV3FetchError, + isFetchingActiveLicense, + activeLicenseFetchError, ]); const columns: ColumnsType = [ @@ -288,11 +279,11 @@ export default function BillingContainer(): JSX.Element { ); const handleBillingOnSuccess = ( - data: ErrorResponse | SuccessResponse, + data: SuccessResponseV2, ): void => { - if (data?.payload?.redirectURL) { + if (data?.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); diff --git a/frontend/src/container/CreateAlertChannels/index.tsx b/frontend/src/container/CreateAlertChannels/index.tsx index 8651474347c7..a55792533f4d 100644 --- a/frontend/src/container/CreateAlertChannels/index.tsx +++ b/frontend/src/container/CreateAlertChannels/index.tsx @@ -16,6 +16,7 @@ import ROUTES from 'constants/routes'; import FormAlertChannels from 'container/FormAlertChannels'; import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; +import { useErrorModal } from 'providers/ErrorModalProvider'; import { useCallback, useEffect, useState } from 'react'; import { useTranslation } from 'react-i18next'; import APIError from 'types/api/error'; @@ -42,6 +43,7 @@ function CreateAlertChannels({ }: CreateAlertChannelsProps): JSX.Element { // init namespace for translations const { t } = useTranslation('channels'); + const { showErrorModal } = useErrorModal(); const [formInstance] = Form.useForm(); @@ -145,15 +147,12 @@ function CreateAlertChannels({ history.replace(ROUTES.ALL_CHANNELS); return { status: 'success', statusMessage: t('channel_creation_done') }; } catch (error) { - notifications.error({ - message: (error as APIError).error.error.code, - description: (error as APIError).error.error.message, - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [prepareSlackRequest, t, notifications]); + }, [prepareSlackRequest, notifications, t, showErrorModal]); const prepareWebhookRequest = useCallback(() => { // initial api request without auth params @@ -202,15 +201,12 @@ function CreateAlertChannels({ history.replace(ROUTES.ALL_CHANNELS); return { status: 'success', statusMessage: t('channel_creation_done') }; } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [prepareWebhookRequest, t, notifications]); + }, [prepareWebhookRequest, notifications, t, showErrorModal]); const preparePagerRequest = useCallback(() => { const validationError = ValidatePagerChannel(selectedConfig as PagerChannel); @@ -254,15 +250,12 @@ function CreateAlertChannels({ } return { status: 'failed', statusMessage: t('channel_creation_failed') }; } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [t, notifications, preparePagerRequest]); + }, [preparePagerRequest, t, notifications, showErrorModal]); const prepareOpsgenieRequest = useCallback( () => ({ @@ -287,15 +280,12 @@ function CreateAlertChannels({ history.replace(ROUTES.ALL_CHANNELS); return { status: 'success', statusMessage: t('channel_creation_done') }; } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [prepareOpsgenieRequest, t, notifications]); + }, [prepareOpsgenieRequest, notifications, t, showErrorModal]); const prepareEmailRequest = useCallback( () => ({ @@ -320,15 +310,12 @@ function CreateAlertChannels({ history.replace(ROUTES.ALL_CHANNELS); return { status: 'success', statusMessage: t('channel_creation_done') }; } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [prepareEmailRequest, t, notifications]); + }, [prepareEmailRequest, notifications, t, showErrorModal]); const prepareMsTeamsRequest = useCallback( () => ({ @@ -353,15 +340,12 @@ function CreateAlertChannels({ history.replace(ROUTES.ALL_CHANNELS); return { status: 'success', statusMessage: t('channel_creation_done') }; } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), - }); + showErrorModal(error as APIError); return { status: 'failed', statusMessage: t('channel_creation_failed') }; } finally { setSavingState(false); } - }, [prepareMsTeamsRequest, t, notifications]); + }, [prepareMsTeamsRequest, notifications, t, showErrorModal]); const onSaveHandler = useCallback( async (value: ChannelType) => { @@ -459,10 +443,8 @@ function CreateAlertChannels({ status: 'Test success', }); } catch (error) { - notifications.error({ - message: (error as APIError).error.error.code, - description: (error as APIError).error.error.message, - }); + showErrorModal(error as APIError); + logEvent('Alert Channel: Test notification', { type: channelType, sendResolvedAlert: selectedConfig?.send_resolved, diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx index a2e0eff9c829..beef5c66861d 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx +++ b/frontend/src/container/ExplorerOptions/ExplorerOptionWrapper.tsx @@ -14,6 +14,8 @@ function ExplorerOptionWrapper({ isLoading, onExport, sourcepage, + isOneChartPerQuery, + splitedQueries, }: ExplorerOptionsWrapperProps): JSX.Element { const [isExplorerOptionHidden, setIsExplorerOptionHidden] = useState(false); @@ -32,6 +34,8 @@ function ExplorerOptionWrapper({ sourcepage={sourcepage} isExplorerOptionHidden={isExplorerOptionHidden} setIsExplorerOptionHidden={setIsExplorerOptionHidden} + isOneChartPerQuery={isOneChartPerQuery} + splitedQueries={splitedQueries} /> ); } diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss index 9efc053245bd..179003da452b 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.styles.scss @@ -8,6 +8,21 @@ display: flex; gap: 16px; background-color: transparent; + + .multi-alert-button, + .multi-dashboard-button { + min-width: 130px; + + .ant-select-selector { + .ant-select-selection-placeholder { + margin-left: 0 !important; + } + + .ant-select-arrow { + display: none !important; + } + } + } } .hide-update { diff --git a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx index 3090babe1d91..fa4a723f1747 100644 --- a/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx +++ b/frontend/src/container/ExplorerOptions/ExplorerOptions.tsx @@ -90,6 +90,8 @@ function ExplorerOptions({ sourcepage, isExplorerOptionHidden = false, setIsExplorerOptionHidden, + isOneChartPerQuery = false, + splitedQueries = [], }: ExplorerOptionsProps): JSX.Element { const [isExport, setIsExport] = useState(false); const [isSaveModalOpen, setIsSaveModalOpen] = useState(false); @@ -99,6 +101,8 @@ function ExplorerOptions({ const history = useHistory(); const ref = useRef(null); const isDarkMode = useIsDarkMode(); + const [queryToExport, setQueryToExport] = useState(null); + const isLogsExplorer = sourcepage === DataSource.LOGS; const isMetricsExplorer = sourcepage === DataSource.METRICS; @@ -149,51 +153,62 @@ function ExplorerOptions({ const { user } = useAppContext(); - const handleConditionalQueryModification = useCallback((): string => { - if ( - query?.builder?.queryData?.[0]?.aggregateOperator !== StringOperators.NOOP - ) { - return JSON.stringify(query); - } + const handleConditionalQueryModification = useCallback( + (defaultQuery: Query | null): string => { + const queryToUse = defaultQuery || query; + if ( + queryToUse?.builder?.queryData?.[0]?.aggregateOperator !== + StringOperators.NOOP + ) { + return JSON.stringify(queryToUse); + } - // Modify aggregateOperator to count, as noop is not supported in alerts - const modifiedQuery = cloneDeep(query); + // Modify aggregateOperator to count, as noop is not supported in alerts + const modifiedQuery = cloneDeep(queryToUse); - modifiedQuery.builder.queryData[0].aggregateOperator = StringOperators.COUNT; + modifiedQuery.builder.queryData[0].aggregateOperator = StringOperators.COUNT; - return JSON.stringify(modifiedQuery); - }, [query]); + return JSON.stringify(modifiedQuery); + }, + [query], + ); - const onCreateAlertsHandler = useCallback(() => { - if (sourcepage === DataSource.TRACES) { - logEvent('Traces Explorer: Create alert', { - panelType, - }); - } else if (isLogsExplorer) { - logEvent('Logs Explorer: Create alert', { - panelType, - }); - } else if (isMetricsExplorer) { - logEvent('Metrics Explorer: Create alert', { - panelType, - }); - } + const onCreateAlertsHandler = useCallback( + (defaultQuery: Query | null) => { + if (sourcepage === DataSource.TRACES) { + logEvent('Traces Explorer: Create alert', { + panelType, + }); + } else if (isLogsExplorer) { + logEvent('Logs Explorer: Create alert', { + panelType, + }); + } else if (isMetricsExplorer) { + logEvent('Metrics Explorer: Create alert', { + panelType, + }); + } - const stringifiedQuery = handleConditionalQueryModification(); + const stringifiedQuery = handleConditionalQueryModification(defaultQuery); - history.push( - `${ROUTES.ALERTS_NEW}?${QueryParams.compositeQuery}=${encodeURIComponent( - stringifiedQuery, - )}`, - ); + history.push( + `${ROUTES.ALERTS_NEW}?${QueryParams.compositeQuery}=${encodeURIComponent( + stringifiedQuery, + )}`, + ); + }, // eslint-disable-next-line react-hooks/exhaustive-deps - }, [handleConditionalQueryModification, history]); + [handleConditionalQueryModification, history], + ); const onCancel = (value: boolean) => (): void => { onModalToggle(value); + if (isOneChartPerQuery) { + setQueryToExport(null); + } }; - const onAddToDashboard = (): void => { + const onAddToDashboard = useCallback((): void => { if (sourcepage === DataSource.TRACES) { logEvent('Traces Explorer: Add to dashboard clicked', { panelType, @@ -208,7 +223,7 @@ function ExplorerOptions({ }); } setIsExport(true); - }; + }, [isLogsExplorer, isMetricsExplorer, panelType, setIsExport, sourcepage]); const { data: viewsData, @@ -616,6 +631,120 @@ function ExplorerOptions({ return 'https://signoz.io/docs/product-features/trace-explorer/?utm_source=product&utm_medium=trace-explorer-toolbar'; }, [isLogsExplorer, isMetricsExplorer]); + const getQueryName = (query: Query): string => { + if (query.builder.queryFormulas.length > 0) { + return `Formula ${query.builder.queryFormulas[0].queryName}`; + } + return `Query ${query.builder.queryData[0].queryName}`; + }; + + const alertButton = useMemo(() => { + if (isOneChartPerQuery) { + const selectLabel = ( + + ); + return ( + + ); + } + return ( + + ); + }, [ + disabled, + isOneChartPerQuery, + onCreateAlertsHandler, + query, + splitedQueries, + ]); + + const dashboardButton = useMemo(() => { + if (isOneChartPerQuery) { + const selectLabel = ( + + ); + return ( + + ); + } + return ( + + ); + }, [disabled, isOneChartPerQuery, onAddToDashboard, splitedQueries]); + return (
{ @@ -719,24 +848,8 @@ function ExplorerOptions({
- - - + {alertButton} + {dashboardButton}
{/* Hide the info icon for metrics explorer until we get the docs link */} @@ -818,9 +931,15 @@ function ExplorerOptions({ destroyOnClose > { + if (isOneChartPerQuery && queryToExport) { + onExport(dashboard, isNewDashboard, queryToExport); + } else { + onExport(dashboard, isNewDashboard); + } + }} />
@@ -829,18 +948,26 @@ function ExplorerOptions({ export interface ExplorerOptionsProps { isLoading?: boolean; - onExport: (dashboard: Dashboard | null, isNewDashboard?: boolean) => void; + onExport: ( + dashboard: Dashboard | null, + isNewDashboard?: boolean, + queryToExport?: Query, + ) => void; query: Query | null; disabled: boolean; sourcepage: DataSource; isExplorerOptionHidden?: boolean; setIsExplorerOptionHidden?: Dispatch>; + isOneChartPerQuery?: boolean; + splitedQueries?: Query[]; } ExplorerOptions.defaultProps = { isLoading: false, isExplorerOptionHidden: false, setIsExplorerOptionHidden: undefined, + isOneChartPerQuery: false, + splitedQueries: [], }; export default ExplorerOptions; diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index e8e9b484a26f..89a1fa510558 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -142,6 +142,7 @@ function ChartPreview({ params: { allowSelectedIntervalForStepGen, }, + originalGraphType: graphType, }, alertDef?.version || DEFAULT_ENTITY_VERSION, { diff --git a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx index 9a87c228c0b8..dd91469d63f8 100644 --- a/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/FullView/index.tsx @@ -94,6 +94,7 @@ function FullView({ variables: getDashboardVariables(selectedDashboard?.data.variables), fillGaps: widget.fillSpans, formatForWeb: widget.panelTypes === PANEL_TYPES.TABLE, + originalGraphType: widget?.panelTypes, }; } updatedQuery.builder.queryData[0].pageSize = 10; diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx index c29050ca6bb8..a9d58a13bc90 100644 --- a/frontend/src/container/GridCardLayout/GridCard/index.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx @@ -208,6 +208,7 @@ function GridCardGraph({ : globalSelectedInterval, start: customTimeRange?.startTime || start, end: customTimeRange?.endTime || end, + originalGraphType: widget?.panelTypes, }, version || DEFAULT_ENTITY_VERSION, { diff --git a/frontend/src/container/GridCardLayout/__tests__/utils.test.ts b/frontend/src/container/GridCardLayout/__tests__/utils.test.ts new file mode 100644 index 000000000000..84904064f587 --- /dev/null +++ b/frontend/src/container/GridCardLayout/__tests__/utils.test.ts @@ -0,0 +1,228 @@ +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { EQueryType } from 'types/common/dashboard'; +import { DataSource } from 'types/common/queryBuilder'; + +import { getStepIntervalPoints, updateStepInterval } from '../utils'; + +describe('GridCardLayout Utils', () => { + describe('getStepIntervalPoints', () => { + it('should return 60 points for duration <= 1 hour', () => { + // 30 minutes in milliseconds + const start = Date.now(); + const end = start + 30 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(60); + }); + + it('should return 60 points for exactly 1 hour', () => { + // 1 hour in milliseconds + const start = Date.now(); + const end = start + 60 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(60); + }); + + it('should return 120 points for duration <= 3 hours', () => { + // 2 hours in milliseconds + const start = Date.now(); + const end = start + 2 * 60 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(120); + }); + + it('should return 120 points for exactly 3 hours', () => { + // 3 hours in milliseconds + const start = Date.now(); + const end = start + 3 * 60 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(120); + }); + + it('should return 180 points for duration <= 5 hours', () => { + // 4 hours in milliseconds + const start = Date.now(); + const end = start + 4 * 60 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(180); + }); + + it('should return 180 points for exactly 5 hours', () => { + // 5 hours in milliseconds + const start = Date.now(); + const end = start + 5 * 60 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(180); + }); + + it('should calculate dynamic interval for duration > 5 hours', () => { + // 10 hours in milliseconds + const start = Date.now(); + const end = start + 10 * 60 * 60 * 1000; + + const result = getStepIntervalPoints(start, end); + + // For 10 hours (600 minutes), interval should be ceil(600/80) = 8, rounded to 10, then * 60 = 600 + expect(result).toBe(600); + }); + + it('should handle very long durations correctly', () => { + // 7 days in milliseconds + const start = Date.now(); + const end = start + 7 * 24 * 60 * 60 * 1000; + + const result = getStepIntervalPoints(start, end); + + // For 7 days (10080 minutes), interval should be ceil(10080/80) = 126, rounded to 130, then * 60 = 7800 + expect(result).toBe(7800); + }); + + it('should round up to nearest multiple of 5 minutes', () => { + // 12 hours in milliseconds + const start = Date.now(); + const end = start + 12 * 60 * 60 * 1000; + + const result = getStepIntervalPoints(start, end); + + // For 12 hours (720 minutes), interval should be ceil(720/80) = 9, rounded to 10, then * 60 = 600 + expect(result).toBe(600); + }); + + it('should handle edge case with very small duration', () => { + // 1 minute in milliseconds + const start = Date.now(); + const end = start + 1 * 60 * 1000; + + expect(getStepIntervalPoints(start, end)).toBe(60); + }); + + it('should handle zero duration', () => { + const start = Date.now(); + const end = start; + + expect(getStepIntervalPoints(start, end)).toBe(60); + }); + }); + + describe('updateStepInterval', () => { + const mockQuery: Query = { + queryType: EQueryType.QUERY_BUILDER, + builder: { + queryData: [ + { + stepInterval: 30, + aggregateOperator: 'avg', + dataSource: DataSource.METRICS, + queryName: 'A', + aggregateAttribute: { key: 'cpu_usage', type: 'Gauge' }, + timeAggregation: 'avg', + spaceAggregation: 'avg', + functions: [], + filters: { items: [], op: 'AND' }, + expression: 'A', + disabled: false, + having: [], + groupBy: [], + orderBy: [], + limit: null, + offset: 0, + pageSize: 0, + reduceTo: 'avg', + legend: '', + }, + ], + queryFormulas: [], + }, + clickhouse_sql: [], + promql: [], + id: 'test-query', + }; + + it('should update stepInterval based on time range', () => { + // 2 hours duration + const minTime = Date.now(); + const maxTime = minTime + 2 * 60 * 60 * 1000; + + const result = updateStepInterval(mockQuery, minTime, maxTime); + + expect(result.builder.queryData[0].stepInterval).toBe(120); + }); + + it('should preserve other query properties', () => { + const minTime = Date.now(); + const maxTime = minTime + 1 * 60 * 60 * 1000; + + const result = updateStepInterval(mockQuery, minTime, maxTime); + + expect(result.builder.queryData[0].aggregateOperator).toBe('avg'); + expect(result.builder.queryData[0].queryName).toBe('A'); + expect(result.builder.queryData[0].dataSource).toBe('metrics'); + }); + + it('should handle multiple queryData items', () => { + const multiQueryMock: Query = { + ...mockQuery, + builder: { + queryData: [ + ...mockQuery.builder.queryData, + { + ...mockQuery.builder.queryData[0], + queryName: 'B', + stepInterval: 45, + }, + ], + queryFormulas: [], + }, + }; + + const minTime = Date.now(); + const maxTime = minTime + 4 * 60 * 60 * 1000; + + const result = updateStepInterval(multiQueryMock, minTime, maxTime); + + expect(result.builder.queryData).toHaveLength(2); + expect(result.builder.queryData[0].stepInterval).toBe(180); + expect(result.builder.queryData[1].stepInterval).toBe(180); + }); + + it('should use calculated stepInterval when original is undefined', () => { + const queryWithUndefinedStep: Query = { + ...mockQuery, + builder: { + queryData: [ + { + ...mockQuery.builder.queryData[0], + stepInterval: undefined as any, + }, + ], + queryFormulas: [], + }, + }; + + const minTime = Date.now(); + const maxTime = minTime + 1 * 60 * 60 * 1000; + + const result = updateStepInterval(queryWithUndefinedStep, minTime, maxTime); + + expect(result.builder.queryData[0].stepInterval).toBe(60); + }); + + it('should fallback to 60 when calculated stepInterval is 0', () => { + const minTime = Date.now(); + const maxTime = minTime; // Same time = 0 duration + + const result = updateStepInterval(mockQuery, minTime, maxTime); + + expect(result.builder.queryData[0].stepInterval).toBe(60); + }); + + it('should handle very large time ranges', () => { + const minTime = Date.now(); + const maxTime = minTime + 30 * 24 * 60 * 60 * 1000; // 30 days + + const result = updateStepInterval(mockQuery, minTime, maxTime); + + // Should calculate appropriate interval for 30 days + expect(result.builder.queryData[0].stepInterval).toBeGreaterThan(180); + }); + }); +}); diff --git a/frontend/src/container/GridCardLayout/utils.ts b/frontend/src/container/GridCardLayout/utils.ts index 72026a3c4946..f22c9e819e98 100644 --- a/frontend/src/container/GridCardLayout/utils.ts +++ b/frontend/src/container/GridCardLayout/utils.ts @@ -2,6 +2,7 @@ import { FORMULA_REGEXP } from 'constants/regExp'; import { isEmpty, isEqual } from 'lodash-es'; import { Layout } from 'react-grid-layout'; import { Dashboard, Widgets } from 'types/api/dashboard/getAll'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; export const removeUndefinedValuesFromLayout = (layout: Layout[]): Layout[] => layout.map((obj) => @@ -51,3 +52,63 @@ export const hasColumnWidthsChanged = ( return !isEqual(newWidths, existingWidths); }); }; + +/** + * Calculates the step interval in uPlot points (1 minute = 60 points) + * based on the time duration between two timestamps in nanoseconds. + * + * Conversion logic: + * - <= 1 hr → 1 min (60 points) + * - <= 3 hr → 2 min (120 points) + * - <= 5 hr → 3 min (180 points) + * - > 5 hr → max 80 bars, ceil((end-start)/80), rounded to nearest multiple of 5 min + * + * @param startNano - start time in nanoseconds + * @param endNano - end time in nanoseconds + * @returns stepInterval in uPlot points + */ +export function getStepIntervalPoints( + startNano: number, + endNano: number, +): number { + const startMs = startNano; + const endMs = endNano; + const durationMs = endMs - startMs; + const durationMin = durationMs / (60 * 1000); // convert to minutes + + if (durationMin <= 60) { + return 60; // 1 min + } + if (durationMin <= 180) { + return 120; // 2 min + } + if (durationMin <= 300) { + return 180; // 3 min + } + + const totalPoints = Math.ceil(durationMs / (1000 * 60)); // total minutes + const interval = Math.ceil(totalPoints / 80); // at most 80 bars + const roundedInterval = Math.ceil(interval / 5) * 5; // round up to nearest 5 + return roundedInterval * 60; // convert min to points +} + +export function updateStepInterval( + query: Query, + minTime: number, + maxTime: number, +): Query { + const stepIntervalPoints = getStepIntervalPoints(minTime, maxTime); + + return { + ...query, + builder: { + ...query.builder, + queryData: [ + ...query.builder.queryData.map((queryData) => ({ + ...queryData, + stepInterval: stepIntervalPoints || queryData.stepInterval || 60, + })), + ], + }, + }; +} diff --git a/frontend/src/container/Home/DataSourceInfo/DataSourceInfo.tsx b/frontend/src/container/Home/DataSourceInfo/DataSourceInfo.tsx index 82a94f4c7571..f9d50e06f6a8 100644 --- a/frontend/src/container/Home/DataSourceInfo/DataSourceInfo.tsx +++ b/frontend/src/container/Home/DataSourceInfo/DataSourceInfo.tsx @@ -19,12 +19,12 @@ function DataSourceInfo({ dataSentToSigNoz: boolean; isLoading: boolean; }): JSX.Element { - const { activeLicenseV3 } = useAppContext(); + const { activeLicense } = useAppContext(); const notSendingData = !dataSentToSigNoz; const isEnabled = - activeLicenseV3 && activeLicenseV3.platform === LicensePlatform.CLOUD; + activeLicense && activeLicense.platform === LicensePlatform.CLOUD; const { data: deploymentsData, @@ -88,8 +88,8 @@ function DataSourceInfo({ logEvent('Homepage: Connect dataSource clicked', {}); if ( - activeLicenseV3 && - activeLicenseV3.platform === LicensePlatform.CLOUD + activeLicense && + activeLicense.platform === LicensePlatform.CLOUD ) { history.push(ROUTES.GET_STARTED_WITH_CLOUD); } else { @@ -105,8 +105,8 @@ function DataSourceInfo({ logEvent('Homepage: Connect dataSource clicked', {}); if ( - activeLicenseV3 && - activeLicenseV3.platform === LicensePlatform.CLOUD + activeLicense && + activeLicense.platform === LicensePlatform.CLOUD ) { history.push(ROUTES.GET_STARTED_WITH_CLOUD); } else { diff --git a/frontend/src/container/Home/Home.tsx b/frontend/src/container/Home/Home.tsx index 58446801a74b..56257021e5ac 100644 --- a/frontend/src/container/Home/Home.tsx +++ b/frontend/src/container/Home/Home.tsx @@ -19,6 +19,7 @@ import { useGetDeploymentsData } from 'hooks/CustomDomain/useGetDeploymentsData' import { useGetHostList } from 'hooks/infraMonitoring/useGetHostList'; import { useGetK8sPodsList } from 'hooks/infraMonitoring/useGetK8sPodsList'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; +import { useGetTenantLicense } from 'hooks/useGetTenantLicense'; import history from 'lib/history'; import cloneDeep from 'lodash-es/cloneDeep'; import { CompassIcon, DotIcon, HomeIcon, Plus, Wrench, X } from 'lucide-react'; @@ -54,6 +55,8 @@ export default function Home(): JSX.Element { const [updatingUserPreferences, setUpdatingUserPreferences] = useState(false); const [loadingUserPreferences, setLoadingUserPreferences] = useState(true); + const { isCommunityUser, isCommunityEnterpriseUser } = useGetTenantLicense(); + const [checklistItems, setChecklistItems] = useState( defaultChecklistItemsState, ); @@ -300,17 +303,17 @@ export default function Home(): JSX.Element { } }, [hostData, k8sPodsData, handleUpdateChecklistDoneItem]); - const { activeLicenseV3, isFetchingActiveLicenseV3 } = useAppContext(); + const { activeLicense, isFetchingActiveLicense } = useAppContext(); const [isEnabled, setIsEnabled] = useState(false); useEffect(() => { - if (isFetchingActiveLicenseV3) { + if (isFetchingActiveLicense) { setIsEnabled(false); return; } - setIsEnabled(Boolean(activeLicenseV3?.platform === LicensePlatform.CLOUD)); - }, [activeLicenseV3, isFetchingActiveLicenseV3]); + setIsEnabled(Boolean(activeLicense?.platform === LicensePlatform.CLOUD)); + }, [activeLicense, isFetchingActiveLicense]); const { data: deploymentsData } = useGetDeploymentsData(isEnabled); @@ -323,22 +326,27 @@ export default function Home(): JSX.Element { setIsBannerDismissed(true); }; + const showBanner = useMemo( + () => !isBannerDismissed && (isCommunityUser || isCommunityEnterpriseUser), + [isBannerDismissed, isCommunityUser, isCommunityEnterpriseUser], + ); + return (
- {!isBannerDismissed && ( + {showBanner && (
- Big news: SigNoz Cloud Teams plan now starting at just $49/Month - + Big News: SigNoz Community Edition now available with SSO (Google OAuth) + and API keys - read more - 🥳🎉
diff --git a/frontend/src/container/Home/HomeChecklist/HomeChecklist.tsx b/frontend/src/container/Home/HomeChecklist/HomeChecklist.tsx index 3bf52945d058..6dab3d07526b 100644 --- a/frontend/src/container/Home/HomeChecklist/HomeChecklist.tsx +++ b/frontend/src/container/Home/HomeChecklist/HomeChecklist.tsx @@ -32,7 +32,7 @@ function HomeChecklist({ onSkip: (item: ChecklistItem) => void; isLoading: boolean; }): JSX.Element { - const { user, activeLicenseV3 } = useAppContext(); + const { user, activeLicense } = useAppContext(); const [completedChecklistItems, setCompletedChecklistItems] = useState< ChecklistItem[] @@ -94,8 +94,8 @@ function HomeChecklist({ if (item.toRoute !== ROUTES.GET_STARTED_WITH_CLOUD) { history.push(item.toRoute || ''); } else if ( - activeLicenseV3 && - activeLicenseV3.platform === LicensePlatform.CLOUD + activeLicense && + activeLicense.platform === LicensePlatform.CLOUD ) { history.push(item.toRoute || ''); } else { diff --git a/frontend/src/container/Home/Services/ServiceMetrics.tsx b/frontend/src/container/Home/Services/ServiceMetrics.tsx index 7f1baf92f1b3..d588f9d65d10 100644 --- a/frontend/src/container/Home/Services/ServiceMetrics.tsx +++ b/frontend/src/container/Home/Services/ServiceMetrics.tsx @@ -23,7 +23,7 @@ import { Link } from 'react-router-dom'; import { AppState } from 'store/reducers'; import { LicensePlatform, - LicenseV3ResModel, + LicenseResModel, } from 'types/api/licensesV3/getActive'; import { ServicesList } from 'types/api/metrics/getService'; import { GlobalReducer } from 'types/reducer/globalTime'; @@ -42,7 +42,7 @@ const EmptyState = memo( activeLicenseV3, }: { user: IUser; - activeLicenseV3: LicenseV3ResModel | null; + activeLicenseV3: LicenseResModel | null; }): JSX.Element => (
@@ -146,7 +146,7 @@ function ServiceMetrics({ GlobalReducer >((state) => state.globalTime); - const { user, activeLicenseV3 } = useAppContext(); + const { user, activeLicense } = useAppContext(); const [timeRange, setTimeRange] = useState(() => { const now = new Date().getTime(); @@ -335,7 +335,7 @@ function ServiceMetrics({ {servicesExist ? ( ) : ( - + )} diff --git a/frontend/src/container/Home/Services/ServiceTraces.tsx b/frontend/src/container/Home/Services/ServiceTraces.tsx index 23d3f613ba49..fd4b0592e169 100644 --- a/frontend/src/container/Home/Services/ServiceTraces.tsx +++ b/frontend/src/container/Home/Services/ServiceTraces.tsx @@ -32,7 +32,7 @@ export default function ServiceTraces({ (state) => state.globalTime, ); - const { user, activeLicenseV3 } = useAppContext(); + const { user, activeLicense } = useAppContext(); const now = new Date().getTime(); const [timeRange, setTimeRange] = useState({ @@ -124,8 +124,8 @@ export default function ServiceTraces({ }); if ( - activeLicenseV3 && - activeLicenseV3.platform === LicensePlatform.CLOUD + activeLicense && + activeLicense.platform === LicensePlatform.CLOUD ) { history.push(ROUTES.GET_STARTED_WITH_CLOUD); } else { @@ -160,7 +160,7 @@ export default function ServiceTraces({
), - [user?.role, activeLicenseV3], + [user?.role, activeLicense], ); const renderDashboardsList = useCallback( diff --git a/frontend/src/container/InfraMonitoringHosts/HostsList.tsx b/frontend/src/container/InfraMonitoringHosts/HostsList.tsx index c06f63a4b067..f4dac3f9eb16 100644 --- a/frontend/src/container/InfraMonitoringHosts/HostsList.tsx +++ b/frontend/src/container/InfraMonitoringHosts/HostsList.tsx @@ -8,6 +8,11 @@ import HostMetricDetail from 'components/HostMetricsDetail'; import QuickFilters from 'components/QuickFilters/QuickFilters'; import { QuickFiltersSource } from 'components/QuickFilters/types'; import { InfraMonitoringEvents } from 'constants/events'; +import { + getFiltersFromParams, + getOrderByFromParams, +} from 'container/InfraMonitoringK8s/commonUtils'; +import { INFRA_MONITORING_K8S_PARAMS_KEYS } from 'container/InfraMonitoringK8s/constants'; import { usePageSize } from 'container/InfraMonitoringK8s/utils'; import { useGetHostList } from 'hooks/infraMonitoring/useGetHostList'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; @@ -15,6 +20,7 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { Filter } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; @@ -27,20 +33,51 @@ function HostsList(): JSX.Element { const { maxTime, minTime } = useSelector( (state) => state.globalTime, ); + const [searchParams, setSearchParams] = useSearchParams(); const [currentPage, setCurrentPage] = useState(1); - const [filters, setFilters] = useState({ - items: [], - op: 'and', + const [filters, setFilters] = useState(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.FILTERS, + ); + if (!filters) { + return { + items: [], + op: 'and', + }; + } + return filters; }); const [showFilters, setShowFilters] = useState(true); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams)); - const [selectedHostName, setSelectedHostName] = useState(null); + const handleOrderByChange = ( + orderBy: { + columnName: string; + order: 'asc' | 'desc'; + } | null, + ): void => { + setOrderBy(orderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(orderBy), + }); + }; + + const [selectedHostName, setSelectedHostName] = useState(() => { + const hostName = searchParams.get('hostName'); + return hostName || null; + }); + + const handleHostClick = (hostName: string): void => { + setSelectedHostName(hostName); + setSearchParams({ ...searchParams, hostName }); + }; const { pageSize, setPageSize } = usePageSize('hosts'); @@ -82,6 +119,10 @@ function HostsList(): JSX.Element { const isNewFilterAdded = value.items.length !== filters.items.length; setFilters(value); handleChangeQueryData('filters', value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.FILTERS]: JSON.stringify(value), + }); if (isNewFilterAdded) { setCurrentPage(1); @@ -161,7 +202,10 @@ function HostsList(): JSX.Element {
)} - +
diff --git a/frontend/src/container/InfraMonitoringHosts/HostsListControls.tsx b/frontend/src/container/InfraMonitoringHosts/HostsListControls.tsx index 5c7cad87ffe2..ca0e85e75226 100644 --- a/frontend/src/container/InfraMonitoringHosts/HostsListControls.tsx +++ b/frontend/src/container/InfraMonitoringHosts/HostsListControls.tsx @@ -10,8 +10,10 @@ import { DataSource } from 'types/common/queryBuilder'; function HostsListControls({ handleFiltersChange, + filters, }: { handleFiltersChange: (value: IBuilderQuery['filters']) => void; + filters: IBuilderQuery['filters']; }): JSX.Element { const currentQuery = initialQueriesMap[DataSource.METRICS]; const updatedCurrentQuery = useMemo( @@ -26,11 +28,12 @@ function HostsListControls({ aggregateAttribute: { ...currentQuery.builder.queryData[0].aggregateAttribute, }, + filters, }, ], }, }), - [currentQuery], + [currentQuery, filters], ); const query = updatedCurrentQuery?.builder?.queryData[0] || null; diff --git a/frontend/src/container/InfraMonitoringHosts/HostsListTable.tsx b/frontend/src/container/InfraMonitoringHosts/HostsListTable.tsx index ba9b35143bfa..5c7d3bbe17c7 100644 --- a/frontend/src/container/InfraMonitoringHosts/HostsListTable.tsx +++ b/frontend/src/container/InfraMonitoringHosts/HostsListTable.tsx @@ -27,7 +27,7 @@ export default function HostsListTable({ tableData: data, hostMetricsData, filters, - setSelectedHostName, + onHostClick, currentPage, setCurrentPage, pageSize, @@ -77,7 +77,7 @@ export default function HostsListTable({ ); const handleRowClick = (record: HostRowData): void => { - setSelectedHostName(record.hostName); + onHostClick(record.hostName); logEvent(InfraMonitoringEvents.ItemClicked, { entity: InfraMonitoringEvents.HostEntity, page: InfraMonitoringEvents.ListPage, diff --git a/frontend/src/container/InfraMonitoringHosts/utils.tsx b/frontend/src/container/InfraMonitoringHosts/utils.tsx index 743a9135b0df..b6778ff22d6a 100644 --- a/frontend/src/container/InfraMonitoringHosts/utils.tsx +++ b/frontend/src/container/InfraMonitoringHosts/utils.tsx @@ -41,16 +41,13 @@ export interface HostsListTableProps { | undefined; hostMetricsData: HostData[]; filters: TagFilter; - setSelectedHostName: Dispatch>; + onHostClick: (hostName: string) => void; currentPage: number; setCurrentPage: Dispatch>; pageSize: number; - setOrderBy: Dispatch< - SetStateAction<{ - columnName: string; - order: 'asc' | 'desc'; - } | null> - >; + setOrderBy: ( + orderBy: { columnName: string; order: 'asc' | 'desc' } | null, + ) => void; setPageSize: (pageSize: number) => void; } diff --git a/frontend/src/container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails.tsx index b6b22f462ddd..b0712c94e4d6 100644 --- a/frontend/src/container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails.tsx @@ -14,8 +14,14 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { filterDuplicateFilters } from 'container/InfraMonitoringK8s/commonUtils'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { + filterDuplicateFilters, + getFiltersFromParams, +} from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { QUERY_KEYS } from 'container/InfraMonitoringK8s/EntityDetailsUtils/utils'; import { CustomTimeType, @@ -34,6 +40,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -82,11 +89,27 @@ function ClusterDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -103,12 +126,18 @@ function ClusterDetails({ value: cluster?.meta.k8s_cluster_name || '', }, ], - }), - [cluster?.meta.k8s_cluster_name], - ); + }; + }, [cluster?.meta.k8s_cluster_name, searchParams]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -138,9 +167,8 @@ function ClusterDetails({ value: cluster?.meta.k8s_cluster_name || '', }, ], - }), - [cluster?.meta.k8s_cluster_name], - ); + }; + }, [cluster?.meta.k8s_cluster_name, searchParams]); const [logsAndTracesFilters, setLogsAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -181,6 +209,13 @@ function ClusterDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -220,7 +255,7 @@ function ClusterDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogsAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_CLUSTER_NAME].includes(item.key?.key ?? ''), @@ -240,7 +275,7 @@ function ClusterDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -250,6 +285,16 @@ function ClusterDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -257,7 +302,7 @@ function ClusterDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogsAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_CLUSTER_NAME].includes(item.key?.key ?? ''), @@ -272,7 +317,7 @@ function ClusterDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -283,6 +328,16 @@ function ClusterDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -290,7 +345,7 @@ function ClusterDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const clusterKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -308,7 +363,7 @@ function ClusterDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -322,6 +377,16 @@ function ClusterDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Clusters/K8sClustersList.tsx b/frontend/src/container/InfraMonitoringK8s/Clusters/K8sClustersList.tsx index 19e920ffe999..6732d018dc40 100644 --- a/frontend/src/container/InfraMonitoringK8s/Clusters/K8sClustersList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Clusters/K8sClustersList.tsx @@ -23,11 +23,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -59,19 +62,36 @@ function K8sClustersList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>({ columnName: 'cpu', order: 'desc' }); + } | null>(() => getOrderByFromParams(searchParams, false)); const [selectedClusterName, setselectedClusterName] = useState( - null, + () => { + const clusterName = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.CLUSTER_NAME, + ); + if (clusterName) { + return clusterName; + } + return null; + }, ); const { pageSize, setPageSize } = usePageSize(K8sCategory.CLUSTERS); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -258,15 +278,26 @@ function K8sClustersList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -322,6 +353,10 @@ function K8sClustersList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedClusterName(record.clusterUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.CLUSTER_NAME]: record.clusterUID, + }); } else { handleGroupByRowClick(record); } @@ -348,6 +383,11 @@ function K8sClustersList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -372,7 +412,9 @@ function K8sClustersList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedClusterName(record.clusterUID), + onClick: (): void => { + setselectedClusterName(record.clusterUID); + }, className: 'expanded-clickable-row', })} /> @@ -436,6 +478,20 @@ function K8sClustersList({ const handleCloseClusterDetail = (): void => { setselectedClusterName(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.CLUSTER_NAME, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -457,6 +513,10 @@ function K8sClustersList({ // Reset pagination on switching to groupBy setCurrentPage(1); setGroupBy(groupBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); setExpandedRowKeys([]); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -464,7 +524,7 @@ function K8sClustersList({ category: InfraMonitoringEvents.Cluster, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails.tsx b/frontend/src/container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails.tsx index 58ef36949af7..7b347835249a 100644 --- a/frontend/src/container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails.tsx @@ -13,7 +13,11 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { getFiltersFromParams } from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { CustomTimeType, Time, @@ -31,6 +35,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -83,11 +88,27 @@ function DaemonSetDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -117,12 +138,22 @@ function DaemonSetDetails({ value: daemonSet?.meta.k8s_namespace_name || '', }, ], - }), - [daemonSet?.meta.k8s_daemonset_name, daemonSet?.meta.k8s_namespace_name], - ); + }; + }, [ + daemonSet?.meta.k8s_daemonset_name, + daemonSet?.meta.k8s_namespace_name, + searchParams, + ]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -152,9 +183,8 @@ function DaemonSetDetails({ value: daemonSet?.meta.k8s_daemonset_name || '', }, ], - }), - [daemonSet?.meta.k8s_daemonset_name], - ); + }; + }, [daemonSet?.meta.k8s_daemonset_name, searchParams]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -195,6 +225,13 @@ function DaemonSetDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -234,7 +271,7 @@ function DaemonSetDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_DAEMON_SET_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -257,7 +294,7 @@ function DaemonSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -265,6 +302,15 @@ function DaemonSetDetails({ ...(paginationFilter ? [paginationFilter] : []), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -272,7 +318,7 @@ function DaemonSetDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_DAEMON_SET_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -289,7 +335,7 @@ function DaemonSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -298,6 +344,16 @@ function DaemonSetDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -305,7 +361,7 @@ function DaemonSetDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const daemonSetKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -323,7 +379,7 @@ function DaemonSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ daemonSetKindFilter, @@ -335,6 +391,16 @@ function DaemonSetDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/DaemonSets/K8sDaemonSetsList.tsx b/frontend/src/container/InfraMonitoringK8s/DaemonSets/K8sDaemonSetsList.tsx index 8518a470c33f..a104144e52fc 100644 --- a/frontend/src/container/InfraMonitoringK8s/DaemonSets/K8sDaemonSetsList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/DaemonSets/K8sDaemonSetsList.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -59,21 +62,38 @@ function K8sDaemonSetsList({ ); const [currentPage, setCurrentPage] = useState(1); + const [searchParams, setSearchParams] = useSearchParams(); const [expandedRowKeys, setExpandedRowKeys] = useState([]); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); - const [selectedDaemonSetUID, setselectedDaemonSetUID] = useState< + const [selectedDaemonSetUID, setSelectedDaemonSetUID] = useState< string | null - >(null); + >(() => { + const daemonSetUID = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.DAEMONSET_UID, + ); + if (daemonSetUID) { + return daemonSetUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.DAEMONSETS); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -262,15 +282,26 @@ function K8sDaemonSetsList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -329,7 +360,11 @@ function K8sDaemonSetsList({ const handleRowClick = (record: K8sDaemonSetsRowData): void => { if (groupBy.length === 0) { setSelectedRowData(null); - setselectedDaemonSetUID(record.daemonsetUID); + setSelectedDaemonSetUID(record.daemonsetUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.DAEMONSET_UID]: record.daemonsetUID, + }); } else { handleGroupByRowClick(record); } @@ -356,6 +391,11 @@ function K8sDaemonSetsList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -380,7 +420,9 @@ function K8sDaemonSetsList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedDaemonSetUID(record.daemonsetUID), + onClick: (): void => { + setSelectedDaemonSetUID(record.daemonsetUID); + }, className: 'expanded-clickable-row', })} /> @@ -443,7 +485,21 @@ function K8sDaemonSetsList({ }; const handleCloseDaemonSetDetail = (): void => { - setselectedDaemonSetUID(null); + setSelectedDaemonSetUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.DAEMONSET_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -464,6 +520,10 @@ function K8sDaemonSetsList({ setCurrentPage(1); setGroupBy(groupBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); setExpandedRowKeys([]); logEvent(InfraMonitoringEvents.GroupByChanged, { @@ -472,7 +532,7 @@ function K8sDaemonSetsList({ category: InfraMonitoringEvents.DaemonSet, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails.tsx index 2531f4723398..951b4cf7dd89 100644 --- a/frontend/src/container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails.tsx @@ -14,8 +14,14 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { filterDuplicateFilters } from 'container/InfraMonitoringK8s/commonUtils'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { + filterDuplicateFilters, + getFiltersFromParams, +} from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { QUERY_KEYS } from 'container/InfraMonitoringK8s/EntityDetailsUtils/utils'; import { CustomTimeType, @@ -34,6 +40,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -85,11 +92,27 @@ function DeploymentDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -119,12 +142,22 @@ function DeploymentDetails({ value: deployment?.meta.k8s_namespace_name || '', }, ], - }), - [deployment?.meta.k8s_deployment_name, deployment?.meta.k8s_namespace_name], - ); + }; + }, [ + deployment?.meta.k8s_deployment_name, + deployment?.meta.k8s_namespace_name, + searchParams, + ]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -154,9 +187,8 @@ function DeploymentDetails({ value: deployment?.meta.k8s_deployment_name || '', }, ], - }), - [deployment?.meta.k8s_deployment_name], - ); + }; + }, [deployment?.meta.k8s_deployment_name, searchParams]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -197,6 +229,13 @@ function DeploymentDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -236,7 +275,7 @@ function DeploymentDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_DEPLOYMENT_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -259,7 +298,7 @@ function DeploymentDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -269,6 +308,16 @@ function DeploymentDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -276,7 +325,7 @@ function DeploymentDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_DEPLOYMENT_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -293,7 +342,7 @@ function DeploymentDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -304,6 +353,16 @@ function DeploymentDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -311,7 +370,7 @@ function DeploymentDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const deploymentKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -329,7 +388,7 @@ function DeploymentDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -343,6 +402,16 @@ function DeploymentDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Deployments/K8sDeploymentsList.tsx b/frontend/src/container/InfraMonitoringK8s/Deployments/K8sDeploymentsList.tsx index 7b463bf4af8c..b4294226bcc4 100644 --- a/frontend/src/container/InfraMonitoringK8s/Deployments/K8sDeploymentsList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Deployments/K8sDeploymentsList.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -61,19 +64,36 @@ function K8sDeploymentsList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); const [selectedDeploymentUID, setselectedDeploymentUID] = useState< string | null - >(null); + >(() => { + const deploymentUID = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.DEPLOYMENT_UID, + ); + if (deploymentUID) { + return deploymentUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.DEPLOYMENTS); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -264,15 +284,26 @@ function K8sDeploymentsList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -333,6 +364,10 @@ function K8sDeploymentsList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedDeploymentUID(record.deploymentUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.DEPLOYMENT_UID]: record.deploymentUID, + }); } else { handleGroupByRowClick(record); } @@ -359,6 +394,11 @@ function K8sDeploymentsList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -383,7 +423,9 @@ function K8sDeploymentsList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedDeploymentUID(record.deploymentUID), + onClick: (): void => { + setselectedDeploymentUID(record.deploymentUID); + }, className: 'expanded-clickable-row', })} /> @@ -447,6 +489,20 @@ function K8sDeploymentsList({ const handleCloseDeploymentDetail = (): void => { setselectedDeploymentUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.DEPLOYMENT_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -468,6 +524,10 @@ function K8sDeploymentsList({ // Reset pagination on switching to groupBy setCurrentPage(1); setGroupBy(groupBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); setExpandedRowKeys([]); logEvent(InfraMonitoringEvents.GroupByChanged, { @@ -476,7 +536,7 @@ function K8sDeploymentsList({ category: InfraMonitoringEvents.Deployment, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/EntityEvents.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/EntityEvents.tsx index 70e02902c995..97808f2d299b 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/EntityEvents.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents/EntityEvents.tsx @@ -3,6 +3,7 @@ import './entityEvents.styles.scss'; import { Color } from '@signozhq/design-tokens'; import { Button, Table, TableColumnsType } from 'antd'; +import { VIEWS } from 'components/HostMetricsDetail/constants'; import { DEFAULT_ENTITY_VERSION } from 'constants/app'; import { EventContents } from 'container/InfraMonitoringK8s/commonUtils'; import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; @@ -28,6 +29,7 @@ import { DataSource } from 'types/common/queryBuilder'; import { EntityDetailsEmptyContainer, getEntityEventsOrLogsQueryPayload, + QUERY_KEYS, } from '../utils'; interface EventDataType { @@ -55,7 +57,10 @@ interface IEntityEventsProps { startTime: number; endTime: number; }; - handleChangeEventFilters: (filters: IBuilderQuery['filters']) => void; + handleChangeEventFilters: ( + filters: IBuilderQuery['filters'], + view: VIEWS, + ) => void; filters: IBuilderQuery['filters']; isModalTimeSelection: boolean; handleTimeChange: ( @@ -103,14 +108,18 @@ export default function Events({ ...currentQuery.builder.queryData[0].aggregateAttribute, }, filters: { - items: [], + items: filters.items.filter( + (item) => + item.key?.key !== QUERY_KEYS.K8S_OBJECT_KIND && + item.key?.key !== QUERY_KEYS.K8S_OBJECT_NAME, + ), op: 'AND', }, }, ], }, }), - [currentQuery], + [currentQuery, filters], ); const query = updatedCurrentQuery?.builder?.queryData[0] || null; @@ -243,7 +252,7 @@ export default function Events({ {query && ( handleChangeEventFilters(value, VIEWS.EVENTS)} disableNavigationShortcuts /> )} diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/EntityLogs.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/EntityLogs.tsx index 919559c795ad..1bb45b251f41 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/EntityLogs.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs/EntityLogs.tsx @@ -65,7 +65,6 @@ function EntityLogs({ const getItemContent = useCallback( (_: number, logToRender: ILog): JSX.Element => ( void; - handleChangeLogFilters: (value: IBuilderQuery['filters']) => void; + handleChangeLogFilters: (value: IBuilderQuery['filters'], view: VIEWS) => void; logFilters: IBuilderQuery['filters']; selectedInterval: Time; queryKey: string; @@ -78,7 +79,7 @@ function EntityLogsDetailedView({ {query && ( handleChangeLogFilters(value, VIEWS.LOGS)} disableNavigationShortcuts /> )} diff --git a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/EntityMetrics.tsx b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/EntityMetrics.tsx index b8a8bbe172e6..f4df393b2afe 100644 --- a/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/EntityMetrics.tsx +++ b/frontend/src/container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics/EntityMetrics.tsx @@ -110,6 +110,7 @@ function EntityMetrics({ softMin: null, minTimeScale: timeRange.startTime, maxTimeScale: timeRange.endTime, + enableZoom: true, }); }), [ @@ -162,7 +163,7 @@ function EntityMetrics({
void; - handleChangeTracesFilters: (value: IBuilderQuery['filters']) => void; + handleChangeTracesFilters: ( + value: IBuilderQuery['filters'], + view: VIEWS, + ) => void; tracesFilters: IBuilderQuery['filters']; selectedInterval: Time; queryKey: string; @@ -164,7 +168,9 @@ function EntityTraces({ {query && ( + handleChangeTracesFilters(value, VIEWS.TRACES) + } disableNavigationShortcuts /> )} diff --git a/frontend/src/container/InfraMonitoringK8s/InfraMonitoringK8s.tsx b/frontend/src/container/InfraMonitoringK8s/InfraMonitoringK8s.tsx index a3f520629db1..9aa6ab6f33aa 100644 --- a/frontend/src/container/InfraMonitoringK8s/InfraMonitoringK8s.tsx +++ b/frontend/src/container/InfraMonitoringK8s/InfraMonitoringK8s.tsx @@ -23,6 +23,7 @@ import { } from 'lucide-react'; import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'; import { useState } from 'react'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; import K8sClustersList from './Clusters/K8sClustersList'; @@ -30,6 +31,7 @@ import { ClustersQuickFiltersConfig, DaemonSetsQuickFiltersConfig, DeploymentsQuickFiltersConfig, + INFRA_MONITORING_K8S_PARAMS_KEYS, JobsQuickFiltersConfig, K8sCategories, NamespaceQuickFiltersConfig, @@ -50,7 +52,14 @@ import K8sVolumesList from './Volumes/K8sVolumesList'; export default function InfraMonitoringK8s(): JSX.Element { const [showFilters, setShowFilters] = useState(true); - const [selectedCategory, setSelectedCategory] = useState(K8sCategories.PODS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedCategory, setSelectedCategory] = useState(() => { + const category = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.CATEGORY); + if (category) { + return category as keyof typeof K8sCategories; + } + return K8sCategories.PODS; + }); const [quickFiltersLastUpdated, setQuickFiltersLastUpdated] = useState(-1); const { currentQuery } = useQueryBuilder(); @@ -70,6 +79,12 @@ export default function InfraMonitoringK8s(): JSX.Element { // in infra monitoring k8s, we are using only one query, hence updating the 0th index of queryData handleChangeQueryData('filters', query.builder.queryData[0].filters); setQuickFiltersLastUpdated(Date.now()); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.FILTERS]: JSON.stringify( + query.builder.queryData[0].filters, + ), + }); logEvent(InfraMonitoringEvents.FilterApplied, { entity: InfraMonitoringEvents.K8sEntity, @@ -295,6 +310,9 @@ export default function InfraMonitoringK8s(): JSX.Element { const handleCategoryChange = (key: string | string[]): void => { if (Array.isArray(key) && key.length > 0) { setSelectedCategory(key[0] as string); + setSearchParams({ + [INFRA_MONITORING_K8S_PARAMS_KEYS.CATEGORY]: key[0] as string, + }); // Reset filters handleChangeQueryData('filters', { items: [], op: 'and' }); } diff --git a/frontend/src/container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails.tsx index 63ea8eaae90f..47063f487db1 100644 --- a/frontend/src/container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails.tsx @@ -13,7 +13,11 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { getFiltersFromParams } from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { CustomTimeType, Time, @@ -31,6 +35,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -80,11 +85,27 @@ function JobDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -114,12 +135,18 @@ function JobDetails({ value: job?.meta.k8s_namespace_name || '', }, ], - }), - [job?.meta.k8s_job_name, job?.meta.k8s_namespace_name], - ); + }; + }, [job?.meta.k8s_job_name, job?.meta.k8s_namespace_name, searchParams]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -149,9 +176,8 @@ function JobDetails({ value: job?.meta.k8s_job_name || '', }, ], - }), - [job?.meta.k8s_job_name], - ); + }; + }, [job?.meta.k8s_job_name, searchParams]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -192,6 +218,13 @@ function JobDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -231,7 +264,7 @@ function JobDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_JOB_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -253,7 +286,7 @@ function JobDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -261,6 +294,16 @@ function JobDetails({ ...(paginationFilter ? [paginationFilter] : []), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -268,7 +311,7 @@ function JobDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_JOB_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -285,7 +328,7 @@ function JobDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -294,6 +337,16 @@ function JobDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -301,7 +354,7 @@ function JobDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const jobKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -319,7 +372,7 @@ function JobDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ jobKindFilter, @@ -331,6 +384,16 @@ function JobDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Jobs/K8sJobsList.tsx b/frontend/src/container/InfraMonitoringK8s/Jobs/K8sJobsList.tsx index 397d9b04889b..85d0505ce298 100644 --- a/frontend/src/container/InfraMonitoringK8s/Jobs/K8sJobsList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Jobs/K8sJobsList.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -61,17 +64,32 @@ function K8sJobsList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); - const [selectedJobUID, setselectedJobUID] = useState(null); + const [selectedJobUID, setselectedJobUID] = useState(() => { + const jobUID = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.JOB_UID); + if (jobUID) { + return jobUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.JOBS); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [selectedRowData, setSelectedRowData] = useState( null, @@ -251,15 +269,26 @@ function K8sJobsList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -306,6 +335,10 @@ function K8sJobsList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedJobUID(record.jobUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.JOB_UID]: record.jobUID, + }); } else { handleGroupByRowClick(record); } @@ -332,6 +365,11 @@ function K8sJobsList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -356,7 +394,9 @@ function K8sJobsList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedJobUID(record.jobUID), + onClick: (): void => { + setselectedJobUID(record.jobUID); + }, className: 'expanded-clickable-row', })} /> @@ -420,6 +460,20 @@ function K8sJobsList({ const handleCloseJobDetail = (): void => { setselectedJobUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.JOB_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -441,6 +495,10 @@ function K8sJobsList({ setCurrentPage(1); setGroupBy(groupBy); setExpandedRowKeys([]); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -448,7 +506,7 @@ function K8sJobsList({ category: InfraMonitoringEvents.Job, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/K8sHeader.tsx b/frontend/src/container/InfraMonitoringK8s/K8sHeader.tsx index 2f583a965195..fa9eaf2d6f95 100644 --- a/frontend/src/container/InfraMonitoringK8s/K8sHeader.tsx +++ b/frontend/src/container/InfraMonitoringK8s/K8sHeader.tsx @@ -7,11 +7,12 @@ import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearc import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2'; import { Filter, SlidersHorizontal } from 'lucide-react'; import { useCallback, useMemo, useState } from 'react'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { DataSource } from 'types/common/queryBuilder'; -import { K8sCategory } from './constants'; +import { INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory } from './constants'; import K8sFiltersSidePanel from './K8sFiltersSidePanel/K8sFiltersSidePanel'; import { IEntityColumn } from './utils'; @@ -47,11 +48,19 @@ function K8sHeader({ entity, }: K8sHeaderProps): JSX.Element { const [isFiltersSidePanelOpen, setIsFiltersSidePanelOpen] = useState(false); + const [searchParams, setSearchParams] = useSearchParams(); const currentQuery = initialQueriesMap[DataSource.METRICS]; - const updatedCurrentQuery = useMemo( - () => ({ + const updatedCurrentQuery = useMemo(() => { + const urlFilters = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.FILTERS); + let { filters } = currentQuery.builder.queryData[0]; + if (urlFilters) { + const decoded = decodeURIComponent(urlFilters); + const parsed = JSON.parse(decoded); + filters = parsed; + } + return { ...currentQuery, builder: { ...currentQuery.builder, @@ -62,20 +71,24 @@ function K8sHeader({ aggregateAttribute: { ...currentQuery.builder.queryData[0].aggregateAttribute, }, + filters, }, ], }, - }), - [currentQuery], - ); + }; + }, [currentQuery, searchParams]); const query = updatedCurrentQuery?.builder?.queryData[0] || null; const handleChangeTagFilters = useCallback( (value: IBuilderQuery['filters']) => { handleFiltersChange(value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.FILTERS]: JSON.stringify(value), + }); }, - [handleFiltersChange], + [handleFiltersChange, searchParams, setSearchParams], ); return ( diff --git a/frontend/src/container/InfraMonitoringK8s/Namespaces/K8sNamespacesList.tsx b/frontend/src/container/InfraMonitoringK8s/Namespaces/K8sNamespacesList.tsx index 5181b3ac80c8..6f076132cfac 100644 --- a/frontend/src/container/InfraMonitoringK8s/Namespaces/K8sNamespacesList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Namespaces/K8sNamespacesList.tsx @@ -23,11 +23,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -60,19 +63,36 @@ function K8sNamespacesList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); const [selectedNamespaceUID, setselectedNamespaceUID] = useState< string | null - >(null); + >(() => { + const namespaceUID = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.NAMESPACE_UID, + ); + if (namespaceUID) { + return namespaceUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.NAMESPACES); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -261,15 +281,26 @@ function K8sNamespacesList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -330,6 +361,10 @@ function K8sNamespacesList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedNamespaceUID(record.namespaceUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.NAMESPACE_UID]: record.namespaceUID, + }); } else { handleGroupByRowClick(record); } @@ -356,6 +391,11 @@ function K8sNamespacesList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -380,7 +420,9 @@ function K8sNamespacesList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedNamespaceUID(record.namespaceUID), + onClick: (): void => { + setselectedNamespaceUID(record.namespaceUID); + }, className: 'expanded-clickable-row', })} /> @@ -444,6 +486,20 @@ function K8sNamespacesList({ const handleCloseNamespaceDetail = (): void => { setselectedNamespaceUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.NAMESPACE_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -466,6 +522,10 @@ function K8sNamespacesList({ setCurrentPage(1); setGroupBy(groupBy); setExpandedRowKeys([]); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -473,7 +533,7 @@ function K8sNamespacesList({ category: InfraMonitoringEvents.Namespace, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails.tsx index f9f1801f936f..f64057e4ce41 100644 --- a/frontend/src/container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails.tsx @@ -14,7 +14,11 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { getFiltersFromParams } from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { QUERY_KEYS } from 'container/InfraMonitoringK8s/EntityDetailsUtils/utils'; import { CustomTimeType, @@ -33,6 +37,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -84,11 +89,27 @@ function NamespaceDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -105,12 +126,18 @@ function NamespaceDetails({ value: namespace?.namespaceName || '', }, ], - }), - [namespace?.namespaceName], - ); + }; + }, [namespace?.namespaceName, searchParams]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -140,9 +167,8 @@ function NamespaceDetails({ value: namespace?.namespaceName || '', }, ], - }), - [namespace?.namespaceName], - ); + }; + }, [namespace?.namespaceName, searchParams]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -183,6 +209,13 @@ function NamespaceDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -222,7 +255,7 @@ function NamespaceDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_NAMESPACE_NAME, QUERY_KEYS.K8S_CLUSTER_NAME].includes( @@ -244,7 +277,7 @@ function NamespaceDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -252,6 +285,17 @@ function NamespaceDetails({ ...(paginationFilter ? [paginationFilter] : []), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -259,7 +303,7 @@ function NamespaceDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_NAMESPACE_NAME, QUERY_KEYS.K8S_CLUSTER_NAME].includes( @@ -276,7 +320,7 @@ function NamespaceDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -285,6 +329,16 @@ function NamespaceDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -292,7 +346,7 @@ function NamespaceDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const namespaceKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -310,7 +364,7 @@ function NamespaceDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ namespaceKindFilter, @@ -322,6 +376,16 @@ function NamespaceDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Nodes/K8sNodesList.tsx b/frontend/src/container/InfraMonitoringK8s/Nodes/K8sNodesList.tsx index d27f5d9196b9..441346980fb1 100644 --- a/frontend/src/container/InfraMonitoringK8s/Nodes/K8sNodesList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Nodes/K8sNodesList.tsx @@ -23,11 +23,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -60,17 +63,32 @@ function K8sNodesList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>({ columnName: 'cpu', order: 'desc' }); + } | null>(() => getOrderByFromParams(searchParams, false)); - const [selectedNodeUID, setselectedNodeUID] = useState(null); + const [selectedNodeUID, setSelectedNodeUID] = useState(() => { + const nodeUID = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.NODE_UID); + if (nodeUID) { + return nodeUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.NODES); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [selectedRowData, setSelectedRowData] = useState( null, @@ -250,15 +268,26 @@ function K8sNodesList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -307,7 +336,11 @@ function K8sNodesList({ const handleRowClick = (record: K8sNodesRowData): void => { if (groupBy.length === 0) { setSelectedRowData(null); - setselectedNodeUID(record.nodeUID); + setSelectedNodeUID(record.nodeUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.NODE_UID]: record.nodeUID, + }); } else { handleGroupByRowClick(record); } @@ -334,6 +367,11 @@ function K8sNodesList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -359,7 +397,9 @@ function K8sNodesList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedNodeUID(record.nodeUID), + onClick: (): void => { + setSelectedNodeUID(record.nodeUID); + }, className: 'expanded-clickable-row', })} /> @@ -422,7 +462,21 @@ function K8sNodesList({ }; const handleCloseNodeDetail = (): void => { - setselectedNodeUID(null); + setSelectedNodeUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.NODE_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -444,6 +498,10 @@ function K8sNodesList({ setCurrentPage(1); setGroupBy(groupBy); setExpandedRowKeys([]); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -451,7 +509,7 @@ function K8sNodesList({ category: InfraMonitoringEvents.Node, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails.tsx index a1d54e4a781c..a216ea010cbe 100644 --- a/frontend/src/container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails.tsx @@ -14,8 +14,14 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { filterDuplicateFilters } from 'container/InfraMonitoringK8s/commonUtils'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { + filterDuplicateFilters, + getFiltersFromParams, +} from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import NodeEvents from 'container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents'; import { CustomTimeType, @@ -34,6 +40,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -82,11 +89,27 @@ function NodeDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -103,12 +126,18 @@ function NodeDetails({ value: node?.meta.k8s_node_name || '', }, ], - }), - [node?.meta.k8s_node_name], - ); + }; + }, [node?.meta.k8s_node_name, searchParams]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -138,9 +167,8 @@ function NodeDetails({ value: node?.meta.k8s_node_name || '', }, ], - }), - [node?.meta.k8s_node_name], - ); + }; + }, [node?.meta.k8s_node_name, searchParams]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -181,6 +209,13 @@ function NodeDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -220,7 +255,7 @@ function NodeDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_NODE_NAME, QUERY_KEYS.K8S_CLUSTER_NAME].includes( @@ -242,7 +277,7 @@ function NodeDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -252,6 +287,16 @@ function NodeDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -259,7 +304,7 @@ function NodeDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_NODE_NAME, QUERY_KEYS.K8S_CLUSTER_NAME].includes( @@ -276,7 +321,7 @@ function NodeDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -287,6 +332,16 @@ function NodeDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -294,7 +349,7 @@ function NodeDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const nodeKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -312,7 +367,7 @@ function NodeDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ nodeKindFilter, @@ -324,6 +379,16 @@ function NodeDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Pods/K8sPodLists.tsx b/frontend/src/container/InfraMonitoringK8s/Pods/K8sPodLists.tsx index 08d8a2ddc4c5..f6bc79171e1f 100644 --- a/frontend/src/container/InfraMonitoringK8s/Pods/K8sPodLists.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Pods/K8sPodLists.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight, CornerDownRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -59,6 +62,7 @@ function K8sPodsList({ const { maxTime, minTime } = useSelector( (state) => state.globalTime, ); + const [searchParams, setSearchParams] = useSearchParams(); const [currentPage, setCurrentPage] = useState(1); @@ -68,7 +72,15 @@ function K8sPodsList({ defaultAvailableColumns, ); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [selectedRowData, setSelectedRowData] = useState( null, @@ -134,9 +146,15 @@ function K8sPodsList({ const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>({ columnName: 'cpu', order: 'desc' }); + } | null>(() => getOrderByFromParams(searchParams, false)); - const [selectedPodUID, setSelectedPodUID] = useState(null); + const [selectedPodUID, setSelectedPodUID] = useState(() => { + const podUID = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.POD_UID); + if (podUID) { + return podUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.PODS); @@ -265,15 +283,26 @@ function K8sPodsList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -318,6 +347,10 @@ function K8sPodsList({ setCurrentPage(1); setGroupBy(groupBy); setExpandedRowKeys([]); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -325,7 +358,7 @@ function K8sPodsList({ category: InfraMonitoringEvents.Pod, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { @@ -366,6 +399,10 @@ function K8sPodsList({ const handleRowClick = (record: K8sPodsRowData): void => { if (groupBy.length === 0) { setSelectedPodUID(record.podUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.POD_UID]: record.podUID, + }); setSelectedRowData(null); } else { handleGroupByRowClick(record); @@ -380,6 +417,20 @@ function K8sPodsList({ const handleClosePodDetail = (): void => { setSelectedPodUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.POD_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleAddColumn = useCallback( @@ -435,6 +486,11 @@ function K8sPodsList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -459,7 +515,9 @@ function K8sPodsList({ indicator: } />, }} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setSelectedPodUID(record.podUID), + onClick: (): void => { + setSelectedPodUID(record.podUID); + }, className: 'expanded-clickable-row', })} /> diff --git a/frontend/src/container/InfraMonitoringK8s/Pods/PodDetails/PodDetails.tsx b/frontend/src/container/InfraMonitoringK8s/Pods/PodDetails/PodDetails.tsx index dec4d7d19df8..aefb5a0bb1d4 100644 --- a/frontend/src/container/InfraMonitoringK8s/Pods/PodDetails/PodDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Pods/PodDetails/PodDetails.tsx @@ -15,8 +15,14 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { filterDuplicateFilters } from 'container/InfraMonitoringK8s/commonUtils'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { + filterDuplicateFilters, + getFiltersFromParams, +} from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import { QUERY_KEYS } from 'container/InfraMonitoringK8s/EntityDetailsUtils/utils'; import { CustomTimeType, @@ -35,6 +41,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -86,11 +93,27 @@ function PodDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -120,12 +143,18 @@ function PodDetails({ value: pod?.meta.k8s_namespace_name || '', }, ], - }), - [pod?.meta.k8s_namespace_name, pod?.meta.k8s_pod_name], - ); + }; + }, [pod?.meta.k8s_namespace_name, pod?.meta.k8s_pod_name, searchParams]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -155,9 +184,8 @@ function PodDetails({ value: pod?.meta.k8s_pod_name || '', }, ], - }), - [pod?.meta.k8s_pod_name], - ); + }; + }, [pod?.meta.k8s_pod_name, searchParams]); const [logsAndTracesFilters, setLogsAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -198,6 +226,13 @@ function PodDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -237,7 +272,7 @@ function PodDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogsAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [ @@ -261,7 +296,7 @@ function PodDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -271,6 +306,16 @@ function PodDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -278,7 +323,7 @@ function PodDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogsAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [ @@ -297,7 +342,7 @@ function PodDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: filterDuplicateFilters( [ @@ -308,6 +353,16 @@ function PodDetails({ ].filter((item): item is TagFilterItem => item !== undefined), ), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -315,7 +370,7 @@ function PodDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const podKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -333,7 +388,7 @@ function PodDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ podKindFilter, @@ -345,6 +400,16 @@ function PodDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/StatefulSets/K8sStatefulSetsList.tsx b/frontend/src/container/InfraMonitoringK8s/StatefulSets/K8sStatefulSetsList.tsx index f3bdb46ec55c..ba2b14f8defe 100644 --- a/frontend/src/container/InfraMonitoringK8s/StatefulSets/K8sStatefulSetsList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/StatefulSets/K8sStatefulSetsList.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -60,19 +63,36 @@ function K8sStatefulSetsList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); const [selectedStatefulSetUID, setselectedStatefulSetUID] = useState< string | null - >(null); + >(() => { + const statefulSetUID = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.STATEFULSET_UID, + ); + if (statefulSetUID) { + return statefulSetUID; + } + return null; + }); const { pageSize, setPageSize } = usePageSize(K8sCategory.STATEFULSETS); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -263,15 +283,26 @@ function K8sStatefulSetsList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -330,6 +361,10 @@ function K8sStatefulSetsList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedStatefulSetUID(record.statefulsetUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.STATEFULSET_UID]: record.statefulsetUID, + }); } else { handleGroupByRowClick(record); } @@ -356,6 +391,11 @@ function K8sStatefulSetsList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -380,7 +420,9 @@ function K8sStatefulSetsList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedStatefulSetUID(record.statefulsetUID), + onClick: (): void => { + setselectedStatefulSetUID(record.statefulsetUID); + }, className: 'expanded-clickable-row', })} /> @@ -444,6 +486,20 @@ function K8sStatefulSetsList({ const handleCloseStatefulSetDetail = (): void => { setselectedStatefulSetUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => + ![ + INFRA_MONITORING_K8S_PARAMS_KEYS.STATEFULSET_UID, + INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW, + INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS, + ].includes(key), + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -465,6 +521,10 @@ function K8sStatefulSetsList({ setCurrentPage(1); setGroupBy(groupBy); setExpandedRowKeys([]); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); logEvent(InfraMonitoringEvents.GroupByChanged, { entity: InfraMonitoringEvents.K8sEntity, @@ -472,7 +532,7 @@ function K8sStatefulSetsList({ category: InfraMonitoringEvents.StatefulSet, }); }, - [groupByFiltersData], + [groupByFiltersData, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails.tsx b/frontend/src/container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails.tsx index 4ebcd39847b7..aac8c1c60fc7 100644 --- a/frontend/src/container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails.tsx +++ b/frontend/src/container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails.tsx @@ -13,7 +13,11 @@ import { initialQueryState, } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; -import { K8sCategory } from 'container/InfraMonitoringK8s/constants'; +import { getFiltersFromParams } from 'container/InfraMonitoringK8s/commonUtils'; +import { + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from 'container/InfraMonitoringK8s/constants'; import EntityEvents from 'container/InfraMonitoringK8s/EntityDetailsUtils/EntityEvents'; import EntityLogs from 'container/InfraMonitoringK8s/EntityDetailsUtils/EntityLogs'; import EntityMetrics from 'container/InfraMonitoringK8s/EntityDetailsUtils/EntityMetrics'; @@ -36,6 +40,7 @@ import { } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse'; import { @@ -83,11 +88,27 @@ function StatefulSetDetails({ selectedTime as Time, ); - const [selectedView, setSelectedView] = useState(VIEWS.METRICS); + const [searchParams, setSearchParams] = useSearchParams(); + const [selectedView, setSelectedView] = useState(() => { + const view = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + if (view) { + return view as VIEWS; + } + return VIEWS.METRICS; + }); const isDarkMode = useIsDarkMode(); - const initialFilters = useMemo( - () => ({ + const initialFilters = useMemo(() => { + const urlView = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW); + const queryKey = + urlView === VIEW_TYPES.LOGS + ? INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS + : INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS; + const filters = getFiltersFromParams(searchParams, queryKey); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -117,15 +138,22 @@ function StatefulSetDetails({ value: statefulSet?.meta.k8s_namespace_name || '', }, ], - }), - [ - statefulSet?.meta.k8s_statefulset_name, - statefulSet?.meta.k8s_namespace_name, - ], - ); + }; + }, [ + searchParams, + statefulSet?.meta.k8s_statefulset_name, + statefulSet?.meta.k8s_namespace_name, + ]); - const initialEventsFilters = useMemo( - () => ({ + const initialEventsFilters = useMemo(() => { + const filters = getFiltersFromParams( + searchParams, + INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS, + ); + if (filters) { + return filters; + } + return { op: 'AND', items: [ { @@ -155,9 +183,8 @@ function StatefulSetDetails({ value: statefulSet?.meta.k8s_statefulset_name || '', }, ], - }), - [statefulSet?.meta.k8s_statefulset_name], - ); + }; + }, [searchParams, statefulSet?.meta.k8s_statefulset_name]); const [logAndTracesFilters, setLogAndTracesFilters] = useState< IBuilderQuery['filters'] @@ -198,6 +225,13 @@ function StatefulSetDetails({ const handleTabChange = (e: RadioChangeEvent): void => { setSelectedView(e.target.value); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: e.target.value, + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify(null), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify(null), + }); logEvent(InfraMonitoringEvents.TabChanged, { entity: InfraMonitoringEvents.K8sEntity, page: InfraMonitoringEvents.DetailedPage, @@ -237,7 +271,7 @@ function StatefulSetDetails({ ); const handleChangeLogFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_STATEFUL_SET_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -260,7 +294,7 @@ function StatefulSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -268,6 +302,16 @@ function StatefulSetDetails({ ...(paginationFilter ? [paginationFilter] : []), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.LOG_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -275,7 +319,7 @@ function StatefulSetDetails({ ); const handleChangeTracesFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setLogAndTracesFilters((prevFilters) => { const primaryFilters = prevFilters.items.filter((item) => [QUERY_KEYS.K8S_STATEFUL_SET_NAME, QUERY_KEYS.K8S_NAMESPACE_NAME].includes( @@ -292,7 +336,7 @@ function StatefulSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ ...primaryFilters, @@ -301,6 +345,16 @@ function StatefulSetDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.TRACES_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps @@ -308,7 +362,7 @@ function StatefulSetDetails({ ); const handleChangeEventsFilters = useCallback( - (value: IBuilderQuery['filters']) => { + (value: IBuilderQuery['filters'], view: VIEWS) => { setEventsFilters((prevFilters) => { const statefulSetKindFilter = prevFilters.items.find( (item) => item.key?.key === QUERY_KEYS.K8S_OBJECT_KIND, @@ -326,7 +380,7 @@ function StatefulSetDetails({ }); } - return { + const updatedFilters = { op: 'AND', items: [ statefulSetKindFilter, @@ -338,6 +392,16 @@ function StatefulSetDetails({ ), ].filter((item): item is TagFilterItem => item !== undefined), }; + + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.EVENTS_FILTERS]: JSON.stringify( + updatedFilters, + ), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VIEW]: view, + }); + + return updatedFilters; }); }, // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/frontend/src/container/InfraMonitoringK8s/Volumes/K8sVolumesList.tsx b/frontend/src/container/InfraMonitoringK8s/Volumes/K8sVolumesList.tsx index 9ade0544ecdc..9ba4a7544804 100644 --- a/frontend/src/container/InfraMonitoringK8s/Volumes/K8sVolumesList.tsx +++ b/frontend/src/container/InfraMonitoringK8s/Volumes/K8sVolumesList.tsx @@ -24,11 +24,14 @@ import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations import { ChevronDown, ChevronRight } from 'lucide-react'; import { useCallback, useEffect, useMemo, useState } from 'react'; import { useSelector } from 'react-redux'; +import { useSearchParams } from 'react-router-dom-v5-compat'; import { AppState } from 'store/reducers'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; import { GlobalReducer } from 'types/reducer/globalTime'; +import { getOrderByFromParams } from '../commonUtils'; import { + INFRA_MONITORING_K8S_PARAMS_KEYS, K8sCategory, K8sEntityToAggregateAttributeMapping, } from '../constants'; @@ -60,19 +63,36 @@ function K8sVolumesList({ const [currentPage, setCurrentPage] = useState(1); const [expandedRowKeys, setExpandedRowKeys] = useState([]); + const [searchParams, setSearchParams] = useSearchParams(); const [orderBy, setOrderBy] = useState<{ columnName: string; order: 'asc' | 'desc'; - } | null>(null); + } | null>(() => getOrderByFromParams(searchParams, true)); const [selectedVolumeUID, setselectedVolumeUID] = useState( - null, + () => { + const volumeUID = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.VOLUME_UID, + ); + if (volumeUID) { + return volumeUID; + } + return null; + }, ); const { pageSize, setPageSize } = usePageSize(K8sCategory.VOLUMES); - const [groupBy, setGroupBy] = useState([]); + const [groupBy, setGroupBy] = useState(() => { + const groupBy = searchParams.get(INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY); + if (groupBy) { + const decoded = decodeURIComponent(groupBy); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['groupBy']; + } + return []; + }); const [ selectedRowData, @@ -253,15 +273,26 @@ function K8sVolumesList({ } if ('field' in sorter && sorter.order) { - setOrderBy({ + const currentOrderBy = { columnName: sorter.field as string, - order: sorter.order === 'ascend' ? 'asc' : 'desc', + order: (sorter.order === 'ascend' ? 'asc' : 'desc') as 'asc' | 'desc', + }; + setOrderBy(currentOrderBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify( + currentOrderBy, + ), }); } else { setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); } }, - [], + [searchParams, setSearchParams], ); const { handleChangeQueryData } = useQueryOperations({ @@ -315,6 +346,10 @@ function K8sVolumesList({ if (groupBy.length === 0) { setSelectedRowData(null); setselectedVolumeUID(record.volumeUID); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.VOLUME_UID]: record.volumeUID, + }); } else { handleGroupByRowClick(record); } @@ -341,6 +376,11 @@ function K8sVolumesList({ setSelectedRowData(null); setGroupBy([]); setOrderBy(null); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify([]), + [INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY]: JSON.stringify(null), + }); }; const expandedRowRender = (): JSX.Element => ( @@ -365,7 +405,9 @@ function K8sVolumesList({ }} showHeader={false} onRow={(record): { onClick: () => void; className: string } => ({ - onClick: (): void => setselectedVolumeUID(record.volumeUID), + onClick: (): void => { + setselectedVolumeUID(record.volumeUID); + }, className: 'expanded-clickable-row', })} /> @@ -429,6 +471,13 @@ function K8sVolumesList({ const handleCloseVolumeDetail = (): void => { setselectedVolumeUID(null); + setSearchParams({ + ...Object.fromEntries( + Array.from(searchParams.entries()).filter( + ([key]) => key !== INFRA_MONITORING_K8S_PARAMS_KEYS.VOLUME_UID, + ), + ), + }); }; const handleGroupByChange = useCallback( @@ -449,6 +498,10 @@ function K8sVolumesList({ setCurrentPage(1); setGroupBy(groupBy); + setSearchParams({ + ...Object.fromEntries(searchParams.entries()), + [INFRA_MONITORING_K8S_PARAMS_KEYS.GROUP_BY]: JSON.stringify(groupBy), + }); setExpandedRowKeys([]); logEvent(InfraMonitoringEvents.GroupByChanged, { @@ -457,7 +510,7 @@ function K8sVolumesList({ category: InfraMonitoringEvents.Volumes, }); }, - [groupByFiltersData], + [groupByFiltersData?.payload?.attributeKeys, searchParams, setSearchParams], ); useEffect(() => { diff --git a/frontend/src/container/InfraMonitoringK8s/commonUtils.tsx b/frontend/src/container/InfraMonitoringK8s/commonUtils.tsx index d7f9fd9b7904..e61f64e5dbd9 100644 --- a/frontend/src/container/InfraMonitoringK8s/commonUtils.tsx +++ b/frontend/src/container/InfraMonitoringK8s/commonUtils.tsx @@ -12,9 +12,16 @@ import { ResizeTable } from 'components/ResizeTable'; import FieldRenderer from 'container/LogDetailedView/FieldRenderer'; import { DataType } from 'container/LogDetailedView/TableView'; import { useMemo } from 'react'; -import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData'; +import { + IBuilderQuery, + TagFilterItem, +} from 'types/api/queryBuilder/queryBuilderData'; -import { getInvalidValueTooltipText, K8sCategory } from './constants'; +import { + getInvalidValueTooltipText, + INFRA_MONITORING_K8S_PARAMS_KEYS, + K8sCategory, +} from './constants'; /** * Converts size in bytes to a human-readable string with appropriate units @@ -250,3 +257,37 @@ export const filterDuplicateFilters = ( return uniqueFilters; }; + +export const getOrderByFromParams = ( + searchParams: URLSearchParams, + returnNullAsDefault = false, +): { + columnName: string; + order: 'asc' | 'desc'; +} | null => { + const orderByFromParams = searchParams.get( + INFRA_MONITORING_K8S_PARAMS_KEYS.ORDER_BY, + ); + if (orderByFromParams) { + const decoded = decodeURIComponent(orderByFromParams); + const parsed = JSON.parse(decoded); + return parsed as { columnName: string; order: 'asc' | 'desc' }; + } + if (returnNullAsDefault) { + return null; + } + return { columnName: 'cpu', order: 'desc' }; +}; + +export const getFiltersFromParams = ( + searchParams: URLSearchParams, + queryKey: string, +): IBuilderQuery['filters'] | null => { + const filtersFromParams = searchParams.get(queryKey); + if (filtersFromParams) { + const decoded = decodeURIComponent(filtersFromParams); + const parsed = JSON.parse(decoded); + return parsed as IBuilderQuery['filters']; + } + return null; +}; diff --git a/frontend/src/container/InfraMonitoringK8s/constants.ts b/frontend/src/container/InfraMonitoringK8s/constants.ts index 997daeaa7775..a8bc29351fff 100644 --- a/frontend/src/container/InfraMonitoringK8s/constants.ts +++ b/frontend/src/container/InfraMonitoringK8s/constants.ts @@ -518,3 +518,24 @@ export const getInvalidValueTooltipText = ( entity: K8sCategory, attribute: string, ): string => `Some ${entity} do not have ${attribute}s.`; + +export const INFRA_MONITORING_K8S_PARAMS_KEYS = { + CATEGORY: 'category', + VIEW: 'view', + CLUSTER_NAME: 'clusterName', + DAEMONSET_UID: 'daemonSetUID', + DEPLOYMENT_UID: 'deploymentUID', + JOB_UID: 'jobUID', + NAMESPACE_UID: 'namespaceUID', + NODE_UID: 'nodeUID', + POD_UID: 'podUID', + STATEFULSET_UID: 'statefulsetUID', + VOLUME_UID: 'volumeUID', + FILTERS: 'filters', + GROUP_BY: 'groupBy', + ORDER_BY: 'orderBy', + LOG_FILTERS: 'logFilters', + TRACES_FILTERS: 'tracesFilters', + EVENTS_FILTERS: 'eventsFilters', + HOSTS_FILTERS: 'hostsFilters', +}; diff --git a/frontend/src/container/Licenses/ApplyLicenseForm.tsx b/frontend/src/container/Licenses/ApplyLicenseForm.tsx index 6b6da7266010..38d774f549f6 100644 --- a/frontend/src/container/Licenses/ApplyLicenseForm.tsx +++ b/frontend/src/container/Licenses/ApplyLicenseForm.tsx @@ -1,8 +1,9 @@ import { Button, Form, Input } from 'antd'; -import apply from 'api/licenses/apply'; +import apply from 'api/v3/licenses/put'; import { useNotifications } from 'hooks/useNotifications'; import { useState } from 'react'; import { useTranslation } from 'react-i18next'; +import APIError from 'types/api/error'; import { requireErrorMessage } from 'utils/form/requireErrorMessage'; import { @@ -36,27 +37,18 @@ function ApplyLicenseForm({ setIsLoading(true); try { - const response = await apply({ + await apply({ key: params.key, }); - - if (response.statusCode === 200) { - await Promise.all([licenseRefetch()]); - - notifications.success({ - message: 'Success', - description: t('license_applied'), - }); - } else { - notifications.error({ - message: 'Error', - description: response.error || t('unexpected_error'), - }); - } + await Promise.all([licenseRefetch()]); + notifications.success({ + message: 'Success', + description: t('license_applied'), + }); } catch (e) { notifications.error({ - message: 'Error', - description: t('unexpected_error'), + message: (e as APIError).getErrorCode(), + description: (e as APIError).getErrorMessage(), }); } setIsLoading(false); diff --git a/frontend/src/container/Licenses/ListLicenses.tsx b/frontend/src/container/Licenses/ListLicenses.tsx deleted file mode 100644 index 45c14bd23884..000000000000 --- a/frontend/src/container/Licenses/ListLicenses.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import { Typography } from 'antd'; -import { ColumnsType } from 'antd/lib/table'; -import { ResizeTable } from 'components/ResizeTable'; -import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats'; -import { useTimezone } from 'providers/Timezone'; -import { useTranslation } from 'react-i18next'; -import { License } from 'types/api/licenses/def'; - -function ValidityColumn({ value }: { value: string }): JSX.Element { - const { formatTimezoneAdjustedTimestamp } = useTimezone(); - - return ( - - {formatTimezoneAdjustedTimestamp(value, DATE_TIME_FORMATS.ISO_DATETIME_UTC)} - - ); -} - -function ListLicenses({ licenses }: ListLicensesProps): JSX.Element { - const { t } = useTranslation(['licenses']); - - const columns: ColumnsType = [ - { - title: t('column_license_status'), - dataIndex: 'status', - key: 'status', - width: 100, - }, - { - title: t('column_license_key'), - dataIndex: 'key', - key: 'key', - width: 80, - }, - { - title: t('column_valid_from'), - dataIndex: 'ValidFrom', - key: 'valid from', - render: (value: string): JSX.Element => ValidityColumn({ value }), - width: 80, - }, - { - title: t('column_valid_until'), - dataIndex: 'ValidUntil', - key: 'valid until', - render: (value: string): JSX.Element => ValidityColumn({ value }), - width: 80, - }, - ]; - - return ; -} - -interface ListLicensesProps { - licenses: License[]; -} - -export default ListLicenses; diff --git a/frontend/src/container/Licenses/index.tsx b/frontend/src/container/Licenses/index.tsx index 6eeed645e9fd..cae9ad566e67 100644 --- a/frontend/src/container/Licenses/index.tsx +++ b/frontend/src/container/Licenses/index.tsx @@ -4,29 +4,20 @@ import { useAppContext } from 'providers/App/App'; import { useTranslation } from 'react-i18next'; import ApplyLicenseForm from './ApplyLicenseForm'; -import ListLicenses from './ListLicenses'; function Licenses(): JSX.Element { const { t, ready: translationsReady } = useTranslation(['licenses']); - const { licenses, licensesRefetch } = useAppContext(); + const { activeLicenseRefetch } = useAppContext(); if (!translationsReady) { return ; } - const allValidLicense = - licenses?.licenses?.filter((license) => license.isCurrent) || []; - const tabs = [ { label: t('tab_current_license'), key: 'licenses', - children: , - }, - { - label: t('tab_license_history'), - key: 'history', - children: , + children: , }, ]; diff --git a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx index a437acf42e75..2c4e618a4a2b 100644 --- a/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx +++ b/frontend/src/container/LogsExplorerViews/tests/LogsExplorerPagination.test.tsx @@ -169,7 +169,7 @@ export const verifyFiltersAndOrderBy = (queryData: IBuilderQuery): void => { } }; -describe('LogsExplorerViews Pagination', () => { +describe.skip('LogsExplorerViews Pagination', () => { // Array to store captured API request payloads let capturedPayloads: QueryRangePayload[]; diff --git a/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx b/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx index eae66505c586..1aed2e567bcb 100644 --- a/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx +++ b/frontend/src/container/MetricsExplorer/Explorer/Explorer.tsx @@ -19,6 +19,7 @@ import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFall import { useCallback, useMemo, useState } from 'react'; import { useSearchParams } from 'react-router-dom-v5-compat'; import { Dashboard } from 'types/api/dashboard/getAll'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { DataSource } from 'types/common/queryBuilder'; import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink'; import { v4 as uuid } from 'uuid'; @@ -26,6 +27,7 @@ import { v4 as uuid } from 'uuid'; import QuerySection from './QuerySection'; import TimeSeries from './TimeSeries'; import { ExplorerTabs } from './types'; +import { splitQueryIntoOneChartPerQuery } from './utils'; const ONE_CHART_PER_QUERY_ENABLED_KEY = 'isOneChartPerQueryEnabled'; @@ -75,14 +77,18 @@ function Explorer(): JSX.Element { useShareBuilderUrl(exportDefaultQuery); const handleExport = useCallback( - (dashboard: Dashboard | null): void => { + ( + dashboard: Dashboard | null, + _isNewDashboard?: boolean, + queryToExport?: Query, + ): void => { if (!dashboard) return; const widgetId = uuid(); const updatedDashboard = addEmptyWidgetInDashboardJSONWithQuery( dashboard, - exportDefaultQuery, + queryToExport || exportDefaultQuery, widgetId, PANEL_TYPES.TIME_SERIES, options.selectColumns, @@ -114,7 +120,7 @@ function Explorer(): JSX.Element { return; } const dashboardEditView = generateExportToDashboardLink({ - query: exportDefaultQuery, + query: queryToExport || exportDefaultQuery, panelType: PANEL_TYPES.TIME_SERIES, dashboardId: data.payload?.uuid || '', widgetId, @@ -135,6 +141,14 @@ function Explorer(): JSX.Element { [exportDefaultQuery, notifications, updateDashboard], ); + const splitedQueries = useMemo( + () => + splitQueryIntoOneChartPerQuery( + stagedQuery || initialQueriesMap[DataSource.METRICS], + ), + [stagedQuery], + ); + return ( }>
@@ -190,6 +204,8 @@ function Explorer(): JSX.Element { isLoading={isLoading} sourcepage={DataSource.METRICS} onExport={handleExport} + isOneChartPerQuery={showOneChartPerQuery} + splitedQueries={splitedQueries} /> ); diff --git a/frontend/src/container/MetricsExplorer/MetricDetails/MetricDetails.tsx b/frontend/src/container/MetricsExplorer/MetricDetails/MetricDetails.tsx index 97670b50618c..b0c18ab64b47 100644 --- a/frontend/src/container/MetricsExplorer/MetricDetails/MetricDetails.tsx +++ b/frontend/src/container/MetricsExplorer/MetricDetails/MetricDetails.tsx @@ -75,7 +75,7 @@ function MetricDetails({ hour." placement="top" > - {`${timeSeriesTotal} ⎯ ${timeSeriesActive} active`} + {`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`} ); }, [metric]); diff --git a/frontend/src/container/MetricsExplorer/Summary/MetricsTreemap.tsx b/frontend/src/container/MetricsExplorer/Summary/MetricsTreemap.tsx index f9f81fa92c13..ef9649be1dc6 100644 --- a/frontend/src/container/MetricsExplorer/Summary/MetricsTreemap.tsx +++ b/frontend/src/container/MetricsExplorer/Summary/MetricsTreemap.tsx @@ -154,10 +154,14 @@ function MetricsTreemap({ openMetricDetails(node.data.id)} > -
+
{`${node.data.displayValue}%`}
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx index ab4e67120f46..f142bd704337 100644 --- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraphs.tsx @@ -1,4 +1,5 @@ import { useNavigateToExplorer } from 'components/CeleryTask/useNavigateToExplorer'; +import { ToggleGraphProps } from 'components/Graph/types'; import { QueryParams } from 'constants/query'; import { PANEL_TYPES } from 'constants/queryBuilder'; import { handleGraphClick } from 'container/GridCardLayout/GridCard/utils'; @@ -19,6 +20,7 @@ import { useCallback, useEffect, useRef, + useState, } from 'react'; import { UseQueryResult } from 'react-query'; import { useDispatch } from 'react-redux'; @@ -36,11 +38,37 @@ function WidgetGraph({ selectedGraph, }: WidgetGraphProps): JSX.Element { const graphRef = useRef(null); + const lineChartRef = useRef(); const dispatch = useDispatch(); const urlQuery = useUrlQuery(); const location = useLocation(); const { safeNavigate } = useSafeNavigate(); + // Add legend state management similar to dashboard components + const [graphVisibility, setGraphVisibility] = useState( + Array((queryResponse.data?.payload?.data?.result?.length || 0) + 1).fill( + true, + ), + ); + + // Initialize graph visibility when data changes + useEffect(() => { + if (queryResponse.data?.payload?.data?.result) { + setGraphVisibility( + Array(queryResponse.data.payload.data.result.length + 1).fill(true), + ); + } + }, [queryResponse.data?.payload?.data?.result]); + + // Apply graph visibility when lineChartRef is available + useEffect(() => { + if (!lineChartRef.current) return; + + graphVisibility.forEach((state, index) => { + lineChartRef.current?.toggleGraph(index, state); + }); + }, [graphVisibility]); + const handleBackNavigation = (): void => { const searchParams = new URLSearchParams(window.location.search); const startTime = searchParams.get(QueryParams.startTime); @@ -154,6 +182,8 @@ function WidgetGraph({ onDragSelect={onDragSelect} selectedGraph={selectedGraph} onClickHandler={graphClickHandler} + graphVisibility={graphVisibility} + setGraphVisibility={setGraphVisibility} />
); diff --git a/frontend/src/container/NewWidget/LeftContainer/index.tsx b/frontend/src/container/NewWidget/LeftContainer/index.tsx index 83d99aefcffe..6b72e6a6ad79 100644 --- a/frontend/src/container/NewWidget/LeftContainer/index.tsx +++ b/frontend/src/container/NewWidget/LeftContainer/index.tsx @@ -6,7 +6,7 @@ import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useDashboard } from 'providers/Dashboard/Dashboard'; -import { memo } from 'react'; +import { memo, useEffect } from 'react'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; import { GlobalReducer } from 'types/reducer/globalTime'; @@ -27,6 +27,7 @@ function LeftContainer({ requestData, setRequestData, isLoadingPanelData, + setQueryResponse, }: WidgetGraphProps): JSX.Element { const { stagedQuery } = useQueryBuilder(); const { selectedDashboard } = useDashboard(); @@ -49,6 +50,13 @@ function LeftContainer({ }, ); + // Update parent component with query response for legend colors + useEffect(() => { + if (setQueryResponse) { + setQueryResponse(queryResponse); + } + }, [queryResponse, setQueryResponse]); + return ( <> (null); + const [isOverflowing, setIsOverflowing] = useState(false); + + useEffect(() => { + const checkOverflow = (): void => { + if (textRef.current) { + const isTextOverflowing = + textRef.current.scrollWidth > textRef.current.clientWidth; + setIsOverflowing(isTextOverflowing); + } + }; + + checkOverflow(); + // Check on window resize + window.addEventListener('resize', checkOverflow); + return (): void => window.removeEventListener('resize', checkOverflow); + }, [label]); + + return ( + + + {label} + + + ); +} + +interface LegendColorsProps { + customLegendColors: Record; + setCustomLegendColors: Dispatch>>; + queryResponse?: UseQueryResult< + SuccessResponse, + Error + >; +} + +function LegendColors({ + customLegendColors, + setCustomLegendColors, + queryResponse = null as any, +}: LegendColorsProps): JSX.Element { + const { currentQuery } = useQueryBuilder(); + const isDarkMode = useIsDarkMode(); + + // Get legend labels from query response or current query + const legendLabels = useMemo(() => { + if (queryResponse?.data?.payload?.data?.result) { + return queryResponse.data.payload.data.result.map((item: any) => + getLabelName(item.metric || {}, item.queryName || '', item.legend || ''), + ); + } + + // Fallback to query data if no response available + return currentQuery.builder.queryData.map((query) => + getLabelName({}, query.queryName || '', query.legend || ''), + ); + }, [queryResponse, currentQuery]); + + // Get current or default color for a legend + const getColorForLegend = (label: string): string => { + if (customLegendColors[label]) { + return customLegendColors[label]; + } + return generateColor( + label, + isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor, + ); + }; + + // Handle color change + const handleColorChange = (label: string, color: string): void => { + setCustomLegendColors((prev) => ({ + ...prev, + [label]: color, + })); + }; + + // Reset to default color + const resetToDefault = (label: string): void => { + setCustomLegendColors((prev) => { + const updated = { ...prev }; + delete updated[label]; + return updated; + }); + }; + + // Reset all colors to default + const resetAllColors = (): void => { + setCustomLegendColors({}); + }; + + const items = [ + { + key: 'legend-colors', + label: ( +
+ + Legend Colors +
+ ), + children: ( +
+ {legendLabels.length === 0 ? ( + + No legends available. Run a query to see legend options. + + ) : ( + <> +
+ +
+
+ {legendLabels.map((label: string) => ( +
+ + handleColorChange(label, color.toHexString()) + } + size="small" + showText={false} + trigger="click" + > +
+
+
+ +
+ {customLegendColors[label] && ( +
+ { + e.stopPropagation(); + resetToDefault(label); + }} + > + Reset + +
+ )} +
+ +
+ ))} +
+ + )} +
+ ), + }, + ]; + + return ( +
+ +
+ ); +} + +LegendColors.defaultProps = { + queryResponse: null, +}; + +export default LegendColors; diff --git a/frontend/src/container/NewWidget/RightContainer/RightContainer.styles.scss b/frontend/src/container/NewWidget/RightContainer/RightContainer.styles.scss index 8855eae49845..1c3b78494ab9 100644 --- a/frontend/src/container/NewWidget/RightContainer/RightContainer.styles.scss +++ b/frontend/src/container/NewWidget/RightContainer/RightContainer.styles.scss @@ -166,6 +166,18 @@ gap: 8px; } + .legend-position { + margin-top: 16px; + display: flex; + justify-content: space-between; + flex-direction: column; + gap: 8px; + } + + .legend-colors { + margin-top: 16px; + } + .panel-time-text { margin-top: 16px; color: var(--bg-vanilla-400); diff --git a/frontend/src/container/NewWidget/RightContainer/constants.ts b/frontend/src/container/NewWidget/RightContainer/constants.ts index cec2f8a6008f..53aa7eae999c 100644 --- a/frontend/src/container/NewWidget/RightContainer/constants.ts +++ b/frontend/src/container/NewWidget/RightContainer/constants.ts @@ -150,3 +150,31 @@ export const panelTypeVsStackingChartPreferences: { [PANEL_TYPES.HISTOGRAM]: false, [PANEL_TYPES.EMPTY_WIDGET]: false, } as const; + +export const panelTypeVsLegendPosition: { + [key in PANEL_TYPES]: boolean; +} = { + [PANEL_TYPES.TIME_SERIES]: true, + [PANEL_TYPES.VALUE]: false, + [PANEL_TYPES.TABLE]: false, + [PANEL_TYPES.LIST]: false, + [PANEL_TYPES.PIE]: false, + [PANEL_TYPES.BAR]: true, + [PANEL_TYPES.TRACE]: false, + [PANEL_TYPES.HISTOGRAM]: false, + [PANEL_TYPES.EMPTY_WIDGET]: false, +} as const; + +export const panelTypeVsLegendColors: { + [key in PANEL_TYPES]: boolean; +} = { + [PANEL_TYPES.TIME_SERIES]: true, + [PANEL_TYPES.VALUE]: false, + [PANEL_TYPES.TABLE]: false, + [PANEL_TYPES.LIST]: false, + [PANEL_TYPES.PIE]: true, + [PANEL_TYPES.BAR]: true, + [PANEL_TYPES.TRACE]: false, + [PANEL_TYPES.HISTOGRAM]: true, + [PANEL_TYPES.EMPTY_WIDGET]: false, +} as const; diff --git a/frontend/src/container/NewWidget/RightContainer/index.tsx b/frontend/src/container/NewWidget/RightContainer/index.tsx index 86ada0d4bac9..f0e518ab06f5 100644 --- a/frontend/src/container/NewWidget/RightContainer/index.tsx +++ b/frontend/src/container/NewWidget/RightContainer/index.tsx @@ -30,7 +30,14 @@ import { useRef, useState, } from 'react'; -import { ColumnUnit, Widgets } from 'types/api/dashboard/getAll'; +import { UseQueryResult } from 'react-query'; +import { SuccessResponse } from 'types/api'; +import { + ColumnUnit, + LegendPosition, + Widgets, +} from 'types/api/dashboard/getAll'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { DataSource } from 'types/common/queryBuilder'; import { popupContainer } from 'utils/selectPopupContainer'; @@ -40,6 +47,8 @@ import { panelTypeVsColumnUnitPreferences, panelTypeVsCreateAlert, panelTypeVsFillSpan, + panelTypeVsLegendColors, + panelTypeVsLegendPosition, panelTypeVsLogScale, panelTypeVsPanelTimePreferences, panelTypeVsSoftMinMax, @@ -47,6 +56,7 @@ import { panelTypeVsThreshold, panelTypeVsYAxisUnit, } from './constants'; +import LegendColors from './LegendColors/LegendColors'; import ThresholdSelector from './Threshold/ThresholdSelector'; import { ThresholdProps } from './Threshold/types'; import { timePreferance } from './timeItems'; @@ -98,6 +108,11 @@ function RightContainer({ setColumnUnits, isLogScale, setIsLogScale, + legendPosition, + setLegendPosition, + customLegendColors, + setCustomLegendColors, + queryResponse, }: RightContainerProps): JSX.Element { const { selectedDashboard } = useDashboard(); const [inputValue, setInputValue] = useState(title); @@ -128,6 +143,8 @@ function RightContainer({ panelTypeVsStackingChartPreferences[selectedGraph]; const allowPanelTimePreference = panelTypeVsPanelTimePreferences[selectedGraph]; + const allowLegendPosition = panelTypeVsLegendPosition[selectedGraph]; + const allowLegendColors = panelTypeVsLegendColors[selectedGraph]; const allowPanelColumnPreference = panelTypeVsColumnUnitPreferences[selectedGraph]; @@ -430,6 +447,40 @@ function RightContainer({ )} + + {allowLegendPosition && ( +
+ Legend Position + +
+ )} + + {allowLegendColors && ( +
+ +
+ )} {allowCreateAlerts && ( @@ -495,10 +546,19 @@ interface RightContainerProps { setSoftMax: Dispatch>; isLogScale: boolean; setIsLogScale: Dispatch>; + legendPosition: LegendPosition; + setLegendPosition: Dispatch>; + customLegendColors: Record; + setCustomLegendColors: Dispatch>>; + queryResponse?: UseQueryResult< + SuccessResponse, + Error + >; } RightContainer.defaultProps = { selectedWidget: undefined, + queryResponse: null, }; export default RightContainer; diff --git a/frontend/src/container/NewWidget/index.tsx b/frontend/src/container/NewWidget/index.tsx index 9c4c80997130..af6b3cda35de 100644 --- a/frontend/src/container/NewWidget/index.tsx +++ b/frontend/src/container/NewWidget/index.tsx @@ -34,11 +34,19 @@ import { } from 'providers/Dashboard/util'; import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { useTranslation } from 'react-i18next'; +import { UseQueryResult } from 'react-query'; import { useSelector } from 'react-redux'; import { generatePath, useParams } from 'react-router-dom'; import { AppState } from 'store/reducers'; -import { ColumnUnit, Dashboard, Widgets } from 'types/api/dashboard/getAll'; +import { SuccessResponse } from 'types/api'; +import { + ColumnUnit, + Dashboard, + LegendPosition, + Widgets, +} from 'types/api/dashboard/getAll'; import { IField } from 'types/api/logs/fields'; +import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { EQueryType } from 'types/common/dashboard'; import { DataSource } from 'types/common/queryBuilder'; import { GlobalReducer } from 'types/reducer/globalTime'; @@ -183,6 +191,13 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { const [isLogScale, setIsLogScale] = useState( selectedWidget?.isLogScale || false, ); + const [legendPosition, setLegendPosition] = useState( + selectedWidget?.legendPosition || LegendPosition.BOTTOM, + ); + const [customLegendColors, setCustomLegendColors] = useState< + Record + >(selectedWidget?.customLegendColors || {}); + const [saveModal, setSaveModal] = useState(false); const [discardModal, setDiscardModal] = useState(false); @@ -248,6 +263,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { selectedLogFields, selectedTracesFields, isLogScale, + legendPosition, + customLegendColors, columnWidths: columnWidths?.[selectedWidget?.id], }; }); @@ -272,6 +289,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { combineHistogram, stackedBarChart, isLogScale, + legendPosition, + customLegendColors, columnWidths, ]); @@ -330,6 +349,11 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { // hence while changing the query contains the older value and the processing logic fails const [isLoadingPanelData, setIsLoadingPanelData] = useState(false); + // State to hold query response for sharing between left and right containers + const [queryResponse, setQueryResponse] = useState< + UseQueryResult, Error> + >(null as any); + // request data should be handled by the parent and the child components should consume the same // this has been moved here from the left container const [requestData, setRequestData] = useState(() => { @@ -361,6 +385,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) === PANEL_TYPES.TABLE, variables: getDashboardVariables(selectedDashboard?.data.variables), + originalGraphType: selectedGraph || selectedWidget?.panelTypes, }; } @@ -470,6 +495,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { mergeAllActiveQueries: selectedWidget?.mergeAllActiveQueries || false, selectedLogFields: selectedWidget?.selectedLogFields || [], selectedTracesFields: selectedWidget?.selectedTracesFields || [], + legendPosition: selectedWidget?.legendPosition || LegendPosition.BOTTOM, + customLegendColors: selectedWidget?.customLegendColors || {}, }, ] : [ @@ -497,6 +524,8 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { mergeAllActiveQueries: selectedWidget?.mergeAllActiveQueries || false, selectedLogFields: selectedWidget?.selectedLogFields || [], selectedTracesFields: selectedWidget?.selectedTracesFields || [], + legendPosition: selectedWidget?.legendPosition || LegendPosition.BOTTOM, + customLegendColors: selectedWidget?.customLegendColors || {}, }, ...afterWidgets, ], @@ -710,6 +739,7 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { requestData={requestData} setRequestData={setRequestData} isLoadingPanelData={isLoadingPanelData} + setQueryResponse={setQueryResponse} /> )} @@ -751,6 +781,11 @@ function NewWidget({ selectedGraph }: NewWidgetProps): JSX.Element { setIsFillSpans={setIsFillSpans} isLogScale={isLogScale} setIsLogScale={setIsLogScale} + legendPosition={legendPosition} + setLegendPosition={setLegendPosition} + customLegendColors={customLegendColors} + setCustomLegendColors={setCustomLegendColors} + queryResponse={queryResponse} softMin={softMin} setSoftMin={setSoftMin} softMax={softMax} diff --git a/frontend/src/container/NewWidget/types.ts b/frontend/src/container/NewWidget/types.ts index c3952e935a6f..0b9b001e7c20 100644 --- a/frontend/src/container/NewWidget/types.ts +++ b/frontend/src/container/NewWidget/types.ts @@ -27,6 +27,11 @@ export interface WidgetGraphProps { requestData: GetQueryResultsProps; setRequestData: Dispatch>; isLoadingPanelData: boolean; + setQueryResponse?: Dispatch< + SetStateAction< + UseQueryResult, Error> + > + >; } export type WidgetGraphContainerProps = { diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx index 41aea04cb4d1..66e47e7b2a8b 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomains/AddDomain/index.tsx @@ -2,12 +2,12 @@ import { PlusOutlined } from '@ant-design/icons'; import { Button, Form, Input, Modal, Typography } from 'antd'; import { useForm } from 'antd/es/form/Form'; -import createDomainApi from 'api/SAML/postDomain'; -import { FeatureKeys } from 'constants/features'; +import createDomainApi from 'api/v1/domains/create'; import { useNotifications } from 'hooks/useNotifications'; import { useAppContext } from 'providers/App/App'; import { useState } from 'react'; import { useTranslation } from 'react-i18next'; +import APIError from 'types/api/error'; import { Container } from '../styles'; @@ -15,34 +15,27 @@ function AddDomain({ refetch }: Props): JSX.Element { const { t } = useTranslation(['common', 'organizationsettings']); const [isAddDomains, setIsDomain] = useState(false); const [form] = useForm(); - const { featureFlags, org } = useAppContext(); - const isSsoFlagEnabled = - featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false; + const { org } = useAppContext(); const { notifications } = useNotifications(); const onCreateHandler = async (): Promise => { try { - const response = await createDomainApi({ + await createDomainApi({ name: form.getFieldValue('domain'), orgId: (org || [])[0].id, }); - if (response.statusCode === 200) { - notifications.success({ - message: 'Your domain has been added successfully.', - duration: 15, - }); - setIsDomain(false); - refetch(); - } else { - notifications.error({ - message: t('common:something_went_wrong'), - }); - } + notifications.success({ + message: 'Your domain has been added successfully.', + duration: 15, + }); + setIsDomain(false); + refetch(); } catch (error) { notifications.error({ - message: t('common:something_went_wrong'), + message: (error as APIError).getErrorCode(), + description: (error as APIError).getErrorMessage(), }); } }; @@ -55,15 +48,13 @@ function AddDomain({ refetch }: Props): JSX.Element { ns: 'organizationsettings', })} - {isSsoFlagEnabled && ( - - )} + flag.name === FeatureKeys.SSO)?.active || false; + const onGoogleAuthClickHandler = useCallback(() => { assignSsoMethod(GOOGLE_AUTH); setIsSettingsOpen(false); @@ -35,24 +41,35 @@ function Create({ } }, [ssoMethod]); - const data: RowProps[] = [ - { - buttonText: ConfigureButtonText, - Icon: , - title: 'Google Apps Authentication', - subTitle: 'Let members sign-in with a Google account', - onClickHandler: onGoogleAuthClickHandler, - isDisabled: false, - }, - { - buttonText: ConfigureButtonText, - Icon: , - onClickHandler: onEditSAMLHandler, - subTitle: 'Azure, Active Directory, Okta or your custom SAML 2.0 solution', - title: 'SAML Authentication', - isDisabled: false, - }, - ]; + const data: RowProps[] = SSOFlag + ? [ + { + buttonText: ConfigureButtonText, + Icon: , + title: 'Google Apps Authentication', + subTitle: 'Let members sign-in with a Google account', + onClickHandler: onGoogleAuthClickHandler, + isDisabled: false, + }, + { + buttonText: ConfigureButtonText, + Icon: , + onClickHandler: onEditSAMLHandler, + subTitle: 'Azure, Active Directory, Okta or your custom SAML 2.0 solution', + title: 'SAML Authentication', + isDisabled: false, + }, + ] + : [ + { + buttonText: ConfigureButtonText, + Icon: , + title: 'Google Apps Authentication', + subTitle: 'Let members sign-in with a Google account', + onClickHandler: onGoogleAuthClickHandler, + isDisabled: false, + }, + ]; return (
diff --git a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx index 77fc14618b64..892ef426e51b 100644 --- a/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx +++ b/frontend/src/container/OrganizationSettings/AuthDomains/index.tsx @@ -1,18 +1,16 @@ -import { LockTwoTone } from '@ant-design/icons'; import { Button, Modal, Space, Typography } from 'antd'; import { ColumnsType } from 'antd/lib/table'; -import deleteDomain from 'api/SAML/deleteDomain'; -import listAllDomain from 'api/SAML/listAllDomain'; -import updateDomain from 'api/SAML/updateDomain'; +import deleteDomain from 'api/v1/domains/delete'; +import listAllDomain from 'api/v1/domains/list'; +import updateDomain from 'api/v1/domains/update'; import { ResizeTable } from 'components/ResizeTable'; import TextToolTip from 'components/TextToolTip'; -import { SIGNOZ_UPGRADE_PLAN_URL } from 'constants/app'; -import { FeatureKeys } from 'constants/features'; import { useNotifications } from 'hooks/useNotifications'; import { useAppContext } from 'providers/App/App'; import { Dispatch, SetStateAction, useCallback, useState } from 'react'; import { useTranslation } from 'react-i18next'; import { useQuery } from 'react-query'; +import APIError from 'types/api/error'; import { AuthDomain } from 'types/api/SAML/listDomain'; import { v4 } from 'uuid'; @@ -26,33 +24,12 @@ import SwitchComponent from './Switch'; function AuthDomains(): JSX.Element { const { t } = useTranslation(['common', 'organizationsettings']); const [isSettingsOpen, setIsSettingsOpen] = useState(false); - const { org, featureFlags } = useAppContext(); + const { org } = useAppContext(); const [currentDomain, setCurrentDomain] = useState(); const [isEditModalOpen, setIsEditModalOpen] = useState(false); - const SSOFlag = - featureFlags?.find((flag) => flag.name === FeatureKeys.SSO)?.active || false; - - const notEntripriseData: AuthDomain[] = [ - { - id: v4(), - name: '', - ssoEnabled: false, - orgId: (org || [])[0].id || '', - samlConfig: { - samlCert: '', - samlEntity: '', - samlIdp: '', - }, - ssoType: 'SAML', - }, - ]; - const { data, isLoading, refetch } = useQuery(['saml'], { - queryFn: () => - listAllDomain({ - orgId: (org || [])[0].id, - }), + queryFn: () => listAllDomain(), enabled: org !== null, }); @@ -75,32 +52,19 @@ function AuthDomains(): JSX.Element { const onRecordUpdateHandler = useCallback( async (record: AuthDomain): Promise => { try { - const response = await updateDomain(record); - - if (response.statusCode === 200) { - notifications.success({ - message: t('saml_settings', { - ns: 'organizationsettings', - }), - }); - refetch(); - onCloseHandler(setIsEditModalOpen)(); - - return true; - } - - notifications.error({ - message: t('something_went_wrong', { - ns: 'common', + await updateDomain(record); + notifications.success({ + message: t('saml_settings', { + ns: 'organizationsettings', }), }); - - return false; + refetch(); + onCloseHandler(setIsEditModalOpen)(); + return true; } catch (error) { notifications.error({ - message: t('something_went_wrong', { - ns: 'common', - }), + message: (error as APIError).getErrorCode(), + description: (error as APIError).getErrorMessage(), }); return false; } @@ -139,18 +103,19 @@ function AuthDomains(): JSX.Element { ns: 'organizationsettings', }), onOk: async () => { - const response = await deleteDomain({ - ...record, - }); + try { + await deleteDomain({ + ...record, + }); - if (response.statusCode === 200) { notifications.success({ message: t('common:success'), }); refetch(); - } else { + } catch (error) { notifications.error({ - message: t('common:something_went_wrong'), + message: (error as APIError).getErrorCode(), + description: (error as APIError).getErrorMessage(), }); } }, @@ -159,10 +124,6 @@ function AuthDomains(): JSX.Element { [refetch, t, notifications], ); - const onClickLicenseHandler = useCallback(() => { - window.open(SIGNOZ_UPGRADE_PLAN_URL); - }, []); - const columns: ColumnsType = [ { title: 'Domain', @@ -185,52 +146,24 @@ function AuthDomains(): JSX.Element { dataIndex: 'ssoEnabled', key: 'ssoEnabled', width: 80, - render: (value: boolean, record: AuthDomain): JSX.Element => { - if (!SSOFlag) { - return ( - - ); - } - - return ( - - ); - }, + render: (value: boolean, record: AuthDomain): JSX.Element => ( + + ), }, { title: '', dataIndex: 'description', key: 'description', width: 100, - render: (_, record: AuthDomain): JSX.Element => { - if (!SSOFlag) { - return ( - - ); - } - - return ( - - ); - }, + render: (_, record: AuthDomain): JSX.Element => ( + + ), }, { title: 'Action', @@ -238,19 +171,14 @@ function AuthDomains(): JSX.Element { key: 'action', width: 50, render: (_, record): JSX.Element => ( - ), }, ]; - if (!isLoading && data?.payload?.length === 0) { + if (!isLoading && data?.data?.length === 0) { return ( @@ -273,7 +201,7 @@ function AuthDomains(): JSX.Element { record.name + v4()} - dataSource={!SSOFlag ? notEntripriseData : []} + dataSource={[]} tableLayout="fixed" bordered /> @@ -281,8 +209,7 @@ function AuthDomains(): JSX.Element { ); } - const tableData = SSOFlag ? data?.payload || [] : notEntripriseData; - + const tableData = data?.data || []; return ( <> flag.name === FeatureKeys.SSO)?.active || false; - - const isAuthDomain = !isNotSSO; + const { org } = useAppContext(); if (!org) { return
; @@ -31,7 +25,7 @@ function OrganizationSettings(): JSX.Element { - {isAuthDomain && } + ); } diff --git a/frontend/src/container/PanelWrapper/PiePanelWrapper.tsx b/frontend/src/container/PanelWrapper/PiePanelWrapper.tsx index 948f62af3f82..237331dcda5d 100644 --- a/frontend/src/container/PanelWrapper/PiePanelWrapper.tsx +++ b/frontend/src/container/PanelWrapper/PiePanelWrapper.tsx @@ -50,14 +50,19 @@ function PiePanelWrapper({ color: string; }[] = [].concat( ...(panelData - .map((d) => ({ - label: getLabelName(d.metric, d.queryName || '', d.legend || ''), - value: d.values?.[0]?.[1], - color: generateColor( - getLabelName(d.metric, d.queryName || '', d.legend || ''), - isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor, - ), - })) + .map((d) => { + const label = getLabelName(d.metric, d.queryName || '', d.legend || ''); + return { + label, + value: d.values?.[0]?.[1], + color: + widget?.customLegendColors?.[label] || + generateColor( + label, + isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor, + ), + }; + }) .filter((d) => d !== undefined) as never[]), ); diff --git a/frontend/src/container/PanelWrapper/UplotPanelWrapper.tsx b/frontend/src/container/PanelWrapper/UplotPanelWrapper.tsx index 77b29c779854..1f50bc9eb127 100644 --- a/frontend/src/container/PanelWrapper/UplotPanelWrapper.tsx +++ b/frontend/src/container/PanelWrapper/UplotPanelWrapper.tsx @@ -138,6 +138,9 @@ function UplotPanelWrapper({ timezone: timezone.value, customSeries, isLogScale: widget?.isLogScale, + colorMapping: widget?.customLegendColors, + enhancedLegend: true, // Enable enhanced legend + legendPosition: widget?.legendPosition, }), [ widget?.id, @@ -163,6 +166,8 @@ function UplotPanelWrapper({ timezone.value, customSeries, widget?.isLogScale, + widget?.legendPosition, + widget?.customLegendColors, ], ); diff --git a/frontend/src/container/PanelWrapper/__tests__/enhancedLegend.test.ts b/frontend/src/container/PanelWrapper/__tests__/enhancedLegend.test.ts new file mode 100644 index 000000000000..036a5403973f --- /dev/null +++ b/frontend/src/container/PanelWrapper/__tests__/enhancedLegend.test.ts @@ -0,0 +1,521 @@ +/* eslint-disable sonarjs/no-duplicate-string */ +import { Dimensions } from 'hooks/useDimensions'; +import { LegendPosition } from 'types/api/dashboard/getAll'; + +import { + applyEnhancedLegendStyling, + calculateEnhancedLegendConfig, + EnhancedLegendConfig, +} from '../enhancedLegend'; + +describe('Enhanced Legend Functionality', () => { + const mockDimensions: Dimensions = { + width: 800, + height: 400, + }; + + const mockConfig: EnhancedLegendConfig = { + minHeight: 46, + maxHeight: 80, + calculatedHeight: 60, + showScrollbar: false, + requiredRows: 2, + minWidth: 150, + maxWidth: 300, + calculatedWidth: 200, + }; + + describe('calculateEnhancedLegendConfig', () => { + describe('Bottom Legend Configuration', () => { + it('should calculate correct configuration for bottom legend with few series', () => { + const config = calculateEnhancedLegendConfig( + mockDimensions, + 3, + ['Series A', 'Series B', 'Series C'], + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.minHeight).toBe(46); // lineHeight (34) + padding (12) + expect(config.showScrollbar).toBe(false); + expect(config.requiredRows).toBeGreaterThanOrEqual(1); // Actual behavior may vary + }); + + it('should calculate correct configuration for bottom legend with many series', () => { + const longSeriesLabels = Array.from( + { length: 10 }, + (_, i) => `Very Long Series Name ${i + 1}`, + ); + + const config = calculateEnhancedLegendConfig( + mockDimensions, + 10, + longSeriesLabels, + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.showScrollbar).toBe(true); + expect(config.requiredRows).toBeGreaterThan(2); + expect(config.maxHeight).toBeLessThanOrEqual(80); // absoluteMaxHeight constraint + }); + + it('should handle responsive width adjustments for bottom legend', () => { + const narrowDimensions: Dimensions = { width: 300, height: 400 }; + const wideDimensions: Dimensions = { width: 1200, height: 400 }; + + const narrowConfig = calculateEnhancedLegendConfig( + narrowDimensions, + 5, + ['Series A', 'Series B', 'Series C', 'Series D', 'Series E'], + LegendPosition.BOTTOM, + ); + + const wideConfig = calculateEnhancedLegendConfig( + wideDimensions, + 5, + ['Series A', 'Series B', 'Series C', 'Series D', 'Series E'], + LegendPosition.BOTTOM, + ); + + // Narrow panels should have more rows due to less items per row + expect(narrowConfig.requiredRows).toBeGreaterThanOrEqual( + wideConfig.requiredRows, + ); + }); + + it('should respect maximum legend height ratio for bottom legend', () => { + const config = calculateEnhancedLegendConfig( + mockDimensions, + 20, + Array.from({ length: 20 }, (_, i) => `Series ${i + 1}`), + LegendPosition.BOTTOM, + ); + + // The implementation uses absoluteMaxHeight of 80 + expect(config.calculatedHeight).toBeLessThanOrEqual(80); + }); + }); + + describe('Right Legend Configuration', () => { + it('should calculate correct configuration for right legend', () => { + const config = calculateEnhancedLegendConfig( + mockDimensions, + 5, + ['Series A', 'Series B', 'Series C', 'Series D', 'Series E'], + LegendPosition.RIGHT, + ); + + expect(config.calculatedWidth).toBeGreaterThan(0); + expect(config.minWidth).toBe(150); + expect(config.maxWidth).toBeLessThanOrEqual(400); + expect(config.calculatedWidth).toBeLessThanOrEqual( + mockDimensions.width * 0.3, + ); // maxLegendWidthRatio + expect(config.requiredRows).toBe(5); // Each series on its own row for right-side + }); + + it('should calculate width based on series label length for right legend', () => { + const shortLabels = ['A', 'B', 'C']; + const longLabels = [ + 'Very Long Series Name A', + 'Very Long Series Name B', + 'Very Long Series Name C', + ]; + + const shortConfig = calculateEnhancedLegendConfig( + mockDimensions, + 3, + shortLabels, + LegendPosition.RIGHT, + ); + + const longConfig = calculateEnhancedLegendConfig( + mockDimensions, + 3, + longLabels, + LegendPosition.RIGHT, + ); + + expect(longConfig.calculatedWidth).toBeGreaterThan( + shortConfig.calculatedWidth ?? 0, + ); + }); + + it('should handle scrollbar for right legend with many series', () => { + const tallDimensions: Dimensions = { width: 800, height: 200 }; + const manySeriesLabels = Array.from( + { length: 15 }, + (_, i) => `Series ${i + 1}`, + ); + + const config = calculateEnhancedLegendConfig( + tallDimensions, + 15, + manySeriesLabels, + LegendPosition.RIGHT, + ); + + expect(config.showScrollbar).toBe(true); + expect(config.calculatedHeight).toBeLessThanOrEqual(config.maxHeight); + }); + + it('should respect maximum width constraints for right legend', () => { + const narrowDimensions: Dimensions = { width: 400, height: 400 }; + + const config = calculateEnhancedLegendConfig( + narrowDimensions, + 5, + Array.from({ length: 5 }, (_, i) => `Very Long Series Name ${i + 1}`), + LegendPosition.RIGHT, + ); + + expect(config.calculatedWidth).toBeLessThanOrEqual( + narrowDimensions.width * 0.3, + ); + expect(config.calculatedWidth).toBeLessThanOrEqual(400); // absoluteMaxWidth + }); + }); + + describe('Edge Cases', () => { + it('should handle empty series labels', () => { + const config = calculateEnhancedLegendConfig( + mockDimensions, + 0, + [], + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.requiredRows).toBe(0); + }); + + it('should handle undefined series labels', () => { + const config = calculateEnhancedLegendConfig( + mockDimensions, + 3, + undefined, + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.requiredRows).toBe(1); // For 3 series, should be 1 row (logic only forces 2 rows when seriesCount > 3) + }); + + it('should handle very small dimensions', () => { + const smallDimensions: Dimensions = { width: 100, height: 100 }; + + const config = calculateEnhancedLegendConfig( + smallDimensions, + 3, + ['A', 'B', 'C'], + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.calculatedHeight).toBeLessThanOrEqual( + smallDimensions.height * 0.15, + ); + }); + }); + }); + + describe('applyEnhancedLegendStyling', () => { + let mockLegendElement: HTMLElement; + + beforeEach(() => { + mockLegendElement = document.createElement('div'); + mockLegendElement.className = 'u-legend'; + }); + + describe('Bottom Legend Styling', () => { + it('should apply correct classes for bottom legend', () => { + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 2, + LegendPosition.BOTTOM, + ); + + expect(mockLegendElement.classList.contains('u-legend-enhanced')).toBe( + true, + ); + expect(mockLegendElement.classList.contains('u-legend-bottom')).toBe(true); + expect(mockLegendElement.classList.contains('u-legend-right')).toBe(false); + expect(mockLegendElement.classList.contains('u-legend-multi-line')).toBe( + true, + ); + }); + + it('should apply single-line class for single row bottom legend', () => { + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 1, + LegendPosition.BOTTOM, + ); + + expect(mockLegendElement.classList.contains('u-legend-single-line')).toBe( + true, + ); + expect(mockLegendElement.classList.contains('u-legend-multi-line')).toBe( + false, + ); + }); + + it('should set correct height styles for bottom legend', () => { + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 2, + LegendPosition.BOTTOM, + ); + + expect(mockLegendElement.style.height).toBe('60px'); + expect(mockLegendElement.style.minHeight).toBe('46px'); + expect(mockLegendElement.style.maxHeight).toBe('80px'); + expect(mockLegendElement.style.width).toBe(''); + }); + }); + + describe('Right Legend Styling', () => { + it('should apply correct classes for right legend', () => { + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 5, + LegendPosition.RIGHT, + ); + + expect(mockLegendElement.classList.contains('u-legend-enhanced')).toBe( + true, + ); + expect(mockLegendElement.classList.contains('u-legend-right')).toBe(true); + expect(mockLegendElement.classList.contains('u-legend-bottom')).toBe(false); + expect(mockLegendElement.classList.contains('u-legend-right-aligned')).toBe( + true, + ); + }); + + it('should set correct width and height styles for right legend', () => { + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 5, + LegendPosition.RIGHT, + ); + + expect(mockLegendElement.style.width).toBe('200px'); + expect(mockLegendElement.style.minWidth).toBe('150px'); + expect(mockLegendElement.style.maxWidth).toBe('300px'); + expect(mockLegendElement.style.height).toBe('60px'); + expect(mockLegendElement.style.minHeight).toBe('46px'); + expect(mockLegendElement.style.maxHeight).toBe('80px'); + }); + }); + + describe('Scrollbar Styling', () => { + it('should add scrollable class when scrollbar is needed', () => { + const scrollableConfig = { ...mockConfig, showScrollbar: true }; + + applyEnhancedLegendStyling( + mockLegendElement, + scrollableConfig, + 5, + LegendPosition.BOTTOM, + ); + + expect(mockLegendElement.classList.contains('u-legend-scrollable')).toBe( + true, + ); + }); + + it('should remove scrollable class when scrollbar is not needed', () => { + mockLegendElement.classList.add('u-legend-scrollable'); + + applyEnhancedLegendStyling( + mockLegendElement, + mockConfig, + 2, + LegendPosition.BOTTOM, + ); + + expect(mockLegendElement.classList.contains('u-legend-scrollable')).toBe( + false, + ); + }); + }); + }); + + describe('Legend Responsive Distribution', () => { + describe('Items per row calculation', () => { + it('should calculate correct items per row for different panel widths', () => { + const testCases = [ + { width: 300, expectedMaxItemsPerRow: 2 }, + { width: 600, expectedMaxItemsPerRow: 4 }, + { width: 1200, expectedMaxItemsPerRow: 8 }, + ]; + + testCases.forEach(({ width, expectedMaxItemsPerRow }) => { + const dimensions: Dimensions = { width, height: 400 }; + const config = calculateEnhancedLegendConfig( + dimensions, + expectedMaxItemsPerRow + 2, // More series than can fit in one row + Array.from( + { length: expectedMaxItemsPerRow + 2 }, + (_, i) => `Series ${i + 1}`, + ), + LegendPosition.BOTTOM, + ); + + expect(config.requiredRows).toBeGreaterThan(1); + }); + }); + + it('should handle very long series names by adjusting layout', () => { + const longSeriesNames = [ + 'Very Long Series Name That Might Not Fit', + 'Another Extremely Long Series Name', + 'Yet Another Very Long Series Name', + ]; + + const config = calculateEnhancedLegendConfig( + { width: 400, height: 300 }, + 3, + longSeriesNames, + LegendPosition.BOTTOM, + ); + + // Should require more rows due to long names + expect(config.requiredRows).toBeGreaterThanOrEqual(2); + }); + }); + + describe('Dynamic height adjustment', () => { + it('should adjust height based on number of required rows', () => { + const fewSeries = calculateEnhancedLegendConfig( + mockDimensions, + 2, + ['A', 'B'], + LegendPosition.BOTTOM, + ); + + const manySeries = calculateEnhancedLegendConfig( + mockDimensions, + 10, + Array.from({ length: 10 }, (_, i) => `Series ${i + 1}`), + LegendPosition.BOTTOM, + ); + + expect(manySeries.calculatedHeight).toBeGreaterThan( + fewSeries.calculatedHeight, + ); + }); + }); + }); + + describe('Legend Position Integration', () => { + it('should handle legend position changes correctly', () => { + const seriesLabels = [ + 'Series A', + 'Series B', + 'Series C', + 'Series D', + 'Series E', + ]; + + const bottomConfig = calculateEnhancedLegendConfig( + mockDimensions, + 5, + seriesLabels, + LegendPosition.BOTTOM, + ); + + const rightConfig = calculateEnhancedLegendConfig( + mockDimensions, + 5, + seriesLabels, + LegendPosition.RIGHT, + ); + + // Bottom legend should have width constraints, right legend should have height constraints + expect(bottomConfig.calculatedWidth).toBeUndefined(); + expect(rightConfig.calculatedWidth).toBeDefined(); + expect(rightConfig.calculatedWidth).toBeGreaterThan(0); + }); + + it('should apply different styling based on legend position', () => { + const mockElement = document.createElement('div'); + + // Test bottom positioning + applyEnhancedLegendStyling( + mockElement, + mockConfig, + 3, + LegendPosition.BOTTOM, + ); + + const hasBottomClasses = mockElement.classList.contains('u-legend-bottom'); + + // Reset element + mockElement.className = 'u-legend'; + + // Test right positioning + applyEnhancedLegendStyling(mockElement, mockConfig, 3, LegendPosition.RIGHT); + + const hasRightClasses = mockElement.classList.contains('u-legend-right'); + + expect(hasBottomClasses).toBe(true); + expect(hasRightClasses).toBe(true); + }); + }); + + describe('Performance and Edge Cases', () => { + it('should handle large number of series efficiently', () => { + const startTime = Date.now(); + + const largeSeries = Array.from({ length: 100 }, (_, i) => `Series ${i + 1}`); + const config = calculateEnhancedLegendConfig( + mockDimensions, + 100, + largeSeries, + LegendPosition.BOTTOM, + ); + + const endTime = Date.now(); + const executionTime = endTime - startTime; + + expect(executionTime).toBeLessThan(100); // Should complete within 100ms + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.showScrollbar).toBe(true); + }); + + it('should handle zero dimensions gracefully', () => { + const zeroDimensions: Dimensions = { width: 0, height: 0 }; + + const config = calculateEnhancedLegendConfig( + zeroDimensions, + 3, + ['A', 'B', 'C'], + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.minHeight).toBeGreaterThan(0); + }); + + it('should handle negative dimensions gracefully', () => { + const negativeDimensions: Dimensions = { width: -100, height: -100 }; + + const config = calculateEnhancedLegendConfig( + negativeDimensions, + 3, + ['A', 'B', 'C'], + LegendPosition.BOTTOM, + ); + + expect(config.calculatedHeight).toBeGreaterThan(0); + expect(config.minHeight).toBeGreaterThan(0); + }); + }); +}); diff --git a/frontend/src/container/PanelWrapper/enhancedLegend.ts b/frontend/src/container/PanelWrapper/enhancedLegend.ts new file mode 100644 index 000000000000..948521593cd2 --- /dev/null +++ b/frontend/src/container/PanelWrapper/enhancedLegend.ts @@ -0,0 +1,246 @@ +import { Dimensions } from 'hooks/useDimensions'; +import { LegendPosition } from 'types/api/dashboard/getAll'; + +export interface EnhancedLegendConfig { + minHeight: number; + maxHeight: number; + calculatedHeight: number; + showScrollbar: boolean; + requiredRows: number; + // For right-side legend + minWidth?: number; + maxWidth?: number; + calculatedWidth?: number; +} + +/** + * Calculate legend configuration based on panel dimensions and series count + * Prioritizes chart space while ensuring legend usability + */ +// eslint-disable-next-line sonarjs/cognitive-complexity +export function calculateEnhancedLegendConfig( + dimensions: Dimensions, + seriesCount: number, + seriesLabels?: string[], + legendPosition: LegendPosition = LegendPosition.BOTTOM, +): EnhancedLegendConfig { + const lineHeight = 34; + const padding = 12; + const maxRowsToShow = 2; // Reduced from 3 to 2 for better chart/legend ratio + + // Different configurations for bottom vs right positioning + if (legendPosition === LegendPosition.RIGHT) { + // Right-side legend configuration + const maxLegendWidthRatio = 0.3; // Legend should not take more than 30% of panel width + const absoluteMaxWidth = Math.min( + 400, + dimensions.width * maxLegendWidthRatio, + ); + const minWidth = 150; + + // For right-side legend, calculate based on text length + const avgCharWidth = 8; + let avgTextLength = 15; + if (seriesLabels && seriesLabels.length > 0) { + const totalLength = seriesLabels.reduce( + (sum, label) => sum + Math.min(label.length, 40), + 0, + ); + avgTextLength = Math.max( + 10, + Math.min(35, totalLength / seriesLabels.length), + ); + } + + // Fix: Ensure width respects the ratio constraint even if it's less than minWidth + const estimatedWidth = 80 + avgCharWidth * avgTextLength; + const calculatedWidth = Math.min( + Math.max(minWidth, estimatedWidth), + absoluteMaxWidth, + ); + + // For right-side legend, height can be more flexible + const maxHeight = dimensions.height - 40; // Leave some padding + const idealHeight = seriesCount * lineHeight + padding; + const calculatedHeight = Math.min(idealHeight, maxHeight); + const showScrollbar = idealHeight > calculatedHeight; + + return { + minHeight: lineHeight + padding, + maxHeight, + calculatedHeight, + showScrollbar, + requiredRows: seriesCount, // Each series on its own row for right-side + minWidth, + maxWidth: absoluteMaxWidth, + calculatedWidth, + }; + } + + // Bottom legend configuration (existing logic) + const maxLegendRatio = 0.15; + // Fix: For very small dimensions, respect the ratio instead of using fixed 80px minimum + const ratioBasedMaxHeight = dimensions.height * maxLegendRatio; + + // Handle edge cases and calculate absolute max height + let absoluteMaxHeight; + if (dimensions.height <= 0) { + absoluteMaxHeight = 46; // Fallback for invalid dimensions + } else if (dimensions.height <= 400) { + // For small to medium panels, prioritize ratio constraint + absoluteMaxHeight = Math.min(80, Math.max(15, ratioBasedMaxHeight)); + } else { + // For larger panels, maintain a reasonable minimum + absoluteMaxHeight = Math.min(80, Math.max(20, ratioBasedMaxHeight)); + } + + const baseItemWidth = 44; + const avgCharWidth = 8; + + let avgTextLength = 15; + if (seriesLabels && seriesLabels.length > 0) { + const totalLength = seriesLabels.reduce( + (sum, label) => sum + Math.min(label.length, 30), + 0, + ); + avgTextLength = Math.max(8, Math.min(25, totalLength / seriesLabels.length)); + } + + // Estimate item width based on actual or estimated text length + let estimatedItemWidth = baseItemWidth + avgCharWidth * avgTextLength; + + // For very wide panels, allow longer text + if (dimensions.width > 800) { + estimatedItemWidth = Math.max( + estimatedItemWidth, + baseItemWidth + avgCharWidth * 22, + ); + } else if (dimensions.width < 400) { + estimatedItemWidth = Math.min( + estimatedItemWidth, + baseItemWidth + avgCharWidth * 14, + ); + } + + // Calculate items per row based on available width + const availableWidth = dimensions.width - padding * 2; + const itemsPerRow = Math.max( + 1, + Math.floor(availableWidth / estimatedItemWidth), + ); + let requiredRows = Math.ceil(seriesCount / itemsPerRow); + + if (requiredRows === 1 && seriesCount > 3) { + requiredRows = 2; + } + + // Calculate heights + const idealHeight = requiredRows * lineHeight + padding; + + // For single row, use minimal height + let minHeight; + if (requiredRows <= 1) { + minHeight = lineHeight + padding; // Single row + } else { + // Multiple rows: show 2 rows max, then scroll + minHeight = Math.min(2 * lineHeight + padding, idealHeight); + } + + // For very small dimensions, allow the minHeight to be smaller to respect ratio constraints + if (dimensions.height < 200) { + minHeight = Math.min(minHeight, absoluteMaxHeight); + } + + // Maximum height constraint - prioritize chart space + // Fix: Ensure we respect the ratio-based constraint for small dimensions + const rowBasedMaxHeight = maxRowsToShow * lineHeight + padding; + const maxHeight = Math.min(rowBasedMaxHeight, absoluteMaxHeight); + + const calculatedHeight = Math.max(minHeight, Math.min(idealHeight, maxHeight)); + const showScrollbar = idealHeight > calculatedHeight; + + return { + minHeight, + maxHeight, + calculatedHeight, + showScrollbar, + requiredRows, + }; +} + +// CSS class constants +const LEGEND_SINGLE_LINE_CLASS = 'u-legend-single-line'; +const LEGEND_MULTI_LINE_CLASS = 'u-legend-multi-line'; +const LEGEND_RIGHT_ALIGNED_CLASS = 'u-legend-right-aligned'; + +/** + * Apply enhanced legend styling to a legend element + */ +export function applyEnhancedLegendStyling( + legend: HTMLElement, + config: EnhancedLegendConfig, + requiredRows: number, + legendPosition: LegendPosition = LegendPosition.BOTTOM, +): void { + const legendElement = legend; + legendElement.classList.add('u-legend-enhanced'); + + // Apply position-specific styling + if (legendPosition === LegendPosition.RIGHT) { + legendElement.classList.add('u-legend-right'); + legendElement.classList.remove('u-legend-bottom'); + + // Set width for right-side legend + if (config.calculatedWidth) { + legendElement.style.width = `${config.calculatedWidth}px`; + legendElement.style.minWidth = `${config.minWidth}px`; + legendElement.style.maxWidth = `${config.maxWidth}px`; + } + + // Height for right-side legend + legendElement.style.height = `${config.calculatedHeight}px`; + legendElement.style.minHeight = `${config.minHeight}px`; + legendElement.style.maxHeight = `${config.maxHeight}px`; + } else { + legendElement.classList.add('u-legend-bottom'); + legendElement.classList.remove('u-legend-right'); + + // Height for bottom legend + legendElement.style.height = `${config.calculatedHeight}px`; + legendElement.style.minHeight = `${config.minHeight}px`; + legendElement.style.maxHeight = `${config.maxHeight}px`; + + // Reset width for bottom legend + legendElement.style.width = ''; + legendElement.style.minWidth = ''; + legendElement.style.maxWidth = ''; + } + + // Apply alignment based on position and number of rows + if (legendPosition === LegendPosition.RIGHT) { + legendElement.classList.add(LEGEND_RIGHT_ALIGNED_CLASS); + legendElement.classList.remove( + LEGEND_SINGLE_LINE_CLASS, + LEGEND_MULTI_LINE_CLASS, + ); + } else if (requiredRows === 1) { + legendElement.classList.add(LEGEND_SINGLE_LINE_CLASS); + legendElement.classList.remove( + LEGEND_MULTI_LINE_CLASS, + LEGEND_RIGHT_ALIGNED_CLASS, + ); + } else { + legendElement.classList.add(LEGEND_MULTI_LINE_CLASS); + legendElement.classList.remove( + LEGEND_SINGLE_LINE_CLASS, + LEGEND_RIGHT_ALIGNED_CLASS, + ); + } + + // Add scrollbar indicator if needed + if (config.showScrollbar) { + legendElement.classList.add('u-legend-scrollable'); + } else { + legendElement.classList.remove('u-legend-scrollable'); + } +} diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx index 4561d19b85fa..e7fe99e0ae7f 100644 --- a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetricTable.tsx @@ -33,7 +33,7 @@ function ServiceMetricTable({ const { notifications } = useNotifications(); const { t: getText } = useTranslation(['services']); - const { isFetchingActiveLicenseV3, trialInfo } = useAppContext(); + const { isFetchingActiveLicense, trialInfo } = useAppContext(); const { isCloudUser: isCloudUserVal } = useGetTenantLicense(); const queries = useGetQueriesRange(queryRangeRequestData, ENTITY_VERSION_V4, { @@ -70,7 +70,7 @@ function ServiceMetricTable({ useEffect(() => { if ( - !isFetchingActiveLicenseV3 && + !isFetchingActiveLicense && trialInfo?.onTrial && !trialInfo?.trialConvertedToSubscription && isCloudUserVal @@ -85,7 +85,7 @@ function ServiceMetricTable({ }, [ services, isCloudUserVal, - isFetchingActiveLicenseV3, + isFetchingActiveLicense, trialInfo?.onTrial, trialInfo?.trialConvertedToSubscription, ]); diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx index c579933920e8..7c4305374f32 100644 --- a/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx +++ b/frontend/src/container/ServiceApplication/ServiceTraces/ServiceTracesTable.tsx @@ -21,13 +21,13 @@ function ServiceTraceTable({ const [RPS, setRPS] = useState(0); const { t: getText } = useTranslation(['services']); - const { isFetchingActiveLicenseV3, trialInfo } = useAppContext(); + const { isFetchingActiveLicense, trialInfo } = useAppContext(); const { isCloudUser: isCloudUserVal } = useGetTenantLicense(); const tableColumns = useMemo(() => getColumns(search, false), [search]); useEffect(() => { if ( - !isFetchingActiveLicenseV3 && + !isFetchingActiveLicense && trialInfo?.onTrial && !trialInfo?.trialConvertedToSubscription && isCloudUserVal @@ -42,7 +42,7 @@ function ServiceTraceTable({ }, [ services, isCloudUserVal, - isFetchingActiveLicenseV3, + isFetchingActiveLicense, trialInfo?.onTrial, trialInfo?.trialConvertedToSubscription, ]); diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index eaef32a9d86d..2603981f146a 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -12,7 +12,7 @@ import { GlobalShortcuts } from 'constants/shortcuts/globalShortcuts'; import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys'; import useComponentPermission from 'hooks/useComponentPermission'; import { useGetTenantLicense } from 'hooks/useGetTenantLicense'; -import { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS } from 'hooks/useLicense'; +import { StatusCodes } from 'http-status-codes'; import history from 'lib/history'; import { AlertTriangle, @@ -26,7 +26,6 @@ import { useTranslation } from 'react-i18next'; import { useSelector } from 'react-redux'; import { useLocation } from 'react-router-dom'; import { AppState } from 'store/reducers'; -import { License } from 'types/api/licenses/def'; import AppReducer from 'types/reducer/app'; import { USER_ROLES } from 'types/roles'; import { checkVersionState } from 'utils/app'; @@ -59,7 +58,13 @@ function SideNav(): JSX.Element { AppReducer >((state) => state.app); - const { user, featureFlags, licenses, trialInfo } = useAppContext(); + const { + user, + featureFlags, + trialInfo, + activeLicense, + activeLicenseFetchError, + } = useAppContext(); const isOnboardingV3Enabled = featureFlags?.find( (flag) => flag.name === FeatureKeys.ONBOARDING_V3, @@ -96,14 +101,11 @@ function SideNav(): JSX.Element { const { t } = useTranslation(''); - const licenseStatus: string = - licenses?.licenses?.find((e: License) => e.isCurrent)?.status || ''; + const licenseStatus: string = activeLicense?.status || ''; const isWorkspaceBlocked = trialInfo?.workSpaceBlock || false; - const isLicenseActive = - licenseStatus?.toLocaleLowerCase() === - LICENSE_PLAN_STATUS.VALID.toLocaleLowerCase(); + const isLicenseActive = licenseStatus === 'VALID'; const onClickSignozCloud = (): void => { window.open( @@ -299,10 +301,10 @@ function SideNav(): JSX.Element { } const isOnBasicPlan = - licenses?.licenses?.some( - (license: License) => - license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, - ) || licenses?.licenses === null; + activeLicenseFetchError && + [StatusCodes.NOT_FOUND, StatusCodes.NOT_IMPLEMENTED].includes( + activeLicenseFetchError?.getHttpStatusCode(), + ); if (user.role !== USER_ROLES.ADMIN || isOnBasicPlan) { updatedMenuItems = updatedMenuItems.filter( @@ -347,10 +349,10 @@ function SideNav(): JSX.Element { isEnterpriseSelfHostedUser, isCurrentVersionError, isLatestVersion, - licenses?.licenses, onClickVersionHandler, t, user.role, + activeLicenseFetchError, ]); return ( @@ -443,7 +445,7 @@ function SideNav(): JSX.Element { onClick={onClickShortcuts} /> - {licenses && !isLicenseActive && ( + {!isLicenseActive && ( { + const graphType = requestData.originalGraphType || requestData.graphType; + if (graphType === PANEL_TYPES.BAR) { + const { start, end } = getStartEndRangeTime({ + type: requestData.selectedTime, + interval: requestData.globalSelectedInterval, + }); + + const updatedQuery = updateStepInterval( + requestData.query, + requestData.start ? requestData.start * 1e3 : parseInt(start, 10) * 1e3, + requestData.end ? requestData.end * 1e3 : parseInt(end, 10) * 1e3, + ); + + return { + ...requestData, + query: updatedQuery, + }; + } + + return requestData; + }, [requestData]); + return useQuery, Error>({ queryFn: async ({ signal }) => - GetMetricQueryRange(requestData, version, signal, headers), + GetMetricQueryRange(modifiedRequestData, version, signal, headers), ...options, queryKey, }); diff --git a/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx b/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx index d6ef7c963ed1..9948a7a195c1 100644 --- a/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx +++ b/frontend/src/hooks/useActiveLicenseV3/useActiveLicenseV3.tsx @@ -1,8 +1,9 @@ -import getActive from 'api/licensesV3/getActive'; +import getActive from 'api/v3/licenses/active/get'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; import { useQuery, UseQueryResult } from 'react-query'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { LicenseV3ResModel } from 'types/api/licensesV3/getActive'; +import { SuccessResponseV2 } from 'types/api'; +import APIError from 'types/api/error'; +import { LicenseResModel } from 'types/api/licensesV3/getActive'; const useActiveLicenseV3 = (isLoggedIn: boolean): UseLicense => useQuery({ @@ -11,9 +12,6 @@ const useActiveLicenseV3 = (isLoggedIn: boolean): UseLicense => enabled: !!isLoggedIn, }); -type UseLicense = UseQueryResult< - SuccessResponse | ErrorResponse, - unknown ->; +type UseLicense = UseQueryResult, APIError>; export default useActiveLicenseV3; diff --git a/frontend/src/hooks/useGetTenantLicense.ts b/frontend/src/hooks/useGetTenantLicense.ts index edc99c32e8e7..c241e23b4c56 100644 --- a/frontend/src/hooks/useGetTenantLicense.ts +++ b/frontend/src/hooks/useGetTenantLicense.ts @@ -1,4 +1,3 @@ -import { AxiosError } from 'axios'; import { useAppContext } from 'providers/App/App'; import { LicensePlatform } from 'types/api/licensesV3/getActive'; @@ -8,26 +7,26 @@ export const useGetTenantLicense = (): { isCommunityUser: boolean; isCommunityEnterpriseUser: boolean; } => { - const { activeLicenseV3, activeLicenseV3FetchError } = useAppContext(); + const { activeLicense, activeLicenseFetchError } = useAppContext(); const responsePayload = { - isCloudUser: activeLicenseV3?.platform === LicensePlatform.CLOUD || false, + isCloudUser: activeLicense?.platform === LicensePlatform.CLOUD || false, isEnterpriseSelfHostedUser: - activeLicenseV3?.platform === LicensePlatform.SELF_HOSTED || false, + activeLicense?.platform === LicensePlatform.SELF_HOSTED || false, isCommunityUser: false, isCommunityEnterpriseUser: false, }; if ( - activeLicenseV3FetchError && - (activeLicenseV3FetchError as AxiosError)?.response?.status === 404 + activeLicenseFetchError && + activeLicenseFetchError.getHttpStatusCode() === 404 ) { responsePayload.isCommunityEnterpriseUser = true; } if ( - activeLicenseV3FetchError && - (activeLicenseV3FetchError as AxiosError)?.response?.status === 501 + activeLicenseFetchError && + activeLicenseFetchError.getHttpStatusCode() === 501 ) { responsePayload.isCommunityUser = true; } diff --git a/frontend/src/hooks/useLicense/constant.ts b/frontend/src/hooks/useLicense/constant.ts deleted file mode 100644 index 71134fc08fac..000000000000 --- a/frontend/src/hooks/useLicense/constant.ts +++ /dev/null @@ -1,8 +0,0 @@ -export const LICENSE_PLAN_KEY = { - ENTERPRISE_PLAN: 'ENTERPRISE_PLAN', - BASIC_PLAN: 'BASIC_PLAN', -}; - -export const LICENSE_PLAN_STATUS = { - VALID: 'VALID', -}; diff --git a/frontend/src/hooks/useLicense/index.ts b/frontend/src/hooks/useLicense/index.ts deleted file mode 100644 index 387e93e7d65d..000000000000 --- a/frontend/src/hooks/useLicense/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS } from './constant'; -import useLicense from './useLicense'; - -export default useLicense; - -export { LICENSE_PLAN_KEY, LICENSE_PLAN_STATUS }; diff --git a/frontend/src/hooks/useLicense/useLicense.tsx b/frontend/src/hooks/useLicense/useLicense.tsx deleted file mode 100644 index 89d8ded97427..000000000000 --- a/frontend/src/hooks/useLicense/useLicense.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import getAll from 'api/licenses/getAll'; -import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; -// import { useAppContext } from 'providers/App/App'; -import { useQuery, UseQueryResult } from 'react-query'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps } from 'types/api/licenses/getAll'; - -const useLicense = (isLoggedIn: boolean): UseLicense => - useQuery({ - queryFn: getAll, - queryKey: [REACT_QUERY_KEY.GET_ALL_LICENCES], - enabled: !!isLoggedIn, - }); - -type UseLicense = UseQueryResult< - SuccessResponse | ErrorResponse, - unknown ->; - -export default useLicense; diff --git a/frontend/src/lib/dashboard/getQueryResults.ts b/frontend/src/lib/dashboard/getQueryResults.ts index fb0324ba5703..30c6e611dc4d 100644 --- a/frontend/src/lib/dashboard/getQueryResults.ts +++ b/frontend/src/lib/dashboard/getQueryResults.ts @@ -103,4 +103,5 @@ export interface GetQueryResultsProps { start?: number; end?: number; step?: number; + originalGraphType?: PANEL_TYPES; } diff --git a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts index 9c29e29d5bca..f9f847032874 100644 --- a/frontend/src/lib/uPlotLib/getUplotChartOptions.ts +++ b/frontend/src/lib/uPlotLib/getUplotChartOptions.ts @@ -1,3 +1,4 @@ +/* eslint-disable sonarjs/no-duplicate-string */ /* eslint-disable no-param-reassign */ /* eslint-disable @typescript-eslint/ban-ts-comment */ // @ts-nocheck @@ -8,10 +9,16 @@ import { PANEL_TYPES } from 'constants/queryBuilder'; import { FullViewProps } from 'container/GridCardLayout/GridCard/FullView/types'; import { saveLegendEntriesToLocalStorage } from 'container/GridCardLayout/GridCard/FullView/utils'; import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types'; +import { + applyEnhancedLegendStyling, + calculateEnhancedLegendConfig, +} from 'container/PanelWrapper/enhancedLegend'; import { Dimensions } from 'hooks/useDimensions'; import { convertValue } from 'lib/getConvertedValue'; +import getLabelName from 'lib/getLabelName'; import { cloneDeep, isUndefined } from 'lodash-es'; import _noop from 'lodash-es/noop'; +import { LegendPosition } from 'types/api/dashboard/getAll'; import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange'; import { Query } from 'types/api/queryBuilder/queryBuilderData'; import { QueryData, QueryDataV3 } from 'types/api/widgets/getQuery'; @@ -60,6 +67,9 @@ export interface GetUPlotChartOptions { customSeries?: (data: QueryData[]) => uPlot.Series[]; isLogScale?: boolean; colorMapping?: Record; + enhancedLegend?: boolean; + legendPosition?: LegendPosition; + enableZoom?: boolean; } /** the function converts series A , series B , series C to @@ -168,6 +178,9 @@ export const getUPlotChartOptions = ({ customSeries, isLogScale, colorMapping, + enhancedLegend = true, + legendPosition = LegendPosition.BOTTOM, + enableZoom, }: GetUPlotChartOptions): uPlot.Options => { const timeScaleProps = getXAxisScale(minTimeScale, maxTimeScale); @@ -180,10 +193,42 @@ export const getUPlotChartOptions = ({ const bands = stackBarChart ? getBands(series) : null; + // Calculate dynamic legend configuration based on panel dimensions and series count + const seriesCount = (apiResponse?.data?.result || []).length; + const seriesLabels = enhancedLegend + ? (apiResponse?.data?.result || []).map((item) => + getLabelName(item.metric || {}, item.queryName || '', item.legend || ''), + ) + : []; + const legendConfig = enhancedLegend + ? calculateEnhancedLegendConfig( + dimensions, + seriesCount, + seriesLabels, + legendPosition, + ) + : { + calculatedHeight: 30, + minHeight: 30, + maxHeight: 30, + itemsPerRow: 3, + showScrollbar: false, + }; + + // Calculate chart dimensions based on legend position + const chartWidth = + legendPosition === LegendPosition.RIGHT && legendConfig.calculatedWidth + ? dimensions.width - legendConfig.calculatedWidth - 10 + : dimensions.width; + const chartHeight = + legendPosition === LegendPosition.BOTTOM + ? dimensions.height - legendConfig.calculatedHeight - 10 + : dimensions.height; + return { id, - width: dimensions.width, - height: dimensions.height - 30, + width: chartWidth, + height: chartHeight, legend: { show: true, live: false, @@ -205,7 +250,25 @@ export const getUPlotChartOptions = ({ `${u.series[seriesIdx].points.stroke(u, seriesIdx)}90`, fill: (): string => '#fff', }, + ...(enableZoom + ? { + drag: { + x: true, + y: true, + }, + focus: { + prox: 30, + }, + } + : {}), }, + ...(enableZoom + ? { + select: { + show: true, + }, + } + : {}), tzDate, padding: [16, 16, 8, 8], bands, @@ -333,13 +396,166 @@ export const getUPlotChartOptions = ({ ], ready: [ (self): void => { + // Add CSS classes to the uPlot container based on legend position + const uplotContainer = self.root; + if (uplotContainer) { + uplotContainer.classList.remove( + 'u-plot-right-legend', + 'u-plot-bottom-legend', + ); + if (legendPosition === LegendPosition.RIGHT) { + uplotContainer.classList.add('u-plot-right-legend'); + } else { + uplotContainer.classList.add('u-plot-bottom-legend'); + } + } + const legend = self.root.querySelector('.u-legend'); if (legend) { + // Apply enhanced legend styling + if (enhancedLegend) { + applyEnhancedLegendStyling( + legend as HTMLElement, + legendConfig, + legendConfig.requiredRows, + legendPosition, + ); + } + + // Global cleanup function for all legend tooltips + const cleanupAllTooltips = (): void => { + const existingTooltips = document.querySelectorAll('.legend-tooltip'); + existingTooltips.forEach((tooltip) => tooltip.remove()); + }; + + // Add single global cleanup listener for this chart + const globalCleanupHandler = (e: MouseEvent): void => { + const target = e.target as HTMLElement; + if ( + !target.closest('.u-legend') && + !target.classList.contains('legend-tooltip') + ) { + cleanupAllTooltips(); + } + }; + document.addEventListener('mousemove', globalCleanupHandler); + + // Store cleanup function for potential removal later + (self as any)._tooltipCleanup = (): void => { + cleanupAllTooltips(); + document.removeEventListener('mousemove', globalCleanupHandler); + }; + const seriesEls = legend.querySelectorAll('.u-series'); const seriesArray = Array.from(seriesEls); seriesArray.forEach((seriesEl, index) => { - seriesEl.addEventListener('click', () => { - if (stackChart) { + // Add tooltip and proper text wrapping for legends + const thElement = seriesEl.querySelector('th'); + if (thElement && seriesLabels[index]) { + // Store the original marker element before clearing + const markerElement = thElement.querySelector('.u-marker'); + const markerClone = markerElement + ? (markerElement.cloneNode(true) as HTMLElement) + : null; + + // Get the current text content + const legendText = seriesLabels[index]; + + // Clear the th content and rebuild it + thElement.innerHTML = ''; + + // Add back the marker + if (markerClone) { + thElement.appendChild(markerClone); + } + + // Create text wrapper + const textSpan = document.createElement('span'); + textSpan.className = 'legend-text'; + textSpan.textContent = legendText; + thElement.appendChild(textSpan); + + // Setup tooltip functionality - check truncation on hover + let tooltipElement: HTMLElement | null = null; + let isHovering = false; + + const showTooltip = (e: MouseEvent): void => { + // Check if text is actually truncated at the time of hover + const isTextTruncated = (): boolean => { + // For right-side legends, check if text overflows the container + if (legendPosition === LegendPosition.RIGHT) { + return textSpan.scrollWidth > textSpan.clientWidth; + } + // For bottom legends, check if text is longer than reasonable display length + return legendText.length > 20; + }; + + // Only show tooltip if text is actually truncated + if (!isTextTruncated()) { + return; + } + + isHovering = true; + + // Clean up any existing tooltips first + cleanupAllTooltips(); + + // Small delay to ensure cleanup is complete and DOM is ready + setTimeout(() => { + if (!isHovering) return; // Don't show if mouse already left + + // Double-check no tooltip exists + if (document.querySelector('.legend-tooltip')) { + return; + } + + // Create tooltip element + tooltipElement = document.createElement('div'); + tooltipElement.className = 'legend-tooltip'; + tooltipElement.textContent = legendText; + tooltipElement.style.cssText = ` + position: fixed; + padding: 8px 12px; + border-radius: 6px; + font-size: 12px; + z-index: 10000; + pointer-events: none; + white-space: nowrap; + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.4); + border: 1px solid #374151; + `; + + // Position tooltip near cursor + const rect = (e.target as HTMLElement).getBoundingClientRect(); + tooltipElement.style.left = `${e.clientX + 10}px`; + tooltipElement.style.top = `${rect.top - 35}px`; + + document.body.appendChild(tooltipElement); + }, 15); + }; + + const hideTooltip = (): void => { + isHovering = false; + + // Simple cleanup with a reasonable delay + setTimeout(() => { + if (!isHovering && tooltipElement) { + tooltipElement.remove(); + tooltipElement = null; + } + }, 200); + }; + + // Simple tooltip events + thElement.addEventListener('mouseenter', showTooltip); + thElement.addEventListener('mouseleave', hideTooltip); + + // Add click handlers for marker and text separately + const currentMarker = thElement.querySelector('.u-marker'); + const textElement = thElement.querySelector('.legend-text'); + + // Helper function to handle stack chart logic + const handleStackChart = (): void => { setHiddenGraph((prev) => { if (isUndefined(prev)) { return { [index]: true }; @@ -349,30 +565,71 @@ export const getUPlotChartOptions = ({ } return { [index]: true }; }); - } - if (graphsVisibilityStates) { - setGraphsVisibilityStates?.((prev) => { - const newGraphVisibilityStates = [...prev]; - if ( - newGraphVisibilityStates[index + 1] && - newGraphVisibilityStates.every((value, i) => - i === index + 1 ? value : !value, - ) - ) { - newGraphVisibilityStates.fill(true); - } else { - newGraphVisibilityStates.fill(false); - newGraphVisibilityStates[index + 1] = true; + }; + + // Marker click handler - checkbox behavior (toggle individual series) + if (currentMarker) { + currentMarker.addEventListener('click', (e) => { + e.stopPropagation(); // Prevent event bubbling to text handler + + if (stackChart) { + handleStackChart(); + } + if (graphsVisibilityStates) { + setGraphsVisibilityStates?.((prev) => { + const newGraphVisibilityStates = [...prev]; + // Toggle the specific series visibility (checkbox behavior) + newGraphVisibilityStates[index + 1] = !newGraphVisibilityStates[ + index + 1 + ]; + + saveLegendEntriesToLocalStorage({ + options: self, + graphVisibilityState: newGraphVisibilityStates, + name: id || '', + }); + return newGraphVisibilityStates; + }); } - saveLegendEntriesToLocalStorage({ - options: self, - graphVisibilityState: newGraphVisibilityStates, - name: id || '', - }); - return newGraphVisibilityStates; }); } - }); + + // Text click handler - show only/show all behavior (existing behavior) + if (textElement) { + textElement.addEventListener('click', (e) => { + e.stopPropagation(); // Prevent event bubbling + + if (stackChart) { + handleStackChart(); + } + if (graphsVisibilityStates) { + setGraphsVisibilityStates?.((prev) => { + const newGraphVisibilityStates = [...prev]; + // Show only this series / show all behavior + if ( + newGraphVisibilityStates[index + 1] && + newGraphVisibilityStates.every((value, i) => + i === index + 1 ? value : !value, + ) + ) { + // If only this series is visible, show all + newGraphVisibilityStates.fill(true); + } else { + // Otherwise, show only this series + newGraphVisibilityStates.fill(false); + newGraphVisibilityStates[index + 1] = true; + } + saveLegendEntriesToLocalStorage({ + options: self, + graphVisibilityState: newGraphVisibilityStates, + name: id || '', + }); + return newGraphVisibilityStates; + }); + } + }); + } + } }); } }, @@ -392,6 +649,7 @@ export const getUPlotChartOptions = ({ stackBarChart, hiddenGraph, isDarkMode, + colorMapping, }), axes: getAxes({ isDarkMode, yAxisUnit, panelType, isLogScale }), }; diff --git a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts index 5de1f6d207c4..2c72acb6d6f1 100644 --- a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts +++ b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts @@ -34,6 +34,7 @@ const getSeries = ({ panelType, hiddenGraph, isDarkMode, + colorMapping, }: GetSeriesProps): uPlot.Options['series'] => { const configurations: uPlot.Series[] = [ { label: 'Timestamp', stroke: 'purple' }, @@ -52,10 +53,12 @@ const getSeries = ({ legend || '', ); - const color = generateColor( - label, - isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor, - ); + const color = + colorMapping?.[label] || + generateColor( + label, + isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor, + ); const pointSize = seriesList[i].values.length > 1 ? 5 : 10; const showPoints = !(seriesList[i].values.length > 1); @@ -105,6 +108,7 @@ export type GetSeriesProps = { hiddenGraph?: { [key: string]: boolean; }; + colorMapping?: Record; }; export default getSeries; diff --git a/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts b/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts index a955d787ac7d..cf9ca032210c 100644 --- a/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts +++ b/frontend/src/lib/uPlotLib/utils/tests/getUplotChartOptions.test.ts @@ -25,11 +25,44 @@ describe('getUPlotChartOptions', () => { const options = getUPlotChartOptions(inputPropsTimeSeries); expect(options.legend?.isolate).toBe(true); expect(options.width).toBe(inputPropsTimeSeries.dimensions.width); - expect(options.height).toBe(inputPropsTimeSeries.dimensions.height - 30); expect(options.axes?.length).toBe(2); expect(options.series[1].label).toBe('A'); }); + test('should return enhanced legend options when enabled', () => { + const options = getUPlotChartOptions({ + ...inputPropsTimeSeries, + enhancedLegend: true, + legendPosition: 'bottom' as any, + }); + expect(options.legend?.isolate).toBe(true); + expect(options.legend?.show).toBe(true); + expect(options.hooks?.ready).toBeDefined(); + expect(Array.isArray(options.hooks?.ready)).toBe(true); + }); + + test('should adjust chart dimensions for right legend position', () => { + const options = getUPlotChartOptions({ + ...inputPropsTimeSeries, + enhancedLegend: true, + legendPosition: 'right' as any, + }); + expect(options.legend?.isolate).toBe(true); + expect(options.width).toBeLessThan(inputPropsTimeSeries.dimensions.width); + expect(options.height).toBe(inputPropsTimeSeries.dimensions.height); + }); + + test('should adjust chart dimensions for bottom legend position', () => { + const options = getUPlotChartOptions({ + ...inputPropsTimeSeries, + enhancedLegend: true, + legendPosition: 'bottom' as any, + }); + expect(options.legend?.isolate).toBe(true); + expect(options.width).toBe(inputPropsTimeSeries.dimensions.width); + expect(options.height).toBeLessThan(inputPropsTimeSeries.dimensions.height); + }); + test('Should return line chart as drawStyle for time series', () => { const options = getUPlotChartOptions(inputPropsTimeSeries); // @ts-ignore diff --git a/frontend/src/mocks-server/__mockdata__/customQuickFilters.ts b/frontend/src/mocks-server/__mockdata__/customQuickFilters.ts index 3bc5a15e5351..bcb69e0db705 100644 --- a/frontend/src/mocks-server/__mockdata__/customQuickFilters.ts +++ b/frontend/src/mocks-server/__mockdata__/customQuickFilters.ts @@ -17,6 +17,13 @@ export const quickFiltersListResponse = { isColumn: false, isJSON: false, }, + { + key: 'duration_nano', + dataType: 'float64', + type: 'tag', + isColumn: false, + isJSON: false, + }, { key: 'quantity', dataType: 'float64', diff --git a/frontend/src/pages/AllErrors/index.tsx b/frontend/src/pages/AllErrors/index.tsx index 38d7bc44b336..d2f048c4a66e 100644 --- a/frontend/src/pages/AllErrors/index.tsx +++ b/frontend/src/pages/AllErrors/index.tsx @@ -6,7 +6,7 @@ import getLocalStorageKey from 'api/browser/localstorage/get'; import setLocalStorageApi from 'api/browser/localstorage/set'; import cx from 'classnames'; import QuickFilters from 'components/QuickFilters/QuickFilters'; -import { QuickFiltersSource } from 'components/QuickFilters/types'; +import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types'; import RouteTab from 'components/RouteTab'; import TypicalOverlayScrollbar from 'components/TypicalOverlayScrollbar/TypicalOverlayScrollbar'; import { LOCALSTORAGE } from 'constants/localStorage'; @@ -20,7 +20,6 @@ import { useState } from 'react'; import { useLocation } from 'react-router-dom'; import { routes } from './config'; -import { ExceptionsQuickFiltersConfig } from './utils'; function AllErrors(): JSX.Element { const { pathname } = useLocation(); @@ -49,8 +48,9 @@ function AllErrors(): JSX.Element { {showFilters && (
diff --git a/frontend/src/pages/Settings/utils.ts b/frontend/src/pages/Settings/utils.ts index 9ed3c428e416..be6c4c3901f3 100644 --- a/frontend/src/pages/Settings/utils.ts +++ b/frontend/src/pages/Settings/utils.ts @@ -48,7 +48,7 @@ export const getRoutes = ( settings.push(...alertChannels(t)); - if ((isCloudUser || isEnterpriseSelfHostedUser) && isAdmin) { + if (isAdmin) { settings.push(...apiKeys(t)); } diff --git a/frontend/src/pages/Support/Support.tsx b/frontend/src/pages/Support/Support.tsx index edc9066d2fc4..fe6517b70ee7 100644 --- a/frontend/src/pages/Support/Support.tsx +++ b/frontend/src/pages/Support/Support.tsx @@ -1,9 +1,8 @@ import './Support.styles.scss'; import { Button, Card, Modal, Typography } from 'antd'; -import updateCreditCardApi from 'api/billing/checkout'; import logEvent from 'api/common/logEvent'; -import { SOMETHING_WENT_WRONG } from 'constants/api'; +import updateCreditCardApi from 'api/v1/checkout/create'; import { FeatureKeys } from 'constants/features'; import { useNotifications } from 'hooks/useNotifications'; import { @@ -18,8 +17,9 @@ import { useAppContext } from 'providers/App/App'; import { useEffect, useState } from 'react'; import { useMutation } from 'react-query'; import { useHistory, useLocation } from 'react-router-dom'; -import { ErrorResponse, SuccessResponse } from 'types/api'; +import { SuccessResponseV2 } from 'types/api'; import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; +import APIError from 'types/api/error'; const { Title, Text } = Typography; @@ -109,20 +109,21 @@ export default function Support(): JSX.Element { !isPremiumChatSupportEnabled && !trialInfo?.trialConvertedToSubscription; const handleBillingOnSuccess = ( - data: ErrorResponse | SuccessResponse, + data: SuccessResponseV2, ): void => { - if (data?.payload?.redirectURL) { + if (data?.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }; - const handleBillingOnError = (): void => { + const handleBillingOnError = (error: APIError): void => { notifications.error({ - message: SOMETHING_WENT_WRONG, + message: error.getErrorCode(), + description: error.getErrorMessage(), }); }; diff --git a/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx b/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx index 91e3bd1c21c8..acce20df6a7a 100644 --- a/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx +++ b/frontend/src/pages/TracesExplorer/__test__/TracesExplorer.test.tsx @@ -1,5 +1,6 @@ /* eslint-disable sonarjs/no-duplicate-string */ import userEvent from '@testing-library/user-event'; +import { ENVIRONMENT } from 'constants/env'; import { initialQueriesMap, initialQueryBuilderFormValues, @@ -7,10 +8,10 @@ import { } from 'constants/queryBuilder'; import ROUTES from 'constants/routes'; import * as compositeQueryHook from 'hooks/queryBuilder/useGetCompositeQueryParam'; +import { quickFiltersListResponse } from 'mocks-server/__mockdata__/customQuickFilters'; import { queryRangeForListView, queryRangeForTableView, - queryRangeForTimeSeries, queryRangeForTraceView, } from 'mocks-server/__mockdata__/query_range'; import { server } from 'mocks-server/server'; @@ -18,6 +19,7 @@ import { rest } from 'msw'; import { QueryBuilderContext } from 'providers/QueryBuilder'; import { act, + cleanup, fireEvent, render, screen, @@ -42,6 +44,9 @@ import { const historyPush = jest.fn(); +const BASE_URL = ENVIRONMENT.baseURL; +const FILTER_SERVICE_NAME = 'Service Name'; + jest.mock('react-router-dom', () => ({ ...jest.requireActual('react-router-dom'), useLocation: (): { pathname: string } => ({ @@ -435,24 +440,6 @@ describe('TracesExplorer - Filters', () => { ][0].builder.queryData[0].filters.items, ).toEqual([]); }); - - it('filter panel should collapse & uncollapsed', async () => { - const { getByText, getByTestId } = render(); - - Object.values(AllTraceFilterKeyValue).forEach((filter) => { - expect(getByText(filter)).toBeInTheDocument(); - }); - - // Filter panel should collapse - const collapseButton = getByTestId('toggle-filter-panel'); - expect(collapseButton).toBeInTheDocument(); - fireEvent.click(collapseButton); - - // uncollapse btn should be present - expect( - await screen.findByTestId('filter-uncollapse-btn'), - ).toBeInTheDocument(); - }); }); const handleExplorerTabChangeTest = jest.fn(); @@ -463,57 +450,32 @@ jest.mock('hooks/useHandleExplorerTabChange', () => ({ })); describe('TracesExplorer - ', () => { - it('should render the traces explorer page', async () => { + const quickFiltersListURL = `${BASE_URL}/api/v1/orgs/me/filters/traces`; + + const setupServer = (): void => { server.use( - rest.post('http://localhost/api/v4/query_range', (req, res, ctx) => - res(ctx.status(200), ctx.json(queryRangeForTimeSeries)), + rest.get(quickFiltersListURL, (_, res, ctx) => + res(ctx.status(200), ctx.json(quickFiltersListResponse)), ), ); - const { findByText, getByText } = render(); + }; - // assert mocked date time selection - expect(await findByText('MockDateTimeSelection')).toBeInTheDocument(); - - // assert stage&Btn - expect(getByText('Stage & Run Query')).toBeInTheDocument(); - - // assert QB - will not write tests for QB as that would be covererd in QB tests separately - expect( - getByText( - 'Search Filter : select options from suggested values, for IN/NOT IN operators - press "Enter" after selecting options', - ), - ).toBeInTheDocument(); - expect(getByText('AGGREGATION INTERVAL')).toBeInTheDocument(); - // why is this present here?? - // expect(getByText('Metrics name')).toBeInTheDocument(); - // expect(getByText('WHERE')).toBeInTheDocument(); - // expect(getByText('Legend Format')).toBeInTheDocument(); - - // assert timeseries chart mock - // expect(await screen.findByText('MockUplot')).toBeInTheDocument(); + beforeEach(() => { + setupServer(); }); - it('check tab navigation', async () => { - const { getByTestId, getByText } = render(); + afterEach(() => { + server.resetHandlers(); + }); - // switch to Table view - const TableBtn = getByText('Table View'); - expect(TableBtn).toBeInTheDocument(); - fireEvent.click(TableBtn); - - expect(handleExplorerTabChangeTest).toBeCalledWith(PANEL_TYPES.TABLE); - - // switch to traces view - const tracesBtn = getByTestId('Traces'); - expect(tracesBtn).toBeInTheDocument(); - fireEvent.click(tracesBtn); - - expect(handleExplorerTabChangeTest).toBeCalledWith(PANEL_TYPES.TRACE); + afterAll(() => { + server.close(); + cleanup(); }); it('trace explorer - list view', async () => { server.use( - rest.post('http://localhost/api/v4/query_range', (req, res, ctx) => + rest.post(`${BASE_URL}/api/v4/query_range`, (req, res, ctx) => res(ctx.status(200), ctx.json(queryRangeForListView)), ), ); @@ -524,6 +486,7 @@ describe('TracesExplorer - ', () => { , ); + await screen.findByText(FILTER_SERVICE_NAME); expect(await screen.findByText('Timestamp')).toBeInTheDocument(); expect(getByText('options_menu.options')).toBeInTheDocument(); @@ -536,7 +499,7 @@ describe('TracesExplorer - ', () => { it('trace explorer - table view', async () => { server.use( - rest.post('http://localhost/api/v4/query_range', (req, res, ctx) => + rest.post(`${BASE_URL}/api/v4/query_range`, (req, res, ctx) => res(ctx.status(200), ctx.json(queryRangeForTableView)), ), ); @@ -554,7 +517,7 @@ describe('TracesExplorer - ', () => { it('trace explorer - trace view', async () => { server.use( - rest.post('http://localhost/api/v4/query_range', (req, res, ctx) => + rest.post(`${BASE_URL}/api/v4/query_range`, (req, res, ctx) => res(ctx.status(200), ctx.json(queryRangeForTraceView)), ), ); @@ -591,7 +554,11 @@ describe('TracesExplorer - ', () => { }); it('test for explorer options', async () => { - const { getByText, getByTestId } = render(); + const { getByText, getByTestId } = render( + + + , + ); // assert explorer options - action btns [ @@ -619,8 +586,12 @@ describe('TracesExplorer - ', () => { }); it('select a view options - assert and save this view', async () => { - const { container } = render(); - + const { container } = render( + + + , + ); + await screen.findByText(FILTER_SERVICE_NAME); await act(async () => { fireEvent.mouseDown( container.querySelector( @@ -664,7 +635,12 @@ describe('TracesExplorer - ', () => { }); it('create a dashboard btn assert', async () => { - const { getByText } = render(); + const { getByText } = render( + + + , + ); + await screen.findByText(FILTER_SERVICE_NAME); const createDashboardBtn = getByText('Add to Dashboard'); expect(createDashboardBtn).toBeInTheDocument(); @@ -687,7 +663,12 @@ describe('TracesExplorer - ', () => { }); it('create an alert btn assert', async () => { - const { getByText } = render(); + const { getByText } = render( + + + , + ); + await screen.findByText(FILTER_SERVICE_NAME); const createAlertBtn = getByText('Create an Alert'); expect(createAlertBtn).toBeInTheDocument(); diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx index 448e41bf337c..c1e82a82d0cb 100644 --- a/frontend/src/pages/TracesExplorer/index.tsx +++ b/frontend/src/pages/TracesExplorer/index.tsx @@ -7,6 +7,8 @@ import logEvent from 'api/common/logEvent'; import axios from 'axios'; import cx from 'classnames'; import ExplorerCard from 'components/ExplorerCard/ExplorerCard'; +import QuickFilters from 'components/QuickFilters/QuickFilters'; +import { QuickFiltersSource, SignalType } from 'components/QuickFilters/types'; import { LOCALSTORAGE } from 'constants/localStorage'; import { AVAILABLE_EXPORT_PANEL_TYPES } from 'constants/panelTypes'; import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder'; @@ -34,7 +36,6 @@ import { DataSource } from 'types/common/queryBuilder'; import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink'; import { v4 } from 'uuid'; -import { Filter } from './Filter/Filter'; import { ActionsWrapper, Container } from './styles'; import { getTabsItems } from './utils'; @@ -244,7 +245,14 @@ function TracesExplorer(): JSX.Element { }>
{ - if (!isFetchingActiveLicenseV3) { - const isTerminated = activeLicenseV3?.state === LicenseState.TERMINATED; - const isExpired = activeLicenseV3?.state === LicenseState.EXPIRED; - const isCancelled = activeLicenseV3?.state === LicenseState.CANCELLED; + if (!isFetchingActiveLicense) { + const isTerminated = activeLicense?.state === LicenseState.TERMINATED; + const isExpired = activeLicense?.state === LicenseState.EXPIRED; + const isCancelled = activeLicense?.state === LicenseState.CANCELLED; const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled; if ( !isWorkspaceAccessRestricted || - activeLicenseV3.platform === LicensePlatform.SELF_HOSTED + activeLicense.platform === LicensePlatform.SELF_HOSTED ) { history.push(ROUTES.HOME); } } - }, [isFetchingActiveLicenseV3, activeLicenseV3]); + }, [isFetchingActiveLicense, activeLicense]); return (
@@ -44,7 +44,7 @@ function WorkspaceAccessRestricted(): JSX.Element { width="65%" >
- {isFetchingActiveLicenseV3 || !activeLicenseV3 ? ( + {isFetchingActiveLicense || !activeLicense ? ( ) : ( <> @@ -55,7 +55,7 @@ function WorkspaceAccessRestricted(): JSX.Element { level={4} className="workspace-access-restricted__details" > - {activeLicenseV3.state === LicenseState.TERMINATED && ( + {activeLicense.state === LicenseState.TERMINATED && ( <> Your SigNoz license is terminated, please contact support at{' '} @@ -64,7 +64,7 @@ function WorkspaceAccessRestricted(): JSX.Element { for a new deployment )} - {activeLicenseV3.state === LicenseState.EXPIRED && ( + {activeLicense.state === LicenseState.EXPIRED && ( <> Your SigNoz license is expired, please contact support at{' '} @@ -81,7 +81,7 @@ function WorkspaceAccessRestricted(): JSX.Element { . )} - {activeLicenseV3.state === LicenseState.CANCELLED && ( + {activeLicense.state === LicenseState.CANCELLED && ( <> Your SigNoz license is cancelled, please contact support at{' '} diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx index c0cd0877ba67..dc680983ceb8 100644 --- a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx @@ -16,8 +16,8 @@ import { Tabs, Typography, } from 'antd'; -import updateCreditCardApi from 'api/billing/checkout'; import logEvent from 'api/common/logEvent'; +import updateCreditCardApi from 'api/v1/checkout/create'; import ROUTES from 'constants/routes'; import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; @@ -26,6 +26,7 @@ import { useAppContext } from 'providers/App/App'; import { useCallback, useEffect } from 'react'; import { useTranslation } from 'react-i18next'; import { useMutation } from 'react-query'; +import APIError from 'types/api/error'; import { LicensePlatform } from 'types/api/licensesV3/getActive'; import { getFormattedDate } from 'utils/timeUtils'; @@ -41,9 +42,9 @@ import { export default function WorkspaceBlocked(): JSX.Element { const { user, - isFetchingActiveLicenseV3, + isFetchingActiveLicense, trialInfo, - activeLicenseV3, + activeLicense, } = useAppContext(); const isAdmin = user.role === 'ADMIN'; const { notifications } = useNotifications(); @@ -70,37 +71,38 @@ export default function WorkspaceBlocked(): JSX.Element { }; useEffect(() => { - if (!isFetchingActiveLicenseV3) { + if (!isFetchingActiveLicense) { const shouldBlockWorkspace = trialInfo?.workSpaceBlock; if ( !shouldBlockWorkspace || - activeLicenseV3?.platform === LicensePlatform.SELF_HOSTED + activeLicense?.platform === LicensePlatform.SELF_HOSTED ) { history.push(ROUTES.HOME); } } }, [ - isFetchingActiveLicenseV3, + isFetchingActiveLicense, trialInfo?.workSpaceBlock, - activeLicenseV3?.platform, + activeLicense?.platform, ]); const { mutate: updateCreditCard, isLoading } = useMutation( updateCreditCardApi, { onSuccess: (data) => { - if (data.payload?.redirectURL) { + if (data.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }, - onError: () => + onError: (error: APIError) => notifications.error({ - message: t('somethingWentWrong'), + message: error.getErrorCode(), + description: error.getErrorMessage(), }), }, ); @@ -320,7 +322,7 @@ export default function WorkspaceBlocked(): JSX.Element { width="65%" >
- {isFetchingActiveLicenseV3 || !trialInfo ? ( + {isFetchingActiveLicense || !trialInfo ? ( ) : ( <> diff --git a/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx b/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx index 4671527b1054..3633eb7135d5 100644 --- a/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx +++ b/frontend/src/pages/WorkspaceSuspended/WorkspaceSuspended.tsx @@ -10,7 +10,7 @@ import { Space, Typography, } from 'antd'; -import manageCreditCardApi from 'api/billing/manage'; +import manageCreditCardApi from 'api/v1/portal/create'; import ROUTES from 'constants/routes'; import dayjs from 'dayjs'; import { useNotifications } from 'hooks/useNotifications'; @@ -19,6 +19,7 @@ import { useAppContext } from 'providers/App/App'; import { useCallback, useEffect } from 'react'; import { useTranslation } from 'react-i18next'; import { useMutation } from 'react-query'; +import APIError from 'types/api/error'; import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive'; import { getFormattedDateWithMinutes } from 'utils/timeUtils'; @@ -26,7 +27,7 @@ function WorkspaceSuspended(): JSX.Element { const { user } = useAppContext(); const isAdmin = user.role === 'ADMIN'; const { notifications } = useNotifications(); - const { activeLicenseV3, isFetchingActiveLicenseV3 } = useAppContext(); + const { activeLicense, isFetchingActiveLicense } = useAppContext(); const { t } = useTranslation(['failedPayment']); @@ -34,17 +35,18 @@ function WorkspaceSuspended(): JSX.Element { manageCreditCardApi, { onSuccess: (data) => { - if (data.payload?.redirectURL) { + if (data.data?.redirectURL) { const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; + newTab.href = data.data.redirectURL; newTab.target = '_blank'; newTab.rel = 'noopener noreferrer'; newTab.click(); } }, - onError: () => + onError: (error: APIError) => notifications.error({ - message: t('somethingWentWrong'), + message: error.getErrorCode(), + description: error.getErrorMessage(), }), }, ); @@ -56,18 +58,18 @@ function WorkspaceSuspended(): JSX.Element { }, [manageCreditCard]); useEffect(() => { - if (!isFetchingActiveLicenseV3) { + if (!isFetchingActiveLicense) { const shouldSuspendWorkspace = - activeLicenseV3?.state === LicenseState.DEFAULTED; + activeLicense?.state === LicenseState.DEFAULTED; if ( !shouldSuspendWorkspace || - activeLicenseV3?.platform === LicensePlatform.SELF_HOSTED + activeLicense?.platform === LicensePlatform.SELF_HOSTED ) { history.push(ROUTES.HOME); } } - }, [isFetchingActiveLicenseV3, activeLicenseV3]); + }, [isFetchingActiveLicense, activeLicense]); return (
- {isFetchingActiveLicenseV3 || !activeLicenseV3 ? ( + {isFetchingActiveLicense || !activeLicense ? ( ) : ( <> @@ -115,7 +117,7 @@ function WorkspaceSuspended(): JSX.Element { {t('yourDataIsSafe')}{' '} {getFormattedDateWithMinutes( - dayjs(activeLicenseV3?.event_queue?.scheduled_at).unix() || + dayjs(activeLicense?.event_queue?.scheduled_at).unix() || Date.now(), )} {' '} diff --git a/frontend/src/periscope/components/KeyValueLabel/KeyValueLabel.tsx b/frontend/src/periscope/components/KeyValueLabel/KeyValueLabel.tsx index c0987ccff116..ee3cc7f08d8d 100644 --- a/frontend/src/periscope/components/KeyValueLabel/KeyValueLabel.tsx +++ b/frontend/src/periscope/components/KeyValueLabel/KeyValueLabel.tsx @@ -6,7 +6,7 @@ import { useMemo } from 'react'; import TrimmedText from '../TrimmedText/TrimmedText'; type KeyValueLabelProps = { - badgeKey: string; + badgeKey: string | React.ReactNode; badgeValue: string; maxCharacters?: number; }; @@ -25,7 +25,11 @@ export default function KeyValueLabel({ return (
- + {typeof badgeKey === 'string' ? ( + + ) : ( + badgeKey + )}
{isUrl ? (
(undefined); export function AppProvider({ children }: PropsWithChildren): JSX.Element { // on load of the provider set the user defaults with access jwt , refresh jwt and user id from local storage const [user, setUser] = useState(() => getUserDefaults()); - const [licenses, setLicenses] = useState(null); - const [ - activeLicenseV3, - setActiveLicenseV3, - ] = useState(null); - + const [activeLicense, setActiveLicense] = useState( + null, + ); const [trialInfo, setTrialInfo] = useState(null); - const [featureFlags, setFeatureFlags] = useState(null); const [orgPreferences, setOrgPreferences] = useState( null, @@ -103,59 +98,40 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { } }, [userData, isFetchingUser]); - // fetcher for licenses v2 - // license will be fetched if we are in logged in state - const { - data: licenseData, - isFetching: isFetchingLicenses, - error: licensesFetchError, - refetch: licensesRefetch, - } = useLicense(isLoggedIn); - useEffect(() => { - if (!isFetchingLicenses && licenseData && licenseData.payload) { - setLicenses(licenseData.payload); - } - }, [licenseData, isFetchingLicenses]); - // fetcher for licenses v3 const { - data: activeLicenseV3Data, - isFetching: isFetchingActiveLicenseV3, - error: activeLicenseV3FetchError, + data: activeLicenseData, + isFetching: isFetchingActiveLicense, + error: activeLicenseFetchError, + refetch: activeLicenseRefetch, } = useActiveLicenseV3(isLoggedIn); useEffect(() => { - if ( - !isFetchingActiveLicenseV3 && - activeLicenseV3Data && - activeLicenseV3Data.payload - ) { - setActiveLicenseV3(activeLicenseV3Data.payload); + if (!isFetchingActiveLicense && activeLicenseData && activeLicenseData.data) { + setActiveLicense(activeLicenseData.data); const isOnTrial = dayjs( - activeLicenseV3Data.payload.free_until || Date.now(), + activeLicenseData.data.free_until || Date.now(), ).isAfter(dayjs()); const trialInfo: TrialInfo = { - trialStart: activeLicenseV3Data.payload.valid_from, - trialEnd: dayjs( - activeLicenseV3Data.payload.free_until || Date.now(), - ).unix(), + trialStart: activeLicenseData.data.valid_from, + trialEnd: dayjs(activeLicenseData.data.free_until || Date.now()).unix(), onTrial: isOnTrial, workSpaceBlock: - activeLicenseV3Data.payload.state === LicenseState.EVALUATION_EXPIRED && - activeLicenseV3Data.payload.platform === LicensePlatform.CLOUD, + activeLicenseData.data.state === LicenseState.EVALUATION_EXPIRED && + activeLicenseData.data.platform === LicensePlatform.CLOUD, trialConvertedToSubscription: - activeLicenseV3Data.payload.state !== LicenseState.ISSUED && - activeLicenseV3Data.payload.state !== LicenseState.EVALUATING && - activeLicenseV3Data.payload.state !== LicenseState.EVALUATION_EXPIRED, + activeLicenseData.data.state !== LicenseState.ISSUED && + activeLicenseData.data.state !== LicenseState.EVALUATING && + activeLicenseData.data.state !== LicenseState.EVALUATION_EXPIRED, gracePeriodEnd: dayjs( - activeLicenseV3Data.payload.event_queue.scheduled_at || Date.now(), + activeLicenseData.data.event_queue.scheduled_at || Date.now(), ).unix(), }; setTrialInfo(trialInfo); } - }, [activeLicenseV3Data, isFetchingActiveLicenseV3]); + }, [activeLicenseData, isFetchingActiveLicense]); // fetcher for feature flags const { @@ -176,6 +152,12 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { enabled: !!isLoggedIn && !!user.email && user.role === USER_ROLES.ADMIN, }); + const { data: versionData } = useQuery({ + queryFn: getUserVersion, + queryKey: ['getUserVersion', user?.accessJwt], + enabled: isLoggedIn, + }); + useEffect(() => { if ( !isFetchingOrgPreferences && @@ -242,9 +224,8 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { useGlobalEventListener('LOGOUT', () => { setIsLoggedIn(false); setUser(getUserDefaults()); - setActiveLicenseV3(null); + setActiveLicense(null); setTrialInfo(null); - setLicenses(null); setFeatureFlags(null); setOrgPreferences(null); setOrg(null); @@ -254,49 +235,45 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element { const value: IAppContext = useMemo( () => ({ user, - licenses, - activeLicenseV3, + activeLicense, featureFlags, trialInfo, orgPreferences, isLoggedIn, org, isFetchingUser, - isFetchingLicenses, - isFetchingActiveLicenseV3, + isFetchingActiveLicense, isFetchingFeatureFlags, isFetchingOrgPreferences, userFetchError, - licensesFetchError, - activeLicenseV3FetchError, + activeLicenseFetchError, featureFlagsFetchError, orgPreferencesFetchError, - licensesRefetch, + activeLicenseRefetch, updateUser, updateOrgPreferences, updateOrg, + versionData: versionData?.payload || null, }), [ trialInfo, - activeLicenseV3, - activeLicenseV3FetchError, + activeLicense, + activeLicenseFetchError, featureFlags, featureFlagsFetchError, - isFetchingActiveLicenseV3, + isFetchingActiveLicense, isFetchingFeatureFlags, - isFetchingLicenses, isFetchingOrgPreferences, isFetchingUser, isLoggedIn, - licenses, - licensesFetchError, - licensesRefetch, org, orgPreferences, orgPreferencesFetchError, + activeLicenseRefetch, updateOrg, user, userFetchError, + versionData, ], ); return {children}; diff --git a/frontend/src/providers/App/types.ts b/frontend/src/providers/App/types.ts index 8c9d2117dcc6..6823b9d82704 100644 --- a/frontend/src/providers/App/types.ts +++ b/frontend/src/providers/App/types.ts @@ -1,33 +1,32 @@ +import APIError from 'types/api/error'; import { FeatureFlagProps as FeatureFlags } from 'types/api/features/getFeaturesFlags'; -import { PayloadProps as LicensesResModel } from 'types/api/licenses/getAll'; -import { LicenseV3ResModel, TrialInfo } from 'types/api/licensesV3/getActive'; +import { LicenseResModel, TrialInfo } from 'types/api/licensesV3/getActive'; import { Organization } from 'types/api/user/getOrganization'; import { UserResponse as User } from 'types/api/user/getUser'; +import { PayloadProps } from 'types/api/user/getVersion'; import { OrgPreference } from 'types/reducer/app'; export interface IAppContext { user: IUser; - licenses: LicensesResModel | null; - activeLicenseV3: LicenseV3ResModel | null; + activeLicense: LicenseResModel | null; trialInfo: TrialInfo | null; featureFlags: FeatureFlags[] | null; orgPreferences: OrgPreference[] | null; isLoggedIn: boolean; org: Organization[] | null; isFetchingUser: boolean; - isFetchingLicenses: boolean; - isFetchingActiveLicenseV3: boolean; + isFetchingActiveLicense: boolean; isFetchingFeatureFlags: boolean; isFetchingOrgPreferences: boolean; userFetchError: unknown; - licensesFetchError: unknown; - activeLicenseV3FetchError: unknown; + activeLicenseFetchError: APIError | null; featureFlagsFetchError: unknown; orgPreferencesFetchError: unknown; - licensesRefetch: () => void; + activeLicenseRefetch: () => void; updateUser: (user: IUser) => void; updateOrgPreferences: (orgPreferences: OrgPreference[]) => void; updateOrg(orgId: string, updatedOrgName: string): void; + versionData: PayloadProps | null; } // User diff --git a/frontend/src/providers/ErrorModalProvider.tsx b/frontend/src/providers/ErrorModalProvider.tsx new file mode 100644 index 000000000000..7a47d95316a7 --- /dev/null +++ b/frontend/src/providers/ErrorModalProvider.tsx @@ -0,0 +1,60 @@ +import ErrorModal from 'components/ErrorModal/ErrorModal'; +import { + createContext, + ReactNode, + useCallback, + useContext, + useMemo, + useState, +} from 'react'; +import APIError from 'types/api/error'; + +interface ErrorModalContextType { + showErrorModal: (error: APIError) => void; + hideErrorModal: () => void; +} + +const ErrorModalContext = createContext( + undefined, +); + +export function ErrorModalProvider({ + children, +}: { + children: ReactNode; +}): JSX.Element { + const [error, setError] = useState(null); + const [isVisible, setIsVisible] = useState(false); + + const showErrorModal = useCallback((error: APIError): void => { + setError(error); + setIsVisible(true); + }, []); + + const hideErrorModal = useCallback((): void => { + setError(null); + setIsVisible(false); + }, []); + + const value = useMemo(() => ({ showErrorModal, hideErrorModal }), [ + showErrorModal, + hideErrorModal, + ]); + + return ( + + {children} + {isVisible && error && ( + + )} + + ); +} + +export const useErrorModal = (): ErrorModalContextType => { + const context = useContext(ErrorModalContext); + if (!context) { + throw new Error('useErrorModal must be used within an ErrorModalProvider'); + } + return context; +}; diff --git a/frontend/src/styles.scss b/frontend/src/styles.scss index 0f574942cfaf..3dd190128c61 100644 --- a/frontend/src/styles.scss +++ b/frontend/src/styles.scss @@ -17,12 +17,12 @@ body { } .u-legend { - max-height: 30px; // slicing the height of the widget Header height ; + max-height: 30px; // Default height for backward compatibility overflow-y: auto; overflow-x: hidden; &::-webkit-scrollbar { - width: 0.3rem; + width: 0.5rem; } &::-webkit-scrollbar-corner { background: transparent; @@ -53,6 +53,313 @@ body { text-decoration-thickness: 3px; } } + + // Enhanced legend styles + &.u-legend-enhanced { + max-height: none; // Remove default max-height restriction + padding: 6px 4px; // Back to original padding + + // Thin and neat scrollbar for enhanced legend + &::-webkit-scrollbar { + width: 0.25rem; + height: 0.25rem; + } + &::-webkit-scrollbar-thumb { + background: rgba(136, 136, 136, 0.4); + border-radius: 0.125rem; + + &:hover { + background: rgba(136, 136, 136, 0.7); + } + } + &::-webkit-scrollbar-track { + background: transparent; + } + + // Enhanced table layout for better responsiveness + table { + width: 100%; + table-layout: fixed; + } + + tbody { + display: flex; + flex-wrap: wrap; + gap: 1px 2px; + align-items: center; + justify-content: flex-start; + width: 100%; + } + + // Center alignment for single-line legends + &.u-legend-single-line tbody { + justify-content: center; + } + + &.u-legend-right-aligned { + tbody { + align-items: flex-start !important; + justify-content: flex-start !important; + } + + tr.u-series { + justify-content: flex-start !important; + + th { + justify-content: flex-start !important; + text-align: left !important; + + .legend-text { + text-align: left !important; + } + } + } + } + + // Right-side legend specific styles + &.u-legend-right { + tbody { + flex-direction: column; + flex-wrap: nowrap; + align-items: stretch; + justify-content: flex-start; + gap: 2px; + } + + tr.u-series { + width: 100%; + + th { + display: flex; + align-items: center; + gap: 6px; + font-size: 12px; + font-weight: 600; + justify-content: flex-start; + cursor: pointer; + position: relative; + min-width: 0; + width: 100%; + + .u-marker { + border-radius: 50%; + min-width: 11px; + min-height: 11px; + width: 11px; + height: 11px; + flex-shrink: 0; + cursor: pointer; + transition: all 0.2s ease; + position: relative; + + &:hover { + transform: scale(1.2); + box-shadow: 0 0 0 2px rgba(255, 255, 255, 0.3); + } + + &:active { + transform: scale(0.9); + } + } + + // Text container for proper ellipsis + .legend-text { + text-overflow: ellipsis; + overflow: hidden; + white-space: nowrap; + min-width: 0; + flex: 1; + padding-bottom: 2px; + } + + // Tooltip styling + &[title] { + cursor: pointer; + } + + &:hover { + background: rgba(255, 255, 255, 0.05); + } + } + + &.u-off { + opacity: 0.5; + text-decoration: line-through; + text-decoration-thickness: 1px; + + th { + &:hover { + opacity: 0.7; + } + + .u-marker { + opacity: 0.3; + position: relative; + + &::after { + content: ''; + position: absolute; + top: 50%; + left: 50%; + width: 12px; + height: 2px; + background: #ff4444; + transform: translate(-50%, -50%) rotate(45deg); + border-radius: 1px; + } + + &:hover { + opacity: 0.6; + } + } + } + } + + // Focus styles for keyboard navigation + &:focus { + outline: 1px solid rgba(66, 165, 245, 0.8); + outline-offset: 1px; + } + } + } + + // Bottom legend specific styles + &.u-legend-bottom { + tbody { + flex-direction: row; + flex-wrap: wrap; + } + } + + &.u-legend-bottom tr.u-series { + display: flex; + flex: 0 0 auto; + min-width: fit-content; + max-width: 200px; // Limit width to enable truncation + + th { + display: flex; + align-items: center; + gap: 6px; + font-size: 12px; + font-weight: 600; + padding: 6px 10px; + cursor: pointer; + white-space: nowrap; + -webkit-font-smoothing: antialiased; + border-radius: 2px; + min-width: 0; // Allow shrinking + max-width: 100%; + + &:hover { + background: rgba(255, 255, 255, 0.05); + } + + .u-marker { + border-radius: 50%; + min-width: 11px; + min-height: 11px; + width: 11px; + height: 11px; + flex-shrink: 0; + cursor: pointer; + transition: all 0.2s ease; + position: relative; + + &:hover { + transform: scale(1.2); + box-shadow: 0 0 0 2px rgba(255, 255, 255, 0.3); + } + + &:active { + transform: scale(0.9); + } + } + + .legend-text { + text-overflow: ellipsis; + overflow: hidden; + white-space: nowrap; + min-width: 0; + flex: 1; + padding-bottom: 2px; + } + + // Tooltip styling + &[title] { + cursor: pointer; + } + } + + &.u-off { + opacity: 0.5; + text-decoration: line-through; + text-decoration-thickness: 1px; + + th { + &:hover { + opacity: 0.7; + } + + .u-marker { + opacity: 0.3; + position: relative; + + &::after { + content: ''; + position: absolute; + top: 50%; + left: 50%; + width: 12px; + height: 2px; + background: #ff4444; + transform: translate(-50%, -50%) rotate(45deg); + border-radius: 1px; + } + + &:hover { + opacity: 0.6; + } + } + } + } + + // Focus styles for keyboard navigation + &:focus { + outline: 1px solid rgba(66, 165, 245, 0.8); + outline-offset: 1px; + } + } + } +} + +// uPlot container adjustments for right-side legend +.uplot { + &.u-plot-right-legend { + display: flex; + flex-direction: row; + + .u-over { + flex: 1; + } + + .u-legend { + flex-shrink: 0; + margin-top: 0; + margin-bottom: 0; + } + } + + &.u-plot-bottom-legend { + display: flex; + flex-direction: column; + + .u-legend { + margin-top: 10px; + margin-left: 0; + margin-right: 0; + } + } } /* Style the selected background */ @@ -250,6 +557,94 @@ body { } } } + + // Enhanced legend light mode styles + .u-legend-enhanced { + // Light mode scrollbar styling + &::-webkit-scrollbar-thumb { + background: rgba(0, 0, 0, 0.2); + + &:hover { + background: rgba(0, 0, 0, 0.4); + } + } + + &.u-legend-bottom tr.u-series { + th { + &:hover { + background: rgba(0, 0, 0, 0.05); + } + } + + &.u-off { + opacity: 0.5; + text-decoration: line-through; + text-decoration-thickness: 1px; + + th { + &:hover { + background: rgba(0, 0, 0, 0.08); + opacity: 0.7; + } + + .u-marker { + opacity: 0.3; + + &::after { + background: #cc3333; + } + + &:hover { + opacity: 0.6; + } + } + } + } + + // Light mode focus styles + &:focus { + outline: 1px solid rgba(25, 118, 210, 0.8); + } + } + + &.u-legend-right tr.u-series { + th { + &:hover { + background: rgba(0, 0, 0, 0.05); + } + } + + &.u-off { + opacity: 0.5; + text-decoration: line-through; + text-decoration-thickness: 1px; + + th { + &:hover { + background: rgba(0, 0, 0, 0.08); + opacity: 0.7; + } + + .u-marker { + opacity: 0.3; + + &::after { + background: #cc3333; + } + + &:hover { + opacity: 0.6; + } + } + } + } + + // Light mode focus styles + &:focus { + outline: 1px solid rgba(25, 118, 210, 0.8); + } + } + } } .ant-notification-notice-message { @@ -320,3 +715,30 @@ notifications - 2050 .animate-spin { animation: spin 1s linear infinite; } + +// Custom legend tooltip for immediate display +.legend-tooltip { + position: fixed; + background: var(--bg-slate-400); + color: var(--text-vanilla-100); + padding: 8px 12px; + border-radius: 6px; + font-size: 12px; + font-family: 'Geist Mono'; + font-weight: 500; + z-index: 10000; + pointer-events: none; + white-space: nowrap; + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.4); + border: 1px solid #374151; + -webkit-font-smoothing: antialiased; + letter-spacing: 0.025em; +} + +// Light mode styling for legend tooltip +.lightMode .legend-tooltip { + background: #ffffff; + color: #1f2937; + border: 1px solid #d1d5db; + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15); +} diff --git a/frontend/src/tests/test-utils.tsx b/frontend/src/tests/test-utils.tsx index 71f253afe587..f58b818ec03d 100644 --- a/frontend/src/tests/test-utils.tsx +++ b/frontend/src/tests/test-utils.tsx @@ -5,6 +5,7 @@ import ROUTES from 'constants/routes'; import { ResourceProvider } from 'hooks/useResourceAttribute'; import { AppContext } from 'providers/App/App'; import { IAppContext } from 'providers/App/types'; +import { ErrorModalProvider } from 'providers/ErrorModalProvider'; import { QueryBuilderProvider } from 'providers/QueryBuilder'; import TimezoneProvider from 'providers/Timezone'; import React, { ReactElement } from 'react'; @@ -105,7 +106,8 @@ export function getAppContextMock( appContextOverrides?: Partial, ): IAppContext { return { - activeLicenseV3: { + activeLicense: { + key: 'test-key', event_queue: { created_at: '0', event: LicenseEvent.NO_EVENT, @@ -138,8 +140,8 @@ export function getAppContextMock( trialConvertedToSubscription: false, gracePeriodEnd: -1, }, - isFetchingActiveLicenseV3: false, - activeLicenseV3FetchError: null, + isFetchingActiveLicense: false, + activeLicenseFetchError: null, user: { accessJwt: 'some-token', refreshJwt: 'some-refresh-token', @@ -160,20 +162,6 @@ export function getAppContextMock( ], isFetchingUser: false, userFetchError: null, - licenses: { - licenses: [ - { - key: 'does-not-matter', - isCurrent: true, - planKey: 'ENTERPRISE_PLAN', - ValidFrom: new Date(), - ValidUntil: new Date(), - status: 'VALID', - }, - ], - }, - isFetchingLicenses: false, - licensesFetchError: null, featureFlags: [ { name: FeatureKeys.SSO, @@ -246,7 +234,12 @@ export function getAppContextMock( updateUser: jest.fn(), updateOrg: jest.fn(), updateOrgPreferences: jest.fn(), - licensesRefetch: jest.fn(), + activeLicenseRefetch: jest.fn(), + versionData: { + version: '1.0.0', + ee: 'Y', + setupCompleted: true, + }, ...appContextOverrides, }; } @@ -262,16 +255,18 @@ function AllTheProviders({ return ( - - - - {/* Use the mock store with the provided role */} - - {children} - - - - + + + + + {/* Use the mock store with the provided role */} + + {children} + + + + + ); diff --git a/frontend/src/types/api/SAML/deleteDomain.ts b/frontend/src/types/api/SAML/deleteDomain.ts index 1a86159bd907..3c2901b6efed 100644 --- a/frontend/src/types/api/SAML/deleteDomain.ts +++ b/frontend/src/types/api/SAML/deleteDomain.ts @@ -2,4 +2,7 @@ import { AuthDomain } from './listDomain'; export type Props = AuthDomain; -export type PayloadProps = AuthDomain; +export interface PayloadProps { + data: null; + status: string; +} diff --git a/frontend/src/types/api/SAML/listDomain.ts b/frontend/src/types/api/SAML/listDomain.ts index 25c5eae85c6a..89955541c9fc 100644 --- a/frontend/src/types/api/SAML/listDomain.ts +++ b/frontend/src/types/api/SAML/listDomain.ts @@ -44,4 +44,7 @@ export interface Props { orgId: Organization['id']; } -export type PayloadProps = AuthDomain[]; +export interface PayloadProps { + data: AuthDomain[]; + status: string; +} diff --git a/frontend/src/types/api/SAML/postDomain.ts b/frontend/src/types/api/SAML/postDomain.ts index 1b1972218b76..342622a77f9c 100644 --- a/frontend/src/types/api/SAML/postDomain.ts +++ b/frontend/src/types/api/SAML/postDomain.ts @@ -5,4 +5,7 @@ export type Props = { orgId: string; }; -export type PayloadProps = AuthDomain; +export interface PayloadProps { + data: AuthDomain; + status: string; +} diff --git a/frontend/src/types/api/SAML/updateDomain.ts b/frontend/src/types/api/SAML/updateDomain.ts index 1a86159bd907..ed3ae421f1f3 100644 --- a/frontend/src/types/api/SAML/updateDomain.ts +++ b/frontend/src/types/api/SAML/updateDomain.ts @@ -2,4 +2,7 @@ import { AuthDomain } from './listDomain'; export type Props = AuthDomain; -export type PayloadProps = AuthDomain; +export interface PayloadProps { + data: AuthDomain; + status: string; +} diff --git a/frontend/src/types/api/billing/checkout.ts b/frontend/src/types/api/billing/checkout.ts index 78523376f01e..4b1a2311ca37 100644 --- a/frontend/src/types/api/billing/checkout.ts +++ b/frontend/src/types/api/billing/checkout.ts @@ -5,3 +5,8 @@ export interface CheckoutSuccessPayloadProps { export interface CheckoutRequestPayloadProps { url: string; } + +export interface PayloadProps { + data: CheckoutSuccessPayloadProps; + status: string; +} diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts index 65e26d0cbb25..2e6d88328711 100644 --- a/frontend/src/types/api/dashboard/getAll.ts +++ b/frontend/src/types/api/dashboard/getAll.ts @@ -17,6 +17,11 @@ export type TVariableQueryType = typeof VariableQueryTypeArr[number]; export const VariableSortTypeArr = ['DISABLED', 'ASC', 'DESC'] as const; export type TSortVariableValuesType = typeof VariableSortTypeArr[number]; +export enum LegendPosition { + BOTTOM = 'bottom', + RIGHT = 'right', +} + export interface IDashboardVariable { id: string; order?: any; @@ -111,6 +116,8 @@ export interface IBaseWidget { selectedTracesFields: BaseAutocompleteData[] | null; isLogScale?: boolean; columnWidths?: Record; + legendPosition?: LegendPosition; + customLegendColors?: Record; } export interface Widgets extends IBaseWidget { query: Query; diff --git a/frontend/src/types/api/licenses/getAll.ts b/frontend/src/types/api/licenses/getAll.ts deleted file mode 100644 index 58996cf36e3d..000000000000 --- a/frontend/src/types/api/licenses/getAll.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { License } from './def'; - -export type PayloadProps = { - licenses: License[]; -}; diff --git a/frontend/src/types/api/licensesV3/getActive.ts b/frontend/src/types/api/licensesV3/getActive.ts index b073438bad90..a26d76606443 100644 --- a/frontend/src/types/api/licensesV3/getActive.ts +++ b/frontend/src/types/api/licensesV3/getActive.ts @@ -30,7 +30,7 @@ export const LicensePlanKey = { BASIC: 'BASIC', }; -export type LicenseV3EventQueueResModel = { +export type LicenseEventQueueResModel = { event: LicenseEvent; status: string; scheduled_at: string; @@ -38,10 +38,11 @@ export type LicenseV3EventQueueResModel = { updated_at: string; }; -export type LicenseV3ResModel = { +export type LicenseResModel = { + key: string; status: LicenseStatus; state: LicenseState; - event_queue: LicenseV3EventQueueResModel; + event_queue: LicenseEventQueueResModel; platform: LicensePlatform; created_at: string; plan: { @@ -67,3 +68,8 @@ export type TrialInfo = { trialConvertedToSubscription: boolean; gracePeriodEnd: number; }; + +export interface PayloadProps { + data: LicenseEventQueueResModel; + status: string; +} diff --git a/pkg/alertmanager/legacyalertmanager/provider.go b/pkg/alertmanager/legacyalertmanager/provider.go index f61d8c372028..b8fbba68e343 100644 --- a/pkg/alertmanager/legacyalertmanager/provider.go +++ b/pkg/alertmanager/legacyalertmanager/provider.go @@ -168,7 +168,7 @@ func (provider *provider) putAlerts(ctx context.Context, orgID string, alerts al receivers := cfg.ReceiverNamesFromRuleID(ruleID) if len(receivers) == 0 { - provider.settings.Logger().WarnContext(ctx, "cannot find receivers for alert, skipping sending alert to alertmanager", "ruleID", ruleID, "alert", alert) + provider.settings.Logger().WarnContext(ctx, "cannot find receivers for alert, skipping sending alert to alertmanager", "rule_id", ruleID, "alert", alert) continue } diff --git a/pkg/alertmanager/service.go b/pkg/alertmanager/service.go index 8106b678d384..d8fdd74b2897 100644 --- a/pkg/alertmanager/service.go +++ b/pkg/alertmanager/service.go @@ -53,7 +53,7 @@ func (service *Service) SyncServers(ctx context.Context) error { for _, orgID := range orgIDs { config, err := service.getConfig(ctx, orgID) if err != nil { - service.settings.Logger().Error("failed to get alertmanager config for org", "orgID", orgID, "error", err) + service.settings.Logger().ErrorContext(ctx, "failed to get alertmanager config for org", "org_id", orgID, "error", err) continue } @@ -61,7 +61,7 @@ func (service *Service) SyncServers(ctx context.Context) error { if _, ok := service.servers[orgID]; !ok { server, err := service.newServer(ctx, orgID) if err != nil { - service.settings.Logger().Error("failed to create alertmanager server", "orgID", orgID, "error", err) + service.settings.Logger().ErrorContext(ctx, "failed to create alertmanager server", "org_id", orgID, "error", err) continue } @@ -69,13 +69,13 @@ func (service *Service) SyncServers(ctx context.Context) error { } if service.servers[orgID].Hash() == config.StoreableConfig().Hash { - service.settings.Logger().Debug("skipping alertmanager sync for org", "orgID", orgID, "hash", config.StoreableConfig().Hash) + service.settings.Logger().DebugContext(ctx, "skipping alertmanager sync for org", "org_id", orgID, "hash", config.StoreableConfig().Hash) continue } err = service.servers[orgID].SetConfig(ctx, config) if err != nil { - service.settings.Logger().Error("failed to set config for alertmanager server", "orgID", orgID, "error", err) + service.settings.Logger().ErrorContext(ctx, "failed to set config for alertmanager server", "org_id", orgID, "error", err) continue } } @@ -142,7 +142,7 @@ func (service *Service) Stop(ctx context.Context) error { for _, server := range service.servers { if err := server.Stop(ctx); err != nil { errs = append(errs, err) - service.settings.Logger().Error("failed to stop alertmanager server", "error", err) + service.settings.Logger().ErrorContext(ctx, "failed to stop alertmanager server", "error", err) } } @@ -167,7 +167,7 @@ func (service *Service) newServer(ctx context.Context, orgID string) (*alertmana } if beforeCompareAndSelectHash == config.StoreableConfig().Hash { - service.settings.Logger().Debug("skipping config store update for org", "orgID", orgID, "hash", config.StoreableConfig().Hash) + service.settings.Logger().DebugContext(ctx, "skipping config store update for org", "org_id", orgID, "hash", config.StoreableConfig().Hash) return server, nil } diff --git a/pkg/apis/fields/api.go b/pkg/apis/fields/api.go index e32f75ec42d8..2341bc6c61d3 100644 --- a/pkg/apis/fields/api.go +++ b/pkg/apis/fields/api.go @@ -3,6 +3,7 @@ package fields import ( "bytes" "io" + "log/slog" "net/http" "github.com/SigNoz/signoz/pkg/http/render" @@ -12,7 +13,6 @@ import ( "github.com/SigNoz/signoz/pkg/telemetrystore" "github.com/SigNoz/signoz/pkg/telemetrytraces" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" - "go.uber.org/zap" ) type API struct { @@ -20,9 +20,9 @@ type API struct { telemetryMetadataStore telemetrytypes.MetadataStore } -func NewAPI(telemetryStore telemetrystore.TelemetryStore) *API { - +func NewAPI(telemetryStore telemetrystore.TelemetryStore, logger *slog.Logger) *API { telemetryMetadataStore := telemetrymetadata.NewTelemetryMetaStore( + logger, telemetryStore, telemetrytraces.DBName, telemetrytraces.TagAttributesV2TableName, @@ -99,7 +99,6 @@ func (api *API) GetFieldsValues(w http.ResponseWriter, r *http.Request) { relatedValues, err := api.telemetryMetadataStore.GetRelatedValues(ctx, fieldValueSelector) if err != nil { // we don't want to return error if we fail to get related values for some reason - zap.L().Error("failed to get related values", zap.Error(err)) relatedValues = []string{} } diff --git a/pkg/cache/memorycache/provider.go b/pkg/cache/memorycache/provider.go index 2ff40e826630..7a45bcfa03f8 100644 --- a/pkg/cache/memorycache/provider.go +++ b/pkg/cache/memorycache/provider.go @@ -37,7 +37,7 @@ func (provider *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey s } if ttl == 0 { - provider.settings.Logger().WarnContext(ctx, "zero value for TTL found. defaulting to the base TTL", "cacheKey", cacheKey, "defaultTTL", provider.config.Memory.TTL) + provider.settings.Logger().WarnContext(ctx, "zero value for TTL found. defaulting to the base TTL", "cache_key", cacheKey, "default_ttl", provider.config.Memory.TTL) } provider.cc.Set(strings.Join([]string{orgID.StringValue(), cacheKey}, "::"), data, ttl) return nil diff --git a/pkg/cache/rediscache/provider.go b/pkg/cache/rediscache/provider.go index 0a43b4bb191b..8106628500a4 100644 --- a/pkg/cache/rediscache/provider.go +++ b/pkg/cache/rediscache/provider.go @@ -14,34 +14,30 @@ import ( "github.com/SigNoz/signoz/pkg/types/cachetypes" "github.com/SigNoz/signoz/pkg/valuer" "github.com/go-redis/redis/v8" - "go.uber.org/zap" ) type provider struct { - client *redis.Client + client *redis.Client + settings factory.ScopedProviderSettings } func NewFactory() factory.ProviderFactory[cache.Cache, cache.Config] { return factory.NewProviderFactory(factory.MustNewName("redis"), New) } -func New(ctx context.Context, settings factory.ProviderSettings, config cache.Config) (cache.Cache, error) { - provider := new(provider) - provider.client = redis.NewClient(&redis.Options{ +func New(ctx context.Context, providerSettings factory.ProviderSettings, config cache.Config) (cache.Cache, error) { + settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/cache/rediscache") + client := redis.NewClient(&redis.Options{ Addr: strings.Join([]string{config.Redis.Host, fmt.Sprint(config.Redis.Port)}, ":"), Password: config.Redis.Password, DB: config.Redis.DB, }) - if err := provider.client.Ping(ctx).Err(); err != nil { + if err := client.Ping(ctx).Err(); err != nil { return nil, err } - return provider, nil -} - -func WithClient(client *redis.Client) *provider { - return &provider{client: client} + return &provider{client: client, settings: settings}, nil } func (c *provider) Set(ctx context.Context, orgID valuer.UUID, cacheKey string, data cachetypes.Cacheable, ttl time.Duration) error { @@ -70,6 +66,6 @@ func (c *provider) DeleteMany(ctx context.Context, orgID valuer.UUID, cacheKeys } if err := c.client.Del(ctx, updatedCacheKeys...).Err(); err != nil { - zap.L().Error("error deleting cache keys", zap.Strings("cacheKeys", cacheKeys), zap.Error(err)) + c.settings.Logger().ErrorContext(ctx, "error deleting cache keys", "cache_keys", cacheKeys, "error", err) } } diff --git a/pkg/cache/rediscache/provider_test.go b/pkg/cache/rediscache/provider_test.go index 223a41aeb8c1..e9a6ff2a3a38 100644 --- a/pkg/cache/rediscache/provider_test.go +++ b/pkg/cache/rediscache/provider_test.go @@ -7,6 +7,8 @@ import ( "testing" "time" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/factory/factorytest" "github.com/SigNoz/signoz/pkg/valuer" "github.com/go-redis/redismock/v8" "github.com/stretchr/testify/assert" @@ -28,7 +30,7 @@ func (ce *CacheableEntity) UnmarshalBinary(data []byte) error { func TestSet(t *testing.T) { db, mock := redismock.NewClientMock() - cache := WithClient(db) + cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")} storeCacheableEntity := &CacheableEntity{ Key: "some-random-key", Value: 1, @@ -46,7 +48,7 @@ func TestSet(t *testing.T) { func TestGet(t *testing.T) { db, mock := redismock.NewClientMock() - cache := WithClient(db) + cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")} storeCacheableEntity := &CacheableEntity{ Key: "some-random-key", Value: 1, @@ -75,7 +77,7 @@ func TestGet(t *testing.T) { func TestDelete(t *testing.T) { db, mock := redismock.NewClientMock() - c := WithClient(db) + cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")} storeCacheableEntity := &CacheableEntity{ Key: "some-random-key", Value: 1, @@ -84,10 +86,10 @@ func TestDelete(t *testing.T) { orgID := valuer.GenerateUUID() mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil() - _ = c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second) + _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second) mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::")).RedisNil() - c.Delete(context.Background(), orgID, "key") + cache.Delete(context.Background(), orgID, "key") if err := mock.ExpectationsWereMet(); err != nil { t.Errorf("there were unfulfilled expectations: %s", err) @@ -96,7 +98,7 @@ func TestDelete(t *testing.T) { func TestDeleteMany(t *testing.T) { db, mock := redismock.NewClientMock() - c := WithClient(db) + cache := &provider{client: db, settings: factory.NewScopedProviderSettings(factorytest.NewSettings(), "github.com/SigNoz/signoz/pkg/cache/rediscache")} storeCacheableEntity := &CacheableEntity{ Key: "some-random-key", Value: 1, @@ -105,13 +107,13 @@ func TestDeleteMany(t *testing.T) { orgID := valuer.GenerateUUID() mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil() - _ = c.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second) + _ = cache.Set(context.Background(), orgID, "key", storeCacheableEntity, 10*time.Second) mock.ExpectSet(strings.Join([]string{orgID.StringValue(), "key2"}, "::"), storeCacheableEntity, 10*time.Second).RedisNil() - _ = c.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second) + _ = cache.Set(context.Background(), orgID, "key2", storeCacheableEntity, 10*time.Second) mock.ExpectDel(strings.Join([]string{orgID.StringValue(), "key"}, "::"), strings.Join([]string{orgID.StringValue(), "key2"}, "::")).RedisNil() - c.DeleteMany(context.Background(), orgID, []string{"key", "key2"}) + cache.DeleteMany(context.Background(), orgID, []string{"key", "key2"}) if err := mock.ExpectationsWereMet(); err != nil { t.Errorf("there were unfulfilled expectations: %s", err) diff --git a/pkg/emailing/emailingtest/provider.go b/pkg/emailing/emailingtest/provider.go new file mode 100644 index 000000000000..1db2b8d997b8 --- /dev/null +++ b/pkg/emailing/emailingtest/provider.go @@ -0,0 +1,28 @@ +package emailingtest + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/emailing" + "github.com/SigNoz/signoz/pkg/types/emailtypes" +) + +var _ emailing.Emailing = (*Provider)(nil) + +type Provider struct { + SentEmailCountByTo map[string]int + SentEmailCountByTemplateName map[emailtypes.TemplateName]int +} + +func New() *Provider { + return &Provider{ + SentEmailCountByTo: make(map[string]int), + SentEmailCountByTemplateName: make(map[emailtypes.TemplateName]int), + } +} + +func (provider *Provider) SendHTML(ctx context.Context, to string, subject string, templateName emailtypes.TemplateName, data map[string]any) error { + provider.SentEmailCountByTo[to]++ + provider.SentEmailCountByTemplateName[templateName]++ + return nil +} diff --git a/pkg/emailing/smtpemailing/provider.go b/pkg/emailing/smtpemailing/provider.go index 66b9489a11fa..07eb0c610099 100644 --- a/pkg/emailing/smtpemailing/provider.go +++ b/pkg/emailing/smtpemailing/provider.go @@ -25,7 +25,7 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/emailing/smtpemailing") // Try to create a template store. If it fails, use an empty store. - store, err := filetemplatestore.NewStore(config.Templates.Directory, emailtypes.Templates, settings.Logger()) + store, err := filetemplatestore.NewStore(ctx, config.Templates.Directory, emailtypes.Templates, settings.Logger()) if err != nil { settings.Logger().ErrorContext(ctx, "failed to create template store, using empty store", "error", err) store = filetemplatestore.NewEmptyStore() diff --git a/pkg/emailing/templatestore/filetemplatestore/store.go b/pkg/emailing/templatestore/filetemplatestore/store.go index 5af518d0f83d..d79a2492470c 100644 --- a/pkg/emailing/templatestore/filetemplatestore/store.go +++ b/pkg/emailing/templatestore/filetemplatestore/store.go @@ -21,7 +21,7 @@ type store struct { fs map[emailtypes.TemplateName]*template.Template } -func NewStore(baseDir string, templates []emailtypes.TemplateName, logger *slog.Logger) (emailtypes.TemplateStore, error) { +func NewStore(ctx context.Context, baseDir string, templates []emailtypes.TemplateName, logger *slog.Logger) (emailtypes.TemplateStore, error) { fs := make(map[emailtypes.TemplateName]*template.Template) fis, err := os.ReadDir(filepath.Clean(baseDir)) if err != nil { @@ -45,7 +45,7 @@ func NewStore(baseDir string, templates []emailtypes.TemplateName, logger *slog. t, err := parseTemplateFile(filepath.Join(baseDir, fi.Name()), templateName) if err != nil { - logger.Error("failed to parse template file", "template", templateName, "path", filepath.Join(baseDir, fi.Name()), "error", err) + logger.ErrorContext(ctx, "failed to parse template file", "template", templateName, "path", filepath.Join(baseDir, fi.Name()), "error", err) continue } @@ -54,7 +54,7 @@ func NewStore(baseDir string, templates []emailtypes.TemplateName, logger *slog. } if err := checkMissingTemplates(templates, foundTemplates); err != nil { - logger.Error("some templates are missing", "error", err) + logger.ErrorContext(ctx, "some templates are missing", "error", err) } return &store{fs: fs}, nil diff --git a/pkg/http/middleware/analytics.go b/pkg/http/middleware/analytics.go index 0930935bbca1..db8d871b12a7 100644 --- a/pkg/http/middleware/analytics.go +++ b/pkg/http/middleware/analytics.go @@ -11,19 +11,12 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/telemetry" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/gorilla/mux" - "go.uber.org/zap" ) -type Analytics struct { - logger *zap.Logger -} +type Analytics struct{} -func NewAnalytics(logger *zap.Logger) *Analytics { - if logger == nil { - panic("cannot build analytics middleware, logger is empty") - } - - return &Analytics{logger: logger} +func NewAnalytics() *Analytics { + return &Analytics{} } func (a *Analytics) Wrap(next http.Handler) http.Handler { @@ -94,22 +87,10 @@ func (a *Analytics) extractQueryRangeData(path string, r *http.Request) (map[str referrer := r.Header.Get("Referer") - dashboardMatched, err := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) - if err != nil { - a.logger.Error("error while matching the referrer", zap.Error(err)) - } - alertMatched, err := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) - if err != nil { - a.logger.Error("error while matching the alert: ", zap.Error(err)) - } - logsExplorerMatched, err := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer) - if err != nil { - a.logger.Error("error while matching the logs explorer: ", zap.Error(err)) - } - traceExplorerMatched, err := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer) - if err != nil { - a.logger.Error("error while matching the trace explorer: ", zap.Error(err)) - } + dashboardMatched, _ := regexp.MatchString(`/dashboard/[a-zA-Z0-9\-]+/(new|edit)(?:\?.*)?$`, referrer) + alertMatched, _ := regexp.MatchString(`/alerts/(new|edit)(?:\?.*)?$`, referrer) + logsExplorerMatched, _ := regexp.MatchString(`/logs/logs-explorer(?:\?.*)?$`, referrer) + traceExplorerMatched, _ := regexp.MatchString(`/traces-explorer(?:\?.*)?$`, referrer) queryInfoResult := telemetry.GetInstance().CheckQueryInfo(postData) diff --git a/ee/http/middleware/api_key.go b/pkg/http/middleware/api_key.go similarity index 88% rename from ee/http/middleware/api_key.go rename to pkg/http/middleware/api_key.go index 96e35619a082..01e1981bd767 100644 --- a/ee/http/middleware/api_key.go +++ b/pkg/http/middleware/api_key.go @@ -1,23 +1,24 @@ package middleware import ( + "log/slog" "net/http" "time" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" - "go.uber.org/zap" ) type APIKey struct { store sqlstore.SQLStore uuid *authtypes.UUID headers []string + logger *slog.Logger } -func NewAPIKey(store sqlstore.SQLStore, headers []string) *APIKey { - return &APIKey{store: store, uuid: authtypes.NewUUID(), headers: headers} +func NewAPIKey(store sqlstore.SQLStore, headers []string, logger *slog.Logger) *APIKey { + return &APIKey{store: store, uuid: authtypes.NewUUID(), headers: headers, logger: logger} } func (a *APIKey) Wrap(next http.Handler) http.Handler { @@ -77,7 +78,7 @@ func (a *APIKey) Wrap(next http.Handler) http.Handler { apiKey.LastUsed = time.Now() _, err = a.store.BunDB().NewUpdate().Model(&apiKey).Column("last_used").Where("token = ?", apiKeyToken).Where("revoked = false").Exec(r.Context()) if err != nil { - zap.L().Error("Failed to update APIKey last used in db", zap.Error(err)) + a.logger.ErrorContext(r.Context(), "failed to update last used of api key", "error", err) } }) diff --git a/pkg/http/middleware/auth.go b/pkg/http/middleware/auth.go index 719d66bdf1b0..491ccb93f12c 100644 --- a/pkg/http/middleware/auth.go +++ b/pkg/http/middleware/auth.go @@ -4,21 +4,15 @@ import ( "net/http" "github.com/SigNoz/signoz/pkg/types/authtypes" - "go.uber.org/zap" ) type Auth struct { - logger *zap.Logger jwt *authtypes.JWT headers []string } -func NewAuth(logger *zap.Logger, jwt *authtypes.JWT, headers []string) *Auth { - if logger == nil { - panic("cannot build auth middleware, logger is empty") - } - - return &Auth{logger: logger, jwt: jwt, headers: headers} +func NewAuth(jwt *authtypes.JWT, headers []string) *Auth { + return &Auth{jwt: jwt, headers: headers} } func (a *Auth) Wrap(next http.Handler) http.Handler { diff --git a/pkg/http/middleware/cache_test.go b/pkg/http/middleware/cache_test.go index 80c55a767dcd..3adeee06bb96 100644 --- a/pkg/http/middleware/cache_test.go +++ b/pkg/http/middleware/cache_test.go @@ -47,6 +47,9 @@ func TestCache(t *testing.T) { res, err := http.DefaultClient.Do(req) require.NoError(t, err) + defer func() { + require.NoError(t, res.Body.Close()) + }() actual := res.Header.Get("Cache-control") require.NoError(t, err) diff --git a/pkg/http/middleware/logging.go b/pkg/http/middleware/logging.go index 61dbbab67d76..ba3d805758b3 100644 --- a/pkg/http/middleware/logging.go +++ b/pkg/http/middleware/logging.go @@ -3,6 +3,7 @@ package middleware import ( "bytes" "context" + "log/slog" "net" "net/http" "net/url" @@ -13,7 +14,6 @@ import ( "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/gorilla/mux" semconv "go.opentelemetry.io/otel/semconv/v1.26.0" - "go.uber.org/zap" ) const ( @@ -21,22 +21,18 @@ const ( ) type Logging struct { - logger *zap.Logger + logger *slog.Logger excludedRoutes map[string]struct{} } -func NewLogging(logger *zap.Logger, excludedRoutes []string) *Logging { - if logger == nil { - panic("cannot build logging, logger is empty") - } - +func NewLogging(logger *slog.Logger, excludedRoutes []string) *Logging { excludedRoutesMap := make(map[string]struct{}) for _, route := range excludedRoutes { excludedRoutesMap[route] = struct{}{} } return &Logging{ - logger: logger.Named(pkgname), + logger: logger.With("pkg", pkgname), excludedRoutes: excludedRoutesMap, } } @@ -50,13 +46,13 @@ func (middleware *Logging) Wrap(next http.Handler) http.Handler { path = req.URL.Path } - fields := []zap.Field{ - zap.String(string(semconv.ClientAddressKey), req.RemoteAddr), - zap.String(string(semconv.UserAgentOriginalKey), req.UserAgent()), - zap.String(string(semconv.ServerAddressKey), host), - zap.String(string(semconv.ServerPortKey), port), - zap.Int64(string(semconv.HTTPRequestSizeKey), req.ContentLength), - zap.String(string(semconv.HTTPRouteKey), path), + fields := []any{ + string(semconv.ClientAddressKey), req.RemoteAddr, + string(semconv.UserAgentOriginalKey), req.UserAgent(), + string(semconv.ServerAddressKey), host, + string(semconv.ServerPortKey), port, + string(semconv.HTTPRequestSizeKey), req.ContentLength, + string(semconv.HTTPRouteKey), path, } logCommentKVs := middleware.getLogCommentKVs(req) @@ -73,19 +69,19 @@ func (middleware *Logging) Wrap(next http.Handler) http.Handler { statusCode, err := writer.StatusCode(), writer.WriteError() fields = append(fields, - zap.Int(string(semconv.HTTPResponseStatusCodeKey), statusCode), - zap.Duration(string(semconv.HTTPServerRequestDurationName), time.Since(start)), + string(semconv.HTTPResponseStatusCodeKey), statusCode, + string(semconv.HTTPServerRequestDurationName), time.Since(start), ) if err != nil { - fields = append(fields, zap.Error(err)) - middleware.logger.Error(logMessage, fields...) + fields = append(fields, "error", err) + middleware.logger.ErrorContext(req.Context(), logMessage, fields...) } else { // when the status code is 400 or >=500, and the response body is not empty. if badResponseBuffer.Len() != 0 { - fields = append(fields, zap.String("response.body", badResponseBuffer.String())) + fields = append(fields, "response.body", badResponseBuffer.String()) } - middleware.logger.Info(logMessage, fields...) + middleware.logger.InfoContext(req.Context(), logMessage, fields...) } }) } diff --git a/pkg/http/middleware/timeout.go b/pkg/http/middleware/timeout.go index 84ca3d27b652..9909336be78b 100644 --- a/pkg/http/middleware/timeout.go +++ b/pkg/http/middleware/timeout.go @@ -2,11 +2,10 @@ package middleware import ( "context" + "log/slog" "net/http" "strings" "time" - - "go.uber.org/zap" ) const ( @@ -14,7 +13,7 @@ const ( ) type Timeout struct { - logger *zap.Logger + logger *slog.Logger excluded map[string]struct{} // The default timeout defaultTimeout time.Duration @@ -22,11 +21,7 @@ type Timeout struct { maxTimeout time.Duration } -func NewTimeout(logger *zap.Logger, excludedRoutes []string, defaultTimeout time.Duration, maxTimeout time.Duration) *Timeout { - if logger == nil { - panic("cannot build timeout, logger is empty") - } - +func NewTimeout(logger *slog.Logger, excludedRoutes []string, defaultTimeout time.Duration, maxTimeout time.Duration) *Timeout { excluded := make(map[string]struct{}, len(excludedRoutes)) for _, route := range excludedRoutes { excluded[route] = struct{}{} @@ -41,7 +36,7 @@ func NewTimeout(logger *zap.Logger, excludedRoutes []string, defaultTimeout time } return &Timeout{ - logger: logger.Named(pkgname), + logger: logger.With("pkg", pkgname), excluded: excluded, defaultTimeout: defaultTimeout, maxTimeout: maxTimeout, @@ -56,7 +51,7 @@ func (middleware *Timeout) Wrap(next http.Handler) http.Handler { if incoming != "" { parsed, err := time.ParseDuration(strings.TrimSpace(incoming) + "s") if err != nil { - middleware.logger.Warn("cannot parse timeout in header, using default timeout", zap.String("timeout", incoming), zap.Error(err), zap.Any("context", req.Context())) + middleware.logger.WarnContext(req.Context(), "cannot parse timeout in header, using default timeout", "timeout", incoming, "error", err) } else { if parsed > middleware.maxTimeout { actual = middleware.maxTimeout diff --git a/pkg/http/middleware/timeout_test.go b/pkg/http/middleware/timeout_test.go index e18291786dd6..56eb687b15f3 100644 --- a/pkg/http/middleware/timeout_test.go +++ b/pkg/http/middleware/timeout_test.go @@ -1,13 +1,14 @@ package middleware import ( + "io" + "log/slog" "net" "net/http" "testing" "time" "github.com/stretchr/testify/require" - "go.uber.org/zap" ) func TestTimeout(t *testing.T) { @@ -16,7 +17,7 @@ func TestTimeout(t *testing.T) { writeTimeout := 6 * time.Second defaultTimeout := 2 * time.Second maxTimeout := 4 * time.Second - m := NewTimeout(zap.NewNop(), []string{"/excluded"}, defaultTimeout, maxTimeout) + m := NewTimeout(slog.New(slog.NewTextHandler(io.Discard, nil)), []string{"/excluded"}, defaultTimeout, maxTimeout) listener, err := net.Listen("tcp", "localhost:0") require.NoError(t, err) @@ -70,8 +71,11 @@ func TestTimeout(t *testing.T) { require.NoError(t, err) req.Header.Add(headerName, tc.header) - _, err = http.DefaultClient.Do(req) + res, err := http.DefaultClient.Do(req) require.NoError(t, err) + defer func() { + require.NoError(t, res.Body.Close()) + }() // confirm that we waited at least till the "wait" time require.GreaterOrEqual(t, time.Since(start), tc.wait) diff --git a/pkg/http/render/render_test.go b/pkg/http/render/render_test.go index 5b6b28149ce1..42f4565de7ac 100644 --- a/pkg/http/render/render_test.go +++ b/pkg/http/render/render_test.go @@ -47,6 +47,9 @@ func TestSuccess(t *testing.T) { res, err := http.DefaultClient.Do(req) require.NoError(t, err) + defer func() { + require.NoError(t, res.Body.Close()) + }() actual, err := io.ReadAll(res.Body) require.NoError(t, err) @@ -104,6 +107,9 @@ func TestError(t *testing.T) { res, err := http.DefaultClient.Do(req) require.NoError(t, err) + defer func() { + require.NoError(t, res.Body.Close()) + }() actual, err := io.ReadAll(res.Body) require.NoError(t, err) diff --git a/pkg/http/server/server.go b/pkg/http/server/server.go index 449eff28f82a..6d1c5c71a63b 100644 --- a/pkg/http/server/server.go +++ b/pkg/http/server/server.go @@ -3,23 +3,23 @@ package server import ( "context" "fmt" + "log/slog" "net/http" "time" "github.com/SigNoz/signoz/pkg/factory" - "go.uber.org/zap" ) var _ factory.Service = (*Server)(nil) type Server struct { srv *http.Server - logger *zap.Logger + logger *slog.Logger handler http.Handler cfg Config } -func New(logger *zap.Logger, cfg Config, handler http.Handler) (*Server, error) { +func New(logger *slog.Logger, cfg Config, handler http.Handler) (*Server, error) { if handler == nil { return nil, fmt.Errorf("cannot build http server, handler is required") } @@ -38,17 +38,17 @@ func New(logger *zap.Logger, cfg Config, handler http.Handler) (*Server, error) return &Server{ srv: srv, - logger: logger.Named("go.signoz.io/pkg/http/server"), + logger: logger.With("pkg", "go.signoz.io/pkg/http/server"), handler: handler, cfg: cfg, }, nil } func (server *Server) Start(ctx context.Context) error { - server.logger.Info("starting http server", zap.String("address", server.srv.Addr)) + server.logger.InfoContext(ctx, "starting http server", "address", server.srv.Addr) if err := server.srv.ListenAndServe(); err != nil { if err != http.ErrServerClosed { - server.logger.Error("failed to start server", zap.Error(err), zap.Any("context", ctx)) + server.logger.ErrorContext(ctx, "failed to start server", "error", err) return err } } @@ -60,10 +60,10 @@ func (server *Server) Stop(ctx context.Context) error { defer cancel() if err := server.srv.Shutdown(ctx); err != nil { - server.logger.Error("failed to stop server", zap.Error(err), zap.Any("context", ctx)) + server.logger.ErrorContext(ctx, "failed to stop server", "error", err) return err } - server.logger.Info("server stopped gracefully", zap.Any("context", ctx)) + server.logger.InfoContext(ctx, "server stopped gracefully") return nil } diff --git a/pkg/licensing/config.go b/pkg/licensing/config.go new file mode 100644 index 000000000000..a88480d8679d --- /dev/null +++ b/pkg/licensing/config.go @@ -0,0 +1,18 @@ +package licensing + +import ( + "time" + + "github.com/SigNoz/signoz/pkg/factory" +) + +var _ factory.Config = (*Config)(nil) + +type Config struct { + PollInterval time.Duration `mapstructure:"poll_interval"` + FailureThreshold int `mapstructure:"failure_threshold"` +} + +func (c Config) Validate() error { + return nil +} diff --git a/pkg/licensing/licensing.go b/pkg/licensing/licensing.go new file mode 100644 index 000000000000..0e0196650fda --- /dev/null +++ b/pkg/licensing/licensing.go @@ -0,0 +1,55 @@ +package licensing + +import ( + "context" + "net/http" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/types/licensetypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +var ( + ErrCodeUnsupported = errors.MustNewCode("licensing_unsupported") + ErrCodeFeatureUnavailable = errors.MustNewCode("feature_unavailable") +) + +type Licensing interface { + factory.Service + + // Validate validates the license with the upstream server + Validate(ctx context.Context) error + // Activate validates and enables the license + Activate(ctx context.Context, organizationID valuer.UUID, key string) error + // GetActive fetches the current active license in org + GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) + // Refresh refreshes the license state from upstream server + Refresh(ctx context.Context, organizationID valuer.UUID) error + // Checkout creates a checkout session via upstream server and returns the redirection link + Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) + // Portal creates a portal session via upstream server and return the redirection link + Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) + + // feature surrogate + // CheckFeature checks if the feature is active or not + CheckFeature(ctx context.Context, key string) error + // GetFeatureFlags fetches all the defined feature flags + GetFeatureFlag(ctx context.Context, key string) (*featuretypes.GettableFeature, error) + // GetFeatureFlags fetches all the defined feature flags + GetFeatureFlags(ctx context.Context) ([]*featuretypes.GettableFeature, error) + // InitFeatures initialises the feature flags + InitFeatures(ctx context.Context, features []*featuretypes.GettableFeature) error + // UpdateFeatureFlag updates the feature flag + UpdateFeatureFlag(ctx context.Context, feature *featuretypes.GettableFeature) error +} + +type API interface { + Activate(http.ResponseWriter, *http.Request) + Refresh(http.ResponseWriter, *http.Request) + GetActive(http.ResponseWriter, *http.Request) + + Checkout(http.ResponseWriter, *http.Request) + Portal(http.ResponseWriter, *http.Request) +} diff --git a/pkg/licensing/nooplicensing/api.go b/pkg/licensing/nooplicensing/api.go new file mode 100644 index 000000000000..e484376fd567 --- /dev/null +++ b/pkg/licensing/nooplicensing/api.go @@ -0,0 +1,35 @@ +package nooplicensing + +import ( + "net/http" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/licensing" +) + +type noopLicensingAPI struct{} + +func NewLicenseAPI() licensing.API { + return &noopLicensingAPI{} +} + +func (api *noopLicensingAPI) Activate(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "not implemented")) +} + +func (api *noopLicensingAPI) GetActive(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "not implemented")) +} + +func (api *noopLicensingAPI) Refresh(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "not implemented")) +} + +func (api *noopLicensingAPI) Checkout(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "not implemented")) +} + +func (api *noopLicensingAPI) Portal(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "not implemented")) +} diff --git a/pkg/licensing/nooplicensing/provider.go b/pkg/licensing/nooplicensing/provider.go new file mode 100644 index 000000000000..0e509615f246 --- /dev/null +++ b/pkg/licensing/nooplicensing/provider.go @@ -0,0 +1,99 @@ +package nooplicensing + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/types/licensetypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type noopLicensing struct { + stopChan chan struct{} +} + +func NewFactory() factory.ProviderFactory[licensing.Licensing, licensing.Config] { + return factory.NewProviderFactory(factory.MustNewName("noop"), func(ctx context.Context, providerSettings factory.ProviderSettings, config licensing.Config) (licensing.Licensing, error) { + return New(ctx, providerSettings, config) + }) +} + +func New(_ context.Context, _ factory.ProviderSettings, _ licensing.Config) (licensing.Licensing, error) { + return &noopLicensing{stopChan: make(chan struct{})}, nil +} + +func (provider *noopLicensing) Start(context.Context) error { + <-provider.stopChan + return nil + +} + +func (provider *noopLicensing) Stop(context.Context) error { + close(provider.stopChan) + return nil +} + +func (provider *noopLicensing) Activate(ctx context.Context, organizationID valuer.UUID, key string) error { + return errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "fetching license is not supported") +} + +func (provider *noopLicensing) Validate(ctx context.Context) error { + return errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "validating license is not supported") +} + +func (provider *noopLicensing) Refresh(ctx context.Context, organizationID valuer.UUID) error { + return errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "refreshing license is not supported") +} + +func (provider *noopLicensing) Checkout(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) { + return nil, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "checkout session is not supported") +} + +func (provider *noopLicensing) Portal(ctx context.Context, organizationID valuer.UUID, postableSubscription *licensetypes.PostableSubscription) (*licensetypes.GettableSubscription, error) { + return nil, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "portal session is not supported") +} + +func (provider *noopLicensing) GetActive(ctx context.Context, organizationID valuer.UUID) (*licensetypes.License, error) { + return nil, errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "fetching active license is not supported") +} + +func (provider *noopLicensing) CheckFeature(ctx context.Context, key string) error { + feature, err := provider.GetFeatureFlag(ctx, key) + if err != nil { + return err + } + + if feature.Active { + return nil + } + + return errors.Newf(errors.TypeNotFound, licensing.ErrCodeFeatureUnavailable, "feature unavailable: %s", key) +} + +func (provider *noopLicensing) GetFeatureFlag(ctx context.Context, key string) (*featuretypes.GettableFeature, error) { + features, err := provider.GetFeatureFlags(ctx) + if err != nil { + return nil, err + } + for _, feature := range features { + if feature.Name == key { + return feature, nil + } + } + return nil, errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "no feature available with given key: %s", key) +} + +func (provider *noopLicensing) GetFeatureFlags(ctx context.Context) ([]*featuretypes.GettableFeature, error) { + return licensetypes.DefaultFeatureSet, nil +} + +func (provider *noopLicensing) InitFeatures(ctx context.Context, features []*featuretypes.GettableFeature) error { + return errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "init features is not supported") +} + +func (provider *noopLicensing) UpdateFeatureFlag(ctx context.Context, feature *featuretypes.GettableFeature) error { + return errors.New(errors.TypeUnsupported, licensing.ErrCodeUnsupported, "updating feature flag is not supported") +} diff --git a/pkg/modules/quickfilter/api.go b/pkg/modules/quickfilter/implquickfilter/handler.go similarity index 58% rename from pkg/modules/quickfilter/api.go rename to pkg/modules/quickfilter/implquickfilter/handler.go index add2c4c3b631..65d0477a281c 100644 --- a/pkg/modules/quickfilter/api.go +++ b/pkg/modules/quickfilter/implquickfilter/handler.go @@ -1,37 +1,33 @@ -package quickfilter +package implquickfilter import ( "encoding/json" + "net/http" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/modules/quickfilter" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/quickfiltertypes" "github.com/SigNoz/signoz/pkg/valuer" "github.com/gorilla/mux" - "net/http" ) -type API interface { - GetQuickFilters(http.ResponseWriter, *http.Request) - UpdateQuickFilters(http.ResponseWriter, *http.Request) - GetSignalFilters(http.ResponseWriter, *http.Request) +type handler struct { + module quickfilter.Module } -type quickFiltersAPI struct { - usecase Usecase +func NewHandler(module quickfilter.Module) quickfilter.Handler { + return &handler{module: module} } -func NewAPI(usecase Usecase) API { - return &quickFiltersAPI{usecase: usecase} -} - -func (q *quickFiltersAPI) GetQuickFilters(rw http.ResponseWriter, r *http.Request) { +func (handler *handler) GetQuickFilters(rw http.ResponseWriter, r *http.Request) { claims, err := authtypes.ClaimsFromContext(r.Context()) if err != nil { render.Error(rw, err) return } - filters, err := q.usecase.GetQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID)) + filters, err := handler.module.GetQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID)) if err != nil { render.Error(rw, err) return @@ -40,7 +36,7 @@ func (q *quickFiltersAPI) GetQuickFilters(rw http.ResponseWriter, r *http.Reques render.Success(rw, http.StatusOK, filters) } -func (q *quickFiltersAPI) UpdateQuickFilters(rw http.ResponseWriter, r *http.Request) { +func (handler *handler) UpdateQuickFilters(rw http.ResponseWriter, r *http.Request) { claims, err := authtypes.ClaimsFromContext(r.Context()) if err != nil { render.Error(rw, err) @@ -54,7 +50,7 @@ func (q *quickFiltersAPI) UpdateQuickFilters(rw http.ResponseWriter, r *http.Req return } - err = q.usecase.UpdateQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), req.Signal, req.Filters) + err = handler.module.UpdateQuickFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), req.Signal, req.Filters) if err != nil { render.Error(rw, err) return @@ -63,7 +59,7 @@ func (q *quickFiltersAPI) UpdateQuickFilters(rw http.ResponseWriter, r *http.Req render.Success(rw, http.StatusNoContent, nil) } -func (q *quickFiltersAPI) GetSignalFilters(rw http.ResponseWriter, r *http.Request) { +func (handler *handler) GetSignalFilters(rw http.ResponseWriter, r *http.Request) { claims, err := authtypes.ClaimsFromContext(r.Context()) if err != nil { render.Error(rw, err) @@ -77,7 +73,7 @@ func (q *quickFiltersAPI) GetSignalFilters(rw http.ResponseWriter, r *http.Reque return } - filters, err := q.usecase.GetSignalFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), validatedSignal) + filters, err := handler.module.GetSignalFilters(r.Context(), valuer.MustNewUUID(claims.OrgID), validatedSignal) if err != nil { render.Error(rw, err) return diff --git a/pkg/modules/quickfilter/core/core.go b/pkg/modules/quickfilter/implquickfilter/module.go similarity index 70% rename from pkg/modules/quickfilter/core/core.go rename to pkg/modules/quickfilter/implquickfilter/module.go index 261ea0eac2a9..b1ebc9117f97 100644 --- a/pkg/modules/quickfilter/core/core.go +++ b/pkg/modules/quickfilter/implquickfilter/module.go @@ -1,9 +1,9 @@ -package core +package implquickfilter import ( "context" "encoding/json" - "fmt" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/modules/quickfilter" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" @@ -11,18 +11,17 @@ import ( "github.com/SigNoz/signoz/pkg/valuer" ) -type usecase struct { +type module struct { store quickfiltertypes.QuickFilterStore } -// NewQuickFilters creates a new quick filters usecase -func NewQuickFilters(store quickfiltertypes.QuickFilterStore) quickfilter.Usecase { - return &usecase{store: store} +func NewModule(store quickfiltertypes.QuickFilterStore) quickfilter.Module { + return &module{store: store} } // GetQuickFilters returns all quick filters for an organization -func (u *usecase) GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.SignalFilters, error) { - storedFilters, err := u.store.Get(ctx, orgID) +func (module *module) GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.SignalFilters, error) { + storedFilters, err := module.store.Get(ctx, orgID) if err != nil { return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error fetching organization filters") } @@ -40,8 +39,8 @@ func (u *usecase) GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*qu } // GetSignalFilters returns quick filters for a specific signal in an organization -func (u *usecase) GetSignalFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal) (*quickfiltertypes.SignalFilters, error) { - storedFilter, err := u.store.GetBySignal(ctx, orgID, signal.StringValue()) +func (m *module) GetSignalFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal) (*quickfiltertypes.SignalFilters, error) { + storedFilter, err := m.store.GetBySignal(ctx, orgID, signal.StringValue()) if err != nil { return nil, err } @@ -64,7 +63,7 @@ func (u *usecase) GetSignalFilters(ctx context.Context, orgID valuer.UUID, signa } // UpdateQuickFilters updates quick filters for a specific signal in an organization -func (u *usecase) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal, filters []v3.AttributeKey) error { +func (module *module) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal, filters []v3.AttributeKey) error { // Validate each filter for _, filter := range filters { if err := filter.Validate(); err != nil { @@ -79,7 +78,7 @@ func (u *usecase) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, sig } // Check if filter exists - existingFilter, err := u.store.GetBySignal(ctx, orgID, signal.StringValue()) + existingFilter, err := module.store.GetBySignal(ctx, orgID, signal.StringValue()) if err != nil { return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error checking existing filters") } @@ -100,17 +99,18 @@ func (u *usecase) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, sig } // Persist filter - if err := u.store.Upsert(ctx, filter); err != nil { - return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error upserting filter for signal: %s", signal.StringValue())) + if err := module.store.Upsert(ctx, filter); err != nil { + return err } return nil } -func (u *usecase) SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error { +func (module *module) SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error { storableQuickFilters, err := quickfiltertypes.NewDefaultQuickFilter(orgID) if err != nil { - return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error creating default quick filters") + return err } - return u.store.Create(ctx, storableQuickFilters) + + return module.store.Create(ctx, storableQuickFilters) } diff --git a/pkg/modules/quickfilter/core/store.go b/pkg/modules/quickfilter/implquickfilter/store.go similarity index 98% rename from pkg/modules/quickfilter/core/store.go rename to pkg/modules/quickfilter/implquickfilter/store.go index 5d829e5da8b2..05f7e55cf10e 100644 --- a/pkg/modules/quickfilter/core/store.go +++ b/pkg/modules/quickfilter/implquickfilter/store.go @@ -1,8 +1,9 @@ -package core +package implquickfilter import ( "context" "database/sql" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types/quickfiltertypes" diff --git a/pkg/modules/quickfilter/usecase.go b/pkg/modules/quickfilter/quickfilter.go similarity index 73% rename from pkg/modules/quickfilter/usecase.go rename to pkg/modules/quickfilter/quickfilter.go index 19a1b622b0be..6528b966fc27 100644 --- a/pkg/modules/quickfilter/usecase.go +++ b/pkg/modules/quickfilter/quickfilter.go @@ -2,14 +2,22 @@ package quickfilter import ( "context" + "net/http" + v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/types/quickfiltertypes" "github.com/SigNoz/signoz/pkg/valuer" ) -type Usecase interface { +type Module interface { GetQuickFilters(ctx context.Context, orgID valuer.UUID) ([]*quickfiltertypes.SignalFilters, error) UpdateQuickFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal, filters []v3.AttributeKey) error GetSignalFilters(ctx context.Context, orgID valuer.UUID, signal quickfiltertypes.Signal) (*quickfiltertypes.SignalFilters, error) SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error } + +type Handler interface { + GetQuickFilters(http.ResponseWriter, *http.Request) + UpdateQuickFilters(http.ResponseWriter, *http.Request) + GetSignalFilters(http.ResponseWriter, *http.Request) +} diff --git a/pkg/modules/user/impluser/handler.go b/pkg/modules/user/impluser/handler.go index cfc867d87ad6..57dbe86989e7 100644 --- a/pkg/modules/user/impluser/handler.go +++ b/pkg/modules/user/impluser/handler.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "net/http" + "slices" "time" "github.com/SigNoz/signoz/pkg/errors" @@ -12,6 +13,7 @@ import ( "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" "github.com/gorilla/mux" ) @@ -33,18 +35,24 @@ func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { return } - // SSO users might not have a password - if err := req.Validate(); err != nil { - render.Error(w, err) - return - } - + // get invite object invite, err := h.module.GetInviteByToken(ctx, req.InviteToken) if err != nil { render.Error(w, err) return } + orgDomain, err := h.module.GetAuthDomainByEmail(ctx, invite.Email) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + render.Error(w, err) + return + } + + precheckResp := &types.GettableLoginPrecheck{ + SSO: false, + IsUser: false, + } + if invite.Name == "" && req.DisplayName != "" { invite.Name = req.DisplayName } @@ -55,16 +63,35 @@ func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { return } - password, err := types.NewFactorPassword(req.Password) - if err != nil { - render.Error(w, err) - return - } + if orgDomain != nil && orgDomain.SsoEnabled { + // sso is enabled, create user and respond precheck data + err = h.module.CreateUser(ctx, user) + if err != nil { + render.Error(w, err) + return + } - user, err = h.module.CreateUserWithPassword(ctx, user, password) - if err != nil { - render.Error(w, err) - return + // check if sso is enforced for the org + precheckResp, err = h.module.LoginPrecheck(ctx, invite.OrgID, user.Email, req.SourceURL) + if err != nil { + render.Error(w, err) + return + } + + } else { + password, err := types.NewFactorPassword(req.Password) + if err != nil { + render.Error(w, err) + return + } + + _, err = h.module.CreateUserWithPassword(ctx, user, password) + if err != nil { + render.Error(w, err) + return + } + + precheckResp.IsUser = true } // delete the invite @@ -73,7 +100,7 @@ func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) { return } - render.Success(w, http.StatusCreated, user) + render.Success(w, http.StatusOK, precheckResp) } func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) { @@ -92,7 +119,7 @@ func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) { return } - _, err = h.module.CreateBulkInvite(ctx, claims.OrgID, claims.UserID, &types.PostableBulkInviteRequest{ + invites, err := h.module.CreateBulkInvite(ctx, claims.OrgID, claims.UserID, &types.PostableBulkInviteRequest{ Invites: []types.PostableInvite{req}, }) if err != nil { @@ -100,7 +127,7 @@ func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) { return } - render.Success(rw, http.StatusCreated, nil) + render.Success(rw, http.StatusCreated, invites[0]) } func (h *handler) CreateBulkInvite(rw http.ResponseWriter, r *http.Request) { @@ -139,13 +166,26 @@ func (h *handler) GetInvite(w http.ResponseWriter, r *http.Request) { defer cancel() token := mux.Vars(r)["token"] + sourceUrl := r.URL.Query().Get("ref") invite, err := h.module.GetInviteByToken(ctx, token) if err != nil { render.Error(w, err) return } - render.Success(w, http.StatusOK, invite) + // precheck the user + precheckResp, err := h.module.LoginPrecheck(ctx, invite.OrgID, invite.Email, sourceUrl) + if err != nil { + render.Error(w, err) + return + } + + gettableInvite := &types.GettableEEInvite{ + GettableInvite: *invite, + PreCheck: precheckResp, + } + + render.Success(w, http.StatusOK, gettableInvite) } func (h *handler) ListInvite(w http.ResponseWriter, r *http.Request) { @@ -426,15 +466,19 @@ func (h *handler) Login(w http.ResponseWriter, r *http.Request) { return } + if req.RefreshToken == "" { + _, err := h.module.CanUsePassword(ctx, req.Email) + if err != nil { + render.Error(w, err) + return + } + } + user, err := h.module.GetAuthenticatedUser(ctx, req.OrgID, req.Email, req.Password, req.RefreshToken) if err != nil { render.Error(w, err) return } - if user == nil { - render.Error(w, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email or password")) - return - } jwt, err := h.module.GetJWTForUser(ctx, user) if err != nil { @@ -470,22 +514,313 @@ func (h *handler) GetCurrentUserFromJWT(w http.ResponseWriter, r *http.Request) } -// CreateAPIKey implements user.Handler. func (h *handler) CreateAPIKey(w http.ResponseWriter, r *http.Request) { - render.Error(w, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "not implemented")) + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) + return + } + + userID, err := valuer.NewUUID(claims.UserID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) + return + } + + req := new(types.PostableAPIKey) + if err := json.NewDecoder(r.Body).Decode(req); err != nil { + render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) + return + } + + apiKey, err := types.NewStorableAPIKey( + req.Name, + userID, + req.Role, + req.ExpiresInDays, + ) + if err != nil { + render.Error(w, err) + return + } + + err = h.module.CreateAPIKey(ctx, apiKey) + if err != nil { + render.Error(w, err) + return + } + + createdApiKey, err := h.module.GetAPIKey(ctx, orgID, apiKey.ID) + if err != nil { + render.Error(w, err) + return + } + + // just corrected the status code, response is same, + render.Success(w, http.StatusCreated, createdApiKey) } -// ListAPIKeys implements user.Handler. func (h *handler) ListAPIKeys(w http.ResponseWriter, r *http.Request) { - render.Error(w, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "not implemented")) + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) + return + } + + apiKeys, err := h.module.ListAPIKeys(ctx, orgID) + if err != nil { + render.Error(w, err) + return + } + + // for backward compatibility + if len(apiKeys) == 0 { + render.Success(w, http.StatusOK, []types.GettableAPIKey{}) + return + } + + result := make([]*types.GettableAPIKey, len(apiKeys)) + for i, apiKey := range apiKeys { + result[i] = types.NewGettableAPIKeyFromStorableAPIKey(apiKey) + } + + render.Success(w, http.StatusOK, result) + } -// RevokeAPIKey implements user.Handler. -func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) { - render.Error(w, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "not implemented")) -} - -// UpdateAPIKey implements user.Handler. func (h *handler) UpdateAPIKey(w http.ResponseWriter, r *http.Request) { - render.Error(w, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "not implemented")) + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) + return + } + + userID, err := valuer.NewUUID(claims.UserID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) + return + } + + req := types.StorableAPIKey{} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + render.Error(w, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to decode api key")) + return + } + + idStr := mux.Vars(r)["id"] + id, err := valuer.NewUUID(idStr) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7")) + return + } + + //get the API Key + existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) + if err != nil { + render.Error(w, err) + return + } + + // get the user + createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) + if err != nil { + render.Error(w, err) + return + } + + if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) + return + } + + err = h.module.UpdateAPIKey(ctx, id, &req, userID) + if err != nil { + render.Error(w, err) + return + } + + render.Success(w, http.StatusNoContent, nil) +} + +func (h *handler) RevokeAPIKey(w http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(w, err) + return + } + + idStr := mux.Vars(r)["id"] + id, err := valuer.NewUUID(idStr) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7")) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid-v7")) + return + } + + userID, err := valuer.NewUUID(claims.UserID) + if err != nil { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "userId is not a valid uuid-v7")) + return + } + + //get the API Key + existingAPIKey, err := h.module.GetAPIKey(ctx, orgID, id) + if err != nil { + render.Error(w, err) + return + } + + // get the user + createdByUser, err := h.module.GetUserByID(ctx, orgID.String(), existingAPIKey.UserID.String()) + if err != nil { + render.Error(w, err) + return + } + + if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(createdByUser.Email)) { + render.Error(w, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "API Keys for integration users cannot be revoked")) + return + } + + if err := h.module.RevokeAPIKey(ctx, id, userID); err != nil { + render.Error(w, err) + return + } + + render.Success(w, http.StatusNoContent, nil) +} + +func (h *handler) CreateDomain(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + req := types.GettableOrgDomain{} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + render.Error(rw, err) + return + } + + if err := req.ValidNew(); err != nil { + render.Error(rw, err) + return + } + + err := h.module.CreateDomain(ctx, &req) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusAccepted, req) +} + +func (h *handler) DeleteDomain(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + domainIdStr := mux.Vars(r)["id"] + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid domain id")) + return + } + + err = h.module.DeleteDomain(ctx, domainId) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) +} + +func (h *handler) ListDomains(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + claims, err := authtypes.ClaimsFromContext(ctx) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is not a valid uuid")) + return + } + + domains, err := h.module.ListDomains(r.Context(), orgID) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusOK, domains) +} + +func (h *handler) UpdateDomain(rw http.ResponseWriter, r *http.Request) { + ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second) + defer cancel() + + domainIdStr := mux.Vars(r)["id"] + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid domain id")) + return + } + + req := types.GettableOrgDomain{StorableOrgDomain: types.StorableOrgDomain{ID: domainId}} + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + render.Error(rw, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "unable to unmarshal the payload")) + return + } + + req.ID = domainId + if err := req.Valid(nil); err != nil { + render.Error(rw, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request")) + } + + err = h.module.UpdateDomain(ctx, &req) + if err != nil { + render.Error(rw, err) + return + } + + render.Success(rw, http.StatusNoContent, nil) } diff --git a/pkg/modules/user/impluser/module.go b/pkg/modules/user/impluser/module.go index d1943d928c42..ed7a58c9b7c0 100644 --- a/pkg/modules/user/impluser/module.go +++ b/pkg/modules/user/impluser/module.go @@ -3,18 +3,22 @@ package impluser import ( "context" "fmt" + "net/url" "slices" + "strings" "time" "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/modules/user" + "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/query-service/telemetry" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/emailtypes" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" ) type Module struct { @@ -319,6 +323,41 @@ func (m *Module) LoginPrecheck(ctx context.Context, orgID, email, sourceUrl stri } } + // TODO(Nitya): in multitenancy this should use orgId as well. + orgDomain, err := m.GetAuthDomainByEmail(ctx, email) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + if orgDomain != nil && orgDomain.SsoEnabled { + // this is to allow self registration + resp.IsUser = true + + // saml is enabled for this domain, lets prepare sso url + if sourceUrl == "" { + sourceUrl = constants.GetDefaultSiteURL() + } + + // parse source url that generated the login request + var err error + escapedUrl, _ := url.QueryUnescape(sourceUrl) + siteUrl, err := url.Parse(escapedUrl) + if err != nil { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse referer") + } + + // build Idp URL that will authenticat the user + // the front-end will redirect user to this url + resp.SSOUrl, err = orgDomain.BuildSsoUrl(siteUrl) + if err != nil { + m.settings.Logger().ErrorContext(ctx, "failed to prepare saml request for domain", "domain", orgDomain.Name, "error", err) + return nil, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to prepare saml request for domain") + } + + // set SSO to true, as the url is generated correctly + resp.SSO = true + } + return resp, nil } @@ -347,37 +386,155 @@ func (m *Module) GetJWTForUser(ctx context.Context, user *types.User) (types.Get } func (m *Module) CreateUserForSAMLRequest(ctx context.Context, email string) (*types.User, error) { - return nil, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "SAML login is not supported") + // get auth domain from email domain + _, err := m.GetAuthDomainByEmail(ctx, email) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return nil, err + } + + // get name from email + parts := strings.Split(email, "@") + if len(parts) < 2 { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") + } + name := parts[0] + + defaultOrgID, err := m.store.GetDefaultOrgID(ctx) + if err != nil { + return nil, err + } + + user, err := types.NewUser(name, email, types.RoleViewer.String(), defaultOrgID) + if err != nil { + return nil, err + } + + err = m.CreateUser(ctx, user) + if err != nil { + return nil, err + } + + return user, nil + } func (m *Module) PrepareSsoRedirect(ctx context.Context, redirectUri, email string, jwt *authtypes.JWT) (string, error) { - return "", errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "SSO is not supported") + users, err := m.GetUsersByEmail(ctx, email) + if err != nil { + m.settings.Logger().ErrorContext(ctx, "failed to get user with email received from auth provider", "error", err) + return "", err + } + user := &types.User{} + + if len(users) == 0 { + newUser, err := m.CreateUserForSAMLRequest(ctx, email) + user = newUser + if err != nil { + m.settings.Logger().ErrorContext(ctx, "failed to create user with email received from auth provider", "error", err) + return "", err + } + } else { + user = &users[0].User + } + + tokenStore, err := m.GetJWTForUser(ctx, user) + if err != nil { + m.settings.Logger().ErrorContext(ctx, "failed to generate token for SSO login user", "error", err) + return "", err + } + + return fmt.Sprintf("%s?jwt=%s&usr=%s&refreshjwt=%s", + redirectUri, + tokenStore.AccessJwt, + user.ID, + tokenStore.RefreshJwt), nil } func (m *Module) CanUsePassword(ctx context.Context, email string) (bool, error) { - return false, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "SSO is not supported") + domain, err := m.GetAuthDomainByEmail(ctx, email) + if err != nil && !errors.Ast(err, errors.TypeNotFound) { + return false, err + } + + if domain != nil && domain.SsoEnabled { + // sso is enabled, check if the user has admin role + users, err := m.GetUsersByEmail(ctx, email) + if err != nil { + return false, err + } + + if len(users) == 0 { + return false, errors.New(errors.TypeNotFound, errors.CodeNotFound, "user not found") + } + + if users[0].Role != types.RoleAdmin.String() { + return false, errors.New(errors.TypeForbidden, errors.CodeForbidden, "auth method not supported") + } + + } + + return true, nil } func (m *Module) GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) { - return nil, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "SSO is not supported") + + if email == "" { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required") + } + + components := strings.Split(email, "@") + if len(components) < 2 { + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid email format") + } + + domain, err := m.store.GetDomainByName(ctx, components[1]) + if err != nil { + return nil, err + } + + gettableDomain := &types.GettableOrgDomain{StorableOrgDomain: *domain} + if err := gettableDomain.LoadConfig(domain.Data); err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to load domain config") + } + return gettableDomain, nil } func (m *Module) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { - return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "API Keys are not supported") + return m.store.CreateAPIKey(ctx, apiKey) } func (m *Module) UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *types.StorableAPIKey, updaterID valuer.UUID) error { - return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "API Keys are not supported") + return m.store.UpdateAPIKey(ctx, id, apiKey, updaterID) } func (m *Module) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.StorableAPIKeyUser, error) { - return nil, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "API Keys are not supported") + return m.store.ListAPIKeys(ctx, orgID) } func (m *Module) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.StorableAPIKeyUser, error) { - return nil, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "API Keys are not supported") + return m.store.GetAPIKey(ctx, orgID, id) } func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UUID) error { - return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "API Keys are not supported") + return m.store.RevokeAPIKey(ctx, id, removedByUserID) +} + +func (m *Module) GetDomainFromSsoResponse(ctx context.Context, url *url.URL) (*types.GettableOrgDomain, error) { + return m.store.GetDomainFromSsoResponse(ctx, url) +} + +func (m *Module) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { + return m.store.CreateDomain(ctx, domain) +} + +func (m *Module) DeleteDomain(ctx context.Context, id uuid.UUID) error { + return m.store.DeleteDomain(ctx, id) +} + +func (m *Module) ListDomains(ctx context.Context, orgID valuer.UUID) ([]*types.GettableOrgDomain, error) { + return m.store.ListDomains(ctx, orgID) +} + +func (m *Module) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { + return m.store.UpdateDomain(ctx, domain) } diff --git a/pkg/modules/user/impluser/store.go b/pkg/modules/user/impluser/store.go index a436a4dded12..f7208df78eb7 100644 --- a/pkg/modules/user/impluser/store.go +++ b/pkg/modules/user/impluser/store.go @@ -3,77 +3,83 @@ package impluser import ( "context" "database/sql" + "encoding/json" + "net/url" "sort" + "strings" "time" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" "github.com/uptrace/bun" ) -type Store struct { +type store struct { sqlstore sqlstore.SQLStore + settings factory.ProviderSettings } -func NewStore(sqlstore sqlstore.SQLStore) types.UserStore { - return &Store{sqlstore: sqlstore} +func NewStore(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings) types.UserStore { + return &store{sqlstore: sqlstore, settings: settings} } // CreateBulkInvite implements types.InviteStore. -func (s *Store) CreateBulkInvite(ctx context.Context, invites []*types.Invite) error { - _, err := s.sqlstore.BunDB().NewInsert(). +func (store *store) CreateBulkInvite(ctx context.Context, invites []*types.Invite) error { + _, err := store.sqlstore.BunDB().NewInsert(). Model(&invites). Exec(ctx) if err != nil { - return s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrInviteAlreadyExists, "invite with email: %s already exists in org: %s", invites[0].Email, invites[0].OrgID) + return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrInviteAlreadyExists, "invite with email: %s already exists in org: %s", invites[0].Email, invites[0].OrgID) } return nil } // Delete implements types.InviteStore. -func (s *Store) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error { - _, err := s.sqlstore.BunDB().NewDelete(). +func (store *store) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error { + _, err := store.sqlstore.BunDB().NewDelete(). Model(&types.Invite{}). Where("org_id = ?", orgID). Where("id = ?", id). Exec(ctx) if err != nil { - return s.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with id: %s does not exist in org: %s", id.StringValue(), orgID) + return store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with id: %s does not exist in org: %s", id.StringValue(), orgID) } return nil } // GetInviteByEmailInOrg implements types.InviteStore. -func (s *Store) GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*types.Invite, error) { +func (store *store) GetInviteByEmailInOrg(ctx context.Context, orgID string, email string) (*types.Invite, error) { invite := new(types.Invite) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(invite). Where("email = ?", email). Where("org_id = ?", orgID). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with email: %s does not exist in org: %s", email, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with email: %s does not exist in org: %s", email, orgID) } return invite, nil } -func (s *Store) GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) { +func (store *store) GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) { invite := new(types.Invite) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(invite). Where("token = ?", token). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with token: %s does not exist", token) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with token: %s does not exist", token) } - orgName, err := s.getOrgNameByID(ctx, invite.OrgID) + orgName, err := store.getOrgNameByID(ctx, invite.OrgID) if err != nil { return nil, err } @@ -86,32 +92,32 @@ func (s *Store) GetInviteByToken(ctx context.Context, token string) (*types.Gett return gettableInvite, nil } -func (s *Store) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) { +func (store *store) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) { invites := new([]*types.Invite) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(invites). Where("org_id = ?", orgID). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with org id: %s does not exist", orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with org id: %s does not exist", orgID) } return *invites, nil } -func (s *Store) CreatePassword(ctx context.Context, password *types.FactorPassword) (*types.FactorPassword, error) { - _, err := s.sqlstore.BunDB().NewInsert(). +func (store *store) CreatePassword(ctx context.Context, password *types.FactorPassword) (*types.FactorPassword, error) { + _, err := store.sqlstore.BunDB().NewInsert(). Model(password). Exec(ctx) if err != nil { - return nil, s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrPasswordAlreadyExists, "password with user id: %s already exists", password.UserID) + return nil, store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrPasswordAlreadyExists, "password with user id: %s already exists", password.UserID) } return password, nil } -func (s *Store) CreateUserWithPassword(ctx context.Context, user *types.User, password *types.FactorPassword) (*types.User, error) { - tx, err := s.sqlstore.BunDB().BeginTx(ctx, nil) +func (store *store) CreateUserWithPassword(ctx context.Context, user *types.User, password *types.FactorPassword) (*types.User, error) { + tx, err := store.sqlstore.BunDB().BeginTx(ctx, nil) if err != nil { return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to start transaction") } @@ -123,14 +129,14 @@ func (s *Store) CreateUserWithPassword(ctx context.Context, user *types.User, pa if _, err := tx.NewInsert(). Model(user). Exec(ctx); err != nil { - return nil, s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrUserAlreadyExists, "user with email: %s already exists in org: %s", user.Email, user.OrgID) + return nil, store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrUserAlreadyExists, "user with email: %s already exists in org: %s", user.Email, user.OrgID) } password.UserID = user.ID.StringValue() if _, err := tx.NewInsert(). Model(password). Exec(ctx); err != nil { - return nil, s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrPasswordAlreadyExists, "password with email: %s already exists in org: %s", user.Email, user.OrgID) + return nil, store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrPasswordAlreadyExists, "password with email: %s already exists in org: %s", user.Email, user.OrgID) } err = tx.Commit() @@ -141,54 +147,54 @@ func (s *Store) CreateUserWithPassword(ctx context.Context, user *types.User, pa return user, nil } -func (s *Store) CreateUser(ctx context.Context, user *types.User) error { - _, err := s.sqlstore.BunDB().NewInsert(). +func (store *store) CreateUser(ctx context.Context, user *types.User) error { + _, err := store.sqlstore.BunDB().NewInsert(). Model(user). Exec(ctx) if err != nil { - return s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrUserAlreadyExists, "user with email: %s already exists in org: %s", user.Email, user.OrgID) + return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrUserAlreadyExists, "user with email: %s already exists in org: %s", user.Email, user.OrgID) } return nil } -func (s *Store) GetDefaultOrgID(ctx context.Context) (string, error) { +func (store *store) GetDefaultOrgID(ctx context.Context) (string, error) { org := new(types.Organization) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(org). Limit(1). Scan(ctx) if err != nil { - return "", s.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "default org does not exist") + return "", store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "default org does not exist") } return org.ID.String(), nil } // this is temporary function, we plan to remove this in the next PR. -func (s *Store) getOrgNameByID(ctx context.Context, orgID string) (string, error) { +func (store *store) getOrgNameByID(ctx context.Context, orgID string) (string, error) { org := new(types.Organization) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(org). Where("id = ?", orgID). Scan(ctx) if err != nil { - return "", s.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "org with id: %s does not exist", orgID) + return "", store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "org with id: %s does not exist", orgID) } return org.DisplayName, nil } -func (s *Store) GetUserByID(ctx context.Context, orgID string, id string) (*types.GettableUser, error) { +func (store *store) GetUserByID(ctx context.Context, orgID string, id string) (*types.GettableUser, error) { user := new(types.User) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(user). Where("org_id = ?", orgID). Where("id = ?", id). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) } // remove this in next PR - orgName, err := s.getOrgNameByID(ctx, orgID) + orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } @@ -196,19 +202,19 @@ func (s *Store) GetUserByID(ctx context.Context, orgID string, id string) (*type return &types.GettableUser{User: *user, Organization: orgName}, nil } -func (s *Store) GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error) { +func (store *store) GetUserByEmailInOrg(ctx context.Context, orgID string, email string) (*types.GettableUser, error) { user := new(types.User) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(user). Where("org_id = ?", orgID). Where("email = ?", email). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist in org: %s", email, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist in org: %s", email, orgID) } // remove this in next PR - orgName, err := s.getOrgNameByID(ctx, orgID) + orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } @@ -216,20 +222,20 @@ func (s *Store) GetUserByEmailInOrg(ctx context.Context, orgID string, email str return &types.GettableUser{User: *user, Organization: orgName}, nil } -func (s *Store) GetUsersByEmail(ctx context.Context, email string) ([]*types.GettableUser, error) { +func (store *store) GetUsersByEmail(ctx context.Context, email string) ([]*types.GettableUser, error) { users := new([]*types.User) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(users). Where("email = ?", email). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist", email) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with email: %s does not exist", email) } // remove this in next PR usersWithOrg := []*types.GettableUser{} for _, user := range *users { - orgName, err := s.getOrgNameByID(ctx, user.OrgID) + orgName, err := store.getOrgNameByID(ctx, user.OrgID) if err != nil { return nil, err } @@ -238,19 +244,19 @@ func (s *Store) GetUsersByEmail(ctx context.Context, email string) ([]*types.Get return usersWithOrg, nil } -func (s *Store) GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error) { +func (store *store) GetUsersByRoleInOrg(ctx context.Context, orgID string, role types.Role) ([]*types.GettableUser, error) { users := new([]*types.User) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(users). Where("org_id = ?", orgID). Where("role = ?", role). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with role: %s does not exist in org: %s", role, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with role: %s does not exist in org: %s", role, orgID) } // remove this in next PR - orgName, err := s.getOrgNameByID(ctx, orgID) + orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } @@ -261,9 +267,9 @@ func (s *Store) GetUsersByRoleInOrg(ctx context.Context, orgID string, role type return usersWithOrg, nil } -func (s *Store) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) { +func (store *store) UpdateUser(ctx context.Context, orgID string, id string, user *types.User) (*types.User, error) { user.UpdatedAt = time.Now() - _, err := s.sqlstore.BunDB().NewUpdate(). + _, err := store.sqlstore.BunDB().NewUpdate(). Model(user). Column("display_name"). Column("role"). @@ -272,23 +278,23 @@ func (s *Store) UpdateUser(ctx context.Context, orgID string, id string, user *t Where("org_id = ?", orgID). Exec(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "user with id: %s does not exist in org: %s", id, orgID) } return user, nil } -func (s *Store) ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error) { +func (store *store) ListUsers(ctx context.Context, orgID string) ([]*types.GettableUser, error) { users := []*types.User{} - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(&users). Where("org_id = ?", orgID). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "users with org id: %s does not exist", orgID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrUserNotFound, "users with org id: %s does not exist", orgID) } // remove this in next PR - orgName, err := s.getOrgNameByID(ctx, orgID) + orgName, err := store.getOrgNameByID(ctx, orgID) if err != nil { return nil, err } @@ -299,9 +305,9 @@ func (s *Store) ListUsers(ctx context.Context, orgID string) ([]*types.GettableU return usersWithOrg, nil } -func (s *Store) DeleteUser(ctx context.Context, orgID string, id string) error { +func (store *store) DeleteUser(ctx context.Context, orgID string, id string) error { - tx, err := s.sqlstore.BunDB().BeginTx(ctx, nil) + tx, err := store.sqlstore.BunDB().BeginTx(ctx, nil) if err != nil { return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to start transaction") } @@ -366,67 +372,67 @@ func (s *Store) DeleteUser(ctx context.Context, orgID string, id string) error { return nil } -func (s *Store) CreateResetPasswordToken(ctx context.Context, resetPasswordRequest *types.ResetPasswordRequest) error { - _, err := s.sqlstore.BunDB().NewInsert(). +func (store *store) CreateResetPasswordToken(ctx context.Context, resetPasswordRequest *types.ResetPasswordRequest) error { + _, err := store.sqlstore.BunDB().NewInsert(). Model(resetPasswordRequest). Exec(ctx) if err != nil { - return s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrResetPasswordTokenAlreadyExists, "reset password token with password id: %s already exists", resetPasswordRequest.PasswordID) + return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrResetPasswordTokenAlreadyExists, "reset password token with password id: %s already exists", resetPasswordRequest.PasswordID) } return nil } -func (s *Store) GetPasswordByID(ctx context.Context, id string) (*types.FactorPassword, error) { +func (store *store) GetPasswordByID(ctx context.Context, id string) (*types.FactorPassword, error) { password := new(types.FactorPassword) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(password). Where("id = ?", id). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with id: %s does not exist", id) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with id: %s does not exist", id) } return password, nil } -func (s *Store) GetPasswordByUserID(ctx context.Context, id string) (*types.FactorPassword, error) { +func (store *store) GetPasswordByUserID(ctx context.Context, id string) (*types.FactorPassword, error) { password := new(types.FactorPassword) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(password). Where("user_id = ?", id). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", id) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", id) } return password, nil } -func (s *Store) GetResetPasswordByPasswordID(ctx context.Context, passwordID string) (*types.ResetPasswordRequest, error) { +func (store *store) GetResetPasswordByPasswordID(ctx context.Context, passwordID string) (*types.ResetPasswordRequest, error) { resetPasswordRequest := new(types.ResetPasswordRequest) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(resetPasswordRequest). Where("password_id = ?", passwordID). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with password id: %s does not exist", passwordID) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with password id: %s does not exist", passwordID) } return resetPasswordRequest, nil } -func (s *Store) GetResetPassword(ctx context.Context, token string) (*types.ResetPasswordRequest, error) { +func (store *store) GetResetPassword(ctx context.Context, token string) (*types.ResetPasswordRequest, error) { resetPasswordRequest := new(types.ResetPasswordRequest) - err := s.sqlstore.BunDB().NewSelect(). + err := store.sqlstore.BunDB().NewSelect(). Model(resetPasswordRequest). Where("token = ?", token). Scan(ctx) if err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with token: %s does not exist", token) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with token: %s does not exist", token) } return resetPasswordRequest, nil } -func (s *Store) UpdatePasswordAndDeleteResetPasswordEntry(ctx context.Context, userID string, password string) error { - tx, err := s.sqlstore.BunDB().BeginTx(ctx, nil) +func (store *store) UpdatePasswordAndDeleteResetPasswordEntry(ctx context.Context, userID string, password string) error { + tx, err := store.sqlstore.BunDB().BeginTx(ctx, nil) if err != nil { return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to start transaction") } @@ -449,7 +455,7 @@ func (s *Store) UpdatePasswordAndDeleteResetPasswordEntry(ctx context.Context, u Where("user_id = ?", userID). Exec(ctx) if err != nil { - return s.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", userID) + return store.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", userID) } _, err = tx.NewDelete(). @@ -457,7 +463,7 @@ func (s *Store) UpdatePasswordAndDeleteResetPasswordEntry(ctx context.Context, u Where("password_id = ?", userID). Exec(ctx) if err != nil { - return s.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with password id: %s does not exist", userID) + return store.sqlstore.WrapNotFoundErrf(err, types.ErrResetPasswordTokenNotFound, "reset password token with password id: %s does not exist", userID) } err = tx.Commit() @@ -468,7 +474,7 @@ func (s *Store) UpdatePasswordAndDeleteResetPasswordEntry(ctx context.Context, u return nil } -func (s *Store) UpdatePassword(ctx context.Context, userID string, password string) error { +func (store *store) UpdatePassword(ctx context.Context, userID string, password string) error { factorPassword := &types.FactorPassword{ UserID: userID, Password: password, @@ -476,53 +482,63 @@ func (s *Store) UpdatePassword(ctx context.Context, userID string, password stri UpdatedAt: time.Now(), }, } - _, err := s.sqlstore.BunDB().NewUpdate(). + _, err := store.sqlstore.BunDB().NewUpdate(). Model(factorPassword). Column("password"). Column("updated_at"). Where("user_id = ?", userID). Exec(ctx) if err != nil { - return s.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", userID) + return store.sqlstore.WrapNotFoundErrf(err, types.ErrPasswordNotFound, "password with user id: %s does not exist", userID) } return nil } -func (s *Store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) { - return nil, errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "not supported") +func (store *store) GetDomainByName(ctx context.Context, name string) (*types.StorableOrgDomain, error) { + domain := new(types.StorableOrgDomain) + err := store.sqlstore.BunDB().NewSelect(). + Model(domain). + Where("name = ?", name). + Limit(1). + Scan(ctx) + + if err != nil { + return nil, errors.Wrapf(err, errors.TypeNotFound, errors.CodeNotFound, "failed to get domain from name") + } + return domain, nil } // --- API KEY --- -func (s *Store) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { - _, err := s.sqlstore.BunDB().NewInsert(). +func (store *store) CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error { + _, err := store.sqlstore.BunDB().NewInsert(). Model(apiKey). Exec(ctx) if err != nil { - return s.sqlstore.WrapAlreadyExistsErrf(err, types.ErrAPIKeyAlreadyExists, "API key with token: %s already exists", apiKey.Token) + return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrAPIKeyAlreadyExists, "API key with token: %s already exists", apiKey.Token) } return nil } -func (s *Store) UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *types.StorableAPIKey, updaterID valuer.UUID) error { +func (store *store) UpdateAPIKey(ctx context.Context, id valuer.UUID, apiKey *types.StorableAPIKey, updaterID valuer.UUID) error { apiKey.UpdatedBy = updaterID.String() apiKey.UpdatedAt = time.Now() - _, err := s.sqlstore.BunDB().NewUpdate(). + _, err := store.sqlstore.BunDB().NewUpdate(). Model(apiKey). Column("role", "name", "updated_at", "updated_by"). Where("id = ?", id). Where("revoked = false"). Exec(ctx) if err != nil { - return s.sqlstore.WrapNotFoundErrf(err, types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) + return store.sqlstore.WrapNotFoundErrf(err, types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) } return nil } -func (s *Store) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.StorableAPIKeyUser, error) { +func (store *store) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.StorableAPIKeyUser, error) { orgUserAPIKeys := new(types.OrgUserAPIKey) - if err := s.sqlstore.BunDB().NewSelect(). + if err := store.sqlstore.BunDB().NewSelect(). Model(orgUserAPIKeys). Relation("Users"). Relation("Users.APIKeys", func(q *bun.SelectQuery) *bun.SelectQuery { @@ -552,9 +568,9 @@ func (s *Store) ListAPIKeys(ctx context.Context, orgID valuer.UUID) ([]*types.St return allAPIKeys, nil } -func (s *Store) RevokeAPIKey(ctx context.Context, id, revokedByUserID valuer.UUID) error { +func (store *store) RevokeAPIKey(ctx context.Context, id, revokedByUserID valuer.UUID) error { updatedAt := time.Now().Unix() - _, err := s.sqlstore.BunDB().NewUpdate(). + _, err := store.sqlstore.BunDB().NewUpdate(). Model(&types.StorableAPIKey{}). Set("revoked = ?", true). Set("updated_by = ?", revokedByUserID). @@ -567,9 +583,9 @@ func (s *Store) RevokeAPIKey(ctx context.Context, id, revokedByUserID valuer.UUI return nil } -func (s *Store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.StorableAPIKeyUser, error) { +func (store *store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.StorableAPIKeyUser, error) { apiKey := new(types.OrgUserAPIKey) - if err := s.sqlstore.BunDB().NewSelect(). + if err := store.sqlstore.BunDB().NewSelect(). Model(apiKey). Relation("Users"). Relation("Users.APIKeys", func(q *bun.SelectQuery) *bun.SelectQuery { @@ -580,7 +596,7 @@ func (s *Store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.St Relation("Users.APIKeys.CreatedByUser"). Relation("Users.APIKeys.UpdatedByUser"). Scan(ctx); err != nil { - return nil, s.sqlstore.WrapNotFoundErrf(err, types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) + return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) } // flatten the API keys @@ -591,8 +607,205 @@ func (s *Store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*types.St } } if len(flattenedAPIKeys) == 0 { - return nil, s.sqlstore.WrapNotFoundErrf(errors.New(errors.TypeNotFound, errors.CodeNotFound, "API key with id: %s does not exist"), types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) + return nil, store.sqlstore.WrapNotFoundErrf(errors.New(errors.TypeNotFound, errors.CodeNotFound, "API key with id: %s does not exist"), types.ErrAPIKeyNotFound, "API key with id: %s does not exist", id) } return flattenedAPIKeys[0], nil } + +// GetDomainFromSsoResponse uses relay state received from IdP to fetch +// user domain. The domain is further used to process validity of the response. +// when sending login request to IdP we send relay state as URL (site url) +// with domainId or domainName as query parameter. +func (store *store) GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*types.GettableOrgDomain, error) { + // derive domain id from relay state now + var domainIdStr string + var domainNameStr string + var domain *types.GettableOrgDomain + + for k, v := range relayState.Query() { + if k == "domainId" && len(v) > 0 { + domainIdStr = strings.Replace(v[0], ":", "-", -1) + } + if k == "domainName" && len(v) > 0 { + domainNameStr = v[0] + } + } + + if domainIdStr != "" { + domainId, err := uuid.Parse(domainIdStr) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse domainID from IdP response") + } + + domain, err = store.GetDomain(ctx, domainId) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to find domain from domainID received in IDP response") + } + } + + if domainNameStr != "" { + domainFromDB, err := store.GetGettableDomainByName(ctx, domainNameStr) + domain = domainFromDB + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to find domain from domainName received in IDP response") + } + } + if domain != nil { + return domain, nil + } + + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to find domain received in IDP response") +} + +// GetDomainByName returns org domain for a given domain name +func (store *store) GetGettableDomainByName(ctx context.Context, name string) (*types.GettableOrgDomain, error) { + + stored := types.StorableOrgDomain{} + err := store.sqlstore.BunDB().NewSelect(). + Model(&stored). + Where("name = ?", name). + Limit(1). + Scan(ctx) + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "domain with name: %s doesn't exist", name) + } + + domain := &types.GettableOrgDomain{StorableOrgDomain: stored} + if err := domain.LoadConfig(stored.Data); err != nil { + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to load domain config") + } + return domain, nil +} + +// GetDomain returns org domain for a given domain id +func (store *store) GetDomain(ctx context.Context, id uuid.UUID) (*types.GettableOrgDomain, error) { + + stored := types.StorableOrgDomain{} + err := store.sqlstore.BunDB().NewSelect(). + Model(&stored). + Where("id = ?", id). + Limit(1). + Scan(ctx) + + if err != nil { + return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "domain with id: %s doesn't exist", id) + } + + domain := &types.GettableOrgDomain{StorableOrgDomain: stored} + if err := domain.LoadConfig(stored.Data); err != nil { + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to load domain config") + } + return domain, nil +} + +// ListDomains gets the list of auth domains by org id +func (store *store) ListDomains(ctx context.Context, orgId valuer.UUID) ([]*types.GettableOrgDomain, error) { + domains := make([]*types.GettableOrgDomain, 0) + stored := []types.StorableOrgDomain{} + err := store.sqlstore.BunDB().NewSelect(). + Model(&stored). + Where("org_id = ?", orgId). + Scan(ctx) + + if err != nil { + if err == sql.ErrNoRows { + return domains, nil + } + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to list domains") + } + + for _, s := range stored { + domain := types.GettableOrgDomain{StorableOrgDomain: s} + if err := domain.LoadConfig(s.Data); err != nil { + store.settings.Logger.ErrorContext(ctx, "ListDomains() failed", "error", err) + } + domains = append(domains, &domain) + } + + return domains, nil +} + +// CreateDomain creates a new auth domain +func (store *store) CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { + + if domain.ID == uuid.Nil { + domain.ID = uuid.New() + } + + if domain.OrgID == "" || domain.Name == "" { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "domain creation failed, missing fields: OrgID, Name") + } + + configJson, err := json.Marshal(domain) + if err != nil { + return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "domain creation failed") + } + + storableDomain := types.StorableOrgDomain{ + ID: domain.ID, + Name: domain.Name, + OrgID: domain.OrgID, + Data: string(configJson), + TimeAuditable: types.TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()}, + } + + _, err = store.sqlstore.BunDB().NewInsert(). + Model(&storableDomain). + Exec(ctx) + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "domain creation failed") + } + return nil +} + +// UpdateDomain updates stored config params for a domain +func (store *store) UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error { + if domain.ID == uuid.Nil { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing domain id") + } + configJson, err := json.Marshal(domain) + if err != nil { + return errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to update domain") + } + + storableDomain := &types.StorableOrgDomain{ + ID: domain.ID, + Name: domain.Name, + OrgID: domain.OrgID, + Data: string(configJson), + TimeAuditable: types.TimeAuditable{UpdatedAt: time.Now()}, + } + + _, err = store.sqlstore.BunDB().NewUpdate(). + Model(storableDomain). + Column("data", "updated_at"). + WherePK(). + Exec(ctx) + + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to update domain") + } + + return nil +} + +// DeleteDomain deletes an org domain +func (store *store) DeleteDomain(ctx context.Context, id uuid.UUID) error { + + if id == uuid.Nil { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing domain id") + } + + storableDomain := &types.StorableOrgDomain{ID: id} + _, err := store.sqlstore.BunDB().NewDelete(). + Model(storableDomain). + WherePK(). + Exec(ctx) + + if err != nil { + return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete domain") + } + + return nil +} diff --git a/pkg/modules/user/user.go b/pkg/modules/user/user.go index 79ea97090fe9..f2f4153ddc47 100644 --- a/pkg/modules/user/user.go +++ b/pkg/modules/user/user.go @@ -3,10 +3,12 @@ package user import ( "context" "net/http" + "net/url" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" ) type Module interface { @@ -47,6 +49,12 @@ type Module interface { // Auth Domain GetAuthDomainByEmail(ctx context.Context, email string) (*types.GettableOrgDomain, error) + GetDomainFromSsoResponse(ctx context.Context, url *url.URL) (*types.GettableOrgDomain, error) + + ListDomains(ctx context.Context, orgID valuer.UUID) ([]*types.GettableOrgDomain, error) + CreateDomain(ctx context.Context, domain *types.GettableOrgDomain) error + UpdateDomain(ctx context.Context, domain *types.GettableOrgDomain) error + DeleteDomain(ctx context.Context, id uuid.UUID) error // API KEY CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error @@ -85,4 +93,9 @@ type Handler interface { ListAPIKeys(http.ResponseWriter, *http.Request) UpdateAPIKey(http.ResponseWriter, *http.Request) RevokeAPIKey(http.ResponseWriter, *http.Request) + + ListDomains(http.ResponseWriter, *http.Request) + CreateDomain(http.ResponseWriter, *http.Request) + UpdateDomain(http.ResponseWriter, *http.Request) + DeleteDomain(http.ResponseWriter, *http.Request) } diff --git a/pkg/querier/builder_query.go b/pkg/querier/builder_query.go new file mode 100644 index 000000000000..dfb041dd655f --- /dev/null +++ b/pkg/querier/builder_query.go @@ -0,0 +1,204 @@ +package querier + +import ( + "context" + "encoding/base64" + "strconv" + "strings" + "time" + + "github.com/SigNoz/signoz/pkg/telemetrystore" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +type builderQuery[T any] struct { + telemetryStore telemetrystore.TelemetryStore + stmtBuilder qbtypes.StatementBuilder[T] + spec qbtypes.QueryBuilderQuery[T] + + fromMS uint64 + toMS uint64 + kind qbtypes.RequestType +} + +var _ qbtypes.Query = (*builderQuery[any])(nil) + +func newBuilderQuery[T any]( + telemetryStore telemetrystore.TelemetryStore, + stmtBuilder qbtypes.StatementBuilder[T], + spec qbtypes.QueryBuilderQuery[T], + tr qbtypes.TimeRange, + kind qbtypes.RequestType, +) *builderQuery[T] { + return &builderQuery[T]{ + telemetryStore: telemetryStore, + stmtBuilder: stmtBuilder, + spec: spec, + fromMS: tr.From, + toMS: tr.To, + kind: kind, + } +} + +func (q *builderQuery[T]) Fingerprint() string { + // TODO: implement this + return "" +} + +func (q *builderQuery[T]) Window() (uint64, uint64) { + return q.fromMS, q.toMS +} + +// must be a single query, ordered by timestamp (logs need an id tie-break). +func (q *builderQuery[T]) isWindowList() bool { + if len(q.spec.Order) == 0 { + return false + } + + // first ORDER BY must be `timestamp` + if q.spec.Order[0].Key.Name != "timestamp" { + return false + } + + if q.spec.Signal == telemetrytypes.SignalLogs { + // logs require timestamp,id with identical direction + if len(q.spec.Order) != 2 || q.spec.Order[1].Key.Name != "id" || + q.spec.Order[1].Direction != q.spec.Order[0].Direction { + return false + } + } + return true +} + +func (q *builderQuery[T]) Execute(ctx context.Context) (*qbtypes.Result, error) { + + // can we do window based pagination? + if q.kind == qbtypes.RequestTypeRaw && q.isWindowList() { + return q.executeWindowList(ctx) + } + + stmt, err := q.stmtBuilder.Build(ctx, q.fromMS, q.toMS, q.kind, q.spec) + if err != nil { + return nil, err + } + + chQuery := qbtypes.ClickHouseQuery{ + Name: q.spec.Name, + Query: stmt.Query, + } + + chExec := newchSQLQuery(q.telemetryStore, chQuery, stmt.Args, qbtypes.TimeRange{From: q.fromMS, To: q.toMS}, q.kind) + result, err := chExec.Execute(ctx) + if err != nil { + return nil, err + } + result.Warnings = stmt.Warnings + return result, nil +} + +func (q *builderQuery[T]) executeWindowList(ctx context.Context) (*qbtypes.Result, error) { + isAsc := len(q.spec.Order) > 0 && + strings.ToLower(string(q.spec.Order[0].Direction.StringValue())) == "asc" + + // Adjust [fromMS,toMS] window if a cursor was supplied + if cur := strings.TrimSpace(q.spec.Cursor); cur != "" { + if ts, err := decodeCursor(cur); err == nil { + if isAsc { + if uint64(ts) >= q.fromMS { + q.fromMS = uint64(ts + 1) + } + } else { // DESC + if uint64(ts) <= q.toMS { + q.toMS = uint64(ts - 1) + } + } + } + } + + reqLimit := q.spec.Limit + if reqLimit == 0 { + reqLimit = 10_000 // sane upper-bound default + } + offsetLeft := q.spec.Offset + need := reqLimit + offsetLeft // rows to fetch from ClickHouse + + var rows []*qbtypes.RawRow + + totalRows := uint64(0) + totalBytes := uint64(0) + start := time.Now() + + for _, r := range makeBuckets(q.fromMS, q.toMS) { + q.spec.Offset = 0 + q.spec.Limit = need + + stmt, err := q.stmtBuilder.Build(ctx, r.fromNS/1e6, r.toNS/1e6, q.kind, q.spec) + if err != nil { + return nil, err + } + + chExec := newchSQLQuery( + q.telemetryStore, + qbtypes.ClickHouseQuery{Name: q.spec.Name, Query: stmt.Query}, + stmt.Args, + qbtypes.TimeRange{From: q.fromMS, To: q.toMS}, + q.kind, + ) + res, err := chExec.Execute(ctx) + if err != nil { + return nil, err + } + totalRows += res.Stats.RowsScanned + totalBytes += res.Stats.BytesScanned + + rawRows := res.Value.(*qbtypes.RawData).Rows + need -= len(rawRows) + + for _, rr := range rawRows { + if offsetLeft > 0 { // client-requested initial offset + offsetLeft-- + continue + } + rows = append(rows, rr) + if len(rows) >= reqLimit { // page filled + break + } + } + if len(rows) >= reqLimit { + break + } + } + + nextCursor := "" + if len(rows) == reqLimit { + lastTS := rows[len(rows)-1].Timestamp.UnixMilli() + nextCursor = encodeCursor(lastTS) + } + + return &qbtypes.Result{ + Type: qbtypes.RequestTypeRaw, + Value: &qbtypes.RawData{ + QueryName: q.spec.Name, + Rows: rows, + NextCursor: nextCursor, + }, + Stats: qbtypes.ExecStats{ + RowsScanned: totalRows, + BytesScanned: totalBytes, + DurationMS: uint64(time.Since(start).Milliseconds()), + }, + }, nil +} + +func encodeCursor(tsMilli int64) string { + return base64.StdEncoding.EncodeToString([]byte(strconv.FormatInt(tsMilli, 10))) +} + +func decodeCursor(cur string) (int64, error) { + b, err := base64.StdEncoding.DecodeString(cur) + if err != nil { + return 0, err + } + return strconv.ParseInt(string(b), 10, 64) +} diff --git a/pkg/querier/clickhouse_query.go b/pkg/querier/clickhouse_query.go new file mode 100644 index 000000000000..c5973ede379d --- /dev/null +++ b/pkg/querier/clickhouse_query.go @@ -0,0 +1,77 @@ +package querier + +import ( + "context" + "time" + + "github.com/ClickHouse/clickhouse-go/v2" + "github.com/SigNoz/signoz/pkg/telemetrystore" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +type chSQLQuery struct { + telemetryStore telemetrystore.TelemetryStore + + query qbtypes.ClickHouseQuery + args []any + fromMS uint64 + toMS uint64 + kind qbtypes.RequestType +} + +var _ qbtypes.Query = (*chSQLQuery)(nil) + +func newchSQLQuery( + telemetryStore telemetrystore.TelemetryStore, + query qbtypes.ClickHouseQuery, + args []any, + tr qbtypes.TimeRange, + kind qbtypes.RequestType, +) *chSQLQuery { + return &chSQLQuery{ + telemetryStore: telemetryStore, + query: query, + args: args, + fromMS: tr.From, + toMS: tr.To, + kind: kind, + } +} + +// TODO: use the same query hash scheme as ClickHouse +func (q *chSQLQuery) Fingerprint() string { return q.query.Query } +func (q *chSQLQuery) Window() (uint64, uint64) { return q.fromMS, q.toMS } + +func (q *chSQLQuery) Execute(ctx context.Context) (*qbtypes.Result, error) { + + totalRows := uint64(0) + totalBytes := uint64(0) + elapsed := time.Duration(0) + + ctx = clickhouse.Context(ctx, clickhouse.WithProgress(func(p *clickhouse.Progress) { + totalRows += p.Rows + totalBytes += p.Bytes + elapsed += p.Elapsed + })) + + rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, q.query.Query, q.args...) + if err != nil { + return nil, err + } + defer rows.Close() + + // TODO: map the errors from ClickHouse to our error types + payload, err := consume(rows, q.kind) + if err != nil { + return nil, err + } + return &qbtypes.Result{ + Type: q.kind, + Value: payload, + Stats: qbtypes.ExecStats{ + RowsScanned: totalRows, + BytesScanned: totalBytes, + DurationMS: uint64(elapsed.Milliseconds()), + }, + }, nil +} diff --git a/pkg/querier/consume.go b/pkg/querier/consume.go new file mode 100644 index 000000000000..ba51387812e7 --- /dev/null +++ b/pkg/querier/consume.go @@ -0,0 +1,373 @@ +package querier + +import ( + "fmt" + "math" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "time" + + "github.com/ClickHouse/clickhouse-go/v2/lib/driver" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +var ( + aggRe = regexp.MustCompile(`^__result_(\d+)$`) +) + +// consume reads every row and shapes it into the payload expected for the +// given request type. +// +// * Time-series - []*qbtypes.TimeSeriesData +// * Scalar - []*qbtypes.ScalarData +// * Raw - []*qbtypes.RawData +// * Distribution- []*qbtypes.DistributionData +func consume(rows driver.Rows, kind qbtypes.RequestType) (any, error) { + var ( + payload any + err error + ) + + switch kind { + case qbtypes.RequestTypeTimeSeries: + payload, err = readAsTimeSeries(rows) + case qbtypes.RequestTypeScalar: + payload, err = readAsScalar(rows) + case qbtypes.RequestTypeRaw: + payload, err = readAsRaw(rows) + // TODO: add support for other request types + } + + return payload, err +} + +func readAsTimeSeries(rows driver.Rows) ([]*qbtypes.TimeSeriesData, error) { + + colTypes := rows.ColumnTypes() + colNames := rows.Columns() + + slots := make([]any, len(colTypes)) + numericColsCount := 0 + for i, ct := range colTypes { + slots[i] = reflect.New(ct.ScanType()).Interface() + if numericKind(ct.ScanType().Kind()) { + numericColsCount++ + } + } + + type sKey struct { + agg int + key string // deterministic join of label values + } + seriesMap := map[sKey]*qbtypes.TimeSeries{} + + for rows.Next() { + if err := rows.Scan(slots...); err != nil { + return nil, err + } + + var ( + ts int64 + lblVals []string + lblObjs []*qbtypes.Label + aggValues = map[int]float64{} // all __result_N in this row + fallbackValue float64 // value when NO __result_N columns exist + fallbackSeen bool + ) + + for idx, ptr := range slots { + name := colNames[idx] + + switch v := ptr.(type) { + case *time.Time: + ts = v.UnixMilli() + + case *float64, *float32, *int64, *int32, *uint64, *uint32: + val := numericAsFloat(reflect.ValueOf(ptr).Elem().Interface()) + if m := aggRe.FindStringSubmatch(name); m != nil { + id, _ := strconv.Atoi(m[1]) + aggValues[id] = val + } else if numericColsCount == 1 { // classic single-value query + fallbackValue = val + fallbackSeen = true + } else { + // numeric label + lblVals = append(lblVals, fmt.Sprint(val)) + lblObjs = append(lblObjs, &qbtypes.Label{ + Key: telemetrytypes.TelemetryFieldKey{Name: name}, + Value: val, + }) + } + + case **float64, **float32, **int64, **int32, **uint64, **uint32: + tempVal := reflect.ValueOf(ptr) + if tempVal.IsValid() && !tempVal.IsNil() && !tempVal.Elem().IsNil() { + val := numericAsFloat(tempVal.Elem().Elem().Interface()) + if m := aggRe.FindStringSubmatch(name); m != nil { + id, _ := strconv.Atoi(m[1]) + aggValues[id] = val + } else if numericColsCount == 1 { // classic single-value query + fallbackValue = val + fallbackSeen = true + } else { + // numeric label + lblVals = append(lblVals, fmt.Sprint(val)) + lblObjs = append(lblObjs, &qbtypes.Label{ + Key: telemetrytypes.TelemetryFieldKey{Name: name}, + Value: val, + }) + } + } + + case *string: + lblVals = append(lblVals, *v) + lblObjs = append(lblObjs, &qbtypes.Label{ + Key: telemetrytypes.TelemetryFieldKey{Name: name}, + Value: *v, + }) + + case **string: + val := *v + if val == nil { + var empty string + val = &empty + } + lblVals = append(lblVals, *val) + lblObjs = append(lblObjs, &qbtypes.Label{ + Key: telemetrytypes.TelemetryFieldKey{Name: name}, + Value: val, + }) + + default: + continue + } + } + + // Edge-case: no __result_N columns, but a single numeric column present + if len(aggValues) == 0 && fallbackSeen { + aggValues[0] = fallbackValue + } + + if ts == 0 || len(aggValues) == 0 { + continue // nothing useful + } + + sort.Strings(lblVals) + labelsKey := strings.Join(lblVals, ",") + + // one point per aggregation in this row + for aggIdx, val := range aggValues { + if math.IsNaN(val) || math.IsInf(val, 0) { + continue + } + + key := sKey{agg: aggIdx, key: labelsKey} + + series, ok := seriesMap[key] + if !ok { + series = &qbtypes.TimeSeries{Labels: lblObjs} + seriesMap[key] = series + } + series.Values = append(series.Values, &qbtypes.TimeSeriesValue{ + Timestamp: ts, + Value: val, + }) + } + } + if err := rows.Err(); err != nil { + return nil, err + } + + maxAgg := -1 + for k := range seriesMap { + if k.agg > maxAgg { + maxAgg = k.agg + } + } + if maxAgg < 0 { + return nil, nil // empty result-set + } + + buckets := make([]*qbtypes.AggregationBucket, maxAgg+1) + for i := range buckets { + buckets[i] = &qbtypes.AggregationBucket{ + Index: i, + Alias: "__result_" + strconv.Itoa(i), + } + } + for k, s := range seriesMap { + buckets[k.agg].Series = append(buckets[k.agg].Series, s) + } + + var nonEmpty []*qbtypes.AggregationBucket + for _, b := range buckets { + if len(b.Series) > 0 { + nonEmpty = append(nonEmpty, b) + } + } + + return []*qbtypes.TimeSeriesData{{ + Aggregations: nonEmpty, + }}, nil +} + +func numericKind(k reflect.Kind) bool { + switch k { + case reflect.Float32, reflect.Float64, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return true + default: + return false + } +} + +func readAsScalar(rows driver.Rows) (*qbtypes.ScalarData, error) { + colNames := rows.Columns() + colTypes := rows.ColumnTypes() + + cd := make([]*qbtypes.ColumnDescriptor, len(colNames)) + + for i, name := range colNames { + colType := qbtypes.ColumnTypeGroup + if aggRe.MatchString(name) { + colType = qbtypes.ColumnTypeAggregation + } + cd[i] = &qbtypes.ColumnDescriptor{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: name}, + AggregationIndex: int64(i), + Type: colType, + } + } + + var data [][]any + + for rows.Next() { + scan := make([]any, len(colTypes)) + for i := range scan { + scan[i] = reflect.New(colTypes[i].ScanType()).Interface() + } + if err := rows.Scan(scan...); err != nil { + return nil, err + } + + // 2. deref each slot into the output row + row := make([]any, len(scan)) + for i, cell := range scan { + valPtr := reflect.ValueOf(cell) + if valPtr.Kind() == reflect.Pointer && !valPtr.IsNil() { + row[i] = valPtr.Elem().Interface() + } else { + row[i] = nil // Nullable columns come back as nil pointers + } + } + data = append(data, row) + } + if err := rows.Err(); err != nil { + return nil, err + } + + return &qbtypes.ScalarData{ + Columns: cd, + Data: data, + }, nil +} + +func readAsRaw(rows driver.Rows) (*qbtypes.RawData, error) { + + colNames := rows.Columns() + colTypes := rows.ColumnTypes() + colCnt := len(colNames) + + // Build a template slice of correctly-typed pointers once + scanTpl := make([]any, colCnt) + for i, ct := range colTypes { + scanTpl[i] = reflect.New(ct.ScanType()).Interface() + } + + var outRows []*qbtypes.RawRow + + for rows.Next() { + // fresh copy of the scan slice (otherwise the driver reuses pointers) + scan := make([]any, colCnt) + for i := range scanTpl { + scan[i] = reflect.New(colTypes[i].ScanType()).Interface() + } + + if err := rows.Scan(scan...); err != nil { + return nil, err + } + + rr := qbtypes.RawRow{ + Data: make(map[string]*any, colCnt), + } + + for i, cellPtr := range scan { + name := colNames[i] + + // de-reference the typed pointer to any + val := reflect.ValueOf(cellPtr).Elem().Interface() + + // special-case: timestamp column + if name == "timestamp" || name == "timestamp_datetime" { + switch t := val.(type) { + case time.Time: + rr.Timestamp = t + case uint64: // epoch-ns stored as integer + rr.Timestamp = time.Unix(0, int64(t)) + case int64: + rr.Timestamp = time.Unix(0, t) + default: + // leave zero time if unrecognised + } + } + + // store value in map as *any, to match the schema + v := any(val) + rr.Data[name] = &v + } + outRows = append(outRows, &rr) + } + if err := rows.Err(); err != nil { + return nil, err + } + + return &qbtypes.RawData{ + Rows: outRows, + }, nil +} + +func numericAsFloat(v any) float64 { + switch x := v.(type) { + case float64: + return x + case float32: + return float64(x) + case int64: + return float64(x) + case int32: + return float64(x) + case int16: + return float64(x) + case int8: + return float64(x) + case int: + return float64(x) + case uint64: + return float64(x) + case uint32: + return float64(x) + case uint16: + return float64(x) + case uint8: + return float64(x) + case uint: + return float64(x) + default: + return math.NaN() + } +} diff --git a/pkg/querier/list_range.go b/pkg/querier/list_range.go new file mode 100644 index 000000000000..b62f932ba02f --- /dev/null +++ b/pkg/querier/list_range.go @@ -0,0 +1,36 @@ +package querier + +import "github.com/SigNoz/signoz/pkg/querybuilder" + +const hourNanos = int64(3_600_000_000_000) // 1 h in ns + +type tsRange struct{ fromNS, toNS uint64 } + +// slice the timerange into exponentially growing buckets +func makeBuckets(start, end uint64) []tsRange { + startNS := querybuilder.ToNanoSecs(start) + endNS := querybuilder.ToNanoSecs(end) + + if endNS-startNS <= uint64(hourNanos) { + return []tsRange{{fromNS: startNS, toNS: endNS}} + } + + var out []tsRange + bucket := uint64(hourNanos) + curEnd := endNS + + for { + curStart := curEnd - bucket + if curStart < startNS { + curStart = startNS + } + out = append(out, tsRange{fromNS: curStart, toNS: curEnd}) + + if curStart == startNS { + break + } + curEnd = curStart + bucket *= 2 + } + return out +} diff --git a/pkg/querier/promql_query.go b/pkg/querier/promql_query.go new file mode 100644 index 000000000000..2934563749ff --- /dev/null +++ b/pkg/querier/promql_query.go @@ -0,0 +1,41 @@ +package querier + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/prometheus" + qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +type promqlQuery struct { + promEngine prometheus.Prometheus + query qbv5.PromQuery + tr qbv5.TimeRange + requestType qbv5.RequestType +} + +var _ qbv5.Query = (*promqlQuery)(nil) + +func newPromqlQuery( + promEngine prometheus.Prometheus, + query qbv5.PromQuery, + tr qbv5.TimeRange, + requestType qbv5.RequestType, +) *promqlQuery { + return &promqlQuery{promEngine, query, tr, requestType} +} + +func (q *promqlQuery) Fingerprint() string { + // TODO: Implement this + return "" +} + +func (q *promqlQuery) Window() (uint64, uint64) { + return q.tr.From, q.tr.To +} + +func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) { + // TODO: Implement this + //nolint:nilnil + return nil, nil +} diff --git a/pkg/querier/querier.go b/pkg/querier/querier.go new file mode 100644 index 000000000000..070b480ce3ee --- /dev/null +++ b/pkg/querier/querier.go @@ -0,0 +1,96 @@ +package querier + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/prometheus" + "github.com/SigNoz/signoz/pkg/telemetrystore" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" +) + +type querier struct { + telemetryStore telemetrystore.TelemetryStore + metadataStore telemetrytypes.MetadataStore + promEngine prometheus.Prometheus + traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation] + logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation] + metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation] +} + +func NewQuerier( + telemetryStore telemetrystore.TelemetryStore, + metadataStore telemetrytypes.MetadataStore, + promEngine prometheus.Prometheus, + traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation], + logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation], + metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation], +) *querier { + return &querier{ + telemetryStore: telemetryStore, + metadataStore: metadataStore, + promEngine: promEngine, + traceStmtBuilder: traceStmtBuilder, + logStmtBuilder: logStmtBuilder, + metricStmtBuilder: metricStmtBuilder, + } +} + +func (q *querier) QueryRange(ctx context.Context, orgID string, req *qbtypes.QueryRangeRequest) (*qbtypes.QueryRangeResponse, error) { + + queries := make(map[string]qbtypes.Query) + + for _, query := range req.CompositeQuery.Queries { + switch query.Type { + case qbtypes.QueryTypePromQL: + promQuery, ok := query.Spec.(qbtypes.PromQuery) + if !ok { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", query.Spec) + } + promqlQuery := newPromqlQuery(q.promEngine, promQuery, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) + queries[query.Name] = promqlQuery + case qbtypes.QueryTypeClickHouseSQL: + chQuery, ok := query.Spec.(qbtypes.ClickHouseQuery) + if !ok { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid clickhouse query spec %T", query.Spec) + } + chSQLQuery := newchSQLQuery(q.telemetryStore, chQuery, nil, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) + queries[query.Name] = chSQLQuery + case qbtypes.QueryTypeBuilder: + switch spec := query.Spec.(type) { + case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]: + bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) + queries[query.Name] = bq + + case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]: + bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) + queries[query.Name] = bq + + case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]: + bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType) + queries[query.Name] = bq + default: + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported builder spec type %T", query.Spec) + } + } + } + return q.run(ctx, orgID, queries, req.RequestType) +} + +func (q *querier) run(ctx context.Context, _ string, qs map[string]qbtypes.Query, kind qbtypes.RequestType) (*qbtypes.QueryRangeResponse, error) { + results := make([]*qbtypes.Result, 0, len(qs)) + for _, query := range qs { + // TODO: run in controlled batches + result, err := query.Execute(ctx) + if err != nil { + return nil, err + } + results = append(results, result) + } + return &qbtypes.QueryRangeResponse{ + Type: kind, + Data: results, + }, nil +} diff --git a/pkg/query-service/app/cloudintegrations/controller_test.go b/pkg/query-service/app/cloudintegrations/controller_test.go index 59218348de1b..ff86b868db60 100644 --- a/pkg/query-service/app/cloudintegrations/controller_test.go +++ b/pkg/query-service/app/cloudintegrations/controller_test.go @@ -27,7 +27,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) { organizationModule := implorganization.NewModule(implorganization.NewStore(sqlStore)) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) - userModule := impluser.NewModule(impluser.NewStore(sqlStore), nil, emailing, providerSettings) + userModule := impluser.NewModule(impluser.NewStore(sqlStore, providerSettings), nil, emailing, providerSettings) user, apiErr := createTestUser(organizationModule, userModule) require.Nil(apiErr) @@ -77,7 +77,7 @@ func TestAgentCheckIns(t *testing.T) { organizationModule := implorganization.NewModule(implorganization.NewStore(sqlStore)) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) - userModule := impluser.NewModule(impluser.NewStore(sqlStore), nil, emailing, providerSettings) + userModule := impluser.NewModule(impluser.NewStore(sqlStore, providerSettings), nil, emailing, providerSettings) user, apiErr := createTestUser(organizationModule, userModule) require.Nil(apiErr) @@ -167,7 +167,7 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) { organizationModule := implorganization.NewModule(implorganization.NewStore(sqlStore)) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) - userModule := impluser.NewModule(impluser.NewStore(sqlStore), nil, emailing, providerSettings) + userModule := impluser.NewModule(impluser.NewStore(sqlStore, providerSettings), nil, emailing, providerSettings) user, apiErr := createTestUser(organizationModule, userModule) require.Nil(apiErr) @@ -189,7 +189,7 @@ func TestConfigureService(t *testing.T) { organizationModule := implorganization.NewModule(implorganization.NewStore(sqlStore)) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) - userModule := impluser.NewModule(impluser.NewStore(sqlStore), nil, emailing, providerSettings) + userModule := impluser.NewModule(impluser.NewStore(sqlStore, providerSettings), nil, emailing, providerSettings) user, apiErr := createTestUser(organizationModule, userModule) require.Nil(apiErr) diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.json b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.json new file mode 100644 index 000000000000..4b64263d7247 --- /dev/null +++ b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.json @@ -0,0 +1,2657 @@ +{ + "description": "View DynamoDB metrics with an out-of-the-box dashboard.", + "image":"data:image/svg+xml;base64,<?xml version="1.0" encoding="UTF-8"?>
<svg width="80px" height="80px" viewBox="0 0 80 80" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <!-- Generator: Sketch 64 (93537) - https://sketch.com -->
    <title>Icon-Architecture/64/Arch_Amazon-DynamoDB_64</title>
    <desc>Created with Sketch.</desc>
    <defs>
        <linearGradient x1="0%" y1="100%" x2="100%" y2="0%" id="linearGradient-1">
            <stop stop-color="#2E27AD" offset="0%"></stop>
            <stop stop-color="#527FFF" offset="100%"></stop>
        </linearGradient>
    </defs>
    <g id="Icon-Architecture/64/Arch_Amazon-DynamoDB_64" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
        <g id="Icon-Architecture-BG/64/Database" fill="url(#linearGradient-1)">
            <rect id="Rectangle" x="0" y="0" width="80" height="80"></rect>
        </g>
        <path d="M52.0859525,54.8502506 C48.7479569,57.5490338 41.7449661,58.9752927 35.0439749,58.9752927 C28.3419838,58.9752927 21.336993,57.548042 17.9999974,54.8492588 L17.9999974,60.284515 L18.0009974,60.284515 C18.0009974,62.9952002 24.9999974,66.0163299 35.0439749,66.0163299 C45.0799617,66.0163299 52.0749525,62.9991676 52.0859525,60.290466 L52.0859525,54.8502506 Z M52.0869525,44.522272 L54.0869499,44.5113618 L54.0869499,44.522272 C54.0869499,45.7303271 53.4819507,46.8580436 52.3039522,47.8905439 C53.7319503,49.147199 54.0869499,50.3800499 54.0869499,51.257824 C54.0869499,51.263775 54.0859499,51.2687342 54.0859499,51.2746852 L54.0859499,60.284515 L54.0869499,60.284515 C54.0869499,65.2952658 44.2749628,68 35.0439749,68 C25.8349871,68 16.0499999,65.3071678 16.003,60.3192292 C16.003,60.31427 16,60.3093109 16,60.3043517 L16,51.2548485 C16,51.2528648 16.002,51.2498893 16.002,51.2469138 C16.005,50.3691398 16.3609995,49.1412479 17.7869976,47.8875684 C16.3699995,46.6358725 16.01,45.4149236 16.001,44.5440924 L16.002,44.5440924 C16.002,44.540125 16,44.5371495 16,44.5331822 L16,35.483679 C16,35.4807035 16.002,35.477728 16.002,35.4747525 C16.005,34.5969784 16.3619995,33.3690866 17.7879976,32.1173908 C16.3699995,30.8647031 16.01,29.6427623 16.001,28.7729229 L16.002,28.7729229 C16.002,28.7689556 16,28.7649882 16,28.7610209 L16,19.7125095 C16,19.709534 16.002,19.7065585 16.002,19.703583 C16.019,14.6997751 25.8199871,12 35.0439749,12 C40.2549681,12 45.2609615,12.8281823 48.7779569,14.2722941 L48.0129579,16.1052054 C44.7299622,14.7573015 40.0029684,13.9836701 35.0439749,13.9836701 C24.9999882,13.9836701 18.0009974,17.0047998 18.0009974,19.7174687 C18.0009974,22.4291458 24.9999882,25.4502754 35.0439749,25.4502754 C35.3149746,25.4532509 35.5799742,25.4502754 35.8479739,25.4403571 L35.9319738,27.4220435 C35.6359742,27.4339456 35.3399745,27.4339456 35.0439749,27.4339456 C28.3419838,27.4339456 21.336993,26.0066949 18,23.3079117 L18,28.7401923 L18.0009974,28.7401923 L18.0009974,28.7630046 C18.0109974,29.8034395 19.0779959,30.7119605 19.9719948,31.2892085 C22.6619912,33.0040913 27.4819849,34.1754485 32.8569778,34.4184481 L32.7659779,36.4001346 C27.3209851,36.1531677 22.5529914,35.0234675 19.4839954,33.2917235 C18.7279964,33.8570695 18.0009974,34.6217743 18.0009974,35.4886382 C18.0009974,38.2003153 24.9999882,41.2214449 35.0439749,41.2214449 C36.0289736,41.2214449 37.0069723,41.1887143 37.9519711,41.1232532 L38.0909709,43.1019642 C37.1009722,43.1704008 36.0749736,43.205115 35.0439749,43.205115 C28.3419838,43.205115 21.336993,41.7778644 18,39.0790811 L18,44.5113618 L18.0009974,44.5113618 C18.0109974,45.574609 19.0779959,46.4821381 19.9719948,47.060378 C23.0479907,49.0232196 28.8239831,50.2451604 35.0439749,50.2451604 L35.4839744,50.2451604 L35.4839744,52.2288305 L35.0439749,52.2288305 C28.7249832,52.2288305 22.9819908,51.0554896 19.4699954,49.0728113 C18.7179964,49.6371655 18.0009974,50.397903 18.0009974,51.257824 C18.0009974,53.9695011 24.9999882,56.9916225 35.0439749,56.9916225 C45.0799617,56.9916225 52.0749525,53.9744602 52.0859525,51.2647668 L52.0859525,51.2548485 L52.0859525,51.2538566 C52.0839525,50.391952 51.3639534,49.6312145 50.6099544,49.0668603 C50.1219551,49.3435823 49.5989558,49.6103859 49.0039566,49.8553692 L48.2379576,48.022458 C48.9639566,47.7239156 49.5939558,47.4015692 50.1109551,47.0623616 C51.0129539,46.4742034 52.0869525,45.5547723 52.0869525,44.522272 L52.0869525,44.522272 Z M60.6529412,30.0166841 L55.0489486,30.0166841 C54.717949,30.0166841 54.4069494,29.8540231 54.2219497,29.5822603 C54.0349499,29.3104975 53.99695,28.9643471 54.1189498,28.6598537 L57.5279453,20.1380068 L44.6189702,20.1380068 L38.6189702,32.0400276 L45.0009618,32.0400276 C45.3199614,32.0400276 45.619961,32.1917784 45.8089608,32.44668 C45.9959605,32.7025735 46.0509604,33.0308709 45.9539606,33.3333806 L40.2579681,51.089212 L60.6529412,30.0166841 Z M63.7219372,29.7121907 L38.7229701,55.539576 C38.5279703,55.7399267 38.2659707,55.8440694 38.000971,55.8440694 C37.8249713,55.8440694 37.6479715,55.7994368 37.4899717,55.7052124 C37.0899722,55.4691557 36.9069725,54.992083 37.0479723,54.5517083 L43.6339636,34.0236978 L37.0009724,34.0236978 C36.6539728,34.0236978 36.3329732,33.8461593 36.1499735,33.5535679 C35.9679737,33.2609766 35.9509737,32.8959813 36.1069735,32.5885124 L43.1069643,18.7028214 C43.2759641,18.3665893 43.6219636,18.1543366 44.0009631,18.1543366 L59.0009434,18.1543366 C59.331943,18.1543366 59.6429425,18.3179894 59.8279423,18.5887604 C60.0149421,18.861515 60.052942,19.2066736 59.9309422,19.5121588 L56.5219467,28.0330139 L62.9999381,28.0330139 C63.3999376,28.0330139 63.7629371,28.2710544 63.9199369,28.6360497 C64.0769367,29.0020368 63.9989368,29.4255504 63.7219372,29.7121907 L63.7219372,29.7121907 Z M19.4549955,60.6743062 C20.8719936,61.4727334 22.6559912,62.1442057 24.7569885,62.6678947 L25.2449878,60.7437346 C23.3459903,60.2706293 21.6859925,59.6497405 20.4429942,58.949505 L19.4549955,60.6743062 Z M24.7569885,46.7985335 L25.2449878,44.8753653 C23.3459903,44.4012681 21.6859925,43.7803794 20.4429942,43.0801438 L19.4549955,44.804945 C20.8719936,45.6033722 22.6549912,46.2748446 24.7569885,46.7985335 L24.7569885,46.7985335 Z M19.4549955,28.9355839 L20.4429942,27.2107827 C21.6839925,27.9110182 23.3449903,28.5309151 25.2449878,29.0060041 L24.7569885,30.9291723 C22.6529912,30.4044916 20.8699936,29.7330193 19.4549955,28.9355839 L19.4549955,28.9355839 Z" id="Amazon-DynamoDB_Icon_64_Squid" fill="#FFFFFF"></path>
    </g>
</svg>", + "layout": [ + { + "h": 6, + "i": "9e1d91ec-fb66-4cff-b5c5-282270ebffb5", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 0 + }, + { + "h": 6, + "i": "9a2daf2e-39bc-445d-947f-617c27fadd0f", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 0 + }, + { + "h": 6, + "i": "5b50997d-3bca-466a-bdeb-841b2e49fd65", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 6 + }, + { + "h": 6, + "i": "889c36ab-4d0c-4328-9c3c-6558aad6be89", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 6 + }, + { + "h": 6, + "i": "0c3b97fe-56e0-4ce6-99f4-fd1cbd24f93e", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 12 + }, + { + "h": 6, + "i": "70980d38-ee3c-47be-9520-e371df3b021a", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 12 + }, + { + "h": 6, + "i": "fe1b71b5-1a3f-41c0-b6c2-46bf934787ad", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 18 + }, + { + "h": 6, + "i": "cc0938a5-af82-4bd8-b10e-67eabe717ee0", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 18 + }, + { + "h": 6, + "i": "4bb63c27-5eb4-4904-9947-42ffce15e92e", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 24 + }, + { + "h": 6, + "i": "5ffbe527-8cf3-4ed8-ac2d-8739fa7fa9af", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 24 + }, + { + "h": 6, + "i": "a02f64ac-e73e-4d4c-a26b-fcfc4265c148", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 30 + }, + { + "h": 6, + "i": "014e377d-b7c1-4469-a137-be34d7748f31", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 30 + }, + { + "h": 6, + "i": "b1b75926-7308-43b3-bcad-60f369715f0b", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 36 + }, + { + "h": 6, + "i": "90f4d19d-8785-4a7a-97cf-c967108e1487", + "moved": false, + "static": false, + "w": 6, + "x": 6, + "y": 36 + }, + { + "h": 6, + "i": "5412cdad-174b-462b-916e-4e3de477446b", + "moved": false, + "static": false, + "w": 6, + "x": 0, + "y": 42 + } + ], + "panelMap": {}, + "tags": [], + "title": "DynamoDB Overview", + "uploadedGrafana": false, + "variables": { + "1f7a94df-9735-4bfa-a1b8-dca8ac29f945": { + "allSelected": false, + "customValue": "", + "description": "Account Region", + "id": "1f7a94df-9735-4bfa-a1b8-dca8ac29f945", + "key": "1f7a94df-9735-4bfa-a1b8-dca8ac29f945", + "modificationUUID": "8ef772a1-7df9-46a2-84e7-ab0c0bfc6886", + "multiSelect": false, + "name": "Region", + "order": 1, + "queryValue": "SELECT DISTINCT JSONExtractString(labels, 'cloud_region') AS region\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name like '%aws_DynamoDB%' AND JSONExtractString(labels, 'cloud_account_id') IN {{.Account}} GROUP BY region", + "showALLOption": false, + "sort": "DISABLED", + "textboxValue": "", + "type": "QUERY" + }, + "93ee15bf-baab-4abf-8828-fe6e75518417": { + "allSelected": false, + "customValue": "", + "description": "AWS Account ID", + "id": "93ee15bf-baab-4abf-8828-fe6e75518417", + "key": "93ee15bf-baab-4abf-8828-fe6e75518417", + "modificationUUID": "409e6a7e-1ec1-4611-8624-492a3aac6ca0", + "multiSelect": false, + "name": "Account", + "order": 0, + "queryValue": "SELECT DISTINCT JSONExtractString(labels, 'cloud_account_id') AS cloud_account_id\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name like '%aws_DynamoDB%' GROUP BY cloud_account_id", + "showALLOption": false, + "sort": "ASC", + "textboxValue": "", + "type": "QUERY" + }, + "fd28f0e0-d4ec-4bcd-9c45-32395cb0c55b": { + "allSelected": true, + "customValue": "", + "description": "DynamoDB Tables", + "id": "fd28f0e0-d4ec-4bcd-9c45-32395cb0c55b", + "modificationUUID": "8ebb9032-7e56-4981-8036-efdfc413f8a8", + "multiSelect": true, + "name": "Table", + "order": 2, + "queryValue": "SELECT DISTINCT JSONExtractString(labels, 'TableName') AS table FROM signoz_metrics.distributed_time_series_v4_1day WHERE metric_name like '%aws_DynamoDB%' AND JSONExtractString(labels, 'cloud_account_id') IN {{.Account}} AND JSONExtractString(labels, 'cloud_region') IN {{.Region}} and table != '' GROUP BY table\n", + "showALLOption": true, + "sort": "ASC", + "textboxValue": "", + "type": "QUERY" + } + }, + "version": "v4", + "widgets": [ + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "9e1d91ec-fb66-4cff-b5c5-282270ebffb5", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountMaxReads_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountMaxReads_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "fc55895c", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "8b3f3e0b", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "4fdb1c6c-8c7f-4f8b-a468-9326c811981a", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Max Reads", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "5b50997d-3bca-466a-bdeb-841b2e49fd65", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountMaxTableLevelReads_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountMaxTableLevelReads_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "f7b176f8", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "9a023ab7", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "310efa3b-d68a-4630-b279-bcbc22ddbefb", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Max Table Level Reads", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "889c36ab-4d0c-4328-9c3c-6558aad6be89", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountMaxTableLevelWrites_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountMaxTableLevelWrites_max", + "type": "Gauge" + }, + "aggregateOperator": "avg", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "ec5ebf95", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "5b2fb00e", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "avg" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "473de955-bc5c-4a66-aa8d-2e37502c5643", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Max Table Level Writes", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "9a2daf2e-39bc-445d-947f-617c27fadd0f", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountMaxWrites_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountMaxWrites_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "3815cf09", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "a783bd91", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "avg", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "1115aaa1-fdb0-47a1-af79-8c6d439747d4", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Max Writes", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "0c3b97fe-56e0-4ce6-99f4-fd1cbd24f93e", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "edcbcb83", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "224766cb", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "d42bc3cd-f457-42eb-936e-c931b0c77f61", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Provisioned Read Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "70980d38-ee3c-47be-9520-e371df3b021a", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "c237482a", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "e3a117d5", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "d06d2f3d-8878-4c53-a8f1-10024091887a", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Account Provisioned Write Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "fe1b71b5-1a3f-41c0-b6c2-46bf934787ad", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_ConsumedReadCapacityUnits_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_ConsumedReadCapacityUnits_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "b867513b", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "9c10cbaa", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + }, + { + "id": "4ff7fb7c", + "key": { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + }, + "op": "in", + "value": [ + "$Table" + ] + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + } + ], + "having": [], + "legend": "{{TableName}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "32c9f178-073c-4d1f-8193-76f804776df0", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Consumed Read Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "cc0938a5-af82-4bd8-b10e-67eabe717ee0", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_ConsumedWriteCapacityUnits_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_ConsumedWriteCapacityUnits_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "7e2aa806", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "dd49e062", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + }, + { + "id": "e7ada865", + "key": { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + }, + "op": "in", + "value": [ + "$Table" + ] + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + } + ], + "having": [], + "legend": "{{TableName}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "40397368-92df-42b9-b0e6-0e7dc7984bc4", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Consumed Write Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "4bb63c27-5eb4-4904-9947-42ffce15e92e", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "b3e029fa", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "e6764d50", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "6a33d44a-a337-422f-a964-89b88804343f", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Max Provisioned Table Read Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "5ffbe527-8cf3-4ed8-ac2d-8739fa7fa9af", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "80ba9142", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "9c802cf0", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "a98b7d13-63d3-46cf-b4e7-686b3be7d9f9", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Max Provisioned Table Write Capacity", + "yAxisUnit": "percent" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "a02f64ac-e73e-4d4c-a26b-fcfc4265c148", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_ReturnedItemCount_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_ReturnedItemCount_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "db6edb77", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "8b86de4a", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + }, + { + "id": "a8d39d03", + "key": { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + }, + "op": "in", + "value": [ + "$Table" + ] + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + } + ], + "having": [], + "legend": "{{TableName}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "6322f225-471d-43a2-b13e-f2312c1a7b57", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Returned Item Count", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "014e377d-b7c1-4469-a137-be34d7748f31", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_SuccessfulRequestLatency_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_SuccessfulRequestLatency_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "93bef7f0", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "4a293ec8", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + }, + { + "id": "2e2286c6", + "key": { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + }, + "op": "in", + "value": [ + "$Table" + ] + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + } + ], + "having": [], + "legend": "{{TableName}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "6ad1cbfe-9581-4d99-a14e-50bc5fef699f", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Successful Request Latency", + "yAxisUnit": "ms" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "b1b75926-7308-43b3-bcad-60f369715f0b", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_ThrottledRequests_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_ThrottledRequests_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "28fcd3cd", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "619578e5", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + }, + { + "id": "a6bc481e", + "key": { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + }, + "op": "in", + "value": [ + "$Table" + ] + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [ + { + "dataType": "string", + "id": "TableName--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "TableName", + "type": "tag" + } + ], + "having": [], + "legend": "{{TableName}}", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "fd358cf0-a0b0-4106-a89c-a5196297c23b", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Max Throttled Requests", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "5412cdad-174b-462b-916e-4e3de477446b", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_UserErrors_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_UserErrors_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "5a060b5e", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "3a1cb5ff", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "17db2e6d-d9dc-4568-85ea-ea4b373dfc5e", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "User Errors", + "yAxisUnit": "none" + }, + { + "bucketCount": 30, + "bucketWidth": 0, + "columnUnits": {}, + "description": "", + "fillSpans": false, + "id": "90f4d19d-8785-4a7a-97cf-c967108e1487", + "isLogScale": false, + "isStacked": false, + "mergeAllActiveQueries": false, + "nullZeroValues": "zero", + "opacity": "1", + "panelTypes": "graph", + "query": { + "builder": { + "queryData": [ + { + "aggregateAttribute": { + "dataType": "float64", + "id": "aws_DynamoDB_WriteThrottleEvents_max--float64--Gauge--true", + "isColumn": true, + "isJSON": false, + "key": "aws_DynamoDB_WriteThrottleEvents_max", + "type": "Gauge" + }, + "aggregateOperator": "max", + "dataSource": "metrics", + "disabled": false, + "expression": "A", + "filters": { + "items": [ + { + "id": "58bc06b3", + "key": { + "dataType": "string", + "id": "cloud_account_id--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_account_id", + "type": "tag" + }, + "op": "=", + "value": "$Account" + }, + { + "id": "d6d7a8fb", + "key": { + "dataType": "string", + "id": "cloud_region--string--tag--false", + "isColumn": false, + "isJSON": false, + "key": "cloud_region", + "type": "tag" + }, + "op": "=", + "value": "$Region" + } + ], + "op": "AND" + }, + "functions": [], + "groupBy": [], + "having": [], + "legend": "", + "limit": null, + "orderBy": [], + "queryName": "A", + "reduceTo": "avg", + "spaceAggregation": "max", + "stepInterval": 60, + "timeAggregation": "max" + } + ], + "queryFormulas": [] + }, + "clickhouse_sql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "id": "713c6c70-3a62-4b67-8a67-7917ca9d4fbf", + "promql": [ + { + "disabled": false, + "legend": "", + "name": "A", + "query": "" + } + ], + "queryType": "builder" + }, + "selectedLogFields": [ + { + "dataType": "string", + "name": "body", + "type": "" + }, + { + "dataType": "string", + "name": "timestamp", + "type": "" + } + ], + "selectedTracesFields": [ + { + "dataType": "string", + "id": "serviceName--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "serviceName", + "type": "tag" + }, + { + "dataType": "string", + "id": "name--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "name", + "type": "tag" + }, + { + "dataType": "float64", + "id": "durationNano--float64--tag--true", + "isColumn": true, + "isJSON": false, + "key": "durationNano", + "type": "tag" + }, + { + "dataType": "string", + "id": "httpMethod--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "httpMethod", + "type": "tag" + }, + { + "dataType": "string", + "id": "responseStatusCode--string--tag--true", + "isColumn": true, + "isJSON": false, + "key": "responseStatusCode", + "type": "tag" + } + ], + "softMax": 0, + "softMin": 0, + "stackedBarChart": false, + "thresholds": [], + "timePreferance": "GLOBAL_TIME", + "title": "Max Write Throttle Events", + "yAxisUnit": "none" + } + ] +} \ No newline at end of file diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.png b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.png new file mode 100644 index 000000000000..18a5f3149962 Binary files /dev/null and b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/assets/dashboards/overview.png differ diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/icon.svg b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/icon.svg new file mode 100644 index 000000000000..bd4f2c30f503 --- /dev/null +++ b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/icon.svg @@ -0,0 +1,18 @@ + + + + Icon-Architecture/64/Arch_Amazon-DynamoDB_64 + Created with Sketch. + + + + + + + + + + + + + \ No newline at end of file diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/integration.json b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/integration.json new file mode 100644 index 000000000000..8453c6b5ef8a --- /dev/null +++ b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/integration.json @@ -0,0 +1,394 @@ +{ + "id": "dynamodb", + "title": "DynamoDB", + "icon": "file://icon.svg", + "overview": "file://overview.md", + "supported_signals": { + "metrics": true, + "logs": false + }, + "data_collected": { + "metrics": [ + { + "name": "aws_DynamoDB_AccountMaxReads_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxReads_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxReads_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxReads_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelReads_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelReads_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelReads_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelReads_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelWrites_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelWrites_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelWrites_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxTableLevelWrites_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxWrites_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxWrites_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxWrites_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountMaxWrites_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_count", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_max", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_min", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedReadCapacityUtilization_sum", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_count", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_max", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_min", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_AccountProvisionedWriteCapacityUtilization_sum", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedReadCapacityUnits_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedReadCapacityUnits_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedReadCapacityUnits_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedReadCapacityUnits_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedWriteCapacityUnits_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedWriteCapacityUnits_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedWriteCapacityUnits_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ConsumedWriteCapacityUnits_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_count", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_max", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_min", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableReadCapacityUtilization_sum", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_count", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_max", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_min", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_MaxProvisionedTableWriteCapacityUtilization_sum", + "unit": "Percent", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ReturnedItemCount_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ReturnedItemCount_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ReturnedItemCount_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ReturnedItemCount_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_SuccessfulRequestLatency_count", + "unit": "Milliseconds", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_SuccessfulRequestLatency_max", + "unit": "Milliseconds", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_SuccessfulRequestLatency_min", + "unit": "Milliseconds", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_SuccessfulRequestLatency_sum", + "unit": "Milliseconds", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ThrottledRequests_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ThrottledRequests_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ThrottledRequests_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_ThrottledRequests_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_UserErrors_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_UserErrors_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_UserErrors_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_UserErrors_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_WriteThrottleEvents_count", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_WriteThrottleEvents_max", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_WriteThrottleEvents_min", + "unit": "Count", + "type": "Gauge", + "description": "" + }, + { + "name": "aws_DynamoDB_WriteThrottleEvents_sum", + "unit": "Count", + "type": "Gauge", + "description": "" + } + ] + }, + "telemetry_collection_strategy": { + "aws_metrics": { + "cloudwatch_metric_stream_filters": [ + { + "Namespace": "AWS/DynamoDB" + } + ] + } + }, + "assets": { + "dashboards": [ + { + "id": "overview", + "title": "DynamoDB Overview", + "description": "Overview of DynamoDB", + "image": "file://assets/dashboards/overview.png", + "definition": "file://assets/dashboards/overview.json" + } + ] + } +} \ No newline at end of file diff --git a/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/overview.md b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/overview.md new file mode 100644 index 000000000000..3de918d29a48 --- /dev/null +++ b/pkg/query-service/app/cloudintegrations/services/definitions/aws/dynamodb/overview.md @@ -0,0 +1,3 @@ +### Monitor DynamoDB with SigNoz + +Collect DynamoDB Key Metrics and view them with an out of the box dashboard. diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 15a1882e1c0d..8fff0690adae 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -3,12 +3,14 @@ package app import ( "bytes" "context" + "encoding/base64" "encoding/json" "errors" "fmt" "io" "math" "net/http" + "net/url" "regexp" "slices" "sort" @@ -23,10 +25,11 @@ import ( errorsV2 "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/http/render" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" + "github.com/SigNoz/signoz/pkg/licensing" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations/services" "github.com/SigNoz/signoz/pkg/query-service/app/integrations" "github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer" + "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/valuer" "github.com/prometheus/prometheus/promql" @@ -58,6 +61,7 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/postprocess" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/SigNoz/signoz/pkg/types/featuretypes" "github.com/SigNoz/signoz/pkg/types/pipelinetypes" ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" @@ -89,7 +93,6 @@ func NewRouter() *mux.Router { type APIHandler struct { reader interfaces.Reader ruleManager *rules.Manager - featureFlags interfaces.FeatureLookup querier interfaces.Querier querierV2 interfaces.Querier queryBuilder *queryBuilder.QueryBuilder @@ -136,13 +139,11 @@ type APIHandler struct { AlertmanagerAPI *alertmanager.API + LicensingAPI licensing.API + FieldsAPI *fields.API Signoz *signoz.SigNoz - - QuickFilters quickfilter.API - - QuickFilterModule quickfilter.Usecase } type APIHandlerOpts struct { @@ -155,9 +156,6 @@ type APIHandlerOpts struct { // rule manager handles rule crud operations RuleManager *rules.Manager - // feature flags querier - FeatureFlags interfaces.FeatureLookup - // Integrations IntegrationsController *integrations.Controller @@ -177,13 +175,11 @@ type APIHandlerOpts struct { AlertmanagerAPI *alertmanager.API + LicensingAPI licensing.API + FieldsAPI *fields.API Signoz *signoz.SigNoz - - QuickFilters quickfilter.API - - QuickFilterModule quickfilter.Usecase } // NewAPIHandler returns an APIHandler @@ -224,7 +220,6 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { preferSpanMetrics: opts.PreferSpanMetrics, temporalityMap: make(map[string]map[v3.Temporality]bool), ruleManager: opts.RuleManager, - featureFlags: opts.FeatureFlags, IntegrationsController: opts.IntegrationsController, CloudIntegrationsController: opts.CloudIntegrationsController, LogsParsingPipelineController: opts.LogsParsingPipelineController, @@ -244,10 +239,9 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) { JWT: opts.JWT, SummaryService: summaryService, AlertmanagerAPI: opts.AlertmanagerAPI, + LicensingAPI: opts.LicensingAPI, Signoz: opts.Signoz, FieldsAPI: opts.FieldsAPI, - QuickFilters: opts.QuickFilters, - QuickFilterModule: opts.QuickFilterModule, } logsQueryBuilder := logsv4.PrepareLogsQuery @@ -574,9 +568,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/org/preferences/{preferenceId}", am.AdminAccess(aH.Signoz.Handlers.Preference.UpdateOrg)).Methods(http.MethodPut) // Quick Filters - router.HandleFunc("/api/v1/orgs/me/filters", am.ViewAccess(aH.QuickFilters.GetQuickFilters)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/orgs/me/filters/{signal}", am.ViewAccess(aH.QuickFilters.GetSignalFilters)).Methods(http.MethodGet) - router.HandleFunc("/api/v1/orgs/me/filters", am.AdminAccess(aH.QuickFilters.UpdateQuickFilters)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/orgs/me/filters", am.ViewAccess(aH.Signoz.Handlers.QuickFilter.GetQuickFilters)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/orgs/me/filters/{signal}", am.ViewAccess(aH.Signoz.Handlers.QuickFilter.GetSignalFilters)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/orgs/me/filters", am.AdminAccess(aH.Signoz.Handlers.QuickFilter.UpdateQuickFilters)).Methods(http.MethodPut) // === Authentication APIs === router.HandleFunc("/api/v1/invite", am.AdminAccess(aH.Signoz.Handlers.User.CreateInvite)).Methods(http.MethodPost) @@ -589,6 +583,17 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/register", am.OpenAccess(aH.registerUser)).Methods(http.MethodPost) router.HandleFunc("/api/v1/login", am.OpenAccess(aH.Signoz.Handlers.User.Login)).Methods(http.MethodPost) router.HandleFunc("/api/v1/loginPrecheck", am.OpenAccess(aH.Signoz.Handlers.User.LoginPrecheck)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/complete/google", am.OpenAccess(aH.receiveGoogleAuth)).Methods(http.MethodGet) + + router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.User.ListDomains)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/domains", am.AdminAccess(aH.Signoz.Handlers.User.CreateDomain)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.User.UpdateDomain)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/domains/{id}", am.AdminAccess(aH.Signoz.Handlers.User.DeleteDomain)).Methods(http.MethodDelete) + + router.HandleFunc("/api/v1/pats", am.AdminAccess(aH.Signoz.Handlers.User.CreateAPIKey)).Methods(http.MethodPost) + router.HandleFunc("/api/v1/pats", am.AdminAccess(aH.Signoz.Handlers.User.ListAPIKeys)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(aH.Signoz.Handlers.User.UpdateAPIKey)).Methods(http.MethodPut) + router.HandleFunc("/api/v1/pats/{id}", am.AdminAccess(aH.Signoz.Handlers.User.RevokeAPIKey)).Methods(http.MethodDelete) router.HandleFunc("/api/v1/user", am.AdminAccess(aH.Signoz.Handlers.User.ListUsers)).Methods(http.MethodGet) router.HandleFunc("/api/v1/user/me", am.OpenAccess(aH.Signoz.Handlers.User.GetCurrentUserFromJWT)).Methods(http.MethodGet) @@ -607,7 +612,7 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { render.Success(rw, http.StatusOK, []any{}) })).Methods(http.MethodGet) router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(func(rw http.ResponseWriter, req *http.Request) { - render.Error(rw, errorsV2.New(errorsV2.TypeUnsupported, errorsV2.CodeUnsupported, "not implemented")) + aH.LicensingAPI.Activate(rw, req) })).Methods(http.MethodGet) } @@ -1979,15 +1984,14 @@ func (aH *APIHandler) getVersion(w http.ResponseWriter, r *http.Request) { } func (aH *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { - featureSet, err := aH.FF().GetFeatureFlags() + featureSet, err := aH.Signoz.Licensing.GetFeatureFlags(r.Context()) if err != nil { aH.HandleError(w, err, http.StatusInternalServerError) return } if aH.preferSpanMetrics { - for idx := range featureSet { - feature := &featureSet[idx] - if feature.Name == model.UseSpanMetrics { + for idx, feature := range featureSet { + if feature.Name == featuretypes.UseSpanMetrics { featureSet[idx].Active = true } } @@ -1995,12 +1999,8 @@ func (aH *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) { aH.Respond(w, featureSet) } -func (aH *APIHandler) FF() interfaces.FeatureLookup { - return aH.featureFlags -} - -func (aH *APIHandler) CheckFeature(f string) bool { - err := aH.FF().CheckFeature(f) +func (aH *APIHandler) CheckFeature(ctx context.Context, key string) bool { + err := aH.Signoz.Licensing.CheckFeature(ctx, key) return err == nil } @@ -2032,7 +2032,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { return } - _, apiErr := auth.Register(context.Background(), &req, aH.Signoz.Alertmanager, aH.Signoz.Modules.Organization, aH.Signoz.Modules.User, aH.QuickFilterModule) + _, apiErr := auth.Register(context.Background(), &req, aH.Signoz.Alertmanager, aH.Signoz.Modules.Organization, aH.Signoz.Modules.User, aH.Signoz.Modules.QuickFilter) if apiErr != nil { RespondError(w, apiErr, nil) return @@ -2045,6 +2045,74 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { aH.Respond(w, nil) } +func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) { + ssoError := []byte("Login failed. Please contact your system administrator") + dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError))) + base64.StdEncoding.Encode(dst, ssoError) + + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther) +} + +// receiveGoogleAuth completes google OAuth response and forwards a request +// to front-end to sign user in +func (aH *APIHandler) receiveGoogleAuth(w http.ResponseWriter, r *http.Request) { + redirectUri := constants.GetDefaultSiteURL() + ctx := context.Background() + + q := r.URL.Query() + if errType := q.Get("error"); errType != "" { + zap.L().Error("[receiveGoogleAuth] failed to login with google auth", zap.String("error", errType), zap.String("error_description", q.Get("error_description"))) + http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "failed to login through SSO"), http.StatusMovedPermanently) + return + } + + relayState := q.Get("state") + zap.L().Debug("[receiveGoogleAuth] relay state received", zap.String("state", relayState)) + + parsedState, err := url.Parse(relayState) + if err != nil || relayState == "" { + zap.L().Error("[receiveGoogleAuth] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r)) + handleSsoError(w, r, redirectUri) + return + } + + // upgrade redirect url from the relay state for better accuracy + redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login") + + // fetch domain by parsing relay state. + domain, err := aH.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState) + if err != nil { + handleSsoError(w, r, redirectUri) + return + } + + // now that we have domain, use domain to fetch sso settings. + // prepare google callback handler using parsedState - + // which contains redirect URL (front-end endpoint) + callbackHandler, err := domain.PrepareGoogleOAuthProvider(parsedState) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to prepare google oauth provider", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + identity, err := callbackHandler.HandleCallback(r) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to process HandleCallback", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + nextPage, err := aH.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, identity.Email, aH.JWT) + if err != nil { + zap.L().Error("[receiveGoogleAuth] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err)) + handleSsoError(w, r, redirectUri) + return + } + + http.Redirect(w, r, nextPage, http.StatusSeeOther) +} + func (aH *APIHandler) HandleError(w http.ResponseWriter, err error, statusCode int) bool { if err == nil { return false diff --git a/pkg/query-service/app/integrations/manager_test.go b/pkg/query-service/app/integrations/manager_test.go index 0c6bd1c51c64..c78413c0b739 100644 --- a/pkg/query-service/app/integrations/manager_test.go +++ b/pkg/query-service/app/integrations/manager_test.go @@ -22,7 +22,7 @@ func TestIntegrationLifecycle(t *testing.T) { organizationModule := implorganization.NewModule(implorganization.NewStore(store)) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) - userModule := impluser.NewModule(impluser.NewStore(store), nil, emailing, providerSettings) + userModule := impluser.NewModule(impluser.NewStore(store, providerSettings), nil, emailing, providerSettings) user, apiErr := createTestUser(organizationModule, userModule) if apiErr != nil { t.Fatalf("could not create test user: %v", apiErr) diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index f49a04693730..2a9aa2a5e5fd 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ b/pkg/query-service/app/queryBuilder/query_builder.go @@ -8,7 +8,6 @@ import ( "github.com/SigNoz/signoz/pkg/cache" metricsV3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3" "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/query-service/interfaces" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "go.uber.org/zap" ) @@ -46,8 +45,7 @@ type prepareLogsQueryFunc func(start, end int64, queryType v3.QueryType, panelTy type prepareMetricQueryFunc func(start, end int64, queryType v3.QueryType, panelType v3.PanelType, bq *v3.BuilderQuery, options metricsV3.Options) (string, error) type QueryBuilder struct { - options QueryBuilderOptions - featureFlags interfaces.FeatureLookup + options QueryBuilderOptions } type QueryBuilderOptions struct { diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 0ccadb1e8c68..b060e470003e 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -14,8 +14,7 @@ import ( "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/apis/fields" "github.com/SigNoz/signoz/pkg/http/middleware" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" + "github.com/SigNoz/signoz/pkg/licensing/nooplicensing" "github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/query-service/agentConf" "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader" @@ -34,7 +33,6 @@ import ( "github.com/SigNoz/signoz/pkg/cache" "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/query-service/featureManager" "github.com/SigNoz/signoz/pkg/query-service/healthcheck" "github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/rules" @@ -81,8 +79,6 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status { // NewServer creates and initializes Server func NewServer(serverOptions *ServerOptions) (*Server, error) { - // initiate feature manager - fm := featureManager.StartManager() fluxIntervalForTraceDetail, err := time.ParseDuration(serverOptions.FluxIntervalForTraceDetail) if err != nil { @@ -140,23 +136,19 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { telemetry.GetInstance().SetUserCountCallback(telemetry.GetUserCount) telemetry.GetInstance().SetDashboardsInfoCallback(telemetry.GetDashboardsInfo) - quickfiltermodule := quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(serverOptions.SigNoz.SQLStore)) - quickFilter := quickfilter.NewAPI(quickfiltermodule) apiHandler, err := NewAPIHandler(APIHandlerOpts{ Reader: reader, PreferSpanMetrics: serverOptions.PreferSpanMetrics, RuleManager: rm, - FeatureFlags: fm, IntegrationsController: integrationsController, CloudIntegrationsController: cloudIntegrationsController, LogsParsingPipelineController: logParsingPipelineController, FluxInterval: fluxInterval, JWT: serverOptions.Jwt, AlertmanagerAPI: alertmanager.NewAPI(serverOptions.SigNoz.Alertmanager), - FieldsAPI: fields.NewAPI(serverOptions.SigNoz.TelemetryStore), + LicensingAPI: nooplicensing.NewLicenseAPI(), + FieldsAPI: fields.NewAPI(serverOptions.SigNoz.TelemetryStore, serverOptions.SigNoz.Instrumentation.Logger()), Signoz: serverOptions.SigNoz, - QuickFilters: quickFilter, - QuickFilterModule: quickfiltermodule, }) if err != nil { return nil, err @@ -220,14 +212,15 @@ func (s *Server) createPrivateServer(api *APIHandler) (*http.Server, error) { r := NewRouter() - r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) - r.Use(middleware.NewTimeout(zap.L(), + r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Max, ).Wrap) - r.Use(middleware.NewAnalytics(zap.L()).Wrap) - r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) + r.Use(middleware.NewAnalytics().Wrap) + r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) + r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) api.RegisterPrivateRoutes(r) @@ -250,14 +243,15 @@ func (s *Server) createPrivateServer(api *APIHandler) (*http.Server, error) { func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server, error) { r := NewRouter() - r.Use(middleware.NewAuth(zap.L(), s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) - r.Use(middleware.NewTimeout(zap.L(), + r.Use(middleware.NewAuth(s.serverOptions.Jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + r.Use(middleware.NewTimeout(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Timeout.ExcludedRoutes, s.serverOptions.Config.APIServer.Timeout.Default, s.serverOptions.Config.APIServer.Timeout.Max, ).Wrap) - r.Use(middleware.NewAnalytics(zap.L()).Wrap) - r.Use(middleware.NewLogging(zap.L(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) + r.Use(middleware.NewAnalytics().Wrap) + r.Use(middleware.NewAPIKey(s.serverOptions.SigNoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.serverOptions.SigNoz.Instrumentation.Logger()).Wrap) + r.Use(middleware.NewLogging(s.serverOptions.SigNoz.Instrumentation.Logger(), s.serverOptions.Config.APIServer.Logging.ExcludedRoutes).Wrap) am := middleware.NewAuthZ(s.serverOptions.SigNoz.Instrumentation.Logger()) diff --git a/pkg/query-service/auth/auth.go b/pkg/query-service/auth/auth.go index e29d02bee6fc..d157083d4e17 100644 --- a/pkg/query-service/auth/auth.go +++ b/pkg/query-service/auth/auth.go @@ -47,7 +47,7 @@ func RegisterOrgAndFirstUser(ctx context.Context, req *types.PostableRegisterOrg } // First user registration -func Register(ctx context.Context, req *types.PostableRegisterOrgAndAdmin, alertmanager alertmanager.Alertmanager, organizationModule organization.Module, userModule user.Module, quickfiltermodule quickfilter.Usecase) (*types.User, *model.ApiError) { +func Register(ctx context.Context, req *types.PostableRegisterOrgAndAdmin, alertmanager alertmanager.Alertmanager, organizationModule organization.Module, userModule user.Module, quickfiltermodule quickfilter.Module) (*types.User, *model.ApiError) { user, err := RegisterOrgAndFirstUser(ctx, req, organizationModule, userModule) if err != nil { return nil, err diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 21850a53accd..b01fb6423e08 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" + "github.com/SigNoz/signoz/pkg/types/featuretypes" ) const ( @@ -65,9 +66,9 @@ func UseMetricsPreAggregation() bool { var KafkaSpanEval = GetOrDefaultEnv("KAFKA_SPAN_EVAL", "false") -var DEFAULT_FEATURE_SET = model.FeatureSet{ - model.Feature{ - Name: model.UseSpanMetrics, +var DEFAULT_FEATURE_SET = []*featuretypes.GettableFeature{ + &featuretypes.GettableFeature{ + Name: featuretypes.UseSpanMetrics, Active: false, Usage: 0, UsageLimit: -1, @@ -660,3 +661,7 @@ var MaterializedDataTypeMap = map[string]string{ } const InspectMetricsMaxTimeDiff = 1800000 + +func GetDefaultSiteURL() string { + return GetOrDefaultEnv("SIGNOZ_SITE_URL", HTTPHostPort) +} diff --git a/pkg/query-service/featureManager/manager.go b/pkg/query-service/featureManager/manager.go deleted file mode 100644 index 7805fe619147..000000000000 --- a/pkg/query-service/featureManager/manager.go +++ /dev/null @@ -1,60 +0,0 @@ -package featureManager - -import ( - "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/query-service/model" - "go.uber.org/zap" -) - -type FeatureManager struct { -} - -func StartManager() *FeatureManager { - fM := &FeatureManager{} - return fM -} - -// CheckFeature will be internally used by backend routines -// for feature gating -func (fm *FeatureManager) CheckFeature(featureKey string) error { - - feature, err := fm.GetFeatureFlag(featureKey) - if err != nil { - return err - } - - if feature.Active { - return nil - } - - return model.ErrFeatureUnavailable{Key: featureKey} -} - -// GetFeatureFlags returns current features -func (fm *FeatureManager) GetFeatureFlags() (model.FeatureSet, error) { - features := constants.DEFAULT_FEATURE_SET - return features, nil -} - -func (fm *FeatureManager) InitFeatures(req model.FeatureSet) error { - zap.L().Error("InitFeatures not implemented in OSS") - return nil -} - -func (fm *FeatureManager) UpdateFeatureFlag(req model.Feature) error { - zap.L().Error("UpdateFeatureFlag not implemented in OSS") - return nil -} - -func (fm *FeatureManager) GetFeatureFlag(key string) (model.Feature, error) { - features, err := fm.GetFeatureFlags() - if err != nil { - return model.Feature{}, err - } - for _, feature := range features { - if feature.Name == key { - return feature, nil - } - } - return model.Feature{}, model.ErrFeatureUnavailable{Key: key} -} diff --git a/pkg/query-service/interfaces/featureLookup.go b/pkg/query-service/interfaces/featureLookup.go deleted file mode 100644 index e2ecbcc3bbbb..000000000000 --- a/pkg/query-service/interfaces/featureLookup.go +++ /dev/null @@ -1,13 +0,0 @@ -package interfaces - -import ( - "github.com/SigNoz/signoz/pkg/query-service/model" -) - -type FeatureLookup interface { - CheckFeature(f string) error - GetFeatureFlags() (model.FeatureSet, error) - GetFeatureFlag(f string) (model.Feature, error) - UpdateFeatureFlag(features model.Feature) error - InitFeatures(features model.FeatureSet) error -} diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index 7c4ba52dbb81..18565541aeb0 100644 --- a/pkg/query-service/main.go +++ b/pkg/query-service/main.go @@ -9,10 +9,9 @@ import ( "github.com/SigNoz/signoz/pkg/config" "github.com/SigNoz/signoz/pkg/config/envprovider" "github.com/SigNoz/signoz/pkg/config/fileprovider" - "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" + "github.com/SigNoz/signoz/pkg/licensing" + "github.com/SigNoz/signoz/pkg/licensing/nooplicensing" "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/signoz" @@ -118,19 +117,18 @@ func main() { signoz, err := signoz.New( context.Background(), config, + jwt, zeus.Config{}, noopzeus.NewProviderFactory(), + licensing.Config{}, + func(_ sqlstore.SQLStore, _ zeus.Zeus) factory.ProviderFactory[licensing.Licensing, licensing.Config] { + return nooplicensing.NewFactory() + }, signoz.NewEmailingProviderFactories(), signoz.NewCacheProviderFactories(), signoz.NewWebProviderFactories(), signoz.NewSQLStoreProviderFactories(), signoz.NewTelemetryStoreProviderFactories(), - func(sqlstore sqlstore.SQLStore, emailing emailing.Emailing, providerSettings factory.ProviderSettings) user.Module { - return impluser.NewModule(impluser.NewStore(sqlstore), jwt, emailing, providerSettings) - }, - func(userModule user.Module) user.Handler { - return impluser.NewHandler(userModule) - }, ) if err != nil { zap.L().Fatal("Failed to create signoz", zap.Error(err)) diff --git a/pkg/query-service/model/featureSet.go b/pkg/query-service/model/featureSet.go deleted file mode 100644 index 4646d030f6e6..000000000000 --- a/pkg/query-service/model/featureSet.go +++ /dev/null @@ -1,38 +0,0 @@ -package model - -type FeatureSet []Feature -type Feature struct { - Name string `db:"name" json:"name"` - Active bool `db:"active" json:"active"` - Usage int64 `db:"usage" json:"usage"` - UsageLimit int64 `db:"usage_limit" json:"usage_limit"` - Route string `db:"route" json:"route"` -} - -const UseSpanMetrics = "USE_SPAN_METRICS" -const AnomalyDetection = "ANOMALY_DETECTION" -const TraceFunnels = "TRACE_FUNNELS" - -var BasicPlan = FeatureSet{ - Feature{ - Name: UseSpanMetrics, - Active: false, - Usage: 0, - UsageLimit: -1, - Route: "", - }, - Feature{ - Name: AnomalyDetection, - Active: false, - Usage: 0, - UsageLimit: -1, - Route: "", - }, - Feature{ - Name: TraceFunnels, - Active: false, - Usage: 0, - UsageLimit: -1, - Route: "", - }, -} diff --git a/pkg/query-service/tests/integration/filter_suggestions_test.go b/pkg/query-service/tests/integration/filter_suggestions_test.go index 781e453cdf6d..21665bb3d2e9 100644 --- a/pkg/query-service/tests/integration/filter_suggestions_test.go +++ b/pkg/query-service/tests/integration/filter_suggestions_test.go @@ -13,18 +13,14 @@ import ( "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/emailing/noopemailing" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/constants" - "github.com/SigNoz/signoz/pkg/query-service/featureManager" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/signoz" @@ -304,27 +300,21 @@ func (tb *FilterSuggestionsTestBed) GetQBFilterSuggestionsForLogs( func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed { testDB := utils.NewQueryServiceDBForTests(t) - fm := featureManager.StartManager() reader, mockClickhouse := NewMockClickhouseReader(t, testDB) mockClickhouse.MatchExpectationsInOrder(false) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) - userModule := impluser.NewModule(impluser.NewStore(testDB), jwt, emailing, providerSettings) - userHandler := impluser.NewHandler(userModule) - modules := signoz.NewModules(testDB, userModule) - quickFilterModule := quickfilter.NewAPI(quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(testDB))) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - Reader: reader, - FeatureFlags: fm, - JWT: jwt, + Reader: reader, + JWT: jwt, Signoz: &signoz.SigNoz{ Modules: modules, - Handlers: signoz.NewHandlers(modules, userHandler), + Handlers: signoz.NewHandlers(modules), }, - QuickFilters: quickFilterModule, }) if err != nil { t.Fatalf("could not create a new ApiHandler: %v", err) @@ -332,13 +322,13 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed { router := app.NewRouter() //add the jwt middleware - router.Use(middleware.NewAuth(zap.L(), jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) am := middleware.NewAuthZ(instrumentationtest.New().Logger()) apiHandler.RegisterRoutes(router, am) apiHandler.RegisterQueryRangeV3Routes(router, am) organizationModule := implorganization.NewModule(implorganization.NewStore(testDB)) - user, apiErr := createTestUser(organizationModule, userModule) + user, apiErr := createTestUser(organizationModule, modules.User) if apiErr != nil { t.Fatalf("could not create a test user: %v", apiErr) } @@ -355,7 +345,7 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed { testUser: user, qsHttpHandler: router, mockClickhouse: mockClickhouse, - userModule: userModule, + userModule: modules.User, } } diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go index a28b91fb32f9..69e4102ac3fb 100644 --- a/pkg/query-service/tests/integration/logparsingpipeline_test.go +++ b/pkg/query-service/tests/integration/logparsingpipeline_test.go @@ -15,10 +15,7 @@ import ( "github.com/SigNoz/signoz/pkg/emailing/noopemailing" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/query-service/agentConf" "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app/integrations" @@ -485,11 +482,8 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) jwt := authtypes.NewJWT("", 10*time.Minute, 30*time.Minute) - userModule := impluser.NewModule(impluser.NewStore(sqlStore), jwt, emailing, providerSettings) - userHandler := impluser.NewHandler(userModule) - modules := signoz.NewModules(sqlStore, userModule) - handlers := signoz.NewHandlers(modules, userHandler) - quickFilterModule := quickfilter.NewAPI(quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(sqlStore))) + modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings) + handlers := signoz.NewHandlers(modules) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ LogsParsingPipelineController: controller, @@ -498,14 +492,13 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli Modules: modules, Handlers: handlers, }, - QuickFilters: quickFilterModule, }) if err != nil { t.Fatalf("could not create a new ApiHandler: %v", err) } organizationModule := implorganization.NewModule(implorganization.NewStore(sqlStore)) - user, apiErr := createTestUser(organizationModule, userModule) + user, apiErr := createTestUser(organizationModule, modules.User) if apiErr != nil { t.Fatalf("could not create a test user: %v", apiErr) } @@ -526,7 +519,7 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli testUser: user, apiHandler: apiHandler, agentConfMgr: agentConfMgr, - userModule: userModule, + userModule: modules.User, } } diff --git a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go b/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go index 0d3a93ac1d24..8ea9d7a9747e 100644 --- a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go @@ -11,27 +11,22 @@ import ( "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/emailing/noopemailing" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/signoz" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" - "github.com/SigNoz/signoz/pkg/query-service/featureManager" "github.com/SigNoz/signoz/pkg/query-service/utils" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types" "github.com/google/uuid" mockhouse "github.com/srikanthccv/ClickHouse-go-mock" "github.com/stretchr/testify/require" - "go.uber.org/zap" ) func TestAWSIntegrationAccountLifecycle(t *testing.T) { @@ -366,42 +361,36 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI t.Fatalf("could not create cloud integrations controller: %v", err) } - fm := featureManager.StartManager() reader, mockClickhouse := NewMockClickhouseReader(t, testDB) mockClickhouse.MatchExpectationsInOrder(false) providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) jwt := authtypes.NewJWT("", 10*time.Minute, 30*time.Minute) - userModule := impluser.NewModule(impluser.NewStore(testDB), jwt, emailing, providerSettings) - userHandler := impluser.NewHandler(userModule) - modules := signoz.NewModules(testDB, userModule) - handlers := signoz.NewHandlers(modules, userHandler) - quickFilterModule := quickfilter.NewAPI(quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(testDB))) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings) + handlers := signoz.NewHandlers(modules) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ Reader: reader, CloudIntegrationsController: controller, - FeatureFlags: fm, JWT: jwt, Signoz: &signoz.SigNoz{ Modules: modules, Handlers: handlers, }, - QuickFilters: quickFilterModule, }) if err != nil { t.Fatalf("could not create a new ApiHandler: %v", err) } router := app.NewRouter() - router.Use(middleware.NewAuth(zap.L(), jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) am := middleware.NewAuthZ(instrumentationtest.New().Logger()) apiHandler.RegisterRoutes(router, am) apiHandler.RegisterCloudIntegrationsRoutes(router, am) organizationModule := implorganization.NewModule(implorganization.NewStore(testDB)) - user, apiErr := createTestUser(organizationModule, userModule) + user, apiErr := createTestUser(organizationModule, modules.User) if apiErr != nil { t.Fatalf("could not create a test user: %v", apiErr) } @@ -411,7 +400,7 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI testUser: user, qsHttpHandler: router, mockClickhouse: mockClickhouse, - userModule: userModule, + userModule: modules.User, } } diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go index 4111d6df421c..1b221267fbba 100644 --- a/pkg/query-service/tests/integration/signoz_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_integrations_test.go @@ -11,18 +11,14 @@ import ( "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/emailing/noopemailing" - "github.com/SigNoz/signoz/pkg/modules/quickfilter" - quickfilterscore "github.com/SigNoz/signoz/pkg/modules/quickfilter/core" "github.com/SigNoz/signoz/pkg/http/middleware" "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/user" - "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations" "github.com/SigNoz/signoz/pkg/query-service/app/integrations" - "github.com/SigNoz/signoz/pkg/query-service/featureManager" "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/query-service/utils" @@ -33,7 +29,6 @@ import ( "github.com/SigNoz/signoz/pkg/types/pipelinetypes" mockhouse "github.com/srikanthccv/ClickHouse-go-mock" "github.com/stretchr/testify/require" - "go.uber.org/zap" ) // Higher level tests for UI facing APIs @@ -567,7 +562,6 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration t.Fatalf("could not create integrations controller: %v", err) } - fm := featureManager.StartManager() reader, mockClickhouse := NewMockClickhouseReader(t, testDB) mockClickhouse.MatchExpectationsInOrder(false) @@ -579,37 +573,32 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration providerSettings := instrumentationtest.New().ToProviderSettings() emailing, _ := noopemailing.New(context.Background(), providerSettings, emailing.Config{}) jwt := authtypes.NewJWT("", 10*time.Minute, 30*time.Minute) - userModule := impluser.NewModule(impluser.NewStore(testDB), jwt, emailing, providerSettings) - userHandler := impluser.NewHandler(userModule) - modules := signoz.NewModules(testDB, userModule) - handlers := signoz.NewHandlers(modules, userHandler) - - quickFilterModule := quickfilter.NewAPI(quickfilterscore.NewQuickFilters(quickfilterscore.NewStore(testDB))) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings) + handlers := signoz.NewHandlers(modules) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ - Reader: reader, - IntegrationsController: controller, - FeatureFlags: fm, + Reader: reader, + IntegrationsController: controller, + JWT: jwt, CloudIntegrationsController: cloudIntegrationsController, Signoz: &signoz.SigNoz{ Modules: modules, Handlers: handlers, }, - QuickFilters: quickFilterModule, }) if err != nil { t.Fatalf("could not create a new ApiHandler: %v", err) } router := app.NewRouter() - router.Use(middleware.NewAuth(zap.L(), jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) + router.Use(middleware.NewAuth(jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}).Wrap) am := middleware.NewAuthZ(instrumentationtest.New().Logger()) apiHandler.RegisterRoutes(router, am) apiHandler.RegisterIntegrationRoutes(router, am) organizationModule := implorganization.NewModule(implorganization.NewStore(testDB)) - user, apiErr := createTestUser(organizationModule, userModule) + user, apiErr := createTestUser(organizationModule, modules.User) if apiErr != nil { t.Fatalf("could not create a test user: %v", apiErr) } @@ -619,7 +608,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration testUser: user, qsHttpHandler: router, mockClickhouse: mockClickhouse, - userModule: userModule, + userModule: modules.User, } } diff --git a/pkg/query-service/utils/testutils.go b/pkg/query-service/utils/testutils.go index b7cf1fbcbca8..5cd7960786fc 100644 --- a/pkg/query-service/utils/testutils.go +++ b/pkg/query-service/utils/testutils.go @@ -66,6 +66,7 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s sqlmigration.NewUpdateQuickFiltersFactory(sqlStore), sqlmigration.NewAuthRefactorFactory(sqlStore), sqlmigration.NewMigratePATToFactorAPIKey(sqlStore), + sqlmigration.NewUpdateApiMonitoringFiltersFactory(sqlStore), ), ) if err != nil { diff --git a/pkg/querybuilder/agg_funcs.go b/pkg/querybuilder/agg_funcs.go index 80279c5a701f..4879d923cb66 100644 --- a/pkg/querybuilder/agg_funcs.go +++ b/pkg/querybuilder/agg_funcs.go @@ -13,6 +13,7 @@ type AggrFunc struct { FuncName string Aliases []valuer.String RequireArgs bool + Numeric bool FuncCombinator bool Rate bool MinArgs int @@ -46,156 +47,156 @@ var ( AggrFuncSum = AggrFunc{ Name: valuer.NewString("sum"), FuncName: "sum", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncSumIf = AggrFunc{ Name: valuer.NewString("sumif"), FuncName: "sumIf", Aliases: []valuer.String{valuer.NewString("sum_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncAvg = AggrFunc{ Name: valuer.NewString("avg"), FuncName: "avg", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncAvgIf = AggrFunc{ Name: valuer.NewString("avgif"), FuncName: "avgIf", Aliases: []valuer.String{valuer.NewString("avg_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncMin = AggrFunc{ Name: valuer.NewString("min"), FuncName: "min", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncMinIf = AggrFunc{ Name: valuer.NewString("minif"), FuncName: "minIf", Aliases: []valuer.String{valuer.NewString("min_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncMax = AggrFunc{ Name: valuer.NewString("max"), FuncName: "max", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncMaxIf = AggrFunc{ Name: valuer.NewString("maxif"), FuncName: "maxIf", Aliases: []valuer.String{valuer.NewString("max_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP05 = AggrFunc{ Name: valuer.NewString("p05"), FuncName: "quantile(0.05)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP05IF = AggrFunc{ Name: valuer.NewString("p05if"), FuncName: "quantileIf(0.05)", Aliases: []valuer.String{valuer.NewString("p05_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP10 = AggrFunc{ Name: valuer.NewString("p10"), FuncName: "quantile(0.10)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP10IF = AggrFunc{ Name: valuer.NewString("p10if"), FuncName: "quantileIf(0.10)", Aliases: []valuer.String{valuer.NewString("p10_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP20 = AggrFunc{ Name: valuer.NewString("p20"), FuncName: "quantile(0.20)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP20IF = AggrFunc{ Name: valuer.NewString("p20if"), FuncName: "quantileIf(0.20)", Aliases: []valuer.String{valuer.NewString("p20_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP25 = AggrFunc{ Name: valuer.NewString("p25"), FuncName: "quantile(0.25)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP25IF = AggrFunc{ Name: valuer.NewString("p25if"), FuncName: "quantileIf(0.25)", Aliases: []valuer.String{valuer.NewString("p25_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP50 = AggrFunc{ Name: valuer.NewString("p50"), FuncName: "quantile(0.50)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP50IF = AggrFunc{ Name: valuer.NewString("p50if"), FuncName: "quantileIf(0.50)", Aliases: []valuer.String{valuer.NewString("p50_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP75 = AggrFunc{ Name: valuer.NewString("p75"), FuncName: "quantile(0.75)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP75IF = AggrFunc{ Name: valuer.NewString("p75if"), FuncName: "quantileIf(0.75)", Aliases: []valuer.String{valuer.NewString("p75_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP90 = AggrFunc{ Name: valuer.NewString("p90"), FuncName: "quantile(0.90)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP90IF = AggrFunc{ Name: valuer.NewString("p90if"), FuncName: "quantileIf(0.90)", Aliases: []valuer.String{valuer.NewString("p90_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP95 = AggrFunc{ Name: valuer.NewString("p95"), FuncName: "quantile(0.95)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP95IF = AggrFunc{ Name: valuer.NewString("p95if"), FuncName: "quantileIf(0.95)", Aliases: []valuer.String{valuer.NewString("p95_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP99 = AggrFunc{ Name: valuer.NewString("p99"), FuncName: "quantile(0.99)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP99IF = AggrFunc{ Name: valuer.NewString("p99if"), FuncName: "quantileIf(0.99)", Aliases: []valuer.String{valuer.NewString("p99_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncP999 = AggrFunc{ Name: valuer.NewString("p999"), FuncName: "quantile(0.999)", - RequireArgs: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, MinArgs: 1, MaxArgs: 1, } AggrFuncP999IF = AggrFunc{ Name: valuer.NewString("p999if"), FuncName: "quantileIf(0.999)", Aliases: []valuer.String{valuer.NewString("p999_if")}, - RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, + RequireArgs: true, Numeric: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2, } AggrFuncRate = AggrFunc{ Name: valuer.NewString("rate"), @@ -211,22 +212,22 @@ var ( AggrFuncRateSum = AggrFunc{ Name: valuer.NewString("rate_sum"), FuncName: "sum", - RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1, } AggrFuncRateAvg = AggrFunc{ Name: valuer.NewString("rate_avg"), FuncName: "avg", - RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1, } AggrFuncRateMin = AggrFunc{ Name: valuer.NewString("rate_min"), FuncName: "min", - RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1, } AggrFuncRateMax = AggrFunc{ Name: valuer.NewString("rate_max"), FuncName: "max", - RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1, + RequireArgs: true, Numeric: true, Rate: true, MinArgs: 1, MaxArgs: 1, } ) diff --git a/pkg/querybuilder/agg_rewrite.go b/pkg/querybuilder/agg_rewrite.go index ccf0d990406a..1b661a388220 100644 --- a/pkg/querybuilder/agg_rewrite.go +++ b/pkg/querybuilder/agg_rewrite.go @@ -13,28 +13,42 @@ import ( "github.com/huandu/go-sqlbuilder" ) -type AggExprRewriterOptions struct { - FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey - FullTextColumn *telemetrytypes.TelemetryFieldKey - FieldMapper qbtypes.FieldMapper - ConditionBuilder qbtypes.ConditionBuilder - JsonBodyPrefix string - JsonKeyToKey qbtypes.JsonKeyToFieldFunc - RateInterval uint64 -} - type aggExprRewriter struct { - opts AggExprRewriterOptions + fullTextColumn *telemetrytypes.TelemetryFieldKey + fieldMapper qbtypes.FieldMapper + conditionBuilder qbtypes.ConditionBuilder + jsonBodyPrefix string + jsonKeyToKey qbtypes.JsonKeyToFieldFunc } -func NewAggExprRewriter(opts AggExprRewriterOptions) *aggExprRewriter { - return &aggExprRewriter{opts: opts} +var _ qbtypes.AggExprRewriter = (*aggExprRewriter)(nil) + +func NewAggExprRewriter( + fullTextColumn *telemetrytypes.TelemetryFieldKey, + fieldMapper qbtypes.FieldMapper, + conditionBuilder qbtypes.ConditionBuilder, + jsonBodyPrefix string, + jsonKeyToKey qbtypes.JsonKeyToFieldFunc, +) *aggExprRewriter { + return &aggExprRewriter{ + fullTextColumn: fullTextColumn, + fieldMapper: fieldMapper, + conditionBuilder: conditionBuilder, + jsonBodyPrefix: jsonBodyPrefix, + jsonKeyToKey: jsonKeyToKey, + } } // Rewrite parses the given aggregation expression, maps the column, and condition to // valid data source column and condition expression, and returns the rewritten expression // and the args if the parametric aggregation function is used. -func (r *aggExprRewriter) Rewrite(expr string) (string, []any, error) { +func (r *aggExprRewriter) Rewrite( + ctx context.Context, + expr string, + rateInterval uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) (string, []any, error) { + wrapped := fmt.Sprintf("SELECT %s", expr) p := chparser.NewParser(wrapped) stmts, err := p.ParseStmts() @@ -56,37 +70,36 @@ func (r *aggExprRewriter) Rewrite(expr string) (string, []any, error) { return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr) } - visitor := newExprVisitor(r.opts.FieldKeys, - r.opts.FullTextColumn, - r.opts.FieldMapper, - r.opts.ConditionBuilder, - r.opts.JsonBodyPrefix, - r.opts.JsonKeyToKey, + visitor := newExprVisitor(keys, + r.fullTextColumn, + r.fieldMapper, + r.conditionBuilder, + r.jsonBodyPrefix, + r.jsonKeyToKey, ) // Rewrite the first select item (our expression) if err := sel.SelectItems[0].Accept(visitor); err != nil { return "", nil, err } - // If nothing changed, return original - if !visitor.Modified { - return expr, nil, nil - } if visitor.isRate { - return fmt.Sprintf("%s/%d", sel.SelectItems[0].String(), r.opts.RateInterval), visitor.chArgs, nil + return fmt.Sprintf("%s/%d", sel.SelectItems[0].String(), rateInterval), visitor.chArgs, nil } return sel.SelectItems[0].String(), visitor.chArgs, nil } -// RewriteMultiple rewrites a slice of expressions. -func (r *aggExprRewriter) RewriteMultiple( +// RewriteMulti rewrites a slice of expressions. +func (r *aggExprRewriter) RewriteMulti( + ctx context.Context, exprs []string, + rateInterval uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, ) ([]string, [][]any, error) { out := make([]string, len(exprs)) var errs []error var chArgsList [][]any for i, e := range exprs { - w, chArgs, err := r.Rewrite(e) + w, chArgs, err := r.Rewrite(ctx, e, rateInterval, keys) if err != nil { errs = append(errs, err) out[i] = e @@ -158,6 +171,11 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { v.isRate = true } + dataType := telemetrytypes.FieldDataTypeString + if aggFunc.Numeric { + dataType = telemetrytypes.FieldDataTypeFloat64 + } + // Handle *If functions with predicate + values if aggFunc.FuncCombinator { // Map the predicate (last argument) @@ -190,11 +208,13 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { // Map each value column argument for i := 0; i < len(args)-1; i++ { origVal := args[i].String() - colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: origVal}, v.fieldKeys) + fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal) + expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType) if err != nil { return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal) } - newVal := colName + v.chArgs = append(v.chArgs, exprArgs...) + newVal := expr parsedVal, err := parseFragment(newVal) if err != nil { return err @@ -206,11 +226,13 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error { // Non-If functions: map every argument as a column/value for i, arg := range args { orig := arg.String() - colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: orig}, v.fieldKeys) + fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig) + expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType) if err != nil { return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", orig) } - newCol := colName + v.chArgs = append(v.chArgs, exprArgs...) + newCol := expr parsed, err := parseFragment(newCol) if err != nil { return err diff --git a/pkg/querybuilder/cte.go b/pkg/querybuilder/cte.go new file mode 100644 index 000000000000..e3da7828c24d --- /dev/null +++ b/pkg/querybuilder/cte.go @@ -0,0 +1,27 @@ +package querybuilder + +import ( + "strings" +) + +// combineCTEs takes any number of individual CTE fragments like +// +// "__resource_filter AS (...)", "__limit_cte AS (...)" +// +// and renders the final `WITH …` clause. +func CombineCTEs(ctes []string) string { + if len(ctes) == 0 { + return "" + } + return "WITH " + strings.Join(ctes, ", ") + " " +} + +// prependArgs ensures CTE arguments appear before main-query arguments +// in the final slice so their ordinal positions match the SQL string. +func PrependArgs(cteArgs [][]any, mainArgs []any) []any { + out := make([]any, 0, len(mainArgs)+len(cteArgs)) + for _, a := range cteArgs { // CTEs first, in declaration order + out = append(out, a...) + } + return append(out, mainArgs...) +} diff --git a/pkg/querybuilder/fallback_expr.go b/pkg/querybuilder/fallback_expr.go new file mode 100644 index 000000000000..3001cc3fcde9 --- /dev/null +++ b/pkg/querybuilder/fallback_expr.go @@ -0,0 +1,96 @@ +package querybuilder + +import ( + "context" + "fmt" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" + "golang.org/x/exp/maps" +) + +func CollisionHandledFinalExpr( + ctx context.Context, + field *telemetrytypes.TelemetryFieldKey, + fm qbtypes.FieldMapper, + cb qbtypes.ConditionBuilder, + keys map[string][]*telemetrytypes.TelemetryFieldKey, + requiredDataType telemetrytypes.FieldDataType, +) (string, []any, error) { + + if requiredDataType != telemetrytypes.FieldDataTypeString && + requiredDataType != telemetrytypes.FieldDataTypeFloat64 { + return "", nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported data type %s", requiredDataType) + } + + var dummyValue any + if requiredDataType == telemetrytypes.FieldDataTypeFloat64 { + dummyValue = 0.0 + } else { + dummyValue = "" + } + + var stmts []string + var allArgs []any + + addCondition := func(key *telemetrytypes.TelemetryFieldKey) error { + sb := sqlbuilder.NewSelectBuilder() + condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb) + if err != nil { + return err + } + sb.Where(condition) + + expr, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse) + expr = strings.TrimPrefix(expr, "WHERE ") + stmts = append(stmts, expr) + allArgs = append(allArgs, args...) + return nil + } + + colName, err := fm.FieldFor(ctx, field) + if errors.Is(err, qbtypes.ErrColumnNotFound) { + // the key didn't have the right context to be added to the query + // we try to use the context we know of + keysForField := keys[field.Name] + if len(keysForField) == 0 { + // - the context is not provided + // - there are not keys for the field + // - it is not a static field + // - the next best thing to do is see if there is a typo + // and suggest a correction + correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys)) + if found { + // we found a close match, in the error message send the suggestion + return "", nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction) + } else { + // not even a close match, return an error + return "", nil, err + } + } else { + for _, key := range keysForField { + err := addCondition(key) + if err != nil { + return "", nil, err + } + colName, _ = fm.FieldFor(ctx, key) + colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(key, dummyValue, colName) + stmts = append(stmts, colName) + } + } + } else { + err := addCondition(field) + if err != nil { + return "", nil, err + } + colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName) + stmts = append(stmts, colName) + } + + multiIfStmt := fmt.Sprintf("multiIf(%s, NULL)", strings.Join(stmts, ", ")) + + return multiIfStmt, allArgs, nil +} diff --git a/pkg/querybuilder/query_to_keys.go b/pkg/querybuilder/query_to_keys.go index ad97d10bf924..bdcb4c52cf48 100644 --- a/pkg/querybuilder/query_to_keys.go +++ b/pkg/querybuilder/query_to_keys.go @@ -25,7 +25,7 @@ import ( // FieldDataType: telemetrytypes.FieldDataTypeUnspecified, // }, // } -func QueryStringToKeysSelectors(query string) ([]*telemetrytypes.FieldKeySelector, error) { +func QueryStringToKeysSelectors(query string) []*telemetrytypes.FieldKeySelector { lexer := grammar.NewFilterQueryLexer(antlr.NewInputStream(query)) keys := []*telemetrytypes.FieldKeySelector{} for { @@ -45,5 +45,5 @@ func QueryStringToKeysSelectors(query string) ([]*telemetrytypes.FieldKeySelecto } } - return keys, nil + return keys } diff --git a/pkg/querybuilder/query_to_keys_test.go b/pkg/querybuilder/query_to_keys_test.go index 8bf065f01036..0a453088d15e 100644 --- a/pkg/querybuilder/query_to_keys_test.go +++ b/pkg/querybuilder/query_to_keys_test.go @@ -76,10 +76,7 @@ func TestQueryToKeys(t *testing.T) { } for _, testCase := range testCases { - keys, err := QueryStringToKeysSelectors(testCase.query) - if err != nil { - t.Fatalf("Error: %v", err) - } + keys := QueryStringToKeysSelectors(testCase.query) if len(keys) != len(testCase.expectedKeys) { t.Fatalf("Expected %d keys, got %d", len(testCase.expectedKeys), len(keys)) } diff --git a/pkg/querybuilder/resourcefilter/condition_builder.go b/pkg/querybuilder/resourcefilter/condition_builder.go new file mode 100644 index 000000000000..779748e08fc8 --- /dev/null +++ b/pkg/querybuilder/resourcefilter/condition_builder.go @@ -0,0 +1,188 @@ +package resourcefilter + +import ( + "context" + "fmt" + + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" +) + +type defaultConditionBuilder struct { + fm qbtypes.FieldMapper +} + +var _ qbtypes.ConditionBuilder = (*defaultConditionBuilder)(nil) + +func NewConditionBuilder(fm qbtypes.FieldMapper) *defaultConditionBuilder { + return &defaultConditionBuilder{fm: fm} +} + +func valueForIndexFilter(key *telemetrytypes.TelemetryFieldKey, value any) any { + switch v := value.(type) { + case string: + return fmt.Sprintf(`%%%s%%%s%%`, key.Name, v) + case []any: + values := make([]string, 0, len(v)) + for _, v := range v { + values = append(values, fmt.Sprintf(`%%%s%%%s%%`, key.Name, v)) + } + return values + } + return value +} + +func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any { + return fmt.Sprintf(`%%%s%%`, key.Name) +} + +func (b *defaultConditionBuilder) ConditionFor( + ctx context.Context, + key *telemetrytypes.TelemetryFieldKey, + op qbtypes.FilterOperator, + value any, + sb *sqlbuilder.SelectBuilder, +) (string, error) { + + if key.FieldContext != telemetrytypes.FieldContextResource { + return "", nil + } + + column, err := b.fm.ColumnFor(ctx, key) + if err != nil { + return "", err + } + + keyIdxFilter := sb.Like(column.Name, keyIndexFilter(key)) + valueForIndexFilter := valueForIndexFilter(key, value) + + fieldName, err := b.fm.FieldFor(ctx, key) + if err != nil { + return "", err + } + + switch op { + case qbtypes.FilterOperatorEqual: + return sb.And( + sb.E(fieldName, value), + keyIdxFilter, + sb.Like(column.Name, valueForIndexFilter), + ), nil + case qbtypes.FilterOperatorNotEqual: + return sb.And( + sb.NE(fieldName, value), + sb.NotLike(column.Name, valueForIndexFilter), + ), nil + case qbtypes.FilterOperatorGreaterThan: + return sb.And(sb.GT(fieldName, value), keyIdxFilter), nil + case qbtypes.FilterOperatorGreaterThanOrEq: + return sb.And(sb.GE(fieldName, value), keyIdxFilter), nil + case qbtypes.FilterOperatorLessThan: + return sb.And(sb.LT(fieldName, value), keyIdxFilter), nil + case qbtypes.FilterOperatorLessThanOrEq: + return sb.And(sb.LE(fieldName, value), keyIdxFilter), nil + + case qbtypes.FilterOperatorLike, qbtypes.FilterOperatorILike: + return sb.And( + sb.ILike(fieldName, value), + keyIdxFilter, + sb.ILike(column.Name, valueForIndexFilter), + ), nil + case qbtypes.FilterOperatorNotLike, qbtypes.FilterOperatorNotILike: + return sb.And( + sb.NotILike(fieldName, value), + sb.NotILike(column.Name, valueForIndexFilter), + ), nil + + case qbtypes.FilterOperatorBetween: + values, ok := value.([]any) + if !ok { + return "", qbtypes.ErrBetweenValues + } + if len(values) != 2 { + return "", qbtypes.ErrBetweenValues + } + return sb.And(keyIdxFilter, sb.Between(fieldName, values[0], values[1])), nil + case qbtypes.FilterOperatorNotBetween: + values, ok := value.([]any) + if !ok { + return "", qbtypes.ErrBetweenValues + } + if len(values) != 2 { + return "", qbtypes.ErrBetweenValues + } + return sb.And(sb.NotBetween(fieldName, values[0], values[1])), nil + + case qbtypes.FilterOperatorIn: + values, ok := value.([]any) + if !ok { + return "", qbtypes.ErrInValues + } + inConditions := make([]string, 0, len(values)) + for _, v := range values { + inConditions = append(inConditions, sb.E(fieldName, v)) + } + mainCondition := sb.Or(inConditions...) + valConditions := make([]string, 0, len(values)) + if valuesForIndexFilter, ok := valueForIndexFilter.([]string); ok { + for _, v := range valuesForIndexFilter { + valConditions = append(valConditions, sb.Like(column.Name, v)) + } + } + mainCondition = sb.And(mainCondition, keyIdxFilter, sb.Or(valConditions...)) + + return mainCondition, nil + case qbtypes.FilterOperatorNotIn: + values, ok := value.([]any) + if !ok { + return "", qbtypes.ErrInValues + } + notInConditions := make([]string, 0, len(values)) + for _, v := range values { + notInConditions = append(notInConditions, sb.NE(fieldName, v)) + } + mainCondition := sb.And(notInConditions...) + valConditions := make([]string, 0, len(values)) + if valuesForIndexFilter, ok := valueForIndexFilter.([]string); ok { + for _, v := range valuesForIndexFilter { + valConditions = append(valConditions, sb.NotLike(column.Name, v)) + } + } + mainCondition = sb.And(mainCondition, sb.And(valConditions...)) + return mainCondition, nil + + case qbtypes.FilterOperatorExists: + return sb.And( + sb.E(fmt.Sprintf("simpleJSONHas(%s, '%s')", column.Name, key.Name), true), + keyIdxFilter, + ), nil + case qbtypes.FilterOperatorNotExists: + return sb.And( + sb.NE(fmt.Sprintf("simpleJSONHas(%s, '%s')", column.Name, key.Name), true), + ), nil + + case qbtypes.FilterOperatorRegexp: + return sb.And( + fmt.Sprintf("match(%s, %s)", fieldName, sb.Var(value)), + keyIdxFilter, + ), nil + case qbtypes.FilterOperatorNotRegexp: + return sb.And( + fmt.Sprintf("NOT match(%s, %s)", fieldName, sb.Var(value)), + ), nil + + case qbtypes.FilterOperatorContains: + return sb.And( + sb.ILike(fieldName, fmt.Sprintf(`%%%s%%`, value)), + keyIdxFilter, + sb.ILike(column.Name, valueForIndexFilter), + ), nil + case qbtypes.FilterOperatorNotContains: + return sb.And( + sb.NotILike(fieldName, fmt.Sprintf(`%%%s%%`, value)), + sb.NotILike(column.Name, valueForIndexFilter), + ), nil + } + return "", qbtypes.ErrUnsupportedOperator +} diff --git a/pkg/querybuilder/resourcefilter/condition_builder_test.go b/pkg/querybuilder/resourcefilter/condition_builder_test.go new file mode 100644 index 000000000000..b59cf9316fb1 --- /dev/null +++ b/pkg/querybuilder/resourcefilter/condition_builder_test.go @@ -0,0 +1,154 @@ +package resourcefilter + +import ( + "context" + "testing" + + "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestConditionBuilder(t *testing.T) { + + testCases := []struct { + name string + key *telemetrytypes.TelemetryFieldKey + op querybuildertypesv5.FilterOperator + value any + expected string + expectedArgs []any + expectedErr error + }{ + { + name: "string_equal", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorEqual, + value: "watch", + expected: "simpleJSONExtractString(labels, 'k8s.namespace.name') = ? AND labels LIKE ? AND labels LIKE ?", + expectedArgs: []any{"watch", "%k8s.namespace.name%", `%k8s.namespace.name%watch%`}, + }, + { + name: "string_not_equal", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorNotEqual, + value: "redis", + expected: "simpleJSONExtractString(labels, 'k8s.namespace.name') <> ? AND labels NOT LIKE ?", + expectedArgs: []any{"redis", `%k8s.namespace.name%redis%`}, + }, + { + name: "string_like", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorLike, + value: "_mango%", + expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)", + expectedArgs: []any{"_mango%", "%k8s.namespace.name%", `%k8s.namespace.name%_mango%%`}, + }, + { + name: "string_not_like", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorNotLike, + value: "_mango%", + expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) NOT LIKE LOWER(?) AND LOWER(labels) NOT LIKE LOWER(?)", + expectedArgs: []any{"_mango%", `%k8s.namespace.name%_mango%%`}, + }, + { + name: "string_contains", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorContains, + value: "banana", + expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) LIKE LOWER(?) AND labels LIKE ? AND LOWER(labels) LIKE LOWER(?)", + expectedArgs: []any{"%banana%", "%k8s.namespace.name%", `%k8s.namespace.name%banana%`}, + }, + { + name: "string_not_contains", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorNotContains, + value: "banana", + expected: "LOWER(simpleJSONExtractString(labels, 'k8s.namespace.name')) NOT LIKE LOWER(?) AND LOWER(labels) NOT LIKE LOWER(?)", + expectedArgs: []any{"%banana%", `%k8s.namespace.name%banana%`}, + }, + { + name: "string_in", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorIn, + value: []any{"watch", "redis"}, + expected: "(simpleJSONExtractString(labels, 'k8s.namespace.name') = ? OR simpleJSONExtractString(labels, 'k8s.namespace.name') = ?) AND labels LIKE ? AND (labels LIKE ? OR labels LIKE ?)", + expectedArgs: []any{"watch", "redis", "%k8s.namespace.name%", "%k8s.namespace.name%watch%", "%k8s.namespace.name%redis%"}, + }, + { + name: "string_not_in", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorNotIn, + value: []any{"watch", "redis"}, + expected: "(simpleJSONExtractString(labels, 'k8s.namespace.name') <> ? AND simpleJSONExtractString(labels, 'k8s.namespace.name') <> ?) AND (labels NOT LIKE ? AND labels NOT LIKE ?)", + expectedArgs: []any{"watch", "redis", "%k8s.namespace.name%watch%", "%k8s.namespace.name%redis%"}, + }, + { + name: "string_exists", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorExists, + expected: "simpleJSONHas(labels, 'k8s.namespace.name') = ? AND labels LIKE ?", + expectedArgs: []any{true, "%k8s.namespace.name%"}, + }, + { + name: "string_not_exists", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorNotExists, + expected: "simpleJSONHas(labels, 'k8s.namespace.name') <> ?", + expectedArgs: []any{true}, + }, + } + + fm := NewFieldMapper() + conditionBuilder := NewConditionBuilder(fm) + + for _, tc := range testCases { + sb := sqlbuilder.NewSelectBuilder() + t.Run(tc.name, func(t *testing.T) { + cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb) + sb.Where(cond) + + if tc.expectedErr != nil { + assert.Error(t, err) + } else { + require.NoError(t, err) + sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse) + assert.Contains(t, sql, tc.expected) + assert.Equal(t, tc.expectedArgs, args) + } + }) + } +} diff --git a/pkg/querybuilder/resourcefilter/field_mapper.go b/pkg/querybuilder/resourcefilter/field_mapper.go new file mode 100644 index 000000000000..73e0e7dd3158 --- /dev/null +++ b/pkg/querybuilder/resourcefilter/field_mapper.go @@ -0,0 +1,72 @@ +package resourcefilter + +import ( + "context" + "fmt" + + schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +var ( + resourceColumns = map[string]*schema.Column{ + "labels": {Name: "labels", Type: schema.ColumnTypeString}, + "fingerprint": {Name: "fingerprint", Type: schema.ColumnTypeString}, + "seen_at_ts_bucket_start": {Name: "seen_at_ts_bucket_start", Type: schema.ColumnTypeInt64}, + } +) + +type defaultFieldMapper struct{} + +var _ qbtypes.FieldMapper = (*defaultFieldMapper)(nil) + +func NewFieldMapper() *defaultFieldMapper { + return &defaultFieldMapper{} +} + +func (m *defaultFieldMapper) getColumn( + _ context.Context, + key *telemetrytypes.TelemetryFieldKey, +) (*schema.Column, error) { + if key.FieldContext == telemetrytypes.FieldContextResource { + return resourceColumns["labels"], nil + } + if col, ok := resourceColumns[key.Name]; ok { + return col, nil + } + return nil, qbtypes.ErrColumnNotFound +} + +func (m *defaultFieldMapper) ColumnFor( + ctx context.Context, + key *telemetrytypes.TelemetryFieldKey, +) (*schema.Column, error) { + return m.getColumn(ctx, key) +} + +func (m *defaultFieldMapper) FieldFor( + ctx context.Context, + key *telemetrytypes.TelemetryFieldKey, +) (string, error) { + column, err := m.getColumn(ctx, key) + if err != nil { + return "", err + } + if key.FieldContext == telemetrytypes.FieldContextResource { + return fmt.Sprintf("simpleJSONExtractString(%s, '%s')", column.Name, key.Name), nil + } + return column.Name, nil +} + +func (m *defaultFieldMapper) ColumnExpressionFor( + ctx context.Context, + key *telemetrytypes.TelemetryFieldKey, + _ map[string][]*telemetrytypes.TelemetryFieldKey, +) (string, error) { + colName, err := m.FieldFor(ctx, key) + if err != nil { + return "", err + } + return fmt.Sprintf("%s AS `%s`", colName, key.Name), nil +} diff --git a/pkg/querybuilder/resourcefilter/statement_builder.go b/pkg/querybuilder/resourcefilter/statement_builder.go new file mode 100644 index 000000000000..559c552f6629 --- /dev/null +++ b/pkg/querybuilder/resourcefilter/statement_builder.go @@ -0,0 +1,167 @@ +package resourcefilter + +import ( + "context" + "fmt" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/querybuilder" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" +) + +var ( + ErrUnsupportedSignal = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported signal type") +) + +// Configuration for different signal types +type signalConfig struct { + dbName string + tableName string +} + +var signalConfigs = map[telemetrytypes.Signal]signalConfig{ + telemetrytypes.SignalTraces: { + dbName: TracesDBName, + tableName: TraceResourceV3TableName, + }, + telemetrytypes.SignalLogs: { + dbName: LogsDBName, + tableName: LogsResourceV2TableName, + }, +} + +// Generic resource filter statement builder +type resourceFilterStatementBuilder[T any] struct { + fieldMapper qbtypes.FieldMapper + conditionBuilder qbtypes.ConditionBuilder + metadataStore telemetrytypes.MetadataStore + signal telemetrytypes.Signal +} + +// Ensure interface compliance at compile time +var ( + _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*resourceFilterStatementBuilder[qbtypes.TraceAggregation])(nil) + _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*resourceFilterStatementBuilder[qbtypes.LogAggregation])(nil) +) + +// Constructor functions +func NewTraceResourceFilterStatementBuilder( + fieldMapper qbtypes.FieldMapper, + conditionBuilder qbtypes.ConditionBuilder, + metadataStore telemetrytypes.MetadataStore, +) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] { + return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{ + fieldMapper: fieldMapper, + conditionBuilder: conditionBuilder, + metadataStore: metadataStore, + signal: telemetrytypes.SignalTraces, + } +} + +func NewLogResourceFilterStatementBuilder( + fieldMapper qbtypes.FieldMapper, + conditionBuilder qbtypes.ConditionBuilder, + metadataStore telemetrytypes.MetadataStore, +) *resourceFilterStatementBuilder[qbtypes.LogAggregation] { + return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{ + fieldMapper: fieldMapper, + conditionBuilder: conditionBuilder, + metadataStore: metadataStore, + signal: telemetrytypes.SignalLogs, + } +} + +func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryBuilderQuery[T]) []*telemetrytypes.FieldKeySelector { + var keySelectors []*telemetrytypes.FieldKeySelector + + if query.Filter != nil && query.Filter.Expression != "" { + whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression) + keySelectors = append(keySelectors, whereClauseSelectors...) + } + + for idx := range keySelectors { + keySelectors[idx].Signal = b.signal + } + + return keySelectors +} + +// Build builds a SQL query based on the given parameters +func (b *resourceFilterStatementBuilder[T]) Build( + ctx context.Context, + start uint64, + end uint64, + requestType qbtypes.RequestType, + query qbtypes.QueryBuilderQuery[T], +) (*qbtypes.Statement, error) { + config, exists := signalConfigs[b.signal] + if !exists { + return nil, fmt.Errorf("%w: %s", ErrUnsupportedSignal, b.signal) + } + + q := sqlbuilder.NewSelectBuilder() + q.Select("fingerprint") + q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName)) + + keySelectors := b.getKeySelectors(query) + keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors) + if err != nil { + return nil, err + } + + if err := b.addConditions(ctx, q, start, end, query, keys); err != nil { + return nil, err + } + + stmt, args := q.BuildWithFlavor(sqlbuilder.ClickHouse) + return &qbtypes.Statement{ + Query: stmt, + Args: args, + }, nil +} + +// addConditions adds both filter and time conditions to the query +func (b *resourceFilterStatementBuilder[T]) addConditions( + _ context.Context, + sb *sqlbuilder.SelectBuilder, + start, end uint64, + query qbtypes.QueryBuilderQuery[T], + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) error { + // Add filter condition if present + if query.Filter != nil && query.Filter.Expression != "" { + + // warnings would be encountered as part of the main condition already + filterWhereClause, _, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{ + FieldMapper: b.fieldMapper, + ConditionBuilder: b.conditionBuilder, + FieldKeys: keys, + }) + + if err != nil { + return err + } + if filterWhereClause != nil { + sb.AddWhereClause(filterWhereClause) + } + } + + // Add time filter + b.addTimeFilter(sb, start, end) + return nil +} + +// addTimeFilter adds time-based filtering conditions +func (b *resourceFilterStatementBuilder[T]) addTimeFilter(sb *sqlbuilder.SelectBuilder, start, end uint64) { + // Convert nanoseconds to seconds and adjust start bucket + + startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment + endBucket := end / querybuilder.NsToSeconds + + sb.Where( + sb.GE("seen_at_ts_bucket_start", startBucket), + sb.LE("seen_at_ts_bucket_start", endBucket), + ) +} diff --git a/pkg/querybuilder/resourcefilter/tables.go b/pkg/querybuilder/resourcefilter/tables.go new file mode 100644 index 000000000000..bcc8133341ee --- /dev/null +++ b/pkg/querybuilder/resourcefilter/tables.go @@ -0,0 +1,8 @@ +package resourcefilter + +const ( + TracesDBName = "signoz_traces" + TraceResourceV3TableName = "distributed_traces_v3_resource" + LogsDBName = "signoz_logs" + LogsResourceV2TableName = "distributed_logs_v2_resource" +) diff --git a/pkg/querybuilder/time.go b/pkg/querybuilder/time.go new file mode 100644 index 000000000000..02293da81e16 --- /dev/null +++ b/pkg/querybuilder/time.go @@ -0,0 +1,23 @@ +package querybuilder + +import "math" + +const ( + NsToSeconds = 1000000000 + BucketAdjustment = 1800 // 30 minutes +) + +// ToNanoSecs takes epoch and returns it in ns +func ToNanoSecs(epoch uint64) uint64 { + temp := epoch + count := 0 + if epoch == 0 { + count = 1 + } else { + for epoch != 0 { + epoch /= 10 + count++ + } + } + return temp * uint64(math.Pow(10, float64(19-count))) +} diff --git a/pkg/querybuilder/time_test.go b/pkg/querybuilder/time_test.go new file mode 100644 index 000000000000..7617d93fa478 --- /dev/null +++ b/pkg/querybuilder/time_test.go @@ -0,0 +1,62 @@ +package querybuilder + +import "testing" + +func TestToNanoSecs(t *testing.T) { + tests := []struct { + name string + epoch uint64 + expected uint64 + }{ + { + name: "10-digit Unix timestamp (seconds) - 2023-01-01 00:00:00 UTC", + epoch: 1672531200, // January 1, 2023 00:00:00 UTC + expected: 1672531200000000000, // 1672531200 * 10^9 + }, + { + name: "13-digit Unix timestamp (milliseconds) - 2023-01-01 00:00:00 UTC", + epoch: 1672531200000, // January 1, 2023 00:00:00.000 UTC + expected: 1672531200000000000, // 1672531200000 * 10^6 + }, + { + name: "16-digit Unix timestamp (microseconds) - 2023-01-01 00:00:00 UTC", + epoch: 1672531200000000, // January 1, 2023 00:00:00.000000 UTC + expected: 1672531200000000000, // 1672531200000000 * 10^3 + }, + { + name: "19-digit Unix timestamp (nanoseconds) - 2023-01-01 00:00:00 UTC", + epoch: 1672531200000000000, // January 1, 2023 00:00:00.000000000 UTC + expected: 1672531200000000000, // 1672531200000000000 * 10^0 + }, + { + name: "Unix epoch start - 1970-01-01 00:00:00 UTC", + epoch: 0, + expected: 0, + }, + { + name: "Recent timestamp - 2024-05-25 12:00:00 UTC", + epoch: 1716638400, // May 25, 2024 12:00:00 UTC + expected: 1716638400000000000, // 1716638400 * 10^9 + }, + + { + name: "Large valid timestamp - 2025-05-15 10:30:45 UTC", + epoch: 1747204245, // May 15, 2025 10:30:45 UTC + expected: 1747204245000000000, // 1747204245 * 10^9 + }, + { + name: "18-digit microsecond timestamp", + epoch: 1672531200123456, // Jan 1, 2023 with microseconds + expected: 1672531200123456000, // 1672531200123456 * 10^3 + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ToNanoSecs(tt.epoch) + if result != tt.expected { + t.Errorf("ToNanoSecs(%d) = %d, want %d", tt.epoch, result, tt.expected) + } + }) + } +} diff --git a/pkg/ruler/rulestore/sqlrulestore/maintenance.go b/pkg/ruler/rulestore/sqlrulestore/maintenance.go index c6bb000f0ac2..3282af5e6a2c 100644 --- a/pkg/ruler/rulestore/sqlrulestore/maintenance.go +++ b/pkg/ruler/rulestore/sqlrulestore/maintenance.go @@ -9,7 +9,6 @@ import ( "github.com/SigNoz/signoz/pkg/types/authtypes" ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" "github.com/SigNoz/signoz/pkg/valuer" - "go.uber.org/zap" ) type maintenance struct { @@ -30,7 +29,6 @@ func (r *maintenance) GetAllPlannedMaintenance(ctx context.Context, orgID string Where("org_id = ?", orgID). Scan(ctx) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } @@ -137,7 +135,6 @@ func (r *maintenance) DeletePlannedMaintenance(ctx context.Context, id valuer.UU Where("id = ?", id.StringValue()). Exec(ctx) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return err } @@ -221,7 +218,6 @@ func (r *maintenance) EditPlannedMaintenance(ctx context.Context, maintenance ru }) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return err } diff --git a/pkg/ruler/rulestore/sqlrulestore/rule.go b/pkg/ruler/rulestore/sqlrulestore/rule.go index 4414897ede84..735cce6b2082 100644 --- a/pkg/ruler/rulestore/sqlrulestore/rule.go +++ b/pkg/ruler/rulestore/sqlrulestore/rule.go @@ -8,7 +8,6 @@ import ( ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" "github.com/SigNoz/signoz/pkg/valuer" "github.com/jmoiron/sqlx" - "go.uber.org/zap" ) type rule struct { @@ -86,7 +85,6 @@ func (r *rule) GetStoredRules(ctx context.Context, orgID string) ([]*ruletypes.R Where("org_id = ?", orgID). Scan(ctx) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return rules, err } @@ -102,7 +100,6 @@ func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Ru Where("id = ?", id.StringValue()). Scan(ctx) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } return rule, nil @@ -117,7 +114,6 @@ func (r *rule) GetRuleUUID(ctx context.Context, ruleID int) (*ruletypes.RuleHist Where("rule_id = ?", ruleID). Scan(ctx) if err != nil { - zap.L().Error("Error in processing sql query", zap.Error(err)) return nil, err } return ruleHistory, nil diff --git a/pkg/signoz/handler.go b/pkg/signoz/handler.go index 3196806106aa..2b1512b63a58 100644 --- a/pkg/signoz/handler.go +++ b/pkg/signoz/handler.go @@ -9,9 +9,12 @@ import ( "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/preference" "github.com/SigNoz/signoz/pkg/modules/preference/implpreference" + "github.com/SigNoz/signoz/pkg/modules/quickfilter" + "github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" "github.com/SigNoz/signoz/pkg/modules/user" + "github.com/SigNoz/signoz/pkg/modules/user/impluser" ) type Handlers struct { @@ -21,15 +24,17 @@ type Handlers struct { SavedView savedview.Handler Apdex apdex.Handler Dashboard dashboard.Handler + QuickFilter quickfilter.Handler } -func NewHandlers(modules Modules, user user.Handler) Handlers { +func NewHandlers(modules Modules) Handlers { return Handlers{ Organization: implorganization.NewHandler(modules.Organization), Preference: implpreference.NewHandler(modules.Preference), - User: user, + User: impluser.NewHandler(modules.User), SavedView: implsavedview.NewHandler(modules.SavedView), Apdex: implapdex.NewHandler(modules.Apdex), Dashboard: impldashboard.NewHandler(modules.Dashboard), + QuickFilter: implquickfilter.NewHandler(modules.QuickFilter), } } diff --git a/pkg/signoz/handler_test.go b/pkg/signoz/handler_test.go new file mode 100644 index 000000000000..b5ebd97d3e14 --- /dev/null +++ b/pkg/signoz/handler_test.go @@ -0,0 +1,33 @@ +package signoz + +import ( + "reflect" + "testing" + "time" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/SigNoz/signoz/pkg/emailing/emailingtest" + "github.com/SigNoz/signoz/pkg/factory/factorytest" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/stretchr/testify/assert" +) + +// This is a test to ensure that all fields of the handlers are initialized. +// It also helps us catch these errors at compile time instead of runtime. +func TestNewHandlers(t *testing.T) { + sqlstore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual) + jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) + emailing := emailingtest.New() + providerSettings := factorytest.NewSettings() + + modules := NewModules(sqlstore, jwt, emailing, providerSettings) + handlers := NewHandlers(modules) + + reflectVal := reflect.ValueOf(handlers) + for i := 0; i < reflectVal.NumField(); i++ { + f := reflectVal.Field(i) + assert.False(t, f.IsZero(), "%s handler has not been initialized", reflectVal.Type().Field(i).Name) + } +} diff --git a/pkg/signoz/module.go b/pkg/signoz/module.go index 7fa06b54c2b2..0abdff2f8562 100644 --- a/pkg/signoz/module.go +++ b/pkg/signoz/module.go @@ -1,6 +1,8 @@ package signoz import ( + "github.com/SigNoz/signoz/pkg/emailing" + "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/modules/apdex" "github.com/SigNoz/signoz/pkg/modules/apdex/implapdex" "github.com/SigNoz/signoz/pkg/modules/dashboard" @@ -9,10 +11,14 @@ import ( "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/preference" "github.com/SigNoz/signoz/pkg/modules/preference/implpreference" + "github.com/SigNoz/signoz/pkg/modules/quickfilter" + "github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" "github.com/SigNoz/signoz/pkg/modules/user" + "github.com/SigNoz/signoz/pkg/modules/user/impluser" "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/preferencetypes" ) @@ -23,15 +29,17 @@ type Modules struct { SavedView savedview.Module Apdex apdex.Module Dashboard dashboard.Module + QuickFilter quickfilter.Module } -func NewModules(sqlstore sqlstore.SQLStore, user user.Module) Modules { +func NewModules(sqlstore sqlstore.SQLStore, jwt *authtypes.JWT, emailing emailing.Emailing, providerSettings factory.ProviderSettings) Modules { return Modules{ Organization: implorganization.NewModule(implorganization.NewStore(sqlstore)), Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewDefaultPreferenceMap()), - User: user, SavedView: implsavedview.NewModule(sqlstore), Apdex: implapdex.NewModule(sqlstore), Dashboard: impldashboard.NewModule(sqlstore), + User: impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), jwt, emailing, providerSettings), + QuickFilter: implquickfilter.NewModule(implquickfilter.NewStore(sqlstore)), } } diff --git a/pkg/signoz/module_test.go b/pkg/signoz/module_test.go new file mode 100644 index 000000000000..67f6aa23b6f8 --- /dev/null +++ b/pkg/signoz/module_test.go @@ -0,0 +1,31 @@ +package signoz + +import ( + "reflect" + "testing" + "time" + + "github.com/DATA-DOG/go-sqlmock" + "github.com/SigNoz/signoz/pkg/emailing/emailingtest" + "github.com/SigNoz/signoz/pkg/factory/factorytest" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest" + "github.com/SigNoz/signoz/pkg/types/authtypes" + "github.com/stretchr/testify/assert" +) + +// This is a test to ensure that all fields of the modules are initialized. +// It also helps us catch these errors at compile time instead of runtime. +func TestNewModules(t *testing.T) { + sqlstore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual) + jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) + emailing := emailingtest.New() + providerSettings := factorytest.NewSettings() + modules := NewModules(sqlstore, jwt, emailing, providerSettings) + + reflectVal := reflect.ValueOf(modules) + for i := 0; i < reflectVal.NumField(); i++ { + f := reflectVal.Field(i) + assert.False(t, f.IsZero(), "%s module has not been initialized", reflectVal.Type().Field(i).Name) + } +} diff --git a/pkg/signoz/provider.go b/pkg/signoz/provider.go index a1aed180c96c..831d2e2a62a3 100644 --- a/pkg/signoz/provider.go +++ b/pkg/signoz/provider.go @@ -80,7 +80,9 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM sqlmigration.NewCreateQuickFiltersFactory(sqlstore), sqlmigration.NewUpdateQuickFiltersFactory(sqlstore), sqlmigration.NewAuthRefactorFactory(sqlstore), + sqlmigration.NewUpdateLicenseFactory(sqlstore), sqlmigration.NewMigratePATToFactorAPIKey(sqlstore), + sqlmigration.NewUpdateApiMonitoringFiltersFactory(sqlstore), ) } diff --git a/pkg/signoz/signoz.go b/pkg/signoz/signoz.go index d72a1c18cd5c..748b4af9b477 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go @@ -8,12 +8,13 @@ import ( "github.com/SigNoz/signoz/pkg/emailing" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/instrumentation" - "github.com/SigNoz/signoz/pkg/modules/user" + "github.com/SigNoz/signoz/pkg/licensing" "github.com/SigNoz/signoz/pkg/prometheus" "github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigrator" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/telemetrystore" + "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" "github.com/SigNoz/signoz/pkg/zeus" @@ -30,6 +31,7 @@ type SigNoz struct { Prometheus prometheus.Prometheus Alertmanager alertmanager.Alertmanager Zeus zeus.Zeus + Licensing licensing.Licensing Emailing emailing.Emailing Modules Modules Handlers Handlers @@ -38,15 +40,16 @@ type SigNoz struct { func New( ctx context.Context, config Config, + jwt *authtypes.JWT, zeusConfig zeus.Config, zeusProviderFactory factory.ProviderFactory[zeus.Zeus, zeus.Config], + licenseConfig licensing.Config, + licenseProviderFactoryCb func(sqlstore.SQLStore, zeus.Zeus) factory.ProviderFactory[licensing.Licensing, licensing.Config], emailingProviderFactories factory.NamedMap[factory.ProviderFactory[emailing.Emailing, emailing.Config]], cacheProviderFactories factory.NamedMap[factory.ProviderFactory[cache.Cache, cache.Config]], webProviderFactories factory.NamedMap[factory.ProviderFactory[web.Web, web.Config]], sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]], telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]], - userModuleFactory func(sqlstore sqlstore.SQLStore, emailing emailing.Emailing, providerSettings factory.ProviderSettings) user.Module, - userHandlerFactory func(user.Module) user.Handler, ) (*SigNoz, error) { // Initialize instrumentation instrumentation, err := instrumentation.New(ctx, config.Instrumentation, version.Info, "signoz") @@ -171,19 +174,27 @@ func New( return nil, err } - userModule := userModuleFactory(sqlstore, emailing, providerSettings) - userHandler := userHandlerFactory(userModule) + licensingProviderFactory := licenseProviderFactoryCb(sqlstore, zeus) + licensing, err := licensingProviderFactory.New( + ctx, + providerSettings, + licenseConfig, + ) + if err != nil { + return nil, err + } // Initialize all modules - modules := NewModules(sqlstore, userModule) + modules := NewModules(sqlstore, jwt, emailing, providerSettings) // Initialize all handlers for the modules - handlers := NewHandlers(modules, userHandler) + handlers := NewHandlers(modules) registry, err := factory.NewRegistry( instrumentation.Logger(), factory.NewNamedService(factory.MustNewName("instrumentation"), instrumentation), factory.NewNamedService(factory.MustNewName("alertmanager"), alertmanager), + factory.NewNamedService(factory.MustNewName("licensing"), licensing), ) if err != nil { return nil, err @@ -199,6 +210,7 @@ func New( Prometheus: prometheus, Alertmanager: alertmanager, Zeus: zeus, + Licensing: licensing, Emailing: emailing, Modules: modules, Handlers: handlers, diff --git a/pkg/smtp/client/smtp.go b/pkg/smtp/client/smtp.go index 991db0c86790..e3c07b4d673f 100644 --- a/pkg/smtp/client/smtp.go +++ b/pkg/smtp/client/smtp.go @@ -108,7 +108,7 @@ func (c *Client) Do(ctx context.Context, tos []*mail.Address, subject string, co // Try to clean up after ourselves but don't log anything if something has failed. defer func() { if err := smtpClient.Quit(); success && err != nil { - c.logger.Warn("failed to close SMTP connection", "error", err) + c.logger.WarnContext(ctx, "failed to close SMTP connection", "error", err) } }() diff --git a/pkg/sqlmigration/026_update_integrations.go b/pkg/sqlmigration/026_update_integrations.go index 5c4cb0e41ef2..2611cae4dd04 100644 --- a/pkg/sqlmigration/026_update_integrations.go +++ b/pkg/sqlmigration/026_update_integrations.go @@ -12,7 +12,6 @@ import ( "github.com/google/uuid" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate" - "go.uber.org/zap" ) type updateIntegrations struct { @@ -332,7 +331,6 @@ func (migration *updateIntegrations) CopyOldCloudIntegrationServicesToNewCloudIn if err == sql.ErrNoRows { continue } - zap.L().Error("failed to get cloud integration id", zap.Error(err)) return nil } newServices = append(newServices, &newCloudIntegrationService{ diff --git a/pkg/sqlmigration/034_update_license.go b/pkg/sqlmigration/034_update_license.go new file mode 100644 index 000000000000..0be8ab82bdf4 --- /dev/null +++ b/pkg/sqlmigration/034_update_license.go @@ -0,0 +1,149 @@ +package sqlmigration + +import ( + "context" + "database/sql" + "encoding/json" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type updateLicense struct { + store sqlstore.SQLStore +} + +type existingLicense34 struct { + bun.BaseModel `bun:"table:licenses_v3"` + + ID string `bun:"id,pk,type:text"` + Key string `bun:"key,type:text,notnull,unique"` + Data string `bun:"data,type:text"` +} + +type newLicense34 struct { + bun.BaseModel `bun:"table:license"` + + types.Identifiable + types.TimeAuditable + Key string `bun:"key,type:text,notnull,unique"` + Data map[string]any `bun:"data,type:text"` + LastValidatedAt time.Time `bun:"last_validated_at,notnull"` + OrgID string `bun:"org_id,type:text,notnull" json:"orgID"` +} + +func NewUpdateLicenseFactory(store sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("update_license"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) { + return newUpdateLicense(ctx, ps, c, store) + }) +} + +func newUpdateLicense(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) { + return &updateLicense{store: store}, nil +} + +func (migration *updateLicense) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *updateLicense) Up(ctx context.Context, db *bun.DB) error { + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + err = migration.store.Dialect().RenameTableAndModifyModel(ctx, tx, new(existingLicense34), new(newLicense34), []string{OrgReference}, func(ctx context.Context) error { + existingLicenses := make([]*existingLicense34, 0) + err = tx.NewSelect().Model(&existingLicenses).Scan(ctx) + if err != nil { + if err != sql.ErrNoRows { + return err + } + } + + if err == nil && len(existingLicenses) > 0 { + var orgID string + err := migration. + store. + BunDB(). + NewSelect(). + Model((*types.Organization)(nil)). + Column("id"). + Scan(ctx, &orgID) + if err != nil { + if err != sql.ErrNoRows { + return err + } + } + if err == nil { + newLicenses, err := migration.CopyExistingLicensesToNewLicenses(existingLicenses, orgID) + if err != nil { + return err + } + _, err = tx. + NewInsert(). + Model(&newLicenses). + Exec(ctx) + if err != nil { + return err + } + } + return nil + } + return nil + }) + + err = tx.Commit() + if err != nil { + return err + } + + return nil +} + +func (migration *updateLicense) Down(context.Context, *bun.DB) error { + return nil +} + +func (migration *updateLicense) CopyExistingLicensesToNewLicenses(existingLicenses []*existingLicense34, orgID string) ([]*newLicense34, error) { + newLicenses := make([]*newLicense34, len(existingLicenses)) + for idx, existingLicense := range existingLicenses { + licenseID, err := valuer.NewUUID(existingLicense.ID) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "license id is not a valid UUID: %s", existingLicense.ID) + } + licenseData := map[string]any{} + err = json.Unmarshal([]byte(existingLicense.Data), &licenseData) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "unable to unmarshal license data in map[string]any") + } + newLicenses[idx] = &newLicense34{ + Identifiable: types.Identifiable{ + ID: licenseID, + }, + TimeAuditable: types.TimeAuditable{ + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + }, + Key: existingLicense.Key, + Data: licenseData, + LastValidatedAt: time.Now(), + OrgID: orgID, + } + } + return newLicenses, nil +} diff --git a/pkg/sqlmigration/035_update_api_monitoring_filters.go b/pkg/sqlmigration/035_update_api_monitoring_filters.go new file mode 100644 index 000000000000..a1efc6076677 --- /dev/null +++ b/pkg/sqlmigration/035_update_api_monitoring_filters.go @@ -0,0 +1,103 @@ +package sqlmigration + +import ( + "context" + "database/sql" + "time" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/SigNoz/signoz/pkg/types/quickfiltertypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type updateApiMonitoringFilters struct { + store sqlstore.SQLStore +} + +func NewUpdateApiMonitoringFiltersFactory(store sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("update_api_monitoring_filters"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) { + return newUpdateApiMonitoringFilters(ctx, ps, c, store) + }) +} + +func newUpdateApiMonitoringFilters(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) { + return &updateApiMonitoringFilters{ + store: store, + }, nil +} + +func (migration *updateApiMonitoringFilters) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *updateApiMonitoringFilters) Up(ctx context.Context, db *bun.DB) error { + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + // Get all organization IDs as strings + var orgIDs []string + err = tx.NewSelect(). + Table("organizations"). + Column("id"). + Scan(ctx, &orgIDs) + if err != nil { + if err == sql.ErrNoRows { + if err := tx.Commit(); err != nil { + return err + } + return nil + } + return err + } + + for _, orgID := range orgIDs { + // Get the updated default quick filters which includes the new API monitoring filters + storableQuickFilters, err := quickfiltertypes.NewDefaultQuickFilter(valuer.MustNewUUID(orgID)) + if err != nil { + return err + } + + // Find the API monitoring filter from the storable quick filters + var apiMonitoringFilterJSON string + for _, filter := range storableQuickFilters { + if filter.Signal == quickfiltertypes.SignalApiMonitoring { + apiMonitoringFilterJSON = filter.Filter + break + } + } + + if apiMonitoringFilterJSON != "" { + _, err = tx.NewUpdate(). + Table("quick_filter"). + Set("filter = ?, updated_at = ?", apiMonitoringFilterJSON, time.Now()). + Where("signal = ? AND org_id = ?", quickfiltertypes.SignalApiMonitoring, orgID). + Exec(ctx) + + if err != nil { + return err + } + } + } + + if err := tx.Commit(); err != nil { + return err + } + return nil +} + +func (migration *updateApiMonitoringFilters) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlstore/sqlstorehook/logging.go b/pkg/sqlstore/sqlstorehook/logging.go index 08342b142af2..a5ab6766f26c 100644 --- a/pkg/sqlstore/sqlstorehook/logging.go +++ b/pkg/sqlstore/sqlstorehook/logging.go @@ -36,8 +36,8 @@ func (hook logging) AfterQuery(ctx context.Context, event *bun.QueryEvent) { ctx, hook.level, "::SQLSTORE-QUERY::", - "db.query.operation", event.Operation(), - "db.query.text", event.Query, - "db.duration", time.Since(event.StartTime).String(), + "db_query_operation", event.Operation(), + "db_query_text", event.Query, + "db_query_duration", time.Since(event.StartTime).String(), ) } diff --git a/pkg/telemetrylogs/condition_builder.go b/pkg/telemetrylogs/condition_builder.go index da171ca51ffa..d3329f81dfb3 100644 --- a/pkg/telemetrylogs/condition_builder.go +++ b/pkg/telemetrylogs/condition_builder.go @@ -147,6 +147,11 @@ func (c *conditionBuilder) conditionFor( } } + // if the field is intrinsic, it always exists + if slices.Contains(IntrinsicFields, key.Name) { + return "true", nil + } + var value any switch column.Type { case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}: diff --git a/pkg/telemetrylogs/condition_builder_test.go b/pkg/telemetrylogs/condition_builder_test.go index 45f61b2d035d..f3519969ba7d 100644 --- a/pkg/telemetrylogs/condition_builder_test.go +++ b/pkg/telemetrylogs/condition_builder_test.go @@ -249,8 +249,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorExists, value: nil, - expectedSQL: "body <> ?", - expectedArgs: []any{""}, + expectedSQL: "true", expectedError: nil, }, { @@ -261,8 +260,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorNotExists, value: nil, - expectedSQL: "body = ?", - expectedArgs: []any{""}, + expectedSQL: "true", expectedError: nil, }, { @@ -273,8 +271,7 @@ func TestConditionFor(t *testing.T) { }, operator: qbtypes.FilterOperatorExists, value: nil, - expectedSQL: "timestamp <> ?", - expectedArgs: []any{0}, + expectedSQL: "true", expectedError: nil, }, { diff --git a/pkg/telemetrylogs/filter_compiler.go b/pkg/telemetrylogs/filter_compiler.go new file mode 100644 index 000000000000..69dc90bd5297 --- /dev/null +++ b/pkg/telemetrylogs/filter_compiler.go @@ -0,0 +1,55 @@ +package telemetrylogs + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/querybuilder" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" +) + +type FilterCompilerOpts struct { + FieldMapper qbtypes.FieldMapper + ConditionBuilder qbtypes.ConditionBuilder + MetadataStore telemetrytypes.MetadataStore + FullTextColumn *telemetrytypes.TelemetryFieldKey + JsonBodyPrefix string + JsonKeyToKey qbtypes.JsonKeyToFieldFunc + SkipResourceFilter bool +} + +type filterCompiler struct { + opts FilterCompilerOpts +} + +func NewFilterCompiler(opts FilterCompilerOpts) *filterCompiler { + return &filterCompiler{ + opts: opts, + } +} + +func (c *filterCompiler) Compile(ctx context.Context, expr string) (*sqlbuilder.WhereClause, []string, error) { + selectors := querybuilder.QueryStringToKeysSelectors(expr) + + keys, err := c.opts.MetadataStore.GetKeysMulti(ctx, selectors) + if err != nil { + return nil, nil, err + } + + filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(expr, querybuilder.FilterExprVisitorOpts{ + FieldMapper: c.opts.FieldMapper, + ConditionBuilder: c.opts.ConditionBuilder, + FieldKeys: keys, + FullTextColumn: c.opts.FullTextColumn, + JsonBodyPrefix: c.opts.JsonBodyPrefix, + JsonKeyToKey: c.opts.JsonKeyToKey, + SkipResourceFilter: c.opts.SkipResourceFilter, + }) + + if err != nil { + return nil, nil, err + } + + return filterWhereClause, warnings, nil +} diff --git a/pkg/telemetrylogs/filter_expr_logs_test.go b/pkg/telemetrylogs/filter_expr_logs_test.go index b98458e2ff56..c667988234d8 100644 --- a/pkg/telemetrylogs/filter_expr_logs_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_test.go @@ -459,7 +459,7 @@ func TestFilterExprLogs(t *testing.T) { expectedErrorContains: "", }, - // Conflicts with the key token, are valid and without additonal tokens, they are searched as FREETEXT + // Conflicts with the key token, are valid and without additional tokens, they are searched as FREETEXT { category: "Key token conflict", query: "status.code", diff --git a/pkg/telemetrylogs/statement_builder.go b/pkg/telemetrylogs/statement_builder.go new file mode 100644 index 000000000000..a0bdea797811 --- /dev/null +++ b/pkg/telemetrylogs/statement_builder.go @@ -0,0 +1,476 @@ +package telemetrylogs + +import ( + "context" + "fmt" + "log/slog" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/querybuilder" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" +) + +var ( + ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation") +) + +type logQueryStatementBuilder struct { + logger *slog.Logger + metadataStore telemetrytypes.MetadataStore + fm qbtypes.FieldMapper + cb qbtypes.ConditionBuilder + resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation] + aggExprRewriter qbtypes.AggExprRewriter +} + +var _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*logQueryStatementBuilder)(nil) + +func NewLogQueryStatementBuilder( + logger *slog.Logger, + metadataStore telemetrytypes.MetadataStore, + fieldMapper qbtypes.FieldMapper, + conditionBuilder qbtypes.ConditionBuilder, + resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation], + aggExprRewriter qbtypes.AggExprRewriter, +) *logQueryStatementBuilder { + return &logQueryStatementBuilder{ + logger: logger, + metadataStore: metadataStore, + fm: fieldMapper, + cb: conditionBuilder, + resourceFilterStmtBuilder: resourceFilterStmtBuilder, + aggExprRewriter: aggExprRewriter, + } +} + +// Build builds a SQL query for logs based on the given parameters +func (b *logQueryStatementBuilder) Build( + ctx context.Context, + start uint64, + end uint64, + requestType qbtypes.RequestType, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], +) (*qbtypes.Statement, error) { + + start = querybuilder.ToNanoSecs(start) + end = querybuilder.ToNanoSecs(end) + + keySelectors := getKeySelectors(query) + keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors) + if err != nil { + return nil, err + } + + // Create SQL builder + q := sqlbuilder.NewSelectBuilder() + + switch requestType { + case qbtypes.RequestTypeRaw: + return b.buildListQuery(ctx, q, query, start, end, keys) + case qbtypes.RequestTypeTimeSeries: + return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys) + case qbtypes.RequestTypeScalar: + return b.buildScalarQuery(ctx, q, query, start, end, keys, false) + } + + return nil, fmt.Errorf("unsupported request type: %s", requestType) +} + +func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []*telemetrytypes.FieldKeySelector { + var keySelectors []*telemetrytypes.FieldKeySelector + + for idx := range query.Aggregations { + aggExpr := query.Aggregations[idx] + selectors := querybuilder.QueryStringToKeysSelectors(aggExpr.Expression) + keySelectors = append(keySelectors, selectors...) + } + + if query.Filter != nil && query.Filter.Expression != "" { + whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression) + keySelectors = append(keySelectors, whereClauseSelectors...) + } + + for idx := range query.GroupBy { + groupBy := query.GroupBy[idx] + selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name) + keySelectors = append(keySelectors, selectors...) + } + + for idx := range query.Order { + keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{ + Name: query.Order[idx].Key.Name, + Signal: telemetrytypes.SignalTraces, + FieldContext: query.Order[idx].Key.FieldContext, + FieldDataType: query.Order[idx].Key.FieldDataType, + }) + } + + for idx := range keySelectors { + keySelectors[idx].Signal = telemetrytypes.SignalLogs + } + + return keySelectors +} + +// buildListQuery builds a query for list panel type +func (b *logQueryStatementBuilder) buildListQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + // Select default columns + sb.Select( + "timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string", + ) + + // From table + sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) + + // Add filter conditions + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + // Add order by + for _, orderBy := range query.Order { + sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction)) + } + + // Add limit and offset + if query.Limit > 0 { + sb.Limit(query.Limit) + } + + if query.Offset > 0 { + sb.Offset(query.Offset) + } + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse) + + finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs) + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +func (b *logQueryStatementBuilder) buildTimeSeriesQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + sb.SelectMore(fmt.Sprintf( + "toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL %d SECOND) AS ts", + int64(query.StepInterval.Seconds()), + )) + + var allGroupByArgs []any + + // Keep original column expressions so we can build the tuple + fieldNames := make([]string, 0, len(query.GroupBy)) + for _, gb := range query.GroupBy { + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString) + if err != nil { + return nil, err + } + colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) + allGroupByArgs = append(allGroupByArgs, args...) + sb.SelectMore(sqlbuilder.Escape(colExpr)) + fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)) + } + + // Aggregations + allAggChArgs := make([]any, 0) + for i, agg := range query.Aggregations { + rewritten, chArgs, err := b.aggExprRewriter.Rewrite( + ctx, agg.Expression, + uint64(query.StepInterval.Seconds()), + keys, + ) + if err != nil { + return nil, err + } + allAggChArgs = append(allAggChArgs, chArgs...) + sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i)) + } + + sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + var finalSQL string + var finalArgs []any + + if query.Limit > 0 { + // build the scalar “top/bottom-N” query in its own builder. + cteSB := sqlbuilder.NewSelectBuilder() + cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true) + if err != nil { + return nil, err + } + + cteFragments = append(cteFragments, fmt.Sprintf("__limit_cte AS (%s)", cteStmt.Query)) + cteArgs = append(cteArgs, cteStmt.Args) + + // Constrain the main query to the rows that appear in the CTE. + tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", ")) + sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) + + // Group by all dimensions + sb.GroupBy("ALL") + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + // Stitch it all together: WITH … SELECT … + finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) + + } else { + sb.GroupBy("ALL") + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + // Stitch it all together: WITH … SELECT … + finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) + } + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +// buildScalarQuery builds a query for scalar panel type +func (b *logQueryStatementBuilder) buildScalarQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, + skipResourceCTE bool, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" && !skipResourceCTE { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + allAggChArgs := []any{} + + var allGroupByArgs []any + + for _, gb := range query.GroupBy { + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString) + if err != nil { + return nil, err + } + colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) + allGroupByArgs = append(allGroupByArgs, args...) + sb.SelectMore(sqlbuilder.Escape(colExpr)) + } + + // for scalar queries, the rate would be end-start + rateInterval := (end - start) / querybuilder.NsToSeconds + + // Add aggregation + if len(query.Aggregations) > 0 { + for idx := range query.Aggregations { + aggExpr := query.Aggregations[idx] + rewritten, chArgs, err := b.aggExprRewriter.Rewrite( + ctx, aggExpr.Expression, + rateInterval, + keys, + ) + if err != nil { + return nil, err + } + allAggChArgs = append(allAggChArgs, chArgs...) + sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, idx)) + } + } + + // From table + sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) + + // Add filter conditions + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + // Group by dimensions + sb.GroupBy("ALL") + + // Add having clause if needed + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + // Add order by + for _, orderBy := range query.Order { + idx, ok := aggOrderBy(orderBy, query) + if ok { + sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction)) + } else { + sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction)) + } + } + + // if there is no order by, then use the __result_0 as the order by + if len(query.Order) == 0 { + sb.OrderBy("__result_0 DESC") + } + + // Add limit and offset + if query.Limit > 0 { + sb.Limit(query.Limit) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs) + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +// buildFilterCondition builds SQL condition from filter expression +func (b *logQueryStatementBuilder) addFilterCondition( + _ context.Context, + sb *sqlbuilder.SelectBuilder, + start, end uint64, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) ([]string, error) { + + // add filter expression + filterWhereClause, warnings, err := querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{ + FieldMapper: b.fm, + ConditionBuilder: b.cb, + FieldKeys: keys, + SkipResourceFilter: true, + }) + + if err != nil { + return nil, err + } + + if filterWhereClause != nil { + sb.AddWhereClause(filterWhereClause) + } + + // add time filter + startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment + endBucket := end / querybuilder.NsToSeconds + + sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.LE("timestamp", fmt.Sprintf("%d", end)), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket)) + + return warnings, nil +} + +func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) (int, bool) { + for i, agg := range q.Aggregations { + if k.Key.Name == agg.Alias || + k.Key.Name == agg.Expression || + k.Key.Name == fmt.Sprintf("%d", i) { + return i, true + } + } + return 0, false +} + +func (b *logQueryStatementBuilder) maybeAttachResourceFilter( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + start, end uint64, +) (cteSQL string, cteArgs []any, err error) { + + stmt, err := b.buildResourceFilterCTE(ctx, query, start, end) + if err != nil { + return "", nil, err + } + + sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)") + + return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil +} + +func (b *logQueryStatementBuilder) buildResourceFilterCTE( + ctx context.Context, + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], + start, end uint64, +) (*qbtypes.Statement, error) { + + return b.resourceFilterStmtBuilder.Build( + ctx, + start, + end, + qbtypes.RequestTypeRaw, + query, + ) +} diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go new file mode 100644 index 000000000000..11ec03a397fe --- /dev/null +++ b/pkg/telemetrylogs/stmt_builder_test.go @@ -0,0 +1,110 @@ +package telemetrylogs + +import ( + "context" + "log/slog" + "testing" + "time" + + "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" + "github.com/stretchr/testify/require" +) + +func resourceFilterStmtBuilder() (qbtypes.StatementBuilder[qbtypes.LogAggregation], error) { + fm := resourcefilter.NewFieldMapper() + cb := resourcefilter.NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + keysMap := buildCompleteFieldKeyMap() + for _, keys := range keysMap { + for _, key := range keys { + key.Signal = telemetrytypes.SignalLogs + } + } + mockMetadataStore.KeysMap = keysMap + + return resourcefilter.NewLogResourceFilterStatementBuilder( + fm, + cb, + mockMetadataStore, + ), nil +} + +func TestStatementBuilder(t *testing.T) { + cases := []struct { + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] + expected qbtypes.Statement + expectedErr error + }{ + { + name: "test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ + Signal: telemetrytypes.SignalLogs, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.LogAggregation{ + { + Expression: "count()", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'cartservice'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL", + Args: []any{"cartservice", "%service.name%", "%service.name%cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + } + + fm := NewFieldMapper() + cb := NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() + + aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil) + + resourceFilterStmtBuilder, err := resourceFilterStmtBuilder() + require.NoError(t, err) + + statementBuilder := NewLogQueryStatementBuilder( + slog.Default(), + mockMetadataStore, + fm, + cb, + resourceFilterStmtBuilder, + aggExprRewriter, + ) + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query) + + if c.expectedErr != nil { + require.Error(t, err) + require.Contains(t, err.Error(), c.expectedErr.Error()) + } else { + require.NoError(t, err) + require.Equal(t, c.expected.Query, q.Query) + require.Equal(t, c.expected.Args, q.Args) + require.Equal(t, c.expected.Warnings, q.Warnings) + } + }) + } +} diff --git a/pkg/telemetrylogs/test_data.go b/pkg/telemetrylogs/test_data.go index 3e728f291ea9..f8c9dd0d8794 100644 --- a/pkg/telemetrylogs/test_data.go +++ b/pkg/telemetrylogs/test_data.go @@ -19,7 +19,7 @@ func limitString(s string, maxLen int) string { // Function to build a complete field key map for testing all scenarios func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { - return map[string][]*telemetrytypes.TelemetryFieldKey{ + keysMap := map[string][]*telemetrytypes.TelemetryFieldKey{ "service.name": { { Name: "service.name", @@ -856,4 +856,11 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { }, }, } + + for _, keys := range keysMap { + for _, key := range keys { + key.Signal = telemetrytypes.SignalLogs + } + } + return keysMap } diff --git a/pkg/telemetrymetadata/metadata.go b/pkg/telemetrymetadata/metadata.go index 821d6c823bf1..80e195ba9b01 100644 --- a/pkg/telemetrymetadata/metadata.go +++ b/pkg/telemetrymetadata/metadata.go @@ -3,13 +3,14 @@ package telemetrymetadata import ( "context" "fmt" + "log/slog" "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/telemetrystore" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" "github.com/SigNoz/signoz/pkg/types/telemetrytypes" "github.com/huandu/go-sqlbuilder" - "go.uber.org/zap" ) var ( @@ -21,6 +22,7 @@ var ( ) type telemetryMetaStore struct { + logger *slog.Logger telemetrystore telemetrystore.TelemetryStore tracesDBName string tracesFieldsTblName string @@ -35,10 +37,10 @@ type telemetryMetaStore struct { fm qbtypes.FieldMapper conditionBuilder qbtypes.ConditionBuilder - compiler qbtypes.FilterCompiler } func NewTelemetryMetaStore( + logger *slog.Logger, telemetrystore telemetrystore.TelemetryStore, tracesDBName string, tracesFieldsTblName string, @@ -98,7 +100,6 @@ func (t *telemetryMetaStore) tracesTblStatementToFieldKeys(ctx context.Context) // getTracesKeys returns the keys from the spans that match the field selection criteria func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, error) { - if len(fieldKeySelectors) == 0 { return nil, nil } @@ -562,11 +563,24 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel sb := sqlbuilder.Select("DISTINCT " + selectColumn).From(t.relatedMetadataDBName + "." + t.relatedMetadataTblName) if len(fieldValueSelector.ExistingQuery) != 0 { - whereClause, _, err := t.compiler.Compile(ctx, fieldValueSelector.ExistingQuery) + keySelectors := querybuilder.QueryStringToKeysSelectors(fieldValueSelector.ExistingQuery) + for _, keySelector := range keySelectors { + keySelector.Signal = fieldValueSelector.Signal + } + keys, err := t.GetKeysMulti(ctx, keySelectors) + if err != nil { + return nil, err + } + + whereClause, _, err := querybuilder.PrepareWhereClause(fieldValueSelector.ExistingQuery, querybuilder.FilterExprVisitorOpts{ + FieldMapper: t.fm, + ConditionBuilder: t.conditionBuilder, + FieldKeys: keys, + }) if err == nil { sb.AddWhereClause(whereClause) } else { - zap.L().Warn("error parsing existing query for related values", zap.Error(err)) + t.logger.WarnContext(ctx, "error parsing existing query for related values", "error", err) } } @@ -586,7 +600,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse) - zap.L().Debug("query for related values", zap.String("query", query), zap.Any("args", args)) + t.logger.DebugContext(ctx, "query for related values", "query", query, "args", args) rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...) if err != nil { diff --git a/pkg/telemetrymetadata/metadata_test.go b/pkg/telemetrymetadata/metadata_test.go index f5d949cc330c..6fc30798799b 100644 --- a/pkg/telemetrymetadata/metadata_test.go +++ b/pkg/telemetrymetadata/metadata_test.go @@ -3,6 +3,8 @@ package telemetrymetadata import ( "context" "fmt" + "io" + "log/slog" "regexp" "testing" @@ -34,6 +36,7 @@ func TestGetKeys(t *testing.T) { mock := mockTelemetryStore.Mock() metadata := NewTelemetryMetaStore( + slog.New(slog.NewTextHandler(io.Discard, nil)), mockTelemetryStore, telemetrytraces.DBName, telemetrytraces.TagAttributesV2TableName, diff --git a/pkg/telemetrytraces/condition_builder.go b/pkg/telemetrytraces/condition_builder.go index eacb3d24aeb6..ebe1e6d1f125 100644 --- a/pkg/telemetrytraces/condition_builder.go +++ b/pkg/telemetrytraces/condition_builder.go @@ -126,6 +126,11 @@ func (c *conditionBuilder) conditionFor( // in the query builder, `exists` and `not exists` are used for // key membership checks, so depending on the column type, the condition changes case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists: + // if the field is intrinsic, it always exists + if slices.Contains(IntrinsicFields, tblFieldName) || slices.Contains(CalculatedFields, tblFieldName) { + return "true", nil + } + var value any switch column.Type { case schema.ColumnTypeString, diff --git a/pkg/telemetrytraces/field_mapper.go b/pkg/telemetrytraces/field_mapper.go index e7630ce9c2d4..7229f9b02ac1 100644 --- a/pkg/telemetrytraces/field_mapper.go +++ b/pkg/telemetrytraces/field_mapper.go @@ -149,7 +149,7 @@ func (m *defaultFieldMapper) getColumn( case telemetrytypes.FieldDataTypeBool: return indexV3Columns["attributes_bool"], nil } - case telemetrytypes.FieldContextSpan: + case telemetrytypes.FieldContextSpan, telemetrytypes.FieldContextUnspecified: if col, ok := indexV3Columns[key.Name]; ok { return col, nil } diff --git a/pkg/telemetrytraces/statement_builder.go b/pkg/telemetrytraces/statement_builder.go new file mode 100644 index 000000000000..0d7d05cbe26a --- /dev/null +++ b/pkg/telemetrytraces/statement_builder.go @@ -0,0 +1,504 @@ +package telemetrytraces + +import ( + "context" + "fmt" + "log/slog" + "strings" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/querybuilder" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/huandu/go-sqlbuilder" +) + +var ( + ErrUnsupportedAggregation = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported aggregation") +) + +type traceQueryStatementBuilder struct { + logger *slog.Logger + metadataStore telemetrytypes.MetadataStore + fm qbtypes.FieldMapper + cb qbtypes.ConditionBuilder + resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation] + aggExprRewriter qbtypes.AggExprRewriter +} + +var _ qbtypes.StatementBuilder[qbtypes.TraceAggregation] = (*traceQueryStatementBuilder)(nil) + +func NewTraceQueryStatementBuilder( + logger *slog.Logger, + metadataStore telemetrytypes.MetadataStore, + fieldMapper qbtypes.FieldMapper, + conditionBuilder qbtypes.ConditionBuilder, + resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation], + aggExprRewriter qbtypes.AggExprRewriter, +) *traceQueryStatementBuilder { + return &traceQueryStatementBuilder{ + logger: logger, + metadataStore: metadataStore, + fm: fieldMapper, + cb: conditionBuilder, + resourceFilterStmtBuilder: resourceFilterStmtBuilder, + aggExprRewriter: aggExprRewriter, + } +} + +// Build builds a SQL query for traces based on the given parameters +func (b *traceQueryStatementBuilder) Build( + ctx context.Context, + start uint64, + end uint64, + requestType qbtypes.RequestType, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], +) (*qbtypes.Statement, error) { + + start = querybuilder.ToNanoSecs(start) + end = querybuilder.ToNanoSecs(end) + + keySelectors := getKeySelectors(query) + + keys, err := b.metadataStore.GetKeysMulti(ctx, keySelectors) + if err != nil { + return nil, err + } + + // Create SQL builder + q := sqlbuilder.NewSelectBuilder() + + switch requestType { + case qbtypes.RequestTypeRaw: + return b.buildListQuery(ctx, q, query, start, end, keys) + case qbtypes.RequestTypeTimeSeries: + return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys) + case qbtypes.RequestTypeScalar: + return b.buildScalarQuery(ctx, q, query, start, end, keys, false) + } + + return nil, fmt.Errorf("unsupported request type: %s", requestType) +} + +func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) []*telemetrytypes.FieldKeySelector { + var keySelectors []*telemetrytypes.FieldKeySelector + + for idx := range query.Aggregations { + aggExpr := query.Aggregations[idx] + selectors := querybuilder.QueryStringToKeysSelectors(aggExpr.Expression) + keySelectors = append(keySelectors, selectors...) + } + + if query.Filter != nil && query.Filter.Expression != "" { + whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression) + keySelectors = append(keySelectors, whereClauseSelectors...) + } + + for idx := range query.GroupBy { + groupBy := query.GroupBy[idx] + selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name) + keySelectors = append(keySelectors, selectors...) + } + + for idx := range query.SelectFields { + keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{ + Name: query.SelectFields[idx].Name, + Signal: telemetrytypes.SignalTraces, + FieldContext: query.SelectFields[idx].FieldContext, + FieldDataType: query.SelectFields[idx].FieldDataType, + }) + } + + for idx := range query.Order { + keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{ + Name: query.Order[idx].Key.Name, + Signal: telemetrytypes.SignalTraces, + FieldContext: query.Order[idx].Key.FieldContext, + FieldDataType: query.Order[idx].Key.FieldDataType, + }) + } + + for idx := range keySelectors { + keySelectors[idx].Signal = telemetrytypes.SignalTraces + } + + return keySelectors +} + +// buildListQuery builds a query for list panel type +func (b *traceQueryStatementBuilder) buildListQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + // Select default columns + sb.Select( + "timestamp", + "trace_id", + "span_id", + "name", + sqlbuilder.Escape("resource_string_service$$name"), + "duration_nano", + "response_status_code", + ) + + // TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs? + for _, field := range query.SelectFields { + colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys) + if err != nil { + return nil, err + } + sb.SelectMore(sqlbuilder.Escape(colExpr)) + } + + // From table + sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) + + // Add filter conditions + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + // Add order by + for _, orderBy := range query.Order { + sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue())) + } + + // Add limit and offset + if query.Limit > 0 { + sb.Limit(query.Limit) + } + + if query.Offset > 0 { + sb.Offset(query.Offset) + } + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse) + + finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs) + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +func (b *traceQueryStatementBuilder) buildTimeSeriesQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + sb.SelectMore(fmt.Sprintf( + "toStartOfInterval(timestamp, INTERVAL %d SECOND) AS ts", + int64(query.StepInterval.Seconds()), + )) + + var allGroupByArgs []any + + // Keep original column expressions so we can build the tuple + fieldNames := make([]string, 0, len(query.GroupBy)) + for _, gb := range query.GroupBy { + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString) + if err != nil { + return nil, err + } + colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) + allGroupByArgs = append(allGroupByArgs, args...) + sb.SelectMore(sqlbuilder.Escape(colExpr)) + fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)) + } + + // Aggregations + allAggChArgs := make([]any, 0) + for i, agg := range query.Aggregations { + rewritten, chArgs, err := b.aggExprRewriter.Rewrite( + ctx, agg.Expression, + uint64(query.StepInterval.Seconds()), + keys, + ) + if err != nil { + return nil, err + } + allAggChArgs = append(allAggChArgs, chArgs...) + sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i)) + } + + sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + var finalSQL string + var finalArgs []any + + if query.Limit > 0 { + // build the scalar “top/bottom-N” query in its own builder. + cteSB := sqlbuilder.NewSelectBuilder() + cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true) + if err != nil { + return nil, err + } + + cteFragments = append(cteFragments, fmt.Sprintf("__limit_cte AS (%s)", cteStmt.Query)) + cteArgs = append(cteArgs, cteStmt.Args) + + // Constrain the main query to the rows that appear in the CTE. + tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", ")) + sb.Where(fmt.Sprintf("%s IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", "))) + + // Group by all dimensions + sb.GroupBy("ALL") + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + // Stitch it all together: WITH … SELECT … + finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) + + } else { + sb.GroupBy("ALL") + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + // Stitch it all together: WITH … SELECT … + finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs) + } + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +// buildScalarQuery builds a query for scalar panel type +func (b *traceQueryStatementBuilder) buildScalarQuery( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + start, end uint64, + keys map[string][]*telemetrytypes.TelemetryFieldKey, + skipResourceCTE bool, +) (*qbtypes.Statement, error) { + + var ( + cteFragments []string + cteArgs [][]any + ) + + if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end); err != nil { + return nil, err + } else if frag != "" && !skipResourceCTE { + cteFragments = append(cteFragments, frag) + cteArgs = append(cteArgs, args) + } + + allAggChArgs := []any{} + + var allGroupByArgs []any + for _, gb := range query.GroupBy { + expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString) + if err != nil { + return nil, err + } + colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name) + allGroupByArgs = append(allGroupByArgs, args...) + sb.SelectMore(sqlbuilder.Escape(colExpr)) + } + + // for scalar queries, the rate would be end-start + rateInterval := (end - start) / querybuilder.NsToSeconds + + // Add aggregation + if len(query.Aggregations) > 0 { + for idx := range query.Aggregations { + aggExpr := query.Aggregations[idx] + rewritten, chArgs, err := b.aggExprRewriter.Rewrite( + ctx, aggExpr.Expression, + rateInterval, + keys, + ) + if err != nil { + return nil, err + } + allAggChArgs = append(allAggChArgs, chArgs...) + sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, idx)) + } + } + + // From table + sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) + + // Add filter conditions + warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys) + if err != nil { + return nil, err + } + + // Group by dimensions + sb.GroupBy("ALL") + + // Add having clause if needed + if query.Having != nil && query.Having.Expression != "" { + sb.Having(query.Having.Expression) + } + + // Add order by + for _, orderBy := range query.Order { + idx, ok := aggOrderBy(orderBy, query) + if ok { + sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue())) + } else { + sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue())) + } + } + + // if there is no order by, then use the __result_0 as the order by + if len(query.Order) == 0 { + sb.OrderBy("__result_0 DESC") + } + + // Add limit and offset + if query.Limit > 0 { + sb.Limit(query.Limit) + } + + combinedArgs := append(allGroupByArgs, allAggChArgs...) + + mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...) + + finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL + finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs) + + return &qbtypes.Statement{ + Query: finalSQL, + Args: finalArgs, + Warnings: warnings, + }, nil +} + +// buildFilterCondition builds SQL condition from filter expression +func (b *traceQueryStatementBuilder) addFilterCondition( + _ context.Context, + sb *sqlbuilder.SelectBuilder, + start, end uint64, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + keys map[string][]*telemetrytypes.TelemetryFieldKey, +) ([]string, error) { + + var filterWhereClause *sqlbuilder.WhereClause + var warnings []string + var err error + + if query.Filter != nil && query.Filter.Expression != "" { + // add filter expression + filterWhereClause, warnings, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{ + FieldMapper: b.fm, + ConditionBuilder: b.cb, + FieldKeys: keys, + SkipResourceFilter: true, + }) + + if err != nil { + return nil, err + } + } + + if filterWhereClause != nil { + sb.AddWhereClause(filterWhereClause) + } + + // add time filter + startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment + endBucket := end / querybuilder.NsToSeconds + + sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.LE("timestamp", fmt.Sprintf("%d", end)), sb.GE("ts_bucket_start", startBucket), sb.LE("ts_bucket_start", endBucket)) + + return warnings, nil +} + +func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) (int, bool) { + for i, agg := range q.Aggregations { + if k.Key.Name == agg.Alias || + k.Key.Name == agg.Expression || + k.Key.Name == fmt.Sprintf("%d", i) { + return i, true + } + } + return 0, false +} + +func (b *traceQueryStatementBuilder) maybeAttachResourceFilter( + ctx context.Context, + sb *sqlbuilder.SelectBuilder, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + start, end uint64, +) (cteSQL string, cteArgs []any, err error) { + + stmt, err := b.buildResourceFilterCTE(ctx, query, start, end) + if err != nil { + return "", nil, err + } + + sb.Where("resource_fingerprint IN (SELECT fingerprint FROM __resource_filter)") + + return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil +} + +func (b *traceQueryStatementBuilder) buildResourceFilterCTE( + ctx context.Context, + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], + start, end uint64, +) (*qbtypes.Statement, error) { + + return b.resourceFilterStmtBuilder.Build( + ctx, + start, + end, + qbtypes.RequestTypeRaw, + query, + ) +} diff --git a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go new file mode 100644 index 000000000000..84484723820c --- /dev/null +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -0,0 +1,103 @@ +package telemetrytraces + +import ( + "context" + "log/slog" + "testing" + "time" + + "github.com/SigNoz/signoz/pkg/querybuilder" + "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest" + "github.com/stretchr/testify/require" +) + +func resourceFilterStmtBuilder() (qbtypes.StatementBuilder[qbtypes.TraceAggregation], error) { + fm := resourcefilter.NewFieldMapper() + cb := resourcefilter.NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() + + return resourcefilter.NewTraceResourceFilterStatementBuilder( + fm, + cb, + mockMetadataStore, + ), nil +} + +func TestStatementBuilder(t *testing.T) { + cases := []struct { + name string + requestType qbtypes.RequestType + query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation] + expected qbtypes.Statement + expectedErr error + }{ + { + name: "test", + requestType: qbtypes.RequestTypeTimeSeries, + query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{ + Signal: telemetrytypes.SignalTraces, + StepInterval: qbtypes.Step{Duration: 30 * time.Second}, + Aggregations: []qbtypes.TraceAggregation{ + { + Expression: "count()", + }, + }, + Filter: &qbtypes.Filter{ + Expression: "service.name = 'redis-manual'", + }, + Limit: 10, + GroupBy: []qbtypes.GroupByKey{ + { + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + }, + }, + }, + }, + expected: qbtypes.Statement{ + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY ALL ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint IN (SELECT fingerprint FROM __resource_filter) AND timestamp >= ? AND timestamp <= ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) IN (SELECT `service.name` FROM __limit_cte) GROUP BY ALL", + Args: []any{"redis-manual", "%service.name%", "%service.name%redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + }, + expectedErr: nil, + }, + } + + fm := NewFieldMapper() + cb := NewConditionBuilder(fm) + mockMetadataStore := telemetrytypestest.NewMockMetadataStore() + mockMetadataStore.KeysMap = buildCompleteFieldKeyMap() + aggExprRewriter := querybuilder.NewAggExprRewriter(nil, fm, cb, "", nil) + + resourceFilterStmtBuilder, err := resourceFilterStmtBuilder() + require.NoError(t, err) + + statementBuilder := NewTraceQueryStatementBuilder( + slog.Default(), + mockMetadataStore, + fm, + cb, + resourceFilterStmtBuilder, + aggExprRewriter, + ) + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + + q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query) + + if c.expectedErr != nil { + require.Error(t, err) + require.Contains(t, err.Error(), c.expectedErr.Error()) + } else { + require.NoError(t, err) + require.Equal(t, c.expected.Query, q.Query) + require.Equal(t, c.expected.Args, q.Args) + require.Equal(t, c.expected.Warnings, q.Warnings) + } + }) + } +} diff --git a/pkg/telemetrytraces/test_data.go b/pkg/telemetrytraces/test_data.go new file mode 100644 index 000000000000..926fc61aae76 --- /dev/null +++ b/pkg/telemetrytraces/test_data.go @@ -0,0 +1,44 @@ +package telemetrytraces + +import ( + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { + keysMap := map[string][]*telemetrytypes.TelemetryFieldKey{ + "service.name": { + { + Name: "service.name", + FieldContext: telemetrytypes.FieldContextResource, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + "http.request.method": { + { + Name: "http.request.method", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + "http.response.status_code": { + { + Name: "http.status_code", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeInt64, + }, + }, + "kind_string": { + { + Name: "kind_string", + FieldContext: telemetrytypes.FieldContextSpan, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + } + for _, keys := range keysMap { + for _, key := range keys { + key.Signal = telemetrytypes.SignalTraces + } + } + return keysMap +} diff --git a/pkg/types/domain.go b/pkg/types/domain.go index 3134c0fe3e45..1ea6eb50de29 100644 --- a/pkg/types/domain.go +++ b/pkg/types/domain.go @@ -11,7 +11,6 @@ import ( "github.com/pkg/errors" saml2 "github.com/russellhaering/gosaml2" "github.com/uptrace/bun" - "go.uber.org/zap" ) type StorableOrgDomain struct { @@ -182,7 +181,6 @@ func (od *GettableOrgDomain) BuildSsoUrl(siteUrl *url.URL) (ssoUrl string, err e return googleProvider.BuildAuthURL(relayState) default: - zap.L().Error("found unsupported SSO config for the org domain", zap.String("orgDomain", od.Name)) return "", fmt.Errorf("unsupported SSO config for the domain") } diff --git a/pkg/types/featuretypes/feature.go b/pkg/types/featuretypes/feature.go new file mode 100644 index 000000000000..964cd4a15c64 --- /dev/null +++ b/pkg/types/featuretypes/feature.go @@ -0,0 +1,28 @@ +package featuretypes + +import "github.com/uptrace/bun" + +type FeatureSet []*GettableFeature +type GettableFeature struct { + Name string `db:"name" json:"name"` + Active bool `db:"active" json:"active"` + Usage int64 `db:"usage" json:"usage"` + UsageLimit int64 `db:"usage_limit" json:"usage_limit"` + Route string `db:"route" json:"route"` +} + +type StorableFeature struct { + bun.BaseModel `bun:"table:feature_status"` + + Name string `bun:"name,pk,type:text" json:"name"` + Active bool `bun:"active" json:"active"` + Usage int `bun:"usage,default:0" json:"usage"` + UsageLimit int `bun:"usage_limit,default:0" json:"usage_limit"` + Route string `bun:"route,type:text" json:"route"` +} + +func NewStorableFeature() {} + +const UseSpanMetrics = "USE_SPAN_METRICS" +const AnomalyDetection = "ANOMALY_DETECTION" +const TraceFunnels = "TRACE_FUNNELS" diff --git a/pkg/types/integration.go b/pkg/types/integration.go index 2324c64e5882..43d29c4b56f6 100644 --- a/pkg/types/integration.go +++ b/pkg/types/integration.go @@ -145,14 +145,12 @@ func (c *AccountConfig) Scan(src any) error { // For serializing to db func (c *AccountConfig) Value() (driver.Value, error) { if c == nil { - return nil, nil + return nil, fmt.Errorf("cloud account config is nil") } serialized, err := json.Marshal(c) if err != nil { - return nil, fmt.Errorf( - "couldn't serialize cloud account config to JSON: %w", err, - ) + return nil, fmt.Errorf("couldn't serialize cloud account config to JSON: %w", err) } return serialized, nil } @@ -180,7 +178,7 @@ func (r *AgentReport) Scan(src any) error { // For serializing to db func (r *AgentReport) Value() (driver.Value, error) { if r == nil { - return nil, nil + return nil, fmt.Errorf("agent report is nil") } serialized, err := json.Marshal(r) @@ -234,7 +232,7 @@ func (c *CloudServiceConfig) Scan(src any) error { // For serializing to db func (c *CloudServiceConfig) Value() (driver.Value, error) { if c == nil { - return nil, nil + return nil, fmt.Errorf("cloud service config is nil") } serialized, err := json.Marshal(c) diff --git a/pkg/types/invite.go b/pkg/types/invite.go index 40fea73d27c7..8de9ede3498c 100644 --- a/pkg/types/invite.go +++ b/pkg/types/invite.go @@ -85,3 +85,7 @@ type PostableInvite struct { type PostableBulkInviteRequest struct { Invites []PostableInvite `json:"invites"` } + +type GettableCreateInviteResponse struct { + InviteToken string `json:"token"` +} diff --git a/pkg/types/license.go b/pkg/types/license.go deleted file mode 100644 index fb9fd7e6d081..000000000000 --- a/pkg/types/license.go +++ /dev/null @@ -1,46 +0,0 @@ -package types - -import ( - "time" - - "github.com/uptrace/bun" -) - -type License struct { - bun.BaseModel `bun:"table:licenses"` - - Key string `bun:"key,pk,type:text"` - CreatedAt time.Time `bun:"createdAt,default:current_timestamp"` - UpdatedAt time.Time `bun:"updatedAt,default:current_timestamp"` - PlanDetails string `bun:"planDetails,type:text"` - ActivationID string `bun:"activationId,type:text"` - ValidationMessage string `bun:"validationMessage,type:text"` - LastValidated time.Time `bun:"lastValidated,default:current_timestamp"` -} - -type Site struct { - bun.BaseModel `bun:"table:sites"` - - UUID string `bun:"uuid,pk,type:text"` - Alias string `bun:"alias,type:varchar(180),default:'PROD'"` - URL string `bun:"url,type:varchar(300)"` - CreatedAt time.Time `bun:"createdAt,default:current_timestamp"` -} - -type FeatureStatus struct { - bun.BaseModel `bun:"table:feature_status"` - - Name string `bun:"name,pk,type:text" json:"name"` - Active bool `bun:"active" json:"active"` - Usage int `bun:"usage,default:0" json:"usage"` - UsageLimit int `bun:"usage_limit,default:0" json:"usage_limit"` - Route string `bun:"route,type:text" json:"route"` -} - -type LicenseV3 struct { - bun.BaseModel `bun:"table:licenses_v3"` - - ID string `bun:"id,pk,type:text"` - Key string `bun:"key,type:text,notnull,unique"` - Data string `bun:"data,type:text"` -} diff --git a/pkg/types/licensetypes/license.go b/pkg/types/licensetypes/license.go new file mode 100644 index 000000000000..994b2c7b63be --- /dev/null +++ b/pkg/types/licensetypes/license.go @@ -0,0 +1,389 @@ +package licensetypes + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/types" + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun" +) + +type StorableLicense struct { + bun.BaseModel `bun:"table:license"` + + types.Identifiable + types.TimeAuditable + Key string `bun:"key,type:text,notnull,unique"` + Data map[string]any `bun:"data,type:text"` + LastValidatedAt time.Time `bun:"last_validated_at,notnull"` + OrgID valuer.UUID `bun:"org_id,type:text,notnull" json:"orgID"` +} + +// this data excludes ID and Key +type License struct { + ID valuer.UUID + Key string + Data map[string]interface{} + PlanName string + Features []*featuretypes.GettableFeature + Status string + ValidFrom int64 + ValidUntil int64 + CreatedAt time.Time + UpdatedAt time.Time + LastValidatedAt time.Time + OrganizationID valuer.UUID +} + +type GettableLicense map[string]any + +type PostableLicense struct { + Key string `json:"key"` +} + +func NewStorableLicense(ID valuer.UUID, key string, data map[string]any, createdAt, updatedAt, lastValidatedAt time.Time, organizationID valuer.UUID) *StorableLicense { + return &StorableLicense{ + Identifiable: types.Identifiable{ + ID: ID, + }, + TimeAuditable: types.TimeAuditable{ + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + Key: key, + Data: data, + LastValidatedAt: lastValidatedAt, + OrgID: organizationID, + } +} + +func NewStorableLicenseFromLicense(license *License) *StorableLicense { + return &StorableLicense{ + Identifiable: types.Identifiable{ + ID: license.ID, + }, + TimeAuditable: types.TimeAuditable{ + CreatedAt: license.CreatedAt, + UpdatedAt: license.UpdatedAt, + }, + Key: license.Key, + Data: license.Data, + LastValidatedAt: license.LastValidatedAt, + OrgID: license.OrganizationID, + } +} + +func GetActiveLicenseFromStorableLicenses(storableLicenses []*StorableLicense, organizationID valuer.UUID) (*License, error) { + var activeLicense *License + for _, storableLicense := range storableLicenses { + license, err := NewLicenseFromStorableLicense(storableLicense) + if err != nil { + return nil, err + } + + if license.Status != "VALID" { + continue + } + if activeLicense == nil && + (license.ValidFrom != 0) && + (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { + activeLicense = license + } + if activeLicense != nil && + license.ValidFrom > activeLicense.ValidFrom && + (license.ValidUntil == -1 || license.ValidUntil > time.Now().Unix()) { + activeLicense = license + } + } + + if activeLicense == nil { + return nil, errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "no active license found for the organization %s", organizationID.StringValue()) + } + + return activeLicense, nil +} + +func extractKeyFromMapStringInterface[T any](data map[string]interface{}, key string) (T, error) { + var zeroValue T + if val, ok := data[key]; ok { + if value, ok := val.(T); ok { + return value, nil + } + return zeroValue, fmt.Errorf("%s key is not a valid %s", key, reflect.TypeOf(zeroValue)) + } + return zeroValue, fmt.Errorf("%s key is missing", key) +} + +func NewLicense(data []byte, organizationID valuer.UUID) (*License, error) { + licenseData := map[string]any{} + err := json.Unmarshal(data, &licenseData) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to unmarshal license data") + } + + var features []*featuretypes.GettableFeature + + // extract id from data + licenseIDStr, err := extractKeyFromMapStringInterface[string](licenseData, "id") + if err != nil { + return nil, err + } + licenseID, err := valuer.NewUUID(licenseIDStr) + if err != nil { + return nil, err + } + delete(licenseData, "id") + + // extract key from data + licenseKey, err := extractKeyFromMapStringInterface[string](licenseData, "key") + if err != nil { + return nil, err + } + delete(licenseData, "key") + + // extract status from data + status, err := extractKeyFromMapStringInterface[string](licenseData, "status") + if err != nil { + return nil, err + } + + planMap, err := extractKeyFromMapStringInterface[map[string]any](licenseData, "plan") + if err != nil { + return nil, err + } + + planName, err := extractKeyFromMapStringInterface[string](planMap, "name") + if err != nil { + return nil, err + } + // if license status is invalid then default it to basic + if status == LicenseStatusInvalid { + planName = PlanNameBasic + } + + featuresFromZeus := make([]*featuretypes.GettableFeature, 0) + if _features, ok := licenseData["features"]; ok { + featuresData, err := json.Marshal(_features) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal features data") + } + + if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to unmarshal features data") + } + } + + switch planName { + case PlanNameEnterprise: + features = append(features, EnterprisePlan...) + case PlanNameBasic: + features = append(features, BasicPlan...) + default: + features = append(features, BasicPlan...) + } + + if len(featuresFromZeus) > 0 { + for _, feature := range featuresFromZeus { + exists := false + for i, existingFeature := range features { + if existingFeature.Name == feature.Name { + features[i] = feature // Replace existing feature + exists = true + break + } + } + if !exists { + features = append(features, feature) // Append if it doesn't exist + } + } + } + licenseData["features"] = features + + _validFrom, err := extractKeyFromMapStringInterface[float64](licenseData, "valid_from") + if err != nil { + _validFrom = 0 + } + validFrom := int64(_validFrom) + + _validUntil, err := extractKeyFromMapStringInterface[float64](licenseData, "valid_until") + if err != nil { + _validUntil = 0 + } + validUntil := int64(_validUntil) + + return &License{ + ID: licenseID, + Key: licenseKey, + Data: licenseData, + PlanName: planName, + Features: features, + ValidFrom: validFrom, + ValidUntil: validUntil, + Status: status, + CreatedAt: time.Now(), + UpdatedAt: time.Now(), + LastValidatedAt: time.Now(), + OrganizationID: organizationID, + }, nil + +} + +func NewLicenseFromStorableLicense(storableLicense *StorableLicense) (*License, error) { + var features []*featuretypes.GettableFeature + // extract status from data + status, err := extractKeyFromMapStringInterface[string](storableLicense.Data, "status") + if err != nil { + return nil, err + } + + planMap, err := extractKeyFromMapStringInterface[map[string]any](storableLicense.Data, "plan") + if err != nil { + return nil, err + } + + planName, err := extractKeyFromMapStringInterface[string](planMap, "name") + if err != nil { + return nil, err + } + // if license status is invalid then default it to basic + if status == LicenseStatusInvalid { + planName = PlanNameBasic + } + + featuresFromZeus := make([]*featuretypes.GettableFeature, 0) + if _features, ok := storableLicense.Data["features"]; ok { + featuresData, err := json.Marshal(_features) + if err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to marshal features data") + } + + if err := json.Unmarshal(featuresData, &featuresFromZeus); err != nil { + return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to unmarshal features data") + } + } + + switch planName { + case PlanNameEnterprise: + features = append(features, EnterprisePlan...) + case PlanNameBasic: + features = append(features, BasicPlan...) + default: + features = append(features, BasicPlan...) + } + + if len(featuresFromZeus) > 0 { + for _, feature := range featuresFromZeus { + exists := false + for i, existingFeature := range features { + if existingFeature.Name == feature.Name { + features[i] = feature // Replace existing feature + exists = true + break + } + } + if !exists { + features = append(features, feature) // Append if it doesn't exist + } + } + } + storableLicense.Data["features"] = features + + _validFrom, err := extractKeyFromMapStringInterface[float64](storableLicense.Data, "valid_from") + if err != nil { + _validFrom = 0 + } + validFrom := int64(_validFrom) + + _validUntil, err := extractKeyFromMapStringInterface[float64](storableLicense.Data, "valid_until") + if err != nil { + _validUntil = 0 + } + validUntil := int64(_validUntil) + + return &License{ + ID: storableLicense.ID, + Key: storableLicense.Key, + Data: storableLicense.Data, + PlanName: planName, + Features: features, + ValidFrom: validFrom, + ValidUntil: validUntil, + Status: status, + CreatedAt: storableLicense.CreatedAt, + UpdatedAt: storableLicense.UpdatedAt, + LastValidatedAt: storableLicense.LastValidatedAt, + OrganizationID: storableLicense.OrgID, + }, nil + +} + +func (license *License) Update(data []byte) error { + updatedLicense, err := NewLicense(data, license.OrganizationID) + if err != nil { + return err + } + + currentTime := time.Now() + license.Data = updatedLicense.Data + license.Features = updatedLicense.Features + license.ID = updatedLicense.ID + license.Key = updatedLicense.Key + license.PlanName = updatedLicense.PlanName + license.Status = updatedLicense.Status + license.ValidFrom = updatedLicense.ValidFrom + license.ValidUntil = updatedLicense.ValidUntil + license.UpdatedAt = currentTime + license.LastValidatedAt = currentTime + + return nil +} + +func NewGettableLicense(data map[string]any, key string) *GettableLicense { + gettableLicense := make(GettableLicense) + for k, v := range data { + gettableLicense[k] = v + } + gettableLicense["key"] = key + return &gettableLicense +} + +func (p *PostableLicense) UnmarshalJSON(data []byte) error { + var postableLicense struct { + Key string `json:"key"` + } + + err := json.Unmarshal(data, &postableLicense) + if err != nil { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to unmarshal payload") + } + + if postableLicense.Key == "" { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "license key cannot be empty") + } + + p.Key = postableLicense.Key + return nil +} + +type Store interface { + Create(context.Context, *StorableLicense) error + Get(context.Context, valuer.UUID, valuer.UUID) (*StorableLicense, error) + GetAll(context.Context, valuer.UUID) ([]*StorableLicense, error) + Update(context.Context, valuer.UUID, *StorableLicense) error + + // feature surrogate + InitFeatures(context.Context, []*featuretypes.StorableFeature) error + CreateFeature(context.Context, *featuretypes.StorableFeature) error + GetFeature(context.Context, string) (*featuretypes.StorableFeature, error) + GetAllFeatures(context.Context) ([]*featuretypes.StorableFeature, error) + UpdateFeature(context.Context, *featuretypes.StorableFeature) error + + // ListOrganizations returns the list of orgs + ListOrganizations(context.Context) ([]valuer.UUID, error) +} diff --git a/pkg/types/licensetypes/license_test.go b/pkg/types/licensetypes/license_test.go new file mode 100644 index 000000000000..b2216accbb60 --- /dev/null +++ b/pkg/types/licensetypes/license_test.go @@ -0,0 +1,175 @@ +package licensetypes + +import ( + "testing" + "time" + + "github.com/SigNoz/signoz/pkg/types/featuretypes" + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/pkg/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewLicenseV3(t *testing.T) { + testCases := []struct { + name string + data []byte + pass bool + expected *License + error error + }{ + { + name: "Error for missing license id", + data: []byte(`{}`), + pass: false, + error: errors.New("id key is missing"), + }, + { + name: "Error for license id not being a valid string", + data: []byte(`{"id": 10}`), + pass: false, + error: errors.New("id key is not a valid string"), + }, + { + name: "Error for missing license key", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e"}`), + pass: false, + error: errors.New("key key is missing"), + }, + { + name: "Error for invalid string license key", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":10}`), + pass: false, + error: errors.New("key key is not a valid string"), + }, + { + name: "Error for missing license status", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e", "key": "does-not-matter","category":"FREE"}`), + pass: false, + error: errors.New("status key is missing"), + }, + { + name: "Error for invalid string license status", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key": "does-not-matter", "category":"FREE", "status":10}`), + pass: false, + error: errors.New("status key is not a valid string"), + }, + { + name: "Error for missing license plan", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE"}`), + pass: false, + error: errors.New("plan key is missing"), + }, + { + name: "Error for invalid json license plan", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":10}`), + pass: false, + error: errors.New("plan key is not a valid map[string]interface {}"), + }, + { + name: "Error for invalid license plan", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{}}`), + pass: false, + error: errors.New("name key is missing"), + }, + { + name: "Parse the entire license properly", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`), + pass: true, + expected: &License{ + ID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "ENTERPRISE", + }, + "category": "FREE", + "status": "ACTIVE", + "valid_from": float64(1730899309), + "valid_until": float64(-1), + }, + PlanName: PlanNameEnterprise, + ValidFrom: 1730899309, + ValidUntil: -1, + Status: "ACTIVE", + Features: make([]*featuretypes.GettableFeature, 0), + OrganizationID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + }, + }, + { + name: "Fallback to basic plan if license status is invalid", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"INVALID","plan":{"name":"ENTERPRISE"},"valid_from": 1730899309,"valid_until": -1}`), + pass: true, + expected: &License{ + ID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "ENTERPRISE", + }, + "category": "FREE", + "status": "INVALID", + "valid_from": float64(1730899309), + "valid_until": float64(-1), + }, + PlanName: PlanNameBasic, + ValidFrom: 1730899309, + ValidUntil: -1, + Status: "INVALID", + Features: make([]*featuretypes.GettableFeature, 0), + OrganizationID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + }, + }, + { + name: "fallback states for validFrom and validUntil", + data: []byte(`{"id":"0196f794-ff30-7bee-a5f4-ef5ad315715e","key":"does-not-matter-key","category":"FREE","status":"ACTIVE","plan":{"name":"ENTERPRISE"},"valid_from":1234.456,"valid_until":5678.567}`), + pass: true, + expected: &License{ + ID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + Key: "does-not-matter-key", + Data: map[string]interface{}{ + "plan": map[string]interface{}{ + "name": "ENTERPRISE", + }, + "valid_from": 1234.456, + "valid_until": 5678.567, + "category": "FREE", + "status": "ACTIVE", + }, + PlanName: PlanNameEnterprise, + ValidFrom: 1234, + ValidUntil: 5678, + Status: "ACTIVE", + Features: make([]*featuretypes.GettableFeature, 0), + CreatedAt: time.Time{}, + UpdatedAt: time.Time{}, + LastValidatedAt: time.Time{}, + OrganizationID: valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e"), + }, + }, + } + + for _, tc := range testCases { + license, err := NewLicense(tc.data, valuer.MustNewUUID("0196f794-ff30-7bee-a5f4-ef5ad315715e")) + if license != nil { + license.Features = make([]*featuretypes.GettableFeature, 0) + delete(license.Data, "features") + } + + if tc.pass { + require.NoError(t, err) + require.NotNil(t, license) + // as the new license will pick the time.Now() value. doesn't make sense to compare them + license.CreatedAt = time.Time{} + license.UpdatedAt = time.Time{} + license.LastValidatedAt = time.Time{} + assert.Equal(t, tc.expected, license) + } else { + require.Error(t, err) + assert.EqualError(t, err, tc.error.Error()) + require.Nil(t, license) + } + + } +} diff --git a/ee/query-service/model/plans.go b/pkg/types/licensetypes/plan.go similarity index 61% rename from ee/query-service/model/plans.go rename to pkg/types/licensetypes/plan.go index 2de2e7ccb87e..452101808488 100644 --- a/ee/query-service/model/plans.go +++ b/pkg/types/licensetypes/plan.go @@ -1,8 +1,6 @@ -package model +package licensetypes -import ( - basemodel "github.com/SigNoz/signoz/pkg/query-service/model" -) +import "github.com/SigNoz/signoz/pkg/types/featuretypes" const SSO = "SSO" const Basic = "BASIC_PLAN" @@ -26,44 +24,44 @@ const ChatSupport = "CHAT_SUPPORT" const Gateway = "GATEWAY" const PremiumSupport = "PREMIUM_SUPPORT" -var BasicPlan = basemodel.FeatureSet{ - basemodel.Feature{ +var BasicPlan = featuretypes.FeatureSet{ + &featuretypes.GettableFeature{ Name: SSO, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.UseSpanMetrics, + &featuretypes.GettableFeature{ + Name: featuretypes.UseSpanMetrics, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: Gateway, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: PremiumSupport, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.AnomalyDetection, + &featuretypes.GettableFeature{ + Name: featuretypes.AnomalyDetection, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.TraceFunnels, + &featuretypes.GettableFeature{ + Name: featuretypes.TraceFunnels, Active: false, Usage: 0, UsageLimit: -1, @@ -71,58 +69,68 @@ var BasicPlan = basemodel.FeatureSet{ }, } -var EnterprisePlan = basemodel.FeatureSet{ - basemodel.Feature{ +var EnterprisePlan = featuretypes.FeatureSet{ + &featuretypes.GettableFeature{ Name: SSO, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.UseSpanMetrics, + &featuretypes.GettableFeature{ + Name: featuretypes.UseSpanMetrics, Active: false, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: Onboarding, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: ChatSupport, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: Gateway, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ + &featuretypes.GettableFeature{ Name: PremiumSupport, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.AnomalyDetection, + &featuretypes.GettableFeature{ + Name: featuretypes.AnomalyDetection, Active: true, Usage: 0, UsageLimit: -1, Route: "", }, - basemodel.Feature{ - Name: basemodel.TraceFunnels, + &featuretypes.GettableFeature{ + Name: featuretypes.TraceFunnels, + Active: false, + Usage: 0, + UsageLimit: -1, + Route: "", + }, +} + +var DefaultFeatureSet = featuretypes.FeatureSet{ + &featuretypes.GettableFeature{ + Name: featuretypes.UseSpanMetrics, Active: false, Usage: 0, UsageLimit: -1, diff --git a/pkg/types/licensetypes/subscription.go b/pkg/types/licensetypes/subscription.go new file mode 100644 index 000000000000..9065a8a46558 --- /dev/null +++ b/pkg/types/licensetypes/subscription.go @@ -0,0 +1,33 @@ +package licensetypes + +import ( + "encoding/json" + + "github.com/SigNoz/signoz/pkg/errors" +) + +type GettableSubscription struct { + RedirectURL string `json:"redirectURL"` +} + +type PostableSubscription struct { + SuccessURL string `json:"url"` +} + +func (p *PostableSubscription) UnmarshalJSON(data []byte) error { + var postableSubscription struct { + SuccessURL string `json:"url"` + } + + err := json.Unmarshal(data, &postableSubscription) + if err != nil { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to unmarshal payload") + } + + if postableSubscription.SuccessURL == "" { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "success url cannot be empty") + } + + p.SuccessURL = postableSubscription.SuccessURL + return nil +} diff --git a/pkg/types/preferencetypes/preference.go b/pkg/types/preferencetypes/preference.go index 9e38064a6d30..16d736567574 100644 --- a/pkg/types/preferencetypes/preference.go +++ b/pkg/types/preferencetypes/preference.go @@ -128,6 +128,16 @@ func NewDefaultPreferenceMap() map[string]Preference { IsDiscreteValues: true, AllowedScopes: []string{"user"}, }, + "SIDENAV_PINNED": { + Key: "SIDENAV_PINNED", + Name: "Keep the primary sidenav always open", + Description: "Controls whether the primary sidenav remains expanded or can be collapsed. When enabled, the sidenav will stay open and pinned to provide constant visibility of navigation options.", + ValueType: "boolean", + DefaultValue: false, + AllowedValues: []interface{}{true, false}, + IsDiscreteValues: true, + AllowedScopes: []string{"user"}, + }, } } diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/qb.go b/pkg/types/querybuildertypes/querybuildertypesv5/qb.go index a4706b0ede8d..bc0f7ce071c2 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/qb.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/qb.go @@ -34,24 +34,10 @@ type ConditionBuilder interface { ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) } -type FilterCompiler interface { - // Compile compiles the filter into a sqlbuilder.WhereClause. - Compile(ctx context.Context, filter string) (*sqlbuilder.WhereClause, []string, error) -} - -type RewriteCtx struct { - RateInterval uint64 -} - -type RewriteOption func(*RewriteCtx) - -func WithRateInterval(interval uint64) RewriteOption { - return func(c *RewriteCtx) { c.RateInterval = interval } -} - type AggExprRewriter interface { // Rewrite rewrites the aggregation expression to be used in the query. - Rewrite(ctx context.Context, expr string, opts ...RewriteOption) (string, []any, error) + Rewrite(ctx context.Context, expr string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) (string, []any, error) + RewriteMulti(ctx context.Context, exprs []string, rateInterval uint64, keys map[string][]*telemetrytypes.TelemetryFieldKey) ([]string, [][]any, error) } type Statement struct { diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/query.go b/pkg/types/querybuildertypes/querybuildertypesv5/query.go index 070fd5dfa2de..d56e7b0fa646 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/query.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/query.go @@ -11,19 +11,20 @@ type Query interface { // Window returns [from, to) in epoch‑ms so cache can slice/merge. Window() (startMS, endMS uint64) // Execute runs the query; implementors must be side‑effect‑free. - Execute(ctx context.Context) (Result, error) + Execute(ctx context.Context) (*Result, error) } type Result struct { - Type RequestType - Value any // concrete Go value (to be type asserted based on the RequestType) - Stats ExecStats + Type RequestType + Value any // concrete Go value (to be type asserted based on the RequestType) + Stats ExecStats + Warnings []string } type ExecStats struct { - RowsScanned int64 `json:"rowsScanned"` - BytesScanned int64 `json:"bytesScanned"` - DurationMS int64 `json:"durationMs"` + RowsScanned uint64 `json:"rowsScanned"` + BytesScanned uint64 `json:"bytesScanned"` + DurationMS uint64 `json:"durationMs"` } type TimeRange struct{ From, To uint64 } // ms since epoch diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/resp.go b/pkg/types/querybuildertypes/querybuildertypesv5/resp.go index 1e9bc69aaccc..bde7722939dd 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/resp.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/resp.go @@ -14,19 +14,19 @@ type QueryRangeResponse struct { } type TimeSeriesData struct { - QueryName string `json:"queryName"` - Aggregations []AggregationBucket `json:"aggregations"` + QueryName string `json:"queryName"` + Aggregations []*AggregationBucket `json:"aggregations"` } type AggregationBucket struct { - Index int `json:"index"` // or string Alias - Alias string `json:"alias"` - Series []TimeSeries `json:"series"` // no extra nesting + Index int `json:"index"` // or string Alias + Alias string `json:"alias"` + Series []*TimeSeries `json:"series"` // no extra nesting } type TimeSeries struct { - Labels []Label `json:"labels,omitempty"` - Values []TimeSeriesValue `json:"values"` + Labels []*Label `json:"labels,omitempty"` + Values []*TimeSeriesValue `json:"values"` } type Label struct { @@ -36,10 +36,10 @@ type Label struct { type TimeSeriesValue struct { Timestamp int64 `json:"timestamp"` - Value float64 `json:"value,omitempty"` + Value float64 `json:"value"` // for the heatmap type chart Values []float64 `json:"values,omitempty"` - Bucket Bucket `json:"bucket,omitempty"` + Bucket *Bucket `json:"bucket,omitempty"` } type Bucket struct { @@ -65,16 +65,17 @@ type ColumnDescriptor struct { } type ScalarData struct { - Columns []ColumnDescriptor `json:"columns"` - Data [][]any `json:"data"` + Columns []*ColumnDescriptor `json:"columns"` + Data [][]any `json:"data"` } type RawData struct { - QueryName string `json:"queryName"` - Rows []RawRow `json:"rows"` + QueryName string `json:"queryName"` + NextCursor string `json:"nextCursor"` + Rows []*RawRow `json:"rows"` } type RawRow struct { - Timestamp time.Time `json:"timestamp"` - Data map[string]any `json:"data"` + Timestamp time.Time `json:"timestamp"` + Data map[string]*any `json:"data"` } diff --git a/pkg/types/quickfiltertypes/filter.go b/pkg/types/quickfiltertypes/filter.go index ecfa9cfdb907..ac436a451b3b 100644 --- a/pkg/types/quickfiltertypes/filter.go +++ b/pkg/types/quickfiltertypes/filter.go @@ -2,12 +2,13 @@ package quickfiltertypes import ( "encoding/json" + "time" + "github.com/SigNoz/signoz/pkg/errors" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/valuer" "github.com/uptrace/bun" - "time" ) type Signal struct { @@ -48,7 +49,7 @@ func NewSignal(s string) (Signal, error) { case "exceptions": return SignalExceptions, nil default: - return Signal{}, errors.Newf(errors.TypeInternal, errors.CodeInternal, "invalid signal: "+s) + return Signal{}, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid signal: %s", s) } } @@ -163,7 +164,7 @@ func NewDefaultQuickFilter(orgID valuer.UUID) ([]*StorableQuickFilter, error) { apiMonitoringFilters := []map[string]interface{}{ {"key": "deployment.environment", "dataType": "string", "type": "resource"}, - {"key": "service.name", "dataType": "string", "type": "tag"}, + {"key": "service.name", "dataType": "string", "type": "resource"}, {"key": "rpc.method", "dataType": "string", "type": "tag"}, } diff --git a/pkg/types/ruletypes/maintenance.go b/pkg/types/ruletypes/maintenance.go index 911c1c06578a..4531baf3cecf 100644 --- a/pkg/types/ruletypes/maintenance.go +++ b/pkg/types/ruletypes/maintenance.go @@ -9,7 +9,6 @@ import ( "github.com/SigNoz/signoz/pkg/types" "github.com/SigNoz/signoz/pkg/valuer" "github.com/uptrace/bun" - "go.uber.org/zap" ) var ( @@ -73,11 +72,9 @@ func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bo return false } - zap.L().Info("alert found in maintenance", zap.String("alert", ruleID), zap.String("maintenance", m.Name)) // If alert is found, we check if it should be skipped based on the schedule loc, err := time.LoadLocation(m.Schedule.Timezone) if err != nil { - zap.L().Error("Error loading location", zap.String("timezone", m.Schedule.Timezone), zap.Error(err)) return false } @@ -85,13 +82,6 @@ func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bo // fixed schedule if !m.Schedule.StartTime.IsZero() && !m.Schedule.EndTime.IsZero() { - zap.L().Info("checking fixed schedule", - zap.String("rule", ruleID), - zap.String("maintenance", m.Name), - zap.Time("currentTime", currentTime), - zap.Time("startTime", m.Schedule.StartTime), - zap.Time("endTime", m.Schedule.EndTime)) - startTime := m.Schedule.StartTime.In(loc) endTime := m.Schedule.EndTime.In(loc) if currentTime.Equal(startTime) || currentTime.Equal(endTime) || @@ -103,19 +93,9 @@ func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bo // recurring schedule if m.Schedule.Recurrence != nil { start := m.Schedule.Recurrence.StartTime - duration := time.Duration(m.Schedule.Recurrence.Duration) - - zap.L().Info("checking recurring schedule base info", - zap.String("rule", ruleID), - zap.String("maintenance", m.Name), - zap.Time("startTime", start), - zap.Duration("duration", duration)) // Make sure the recurrence has started if currentTime.Before(start.In(loc)) { - zap.L().Info("current time is before recurrence start time", - zap.String("rule", ruleID), - zap.String("maintenance", m.Name)) return false } @@ -123,9 +103,6 @@ func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bo if m.Schedule.Recurrence.EndTime != nil { endTime := *m.Schedule.Recurrence.EndTime if !endTime.IsZero() && currentTime.After(endTime.In(loc)) { - zap.L().Info("current time is after recurrence end time", - zap.String("rule", ruleID), - zap.String("maintenance", m.Name)) return false } } @@ -235,8 +212,6 @@ func (m *GettablePlannedMaintenance) IsActive(now time.Time) bool { func (m *GettablePlannedMaintenance) IsUpcoming() bool { loc, err := time.LoadLocation(m.Schedule.Timezone) if err != nil { - // handle error appropriately, for example log and return false or fallback to UTC - zap.L().Error("Error loading timezone", zap.String("timezone", m.Schedule.Timezone), zap.Error(err)) return false } now := time.Now().In(loc) diff --git a/pkg/types/ssotypes/saml.go b/pkg/types/ssotypes/saml.go index dd318e6edf4c..c097f5580f67 100644 --- a/pkg/types/ssotypes/saml.go +++ b/pkg/types/ssotypes/saml.go @@ -11,7 +11,6 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/constants" saml2 "github.com/russellhaering/gosaml2" dsig "github.com/russellhaering/goxmldsig" - "go.uber.org/zap" ) func LoadCertificateStore(certString string) (dsig.X509CertificateStore, error) { @@ -103,6 +102,6 @@ func PrepareRequest(issuer, acsUrl, audience, entity, idp, certString string) (* IDPCertificateStore: certStore, SPKeyStore: randomKeyStore, } - zap.L().Debug("SAML request", zap.Any("sp", sp)) + return sp, nil } diff --git a/pkg/types/telemetrytypes/telemetrytypestest/metadata_store_stub.go b/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go similarity index 99% rename from pkg/types/telemetrytypes/telemetrytypestest/metadata_store_stub.go rename to pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go index 0d77ea467d25..3be85eeba1a6 100644 --- a/pkg/types/telemetrytypes/telemetrytypestest/metadata_store_stub.go +++ b/pkg/types/telemetrytypes/telemetrytypestest/metadata_store.go @@ -129,7 +129,7 @@ func (m *MockMetadataStore) GetRelatedValues(ctx context.Context, fieldValueSele // GetAllValues returns all values for a given field func (m *MockMetadataStore) GetAllValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, error) { if fieldValueSelector == nil { - return nil, nil + return &telemetrytypes.TelemetryFieldValues{}, nil } // Generate a lookup key from the selector diff --git a/pkg/types/user.go b/pkg/types/user.go index 9b146cfb6055..25428edde2fb 100644 --- a/pkg/types/user.go +++ b/pkg/types/user.go @@ -2,19 +2,17 @@ package types import ( "context" + "net/url" "strings" "time" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/valuer" + "github.com/google/uuid" "github.com/uptrace/bun" "golang.org/x/crypto/bcrypt" ) -const ( - SSOAvailable = "sso_available" -) - var ( ErrUserAlreadyExists = errors.MustNewCode("user_already_exists") ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists") @@ -57,6 +55,13 @@ type UserStore interface { // Auth Domain GetDomainByName(ctx context.Context, name string) (*StorableOrgDomain, error) + // org domain (auth domains) CRUD ops + GetDomainFromSsoResponse(ctx context.Context, relayState *url.URL) (*GettableOrgDomain, error) + ListDomains(ctx context.Context, orgId valuer.UUID) ([]*GettableOrgDomain, error) + GetDomain(ctx context.Context, id uuid.UUID) (*GettableOrgDomain, error) + CreateDomain(ctx context.Context, d *GettableOrgDomain) error + UpdateDomain(ctx context.Context, domain *GettableOrgDomain) error + DeleteDomain(ctx context.Context, id uuid.UUID) error // Temporary func for SSO GetDefaultOrgID(ctx context.Context) (string, error) diff --git a/tests/integration/fixtures/http.py b/tests/integration/fixtures/http.py index fd8f798df484..2df0f1cc305c 100644 --- a/tests/integration/fixtures/http.py +++ b/tests/integration/fixtures/http.py @@ -6,6 +6,7 @@ from testcontainers.core.container import Network from wiremock.client import ( Mapping, Mappings, + Requests, ) from wiremock.constants import Config from wiremock.testing.testcontainer import WireMockContainer @@ -78,3 +79,4 @@ def make_http_mocks(): yield _make_http_mocks Mappings.delete_all_mappings() + Requests.reset_request_journal() diff --git a/tests/integration/src/bootstrap/c_license.py b/tests/integration/src/bootstrap/c_license.py index 5a2647610f90..6ed524d1d8e5 100644 --- a/tests/integration/src/bootstrap/c_license.py +++ b/tests/integration/src/bootstrap/c_license.py @@ -69,7 +69,7 @@ def test_apply_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: timeout=5, ) - assert response.json()["count"] >= 1 + assert response.json()["count"] == 1 def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: @@ -123,7 +123,7 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None cursor = signoz.sqlstore.conn.cursor() cursor.execute( - "SELECT data FROM licenses_v3 WHERE id='0196360e-90cd-7a74-8313-1aa815ce2a67'" + "SELECT data FROM license WHERE id='0196360e-90cd-7a74-8313-1aa815ce2a67'" ) record = cursor.fetchone()[0] assert json.loads(record)["valid_from"] == 1732146922 @@ -134,7 +134,7 @@ def test_refresh_license(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None timeout=5, ) - assert response.json()["count"] >= 1 + assert response.json()["count"] == 1 def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: @@ -172,7 +172,7 @@ def test_license_checkout(signoz: SigNoz, make_http_mocks, get_jwt_token) -> Non timeout=5, ) - assert response.status_code == http.HTTPStatus.OK + assert response.status_code == http.HTTPStatus.CREATED assert response.json()["data"]["redirectURL"] == "https://signoz.checkout.com" response = requests.post( @@ -219,7 +219,7 @@ def test_license_portal(signoz: SigNoz, make_http_mocks, get_jwt_token) -> None: timeout=5, ) - assert response.status_code == http.HTTPStatus.OK + assert response.status_code == http.HTTPStatus.CREATED assert response.json()["data"]["redirectURL"] == "https://signoz.portal.com" response = requests.post(