diff --git a/frontend/src/pages/LiveLogs/index.tsx b/frontend/src/pages/LiveLogs/index.tsx
index 79a60a122201..0a11b33764bb 100644
--- a/frontend/src/pages/LiveLogs/index.tsx
+++ b/frontend/src/pages/LiveLogs/index.tsx
@@ -4,6 +4,7 @@ import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { EventSourceProvider } from 'providers/EventSource';
+import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';
@@ -17,7 +18,9 @@ function LiveLogs(): JSX.Element {
return (
-
+
+
+
);
}
diff --git a/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx b/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx
index 778ee993c3ed..c5efb1a552a4 100644
--- a/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx
+++ b/frontend/src/pages/LogsExplorer/__tests__/LogsExplorer.test.tsx
@@ -8,6 +8,7 @@ import { noop } from 'lodash-es';
import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
+import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
// https://virtuoso.dev/mocking-in-tests/
import { VirtuosoMockContext } from 'react-virtuoso';
@@ -73,6 +74,25 @@ jest.mock('hooks/useSafeNavigate', () => ({
}),
}));
+// Mock usePreferenceSync
+jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
+ usePreferenceSync: (): any => ({
+ preferences: {
+ columns: [],
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small',
+ version: 1,
+ },
+ },
+ loading: false,
+ error: null,
+ updateColumns: jest.fn(),
+ updateFormatting: jest.fn(),
+ }),
+}));
+
const logsQueryServerRequest = (): void =>
server.use(
rest.post(queryRangeURL, (req, res, ctx) =>
@@ -88,7 +108,11 @@ describe('Logs Explorer Tests', () => {
queryByText,
getByTestId,
queryByTestId,
- } = render(
);
+ } = render(
+
+
+ ,
+ );
// check the presence of frequency chart content
expect(getByText(frequencyChartContent)).toBeInTheDocument();
@@ -124,11 +148,13 @@ describe('Logs Explorer Tests', () => {
// mocking the query range API to return the logs
logsQueryServerRequest();
const { queryByText, queryByTestId } = render(
-
-
- ,
+
+
+
+
+ ,
);
// check for loading state to be not present
@@ -192,11 +218,13 @@ describe('Logs Explorer Tests', () => {
isStagedQueryUpdated: (): boolean => false,
}}
>
-
-
-
+
+
+
+
+
,
);
@@ -213,7 +241,11 @@ describe('Logs Explorer Tests', () => {
});
test('frequency chart visibility and switch toggle', async () => {
- const { getByRole, queryByText } = render(
);
+ const { getByRole, queryByText } = render(
+
+
+ ,
+ );
// check the presence of Frequency Chart
expect(queryByText('Frequency chart')).toBeInTheDocument();
diff --git a/frontend/src/pages/LogsExplorer/index.tsx b/frontend/src/pages/LogsExplorer/index.tsx
index e1ab02e369e7..8d5a972cc169 100644
--- a/frontend/src/pages/LogsExplorer/index.tsx
+++ b/frontend/src/pages/LogsExplorer/index.tsx
@@ -23,6 +23,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { isEqual, isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
+import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';
@@ -35,6 +36,8 @@ function LogsExplorer(): JSX.Element {
const [selectedView, setSelectedView] = useState
(
SELECTED_VIEWS.SEARCH,
);
+ const { preferences, loading: preferencesLoading } = usePreferenceContext();
+
const [showFilters, setShowFilters] = useState(() => {
const localStorageValue = getLocalStorageKey(
LOCALSTORAGE.SHOW_LOGS_QUICK_FILTERS,
@@ -83,7 +86,6 @@ function LogsExplorer(): JSX.Element {
}, [currentQuery.builder.queryData, currentQuery.builder.queryData.length]);
const {
- queryData: optionsQueryData,
redirectWithQuery: redirectWithOptionsData,
} = useUrlQueryData(URL_OPTIONS, defaultOptionsQuery);
@@ -164,12 +166,34 @@ function LogsExplorer(): JSX.Element {
);
useEffect(() => {
- const migratedQuery = migrateOptionsQuery(optionsQueryData);
+ if (!preferences || preferencesLoading) {
+ return;
+ }
+ const migratedQuery = migrateOptionsQuery({
+ selectColumns: preferences.columns || defaultLogsSelectedColumns,
+ maxLines: preferences.formatting?.maxLines || defaultOptionsQuery.maxLines,
+ format: preferences.formatting?.format || defaultOptionsQuery.format,
+ fontSize: preferences.formatting?.fontSize || defaultOptionsQuery.fontSize,
+ version: preferences.formatting?.version,
+ });
// Only redirect if the query was actually modified
- if (!isEqual(migratedQuery, optionsQueryData)) {
+ if (
+ !isEqual(migratedQuery, {
+ selectColumns: preferences?.columns,
+ maxLines: preferences?.formatting?.maxLines,
+ format: preferences?.formatting?.format,
+ fontSize: preferences?.formatting?.fontSize,
+ version: preferences?.formatting?.version,
+ })
+ ) {
redirectWithOptionsData(migratedQuery);
}
- }, [migrateOptionsQuery, optionsQueryData, redirectWithOptionsData]);
+ }, [
+ migrateOptionsQuery,
+ preferences,
+ redirectWithOptionsData,
+ preferencesLoading,
+ ]);
const isMultipleQueries = useMemo(
() =>
diff --git a/frontend/src/pages/LogsModulePage/constants.tsx b/frontend/src/pages/LogsModulePage/constants.tsx
index 1dbf667fd4f2..4fd6504b1e62 100644
--- a/frontend/src/pages/LogsModulePage/constants.tsx
+++ b/frontend/src/pages/LogsModulePage/constants.tsx
@@ -4,9 +4,14 @@ import { Compass, TowerControl, Workflow } from 'lucide-react';
import LogsExplorer from 'pages/LogsExplorer';
import Pipelines from 'pages/Pipelines';
import SaveView from 'pages/SaveView';
+import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const logsExplorer: TabRoutes = {
- Component: LogsExplorer,
+ Component: (): JSX.Element => (
+
+
+
+ ),
name: (
Explorer
diff --git a/frontend/src/pages/MetricsExplorer/constants.tsx b/frontend/src/pages/MetricsExplorer/constants.tsx
index daad1775b5be..f3c027e738ca 100644
--- a/frontend/src/pages/MetricsExplorer/constants.tsx
+++ b/frontend/src/pages/MetricsExplorer/constants.tsx
@@ -4,6 +4,7 @@ import ExplorerPage from 'container/MetricsExplorer/Explorer';
import SummaryPage from 'container/MetricsExplorer/Summary';
import { BarChart2, Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
+import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
export const Summary: TabRoutes = {
Component: SummaryPage,
@@ -17,7 +18,11 @@ export const Summary: TabRoutes = {
};
export const Explorer: TabRoutes = {
- Component: ExplorerPage,
+ Component: (): JSX.Element => (
+
+
+
+ ),
name: (
Explorer
diff --git a/frontend/src/pages/TracesExplorer/index.tsx b/frontend/src/pages/TracesExplorer/index.tsx
index 28324a0baf3c..ec7f3d0b7964 100644
--- a/frontend/src/pages/TracesExplorer/index.tsx
+++ b/frontend/src/pages/TracesExplorer/index.tsx
@@ -75,7 +75,7 @@ function TracesExplorer(): JSX.Element {
const isGroupByExist = useMemo(() => {
const groupByCount: number = currentQuery.builder.queryData.reduce
(
- (acc, query) => acc + query.groupBy.length,
+ (acc, query) => acc + (query?.groupBy?.length || 0),
0,
);
diff --git a/frontend/src/pages/TracesModulePage/constants.tsx b/frontend/src/pages/TracesModulePage/constants.tsx
index 566933f015c8..bc2519a0fc53 100644
--- a/frontend/src/pages/TracesModulePage/constants.tsx
+++ b/frontend/src/pages/TracesModulePage/constants.tsx
@@ -5,10 +5,15 @@ import SaveView from 'pages/SaveView';
import TracesExplorer from 'pages/TracesExplorer';
import TracesFunnelDetails from 'pages/TracesFunnelDetails';
import TracesFunnels from 'pages/TracesFunnels';
+import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { matchPath } from 'react-router-dom';
export const tracesExplorer: TabRoutes = {
- Component: TracesExplorer,
+ Component: (): JSX.Element => (
+
+
+
+ ),
name: (
Explorer
diff --git a/frontend/src/providers/preferences/__tests__/PreferenceContextProvider.test.tsx b/frontend/src/providers/preferences/__tests__/PreferenceContextProvider.test.tsx
new file mode 100644
index 000000000000..b5ced03b5e77
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/PreferenceContextProvider.test.tsx
@@ -0,0 +1,154 @@
+/* eslint-disable sonarjs/no-identical-functions */
+import { render, screen } from '@testing-library/react';
+import {
+ FormattingOptions,
+ PreferenceMode,
+ Preferences,
+} from 'providers/preferences/types';
+import { MemoryRouter, Route, Switch } from 'react-router-dom';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import {
+ PreferenceContextProvider,
+ usePreferenceContext,
+} from '../context/PreferenceContextProvider';
+
+// Mock the usePreferenceSync hook
+jest.mock('../sync/usePreferenceSync', () => ({
+ usePreferenceSync: jest.fn().mockReturnValue({
+ preferences: {
+ columns: [] as BaseAutocompleteData[],
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small',
+ version: 1,
+ } as FormattingOptions,
+ } as Preferences,
+ loading: false,
+ error: null,
+ updateColumns: jest.fn(),
+ updateFormatting: jest.fn(),
+ }),
+}));
+
+// Test component that consumes the context
+function TestConsumer(): JSX.Element {
+ const context = usePreferenceContext();
+ return (
+
+
{context.mode}
+
{context.dataSource}
+
{String(context.loading)}
+
{String(context.error)}
+
{context.savedViewId || 'no-view-id'}
+
+ );
+}
+
+describe('PreferenceContextProvider', () => {
+ it('should provide context with direct mode when no viewKey is present', () => {
+ render(
+
+
+ (
+
+
+
+ )}
+ />
+
+ ,
+ );
+
+ expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
+ expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
+ expect(screen.getByTestId('loading')).toHaveTextContent('false');
+ expect(screen.getByTestId('error')).toHaveTextContent('null');
+ expect(screen.getByTestId('savedViewId')).toHaveTextContent('no-view-id');
+ });
+
+ it('should provide context with savedView mode when viewKey is present', () => {
+ render(
+
+
+ (
+
+
+
+ )}
+ />
+
+ ,
+ );
+
+ expect(screen.getByTestId('mode')).toHaveTextContent('savedView');
+ expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
+ expect(screen.getByTestId('savedViewId')).toHaveTextContent('test-view-id');
+ });
+
+ it('should set traces dataSource when pathname includes traces', () => {
+ render(
+
+
+ (
+
+
+
+ )}
+ />
+
+ ,
+ );
+
+ expect(screen.getByTestId('dataSource')).toHaveTextContent('traces');
+ });
+
+ it('should handle invalid viewKey JSON gracefully', () => {
+ // Mock console.error to avoid test output clutter
+ const originalConsoleError = console.error;
+ console.error = jest.fn();
+
+ render(
+
+
+ (
+
+
+
+ )}
+ />
+
+ ,
+ );
+
+ expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
+ expect(console.error).toHaveBeenCalled();
+
+ // Restore console.error
+ console.error = originalConsoleError;
+ });
+
+ it('should throw error when usePreferenceContext is used outside provider', () => {
+ // Suppress the error output for this test
+ const originalConsoleError = console.error;
+ console.error = jest.fn();
+
+ expect(() => {
+ render(
);
+ }).toThrow(
+ 'usePreferenceContext must be used within PreferenceContextProvider',
+ );
+
+ // Restore console.error
+ console.error = originalConsoleError;
+ });
+});
diff --git a/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts b/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts
new file mode 100644
index 000000000000..b6ab1b18afe2
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/logsLoaderConfig.test.ts
@@ -0,0 +1,162 @@
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { LogViewMode } from 'container/LogsTable';
+import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
+import { FontSize } from 'container/OptionsMenu/types';
+import { FormattingOptions } from 'providers/preferences/types';
+import {
+ BaseAutocompleteData,
+ DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import logsLoaderConfig from '../configs/logsLoaderConfig';
+
+// Mock localStorage
+const mockLocalStorage: Record
= {};
+
+jest.mock('api/browser/localstorage/get', () => ({
+ __esModule: true,
+ default: jest.fn((key: string) => mockLocalStorage[key] || null),
+}));
+
+describe('logsLoaderConfig', () => {
+ // Save original location object
+ const originalWindowLocation = window.location;
+ let mockedLocation: Partial;
+
+ beforeEach(() => {
+ // Setup a mocked location object
+ mockedLocation = {
+ ...originalWindowLocation,
+ search: '',
+ };
+
+ // Mock the window.location property
+ Object.defineProperty(window, 'location', {
+ configurable: true,
+ value: mockedLocation,
+ writable: true,
+ });
+
+ // Clear mocked localStorage
+ Object.keys(mockLocalStorage).forEach((key) => {
+ delete mockLocalStorage[key];
+ });
+ });
+
+ afterEach(() => {
+ // Restore original location
+ Object.defineProperty(window, 'location', {
+ configurable: true,
+ value: originalWindowLocation,
+ writable: true,
+ });
+ });
+
+ it('should have priority order: local, url, default', () => {
+ expect(logsLoaderConfig.priority).toEqual(['local', 'url', 'default']);
+ });
+
+ it('should load from localStorage when available', async () => {
+ const mockColumns: BaseAutocompleteData[] = [
+ {
+ key: 'test-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ // Set up localStorage mock data with the correct key from LOCALSTORAGE enum
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: mockColumns,
+ maxLines: 10,
+ format: 'json',
+ fontSize: 'large',
+ version: 2,
+ });
+
+ const result = await logsLoaderConfig.local();
+
+ expect(result).toEqual({
+ columns: mockColumns,
+ formatting: {
+ maxLines: 10,
+ format: 'json' as LogViewMode,
+ fontSize: 'large' as FontSize,
+ version: 2,
+ } as FormattingOptions,
+ });
+ });
+
+ it('should handle invalid localStorage data gracefully', async () => {
+ // Set up invalid localStorage mock data
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = 'invalid-json';
+
+ const result = await logsLoaderConfig.local();
+
+ expect(result).toEqual({
+ columns: [] as BaseAutocompleteData[],
+ formatting: undefined,
+ });
+ });
+
+ it('should load from URL when available', async () => {
+ const mockColumns: BaseAutocompleteData[] = [
+ {
+ key: 'url-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ // Set up URL search params
+ mockedLocation.search = `?options=${encodeURIComponent(
+ JSON.stringify({
+ selectColumns: mockColumns,
+ maxLines: 5,
+ format: 'raw',
+ fontSize: 'medium',
+ version: 1,
+ }),
+ )}`;
+
+ const result = await logsLoaderConfig.url();
+
+ expect(result).toEqual({
+ columns: mockColumns,
+ formatting: {
+ maxLines: 5,
+ format: 'raw' as LogViewMode,
+ fontSize: 'medium' as FontSize,
+ version: 1,
+ } as FormattingOptions,
+ });
+ });
+
+ it('should handle invalid URL data gracefully', async () => {
+ // Set up invalid URL search params
+ mockedLocation.search = '?options=invalid-json';
+
+ const result = await logsLoaderConfig.url();
+
+ expect(result).toEqual({
+ columns: [] as BaseAutocompleteData[],
+ formatting: undefined,
+ });
+ });
+
+ it('should provide default values when no other source is available', async () => {
+ const result = await logsLoaderConfig.default();
+
+ expect(result).toEqual({
+ columns: defaultLogsSelectedColumns as BaseAutocompleteData[],
+ formatting: {
+ maxLines: 2,
+ format: 'table' as LogViewMode,
+ fontSize: 'small' as FontSize,
+ version: 1,
+ } as FormattingOptions,
+ });
+ });
+});
diff --git a/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts b/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts
new file mode 100644
index 000000000000..6f9c42176b04
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/logsUpdaterConfig.test.ts
@@ -0,0 +1,261 @@
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { LogViewMode } from 'container/LogsTable';
+import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
+import { FontSize } from 'container/OptionsMenu/types';
+import {
+ FormattingOptions,
+ PreferenceMode,
+ Preferences,
+} from 'providers/preferences/types';
+import {
+ BaseAutocompleteData,
+ DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import getLogsUpdaterConfig from '../configs/logsUpdaterConfig';
+
+// Mock localStorage
+const mockLocalStorage: Record = {};
+
+jest.mock('api/browser/localstorage/set', () => ({
+ __esModule: true,
+ default: jest.fn((key: string, value: string) => {
+ mockLocalStorage[key] = value;
+ }),
+}));
+
+// Mock localStorage.getItem
+Object.defineProperty(window, 'localStorage', {
+ value: {
+ getItem: jest.fn((key: string) => mockLocalStorage[key] || null),
+ setItem: jest.fn((key: string, value: string) => {
+ mockLocalStorage[key] = value;
+ }),
+ },
+ writable: true,
+});
+
+describe('logsUpdaterConfig', () => {
+ // Mock redirectWithOptionsData and setSavedViewPreferences
+ const redirectWithOptionsData = jest.fn();
+ const setSavedViewPreferences = jest.fn();
+
+ const mockPreferences: Preferences = {
+ columns: [],
+ formatting: {
+ maxLines: 2,
+ format: 'table' as LogViewMode,
+ fontSize: 'small' as FontSize,
+ version: 1,
+ },
+ };
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ // Clear mocked localStorage
+ Object.keys(mockLocalStorage).forEach((key) => {
+ delete mockLocalStorage[key];
+ });
+ });
+
+ it('should update columns in localStorage for direct mode', () => {
+ const logsUpdater = getLogsUpdaterConfig(
+ mockPreferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ );
+
+ const newColumns: BaseAutocompleteData[] = [
+ {
+ key: 'new-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ // Set initial localStorage data
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: [
+ {
+ key: 'old-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ],
+ maxLines: 2,
+ });
+
+ logsUpdater.updateColumns(newColumns, PreferenceMode.DIRECT);
+
+ // Should update URL
+ expect(redirectWithOptionsData).toHaveBeenCalledWith({
+ ...defaultOptionsQuery,
+ ...mockPreferences.formatting,
+ selectColumns: newColumns,
+ });
+
+ // Should update localStorage
+ const storedData = JSON.parse(
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
+ );
+ expect(storedData.selectColumns).toEqual(newColumns);
+ expect(storedData.maxLines).toBe(2); // Should preserve other fields
+
+ // Should not update saved view preferences
+ expect(setSavedViewPreferences).not.toHaveBeenCalled();
+ });
+
+ it('should update columns in savedViewPreferences for savedView mode', () => {
+ const logsUpdater = getLogsUpdaterConfig(
+ mockPreferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ );
+
+ const newColumns: BaseAutocompleteData[] = [
+ {
+ key: 'new-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ logsUpdater.updateColumns(newColumns, PreferenceMode.SAVED_VIEW);
+
+ // Should not update URL in savedView mode
+ expect(redirectWithOptionsData).not.toHaveBeenCalled();
+
+ // Should not update localStorage in savedView mode
+ expect(mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS]).toBeUndefined();
+
+ // Should update saved view preferences
+ expect(setSavedViewPreferences).toHaveBeenCalledWith(expect.any(Function));
+ });
+
+ it('should update formatting options in localStorage for direct mode', () => {
+ const logsUpdater = getLogsUpdaterConfig(
+ mockPreferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ );
+
+ const newFormatting: FormattingOptions = {
+ maxLines: 5,
+ format: 'json' as LogViewMode,
+ fontSize: 'large' as FontSize,
+ version: 1,
+ };
+
+ // Set initial localStorage data
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: [
+ {
+ key: 'column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ],
+ maxLines: 2,
+ format: 'table',
+ });
+
+ logsUpdater.updateFormatting(newFormatting, PreferenceMode.DIRECT);
+
+ // Should always update URL for both modes
+ expect(redirectWithOptionsData).toHaveBeenCalledWith({
+ ...defaultOptionsQuery,
+ ...mockPreferences.formatting,
+ ...newFormatting,
+ });
+
+ // Should update localStorage in direct mode
+ const storedData = JSON.parse(
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
+ );
+ expect(storedData.maxLines).toBe(5);
+ expect(storedData.format).toBe('json');
+ expect(storedData.fontSize).toBe('large');
+ expect(storedData.version).toBe(1);
+ expect(storedData.selectColumns).toEqual([
+ {
+ key: 'column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ]); // Should preserve columns
+ });
+
+ it('should not update localStorage for savedView mode in updateFormatting', () => {
+ const logsUpdater = getLogsUpdaterConfig(
+ mockPreferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ );
+
+ const newFormatting: FormattingOptions = {
+ maxLines: 5,
+ format: 'json' as LogViewMode,
+ fontSize: 'large' as FontSize,
+ version: 1,
+ };
+
+ // Set initial localStorage data
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: [
+ {
+ key: 'column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ],
+ maxLines: 2,
+ format: 'table',
+ });
+
+ logsUpdater.updateFormatting(newFormatting, PreferenceMode.SAVED_VIEW);
+
+ // Should not override localStorage in savedView mode
+ const storedData = JSON.parse(
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
+ );
+ expect(storedData.maxLines).toBe(2); // Should remain the same
+ expect(storedData.format).toBe('table'); // Should remain the same
+
+ // Should update saved view preferences
+ expect(setSavedViewPreferences).toHaveBeenCalledWith(expect.any(Function));
+ });
+
+ it('should initialize localStorage if it does not exist', () => {
+ const logsUpdater = getLogsUpdaterConfig(
+ mockPreferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ );
+
+ const newFormatting: FormattingOptions = {
+ maxLines: 5,
+ format: 'json' as LogViewMode,
+ fontSize: 'large' as FontSize,
+ version: 1,
+ };
+
+ // No initial localStorage data
+
+ logsUpdater.updateFormatting(newFormatting, PreferenceMode.DIRECT);
+
+ // Should create localStorage entry
+ const storedData = JSON.parse(
+ mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
+ );
+ expect(storedData.maxLines).toBe(5);
+ expect(storedData.format).toBe('json');
+ expect(storedData.fontSize).toBe('large');
+ expect(storedData.version).toBe(1);
+ });
+});
diff --git a/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts b/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts
new file mode 100644
index 000000000000..230c297e09c1
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/tracesLoaderConfig.test.ts
@@ -0,0 +1,131 @@
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
+import {
+ BaseAutocompleteData,
+ DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import tracesLoaderConfig from '../configs/tracesLoaderConfig';
+
+// Mock localStorage
+const mockLocalStorage: Record = {};
+
+jest.mock('api/browser/localstorage/get', () => ({
+ __esModule: true,
+ default: jest.fn((key: string) => mockLocalStorage[key] || null),
+}));
+
+describe('tracesLoaderConfig', () => {
+ // Save original location object
+ const originalWindowLocation = window.location;
+ let mockedLocation: Partial;
+
+ beforeEach(() => {
+ // Setup a mocked location object
+ mockedLocation = {
+ ...originalWindowLocation,
+ search: '',
+ };
+
+ // Mock the window.location property
+ Object.defineProperty(window, 'location', {
+ configurable: true,
+ value: mockedLocation,
+ writable: true,
+ });
+
+ // Clear mocked localStorage
+ Object.keys(mockLocalStorage).forEach((key) => {
+ delete mockLocalStorage[key];
+ });
+ });
+
+ afterEach(() => {
+ // Restore original location
+ Object.defineProperty(window, 'location', {
+ configurable: true,
+ value: originalWindowLocation,
+ writable: true,
+ });
+ });
+
+ it('should have priority order: local, url, default', () => {
+ expect(tracesLoaderConfig.priority).toEqual(['local', 'url', 'default']);
+ });
+
+ it('should load from localStorage when available', async () => {
+ const mockColumns: BaseAutocompleteData[] = [
+ {
+ key: 'test-trace-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ // Set up localStorage mock data with the correct key from LOCALSTORAGE enum
+ mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: mockColumns,
+ });
+
+ const result = await tracesLoaderConfig.local();
+
+ expect(result).toEqual({
+ columns: mockColumns,
+ });
+ });
+
+ it('should handle invalid localStorage data gracefully', async () => {
+ // Set up invalid localStorage mock data
+ mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = 'invalid-json';
+
+ const result = await tracesLoaderConfig.local();
+
+ expect(result).toEqual({
+ columns: [] as BaseAutocompleteData[],
+ });
+ });
+
+ it('should load from URL when available', async () => {
+ const mockColumns: BaseAutocompleteData[] = [
+ {
+ key: 'url-trace-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ // Set up URL search params
+ mockedLocation.search = `?options=${encodeURIComponent(
+ JSON.stringify({
+ selectColumns: mockColumns,
+ }),
+ )}`;
+
+ const result = await tracesLoaderConfig.url();
+
+ expect(result).toEqual({
+ columns: mockColumns,
+ });
+ });
+
+ it('should handle invalid URL data gracefully', async () => {
+ // Set up invalid URL search params
+ mockedLocation.search = '?options=invalid-json';
+
+ const result = await tracesLoaderConfig.url();
+
+ expect(result).toEqual({
+ columns: [] as BaseAutocompleteData[],
+ });
+ });
+
+ it('should provide default values when no other source is available', async () => {
+ const result = await tracesLoaderConfig.default();
+
+ expect(result).toEqual({
+ columns: defaultTraceSelectedColumns as BaseAutocompleteData[],
+ });
+ });
+});
diff --git a/frontend/src/providers/preferences/__tests__/tracesUpdaterConfig.test.ts b/frontend/src/providers/preferences/__tests__/tracesUpdaterConfig.test.ts
new file mode 100644
index 000000000000..9b421a7c28f8
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/tracesUpdaterConfig.test.ts
@@ -0,0 +1,142 @@
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
+import {
+ BaseAutocompleteData,
+ DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import getTracesUpdaterConfig from '../configs/tracesUpdaterConfig';
+import { PreferenceMode } from '../types';
+
+// Mock setLocalStorageKey
+const mockSetLocalStorageKey = jest.fn();
+jest.mock('api/browser/localstorage/set', () => ({
+ __esModule: true,
+ default: (key: string, value: string): void =>
+ mockSetLocalStorageKey(key, value),
+}));
+
+// Mock localStorage
+let mockLocalStorage: Record = {};
+Object.defineProperty(global, 'localStorage', {
+ value: {
+ getItem: jest.fn((key: string) => mockLocalStorage[key] || null),
+ setItem: jest.fn((key: string, value: string) => {
+ mockLocalStorage[key] = value;
+ }),
+ },
+ writable: true,
+});
+
+describe('tracesUpdaterConfig', () => {
+ // Mock functions
+ const mockRedirectWithOptionsData = jest.fn();
+ const mockSetSavedViewPreferences = jest.fn();
+
+ // Test data
+ const mockColumns: BaseAutocompleteData[] = [
+ {
+ key: 'test-trace-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ];
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ // Reset mockLocalStorage
+ mockLocalStorage = {};
+ });
+
+ it('should update columns in localStorage and redirect with options in direct mode', () => {
+ const tracesUpdaterConfig = getTracesUpdaterConfig(
+ mockRedirectWithOptionsData,
+ mockSetSavedViewPreferences,
+ );
+
+ tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.DIRECT);
+
+ // Should redirect with the updated columns
+ expect(mockRedirectWithOptionsData).toHaveBeenCalledWith({
+ ...defaultOptionsQuery,
+ selectColumns: mockColumns,
+ });
+
+ // Should set localStorage with the updated columns
+ expect(mockSetLocalStorageKey).toHaveBeenCalledWith(
+ LOCALSTORAGE.TRACES_LIST_OPTIONS,
+ JSON.stringify({ selectColumns: mockColumns }),
+ );
+ });
+
+ it('should merge with existing localStorage data in direct mode', () => {
+ // Setup existing localStorage data
+ mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
+ selectColumns: [
+ {
+ key: 'existing-column',
+ type: 'tag',
+ dataType: DataTypes.String,
+ isColumn: true,
+ },
+ ],
+ otherProp: 'value',
+ });
+
+ const tracesUpdaterConfig = getTracesUpdaterConfig(
+ mockRedirectWithOptionsData,
+ mockSetSavedViewPreferences,
+ );
+
+ tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.DIRECT);
+
+ // Should set localStorage with the updated columns while preserving other props
+ expect(mockSetLocalStorageKey).toHaveBeenCalledWith(
+ LOCALSTORAGE.TRACES_LIST_OPTIONS,
+ JSON.stringify({
+ selectColumns: mockColumns,
+ otherProp: 'value',
+ }),
+ );
+ });
+
+ it('should update savedViewPreferences in savedView mode', () => {
+ const tracesUpdaterConfig = getTracesUpdaterConfig(
+ mockRedirectWithOptionsData,
+ mockSetSavedViewPreferences,
+ );
+
+ tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.SAVED_VIEW);
+
+ // Should not redirect or modify localStorage in savedView mode
+ expect(mockRedirectWithOptionsData).not.toHaveBeenCalled();
+ expect(mockSetLocalStorageKey).not.toHaveBeenCalled();
+
+ // Should update savedViewPreferences
+ expect(mockSetSavedViewPreferences).toHaveBeenCalledWith({
+ columns: mockColumns,
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small',
+ version: 1,
+ },
+ });
+ });
+
+ it('should have a no-op updateFormatting method', () => {
+ const tracesUpdaterConfig = getTracesUpdaterConfig(
+ mockRedirectWithOptionsData,
+ mockSetSavedViewPreferences,
+ );
+
+ // Call updateFormatting and verify it does nothing
+ tracesUpdaterConfig.updateFormatting();
+
+ // No API calls should be made
+ expect(mockRedirectWithOptionsData).not.toHaveBeenCalled();
+ expect(mockSetLocalStorageKey).not.toHaveBeenCalled();
+ expect(mockSetSavedViewPreferences).not.toHaveBeenCalled();
+ });
+});
diff --git a/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx b/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx
new file mode 100644
index 000000000000..09f0b93e42fc
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/usePreferenceLoader.test.tsx
@@ -0,0 +1,152 @@
+/* eslint-disable sonarjs/no-identical-functions */
+/* eslint-disable sonarjs/no-duplicate-string */
+import { renderHook, waitFor } from '@testing-library/react';
+import { DataSource } from 'types/common/queryBuilder';
+
+import logsLoaderConfig from '../configs/logsLoaderConfig';
+import { usePreferenceLoader } from '../loader/usePreferenceLoader';
+
// Mock the config loaders so usePreferenceLoader resolves deterministically
// without touching localStorage or the URL.
jest.mock('../configs/logsLoaderConfig', () => ({
  __esModule: true,
  default: {
    priority: ['local', 'url', 'default'],
    local: jest.fn().mockResolvedValue({
      columns: [{ name: 'local-column' }],
      formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
    }),
    url: jest.fn().mockResolvedValue({
      columns: [{ name: 'url-column' }],
      formatting: { maxLines: 3, format: 'table', fontSize: 'small', version: 1 },
    }),
    default: jest.fn().mockResolvedValue({
      columns: [{ name: 'default-column' }],
      formatting: { maxLines: 2, format: 'table', fontSize: 'small', version: 1 },
    }),
  },
}));

// Traces loader mock: traces persist columns only, no formatting.
jest.mock('../configs/tracesLoaderConfig', () => ({
  __esModule: true,
  default: {
    priority: ['local', 'url', 'default'],
    local: jest.fn().mockResolvedValue({
      columns: [{ name: 'local-trace-column' }],
    }),
    url: jest.fn().mockResolvedValue({
      columns: [{ name: 'url-trace-column' }],
    }),
    default: jest.fn().mockResolvedValue({
      columns: [{ name: 'default-trace-column' }],
    }),
  },
}));
+
// Exercises the loader hook: priority resolution, reSync handling, and
// error propagation, against the mocked loader configs above.
describe('usePreferenceLoader', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should load logs preferences based on priority order', async () => {
    const setReSync = jest.fn();
    const { result } = renderHook(() =>
      usePreferenceLoader({
        dataSource: DataSource.LOGS,
        reSync: false,
        setReSync,
      }),
    );

    // Initially it should be loading
    expect(result.current.loading).toBe(true);
    expect(result.current.preferences).toBe(null);
    expect(result.current.error).toBe(null);

    // Wait for the loader to complete
    await waitFor(() => {
      expect(result.current.loading).toBe(false);
    });

    // Should have loaded from local storage (highest priority)
    expect(result.current.preferences).toEqual({
      columns: [{ name: 'local-column' }],
      formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
    });
    expect(result.current.error).toBe(null);
    expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false
  });

  it('should load traces preferences', async () => {
    const setReSync = jest.fn();
    const { result } = renderHook(() =>
      usePreferenceLoader({
        dataSource: DataSource.TRACES,
        reSync: false,
        setReSync,
      }),
    );

    // Wait for the loader to complete
    await waitFor(() => {
      expect(result.current.loading).toBe(false);
    });

    // Should have loaded trace columns
    expect(result.current.preferences).toEqual({
      columns: [{ name: 'local-trace-column' }],
    });
    expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false
  });

  it('should call setReSync when reSync is true', async () => {
    const setReSync = jest.fn();

    // Test that the hook calls setReSync(false) when reSync is true
    // We'll unmount quickly to avoid the infinite loop
    const { unmount } = renderHook(() =>
      usePreferenceLoader({
        dataSource: DataSource.LOGS,
        reSync: true,
        setReSync,
      }),
    );
    // Wait for the effect to run
    await waitFor(() => {
      expect(setReSync).toHaveBeenCalled();
    });

    // Unmount to stop the effect
    unmount();

    // Should have called setReSync(false) to reset the reSync flag
    expect(setReSync).toHaveBeenCalledWith(false);
  });

  it('should handle errors during loading', async () => {
    // Mock an error in the loader using jest.spyOn
    const localSpy = jest.spyOn(logsLoaderConfig, 'local');
    localSpy.mockRejectedValueOnce(new Error('Loading failed'));

    const setReSync = jest.fn();
    const { result } = renderHook(() =>
      usePreferenceLoader({
        dataSource: DataSource.LOGS,
        reSync: false,
        setReSync,
      }),
    );

    // Wait for the loader to complete
    await waitFor(() => {
      expect(result.current.loading).toBe(false);
    });

    // Should have set the error
    expect(result.current.error).toBeInstanceOf(Error);
    expect(result.current.error?.message).toBe('Loading failed');
    expect(result.current.preferences).toBe(null);

    // Restore original implementation
    localSpy.mockRestore();
  });
});
diff --git a/frontend/src/providers/preferences/__tests__/usePreferenceUpdater.test.tsx b/frontend/src/providers/preferences/__tests__/usePreferenceUpdater.test.tsx
new file mode 100644
index 000000000000..ccbb9b0236bc
--- /dev/null
+++ b/frontend/src/providers/preferences/__tests__/usePreferenceUpdater.test.tsx
@@ -0,0 +1,240 @@
+/* eslint-disable sonarjs/no-identical-functions */
+import { renderHook } from '@testing-library/react';
+import { LogViewMode } from 'container/LogsTable';
+import { FontSize } from 'container/OptionsMenu/types';
+import {
+ FormattingOptions,
+ PreferenceMode,
+ Preferences,
+} from 'providers/preferences/types';
+import { act } from 'react-dom/test-utils';
+import {
+ BaseAutocompleteData,
+ DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
+import { DataSource } from 'types/common/queryBuilder';
+
+import { usePreferenceUpdater } from '../updater/usePreferenceUpdater';
+
// Mock the config updaters: both data sources share the same spies so the
// tests can assert which updater path was taken and with what arguments.
const mockUpdateColumns = jest.fn();
const mockUpdateFormatting = jest.fn();

jest.mock('../configs/logsUpdaterConfig', () => ({
  __esModule: true,
  default: jest.fn().mockImplementation(() => ({
    updateColumns: mockUpdateColumns,
    updateFormatting: mockUpdateFormatting,
  })),
}));

jest.mock('../configs/tracesUpdaterConfig', () => ({
  __esModule: true,
  default: jest.fn().mockImplementation(() => ({
    updateColumns: mockUpdateColumns,
    updateFormatting: mockUpdateFormatting,
  })),
}));

// Mock the URL query hook (the updater only needs redirectWithQuery)
jest.mock('hooks/useUrlQueryData', () => ({
  __esModule: true,
  default: jest.fn().mockReturnValue({
    redirectWithQuery: jest.fn(),
  }),
}));
+
// Exercises the updater hook: correct updater dispatch per data source and
// the reSync(true) signal after every write.
describe('usePreferenceUpdater', () => {
  const mockPreferences: Preferences = {
    columns: [],
    formatting: {
      maxLines: 2,
      format: 'table' as LogViewMode,
      fontSize: 'small' as FontSize,
      version: 1,
    },
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should return updateColumns and updateFormatting functions', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.LOGS,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    // Should return the update functions
    expect(typeof result.current.updateColumns).toBe('function');
    expect(typeof result.current.updateFormatting).toBe('function');
  });

  it('should call the logs updater for updateColumns with logs dataSource', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();
    const newColumns: BaseAutocompleteData[] = [
      {
        key: 'new-column',
        type: 'tag',
        dataType: DataTypes.String,
        isColumn: true,
      },
    ];

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.LOGS,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    act(() => {
      result.current.updateColumns(newColumns);
    });

    // Should call the logs updater
    expect(mockUpdateColumns).toHaveBeenCalledWith(
      newColumns,
      PreferenceMode.DIRECT,
    );
    expect(setReSync).toHaveBeenCalledWith(true);
  });

  it('should call the logs updater for updateFormatting with logs dataSource', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();
    const newFormatting: FormattingOptions = {
      maxLines: 10,
      format: 'table' as LogViewMode,
      fontSize: 'large' as FontSize,
      version: 1,
    };

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.LOGS,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    act(() => {
      result.current.updateFormatting(newFormatting);
    });

    // Should call the logs updater
    expect(mockUpdateFormatting).toHaveBeenCalledWith(
      newFormatting,
      PreferenceMode.DIRECT,
    );
    expect(setReSync).toHaveBeenCalledWith(true);
  });

  it('should call the traces updater for updateColumns with traces dataSource', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();
    const newColumns: BaseAutocompleteData[] = [
      {
        key: 'new-trace-column',
        type: 'tag',
        dataType: DataTypes.String,
        isColumn: true,
      },
    ];

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.TRACES,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    act(() => {
      result.current.updateColumns(newColumns);
    });

    // Should call the traces updater
    expect(mockUpdateColumns).toHaveBeenCalledWith(
      newColumns,
      PreferenceMode.DIRECT,
    );
    expect(setReSync).toHaveBeenCalledWith(true);
  });

  it('should call the traces updater for updateFormatting with traces dataSource', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();
    const newFormatting: FormattingOptions = {
      maxLines: 10,
      format: 'table' as LogViewMode,
      fontSize: 'large' as FontSize,
      version: 1,
    };

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.TRACES,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    act(() => {
      result.current.updateFormatting(newFormatting);
    });

    // Should call the traces updater
    expect(mockUpdateFormatting).toHaveBeenCalledWith(
      newFormatting,
      PreferenceMode.DIRECT,
    );
    expect(setReSync).toHaveBeenCalledWith(true);
  });

  it('should increment reSync counter when updates are called', () => {
    const setReSync = jest.fn();
    const setSavedViewPreferences = jest.fn();

    const { result } = renderHook(() =>
      usePreferenceUpdater({
        dataSource: DataSource.LOGS,
        mode: PreferenceMode.DIRECT,
        preferences: mockPreferences,
        setReSync,
        setSavedViewPreferences,
      }),
    );

    act(() => {
      result.current.updateColumns([
        {
          key: 'column',
          type: 'tag',
          dataType: DataTypes.String,
          isColumn: true,
        },
      ]);
    });

    expect(setReSync).toHaveBeenCalledWith(true);
  });
});
diff --git a/frontend/src/providers/preferences/configs/logsLoaderConfig.ts b/frontend/src/providers/preferences/configs/logsLoaderConfig.ts
new file mode 100644
index 000000000000..9b5b8dd4bdb7
--- /dev/null
+++ b/frontend/src/providers/preferences/configs/logsLoaderConfig.ts
@@ -0,0 +1,67 @@
+/* eslint-disable no-empty */
+import getLocalStorageKey from 'api/browser/localstorage/get';
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
+import { FontSize } from 'container/OptionsMenu/types';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import { FormattingOptions } from '../types';
+
+// --- LOGS preferences loader config ---
+const logsLoaders = {
+ local: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ formatting: FormattingOptions;
+ }> => {
+ const local = getLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS);
+ if (local) {
+ try {
+ const parsed = JSON.parse(local);
+ return {
+ columns: parsed.selectColumns || [],
+ formatting: {
+ maxLines: parsed.maxLines ?? 2,
+ format: parsed.format ?? 'table',
+ fontSize: parsed.fontSize ?? 'small',
+ version: parsed.version ?? 1,
+ },
+ };
+ } catch {}
+ }
+ return { columns: [], formatting: undefined } as any;
+ },
+ url: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ formatting: FormattingOptions;
+ }> => {
+ const urlParams = new URLSearchParams(window.location.search);
+ try {
+ const options = JSON.parse(urlParams.get('options') || '{}');
+ return {
+ columns: options.selectColumns || [],
+ formatting: {
+ maxLines: options.maxLines ?? 2,
+ format: options.format ?? 'table',
+ fontSize: options.fontSize ?? 'small',
+ version: options.version ?? 1,
+ },
+ };
+ } catch {}
+ return { columns: [], formatting: undefined } as any;
+ },
+ default: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ formatting: FormattingOptions;
+ }> => ({
+ columns: defaultLogsSelectedColumns as BaseAutocompleteData[],
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small' as FontSize,
+ version: 1,
+ },
+ }),
+ priority: ['local', 'url', 'default'] as const,
+};
+
+export default logsLoaders;
diff --git a/frontend/src/providers/preferences/configs/logsUpdaterConfig.ts b/frontend/src/providers/preferences/configs/logsUpdaterConfig.ts
new file mode 100644
index 000000000000..b41e5ac13160
--- /dev/null
+++ b/frontend/src/providers/preferences/configs/logsUpdaterConfig.ts
@@ -0,0 +1,85 @@
+import setLocalStorageKey from 'api/browser/localstorage/set';
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
+import { FontSize, OptionsQuery } from 'container/OptionsMenu/types';
+import { Dispatch, SetStateAction } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import { FormattingOptions, PreferenceMode, Preferences } from '../types';
+
+// --- LOGS preferences updater config ---
+const getLogsUpdaterConfig = (
+ preferences: Preferences | null,
+ redirectWithOptionsData: (options: OptionsQuery) => void,
+ setSavedViewPreferences: Dispatch>,
+): {
+ updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
+ updateFormatting: (newFormatting: FormattingOptions, mode: string) => void;
+} => ({
+ updateColumns: (newColumns: BaseAutocompleteData[], mode: string): void => {
+ if (mode === PreferenceMode.SAVED_VIEW) {
+ setSavedViewPreferences((prev) => {
+ if (!prev) {
+ return {
+ columns: newColumns,
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small' as FontSize,
+ version: 1,
+ },
+ };
+ }
+
+ return {
+ ...prev,
+ columns: newColumns,
+ };
+ });
+ }
+
+ if (mode === PreferenceMode.DIRECT) {
+ // just need to update the columns see for remove props
+ redirectWithOptionsData({
+ ...defaultOptionsQuery,
+ ...preferences?.formatting,
+ selectColumns: newColumns,
+ });
+
+ // Also update local storage
+ const local = JSON.parse(
+ localStorage.getItem(LOCALSTORAGE.LOGS_LIST_OPTIONS) || '{}',
+ );
+ local.selectColumns = newColumns;
+ setLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS, JSON.stringify(local));
+ }
+ },
+ updateFormatting: (newFormatting: FormattingOptions, mode: string): void => {
+ if (mode === PreferenceMode.SAVED_VIEW) {
+ setSavedViewPreferences((prev) => {
+ if (!prev) return { columns: [], formatting: newFormatting };
+ return {
+ ...prev,
+ formatting: newFormatting,
+ };
+ });
+ }
+
+ if (mode === PreferenceMode.DIRECT) {
+ redirectWithOptionsData({
+ ...defaultOptionsQuery,
+ ...preferences?.formatting,
+ ...newFormatting,
+ });
+
+ // Also update local storage
+ const local = JSON.parse(
+ localStorage.getItem(LOCALSTORAGE.LOGS_LIST_OPTIONS) || '{}',
+ );
+ Object.assign(local, newFormatting);
+ setLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS, JSON.stringify(local));
+ }
+ },
+});
+
+export default getLogsUpdaterConfig;
diff --git a/frontend/src/providers/preferences/configs/tracesLoaderConfig.ts b/frontend/src/providers/preferences/configs/tracesLoaderConfig.ts
new file mode 100644
index 000000000000..cb323b6aecef
--- /dev/null
+++ b/frontend/src/providers/preferences/configs/tracesLoaderConfig.ts
@@ -0,0 +1,43 @@
+/* eslint-disable no-empty */
+import getLocalStorageKey from 'api/browser/localstorage/get';
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+// --- TRACES preferences loader config ---
+const tracesLoaders = {
+ local: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ }> => {
+ const local = getLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS);
+ if (local) {
+ try {
+ const parsed = JSON.parse(local);
+ return {
+ columns: parsed.selectColumns || [],
+ };
+ } catch {}
+ }
+ return { columns: [] };
+ },
+ url: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ }> => {
+ const urlParams = new URLSearchParams(window.location.search);
+ try {
+ const options = JSON.parse(urlParams.get('options') || '{}');
+ return {
+ columns: options.selectColumns || [],
+ };
+ } catch {}
+ return { columns: [] };
+ },
+ default: async (): Promise<{
+ columns: BaseAutocompleteData[];
+ }> => ({
+ columns: defaultTraceSelectedColumns as BaseAutocompleteData[],
+ }),
+ priority: ['local', 'url', 'default'] as const,
+};
+
+export default tracesLoaders;
diff --git a/frontend/src/providers/preferences/configs/tracesUpdaterConfig.ts b/frontend/src/providers/preferences/configs/tracesUpdaterConfig.ts
new file mode 100644
index 000000000000..f08408201c99
--- /dev/null
+++ b/frontend/src/providers/preferences/configs/tracesUpdaterConfig.ts
@@ -0,0 +1,49 @@
+import setLocalStorageKey from 'api/browser/localstorage/set';
+import { LOCALSTORAGE } from 'constants/localStorage';
+import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
+import { FontSize, OptionsQuery } from 'container/OptionsMenu/types';
+import { Dispatch, SetStateAction } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+
+import { PreferenceMode, Preferences } from '../types';
+
+// --- TRACES preferences updater config ---
+const getTracesUpdaterConfig = (
+ redirectWithOptionsData: (options: OptionsQuery) => void,
+ setSavedViewPreferences: Dispatch>,
+): {
+ updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
+ updateFormatting: () => void;
+} => ({
+ updateColumns: (newColumns: BaseAutocompleteData[], mode: string): void => {
+ // remove the formatting props
+ if (mode === PreferenceMode.SAVED_VIEW) {
+ setSavedViewPreferences({
+ columns: newColumns,
+ formatting: {
+ maxLines: 2,
+ format: 'table',
+ fontSize: 'small' as FontSize,
+ version: 1,
+ },
+ });
+ }
+
+ if (mode === PreferenceMode.DIRECT) {
+ // just need to update the columns see for remove props
+ redirectWithOptionsData({
+ ...defaultOptionsQuery,
+ selectColumns: newColumns,
+ });
+
+ const local = JSON.parse(
+ localStorage.getItem(LOCALSTORAGE.TRACES_LIST_OPTIONS) || '{}',
+ );
+ local.selectColumns = newColumns;
+ setLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS, JSON.stringify(local));
+ }
+ },
+ updateFormatting: (): void => {}, // no-op for traces
+});
+
+export default getTracesUpdaterConfig;
diff --git a/frontend/src/providers/preferences/context/PreferenceContextProvider.tsx b/frontend/src/providers/preferences/context/PreferenceContextProvider.tsx
new file mode 100644
index 000000000000..7ee7551579cb
--- /dev/null
+++ b/frontend/src/providers/preferences/context/PreferenceContextProvider.tsx
@@ -0,0 +1,84 @@
+import useUrlQuery from 'hooks/useUrlQuery';
+import {
+ PreferenceContextValue,
+ PreferenceMode,
+} from 'providers/preferences/types';
+import React, { createContext, useContext, useMemo } from 'react';
+import { useLocation } from 'react-router-dom';
+import { DataSource } from 'types/common/queryBuilder';
+
+import { usePreferenceSync } from '../sync/usePreferenceSync';
+
+const PreferenceContext = createContext(
+ undefined,
+);
+
+export function PreferenceContextProvider({
+ children,
+}: {
+ children: React.ReactNode;
+}): JSX.Element {
+ const location = useLocation();
+ const params = useUrlQuery();
+
+ let savedViewId = '';
+ const viewKeyParam = params.get('viewKey');
+ if (viewKeyParam) {
+ try {
+ savedViewId = JSON.parse(viewKeyParam);
+ } catch (e) {
+ console.error(e);
+ }
+ }
+ let dataSource: DataSource = DataSource.LOGS;
+ if (location.pathname.includes('traces')) dataSource = DataSource.TRACES;
+
+ const {
+ preferences,
+ loading,
+ error,
+ updateColumns,
+ updateFormatting,
+ } = usePreferenceSync({
+ mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
+ savedViewId: savedViewId || undefined,
+ dataSource,
+ });
+
+ const value = useMemo(
+ () => ({
+ preferences,
+ loading,
+ error,
+ mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
+ savedViewId: savedViewId || undefined,
+ dataSource,
+ updateColumns,
+ updateFormatting,
+ }),
+ [
+ savedViewId,
+ dataSource,
+ preferences,
+ loading,
+ error,
+ updateColumns,
+ updateFormatting,
+ ],
+ );
+
+ return (
+
+ {children}
+
+ );
+}
+
+export function usePreferenceContext(): PreferenceContextValue {
+ const ctx = useContext(PreferenceContext);
+ if (!ctx)
+ throw new Error(
+ 'usePreferenceContext must be used within PreferenceContextProvider',
+ );
+ return ctx;
+}
diff --git a/frontend/src/providers/preferences/loader/usePreferenceLoader.ts b/frontend/src/providers/preferences/loader/usePreferenceLoader.ts
new file mode 100644
index 000000000000..09145da6e2b6
--- /dev/null
+++ b/frontend/src/providers/preferences/loader/usePreferenceLoader.ts
@@ -0,0 +1,108 @@
+/* eslint-disable sonarjs/cognitive-complexity */
+/* eslint-disable no-empty */
+import { useEffect, useState } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+import { DataSource } from 'types/common/queryBuilder';
+
+import logsLoaderConfig from '../configs/logsLoaderConfig';
+import tracesLoaderConfig from '../configs/tracesLoaderConfig';
+import { FormattingOptions, Preferences } from '../types';
+
+// Generic preferences loader that works with any config
+async function preferencesLoader(config: {
+ priority: readonly string[];
+ [key: string]: any;
+}): Promise {
+ const findValidLoader = async (): Promise => {
+ // Try each loader in priority order
+ const results = await Promise.all(
+ config.priority.map(async (source) => ({
+ source,
+ result: await config[source](),
+ })),
+ );
+
+ // Find valid columns and formatting independently
+ const validColumnsResult = results.find(
+ ({ result }) => result.columns?.length,
+ );
+ const validFormattingResult = results.find(({ result }) => result.formatting);
+
+ // Combine valid results or fallback to default
+ const finalResult = {
+ columns: validColumnsResult?.result.columns || config.default().columns,
+ formatting:
+ validFormattingResult?.result.formatting || config.default().formatting,
+ };
+
+ return finalResult as T;
+ };
+
+ return findValidLoader();
+}
+
// Use the generic loader with specific configs
// Loads logs preferences (columns + formatting) via logsLoaderConfig.
async function logsPreferencesLoader(): Promise<{
  columns: BaseAutocompleteData[];
  formatting: FormattingOptions;
}> {
  return preferencesLoader(logsLoaderConfig);
}
+
// Loads traces preferences (columns only) via tracesLoaderConfig.
async function tracesPreferencesLoader(): Promise<{
  columns: BaseAutocompleteData[];
}> {
  return preferencesLoader(tracesLoaderConfig);
}
+
+export function usePreferenceLoader({
+ dataSource,
+ reSync,
+ setReSync,
+}: {
+ dataSource: DataSource;
+ reSync: boolean;
+ setReSync: (value: boolean) => void;
+}): {
+ preferences: Preferences | null;
+ loading: boolean;
+ error: Error | null;
+} {
+ const [preferences, setPreferences] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect((): void => {
+ async function loadPreferences(): Promise {
+ setLoading(true);
+ setError(null);
+
+ try {
+ if (dataSource === DataSource.LOGS) {
+ const { columns, formatting } = await logsPreferencesLoader();
+ setPreferences({ columns, formatting });
+ }
+
+ if (dataSource === DataSource.TRACES) {
+ const { columns } = await tracesPreferencesLoader();
+ setPreferences({ columns });
+ }
+ } catch (e) {
+ setError(e as Error);
+ } finally {
+ setLoading(false);
+ // Reset reSync back to false after loading is complete
+ if (reSync) {
+ setReSync(false);
+ }
+ }
+ }
+
+ // Only load preferences on initial mount or when reSync is true
+ if (loading || reSync) {
+ loadPreferences();
+ }
+ }, [dataSource, reSync, setReSync, loading]);
+
+ return { preferences, loading, error };
+}
diff --git a/frontend/src/providers/preferences/sync/usePreferenceSync.ts b/frontend/src/providers/preferences/sync/usePreferenceSync.ts
new file mode 100644
index 000000000000..7cd5202bc0a3
--- /dev/null
+++ b/frontend/src/providers/preferences/sync/usePreferenceSync.ts
@@ -0,0 +1,84 @@
+import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
+import { defaultSelectedColumns as defaultTracesSelectedColumns } from 'container/TracesExplorer/ListView/configs';
+import { useGetAllViews } from 'hooks/saveViews/useGetAllViews';
+import { useEffect, useState } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+import { DataSource } from 'types/common/queryBuilder';
+
+import { usePreferenceLoader } from '../loader/usePreferenceLoader';
+import { FormattingOptions, PreferenceMode, Preferences } from '../types';
+import { usePreferenceUpdater } from '../updater/usePreferenceUpdater';
+
+export function usePreferenceSync({
+ mode,
+ dataSource,
+ savedViewId,
+}: {
+ mode: PreferenceMode;
+ dataSource: DataSource;
+ savedViewId: string | undefined;
+}): {
+ preferences: Preferences | null;
+ loading: boolean;
+ error: Error | null;
+ updateColumns: (newColumns: BaseAutocompleteData[]) => void;
+ updateFormatting: (newFormatting: FormattingOptions) => void;
+} {
+ const { data: viewsData } = useGetAllViews(dataSource);
+
+ const [
+ savedViewPreferences,
+ setSavedViewPreferences,
+ ] = useState(null);
+
+ useEffect(() => {
+ const extraData = viewsData?.data?.data?.find(
+ (view) => view.id === savedViewId,
+ )?.extraData;
+
+ const parsedExtraData = JSON.parse(extraData || '{}');
+ let columns: BaseAutocompleteData[] = [];
+ let formatting: FormattingOptions | undefined;
+ if (dataSource === DataSource.LOGS) {
+ columns = parsedExtraData?.selectColumns || defaultLogsSelectedColumns;
+ formatting = {
+ maxLines: parsedExtraData?.maxLines ?? 2,
+ format: parsedExtraData?.format ?? 'table',
+ fontSize: parsedExtraData?.fontSize ?? 'small',
+ version: parsedExtraData?.version ?? 1,
+ };
+ }
+ if (dataSource === DataSource.TRACES) {
+ columns = parsedExtraData?.selectColumns || defaultTracesSelectedColumns;
+ }
+ setSavedViewPreferences({ columns, formatting });
+ }, [viewsData, dataSource, savedViewId, mode]);
+
+ // We are using a reSync state because we have URL updates as well as local storage updates
+ // and we want to make sure we are always using the latest preferences
+ const [reSync, setReSync] = useState(false);
+ const { preferences, loading, error } = usePreferenceLoader({
+ dataSource,
+ reSync,
+ setReSync,
+ });
+
+ const { updateColumns, updateFormatting } = usePreferenceUpdater({
+ dataSource,
+ mode,
+ preferences,
+ setReSync,
+ setSavedViewPreferences,
+ });
+
+ return {
+ preferences:
+ mode === PreferenceMode.SAVED_VIEW && savedViewId
+ ? savedViewPreferences
+ : preferences,
+ loading,
+ error,
+ updateColumns,
+ updateFormatting,
+ };
+}
diff --git a/frontend/src/providers/preferences/types/index.ts b/frontend/src/providers/preferences/types/index.ts
new file mode 100644
index 000000000000..57bd93ca78f9
--- /dev/null
+++ b/frontend/src/providers/preferences/types/index.ts
@@ -0,0 +1,32 @@
+import { LogViewMode } from 'container/LogsTable';
+import { FontSize } from 'container/OptionsMenu/types';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+import { DataSource } from 'types/common/queryBuilder';
+
/** Where preference writes land: a saved view (in-memory) or directly (URL + localStorage). */
export enum PreferenceMode {
  SAVED_VIEW = 'savedView',
  DIRECT = 'direct',
}

/** Shape of the value exposed by PreferenceContext. */
export interface PreferenceContextValue {
  preferences: Preferences | null;
  loading: boolean;
  error: Error | null;
  mode: PreferenceMode;
  // Only set in SAVED_VIEW mode.
  savedViewId?: string;
  dataSource: DataSource;
  updateColumns: (newColumns: BaseAutocompleteData[]) => void;
  updateFormatting: (newFormatting: FormattingOptions) => void;
}

/** Logs rendering options; all optional — traces persist none of these. */
export interface FormattingOptions {
  maxLines?: number;
  format?: LogViewMode;
  fontSize?: FontSize;
  version?: number;
}

/** Explorer preferences: visible columns plus optional formatting. */
export interface Preferences {
  columns: BaseAutocompleteData[];
  formatting?: FormattingOptions;
}
diff --git a/frontend/src/providers/preferences/updater/usePreferenceUpdater.ts b/frontend/src/providers/preferences/updater/usePreferenceUpdater.ts
new file mode 100644
index 000000000000..ef9d28501290
--- /dev/null
+++ b/frontend/src/providers/preferences/updater/usePreferenceUpdater.ts
@@ -0,0 +1,78 @@
+import {
+ defaultOptionsQuery,
+ URL_OPTIONS,
+} from 'container/OptionsMenu/constants';
+import { OptionsQuery } from 'container/OptionsMenu/types';
+import useUrlQueryData from 'hooks/useUrlQueryData';
+import { Dispatch, SetStateAction } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
+import { DataSource } from 'types/common/queryBuilder';
+
+import getLogsUpdaterConfig from '../configs/logsUpdaterConfig';
+import getTracesUpdaterConfig from '../configs/tracesUpdaterConfig';
+import { FormattingOptions, Preferences } from '../types';
+
// Metrics has no column/formatting preferences, so both updaters are
// intentional no-ops.
const metricsUpdater = {
  updateColumns: (): void => {}, // no-op for metrics
  updateFormatting: (): void => {}, // no-op for metrics
};
+
+const getUpdaterConfig = (
+ preferences: Preferences | null,
+ redirectWithOptionsData: (options: OptionsQuery) => void,
+ setSavedViewPreferences: Dispatch>,
+): Record<
+ DataSource,
+ {
+ updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
+ updateFormatting: (newFormatting: FormattingOptions, mode: string) => void;
+ }
+> => ({
+ [DataSource.LOGS]: getLogsUpdaterConfig(
+ preferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ ),
+ [DataSource.TRACES]: getTracesUpdaterConfig(
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ ),
+ [DataSource.METRICS]: metricsUpdater,
+});
+
+export function usePreferenceUpdater({
+ dataSource,
+ mode,
+ preferences,
+ setReSync,
+ setSavedViewPreferences,
+}: {
+ dataSource: DataSource;
+ mode: string;
+ preferences: Preferences | null;
+ setReSync: Dispatch>;
+ setSavedViewPreferences: Dispatch>;
+}): {
+ updateColumns: (newColumns: BaseAutocompleteData[]) => void;
+ updateFormatting: (newFormatting: FormattingOptions) => void;
+} {
+ const {
+ redirectWithQuery: redirectWithOptionsData,
+ } = useUrlQueryData(URL_OPTIONS, defaultOptionsQuery);
+ const updater = getUpdaterConfig(
+ preferences,
+ redirectWithOptionsData,
+ setSavedViewPreferences,
+ )[dataSource];
+
+ return {
+ updateColumns: (newColumns: BaseAutocompleteData[]): void => {
+ updater.updateColumns(newColumns, mode);
+ setReSync(true);
+ },
+ updateFormatting: (newFormatting: FormattingOptions): void => {
+ updater.updateFormatting(newFormatting, mode);
+ setReSync(true);
+ },
+ };
+}
diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
index 061f6316af7b..a59740a85e60 100644
--- a/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
+++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries.go
@@ -42,7 +42,7 @@ const (
rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
- experimentalRateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
+ experimentalRateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
experimentalIncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
)
@@ -158,7 +158,7 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
rateExp := rateWithoutNegative
if _, ok := os.LookupEnv("EXPERIMENTAL_RATE_WITHOUT_NEGATIVE"); ok {
- rateExp = fmt.Sprintf(experimentalRateWithoutNegative, start)
+ rateExp = fmt.Sprintf(experimentalRateWithoutNegative, start, start)
}
rateQueryTmpl :=
"SELECT %s ts, " + rateExp +
diff --git a/pkg/query-service/app/metrics/v4/helpers/sub_query.go b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
index efad01fd7367..2051983dbbbd 100644
--- a/pkg/query-service/app/metrics/v4/helpers/sub_query.go
+++ b/pkg/query-service/app/metrics/v4/helpers/sub_query.go
@@ -14,6 +14,12 @@ var (
sixHoursInMilliseconds = time.Hour.Milliseconds() * 6
oneDayInMilliseconds = time.Hour.Milliseconds() * 24
oneWeekInMilliseconds = oneDayInMilliseconds * 7
+
+ // when the query requests for almost 1 day, but not exactly 1 day, we need to add an offset to the end time
+ // to make sure that we are using the correct table
+ // this is because the start gets adjusted to the nearest step interval and uses the 5m table for 4m step interval
+ // leading to time series that doesn't best represent the rate of change
+ offsetBucket = 60 * time.Minute.Milliseconds()
)
func whichTSTableToUse(start, end int64, mq *v3.BuilderQuery) (int64, int64, string) {
@@ -104,7 +110,7 @@ func WhichSamplesTableToUse(start, end int64, mq *v3.BuilderQuery) string {
return constants.SIGNOZ_SAMPLES_V4_TABLENAME
}
- if end-start < oneDayInMilliseconds {
+ if end-start < oneDayInMilliseconds+offsetBucket {
// if we are dealing with delta metrics and interval is greater than 5 minutes, we can use the 5m aggregated table
// why would interval be greater than 5 minutes?
// we allow people to configure the step interval so we can make use of this
@@ -115,7 +121,7 @@ func WhichSamplesTableToUse(start, end int64, mq *v3.BuilderQuery) string {
return constants.SIGNOZ_SAMPLES_V4_AGG_30M_TABLENAME
}
return constants.SIGNOZ_SAMPLES_V4_TABLENAME
- } else if end-start < oneWeekInMilliseconds {
+ } else if end-start < oneWeekInMilliseconds+offsetBucket {
return constants.SIGNOZ_SAMPLES_V4_AGG_5M_TABLENAME
} else {
return constants.SIGNOZ_SAMPLES_V4_AGG_30M_TABLENAME
diff --git a/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go b/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go
index 6c7550a6c49e..bf6d81bc48ac 100644
--- a/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go
+++ b/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go
@@ -49,7 +49,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
- expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
@@ -82,7 +82,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
- expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
},
}
@@ -123,7 +123,7 @@ func TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
- expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY ts ORDER BY ts ASC",
+ expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC",
},
{
name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name",
@@ -149,7 +149,7 @@ func TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
- expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
}
@@ -204,7 +204,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
- expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test temporality = cumulative, quantile = 0.99 without group by",
@@ -234,7 +234,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
- expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
},
}
@@ -289,7 +289,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
- expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
+ expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test temporality = delta, quantile = 0.99 no group by",
@@ -319,7 +319,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
- expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
+ expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
},
}
@@ -360,7 +360,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) {
SpaceAggregation: v3.SpaceAggregationSum,
Disabled: false,
},
- expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
+ expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
},
{
name: "test gauge query with group by host_name",
@@ -386,7 +386,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) {
Expression: "A",
Disabled: false,
},
- expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
+ expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
},
}
diff --git a/pkg/telemetrymetrics/statement_builder.go b/pkg/telemetrymetrics/statement_builder.go
index 4de6d073c06e..ed366d975872 100644
--- a/pkg/telemetrymetrics/statement_builder.go
+++ b/pkg/telemetrymetrics/statement_builder.go
@@ -14,7 +14,7 @@ import (
)
const (
- RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
+ RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
)
@@ -417,7 +417,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
- rateExpr := fmt.Sprintf(RateWithoutNegative, start)
+ rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
diff --git a/pkg/telemetrymetrics/stmt_builder_test.go b/pkg/telemetrymetrics/stmt_builder_test.go
index 95ed00162e00..fd451fa5373b 100644
--- a/pkg/telemetrymetrics/stmt_builder_test.go
+++ b/pkg/telemetrymetrics/stmt_builder_test.go
@@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
- Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
+ Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
@@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
- Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
+ Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,
diff --git a/pkg/telemetrymetrics/tables.go b/pkg/telemetrymetrics/tables.go
index e0afc8e02b45..8d8771f90b00 100644
--- a/pkg/telemetrymetrics/tables.go
+++ b/pkg/telemetrymetrics/tables.go
@@ -33,6 +33,12 @@ var (
sixHoursInMilliseconds = uint64(time.Hour.Milliseconds() * 6)
oneDayInMilliseconds = uint64(time.Hour.Milliseconds() * 24)
oneWeekInMilliseconds = uint64(oneDayInMilliseconds * 7)
+
+ // when the query requests almost, but not exactly, 1 day of data, we add an offset to the end time
+ // to make sure that the correct samples table is chosen
+ // otherwise, because the start time gets adjusted to the nearest step interval, the 5m aggregate table
+ // would be selected for a 4m step interval, producing a time series that doesn't best represent the rate of change
+ offsetBucket = uint64(60 * time.Minute.Milliseconds())
)
func WhichTSTableToUse(
@@ -119,9 +125,9 @@ func WhichSamplesTableToUse(
return SamplesV4TableName
}
- if end-start < oneDayInMilliseconds {
+ if end-start < oneDayInMilliseconds+offsetBucket {
return SamplesV4TableName
- } else if end-start < oneWeekInMilliseconds {
+ } else if end-start < oneWeekInMilliseconds+offsetBucket {
return SamplesV4Agg5mTableName
} else {
return SamplesV4Agg30mTableName