Merge branch 'main' into fixes-qb
Commit fecf6667a3
@@ -174,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.86.2
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.86.2
image: signoz/signoz:v0.87.0
command:
- --config=/root/config/prometheus.yml
ports:

@@ -177,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.86.2}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

@@ -110,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.86.2}
image: signoz/signoz:${VERSION:-v0.87.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

@@ -1,5 +1,6 @@
import { render, screen } from '@testing-library/react';
import ROUTES from 'constants/routes';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import { DataSource } from 'types/common/queryBuilder';

@@ -52,11 +53,32 @@ jest.mock('hooks/saveViews/useDeleteView', () => ({
})),
}));

// Mock usePreferenceSync
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
usePreferenceSync: (): any => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
},
},
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));

describe('ExplorerCard', () => {
it('renders a card with a title and a description', () => {
render(
<MockQueryClientProvider>
<ExplorerCard sourcepage={DataSource.TRACES}>child</ExplorerCard>
<PreferenceContextProvider>
<ExplorerCard sourcepage={DataSource.TRACES}>child</ExplorerCard>
</PreferenceContextProvider>
</MockQueryClientProvider>,
);
expect(screen.queryByText('Query Builder')).not.toBeInTheDocument();
@@ -65,7 +87,9 @@ describe('ExplorerCard', () => {
it('renders a save view button', () => {
render(
<MockQueryClientProvider>
<ExplorerCard sourcepage={DataSource.TRACES}>child</ExplorerCard>
<PreferenceContextProvider>
<ExplorerCard sourcepage={DataSource.TRACES}>child</ExplorerCard>
</PreferenceContextProvider>
</MockQueryClientProvider>,
);
expect(screen.queryByText('Save view')).not.toBeInTheDocument();

@@ -6,6 +6,7 @@ import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import isEqual from 'lodash-es/isEqual';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import {
DeleteViewHandlerProps,
@@ -106,7 +107,11 @@ export const isQueryUpdatedInView = ({
!isEqual(
options?.selectColumns,
extraData && JSON.parse(extraData)?.selectColumns,
)
) ||
(stagedQuery?.builder?.queryData?.[0]?.dataSource === DataSource.LOGS &&
(!isEqual(options?.format, extraData && JSON.parse(extraData)?.format) ||
!isEqual(options?.maxLines, extraData && JSON.parse(extraData)?.maxLines) ||
!isEqual(options?.fontSize, extraData && JSON.parse(extraData)?.fontSize)))
);
};

@@ -74,6 +74,7 @@ const formatMap = {
'MM/dd HH:mm': DATE_TIME_FORMATS.SLASH_SHORT,
'MM/DD': DATE_TIME_FORMATS.DATE_SHORT,
'YY-MM': DATE_TIME_FORMATS.YEAR_MONTH,
'MMM d, yyyy, h:mm:ss aaaa': DATE_TIME_FORMATS.DASH_DATETIME,
YY: DATE_TIME_FORMATS.YEAR_SHORT,
};

@@ -54,6 +54,7 @@ import {
X,
} from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { FormattingOptions } from 'providers/preferences/types';
import {
CSSProperties,
Dispatch,
@@ -270,17 +271,26 @@ function ExplorerOptions({
const getUpdatedExtraData = (
extraData: string | undefined,
newSelectedColumns: BaseAutocompleteData[],
formattingOptions?: FormattingOptions,
): string => {
let updatedExtraData;

if (extraData) {
const parsedExtraData = JSON.parse(extraData);
parsedExtraData.selectColumns = newSelectedColumns;
if (formattingOptions) {
parsedExtraData.format = formattingOptions.format;
parsedExtraData.maxLines = formattingOptions.maxLines;
parsedExtraData.fontSize = formattingOptions.fontSize;
}
updatedExtraData = JSON.stringify(parsedExtraData);
} else {
updatedExtraData = JSON.stringify({
color: Color.BG_SIENNA_500,
selectColumns: newSelectedColumns,
format: formattingOptions?.format,
maxLines: formattingOptions?.maxLines,
fontSize: formattingOptions?.fontSize,
});
}
return updatedExtraData;
@@ -289,6 +299,14 @@ function ExplorerOptions({
const updatedExtraData = getUpdatedExtraData(
extraData,
options?.selectColumns,
// pass this only for logs
sourcepage === DataSource.LOGS
? {
format: options?.format,
maxLines: options?.maxLines,
fontSize: options?.fontSize,
}
: undefined,
);

const {
@@ -517,6 +535,14 @@ function ExplorerOptions({
color,
selectColumns: options.selectColumns,
version: 1,
...// pass this only for logs
(sourcepage === DataSource.LOGS
? {
format: options?.format,
maxLines: options?.maxLines,
fontSize: options?.fontSize,
}
: {}),
}),
notifications,
panelType: panelType || PANEL_TYPES.LIST,

@@ -114,7 +114,6 @@ function LogsExplorerViews({

// Context
const {
initialDataSource,
currentQuery,
stagedQuery,
panelType,
@@ -144,7 +143,7 @@ function LogsExplorerViews({

const { options, config } = useOptionsMenu({
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
dataSource: initialDataSource || DataSource.LOGS,
dataSource: DataSource.LOGS,
aggregateOperator: listQuery?.aggregateOperator || StringOperators.NOOP,
});

@@ -5,6 +5,7 @@ import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_qu
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { SELECTED_VIEWS } from 'pages/LogsExplorer/utils';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { VirtuosoMockContext } from 'react-virtuoso';
import { fireEvent, render, RenderResult } from 'tests/test-utils';
@@ -87,6 +88,25 @@ jest.mock('hooks/useSafeNavigate', () => ({
}),
}));

// Mock usePreferenceSync
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
usePreferenceSync: (): any => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
},
},
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));

jest.mock('hooks/logs/useCopyLogLink', () => ({
useCopyLogLink: jest.fn().mockReturnValue({
activeLogId: ACTIVE_LOG_ID,
@@ -105,13 +125,15 @@ const renderer = (): RenderResult =>
<VirtuosoMockContext.Provider
value={{ viewportHeight: 300, itemHeight: 100 }}
>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
<PreferenceContextProvider>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
</PreferenceContextProvider>
</VirtuosoMockContext.Provider>,
);

@@ -184,13 +206,15 @@ describe('LogsExplorerViews -', () => {
lodsQueryServerRequest();
render(
<QueryBuilderContext.Provider value={mockQueryBuilderContextValue}>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
<PreferenceContextProvider>
<LogsExplorerViews
selectedView={SELECTED_VIEWS.SEARCH}
showFrequencyChart
setIsLoadingQueries={(): void => {}}
listQueryKeyRef={{ current: {} }}
chartQueryKeyRef={{ current: {} }}
/>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,
);

@@ -5,6 +5,7 @@ import { logsPaginationQueryRangeSuccessResponse } from 'mocks-server/__mockdata
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { I18nextProvider } from 'react-i18next';
import i18n from 'ReactI18';
import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -108,11 +109,13 @@ describe('LogsPanelComponent', () => {
render(
<I18nextProvider i18n={i18n}>
<DashboardProvider>
<NewWidget
selectedGraph={PANEL_TYPES.LIST}
fillSpans={undefined}
yAxisUnit={undefined}
/>
<PreferenceContextProvider>
<NewWidget
selectedGraph={PANEL_TYPES.LIST}
fillSpans={undefined}
yAxisUnit={undefined}
/>
</PreferenceContextProvider>
</DashboardProvider>
</I18nextProvider>,
);

@@ -1,7 +1,4 @@
import getFromLocalstorage from 'api/browser/localstorage/get';
import setToLocalstorage from 'api/browser/localstorage/set';
import { getAggregateKeys } from 'api/queryBuilder/getAttributeKeys';
import { LOCALSTORAGE } from 'constants/localStorage';
import { LogViewMode } from 'container/LogsTable';
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
import useDebounce from 'hooks/useDebounce';
@@ -11,6 +8,7 @@ import {
AllTraceFilterKeys,
AllTraceFilterKeyValue,
} from 'pages/TracesExplorer/Filter/filterUtils';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueries } from 'react-query';
import { ErrorResponse, SuccessResponse } from 'types/api';
@@ -35,10 +33,10 @@ import {
import { getOptionsFromKeys } from './utils';

interface UseOptionsMenuProps {
storageKey?: string;
dataSource: DataSource;
aggregateOperator: string;
initialOptions?: InitialOptions;
storageKey: LOCALSTORAGE;
}

interface UseOptionsMenu {
@@ -48,22 +46,21 @@ interface UseOptionsMenu {
}

const useOptionsMenu = ({
storageKey,
dataSource,
aggregateOperator,
initialOptions = {},
}: UseOptionsMenuProps): UseOptionsMenu => {
const { notifications } = useNotifications();
const {
preferences,
updateColumns,
updateFormatting,
} = usePreferenceContext();

const [searchText, setSearchText] = useState<string>('');
const [isFocused, setIsFocused] = useState<boolean>(false);
const debouncedSearchText = useDebounce(searchText, 300);

const localStorageOptionsQuery = useMemo(
() => getFromLocalstorage(storageKey),
[storageKey],
);

const initialQueryParams = useMemo(
() => ({
searchText: '',
@@ -77,7 +74,6 @@ const useOptionsMenu = ({

const {
query: optionsQuery,
queryData: optionsQueryData,
redirectWithQuery: redirectWithOptionsData,
} = useUrlQueryData<OptionsQuery>(URL_OPTIONS, defaultOptionsQuery);

@@ -105,7 +101,9 @@ const useOptionsMenu = ({
);

const initialSelectedColumns = useMemo(() => {
if (!isFetchedInitialAttributes) return [];
if (!isFetchedInitialAttributes) {
return [];
}

const attributesData = initialAttributesResult?.reduce(
(acc, attributeResponse) => {
@@ -142,14 +140,12 @@ const useOptionsMenu = ({
})
.filter(Boolean) as BaseAutocompleteData[];

// this is the last point where we can set the default columns and if uptil now also we have an empty array then we will set the default columns
if (!initialSelected || !initialSelected?.length) {
initialSelected = defaultTraceSelectedColumns;
}
}

return initialSelected || [];
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
isFetchedInitialAttributes,
initialOptions?.selectColumns,
@@ -171,7 +167,6 @@ const useOptionsMenu = ({
const searchedAttributeKeys = useMemo(() => {
if (searchedAttributesData?.payload?.attributeKeys?.length) {
if (dataSource === DataSource.LOGS) {
// add timestamp and body to the list of attributes
return [
...defaultLogsSelectedColumns,
...searchedAttributesData.payload.attributeKeys.filter(
@@ -188,32 +183,35 @@ const useOptionsMenu = ({
return [];
}, [dataSource, searchedAttributesData?.payload?.attributeKeys]);

const initialOptionsQuery: OptionsQuery = useMemo(
() => ({
const initialOptionsQuery: OptionsQuery = useMemo(() => {
let defaultColumns = defaultOptionsQuery.selectColumns;
if (dataSource === DataSource.TRACES) {
defaultColumns = defaultTraceSelectedColumns;
} else if (dataSource === DataSource.LOGS) {
defaultColumns = defaultLogsSelectedColumns;
}

const finalSelectColumns = initialOptions?.selectColumns
? initialSelectedColumns
: defaultColumns;

return {
...defaultOptionsQuery,
...initialOptions,
// eslint-disable-next-line no-nested-ternary
selectColumns: initialOptions?.selectColumns
? initialSelectedColumns
: dataSource === DataSource.TRACES
? defaultTraceSelectedColumns
: defaultOptionsQuery.selectColumns,
}),
[dataSource, initialOptions, initialSelectedColumns],
);
selectColumns: finalSelectColumns,
};
}, [dataSource, initialOptions, initialSelectedColumns]);

const selectedColumnKeys = useMemo(
() => optionsQueryData?.selectColumns?.map(({ id }) => id) || [],
[optionsQueryData],
() => preferences?.columns?.map(({ id }) => id) || [],
[preferences?.columns],
);

const optionsFromAttributeKeys = useMemo(() => {
const filteredAttributeKeys = searchedAttributeKeys.filter((item) => {
// For other data sources, only filter out 'body' if it exists
if (dataSource !== DataSource.LOGS) {
return item.key !== 'body';
}
// For LOGS, keep all keys
return true;
});

@@ -223,10 +221,8 @@ const useOptionsMenu = ({
const handleRedirectWithOptionsData = useCallback(
(newQueryData: OptionsQuery) => {
redirectWithOptionsData(newQueryData);

setToLocalstorage(storageKey, JSON.stringify(newQueryData));
},
[storageKey, redirectWithOptionsData],
[redirectWithOptionsData],
);

const handleSelectColumns = useCallback(
@@ -235,7 +231,7 @@ const useOptionsMenu = ({
const newSelectedColumns = newSelectedColumnKeys.reduce((acc, key) => {
const column = [
...searchedAttributeKeys,
...optionsQueryData.selectColumns,
...(preferences?.columns || []),
].find(({ id }) => id === key);

if (!column) return acc;
@@ -243,75 +239,116 @@ const useOptionsMenu = ({
}, [] as BaseAutocompleteData[]);

const optionsData: OptionsQuery = {
...optionsQueryData,
...defaultOptionsQuery,
selectColumns: newSelectedColumns,
format: preferences?.formatting?.format || defaultOptionsQuery.format,
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
};

updateColumns(newSelectedColumns);
handleRedirectWithOptionsData(optionsData);
},
[
searchedAttributeKeys,
selectedColumnKeys,
optionsQueryData,
preferences,
handleRedirectWithOptionsData,
updateColumns,
],
);

const handleRemoveSelectedColumn = useCallback(
(columnKey: string) => {
const newSelectedColumns = optionsQueryData?.selectColumns?.filter(
const newSelectedColumns = preferences?.columns?.filter(
({ id }) => id !== columnKey,
);

if (!newSelectedColumns.length && dataSource !== DataSource.LOGS) {
if (!newSelectedColumns?.length && dataSource !== DataSource.LOGS) {
notifications.error({
message: 'There must be at least one selected column',
});
} else {
const optionsData: OptionsQuery = {
...optionsQueryData,
selectColumns: newSelectedColumns,
...defaultOptionsQuery,
selectColumns: newSelectedColumns || [],
format: preferences?.formatting?.format || defaultOptionsQuery.format,
maxLines:
preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
fontSize:
preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
};

updateColumns(newSelectedColumns || []);
handleRedirectWithOptionsData(optionsData);
}
},
[dataSource, notifications, optionsQueryData, handleRedirectWithOptionsData],
[
dataSource,
notifications,
preferences,
handleRedirectWithOptionsData,
updateColumns,
],
);

const handleFormatChange = useCallback(
(value: LogViewMode) => {
const optionsData: OptionsQuery = {
...optionsQueryData,
...defaultOptionsQuery,
selectColumns: preferences?.columns || [],
format: value,
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
};

updateFormatting({
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
format: value,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
});
handleRedirectWithOptionsData(optionsData);
},
[handleRedirectWithOptionsData, optionsQueryData],
[handleRedirectWithOptionsData, preferences, updateFormatting],
);

const handleMaxLinesChange = useCallback(
(value: string | number | null) => {
const optionsData: OptionsQuery = {
...optionsQueryData,
...defaultOptionsQuery,
selectColumns: preferences?.columns || [],
format: preferences?.formatting?.format || defaultOptionsQuery.format,
maxLines: value as number,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
};

updateFormatting({
maxLines: value as number,
format: preferences?.formatting?.format || defaultOptionsQuery.format,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
});
handleRedirectWithOptionsData(optionsData);
},
[handleRedirectWithOptionsData, optionsQueryData],
[handleRedirectWithOptionsData, preferences, updateFormatting],
);

const handleFontSizeChange = useCallback(
(value: FontSize) => {
const optionsData: OptionsQuery = {
...optionsQueryData,
...defaultOptionsQuery,
selectColumns: preferences?.columns || [],
format: preferences?.formatting?.format || defaultOptionsQuery.format,
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
fontSize: value,
};

updateFormatting({
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
format: preferences?.formatting?.format || defaultOptionsQuery.format,
fontSize: value,
});
handleRedirectWithOptionsData(optionsData);
},
[handleRedirectWithOptionsData, optionsQueryData],
[handleRedirectWithOptionsData, preferences, updateFormatting],
);

const handleSearchAttribute = useCallback((value: string) => {
@@ -331,7 +368,7 @@ const useOptionsMenu = ({
() => ({
addColumn: {
isFetching: isSearchedAttributesFetching,
value: optionsQueryData?.selectColumns || defaultOptionsQuery.selectColumns,
value: preferences?.columns || defaultOptionsQuery.selectColumns,
options: optionsFromAttributeKeys || [],
onFocus: handleFocus,
onBlur: handleBlur,
@@ -340,24 +377,21 @@ const useOptionsMenu = ({
onSearch: handleSearchAttribute,
},
format: {
value: optionsQueryData.format || defaultOptionsQuery.format,
value: preferences?.formatting?.format || defaultOptionsQuery.format,
onChange: handleFormatChange,
},
maxLines: {
value: optionsQueryData.maxLines || defaultOptionsQuery.maxLines,
value: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
onChange: handleMaxLinesChange,
},
fontSize: {
value: optionsQueryData?.fontSize || defaultOptionsQuery.fontSize,
value: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
onChange: handleFontSizeChange,
},
}),
[
isSearchedAttributesFetching,
optionsQueryData?.selectColumns,
optionsQueryData.format,
optionsQueryData.maxLines,
optionsQueryData?.fontSize,
preferences,
optionsFromAttributeKeys,
handleSelectColumns,
handleRemoveSelectedColumn,
@@ -369,23 +403,25 @@ const useOptionsMenu = ({
);

useEffect(() => {
if (optionsQuery || !isFetchedInitialAttributes) return;
if (optionsQuery || !isFetchedInitialAttributes) {
return;
}

const nextOptionsQuery = localStorageOptionsQuery
? JSON.parse(localStorageOptionsQuery)
: initialOptionsQuery;

redirectWithOptionsData(nextOptionsQuery);
redirectWithOptionsData(initialOptionsQuery);
}, [
isFetchedInitialAttributes,
optionsQuery,
initialOptionsQuery,
localStorageOptionsQuery,
redirectWithOptionsData,
]);

return {
options: optionsQueryData,
options: {
selectColumns: preferences?.columns || [],
format: preferences?.formatting?.format || defaultOptionsQuery.format,
maxLines: preferences?.formatting?.maxLines || defaultOptionsQuery.maxLines,
fontSize: preferences?.formatting?.fontSize || defaultOptionsQuery.fontSize,
},
config: optionsMenuConfig,
handleOptionsChange: handleRedirectWithOptionsData,
};

@@ -1,5 +1,6 @@
/* eslint-disable sonarjs/no-duplicate-string */
import { screen } from '@testing-library/react';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { findByText, fireEvent, render, waitFor } from 'tests/test-utils';

import { pipelineApiResponseMockData } from '../mocks/pipeline';
@@ -19,6 +20,18 @@ jest.mock('uplot', () => {
};
});

// Mock useUrlQuery hook
const mockUrlQuery = {
get: jest.fn(),
set: jest.fn(),
toString: jest.fn(() => ''),
};

jest.mock('hooks/useUrlQuery', () => ({
__esModule: true,
default: jest.fn(() => mockUrlQuery),
}));

const samplePipelinePreviewResponse = {
isLoading: false,
logs: [
@@ -57,17 +70,38 @@ jest.mock(
}),
);

// Mock usePreferenceSync
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
usePreferenceSync: (): any => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
},
},
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));

describe('PipelinePage container test', () => {
it('should render PipelineListsView section', () => {
const { getByText, container } = render(
<PipelineListsView
setActionType={jest.fn()}
isActionMode="viewing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>,
<PreferenceContextProvider>
<PipelineListsView
setActionType={jest.fn()}
isActionMode="viewing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>
</PreferenceContextProvider>,
);

// table headers assertions
@@ -91,14 +125,16 @@ describe('PipelinePage container test', () => {

it('should render expanded content and edit mode correctly', async () => {
const { getByText } = render(
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>,
<PreferenceContextProvider>
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>
</PreferenceContextProvider>,
);

// content assertion
@@ -122,14 +158,16 @@ describe('PipelinePage container test', () => {

it('should be able to perform actions and edit on expanded view content', async () => {
render(
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>,
<PreferenceContextProvider>
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>
</PreferenceContextProvider>,
);

// content assertion
@@ -180,14 +218,16 @@ describe('PipelinePage container test', () => {

it('should be able to toggle and delete pipeline', async () => {
const { getByText } = render(
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>,
<PreferenceContextProvider>
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType=""
refetchPipelineLists={jest.fn()}
/>
</PreferenceContextProvider>,
);

const addNewPipelineBtn = getByText('add_new_pipeline');
@@ -247,14 +287,16 @@ describe('PipelinePage container test', () => {

it('should have populated form fields when edit pipeline is clicked', async () => {
render(
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType="edit-pipeline"
refetchPipelineLists={jest.fn()}
/>,
<PreferenceContextProvider>
<PipelineListsView
setActionType={jest.fn()}
isActionMode="editing-mode"
setActionMode={jest.fn()}
pipelineData={pipelineApiResponseMockData}
isActionType="edit-pipeline"
refetchPipelineLists={jest.fn()}
/>
</PreferenceContextProvider>,
);

// content assertion

@@ -324,7 +324,7 @@ export const Query = memo(function Query({
]);

const disableOperatorSelector =
!query?.aggregateAttribute.key || query?.aggregateAttribute.key === '';
!query?.aggregateAttribute?.key || query?.aggregateAttribute?.key === '';

const isVersionV4 = version && version === ENTITY_VERSION_V4;

@@ -1037,7 +1037,9 @@ function QueryBuilderSearchV2(
);
})}
</Select>
{!hideSpanScopeSelector && <SpanScopeSelector queryName={query.queryName} />}
{!hideSpanScopeSelector && (
<SpanScopeSelector query={query} onChange={onChange} />
)}
</div>
);
}

@@ -2,7 +2,11 @@ import { Select } from 'antd';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { cloneDeep } from 'lodash-es';
import { useEffect, useState } from 'react';
import { TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
TagFilter,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';
import { v4 as uuid } from 'uuid';

enum SpanScope {
@@ -17,7 +21,8 @@ interface SpanFilterConfig {
}

interface SpanScopeSelectorProps {
queryName: string;
onChange?: (value: TagFilter) => void;
query?: IBuilderQuery;
}

const SPAN_FILTER_CONFIG: Record<SpanScope, SpanFilterConfig | null> = {
@@ -50,7 +55,10 @@ const SELECT_OPTIONS = [
{ value: SpanScope.ENTRYPOINT_SPANS, label: 'Entrypoint Spans' },
];

function SpanScopeSelector({ queryName }: SpanScopeSelectorProps): JSX.Element {
function SpanScopeSelector({
onChange,
query,
}: SpanScopeSelectorProps): JSX.Element {
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const [selectedScope, setSelectedScope] = useState<SpanScope>(
SpanScope.ALL_SPANS,
@@ -60,7 +68,7 @@ function SpanScopeSelector({ queryName }: SpanScopeSelectorProps): JSX.Element {
filters: TagFilterItem[] = [],
): SpanScope => {
const hasFilter = (key: string): boolean =>
filters.some(
filters?.some(
(filter) =>
filter.key?.type === 'spanSearchScope' &&
filter.key.key === key &&
@@ -71,15 +79,19 @@ function SpanScopeSelector({ queryName }: SpanScopeSelectorProps): JSX.Element {
if (hasFilter('isEntryPoint')) return SpanScope.ENTRYPOINT_SPANS;
return SpanScope.ALL_SPANS;
};

useEffect(() => {
const queryData = (currentQuery?.builder?.queryData || [])?.find(
(item) => item.queryName === queryName,
let queryData = (currentQuery?.builder?.queryData || [])?.find(
(item) => item.queryName === query?.queryName,
);

if (onChange && query) {
queryData = query;
}

const filters = queryData?.filters?.items;
const currentScope = getCurrentScopeFromFilters(filters);
setSelectedScope(currentScope);
}, [currentQuery, queryName]);
}, [currentQuery, onChange, query]);

const handleScopeChange = (newScope: SpanScope): void => {
const newQuery = cloneDeep(currentQuery);
@@ -108,14 +120,28 @@ function SpanScopeSelector({ queryName }: SpanScopeSelectorProps): JSX.Element {
...item,
filters: {
...item.filters,
items: getUpdatedFilters(item.filters?.items, item.queryName === queryName),
items: getUpdatedFilters(
item.filters?.items,
item.queryName === query?.queryName,
),
},
}));

redirectWithQueryBuilderData(newQuery);
if (onChange && query) {
onChange({
...query.filters,
items: getUpdatedFilters(
[...query.filters.items, ...newQuery.builder.queryData[0].filters.items],
true,
),
});

setSelectedScope(newScope);
} else {
redirectWithQueryBuilderData(newQuery);
}
};

//
return (
<Select
value={selectedScope}
@@ -127,4 +153,9 @@ function SpanScopeSelector({ queryName }: SpanScopeSelectorProps): JSX.Element {
);
}

SpanScopeSelector.defaultProps = {
onChange: undefined,
query: undefined,
};

export default SpanScopeSelector;

@@ -6,7 +6,12 @@ import {
} from '@testing-library/react';
import { initialQueriesMap } from 'constants/queryBuilder';
import { QueryBuilderContext } from 'providers/QueryBuilder';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
Query,
TagFilter,
TagFilterItem,
} from 'types/api/queryBuilder/queryBuilderData';

import SpanScopeSelector from '../SpanScopeSelector';

@@ -23,6 +28,13 @@ const createSpanScopeFilter = (key: string): TagFilterItem => ({
value: 'true',
});

const createNonScopeFilter = (key: string, value: string): TagFilterItem => ({
id: `non-scope-${key}`,
key: { key, isColumn: false, type: 'tag' },
op: '=',
value,
});

const defaultQuery = {
...initialQueriesMap.traces,
builder: {
@@ -36,6 +48,12 @@ const defaultQuery = {
},
};

const defaultQueryBuilderQuery: IBuilderQuery = {
...initialQueriesMap.traces.builder.queryData[0],
queryName: 'A',
filters: { items: [], op: 'AND' },
};

// Helper to create query with filters
const createQueryWithFilters = (filters: TagFilterItem[]): Query => ({
...defaultQuery,
@@ -44,6 +62,7 @@ const createQueryWithFilters = (filters: TagFilterItem[]): Query => ({
queryData: [
{
...defaultQuery.builder.queryData[0],
queryName: 'A',
filters: {
items: filters,
op: 'AND',
@@ -54,8 +73,9 @@ const createQueryWithFilters = (filters: TagFilterItem[]): Query => ({
});

const renderWithContext = (
queryName = 'A',
initialQuery = defaultQuery,
onChangeProp?: (value: TagFilter) => void,
queryProp?: IBuilderQuery,
): RenderResult =>
render(
<QueryBuilderContext.Provider
@@ -67,10 +87,24 @@ const renderWithContext = (
} as any
}
>
<SpanScopeSelector queryName={queryName} />
<SpanScopeSelector onChange={onChangeProp} query={queryProp} />
</QueryBuilderContext.Provider>,
);

const selectOption = async (optionText: string): Promise<void> => {
const selector = screen.getByRole('combobox');
fireEvent.mouseDown(selector);

// Wait for dropdown to appear
await screen.findByRole('listbox');

// Find the option by its content text and click it
const option = await screen.findByText(optionText, {
selector: '.ant-select-item-option-content',
});
fireEvent.click(option);
};

describe('SpanScopeSelector', () => {
beforeEach(() => {
jest.clearAllMocks();
@@ -82,13 +116,6 @@ describe('SpanScopeSelector', () => {
});

describe('when selecting different options', () => {
const selectOption = (optionText: string): void => {
const selector = screen.getByRole('combobox');
fireEvent.mouseDown(selector);
const option = screen.getByText(optionText);
fireEvent.click(option);
};

const assertFilterAdded = (
updatedQuery: Query,
expectedKey: string,
@@ -106,13 +133,13 @@ describe('SpanScopeSelector', () => {
);
};

it('should remove span scope filters when selecting ALL_SPANS', () => {
it('should remove span scope filters when selecting ALL_SPANS', async () => {
const queryWithSpanScope = createQueryWithFilters([
createSpanScopeFilter('isRoot'),
]);
renderWithContext('A', queryWithSpanScope);
renderWithContext(queryWithSpanScope, undefined, defaultQueryBuilderQuery);

selectOption('All Spans');
await selectOption('All Spans');

expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();
const updatedQuery = mockRedirectWithQueryBuilderData.mock.calls[0][0];
@@ -125,7 +152,8 @@ describe('SpanScopeSelector', () => {
});

it('should add isRoot filter when selecting ROOT_SPANS', async () => {
renderWithContext();
renderWithContext(defaultQuery, undefined, defaultQueryBuilderQuery);
// eslint-disable-next-line sonarjs/no-duplicate-string
await selectOption('Root Spans');

expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();
@@ -135,9 +163,10 @@ describe('SpanScopeSelector', () => {
);
});

it('should add isEntryPoint filter when selecting ENTRYPOINT_SPANS', () => {
renderWithContext();
selectOption('Entrypoint Spans');
it('should add isEntryPoint filter when selecting ENTRYPOINT_SPANS', async () => {
renderWithContext(defaultQuery, undefined, defaultQueryBuilderQuery);
// eslint-disable-next-line sonarjs/no-duplicate-string
await selectOption('Entrypoint Spans');

expect(mockRedirectWithQueryBuilderData).toHaveBeenCalled();
assertFilterAdded(
@@ -157,9 +186,180 @@ describe('SpanScopeSelector', () => {
const queryWithFilter = createQueryWithFilters([
createSpanScopeFilter(filterKey),
]);
renderWithContext('A', queryWithFilter);
renderWithContext(queryWithFilter, undefined, defaultQueryBuilderQuery);
expect(await screen.findByText(expectedText)).toBeInTheDocument();
},
);
});

describe('when onChange and query props are provided', () => {
const mockOnChange = jest.fn();

const createLocalQuery = (
filterItems: TagFilterItem[] = [],
op: 'AND' | 'OR' = 'AND',
): IBuilderQuery => ({
...defaultQueryBuilderQuery,
filters: { items: filterItems, op },
});

const assertOnChangePayload = (
callNumber: number, // To handle multiple calls if needed, usually 0 for single interaction
expectedScopeKey: string | null,
expectedNonScopeItems: TagFilterItem[] = [],
): void => {
expect(mockOnChange).toHaveBeenCalled();
const onChangeArg = mockOnChange.mock.calls[callNumber][0] as TagFilter;
const { items } = onChangeArg;

// Check for preservation of specific non-scope items
expectedNonScopeItems.forEach((nonScopeItem) => {
expect(items).toContainEqual(nonScopeItem);
});

const scopeFiltersInPayload = items.filter(
(filter) => filter.key?.type === 'spanSearchScope',
);

if (expectedScopeKey) {
expect(scopeFiltersInPayload.length).toBe(1);
expect(scopeFiltersInPayload[0].key?.key).toBe(expectedScopeKey);
expect(scopeFiltersInPayload[0].value).toBe('true');
expect(scopeFiltersInPayload[0].op).toBe('=');
} else {
expect(scopeFiltersInPayload.length).toBe(0);
}

const expectedTotalFilters =
expectedNonScopeItems.length + (expectedScopeKey ? 1 : 0);
expect(items.length).toBe(expectedTotalFilters);
};

beforeEach(() => {
mockOnChange.mockClear();
mockRedirectWithQueryBuilderData.mockClear();
});

it('should initialize with ALL_SPANS if query prop has no scope filters', async () => {
const localQuery = createLocalQuery();
renderWithContext(defaultQuery, mockOnChange, localQuery);
expect(await screen.findByText('All Spans')).toBeInTheDocument();
});

it('should initialize with ROOT_SPANS if query prop has isRoot filter', async () => {
const localQuery = createLocalQuery([createSpanScopeFilter('isRoot')]);
renderWithContext(defaultQuery, mockOnChange, localQuery);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();
});

it('should initialize with ENTRYPOINT_SPANS if query prop has isEntryPoint filter', async () => {
const localQuery = createLocalQuery([createSpanScopeFilter('isEntryPoint')]);
renderWithContext(defaultQuery, mockOnChange, localQuery);
expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument();
});

it('should call onChange and not redirect when selecting ROOT_SPANS (from ALL_SPANS)', async () => {
const localQuery = createLocalQuery(); // Initially All Spans
const { container } = renderWithContext(
defaultQuery,
mockOnChange,
localQuery,
);
expect(await screen.findByText('All Spans')).toBeInTheDocument();

await selectOption('Root Spans');

expect(mockRedirectWithQueryBuilderData).not.toHaveBeenCalled();
assertOnChangePayload(0, 'isRoot', []);
expect(
container.querySelector('span[title="Root Spans"]'),
).toBeInTheDocument();
});

it('should call onChange with removed scope when selecting ALL_SPANS (from ROOT_SPANS)', async () => {
const initialRootFilter = createSpanScopeFilter('isRoot');
const localQuery = createLocalQuery([initialRootFilter]);
const { container } = renderWithContext(
defaultQuery,
mockOnChange,
localQuery,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();

await selectOption('All Spans');

expect(mockRedirectWithQueryBuilderData).not.toHaveBeenCalled();
assertOnChangePayload(0, null, []);

expect(
container.querySelector('span[title="All Spans"]'),
).toBeInTheDocument();
});

it('should call onChange, replacing isRoot with isEntryPoint', async () => {
const initialRootFilter = createSpanScopeFilter('isRoot');
const localQuery = createLocalQuery([initialRootFilter]);
const { container } = renderWithContext(
defaultQuery,
mockOnChange,
localQuery,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();

await selectOption('Entrypoint Spans');

expect(mockRedirectWithQueryBuilderData).not.toHaveBeenCalled();
assertOnChangePayload(0, 'isEntryPoint', []);
expect(
container.querySelector('span[title="Entrypoint Spans"]'),
).toBeInTheDocument();
});

it('should preserve non-scope filters from query prop when changing scope', async () => {
const nonScopeItem = createNonScopeFilter('customTag', 'customValue');
const initialRootFilter = createSpanScopeFilter('isRoot');
const localQuery = createLocalQuery([nonScopeItem, initialRootFilter], 'OR');

const { container } = renderWithContext(
defaultQuery,
mockOnChange,
localQuery,
);
expect(await screen.findByText('Root Spans')).toBeInTheDocument();

await selectOption('Entrypoint Spans');

expect(mockRedirectWithQueryBuilderData).not.toHaveBeenCalled();
assertOnChangePayload(0, 'isEntryPoint', [nonScopeItem]);
expect(
container.querySelector('span[title="Entrypoint Spans"]'),
).toBeInTheDocument();
});

it('should preserve non-scope filters when changing to ALL_SPANS', async () => {
const nonScopeItem1 = createNonScopeFilter('service', 'checkout');
const nonScopeItem2 = createNonScopeFilter('version', 'v1');
const initialEntryFilter = createSpanScopeFilter('isEntryPoint');
const localQuery = createLocalQuery([
nonScopeItem1,
initialEntryFilter,
nonScopeItem2,
]);

const { container } = renderWithContext(
defaultQuery,
mockOnChange,
localQuery,
);
expect(await screen.findByText('Entrypoint Spans')).toBeInTheDocument();

await selectOption('All Spans');

expect(mockRedirectWithQueryBuilderData).not.toHaveBeenCalled();
assertOnChangePayload(0, null, [nonScopeItem1, nonScopeItem2]);
expect(
container.querySelector('span[title="All Spans"]'),
).toBeInTheDocument();
});
});
});

@@ -30,14 +30,15 @@ export const getChartData = (
};
const chartLabels: ChartData<'line'>['labels'] = [];

Object.keys(allDataPoints ?? {}).forEach((timestamp) => {
const key = allDataPoints[timestamp];
if (key.value) {
chartDataset.data.push(key.value);
const date = dayjs(key.timestamp / 1000000);
chartLabels.push(date.toDate().getTime());
}
});
if (allDataPoints && typeof allDataPoints === 'object')
Object.keys(allDataPoints).forEach((timestamp) => {
const key = allDataPoints[timestamp];
if (key.value) {
chartDataset.data.push(key.value);
const date = dayjs(key.timestamp / 1000000);
chartLabels.push(date.toDate().getTime());
}
});

return {
datasets: [

@@ -136,8 +136,12 @@ function Filters({
return (
<div className="filter-row">
<QueryBuilderSearchV2
query={BASE_FILTER_QUERY}
query={{
...BASE_FILTER_QUERY,
filters,
}}
onChange={handleFilterChange}
hideSpanScopeSelector={false}
/>
{filteredSpanIds.length > 0 && (
<div className="pre-next-toggle">

@@ -4,6 +4,7 @@ import LiveLogsContainer from 'container/LiveLogs/LiveLogsContainer';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import { EventSourceProvider } from 'providers/EventSource';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { useEffect } from 'react';
import { DataSource } from 'types/common/queryBuilder';

@@ -17,7 +18,9 @@ function LiveLogs(): JSX.Element {

return (
<EventSourceProvider>
<LiveLogsContainer />
<PreferenceContextProvider>
<LiveLogsContainer />
</PreferenceContextProvider>
</EventSourceProvider>
);
}

@@ -8,6 +8,7 @@ import { noop } from 'lodash-es';
import { logsQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderContext } from 'providers/QueryBuilder';
// https://virtuoso.dev/mocking-in-tests/
import { VirtuosoMockContext } from 'react-virtuoso';
@@ -73,6 +74,25 @@ jest.mock('hooks/useSafeNavigate', () => ({
}),
}));

// Mock usePreferenceSync
jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
usePreferenceSync: (): any => ({
preferences: {
columns: [],
formatting: {
maxLines: 2,
format: 'table',
fontSize: 'small',
version: 1,
},
},
loading: false,
error: null,
updateColumns: jest.fn(),
updateFormatting: jest.fn(),
}),
}));

const logsQueryServerRequest = (): void =>
server.use(
rest.post(queryRangeURL, (req, res, ctx) =>
@@ -88,7 +108,11 @@ describe('Logs Explorer Tests', () => {
queryByText,
getByTestId,
queryByTestId,
} = render(<LogsExplorer />);
} = render(
<PreferenceContextProvider>
<LogsExplorer />
</PreferenceContextProvider>,
);

// check the presence of frequency chart content
expect(getByText(frequencyChartContent)).toBeInTheDocument();
@@ -124,11 +148,13 @@ describe('Logs Explorer Tests', () => {
// mocking the query range API to return the logs
logsQueryServerRequest();
const { queryByText, queryByTestId } = render(
<VirtuosoMockContext.Provider
value={{ viewportHeight: 300, itemHeight: 100 }}
>
<LogsExplorer />
</VirtuosoMockContext.Provider>,
<PreferenceContextProvider>
<VirtuosoMockContext.Provider
value={{ viewportHeight: 300, itemHeight: 100 }}
>
<LogsExplorer />
</VirtuosoMockContext.Provider>
</PreferenceContextProvider>,
);

// check for loading state to be not present
@@ -192,11 +218,13 @@ describe('Logs Explorer Tests', () => {
isStagedQueryUpdated: (): boolean => false,
}}
>
<VirtuosoMockContext.Provider
value={{ viewportHeight: 300, itemHeight: 100 }}
>
<LogsExplorer />
</VirtuosoMockContext.Provider>
<PreferenceContextProvider>
<VirtuosoMockContext.Provider
value={{ viewportHeight: 300, itemHeight: 100 }}
>
<LogsExplorer />
</VirtuosoMockContext.Provider>
</PreferenceContextProvider>
</QueryBuilderContext.Provider>,
);

@@ -213,7 +241,11 @@ describe('Logs Explorer Tests', () => {
});

test('frequency chart visibility and switch toggle', async () => {
const { getByRole, queryByText } = render(<LogsExplorer />);
const { getByRole, queryByText } = render(
<PreferenceContextProvider>
<LogsExplorer />
</PreferenceContextProvider>,
);

// check the presence of Frequency Chart
expect(queryByText('Frequency chart')).toBeInTheDocument();

@@ -23,6 +23,7 @@ import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { isEqual, isNull } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';
@@ -35,6 +36,8 @@ function LogsExplorer(): JSX.Element {
const [selectedView, setSelectedView] = useState<SELECTED_VIEWS>(
SELECTED_VIEWS.SEARCH,
);
const { preferences, loading: preferencesLoading } = usePreferenceContext();

const [showFilters, setShowFilters] = useState<boolean>(() => {
const localStorageValue = getLocalStorageKey(
LOCALSTORAGE.SHOW_LOGS_QUICK_FILTERS,
@@ -83,7 +86,6 @@ function LogsExplorer(): JSX.Element {
}, [currentQuery.builder.queryData, currentQuery.builder.queryData.length]);

const {
queryData: optionsQueryData,
redirectWithQuery: redirectWithOptionsData,
} = useUrlQueryData<OptionsQuery>(URL_OPTIONS, defaultOptionsQuery);

@@ -164,12 +166,34 @@ function LogsExplorer(): JSX.Element {
);

useEffect(() => {
const migratedQuery = migrateOptionsQuery(optionsQueryData);
if (!preferences || preferencesLoading) {
return;
}
const migratedQuery = migrateOptionsQuery({
selectColumns: preferences.columns || defaultLogsSelectedColumns,
maxLines: preferences.formatting?.maxLines || defaultOptionsQuery.maxLines,
format: preferences.formatting?.format || defaultOptionsQuery.format,
fontSize: preferences.formatting?.fontSize || defaultOptionsQuery.fontSize,
version: preferences.formatting?.version,
});
// Only redirect if the query was actually modified
if (!isEqual(migratedQuery, optionsQueryData)) {
if (
!isEqual(migratedQuery, {
selectColumns: preferences?.columns,
maxLines: preferences?.formatting?.maxLines,
format: preferences?.formatting?.format,
fontSize: preferences?.formatting?.fontSize,
version: preferences?.formatting?.version,
})
) {
redirectWithOptionsData(migratedQuery);
}
}, [migrateOptionsQuery, optionsQueryData, redirectWithOptionsData]);
}, [
migrateOptionsQuery,
preferences,
redirectWithOptionsData,
preferencesLoading,
]);

const isMultipleQueries = useMemo(
() =>

@@ -4,9 +4,14 @@ import { Compass, TowerControl, Workflow } from 'lucide-react';
import LogsExplorer from 'pages/LogsExplorer';
import Pipelines from 'pages/Pipelines';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';

export const logsExplorer: TabRoutes = {
Component: LogsExplorer,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<LogsExplorer />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

@@ -4,6 +4,7 @@ import ExplorerPage from 'container/MetricsExplorer/Explorer';
import SummaryPage from 'container/MetricsExplorer/Summary';
import { BarChart2, Compass, TowerControl } from 'lucide-react';
import SaveView from 'pages/SaveView';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';

export const Summary: TabRoutes = {
Component: SummaryPage,
@@ -17,7 +18,11 @@ export const Summary: TabRoutes = {
};

export const Explorer: TabRoutes = {
Component: ExplorerPage,
Component: (): JSX.Element => (
<PreferenceContextProvider>
<ExplorerPage />
</PreferenceContextProvider>
),
name: (
<div className="tab-item">
<Compass size={16} /> Explorer

@ -75,7 +75,7 @@ function TracesExplorer(): JSX.Element {

	const isGroupByExist = useMemo(() => {
		const groupByCount: number = currentQuery.builder.queryData.reduce<number>(
			(acc, query) => acc + query.groupBy.length,
			(acc, query) => acc + (query?.groupBy?.length || 0),
			0,
		);

@ -5,10 +5,15 @@ import SaveView from 'pages/SaveView';
import TracesExplorer from 'pages/TracesExplorer';
import TracesFunnelDetails from 'pages/TracesFunnelDetails';
import TracesFunnels from 'pages/TracesFunnels';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { matchPath } from 'react-router-dom';

export const tracesExplorer: TabRoutes = {
	Component: TracesExplorer,
	Component: (): JSX.Element => (
		<PreferenceContextProvider>
			<TracesExplorer />
		</PreferenceContextProvider>
	),
	name: (
		<div className="tab-item">
			<Compass size={16} /> Explorer

@ -0,0 +1,154 @@
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import {
|
||||
FormattingOptions,
|
||||
PreferenceMode,
|
||||
Preferences,
|
||||
} from 'providers/preferences/types';
|
||||
import { MemoryRouter, Route, Switch } from 'react-router-dom';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import {
|
||||
PreferenceContextProvider,
|
||||
usePreferenceContext,
|
||||
} from '../context/PreferenceContextProvider';
|
||||
|
||||
// Mock the usePreferenceSync hook
|
||||
jest.mock('../sync/usePreferenceSync', () => ({
|
||||
usePreferenceSync: jest.fn().mockReturnValue({
|
||||
preferences: {
|
||||
columns: [] as BaseAutocompleteData[],
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
fontSize: 'small',
|
||||
version: 1,
|
||||
} as FormattingOptions,
|
||||
} as Preferences,
|
||||
loading: false,
|
||||
error: null,
|
||||
updateColumns: jest.fn(),
|
||||
updateFormatting: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
// Test component that consumes the context
|
||||
function TestConsumer(): JSX.Element {
|
||||
const context = usePreferenceContext();
|
||||
return (
|
||||
<div>
|
||||
<div data-testid="mode">{context.mode}</div>
|
||||
<div data-testid="dataSource">{context.dataSource}</div>
|
||||
<div data-testid="loading">{String(context.loading)}</div>
|
||||
<div data-testid="error">{String(context.error)}</div>
|
||||
<div data-testid="savedViewId">{context.savedViewId || 'no-view-id'}</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
describe('PreferenceContextProvider', () => {
|
||||
it('should provide context with direct mode when no viewKey is present', () => {
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/logs']}>
|
||||
<Switch>
|
||||
<Route
|
||||
path="/logs"
|
||||
component={(): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<TestConsumer />
|
||||
</PreferenceContextProvider>
|
||||
)}
|
||||
/>
|
||||
</Switch>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
|
||||
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
|
||||
expect(screen.getByTestId('loading')).toHaveTextContent('false');
|
||||
expect(screen.getByTestId('error')).toHaveTextContent('null');
|
||||
expect(screen.getByTestId('savedViewId')).toHaveTextContent('no-view-id');
|
||||
});
|
||||
|
||||
it('should provide context with savedView mode when viewKey is present', () => {
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/logs?viewKey="test-view-id"']}>
|
||||
<Switch>
|
||||
<Route
|
||||
path="/logs"
|
||||
component={(): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<TestConsumer />
|
||||
</PreferenceContextProvider>
|
||||
)}
|
||||
/>
|
||||
</Switch>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('mode')).toHaveTextContent('savedView');
|
||||
expect(screen.getByTestId('dataSource')).toHaveTextContent('logs');
|
||||
expect(screen.getByTestId('savedViewId')).toHaveTextContent('test-view-id');
|
||||
});
|
||||
|
||||
it('should set traces dataSource when pathname includes traces', () => {
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/traces']}>
|
||||
<Switch>
|
||||
<Route
|
||||
path="/traces"
|
||||
component={(): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<TestConsumer />
|
||||
</PreferenceContextProvider>
|
||||
)}
|
||||
/>
|
||||
</Switch>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('dataSource')).toHaveTextContent('traces');
|
||||
});
|
||||
|
||||
it('should handle invalid viewKey JSON gracefully', () => {
|
||||
// Mock console.error to avoid test output clutter
|
||||
const originalConsoleError = console.error;
|
||||
console.error = jest.fn();
|
||||
|
||||
render(
|
||||
<MemoryRouter initialEntries={['/logs?viewKey=invalid-json']}>
|
||||
<Switch>
|
||||
<Route
|
||||
path="/logs"
|
||||
component={(): JSX.Element => (
|
||||
<PreferenceContextProvider>
|
||||
<TestConsumer />
|
||||
</PreferenceContextProvider>
|
||||
)}
|
||||
/>
|
||||
</Switch>
|
||||
</MemoryRouter>,
|
||||
);
|
||||
|
||||
expect(screen.getByTestId('mode')).toHaveTextContent(PreferenceMode.DIRECT);
|
||||
expect(console.error).toHaveBeenCalled();
|
||||
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
it('should throw error when usePreferenceContext is used outside provider', () => {
|
||||
// Suppress the error output for this test
|
||||
const originalConsoleError = console.error;
|
||||
console.error = jest.fn();
|
||||
|
||||
expect(() => {
|
||||
render(<TestConsumer />);
|
||||
}).toThrow(
|
||||
'usePreferenceContext must be used within PreferenceContextProvider',
|
||||
);
|
||||
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,162 @@
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { LogViewMode } from 'container/LogsTable';
|
||||
import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { FormattingOptions } from 'providers/preferences/types';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import logsLoaderConfig from '../configs/logsLoaderConfig';
|
||||
|
||||
// Mock localStorage
|
||||
const mockLocalStorage: Record<string, string> = {};
|
||||
|
||||
jest.mock('api/browser/localstorage/get', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn((key: string) => mockLocalStorage[key] || null),
|
||||
}));
|
||||
|
||||
describe('logsLoaderConfig', () => {
|
||||
// Save original location object
|
||||
const originalWindowLocation = window.location;
|
||||
let mockedLocation: Partial<Location>;
|
||||
|
||||
beforeEach(() => {
|
||||
// Setup a mocked location object
|
||||
mockedLocation = {
|
||||
...originalWindowLocation,
|
||||
search: '',
|
||||
};
|
||||
|
||||
// Mock the window.location property
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: mockedLocation,
|
||||
writable: true,
|
||||
});
|
||||
|
||||
// Clear mocked localStorage
|
||||
Object.keys(mockLocalStorage).forEach((key) => {
|
||||
delete mockLocalStorage[key];
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original location
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: originalWindowLocation,
|
||||
writable: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should have priority order: local, url, default', () => {
|
||||
expect(logsLoaderConfig.priority).toEqual(['local', 'url', 'default']);
|
||||
});
|
||||
|
||||
it('should load from localStorage when available', async () => {
|
||||
const mockColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'test-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
// Set up localStorage mock data with the correct key from LOCALSTORAGE enum
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: mockColumns,
|
||||
maxLines: 10,
|
||||
format: 'json',
|
||||
fontSize: 'large',
|
||||
version: 2,
|
||||
});
|
||||
|
||||
const result = await logsLoaderConfig.local();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: mockColumns,
|
||||
formatting: {
|
||||
maxLines: 10,
|
||||
format: 'json' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 2,
|
||||
} as FormattingOptions,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle invalid localStorage data gracefully', async () => {
|
||||
// Set up invalid localStorage mock data
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = 'invalid-json';
|
||||
|
||||
const result = await logsLoaderConfig.local();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: [] as BaseAutocompleteData[],
|
||||
formatting: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('should load from URL when available', async () => {
|
||||
const mockColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'url-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
// Set up URL search params
|
||||
mockedLocation.search = `?options=${encodeURIComponent(
|
||||
JSON.stringify({
|
||||
selectColumns: mockColumns,
|
||||
maxLines: 5,
|
||||
format: 'raw',
|
||||
fontSize: 'medium',
|
||||
version: 1,
|
||||
}),
|
||||
)}`;
|
||||
|
||||
const result = await logsLoaderConfig.url();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: mockColumns,
|
||||
formatting: {
|
||||
maxLines: 5,
|
||||
format: 'raw' as LogViewMode,
|
||||
fontSize: 'medium' as FontSize,
|
||||
version: 1,
|
||||
} as FormattingOptions,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle invalid URL data gracefully', async () => {
|
||||
// Set up invalid URL search params
|
||||
mockedLocation.search = '?options=invalid-json';
|
||||
|
||||
const result = await logsLoaderConfig.url();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: [] as BaseAutocompleteData[],
|
||||
formatting: undefined,
|
||||
});
|
||||
});
|
||||
|
||||
it('should provide default values when no other source is available', async () => {
|
||||
const result = await logsLoaderConfig.default();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: defaultLogsSelectedColumns as BaseAutocompleteData[],
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table' as LogViewMode,
|
||||
fontSize: 'small' as FontSize,
|
||||
version: 1,
|
||||
} as FormattingOptions,
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,261 @@
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { LogViewMode } from 'container/LogsTable';
|
||||
import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import {
|
||||
FormattingOptions,
|
||||
PreferenceMode,
|
||||
Preferences,
|
||||
} from 'providers/preferences/types';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import getLogsUpdaterConfig from '../configs/logsUpdaterConfig';
|
||||
|
||||
// Mock localStorage
|
||||
const mockLocalStorage: Record<string, string> = {};
|
||||
|
||||
jest.mock('api/browser/localstorage/set', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn((key: string, value: string) => {
|
||||
mockLocalStorage[key] = value;
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock localStorage.getItem
|
||||
Object.defineProperty(window, 'localStorage', {
|
||||
value: {
|
||||
getItem: jest.fn((key: string) => mockLocalStorage[key] || null),
|
||||
setItem: jest.fn((key: string, value: string) => {
|
||||
mockLocalStorage[key] = value;
|
||||
}),
|
||||
},
|
||||
writable: true,
|
||||
});
|
||||
|
||||
describe('logsUpdaterConfig', () => {
|
||||
// Mock redirectWithOptionsData and setSavedViewPreferences
|
||||
const redirectWithOptionsData = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
|
||||
const mockPreferences: Preferences = {
|
||||
columns: [],
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table' as LogViewMode,
|
||||
fontSize: 'small' as FontSize,
|
||||
version: 1,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
// Clear mocked localStorage
|
||||
Object.keys(mockLocalStorage).forEach((key) => {
|
||||
delete mockLocalStorage[key];
|
||||
});
|
||||
});
|
||||
|
||||
it('should update columns in localStorage for direct mode', () => {
|
||||
const logsUpdater = getLogsUpdaterConfig(
|
||||
mockPreferences,
|
||||
redirectWithOptionsData,
|
||||
setSavedViewPreferences,
|
||||
);
|
||||
|
||||
const newColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'new-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
// Set initial localStorage data
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: [
|
||||
{
|
||||
key: 'old-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
],
|
||||
maxLines: 2,
|
||||
});
|
||||
|
||||
logsUpdater.updateColumns(newColumns, PreferenceMode.DIRECT);
|
||||
|
||||
// Should update URL
|
||||
expect(redirectWithOptionsData).toHaveBeenCalledWith({
|
||||
...defaultOptionsQuery,
|
||||
...mockPreferences.formatting,
|
||||
selectColumns: newColumns,
|
||||
});
|
||||
|
||||
// Should update localStorage
|
||||
const storedData = JSON.parse(
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
|
||||
);
|
||||
expect(storedData.selectColumns).toEqual(newColumns);
|
||||
expect(storedData.maxLines).toBe(2); // Should preserve other fields
|
||||
|
||||
// Should not update saved view preferences
|
||||
expect(setSavedViewPreferences).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should update columns in savedViewPreferences for savedView mode', () => {
|
||||
const logsUpdater = getLogsUpdaterConfig(
|
||||
mockPreferences,
|
||||
redirectWithOptionsData,
|
||||
setSavedViewPreferences,
|
||||
);
|
||||
|
||||
const newColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'new-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
logsUpdater.updateColumns(newColumns, PreferenceMode.SAVED_VIEW);
|
||||
|
||||
// Should not update URL in savedView mode
|
||||
expect(redirectWithOptionsData).not.toHaveBeenCalled();
|
||||
|
||||
// Should not update localStorage in savedView mode
|
||||
expect(mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS]).toBeUndefined();
|
||||
|
||||
// Should update saved view preferences
|
||||
expect(setSavedViewPreferences).toHaveBeenCalledWith(expect.any(Function));
|
||||
});
|
||||
|
||||
it('should update formatting options in localStorage for direct mode', () => {
|
||||
const logsUpdater = getLogsUpdaterConfig(
|
||||
mockPreferences,
|
||||
redirectWithOptionsData,
|
||||
setSavedViewPreferences,
|
||||
);
|
||||
|
||||
const newFormatting: FormattingOptions = {
|
||||
maxLines: 5,
|
||||
format: 'json' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 1,
|
||||
};
|
||||
|
||||
// Set initial localStorage data
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: [
|
||||
{
|
||||
key: 'column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
],
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
});
|
||||
|
||||
logsUpdater.updateFormatting(newFormatting, PreferenceMode.DIRECT);
|
||||
|
||||
// Should always update URL for both modes
|
||||
expect(redirectWithOptionsData).toHaveBeenCalledWith({
|
||||
...defaultOptionsQuery,
|
||||
...mockPreferences.formatting,
|
||||
...newFormatting,
|
||||
});
|
||||
|
||||
// Should update localStorage in direct mode
|
||||
const storedData = JSON.parse(
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
|
||||
);
|
||||
expect(storedData.maxLines).toBe(5);
|
||||
expect(storedData.format).toBe('json');
|
||||
expect(storedData.fontSize).toBe('large');
|
||||
expect(storedData.version).toBe(1);
|
||||
expect(storedData.selectColumns).toEqual([
|
||||
{
|
||||
key: 'column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
]); // Should preserve columns
|
||||
});
|
||||
|
||||
it('should not update localStorage for savedView mode in updateFormatting', () => {
|
||||
const logsUpdater = getLogsUpdaterConfig(
|
||||
mockPreferences,
|
||||
redirectWithOptionsData,
|
||||
setSavedViewPreferences,
|
||||
);
|
||||
|
||||
const newFormatting: FormattingOptions = {
|
||||
maxLines: 5,
|
||||
format: 'json' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 1,
|
||||
};
|
||||
|
||||
// Set initial localStorage data
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: [
|
||||
{
|
||||
key: 'column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
],
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
});
|
||||
|
||||
logsUpdater.updateFormatting(newFormatting, PreferenceMode.SAVED_VIEW);
|
||||
|
||||
// Should not override localStorage in savedView mode
|
||||
const storedData = JSON.parse(
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
|
||||
);
|
||||
expect(storedData.maxLines).toBe(2); // Should remain the same
|
||||
expect(storedData.format).toBe('table'); // Should remain the same
|
||||
|
||||
// Should update saved view preferences
|
||||
expect(setSavedViewPreferences).toHaveBeenCalledWith(expect.any(Function));
|
||||
});
|
||||
|
||||
it('should initialize localStorage if it does not exist', () => {
|
||||
const logsUpdater = getLogsUpdaterConfig(
|
||||
mockPreferences,
|
||||
redirectWithOptionsData,
|
||||
setSavedViewPreferences,
|
||||
);
|
||||
|
||||
const newFormatting: FormattingOptions = {
|
||||
maxLines: 5,
|
||||
format: 'json' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 1,
|
||||
};
|
||||
|
||||
// No initial localStorage data
|
||||
|
||||
logsUpdater.updateFormatting(newFormatting, PreferenceMode.DIRECT);
|
||||
|
||||
// Should create localStorage entry
|
||||
const storedData = JSON.parse(
|
||||
mockLocalStorage[LOCALSTORAGE.LOGS_LIST_OPTIONS],
|
||||
);
|
||||
expect(storedData.maxLines).toBe(5);
|
||||
expect(storedData.format).toBe('json');
|
||||
expect(storedData.fontSize).toBe('large');
|
||||
expect(storedData.version).toBe(1);
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,131 @@
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import tracesLoaderConfig from '../configs/tracesLoaderConfig';
|
||||
|
||||
// Mock localStorage
|
||||
const mockLocalStorage: Record<string, string> = {};
|
||||
|
||||
jest.mock('api/browser/localstorage/get', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn((key: string) => mockLocalStorage[key] || null),
|
||||
}));
|
||||
|
||||
describe('tracesLoaderConfig', () => {
|
||||
// Save original location object
|
||||
const originalWindowLocation = window.location;
|
||||
let mockedLocation: Partial<Location>;
|
||||
|
||||
beforeEach(() => {
|
||||
// Setup a mocked location object
|
||||
mockedLocation = {
|
||||
...originalWindowLocation,
|
||||
search: '',
|
||||
};
|
||||
|
||||
// Mock the window.location property
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: mockedLocation,
|
||||
writable: true,
|
||||
});
|
||||
|
||||
// Clear mocked localStorage
|
||||
Object.keys(mockLocalStorage).forEach((key) => {
|
||||
delete mockLocalStorage[key];
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original location
|
||||
Object.defineProperty(window, 'location', {
|
||||
configurable: true,
|
||||
value: originalWindowLocation,
|
||||
writable: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('should have priority order: local, url, default', () => {
|
||||
expect(tracesLoaderConfig.priority).toEqual(['local', 'url', 'default']);
|
||||
});
|
||||
|
||||
it('should load from localStorage when available', async () => {
|
||||
const mockColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'test-trace-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
// Set up localStorage mock data with the correct key from LOCALSTORAGE enum
|
||||
mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: mockColumns,
|
||||
});
|
||||
|
||||
const result = await tracesLoaderConfig.local();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: mockColumns,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle invalid localStorage data gracefully', async () => {
|
||||
// Set up invalid localStorage mock data
|
||||
mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = 'invalid-json';
|
||||
|
||||
const result = await tracesLoaderConfig.local();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: [] as BaseAutocompleteData[],
|
||||
});
|
||||
});
|
||||
|
||||
it('should load from URL when available', async () => {
|
||||
const mockColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'url-trace-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
// Set up URL search params
|
||||
mockedLocation.search = `?options=${encodeURIComponent(
|
||||
JSON.stringify({
|
||||
selectColumns: mockColumns,
|
||||
}),
|
||||
)}`;
|
||||
|
||||
const result = await tracesLoaderConfig.url();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: mockColumns,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle invalid URL data gracefully', async () => {
|
||||
// Set up invalid URL search params
|
||||
mockedLocation.search = '?options=invalid-json';
|
||||
|
||||
const result = await tracesLoaderConfig.url();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: [] as BaseAutocompleteData[],
|
||||
});
|
||||
});
|
||||
|
||||
it('should provide default values when no other source is available', async () => {
|
||||
const result = await tracesLoaderConfig.default();
|
||||
|
||||
expect(result).toEqual({
|
||||
columns: defaultTraceSelectedColumns as BaseAutocompleteData[],
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,142 @@
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import getTracesUpdaterConfig from '../configs/tracesUpdaterConfig';
|
||||
import { PreferenceMode } from '../types';
|
||||
|
||||
// Mock setLocalStorageKey
|
||||
const mockSetLocalStorageKey = jest.fn();
|
||||
jest.mock('api/browser/localstorage/set', () => ({
|
||||
__esModule: true,
|
||||
default: (key: string, value: string): void =>
|
||||
mockSetLocalStorageKey(key, value),
|
||||
}));
|
||||
|
||||
// Mock localStorage
|
||||
let mockLocalStorage: Record<string, string> = {};
|
||||
Object.defineProperty(global, 'localStorage', {
|
||||
value: {
|
||||
getItem: jest.fn((key: string) => mockLocalStorage[key] || null),
|
||||
setItem: jest.fn((key: string, value: string) => {
|
||||
mockLocalStorage[key] = value;
|
||||
}),
|
||||
},
|
||||
writable: true,
|
||||
});
|
||||
|
||||
describe('tracesUpdaterConfig', () => {
|
||||
// Mock functions
|
||||
const mockRedirectWithOptionsData = jest.fn();
|
||||
const mockSetSavedViewPreferences = jest.fn();
|
||||
|
||||
// Test data
|
||||
const mockColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'test-trace-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
// Reset mockLocalStorage
|
||||
mockLocalStorage = {};
|
||||
});
|
||||
|
||||
it('should update columns in localStorage and redirect with options in direct mode', () => {
|
||||
const tracesUpdaterConfig = getTracesUpdaterConfig(
|
||||
mockRedirectWithOptionsData,
|
||||
mockSetSavedViewPreferences,
|
||||
);
|
||||
|
||||
tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.DIRECT);
|
||||
|
||||
// Should redirect with the updated columns
|
||||
expect(mockRedirectWithOptionsData).toHaveBeenCalledWith({
|
||||
...defaultOptionsQuery,
|
||||
selectColumns: mockColumns,
|
||||
});
|
||||
|
||||
// Should set localStorage with the updated columns
|
||||
expect(mockSetLocalStorageKey).toHaveBeenCalledWith(
|
||||
LOCALSTORAGE.TRACES_LIST_OPTIONS,
|
||||
JSON.stringify({ selectColumns: mockColumns }),
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge with existing localStorage data in direct mode', () => {
|
||||
// Setup existing localStorage data
|
||||
mockLocalStorage[LOCALSTORAGE.TRACES_LIST_OPTIONS] = JSON.stringify({
|
||||
selectColumns: [
|
||||
{
|
||||
key: 'existing-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
],
|
||||
otherProp: 'value',
|
||||
});
|
||||
|
||||
const tracesUpdaterConfig = getTracesUpdaterConfig(
|
||||
mockRedirectWithOptionsData,
|
||||
mockSetSavedViewPreferences,
|
||||
);
|
||||
|
||||
tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.DIRECT);
|
||||
|
||||
// Should set localStorage with the updated columns while preserving other props
|
||||
expect(mockSetLocalStorageKey).toHaveBeenCalledWith(
|
||||
LOCALSTORAGE.TRACES_LIST_OPTIONS,
|
||||
JSON.stringify({
|
||||
selectColumns: mockColumns,
|
||||
otherProp: 'value',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update savedViewPreferences in savedView mode', () => {
|
||||
const tracesUpdaterConfig = getTracesUpdaterConfig(
|
||||
mockRedirectWithOptionsData,
|
||||
mockSetSavedViewPreferences,
|
||||
);
|
||||
|
||||
tracesUpdaterConfig.updateColumns(mockColumns, PreferenceMode.SAVED_VIEW);
|
||||
|
||||
// Should not redirect or modify localStorage in savedView mode
|
||||
expect(mockRedirectWithOptionsData).not.toHaveBeenCalled();
|
||||
expect(mockSetLocalStorageKey).not.toHaveBeenCalled();
|
||||
|
||||
// Should update savedViewPreferences
|
||||
expect(mockSetSavedViewPreferences).toHaveBeenCalledWith({
|
||||
columns: mockColumns,
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
fontSize: 'small',
|
||||
version: 1,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should have a no-op updateFormatting method', () => {
|
||||
const tracesUpdaterConfig = getTracesUpdaterConfig(
|
||||
mockRedirectWithOptionsData,
|
||||
mockSetSavedViewPreferences,
|
||||
);
|
||||
|
||||
// Call updateFormatting and verify it does nothing
|
||||
tracesUpdaterConfig.updateFormatting();
|
||||
|
||||
// No API calls should be made
|
||||
expect(mockRedirectWithOptionsData).not.toHaveBeenCalled();
|
||||
expect(mockSetLocalStorageKey).not.toHaveBeenCalled();
|
||||
expect(mockSetSavedViewPreferences).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,152 @@
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import logsLoaderConfig from '../configs/logsLoaderConfig';
|
||||
import { usePreferenceLoader } from '../loader/usePreferenceLoader';
|
||||
|
||||
// Mock the config loaders
|
||||
jest.mock('../configs/logsLoaderConfig', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
priority: ['local', 'url', 'default'],
|
||||
local: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'local-column' }],
|
||||
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
|
||||
}),
|
||||
url: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'url-column' }],
|
||||
formatting: { maxLines: 3, format: 'table', fontSize: 'small', version: 1 },
|
||||
}),
|
||||
default: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'default-column' }],
|
||||
formatting: { maxLines: 2, format: 'table', fontSize: 'small', version: 1 },
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
jest.mock('../configs/tracesLoaderConfig', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
priority: ['local', 'url', 'default'],
|
||||
local: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'local-trace-column' }],
|
||||
}),
|
||||
url: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'url-trace-column' }],
|
||||
}),
|
||||
default: jest.fn().mockResolvedValue({
|
||||
columns: [{ name: 'default-trace-column' }],
|
||||
}),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('usePreferenceLoader', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should load logs preferences based on priority order', async () => {
|
||||
const setReSync = jest.fn();
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceLoader({
|
||||
dataSource: DataSource.LOGS,
|
||||
reSync: false,
|
||||
setReSync,
|
||||
}),
|
||||
);
|
||||
|
||||
// Initially it should be loading
|
||||
expect(result.current.loading).toBe(true);
|
||||
expect(result.current.preferences).toBe(null);
|
||||
expect(result.current.error).toBe(null);
|
||||
|
||||
// Wait for the loader to complete
|
||||
await waitFor(() => {
|
||||
expect(result.current.loading).toBe(false);
|
||||
});
|
||||
|
||||
// Should have loaded from local storage (highest priority)
|
||||
expect(result.current.preferences).toEqual({
|
||||
columns: [{ name: 'local-column' }],
|
||||
formatting: { maxLines: 5, format: 'table', fontSize: 'medium', version: 1 },
|
||||
});
|
||||
expect(result.current.error).toBe(null);
|
||||
expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false
|
||||
});
|
||||
|
||||
it('should load traces preferences', async () => {
|
||||
const setReSync = jest.fn();
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceLoader({
|
||||
dataSource: DataSource.TRACES,
|
||||
reSync: false,
|
||||
setReSync,
|
||||
}),
|
||||
);
|
||||
|
||||
// Wait for the loader to complete
|
||||
await waitFor(() => {
|
||||
expect(result.current.loading).toBe(false);
|
||||
});
|
||||
|
||||
// Should have loaded trace columns
|
||||
expect(result.current.preferences).toEqual({
|
||||
columns: [{ name: 'local-trace-column' }],
|
||||
});
|
||||
expect(setReSync).not.toHaveBeenCalled(); // Should not call setReSync when reSync is false
|
||||
});
|
||||
|
||||
it('should call setReSync when reSync is true', async () => {
|
||||
const setReSync = jest.fn();
|
||||
|
||||
// Test that the hook calls setReSync(false) when reSync is true
|
||||
// We'll unmount quickly to avoid the infinite loop
|
||||
const { unmount } = renderHook(() =>
|
||||
usePreferenceLoader({
|
||||
dataSource: DataSource.LOGS,
|
||||
reSync: true,
|
||||
setReSync,
|
||||
}),
|
||||
);
|
||||
// Wait for the effect to run
|
||||
await waitFor(() => {
|
||||
expect(setReSync).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
// Unmount to stop the effect
|
||||
unmount();
|
||||
|
||||
// Should have called setReSync(false) to reset the reSync flag
|
||||
expect(setReSync).toHaveBeenCalledWith(false);
|
||||
});
|
||||
|
||||
it('should handle errors during loading', async () => {
|
||||
// Mock an error in the loader using jest.spyOn
|
||||
const localSpy = jest.spyOn(logsLoaderConfig, 'local');
|
||||
localSpy.mockRejectedValueOnce(new Error('Loading failed'));
|
||||
|
||||
const setReSync = jest.fn();
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceLoader({
|
||||
dataSource: DataSource.LOGS,
|
||||
reSync: false,
|
||||
setReSync,
|
||||
}),
|
||||
);
|
||||
|
||||
// Wait for the loader to complete
|
||||
await waitFor(() => {
|
||||
expect(result.current.loading).toBe(false);
|
||||
});
|
||||
|
||||
// Should have set the error
|
||||
expect(result.current.error).toBeInstanceOf(Error);
|
||||
expect(result.current.error?.message).toBe('Loading failed');
|
||||
expect(result.current.preferences).toBe(null);
|
||||
|
||||
// Restore original implementation
|
||||
localSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,240 @@
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { LogViewMode } from 'container/LogsTable';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import {
|
||||
FormattingOptions,
|
||||
PreferenceMode,
|
||||
Preferences,
|
||||
} from 'providers/preferences/types';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import { usePreferenceUpdater } from '../updater/usePreferenceUpdater';
|
||||
|
||||
// Mock the config updaters
|
||||
const mockUpdateColumns = jest.fn();
|
||||
const mockUpdateFormatting = jest.fn();
|
||||
|
||||
jest.mock('../configs/logsUpdaterConfig', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(() => ({
|
||||
updateColumns: mockUpdateColumns,
|
||||
updateFormatting: mockUpdateFormatting,
|
||||
})),
|
||||
}));
|
||||
|
||||
jest.mock('../configs/tracesUpdaterConfig', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockImplementation(() => ({
|
||||
updateColumns: mockUpdateColumns,
|
||||
updateFormatting: mockUpdateFormatting,
|
||||
})),
|
||||
}));
|
||||
|
||||
// Mock the URL query hook
|
||||
jest.mock('hooks/useUrlQueryData', () => ({
|
||||
__esModule: true,
|
||||
default: jest.fn().mockReturnValue({
|
||||
redirectWithQuery: jest.fn(),
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('usePreferenceUpdater', () => {
|
||||
const mockPreferences: Preferences = {
|
||||
columns: [],
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table' as LogViewMode,
|
||||
fontSize: 'small' as FontSize,
|
||||
version: 1,
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('should return updateColumns and updateFormatting functions', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.LOGS,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
// Should return the update functions
|
||||
expect(typeof result.current.updateColumns).toBe('function');
|
||||
expect(typeof result.current.updateFormatting).toBe('function');
|
||||
});
|
||||
|
||||
it('should call the logs updater for updateColumns with logs dataSource', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
const newColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'new-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.LOGS,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current.updateColumns(newColumns);
|
||||
});
|
||||
|
||||
// Should call the logs updater
|
||||
expect(mockUpdateColumns).toHaveBeenCalledWith(
|
||||
newColumns,
|
||||
PreferenceMode.DIRECT,
|
||||
);
|
||||
expect(setReSync).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('should call the logs updater for updateFormatting with logs dataSource', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
const newFormatting: FormattingOptions = {
|
||||
maxLines: 10,
|
||||
format: 'table' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 1,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.LOGS,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current.updateFormatting(newFormatting);
|
||||
});
|
||||
|
||||
// Should call the logs updater
|
||||
expect(mockUpdateFormatting).toHaveBeenCalledWith(
|
||||
newFormatting,
|
||||
PreferenceMode.DIRECT,
|
||||
);
|
||||
expect(setReSync).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('should call the traces updater for updateColumns with traces dataSource', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
const newColumns: BaseAutocompleteData[] = [
|
||||
{
|
||||
key: 'new-trace-column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
];
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.TRACES,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current.updateColumns(newColumns);
|
||||
});
|
||||
|
||||
// Should call the traces updater
|
||||
expect(mockUpdateColumns).toHaveBeenCalledWith(
|
||||
newColumns,
|
||||
PreferenceMode.DIRECT,
|
||||
);
|
||||
expect(setReSync).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('should call the traces updater for updateFormatting with traces dataSource', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
const newFormatting: FormattingOptions = {
|
||||
maxLines: 10,
|
||||
format: 'table' as LogViewMode,
|
||||
fontSize: 'large' as FontSize,
|
||||
version: 1,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.TRACES,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current.updateFormatting(newFormatting);
|
||||
});
|
||||
|
||||
// Should call the traces updater
|
||||
expect(mockUpdateFormatting).toHaveBeenCalledWith(
|
||||
newFormatting,
|
||||
PreferenceMode.DIRECT,
|
||||
);
|
||||
expect(setReSync).toHaveBeenCalledWith(true);
|
||||
});
|
||||
|
||||
it('should increment reSync counter when updates are called', () => {
|
||||
const setReSync = jest.fn();
|
||||
const setSavedViewPreferences = jest.fn();
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
usePreferenceUpdater({
|
||||
dataSource: DataSource.LOGS,
|
||||
mode: PreferenceMode.DIRECT,
|
||||
preferences: mockPreferences,
|
||||
setReSync,
|
||||
setSavedViewPreferences,
|
||||
}),
|
||||
);
|
||||
|
||||
act(() => {
|
||||
result.current.updateColumns([
|
||||
{
|
||||
key: 'column',
|
||||
type: 'tag',
|
||||
dataType: DataTypes.String,
|
||||
isColumn: true,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
expect(setReSync).toHaveBeenCalledWith(true);
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,67 @@
/* eslint-disable no-empty */
import getLocalStorageKey from 'api/browser/localstorage/get';
import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
import { FontSize } from 'container/OptionsMenu/types';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';

import { FormattingOptions } from '../types';

// --- LOGS preferences loader config ---
const logsLoaders = {
	local: async (): Promise<{
		columns: BaseAutocompleteData[];
		formatting: FormattingOptions;
	}> => {
		const local = getLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS);
		if (local) {
			try {
				const parsed = JSON.parse(local);
				return {
					columns: parsed.selectColumns || [],
					formatting: {
						maxLines: parsed.maxLines ?? 2,
						format: parsed.format ?? 'table',
						fontSize: parsed.fontSize ?? 'small',
						version: parsed.version ?? 1,
					},
				};
			} catch {}
		}
		return { columns: [], formatting: undefined } as any;
	},
	url: async (): Promise<{
		columns: BaseAutocompleteData[];
		formatting: FormattingOptions;
	}> => {
		const urlParams = new URLSearchParams(window.location.search);
		try {
			const options = JSON.parse(urlParams.get('options') || '{}');
			return {
				columns: options.selectColumns || [],
				formatting: {
					maxLines: options.maxLines ?? 2,
					format: options.format ?? 'table',
					fontSize: options.fontSize ?? 'small',
					version: options.version ?? 1,
				},
			};
		} catch {}
		return { columns: [], formatting: undefined } as any;
	},
	default: async (): Promise<{
		columns: BaseAutocompleteData[];
		formatting: FormattingOptions;
	}> => ({
		columns: defaultLogsSelectedColumns as BaseAutocompleteData[],
		formatting: {
			maxLines: 2,
			format: 'table',
			fontSize: 'small' as FontSize,
			version: 1,
		},
	}),
	priority: ['local', 'url', 'default'] as const,
};

export default logsLoaders;
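The loaders above are plain async functions, so they can be exercised on their own. A minimal sketch of sequential, priority-ordered resolution follows; it is simplified (the shipped usePreferenceLoader further down resolves columns and formatting independently), and the absolute import path is assumed from the relative '../configs/logsLoaderConfig' imports used in the tests.

// Sketch only, not the shipped hook: walk the priority list and take the first
// source that yields any columns. Import path assumed.
import logsLoaders from 'providers/preferences/configs/logsLoaderConfig';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';

async function firstAvailableLogsColumns(): Promise<BaseAutocompleteData[]> {
	for (const source of logsLoaders.priority) {
		const { columns } = await logsLoaders[source]();
		if (columns.length > 0) return columns;
	}
	return [];
}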
@ -0,0 +1,85 @@
import setLocalStorageKey from 'api/browser/localstorage/set';
import { LOCALSTORAGE } from 'constants/localStorage';
import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
import { FontSize, OptionsQuery } from 'container/OptionsMenu/types';
import { Dispatch, SetStateAction } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';

import { FormattingOptions, PreferenceMode, Preferences } from '../types';

// --- LOGS preferences updater config ---
const getLogsUpdaterConfig = (
	preferences: Preferences | null,
	redirectWithOptionsData: (options: OptionsQuery) => void,
	setSavedViewPreferences: Dispatch<SetStateAction<Preferences | null>>,
): {
	updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
	updateFormatting: (newFormatting: FormattingOptions, mode: string) => void;
} => ({
	updateColumns: (newColumns: BaseAutocompleteData[], mode: string): void => {
		if (mode === PreferenceMode.SAVED_VIEW) {
			setSavedViewPreferences((prev) => {
				if (!prev) {
					return {
						columns: newColumns,
						formatting: {
							maxLines: 2,
							format: 'table',
							fontSize: 'small' as FontSize,
							version: 1,
						},
					};
				}

				return {
					...prev,
					columns: newColumns,
				};
			});
		}

		if (mode === PreferenceMode.DIRECT) {
			// just need to update the columns see for remove props
			redirectWithOptionsData({
				...defaultOptionsQuery,
				...preferences?.formatting,
				selectColumns: newColumns,
			});

			// Also update local storage
			const local = JSON.parse(
				localStorage.getItem(LOCALSTORAGE.LOGS_LIST_OPTIONS) || '{}',
			);
			local.selectColumns = newColumns;
			setLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS, JSON.stringify(local));
		}
	},
	updateFormatting: (newFormatting: FormattingOptions, mode: string): void => {
		if (mode === PreferenceMode.SAVED_VIEW) {
			setSavedViewPreferences((prev) => {
				if (!prev) return { columns: [], formatting: newFormatting };
				return {
					...prev,
					formatting: newFormatting,
				};
			});
		}

		if (mode === PreferenceMode.DIRECT) {
			redirectWithOptionsData({
				...defaultOptionsQuery,
				...preferences?.formatting,
				...newFormatting,
			});

			// Also update local storage
			const local = JSON.parse(
				localStorage.getItem(LOCALSTORAGE.LOGS_LIST_OPTIONS) || '{}',
			);
			Object.assign(local, newFormatting);
			setLocalStorageKey(LOCALSTORAGE.LOGS_LIST_OPTIONS, JSON.stringify(local));
		}
	},
});

export default getLogsUpdaterConfig;
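A hedged sketch of wiring the factory above in direct mode. The two callbacks are placeholders for what usePreferenceUpdater supplies (redirectWithOptionsData from useUrlQueryData and the saved-view state setter), the import paths are assumed, and the column literal mirrors the one used in the tests.

// Sketch only: direct mode pushes the merged options to the URL and mirrors them into
// LOCALSTORAGE.LOGS_LIST_OPTIONS; saved-view mode only touches in-memory state.
import getLogsUpdaterConfig from 'providers/preferences/configs/logsUpdaterConfig';
import { PreferenceMode } from 'providers/preferences/types';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';

const logsUpdater = getLogsUpdaterConfig(
	null, // no in-memory preferences yet
	(options) => console.log('redirect with', options), // placeholder for redirectWithOptionsData
	() => {}, // placeholder for setSavedViewPreferences
);

logsUpdater.updateColumns(
	[{ key: 'new-column', type: 'tag', dataType: DataTypes.String, isColumn: true }],
	PreferenceMode.DIRECT,
);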
@ -0,0 +1,43 @@
|
||||
/* eslint-disable no-empty */
|
||||
import getLocalStorageKey from 'api/browser/localstorage/get';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { defaultTraceSelectedColumns } from 'container/OptionsMenu/constants';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
// --- TRACES preferences loader config ---
|
||||
const tracesLoaders = {
|
||||
local: async (): Promise<{
|
||||
columns: BaseAutocompleteData[];
|
||||
}> => {
|
||||
const local = getLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS);
|
||||
if (local) {
|
||||
try {
|
||||
const parsed = JSON.parse(local);
|
||||
return {
|
||||
columns: parsed.selectColumns || [],
|
||||
};
|
||||
} catch {}
|
||||
}
|
||||
return { columns: [] };
|
||||
},
|
||||
url: async (): Promise<{
|
||||
columns: BaseAutocompleteData[];
|
||||
}> => {
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
try {
|
||||
const options = JSON.parse(urlParams.get('options') || '{}');
|
||||
return {
|
||||
columns: options.selectColumns || [],
|
||||
};
|
||||
} catch {}
|
||||
return { columns: [] };
|
||||
},
|
||||
default: async (): Promise<{
|
||||
columns: BaseAutocompleteData[];
|
||||
}> => ({
|
||||
columns: defaultTraceSelectedColumns as BaseAutocompleteData[],
|
||||
}),
|
||||
priority: ['local', 'url', 'default'] as const,
|
||||
};
|
||||
|
||||
export default tracesLoaders;
|
||||
@ -0,0 +1,49 @@
|
||||
import setLocalStorageKey from 'api/browser/localstorage/set';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { defaultOptionsQuery } from 'container/OptionsMenu/constants';
|
||||
import { FontSize, OptionsQuery } from 'container/OptionsMenu/types';
|
||||
import { Dispatch, SetStateAction } from 'react';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import { PreferenceMode, Preferences } from '../types';
|
||||
|
||||
// --- TRACES preferences updater config ---
|
||||
const getTracesUpdaterConfig = (
|
||||
redirectWithOptionsData: (options: OptionsQuery) => void,
|
||||
setSavedViewPreferences: Dispatch<SetStateAction<Preferences | null>>,
|
||||
): {
|
||||
updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
|
||||
updateFormatting: () => void;
|
||||
} => ({
|
||||
updateColumns: (newColumns: BaseAutocompleteData[], mode: string): void => {
|
||||
// remove the formatting props
|
||||
if (mode === PreferenceMode.SAVED_VIEW) {
|
||||
setSavedViewPreferences({
|
||||
columns: newColumns,
|
||||
formatting: {
|
||||
maxLines: 2,
|
||||
format: 'table',
|
||||
fontSize: 'small' as FontSize,
|
||||
version: 1,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (mode === PreferenceMode.DIRECT) {
|
||||
// just need to update the columns see for remove props
|
||||
redirectWithOptionsData({
|
||||
...defaultOptionsQuery,
|
||||
selectColumns: newColumns,
|
||||
});
|
||||
|
||||
const local = JSON.parse(
|
||||
localStorage.getItem(LOCALSTORAGE.TRACES_LIST_OPTIONS) || '{}',
|
||||
);
|
||||
local.selectColumns = newColumns;
|
||||
setLocalStorageKey(LOCALSTORAGE.TRACES_LIST_OPTIONS, JSON.stringify(local));
|
||||
}
|
||||
},
|
||||
updateFormatting: (): void => {}, // no-op for traces
|
||||
});
|
||||
|
||||
export default getTracesUpdaterConfig;
|
||||
@ -0,0 +1,84 @@
import useUrlQuery from 'hooks/useUrlQuery';
import {
	PreferenceContextValue,
	PreferenceMode,
} from 'providers/preferences/types';
import React, { createContext, useContext, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import { DataSource } from 'types/common/queryBuilder';

import { usePreferenceSync } from '../sync/usePreferenceSync';

const PreferenceContext = createContext<PreferenceContextValue | undefined>(
	undefined,
);

export function PreferenceContextProvider({
	children,
}: {
	children: React.ReactNode;
}): JSX.Element {
	const location = useLocation();
	const params = useUrlQuery();

	let savedViewId = '';
	const viewKeyParam = params.get('viewKey');
	if (viewKeyParam) {
		try {
			savedViewId = JSON.parse(viewKeyParam);
		} catch (e) {
			console.error(e);
		}
	}
	let dataSource: DataSource = DataSource.LOGS;
	if (location.pathname.includes('traces')) dataSource = DataSource.TRACES;

	const {
		preferences,
		loading,
		error,
		updateColumns,
		updateFormatting,
	} = usePreferenceSync({
		mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
		savedViewId: savedViewId || undefined,
		dataSource,
	});

	const value = useMemo<PreferenceContextValue>(
		() => ({
			preferences,
			loading,
			error,
			mode: savedViewId ? PreferenceMode.SAVED_VIEW : PreferenceMode.DIRECT,
			savedViewId: savedViewId || undefined,
			dataSource,
			updateColumns,
			updateFormatting,
		}),
		[
			savedViewId,
			dataSource,
			preferences,
			loading,
			error,
			updateColumns,
			updateFormatting,
		],
	);

	return (
		<PreferenceContext.Provider value={value}>
			{children}
		</PreferenceContext.Provider>
	);
}

export function usePreferenceContext(): PreferenceContextValue {
	const ctx = useContext(PreferenceContext);
	if (!ctx)
		throw new Error(
			'usePreferenceContext must be used within PreferenceContextProvider',
		);
	return ctx;
}
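For context, a minimal sketch of a consumer of this context. Any such component has to render under PreferenceContextProvider (as the Explorer route components above now do), otherwise the hook throws; the component name is hypothetical.

// Sketch only: reads the resolved preferences and the derived data source.
import { usePreferenceContext } from 'providers/preferences/context/PreferenceContextProvider';

function SelectedColumnCount(): JSX.Element {
	const { preferences, loading, dataSource } = usePreferenceContext();
	if (loading) return <span>Loading preferences...</span>;
	return (
		<span>
			{dataSource}: {preferences?.columns.length ?? 0} columns selected
		</span>
	);
}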
frontend/src/providers/preferences/loader/usePreferenceLoader.ts (new file, 108 lines)
@ -0,0 +1,108 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-empty */
import { useEffect, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';

import logsLoaderConfig from '../configs/logsLoaderConfig';
import tracesLoaderConfig from '../configs/tracesLoaderConfig';
import { FormattingOptions, Preferences } from '../types';

// Generic preferences loader that works with any config
async function preferencesLoader<T>(config: {
	priority: readonly string[];
	[key: string]: any;
}): Promise<T> {
	const findValidLoader = async (): Promise<T> => {
		// Try each loader in priority order
		const results = await Promise.all(
			config.priority.map(async (source) => ({
				source,
				result: await config[source](),
			})),
		);

		// Find valid columns and formatting independently
		const validColumnsResult = results.find(
			({ result }) => result.columns?.length,
		);
		const validFormattingResult = results.find(({ result }) => result.formatting);

		// Combine valid results or fallback to default
		const finalResult = {
			columns: validColumnsResult?.result.columns || config.default().columns,
			formatting:
				validFormattingResult?.result.formatting || config.default().formatting,
		};

		return finalResult as T;
	};

	return findValidLoader();
}

// Use the generic loader with specific configs
async function logsPreferencesLoader(): Promise<{
	columns: BaseAutocompleteData[];
	formatting: FormattingOptions;
}> {
	return preferencesLoader(logsLoaderConfig);
}

async function tracesPreferencesLoader(): Promise<{
	columns: BaseAutocompleteData[];
}> {
	return preferencesLoader(tracesLoaderConfig);
}

export function usePreferenceLoader({
	dataSource,
	reSync,
	setReSync,
}: {
	dataSource: DataSource;
	reSync: boolean;
	setReSync: (value: boolean) => void;
}): {
	preferences: Preferences | null;
	loading: boolean;
	error: Error | null;
} {
	const [preferences, setPreferences] = useState<Preferences | null>(null);
	const [loading, setLoading] = useState(true);
	const [error, setError] = useState<Error | null>(null);

	useEffect((): void => {
		async function loadPreferences(): Promise<void> {
			setLoading(true);
			setError(null);

			try {
				if (dataSource === DataSource.LOGS) {
					const { columns, formatting } = await logsPreferencesLoader();
					setPreferences({ columns, formatting });
				}

				if (dataSource === DataSource.TRACES) {
					const { columns } = await tracesPreferencesLoader();
					setPreferences({ columns });
				}
			} catch (e) {
				setError(e as Error);
			} finally {
				setLoading(false);
				// Reset reSync back to false after loading is complete
				if (reSync) {
					setReSync(false);
				}
			}
		}

		// Only load preferences on initial mount or when reSync is true
		if (loading || reSync) {
			loadPreferences();
		}
	}, [dataSource, reSync, setReSync, loading]);

	return { preferences, loading, error };
}
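The resolution rule above can be read as a small pure function: columns and formatting are picked independently from the first source (in priority order) that provides them, and only the missing pieces fall back to the defaults. A hedged sketch, not part of the shipped code:

// resolve([local, url], defaults) keeps local columns even when only the URL carries
// formatting, mirroring the independent lookups in preferencesLoader above.
interface LoadedSlice {
	columns: unknown[];
	formatting?: unknown;
}

function resolve(results: LoadedSlice[], defaults: LoadedSlice): LoadedSlice {
	return {
		columns: results.find((r) => r.columns.length > 0)?.columns ?? defaults.columns,
		formatting: results.find((r) => r.formatting)?.formatting ?? defaults.formatting,
	};
}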
frontend/src/providers/preferences/sync/usePreferenceSync.ts (new file, 84 lines)
@ -0,0 +1,84 @@
import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
import { defaultSelectedColumns as defaultTracesSelectedColumns } from 'container/TracesExplorer/ListView/configs';
import { useGetAllViews } from 'hooks/saveViews/useGetAllViews';
import { useEffect, useState } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';

import { usePreferenceLoader } from '../loader/usePreferenceLoader';
import { FormattingOptions, PreferenceMode, Preferences } from '../types';
import { usePreferenceUpdater } from '../updater/usePreferenceUpdater';

export function usePreferenceSync({
	mode,
	dataSource,
	savedViewId,
}: {
	mode: PreferenceMode;
	dataSource: DataSource;
	savedViewId: string | undefined;
}): {
	preferences: Preferences | null;
	loading: boolean;
	error: Error | null;
	updateColumns: (newColumns: BaseAutocompleteData[]) => void;
	updateFormatting: (newFormatting: FormattingOptions) => void;
} {
	const { data: viewsData } = useGetAllViews(dataSource);

	const [
		savedViewPreferences,
		setSavedViewPreferences,
	] = useState<Preferences | null>(null);

	useEffect(() => {
		const extraData = viewsData?.data?.data?.find(
			(view) => view.id === savedViewId,
		)?.extraData;

		const parsedExtraData = JSON.parse(extraData || '{}');
		let columns: BaseAutocompleteData[] = [];
		let formatting: FormattingOptions | undefined;
		if (dataSource === DataSource.LOGS) {
			columns = parsedExtraData?.selectColumns || defaultLogsSelectedColumns;
			formatting = {
				maxLines: parsedExtraData?.maxLines ?? 2,
				format: parsedExtraData?.format ?? 'table',
				fontSize: parsedExtraData?.fontSize ?? 'small',
				version: parsedExtraData?.version ?? 1,
			};
		}
		if (dataSource === DataSource.TRACES) {
			columns = parsedExtraData?.selectColumns || defaultTracesSelectedColumns;
		}
		setSavedViewPreferences({ columns, formatting });
	}, [viewsData, dataSource, savedViewId, mode]);

	// We are using a reSync state because we have URL updates as well as local storage updates
	// and we want to make sure we are always using the latest preferences
	const [reSync, setReSync] = useState(false);
	const { preferences, loading, error } = usePreferenceLoader({
		dataSource,
		reSync,
		setReSync,
	});

	const { updateColumns, updateFormatting } = usePreferenceUpdater({
		dataSource,
		mode,
		preferences,
		setReSync,
		setSavedViewPreferences,
	});

	return {
		preferences:
			mode === PreferenceMode.SAVED_VIEW && savedViewId
				? savedViewPreferences
				: preferences,
		loading,
		error,
		updateColumns,
		updateFormatting,
	};
}
frontend/src/providers/preferences/types/index.ts (Normal file, 32 lines)
@ -0,0 +1,32 @@
import { LogViewMode } from 'container/LogsTable';
import { FontSize } from 'container/OptionsMenu/types';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';

export enum PreferenceMode {
	SAVED_VIEW = 'savedView',
	DIRECT = 'direct',
}

export interface PreferenceContextValue {
	preferences: Preferences | null;
	loading: boolean;
	error: Error | null;
	mode: PreferenceMode;
	savedViewId?: string;
	dataSource: DataSource;
	updateColumns: (newColumns: BaseAutocompleteData[]) => void;
	updateFormatting: (newFormatting: FormattingOptions) => void;
}

export interface FormattingOptions {
	maxLines?: number;
	format?: LogViewMode;
	fontSize?: FontSize;
	version?: number;
}

export interface Preferences {
	columns: BaseAutocompleteData[];
	formatting?: FormattingOptions;
}
@ -0,0 +1,78 @@
import {
	defaultOptionsQuery,
	URL_OPTIONS,
} from 'container/OptionsMenu/constants';
import { OptionsQuery } from 'container/OptionsMenu/types';
import useUrlQueryData from 'hooks/useUrlQueryData';
import { Dispatch, SetStateAction } from 'react';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { DataSource } from 'types/common/queryBuilder';

import getLogsUpdaterConfig from '../configs/logsUpdaterConfig';
import getTracesUpdaterConfig from '../configs/tracesUpdaterConfig';
import { FormattingOptions, Preferences } from '../types';

const metricsUpdater = {
	updateColumns: (): void => {}, // no-op for metrics
	updateFormatting: (): void => {}, // no-op for metrics
};

const getUpdaterConfig = (
	preferences: Preferences | null,
	redirectWithOptionsData: (options: OptionsQuery) => void,
	setSavedViewPreferences: Dispatch<SetStateAction<Preferences | null>>,
): Record<
	DataSource,
	{
		updateColumns: (newColumns: BaseAutocompleteData[], mode: string) => void;
		updateFormatting: (newFormatting: FormattingOptions, mode: string) => void;
	}
> => ({
	[DataSource.LOGS]: getLogsUpdaterConfig(
		preferences,
		redirectWithOptionsData,
		setSavedViewPreferences,
	),
	[DataSource.TRACES]: getTracesUpdaterConfig(
		redirectWithOptionsData,
		setSavedViewPreferences,
	),
	[DataSource.METRICS]: metricsUpdater,
});

export function usePreferenceUpdater({
	dataSource,
	mode,
	preferences,
	setReSync,
	setSavedViewPreferences,
}: {
	dataSource: DataSource;
	mode: string;
	preferences: Preferences | null;
	setReSync: Dispatch<SetStateAction<boolean>>;
	setSavedViewPreferences: Dispatch<SetStateAction<Preferences | null>>;
}): {
	updateColumns: (newColumns: BaseAutocompleteData[]) => void;
	updateFormatting: (newFormatting: FormattingOptions) => void;
} {
	const {
		redirectWithQuery: redirectWithOptionsData,
	} = useUrlQueryData<OptionsQuery>(URL_OPTIONS, defaultOptionsQuery);
	const updater = getUpdaterConfig(
		preferences,
		redirectWithOptionsData,
		setSavedViewPreferences,
	)[dataSource];

	return {
		updateColumns: (newColumns: BaseAutocompleteData[]): void => {
			updater.updateColumns(newColumns, mode);
			setReSync(true);
		},
		updateFormatting: (newFormatting: FormattingOptions): void => {
			updater.updateFormatting(newFormatting, mode);
			setReSync(true);
		},
	};
}
@ -42,7 +42,7 @@ const (
rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))`
increaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window)))`
experimentalRateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
experimentalRateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
experimentalIncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
)
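The experimental expression above changes how a counter reset is handled: when the current per-series value is smaller than the previous one, the series is assumed to have restarted, and the current value is treated as the increase since the reset but is still divided by the elapsed window so the result remains a rate. Because the new template carries the start timestamp in both branches, it now has two %d placeholders, which is why fmt.Sprintf receives the start argument twice in the following hunk. A rough Go sketch of the arithmetic (not the generated ClickHouse SQL):

```go
package main

import "fmt"

// rateWithoutNegative sketches the per-sample arithmetic of the expression:
// prev/curr are consecutive cumulative values, prevTs/currTs their timestamps
// in seconds. On a counter reset (curr < prev) the current value itself is
// taken as the increase since the reset and divided by the elapsed time.
func rateWithoutNegative(prev, curr, prevTs, currTs float64) float64 {
	elapsed := currTs - prevTs
	if curr-prev < 0 {
		return curr / elapsed // reset: still a rate, not the raw sample value
	}
	return (curr - prev) / elapsed
}

func main() {
	// Counter restarts between samples taken 30s apart: 950 -> 30.
	fmt.Println(rateWithoutNegative(950, 30, 0, 30)) // 1, instead of returning 30
	// Normal monotonic increase: 950 -> 980 over 30s.
	fmt.Println(rateWithoutNegative(950, 980, 0, 30)) // 1
}
```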
@ -158,7 +158,7 @@ func prepareTimeAggregationSubQuery(start, end, step int64, mq *v3.BuilderQuery)
innerSubQuery := fmt.Sprintf(queryTmpl, selectLabelsAny, step, op, timeSeriesSubQuery)
rateExp := rateWithoutNegative
if _, ok := os.LookupEnv("EXPERIMENTAL_RATE_WITHOUT_NEGATIVE"); ok {
rateExp = fmt.Sprintf(experimentalRateWithoutNegative, start)
rateExp = fmt.Sprintf(experimentalRateWithoutNegative, start, start)
}
rateQueryTmpl :=
"SELECT %s ts, " + rateExp +

@ -14,6 +14,12 @@ var (
sixHoursInMilliseconds = time.Hour.Milliseconds() * 6
oneDayInMilliseconds = time.Hour.Milliseconds() * 24
oneWeekInMilliseconds = oneDayInMilliseconds * 7

// when the query requests for almost 1 day, but not exactly 1 day, we need to add an offset to the end time
// to make sure that we are using the correct table
// this is because the start gets adjusted to the nearest step interval and uses the 5m table for 4m step interval
// leading to time series that doesn't best represent the rate of change
offsetBucket = 60 * time.Minute.Milliseconds()
)

func whichTSTableToUse(start, end int64, mq *v3.BuilderQuery) (int64, int64, string) {
@ -104,7 +110,7 @@ func WhichSamplesTableToUse(start, end int64, mq *v3.BuilderQuery) string {
return constants.SIGNOZ_SAMPLES_V4_TABLENAME
}

if end-start < oneDayInMilliseconds {
if end-start < oneDayInMilliseconds+offsetBucket {
// if we are dealing with delta metrics and interval is greater than 5 minutes, we can use the 5m aggregated table
// why would interval be greater than 5 minutes?
// we allow people to configure the step interval so we can make use of this
@ -115,7 +121,7 @@ func WhichSamplesTableToUse(start, end int64, mq *v3.BuilderQuery) string {
return constants.SIGNOZ_SAMPLES_V4_AGG_30M_TABLENAME
}
return constants.SIGNOZ_SAMPLES_V4_TABLENAME
} else if end-start < oneWeekInMilliseconds {
} else if end-start < oneWeekInMilliseconds+offsetBucket {
return constants.SIGNOZ_SAMPLES_V4_AGG_5M_TABLENAME
} else {
return constants.SIGNOZ_SAMPLES_V4_AGG_30M_TABLENAME

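The offsetBucket comment captures the intent of these thresholds: a range that is only slightly longer than a day (because the start time is snapped to the step interval) should still be served from the raw samples table rather than the 5-minute pre-aggregated one. A minimal sketch of that boundary arithmetic, with illustrative table names and without the delta-metric/step-interval checks of the real WhichSamplesTableToUse:

```go
package main

import (
	"fmt"
	"time"
)

var (
	oneDay       = time.Hour.Milliseconds() * 24
	oneWeek      = oneDay * 7
	offsetBucket = 60 * time.Minute.Milliseconds() // 1h of slack around each boundary
)

// pickSamplesTable mirrors only the range-length thresholds; the real function
// also inspects temporality and the configured step interval.
func pickSamplesTable(start, end int64) string {
	switch {
	case end-start < oneDay+offsetBucket:
		return "samples_v4" // raw samples
	case end-start < oneWeek+offsetBucket:
		return "samples_v4_agg_5m"
	default:
		return "samples_v4_agg_30m"
	}
}

func main() {
	// 24h10m: without the offset this would tip over into the 5m table,
	// even though the user effectively asked for "the last day".
	fmt.Println(pickSamplesTable(0, oneDay+10*time.Minute.Milliseconds())) // samples_v4
	// 26h: clearly past the slack, so the 5m pre-aggregated table is used.
	fmt.Println(pickSamplesTable(0, oneDay+2*time.Hour.Milliseconds())) // samples_v4_agg_5m
}
```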
@ -49,7 +49,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by",
@ -82,7 +82,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC",
},
}

@ -123,7 +123,7 @@ func TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY ts ORDER BY ts ASC",
expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC",
},
{
name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name",
@ -149,7 +149,7 @@ func TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) {
TimeAggregation: v3.TimeAggregationRate,
SpaceAggregation: v3.SpaceAggregationSum,
},
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
}

@ -204,7 +204,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test temporality = cumulative, quantile = 0.99 without group by",
@ -234,7 +234,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(max) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
},
}

@ -289,7 +289,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC",
},
{
name: "test temporality = delta, quantile = 0.99 no group by",
@ -319,7 +319,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) {
Disabled: false,
SpaceAggregation: v3.SpaceAggregationPercentile99,
},
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC",
},
}

@ -360,7 +360,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) {
SpaceAggregation: v3.SpaceAggregationSum,
Disabled: false,
},
expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC",
},
{
name: "test gauge query with group by host_name",
@ -386,7 +386,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) {
Expression: "A",
Disabled: false,
},
expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC",
},
}

@ -14,7 +14,7 @@ import (
)

const (
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
)
@ -417,7 +417,7 @@ func (b *metricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start)
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {

@ -49,7 +49,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,

@ -176,7 +176,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947419000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY ALL) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ALL ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ALL) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983448000), "cumulative", false, "http_server_duration_bucket", uint64(1747947419000), uint64(1747983448000), 0},
},
expectedErr: nil,

@ -33,6 +33,12 @@ var (
sixHoursInMilliseconds = uint64(time.Hour.Milliseconds() * 6)
oneDayInMilliseconds = uint64(time.Hour.Milliseconds() * 24)
oneWeekInMilliseconds = uint64(oneDayInMilliseconds * 7)

// when the query requests for almost 1 day, but not exactly 1 day, we need to add an offset to the end time
// to make sure that we are using the correct table
// this is because the start gets adjusted to the nearest step interval and uses the 5m table for 4m step interval
// leading to time series that doesn't best represent the rate of change
offsetBucket = uint64(60 * time.Minute.Milliseconds())
)

func WhichTSTableToUse(
@ -119,9 +125,9 @@ func WhichSamplesTableToUse(
return SamplesV4TableName
}

if end-start < oneDayInMilliseconds {
if end-start < oneDayInMilliseconds+offsetBucket {
return SamplesV4TableName
} else if end-start < oneWeekInMilliseconds {
} else if end-start < oneWeekInMilliseconds+offsetBucket {
return SamplesV4Agg5mTableName
} else {
return SamplesV4Agg30mTableName
